diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 669844854b2fe5..9e190d43b28ef9 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -81,6 +81,17 @@ Programs/_bootstrap_python.c @ericsnowcurrently Programs/python.c @ericsnowcurrently Tools/build/generate_global_objects.py @ericsnowcurrently +# Initialization +Doc/library/sys_path_init.rst @FFY00 +Doc/c-api/init_config.rst @FFY00 + +# getpath +**/*getpath* @FFY00 + +# site +**/*site.py @FFY00 +Doc/library/site.rst @FFY00 + # Exceptions Lib/test/test_except*.py @iritkatriel Objects/exceptions.c @iritkatriel @@ -97,7 +108,7 @@ Modules/_hacl/** @gpshead **/*logging* @vsajip # venv -**/*venv* @vsajip +**/*venv* @vsajip @FFY00 # Launcher /PC/launcher.c @vsajip diff --git a/.github/actionlint.yaml b/.github/actionlint.yaml new file mode 100644 index 00000000000000..3701f7297ffeb2 --- /dev/null +++ b/.github/actionlint.yaml @@ -0,0 +1,10 @@ +self-hosted-runner: + labels: ["ubuntu-24.04-aarch64", "windows-aarch64"] + +config-variables: null + +paths: + .github/workflows/**/*.yml: + ignore: + - 1st argument of function call is not assignable + - SC2(015|038|086|091|097|098|129|155) \ No newline at end of file diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index c854c13e12f922..1f8c468475470c 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -53,7 +53,7 @@ jobs: steps: - name: Install Git run: | - apt install git -yq + apt update && apt install git -yq git config --global --add safe.directory "$GITHUB_WORKSPACE" - uses: actions/checkout@v4 with: @@ -150,16 +150,28 @@ jobs: needs: check_source if: fromJSON(needs.check_source.outputs.run_tests) strategy: + fail-fast: false matrix: + os: + - windows-latest arch: - - Win32 - - x64 - - arm64 + - x64 free-threading: - - false - - true + - false + - true + include: + - os: windows-latest # FIXME(diegorusso): change to os: windows-aarch64 + arch: arm64 + free-threading: false + - os: windows-latest # FIXME(diegorusso): change to os: windows-aarch64 + arch: arm64 + free-threading: true + - os: windows-latest + arch: Win32 + free-threading: false uses: ./.github/workflows/reusable-windows.yml with: + os: ${{ matrix.os }} arch: ${{ matrix.arch }} free-threading: ${{ matrix.free-threading }} @@ -238,7 +250,8 @@ jobs: fail-fast: false matrix: os: [ubuntu-24.04] - openssl_ver: [3.0.15, 3.1.7, 3.2.3, 3.3.2] + openssl_ver: [3.0.15, 3.1.7, 3.2.3, 3.3.2, 3.4.0] + # See Tools/ssl/make_ssl_data.py for notes on adding a new version env: OPENSSL_VER: ${{ matrix.openssl_ver }} MULTISSL_DIR: ${{ github.workspace }}/multissl diff --git a/.github/workflows/jit.yml b/.github/workflows/jit.yml index 35d5d59b762660..7dbbe71b2131e7 100644 --- a/.github/workflows/jit.yml +++ b/.github/workflows/jit.yml @@ -54,9 +54,7 @@ jobs: - x86_64-apple-darwin/clang - aarch64-apple-darwin/clang - x86_64-unknown-linux-gnu/gcc - - x86_64-unknown-linux-gnu/clang - aarch64-unknown-linux-gnu/gcc - - aarch64-unknown-linux-gnu/clang debug: - true - false @@ -66,41 +64,24 @@ jobs: - target: i686-pc-windows-msvc/msvc architecture: Win32 runner: windows-latest - compiler: msvc - target: x86_64-pc-windows-msvc/msvc architecture: x64 runner: windows-latest - compiler: msvc - target: aarch64-pc-windows-msvc/msvc architecture: ARM64 runner: windows-latest - compiler: msvc - target: x86_64-apple-darwin/clang architecture: x86_64 runner: macos-13 - compiler: clang - target: aarch64-apple-darwin/clang architecture: aarch64 runner: macos-14 - compiler: clang - target: 
x86_64-unknown-linux-gnu/gcc architecture: x86_64 runner: ubuntu-22.04 - compiler: gcc - - target: x86_64-unknown-linux-gnu/clang - architecture: x86_64 - runner: ubuntu-22.04 - compiler: clang - target: aarch64-unknown-linux-gnu/gcc architecture: aarch64 runner: ubuntu-22.04 - compiler: gcc - - target: aarch64-unknown-linux-gnu/clang - architecture: aarch64 - runner: ubuntu-22.04 - compiler: clang - env: - CC: ${{ matrix.compiler }} steps: - uses: actions/checkout@v4 - uses: actions/setup-python@v5 @@ -111,10 +92,10 @@ jobs: if: runner.os == 'Windows' && matrix.architecture != 'ARM64' run: | choco install llvm --allow-downgrade --no-progress --version ${{ matrix.llvm }}.1.0 - ./PCbuild/build.bat --experimental-jit ${{ matrix.debug && '-d' || '--pgo' }} -p ${{ matrix.architecture }} + ./PCbuild/build.bat --experimental-jit ${{ matrix.debug && '-d' || '' }} -p ${{ matrix.architecture }} ./PCbuild/rt.bat ${{ matrix.debug && '-d' || '' }} -p ${{ matrix.architecture }} -q --multiprocess 0 --timeout 4500 --verbose2 --verbose3 - # No PGO or tests (yet): + # No tests (yet): - name: Emulated Windows if: runner.os == 'Windows' && matrix.architecture == 'ARM64' run: | @@ -132,7 +113,7 @@ jobs: find /usr/local/bin -lname '*/Library/Frameworks/Python.framework/*' -delete brew install llvm@${{ matrix.llvm }} export SDKROOT="$(xcrun --show-sdk-path)" - ./configure --enable-experimental-jit ${{ matrix.debug && '--with-pydebug' || '--enable-optimizations --with-lto' }} + ./configure --enable-experimental-jit ${{ matrix.debug && '--with-pydebug' || '' }} make all --jobs 4 ./python.exe -m test --multiprocess 0 --timeout 4500 --verbose2 --verbose3 @@ -141,7 +122,7 @@ jobs: run: | sudo bash -c "$(wget -O - https://apt.llvm.org/llvm.sh)" ./llvm.sh ${{ matrix.llvm }} export PATH="$(llvm-config-${{ matrix.llvm }} --bindir):$PATH" - ./configure --enable-experimental-jit ${{ matrix.debug && '--with-pydebug' || '--enable-optimizations --with-lto' }} + ./configure --enable-experimental-jit ${{ matrix.debug && '--with-pydebug' || '' }} make all --jobs 4 ./python -m test --multiprocess 0 --timeout 4500 --verbose2 --verbose3 @@ -156,13 +137,11 @@ jobs: make clean --jobs 4 export HOST=${{ matrix.architecture }}-linux-gnu sudo apt install --yes "gcc-$HOST" qemu-user - ${{ !matrix.debug && matrix.compiler == 'clang' && './configure --enable-optimizations' || '' }} - ${{ !matrix.debug && matrix.compiler == 'clang' && 'make profile-run-stamp --jobs 4' || '' }} export QEMU_LD_PREFIX="/usr/$HOST" - CC="${{ matrix.compiler == 'clang' && 'clang --target=$HOST' || '$HOST-gcc' }}" \ - CPP="$CC --preprocess" \ + CC="$HOST-gcc" \ + CPP="$HOST-gcc --preprocess" \ HOSTRUNNER=qemu-${{ matrix.architecture }} \ - ./configure --enable-experimental-jit ${{ matrix.debug && '--with-pydebug' || '--with-lto' }} --build=x86_64-linux-gnu --host="$HOST" --with-build-python=../build/bin/python3 --with-pkg-config=no ac_cv_buggy_getaddrinfo=no ac_cv_file__dev_ptc=no ac_cv_file__dev_ptmx=yes + ./configure --enable-experimental-jit ${{ matrix.debug && '--with-pydebug' || '' }} --build=x86_64-linux-gnu --host="$HOST" --with-build-python=../build/bin/python3 --with-pkg-config=no ac_cv_buggy_getaddrinfo=no ac_cv_file__dev_ptc=no ac_cv_file__dev_ptmx=yes make all --jobs 4 ./python -m test --ignorefile=Tools/jit/ignore-tests-emulated-linux.txt --multiprocess 0 --timeout 4500 --verbose2 --verbose3 diff --git a/.github/workflows/reusable-macos.yml b/.github/workflows/reusable-macos.yml index 915481d0737c7d..4c3dd10194f8cb 100644 --- 
a/.github/workflows/reusable-macos.yml +++ b/.github/workflows/reusable-macos.yml @@ -37,7 +37,10 @@ jobs: path: config.cache key: ${{ github.job }}-${{ inputs.os }}-${{ env.IMAGE_VERSION }}-${{ inputs.config_hash }} - name: Install Homebrew dependencies - run: brew install pkg-config openssl@3.0 xz gdbm tcl-tk make + run: | + brew install pkg-config openssl@3.0 xz gdbm tcl-tk@8 make + # Because alternate versions are not symlinked into place by default: + brew link tcl-tk@8 - name: Configure CPython run: | GDBM_CFLAGS="-I$(brew --prefix gdbm)/include" \ diff --git a/.github/workflows/reusable-ubuntu.yml b/.github/workflows/reusable-ubuntu.yml index ec39025504efd1..642354f8b4f61b 100644 --- a/.github/workflows/reusable-ubuntu.yml +++ b/.github/workflows/reusable-ubuntu.yml @@ -20,7 +20,7 @@ jobs: strategy: fail-fast: false matrix: - os: [ubuntu-24.04] + os: [ubuntu-24.04, ubuntu-24.04-aarch64] env: FORCE_COLOR: 1 OPENSSL_VER: 3.0.15 @@ -82,11 +82,11 @@ jobs: - name: Build CPython out-of-tree if: ${{ inputs.free-threading }} working-directory: ${{ env.CPYTHON_BUILDDIR }} - run: make -j4 + run: make -j - name: Build CPython out-of-tree (for compiler warning check) if: ${{ !inputs.free-threading}} working-directory: ${{ env.CPYTHON_BUILDDIR }} - run: set -o pipefail; make -j4 --output-sync 2>&1 | tee compiler_output_ubuntu.txt + run: set -o pipefail; make -j --output-sync 2>&1 | tee compiler_output_ubuntu.txt - name: Display build info working-directory: ${{ env.CPYTHON_BUILDDIR }} run: make pythoninfo diff --git a/.github/workflows/reusable-windows.yml b/.github/workflows/reusable-windows.yml index dcfc62d7f5d145..12b68d68466d62 100644 --- a/.github/workflows/reusable-windows.yml +++ b/.github/workflows/reusable-windows.yml @@ -3,6 +3,10 @@ name: Reusable Windows on: workflow_call: inputs: + os: + description: OS to run on + required: true + type: string arch: description: CPU architecture required: true @@ -19,10 +23,8 @@ env: jobs: build: - name: >- - build${{ inputs.arch != 'arm64' && ' and test' || '' }} - (${{ inputs.arch }}) - runs-on: windows-latest + name: 'build and test (${{ inputs.arch }})' + runs-on: ${{ inputs.os }} timeout-minutes: 60 steps: - uses: actions/checkout@v4 @@ -31,17 +33,17 @@ jobs: run: echo "::add-matcher::.github/problem-matchers/msvc.json" - name: Build CPython run: >- - .\PCbuild\build.bat + .\\PCbuild\\build.bat -e -d -v -p ${{ inputs.arch }} ${{ fromJSON(inputs.free-threading) && '--disable-gil' || '' }} - - name: Display build info + - name: Display build info # FIXME(diegorusso): remove the `if` if: inputs.arch != 'arm64' - run: .\python.bat -m test.pythoninfo - - name: Tests + run: .\\python.bat -m test.pythoninfo + - name: Tests # FIXME(diegorusso): remove the `if` if: inputs.arch != 'arm64' run: >- - .\PCbuild\rt.bat + .\\PCbuild\\rt.bat -p ${{ inputs.arch }} -d -q --fast-ci ${{ fromJSON(inputs.free-threading) && '--disable-gil' || '' }} diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index ec769d7ff70314..ccaf2390d99fae 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -57,13 +57,9 @@ repos: - id: check-github-workflows - repo: https://github.com/rhysd/actionlint - rev: v1.7.3 + rev: v1.7.4 hooks: - id: actionlint - args: [ - -ignore=1st argument of function call is not assignable, - -ignore=SC2(015|038|086|091|097|098|129|155), - ] - repo: https://github.com/sphinx-contrib/sphinx-lint rev: v1.0.0 diff --git a/Doc/Makefile b/Doc/Makefile index a090ee5ba92705..22e43ee3e542ee 100644 --- a/Doc/Makefile +++ b/Doc/Makefile 
@@ -294,7 +294,7 @@ check: _ensure-pre-commit .PHONY: serve serve: - @echo "The serve target was removed, use htmlview instead (see bpo-36329)" + @echo "The serve target was removed, use htmllive instead (see gh-80510)" # Targets for daily automated doc build # By default, Sphinx only rebuilds pages where the page content has changed. diff --git a/Doc/c-api/complex.rst b/Doc/c-api/complex.rst index 16bd79475dc1e6..d1f5d8eda676ef 100644 --- a/Doc/c-api/complex.rst +++ b/Doc/c-api/complex.rst @@ -44,12 +44,36 @@ pointers. This is consistent throughout the API. representation. +.. c:function:: Py_complex _Py_cr_sum(Py_complex left, double right) + + Return the sum of a complex number and a real number, using the C :c:type:`Py_complex` + representation. + + .. versionadded:: 3.14 + + .. c:function:: Py_complex _Py_c_diff(Py_complex left, Py_complex right) Return the difference between two complex numbers, using the C :c:type:`Py_complex` representation. +.. c:function:: Py_complex _Py_cr_diff(Py_complex left, double right) + + Return the difference between a complex number and a real number, using the C + :c:type:`Py_complex` representation. + + .. versionadded:: 3.14 + + +.. c:function:: Py_complex _Py_rc_diff(double left, Py_complex right) + + Return the difference between a real number and a complex number, using the C + :c:type:`Py_complex` representation. + + .. versionadded:: 3.14 + + .. c:function:: Py_complex _Py_c_neg(Py_complex num) Return the negation of the complex number *num*, using the C @@ -62,6 +86,14 @@ pointers. This is consistent throughout the API. representation. +.. c:function:: Py_complex _Py_cr_prod(Py_complex left, double right) + + Return the product of a complex number and a real number, using the C + :c:type:`Py_complex` representation. + + .. versionadded:: 3.14 + + .. c:function:: Py_complex _Py_c_quot(Py_complex dividend, Py_complex divisor) Return the quotient of two complex numbers, using the C :c:type:`Py_complex` @@ -71,6 +103,28 @@ pointers. This is consistent throughout the API. :c:data:`errno` to :c:macro:`!EDOM`. +.. c:function:: Py_complex _Py_cr_quot(Py_complex dividend, double divisor) + + Return the quotient of a complex number and a real number, using the C + :c:type:`Py_complex` representation. + + If *divisor* is zero, this method returns zero and sets + :c:data:`errno` to :c:macro:`!EDOM`. + + .. versionadded:: 3.14 + + +.. c:function:: Py_complex _Py_rc_quot(double dividend, Py_complex divisor) + + Return the quotient of a real number and a complex number, using the C + :c:type:`Py_complex` representation. + + If *divisor* is zero, this method returns zero and sets + :c:data:`errno` to :c:macro:`!EDOM`. + + .. versionadded:: 3.14 + + .. c:function:: Py_complex _Py_c_pow(Py_complex num, Py_complex exp) Return the exponentiation of *num* by *exp*, using the C :c:type:`Py_complex` diff --git a/Doc/c-api/init.rst b/Doc/c-api/init.rst index 6e881590131cab..ba1c2852f0bd53 100644 --- a/Doc/c-api/init.rst +++ b/Doc/c-api/init.rst @@ -1379,6 +1379,9 @@ All of the following functions must be called after :c:func:`Py_Initialize`. This function now calls the :c:member:`PyThreadState.on_delete` callback. Previously, that happened in :c:func:`PyThreadState_Delete`. + .. versionchanged:: 3.13 + The :c:member:`PyThreadState.on_delete` callback was removed. + .. c:function:: void PyThreadState_Delete(PyThreadState *tstate) @@ -1735,7 +1738,11 @@ function. 
You can create and destroy them using the following functions: .check_multi_interp_extensions = 1, .gil = PyInterpreterConfig_OWN_GIL, }; - PyThreadState *tstate = Py_NewInterpreterFromConfig(&config); + PyThreadState *tstate = NULL; + PyStatus status = Py_NewInterpreterFromConfig(&tstate, &config); + if (PyStatus_Exception(status)) { + Py_ExitStatusException(status); + } Note that the config is used only briefly and does not get modified. During initialization the config's values are converted into various @@ -2463,7 +2470,7 @@ code triggered by the finalizer blocks and calls :c:func:`PyEval_SaveThread`. { PyCriticalSection2 _py_cs2; - PyCriticalSection_Begin2(&_py_cs2, (PyObject*)(a), (PyObject*)(b)) + PyCriticalSection2_Begin(&_py_cs2, (PyObject*)(a), (PyObject*)(b)) In the default build, this macro expands to ``{``. @@ -2475,7 +2482,7 @@ code triggered by the finalizer blocks and calls :c:func:`PyEval_SaveThread`. In the free-threaded build, this macro expands to:: - PyCriticalSection_End2(&_py_cs2); + PyCriticalSection2_End(&_py_cs2); } In the default build, this macro expands to ``}``. diff --git a/Doc/c-api/init_config.rst b/Doc/c-api/init_config.rst index 6194d7446c73e4..d6569ddcf586fa 100644 --- a/Doc/c-api/init_config.rst +++ b/Doc/c-api/init_config.rst @@ -6,6 +6,8 @@ Python Initialization Configuration *********************************** +.. _pyconfig_api: + PyConfig C API ============== @@ -1588,9 +1590,24 @@ If a ``._pth`` file is present: * Set :c:member:`~PyConfig.site_import` to ``0``. * Set :c:member:`~PyConfig.safe_path` to ``1``. +If :c:member:`~PyConfig.home` is not set and a ``pyvenv.cfg`` file is present in +the same directory as :c:member:`~PyConfig.executable`, or its parent, +:c:member:`~PyConfig.prefix` and :c:member:`~PyConfig.exec_prefix` are set to that +location. When this happens, :c:member:`~PyConfig.base_prefix` and +:c:member:`~PyConfig.base_exec_prefix` still keep their value, pointing to the +base installation. See :ref:`sys-path-init-virtual-environments` for more +information. + The ``__PYVENV_LAUNCHER__`` environment variable is used to set :c:member:`PyConfig.base_executable`. +.. versionchanged:: 3.14 + + :c:member:`~PyConfig.prefix`, and :c:member:`~PyConfig.exec_prefix`, are now + set to the ``pyvenv.cfg`` directory. This was previously done by :mod:`site`, + therefore affected by :option:`-S`. + +.. _pyinitconfig_api: PyInitConfig C API ================== diff --git a/Doc/c-api/long.rst b/Doc/c-api/long.rst index 32bb451b08d413..cb12d43d92026f 100644 --- a/Doc/c-api/long.rst +++ b/Doc/c-api/long.rst @@ -590,7 +590,7 @@ distinguished from a number. Use :c:func:`PyErr_Occurred` to disambiguate. return ``1`` when it's positive and ``0`` otherwise. Else set an exception and return ``-1``. - .. versionadded:: next + .. versionadded:: 3.14 .. c:function:: int PyLong_IsNegative(PyObject *obj) @@ -601,7 +601,7 @@ distinguished from a number. Use :c:func:`PyErr_Occurred` to disambiguate. return ``1`` when it's negative and ``0`` otherwise. Else set an exception and return ``-1``. - .. versionadded:: next + .. versionadded:: 3.14 .. c:function:: int PyLong_IsZero(PyObject *obj) @@ -612,7 +612,7 @@ distinguished from a number. Use :c:func:`PyErr_Occurred` to disambiguate. return ``1`` when it's zero and ``0`` otherwise. Else set an exception and return ``-1``. - .. versionadded:: next + .. versionadded:: 3.14 ..
c:function:: PyObject* PyLong_GetInfo(void) diff --git a/Doc/c-api/object.rst b/Doc/c-api/object.rst index 1e1cf6e6bfd7e9..1ae3c46bea46ea 100644 --- a/Doc/c-api/object.rst +++ b/Doc/c-api/object.rst @@ -85,7 +85,7 @@ Object Protocol instead of the :func:`repr`. -.. c:function:: int PyObject_HasAttrWithError(PyObject *o, const char *attr_name) +.. c:function:: int PyObject_HasAttrWithError(PyObject *o, PyObject *attr_name) Returns ``1`` if *o* has the attribute *attr_name*, and ``0`` otherwise. This is equivalent to the Python expression ``hasattr(o, attr_name)``. @@ -597,5 +597,5 @@ Object Protocol This function is intended to be used soon after *obj* is created, by the code that creates it. - .. versionadded:: next + .. versionadded:: 3.14 diff --git a/Doc/deprecations/pending-removal-in-future.rst b/Doc/deprecations/pending-removal-in-future.rst index 3be0dabfd1f257..ea7581bf16bd34 100644 --- a/Doc/deprecations/pending-removal-in-future.rst +++ b/Doc/deprecations/pending-removal-in-future.rst @@ -145,10 +145,6 @@ although there is currently no date scheduled for their removal. * ``splitvalue()`` * ``to_bytes()`` -* :mod:`urllib.request`: :class:`~urllib.request.URLopener` and - :class:`~urllib.request.FancyURLopener` style of invoking requests is - deprecated. Use newer :func:`~urllib.request.urlopen` functions and methods. - * :mod:`wsgiref`: ``SimpleHandler.stdout.write()`` should not do partial writes. diff --git a/Doc/howto/descriptor.rst b/Doc/howto/descriptor.rst index 01264bfe823746..f6c3e473f1c36d 100644 --- a/Doc/howto/descriptor.rst +++ b/Doc/howto/descriptor.rst @@ -42,7 +42,7 @@ add new capabilities one by one. Simple example: A descriptor that returns a constant ---------------------------------------------------- -The :class:`Ten` class is a descriptor whose :meth:`__get__` method always +The :class:`!Ten` class is a descriptor whose :meth:`~object.__get__` method always returns the constant ``10``: .. testcode:: @@ -120,10 +120,10 @@ different, updated answers each time:: 2 Besides showing how descriptors can run computations, this example also -reveals the purpose of the parameters to :meth:`__get__`. The *self* +reveals the purpose of the parameters to :meth:`~object.__get__`. The *self* parameter is *size*, an instance of *DirectorySize*. The *obj* parameter is either *g* or *s*, an instance of *Directory*. It is the *obj* parameter that -lets the :meth:`__get__` method learn the target directory. The *objtype* +lets the :meth:`~object.__get__` method learn the target directory. The *objtype* parameter is the class *Directory*. @@ -133,7 +133,7 @@ Managed attributes A popular use for descriptors is managing access to instance data. The descriptor is assigned to a public attribute in the class dictionary while the actual data is stored as a private attribute in the instance dictionary. The -descriptor's :meth:`__get__` and :meth:`__set__` methods are triggered when +descriptor's :meth:`~object.__get__` and :meth:`~object.__set__` methods are triggered when the public attribute is accessed. In the following example, *age* is the public attribute and *_age* is the @@ -215,9 +215,9 @@ Customized names When a class uses descriptors, it can inform each descriptor about which variable name was used. -In this example, the :class:`Person` class has two descriptor instances, -*name* and *age*. 
When the :class:`Person` class is defined, it makes a -callback to :meth:`__set_name__` in *LoggedAccess* so that the field names can +In this example, the :class:`!Person` class has two descriptor instances, +*name* and *age*. When the :class:`!Person` class is defined, it makes a +callback to :meth:`~object.__set_name__` in *LoggedAccess* so that the field names can be recorded, giving each descriptor its own *public_name* and *private_name*: .. testcode:: @@ -253,8 +253,8 @@ be recorded, giving each descriptor its own *public_name* and *private_name*: def birthday(self): self.age += 1 -An interactive session shows that the :class:`Person` class has called -:meth:`__set_name__` so that the field names would be recorded. Here +An interactive session shows that the :class:`!Person` class has called +:meth:`~object.__set_name__` so that the field names would be recorded. Here we call :func:`vars` to look up the descriptor without triggering it: .. doctest:: @@ -294,10 +294,10 @@ The two *Person* instances contain only the private names: Closing thoughts ---------------- -A :term:`descriptor` is what we call any object that defines :meth:`__get__`, -:meth:`__set__`, or :meth:`__delete__`. +A :term:`descriptor` is what we call any object that defines :meth:`~object.__get__`, +:meth:`~object.__set__`, or :meth:`~object.__delete__`. -Optionally, descriptors can have a :meth:`__set_name__` method. This is only +Optionally, descriptors can have a :meth:`~object.__set_name__` method. This is only used in cases where a descriptor needs to know either the class where it was created or the name of class variable it was assigned to. (This method, if present, is called even if the class is not a descriptor.) @@ -337,7 +337,7 @@ any data, it verifies that the new value meets various type and range restrictions. If those restrictions aren't met, it raises an exception to prevent data corruption at its source. -This :class:`Validator` class is both an :term:`abstract base class` and a +This :class:`!Validator` class is both an :term:`abstract base class` and a managed attribute descriptor: .. testcode:: @@ -360,8 +360,8 @@ managed attribute descriptor: def validate(self, value): pass -Custom validators need to inherit from :class:`Validator` and must supply a -:meth:`validate` method to test various restrictions as needed. +Custom validators need to inherit from :class:`!Validator` and must supply a +:meth:`!validate` method to test various restrictions as needed. Custom validators @@ -369,13 +369,13 @@ Custom validators Here are three practical data validation utilities: -1) :class:`OneOf` verifies that a value is one of a restricted set of options. +1) :class:`!OneOf` verifies that a value is one of a restricted set of options. -2) :class:`Number` verifies that a value is either an :class:`int` or +2) :class:`!Number` verifies that a value is either an :class:`int` or :class:`float`. Optionally, it verifies that a value is between a given minimum or maximum. -3) :class:`String` verifies that a value is a :class:`str`. Optionally, it +3) :class:`!String` verifies that a value is a :class:`str`. Optionally, it validates a given minimum or maximum length. It can validate a user-defined `predicate `_ as well. @@ -501,8 +501,8 @@ Definition and introduction --------------------------- In general, a descriptor is an attribute value that has one of the methods in -the descriptor protocol. Those methods are :meth:`__get__`, :meth:`__set__`, -and :meth:`__delete__`. 
If any of those methods are defined for an +the descriptor protocol. Those methods are :meth:`~object.__get__`, :meth:`~object.__set__`, +and :meth:`~object.__delete__`. If any of those methods are defined for an attribute, it is said to be a :term:`descriptor`. The default behavior for attribute access is to get, set, or delete the @@ -534,8 +534,8 @@ That is all there is to it. Define any of these methods and an object is considered a descriptor and can override default behavior upon being looked up as an attribute. -If an object defines :meth:`__set__` or :meth:`__delete__`, it is considered -a data descriptor. Descriptors that only define :meth:`__get__` are called +If an object defines :meth:`~object.__set__` or :meth:`~object.__delete__`, it is considered +a data descriptor. Descriptors that only define :meth:`~object.__get__` are called non-data descriptors (they are often used for methods but other uses are possible). @@ -545,9 +545,9 @@ has an entry with the same name as a data descriptor, the data descriptor takes precedence. If an instance's dictionary has an entry with the same name as a non-data descriptor, the dictionary entry takes precedence. -To make a read-only data descriptor, define both :meth:`__get__` and -:meth:`__set__` with the :meth:`__set__` raising an :exc:`AttributeError` when -called. Defining the :meth:`__set__` method with an exception raising +To make a read-only data descriptor, define both :meth:`~object.__get__` and +:meth:`~object.__set__` with the :meth:`~object.__set__` raising an :exc:`AttributeError` when +called. Defining the :meth:`~object.__set__` method with an exception raising placeholder is enough to make it a data descriptor. @@ -574,7 +574,7 @@ Invocation from an instance Instance lookup scans through a chain of namespaces giving data descriptors the highest priority, followed by instance variables, then non-data -descriptors, then class variables, and lastly :meth:`__getattr__` if it is +descriptors, then class variables, and lastly :meth:`~object.__getattr__` if it is provided. If a descriptor is found for ``a.x``, then it is invoked with: @@ -719,12 +719,12 @@ a pure Python equivalent: >>> object_getattribute(u2, 'x') == u2.x == (D1, u2, U2) True -Note, there is no :meth:`__getattr__` hook in the :meth:`__getattribute__` -code. That is why calling :meth:`__getattribute__` directly or with -``super().__getattribute__`` will bypass :meth:`__getattr__` entirely. +Note, there is no :meth:`~object.__getattr__` hook in the :meth:`~object.__getattribute__` +code. That is why calling :meth:`~object.__getattribute__` directly or with +``super().__getattribute__`` will bypass :meth:`~object.__getattr__` entirely. Instead, it is the dot operator and the :func:`getattr` function that are -responsible for invoking :meth:`__getattr__` whenever :meth:`__getattribute__` +responsible for invoking :meth:`~object.__getattr__` whenever :meth:`~object.__getattribute__` raises an :exc:`AttributeError`. Their logic is encapsulated in a helper function: @@ -776,8 +776,8 @@ Invocation from a class ----------------------- The logic for a dotted lookup such as ``A.x`` is in -:meth:`type.__getattribute__`. The steps are similar to those for -:meth:`object.__getattribute__` but the instance dictionary lookup is replaced +:meth:`!type.__getattribute__`. The steps are similar to those for +:meth:`!object.__getattribute__` but the instance dictionary lookup is replaced by a search through the class's :term:`method resolution order`. 
If a descriptor is found, it is invoked with ``desc.__get__(None, A)``. @@ -789,7 +789,7 @@ The full C implementation can be found in :c:func:`!type_getattro` and Invocation from super --------------------- -The logic for super's dotted lookup is in the :meth:`__getattribute__` method for +The logic for super's dotted lookup is in the :meth:`~object.__getattribute__` method for object returned by :func:`super`. A dotted lookup such as ``super(A, obj).m`` searches ``obj.__class__.__mro__`` @@ -806,21 +806,21 @@ The full C implementation can be found in :c:func:`!super_getattro` in Summary of invocation logic --------------------------- -The mechanism for descriptors is embedded in the :meth:`__getattribute__` +The mechanism for descriptors is embedded in the :meth:`~object.__getattribute__` methods for :class:`object`, :class:`type`, and :func:`super`. The important points to remember are: -* Descriptors are invoked by the :meth:`__getattribute__` method. +* Descriptors are invoked by the :meth:`~object.__getattribute__` method. * Classes inherit this machinery from :class:`object`, :class:`type`, or :func:`super`. -* Overriding :meth:`__getattribute__` prevents automatic descriptor calls +* Overriding :meth:`~object.__getattribute__` prevents automatic descriptor calls because all the descriptor logic is in that method. -* :meth:`object.__getattribute__` and :meth:`type.__getattribute__` make - different calls to :meth:`__get__`. The first includes the instance and may +* :meth:`!object.__getattribute__` and :meth:`!type.__getattribute__` make + different calls to :meth:`~object.__get__`. The first includes the instance and may include the class. The second puts in ``None`` for the instance and always includes the class. @@ -835,16 +835,16 @@ Automatic name notification Sometimes it is desirable for a descriptor to know what class variable name it was assigned to. When a new class is created, the :class:`type` metaclass scans the dictionary of the new class. If any of the entries are descriptors -and if they define :meth:`__set_name__`, that method is called with two +and if they define :meth:`~object.__set_name__`, that method is called with two arguments. The *owner* is the class where the descriptor is used, and the *name* is the class variable the descriptor was assigned to. The implementation details are in :c:func:`!type_new` and :c:func:`!set_names` in :source:`Objects/typeobject.c`. -Since the update logic is in :meth:`type.__new__`, notifications only take +Since the update logic is in :meth:`!type.__new__`, notifications only take place at the time of class creation. If descriptors are added to the class -afterwards, :meth:`__set_name__` will need to be called manually. +afterwards, :meth:`~object.__set_name__` will need to be called manually. ORM example @@ -873,7 +873,7 @@ care of lookups or updates: conn.execute(self.store, [value, obj.key]) conn.commit() -We can use the :class:`Field` class to define `models +We can use the :class:`!Field` class to define `models `_ that describe the schema for each table in a database: @@ -1140,7 +1140,7 @@ to wrap access to the value attribute in a property data descriptor: self.recalc() return self._value -Either the built-in :func:`property` or our :func:`Property` equivalent would +Either the built-in :func:`property` or our :func:`!Property` equivalent would work in this example. 
@@ -1187,7 +1187,7 @@ roughly equivalent to: return self To support automatic creation of methods, functions include the -:meth:`__get__` method for binding methods during attribute access. This +:meth:`~object.__get__` method for binding methods during attribute access. This means that functions are non-data descriptors that return bound methods during dotted lookup from an instance. Here's how it works: @@ -1231,19 +1231,19 @@ The function has a :term:`qualified name` attribute to support introspection: 'D.f' Accessing the function through the class dictionary does not invoke -:meth:`__get__`. Instead, it just returns the underlying function object:: +:meth:`~object.__get__`. Instead, it just returns the underlying function object:: >>> D.__dict__['f'] -Dotted access from a class calls :meth:`__get__` which just returns the +Dotted access from a class calls :meth:`~object.__get__` which just returns the underlying function unchanged:: >>> D.f The interesting behavior occurs during dotted access from an instance. The -dotted lookup calls :meth:`__get__` which returns a bound method object:: +dotted lookup calls :meth:`~object.__get__` which returns a bound method object:: >>> d = D() >>> d.f @@ -1268,7 +1268,7 @@ Kinds of methods Non-data descriptors provide a simple mechanism for variations on the usual patterns of binding functions into methods. -To recap, functions have a :meth:`__get__` method so that they can be converted +To recap, functions have a :meth:`~object.__get__` method so that they can be converted to a method when accessed as attributes. The non-data descriptor transforms an ``obj.f(*args)`` call into ``f(obj, *args)``. Calling ``cls.f(*args)`` becomes ``f(*args)``. @@ -1671,7 +1671,7 @@ by member descriptors: 'Emulate member_repr() in Objects/descrobject.c' return f'' -The :meth:`type.__new__` method takes care of adding member objects to class +The :meth:`!type.__new__` method takes care of adding member objects to class variables: .. testcode:: @@ -1722,7 +1722,7 @@ Python: ) super().__delattr__(name) -To use the simulation in a real class, just inherit from :class:`Object` and +To use the simulation in a real class, just inherit from :class:`!Object` and set the :term:`metaclass` to :class:`Type`: .. testcode:: diff --git a/Doc/howto/enum.rst b/Doc/howto/enum.rst index 66929b4104d8de..6441b7aed1eda8 100644 --- a/Doc/howto/enum.rst +++ b/Doc/howto/enum.rst @@ -64,12 +64,12 @@ The *type* of an enumeration member is the enum it belongs to:: >>> isinstance(Weekday.FRIDAY, Weekday) True -Enum members have an attribute that contains just their :attr:`name`:: +Enum members have an attribute that contains just their :attr:`!name`:: >>> print(Weekday.TUESDAY.name) TUESDAY -Likewise, they have an attribute for their :attr:`value`:: +Likewise, they have an attribute for their :attr:`!value`:: >>> Weekday.WEDNESDAY.value @@ -77,17 +77,18 @@ Likewise, they have an attribute for their :attr:`value`:: Unlike many languages that treat enumerations solely as name/value pairs, Python Enums can have behavior added. For example, :class:`datetime.date` -has two methods for returning the weekday: :meth:`weekday` and :meth:`isoweekday`. +has two methods for returning the weekday: +:meth:`~datetime.date.weekday` and :meth:`~datetime.date.isoweekday`. The difference is that one of them counts from 0-6 and the other from 1-7. 
-Rather than keep track of that ourselves we can add a method to the :class:`Weekday` -enum to extract the day from the :class:`date` instance and return the matching +Rather than keep track of that ourselves we can add a method to the :class:`!Weekday` +enum to extract the day from the :class:`~datetime.date` instance and return the matching enum member:: @classmethod def from_date(cls, date): return cls(date.isoweekday()) -The complete :class:`Weekday` enum now looks like this:: +The complete :class:`!Weekday` enum now looks like this:: >>> class Weekday(Enum): ... MONDAY = 1 @@ -110,7 +111,7 @@ Now we can find out what today is! Observe:: Of course, if you're reading this on some other day, you'll see that day instead. -This :class:`Weekday` enum is great if our variable only needs one day, but +This :class:`!Weekday` enum is great if our variable only needs one day, but what if we need several? Maybe we're writing a function to plot chores during a week, and don't want to use a :class:`list` -- we could use a different type of :class:`Enum`:: @@ -128,7 +129,7 @@ of :class:`Enum`:: We've changed two things: we're inherited from :class:`Flag`, and the values are all powers of 2. -Just like the original :class:`Weekday` enum above, we can have a single selection:: +Just like the original :class:`!Weekday` enum above, we can have a single selection:: >>> first_week_day = Weekday.MONDAY >>> first_week_day @@ -203,7 +204,7 @@ If you want to access enum members by *name*, use item access:: >>> Color['GREEN'] -If you have an enum member and need its :attr:`name` or :attr:`value`:: +If you have an enum member and need its :attr:`!name` or :attr:`!value`:: >>> member = Color.RED >>> member.name @@ -284,7 +285,7 @@ If the exact value is unimportant you can use :class:`auto`:: >>> [member.value for member in Color] [1, 2, 3] -The values are chosen by :func:`_generate_next_value_`, which can be +The values are chosen by :func:`~Enum._generate_next_value_`, which can be overridden:: >>> class AutoName(Enum): @@ -303,7 +304,7 @@ overridden:: .. note:: - The :meth:`_generate_next_value_` method must be defined before any members. + The :meth:`~Enum._generate_next_value_` method must be defined before any members. Iteration --------- @@ -424,18 +425,18 @@ Then:: The rules for what is allowed are as follows: names that start and end with a single underscore are reserved by enum and cannot be used; all other attributes defined within an enumeration will become members of this -enumeration, with the exception of special methods (:meth:`__str__`, -:meth:`__add__`, etc.), descriptors (methods are also descriptors), and -variable names listed in :attr:`_ignore_`. +enumeration, with the exception of special methods (:meth:`~object.__str__`, +:meth:`~object.__add__`, etc.), descriptors (methods are also descriptors), and +variable names listed in :attr:`~Enum._ignore_`. -Note: if your enumeration defines :meth:`__new__` and/or :meth:`__init__`, +Note: if your enumeration defines :meth:`~object.__new__` and/or :meth:`~object.__init__`, any value(s) given to the enum member will be passed into those methods. See `Planet`_ for an example. .. 
note:: - The :meth:`__new__` method, if defined, is used during creation of the Enum - members; it is then replaced by Enum's :meth:`__new__` which is used after + The :meth:`~object.__new__` method, if defined, is used during creation of the Enum + members; it is then replaced by Enum's :meth:`~object.__new__` which is used after class creation for lookup of existing members. See :ref:`new-vs-init` for more details. @@ -544,7 +545,7 @@ from that module. nested in other classes. It is possible to modify how enum members are pickled/unpickled by defining -:meth:`__reduce_ex__` in the enumeration class. The default method is by-value, +:meth:`~object.__reduce_ex__` in the enumeration class. The default method is by-value, but enums with complicated values may want to use by-name:: >>> import enum @@ -580,7 +581,7 @@ values. The last two options enable assigning arbitrary values to enumerations; the others auto-assign increasing integers starting with 1 (use the ``start`` parameter to specify a different starting value). A new class derived from :class:`Enum` is returned. In other words, the above -assignment to :class:`Animal` is equivalent to:: +assignment to :class:`!Animal` is equivalent to:: >>> class Animal(Enum): ... ANT = 1 @@ -891,7 +892,7 @@ simple to implement independently:: pass This demonstrates how similar derived enumerations can be defined; for example -a :class:`FloatEnum` that mixes in :class:`float` instead of :class:`int`. +a :class:`!FloatEnum` that mixes in :class:`float` instead of :class:`int`. Some rules: @@ -905,32 +906,32 @@ Some rules: additional type, all the members must have values of that type, e.g. :class:`int` above. This restriction does not apply to mix-ins which only add methods and don't specify another type. -4. When another data type is mixed in, the :attr:`value` attribute is *not the +4. When another data type is mixed in, the :attr:`~Enum.value` attribute is *not the same* as the enum member itself, although it is equivalent and will compare equal. -5. A ``data type`` is a mixin that defines :meth:`__new__`, or a +5. A ``data type`` is a mixin that defines :meth:`~object.__new__`, or a :class:`~dataclasses.dataclass` 6. %-style formatting: ``%s`` and ``%r`` call the :class:`Enum` class's - :meth:`__str__` and :meth:`__repr__` respectively; other codes (such as + :meth:`~object.__str__` and :meth:`~object.__repr__` respectively; other codes (such as ``%i`` or ``%h`` for IntEnum) treat the enum member as its mixed-in type. 7. :ref:`Formatted string literals `, :meth:`str.format`, - and :func:`format` will use the enum's :meth:`__str__` method. + and :func:`format` will use the enum's :meth:`~object.__str__` method. .. note:: Because :class:`IntEnum`, :class:`IntFlag`, and :class:`StrEnum` are designed to be drop-in replacements for existing constants, their - :meth:`__str__` method has been reset to their data types' - :meth:`__str__` method. + :meth:`~object.__str__` method has been reset to their data types' + :meth:`~object.__str__` method. .. _new-vs-init: -When to use :meth:`__new__` vs. :meth:`__init__` ------------------------------------------------- +When to use :meth:`~object.__new__` vs. :meth:`~object.__init__` +---------------------------------------------------------------- -:meth:`__new__` must be used whenever you want to customize the actual value of +:meth:`~object.__new__` must be used whenever you want to customize the actual value of the :class:`Enum` member. 
Any other modifications may go in either -:meth:`__new__` or :meth:`__init__`, with :meth:`__init__` being preferred. +:meth:`~object.__new__` or :meth:`~object.__init__`, with :meth:`~object.__init__` being preferred. For example, if you want to pass several items to the constructor, but only want one of them to be the value:: @@ -969,11 +970,11 @@ Finer Points Supported ``__dunder__`` names """""""""""""""""""""""""""""" -:attr:`__members__` is a read-only ordered mapping of ``member_name``:``member`` +:attr:`~enum.EnumType.__members__` is a read-only ordered mapping of ``member_name``:``member`` items. It is only available on the class. -:meth:`__new__`, if specified, must create and return the enum members; it is -also a very good idea to set the member's :attr:`_value_` appropriately. Once +:meth:`~object.__new__`, if specified, must create and return the enum members; it is +also a very good idea to set the member's :attr:`~Enum._value_` appropriately. Once all the members are created it is no longer used. @@ -989,9 +990,9 @@ Supported ``_sunder_`` names from the final class - :meth:`~Enum._generate_next_value_` -- used to get an appropriate value for an enum member; may be overridden -- :meth:`~Enum._add_alias_` -- adds a new name as an alias to an existing +- :meth:`~EnumType._add_alias_` -- adds a new name as an alias to an existing member. -- :meth:`~Enum._add_value_alias_` -- adds a new value as an alias to an +- :meth:`~EnumType._add_value_alias_` -- adds a new value as an alias to an existing member. See `MultiValueEnum`_ for an example. .. note:: @@ -1009,7 +1010,7 @@ Supported ``_sunder_`` names .. versionadded:: 3.7 ``_ignore_`` .. versionadded:: 3.13 ``_add_alias_``, ``_add_value_alias_`` -To help keep Python 2 / Python 3 code in sync an :attr:`_order_` attribute can +To help keep Python 2 / Python 3 code in sync an :attr:`~Enum._order_` attribute can be provided. It will be checked against the actual order of the enumeration and raise an error if the two do not match:: @@ -1027,7 +1028,7 @@ and raise an error if the two do not match:: .. note:: - In Python 2 code the :attr:`_order_` attribute is necessary as definition + In Python 2 code the :attr:`~Enum._order_` attribute is necessary as definition order is lost before it can be recorded. @@ -1216,12 +1217,12 @@ Enum Classes ^^^^^^^^^^^^ The :class:`EnumType` metaclass is responsible for providing the -:meth:`__contains__`, :meth:`__dir__`, :meth:`__iter__` and other methods that +:meth:`~object.__contains__`, :meth:`~object.__dir__`, :meth:`~object.__iter__` and other methods that allow one to do things with an :class:`Enum` class that fail on a typical class, such as ``list(Color)`` or ``some_enum_var in Color``. :class:`EnumType` is responsible for ensuring that various other methods on the final :class:`Enum` -class are correct (such as :meth:`__new__`, :meth:`__getnewargs__`, -:meth:`__str__` and :meth:`__repr__`). +class are correct (such as :meth:`~object.__new__`, :meth:`~object.__getnewargs__`, +:meth:`~object.__str__` and :meth:`~object.__repr__`). Flag Classes ^^^^^^^^^^^^ @@ -1236,7 +1237,7 @@ Enum Members (aka instances) The most interesting thing about enum members is that they are singletons. :class:`EnumType` creates them all while it is creating the enum class itself, -and then puts a custom :meth:`__new__` in place to ensure that no new ones are +and then puts a custom :meth:`~object.__new__` in place to ensure that no new ones are ever instantiated by returning only the existing member instances. 
Flag Members @@ -1284,7 +1285,7 @@ is. There are several ways to define this type of simple enumeration: - use instances of :class:`auto` for the value - use instances of :class:`object` as the value - use a descriptive string as the value -- use a tuple as the value and a custom :meth:`__new__` to replace the +- use a tuple as the value and a custom :meth:`~object.__new__` to replace the tuple with an :class:`int` value Using any of these methods signifies to the user that these values are not @@ -1320,7 +1321,7 @@ Using :class:`object` would look like:: > This is also a good example of why you might want to write your own -:meth:`__repr__`:: +:meth:`~object.__repr__`:: >>> class Color(Enum): ... RED = object() @@ -1348,10 +1349,10 @@ Using a string as the value would look like:: -Using a custom :meth:`__new__` -"""""""""""""""""""""""""""""" +Using a custom :meth:`~object.__new__` +"""""""""""""""""""""""""""""""""""""" -Using an auto-numbering :meth:`__new__` would look like:: +Using an auto-numbering :meth:`~object.__new__` would look like:: >>> class AutoNumber(Enum): ... def __new__(cls): @@ -1397,8 +1398,8 @@ to handle any extra arguments:: .. note:: - The :meth:`__new__` method, if defined, is used during creation of the Enum - members; it is then replaced by Enum's :meth:`__new__` which is used after + The :meth:`~object.__new__` method, if defined, is used during creation of the Enum + members; it is then replaced by Enum's :meth:`~object.__new__` which is used after class creation for lookup of existing members. .. warning:: @@ -1504,7 +1505,7 @@ Supports having more than one value per member:: Planet ^^^^^^ -If :meth:`__new__` or :meth:`__init__` is defined, the value of the enum member +If :meth:`~object.__new__` or :meth:`~object.__init__` is defined, the value of the enum member will be passed to those methods:: >>> class Planet(Enum): @@ -1535,7 +1536,7 @@ will be passed to those methods:: TimePeriod ^^^^^^^^^^ -An example to show the :attr:`_ignore_` attribute in use:: +An example to show the :attr:`~Enum._ignore_` attribute in use:: >>> from datetime import timedelta >>> class Period(timedelta, Enum): diff --git a/Doc/library/annotationlib.rst b/Doc/library/annotationlib.rst index 37490456d13312..dcaff3d7fdbec5 100644 --- a/Doc/library/annotationlib.rst +++ b/Doc/library/annotationlib.rst @@ -144,6 +144,17 @@ Classes The exact values of these strings may change in future versions of Python. + .. attribute:: VALUE_WITH_FAKE_GLOBALS + :value: 4 + + Special value used to signal that an annotate function is being + evaluated in a special environment with fake globals. When passed this + value, annotate functions should either return the same value as for + the :attr:`Format.VALUE` format, or raise :exc:`NotImplementedError` + to signal that they do not support execution in this environment. + This format is only used internally and should not be passed to + the functions in this module. + .. versionadded:: 3.14 .. class:: ForwardRef diff --git a/Doc/library/argparse.rst b/Doc/library/argparse.rst index 7638798ca2552f..da4071dee34b8c 100644 --- a/Doc/library/argparse.rst +++ b/Doc/library/argparse.rst @@ -192,6 +192,12 @@ arguments it contains. The default message can be overridden with the The ``%(prog)s`` format specifier is available to fill in the program name in your usage messages. 
+When a custom usage message is specified for the main parser, you may also want to +consider passing the ``prog`` argument to :meth:`~ArgumentParser.add_subparsers` +or the ``prog`` and the ``usage`` arguments to +:meth:`~_SubParsersAction.add_parser`, to ensure consistent command prefixes and +usage information across subparsers. + .. _description: @@ -583,6 +589,14 @@ are strings:: >>> parser.parse_args(['--action', 'sumn', 1, 2, 3]) tester.py: error: argument --action: invalid choice: 'sumn', maybe you meant 'sum'? (choose from 'sum', 'max') +If you're writing code that needs to be compatible with older Python versions +and want to opportunistically use ``suggest_on_error`` when it's available, you +can set it as an attribute after initializing the parser instead of using the +keyword argument:: + + >>> parser = argparse.ArgumentParser(description='Process some integers.') + >>> parser.suggest_on_error = True + .. versionadded:: 3.14 @@ -801,7 +815,8 @@ Only actions that consume command-line arguments (e.g. ``'store'``, The recommended way to create a custom action is to extend :class:`Action`, overriding the :meth:`!__call__` method and optionally the :meth:`!__init__` and -:meth:`!format_usage` methods. +:meth:`!format_usage` methods. You can also register custom actions using the +:meth:`~ArgumentParser.register` method and reference them by their registered name. An example of a custom action:: @@ -1020,10 +1035,11 @@ necessary type-checking and type conversions to be performed. If the type_ keyword is used with the default_ keyword, the type converter is only applied if the default is a string. -The argument to ``type`` can be any callable that accepts a single string. +The argument to ``type`` can be a callable that accepts a single string or +the name of a registered type (see :meth:`~ArgumentParser.register`) If the function raises :exc:`ArgumentTypeError`, :exc:`TypeError`, or :exc:`ValueError`, the exception is caught and a nicely formatted error -message is displayed. No other exception types are handled. +message is displayed. Other exception types are not handled. Common built-in types and functions can be used as type converters: @@ -1808,6 +1824,10 @@ Sub-commands .. versionchanged:: 3.7 New *required* keyword-only parameter. + .. versionchanged:: 3.14 + Subparser's *prog* is no longer affected by a custom usage message in + the main parser. + FileType objects ^^^^^^^^^^^^^^^^ @@ -1906,11 +1926,10 @@ Argument groups Note that any arguments not in your user-defined groups will end up back in the usual "positional arguments" and "optional arguments" sections. - .. versionchanged:: 3.11 - Calling :meth:`add_argument_group` on an argument group is deprecated. - This feature was never supported and does not always work correctly. - The function exists on the API by accident through inheritance and - will be removed in the future. + .. deprecated-removed:: 3.11 3.14 + Calling :meth:`add_argument_group` on an argument group now raises an + exception. This nesting was never supported, often failed to work + correctly, and was unintentionally exposed through inheritance. .. deprecated:: 3.14 Passing prefix_chars_ to :meth:`add_argument_group` @@ -1973,11 +1992,11 @@ Mutual exclusion --foo FOO foo help --bar BAR bar help - .. versionchanged:: 3.11 - Calling :meth:`add_argument_group` or :meth:`add_mutually_exclusive_group` - on a mutually exclusive group is deprecated. These features were never - supported and do not always work correctly. 
The functions exist on the - API by accident through inheritance and will be removed in the future. + .. deprecated-removed:: 3.11 3.14 + Calling :meth:`add_argument_group` or :meth:`add_mutually_exclusive_group` + on a mutually exclusive group now raises an exception. This nesting was + never supported, often failed to work correctly, and was unintentionally + exposed through inheritance. Parser defaults @@ -2163,6 +2182,34 @@ Intermixed parsing .. versionadded:: 3.7 +Registering custom types or actions +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +.. method:: ArgumentParser.register(registry_name, value, object) + + Sometimes it's desirable to use a custom string in error messages to provide + more user-friendly output. In these cases, :meth:`!register` can be used to + register custom actions or types with a parser and allow you to reference the + type by their registered name instead of their callable name. + + The :meth:`!register` method accepts three arguments - a *registry_name*, + specifying the internal registry where the object will be stored (e.g., + ``action``, ``type``), *value*, which is the key under which the object will + be registered, and object, the callable to be registered. + + The following example shows how to register a custom type with a parser:: + + >>> import argparse + >>> parser = argparse.ArgumentParser() + >>> parser.register('type', 'hexadecimal integer', lambda s: int(s, 16)) + >>> parser.add_argument('--foo', type='hexadecimal integer') + _StoreAction(option_strings=['--foo'], dest='foo', nargs=None, const=None, default=None, type='hexadecimal integer', choices=None, required=False, help=None, metavar=None, deprecated=False) + >>> parser.parse_args(['--foo', '0xFA']) + Namespace(foo=250) + >>> parser.parse_args(['--foo', '1.2']) + usage: PROG [-h] [--foo FOO] + PROG: error: argument --foo: invalid 'hexadecimal integer' value: '1.2' + Exceptions ---------- diff --git a/Doc/library/asyncio-stream.rst b/Doc/library/asyncio-stream.rst index 3fdc79b3c6896c..48f2890c5eef8c 100644 --- a/Doc/library/asyncio-stream.rst +++ b/Doc/library/asyncio-stream.rst @@ -92,7 +92,8 @@ and work with streams: family=socket.AF_UNSPEC, \ flags=socket.AI_PASSIVE, sock=None, \ backlog=100, ssl=None, reuse_address=None, \ - reuse_port=None, ssl_handshake_timeout=None, \ + reuse_port=None, keep_alive=None, \ + ssl_handshake_timeout=None, \ ssl_shutdown_timeout=None, start_serving=True) Start a socket server. @@ -128,6 +129,9 @@ and work with streams: .. versionchanged:: 3.11 Added the *ssl_shutdown_timeout* parameter. + .. versionchanged:: 3.13 + Added the *keep_alive* parameter. + .. rubric:: Unix Sockets diff --git a/Doc/library/cmath.rst b/Doc/library/cmath.rst index f122e3644ece56..e7c027dd4d0c22 100644 --- a/Doc/library/cmath.rst +++ b/Doc/library/cmath.rst @@ -24,17 +24,17 @@ the function is then applied to the result of the conversion. imaginary axis we look at the sign of the real part. For example, the :func:`cmath.sqrt` function has a branch cut along the - negative real axis. An argument of ``complex(-2.0, -0.0)`` is treated as + negative real axis. 
An argument of ``-2-0j`` is treated as though it lies *below* the branch cut, and so gives a result on the negative imaginary axis:: - >>> cmath.sqrt(complex(-2.0, -0.0)) + >>> cmath.sqrt(-2-0j) -1.4142135623730951j - But an argument of ``complex(-2.0, 0.0)`` is treated as though it lies above + But an argument of ``-2+0j`` is treated as though it lies above the branch cut:: - >>> cmath.sqrt(complex(-2.0, 0.0)) + >>> cmath.sqrt(-2+0j) 1.4142135623730951j @@ -63,9 +63,9 @@ rectangular coordinates to polar coordinates and back. along the negative real axis. The sign of the result is the same as the sign of ``x.imag``, even when ``x.imag`` is zero:: - >>> phase(complex(-1.0, 0.0)) + >>> phase(-1+0j) 3.141592653589793 - >>> phase(complex(-1.0, -0.0)) + >>> phase(-1-0j) -3.141592653589793 diff --git a/Doc/library/codecs.rst b/Doc/library/codecs.rst index 2cfd8a1eaee806..a129a26190ba99 100644 --- a/Doc/library/codecs.rst +++ b/Doc/library/codecs.rst @@ -1042,6 +1042,10 @@ is meant to be exhaustive. Notice that spelling alternatives that only differ in case or use a hyphen instead of an underscore are also valid aliases; therefore, e.g. ``'utf-8'`` is a valid alias for the ``'utf_8'`` codec. +On Windows, ``cpXXX`` codecs are available for all code pages. +But only codecs listed in the following table are guaranteed to exist on +other platforms. + .. impl-detail:: Some common encodings can bypass the codecs lookup machinery to @@ -1307,6 +1311,9 @@ particular, the following variants typically exist: .. versionchanged:: 3.8 ``cp65001`` is now an alias to ``utf_8``. +.. versionchanged:: 3.14 + On Windows, ``cpXXX`` codecs are now available for all code pages. + Python Specific Encodings ------------------------- diff --git a/Doc/library/concurrent.futures.rst b/Doc/library/concurrent.futures.rst index 48e027152a9851..5a950081a1c98d 100644 --- a/Doc/library/concurrent.futures.rst +++ b/Doc/library/concurrent.futures.rst @@ -252,7 +252,7 @@ This results in several benefits that help balance the extra effort, including true multi-core parallelism, For example, code written this way can make it easier to reason about concurrency. Another major benefit is that you don't have to deal with several of the -big pain points of using threads, like nrace conditions. +big pain points of using threads, like race conditions. Each worker's interpreter is isolated from all the other interpreters. "Isolated" means each interpreter has its own runtime state and @@ -690,7 +690,7 @@ Exception classes of a :class:`~concurrent.futures.InterpreterPoolExecutor` has failed initializing. - .. versionadded:: next + .. versionadded:: 3.14 .. exception:: ExecutionFailed @@ -699,7 +699,7 @@ Exception classes :meth:`~concurrent.futures.Executor.submit` when there's an uncaught exception from the submitted task. - .. versionadded:: next + .. versionadded:: 3.14 .. currentmodule:: concurrent.futures.process diff --git a/Doc/library/contextlib.rst b/Doc/library/contextlib.rst index f5b349441bcfee..e8f264f949807d 100644 --- a/Doc/library/contextlib.rst +++ b/Doc/library/contextlib.rst @@ -151,9 +151,9 @@ Functions and classes provided: created by :func:`asynccontextmanager` to meet the requirement that context managers support multiple invocations in order to be used as decorators. - .. versionchanged:: 3.10 - Async context managers created with :func:`asynccontextmanager` can - be used as decorators. + .. versionchanged:: 3.10 + Async context managers created with :func:`asynccontextmanager` can + be used as decorators. ..
function:: closing(thing) diff --git a/Doc/library/ctypes.rst b/Doc/library/ctypes.rst index 99909de20ef439..bd9529db9ee65a 100644 --- a/Doc/library/ctypes.rst +++ b/Doc/library/ctypes.rst @@ -1413,13 +1413,15 @@ way is to instantiate one of the following classes: .. class:: OleDLL(name, mode=DEFAULT_MODE, handle=None, use_errno=False, use_last_error=False, winmode=None) - Windows only: Instances of this class represent loaded shared libraries, + Instances of this class represent loaded shared libraries, functions in these libraries use the ``stdcall`` calling convention, and are assumed to return the windows specific :class:`HRESULT` code. :class:`HRESULT` values contain information specifying whether the function call failed or succeeded, together with additional error code. If the return value signals a failure, an :class:`OSError` is automatically raised. + .. availability:: Windows + .. versionchanged:: 3.3 :exc:`WindowsError` used to be raised, which is now an alias of :exc:`OSError`. @@ -1431,14 +1433,17 @@ way is to instantiate one of the following classes: .. class:: WinDLL(name, mode=DEFAULT_MODE, handle=None, use_errno=False, use_last_error=False, winmode=None) - Windows only: Instances of this class represent loaded shared libraries, + Instances of this class represent loaded shared libraries, functions in these libraries use the ``stdcall`` calling convention, and are assumed to return :c:expr:`int` by default. + .. availability:: Windows + .. versionchanged:: 3.12 The *name* parameter can now be a :term:`path-like object`. + The Python :term:`global interpreter lock` is released before calling any function exported by these libraries, and reacquired afterwards. @@ -1574,13 +1579,17 @@ These prefabricated library loaders are available: .. data:: windll :noindex: - Windows only: Creates :class:`WinDLL` instances. + Creates :class:`WinDLL` instances. + + .. availability:: Windows .. data:: oledll :noindex: - Windows only: Creates :class:`OleDLL` instances. + Creates :class:`OleDLL` instances. + + .. availability:: Windows .. data:: pydll @@ -1707,12 +1716,6 @@ in :mod:`!ctypes`) which inherits from the private :class:`_CFuncPtr` class: and raise an exception if the foreign function call failed. -.. exception:: ArgumentError - - This exception is raised when a foreign function call cannot convert one of the - passed arguments. - - .. audit-event:: ctypes.set_exception code foreign-functions On Windows, when a foreign function call raises a system exception (for @@ -1752,11 +1755,13 @@ See :ref:`ctypes-callback-functions` for examples. .. function:: WINFUNCTYPE(restype, *argtypes, use_errno=False, use_last_error=False) - Windows only: The returned function prototype creates functions that use the + The returned function prototype creates functions that use the ``stdcall`` calling convention. The function will release the GIL during the call. *use_errno* and *use_last_error* have the same meaning as above. + .. availability:: Windows + .. function:: PYFUNCTYPE(restype, *argtypes) @@ -1799,10 +1804,15 @@ different ways, depending on the type and number of the parameters in the call: integer. *name* is name of the COM method. *iid* is an optional pointer to the interface identifier which is used in extended error reporting. + If *iid* is not specified, an :exc:`OSError` is raised if the COM method + call fails. If *iid* is specified, a :exc:`~ctypes.COMError` is raised + instead. 
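As a brief, hedged aside (not part of the patch): on Windows, :exc:`~ctypes.COMError` is an ordinary exception class, so its fields can be inspected directly when a call fails; the values below are made up purely for illustration::

    >>> from ctypes import COMError   # Windows only
    >>> err = COMError(-2147467259, 'Unspecified error',
    ...                (None, None, None, 0, None))
    >>> err.hresult
    -2147467259
    >>> err.text
    'Unspecified error'
    >>> err.details
    (None, None, None, 0, None)
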
+ COM methods use a special calling convention: They require a pointer to the COM interface as first argument, in addition to those parameters that are specified in the :attr:`!argtypes` tuple. + The optional *paramflags* parameter creates foreign function wrappers with much more functionality than the features described above. @@ -1982,17 +1992,21 @@ Utility functions .. function:: DllCanUnloadNow() - Windows only: This function is a hook which allows implementing in-process + This function is a hook which allows implementing in-process COM servers with ctypes. It is called from the DllCanUnloadNow function that the _ctypes extension dll exports. + .. availability:: Windows + .. function:: DllGetClassObject() - Windows only: This function is a hook which allows implementing in-process + This function is a hook which allows implementing in-process COM servers with ctypes. It is called from the DllGetClassObject function that the ``_ctypes`` extension dll exports. + .. availability:: Windows + .. function:: find_library(name) :module: ctypes.util @@ -2008,7 +2022,7 @@ Utility functions .. function:: find_msvcrt() :module: ctypes.util - Windows only: return the filename of the VC runtime library used by Python, + Returns the filename of the VC runtime library used by Python, and by the extension modules. If the name of the library cannot be determined, ``None`` is returned. @@ -2016,20 +2030,27 @@ Utility functions with a call to the ``free(void *)``, it is important that you use the function in the same library that allocated the memory. + .. availability:: Windows + .. function:: FormatError([code]) - Windows only: Returns a textual description of the error code *code*. If no - error code is specified, the last error code is used by calling the Windows - api function GetLastError. + Returns a textual description of the error code *code*. If no error code is + specified, the last error code is used by calling the Windows API function + :func:`GetLastError`. + + .. availability:: Windows .. function:: GetLastError() - Windows only: Returns the last error code set by Windows in the calling thread. + Returns the last error code set by Windows in the calling thread. This function calls the Windows ``GetLastError()`` function directly, it does not return the ctypes-private copy of the error code. + .. availability:: Windows + + .. function:: get_errno() Returns the current value of the ctypes-private copy of the system @@ -2039,11 +2060,14 @@ Utility functions .. function:: get_last_error() - Windows only: returns the current value of the ctypes-private copy of the system + Returns the current value of the ctypes-private copy of the system :data:`!LastError` variable in the calling thread. + .. availability:: Windows + .. audit-event:: ctypes.get_last_error "" ctypes.get_last_error + .. function:: memmove(dst, src, count) Same as the standard C memmove library function: copies *count* bytes from @@ -2092,10 +2116,12 @@ Utility functions .. function:: set_last_error(value) - Windows only: set the current value of the ctypes-private copy of the system + Sets the current value of the ctypes-private copy of the system :data:`!LastError` variable in the calling thread to *value* and return the previous value. + .. availability:: Windows + .. audit-event:: ctypes.set_last_error error ctypes.set_last_error @@ -2116,12 +2142,13 @@ Utility functions .. function:: WinError(code=None, descr=None) - Windows only: this function is probably the worst-named thing in ctypes. 
It - creates an instance of :exc:`OSError`. If *code* is not specified, - ``GetLastError`` is called to determine the error code. If *descr* is not + Creates an instance of :exc:`OSError`. If *code* is not specified, + :func:`GetLastError` is called to determine the error code. If *descr* is not specified, :func:`FormatError` is called to get a textual description of the error. + .. availability:: Windows + .. versionchanged:: 3.3 An instance of :exc:`WindowsError` used to be created, which is now an alias of :exc:`OSError`. @@ -2485,9 +2512,11 @@ These are the fundamental ctypes data types: .. class:: HRESULT - Windows only: Represents a :c:type:`!HRESULT` value, which contains success or + Represents a :c:type:`!HRESULT` value, which contains success or error information for a function or method call. + .. availability:: Windows + .. class:: py_object @@ -2741,3 +2770,41 @@ Arrays and pointers Returns the object to which to pointer points. Assigning to this attribute changes the pointer to point to the assigned object. + + +.. _ctypes-exceptions: + +Exceptions +^^^^^^^^^^ + +.. exception:: ArgumentError + + This exception is raised when a foreign function call cannot convert one of the + passed arguments. + + +.. exception:: COMError(hresult, text, details) + + This exception is raised when a COM method call failed. + + .. attribute:: hresult + + The integer value representing the error code. + + .. attribute:: text + + The error message. + + .. attribute:: details + + The 5-tuple ``(descr, source, helpfile, helpcontext, progid)``. + + *descr* is the textual description. *source* is the language-dependent + ``ProgID`` for the class or application that raised the error. *helpfile* + is the path of the help file. *helpcontext* is the help context + identifier. *progid* is the ``ProgID`` of the interface that defined the + error. + + .. availability:: Windows + + .. versionadded:: next diff --git a/Doc/library/functools.rst b/Doc/library/functools.rst index a9aceee4170004..69d9d81c848124 100644 --- a/Doc/library/functools.rst +++ b/Doc/library/functools.rst @@ -481,7 +481,7 @@ The :mod:`functools` module defines the following functions: See :func:`itertools.accumulate` for an iterator that yields all intermediate values. - .. versionchanged:: next + .. versionchanged:: 3.14 *initial* is now supported as a keyword argument. .. decorator:: singledispatch diff --git a/Doc/library/gc.rst b/Doc/library/gc.rst index 1065ec30802841..480a9dec7f133b 100644 --- a/Doc/library/gc.rst +++ b/Doc/library/gc.rst @@ -204,8 +204,6 @@ The :mod:`gc` module provides the following functions: >>> gc.is_tracked({}) False >>> gc.is_tracked({"a": 1}) - False - >>> gc.is_tracked({"a": []}) True .. versionadded:: 3.1 diff --git a/Doc/library/importlib.metadata.rst b/Doc/library/importlib.metadata.rst index 37cd237357aa4b..d80255f5313061 100644 --- a/Doc/library/importlib.metadata.rst +++ b/Doc/library/importlib.metadata.rst @@ -133,7 +133,7 @@ Entry points Details of a collection of installed entry points. - Also provides a ``.groups`` attribute that reports all identifed entry + Also provides a ``.groups`` attribute that reports all identified entry point groups, and a ``.names`` attribute that reports all identified entry point names. diff --git a/Doc/library/locale.rst b/Doc/library/locale.rst index f172a55080efc9..426e3a06e1ef11 100644 --- a/Doc/library/locale.rst +++ b/Doc/library/locale.rst @@ -281,7 +281,8 @@ The :mod:`locale` module defines the following exception and functions: .. 
data:: ERA - Get a string that represents the era used in the current locale. + Get a string which describes how years are counted and displayed for + each era in a locale. Most locales do not define this value. An example of a locale which does define this value is the Japanese one. In Japan, the traditional @@ -290,9 +291,10 @@ The :mod:`locale` module defines the following exception and functions: Normally it should not be necessary to use this value directly. Specifying the ``E`` modifier in their format strings causes the :func:`time.strftime` - function to use this information. The format of the returned string is not - specified, and therefore you should not assume knowledge of it on different - systems. + function to use this information. + The format of the returned string is specified in *The Open Group Base + Specifications Issue 8*, paragraph `7.3.5.2 LC_TIME C-Language Access + `_. .. data:: ERA_D_T_FMT diff --git a/Doc/library/marshal.rst b/Doc/library/marshal.rst index 8b14ee449d4631..e8e9071a5c9ef4 100644 --- a/Doc/library/marshal.rst +++ b/Doc/library/marshal.rst @@ -67,7 +67,7 @@ this module. The following types are supported: * Added format version 4, which supports efficient representations of short strings. -.. versionchanged:: next +.. versionchanged:: 3.14 Added format version 5, which allows marshalling slices. diff --git a/Doc/library/math.rst b/Doc/library/math.rst index 5ce2ad2d6aec47..bf79b23a72bbf9 100644 --- a/Doc/library/math.rst +++ b/Doc/library/math.rst @@ -27,36 +27,39 @@ noted otherwise, all return values are floats. ==================================================== ============================================ -**Number-theoretic and representation functions** +**Number-theoretic functions** -------------------------------------------------------------------------------------------------- -:func:`ceil(x) ` Ceiling of *x*, the smallest integer greater than or equal to *x* :func:`comb(n, k) ` Number of ways to choose *k* items from *n* items without repetition and without order -:func:`copysign(x, y) ` Magnitude (absolute value) of *x* with the sign of *y* -:func:`fabs(x) ` Absolute value of *x* :func:`factorial(n) ` *n* factorial -:func:`floor (x) ` Floor of *x*, the largest integer less than or equal to *x* +:func:`gcd(*integers) ` Greatest common divisor of the integer arguments +:func:`isqrt(n) ` Integer square root of a nonnegative integer *n* +:func:`lcm(*integers) ` Least common multiple of the integer arguments +:func:`perm(n, k) ` Number of ways to choose *k* items from *n* items without repetition and with order + +**Floating point arithmetic** +-------------------------------------------------------------------------------------------------- +:func:`ceil(x) ` Ceiling of *x*, the smallest integer greater than or equal to *x* +:func:`fabs(x) ` Absolute value of *x* +:func:`floor(x) ` Floor of *x*, the largest integer less than or equal to *x* :func:`fma(x, y, z) ` Fused multiply-add operation: ``(x * y) + z`` :func:`fmod(x, y) ` Remainder of division ``x / y`` +:func:`modf(x) ` Fractional and integer parts of *x* +:func:`remainder(x, y) ` Remainder of *x* with respect to *y* +:func:`trunc(x) ` Integer part of *x* + +**Floating point manipulation functions** +-------------------------------------------------------------------------------------------------- +:func:`copysign(x, y) ` Magnitude (absolute value) of *x* with the sign of *y* :func:`frexp(x) ` Mantissa and exponent of *x* -:func:`fsum(iterable) ` Sum of values in the input *iterable* 
-:func:`gcd(*integers) ` Greatest common divisor of the integer arguments :func:`isclose(a, b, rel_tol, abs_tol) ` Check if the values *a* and *b* are close to each other :func:`isfinite(x) ` Check if *x* is neither an infinity nor a NaN :func:`isinf(x) ` Check if *x* is a positive or negative infinity :func:`isnan(x) ` Check if *x* is a NaN (not a number) -:func:`isqrt(n) ` Integer square root of a nonnegative integer *n* -:func:`lcm(*integers) ` Least common multiple of the integer arguments :func:`ldexp(x, i) ` ``x * (2**i)``, inverse of function :func:`frexp` -:func:`modf(x) ` Fractional and integer parts of *x* :func:`nextafter(x, y, steps) ` Floating-point value *steps* steps after *x* towards *y* -:func:`perm(n, k) ` Number of ways to choose *k* items from *n* items without repetition and with order -:func:`prod(iterable, start) ` Product of elements in the input *iterable* with a *start* value -:func:`remainder(x, y) ` Remainder of *x* with respect to *y* -:func:`sumprod(p, q) ` Sum of products from two iterables *p* and *q* -:func:`trunc(x) ` Integer part of *x* :func:`ulp(x) ` Value of the least significant bit of *x* -**Power and logarithmic functions** +**Power, exponential and logarithmic functions** -------------------------------------------------------------------------------------------------- :func:`cbrt(x) ` Cube root of *x* :func:`exp(x) ` *e* raised to the power *x* @@ -69,6 +72,19 @@ noted otherwise, all return values are floats. :func:`pow(x, y) ` *x* raised to the power *y* :func:`sqrt(x) ` Square root of *x* +**Summation and product functions** +-------------------------------------------------------------------------------------------------- +:func:`dist(p, q) ` Euclidean distance between two points *p* and *q* given as an iterable of coordinates +:func:`fsum(iterable) ` Sum of values in the input *iterable* +:func:`hypot(*coordinates) ` Euclidean norm of an iterable of coordinates +:func:`prod(iterable, start) ` Product of elements in the input *iterable* with a *start* value +:func:`sumprod(p, q) ` Sum of products from two iterables *p* and *q* + +**Angular conversion** +-------------------------------------------------------------------------------------------------- +:func:`degrees(x) ` Convert angle *x* from radians to degrees +:func:`radians(x) ` Convert angle *x* from degrees to radians + **Trigonometric functions** -------------------------------------------------------------------------------------------------- :func:`acos(x) ` Arc cosine of *x* @@ -76,16 +92,9 @@ noted otherwise, all return values are floats. :func:`atan(x) ` Arc tangent of *x* :func:`atan2(y, x) ` ``atan(y / x)`` :func:`cos(x) ` Cosine of *x* -:func:`dist(p, q) ` Euclidean distance between two points *p* and *q* given as an iterable of coordinates -:func:`hypot(*coordinates) ` Euclidean norm of an iterable of coordinates :func:`sin(x) ` Sine of *x* :func:`tan(x) ` Tangent of *x* -**Angular conversion** --------------------------------------------------------------------------------------------------- -:func:`degrees(x) ` Convert angle *x* from radians to degrees -:func:`radians(x) ` Convert angle *x* from degrees to radians - **Hyperbolic functions** -------------------------------------------------------------------------------------------------- :func:`acosh(x) ` Inverse hyperbolic cosine of *x* @@ -112,15 +121,8 @@ noted otherwise, all return values are floats. 
==================================================== ============================================ -Number-theoretic and representation functions ---------------------------------------------- - -.. function:: ceil(x) - - Return the ceiling of *x*, the smallest integer greater than or equal to *x*. - If *x* is not a float, delegates to :meth:`x.__ceil__ `, - which should return an :class:`~numbers.Integral` value. - +Number-theoretic functions +-------------------------- .. function:: comb(n, k) @@ -140,25 +142,85 @@ Number-theoretic and representation functions .. versionadded:: 3.8 -.. function:: copysign(x, y) +.. function:: factorial(n) - Return a float with the magnitude (absolute value) of *x* but the sign of - *y*. On platforms that support signed zeros, ``copysign(1.0, -0.0)`` - returns *-1.0*. + Return *n* factorial as an integer. Raises :exc:`ValueError` if *n* is not integral or + is negative. + .. versionchanged:: 3.10 + Floats with integral values (like ``5.0``) are no longer accepted. -.. function:: fabs(x) - Return the absolute value of *x*. +.. function:: gcd(*integers) + + Return the greatest common divisor of the specified integer arguments. + If any of the arguments is nonzero, then the returned value is the largest + positive integer that is a divisor of all arguments. If all arguments + are zero, then the returned value is ``0``. ``gcd()`` without arguments + returns ``0``. + .. versionadded:: 3.5 -.. function:: factorial(n) + .. versionchanged:: 3.9 + Added support for an arbitrary number of arguments. Formerly, only two + arguments were supported. - Return *n* factorial as an integer. Raises :exc:`ValueError` if *n* is not integral or - is negative. - .. versionchanged:: 3.10 - Floats with integral values (like ``5.0``) are no longer accepted. +.. function:: isqrt(n) + + Return the integer square root of the nonnegative integer *n*. This is the + floor of the exact square root of *n*, or equivalently the greatest integer + *a* such that *a*\ ² |nbsp| ≤ |nbsp| *n*. + + For some applications, it may be more convenient to have the least integer + *a* such that *n* |nbsp| ≤ |nbsp| *a*\ ², or in other words the ceiling of + the exact square root of *n*. For positive *n*, this can be computed using + ``a = 1 + isqrt(n - 1)``. + + .. versionadded:: 3.8 + + +.. function:: lcm(*integers) + + Return the least common multiple of the specified integer arguments. + If all arguments are nonzero, then the returned value is the smallest + positive integer that is a multiple of all arguments. If any of the arguments + is zero, then the returned value is ``0``. ``lcm()`` without arguments + returns ``1``. + + .. versionadded:: 3.9 + + +.. function:: perm(n, k=None) + + Return the number of ways to choose *k* items from *n* items + without repetition and with order. + + Evaluates to ``n! / (n - k)!`` when ``k <= n`` and evaluates + to zero when ``k > n``. + + If *k* is not specified or is ``None``, then *k* defaults to *n* + and the function returns ``n!``. + + Raises :exc:`TypeError` if either of the arguments are not integers. + Raises :exc:`ValueError` if either of the arguments are negative. + + .. versionadded:: 3.8 + + +Floating point arithmetic +------------------------- + +.. function:: ceil(x) + + Return the ceiling of *x*, the smallest integer greater than or equal to *x*. + If *x* is not a float, delegates to :meth:`x.__ceil__ `, + which should return an :class:`~numbers.Integral` value. + + +.. function:: fabs(x) + + Return the absolute value of *x*. .. 
function:: floor(x) @@ -199,45 +261,76 @@ Number-theoretic and representation functions floats, while Python's ``x % y`` is preferred when working with integers. -.. function:: frexp(x) +.. function:: modf(x) - Return the mantissa and exponent of *x* as the pair ``(m, e)``. *m* is a float - and *e* is an integer such that ``x == m * 2**e`` exactly. If *x* is zero, - returns ``(0.0, 0)``, otherwise ``0.5 <= abs(m) < 1``. This is used to "pick - apart" the internal representation of a float in a portable way. + Return the fractional and integer parts of *x*. Both results carry the sign + of *x* and are floats. + Note that :func:`modf` has a different call/return pattern + than its C equivalents: it takes a single argument and return a pair of + values, rather than returning its second return value through an 'output + parameter' (there is no such thing in Python). -.. function:: fsum(iterable) - Return an accurate floating-point sum of values in the iterable. Avoids - loss of precision by tracking multiple intermediate partial sums. +.. function:: remainder(x, y) - The algorithm's accuracy depends on IEEE-754 arithmetic guarantees and the - typical case where the rounding mode is half-even. On some non-Windows - builds, the underlying C library uses extended precision addition and may - occasionally double-round an intermediate sum causing it to be off in its - least significant bit. + Return the IEEE 754-style remainder of *x* with respect to *y*. For + finite *x* and finite nonzero *y*, this is the difference ``x - n*y``, + where ``n`` is the closest integer to the exact value of the quotient ``x / + y``. If ``x / y`` is exactly halfway between two consecutive integers, the + nearest *even* integer is used for ``n``. The remainder ``r = remainder(x, + y)`` thus always satisfies ``abs(r) <= 0.5 * abs(y)``. - For further discussion and two alternative approaches, see the `ASPN cookbook - recipes for accurate floating-point summation - `_\. + Special cases follow IEEE 754: in particular, ``remainder(x, math.inf)`` is + *x* for any finite *x*, and ``remainder(x, 0)`` and + ``remainder(math.inf, x)`` raise :exc:`ValueError` for any non-NaN *x*. + If the result of the remainder operation is zero, that zero will have + the same sign as *x*. + On platforms using IEEE 754 binary floating point, the result of this + operation is always exactly representable: no rounding error is introduced. -.. function:: gcd(*integers) + .. versionadded:: 3.7 - Return the greatest common divisor of the specified integer arguments. - If any of the arguments is nonzero, then the returned value is the largest - positive integer that is a divisor of all arguments. If all arguments - are zero, then the returned value is ``0``. ``gcd()`` without arguments - returns ``0``. - .. versionadded:: 3.5 +.. function:: trunc(x) - .. versionchanged:: 3.9 - Added support for an arbitrary number of arguments. Formerly, only two - arguments were supported. + Return *x* with the fractional part + removed, leaving the integer part. This rounds toward 0: ``trunc()`` is + equivalent to :func:`floor` for positive *x*, and equivalent to :func:`ceil` + for negative *x*. If *x* is not a float, delegates to :meth:`x.__trunc__ + `, which should return an :class:`~numbers.Integral` value. + + +For the :func:`ceil`, :func:`floor`, and :func:`modf` functions, note that *all* +floating-point numbers of sufficiently large magnitude are exact integers. 
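A quick doctest-style illustration of both points (added here for clarity, not part of the original patch)::

    >>> import math
    >>> math.modf(-3.75)          # both parts carry the sign of x
    (-0.75, -3.0)
    >>> math.modf(2.0 ** 53)      # large-magnitude floats have no fractional part
    (0.0, 9007199254740992.0)
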
+Python floats typically carry no more than 53 bits of precision (the same as the +platform C double type), in which case any float *x* with ``abs(x) >= 2**52`` +necessarily has no fractional bits. +Floating point manipulation functions +------------------------------------- + +.. function:: copysign(x, y) + + Return a float with the magnitude (absolute value) of *x* but the sign of + *y*. On platforms that support signed zeros, ``copysign(1.0, -0.0)`` + returns *-1.0*. + + +.. function:: frexp(x) + + Return the mantissa and exponent of *x* as the pair ``(m, e)``. *m* is a float + and *e* is an integer such that ``x == m * 2**e`` exactly. If *x* is zero, + returns ``(0.0, 0)``, otherwise ``0.5 <= abs(m) < 1``. This is used to "pick + apart" the internal representation of a float in a portable way. + + Note that :func:`frexp` has a different call/return pattern + than its C equivalents: it takes a single argument and return a pair of + values, rather than returning its second return value through an 'output + parameter' (there is no such thing in Python). + .. function:: isclose(a, b, *, rel_tol=1e-09, abs_tol=0.0) Return ``True`` if the values *a* and *b* are close to each other and @@ -291,43 +384,12 @@ Number-theoretic and representation functions Return ``True`` if *x* is a NaN (not a number), and ``False`` otherwise. -.. function:: isqrt(n) - - Return the integer square root of the nonnegative integer *n*. This is the - floor of the exact square root of *n*, or equivalently the greatest integer - *a* such that *a*\ ² |nbsp| ≤ |nbsp| *n*. - - For some applications, it may be more convenient to have the least integer - *a* such that *n* |nbsp| ≤ |nbsp| *a*\ ², or in other words the ceiling of - the exact square root of *n*. For positive *n*, this can be computed using - ``a = 1 + isqrt(n - 1)``. - - .. versionadded:: 3.8 - - -.. function:: lcm(*integers) - - Return the least common multiple of the specified integer arguments. - If all arguments are nonzero, then the returned value is the smallest - positive integer that is a multiple of all arguments. If any of the arguments - is zero, then the returned value is ``0``. ``lcm()`` without arguments - returns ``1``. - - .. versionadded:: 3.9 - - .. function:: ldexp(x, i) Return ``x * (2**i)``. This is essentially the inverse of function :func:`frexp`. -.. function:: modf(x) - - Return the fractional and integer parts of *x*. Both results carry the sign - of *x* and are floats. - - .. function:: nextafter(x, y, steps=1) Return the floating-point value *steps* steps after *x* towards *y*. @@ -348,79 +410,6 @@ Number-theoretic and representation functions .. versionchanged:: 3.12 Added the *steps* argument. -.. function:: perm(n, k=None) - - Return the number of ways to choose *k* items from *n* items - without repetition and with order. - - Evaluates to ``n! / (n - k)!`` when ``k <= n`` and evaluates - to zero when ``k > n``. - - If *k* is not specified or is ``None``, then *k* defaults to *n* - and the function returns ``n!``. - - Raises :exc:`TypeError` if either of the arguments are not integers. - Raises :exc:`ValueError` if either of the arguments are negative. - - .. versionadded:: 3.8 - - -.. function:: prod(iterable, *, start=1) - - Calculate the product of all the elements in the input *iterable*. - The default *start* value for the product is ``1``. - - When the iterable is empty, return the start value. This function is - intended specifically for use with numeric values and may reject - non-numeric types. - - .. 
versionadded:: 3.8 - - -.. function:: remainder(x, y) - - Return the IEEE 754-style remainder of *x* with respect to *y*. For - finite *x* and finite nonzero *y*, this is the difference ``x - n*y``, - where ``n`` is the closest integer to the exact value of the quotient ``x / - y``. If ``x / y`` is exactly halfway between two consecutive integers, the - nearest *even* integer is used for ``n``. The remainder ``r = remainder(x, - y)`` thus always satisfies ``abs(r) <= 0.5 * abs(y)``. - - Special cases follow IEEE 754: in particular, ``remainder(x, math.inf)`` is - *x* for any finite *x*, and ``remainder(x, 0)`` and - ``remainder(math.inf, x)`` raise :exc:`ValueError` for any non-NaN *x*. - If the result of the remainder operation is zero, that zero will have - the same sign as *x*. - - On platforms using IEEE 754 binary floating point, the result of this - operation is always exactly representable: no rounding error is introduced. - - .. versionadded:: 3.7 - - -.. function:: sumprod(p, q) - - Return the sum of products of values from two iterables *p* and *q*. - - Raises :exc:`ValueError` if the inputs do not have the same length. - - Roughly equivalent to:: - - sum(map(operator.mul, p, q, strict=True)) - - For float and mixed int/float inputs, the intermediate products - and sums are computed with extended precision. - - .. versionadded:: 3.12 - - -.. function:: trunc(x) - - Return *x* with the fractional part - removed, leaving the integer part. This rounds toward 0: ``trunc()`` is - equivalent to :func:`floor` for positive *x*, and equivalent to :func:`ceil` - for negative *x*. If *x* is not a float, delegates to :meth:`x.__trunc__ - `, which should return an :class:`~numbers.Integral` value. .. function:: ulp(x) @@ -447,20 +436,8 @@ Number-theoretic and representation functions .. versionadded:: 3.9 -Note that :func:`frexp` and :func:`modf` have a different call/return pattern -than their C equivalents: they take a single argument and return a pair of -values, rather than returning their second return value through an 'output -parameter' (there is no such thing in Python). - -For the :func:`ceil`, :func:`floor`, and :func:`modf` functions, note that *all* -floating-point numbers of sufficiently large magnitude are exact integers. -Python floats typically carry no more than 53 bits of precision (the same as the -platform C double type), in which case any float *x* with ``abs(x) >= 2**52`` -necessarily has no fractional bits. - - -Power and logarithmic functions -------------------------------- +Power, exponential and logarithmic functions +-------------------------------------------- .. function:: cbrt(x) @@ -557,41 +534,8 @@ Power and logarithmic functions Return the square root of *x*. -Trigonometric functions ------------------------ - -.. function:: acos(x) - - Return the arc cosine of *x*, in radians. The result is between ``0`` and - ``pi``. - - -.. function:: asin(x) - - Return the arc sine of *x*, in radians. The result is between ``-pi/2`` and - ``pi/2``. - - -.. function:: atan(x) - - Return the arc tangent of *x*, in radians. The result is between ``-pi/2`` and - ``pi/2``. - - -.. function:: atan2(y, x) - - Return ``atan(y / x)``, in radians. The result is between ``-pi`` and ``pi``. - The vector in the plane from the origin to point ``(x, y)`` makes this angle - with the positive X axis. The point of :func:`atan2` is that the signs of both - inputs are known to it, so it can compute the correct quadrant for the angle. 
- For example, ``atan(1)`` and ``atan2(1, 1)`` are both ``pi/4``, but ``atan2(-1, - -1)`` is ``-3*pi/4``. - - -.. function:: cos(x) - - Return the cosine of *x* radians. - +Summation and product functions +------------------------------- .. function:: dist(p, q) @@ -606,6 +550,22 @@ Trigonometric functions .. versionadded:: 3.8 +.. function:: fsum(iterable) + + Return an accurate floating-point sum of values in the iterable. Avoids + loss of precision by tracking multiple intermediate partial sums. + + The algorithm's accuracy depends on IEEE-754 arithmetic guarantees and the + typical case where the rounding mode is half-even. On some non-Windows + builds, the underlying C library uses extended precision addition and may + occasionally double-round an intermediate sum causing it to be off in its + least significant bit. + + For further discussion and two alternative approaches, see the `ASPN cookbook + recipes for accurate floating-point summation + `_\. + + .. function:: hypot(*coordinates) Return the Euclidean norm, ``sqrt(sum(x**2 for x in coordinates))``. @@ -626,14 +586,32 @@ Trigonometric functions is almost always correctly rounded to within 1/2 ulp. -.. function:: sin(x) +.. function:: prod(iterable, *, start=1) - Return the sine of *x* radians. + Calculate the product of all the elements in the input *iterable*. + The default *start* value for the product is ``1``. + When the iterable is empty, return the start value. This function is + intended specifically for use with numeric values and may reject + non-numeric types. -.. function:: tan(x) + .. versionadded:: 3.8 - Return the tangent of *x* radians. + +.. function:: sumprod(p, q) + + Return the sum of products of values from two iterables *p* and *q*. + + Raises :exc:`ValueError` if the inputs do not have the same length. + + Roughly equivalent to:: + + sum(map(operator.mul, p, q, strict=True)) + + For float and mixed int/float inputs, the intermediate products + and sums are computed with extended precision. + + .. versionadded:: 3.12 Angular conversion @@ -649,6 +627,52 @@ Angular conversion Convert angle *x* from degrees to radians. +Trigonometric functions +----------------------- + +.. function:: acos(x) + + Return the arc cosine of *x*, in radians. The result is between ``0`` and + ``pi``. + + +.. function:: asin(x) + + Return the arc sine of *x*, in radians. The result is between ``-pi/2`` and + ``pi/2``. + + +.. function:: atan(x) + + Return the arc tangent of *x*, in radians. The result is between ``-pi/2`` and + ``pi/2``. + + +.. function:: atan2(y, x) + + Return ``atan(y / x)``, in radians. The result is between ``-pi`` and ``pi``. + The vector in the plane from the origin to point ``(x, y)`` makes this angle + with the positive X axis. The point of :func:`atan2` is that the signs of both + inputs are known to it, so it can compute the correct quadrant for the angle. + For example, ``atan(1)`` and ``atan2(1, 1)`` are both ``pi/4``, but ``atan2(-1, + -1)`` is ``-3*pi/4``. + + +.. function:: cos(x) + + Return the cosine of *x* radians. + + +.. function:: sin(x) + + Return the sine of *x* radians. + + +.. function:: tan(x) + + Return the tangent of *x* radians. + + Hyperbolic functions -------------------- diff --git a/Doc/library/multiprocessing.rst b/Doc/library/multiprocessing.rst index 036b8f44b9ff3b..783cb025826483 100644 --- a/Doc/library/multiprocessing.rst +++ b/Doc/library/multiprocessing.rst @@ -291,7 +291,7 @@ processes: of corruption from processes using different ends of the pipe at the same time. 
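A minimal, self-contained sketch of the pickling round trip described here (illustrative only, not part of the patch; both ends of the pipe are used from one process simply to show the behaviour)::

    from multiprocessing import Pipe

    conn1, conn2 = Pipe()
    conn1.send({'answer': 42})      # the object is pickled on this end...
    print(conn2.recv())             # ...and re-created on the other: {'answer': 42}
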
- The :meth:`~Connection.send` method serializes the the object and + The :meth:`~Connection.send` method serializes the object and :meth:`~Connection.recv` re-creates the object. Synchronization between processes @@ -828,7 +828,7 @@ For an example of the usage of queues for interprocess communication see used for receiving messages and ``conn2`` can only be used for sending messages. - The :meth:`~multiprocessing.Connection.send` method serializes the the object using + The :meth:`~multiprocessing.Connection.send` method serializes the object using :mod:`pickle` and the :meth:`~multiprocessing.Connection.recv` re-creates the object. .. class:: Queue([maxsize]) diff --git a/Doc/library/os.rst b/Doc/library/os.rst index 61144256f47ddb..dfe5ef0726ff7d 100644 --- a/Doc/library/os.rst +++ b/Doc/library/os.rst @@ -261,7 +261,7 @@ process and user. :data:`os.environ` or :data:`os.environb`, or calling :func:`os.getenv` while reloading, may return an empty result. - .. versionadded:: next + .. versionadded:: 3.14 .. function:: chdir(path) diff --git a/Doc/library/shutil.rst b/Doc/library/shutil.rst index d25701c087ed07..2a8592f8bd69c1 100644 --- a/Doc/library/shutil.rst +++ b/Doc/library/shutil.rst @@ -491,12 +491,6 @@ Directory and files operations or ends with an extension that is in ``PATHEXT``; and filenames that have no extension can now be found. - .. versionchanged:: 3.12.1 - On Windows, if *mode* includes ``os.X_OK``, executables with an - extension in ``PATHEXT`` will be preferred over executables without a - matching extension. - This brings behavior closer to that of Python 3.11. - .. exception:: Error This exception collects exceptions that are raised during a multi-file diff --git a/Doc/library/site.rst b/Doc/library/site.rst index 4508091f679dc7..5f2a0f610e1aa5 100644 --- a/Doc/library/site.rst +++ b/Doc/library/site.rst @@ -49,14 +49,22 @@ added path for configuration files. identified by the "t" suffix in the version-specific directory name, such as :file:`lib/python3.13t/`. -If a file named "pyvenv.cfg" exists one directory above sys.executable, -sys.prefix and sys.exec_prefix are set to that directory and -it is also checked for site-packages (sys.base_prefix and -sys.base_exec_prefix will always be the "real" prefixes of the Python -installation). If "pyvenv.cfg" (a bootstrap configuration file) contains -the key "include-system-site-packages" set to anything other than "true" -(case-insensitive), the system-level prefixes will not be -searched for site-packages; otherwise they will. +.. versionchanged:: 3.14 + + :mod:`site` is no longer responsible for updating :data:`sys.prefix` and + :data:`sys.exec_prefix` on :ref:`sys-path-init-virtual-environments`. This is + now done during the :ref:`path initialization `. As a result, + under :ref:`sys-path-init-virtual-environments`, :data:`sys.prefix` and + :data:`sys.exec_prefix` no longer depend on the :mod:`site` initialization, + and are therefore unaffected by :option:`-S`. + +.. _site-virtual-environments-configuration: + +When running under a :ref:`virtual environment `, +the ``pyvenv.cfg`` file in :data:`sys.prefix` is checked for site-specific +configurations. If the ``include-system-site-packages`` key exists and is set to +``true`` (case-insensitive), the system-level prefixes will be searched for +site-packages, otherwise they won't. .. 
index:: single: # (hash); comment diff --git a/Doc/library/socket.rst b/Doc/library/socket.rst index 6358d140484c78..58323ba6514eac 100644 --- a/Doc/library/socket.rst +++ b/Doc/library/socket.rst @@ -979,7 +979,7 @@ The :mod:`socket` module also offers various network-related services: These addresses should generally be tried in order until a connection succeeds (possibly tried in parallel, for example, using a `Happy Eyeballs`_ algorithm). In these cases, limiting the *type* and/or *proto* can help eliminate - unsuccessful or unusable connecton attempts. + unsuccessful or unusable connection attempts. Some systems will, however, only return a single address. (For example, this was reported on Solaris and AIX configurations.) @@ -1596,8 +1596,6 @@ to sockets. .. method:: socket.ioctl(control, option) - :platform: Windows - The :meth:`ioctl` method is a limited interface to the WSAIoctl system interface. Please refer to the `Win32 documentation `_ for more @@ -1609,9 +1607,12 @@ to sockets. Currently only the following control codes are supported: ``SIO_RCVALL``, ``SIO_KEEPALIVE_VALS``, and ``SIO_LOOPBACK_FAST_PATH``. + .. availability:: Windows + .. versionchanged:: 3.6 ``SIO_LOOPBACK_FAST_PATH`` was added. + .. method:: socket.listen([backlog]) Enable a server to accept connections. If *backlog* is specified, it must diff --git a/Doc/library/stdtypes.rst b/Doc/library/stdtypes.rst index a9b7662dcb212b..4f4fc9fba63120 100644 --- a/Doc/library/stdtypes.rst +++ b/Doc/library/stdtypes.rst @@ -243,6 +243,9 @@ numeric literal yields an imaginary number (a complex number with a zero real part) which you can add to an integer or float to get a complex number with real and imaginary parts. +The constructors :func:`int`, :func:`float`, and +:func:`complex` can be used to produce numbers of a specific type. + .. index:: single: arithmetic pair: built-in function; int @@ -262,12 +265,15 @@ and imaginary parts. Python fully supports mixed arithmetic: when a binary arithmetic operator has operands of different numeric types, the operand with the "narrower" type is -widened to that of the other, where integer is narrower than floating point, -which is narrower than complex. A comparison between numbers of different types -behaves as though the exact values of those numbers were being compared. [2]_ +widened to that of the other, where integer is narrower than floating point. +Arithmetic with complex and real operands is defined by the usual mathematical +formula, for example:: -The constructors :func:`int`, :func:`float`, and -:func:`complex` can be used to produce numbers of a specific type. + x + complex(u, v) = complex(x + u, v) + x * complex(u, v) = complex(x * u, x * v) + +A comparison between numbers of different types behaves as though the exact +values of those numbers were being compared. [2]_ All numeric types (except complex) support the following operations (for priorities of the operations, see :ref:`operator-summary`): @@ -3889,7 +3895,7 @@ copying. .. versionchanged:: 3.5 memoryviews can now be indexed with tuple of integers. - .. versionchanged:: next + .. versionchanged:: 3.14 memoryview is now a :term:`generic type`. :class:`memoryview` has several methods: diff --git a/Doc/library/sys.rst b/Doc/library/sys.rst index d83816ec1502ca..dd6293c722e7ad 100644 --- a/Doc/library/sys.rst +++ b/Doc/library/sys.rst @@ -130,27 +130,26 @@ always available. .. data:: base_exec_prefix - Set during Python startup, before ``site.py`` is run, to the same value as - :data:`exec_prefix`. 
If not running in a - :ref:`virtual environment `, the values will stay the same; if - ``site.py`` finds that a virtual environment is in use, the values of - :data:`prefix` and :data:`exec_prefix` will be changed to point to the - virtual environment, whereas :data:`base_prefix` and - :data:`base_exec_prefix` will remain pointing to the base Python - installation (the one which the virtual environment was created from). + Equivalent to :data:`exec_prefix`, but refering to the base Python installation. + + When running under :ref:`sys-path-init-virtual-environments`, + :data:`exec_prefix` gets overwritten to the virtual environment prefix. + :data:`base_exec_prefix`, conversely, does not change, and always points to + the base Python installation. + Refer to :ref:`sys-path-init-virtual-environments` for more information. .. versionadded:: 3.3 .. data:: base_prefix - Set during Python startup, before ``site.py`` is run, to the same value as - :data:`prefix`. If not running in a :ref:`virtual environment `, the values - will stay the same; if ``site.py`` finds that a virtual environment is in - use, the values of :data:`prefix` and :data:`exec_prefix` will be changed to - point to the virtual environment, whereas :data:`base_prefix` and - :data:`base_exec_prefix` will remain pointing to the base Python - installation (the one which the virtual environment was created from). + Equivalent to :data:`prefix`, but refering to the base Python installation. + + When running under :ref:`virtual environment `, + :data:`prefix` gets overwritten to the virtual environment prefix. + :data:`base_prefix`, conversely, does not change, and always points to + the base Python installation. + Refer to :ref:`sys-path-init-virtual-environments` for more information. .. versionadded:: 3.3 @@ -483,11 +482,19 @@ always available. .. note:: - If a :ref:`virtual environment ` is in effect, this - value will be changed in ``site.py`` to point to the virtual environment. - The value for the Python installation will still be available, via - :data:`base_exec_prefix`. + If a :ref:`virtual environment ` is in effect, this :data:`exec_prefix` + will point to the virtual environment. The value for the Python installation + will still be available, via :data:`base_exec_prefix`. + Refer to :ref:`sys-path-init-virtual-environments` for more information. + .. versionchanged:: 3.14 + + When running under a :ref:`virtual environment `, + :data:`prefix` and :data:`exec_prefix` are now set to the virtual + environment prefix by the :ref:`path initialization `, + instead of :mod:`site`. This means that :data:`prefix` and + :data:`exec_prefix` always point to the virtual environment, even when + :mod:`site` is disabled (:option:`-S`). .. data:: executable @@ -944,7 +951,7 @@ always available. This function should be used for specialized purposes only. It is not guaranteed to exist in all implementations of Python. - .. versionchanged:: next + .. versionchanged:: 3.14 The result may include objects from other interpreters. @@ -1483,10 +1490,21 @@ always available. argument to the :program:`configure` script. See :ref:`installation_paths` for derived paths. - .. note:: If a :ref:`virtual environment ` is in effect, this - value will be changed in ``site.py`` to point to the virtual - environment. The value for the Python installation will still be - available, via :data:`base_prefix`. + .. note:: + + If a :ref:`virtual environment ` is in effect, this :data:`prefix` + will point to the virtual environment. 
The value for the Python installation + will still be available, via :data:`base_prefix`. + Refer to :ref:`sys-path-init-virtual-environments` for more information. + + .. versionchanged:: 3.14 + + When running under a :ref:`virtual environment `, + :data:`prefix` and :data:`exec_prefix` are now set to the virtual + environment prefix by the :ref:`path initialization `, + instead of :mod:`site`. This means that :data:`prefix` and + :data:`exec_prefix` always point to the virtual environment, even when + :mod:`site` is disabled (:option:`-S`). .. data:: ps1 diff --git a/Doc/library/sys_path_init.rst b/Doc/library/sys_path_init.rst index a87a41cf829fa8..18fe32d9c7f10a 100644 --- a/Doc/library/sys_path_init.rst +++ b/Doc/library/sys_path_init.rst @@ -47,8 +47,15 @@ however on other platforms :file:`lib/python{majorversion}.{minorversion}/lib-dy ``exec_prefix``. On some platforms :file:`lib` may be :file:`lib64` or another value, see :data:`sys.platlibdir` and :envvar:`PYTHONPLATLIBDIR`. -Once found, ``prefix`` and ``exec_prefix`` are available at :data:`sys.prefix` and -:data:`sys.exec_prefix` respectively. +Once found, ``prefix`` and ``exec_prefix`` are available at +:data:`sys.base_prefix` and :data:`sys.base_exec_prefix` respectively. + +If :envvar:`PYTHONHOME` is not set, and a ``pyvenv.cfg`` file is found alongside +the main executable, or in its parent directory, :data:`sys.prefix` and +:data:`sys.exec_prefix` get set to the directory containing ``pyvenv.cfg``, +otherwise they are set to the same value as :data:`sys.base_prefix` and +:data:`sys.base_exec_prefix`, respectively. +This is used by :ref:`sys-path-init-virtual-environments`. Finally, the :mod:`site` module is processed and :file:`site-packages` directories are added to the module search path. A common way to customize the search path is @@ -60,18 +67,40 @@ the :mod:`site` module documentation. Certain command line options may further affect path calculations. See :option:`-E`, :option:`-I`, :option:`-s` and :option:`-S` for further details. -Virtual environments +.. versionchanged:: 3.14 + + :data:`sys.prefix` and :data:`sys.exec_prefix` are now set to the + ``pyvenv.cfg`` directory during the path initialization. This was previously + done by :mod:`site`, therefore affected by :option:`-S`. + +.. _sys-path-init-virtual-environments: + +Virtual Environments -------------------- -If Python is run in a virtual environment (as described at :ref:`tut-venv`) -then ``prefix`` and ``exec_prefix`` are specific to the virtual environment. +Virtual environments place a ``pyvenv.cfg`` file in their prefix, which causes +:data:`sys.prefix` and :data:`sys.exec_prefix` to point to them, instead of the +base installation. + +The ``prefix`` and ``exec_prefix`` values of the base installation are available +at :data:`sys.base_prefix` and :data:`sys.base_exec_prefix`. -If a ``pyvenv.cfg`` file is found alongside the main executable, or in the -directory one level above the executable, the following variations apply: +As well as being used as a marker to identify virtual environments, +``pyvenv.cfg`` may also be used to configure the :mod:`site` initialization. +Please refer to :mod:`site`'s +:ref:`virtual environments documentation `. + +.. note:: + + :envvar:`PYTHONHOME` overrides the ``pyvenv.cfg`` detection. + +.. note:: -* If ``home`` is an absolute path and :envvar:`PYTHONHOME` is not set, this - path is used instead of the path to the main executable when deducing ``prefix`` - and ``exec_prefix``. 
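As an illustrative aside (not part of the patch), the practical effect of keeping two pairs of prefixes can be checked from any interpreter::

    >>> import sys
    >>> # True when running the base installation; False inside a virtual
    >>> # environment, where sys.prefix points at the environment and
    >>> # sys.base_prefix at the installation it was created from.
    >>> sys.prefix == sys.base_prefix
    True
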
+ There are other ways how "virtual environments" could be implemented, this + documentation referes implementations based on the ``pyvenv.cfg`` mechanism, + such as :mod:`venv`. Most virtual environment implementations follow the + model set by :mod:`venv`, but there may be exotic implementations that + diverge from it. _pth files ---------- diff --git a/Doc/library/token.rst b/Doc/library/token.rst index 0cc9dddd91ed6b..40982f32b4beee 100644 --- a/Doc/library/token.rst +++ b/Doc/library/token.rst @@ -79,6 +79,13 @@ the :mod:`tokenize` module. ``type_comments=True``. +.. data:: EXACT_TOKEN_TYPES + + A dictionary mapping the string representation of a token to its numeric code. + + .. versionadded:: 3.8 + + .. versionchanged:: 3.5 Added :data:`!AWAIT` and :data:`!ASYNC` tokens. diff --git a/Doc/library/tomllib.rst b/Doc/library/tomllib.rst index 4b88b2e29e7822..30d7ff50a1acc1 100644 --- a/Doc/library/tomllib.rst +++ b/Doc/library/tomllib.rst @@ -84,11 +84,11 @@ The following exceptions are available: The column corresponding to *pos*. - .. versionchanged:: next + .. versionchanged:: 3.14 Added the *msg*, *doc* and *pos* parameters. Added the :attr:`msg`, :attr:`doc`, :attr:`pos`, :attr:`lineno` and :attr:`colno` attributes. - .. deprecated:: next + .. deprecated:: 3.14 Passing free-form positional arguments is deprecated. diff --git a/Doc/library/urllib.request.rst b/Doc/library/urllib.request.rst index ce82552a3ae4be..3c07dc4adf434a 100644 --- a/Doc/library/urllib.request.rst +++ b/Doc/library/urllib.request.rst @@ -67,8 +67,7 @@ The :mod:`urllib.request` module defines the following functions: the response headers as it is specified in the documentation for :class:`~http.client.HTTPResponse`. - For FTP, file, and data URLs and requests explicitly handled by legacy - :class:`URLopener` and :class:`FancyURLopener` classes, this function + For FTP, file, and data URLs, this function returns a :class:`urllib.response.addinfourl` object. Raises :exc:`~urllib.error.URLError` on protocol errors. @@ -149,16 +148,42 @@ The :mod:`urllib.request` module defines the following functions: .. function:: pathname2url(path) - Convert the pathname *path* from the local syntax for a path to the form used in - the path component of a URL. This does not produce a complete URL. The return - value will already be quoted using the :func:`~urllib.parse.quote` function. + Convert the given local path to a ``file:`` URL. This function uses + :func:`~urllib.parse.quote` function to encode the path. For historical + reasons, the return value omits the ``file:`` scheme prefix. This example + shows the function being used on Windows:: + >>> from urllib.request import pathname2url + >>> path = 'C:\\Program Files' + >>> 'file:' + pathname2url(path) + 'file:///C:/Program%20Files' -.. function:: url2pathname(path) + .. versionchanged:: 3.14 + Paths beginning with a slash are converted to URLs with authority + sections. For example, the path ``/etc/hosts`` is converted to + the URL ``///etc/hosts``. + + .. versionchanged:: 3.14 + Windows drive letters are no longer converted to uppercase, and ``:`` + characters not following a drive letter no longer cause an + :exc:`OSError` exception to be raised on Windows. + + +.. function:: url2pathname(url) + + Convert the given ``file:`` URL to a local path. This function uses + :func:`~urllib.parse.unquote` to decode the URL. For historical reasons, + the given value *must* omit the ``file:`` scheme prefix. 
This example shows + the function being used on Windows:: + + >>> from urllib.request import url2pathname + >>> url = 'file:///C:/Program%20Files' + >>> url2pathname(url.removeprefix('file:')) + 'C:\\Program Files' + + .. versionchanged:: 3.14 + Windows drive letters are no longer converted to uppercase. - Convert the path component *path* from a percent-encoded URL to the local syntax for a - path. This does not accept a complete URL. This function uses - :func:`~urllib.parse.unquote` to decode *path*. .. function:: getproxies() @@ -1339,7 +1364,7 @@ environment settings:: >>> import urllib.request >>> proxies = {'http': 'http://proxy.example.com:8080/'} - >>> opener = urllib.request.FancyURLopener(proxies) + >>> opener = urllib.request.build_opener(urllib.request.ProxyHandler(proxies)) >>> with opener.open("http://www.python.org") as f: ... f.read().decode('utf-8') ... @@ -1347,7 +1372,7 @@ environment settings:: The following example uses no proxies at all, overriding environment settings:: >>> import urllib.request - >>> opener = urllib.request.FancyURLopener({}) + >>> opener = urllib.request.build_opener(urllib.request.ProxyHandler({}})) >>> with opener.open("http://www.python.org/") as f: ... f.read().decode('utf-8') ... @@ -1412,121 +1437,6 @@ some point in the future. Cleans up temporary files that may have been left behind by previous calls to :func:`urlretrieve`. -.. class:: URLopener(proxies=None, **x509) - - .. deprecated:: 3.3 - - Base class for opening and reading URLs. Unless you need to support opening - objects using schemes other than :file:`http:`, :file:`ftp:`, or :file:`file:`, - you probably want to use :class:`FancyURLopener`. - - By default, the :class:`URLopener` class sends a :mailheader:`User-Agent` header - of ``urllib/VVV``, where *VVV* is the :mod:`urllib` version number. - Applications can define their own :mailheader:`User-Agent` header by subclassing - :class:`URLopener` or :class:`FancyURLopener` and setting the class attribute - :attr:`version` to an appropriate string value in the subclass definition. - - The optional *proxies* parameter should be a dictionary mapping scheme names to - proxy URLs, where an empty dictionary turns proxies off completely. Its default - value is ``None``, in which case environmental proxy settings will be used if - present, as discussed in the definition of :func:`urlopen`, above. - - Additional keyword parameters, collected in *x509*, may be used for - authentication of the client when using the :file:`https:` scheme. The keywords - *key_file* and *cert_file* are supported to provide an SSL key and certificate; - both are needed to support client authentication. - - :class:`URLopener` objects will raise an :exc:`OSError` exception if the server - returns an error code. - - .. method:: open(fullurl, data=None) - - Open *fullurl* using the appropriate protocol. This method sets up cache and - proxy information, then calls the appropriate open method with its input - arguments. If the scheme is not recognized, :meth:`open_unknown` is called. - The *data* argument has the same meaning as the *data* argument of - :func:`urlopen`. - - This method always quotes *fullurl* using :func:`~urllib.parse.quote`. - - .. method:: open_unknown(fullurl, data=None) - - Overridable interface to open unknown URL types. - - - .. method:: retrieve(url, filename=None, reporthook=None, data=None) - - Retrieves the contents of *url* and places it in *filename*. 
The return value - is a tuple consisting of a local filename and either an - :class:`email.message.Message` object containing the response headers (for remote - URLs) or ``None`` (for local URLs). The caller must then open and read the - contents of *filename*. If *filename* is not given and the URL refers to a - local file, the input filename is returned. If the URL is non-local and - *filename* is not given, the filename is the output of :func:`tempfile.mktemp` - with a suffix that matches the suffix of the last path component of the input - URL. If *reporthook* is given, it must be a function accepting three numeric - parameters: A chunk number, the maximum size chunks are read in and the total size of the download - (-1 if unknown). It will be called once at the start and after each chunk of data is read from the - network. *reporthook* is ignored for local URLs. - - If the *url* uses the :file:`http:` scheme identifier, the optional *data* - argument may be given to specify a ``POST`` request (normally the request type - is ``GET``). The *data* argument must in standard - :mimetype:`application/x-www-form-urlencoded` format; see the - :func:`urllib.parse.urlencode` function. - - - .. attribute:: version - - Variable that specifies the user agent of the opener object. To get - :mod:`urllib` to tell servers that it is a particular user agent, set this in a - subclass as a class variable or in the constructor before calling the base - constructor. - - -.. class:: FancyURLopener(...) - - .. deprecated:: 3.3 - - :class:`FancyURLopener` subclasses :class:`URLopener` providing default handling - for the following HTTP response codes: 301, 302, 303, 307 and 401. For the 30x - response codes listed above, the :mailheader:`Location` header is used to fetch - the actual URL. For 401 response codes (authentication required), basic HTTP - authentication is performed. For the 30x response codes, recursion is bounded - by the value of the *maxtries* attribute, which defaults to 10. - - For all other response codes, the method :meth:`~BaseHandler.http_error_default` is called - which you can override in subclasses to handle the error appropriately. - - .. note:: - - According to the letter of :rfc:`2616`, 301 and 302 responses to POST requests - must not be automatically redirected without confirmation by the user. In - reality, browsers do allow automatic redirection of these responses, changing - the POST to a GET, and :mod:`urllib` reproduces this behaviour. - - The parameters to the constructor are the same as those for :class:`URLopener`. - - .. note:: - - When performing basic authentication, a :class:`FancyURLopener` instance calls - its :meth:`prompt_user_passwd` method. The default implementation asks the - users for the required information on the controlling terminal. A subclass may - override this method to support more appropriate behavior if needed. - - The :class:`FancyURLopener` class offers one additional method that should be - overloaded to provide the appropriate behavior: - - .. method:: prompt_user_passwd(host, realm) - - Return information needed to authenticate the user at the given host in the - specified security realm. The return value should be a tuple, ``(user, - password)``, which can be used for basic authentication. - - The implementation prompts for this information on the terminal; an application - should override this method to use an appropriate interaction model in the local - environment. 
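For readers migrating away from the removed opener classes, here is a minimal, hedged sketch of the handler-based replacement; the URL and credentials are placeholders, and only documented :mod:`urllib.request` APIs (``build_opener``, ``ProxyHandler``, ``HTTPBasicAuthHandler``, ``HTTPPasswordMgrWithDefaultRealm``) are used::

    import urllib.request

    password_mgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
    password_mgr.add_password(None, 'http://www.example.com/', 'user', 'secret')

    opener = urllib.request.build_opener(
        urllib.request.ProxyHandler({}),                    # empty dict: disable proxies
        urllib.request.HTTPBasicAuthHandler(password_mgr),  # handles 401 responses
    )
    with opener.open('http://www.example.com/') as f:
        body = f.read().decode('utf-8')
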
- :mod:`urllib.request` Restrictions ---------------------------------- @@ -1578,8 +1488,7 @@ some point in the future. you try to fetch a file whose read permissions make it inaccessible; the FTP code will try to read it, fail with a 550 error, and then perform a directory listing for the unreadable file. If fine-grained control is needed, consider - using the :mod:`ftplib` module, subclassing :class:`FancyURLopener`, or changing - *_urlopener* to meet your needs. + using the :mod:`ftplib` module. diff --git a/Doc/library/uuid.rst b/Doc/library/uuid.rst index 9be12edd36b9a8..39e82d0e19a9ac 100644 --- a/Doc/library/uuid.rst +++ b/Doc/library/uuid.rst @@ -153,7 +153,7 @@ which relays any information about the UUID's safety, using this enumeration: The UUID version number (1 through 8, meaningful only when the variant is :const:`RFC_4122`). - .. versionchanged:: next + .. versionchanged:: 3.14 Added UUID version 8. @@ -233,7 +233,7 @@ The :mod:`uuid` module defines the following functions: non-specified arguments are substituted for a pseudo-random integer of appropriate size. - .. versionadded:: next + .. versionadded:: 3.14 .. index:: single: uuid8 @@ -323,7 +323,7 @@ The following options are accepted: Specify the function name to use to generate the uuid. By default :func:`uuid4` is used. - .. versionadded:: next + .. versionadded:: 3.14 Allow generating UUID version 8. .. option:: -n diff --git a/Doc/library/venv.rst b/Doc/library/venv.rst index 5205c6c211d9bf..bed799aedfdfb1 100644 --- a/Doc/library/venv.rst +++ b/Doc/library/venv.rst @@ -25,6 +25,9 @@ A virtual environment is created on top of an existing Python installation, known as the virtual environment's "base" Python, and may optionally be isolated from the packages in the base environment, so only those explicitly installed in the virtual environment are available. +See :ref:`sys-path-init-virtual-environments` and :mod:`site`'s +:ref:`virtual environments documentation ` +for more information. When used from within a virtual environment, common installation tools such as :pypi:`pip` will install Python packages into a virtual environment diff --git a/Doc/reference/expressions.rst b/Doc/reference/expressions.rst index 3eaceae41f7eaf..7c95b207b1aed2 100644 --- a/Doc/reference/expressions.rst +++ b/Doc/reference/expressions.rst @@ -28,13 +28,12 @@ Arithmetic conversions .. index:: pair: arithmetic; conversion When a description of an arithmetic operator below uses the phrase "the numeric -arguments are converted to a common type", this means that the operator +arguments are converted to a common real type", this means that the operator implementation for built-in types works as follows: -* If either argument is a complex number, the other is converted to complex; +* If both arguments are complex numbers, no conversion is performed; -* otherwise, if either argument is a floating-point number, the other is - converted to floating point; +* if either argument is a complex or a floating-point number, the other is converted to a floating-point number; * otherwise, both must be integers and no conversion is necessary. @@ -1323,12 +1322,16 @@ operators and one for additive operators: The ``*`` (multiplication) operator yields the product of its arguments. The arguments must either both be numbers, or one argument must be an integer and the other must be a sequence. In the former case, the numbers are converted to a -common type and then multiplied together. In the latter case, sequence +common real type and then multiplied together. 
In the latter case, sequence repetition is performed; a negative repetition factor yields an empty sequence. This operation can be customized using the special :meth:`~object.__mul__` and :meth:`~object.__rmul__` methods. +.. versionchanged:: 3.14 + If only one operand is a complex number, the other operand is converted + to a floating-point number. + .. index:: single: matrix multiplication pair: operator; @ (at) @@ -1396,23 +1399,31 @@ floating-point number using the :func:`abs` function if appropriate. The ``+`` (addition) operator yields the sum of its arguments. The arguments must either both be numbers or both be sequences of the same type. In the -former case, the numbers are converted to a common type and then added together. +former case, the numbers are converted to a common real type and then added together. In the latter case, the sequences are concatenated. This operation can be customized using the special :meth:`~object.__add__` and :meth:`~object.__radd__` methods. +.. versionchanged:: 3.14 + If only one operand is a complex number, the other operand is converted + to a floating-point number. + .. index:: single: subtraction single: operator; - (minus) single: - (minus); binary operator The ``-`` (subtraction) operator yields the difference of its arguments. The -numeric arguments are first converted to a common type. +numeric arguments are first converted to a common real type. This operation can be customized using the special :meth:`~object.__sub__` and :meth:`~object.__rsub__` methods. +.. versionchanged:: 3.14 + If only one operand is a complex number, the other operand is converted + to a floating-point number. + .. _shifting: diff --git a/Doc/tools/.nitignore b/Doc/tools/.nitignore index 66914f79f3d4ec..711c0b64095bd2 100644 --- a/Doc/tools/.nitignore +++ b/Doc/tools/.nitignore @@ -13,8 +13,6 @@ Doc/c-api/type.rst Doc/c-api/typeobj.rst Doc/extending/extending.rst Doc/glossary.rst -Doc/howto/descriptor.rst -Doc/howto/enum.rst Doc/library/ast.rst Doc/library/asyncio-extending.rst Doc/library/asyncio-subprocess.rst diff --git a/Doc/tutorial/errors.rst b/Doc/tutorial/errors.rst index 4c61cbb2b5bc3d..c01cb8c14a0360 100644 --- a/Doc/tutorial/errors.rst +++ b/Doc/tutorial/errors.rst @@ -23,7 +23,7 @@ complaint you get while you are still learning Python:: ^^^^^ SyntaxError: invalid syntax -The parser repeats the offending line and displays little 'arrow's pointing +The parser repeats the offending line and displays little arrows pointing at the token in the line where the error was detected. The error may be caused by the absence of a token *before* the indicated token. 
In the example, the error is detected at the function :func:`print`, since a colon diff --git a/Doc/whatsnew/3.11.rst b/Doc/whatsnew/3.11.rst index e5c6d7cd308504..ed41ecd50b0011 100644 --- a/Doc/whatsnew/3.11.rst +++ b/Doc/whatsnew/3.11.rst @@ -1670,7 +1670,7 @@ Replaced opcodes | | | for each direction | | | | | +------------------------------------+------------------------------------+-----------------------------------------+ -| | :opcode:`!SETUP_WITH` | :opcode:`BEFORE_WITH` | :keyword:`with` block setup | +| | :opcode:`!SETUP_WITH` | :opcode:`!BEFORE_WITH` | :keyword:`with` block setup | | | :opcode:`!SETUP_ASYNC_WITH` | | | +------------------------------------+------------------------------------+-----------------------------------------+ diff --git a/Doc/whatsnew/3.14.rst b/Doc/whatsnew/3.14.rst index 0e526f9959b8ec..20e1eaad2316c9 100644 --- a/Doc/whatsnew/3.14.rst +++ b/Doc/whatsnew/3.14.rst @@ -65,6 +65,8 @@ Summary -- release highlights .. PEP-sized items next. +* :ref:`PEP 649: deferred evaluation of annotations ` +* :ref:`PEP 741: Python Configuration C API ` New features @@ -172,6 +174,40 @@ Improved error messages ValueError: too many values to unpack (expected 3, got 4) +.. _whatsnew314-pep741: + +PEP 741: Python Configuration C API +----------------------------------- + +Add a :ref:`PyInitConfig C API ` to configure the Python +initialization without relying on C structures and the ability to make +ABI-compatible changes in the future. + +Complete the :pep:`587` :ref:`PyConfig C API ` by adding +:c:func:`PyInitConfig_AddModule` which can be used to add a built-in extension +module; feature previously referred to as the “inittab”. + +Add :c:func:`PyConfig_Get` and :c:func:`PyConfig_Set` functions to get and set +the current runtime configuration. + +PEP 587 “Python Initialization Configuration” unified all the ways to configure +the Python initialization. This PEP unifies also the configuration of the +Python preinitialization and the Python initialization in a single API. +Moreover, this PEP only provides a single choice to embed Python, instead of +having two “Python” and “Isolated” choices (PEP 587), to simplify the API +further. + +The lower level PEP 587 PyConfig API remains available for use cases with an +intentionally higher level of coupling to CPython implementation details (such +as emulating the full functionality of CPython’s CLI, including its +configuration mechanisms). + +(Contributed by Victor Stinner in :gh:`107954`.) + +.. seealso:: + :pep:`741`. + + Other language changes ====================== @@ -194,6 +230,13 @@ Other language changes They raise an error if the argument is a string. (Contributed by Serhiy Storchaka in :gh:`84978`.) +* Implement mixed-mode arithmetic rules combining real and complex numbers as + specified by C standards since C99. + (Contributed by Sergey B Kirpichev in :gh:`69639`.) + +* All Windows code pages are now supported as "cpXXX" codecs on Windows. + (Contributed by Serhiy Storchaka in :gh:`123803`.) + * :class:`super` objects are now :mod:`pickleable ` and :mod:`copyable `. (Contributed by Serhiy Storchaka in :gh:`125767`.) @@ -225,7 +268,7 @@ argparse * Introduced the optional *suggest_on_error* parameter to :class:`argparse.ArgumentParser`, enabling suggestions for argument choices and subparser names if mistyped by the user. - (Contributed by Savannah Ostrowski in :gh:`124456`) + (Contributed by Savannah Ostrowski in :gh:`124456`.) ast --- @@ -270,12 +313,8 @@ ctypes to help match a non-default ABI. 
(Contributed by Petr Viktorin in :gh:`97702`.) -decimal -------- - -* Add alternative :class:`~decimal.Decimal` constructor - :meth:`Decimal.from_number() `. - (Contributed by Serhiy Storchaka in :gh:`121798`.) +* The :exc:`~ctypes.COMError` exception is now public. + (Contributed by Jun Komoda in :gh:`126686`.) datetime -------- @@ -283,6 +322,13 @@ datetime * Add :meth:`datetime.time.strptime` and :meth:`datetime.date.strptime`. (Contributed by Wannes Boeykens in :gh:`41431`.) +decimal +------- + +* Add alternative :class:`~decimal.Decimal` constructor + :meth:`Decimal.from_number() `. + (Contributed by Serhiy Storchaka in :gh:`121798`.) + dis --- @@ -393,6 +439,17 @@ mimetypes (Contributed by Hugo van Kemenade in :gh:`89416`.) +* Add MIME types for images with RFCs: + + * :rfc:`1494`: CCITT Group 3 (``.g3``) + * :rfc:`3362`: Real-time Facsimile, T.38 (``.t38``) + * :rfc:`3745`: JPEG 2000 (``.jp2``), extension (``.jpx``) and compound (``.jpm``) + * :rfc:`3950`: Tag Image File Format Fax eXtended, TIFF-FX (``.tfx``) + * :rfc:`4047`: Flexible Image Transport System (``.fits``) + * :rfc:`7903`: Enhanced Metafile (``.emf``) and Windows Metafile (``.wmf``) + + (Contributed by Hugo van Kemenade in :gh:`85957`.) + multiprocessing --------------- @@ -404,6 +461,11 @@ multiprocessing :func:`multiprocessing.get_context` (preferred) or change the default via :func:`multiprocessing.set_start_method`. (Contributed by Gregory P. Smith in :gh:`84559`.) +* :mod:`multiprocessing`'s ``"forkserver"`` start method now authenticates + its control socket to avoid solely relying on filesystem permissions + to restrict what other processes could cause the forkserver to spawn workers + and run code. + (Contributed by Gregory P. Smith for :gh:`97514`.) * The :ref:`multiprocessing proxy objects ` for *list* and *dict* types gain previously overlooked missing methods: @@ -411,7 +473,7 @@ multiprocessing * :meth:`~dict.fromkeys`, ``reversed(d)``, ``d | {}``, ``{} | d``, ``d |= {'b': 2}`` for proxies of :class:`dict`. - (Contributed by Roy Hyunjin Han for :gh:`103134`) + (Contributed by Roy Hyunjin Han for :gh:`103134`.) operator @@ -454,14 +516,6 @@ pathlib (Contributed by Barney Gale in :gh:`125413`.) -platform --------- - -* Add :func:`platform.invalidate_caches` to invalidate the cached results. - - (Contributed by Bénédikt Tran in :gh:`122549`.) - - pdb --- @@ -476,6 +530,14 @@ pdb command when :mod:`pdb` is in ``inline`` mode. (Contributed by Tian Gao in :gh:`123757`.) +platform +-------- + +* Add :func:`platform.invalidate_caches` to invalidate the cached results. + + (Contributed by Bénédikt Tran in :gh:`122549`.) + + pickle ------ @@ -619,6 +681,14 @@ argparse of :class:`!argparse.BooleanOptionalAction`. They were deprecated since 3.12. +* Calling :meth:`~argparse.ArgumentParser.add_argument_group` on an argument + group, and calling :meth:`~argparse.ArgumentParser.add_argument_group` or + :meth:`~argparse.ArgumentParser.add_mutually_exclusive_group` on a mutually + exclusive group now raise exceptions. This nesting was never supported, + often failed to work correctly, and was unintentionally exposed through + inheritance. This functionality has been deprecated since Python 3.11. + (Contributed by Savannah Ostrowski in :gh:`127186`.) + ast --- @@ -755,6 +825,17 @@ urllib * Remove deprecated :class:`!Quoter` class from :mod:`urllib.parse`. It had previously raised a :exc:`DeprecationWarning` since Python 3.11. (Contributed by Nikita Sobolev in :gh:`118827`.) 
+* Remove deprecated :class:`!URLopener` and :class:`!FancyURLopener` classes + from :mod:`urllib.request`. They had previously raised a + :exc:`DeprecationWarning` since Python 3.3. + + ``myopener.open()`` can be replaced with :func:`~urllib.request.urlopen`, + and ``myopener.retrieve()`` can be replaced with + :func:`~urllib.request.urlretrieve`. Customizations to the opener + classes can be replaced by passing customized handlers to + :func:`~urllib.request.build_opener`. + + (Contributed by Barney Gale in :gh:`84850`.) Others ------ @@ -846,7 +927,7 @@ New features * Add :c:func:`PyLong_IsPositive`, :c:func:`PyLong_IsNegative` and :c:func:`PyLong_IsZero` for checking if :c:type:`PyLongObject` is positive, negative, or zero, respectively. - (Contribued by James Roy and Sergey B Kirpichev in :gh:`126061`.) + (Contributed by James Roy and Sergey B Kirpichev in :gh:`126061`.) * Add new functions to convert C ```` numbers from/to Python :class:`int`: diff --git a/Doc/whatsnew/3.4.rst b/Doc/whatsnew/3.4.rst index 9d746b378995c3..71b186aeed7359 100644 --- a/Doc/whatsnew/3.4.rst +++ b/Doc/whatsnew/3.4.rst @@ -1979,7 +1979,7 @@ Other Improvements now works correctly (previously it silently returned the first python module in the file). (Contributed by Václav Šmilauer in :issue:`16421`.) -* A new opcode, :opcode:`LOAD_CLASSDEREF`, has been added to fix a bug in the +* A new opcode, :opcode:`!LOAD_CLASSDEREF`, has been added to fix a bug in the loading of free variables in class bodies that could be triggered by certain uses of :ref:`__prepare__ `. (Contributed by Benjamin Peterson in :issue:`17853`.) diff --git a/Doc/whatsnew/3.6.rst b/Doc/whatsnew/3.6.rst index 2276fed60c8db3..1fcc5d7cbfb387 100644 --- a/Doc/whatsnew/3.6.rst +++ b/Doc/whatsnew/3.6.rst @@ -2366,27 +2366,27 @@ There have been several major changes to the :term:`bytecode` in Python 3.6. (Contributed by Demur Rumed with input and reviews from Serhiy Storchaka and Victor Stinner in :issue:`26647` and :issue:`28050`.) -* The new :opcode:`FORMAT_VALUE` and :opcode:`BUILD_STRING` opcodes as part +* The new :opcode:`!FORMAT_VALUE` and :opcode:`BUILD_STRING` opcodes as part of the :ref:`formatted string literal ` implementation. (Contributed by Eric Smith in :issue:`25483` and Serhiy Storchaka in :issue:`27078`.) -* The new :opcode:`BUILD_CONST_KEY_MAP` opcode to optimize the creation +* The new :opcode:`!BUILD_CONST_KEY_MAP` opcode to optimize the creation of dictionaries with constant keys. (Contributed by Serhiy Storchaka in :issue:`27140`.) * The function call opcodes have been heavily reworked for better performance and simpler implementation. - The :opcode:`MAKE_FUNCTION`, :opcode:`CALL_FUNCTION`, - :opcode:`CALL_FUNCTION_KW` and :opcode:`BUILD_MAP_UNPACK_WITH_CALL` opcodes + The :opcode:`MAKE_FUNCTION`, :opcode:`!CALL_FUNCTION`, + :opcode:`!CALL_FUNCTION_KW` and :opcode:`!BUILD_MAP_UNPACK_WITH_CALL` opcodes have been modified, the new :opcode:`CALL_FUNCTION_EX` and - :opcode:`BUILD_TUPLE_UNPACK_WITH_CALL` have been added, and + :opcode:`!BUILD_TUPLE_UNPACK_WITH_CALL` have been added, and ``CALL_FUNCTION_VAR``, ``CALL_FUNCTION_VAR_KW`` and ``MAKE_CLOSURE`` opcodes have been removed. (Contributed by Demur Rumed in :issue:`27095`, and Serhiy Storchaka in :issue:`27213`, :issue:`28257`.) -* The new :opcode:`SETUP_ANNOTATIONS` and :opcode:`STORE_ANNOTATION` opcodes +* The new :opcode:`SETUP_ANNOTATIONS` and :opcode:`!STORE_ANNOTATION` opcodes have been added to support the new :term:`variable annotation` syntax. 
(Contributed by Ivan Levkivskyi in :issue:`27985`.) diff --git a/Doc/whatsnew/3.7.rst b/Doc/whatsnew/3.7.rst index 2d433ef4759d52..f420fa5c04479b 100644 --- a/Doc/whatsnew/3.7.rst +++ b/Doc/whatsnew/3.7.rst @@ -2476,10 +2476,10 @@ avoiding possible problems use new functions :c:func:`PySlice_Unpack` and CPython bytecode changes ------------------------ -There are two new opcodes: :opcode:`LOAD_METHOD` and :opcode:`CALL_METHOD`. +There are two new opcodes: :opcode:`LOAD_METHOD` and :opcode:`!CALL_METHOD`. (Contributed by Yury Selivanov and INADA Naoki in :issue:`26110`.) -The :opcode:`STORE_ANNOTATION` opcode has been removed. +The :opcode:`!STORE_ANNOTATION` opcode has been removed. (Contributed by Mark Shannon in :issue:`32550`.) diff --git a/Doc/whatsnew/3.8.rst b/Doc/whatsnew/3.8.rst index bdc4ca5cab5245..7aca35b2959cd2 100644 --- a/Doc/whatsnew/3.8.rst +++ b/Doc/whatsnew/3.8.rst @@ -2152,11 +2152,11 @@ CPython bytecode changes cleaning-up code for :keyword:`break`, :keyword:`continue` and :keyword:`return`. - Removed opcodes :opcode:`BREAK_LOOP`, :opcode:`CONTINUE_LOOP`, - :opcode:`SETUP_LOOP` and :opcode:`SETUP_EXCEPT`. Added new opcodes - :opcode:`ROT_FOUR`, :opcode:`BEGIN_FINALLY`, :opcode:`CALL_FINALLY` and - :opcode:`POP_FINALLY`. Changed the behavior of :opcode:`END_FINALLY` - and :opcode:`WITH_CLEANUP_START`. + Removed opcodes :opcode:`!BREAK_LOOP`, :opcode:`!CONTINUE_LOOP`, + :opcode:`!SETUP_LOOP` and :opcode:`!SETUP_EXCEPT`. Added new opcodes + :opcode:`!ROT_FOUR`, :opcode:`!BEGIN_FINALLY`, :opcode:`!CALL_FINALLY` and + :opcode:`!POP_FINALLY`. Changed the behavior of :opcode:`!END_FINALLY` + and :opcode:`!WITH_CLEANUP_START`. (Contributed by Mark Shannon, Antoine Pitrou and Serhiy Storchaka in :issue:`17611`.) diff --git a/Doc/whatsnew/3.9.rst b/Doc/whatsnew/3.9.rst index 6118b02dd9bd48..b062e6b4c9bca0 100644 --- a/Doc/whatsnew/3.9.rst +++ b/Doc/whatsnew/3.9.rst @@ -1203,7 +1203,7 @@ Changes in the C API CPython bytecode changes ------------------------ -* The :opcode:`LOAD_ASSERTION_ERROR` opcode was added for handling the +* The :opcode:`!LOAD_ASSERTION_ERROR` opcode was added for handling the :keyword:`assert` statement. Previously, the assert statement would not work correctly if the :exc:`AssertionError` exception was being shadowed. (Contributed by Zackery Spytz in :issue:`34880`.) diff --git a/Include/cpython/code.h b/Include/cpython/code.h index 370f1d259abe0f..3899d4269233a1 100644 --- a/Include/cpython/code.h +++ b/Include/cpython/code.h @@ -131,7 +131,8 @@ typedef struct { \ /* redundant values (derived from co_localsplusnames and \ co_localspluskinds) */ \ - int co_nlocalsplus; /* number of local + cell + free variables */ \ + int co_nlocalsplus; /* number of spaces for holding local, cell, \ + and free variables */ \ int co_framesize; /* Size of frame in words */ \ int co_nlocals; /* number of local variables */ \ int co_ncellvars; /* total number of cell variables */ \ diff --git a/Include/cpython/complexobject.h b/Include/cpython/complexobject.h index fbdc6a91fe895c..28576afad0b6b5 100644 --- a/Include/cpython/complexobject.h +++ b/Include/cpython/complexobject.h @@ -9,10 +9,16 @@ typedef struct { // Operations on complex numbers. 
PyAPI_FUNC(Py_complex) _Py_c_sum(Py_complex, Py_complex); +PyAPI_FUNC(Py_complex) _Py_cr_sum(Py_complex, double); PyAPI_FUNC(Py_complex) _Py_c_diff(Py_complex, Py_complex); +PyAPI_FUNC(Py_complex) _Py_cr_diff(Py_complex, double); +PyAPI_FUNC(Py_complex) _Py_rc_diff(double, Py_complex); PyAPI_FUNC(Py_complex) _Py_c_neg(Py_complex); PyAPI_FUNC(Py_complex) _Py_c_prod(Py_complex, Py_complex); +PyAPI_FUNC(Py_complex) _Py_cr_prod(Py_complex, double); PyAPI_FUNC(Py_complex) _Py_c_quot(Py_complex, Py_complex); +PyAPI_FUNC(Py_complex) _Py_cr_quot(Py_complex, double); +PyAPI_FUNC(Py_complex) _Py_rc_quot(double, Py_complex); PyAPI_FUNC(Py_complex) _Py_c_pow(Py_complex, Py_complex); PyAPI_FUNC(double) _Py_c_abs(Py_complex); diff --git a/Include/internal/pycore_backoff.h b/Include/internal/pycore_backoff.h index 20436a68b69677..3e02728522828e 100644 --- a/Include/internal/pycore_backoff.h +++ b/Include/internal/pycore_backoff.h @@ -102,10 +102,9 @@ backoff_counter_triggers(_Py_BackoffCounter counter) } /* Initial JUMP_BACKWARD counter. - * This determines when we create a trace for a loop. -* Backoff sequence 16, 32, 64, 128, 256, 512, 1024, 2048, 4096. */ -#define JUMP_BACKWARD_INITIAL_VALUE 15 -#define JUMP_BACKWARD_INITIAL_BACKOFF 4 + * This determines when we create a trace for a loop. */ +#define JUMP_BACKWARD_INITIAL_VALUE 4095 +#define JUMP_BACKWARD_INITIAL_BACKOFF 12 static inline _Py_BackoffCounter initial_jump_backoff_counter(void) { diff --git a/Include/internal/pycore_dict.h b/Include/internal/pycore_dict.h index c5399ad8e0497f..6e4a308226f3fe 100644 --- a/Include/internal/pycore_dict.h +++ b/Include/internal/pycore_dict.h @@ -43,8 +43,6 @@ extern int _PyDict_Next( extern int _PyDict_HasOnlyStringKeys(PyObject *mp); -extern void _PyDict_MaybeUntrack(PyObject *mp); - // Export for '_ctypes' shared extension PyAPI_FUNC(Py_ssize_t) _PyDict_SizeOf(PyDictObject *); @@ -92,6 +90,17 @@ extern PyObject *_PyDict_FromKeys(PyObject *, PyObject *, PyObject *); extern uint32_t _PyDictKeys_GetVersionForCurrentState( PyInterpreterState *interp, PyDictKeysObject *dictkeys); +/* Gets a version number unique to the current state of the keys of dict, if possible. + * + * In free-threaded builds ensures that the dict can be used for lock-free + * reads if a version was assigned. + * + * The caller must hold the per-object lock on dict. + * + * Returns the version number, or zero if it was not possible to get a version number. 
*/ +extern uint32_t _PyDict_GetKeysVersionForCurrentState( + PyInterpreterState *interp, PyDictObject *dict); + extern size_t _PyDict_KeysSize(PyDictKeysObject *keys); extern void _PyDictKeys_DecRef(PyDictKeysObject *keys); diff --git a/Include/internal/pycore_floatobject.h b/Include/internal/pycore_floatobject.h index be1c6cc97720d2..f44b081b06cea5 100644 --- a/Include/internal/pycore_floatobject.h +++ b/Include/internal/pycore_floatobject.h @@ -54,6 +54,8 @@ extern PyObject* _Py_string_to_number_with_underscores( extern double _Py_parse_inf_or_nan(const char *p, char **endptr); +extern int _Py_convert_int_to_double(PyObject **v, double *dbl); + #ifdef __cplusplus } diff --git a/Include/internal/pycore_gc.h b/Include/internal/pycore_gc.h index 38a1c56c09d9db..479fe10d00066d 100644 --- a/Include/internal/pycore_gc.h +++ b/Include/internal/pycore_gc.h @@ -50,7 +50,6 @@ static inline PyObject* _Py_FROM_GC(PyGC_Head *gc) { # define _PyGC_BITS_UNREACHABLE (4) # define _PyGC_BITS_FROZEN (8) # define _PyGC_BITS_SHARED (16) -# define _PyGC_BITS_SHARED_INLINE (32) # define _PyGC_BITS_DEFERRED (64) // Use deferred reference counting #endif @@ -119,23 +118,6 @@ static inline void _PyObject_GC_SET_SHARED(PyObject *op) { } #define _PyObject_GC_SET_SHARED(op) _PyObject_GC_SET_SHARED(_Py_CAST(PyObject*, op)) -/* True if the memory of the object is shared between multiple - * threads and needs special purpose when freeing due to - * the possibility of in-flight lock-free reads occurring. - * Objects with this bit that are GC objects will automatically - * delay-freed by PyObject_GC_Del. */ -static inline int _PyObject_GC_IS_SHARED_INLINE(PyObject *op) { - return _PyObject_HAS_GC_BITS(op, _PyGC_BITS_SHARED_INLINE); -} -#define _PyObject_GC_IS_SHARED_INLINE(op) \ - _PyObject_GC_IS_SHARED_INLINE(_Py_CAST(PyObject*, op)) - -static inline void _PyObject_GC_SET_SHARED_INLINE(PyObject *op) { - _PyObject_SET_GC_BITS(op, _PyGC_BITS_SHARED_INLINE); -} -#define _PyObject_GC_SET_SHARED_INLINE(op) \ - _PyObject_GC_SET_SHARED_INLINE(_Py_CAST(PyObject*, op)) - #endif /* Bit flags for _gc_prev */ diff --git a/Include/internal/pycore_global_objects_fini_generated.h b/Include/internal/pycore_global_objects_fini_generated.h index e4f0138e17edfa..c12e242d560bde 100644 --- a/Include/internal/pycore_global_objects_fini_generated.h +++ b/Include/internal/pycore_global_objects_fini_generated.h @@ -982,6 +982,7 @@ _PyStaticObjects_CheckRefcnt(PyInterpreterState *interp) { _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(hi)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(hook)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(hour)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(id)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(ident)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(identity_hint)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(ignore)); diff --git a/Include/internal/pycore_global_strings.h b/Include/internal/pycore_global_strings.h index e70f11e2a26cd5..dfd9f2b799ec8e 100644 --- a/Include/internal/pycore_global_strings.h +++ b/Include/internal/pycore_global_strings.h @@ -471,6 +471,7 @@ struct _Py_global_strings { STRUCT_FOR_ID(hi) STRUCT_FOR_ID(hook) STRUCT_FOR_ID(hour) + STRUCT_FOR_ID(id) STRUCT_FOR_ID(ident) STRUCT_FOR_ID(identity_hint) STRUCT_FOR_ID(ignore) diff --git a/Include/internal/pycore_interp.h b/Include/internal/pycore_interp.h index 824b865eda60df..87cdcb5b119d15 100644 --- a/Include/internal/pycore_interp.h +++ b/Include/internal/pycore_interp.h @@ -130,6 +130,7 @@ struct _is { 
uint64_t next_unique_id; /* The linked list of threads, newest first. */ PyThreadState *head; + _PyThreadStateImpl *preallocated; /* The thread currently executing in the __main__ module, if any. */ PyThreadState *main; /* Used in Modules/_threadmodule.c. */ @@ -278,9 +279,12 @@ struct _is { struct _Py_interp_cached_objects cached_objects; struct _Py_interp_static_objects static_objects; + Py_ssize_t _interactive_src_count; + /* the initial PyInterpreterState.threads.head */ _PyThreadStateImpl _initial_thread; - Py_ssize_t _interactive_src_count; + // _initial_thread should be the last field of PyInterpreterState. + // See https://github.com/python/cpython/issues/127117. }; diff --git a/Include/internal/pycore_modsupport.h b/Include/internal/pycore_modsupport.h index c661f1d82a84f6..614e9f93751834 100644 --- a/Include/internal/pycore_modsupport.h +++ b/Include/internal/pycore_modsupport.h @@ -1,8 +1,6 @@ #ifndef Py_INTERNAL_MODSUPPORT_H #define Py_INTERNAL_MODSUPPORT_H -#include "pycore_lock.h" // _PyOnceFlag - #ifdef __cplusplus extern "C" { #endif diff --git a/Include/internal/pycore_object.h b/Include/internal/pycore_object.h index c7af720b1ce43d..34d835a7f84ee7 100644 --- a/Include/internal/pycore_object.h +++ b/Include/internal/pycore_object.h @@ -14,6 +14,7 @@ extern "C" { #include "pycore_interp.h" // PyInterpreterState.gc #include "pycore_pyatomic_ft_wrappers.h" // FT_ATOMIC_STORE_PTR_RELAXED #include "pycore_pystate.h" // _PyInterpreterState_GET() +#include "pycore_stackref.h" #include "pycore_uniqueid.h" // _PyObject_ThreadIncrefSlow() // This value is added to `ob_ref_shared` for objects that use deferred @@ -94,6 +95,14 @@ PyAPI_FUNC(void) _Py_NO_RETURN _Py_FatalRefcountErrorFunc( #define _Py_FatalRefcountError(message) \ _Py_FatalRefcountErrorFunc(__func__, (message)) +#define _PyReftracerTrack(obj, operation) \ + do { \ + struct _reftracer_runtime_state *tracer = &_PyRuntime.ref_tracer; \ + if (tracer->tracer_func != NULL) { \ + void *data = tracer->tracer_data; \ + tracer->tracer_func((obj), (operation), data); \ + } \ + } while(0) #ifdef Py_REF_DEBUG /* The symbol is only exposed in the API for the sake of extensions @@ -208,11 +217,7 @@ _Py_DECREF_SPECIALIZED(PyObject *op, const destructor destruct) #ifdef Py_TRACE_REFS _Py_ForgetReference(op); #endif - struct _reftracer_runtime_state *tracer = &_PyRuntime.ref_tracer; - if (tracer->tracer_func != NULL) { - void* data = tracer->tracer_data; - tracer->tracer_func(op, PyRefTracer_DESTROY, data); - } + _PyReftracerTrack(op, PyRefTracer_DESTROY); destruct(op); } } @@ -591,6 +596,20 @@ _Py_TryIncrefCompare(PyObject **src, PyObject *op) return 1; } +static inline int +_Py_TryIncrefCompareStackRef(PyObject **src, PyObject *op, _PyStackRef *out) +{ + if (_Py_IsImmortal(op) || _PyObject_HasDeferredRefcount(op)) { + *out = (_PyStackRef){ .bits = (intptr_t)op | Py_TAG_DEFERRED }; + return 1; + } + if (_Py_TryIncrefCompare(src, op)) { + *out = PyStackRef_FromPyObjectSteal(op); + return 1; + } + return 0; +} + /* Loads and increfs an object from ptr, which may contain a NULL value. Safe with concurrent (atomic) updates to ptr. NOTE: The writer must set maybe-weakref on the stored object! 
*/ @@ -900,6 +919,13 @@ PyAPI_DATA(int) _Py_SwappedOp[]; extern void _Py_GetConstant_Init(void); +enum _PyAnnotateFormat { + _Py_ANNOTATE_FORMAT_VALUE = 1, + _Py_ANNOTATE_FORMAT_VALUE_WITH_FAKE_GLOBALS = 2, + _Py_ANNOTATE_FORMAT_FORWARDREF = 3, + _Py_ANNOTATE_FORMAT_STRING = 4, +}; + #ifdef __cplusplus } #endif diff --git a/Include/internal/pycore_opcode_metadata.h b/Include/internal/pycore_opcode_metadata.h index 58e583eabbcc46..5ce172856e1b19 100644 --- a/Include/internal/pycore_opcode_metadata.h +++ b/Include/internal/pycore_opcode_metadata.h @@ -949,6 +949,931 @@ int _PyOpcode_num_pushed(int opcode, int oparg) { #endif +extern int _PyOpcode_max_stack_effect(int opcode, int oparg, int *effect); +#ifdef NEED_OPCODE_METADATA +int _PyOpcode_max_stack_effect(int opcode, int oparg, int *effect) { + switch(opcode) { + case BINARY_OP: { + *effect = 0; + return 0; + } + case BINARY_OP_ADD_FLOAT: { + *effect = 0; + return 0; + } + case BINARY_OP_ADD_INT: { + *effect = 0; + return 0; + } + case BINARY_OP_ADD_UNICODE: { + *effect = 0; + return 0; + } + case BINARY_OP_INPLACE_ADD_UNICODE: { + *effect = 0; + return 0; + } + case BINARY_OP_MULTIPLY_FLOAT: { + *effect = 0; + return 0; + } + case BINARY_OP_MULTIPLY_INT: { + *effect = 0; + return 0; + } + case BINARY_OP_SUBTRACT_FLOAT: { + *effect = 0; + return 0; + } + case BINARY_OP_SUBTRACT_INT: { + *effect = 0; + return 0; + } + case BINARY_SLICE: { + *effect = 0; + return 0; + } + case BINARY_SUBSCR: { + *effect = 0; + return 0; + } + case BINARY_SUBSCR_DICT: { + *effect = -1; + return 0; + } + case BINARY_SUBSCR_GETITEM: { + *effect = 0; + return 0; + } + case BINARY_SUBSCR_LIST_INT: { + *effect = -1; + return 0; + } + case BINARY_SUBSCR_STR_INT: { + *effect = -1; + return 0; + } + case BINARY_SUBSCR_TUPLE_INT: { + *effect = -1; + return 0; + } + case BUILD_LIST: { + *effect = 1 - oparg; + return 0; + } + case BUILD_MAP: { + *effect = 1 - oparg*2; + return 0; + } + case BUILD_SET: { + *effect = 1 - oparg; + return 0; + } + case BUILD_SLICE: { + *effect = -1 - ((oparg == 3) ? 
1 : 0); + return 0; + } + case BUILD_STRING: { + *effect = 1 - oparg; + return 0; + } + case BUILD_TUPLE: { + *effect = 1 - oparg; + return 0; + } + case CACHE: { + *effect = 0; + return 0; + } + case CALL: { + int max_eff = Py_MAX(0, -1 - oparg); + max_eff = Py_MAX(max_eff, -2 - oparg); + *effect = max_eff; + return 0; + } + case CALL_ALLOC_AND_ENTER_INIT: { + int max_eff = Py_MAX(0, -1 - oparg); + max_eff = Py_MAX(max_eff, -2 - oparg); + *effect = max_eff; + return 0; + } + case CALL_BOUND_METHOD_EXACT_ARGS: { + int max_eff = Py_MAX(0, -1 - oparg); + max_eff = Py_MAX(max_eff, -2 - oparg); + *effect = max_eff; + return 0; + } + case CALL_BOUND_METHOD_GENERAL: { + int max_eff = Py_MAX(0, -1 - oparg); + max_eff = Py_MAX(max_eff, -2 - oparg); + *effect = max_eff; + return 0; + } + case CALL_BUILTIN_CLASS: { + *effect = -1 - oparg; + return 0; + } + case CALL_BUILTIN_FAST: { + *effect = -1 - oparg; + return 0; + } + case CALL_BUILTIN_FAST_WITH_KEYWORDS: { + *effect = -1 - oparg; + return 0; + } + case CALL_BUILTIN_O: { + *effect = -1 - oparg; + return 0; + } + case CALL_FUNCTION_EX: { + *effect = Py_MAX(0, -2 - (oparg & 1)); + return 0; + } + case CALL_INTRINSIC_1: { + *effect = 0; + return 0; + } + case CALL_INTRINSIC_2: { + *effect = -1; + return 0; + } + case CALL_ISINSTANCE: { + *effect = -1 - oparg; + return 0; + } + case CALL_KW: { + int max_eff = Py_MAX(0, -2 - oparg); + max_eff = Py_MAX(max_eff, -3 - oparg); + *effect = max_eff; + return 0; + } + case CALL_KW_BOUND_METHOD: { + int max_eff = Py_MAX(0, -2 - oparg); + max_eff = Py_MAX(max_eff, -3 - oparg); + *effect = max_eff; + return 0; + } + case CALL_KW_NON_PY: { + *effect = Py_MAX(0, -2 - oparg); + return 0; + } + case CALL_KW_PY: { + int max_eff = Py_MAX(0, -2 - oparg); + max_eff = Py_MAX(max_eff, -3 - oparg); + *effect = max_eff; + return 0; + } + case CALL_LEN: { + *effect = -1 - oparg; + return 0; + } + case CALL_LIST_APPEND: { + *effect = -3; + return 0; + } + case CALL_METHOD_DESCRIPTOR_FAST: { + *effect = -1 - oparg; + return 0; + } + case CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS: { + *effect = -1 - oparg; + return 0; + } + case CALL_METHOD_DESCRIPTOR_NOARGS: { + *effect = -1 - oparg; + return 0; + } + case CALL_METHOD_DESCRIPTOR_O: { + *effect = -1 - oparg; + return 0; + } + case CALL_NON_PY_GENERAL: { + *effect = Py_MAX(0, -1 - oparg); + return 0; + } + case CALL_PY_EXACT_ARGS: { + int max_eff = Py_MAX(0, -1 - oparg); + max_eff = Py_MAX(max_eff, -2 - oparg); + *effect = max_eff; + return 0; + } + case CALL_PY_GENERAL: { + int max_eff = Py_MAX(0, -1 - oparg); + max_eff = Py_MAX(max_eff, -2 - oparg); + *effect = max_eff; + return 0; + } + case CALL_STR_1: { + *effect = -2; + return 0; + } + case CALL_TUPLE_1: { + *effect = -2; + return 0; + } + case CALL_TYPE_1: { + *effect = -2; + return 0; + } + case CHECK_EG_MATCH: { + *effect = 0; + return 0; + } + case CHECK_EXC_MATCH: { + *effect = 0; + return 0; + } + case CLEANUP_THROW: { + *effect = -1; + return 0; + } + case COMPARE_OP: { + *effect = 0; + return 0; + } + case COMPARE_OP_FLOAT: { + *effect = 0; + return 0; + } + case COMPARE_OP_INT: { + *effect = 0; + return 0; + } + case COMPARE_OP_STR: { + *effect = 0; + return 0; + } + case CONTAINS_OP: { + *effect = 0; + return 0; + } + case CONTAINS_OP_DICT: { + *effect = -1; + return 0; + } + case CONTAINS_OP_SET: { + *effect = -1; + return 0; + } + case CONVERT_VALUE: { + *effect = 0; + return 0; + } + case COPY: { + *effect = 1; + return 0; + } + case COPY_FREE_VARS: { + *effect = 0; + return 0; + } + case DELETE_ATTR: { + 
*effect = -1; + return 0; + } + case DELETE_DEREF: { + *effect = 0; + return 0; + } + case DELETE_FAST: { + *effect = 0; + return 0; + } + case DELETE_GLOBAL: { + *effect = 0; + return 0; + } + case DELETE_NAME: { + *effect = 0; + return 0; + } + case DELETE_SUBSCR: { + *effect = -2; + return 0; + } + case DICT_MERGE: { + *effect = -1; + return 0; + } + case DICT_UPDATE: { + *effect = -1; + return 0; + } + case END_ASYNC_FOR: { + *effect = -2; + return 0; + } + case END_FOR: { + *effect = -1; + return 0; + } + case END_SEND: { + *effect = -1; + return 0; + } + case ENTER_EXECUTOR: { + *effect = 0; + return 0; + } + case EXIT_INIT_CHECK: { + *effect = -1; + return 0; + } + case EXTENDED_ARG: { + *effect = 0; + return 0; + } + case FORMAT_SIMPLE: { + *effect = 0; + return 0; + } + case FORMAT_WITH_SPEC: { + *effect = -1; + return 0; + } + case FOR_ITER: { + *effect = 1; + return 0; + } + case FOR_ITER_GEN: { + *effect = 1; + return 0; + } + case FOR_ITER_LIST: { + *effect = 1; + return 0; + } + case FOR_ITER_RANGE: { + *effect = 1; + return 0; + } + case FOR_ITER_TUPLE: { + *effect = 1; + return 0; + } + case GET_AITER: { + *effect = 0; + return 0; + } + case GET_ANEXT: { + *effect = 1; + return 0; + } + case GET_AWAITABLE: { + *effect = 0; + return 0; + } + case GET_ITER: { + *effect = 0; + return 0; + } + case GET_LEN: { + *effect = 1; + return 0; + } + case GET_YIELD_FROM_ITER: { + *effect = 0; + return 0; + } + case IMPORT_FROM: { + *effect = 1; + return 0; + } + case IMPORT_NAME: { + *effect = -1; + return 0; + } + case INSTRUMENTED_CALL: { + *effect = Py_MAX(0, -1 - oparg); + return 0; + } + case INSTRUMENTED_CALL_FUNCTION_EX: { + *effect = 0; + return 0; + } + case INSTRUMENTED_CALL_KW: { + *effect = 0; + return 0; + } + case INSTRUMENTED_END_FOR: { + *effect = -1; + return 0; + } + case INSTRUMENTED_END_SEND: { + *effect = -1; + return 0; + } + case INSTRUMENTED_FOR_ITER: { + *effect = 0; + return 0; + } + case INSTRUMENTED_INSTRUCTION: { + *effect = 0; + return 0; + } + case INSTRUMENTED_JUMP_BACKWARD: { + *effect = 0; + return 0; + } + case INSTRUMENTED_JUMP_FORWARD: { + *effect = 0; + return 0; + } + case INSTRUMENTED_LINE: { + *effect = 0; + return 0; + } + case INSTRUMENTED_LOAD_SUPER_ATTR: { + *effect = 0; + return 0; + } + case INSTRUMENTED_POP_JUMP_IF_FALSE: { + *effect = 0; + return 0; + } + case INSTRUMENTED_POP_JUMP_IF_NONE: { + *effect = 0; + return 0; + } + case INSTRUMENTED_POP_JUMP_IF_NOT_NONE: { + *effect = 0; + return 0; + } + case INSTRUMENTED_POP_JUMP_IF_TRUE: { + *effect = 0; + return 0; + } + case INSTRUMENTED_RESUME: { + *effect = 0; + return 0; + } + case INSTRUMENTED_RETURN_VALUE: { + *effect = 0; + return 0; + } + case INSTRUMENTED_YIELD_VALUE: { + *effect = 0; + return 0; + } + case INTERPRETER_EXIT: { + *effect = -1; + return 0; + } + case IS_OP: { + *effect = -1; + return 0; + } + case JUMP: { + *effect = 0; + return 0; + } + case JUMP_BACKWARD: { + *effect = 0; + return 0; + } + case JUMP_BACKWARD_NO_INTERRUPT: { + *effect = 0; + return 0; + } + case JUMP_FORWARD: { + *effect = 0; + return 0; + } + case JUMP_IF_FALSE: { + *effect = 0; + return 0; + } + case JUMP_IF_TRUE: { + *effect = 0; + return 0; + } + case JUMP_NO_INTERRUPT: { + *effect = 0; + return 0; + } + case LIST_APPEND: { + *effect = -1; + return 0; + } + case LIST_EXTEND: { + *effect = -1; + return 0; + } + case LOAD_ATTR: { + *effect = Py_MAX(1, (oparg & 1)); + return 0; + } + case LOAD_ATTR_CLASS: { + *effect = Py_MAX(0, (oparg & 1)); + return 0; + } + case 
LOAD_ATTR_CLASS_WITH_METACLASS_CHECK: { + *effect = Py_MAX(0, (oparg & 1)); + return 0; + } + case LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN: { + *effect = 0; + return 0; + } + case LOAD_ATTR_INSTANCE_VALUE: { + *effect = Py_MAX(0, (oparg & 1)); + return 0; + } + case LOAD_ATTR_METHOD_LAZY_DICT: { + *effect = 1; + return 0; + } + case LOAD_ATTR_METHOD_NO_DICT: { + *effect = 1; + return 0; + } + case LOAD_ATTR_METHOD_WITH_VALUES: { + *effect = 1; + return 0; + } + case LOAD_ATTR_MODULE: { + *effect = Py_MAX(0, (oparg & 1)); + return 0; + } + case LOAD_ATTR_NONDESCRIPTOR_NO_DICT: { + *effect = 0; + return 0; + } + case LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES: { + *effect = 0; + return 0; + } + case LOAD_ATTR_PROPERTY: { + *effect = 0; + return 0; + } + case LOAD_ATTR_SLOT: { + *effect = Py_MAX(0, (oparg & 1)); + return 0; + } + case LOAD_ATTR_WITH_HINT: { + *effect = Py_MAX(0, (oparg & 1)); + return 0; + } + case LOAD_BUILD_CLASS: { + *effect = 1; + return 0; + } + case LOAD_CLOSURE: { + *effect = 1; + return 0; + } + case LOAD_COMMON_CONSTANT: { + *effect = 1; + return 0; + } + case LOAD_CONST: { + *effect = 1; + return 0; + } + case LOAD_CONST_IMMORTAL: { + *effect = 1; + return 0; + } + case LOAD_DEREF: { + *effect = 1; + return 0; + } + case LOAD_FAST: { + *effect = 1; + return 0; + } + case LOAD_FAST_AND_CLEAR: { + *effect = 1; + return 0; + } + case LOAD_FAST_CHECK: { + *effect = 1; + return 0; + } + case LOAD_FAST_LOAD_FAST: { + *effect = 2; + return 0; + } + case LOAD_FROM_DICT_OR_DEREF: { + *effect = 0; + return 0; + } + case LOAD_FROM_DICT_OR_GLOBALS: { + *effect = 0; + return 0; + } + case LOAD_GLOBAL: { + *effect = Py_MAX(1, 1 + (oparg & 1)); + return 0; + } + case LOAD_GLOBAL_BUILTIN: { + *effect = Py_MAX(1, 1 + (oparg & 1)); + return 0; + } + case LOAD_GLOBAL_MODULE: { + *effect = Py_MAX(1, 1 + (oparg & 1)); + return 0; + } + case LOAD_LOCALS: { + *effect = 1; + return 0; + } + case LOAD_NAME: { + *effect = 1; + return 0; + } + case LOAD_SMALL_INT: { + *effect = 1; + return 0; + } + case LOAD_SPECIAL: { + *effect = 1; + return 0; + } + case LOAD_SUPER_ATTR: { + *effect = Py_MAX(0, -2 + (oparg & 1)); + return 0; + } + case LOAD_SUPER_ATTR_ATTR: { + *effect = -2; + return 0; + } + case LOAD_SUPER_ATTR_METHOD: { + *effect = -1; + return 0; + } + case MAKE_CELL: { + *effect = 0; + return 0; + } + case MAKE_FUNCTION: { + *effect = 0; + return 0; + } + case MAP_ADD: { + *effect = -2; + return 0; + } + case MATCH_CLASS: { + *effect = -2; + return 0; + } + case MATCH_KEYS: { + *effect = 1; + return 0; + } + case MATCH_MAPPING: { + *effect = 1; + return 0; + } + case MATCH_SEQUENCE: { + *effect = 1; + return 0; + } + case NOP: { + *effect = 0; + return 0; + } + case POP_BLOCK: { + *effect = 0; + return 0; + } + case POP_EXCEPT: { + *effect = -1; + return 0; + } + case POP_JUMP_IF_FALSE: { + *effect = -1; + return 0; + } + case POP_JUMP_IF_NONE: { + *effect = 0; + return 0; + } + case POP_JUMP_IF_NOT_NONE: { + *effect = 0; + return 0; + } + case POP_JUMP_IF_TRUE: { + *effect = -1; + return 0; + } + case POP_TOP: { + *effect = -1; + return 0; + } + case PUSH_EXC_INFO: { + *effect = 1; + return 0; + } + case PUSH_NULL: { + *effect = 1; + return 0; + } + case RAISE_VARARGS: { + *effect = -oparg; + return 0; + } + case RERAISE: { + *effect = -1; + return 0; + } + case RESERVED: { + *effect = 0; + return 0; + } + case RESUME: { + *effect = 0; + return 0; + } + case RESUME_CHECK: { + *effect = 0; + return 0; + } + case RETURN_GENERATOR: { + *effect = 1; + return 0; + } + case RETURN_VALUE: { + *effect = 
0; + return 0; + } + case SEND: { + *effect = 0; + return 0; + } + case SEND_GEN: { + *effect = 0; + return 0; + } + case SETUP_ANNOTATIONS: { + *effect = 0; + return 0; + } + case SETUP_CLEANUP: { + *effect = 2; + return 0; + } + case SETUP_FINALLY: { + *effect = 1; + return 0; + } + case SETUP_WITH: { + *effect = 1; + return 0; + } + case SET_ADD: { + *effect = -1; + return 0; + } + case SET_FUNCTION_ATTRIBUTE: { + *effect = -1; + return 0; + } + case SET_UPDATE: { + *effect = -1; + return 0; + } + case STORE_ATTR: { + *effect = 0; + return 0; + } + case STORE_ATTR_INSTANCE_VALUE: { + *effect = 0; + return 0; + } + case STORE_ATTR_SLOT: { + *effect = 0; + return 0; + } + case STORE_ATTR_WITH_HINT: { + *effect = 0; + return 0; + } + case STORE_DEREF: { + *effect = -1; + return 0; + } + case STORE_FAST: { + *effect = -1; + return 0; + } + case STORE_FAST_LOAD_FAST: { + *effect = 0; + return 0; + } + case STORE_FAST_MAYBE_NULL: { + *effect = -1; + return 0; + } + case STORE_FAST_STORE_FAST: { + *effect = -2; + return 0; + } + case STORE_GLOBAL: { + *effect = -1; + return 0; + } + case STORE_NAME: { + *effect = -1; + return 0; + } + case STORE_SLICE: { + *effect = 0; + return 0; + } + case STORE_SUBSCR: { + *effect = 0; + return 0; + } + case STORE_SUBSCR_DICT: { + *effect = -3; + return 0; + } + case STORE_SUBSCR_LIST_INT: { + *effect = -3; + return 0; + } + case SWAP: { + *effect = 0; + return 0; + } + case TO_BOOL: { + *effect = 0; + return 0; + } + case TO_BOOL_ALWAYS_TRUE: { + *effect = 0; + return 0; + } + case TO_BOOL_BOOL: { + *effect = 0; + return 0; + } + case TO_BOOL_INT: { + *effect = 0; + return 0; + } + case TO_BOOL_LIST: { + *effect = 0; + return 0; + } + case TO_BOOL_NONE: { + *effect = 0; + return 0; + } + case TO_BOOL_STR: { + *effect = 0; + return 0; + } + case UNARY_INVERT: { + *effect = 0; + return 0; + } + case UNARY_NEGATIVE: { + *effect = 0; + return 0; + } + case UNARY_NOT: { + *effect = 0; + return 0; + } + case UNPACK_EX: { + *effect = (oparg & 0xFF) + (oparg >> 8); + return 0; + } + case UNPACK_SEQUENCE: { + *effect = Py_MAX(1, -1 + oparg); + return 0; + } + case UNPACK_SEQUENCE_LIST: { + *effect = -1 + oparg; + return 0; + } + case UNPACK_SEQUENCE_TUPLE: { + *effect = -1 + oparg; + return 0; + } + case UNPACK_SEQUENCE_TWO_TUPLE: { + *effect = 1; + return 0; + } + case WITH_EXCEPT_START: { + *effect = 1; + return 0; + } + case YIELD_VALUE: { + *effect = 0; + return 0; + } + default: + return -1; + } +} + +#endif + enum InstructionFormat { INSTR_FMT_IB = 1, INSTR_FMT_IBC = 2, @@ -1223,7 +2148,7 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[266] = { [UNARY_NOT] = { true, INSTR_FMT_IX, HAS_PURE_FLAG }, [UNPACK_EX] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [UNPACK_SEQUENCE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [UNPACK_SEQUENCE_LIST] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_DEOPT_FLAG }, + [UNPACK_SEQUENCE_LIST] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG }, [UNPACK_SEQUENCE_TUPLE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_DEOPT_FLAG }, [UNPACK_SEQUENCE_TWO_TUPLE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_DEOPT_FLAG }, [WITH_EXCEPT_START] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, diff --git a/Include/internal/pycore_pymem.h b/Include/internal/pycore_pymem.h index dd6b0762370c92..5bb34001aab1b4 100644 --- a/Include/internal/pycore_pymem.h +++ b/Include/internal/pycore_pymem.h @@ -120,11 +120,25 @@ extern int 
_PyMem_DebugEnabled(void); extern void _PyMem_FreeDelayed(void *ptr); // Enqueue an object to be freed possibly after some delay -extern void _PyObject_FreeDelayed(void *ptr); +#ifdef Py_GIL_DISABLED +extern void _PyObject_XDecRefDelayed(PyObject *obj); +#else +static inline void _PyObject_XDecRefDelayed(PyObject *obj) +{ + Py_XDECREF(obj); +} +#endif // Periodically process delayed free requests. extern void _PyMem_ProcessDelayed(PyThreadState *tstate); + +// Periodically process delayed free requests when the world is stopped. +// Notify of any objects whic should be freeed. +typedef void (*delayed_dealloc_cb)(PyObject *, void *); +extern void _PyMem_ProcessDelayedNoDealloc(PyThreadState *tstate, + delayed_dealloc_cb cb, void *state); + // Abandon all thread-local delayed free requests and push them to the // interpreter's queue. extern void _PyMem_AbandonDelayed(PyThreadState *tstate); diff --git a/Include/internal/pycore_pystate.h b/Include/internal/pycore_pystate.h index edcd75a55b686b..54d8803bc0bdb6 100644 --- a/Include/internal/pycore_pystate.h +++ b/Include/internal/pycore_pystate.h @@ -82,7 +82,7 @@ PyAPI_FUNC(PyObject *) _PyInterpreterState_GetIDObject(PyInterpreterState *); PyAPI_FUNC(int) _PyInterpreterState_SetRunningMain(PyInterpreterState *); PyAPI_FUNC(void) _PyInterpreterState_SetNotRunningMain(PyInterpreterState *); PyAPI_FUNC(int) _PyInterpreterState_IsRunningMain(PyInterpreterState *); -PyAPI_FUNC(int) _PyInterpreterState_FailIfRunningMain(PyInterpreterState *); +PyAPI_FUNC(void) _PyErr_SetInterpreterAlreadyRunning(void); extern int _PyThreadState_IsRunningMain(PyThreadState *); extern void _PyInterpreterState_ReinitRunningMain(PyThreadState *); @@ -269,6 +269,15 @@ extern int _PyOS_InterruptOccurred(PyThreadState *tstate); #define HEAD_UNLOCK(runtime) \ PyMutex_Unlock(&(runtime)->interpreters.mutex) +#define _Py_FOR_EACH_TSTATE_UNLOCKED(interp, t) \ + for (PyThreadState *t = interp->threads.head; t; t = t->next) +#define _Py_FOR_EACH_TSTATE_BEGIN(interp, t) \ + HEAD_LOCK(interp->runtime); \ + _Py_FOR_EACH_TSTATE_UNLOCKED(interp, t) +#define _Py_FOR_EACH_TSTATE_END(interp) \ + HEAD_UNLOCK(interp->runtime) + + // Get the configuration of the current interpreter. // The caller must hold the GIL. // Export for test_peg_generator. diff --git a/Include/internal/pycore_runtime.h b/Include/internal/pycore_runtime.h index 2f2cec22cf1589..86d024535fdda8 100644 --- a/Include/internal/pycore_runtime.h +++ b/Include/internal/pycore_runtime.h @@ -169,6 +169,12 @@ typedef struct pyruntimestate { struct _Py_unicode_runtime_state unicode_state; struct _types_runtime_state types; +#if defined(__EMSCRIPTEN__) && defined(PY_CALL_TRAMPOLINE) + // Used in "Python/emscripten_trampoline.c" to choose between type + // reflection trampoline and EM_JS trampoline. + bool wasm_type_reflection_available; +#endif + /* All the objects that are shared by the runtime's interpreters. */ struct _Py_cached_objects cached_objects; struct _Py_static_objects static_objects; @@ -189,13 +195,8 @@ typedef struct pyruntimestate { /* _PyRuntimeState.interpreters.main */ PyInterpreterState _main_interpreter; - -#if defined(__EMSCRIPTEN__) && defined(PY_CALL_TRAMPOLINE) - // Used in "Python/emscripten_trampoline.c" to choose between type - // reflection trampoline and EM_JS trampoline. - bool wasm_type_reflection_available; -#endif - + // _main_interpreter should be the last field of _PyRuntimeState. + // See https://github.com/python/cpython/issues/127117. 
} _PyRuntimeState; diff --git a/Include/internal/pycore_runtime_init.h b/Include/internal/pycore_runtime_init.h index 8a8f47695fb8b0..9f6748945bab36 100644 --- a/Include/internal/pycore_runtime_init.h +++ b/Include/internal/pycore_runtime_init.h @@ -118,6 +118,9 @@ extern PyTypeObject _PyExc_MemoryError; { \ .id_refcount = -1, \ ._whence = _PyInterpreterState_WHENCE_NOTSET, \ + .threads = { \ + .preallocated = &(INTERP)._initial_thread, \ + }, \ .imports = IMPORTS_INIT, \ .ceval = { \ .recursion_limit = Py_DEFAULT_RECURSION_LIMIT, \ diff --git a/Include/internal/pycore_runtime_init_generated.h b/Include/internal/pycore_runtime_init_generated.h index 5d404c8fd91ca6..b631382cae058a 100644 --- a/Include/internal/pycore_runtime_init_generated.h +++ b/Include/internal/pycore_runtime_init_generated.h @@ -980,6 +980,7 @@ extern "C" { INIT_ID(hi), \ INIT_ID(hook), \ INIT_ID(hour), \ + INIT_ID(id), \ INIT_ID(ident), \ INIT_ID(identity_hint), \ INIT_ID(ignore), \ diff --git a/Include/internal/pycore_stackref.h b/Include/internal/pycore_stackref.h index 588e57f6cd97e0..90a3118352f7ae 100644 --- a/Include/internal/pycore_stackref.h +++ b/Include/internal/pycore_stackref.h @@ -99,8 +99,7 @@ _PyStackRef_FromPyObjectSteal(PyObject *obj) assert(obj != NULL); // Make sure we don't take an already tagged value. assert(((uintptr_t)obj & Py_TAG_BITS) == 0); - unsigned int tag = _Py_IsImmortal(obj) ? (Py_TAG_DEFERRED) : Py_TAG_PTR; - return ((_PyStackRef){.bits = ((uintptr_t)(obj)) | tag}); + return (_PyStackRef){ .bits = (uintptr_t)obj }; } # define PyStackRef_FromPyObjectSteal(obj) _PyStackRef_FromPyObjectSteal(_PyObject_CAST(obj)) @@ -190,9 +189,16 @@ static const _PyStackRef PyStackRef_NULL = { .bits = 0 }; #endif // Py_GIL_DISABLED -// Note: this is a macro because MSVC (Windows) has trouble inlining it. +// Check if a stackref is exactly the same as another stackref, including the +// the deferred bit. This can only be used safely if you know that the deferred +// bits of `a` and `b` match. +#define PyStackRef_IsExactly(a, b) \ + (assert(((a).bits & Py_TAG_BITS) == ((b).bits & Py_TAG_BITS)), (a).bits == (b).bits) -#define PyStackRef_Is(a, b) ((a).bits == (b).bits) +// Checks that mask out the deferred bit in the free threading build. +#define PyStackRef_IsNone(ref) (PyStackRef_AsPyObjectBorrow(ref) == Py_None) +#define PyStackRef_IsTrue(ref) (PyStackRef_AsPyObjectBorrow(ref) == Py_True) +#define PyStackRef_IsFalse(ref) (PyStackRef_AsPyObjectBorrow(ref) == Py_False) // Converts a PyStackRef back to a PyObject *, converting the // stackref to a new reference. diff --git a/Include/internal/pycore_typeobject.h b/Include/internal/pycore_typeobject.h index 5debdd68fe94ca..7b39d07f976ee3 100644 --- a/Include/internal/pycore_typeobject.h +++ b/Include/internal/pycore_typeobject.h @@ -269,6 +269,16 @@ extern unsigned int _PyType_GetVersionForCurrentState(PyTypeObject *tp); PyAPI_FUNC(void) _PyType_SetVersion(PyTypeObject *tp, unsigned int version); PyTypeObject *_PyType_LookupByVersion(unsigned int version); +// Function pointer type for user-defined validation function that will be +// called by _PyType_Validate(). +// It should return 0 if the validation is passed, otherwise it will return -1. +typedef int (*_py_validate_type)(PyTypeObject *); + +// It will verify the ``ty`` through user-defined validation function ``validate``, +// and if the validation is passed, it will set the ``tp_version`` as valid +// tp_version_tag from the ``ty``. 
+extern int _PyType_Validate(PyTypeObject *ty, _py_validate_type validate, unsigned int *tp_version); + #ifdef __cplusplus } #endif diff --git a/Include/internal/pycore_unicodeobject_generated.h b/Include/internal/pycore_unicodeobject_generated.h index d0bc8d7186c053..24cec3a4fded7a 100644 --- a/Include/internal/pycore_unicodeobject_generated.h +++ b/Include/internal/pycore_unicodeobject_generated.h @@ -1680,6 +1680,10 @@ _PyUnicode_InitStaticStrings(PyInterpreterState *interp) { _PyUnicode_InternStatic(interp, &string); assert(_PyUnicode_CheckConsistency(string, 1)); assert(PyUnicode_GET_LENGTH(string) != 1); + string = &_Py_ID(id); + _PyUnicode_InternStatic(interp, &string); + assert(_PyUnicode_CheckConsistency(string, 1)); + assert(PyUnicode_GET_LENGTH(string) != 1); string = &_Py_ID(ident); _PyUnicode_InternStatic(interp, &string); assert(_PyUnicode_CheckConsistency(string, 1)); diff --git a/Include/internal/pycore_uop_metadata.h b/Include/internal/pycore_uop_metadata.h index 1b2880cb6bb67e..1c1f478c3833c8 100644 --- a/Include/internal/pycore_uop_metadata.h +++ b/Include/internal/pycore_uop_metadata.h @@ -112,7 +112,7 @@ const uint16_t _PyUop_Flags[MAX_UOP_ID+1] = { [_UNPACK_SEQUENCE] = HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_UNPACK_SEQUENCE_TWO_TUPLE] = HAS_ARG_FLAG | HAS_DEOPT_FLAG, [_UNPACK_SEQUENCE_TUPLE] = HAS_ARG_FLAG | HAS_DEOPT_FLAG, - [_UNPACK_SEQUENCE_LIST] = HAS_ARG_FLAG | HAS_DEOPT_FLAG, + [_UNPACK_SEQUENCE_LIST] = HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG, [_UNPACK_EX] = HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_STORE_ATTR] = HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_DELETE_ATTR] = HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, diff --git a/Include/patchlevel.h b/Include/patchlevel.h index f7b33a8038570b..e99c3a66f84e4f 100644 --- a/Include/patchlevel.h +++ b/Include/patchlevel.h @@ -20,10 +20,10 @@ #define PY_MINOR_VERSION 14 #define PY_MICRO_VERSION 0 #define PY_RELEASE_LEVEL PY_RELEASE_LEVEL_ALPHA -#define PY_RELEASE_SERIAL 1 +#define PY_RELEASE_SERIAL 2 /* Version as a string */ -#define PY_VERSION "3.14.0a1+" +#define PY_VERSION "3.14.0a2+" /*--end constants--*/ /* Version as a single 4-byte hex number, e.g. 0x010502B2 == 1.5.2b2. diff --git a/InternalDocs/README.md b/InternalDocs/README.md index 2ef6e653ac19d4..f6aa3db3b384af 100644 --- a/InternalDocs/README.md +++ b/InternalDocs/README.md @@ -24,14 +24,14 @@ Compiling Python Source Code Runtime Objects --- -- [Code Objects (coming soon)](code_objects.md) - -- [The Source Code Locations Table](locations.md) +- [Code Objects](code_objects.md) - [Generators (coming soon)](generators.md) - [Frames](frames.md) +- [String Interning](string_interning.md) + Program Execution --- diff --git a/InternalDocs/code_objects.md b/InternalDocs/code_objects.md index 284a8b7aee5765..bee4a9d0a08915 100644 --- a/InternalDocs/code_objects.md +++ b/InternalDocs/code_objects.md @@ -1,5 +1,139 @@ -Code objects -============ +# Code objects -Coming soon. +A `CodeObject` is a builtin Python type that represents a compiled executable, +such as a compiled function or class. +It contains a sequence of bytecode instructions along with its associated +metadata: data which is necessary to execute the bytecode instructions (such +as the values of the constants they access) or context information such as +the source code location, which is useful for debuggers and other tools. 
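Much of this metadata is visible from Python through a function's `__code__`
attribute. The following interactive sketch is illustrative only (the `dis.dis`
output is omitted because it changes between CPython versions):

```pycon
>>> def add_one(x):
...     return x + 1
...
>>> code = add_one.__code__
>>> code.co_name
'add_one'
>>> code.co_varnames
('x',)
>>> type(code.co_code)    # the raw bytecode, exposed as bytes
<class 'bytes'>
>>> import dis
>>> dis.dis(code)         # decode it into readable instructions (output omitted)
```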
+ +Since 3.11, the final field of the `PyCodeObject` C struct is an array +of indeterminate length containing the bytecode, `code->co_code_adaptive`. +(In older versions the code object was a +[`bytes`](https://docs.python.org/dev/library/stdtypes.html#bytes) +object, `code->co_code`; this was changed to save an allocation and to +allow it to be mutated.) + +Code objects are typically produced by the bytecode [compiler](compiler.md), +although they are often written to disk by one process and read back in by another. +The disk version of a code object is serialized using the +[marshal](https://docs.python.org/dev/library/marshal.html) protocol. + +Code objects are nominally immutable. +Some fields (including `co_code_adaptive` and fields for runtime +information such as `_co_monitoring`) are mutable, but mutable fields are +not included when code objects are hashed or compared. + +## Source code locations + +Whenever an exception occurs, the interpreter adds a traceback entry to +the exception for the current frame, as well as each frame on the stack that +it unwinds. +The `tb_lineno` field of a traceback entry is (lazily) set to the line +number of the instruction that was executing in the frame at the time of +the exception. +This field is computed from the locations table, `co_linetable`, by the function +[`PyCode_Addr2Line`](https://docs.python.org/dev/c-api/code.html#c.PyCode_Addr2Line). +Despite its name, `co_linetable` includes more than line numbers; it represents +a 4-number source location for every instruction, indicating the precise line +and column at which it begins and ends. This is a significant amount of data, +so a compact format is very important. + +Note that traceback objects don't store all this information -- they store the start line +number, for backward compatibility, and the "last instruction" value. +The rest can be computed from the last instruction (`tb_lasti`) with the help of the +locations table. For Python code, there is a convenience method +(`codeobject.co_positions`)[https://docs.python.org/dev/reference/datamodel.html#codeobject.co_positions] +which returns an iterator of `({line}, {endline}, {column}, {endcolumn})` tuples, +one per instruction. +There is also `co_lines()` which returns an iterator of `({start}, {end}, {line})` tuples, +where `{start}` and `{end}` are bytecode offsets. +The latter is described by [`PEP 626`](https://peps.python.org/pep-0626/); it is more +compact, but doesn't return end line numbers or column offsets. +From C code, you need to call +[`PyCode_Addr2Location`](https://docs.python.org/dev/c-api/code.html#c.PyCode_Addr2Location). + +As the locations table is only consulted when displaying a traceback and when +tracing (to pass the line number to the tracing function), lookup is not +performance critical. +In order to reduce the overhead during tracing, the mapping from instruction offset to +line number is cached in the ``_co_linearray`` field. + +### Format of the locations table + +The `co_linetable` bytes object of code objects contains a compact +representation of the source code positions of instructions, which are +returned by the `co_positions()` iterator. + +> [!NOTE] +> `co_linetable` is not to be confused with `co_lnotab`. +> For backwards compatibility, `co_lnotab` exposes the format +> as it existed in Python 3.10 and lower: this older format +> stores only the start line for each instruction. +> It is lazily created from `co_linetable` when accessed. 
+> See [`Objects/lnotab_notes.txt`](../Objects/lnotab_notes.txt) for more details. + +`co_linetable` consists of a sequence of location entries. +Each entry starts with a byte with the most significant bit set, followed by zero or more bytes with the most significant bit unset. + +Each entry contains the following information: +* The number of code units covered by this entry (length) +* The start line +* The end line +* The start column +* The end column + +The first byte has the following format: + +Bit 7 | Bits 3-6 | Bits 0-2 + ---- | ---- | ---- + 1 | Code | Length (in code units) - 1 + +The codes are enumerated in the `_PyCodeLocationInfoKind` enum. + +## Variable-length integer encodings + +Integers are often encoded using a variable-length integer encoding + +### Unsigned integers (`varint`) + +Unsigned integers are encoded in 6-bit chunks, least significant first. +Each chunk but the last has bit 6 set. +For example: + +* 63 is encoded as `0x3f` +* 200 is encoded as `0x48`, `0x03` + +### Signed integers (`svarint`) + +Signed integers are encoded by converting them to unsigned integers, using the following function: +```Python +def convert(s): + if s < 0: + return ((-s)<<1) | 1 + else: + return (s<<1) +``` + +*Location entries* + +The meaning of the codes and the following bytes are as follows: + +Code | Meaning | Start line | End line | Start column | End column + ---- | ---- | ---- | ---- | ---- | ---- + 0-9 | Short form | Δ 0 | Δ 0 | See below | See below + 10-12 | One line form | Δ (code - 10) | Δ 0 | unsigned byte | unsigned byte + 13 | No column info | Δ svarint | Δ 0 | None | None + 14 | Long form | Δ svarint | Δ varint | varint | varint + 15 | No location | None | None | None | None + +The Δ means the value is encoded as a delta from another value: +* Start line: Delta from the previous start line, or `co_firstlineno` for the first entry. +* End line: Delta from the start line + +*The short forms* + +Codes 0-9 are the short forms. The short form consists of two bytes, the second byte holding additional column information. The code is the start column divided by 8 (and rounded down). +* Start column: `(code*8) + ((second_byte>>4)&7)` +* End column: `start_column + (second_byte&15)` diff --git a/InternalDocs/compiler.md b/InternalDocs/compiler.md index 37964bd99428df..ed4cfb23ca51f7 100644 --- a/InternalDocs/compiler.md +++ b/InternalDocs/compiler.md @@ -443,14 +443,12 @@ reference to the source code (filename, etc). All of this is implemented by Code objects ============ -The result of `PyAST_CompileObject()` is a `PyCodeObject` which is defined in +The result of `_PyAST_Compile()` is a `PyCodeObject` which is defined in [Include/cpython/code.h](../Include/cpython/code.h). And with that you now have executable Python bytecode! -The code objects (byte code) are executed in [Python/ceval.c](../Python/ceval.c). -This file will also need a new case statement for the new opcode in the big switch -statement in `_PyEval_EvalFrameDefault()`. - +The code objects (byte code) are executed in `_PyEval_EvalFrameDefault()` +in [Python/ceval.c](../Python/ceval.c). 
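+
+A rough way to observe that end product from Python is to drive the compiler
+through the builtin `compile()` and disassemble the result; the instruction
+names and offsets shown by `dis` are illustrative and change between versions:
+
+```python
+import dis
+
+# The builtin compile() runs the stages described above
+# (source -> AST -> CFG -> code object).
+code = compile("x = a + b", "<example>", "exec")
+
+# Print the instructions that _PyEval_EvalFrameDefault() will interpret.
+dis.dis(code)
+```
+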
Important files =============== diff --git a/InternalDocs/garbage_collector.md b/InternalDocs/garbage_collector.md index 377a846428ae0c..272a0834cbfe24 100644 --- a/InternalDocs/garbage_collector.md +++ b/InternalDocs/garbage_collector.md @@ -17,26 +17,26 @@ value returned by this function is always 1 more as the function also has a refe to the object when called): ```pycon - >>> x = object() - >>> sys.getrefcount(x) - 2 - >>> y = x - >>> sys.getrefcount(x) - 3 - >>> del y - >>> sys.getrefcount(x) - 2 +>>> x = object() +>>> sys.getrefcount(x) +2 +>>> y = x +>>> sys.getrefcount(x) +3 +>>> del y +>>> sys.getrefcount(x) +2 ``` The main problem with the reference counting scheme is that it does not handle reference cycles. For instance, consider this code: ```pycon - >>> container = [] - >>> container.append(container) - >>> sys.getrefcount(container) - 3 - >>> del container +>>> container = [] +>>> container.append(container) +>>> sys.getrefcount(container) +3 +>>> del container ``` In this example, `container` holds a reference to itself, so even when we remove @@ -199,26 +199,26 @@ variable `A`, and one self-referencing object which is completely unreachable: ```pycon - >>> import gc - - >>> class Link: - ... def __init__(self, next_link=None): - ... self.next_link = next_link - - >>> link_3 = Link() - >>> link_2 = Link(link_3) - >>> link_1 = Link(link_2) - >>> link_3.next_link = link_1 - >>> A = link_1 - >>> del link_1, link_2, link_3 - - >>> link_4 = Link() - >>> link_4.next_link = link_4 - >>> del link_4 - - # Collect the unreachable Link object (and its .__dict__ dict). - >>> gc.collect() - 2 +>>> import gc +>>> +>>> class Link: +... def __init__(self, next_link=None): +... self.next_link = next_link +... +>>> link_3 = Link() +>>> link_2 = Link(link_3) +>>> link_1 = Link(link_2) +>>> link_3.next_link = link_1 +>>> A = link_1 +>>> del link_1, link_2, link_3 +>>> +>>> link_4 = Link() +>>> link_4.next_link = link_4 +>>> del link_4 +>>> +>>> # Collect the unreachable Link object (and its .__dict__ dict). +>>> gc.collect() +2 ``` The GC starts with a set of candidate objects it wants to scan. In the @@ -439,9 +439,9 @@ These thresholds can be examined using the function: ```pycon - >>> import gc - >>> gc.get_threshold() - (700, 10, 10) +>>> import gc +>>> gc.get_threshold() +(700, 10, 10) ``` The content of these generations can be examined using the @@ -449,38 +449,32 @@ The content of these generations can be examined using the specifically in a generation by calling `gc.collect(generation=NUM)`. ```pycon - >>> import gc - >>> class MyObj: - ... pass - ... - - # Move everything to the old generation so it's easier to inspect - # the young generation. - - >>> gc.collect() - 0 - - # Create a reference cycle. - - >>> x = MyObj() - >>> x.self = x - - # Initially the object is in the young generation. - - >>> gc.get_objects(generation=0) - [..., <__main__.MyObj object at 0x7fbcc12a3400>, ...] - - # After a collection of the youngest generation the object - # moves to the old generation. - - >>> gc.collect(generation=0) - 0 - >>> gc.get_objects(generation=0) - [] - >>> gc.get_objects(generation=1) - [] - >>> gc.get_objects(generation=2) - [..., <__main__.MyObj object at 0x7fbcc12a3400>, ...] +>>> import gc +>>> class MyObj: +... pass +... +>>> # Move everything to the old generation so it's easier to inspect +>>> # the young generation. +>>> gc.collect() +0 +>>> # Create a reference cycle. +>>> x = MyObj() +>>> x.self = x +>>> +>>> # Initially the object is in the young generation. 
+>>> gc.get_objects(generation=0) +[..., <__main__.MyObj object at 0x7fbcc12a3400>, ...] +>>> +>>> # After a collection of the youngest generation the object +>>> # moves to the old generation. +>>> gc.collect(generation=0) +0 +>>> gc.get_objects(generation=0) +[] +>>> gc.get_objects(generation=1) +[] +>>> gc.get_objects(generation=2) +[..., <__main__.MyObj object at 0x7fbcc12a3400>, ...] ``` @@ -532,8 +526,8 @@ of `PyGC_Head` discussed in the `Memory layout and object structure`_ section: currently in. Instead, when that's needed, ad hoc tricks (like the `NEXT_MASK_UNREACHABLE` flag) are employed. -Optimization: delay tracking containers -======================================= +Optimization: delayed untracking containers +=========================================== Certain types of containers cannot participate in a reference cycle, and so do not need to be tracked by the garbage collector. Untracking these objects @@ -546,44 +540,35 @@ a container: 2. When the container is examined by the garbage collector. As a general rule, instances of atomic types aren't tracked and instances of -non-atomic types (containers, user-defined objects...) are. However, some -type-specific optimizations can be present in order to suppress the garbage -collector footprint of simple instances. Some examples of native types that -benefit from delayed tracking: - -- Tuples containing only immutable objects (integers, strings etc, - and recursively, tuples of immutable objects) do not need to be tracked. The - interpreter creates a large number of tuples, many of which will not survive - until garbage collection. It is therefore not worthwhile to untrack eligible - tuples at creation time. Instead, all tuples except the empty tuple are tracked - when created. During garbage collection it is determined whether any surviving - tuples can be untracked. A tuple can be untracked if all of its contents are - already not tracked. Tuples are examined for untracking in all garbage collection - cycles. It may take more than one cycle to untrack a tuple. - -- Dictionaries containing only immutable objects also do not need to be tracked. - Dictionaries are untracked when created. If a tracked item is inserted into a - dictionary (either as a key or value), the dictionary becomes tracked. During a - full garbage collection (all generations), the collector will untrack any dictionaries - whose contents are not tracked. +non-atomic types (containers, user-defined objects...) are. + +Tuples containing only immutable objects (integers, strings etc, +and recursively, tuples of immutable objects) do not need to be tracked. The +interpreter creates a large number of tuples, many of which will not survive +until garbage collection. It is therefore not worthwhile to untrack eligible +tuples at creation time. Instead, all tuples except the empty tuple are tracked +when created. During garbage collection it is determined whether any surviving +tuples can be untracked. A tuple can be untracked if all of its contents are +already not tracked. Tuples are examined for untracking in all garbage collection +cycles. The garbage collector module provides the Python function `is_tracked(obj)`, which returns the current tracking status of the object. Subsequent garbage collections may change the tracking status of the object. 
```pycon - >>> gc.is_tracked(0) - False - >>> gc.is_tracked("a") - False - >>> gc.is_tracked([]) - True - >>> gc.is_tracked({}) - False - >>> gc.is_tracked({"a": 1}) - False - >>> gc.is_tracked({"a": []}) - True +>>> gc.is_tracked(0) +False +>>> gc.is_tracked("a") +False +>>> gc.is_tracked([]) +True +>>> gc.is_tracked(()) +False +>>> gc.is_tracked({}) +True +>>> gc.is_tracked({"a": 1}) +True ``` Differences between GC implementations diff --git a/InternalDocs/interpreter.md b/InternalDocs/interpreter.md index dcfddc99370c0e..4c10cbbed37735 100644 --- a/InternalDocs/interpreter.md +++ b/InternalDocs/interpreter.md @@ -16,7 +16,7 @@ from the instruction definitions in [Python/bytecodes.c](../Python/bytecodes.c) which are written in [a DSL](../Tools/cases_generator/interpreter_definition.md) developed for this purpose. -Recall that the [Python Compiler](compiler.md) produces a [`CodeObject`](code_object.md), +Recall that the [Python Compiler](compiler.md) produces a [`CodeObject`](code_objects.md), which contains the bytecode instructions along with static data that is required to execute them, such as the consts list, variable names, [exception table](exception_handling.md#format-of-the-exception-table), and so on. diff --git a/InternalDocs/locations.md b/InternalDocs/locations.md deleted file mode 100644 index 91a7824e2a8e4d..00000000000000 --- a/InternalDocs/locations.md +++ /dev/null @@ -1,69 +0,0 @@ -# Locations table - -The `co_linetable` bytes object of code objects contains a compact -representation of the source code positions of instructions, which are -returned by the `co_positions()` iterator. - -`co_linetable` consists of a sequence of location entries. -Each entry starts with a byte with the most significant bit set, followed by zero or more bytes with most significant bit unset. - -Each entry contains the following information: -* The number of code units covered by this entry (length) -* The start line -* The end line -* The start column -* The end column - -The first byte has the following format: - -Bit 7 | Bits 3-6 | Bits 0-2 - ---- | ---- | ---- - 1 | Code | Length (in code units) - 1 - -The codes are enumerated in the `_PyCodeLocationInfoKind` enum. - -## Variable length integer encodings - -Integers are often encoded using a variable length integer encoding - -### Unsigned integers (varint) - -Unsigned integers are encoded in 6 bit chunks, least significant first. -Each chunk but the last has bit 6 set. -For example: - -* 63 is encoded as `0x3f` -* 200 is encoded as `0x48`, `0x03` - -### Signed integers (svarint) - -Signed integers are encoded by converting them to unsigned integers, using the following function: -```Python -def convert(s): - if s < 0: - return ((-s)<<1) | 1 - else: - return (s<<1) -``` - -## Location entries - -The meaning of the codes and the following bytes are as follows: - -Code | Meaning | Start line | End line | Start column | End column - ---- | ---- | ---- | ---- | ---- | ---- - 0-9 | Short form | Δ 0 | Δ 0 | See below | See below - 10-12 | One line form | Δ (code - 10) | Δ 0 | unsigned byte | unsigned byte - 13 | No column info | Δ svarint | Δ 0 | None | None - 14 | Long form | Δ svarint | Δ varint | varint | varint - 15 | No location | None | None | None | None - -The Δ means the value is encoded as a delta from another value: -* Start line: Delta from the previous start line, or `co_firstlineno` for the first entry. -* End line: Delta from the start line - -### The short forms - -Codes 0-9 are the short forms. 
The short form consists of two bytes, the second byte holding additional column information. The code is the start column divided by 8 (and rounded down). -* Start column: `(code*8) + ((second_byte>>4)&7)` -* End column: `start_column + (second_byte&15)` diff --git a/Lib/_pydatetime.py b/Lib/_pydatetime.py index 78e03e32896740..ed01670cfece43 100644 --- a/Lib/_pydatetime.py +++ b/Lib/_pydatetime.py @@ -651,7 +651,19 @@ def __new__(cls, days=0, seconds=0, microseconds=0, # guide the C implementation; it's way more convoluted than speed- # ignoring auto-overflow-to-long idiomatic Python could be. - # XXX Check that all inputs are ints or floats. + for name, value in ( + ("days", days), + ("seconds", seconds), + ("microseconds", microseconds), + ("milliseconds", milliseconds), + ("minutes", minutes), + ("hours", hours), + ("weeks", weeks) + ): + if not isinstance(value, (int, float)): + raise TypeError( + f"unsupported type for timedelta {name} component: {type(value).__name__}" + ) # Final values, all integer. # s and us fit in 32-bit signed ints; d isn't bounded. diff --git a/Lib/_pyrepl/readline.py b/Lib/_pyrepl/readline.py index 5e1d3085874380..888185eb03be66 100644 --- a/Lib/_pyrepl/readline.py +++ b/Lib/_pyrepl/readline.py @@ -450,7 +450,9 @@ def read_history_file(self, filename: str = gethistoryfile()) -> None: def write_history_file(self, filename: str = gethistoryfile()) -> None: maxlength = self.saved_history_length history = self.get_reader().get_trimmed_history(maxlength) - with open(os.path.expanduser(filename), "w", encoding="utf-8") as f: + f = open(os.path.expanduser(filename), "w", + encoding="utf-8", newline="\n") + with f: for entry in history: entry = entry.replace("\n", "\r\n") # multiline history support f.write(entry + "\n") diff --git a/Lib/annotationlib.py b/Lib/annotationlib.py index 732fbfa628cf5f..42f1f3877514d9 100644 --- a/Lib/annotationlib.py +++ b/Lib/annotationlib.py @@ -22,8 +22,9 @@ class Format(enum.IntEnum): VALUE = 1 - FORWARDREF = 2 - STRING = 3 + VALUE_WITH_FAKE_GLOBALS = 2 + FORWARDREF = 3 + STRING = 4 _Union = None @@ -513,6 +514,8 @@ def call_annotate_function(annotate, format, *, owner=None, _is_evaluate=False): on the generated ForwardRef objects. """ + if format == Format.VALUE_WITH_FAKE_GLOBALS: + raise ValueError("The VALUE_WITH_FAKE_GLOBALS format is for internal use only") try: return annotate(format) except NotImplementedError: @@ -546,7 +549,7 @@ def call_annotate_function(annotate, format, *, owner=None, _is_evaluate=False): argdefs=annotate.__defaults__, kwdefaults=annotate.__kwdefaults__, ) - annos = func(Format.VALUE) + annos = func(Format.VALUE_WITH_FAKE_GLOBALS) if _is_evaluate: return annos if isinstance(annos, str) else repr(annos) return { @@ -607,7 +610,7 @@ def call_annotate_function(annotate, format, *, owner=None, _is_evaluate=False): argdefs=annotate.__defaults__, kwdefaults=annotate.__kwdefaults__, ) - result = func(Format.VALUE) + result = func(Format.VALUE_WITH_FAKE_GLOBALS) for obj in globals.stringifiers: obj.__class__ = ForwardRef obj.__stringifier_dict__ = None # not needed for ForwardRef @@ -726,6 +729,8 @@ def get_annotations( # But if we didn't get it, we use __annotations__ instead. 
ann = _get_dunder_annotations(obj) return annotations_to_string(ann) + case Format.VALUE_WITH_FAKE_GLOBALS: + raise ValueError("The VALUE_WITH_FAKE_GLOBALS format is for internal use only") case _: raise ValueError(f"Unsupported format {format!r}") diff --git a/Lib/argparse.py b/Lib/argparse.py index 5ecfdca17175e3..d24fa72e573d4f 100644 --- a/Lib/argparse.py +++ b/Lib/argparse.py @@ -1709,14 +1709,7 @@ def _remove_action(self, action): self._group_actions.remove(action) def add_argument_group(self, *args, **kwargs): - import warnings - warnings.warn( - "Nesting argument groups is deprecated.", - category=DeprecationWarning, - stacklevel=2 - ) - return super().add_argument_group(*args, **kwargs) - + raise ValueError('argument groups cannot be nested') class _MutuallyExclusiveGroup(_ArgumentGroup): @@ -1737,15 +1730,8 @@ def _remove_action(self, action): self._container._remove_action(action) self._group_actions.remove(action) - def add_mutually_exclusive_group(self, *args, **kwargs): - import warnings - warnings.warn( - "Nesting mutually exclusive groups is deprecated.", - category=DeprecationWarning, - stacklevel=2 - ) - return super().add_mutually_exclusive_group(*args, **kwargs) - + def add_mutually_exclusive_group(self, **kwargs): + raise ValueError('mutually exclusive groups cannot be nested') def _prog_name(prog=None): if prog is not None: @@ -1889,7 +1875,7 @@ def add_subparsers(self, **kwargs): formatter = self._get_formatter() positionals = self._get_positional_actions() groups = self._mutually_exclusive_groups - formatter.add_usage(self.usage, positionals, groups, '') + formatter.add_usage(None, positionals, groups, '') kwargs['prog'] = formatter.format_help().strip() # create the parsers action and add it to the positionals list diff --git a/Lib/concurrent/futures/thread.py b/Lib/concurrent/futures/thread.py index 16cc5533d429ef..909359b648709f 100644 --- a/Lib/concurrent/futures/thread.py +++ b/Lib/concurrent/futures/thread.py @@ -41,6 +41,7 @@ def _python_exit(): os.register_at_fork(before=_global_shutdown_lock.acquire, after_in_child=_global_shutdown_lock._at_fork_reinit, after_in_parent=_global_shutdown_lock.release) + os.register_at_fork(after_in_child=_threads_queues.clear) class WorkerContext: diff --git a/Lib/ctypes/__init__.py b/Lib/ctypes/__init__.py index 4a368f02d851c5..ac6493892068e9 100644 --- a/Lib/ctypes/__init__.py +++ b/Lib/ctypes/__init__.py @@ -19,7 +19,7 @@ raise Exception("Version number mismatch", __version__, _ctypes_version) if _os.name == "nt": - from _ctypes import FormatError + from _ctypes import COMError, FormatError DEFAULT_MODE = RTLD_LOCAL if _os.name == "posix" and _sys.platform == "darwin": diff --git a/Lib/email/_policybase.py b/Lib/email/_policybase.py index c7694a44e26639..4b63b97217a835 100644 --- a/Lib/email/_policybase.py +++ b/Lib/email/_policybase.py @@ -302,12 +302,12 @@ def header_source_parse(self, sourcelines): """+ The name is parsed as everything up to the ':' and returned unmodified. The value is determined by stripping leading whitespace off the - remainder of the first line, joining all subsequent lines together, and + remainder of the first line joined with all subsequent lines, and stripping any trailing carriage return or linefeed characters. 
""" name, value = sourcelines[0].split(':', 1) - value = value.lstrip(' \t') + ''.join(sourcelines[1:]) + value = ''.join((value, *sourcelines[1:])).lstrip(' \t\r\n') return (name, value.rstrip('\r\n')) def header_store_parse(self, name, value): diff --git a/Lib/email/policy.py b/Lib/email/policy.py index 46b7de5bb6d8ae..6e109b65011a44 100644 --- a/Lib/email/policy.py +++ b/Lib/email/policy.py @@ -119,13 +119,13 @@ def header_source_parse(self, sourcelines): """+ The name is parsed as everything up to the ':' and returned unmodified. The value is determined by stripping leading whitespace off the - remainder of the first line, joining all subsequent lines together, and + remainder of the first line joined with all subsequent lines, and stripping any trailing carriage return or linefeed characters. (This is the same as Compat32). """ name, value = sourcelines[0].split(':', 1) - value = value.lstrip(' \t') + ''.join(sourcelines[1:]) + value = ''.join((value, *sourcelines[1:])).lstrip(' \t\r\n') return (name, value.rstrip('\r\n')) def header_store_parse(self, name, value): diff --git a/Lib/encodings/__init__.py b/Lib/encodings/__init__.py index f9075b8f0d98ac..298177eb8003a7 100644 --- a/Lib/encodings/__init__.py +++ b/Lib/encodings/__init__.py @@ -156,19 +156,22 @@ def search_function(encoding): codecs.register(search_function) if sys.platform == 'win32': - # bpo-671666, bpo-46668: If Python does not implement a codec for current - # Windows ANSI code page, use the "mbcs" codec instead: - # WideCharToMultiByte() and MultiByteToWideChar() functions with CP_ACP. - # Python does not support custom code pages. - def _alias_mbcs(encoding): + from ._win_cp_codecs import create_win32_code_page_codec + + def win32_code_page_search_function(encoding): + encoding = encoding.lower() + if not encoding.startswith('cp'): + return None try: - import _winapi - ansi_code_page = "cp%s" % _winapi.GetACP() - if encoding == ansi_code_page: - import encodings.mbcs - return encodings.mbcs.getregentry() - except ImportError: - # Imports may fail while we are shutting down - pass + cp = int(encoding[2:]) + except ValueError: + return None + # Test if the code page is supported + try: + codecs.code_page_encode(cp, 'x') + except (OverflowError, OSError): + return None + + return create_win32_code_page_codec(cp) - codecs.register(_alias_mbcs) + codecs.register(win32_code_page_search_function) diff --git a/Lib/encodings/_win_cp_codecs.py b/Lib/encodings/_win_cp_codecs.py new file mode 100644 index 00000000000000..4f8eb886794404 --- /dev/null +++ b/Lib/encodings/_win_cp_codecs.py @@ -0,0 +1,36 @@ +import codecs + +def create_win32_code_page_codec(cp): + from codecs import code_page_encode, code_page_decode + + def encode(input, errors='strict'): + return code_page_encode(cp, input, errors) + + def decode(input, errors='strict'): + return code_page_decode(cp, input, errors, True) + + class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input, final=False): + return code_page_encode(cp, input, self.errors)[0] + + class IncrementalDecoder(codecs.BufferedIncrementalDecoder): + def _buffer_decode(self, input, errors, final): + return code_page_decode(cp, input, errors, final) + + class StreamWriter(codecs.StreamWriter): + def encode(self, input, errors='strict'): + return code_page_encode(cp, input, errors) + + class StreamReader(codecs.StreamReader): + def decode(self, input, errors, final): + return code_page_decode(cp, input, errors, final) + + return codecs.CodecInfo( + name=f'cp{cp}', + encode=encode, 
+ decode=decode, + incrementalencoder=IncrementalEncoder, + incrementaldecoder=IncrementalDecoder, + streamreader=StreamReader, + streamwriter=StreamWriter, + ) diff --git a/Lib/fnmatch.py b/Lib/fnmatch.py index 73acb1fe8d4106..865baea23467ea 100644 --- a/Lib/fnmatch.py +++ b/Lib/fnmatch.py @@ -77,24 +77,30 @@ def translate(pat): There is no way to quote meta-characters. """ - STAR = object() - parts = _translate(pat, STAR, '.') - return _join_translated_parts(parts, STAR) + parts, star_indices = _translate(pat, '*', '.') + return _join_translated_parts(parts, star_indices) +_re_setops_sub = re.compile(r'([&~|])').sub +_re_escape = functools.lru_cache(maxsize=512)(re.escape) -def _translate(pat, STAR, QUESTION_MARK): +def _translate(pat, star, question_mark): res = [] add = res.append + star_indices = [] + i, n = 0, len(pat) while i < n: c = pat[i] i = i+1 if c == '*': + # store the position of the wildcard + star_indices.append(len(res)) + add(star) # compress consecutive `*` into one - if (not res) or res[-1] is not STAR: - add(STAR) + while i < n and pat[i] == '*': + i += 1 elif c == '?': - add(QUESTION_MARK) + add(question_mark) elif c == '[': j = i if j < n and pat[j] == '!': @@ -133,8 +139,6 @@ def _translate(pat, STAR, QUESTION_MARK): # Hyphens that create ranges shouldn't be escaped. stuff = '-'.join(s.replace('\\', r'\\').replace('-', r'\-') for s in chunks) - # Escape set operations (&&, ~~ and ||). - stuff = re.sub(r'([&~|])', r'\\\1', stuff) i = j+1 if not stuff: # Empty range: never match. @@ -143,50 +147,40 @@ def _translate(pat, STAR, QUESTION_MARK): # Negated empty range: match any character. add('.') else: + # Escape set operations (&&, ~~ and ||). + stuff = _re_setops_sub(r'\\\1', stuff) if stuff[0] == '!': stuff = '^' + stuff[1:] elif stuff[0] in ('^', '['): stuff = '\\' + stuff add(f'[{stuff}]') else: - add(re.escape(c)) - assert i == n - return res - - -def _join_translated_parts(inp, STAR): - # Deal with STARs. - res = [] - add = res.append - i, n = 0, len(inp) - # Fixed pieces at the start? - while i < n and inp[i] is not STAR: - add(inp[i]) - i += 1 - # Now deal with STAR fixed STAR fixed ... - # For an interior `STAR fixed` pairing, we want to do a minimal - # .*? match followed by `fixed`, with no possibility of backtracking. - # Atomic groups ("(?>...)") allow us to spell that directly. - # Note: people rely on the undocumented ability to join multiple - # translate() results together via "|" to build large regexps matching - # "one of many" shell patterns. - while i < n: - assert inp[i] is STAR - i += 1 - if i == n: - add(".*") - break - assert inp[i] is not STAR - fixed = [] - while i < n and inp[i] is not STAR: - fixed.append(inp[i]) - i += 1 - fixed = "".join(fixed) - if i == n: - add(".*") - add(fixed) - else: - add(f"(?>.*?{fixed})") + add(_re_escape(c)) assert i == n - res = "".join(res) + return res, star_indices + + +def _join_translated_parts(parts, star_indices): + if not star_indices: + return fr'(?s:{"".join(parts)})\Z' + iter_star_indices = iter(star_indices) + j = next(iter_star_indices) + buffer = parts[:j] # fixed pieces at the start + append, extend = buffer.append, buffer.extend + i = j + 1 + for j in iter_star_indices: + # Now deal with STAR fixed STAR fixed ... + # For an interior `STAR fixed` pairing, we want to do a minimal + # .*? match followed by `fixed`, with no possibility of backtracking. + # Atomic groups ("(?>...)") allow us to spell that directly. 
+ # Note: people rely on the undocumented ability to join multiple + # translate() results together via "|" to build large regexps matching + # "one of many" shell patterns. + append('(?>.*?') + extend(parts[i:j]) + append(')') + i = j + 1 + append('.*') + extend(parts[i:]) + res = ''.join(buffer) return fr'(?s:{res})\Z' diff --git a/Lib/getopt.py b/Lib/getopt.py index a9c452a601ee81..25f3e2439b3e35 100644 --- a/Lib/getopt.py +++ b/Lib/getopt.py @@ -185,11 +185,13 @@ def long_has_args(opt, longopts): return True, opt elif opt + '=?' in possibilities: return '?', opt - # No exact match, so better be unique. + # Possibilities must be unique to be accepted if len(possibilities) > 1: - # XXX since possibilities contains all valid continuations, might be - # nice to work them into the error msg - raise GetoptError(_('option --%s not a unique prefix') % opt, opt) + raise GetoptError( + _("option --%s not a unique prefix; possible options: %s") + % (opt, ", ".join(possibilities)), + opt, + ) assert len(possibilities) == 1 unique_match = possibilities[0] if unique_match.endswith('=?'): diff --git a/Lib/glob.py b/Lib/glob.py index ce9b3698888dd9..690ab1b8b9fb1d 100644 --- a/Lib/glob.py +++ b/Lib/glob.py @@ -312,7 +312,7 @@ def translate(pat, *, recursive=False, include_hidden=False, seps=None): if part: if not include_hidden and part[0] in '*?': results.append(r'(?!\.)') - results.extend(fnmatch._translate(part, f'{not_sep}*', not_sep)) + results.extend(fnmatch._translate(part, f'{not_sep}*', not_sep)[0]) if idx < last_part_idx: results.append(any_sep) res = ''.join(results) diff --git a/Lib/mimetypes.py b/Lib/mimetypes.py index 210d2264757d08..753238354f6d36 100644 --- a/Lib/mimetypes.py +++ b/Lib/mimetypes.py @@ -23,11 +23,6 @@ read_mime_types(file) -- parse one file, return a dictionary or None """ -import os -import sys -import posixpath -import urllib.parse - try: from _winapi import _mimetypes_read_windows_registry except ImportError: @@ -119,6 +114,10 @@ def guess_type(self, url, strict=True): Optional 'strict' argument when False adds a bunch of commonly found, but non-standard types. """ + # Lazy import to improve module import time + import os + import urllib.parse + # TODO: Deprecate accepting file paths (in particular path-like objects). url = os.fspath(url) p = urllib.parse.urlparse(url) @@ -146,6 +145,10 @@ def guess_type(self, url, strict=True): if '=' in type or '/' not in type: type = 'text/plain' return type, None # never compressed, so encoding is None + + # Lazy import to improve module import time + import posixpath + return self._guess_file_type(url, strict, posixpath.splitext) def guess_file_type(self, path, *, strict=True): @@ -153,6 +156,9 @@ def guess_file_type(self, path, *, strict=True): Similar to guess_type(), but takes file path instead of URL. """ + # Lazy import to improve module import time + import os + path = os.fsdecode(path) path = os.path.splitdrive(path)[1] return self._guess_file_type(path, strict, os.path.splitext) @@ -399,6 +405,9 @@ def init(files=None): else: db = _db + # Lazy import to improve module import time + import os + for file in files: if os.path.isfile(file): db.read(file) @@ -445,7 +454,7 @@ def _default_mime_types(): } # Before adding new types, make sure they are either registered with IANA, - # at http://www.iana.org/assignments/media-types + # at https://www.iana.org/assignments/media-types/media-types.xhtml # or extensions, i.e. using the x- prefix # If you add to these, please keep them sorted by mime type. 
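The `Lib/fnmatch.py` rewrite above keeps the atomic-group strategy for `*`
wildcards (a minimal `.*?` match wrapped in `(?>...)` so the regex engine
cannot backtrack into it) while collecting the translated parts in a single
pass. A quick, illustrative check of the resulting pattern; the exact text is
indicative and may differ between Python versions:

```python
import fnmatch
import re

pat = fnmatch.translate("foo*bar*.txt")
print(pat)  # e.g. r'(?s:foo(?>.*?bar).*\.txt)\Z' -- the interior '*' becomes an atomic group
print(bool(re.match(pat, "foo-baz-bar-1.txt")))  # True
print(bool(re.match(pat, "foo-bar.txt.bak")))    # False: the pattern is anchored with \Z
```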
@@ -550,19 +559,28 @@ def _default_mime_types(): '.woff2' : 'font/woff2', '.avif' : 'image/avif', '.bmp' : 'image/bmp', + '.emf' : 'image/emf', + '.fits' : 'image/fits', + '.g3' : 'image/g3fax', '.gif' : 'image/gif', '.ief' : 'image/ief', + '.jp2' : 'image/jp2', '.jpg' : 'image/jpeg', '.jpe' : 'image/jpeg', '.jpeg' : 'image/jpeg', + '.jpm' : 'image/jpm', + '.jpx' : 'image/jpx', '.heic' : 'image/heic', '.heif' : 'image/heif', '.png' : 'image/png', '.svg' : 'image/svg+xml', + '.t38' : 'image/t38', '.tiff' : 'image/tiff', '.tif' : 'image/tiff', + '.tfx' : 'image/tiff-fx', '.ico' : 'image/vnd.microsoft.icon', '.webp' : 'image/webp', + '.wmf' : 'image/wmf', '.ras' : 'image/x-cmu-raster', '.pnm' : 'image/x-portable-anymap', '.pbm' : 'image/x-portable-bitmap', @@ -637,6 +655,7 @@ def _default_mime_types(): def _main(): import getopt + import sys USAGE = """\ Usage: mimetypes.py [options] type diff --git a/Lib/multiprocessing/connection.py b/Lib/multiprocessing/connection.py index 7e901cf2fb9852..996887cb713942 100644 --- a/Lib/multiprocessing/connection.py +++ b/Lib/multiprocessing/connection.py @@ -181,6 +181,10 @@ def close(self): finally: self._handle = None + def _detach(self): + """Stop managing the underlying file descriptor or handle.""" + self._handle = None + def send_bytes(self, buf, offset=0, size=None): """Send the bytes data from a bytes-like object""" self._check_closed() diff --git a/Lib/multiprocessing/forkserver.py b/Lib/multiprocessing/forkserver.py index bff7fb91d974b3..df9b9be9d1898b 100644 --- a/Lib/multiprocessing/forkserver.py +++ b/Lib/multiprocessing/forkserver.py @@ -9,6 +9,7 @@ import threading import warnings +from . import AuthenticationError from . import connection from . import process from .context import reduction @@ -25,6 +26,7 @@ MAXFDS_TO_SEND = 256 SIGNED_STRUCT = struct.Struct('q') # large enough for pid_t +_AUTHKEY_LEN = 32 # <= PIPEBUF so it fits a single write to an empty pipe. # # Forkserver class @@ -33,6 +35,7 @@ class ForkServer(object): def __init__(self): + self._forkserver_authkey = None self._forkserver_address = None self._forkserver_alive_fd = None self._forkserver_pid = None @@ -59,6 +62,7 @@ def _stop_unlocked(self): if not util.is_abstract_socket_namespace(self._forkserver_address): os.unlink(self._forkserver_address) self._forkserver_address = None + self._forkserver_authkey = None def set_forkserver_preload(self, modules_names): '''Set list of module names to try to load in forkserver process.''' @@ -83,6 +87,7 @@ def connect_to_new_process(self, fds): process data. ''' self.ensure_running() + assert self._forkserver_authkey if len(fds) + 4 >= MAXFDS_TO_SEND: raise ValueError('too many fds') with socket.socket(socket.AF_UNIX) as client: @@ -93,6 +98,18 @@ def connect_to_new_process(self, fds): resource_tracker.getfd()] allfds += fds try: + client.setblocking(True) + wrapped_client = connection.Connection(client.fileno()) + # The other side of this exchange happens in the child as + # implemented in main(). 
+ try: + connection.answer_challenge( + wrapped_client, self._forkserver_authkey) + connection.deliver_challenge( + wrapped_client, self._forkserver_authkey) + finally: + wrapped_client._detach() + del wrapped_client reduction.sendfds(client, allfds) return parent_r, parent_w except: @@ -120,6 +137,7 @@ def ensure_running(self): return # dead, launch it again os.close(self._forkserver_alive_fd) + self._forkserver_authkey = None self._forkserver_address = None self._forkserver_alive_fd = None self._forkserver_pid = None @@ -130,9 +148,9 @@ def ensure_running(self): if self._preload_modules: desired_keys = {'main_path', 'sys_path'} data = spawn.get_preparation_data('ignore') - data = {x: y for x, y in data.items() if x in desired_keys} + main_kws = {x: y for x, y in data.items() if x in desired_keys} else: - data = {} + main_kws = {} with socket.socket(socket.AF_UNIX) as listener: address = connection.arbitrary_address('AF_UNIX') @@ -144,19 +162,31 @@ def ensure_running(self): # all client processes own the write end of the "alive" pipe; # when they all terminate the read end becomes ready. alive_r, alive_w = os.pipe() + # A short lived pipe to initialize the forkserver authkey. + authkey_r, authkey_w = os.pipe() try: - fds_to_pass = [listener.fileno(), alive_r] + fds_to_pass = [listener.fileno(), alive_r, authkey_r] + main_kws['authkey_r'] = authkey_r cmd %= (listener.fileno(), alive_r, self._preload_modules, - data) + main_kws) exe = spawn.get_executable() args = [exe] + util._args_from_interpreter_flags() args += ['-c', cmd] pid = util.spawnv_passfds(exe, args, fds_to_pass) except: os.close(alive_w) + os.close(authkey_w) raise finally: os.close(alive_r) + os.close(authkey_r) + # Authenticate our control socket to prevent access from + # processes we have not shared this key with. + try: + self._forkserver_authkey = os.urandom(_AUTHKEY_LEN) + os.write(authkey_w, self._forkserver_authkey) + finally: + os.close(authkey_w) self._forkserver_address = address self._forkserver_alive_fd = alive_w self._forkserver_pid = pid @@ -165,8 +195,18 @@ def ensure_running(self): # # -def main(listener_fd, alive_r, preload, main_path=None, sys_path=None): - '''Run forkserver.''' +def main(listener_fd, alive_r, preload, main_path=None, sys_path=None, + *, authkey_r=None): + """Run forkserver.""" + if authkey_r is not None: + try: + authkey = os.read(authkey_r, _AUTHKEY_LEN) + assert len(authkey) == _AUTHKEY_LEN, f'{len(authkey)} < {_AUTHKEY_LEN}' + finally: + os.close(authkey_r) + else: + authkey = b'' + if preload: if sys_path is not None: sys.path[:] = sys_path @@ -257,8 +297,24 @@ def sigchld_handler(*_unused): if listener in rfds: # Incoming fork request with listener.accept()[0] as s: - # Receive fds from client - fds = reduction.recvfds(s, MAXFDS_TO_SEND + 1) + try: + if authkey: + wrapped_s = connection.Connection(s.fileno()) + # The other side of this exchange happens in + # in connect_to_new_process(). 
+ try: + connection.deliver_challenge( + wrapped_s, authkey) + connection.answer_challenge( + wrapped_s, authkey) + finally: + wrapped_s._detach() + del wrapped_s + # Receive fds from client + fds = reduction.recvfds(s, MAXFDS_TO_SEND + 1) + except (EOFError, BrokenPipeError, AuthenticationError): + s.close() + continue if len(fds) > MAXFDS_TO_SEND: raise RuntimeError( "Too many ({0:n}) fds to send".format( diff --git a/Lib/multiprocessing/reduction.py b/Lib/multiprocessing/reduction.py index 5593f0682f7fce..fcccd3eef86cc7 100644 --- a/Lib/multiprocessing/reduction.py +++ b/Lib/multiprocessing/reduction.py @@ -139,15 +139,12 @@ def detach(self): __all__ += ['DupFd', 'sendfds', 'recvfds'] import array - # On MacOSX we should acknowledge receipt of fds -- see Issue14669 - ACKNOWLEDGE = sys.platform == 'darwin' - def sendfds(sock, fds): '''Send an array of fds over an AF_UNIX socket.''' fds = array.array('i', fds) msg = bytes([len(fds) % 256]) sock.sendmsg([msg], [(socket.SOL_SOCKET, socket.SCM_RIGHTS, fds)]) - if ACKNOWLEDGE and sock.recv(1) != b'A': + if sock.recv(1) != b'A': raise RuntimeError('did not receive acknowledgement of fd') def recvfds(sock, size): @@ -158,8 +155,11 @@ def recvfds(sock, size): if not msg and not ancdata: raise EOFError try: - if ACKNOWLEDGE: - sock.send(b'A') + # We send/recv an Ack byte after the fds to work around an old + # macOS bug; it isn't clear if this is still required but it + # makes unit testing fd sending easier. + # See: https://github.com/python/cpython/issues/58874 + sock.send(b'A') # Acknowledge if len(ancdata) != 1: raise RuntimeError('received %d items of ancdata' % len(ancdata)) diff --git a/Lib/nturl2path.py b/Lib/nturl2path.py index 255eb2f547c2ce..7e13ae3128333d 100644 --- a/Lib/nturl2path.py +++ b/Lib/nturl2path.py @@ -15,21 +15,27 @@ def url2pathname(url): # become # C:\foo\bar\spam.foo import string, urllib.parse + if url[:3] == '///': + # URL has an empty authority section, so the path begins on the third + # character. + url = url[2:] + elif url[:12] == '//localhost/': + # Skip past 'localhost' authority. + url = url[11:] + if url[:3] == '///': + # Skip past extra slash before UNC drive in URL path. + url = url[1:] # Windows itself uses ":" even in URLs. url = url.replace(':', '|') if not '|' in url: # No drive specifier, just convert slashes - if url[:3] == '///': - # URL has an empty authority section, so the path begins on the - # third character. - url = url[2:] # make sure not to convert quoted slashes :-) return urllib.parse.unquote(url.replace('/', '\\')) comp = url.split('|') if len(comp) != 2 or comp[0][-1] not in string.ascii_letters: error = 'Bad URL: ' + url raise OSError(error) - drive = comp[0][-1].upper() + drive = comp[0][-1] tail = urllib.parse.unquote(comp[1].replace('/', '\\')) return drive + ':' + tail @@ -40,6 +46,7 @@ def pathname2url(p): # C:\foo\bar\spam.foo # becomes # ///C:/foo/bar/spam.foo + import ntpath import urllib.parse # First, clean up some special forms. We are going to sacrifice # the additional information anyway @@ -48,16 +55,17 @@ def pathname2url(p): p = p[4:] if p[:4].upper() == 'UNC/': p = '//' + p[4:] - elif p[1:2] != ':': - raise OSError('Bad path: ' + p) - if not ':' in p: - # No DOS drive specified, just quote the pathname - return urllib.parse.quote(p) - comp = p.split(':', maxsplit=2) - if len(comp) != 2 or len(comp[0]) > 1: - error = 'Bad path: ' + p - raise OSError(error) + drive, root, tail = ntpath.splitroot(p) + if drive: + if drive[1:] == ':': + # DOS drive specified. 
Add three slashes to the start, producing + # an authority section with a zero-length authority, and a path + # section starting with a single slash. + drive = f'///{drive}' + drive = urllib.parse.quote(drive, safe='/:') + elif root: + # Add explicitly empty authority to path beginning with one slash. + root = f'//{root}' - drive = urllib.parse.quote(comp[0].upper()) - tail = urllib.parse.quote(comp[1]) - return '///' + drive + ':' + tail + tail = urllib.parse.quote(tail) + return drive + root + tail diff --git a/Lib/pickletools.py b/Lib/pickletools.py index c462d26da97ce1..d9c4fb1e63e91a 100644 --- a/Lib/pickletools.py +++ b/Lib/pickletools.py @@ -312,7 +312,7 @@ def read_uint8(f): doc="Eight-byte unsigned integer, little-endian.") -def read_stringnl(f, decode=True, stripquotes=True): +def read_stringnl(f, decode=True, stripquotes=True, *, encoding='latin-1'): r""" >>> import io >>> read_stringnl(io.BytesIO(b"'abcd'\nefg\n")) @@ -356,7 +356,7 @@ def read_stringnl(f, decode=True, stripquotes=True): raise ValueError("no string quotes around %r" % data) if decode: - data = codecs.escape_decode(data)[0].decode("ascii") + data = codecs.escape_decode(data)[0].decode(encoding) return data stringnl = ArgumentDescriptor( @@ -370,7 +370,7 @@ def read_stringnl(f, decode=True, stripquotes=True): """) def read_stringnl_noescape(f): - return read_stringnl(f, stripquotes=False) + return read_stringnl(f, stripquotes=False, encoding='utf-8') stringnl_noescape = ArgumentDescriptor( name='stringnl_noescape', @@ -2509,7 +2509,10 @@ def dis(pickle, out=None, memo=None, indentlevel=4, annotate=0): # make a mild effort to align arguments line += ' ' * (10 - len(opcode.name)) if arg is not None: - line += ' ' + repr(arg) + if opcode.name in ("STRING", "BINSTRING", "SHORT_BINSTRING"): + line += ' ' + ascii(arg) + else: + line += ' ' + repr(arg) if markmsg: line += ' ' + markmsg if annotate: diff --git a/Lib/pydoc_data/topics.py b/Lib/pydoc_data/topics.py index 165d3853f95e29..f73e55d77311ae 100644 --- a/Lib/pydoc_data/topics.py +++ b/Lib/pydoc_data/topics.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Autogenerated by Sphinx on Tue Oct 15 22:34:02 2024 +# Autogenerated by Sphinx on Tue Nov 19 16:52:22 2024 # as part of the release process. topics = {'assert': 'The "assert" statement\n' '**********************\n' @@ -29,13 +29,12 @@ '(command\n' 'line option "-O"). The current code generator emits no code for ' 'an\n' - 'assert statement when optimization is requested at compile time. ' - 'Note\n' - 'that it is unnecessary to include the source code for the ' - 'expression\n' - 'that failed in the error message; it will be displayed as part of ' - 'the\n' - 'stack trace.\n' + '"assert" statement when optimization is requested at compile ' + 'time.\n' + 'Note that it is unnecessary to include the source code for the\n' + 'expression that failed in the error message; it will be displayed ' + 'as\n' + 'part of the stack trace.\n' '\n' 'Assignments to "__debug__" are illegal. The value for the ' 'built-in\n' @@ -713,7 +712,8 @@ 'should either\n' ' return the (computed) attribute value or raise an ' '"AttributeError"\n' - ' exception.\n' + ' exception. The "object" class itself does not provide ' + 'this method.\n' '\n' ' Note that if the attribute is found through the ' 'normal mechanism,\n' @@ -896,7 +896,9 @@ 'parents). 
In the\n' 'examples below, “the attribute” refers to the attribute ' 'whose name is\n' - 'the key of the property in the owner class’ "__dict__".\n' + 'the key of the property in the owner class’ "__dict__". ' + 'The "object"\n' + 'class itself does not implement any of these protocols.\n' '\n' 'object.__get__(self, instance, owner=None)\n' '\n' @@ -1569,7 +1571,9 @@ ' Called when the instance is “called” as a function; if ' 'this method\n' ' is defined, "x(arg1, arg2, ...)" roughly translates to\n' - ' "type(x).__call__(x, arg1, ...)".\n', + ' "type(x).__call__(x, arg1, ...)". The "object" class ' + 'itself does\n' + ' not provide this method.\n', 'calls': 'Calls\n' '*****\n' '\n' @@ -1754,6 +1758,9 @@ ' Function definitions. When the code block executes a "return"\n' ' statement, this specifies the return value of the function ' 'call.\n' + ' If execution reaches the end of the code block without executing ' + 'a\n' + ' "return" statement, the return value is "None".\n' '\n' 'a built-in function or method:\n' ' The result is up to the interpreter; see Built-in Functions for ' @@ -2802,18 +2809,15 @@ ' enter = type(manager).__enter__\n' ' exit = type(manager).__exit__\n' ' value = enter(manager)\n' - ' hit_except = False\n' '\n' ' try:\n' ' TARGET = value\n' ' SUITE\n' ' except:\n' - ' hit_except = True\n' ' if not exit(manager, *sys.exc_info()):\n' ' raise\n' - ' finally:\n' - ' if not hit_except:\n' - ' exit(manager, None, None, None)\n' + ' else:\n' + ' exit(manager, None, None, None)\n' '\n' 'With more than one item, the context managers are processed as ' 'if\n' @@ -4495,6 +4499,9 @@ '\n' 'For more information on context managers, see Context ' 'Manager Types.\n' + 'The "object" class itself does not provide the context ' + 'manager\n' + 'methods.\n' '\n' 'object.__enter__(self)\n' '\n' @@ -4764,17 +4771,20 @@ '\n' ' This is typically used for debugging, so it is important ' 'that the\n' - ' representation is information-rich and unambiguous.\n' + ' representation is information-rich and unambiguous. A ' + 'default\n' + ' implementation is provided by the "object" class ' + 'itself.\n' '\n' 'object.__str__(self)\n' '\n' - ' Called by "str(object)" and the built-in functions ' - '"format()" and\n' - ' "print()" to compute the “informal” or nicely printable ' - 'string\n' - ' representation of an object. The return value must be a ' - 'string\n' - ' object.\n' + ' Called by "str(object)", the default "__format__()" ' + 'implementation,\n' + ' and the built-in function "print()", to compute the ' + '“informal” or\n' + ' nicely printable string representation of an object. ' + 'The return\n' + ' value must be a str object.\n' '\n' ' This method differs from "object.__repr__()" in that ' 'there is no\n' @@ -4790,7 +4800,9 @@ '\n' ' Called by bytes to compute a byte-string representation ' 'of an\n' - ' object. This should return a "bytes" object.\n' + ' object. This should return a "bytes" object. The ' + '"object" class\n' + ' itself does not provide this method.\n' '\n' 'object.__format__(self, format_spec)\n' '\n' @@ -4818,6 +4830,11 @@ '\n' ' The return value must be a string object.\n' '\n' + ' The default implementation by the "object" class should ' + 'be given an\n' + ' empty *format_spec* string. 
It delegates to ' + '"__str__()".\n' + '\n' ' Changed in version 3.4: The __format__ method of ' '"object" itself\n' ' raises a "TypeError" if passed any non-empty string.\n' @@ -4875,6 +4892,16 @@ ' ordering operations from a single root operation, see\n' ' "functools.total_ordering()".\n' '\n' + ' By default, the "object" class provides implementations ' + 'consistent\n' + ' with Value comparisons: equality compares according to ' + 'object\n' + ' identity, and order comparisons raise "TypeError". Each ' + 'default\n' + ' method may generate these results directly, but may also ' + 'return\n' + ' "NotImplemented".\n' + '\n' ' See the paragraph on "__hash__()" for some important ' 'notes on\n' ' creating *hashable* objects which support custom ' @@ -4961,12 +4988,13 @@ '\n' ' User-defined classes have "__eq__()" and "__hash__()" ' 'methods by\n' - ' default; with them, all objects compare unequal (except ' - 'with\n' - ' themselves) and "x.__hash__()" returns an appropriate ' - 'value such\n' - ' that "x == y" implies both that "x is y" and "hash(x) == ' - 'hash(y)".\n' + ' default (inherited from the "object" class); with them, ' + 'all objects\n' + ' compare unequal (except with themselves) and ' + '"x.__hash__()" returns\n' + ' an appropriate value such that "x == y" implies both ' + 'that "x is y"\n' + ' and "hash(x) == hash(y)".\n' '\n' ' A class that overrides "__eq__()" and does not define ' '"__hash__()"\n' @@ -5037,9 +5065,9 @@ 'the object is\n' ' considered true if its result is nonzero. If a class ' 'defines\n' - ' neither "__len__()" nor "__bool__()", all its instances ' - 'are\n' - ' considered true.\n', + ' neither "__len__()" nor "__bool__()" (which is true of ' + 'the "object"\n' + ' class itself), all its instances are considered true.\n', 'debugger': '"pdb" — The Python Debugger\n' '***************************\n' '\n' @@ -7038,10 +7066,12 @@ 'printing fields |\n' '| | in the form ‘+000000120’. This alignment ' 'option is only |\n' - '| | valid for numeric types. It becomes the ' - 'default for |\n' - '| | numbers when ‘0’ immediately precedes the ' - 'field width. |\n' + '| | valid for numeric types, excluding "complex". ' + 'It becomes |\n' + '| | the default for numbers when ‘0’ immediately ' + 'precedes the |\n' + '| | field ' + 'width. |\n' '+-----------+------------------------------------------------------------+\n' '| "\'^\'" | Forces the field to be centered within the ' 'available |\n' @@ -7148,9 +7178,9 @@ 'field by a\n' 'zero ("\'0\'") character enables sign-aware zero-padding ' 'for numeric\n' - 'types. This is equivalent to a *fill* character of "\'0\'" ' - 'with an\n' - '*alignment* type of "\'=\'".\n' + 'types, excluding "complex". This is equivalent to a *fill* ' + 'character\n' + 'of "\'0\'" with an *alignment* type of "\'=\'".\n' '\n' 'Changed in version 3.10: Preceding the *width* field by ' '"\'0\'" no\n' @@ -7416,6 +7446,32 @@ ' ' '+-----------+------------------------------------------------------------+\n' '\n' + 'The result should be correctly rounded to a given precision ' + '"p" of\n' + 'digits after the decimal point. The rounding mode for ' + '"float" matches\n' + 'that of the "round()" builtin. For "Decimal", the rounding ' + 'mode of\n' + 'the current context will be used.\n' + '\n' + 'The available presentation types for "complex" are the same ' + 'as those\n' + 'for "float" ("\'%\'" is not allowed). 
Both the real and ' + 'imaginary\n' + 'components of a complex number are formatted as ' + 'floating-point\n' + 'numbers, according to the specified presentation type. ' + 'They are\n' + 'separated by the mandatory sign of the imaginary part, the ' + 'latter\n' + 'being terminated by a "j" suffix. If the presentation type ' + 'is\n' + 'missing, the result will match the output of "str()" ' + '(complex numbers\n' + 'with a non-zero real part are also surrounded by ' + 'parentheses),\n' + 'possibly altered by other format modifiers.\n' + '\n' '\n' 'Format examples\n' '===============\n' @@ -7800,33 +7856,17 @@ '\n' ' global_stmt ::= "global" identifier ("," identifier)*\n' '\n' - 'The "global" statement is a declaration which holds for the ' - 'entire\n' - 'current code block. It means that the listed identifiers are to ' - 'be\n' - 'interpreted as globals. It would be impossible to assign to a ' - 'global\n' - 'variable without "global", although free variables may refer to\n' - 'globals without being declared global.\n' - '\n' - 'Names listed in a "global" statement must not be used in the same ' - 'code\n' - 'block textually preceding that "global" statement.\n' - '\n' - 'Names listed in a "global" statement must not be defined as ' - 'formal\n' - 'parameters, or as targets in "with" statements or "except" ' - 'clauses, or\n' - 'in a "for" target list, "class" definition, function definition,\n' - '"import" statement, or *variable annotations*.\n' + 'The "global" statement causes the listed identifiers to be ' + 'interpreted\n' + 'as globals. It would be impossible to assign to a global variable\n' + 'without "global", although free variables may refer to globals ' + 'without\n' + 'being declared global.\n' '\n' - '**CPython implementation detail:** The current implementation does ' - 'not\n' - 'enforce some of these restrictions, but programs should not abuse ' - 'this\n' - 'freedom, as future implementations may enforce them or silently ' - 'change\n' - 'the meaning of the program.\n' + 'The "global" statement applies to the entire scope of a function ' + 'or\n' + 'class body. A "SyntaxError" is raised if a variable is used or\n' + 'assigned to prior to its global declaration in the scope.\n' '\n' '**Programmer’s note:** "global" is a directive to the parser. It\n' 'applies only to code parsed at the same time as the "global"\n' @@ -8896,8 +8936,8 @@ 'scope,\n' 'or if there is no nonlocal scope, a "SyntaxError" is raised.\n' '\n' - 'The nonlocal statement applies to the entire scope of a function ' - 'or\n' + 'The "nonlocal" statement applies to the entire scope of a ' + 'function or\n' 'class body. A "SyntaxError" is raised if a variable is used or\n' 'assigned to prior to its nonlocal declaration in the scope.\n' '\n' @@ -8996,17 +9036,21 @@ '"divmod()",\n' ' "pow()", "**", "<<", ">>", "&", "^", "|") with reflected ' '(swapped)\n' - ' operands. These functions are only called if the left ' - 'operand does\n' - ' not support the corresponding operation [3] and the ' + ' operands. These functions are only called if the ' 'operands are of\n' - ' different types. 
[4] For instance, to evaluate the ' - 'expression "x -\n' - ' y", where *y* is an instance of a class that has an ' - '"__rsub__()"\n' - ' method, "type(y).__rsub__(y, x)" is called if ' - '"type(x).__sub__(x,\n' - ' y)" returns "NotImplemented".\n' + ' different types, when the left operand does not support ' + 'the\n' + ' corresponding operation [3], or the right operand’s ' + 'class is\n' + ' derived from the left operand’s class. [4] For instance, ' + 'to\n' + ' evaluate the expression "x - y", where *y* is an ' + 'instance of a\n' + ' class that has an "__rsub__()" method, ' + '"type(y).__rsub__(y, x)" is\n' + ' called if "type(x).__sub__(x, y)" returns ' + '"NotImplemented" or\n' + ' "type(y)" is a subclass of "type(x)". [5]\n' '\n' ' Note that ternary "pow()" will not try calling ' '"__rpow__()" (the\n' @@ -9651,56 +9695,58 @@ '\n' 'The following methods can be defined to implement ' 'container objects.\n' - 'Containers usually are *sequences* (such as "lists" or ' - '"tuples") or\n' - '*mappings* (like "dictionaries"), but can represent other ' - 'containers\n' - 'as well. The first set of methods is used either to ' - 'emulate a\n' - 'sequence or to emulate a mapping; the difference is that ' - 'for a\n' - 'sequence, the allowable keys should be the integers *k* ' - 'for which "0\n' - '<= k < N" where *N* is the length of the sequence, or ' - '"slice" objects,\n' - 'which define a range of items. It is also recommended ' - 'that mappings\n' - 'provide the methods "keys()", "values()", "items()", ' - '"get()",\n' - '"clear()", "setdefault()", "pop()", "popitem()", "copy()", ' + 'None of them are provided by the "object" class itself. ' + 'Containers\n' + 'usually are *sequences* (such as "lists" or "tuples") or ' + '*mappings*\n' + '(like *dictionaries*), but can represent other containers ' + 'as well.\n' + 'The first set of methods is used either to emulate a ' + 'sequence or to\n' + 'emulate a mapping; the difference is that for a sequence, ' + 'the\n' + 'allowable keys should be the integers *k* for which "0 <= ' + 'k < N" where\n' + '*N* is the length of the sequence, or "slice" objects, ' + 'which define a\n' + 'range of items. It is also recommended that mappings ' + 'provide the\n' + 'methods "keys()", "values()", "items()", "get()", ' + '"clear()",\n' + '"setdefault()", "pop()", "popitem()", "copy()", and ' + '"update()"\n' + 'behaving similar to those for Python’s standard ' + '"dictionary" objects.\n' + 'The "collections.abc" module provides a "MutableMapping" ' + '*abstract\n' + 'base class* to help create those methods from a base set ' + 'of\n' + '"__getitem__()", "__setitem__()", "__delitem__()", and ' + '"keys()".\n' + 'Mutable sequences should provide methods "append()", ' + '"count()",\n' + '"index()", "extend()", "insert()", "pop()", "remove()", ' + '"reverse()"\n' + 'and "sort()", like Python standard "list" objects. ' + 'Finally, sequence\n' + 'types should implement addition (meaning concatenation) ' 'and\n' - '"update()" behaving similar to those for Python’s ' - 'standard\n' - '"dictionary" objects. The "collections.abc" module ' - 'provides a\n' - '"MutableMapping" *abstract base class* to help create ' - 'those methods\n' - 'from a base set of "__getitem__()", "__setitem__()", ' - '"__delitem__()",\n' - 'and "keys()". 
Mutable sequences should provide methods ' - '"append()",\n' - '"count()", "index()", "extend()", "insert()", "pop()", ' - '"remove()",\n' - '"reverse()" and "sort()", like Python standard "list" ' - 'objects.\n' - 'Finally, sequence types should implement addition ' - '(meaning\n' - 'concatenation) and multiplication (meaning repetition) by ' - 'defining the\n' - 'methods "__add__()", "__radd__()", "__iadd__()", ' - '"__mul__()",\n' - '"__rmul__()" and "__imul__()" described below; they should ' - 'not define\n' - 'other numerical operators. It is recommended that both ' - 'mappings and\n' - 'sequences implement the "__contains__()" method to allow ' - 'efficient use\n' - 'of the "in" operator; for mappings, "in" should search the ' - 'mapping’s\n' - 'keys; for sequences, it should search through the values. ' - 'It is\n' - 'further recommended that both mappings and sequences ' - 'implement the\n' + 'multiplication (meaning repetition) by defining the ' + 'methods\n' + '"__add__()", "__radd__()", "__iadd__()", "__mul__()", ' + '"__rmul__()" and\n' + '"__imul__()" described below; they should not define other ' + 'numerical\n' + 'operators. It is recommended that both mappings and ' + 'sequences\n' + 'implement the "__contains__()" method to allow efficient ' + 'use of the\n' + '"in" operator; for mappings, "in" should search the ' + 'mapping’s keys;\n' + 'for sequences, it should search through the values. It is ' + 'further\n' + 'recommended that both mappings and sequences implement ' + 'the\n' '"__iter__()" method to allow efficient iteration through ' 'the\n' 'container; for mappings, "__iter__()" should iterate ' @@ -10240,17 +10286,19 @@ '\n' ' This is typically used for debugging, so it is important ' 'that the\n' - ' representation is information-rich and unambiguous.\n' + ' representation is information-rich and unambiguous. A ' + 'default\n' + ' implementation is provided by the "object" class itself.\n' '\n' 'object.__str__(self)\n' '\n' - ' Called by "str(object)" and the built-in functions ' - '"format()" and\n' - ' "print()" to compute the “informal” or nicely printable ' - 'string\n' - ' representation of an object. The return value must be a ' - 'string\n' - ' object.\n' + ' Called by "str(object)", the default "__format__()" ' + 'implementation,\n' + ' and the built-in function "print()", to compute the ' + '“informal” or\n' + ' nicely printable string representation of an object. The ' + 'return\n' + ' value must be a str object.\n' '\n' ' This method differs from "object.__repr__()" in that ' 'there is no\n' @@ -10266,7 +10314,9 @@ '\n' ' Called by bytes to compute a byte-string representation ' 'of an\n' - ' object. This should return a "bytes" object.\n' + ' object. This should return a "bytes" object. The "object" ' + 'class\n' + ' itself does not provide this method.\n' '\n' 'object.__format__(self, format_spec)\n' '\n' @@ -10294,6 +10344,10 @@ '\n' ' The return value must be a string object.\n' '\n' + ' The default implementation by the "object" class should ' + 'be given an\n' + ' empty *format_spec* string. 
It delegates to "__str__()".\n' + '\n' ' Changed in version 3.4: The __format__ method of "object" ' 'itself\n' ' raises a "TypeError" if passed any non-empty string.\n' @@ -10351,6 +10405,16 @@ ' ordering operations from a single root operation, see\n' ' "functools.total_ordering()".\n' '\n' + ' By default, the "object" class provides implementations ' + 'consistent\n' + ' with Value comparisons: equality compares according to ' + 'object\n' + ' identity, and order comparisons raise "TypeError". Each ' + 'default\n' + ' method may generate these results directly, but may also ' + 'return\n' + ' "NotImplemented".\n' + '\n' ' See the paragraph on "__hash__()" for some important ' 'notes on\n' ' creating *hashable* objects which support custom ' @@ -10436,12 +10500,13 @@ '\n' ' User-defined classes have "__eq__()" and "__hash__()" ' 'methods by\n' - ' default; with them, all objects compare unequal (except ' - 'with\n' - ' themselves) and "x.__hash__()" returns an appropriate ' - 'value such\n' - ' that "x == y" implies both that "x is y" and "hash(x) == ' - 'hash(y)".\n' + ' default (inherited from the "object" class); with them, ' + 'all objects\n' + ' compare unequal (except with themselves) and ' + '"x.__hash__()" returns\n' + ' an appropriate value such that "x == y" implies both that ' + '"x is y"\n' + ' and "hash(x) == hash(y)".\n' '\n' ' A class that overrides "__eq__()" and does not define ' '"__hash__()"\n' @@ -10510,9 +10575,9 @@ 'object is\n' ' considered true if its result is nonzero. If a class ' 'defines\n' - ' neither "__len__()" nor "__bool__()", all its instances ' - 'are\n' - ' considered true.\n' + ' neither "__len__()" nor "__bool__()" (which is true of ' + 'the "object"\n' + ' class itself), all its instances are considered true.\n' '\n' '\n' 'Customizing attribute access\n' @@ -10536,7 +10601,8 @@ 'either\n' ' return the (computed) attribute value or raise an ' '"AttributeError"\n' - ' exception.\n' + ' exception. The "object" class itself does not provide ' + 'this method.\n' '\n' ' Note that if the attribute is found through the normal ' 'mechanism,\n' @@ -10716,7 +10782,9 @@ 'parents). In the\n' 'examples below, “the attribute” refers to the attribute ' 'whose name is\n' - 'the key of the property in the owner class’ "__dict__".\n' + 'the key of the property in the owner class’ "__dict__". The ' + '"object"\n' + 'class itself does not implement any of these protocols.\n' '\n' 'object.__get__(self, instance, owner=None)\n' '\n' @@ -11599,7 +11667,9 @@ ' Called when the instance is “called” as a function; if ' 'this method\n' ' is defined, "x(arg1, arg2, ...)" roughly translates to\n' - ' "type(x).__call__(x, arg1, ...)".\n' + ' "type(x).__call__(x, arg1, ...)". The "object" class ' + 'itself does\n' + ' not provide this method.\n' '\n' '\n' 'Emulating container types\n' @@ -11607,54 +11677,54 @@ '\n' 'The following methods can be defined to implement container ' 'objects.\n' - 'Containers usually are *sequences* (such as "lists" or ' - '"tuples") or\n' - '*mappings* (like "dictionaries"), but can represent other ' - 'containers\n' - 'as well. The first set of methods is used either to emulate ' - 'a\n' - 'sequence or to emulate a mapping; the difference is that for ' - 'a\n' - 'sequence, the allowable keys should be the integers *k* for ' - 'which "0\n' - '<= k < N" where *N* is the length of the sequence, or ' - '"slice" objects,\n' - 'which define a range of items. 
It is also recommended that ' - 'mappings\n' - 'provide the methods "keys()", "values()", "items()", ' - '"get()",\n' - '"clear()", "setdefault()", "pop()", "popitem()", "copy()", ' - 'and\n' - '"update()" behaving similar to those for Python’s standard\n' - '"dictionary" objects. The "collections.abc" module provides ' - 'a\n' - '"MutableMapping" *abstract base class* to help create those ' - 'methods\n' - 'from a base set of "__getitem__()", "__setitem__()", ' - '"__delitem__()",\n' - 'and "keys()". Mutable sequences should provide methods ' - '"append()",\n' - '"count()", "index()", "extend()", "insert()", "pop()", ' - '"remove()",\n' - '"reverse()" and "sort()", like Python standard "list" ' + 'None of them are provided by the "object" class itself. ' + 'Containers\n' + 'usually are *sequences* (such as "lists" or "tuples") or ' + '*mappings*\n' + '(like *dictionaries*), but can represent other containers as ' + 'well.\n' + 'The first set of methods is used either to emulate a ' + 'sequence or to\n' + 'emulate a mapping; the difference is that for a sequence, ' + 'the\n' + 'allowable keys should be the integers *k* for which "0 <= k ' + '< N" where\n' + '*N* is the length of the sequence, or "slice" objects, which ' + 'define a\n' + 'range of items. It is also recommended that mappings ' + 'provide the\n' + 'methods "keys()", "values()", "items()", "get()", ' + '"clear()",\n' + '"setdefault()", "pop()", "popitem()", "copy()", and ' + '"update()"\n' + 'behaving similar to those for Python’s standard "dictionary" ' 'objects.\n' - 'Finally, sequence types should implement addition (meaning\n' - 'concatenation) and multiplication (meaning repetition) by ' - 'defining the\n' - 'methods "__add__()", "__radd__()", "__iadd__()", ' - '"__mul__()",\n' - '"__rmul__()" and "__imul__()" described below; they should ' - 'not define\n' - 'other numerical operators. It is recommended that both ' - 'mappings and\n' - 'sequences implement the "__contains__()" method to allow ' - 'efficient use\n' - 'of the "in" operator; for mappings, "in" should search the ' - 'mapping’s\n' - 'keys; for sequences, it should search through the values. ' - 'It is\n' - 'further recommended that both mappings and sequences ' - 'implement the\n' + 'The "collections.abc" module provides a "MutableMapping" ' + '*abstract\n' + 'base class* to help create those methods from a base set of\n' + '"__getitem__()", "__setitem__()", "__delitem__()", and ' + '"keys()".\n' + 'Mutable sequences should provide methods "append()", ' + '"count()",\n' + '"index()", "extend()", "insert()", "pop()", "remove()", ' + '"reverse()"\n' + 'and "sort()", like Python standard "list" objects. Finally, ' + 'sequence\n' + 'types should implement addition (meaning concatenation) and\n' + 'multiplication (meaning repetition) by defining the methods\n' + '"__add__()", "__radd__()", "__iadd__()", "__mul__()", ' + '"__rmul__()" and\n' + '"__imul__()" described below; they should not define other ' + 'numerical\n' + 'operators. It is recommended that both mappings and ' + 'sequences\n' + 'implement the "__contains__()" method to allow efficient use ' + 'of the\n' + '"in" operator; for mappings, "in" should search the ' + 'mapping’s keys;\n' + 'for sequences, it should search through the values. 
It is ' + 'further\n' + 'recommended that both mappings and sequences implement the\n' '"__iter__()" method to allow efficient iteration through ' 'the\n' 'container; for mappings, "__iter__()" should iterate through ' @@ -11913,17 +11983,21 @@ '"divmod()",\n' ' "pow()", "**", "<<", ">>", "&", "^", "|") with reflected ' '(swapped)\n' - ' operands. These functions are only called if the left ' - 'operand does\n' - ' not support the corresponding operation [3] and the ' + ' operands. These functions are only called if the ' 'operands are of\n' - ' different types. [4] For instance, to evaluate the ' - 'expression "x -\n' - ' y", where *y* is an instance of a class that has an ' - '"__rsub__()"\n' - ' method, "type(y).__rsub__(y, x)" is called if ' - '"type(x).__sub__(x,\n' - ' y)" returns "NotImplemented".\n' + ' different types, when the left operand does not support ' + 'the\n' + ' corresponding operation [3], or the right operand’s class ' + 'is\n' + ' derived from the left operand’s class. [4] For instance, ' + 'to\n' + ' evaluate the expression "x - y", where *y* is an instance ' + 'of a\n' + ' class that has an "__rsub__()" method, ' + '"type(y).__rsub__(y, x)" is\n' + ' called if "type(x).__sub__(x, y)" returns ' + '"NotImplemented" or\n' + ' "type(y)" is a subclass of "type(x)". [5]\n' '\n' ' Note that ternary "pow()" will not try calling ' '"__rpow__()" (the\n' @@ -12066,6 +12140,9 @@ '\n' 'For more information on context managers, see Context ' 'Manager Types.\n' + 'The "object" class itself does not provide the context ' + 'manager\n' + 'methods.\n' '\n' 'object.__enter__(self)\n' '\n' @@ -14794,13 +14871,13 @@ ' modules created dynamically using the "types.ModuleType"\n' ' constructor. Previously the attribute was optional.\n' '\n' - ' Deprecated since version 3.12, removed in version 3.14: Setting\n' - ' "__loader__" on a module while failing to set "__spec__.loader" ' - 'is\n' - ' deprecated. In Python 3.14, "__loader__" will cease to be set ' + ' Deprecated since version 3.12, will be removed in version 3.16:\n' + ' Setting "__loader__" on a module while failing to set\n' + ' "__spec__.loader" is deprecated. In Python 3.16, "__loader__" ' + 'will\n' + ' cease to be set or taken into consideration by the import system ' 'or\n' - ' taken into consideration by the import system or the standard\n' - ' library.\n' + ' the standard library.\n' '\n' 'module.__path__\n' '\n' @@ -15311,7 +15388,7 @@ '| | version ' '3.12: This attribute of code objects is |\n' '| | deprecated, ' - 'and may be removed in Python 3.14. |\n' + 'and may be removed in Python 3.15. |\n' '+----------------------------------------------------+----------------------------------------------------+\n' '| codeobject.co_stacksize | The required ' 'stack size of the code object |\n' @@ -15345,10 +15422,9 @@ '\n' 'Other bits in "co_flags" are reserved for internal use.\n' '\n' - 'If a code object represents a function, the first item in ' - '"co_consts"\n' - 'is the documentation string of the function, or "None" if ' - 'undefined.\n' + 'If a code object represents a function and has a docstring, the ' + 'first\n' + 'item in "co_consts" is the docstring of the function.\n' '\n' '\n' 'Methods on code objects\n' @@ -15996,10 +16072,10 @@ '"keys()" method\n' ' (in which case "__getitem__()" is called with every ' 'key returned\n' - ' from the method). or an iterable of key/value pairs ' - '(as tuples\n' - ' or other iterables of length two). 
If keyword ' - 'arguments are\n' + ' from the method) or an iterable of key/value pairs (as ' + 'tuples or\n' + ' other iterables of length two). If keyword arguments ' + 'are\n' ' specified, the dictionary is then updated with those ' 'key/value\n' ' pairs: "d.update(red=1, blue=2)".\n' @@ -17310,18 +17386,15 @@ ' enter = type(manager).__enter__\n' ' exit = type(manager).__exit__\n' ' value = enter(manager)\n' - ' hit_except = False\n' '\n' ' try:\n' ' TARGET = value\n' ' SUITE\n' ' except:\n' - ' hit_except = True\n' ' if not exit(manager, *sys.exc_info()):\n' ' raise\n' - ' finally:\n' - ' if not hit_except:\n' - ' exit(manager, None, None, None)\n' + ' else:\n' + ' exit(manager, None, None, None)\n' '\n' 'With more than one item, the context managers are processed as if\n' 'multiple "with" statements were nested:\n' @@ -17362,7 +17435,8 @@ '\n' 'A "yield" statement is semantically equivalent to a yield ' 'expression.\n' - 'The yield statement can be used to omit the parentheses that would\n' + 'The "yield" statement can be used to omit the parentheses that ' + 'would\n' 'otherwise be required in the equivalent yield expression ' 'statement.\n' 'For example, the yield statements\n' @@ -17378,10 +17452,9 @@ 'Yield expressions and statements are only used when defining a\n' '*generator* function, and are only used in the body of the ' 'generator\n' - 'function. Using yield in a function definition is sufficient to ' - 'cause\n' - 'that definition to create a generator function instead of a normal\n' - 'function.\n' + 'function. Using "yield" in a function definition is sufficient to\n' + 'cause that definition to create a generator function instead of a\n' + 'normal function.\n' '\n' 'For full details of "yield" semantics, refer to the Yield ' 'expressions\n' diff --git a/Lib/shutil.py b/Lib/shutil.py index dd3e0e0c5da54b..171489ca41f2a7 100644 --- a/Lib/shutil.py +++ b/Lib/shutil.py @@ -1550,21 +1550,21 @@ def which(cmd, mode=os.F_OK | os.X_OK, path=None): if sys.platform == "win32": # PATHEXT is necessary to check on Windows. pathext_source = os.getenv("PATHEXT") or _WIN_DEFAULT_PATHEXT - pathext = [ext for ext in pathext_source.split(os.pathsep) if ext] + pathext = pathext_source.split(os.pathsep) + pathext = [ext.rstrip('.') for ext in pathext if ext] if use_bytes: pathext = [os.fsencode(ext) for ext in pathext] - files = ([cmd] + [cmd + ext for ext in pathext]) + files = [cmd + ext for ext in pathext] - # gh-109590. If we are looking for an executable, we need to look - # for a PATHEXT match. The first cmd is the direct match - # (e.g. python.exe instead of python) - # Check that direct match first if and only if the extension is in PATHEXT - # Otherwise check it last - suffix = os.path.splitext(files[0])[1].upper() - if mode & os.X_OK and not any(suffix == ext.upper() for ext in pathext): - files.append(files.pop(0)) + # If X_OK in mode, simulate the cmd.exe behavior: look at direct + # match if and only if the extension is in PATHEXT. + # If X_OK not in mode, simulate the first result of where.exe: + # always look at direct match before a PATHEXT match. + normcmd = cmd.upper() + if not (mode & os.X_OK) or any(normcmd.endswith(ext.upper()) for ext in pathext): + files.insert(0, cmd) else: # On other platforms you don't have things like PATHEXT to tell you # what file suffixes are executable, so just pass on cmd as-is. 
@@ -1573,7 +1573,7 @@ def which(cmd, mode=os.F_OK | os.X_OK, path=None): seen = set() for dir in path: normdir = os.path.normcase(dir) - if not normdir in seen: + if normdir not in seen: seen.add(normdir) for thefile in files: name = os.path.join(dir, thefile) diff --git a/Lib/site.py b/Lib/site.py index 07a6361fad44e5..92bd1ccdadd924 100644 --- a/Lib/site.py +++ b/Lib/site.py @@ -94,6 +94,12 @@ def _trace(message): print(message, file=sys.stderr) +def _warn(*args, **kwargs): + import warnings + + warnings.warn(*args, **kwargs) + + def makepath(*paths): dir = os.path.join(*paths) try: @@ -442,8 +448,9 @@ def setcopyright(): """Set 'copyright' and 'credits' in builtins""" builtins.copyright = _sitebuiltins._Printer("copyright", sys.copyright) builtins.credits = _sitebuiltins._Printer("credits", """\ - Thanks to CWI, CNRI, BeOpen.com, Zope Corporation and a cast of thousands - for supporting Python development. See www.python.org for more information.""") + Thanks to CWI, CNRI, BeOpen, Zope Corporation, the Python Software + Foundation, and a cast of thousands for supporting Python + development. See www.python.org for more information.""") files, dirs = [], [] # Not all modules are required to have a __file__ attribute. See # PEP 420 for more details. @@ -497,9 +504,18 @@ def register_readline(): PYTHON_BASIC_REPL = False import atexit + + try: + try: + import readline + except ImportError: + readline = None + else: + import rlcompleter # noqa: F401 + except ImportError: + return + try: - import readline - import rlcompleter # noqa: F401 if PYTHON_BASIC_REPL: CAN_USE_PYREPL = False else: @@ -507,30 +523,36 @@ def register_readline(): sys.path = [p for p in original_path if p != ''] try: import _pyrepl.readline - import _pyrepl.unix_console + if os.name == "nt": + import _pyrepl.windows_console + console_errors = (_pyrepl.windows_console._error,) + else: + import _pyrepl.unix_console + console_errors = _pyrepl.unix_console._error from _pyrepl.main import CAN_USE_PYREPL finally: sys.path = original_path except ImportError: return - # Reading the initialization (config) file may not be enough to set a - # completion key, so we set one first and then read the file. - if readline.backend == 'editline': - readline.parse_and_bind('bind ^I rl_complete') - else: - readline.parse_and_bind('tab: complete') + if readline is not None: + # Reading the initialization (config) file may not be enough to set a + # completion key, so we set one first and then read the file. + if readline.backend == 'editline': + readline.parse_and_bind('bind ^I rl_complete') + else: + readline.parse_and_bind('tab: complete') - try: - readline.read_init_file() - except OSError: - # An OSError here could have many causes, but the most likely one - # is that there's no .inputrc file (or .editrc file in the case of - # Mac OS X + libedit) in the expected location. In that case, we - # want to ignore the exception. - pass + try: + readline.read_init_file() + except OSError: + # An OSError here could have many causes, but the most likely one + # is that there's no .inputrc file (or .editrc file in the case of + # Mac OS X + libedit) in the expected location. In that case, we + # want to ignore the exception. + pass - if readline.get_current_history_length() == 0: + if readline is None or readline.get_current_history_length() == 0: # If no history was loaded, default to .python_history, # or PYTHON_HISTORY. 
# The guard is necessary to avoid doubling history size at @@ -541,8 +563,10 @@ def register_readline(): if CAN_USE_PYREPL: readline_module = _pyrepl.readline - exceptions = (OSError, *_pyrepl.unix_console._error) + exceptions = (OSError, *console_errors) else: + if readline is None: + return readline_module = readline exceptions = OSError @@ -601,7 +625,10 @@ def venv(known_paths): elif key == 'home': sys._home = value - sys.prefix = sys.exec_prefix = site_prefix + if sys.prefix != site_prefix: + _warn(f'Unexpected value in sys.prefix, expected {site_prefix}, got {sys.prefix}', RuntimeWarning) + if sys.exec_prefix != site_prefix: + _warn(f'Unexpected value in sys.exec_prefix, expected {site_prefix}, got {sys.exec_prefix}', RuntimeWarning) # Doing this here ensures venv takes precedence over user-site addsitepackages(known_paths, [sys.prefix]) diff --git a/Lib/sysconfig/__init__.py b/Lib/sysconfig/__init__.py index 43f9276799b848..ee52700b51fd07 100644 --- a/Lib/sysconfig/__init__.py +++ b/Lib/sysconfig/__init__.py @@ -355,7 +355,8 @@ def _init_posix(vars): else: _temp = __import__(name, globals(), locals(), ['build_time_vars'], 0) build_time_vars = _temp.build_time_vars - vars.update(build_time_vars) + # GH-126920: Make sure we don't overwrite any of the keys already set + vars.update(build_time_vars | vars) def _init_non_posix(vars): """Initialize the module as appropriate for NT""" diff --git a/Lib/sysconfig/__main__.py b/Lib/sysconfig/__main__.py index d7257b9d2d00db..5660a6c5105b9f 100644 --- a/Lib/sysconfig/__main__.py +++ b/Lib/sysconfig/__main__.py @@ -1,5 +1,7 @@ +import json import os import sys +import types from sysconfig import ( _ALWAYS_STR, _PYTHON_BUILD, @@ -157,6 +159,19 @@ def _print_config_dict(d, stream): print ("}", file=stream) +def _get_pybuilddir(): + pybuilddir = f'build/lib.{get_platform()}-{get_python_version()}' + if hasattr(sys, "gettotalrefcount"): + pybuilddir += '-pydebug' + return pybuilddir + + +def _get_json_data_name(): + name = _get_sysconfigdata_name() + assert name.startswith('_sysconfigdata') + return name.replace('_sysconfigdata', '_sysconfig_vars') + '.json' + + def _generate_posix_vars(): """Generate the Python module containing build-time variables.""" vars = {} @@ -185,6 +200,8 @@ def _generate_posix_vars(): if _PYTHON_BUILD: vars['BLDSHARED'] = vars['LDSHARED'] + name = _get_sysconfigdata_name() + # There's a chicken-and-egg situation on OS X with regards to the # _sysconfigdata module after the changes introduced by #15298: # get_config_vars() is called by get_platform() as part of the @@ -196,16 +213,13 @@ def _generate_posix_vars(): # _sysconfigdata module manually and populate it with the build vars. # This is more than sufficient for ensuring the subsequent call to # get_platform() succeeds. - name = _get_sysconfigdata_name() - if 'darwin' in sys.platform: - import types - module = types.ModuleType(name) - module.build_time_vars = vars - sys.modules[name] = module + # GH-127178: Since we started generating a .json file, we also need this to + # be able to run sysconfig.get_config_vars(). 
+ module = types.ModuleType(name) + module.build_time_vars = vars + sys.modules[name] = module - pybuilddir = f'build/lib.{get_platform()}-{get_python_version()}' - if hasattr(sys, "gettotalrefcount"): - pybuilddir += '-pydebug' + pybuilddir = _get_pybuilddir() os.makedirs(pybuilddir, exist_ok=True) destfile = os.path.join(pybuilddir, name + '.py') @@ -215,6 +229,11 @@ def _generate_posix_vars(): f.write('build_time_vars = ') _print_config_dict(vars, stream=f) + # Write a JSON file with the output of sysconfig.get_config_vars + jsonfile = os.path.join(pybuilddir, _get_json_data_name()) + with open(jsonfile, 'w') as f: + json.dump(get_config_vars(), f, indent=2) + # Create file used for sys.path fixup -- see Modules/getpath.c with open('pybuilddir.txt', 'w', encoding='utf8') as f: f.write(pybuilddir) diff --git a/Lib/test/_test_multiprocessing.py b/Lib/test/_test_multiprocessing.py index 8329a848a90088..80b08b8ac66899 100644 --- a/Lib/test/_test_multiprocessing.py +++ b/Lib/test/_test_multiprocessing.py @@ -846,8 +846,8 @@ def test_error_on_stdio_flush_2(self): finally: setattr(sys, stream_name, old_stream) - @classmethod - def _sleep_and_set_event(self, evt, delay=0.0): + @staticmethod + def _sleep_and_set_event(evt, delay=0.0): time.sleep(delay) evt.set() @@ -898,6 +898,56 @@ def test_forkserver_sigkill(self): if os.name != 'nt': self.check_forkserver_death(signal.SIGKILL) + def test_forkserver_auth_is_enabled(self): + if self.TYPE == "threads": + self.skipTest(f"test not appropriate for {self.TYPE}") + if multiprocessing.get_start_method() != "forkserver": + self.skipTest("forkserver start method specific") + + forkserver = multiprocessing.forkserver._forkserver + forkserver.ensure_running() + self.assertTrue(forkserver._forkserver_pid) + authkey = forkserver._forkserver_authkey + self.assertTrue(authkey) + self.assertGreater(len(authkey), 15) + addr = forkserver._forkserver_address + self.assertTrue(addr) + + # Demonstrate that a raw auth handshake, as Client performs, does not + # raise an error. + client = multiprocessing.connection.Client(addr, authkey=authkey) + client.close() + + # That worked, now launch a quick process. + proc = self.Process(target=sys.exit) + proc.start() + proc.join() + self.assertEqual(proc.exitcode, 0) + + def test_forkserver_without_auth_fails(self): + if self.TYPE == "threads": + self.skipTest(f"test not appropriate for {self.TYPE}") + if multiprocessing.get_start_method() != "forkserver": + self.skipTest("forkserver start method specific") + + forkserver = multiprocessing.forkserver._forkserver + forkserver.ensure_running() + self.assertTrue(forkserver._forkserver_pid) + authkey_len = len(forkserver._forkserver_authkey) + with unittest.mock.patch.object( + forkserver, '_forkserver_authkey', None): + # With an incorrect authkey we should get an auth rejection + # rather than the above protocol error. + forkserver._forkserver_authkey = b'T' * authkey_len + proc = self.Process(target=sys.exit) + with self.assertRaises(multiprocessing.AuthenticationError): + proc.start() + del proc + + # authkey restored, launching processes should work again. 
+ proc = self.Process(target=sys.exit) + proc.start() + proc.join() # # diff --git a/Lib/test/clinic.test.c b/Lib/test/clinic.test.c index 4ad0b8b0910bbe..b6ae04ecf2f8ed 100644 --- a/Lib/test/clinic.test.c +++ b/Lib/test/clinic.test.c @@ -5303,9 +5303,7 @@ Test_meth_coexist_impl(TestObj *self) Test.property [clinic start generated code]*/ -#if defined(Test_property_HAS_DOCSTR) -# define Test_property_DOCSTR Test_property__doc__ -#else +#if !defined(Test_property_DOCSTR) # define Test_property_DOCSTR NULL #endif #if defined(TEST_PROPERTY_GETSETDEF) @@ -5326,16 +5324,14 @@ Test_property_get(TestObj *self, void *Py_UNUSED(context)) static PyObject * Test_property_get_impl(TestObj *self) -/*[clinic end generated code: output=27b519719d992e03 input=2d92b3449fbc7d2b]*/ +/*[clinic end generated code: output=7cadd0f539805266 input=2d92b3449fbc7d2b]*/ /*[clinic input] @setter Test.property [clinic start generated code]*/ -#if defined(TEST_PROPERTY_HAS_DOCSTR) -# define Test_property_DOCSTR Test_property__doc__ -#else +#if !defined(Test_property_DOCSTR) # define Test_property_DOCSTR NULL #endif #if defined(TEST_PROPERTY_GETSETDEF) @@ -5360,7 +5356,7 @@ Test_property_set(TestObj *self, PyObject *value, void *Py_UNUSED(context)) static int Test_property_set_impl(TestObj *self, PyObject *value) -/*[clinic end generated code: output=d51023f17c4ac3a1 input=3bc3f46a23c83a88]*/ +/*[clinic end generated code: output=e4342fe9bb1d7817 input=3bc3f46a23c83a88]*/ /*[clinic input] output push diff --git a/Lib/test/datetimetester.py b/Lib/test/datetimetester.py index dbe25ef57dea83..25a3015c4e19ce 100644 --- a/Lib/test/datetimetester.py +++ b/Lib/test/datetimetester.py @@ -510,6 +510,7 @@ class TestTimeDelta(HarmlessMixedComparison, unittest.TestCase): def test_constructor(self): eq = self.assertEqual + ra = self.assertRaises td = timedelta # Check keyword args to constructor @@ -533,6 +534,15 @@ def test_constructor(self): eq(td(seconds=0.001), td(milliseconds=1)) eq(td(milliseconds=0.001), td(microseconds=1)) + # Check type of args to constructor + ra(TypeError, lambda: td(weeks='1')) + ra(TypeError, lambda: td(days='1')) + ra(TypeError, lambda: td(hours='1')) + ra(TypeError, lambda: td(minutes='1')) + ra(TypeError, lambda: td(seconds='1')) + ra(TypeError, lambda: td(milliseconds='1')) + ra(TypeError, lambda: td(microseconds='1')) + def test_computations(self): eq = self.assertEqual td = timedelta diff --git a/Lib/test/libregrtest/mypy.ini b/Lib/test/libregrtest/mypy.ini index da75a27158a600..3fa9afcb7a4a8c 100644 --- a/Lib/test/libregrtest/mypy.ini +++ b/Lib/test/libregrtest/mypy.ini @@ -15,17 +15,12 @@ strict = True # Various stricter settings that we can't yet enable # Try to enable these in the following order: -disallow_any_generics = False disallow_incomplete_defs = False disallow_untyped_calls = False disallow_untyped_defs = False check_untyped_defs = False warn_return_any = False -# Enable --strict-optional for these ASAP: -[mypy-Lib.test.libregrtest.run_workers.*] -strict_optional = False - # Various internal modules that typeshed deliberately doesn't have stubs for: [mypy-_abc.*,_opcode.*,_overlapped.*,_testcapi.*,_testinternalcapi.*,test.*] ignore_missing_imports = True diff --git a/Lib/test/libregrtest/results.py b/Lib/test/libregrtest/results.py index 4f3e84282dc5dc..9eda926966dc7e 100644 --- a/Lib/test/libregrtest/results.py +++ b/Lib/test/libregrtest/results.py @@ -1,5 +1,6 @@ import sys import trace +from typing import TYPE_CHECKING from .runtests import RunTests from .result import State, 
TestResult, TestStats, Location @@ -7,6 +8,9 @@ StrPath, TestName, TestTuple, TestList, FilterDict, printlist, count, format_duration) +if TYPE_CHECKING: + from xml.etree.ElementTree import Element + # Python uses exit code 1 when an exception is not caught # argparse.ArgumentParser.error() uses exit code 2 @@ -34,7 +38,7 @@ def __init__(self) -> None: self.test_times: list[tuple[float, TestName]] = [] self.stats = TestStats() # used by --junit-xml - self.testsuite_xml: list = [] + self.testsuite_xml: list['Element'] = [] # used by -T with -j self.covered_lines: set[Location] = set() diff --git a/Lib/test/libregrtest/run_workers.py b/Lib/test/libregrtest/run_workers.py index dcc817ae9aceb6..424085a0050eb5 100644 --- a/Lib/test/libregrtest/run_workers.py +++ b/Lib/test/libregrtest/run_workers.py @@ -102,6 +102,9 @@ def __init__(self, super().__init__() +_NOT_RUNNING = "" + + class WorkerThread(threading.Thread): def __init__(self, worker_id: int, runner: "RunWorkers") -> None: super().__init__() @@ -111,8 +114,8 @@ def __init__(self, worker_id: int, runner: "RunWorkers") -> None: self.output = runner.output self.timeout = runner.worker_timeout self.log = runner.log - self.test_name: TestName | None = None - self.start_time: float | None = None + self.test_name = _NOT_RUNNING + self.start_time = time.monotonic() self._popen: subprocess.Popen[str] | None = None self._killed = False self._stopped = False @@ -129,7 +132,7 @@ def __repr__(self) -> str: popen = self._popen if popen is not None: dt = time.monotonic() - self.start_time - info.extend((f'pid={self._popen.pid}', + info.extend((f'pid={popen.pid}', f'time={format_duration(dt)}')) return '<%s>' % ' '.join(info) @@ -401,7 +404,7 @@ def run(self) -> None: except WorkerError as exc: mp_result = exc.mp_result finally: - self.test_name = None + self.test_name = _NOT_RUNNING mp_result.result.duration = time.monotonic() - self.start_time self.output.put((False, mp_result)) @@ -416,6 +419,9 @@ def run(self) -> None: def _wait_completed(self) -> None: popen = self._popen + # only needed for mypy: + if popen is None: + raise ValueError("Should never access `._popen` before calling `.run()`") try: popen.wait(WAIT_COMPLETED_TIMEOUT) @@ -451,7 +457,7 @@ def get_running(workers: list[WorkerThread]) -> str | None: running: list[str] = [] for worker in workers: test_name = worker.test_name - if not test_name: + if test_name == _NOT_RUNNING: continue dt = time.monotonic() - worker.start_time if dt >= PROGRESS_MIN_TIME: @@ -483,7 +489,7 @@ def __init__(self, num_workers: int, runtests: RunTests, self.worker_timeout: float | None = min(self.timeout * 1.5, self.timeout + 5 * 60) else: self.worker_timeout = None - self.workers: list[WorkerThread] | None = None + self.workers: list[WorkerThread] = [] jobs = self.runtests.get_jobs() if jobs is not None: @@ -503,7 +509,7 @@ def start_workers(self) -> None: processes = plural(nworkers, "process", "processes") msg = (f"Run {tests} in parallel using " f"{nworkers} worker {processes}") - if self.timeout: + if self.timeout and self.worker_timeout is not None: msg += (" (timeout: %s, worker timeout: %s)" % (format_duration(self.timeout), format_duration(self.worker_timeout))) @@ -555,7 +561,7 @@ def display_result(self, mp_result: MultiprocessResult) -> None: if mp_result.err_msg: # WORKER_BUG text += ' (%s)' % mp_result.err_msg - elif (result.duration >= PROGRESS_MIN_TIME and not pgo): + elif (result.duration and result.duration >= PROGRESS_MIN_TIME and not pgo): text += ' (%s)' % format_duration(result.duration) if 
not pgo: running = get_running(self.workers) diff --git a/Lib/test/libregrtest/runtests.py b/Lib/test/libregrtest/runtests.py index cd1ce8080a04df..130c036a62eefb 100644 --- a/Lib/test/libregrtest/runtests.py +++ b/Lib/test/libregrtest/runtests.py @@ -28,7 +28,7 @@ class JsonFile: file: int | None file_type: str - def configure_subprocess(self, popen_kwargs: dict) -> None: + def configure_subprocess(self, popen_kwargs: dict[str, Any]) -> None: match self.file_type: case JsonFileType.UNIX_FD: # Unix file descriptor diff --git a/Lib/test/libregrtest/worker.py b/Lib/test/libregrtest/worker.py index da24760a82c6c6..0c9f5bd6e42f11 100644 --- a/Lib/test/libregrtest/worker.py +++ b/Lib/test/libregrtest/worker.py @@ -20,7 +20,7 @@ def create_worker_process(runtests: WorkerRunTests, output_fd: int, - tmp_dir: StrPath | None = None) -> subprocess.Popen: + tmp_dir: StrPath | None = None) -> subprocess.Popen[str]: worker_json = runtests.as_json() cmd = runtests.create_python_cmd() diff --git a/Lib/test/pickletester.py b/Lib/test/pickletester.py index 8ef8fae44f1d25..cf020a48b81cfa 100644 --- a/Lib/test/pickletester.py +++ b/Lib/test/pickletester.py @@ -1357,6 +1357,41 @@ def test_bad_newobj_ex(self): self.check_unpickling_error(error, b'cbuiltins\nint\nN}\x92.') self.check_unpickling_error(error, b'cbuiltins\nint\n)N\x92.') + def test_bad_state(self): + c = C() + c.x = None + base = b'c__main__\nC\n)\x81' + self.assertEqual(self.loads(base + b'}X\x01\x00\x00\x00xNsb.'), c) + self.assertEqual(self.loads(base + b'N}X\x01\x00\x00\x00xNs\x86b.'), c) + # non-hashable dict key + self.check_unpickling_error(TypeError, base + b'}]Nsb.') + # state = list + error = (pickle.UnpicklingError, AttributeError) + self.check_unpickling_error(error, base + b'](}}eb.') + # state = 1-tuple + self.check_unpickling_error(error, base + b'}\x85b.') + # state = 3-tuple + self.check_unpickling_error(error, base + b'}}}\x87b.') + # non-hashable slot name + self.check_unpickling_error(TypeError, base + b'}}]Ns\x86b.') + # non-string slot name + self.check_unpickling_error(TypeError, base + b'}}NNs\x86b.') + # dict = True + self.check_unpickling_error(error, base + b'\x88}\x86b.') + # slots dict = True + self.check_unpickling_error(error, base + b'}\x88\x86b.') + + class BadKey1: + count = 1 + def __hash__(self): + if not self.count: + raise CustomError + self.count -= 1 + return 42 + __main__.BadKey1 = BadKey1 + # bad hashable dict key + self.check_unpickling_error(CustomError, base + b'}c__main__\nBadKey1\n)\x81Nsb.') + def test_bad_stack(self): badpickles = [ b'.', # STOP diff --git a/Lib/test/support/os_helper.py b/Lib/test/support/os_helper.py index 891405943b78c5..8071c248b9b67e 100644 --- a/Lib/test/support/os_helper.py +++ b/Lib/test/support/os_helper.py @@ -632,8 +632,7 @@ def fd_count(): if hasattr(os, 'sysconf'): try: MAXFD = os.sysconf("SC_OPEN_MAX") - except (OSError, ValueError): - # gh-118201: ValueError is raised intermittently on iOS + except OSError: pass old_modes = None diff --git a/Lib/test/support/strace_helper.py b/Lib/test/support/strace_helper.py index 90d4b5bccb6fa3..eab16ea3e2889f 100644 --- a/Lib/test/support/strace_helper.py +++ b/Lib/test/support/strace_helper.py @@ -1,6 +1,7 @@ import re import sys import textwrap +import os import unittest from dataclasses import dataclass from functools import cache @@ -70,6 +71,27 @@ def sections(self): return sections +def _filter_memory_call(call): + # mmap can operate on a fd or "MAP_ANONYMOUS" which gives a block of memory. 
+ # Ignore "MAP_ANONYMOUS + the "MAP_ANON" alias. + if call.syscall == "mmap" and "MAP_ANON" in call.args[3]: + return True + + if call.syscall in ("munmap", "mprotect"): + return True + + return False + + +def filter_memory(syscalls): + """Filter out memory allocation calls from File I/O calls. + + Some calls (mmap, munmap, etc) can be used on files or to just get a block + of memory. Use this function to filter out the memory related calls from + other calls.""" + + return [call for call in syscalls if not _filter_memory_call(call)] + @support.requires_subprocess() def strace_python(code, strace_flags, check=True): @@ -91,7 +113,8 @@ def _make_error(reason, details): res, cmd_line = run_python_until_end( "-c", textwrap.dedent(code), - __run_using_command=[_strace_binary] + strace_flags) + __run_using_command=[_strace_binary] + strace_flags, + ) except OSError as err: return _make_error("Caught OSError", err) @@ -141,9 +164,14 @@ def get_events(code, strace_flags, prelude, cleanup): return all_sections['code'] -def get_syscalls(code, strace_flags, prelude="", cleanup=""): +def get_syscalls(code, strace_flags, prelude="", cleanup="", + ignore_memory=True): """Get the syscalls which a given chunk of python code generates""" events = get_events(code, strace_flags, prelude=prelude, cleanup=cleanup) + + if ignore_memory: + events = filter_memory(events) + return [ev.syscall for ev in events] @@ -160,11 +188,18 @@ def requires_strace(): if sys.platform != "linux": return unittest.skip("Linux only, requires strace.") + if "LD_PRELOAD" in os.environ: + # Distribution packaging (ex. Debian `fakeroot` and Gentoo `sandbox`) + # use LD_PRELOAD to intercept system calls, which changes the overall + # set of system calls which breaks tests expecting a specific set of + # system calls). 
+ return unittest.skip("Not supported when LD_PRELOAD is intercepting system calls.") + if support.check_sanitizer(address=True, memory=True): return unittest.skip("LeakSanitizer does not work under ptrace (strace, gdb, etc)") return unittest.skipUnless(_can_strace(), "Requires working strace") -__all__ = ["get_events", "get_syscalls", "requires_strace", "strace_python", - "StraceEvent", "StraceResult"] +__all__ = ["filter_memory", "get_events", "get_syscalls", "requires_strace", + "strace_python", "StraceEvent", "StraceResult"] diff --git a/Lib/test/support/venv.py b/Lib/test/support/venv.py new file mode 100644 index 00000000000000..78e6a51ec1815e --- /dev/null +++ b/Lib/test/support/venv.py @@ -0,0 +1,70 @@ +import contextlib +import logging +import os +import subprocess +import shlex +import sys +import sysconfig +import tempfile +import venv + + +class VirtualEnvironment: + def __init__(self, prefix, **venv_create_args): + self._logger = logging.getLogger(self.__class__.__name__) + venv.create(prefix, **venv_create_args) + self._prefix = prefix + self._paths = sysconfig.get_paths( + scheme='venv', + vars={'base': self.prefix}, + expand=True, + ) + + @classmethod + @contextlib.contextmanager + def from_tmpdir(cls, *, prefix=None, dir=None, **venv_create_args): + delete = not bool(os.environ.get('PYTHON_TESTS_KEEP_VENV')) + with tempfile.TemporaryDirectory(prefix=prefix, dir=dir, delete=delete) as tmpdir: + yield cls(tmpdir, **venv_create_args) + + @property + def prefix(self): + return self._prefix + + @property + def paths(self): + return self._paths + + @property + def interpreter(self): + return os.path.join(self.paths['scripts'], os.path.basename(sys.executable)) + + def _format_output(self, name, data, indent='\t'): + if not data: + return indent + f'{name}: (none)' + if len(data.splitlines()) == 1: + return indent + f'{name}: {data}' + else: + prefixed_lines = '\n'.join(indent + '> ' + line for line in data.splitlines()) + return indent + f'{name}:\n' + prefixed_lines + + def run(self, *args, **subprocess_args): + if subprocess_args.get('shell'): + raise ValueError('Running the subprocess in shell mode is not supported.') + default_args = { + 'capture_output': True, + 'check': True, + } + try: + result = subprocess.run([self.interpreter, *args], **default_args | subprocess_args) + except subprocess.CalledProcessError as e: + if e.returncode != 0: + self._logger.error( + f'Interpreter returned non-zero exit status {e.returncode}.\n' + + self._format_output('COMMAND', shlex.join(e.cmd)) + '\n' + + self._format_output('STDOUT', e.stdout.decode()) + '\n' + + self._format_output('STDERR', e.stderr.decode()) + '\n' + ) + raise + else: + return result diff --git a/Lib/test/test__locale.py b/Lib/test/test__locale.py index 7e6e296c069abb..2c751033ebb3e2 100644 --- a/Lib/test/test__locale.py +++ b/Lib/test/test__locale.py @@ -90,6 +90,14 @@ def accept(loc): 'bn_IN': (100, {0: '\u09e6', 10: '\u09e7\u09e6', 99: '\u09ef\u09ef'}), } +known_era = { + 'C': (0, ''), + 'en_US': (0, ''), + 'ja_JP': (11, '+:1:2019/05/01:2019/12/31:令和:%EC元年'), + 'zh_TW': (3, '+:1:1912/01/01:1912/12/31:民國:%EC元年'), + 'th_TW': (1, '+:1:-543/01/01:+*:พ.ศ.:%EC %Ey'), +} + if sys.platform == 'win32': # ps_AF doesn't work on Windows: see bpo-38324 (msg361830) del known_numerics['ps_AF'] @@ -230,6 +238,43 @@ def test_alt_digits_nl_langinfo(self): if not tested: self.skipTest('no suitable locales') + @unittest.skipUnless(nl_langinfo, "nl_langinfo is not available") + @unittest.skipUnless(hasattr(locale, 'ERA'), "requires 
locale.ERA") + @unittest.skipIf( + support.is_emscripten or support.is_wasi, + "musl libc issue on Emscripten, bpo-46390" + ) + def test_era_nl_langinfo(self): + # Test nl_langinfo(ERA) + tested = False + for loc in candidate_locales: + with self.subTest(locale=loc): + try: + setlocale(LC_TIME, loc) + except Error: + self.skipTest(f'no locale {loc!r}') + continue + + with self.subTest(locale=loc): + era = nl_langinfo(locale.ERA) + self.assertIsInstance(era, str) + if era: + self.assertEqual(era.count(':'), (era.count(';') + 1) * 5, era) + + loc1 = loc.split('.', 1)[0] + if loc1 in known_era: + count, sample = known_era[loc1] + if count: + if not era: + self.skipTest(f'ERA is not set for locale {loc!r} on this platform') + self.assertGreaterEqual(era.count(';') + 1, count) + self.assertIn(sample, era) + else: + self.assertEqual(era, '') + tested = True + if not tested: + self.skipTest('no suitable locales') + def test_float_parsing(self): # Bug #1391872: Test whether float parsing is okay on European # locales. diff --git a/Lib/test/test_annotationlib.py b/Lib/test/test_annotationlib.py index 2ca7058c14398c..20f74b4ed0aadb 100644 --- a/Lib/test/test_annotationlib.py +++ b/Lib/test/test_annotationlib.py @@ -42,11 +42,14 @@ def test_enum(self): self.assertEqual(Format.VALUE.value, 1) self.assertEqual(Format.VALUE, 1) - self.assertEqual(Format.FORWARDREF.value, 2) - self.assertEqual(Format.FORWARDREF, 2) + self.assertEqual(Format.VALUE_WITH_FAKE_GLOBALS.value, 2) + self.assertEqual(Format.VALUE_WITH_FAKE_GLOBALS, 2) - self.assertEqual(Format.STRING.value, 3) - self.assertEqual(Format.STRING, 3) + self.assertEqual(Format.FORWARDREF.value, 3) + self.assertEqual(Format.FORWARDREF, 3) + + self.assertEqual(Format.STRING.value, 4) + self.assertEqual(Format.STRING, 4) class TestForwardRefFormat(unittest.TestCase): @@ -459,19 +462,28 @@ def f2(a: undefined): annotationlib.get_annotations(f2, format=Format.FORWARDREF), {"a": fwd}, ) - self.assertEqual(annotationlib.get_annotations(f2, format=2), {"a": fwd}) + self.assertEqual(annotationlib.get_annotations(f2, format=3), {"a": fwd}) self.assertEqual( annotationlib.get_annotations(f1, format=Format.STRING), {"a": "int"}, ) - self.assertEqual(annotationlib.get_annotations(f1, format=3), {"a": "int"}) + self.assertEqual(annotationlib.get_annotations(f1, format=4), {"a": "int"}) with self.assertRaises(ValueError): - annotationlib.get_annotations(f1, format=0) + annotationlib.get_annotations(f1, format=42) - with self.assertRaises(ValueError): - annotationlib.get_annotations(f1, format=4) + with self.assertRaisesRegex( + ValueError, + r"The VALUE_WITH_FAKE_GLOBALS format is for internal use only", + ): + annotationlib.get_annotations(f1, format=Format.VALUE_WITH_FAKE_GLOBALS) + + with self.assertRaisesRegex( + ValueError, + r"The VALUE_WITH_FAKE_GLOBALS format is for internal use only", + ): + annotationlib.get_annotations(f1, format=2) def test_custom_object_with_annotations(self): class C: @@ -505,6 +517,8 @@ def foo(a: int, b: str): foo.__annotations__ = {"a": "foo", "b": "str"} for format in Format: + if format is Format.VALUE_WITH_FAKE_GLOBALS: + continue with self.subTest(format=format): self.assertEqual( annotationlib.get_annotations(foo, format=format), @@ -802,6 +816,8 @@ def __annotations__(self): wa = WeirdAnnotations() for format in Format: + if format is Format.VALUE_WITH_FAKE_GLOBALS: + continue with ( self.subTest(format=format), self.assertRaisesRegex( @@ -990,7 +1006,7 @@ def 
test_pep_695_generics_with_future_annotations_nested_in_function(self): class TestCallEvaluateFunction(unittest.TestCase): def test_evaluation(self): def evaluate(format, exc=NotImplementedError): - if format != 1: + if format > 2: raise exc return undefined diff --git a/Lib/test/test_argparse.py b/Lib/test/test_argparse.py index 358cfb1c56aae4..488a3a4ed20fac 100644 --- a/Lib/test/test_argparse.py +++ b/Lib/test/test_argparse.py @@ -2409,16 +2409,17 @@ def assertArgumentParserError(self, *args, **kwargs): self.assertRaises(ArgumentParserError, *args, **kwargs) def _get_parser(self, subparser_help=False, prefix_chars=None, - aliases=False): + aliases=False, usage=None): # create a parser with a subparsers argument if prefix_chars: parser = ErrorRaisingArgumentParser( - prog='PROG', description='main description', prefix_chars=prefix_chars) + prog='PROG', description='main description', usage=usage, + prefix_chars=prefix_chars) parser.add_argument( prefix_chars[0] * 2 + 'foo', action='store_true', help='foo help') else: parser = ErrorRaisingArgumentParser( - prog='PROG', description='main description') + prog='PROG', description='main description', usage=usage) parser.add_argument( '--foo', action='store_true', help='foo help') parser.add_argument( @@ -2455,7 +2456,8 @@ def _get_parser(self, subparser_help=False, prefix_chars=None, parser2.add_argument('z', type=complex, nargs='*', help='z help') # add third sub-parser - parser3_kwargs = dict(description='3 description') + parser3_kwargs = dict(description='3 description', + usage='PROG --foo bar 3 t ...') if subparser_help: parser3_kwargs['help'] = '3 help' parser3 = subparsers.add_parser('3', **parser3_kwargs) @@ -2477,6 +2479,47 @@ def test_parse_args_failures(self): args = args_str.split() self.assertArgumentParserError(self.parser.parse_args, args) + def test_parse_args_failures_details(self): + for args_str, usage_str, error_str in [ + ('', + 'usage: PROG [-h] [--foo] bar {1,2,3} ...', + 'PROG: error: the following arguments are required: bar'), + ('0.5 1 -y', + 'usage: PROG bar 1 [-h] [-w W] {a,b,c}', + 'PROG bar 1: error: the following arguments are required: x'), + ('0.5 3', + 'usage: PROG --foo bar 3 t ...', + 'PROG bar 3: error: the following arguments are required: t'), + ]: + with self.subTest(args_str): + args = args_str.split() + with self.assertRaises(ArgumentParserError) as cm: + self.parser.parse_args(args) + self.assertEqual(cm.exception.args[0], 'SystemExit') + self.assertEqual(cm.exception.args[2], f'{usage_str}\n{error_str}\n') + + def test_parse_args_failures_details_custom_usage(self): + parser = self._get_parser(usage='PROG [--foo] bar 1 [-w W] {a,b,c}\n' + ' PROG --foo bar 3 t ...') + for args_str, usage_str, error_str in [ + ('', + 'usage: PROG [--foo] bar 1 [-w W] {a,b,c}\n' + ' PROG --foo bar 3 t ...', + 'PROG: error: the following arguments are required: bar'), + ('0.5 1 -y', + 'usage: PROG bar 1 [-h] [-w W] {a,b,c}', + 'PROG bar 1: error: the following arguments are required: x'), + ('0.5 3', + 'usage: PROG --foo bar 3 t ...', + 'PROG bar 3: error: the following arguments are required: t'), + ]: + with self.subTest(args_str): + args = args_str.split() + with self.assertRaises(ArgumentParserError) as cm: + parser.parse_args(args) + self.assertEqual(cm.exception.args[0], 'SystemExit') + self.assertEqual(cm.exception.args[2], f'{usage_str}\n{error_str}\n') + def test_parse_args(self): # check some non-failure cases: self.assertEqual( @@ -2954,6 +2997,13 @@ def test_group_prefix_chars_default(self): 
self.assertEqual(msg, str(cm.warning)) self.assertEqual(cm.filename, __file__) + def test_nested_argument_group(self): + parser = argparse.ArgumentParser() + g = parser.add_argument_group() + self.assertRaisesRegex(ValueError, + 'argument groups cannot be nested', + g.add_argument_group) + # =================== # Parent parser tests # =================== @@ -3254,6 +3304,14 @@ def test_empty_group(self): with self.assertRaises(ValueError): parser.parse_args(['-h']) + def test_nested_mutex_groups(self): + parser = argparse.ArgumentParser(prog='PROG') + g = parser.add_mutually_exclusive_group() + g.add_argument("--spam") + self.assertRaisesRegex(ValueError, + 'mutually exclusive groups cannot be nested', + g.add_mutually_exclusive_group) + class MEMixin(object): def test_failures_when_not_required(self): @@ -3621,55 +3679,6 @@ def get_parser(self, required): -c c help ''' -class TestMutuallyExclusiveNested(MEMixin, TestCase): - - # Nesting mutually exclusive groups is an undocumented feature - # that came about by accident through inheritance and has been - # the source of many bugs. It is deprecated and this test should - # eventually be removed along with it. - - def get_parser(self, required): - parser = ErrorRaisingArgumentParser(prog='PROG') - group = parser.add_mutually_exclusive_group(required=required) - group.add_argument('-a') - group.add_argument('-b') - with warnings.catch_warnings(): - warnings.simplefilter('ignore', DeprecationWarning) - group2 = group.add_mutually_exclusive_group(required=required) - group2.add_argument('-c') - group2.add_argument('-d') - with warnings.catch_warnings(): - warnings.simplefilter('ignore', DeprecationWarning) - group3 = group2.add_mutually_exclusive_group(required=required) - group3.add_argument('-e') - group3.add_argument('-f') - return parser - - usage_when_not_required = '''\ - usage: PROG [-h] [-a A | -b B | [-c C | -d D | [-e E | -f F]]] - ''' - usage_when_required = '''\ - usage: PROG [-h] (-a A | -b B | (-c C | -d D | (-e E | -f F))) - ''' - - help = '''\ - - options: - -h, --help show this help message and exit - -a A - -b B - -c C - -d D - -e E - -f F - ''' - - # We are only interested in testing the behavior of format_usage(). 
- test_failures_when_not_required = None - test_failures_when_required = None - test_successes_when_not_required = None - test_successes_when_required = None - class TestMutuallyExclusiveOptionalOptional(MEMixin, TestCase): def get_parser(self, required=None): @@ -4840,25 +4849,6 @@ def test_all_suppressed_mutex_with_optional_nargs(self): usage = 'usage: PROG [-h]\n' self.assertEqual(parser.format_usage(), usage) - def test_nested_mutex_groups(self): - parser = argparse.ArgumentParser(prog='PROG') - g = parser.add_mutually_exclusive_group() - g.add_argument("--spam") - with warnings.catch_warnings(): - warnings.simplefilter('ignore', DeprecationWarning) - gg = g.add_mutually_exclusive_group() - gg.add_argument("--hax") - gg.add_argument("--hox", help=argparse.SUPPRESS) - gg.add_argument("--hex") - g.add_argument("--eggs") - parser.add_argument("--num") - - usage = textwrap.dedent('''\ - usage: PROG [-h] [--spam SPAM | [--hax HAX | --hex HEX] | --eggs EGGS] - [--num NUM] - ''') - self.assertEqual(parser.format_usage(), usage) - def test_long_mutex_groups_wrap(self): parser = argparse.ArgumentParser(prog='PROG') g = parser.add_mutually_exclusive_group() diff --git a/Lib/test/test_asyncio/test_eager_task_factory.py b/Lib/test/test_asyncio/test_eager_task_factory.py index 31d2a00dbb8c9c..0e2b189f761521 100644 --- a/Lib/test/test_asyncio/test_eager_task_factory.py +++ b/Lib/test/test_asyncio/test_eager_task_factory.py @@ -220,10 +220,14 @@ async def fail(): await asyncio.sleep(0) raise ValueError("no good") + async def blocked(): + fut = asyncio.Future() + await fut + async def run(): winner, index, excs = await asyncio.staggered.staggered_race( [ - lambda: asyncio.sleep(2, result="sleep2"), + lambda: blocked(), lambda: asyncio.sleep(1, result="sleep1"), lambda: fail() ], diff --git a/Lib/test/test_buffer.py b/Lib/test/test_buffer.py index cb38a69e390f3a..61921e93e85e63 100644 --- a/Lib/test/test_buffer.py +++ b/Lib/test/test_buffer.py @@ -4439,6 +4439,14 @@ def test_issue_7385(self): x = ndarray([1,2,3], shape=[3], flags=ND_GETBUF_FAIL) self.assertRaises(BufferError, memoryview, x) + def test_bytearray_release_buffer_read_flag(self): + # See https://github.com/python/cpython/issues/126980 + obj = bytearray(b'abc') + with self.assertRaises(SystemError): + obj.__buffer__(inspect.BufferFlags.READ) + with self.assertRaises(SystemError): + obj.__buffer__(inspect.BufferFlags.WRITE) + @support.cpython_only def test_pybuffer_size_from_format(self): # basic tests @@ -4446,6 +4454,21 @@ def test_pybuffer_size_from_format(self): self.assertEqual(_testcapi.PyBuffer_SizeFromFormat(format), struct.calcsize(format)) + @support.cpython_only + def test_flags_overflow(self): + # gh-126594: Check for integer overlow on large flags + try: + from _testcapi import INT_MIN, INT_MAX + except ImportError: + INT_MIN = -(2 ** 31) + INT_MAX = 2 ** 31 - 1 + + obj = b'abc' + for flags in (INT_MIN - 1, INT_MAX + 1): + with self.subTest(flags=flags): + with self.assertRaises(OverflowError): + obj.__buffer__(flags) + class TestPythonBufferProtocol(unittest.TestCase): def test_basic(self): diff --git a/Lib/test/test_builtin.py b/Lib/test/test_builtin.py index f8e6f05cd607c8..e51711d9b4f1a4 100644 --- a/Lib/test/test_builtin.py +++ b/Lib/test/test_builtin.py @@ -35,6 +35,7 @@ from test.support.import_helper import import_module from test.support.os_helper import (EnvironmentVarGuard, TESTFN, unlink) from test.support.script_helper import assert_python_ok +from test.support.testcase import ComplexesAreIdenticalMixin from 
test.support.warnings_helper import check_warnings from test.support import requires_IEEE_754 from unittest.mock import MagicMock, patch @@ -151,7 +152,7 @@ def map_char(arg): def pack(*args): return args -class BuiltinTest(unittest.TestCase): +class BuiltinTest(ComplexesAreIdenticalMixin, unittest.TestCase): # Helper to check picklability def check_iter_pickle(self, it, seq, proto): itorg = it @@ -1902,6 +1903,17 @@ def __getitem__(self, index): self.assertEqual(sum(xs), complex(sum(z.real for z in xs), sum(z.imag for z in xs))) + # test that sum() of complex and real numbers doesn't + # smash sign of imaginary 0 + self.assertComplexesAreIdentical(sum([complex(1, -0.0), 1]), + complex(2, -0.0)) + self.assertComplexesAreIdentical(sum([1, complex(1, -0.0)]), + complex(2, -0.0)) + self.assertComplexesAreIdentical(sum([complex(1, -0.0), 1.0]), + complex(2, -0.0)) + self.assertComplexesAreIdentical(sum([1.0, complex(1, -0.0)]), + complex(2, -0.0)) + @requires_IEEE_754 @unittest.skipIf(HAVE_DOUBLE_ROUNDING, "sum accuracy not guaranteed on machines with double rounding") diff --git a/Lib/test/test_capi/test_complex.py b/Lib/test/test_capi/test_complex.py index 368edfbf2ce97e..97e0eb3f043080 100644 --- a/Lib/test/test_capi/test_complex.py +++ b/Lib/test/test_capi/test_complex.py @@ -7,6 +7,7 @@ FloatSubclass, Float, BadFloat, BadFloat2, ComplexSubclass) from test.support import import_helper +from test.support.testcase import ComplexesAreIdenticalMixin _testcapi = import_helper.import_module('_testcapi') @@ -23,7 +24,7 @@ def __complex__(self): raise RuntimeError -class CAPIComplexTest(unittest.TestCase): +class CAPIComplexTest(ComplexesAreIdenticalMixin, unittest.TestCase): def test_check(self): # Test PyComplex_Check() check = _testlimitedcapi.complex_check @@ -171,12 +172,33 @@ def test_py_c_sum(self): self.assertEqual(_py_c_sum(1, 1j), (1+1j, 0)) + def test_py_cr_sum(self): + # Test _Py_cr_sum() + _py_cr_sum = _testcapi._py_cr_sum + + self.assertComplexesAreIdentical(_py_cr_sum(-0j, -0.0)[0], + complex(-0.0, -0.0)) + def test_py_c_diff(self): # Test _Py_c_diff() _py_c_diff = _testcapi._py_c_diff self.assertEqual(_py_c_diff(1, 1j), (1-1j, 0)) + def test_py_cr_diff(self): + # Test _Py_cr_diff() + _py_cr_diff = _testcapi._py_cr_diff + + self.assertComplexesAreIdentical(_py_cr_diff(-0j, 0.0)[0], + complex(-0.0, -0.0)) + + def test_py_rc_diff(self): + # Test _Py_rc_diff() + _py_rc_diff = _testcapi._py_rc_diff + + self.assertComplexesAreIdentical(_py_rc_diff(-0.0, 0j)[0], + complex(-0.0, -0.0)) + def test_py_c_neg(self): # Test _Py_c_neg() _py_c_neg = _testcapi._py_c_neg @@ -189,6 +211,13 @@ def test_py_c_prod(self): self.assertEqual(_py_c_prod(2, 1j), (2j, 0)) + def test_py_cr_prod(self): + # Test _Py_cr_prod() + _py_cr_prod = _testcapi._py_cr_prod + + self.assertComplexesAreIdentical(_py_cr_prod(complex('inf+1j'), INF)[0], + complex('inf+infj')) + def test_py_c_quot(self): # Test _Py_c_quot() _py_c_quot = _testcapi._py_c_quot @@ -211,6 +240,20 @@ def test_py_c_quot(self): self.assertEqual(_py_c_quot(1, 0j)[1], errno.EDOM) + def test_py_cr_quot(self): + # Test _Py_cr_quot() + _py_cr_quot = _testcapi._py_cr_quot + + self.assertComplexesAreIdentical(_py_cr_quot(complex('inf+1j'), 2**1000)[0], + INF + 2**-1000*1j) + + def test_py_rc_quot(self): + # Test _Py_rc_quot() + _py_rc_quot = _testcapi._py_rc_quot + + self.assertComplexesAreIdentical(_py_rc_quot(1.0, complex('nan-infj'))[0], + 0j) + def test_py_c_pow(self): # Test _Py_c_pow() _py_c_pow = _testcapi._py_c_pow diff --git 
a/Lib/test/test_capi/test_opt.py b/Lib/test/test_capi/test_opt.py index 9726353bcd6a6d..4cf9b66170c055 100644 --- a/Lib/test/test_capi/test_opt.py +++ b/Lib/test/test_capi/test_opt.py @@ -62,9 +62,9 @@ def test_get_set_optimizer(self): def test_counter_optimizer(self): # Generate a new function at each call ns = {} - exec(textwrap.dedent(""" + exec(textwrap.dedent(f""" def loop(): - for _ in range(1000): + for _ in range({TIER2_THRESHOLD + 1000}): pass """), ns, ns) loop = ns['loop'] @@ -75,20 +75,19 @@ def loop(): self.assertEqual(opt.get_count(), 0) with clear_executors(loop): loop() - # Subtract because optimizer doesn't kick in sooner - self.assertEqual(opt.get_count(), 1000 - TIER2_THRESHOLD) + self.assertEqual(opt.get_count(), 1001) def test_long_loop(self): "Check that we aren't confused by EXTENDED_ARG" # Generate a new function at each call ns = {} - exec(textwrap.dedent(""" + exec(textwrap.dedent(f""" def nop(): pass def long_loop(): - for _ in range(20): + for _ in range({TIER2_THRESHOLD + 20}): nop(); nop(); nop(); nop(); nop(); nop(); nop(); nop(); nop(); nop(); nop(); nop(); nop(); nop(); nop(); nop(); nop(); nop(); nop(); nop(); nop(); nop(); nop(); nop(); @@ -103,7 +102,7 @@ def long_loop(): with temporary_optimizer(opt): self.assertEqual(opt.get_count(), 0) long_loop() - self.assertEqual(opt.get_count(), 20 - TIER2_THRESHOLD) # Need iterations to warm up + self.assertEqual(opt.get_count(), 21) # Need iterations to warm up def test_code_restore_for_ENTER_EXECUTOR(self): def testfunc(x): @@ -159,7 +158,7 @@ def test_invalidate_object(self): func_src = "\n".join( f""" def f{n}(): - for _ in range(1000): + for _ in range({TIER2_THRESHOLD}): pass """ for n in range(5) ) @@ -190,9 +189,9 @@ def f{n}(): def test_uop_optimizer_invalidation(self): # Generate a new function at each call ns = {} - exec(textwrap.dedent(""" + exec(textwrap.dedent(f""" def f(): - for i in range(1000): + for i in range({TIER2_THRESHOLD}): pass """), ns, ns) f = ns['f'] @@ -207,7 +206,7 @@ def f(): def test_sys__clear_internal_caches(self): def f(): - for _ in range(1000): + for _ in range(TIER2_THRESHOLD): pass opt = _testinternalcapi.new_uop_optimizer() with temporary_optimizer(opt): @@ -236,7 +235,7 @@ def testfunc(x): opt = _testinternalcapi.new_uop_optimizer() with temporary_optimizer(opt): - testfunc(1000) + testfunc(TIER2_THRESHOLD) ex = get_first_executor(testfunc) self.assertIsNotNone(ex) @@ -247,7 +246,7 @@ def testfunc(x): def test_extended_arg(self): "Check EXTENDED_ARG handling in superblock creation" ns = {} - exec(textwrap.dedent(""" + exec(textwrap.dedent(f""" def many_vars(): # 260 vars, so z9 should have index 259 a0 = a1 = a2 = a3 = a4 = a5 = a6 = a7 = a8 = a9 = 42 @@ -275,7 +274,7 @@ def many_vars(): w0 = w1 = w2 = w3 = w4 = w5 = w6 = w7 = w8 = w9 = 42 x0 = x1 = x2 = x3 = x4 = x5 = x6 = x7 = x8 = x9 = 42 y0 = y1 = y2 = y3 = y4 = y5 = y6 = y7 = y8 = y9 = 42 - z0 = z1 = z2 = z3 = z4 = z5 = z6 = z7 = z8 = z9 = 42 + z0 = z1 = z2 = z3 = z4 = z5 = z6 = z7 = z8 = z9 = {TIER2_THRESHOLD} while z9 > 0: z9 = z9 - 1 +z9 @@ -308,7 +307,7 @@ def testfunc(x): opt = _testinternalcapi.new_uop_optimizer() with temporary_optimizer(opt): - testfunc(20) + testfunc(TIER2_THRESHOLD) ex = get_first_executor(testfunc) self.assertIsNotNone(ex) @@ -323,7 +322,7 @@ def testfunc(n): opt = _testinternalcapi.new_uop_optimizer() with temporary_optimizer(opt): - testfunc(20) + testfunc(TIER2_THRESHOLD) ex = get_first_executor(testfunc) self.assertIsNotNone(ex) @@ -338,7 +337,7 @@ def testfunc(a): opt = 
_testinternalcapi.new_uop_optimizer() with temporary_optimizer(opt): - testfunc(range(20)) + testfunc(range(TIER2_THRESHOLD)) ex = get_first_executor(testfunc) self.assertIsNotNone(ex) @@ -355,7 +354,7 @@ def testfunc(a): opt = _testinternalcapi.new_uop_optimizer() with temporary_optimizer(opt): - testfunc(range(20)) + testfunc(range(TIER2_THRESHOLD)) ex = get_first_executor(testfunc) self.assertIsNotNone(ex) @@ -371,7 +370,7 @@ def testfunc(n): opt = _testinternalcapi.new_uop_optimizer() with temporary_optimizer(opt): - testfunc(20) + testfunc(TIER2_THRESHOLD) ex = get_first_executor(testfunc) self.assertIsNotNone(ex) @@ -386,7 +385,7 @@ def testfunc(n): opt = _testinternalcapi.new_uop_optimizer() with temporary_optimizer(opt): - testfunc(20) + testfunc(TIER2_THRESHOLD) ex = get_first_executor(testfunc) self.assertIsNotNone(ex) @@ -406,7 +405,7 @@ def testfunc(n): opt = _testinternalcapi.new_uop_optimizer() with temporary_optimizer(opt): - testfunc(20) + testfunc(TIER2_THRESHOLD) ex = get_first_executor(testfunc) self.assertIsNotNone(ex) @@ -424,8 +423,8 @@ def testfunc(n): opt = _testinternalcapi.new_uop_optimizer() with temporary_optimizer(opt): - total = testfunc(20) - self.assertEqual(total, 190) + total = testfunc(TIER2_THRESHOLD) + self.assertEqual(total, sum(range(TIER2_THRESHOLD))) ex = get_first_executor(testfunc) self.assertIsNotNone(ex) @@ -445,9 +444,9 @@ def testfunc(a): opt = _testinternalcapi.new_uop_optimizer() with temporary_optimizer(opt): - a = list(range(20)) + a = list(range(TIER2_THRESHOLD)) total = testfunc(a) - self.assertEqual(total, 190) + self.assertEqual(total, sum(a)) ex = get_first_executor(testfunc) self.assertIsNotNone(ex) @@ -467,9 +466,9 @@ def testfunc(a): opt = _testinternalcapi.new_uop_optimizer() with temporary_optimizer(opt): - a = tuple(range(20)) + a = tuple(range(TIER2_THRESHOLD)) total = testfunc(a) - self.assertEqual(total, 190) + self.assertEqual(total, sum(a)) ex = get_first_executor(testfunc) self.assertIsNotNone(ex) @@ -503,7 +502,7 @@ def dummy(x): opt = _testinternalcapi.new_uop_optimizer() with temporary_optimizer(opt): - testfunc(20) + testfunc(TIER2_THRESHOLD) ex = get_first_executor(testfunc) self.assertIsNotNone(ex) @@ -521,7 +520,7 @@ def testfunc(n): opt = _testinternalcapi.new_uop_optimizer() with temporary_optimizer(opt): - testfunc(20) + testfunc(TIER2_THRESHOLD) ex = get_first_executor(testfunc) self.assertIsNotNone(ex) @@ -549,9 +548,9 @@ def testfunc(n, m): opt = _testinternalcapi.new_uop_optimizer() with temporary_optimizer(opt): - x = testfunc(10, 10) + x = testfunc(TIER2_THRESHOLD, TIER2_THRESHOLD) - self.assertEqual(x, sum(range(10)) * 10010) + self.assertEqual(x, sum(range(TIER2_THRESHOLD)) * TIER2_THRESHOLD * 1001) ex = get_first_executor(testfunc) self.assertIsNotNone(ex) @@ -572,15 +571,13 @@ def testfunc(n): bits += 1 if i&0x10: bits += 1 - if i&0x20: - bits += 1 return bits opt = _testinternalcapi.new_uop_optimizer() with temporary_optimizer(opt): - x = testfunc(20) + x = testfunc(TIER2_THRESHOLD * 2) - self.assertEqual(x, 40) + self.assertEqual(x, TIER2_THRESHOLD * 5) ex = get_first_executor(testfunc) self.assertIsNotNone(ex) ops = list(iter_opnames(ex)) @@ -615,9 +612,9 @@ def testfunc(loops): num += 1 return a - res, ex = self._run_with_optimizer(testfunc, 32) + res, ex = self._run_with_optimizer(testfunc, TIER2_THRESHOLD) self.assertIsNotNone(ex) - self.assertEqual(res, 63) + self.assertEqual(res, (TIER2_THRESHOLD - 1) * 2 + 1) binop_count = [opname for opname in iter_opnames(ex) if opname == 
"_BINARY_OP_ADD_INT"] guard_both_int_count = [opname for opname in iter_opnames(ex) if opname == "_GUARD_BOTH_INT"] self.assertGreaterEqual(len(binop_count), 3) @@ -637,11 +634,11 @@ def testfunc(loops): opt = _testinternalcapi.new_uop_optimizer() res = None with temporary_optimizer(opt): - res = testfunc(32) + res = testfunc(TIER2_THRESHOLD) ex = get_first_executor(testfunc) self.assertIsNotNone(ex) - self.assertEqual(res, 124) + self.assertEqual(res, (TIER2_THRESHOLD - 1) * 4) binop_count = [opname for opname in iter_opnames(ex) if opname == "_BINARY_OP_ADD_INT"] guard_both_int_count = [opname for opname in iter_opnames(ex) if opname == "_GUARD_BOTH_INT"] self.assertGreaterEqual(len(binop_count), 3) @@ -661,11 +658,11 @@ def testfunc(loops): opt = _testinternalcapi.new_uop_optimizer() res = None with temporary_optimizer(opt): - res = testfunc(32) + res = testfunc(TIER2_THRESHOLD) ex = get_first_executor(testfunc) self.assertIsNotNone(ex) - self.assertEqual(res, 124) + self.assertEqual(res, (TIER2_THRESHOLD - 1) * 4) binop_count = [opname for opname in iter_opnames(ex) if opname == "_BINARY_OP_ADD_INT"] guard_both_int_count = [opname for opname in iter_opnames(ex) if opname == "_GUARD_BOTH_INT"] self.assertGreaterEqual(len(binop_count), 3) @@ -682,7 +679,7 @@ def testfunc(loops): num += 1 return a - res, ex = self._run_with_optimizer(testfunc, 64) + res, ex = self._run_with_optimizer(testfunc, TIER2_THRESHOLD) self.assertIsNotNone(ex) binop_count = [opname for opname in iter_opnames(ex) if opname == "_BINARY_OP_ADD_INT"] self.assertGreaterEqual(len(binop_count), 3) @@ -694,7 +691,7 @@ def dummy(x): for i in range(n): dummy(i) - res, ex = self._run_with_optimizer(testfunc, 32) + res, ex = self._run_with_optimizer(testfunc, TIER2_THRESHOLD) self.assertIsNotNone(ex) uops = get_opnames(ex) self.assertIn("_PUSH_FRAME", uops) @@ -708,8 +705,8 @@ def testfunc(n): x = i + i return x - res, ex = self._run_with_optimizer(testfunc, 32) - self.assertEqual(res, 62) + res, ex = self._run_with_optimizer(testfunc, TIER2_THRESHOLD) + self.assertEqual(res, (TIER2_THRESHOLD - 1) * 2) self.assertIsNotNone(ex) uops = get_opnames(ex) self.assertNotIn("_GUARD_BOTH_INT", uops) @@ -726,7 +723,7 @@ def testfunc(n): res = x + z + a + b return res - res, ex = self._run_with_optimizer(testfunc, 32) + res, ex = self._run_with_optimizer(testfunc, TIER2_THRESHOLD) self.assertEqual(res, 4) self.assertIsNotNone(ex) uops = get_opnames(ex) @@ -739,8 +736,8 @@ def testfunc(n): for _ in range(n): return [i for i in range(n)] - res, ex = self._run_with_optimizer(testfunc, 32) - self.assertEqual(res, list(range(32))) + res, ex = self._run_with_optimizer(testfunc, TIER2_THRESHOLD) + self.assertEqual(res, list(range(TIER2_THRESHOLD))) self.assertIsNotNone(ex) uops = get_opnames(ex) self.assertNotIn("_BINARY_OP_ADD_INT", uops) @@ -797,7 +794,7 @@ def testfunc(n): opt = _testinternalcapi.new_uop_optimizer() _testinternalcapi.set_optimizer(opt) - testfunc(64) + testfunc(_testinternalcapi.TIER2_THRESHOLD) ex = get_first_executor(testfunc) assert ex is not None @@ -817,8 +814,8 @@ def testfunc(n): a = a + 0.25 return a - res, ex = self._run_with_optimizer(testfunc, 32) - self.assertAlmostEqual(res, 33.0) + res, ex = self._run_with_optimizer(testfunc, TIER2_THRESHOLD) + self.assertAlmostEqual(res, TIER2_THRESHOLD + 1) self.assertIsNotNone(ex) uops = get_opnames(ex) guard_both_float_count = [opname for opname in iter_opnames(ex) if opname == "_GUARD_BOTH_FLOAT"] @@ -837,8 +834,8 @@ def testfunc(n): a = a - 0.25 return a - res, ex = 
self._run_with_optimizer(testfunc, 32) - self.assertAlmostEqual(res, -31.0) + res, ex = self._run_with_optimizer(testfunc, TIER2_THRESHOLD) + self.assertAlmostEqual(res, -TIER2_THRESHOLD + 1) self.assertIsNotNone(ex) uops = get_opnames(ex) guard_both_float_count = [opname for opname in iter_opnames(ex) if opname == "_GUARD_BOTH_FLOAT"] @@ -857,7 +854,7 @@ def testfunc(n): a = a * 1.0 return a - res, ex = self._run_with_optimizer(testfunc, 32) + res, ex = self._run_with_optimizer(testfunc, TIER2_THRESHOLD) self.assertAlmostEqual(res, 1.0) self.assertIsNotNone(ex) uops = get_opnames(ex) @@ -877,7 +874,7 @@ def testfunc(n): a + a return a - res, ex = self._run_with_optimizer(testfunc, 32) + res, ex = self._run_with_optimizer(testfunc, TIER2_THRESHOLD) self.assertEqual(res, "") self.assertIsNotNone(ex) uops = get_opnames(ex) @@ -895,7 +892,7 @@ def testfunc(n): x = a == a return x - res, ex = self._run_with_optimizer(testfunc, 32) + res, ex = self._run_with_optimizer(testfunc, TIER2_THRESHOLD) self.assertTrue(res) self.assertIsNotNone(ex) uops = get_opnames(ex) @@ -913,7 +910,7 @@ def testfunc(n): x = a == a return x - res, ex = self._run_with_optimizer(testfunc, 32) + res, ex = self._run_with_optimizer(testfunc, TIER2_THRESHOLD) self.assertTrue(res) self.assertIsNotNone(ex) uops = get_opnames(ex) @@ -931,7 +928,7 @@ def testfunc(n): x = 1 return x - res, ex = self._run_with_optimizer(testfunc, 32) + res, ex = self._run_with_optimizer(testfunc, TIER2_THRESHOLD) self.assertEqual(res, 1) self.assertIsNotNone(ex) uops = get_opnames(ex) @@ -951,7 +948,7 @@ def testfunc(n): x = 1 return x - res, ex = self._run_with_optimizer(testfunc, 32) + res, ex = self._run_with_optimizer(testfunc, TIER2_THRESHOLD) self.assertEqual(res, 1) self.assertIsNotNone(ex) uops = get_opnames(ex) @@ -971,7 +968,7 @@ def testfunc(n): x = a == a return x - res, ex = self._run_with_optimizer(testfunc, 32) + res, ex = self._run_with_optimizer(testfunc, TIER2_THRESHOLD) self.assertTrue(res) self.assertIsNotNone(ex) uops = get_opnames(ex) @@ -989,10 +986,10 @@ def testfunc(n): exec(src, ns, ns) testfunc = ns['testfunc'] ns['_test_global'] = 0 - _, ex = self._run_with_optimizer(testfunc, TIER2_THRESHOLD) + _, ex = self._run_with_optimizer(testfunc, TIER2_THRESHOLD - 1) self.assertIsNone(ex) ns['_test_global'] = 1 - _, ex = self._run_with_optimizer(testfunc, TIER2_THRESHOLD) + _, ex = self._run_with_optimizer(testfunc, TIER2_THRESHOLD - 1) self.assertIsNotNone(ex) uops = get_opnames(ex) self.assertNotIn("_GUARD_BOTH_INT", uops) @@ -1003,10 +1000,10 @@ def testfunc(n): exec(src, ns, ns) testfunc = ns['testfunc'] ns['_test_global'] = 0 - _, ex = self._run_with_optimizer(testfunc, TIER2_THRESHOLD) + _, ex = self._run_with_optimizer(testfunc, TIER2_THRESHOLD - 1) self.assertIsNone(ex) ns['_test_global'] = 3.14 - _, ex = self._run_with_optimizer(testfunc, TIER2_THRESHOLD) + _, ex = self._run_with_optimizer(testfunc, TIER2_THRESHOLD - 1) self.assertIsNone(ex) def test_combine_stack_space_checks_sequential(self): @@ -1023,8 +1020,8 @@ def testfunc(n): a += b + c + d return a - res, ex = self._run_with_optimizer(testfunc, 32) - self.assertEqual(res, 832) + res, ex = self._run_with_optimizer(testfunc, TIER2_THRESHOLD) + self.assertEqual(res, TIER2_THRESHOLD * 26) self.assertIsNotNone(ex) uops_and_operands = [(opcode, operand) for opcode, _, _, operand in ex] @@ -1050,8 +1047,8 @@ def testfunc(n): a += b + c return a - res, ex = self._run_with_optimizer(testfunc, 32) - self.assertEqual(res, 224) + res, ex = 
self._run_with_optimizer(testfunc, TIER2_THRESHOLD) + self.assertEqual(res, TIER2_THRESHOLD * 7) self.assertIsNotNone(ex) uops_and_operands = [(opcode, operand) for opcode, _, _, operand in ex] @@ -1085,8 +1082,8 @@ def testfunc(n): a += b + c + d + e return a - res, ex = self._run_with_optimizer(testfunc, 32) - self.assertEqual(res, 800) + res, ex = self._run_with_optimizer(testfunc, TIER2_THRESHOLD) + self.assertEqual(res, TIER2_THRESHOLD * 25) self.assertIsNotNone(ex) uops_and_operands = [(opcode, operand) for opcode, _, _, operand in ex] @@ -1121,8 +1118,8 @@ def testfunc(n): a += b + c + d + e return a - res, ex = self._run_with_optimizer(testfunc, 32) - self.assertEqual(res, 800) + res, ex = self._run_with_optimizer(testfunc, TIER2_THRESHOLD) + self.assertEqual(res, TIER2_THRESHOLD * 25) self.assertIsNotNone(ex) uops_and_operands = [(opcode, operand) for opcode, _, _, operand in ex] @@ -1157,16 +1154,16 @@ def dummy6(x): z = dummy0(y) return dummy4(z) def testfunc(n): - a = 0; - for _ in range(32): + a = 0 + for _ in range(n): b = dummy5(1) c = dummy0(1) d = dummy6(1) a += b + c + d return a - res, ex = self._run_with_optimizer(testfunc, 32) - self.assertEqual(res, 96) + res, ex = self._run_with_optimizer(testfunc, TIER2_THRESHOLD) + self.assertEqual(res, TIER2_THRESHOLD * 3) self.assertIsNotNone(ex) uops_and_operands = [(opcode, operand) for opcode, _, _, operand in ex] @@ -1225,8 +1222,8 @@ def testfunc(n): b += dummy15(7) return b - res, ex = self._run_with_optimizer(testfunc, 32) - self.assertEqual(res, 32 * (repetitions + 9)) + res, ex = self._run_with_optimizer(testfunc, TIER2_THRESHOLD) + self.assertEqual(res, TIER2_THRESHOLD * (repetitions + 9)) self.assertIsNotNone(ex) uops_and_operands = [(opcode, operand) for opcode, _, _, operand in ex] @@ -1259,8 +1256,13 @@ def testfunc(n): a += dummy15(n) return a - res, ex = self._run_with_optimizer(testfunc, 32) - self.assertEqual(res, 42 * 32) + recursion_limit = sys.getrecursionlimit() + try: + sys.setrecursionlimit(TIER2_THRESHOLD + recursion_limit) + res, ex = self._run_with_optimizer(testfunc, TIER2_THRESHOLD) + finally: + sys.setrecursionlimit(recursion_limit) + self.assertEqual(res, TIER2_THRESHOLD * 42) self.assertIsNotNone(ex) uops_and_operands = [(opcode, operand) for opcode, _, _, operand in ex] @@ -1307,8 +1309,8 @@ def testfunc(n): for i in range(n): gen() return i - res, ex = self._run_with_optimizer(testfunc, 20) - self.assertEqual(res, 19) + res, ex = self._run_with_optimizer(testfunc, TIER2_THRESHOLD) + self.assertEqual(res, TIER2_THRESHOLD - 1) self.assertIsNotNone(ex) self.assertIn("_RETURN_GENERATOR", get_opnames(ex)) @@ -1322,8 +1324,8 @@ def testfunc(n): for i in g: s += i return s - res, ex = self._run_with_optimizer(testfunc, 20) - self.assertEqual(res, 190) + res, ex = self._run_with_optimizer(testfunc, TIER2_THRESHOLD) + self.assertEqual(res, sum(range(TIER2_THRESHOLD))) self.assertIsNotNone(ex) self.assertIn("_FOR_ITER_GEN_FRAME", get_opnames(ex)) @@ -1342,7 +1344,7 @@ def test_modified_local_is_seen_by_optimized_code(self): def test_guard_type_version_removed(self): def thing(a): x = 0 - for _ in range(100): + for _ in range(TIER2_THRESHOLD): x += a.attr x += a.attr return x @@ -1353,7 +1355,7 @@ class Foo: res, ex = self._run_with_optimizer(thing, Foo()) opnames = list(iter_opnames(ex)) self.assertIsNotNone(ex) - self.assertEqual(res, 200) + self.assertEqual(res, TIER2_THRESHOLD * 2) guard_type_version_count = opnames.count("_GUARD_TYPE_VERSION") self.assertEqual(guard_type_version_count, 1) @@ -1367,7 
+1369,7 @@ def fn(): def thing(a): x = 0 - for _ in range(100): + for _ in range(TIER2_THRESHOLD): x += a.attr fn() x += a.attr @@ -1379,24 +1381,21 @@ class Foo: res, ex = self._run_with_optimizer(thing, Foo()) opnames = list(iter_opnames(ex)) self.assertIsNotNone(ex) - self.assertEqual(res, 200) + self.assertEqual(res, TIER2_THRESHOLD * 2) guard_type_version_count = opnames.count("_GUARD_TYPE_VERSION") self.assertEqual(guard_type_version_count, 1) - def test_guard_type_version_not_removed(self): - """ - Verify that the guard type version is not removed if we modify the class - """ + def test_guard_type_version_removed_invalidation(self): def thing(a): x = 0 - for i in range(100): + for i in range(TIER2_THRESHOLD * 2 + 1): x += a.attr - # for the first 90 iterations we set the attribute on this dummy function which shouldn't - # trigger the type watcher - # then after 90 it should trigger it and stop optimizing - # Note that the code needs to be in this weird form so it's optimized inline without any control flow - setattr((Foo, Bar)[i < 90], "attr", 2) + # The first TIER2_THRESHOLD iterations we set the attribute on + # this dummy class, which shouldn't trigger the type watcher. + # Note that the code needs to be in this weird form so it's + # optimized inline without any control flow: + setattr((Bar, Foo)[i == TIER2_THRESHOLD + 1], "attr", 2) x += a.attr return x @@ -1408,24 +1407,21 @@ class Bar: res, ex = self._run_with_optimizer(thing, Foo()) opnames = list(iter_opnames(ex)) - self.assertIsNotNone(ex) - self.assertEqual(res, 219) - guard_type_version_count = opnames.count("_GUARD_TYPE_VERSION") - self.assertEqual(guard_type_version_count, 2) + self.assertEqual(res, TIER2_THRESHOLD * 6 + 1) + call = opnames.index("_CALL_BUILTIN_FAST") + load_attr_top = opnames.index("_LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES", 0, call) + load_attr_bottom = opnames.index("_LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES", call) + self.assertEqual(opnames[:load_attr_top].count("_GUARD_TYPE_VERSION"), 1) + self.assertEqual(opnames[call:load_attr_bottom].count("_CHECK_VALIDITY"), 1) - - @unittest.expectedFailure - def test_guard_type_version_not_removed_escaping(self): - """ - Verify that the guard type version is not removed if have an escaping function - """ + def test_guard_type_version_removed_escaping(self): def thing(a): x = 0 - for i in range(100): + for i in range(TIER2_THRESHOLD): x += a.attr - # eval should be escaping and so should cause optimization to stop and preserve both type versions + # eval should be escaping eval("None") x += a.attr return x @@ -1435,12 +1431,12 @@ class Foo: res, ex = self._run_with_optimizer(thing, Foo()) opnames = list(iter_opnames(ex)) self.assertIsNotNone(ex) - self.assertEqual(res, 200) - guard_type_version_count = opnames.count("_GUARD_TYPE_VERSION") - # Note: This will actually be 1 for noe - # https://github.com/python/cpython/pull/119365#discussion_r1626220129 - self.assertEqual(guard_type_version_count, 2) - + self.assertEqual(res, TIER2_THRESHOLD * 2) + call = opnames.index("_CALL_BUILTIN_FAST_WITH_KEYWORDS") + load_attr_top = opnames.index("_LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES", 0, call) + load_attr_bottom = opnames.index("_LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES", call) + self.assertEqual(opnames[:load_attr_top].count("_GUARD_TYPE_VERSION"), 1) + self.assertEqual(opnames[call:load_attr_bottom].count("_CHECK_VALIDITY"), 1) def test_guard_type_version_executor_invalidated(self): """ @@ -1449,7 +1445,7 @@ def test_guard_type_version_executor_invalidated(self): def thing(a): x = 
0 - for i in range(100): + for i in range(TIER2_THRESHOLD): x += a.attr x += a.attr return x @@ -1458,7 +1454,7 @@ class Foo: attr = 1 res, ex = self._run_with_optimizer(thing, Foo()) - self.assertEqual(res, 200) + self.assertEqual(res, TIER2_THRESHOLD * 2) self.assertIsNotNone(ex) self.assertEqual(list(iter_opnames(ex)).count("_GUARD_TYPE_VERSION"), 1) self.assertTrue(ex.is_valid()) @@ -1494,7 +1490,7 @@ def testfunc(n): opt = _testinternalcapi.new_uop_optimizer() with temporary_optimizer(opt): - testfunc(20) + testfunc(TIER2_THRESHOLD) ex = get_first_executor(testfunc) self.assertIsNotNone(ex) diff --git a/Lib/test/test_codecs.py b/Lib/test/test_codecs.py index 290656f070503a..e51f7e0ee12b1f 100644 --- a/Lib/test/test_codecs.py +++ b/Lib/test/test_codecs.py @@ -3256,7 +3256,11 @@ def test_code_page_name(self): codecs.code_page_decode, self.CP_UTF8, b'\xff', 'strict', True) def check_decode(self, cp, tests): - for raw, errors, expected in tests: + for raw, errors, expected, *rest in tests: + if rest: + altexpected, = rest + else: + altexpected = expected if expected is not None: try: decoded = codecs.code_page_decode(cp, raw, errors, True) @@ -3273,8 +3277,21 @@ def check_decode(self, cp, tests): self.assertRaises(UnicodeDecodeError, codecs.code_page_decode, cp, raw, errors, True) + if altexpected is not None: + decoded = raw.decode(f'cp{cp}', errors) + self.assertEqual(decoded, altexpected, + '%a.decode("cp%s", %r)=%a != %a' + % (raw, cp, errors, decoded, altexpected)) + else: + self.assertRaises(UnicodeDecodeError, + raw.decode, f'cp{cp}', errors) + def check_encode(self, cp, tests): - for text, errors, expected in tests: + for text, errors, expected, *rest in tests: + if rest: + altexpected, = rest + else: + altexpected = expected if expected is not None: try: encoded = codecs.code_page_encode(cp, text, errors) @@ -3285,18 +3302,26 @@ def check_encode(self, cp, tests): '%a.encode("cp%s", %r)=%a != %a' % (text, cp, errors, encoded[0], expected)) self.assertEqual(encoded[1], len(text)) + + encoded = text.encode(f'cp{cp}', errors) + self.assertEqual(encoded, altexpected, + '%a.encode("cp%s", %r)=%a != %a' + % (text, cp, errors, encoded, altexpected)) else: self.assertRaises(UnicodeEncodeError, codecs.code_page_encode, cp, text, errors) + self.assertRaises(UnicodeEncodeError, + text.encode, f'cp{cp}', errors) def test_cp932(self): self.check_encode(932, ( ('abc', 'strict', b'abc'), ('\uff44\u9a3e', 'strict', b'\x82\x84\xe9\x80'), + ('\uf8f3', 'strict', b'\xff'), # test error handlers ('\xff', 'strict', None), ('[\xff]', 'ignore', b'[]'), - ('[\xff]', 'replace', b'[y]'), + ('[\xff]', 'replace', b'[y]', b'[?]'), ('[\u20ac]', 'replace', b'[?]'), ('[\xff]', 'backslashreplace', b'[\\xff]'), ('[\xff]', 'namereplace', @@ -3310,12 +3335,12 @@ def test_cp932(self): (b'abc', 'strict', 'abc'), (b'\x82\x84\xe9\x80', 'strict', '\uff44\u9a3e'), # invalid bytes - (b'[\xff]', 'strict', None), - (b'[\xff]', 'ignore', '[]'), - (b'[\xff]', 'replace', '[\ufffd]'), - (b'[\xff]', 'backslashreplace', '[\\xff]'), - (b'[\xff]', 'surrogateescape', '[\udcff]'), - (b'[\xff]', 'surrogatepass', None), + (b'[\xff]', 'strict', None, '[\uf8f3]'), + (b'[\xff]', 'ignore', '[]', '[\uf8f3]'), + (b'[\xff]', 'replace', '[\ufffd]', '[\uf8f3]'), + (b'[\xff]', 'backslashreplace', '[\\xff]', '[\uf8f3]'), + (b'[\xff]', 'surrogateescape', '[\udcff]', '[\uf8f3]'), + (b'[\xff]', 'surrogatepass', None, '[\uf8f3]'), (b'\x81\x00abc', 'strict', None), (b'\x81\x00abc', 'ignore', '\x00abc'), (b'\x81\x00abc', 'replace', '\ufffd\x00abc'), @@ 
-3330,7 +3355,7 @@ def test_cp1252(self): # test error handlers ('\u0141', 'strict', None), ('\u0141', 'ignore', b''), - ('\u0141', 'replace', b'L'), + ('\u0141', 'replace', b'L', b'?'), ('\udc98', 'surrogateescape', b'\x98'), ('\udc98', 'surrogatepass', None), )) @@ -3340,6 +3365,59 @@ def test_cp1252(self): (b'\xff', 'strict', '\xff'), )) + def test_cp708(self): + self.check_encode(708, ( + ('abc2%', 'strict', b'abc2%'), + ('\u060c\u0621\u064a', 'strict', b'\xac\xc1\xea'), + ('\u2562\xe7\xa0', 'strict', b'\x86\x87\xff'), + ('\x9a\x9f', 'strict', b'\x9a\x9f'), + ('\u256b', 'strict', b'\xc0'), + # test error handlers + ('[\u0662]', 'strict', None), + ('[\u0662]', 'ignore', b'[]'), + ('[\u0662]', 'replace', b'[?]'), + ('\udca0', 'surrogateescape', b'\xa0'), + ('\udca0', 'surrogatepass', None), + )) + self.check_decode(708, ( + (b'abc2%', 'strict', 'abc2%'), + (b'\xac\xc1\xea', 'strict', '\u060c\u0621\u064a'), + (b'\x86\x87\xff', 'strict', '\u2562\xe7\xa0'), + (b'\x9a\x9f', 'strict', '\x9a\x9f'), + (b'\xc0', 'strict', '\u256b'), + # test error handlers + (b'\xa0', 'strict', None), + (b'[\xa0]', 'ignore', '[]'), + (b'[\xa0]', 'replace', '[\ufffd]'), + (b'[\xa0]', 'backslashreplace', '[\\xa0]'), + (b'[\xa0]', 'surrogateescape', '[\udca0]'), + (b'[\xa0]', 'surrogatepass', None), + )) + + def test_cp20106(self): + self.check_encode(20106, ( + ('abc', 'strict', b'abc'), + ('\xa7\xc4\xdf', 'strict', b'@[~'), + # test error handlers + ('@', 'strict', None), + ('@', 'ignore', b''), + ('@', 'replace', b'?'), + ('\udcbf', 'surrogateescape', b'\xbf'), + ('\udcbf', 'surrogatepass', None), + )) + self.check_decode(20106, ( + (b'abc', 'strict', 'abc'), + (b'@[~', 'strict', '\xa7\xc4\xdf'), + (b'\xe1\xfe', 'strict', 'a\xdf'), + # test error handlers + (b'(\xbf)', 'strict', None), + (b'(\xbf)', 'ignore', '()'), + (b'(\xbf)', 'replace', '(\ufffd)'), + (b'(\xbf)', 'backslashreplace', '(\\xbf)'), + (b'(\xbf)', 'surrogateescape', '(\udcbf)'), + (b'(\xbf)', 'surrogatepass', None), + )) + def test_cp_utf7(self): cp = 65000 self.check_encode(cp, ( @@ -3412,17 +3490,15 @@ def test_incremental(self): False) self.assertEqual(decoded, ('abc', 3)) - def test_mbcs_alias(self): - # Check that looking up our 'default' codepage will return - # mbcs when we don't have a more specific one available - code_page = 99_999 - name = f'cp{code_page}' - with mock.patch('_winapi.GetACP', return_value=code_page): - try: - codec = codecs.lookup(name) - self.assertEqual(codec.name, 'mbcs') - finally: - codecs.unregister(name) + def test_mbcs_code_page(self): + # Check that the codec for the current Windows (ANSI) code page is + # always available.
+ try: + from _winapi import GetACP + except ImportError: + self.skipTest('requires _winapi.GetACP') + cp = GetACP() + codecs.lookup(f'cp{cp}') @support.bigmemtest(size=2**31, memuse=7, dry_run=False) def test_large_input(self, size): diff --git a/Lib/test/test_complex.py b/Lib/test/test_complex.py index ecc97315e50d31..179556f57e884f 100644 --- a/Lib/test/test_complex.py +++ b/Lib/test/test_complex.py @@ -126,6 +126,12 @@ def test_truediv(self): z = complex(0, 0) / complex(denom_real, denom_imag) self.assertTrue(isnan(z.real)) self.assertTrue(isnan(z.imag)) + z = float(0) / complex(denom_real, denom_imag) + self.assertTrue(isnan(z.real)) + self.assertTrue(isnan(z.imag)) + + self.assertComplexesAreIdentical(complex(INF, NAN) / 2, + complex(INF, NAN)) self.assertComplexesAreIdentical(complex(INF, 1)/(0.0+1j), complex(NAN, -INF)) @@ -154,6 +160,39 @@ def test_truediv(self): self.assertComplexesAreIdentical(complex(INF, 1)/complex(1, INF), complex(NAN, NAN)) + # mixed types + self.assertEqual((1+1j)/float(2), 0.5+0.5j) + self.assertEqual(float(1)/(1+2j), 0.2-0.4j) + self.assertEqual(float(1)/(-1+2j), -0.2-0.4j) + self.assertEqual(float(1)/(1-2j), 0.2+0.4j) + self.assertEqual(float(1)/(2+1j), 0.4-0.2j) + self.assertEqual(float(1)/(-2+1j), -0.4-0.2j) + self.assertEqual(float(1)/(2-1j), 0.4+0.2j) + + self.assertComplexesAreIdentical(INF/(1+0j), + complex(INF, NAN)) + self.assertComplexesAreIdentical(INF/(0.0+1j), + complex(NAN, -INF)) + self.assertComplexesAreIdentical(INF/complex(2**1000, 2**-1000), + complex(INF, NAN)) + self.assertComplexesAreIdentical(INF/complex(NAN, NAN), + complex(NAN, NAN)) + + self.assertComplexesAreIdentical(float(1)/complex(INF, INF), (0.0-0j)) + self.assertComplexesAreIdentical(float(1)/complex(INF, -INF), (0.0+0j)) + self.assertComplexesAreIdentical(float(1)/complex(-INF, INF), + complex(-0.0, -0.0)) + self.assertComplexesAreIdentical(float(1)/complex(-INF, -INF), + complex(-0.0, 0)) + self.assertComplexesAreIdentical(float(1)/complex(INF, NAN), + complex(0.0, -0.0)) + self.assertComplexesAreIdentical(float(1)/complex(-INF, NAN), + complex(-0.0, -0.0)) + self.assertComplexesAreIdentical(float(1)/complex(NAN, INF), + complex(0.0, -0.0)) + self.assertComplexesAreIdentical(float(INF)/complex(NAN, INF), + complex(NAN, NAN)) + def test_truediv_zero_division(self): for a, b in ZERO_DIVISION: with self.assertRaises(ZeroDivisionError): @@ -224,6 +263,10 @@ def check(n, deltas, is_equal, imag = 0.0): def test_add(self): self.assertEqual(1j + int(+1), complex(+1, 1)) self.assertEqual(1j + int(-1), complex(-1, 1)) + self.assertComplexesAreIdentical(complex(-0.0, -0.0) + (-0.0), + complex(-0.0, -0.0)) + self.assertComplexesAreIdentical((-0.0) + complex(-0.0, -0.0), + complex(-0.0, -0.0)) self.assertRaises(OverflowError, operator.add, 1j, 10**1000) self.assertRaises(TypeError, operator.add, 1j, None) self.assertRaises(TypeError, operator.add, None, 1j) @@ -231,6 +274,14 @@ def test_add(self): def test_sub(self): self.assertEqual(1j - int(+1), complex(-1, 1)) self.assertEqual(1j - int(-1), complex(1, 1)) + self.assertComplexesAreIdentical(complex(-0.0, -0.0) - 0.0, + complex(-0.0, -0.0)) + self.assertComplexesAreIdentical(-0.0 - complex(0.0, 0.0), + complex(-0.0, -0.0)) + self.assertComplexesAreIdentical(complex(1, 2) - complex(2, 1), + complex(-1, 1)) + self.assertComplexesAreIdentical(complex(2, 1) - complex(1, 2), + complex(1, -1)) self.assertRaises(OverflowError, operator.sub, 1j, 10**1000) self.assertRaises(TypeError, operator.sub, 1j, None) self.assertRaises(TypeError, 
operator.sub, None, 1j) @@ -238,6 +289,12 @@ def test_sub(self): def test_mul(self): self.assertEqual(1j * int(20), complex(0, 20)) self.assertEqual(1j * int(-1), complex(0, -1)) + for c, r in [(2, complex(INF, 2)), (INF, complex(INF, INF)), + (0, complex(NAN, 0)), (-0.0, complex(NAN, -0.0)), + (NAN, complex(NAN, NAN))]: + with self.subTest(c=c, r=r): + self.assertComplexesAreIdentical(complex(INF, 1) * c, r) + self.assertComplexesAreIdentical(c * complex(INF, 1), r) self.assertRaises(OverflowError, operator.mul, 1j, 10**1000) self.assertRaises(TypeError, operator.mul, 1j, None) self.assertRaises(TypeError, operator.mul, None, 1j) @@ -338,6 +395,11 @@ def test_pow(self): except OverflowError: pass + # gh-113841: possible undefined division by 0 in _Py_c_pow() + x, y = 9j, 33j**3 + with self.assertRaises(OverflowError): + x**y + def test_pow_with_small_integer_exponents(self): # Check that small integer exponents are handled identically # regardless of their type. diff --git a/Lib/test/test_concurrent_futures/test_thread_pool.py b/Lib/test/test_concurrent_futures/test_thread_pool.py index 2b5bea9f4055a2..4324241b374967 100644 --- a/Lib/test/test_concurrent_futures/test_thread_pool.py +++ b/Lib/test/test_concurrent_futures/test_thread_pool.py @@ -66,6 +66,25 @@ def submit(pool): with futures.ProcessPoolExecutor(1, mp_context=mp.get_context('fork')) as workers: workers.submit(tuple) + @support.requires_fork() + @unittest.skipUnless(hasattr(os, 'register_at_fork'), 'need os.register_at_fork') + def test_process_fork_from_a_threadpool(self): + # bpo-43944: clear concurrent.futures.thread._threads_queues after fork, + # otherwise the child process will try to join the parent thread + def fork_process_and_return_exitcode(): + # Ignore the warning about fork with threads.
+ with self.assertWarnsRegex(DeprecationWarning, + r"use of fork\(\) may lead to deadlocks in the child"): + p = mp.get_context('fork').Process(target=lambda: 1) + p.start() + p.join() + return p.exitcode + + with futures.ThreadPoolExecutor(1) as pool: + process_exitcode = pool.submit(fork_process_and_return_exitcode).result() + + self.assertEqual(process_exitcode, 0) + def test_executor_map_current_future_cancel(self): stop_event = threading.Event() log = [] diff --git a/Lib/test/test_ctypes/test_win32.py b/Lib/test/test_ctypes/test_win32.py index 31919118670613..54b47dc28fbc73 100644 --- a/Lib/test/test_ctypes/test_win32.py +++ b/Lib/test/test_ctypes/test_win32.py @@ -65,15 +65,16 @@ def test_PARAM(self): sizeof(c_void_p)) def test_COMError(self): - from _ctypes import COMError + from ctypes import COMError if support.HAVE_DOCSTRINGS: self.assertEqual(COMError.__doc__, "Raised when a COM method call failed.") - ex = COMError(-1, "text", ("details",)) + ex = COMError(-1, "text", ("descr", "source", "helpfile", 0, "progid")) self.assertEqual(ex.hresult, -1) self.assertEqual(ex.text, "text") - self.assertEqual(ex.details, ("details",)) + self.assertEqual(ex.details, + ("descr", "source", "helpfile", 0, "progid")) self.assertEqual(COMError.mro(), [COMError, Exception, BaseException, object]) diff --git a/Lib/test/test_ctypes/test_win32_com_foreign_func.py b/Lib/test/test_ctypes/test_win32_com_foreign_func.py new file mode 100644 index 00000000000000..8d217fc17efa02 --- /dev/null +++ b/Lib/test/test_ctypes/test_win32_com_foreign_func.py @@ -0,0 +1,286 @@ +import ctypes +import gc +import sys +import unittest +from ctypes import POINTER, byref, c_void_p +from ctypes.wintypes import BYTE, DWORD, WORD + +if sys.platform != "win32": + raise unittest.SkipTest("Windows-specific test") + + +from _ctypes import COMError, CopyComPointer +from ctypes import HRESULT + + +COINIT_APARTMENTTHREADED = 0x2 +CLSCTX_SERVER = 5 +S_OK = 0 +OUT = 2 +TRUE = 1 +E_NOINTERFACE = -2147467262 + + +class GUID(ctypes.Structure): + # https://learn.microsoft.com/en-us/windows/win32/api/guiddef/ns-guiddef-guid + _fields_ = [ + ("Data1", DWORD), + ("Data2", WORD), + ("Data3", WORD), + ("Data4", BYTE * 8), + ] + + +def create_proto_com_method(name, index, restype, *argtypes): + proto = ctypes.WINFUNCTYPE(restype, *argtypes) + + def make_method(*args): + foreign_func = proto(index, name, *args) + + def call(self, *args, **kwargs): + return foreign_func(self, *args, **kwargs) + + return call + + return make_method + + +def create_guid(name): + guid = GUID() + # https://learn.microsoft.com/en-us/windows/win32/api/combaseapi/nf-combaseapi-clsidfromstring + ole32.CLSIDFromString(name, byref(guid)) + return guid + + +def is_equal_guid(guid1, guid2): + # https://learn.microsoft.com/en-us/windows/win32/api/objbase/nf-objbase-isequalguid + return ole32.IsEqualGUID(byref(guid1), byref(guid2)) + + +ole32 = ctypes.oledll.ole32 + +IID_IUnknown = create_guid("{00000000-0000-0000-C000-000000000046}") +IID_IStream = create_guid("{0000000C-0000-0000-C000-000000000046}") +IID_IPersist = create_guid("{0000010C-0000-0000-C000-000000000046}") +CLSID_ShellLink = create_guid("{00021401-0000-0000-C000-000000000046}") + +# https://learn.microsoft.com/en-us/windows/win32/api/unknwn/nf-unknwn-iunknown-queryinterface(refiid_void) +proto_query_interface = create_proto_com_method( + "QueryInterface", 0, HRESULT, POINTER(GUID), POINTER(c_void_p) +) +# https://learn.microsoft.com/en-us/windows/win32/api/unknwn/nf-unknwn-iunknown-addref +proto_add_ref = 
create_proto_com_method("AddRef", 1, ctypes.c_long) +# https://learn.microsoft.com/en-us/windows/win32/api/unknwn/nf-unknwn-iunknown-release +proto_release = create_proto_com_method("Release", 2, ctypes.c_long) +# https://learn.microsoft.com/en-us/windows/win32/api/objidl/nf-objidl-ipersist-getclassid +proto_get_class_id = create_proto_com_method( + "GetClassID", 3, HRESULT, POINTER(GUID) +) + + +def create_shelllink_persist(typ): + ppst = typ() + # https://learn.microsoft.com/en-us/windows/win32/api/combaseapi/nf-combaseapi-cocreateinstance + ole32.CoCreateInstance( + byref(CLSID_ShellLink), + None, + CLSCTX_SERVER, + byref(IID_IPersist), + byref(ppst), + ) + return ppst + + +class ForeignFunctionsThatWillCallComMethodsTests(unittest.TestCase): + def setUp(self): + # https://learn.microsoft.com/en-us/windows/win32/api/combaseapi/nf-combaseapi-coinitializeex + ole32.CoInitializeEx(None, COINIT_APARTMENTTHREADED) + + def tearDown(self): + # https://learn.microsoft.com/en-us/windows/win32/api/combaseapi/nf-combaseapi-couninitialize + ole32.CoUninitialize() + gc.collect() + + def test_without_paramflags_and_iid(self): + class IUnknown(c_void_p): + QueryInterface = proto_query_interface() + AddRef = proto_add_ref() + Release = proto_release() + + class IPersist(IUnknown): + GetClassID = proto_get_class_id() + + ppst = create_shelllink_persist(IPersist) + + clsid = GUID() + hr_getclsid = ppst.GetClassID(byref(clsid)) + self.assertEqual(S_OK, hr_getclsid) + self.assertEqual(TRUE, is_equal_guid(CLSID_ShellLink, clsid)) + + self.assertEqual(2, ppst.AddRef()) + self.assertEqual(3, ppst.AddRef()) + + punk = IUnknown() + hr_qi = ppst.QueryInterface(IID_IUnknown, punk) + self.assertEqual(S_OK, hr_qi) + self.assertEqual(3, punk.Release()) + + with self.assertRaises(OSError) as e: + punk.QueryInterface(IID_IStream, IUnknown()) + self.assertEqual(E_NOINTERFACE, e.exception.winerror) + + self.assertEqual(2, ppst.Release()) + self.assertEqual(1, ppst.Release()) + self.assertEqual(0, ppst.Release()) + + def test_with_paramflags_and_without_iid(self): + class IUnknown(c_void_p): + QueryInterface = proto_query_interface(None) + AddRef = proto_add_ref() + Release = proto_release() + + class IPersist(IUnknown): + GetClassID = proto_get_class_id(((OUT, "pClassID"),)) + + ppst = create_shelllink_persist(IPersist) + + clsid = ppst.GetClassID() + self.assertEqual(TRUE, is_equal_guid(CLSID_ShellLink, clsid)) + + punk = IUnknown() + hr_qi = ppst.QueryInterface(IID_IUnknown, punk) + self.assertEqual(S_OK, hr_qi) + self.assertEqual(1, punk.Release()) + + with self.assertRaises(OSError) as e: + ppst.QueryInterface(IID_IStream, IUnknown()) + self.assertEqual(E_NOINTERFACE, e.exception.winerror) + + self.assertEqual(0, ppst.Release()) + + def test_with_paramflags_and_iid(self): + class IUnknown(c_void_p): + QueryInterface = proto_query_interface(None, IID_IUnknown) + AddRef = proto_add_ref() + Release = proto_release() + + class IPersist(IUnknown): + GetClassID = proto_get_class_id(((OUT, "pClassID"),), IID_IPersist) + + ppst = create_shelllink_persist(IPersist) + + clsid = ppst.GetClassID() + self.assertEqual(TRUE, is_equal_guid(CLSID_ShellLink, clsid)) + + punk = IUnknown() + hr_qi = ppst.QueryInterface(IID_IUnknown, punk) + self.assertEqual(S_OK, hr_qi) + self.assertEqual(1, punk.Release()) + + with self.assertRaises(COMError) as e: + ppst.QueryInterface(IID_IStream, IUnknown()) + self.assertEqual(E_NOINTERFACE, e.exception.hresult) + + self.assertEqual(0, ppst.Release()) + + +class 
CopyComPointerTests(unittest.TestCase): + def setUp(self): + ole32.CoInitializeEx(None, COINIT_APARTMENTTHREADED) + + class IUnknown(c_void_p): + QueryInterface = proto_query_interface(None, IID_IUnknown) + AddRef = proto_add_ref() + Release = proto_release() + + class IPersist(IUnknown): + GetClassID = proto_get_class_id(((OUT, "pClassID"),), IID_IPersist) + + self.IUnknown = IUnknown + self.IPersist = IPersist + + def tearDown(self): + ole32.CoUninitialize() + gc.collect() + + def test_both_are_null(self): + src = self.IPersist() + dst = self.IPersist() + + hr = CopyComPointer(src, byref(dst)) + + self.assertEqual(S_OK, hr) + + self.assertIsNone(src.value) + self.assertIsNone(dst.value) + + def test_src_is_nonnull_and_dest_is_null(self): + # The reference count of the COM pointer created by `CoCreateInstance` + # is initially 1. + src = create_shelllink_persist(self.IPersist) + dst = self.IPersist() + + # `CopyComPointer` calls `AddRef` explicitly in the C implementation. + # The refcount of `src` is incremented from 1 to 2 here. + hr = CopyComPointer(src, byref(dst)) + + self.assertEqual(S_OK, hr) + self.assertEqual(src.value, dst.value) + + # This indicates that the refcount was 2 before the `Release` call. + self.assertEqual(1, src.Release()) + + clsid = dst.GetClassID() + self.assertEqual(TRUE, is_equal_guid(CLSID_ShellLink, clsid)) + + self.assertEqual(0, dst.Release()) + + def test_src_is_null_and_dest_is_nonnull(self): + src = self.IPersist() + dst_orig = create_shelllink_persist(self.IPersist) + dst = self.IPersist() + CopyComPointer(dst_orig, byref(dst)) + self.assertEqual(1, dst_orig.Release()) + + clsid = dst.GetClassID() + self.assertEqual(TRUE, is_equal_guid(CLSID_ShellLink, clsid)) + + # This does NOT affect the refcount of `dst_orig`. + hr = CopyComPointer(src, byref(dst)) + + self.assertEqual(S_OK, hr) + self.assertIsNone(dst.value) + + with self.assertRaises(ValueError): + dst.GetClassID() # NULL COM pointer access + + # This indicates that the refcount was 1 before the `Release` call. + self.assertEqual(0, dst_orig.Release()) + + def test_both_are_nonnull(self): + src = create_shelllink_persist(self.IPersist) + dst_orig = create_shelllink_persist(self.IPersist) + dst = self.IPersist() + CopyComPointer(dst_orig, byref(dst)) + self.assertEqual(1, dst_orig.Release()) + + self.assertEqual(dst.value, dst_orig.value) + self.assertNotEqual(src.value, dst.value) + + hr = CopyComPointer(src, byref(dst)) + + self.assertEqual(S_OK, hr) + self.assertEqual(src.value, dst.value) + self.assertNotEqual(dst.value, dst_orig.value) + + self.assertEqual(1, src.Release()) + + clsid = dst.GetClassID() + self.assertEqual(TRUE, is_equal_guid(CLSID_ShellLink, clsid)) + + self.assertEqual(0, dst.Release()) + self.assertEqual(0, dst_orig.Release()) + + +if __name__ == '__main__': + unittest.main() diff --git a/Lib/test/test_dict.py b/Lib/test/test_dict.py index 4030716efb51f9..c94dc2df4f0a7f 100644 --- a/Lib/test/test_dict.py +++ b/Lib/test/test_dict.py @@ -880,115 +880,6 @@ class C(object): gc.collect() self.assertIs(ref(), None, "Cycle was not collected") - def _not_tracked(self, t): - # Nested containers can take several collections to untrack - gc.collect() - gc.collect() - self.assertFalse(gc.is_tracked(t), t) - - def _tracked(self, t): - self.assertTrue(gc.is_tracked(t), t) - gc.collect() - gc.collect() - self.assertTrue(gc.is_tracked(t), t) - - def test_string_keys_can_track_values(self): - # Test that this doesn't leak.
- for i in range(10): - d = {} - for j in range(10): - d[str(j)] = j - d["foo"] = d - - @support.cpython_only - def test_track_literals(self): - # Test GC-optimization of dict literals - x, y, z, w = 1.5, "a", (1, None), [] - - self._not_tracked({}) - self._not_tracked({x:(), y:x, z:1}) - self._not_tracked({1: "a", "b": 2}) - self._not_tracked({1: 2, (None, True, False, ()): int}) - self._not_tracked({1: object()}) - - # Dicts with mutable elements are always tracked, even if those - # elements are not tracked right now. - self._tracked({1: []}) - self._tracked({1: ([],)}) - self._tracked({1: {}}) - self._tracked({1: set()}) - - @support.cpython_only - def test_track_dynamic(self): - # Test GC-optimization of dynamically-created dicts - class MyObject(object): - pass - x, y, z, w, o = 1.5, "a", (1, object()), [], MyObject() - - d = dict() - self._not_tracked(d) - d[1] = "a" - self._not_tracked(d) - d[y] = 2 - self._not_tracked(d) - d[z] = 3 - self._not_tracked(d) - self._not_tracked(d.copy()) - d[4] = w - self._tracked(d) - self._tracked(d.copy()) - d[4] = None - self._not_tracked(d) - self._not_tracked(d.copy()) - - # dd isn't tracked right now, but it may mutate and therefore d - # which contains it must be tracked. - d = dict() - dd = dict() - d[1] = dd - self._not_tracked(dd) - self._tracked(d) - dd[1] = d - self._tracked(dd) - - d = dict.fromkeys([x, y, z]) - self._not_tracked(d) - dd = dict() - dd.update(d) - self._not_tracked(dd) - d = dict.fromkeys([x, y, z, o]) - self._tracked(d) - dd = dict() - dd.update(d) - self._tracked(dd) - - d = dict(x=x, y=y, z=z) - self._not_tracked(d) - d = dict(x=x, y=y, z=z, w=w) - self._tracked(d) - d = dict() - d.update(x=x, y=y, z=z) - self._not_tracked(d) - d.update(w=w) - self._tracked(d) - - d = dict([(x, y), (z, 1)]) - self._not_tracked(d) - d = dict([(x, y), (z, w)]) - self._tracked(d) - d = dict() - d.update([(x, y), (z, 1)]) - self._not_tracked(d) - d.update([(x, y), (z, w)]) - self._tracked(d) - - @support.cpython_only - def test_track_subtypes(self): - # Dict subtypes are always tracked - class MyDict(dict): - pass - self._tracked(MyDict()) - def make_shared_key_dict(self, n): class C: pass diff --git a/Lib/test/test_email/test_message.py b/Lib/test/test_email/test_message.py index 034f7626c1fc7c..96979db27f3a21 100644 --- a/Lib/test/test_email/test_message.py +++ b/Lib/test/test_email/test_message.py @@ -1,6 +1,6 @@ -import unittest import textwrap -from email import policy, message_from_string +import unittest +from email import message_from_bytes, message_from_string, policy from email.message import EmailMessage, MIMEPart from test.test_email import TestEmailBase, parameterize @@ -958,6 +958,52 @@ def test_folding_with_utf8_encoding_8(self): b'123456789-123456789\n 123456789 Hello ' b'=?utf-8?q?W=C3=B6rld!?= 123456789 123456789\n\n') + def test_folding_with_short_nospace_1(self): + # bpo-36520 + # + # Fold a line that contains a long whitespace after + # the fold point. + + m = EmailMessage(policy.default) + m['Message-ID'] = '123456789' * 3 + parsed_msg = message_from_bytes(m.as_bytes(), policy=policy.default) + self.assertEqual(parsed_msg['Message-ID'], m['Message-ID']) + + def test_folding_with_long_nospace_default_policy_1(self): + # Fixed: https://github.com/python/cpython/issues/124452 + # + # When the value is too long, it should be converted back + # to its original form without any modifications. 
+ + m = EmailMessage(policy.default) + message = '123456789' * 10 + m['Message-ID'] = message + self.assertEqual(m.as_bytes(), + f'Message-ID:\n {message}\n\n'.encode()) + parsed_msg = message_from_bytes(m.as_bytes(), policy=policy.default) + self.assertEqual(parsed_msg['Message-ID'], m['Message-ID']) + + def test_folding_with_long_nospace_compat32_policy_1(self): + m = EmailMessage(policy.compat32) + message = '123456789' * 10 + m['Message-ID'] = message + parsed_msg = message_from_bytes(m.as_bytes(), policy=policy.default) + self.assertEqual(parsed_msg['Message-ID'], m['Message-ID']) + + def test_folding_with_long_nospace_smtp_policy_1(self): + m = EmailMessage(policy.SMTP) + message = '123456789' * 10 + m['Message-ID'] = message + parsed_msg = message_from_bytes(m.as_bytes(), policy=policy.default) + self.assertEqual(parsed_msg['Message-ID'], m['Message-ID']) + + def test_folding_with_long_nospace_http_policy_1(self): + m = EmailMessage(policy.HTTP) + message = '123456789' * 10 + m['Message-ID'] = message + parsed_msg = message_from_bytes(m.as_bytes(), policy=policy.default) + self.assertEqual(parsed_msg['Message-ID'], m['Message-ID']) + def test_get_body_malformed(self): """test for bpo-42892""" msg = textwrap.dedent("""\ diff --git a/Lib/test/test_embed.py b/Lib/test/test_embed.py index bf861ef06ee2d3..5c38b28322deb4 100644 --- a/Lib/test/test_embed.py +++ b/Lib/test/test_embed.py @@ -1649,14 +1649,14 @@ def test_init_pyvenv_cfg(self): config = { 'base_prefix': sysconfig.get_config_var("prefix"), 'base_exec_prefix': exec_prefix, - 'exec_prefix': exec_prefix, + 'exec_prefix': tmpdir, + 'prefix': tmpdir, 'base_executable': base_executable, 'executable': executable, 'module_search_paths': paths, } if MS_WINDOWS: config['base_prefix'] = pyvenv_home - config['prefix'] = pyvenv_home config['stdlib_dir'] = os.path.join(pyvenv_home, 'Lib') config['use_frozen_modules'] = bool(not support.Py_DEBUG) else: diff --git a/Lib/test/test_fileio.py b/Lib/test/test_fileio.py index d60aabcdf1ae22..e681417e15d34b 100644 --- a/Lib/test/test_fileio.py +++ b/Lib/test/test_fileio.py @@ -364,8 +364,7 @@ def testErrnoOnClosedReadinto(self, f): @strace_helper.requires_strace() def test_syscalls_read(self): - """Check that the set of system calls produced by the I/O stack is what - is expected for various read cases. + """Check set of system calls during common I/O patterns It's expected as bits of the I/O implementation change, this will need to change. The goal is to catch changes that unintentionally add @@ -383,6 +382,11 @@ def check_readall(name, code, prelude="", cleanup="", prelude=prelude, cleanup=cleanup) + # Some system calls (ex. mmap) can be used for both File I/O and + # memory allocation. Filter out the ones used for memory + # allocation. + syscalls = strace_helper.filter_memory(syscalls) + # The first call should be an open that returns a # file descriptor (fd). Afer that calls may vary. 
Once the file # is opened, check calls refer to it by fd as the filename diff --git a/Lib/test/test_fnmatch.py b/Lib/test/test_fnmatch.py index 10ed496d4e2f37..9f360e1dc10f47 100644 --- a/Lib/test/test_fnmatch.py +++ b/Lib/test/test_fnmatch.py @@ -250,6 +250,75 @@ def test_translate(self): self.assertTrue(re.match(fatre, 'cbabcaxc')) self.assertFalse(re.match(fatre, 'dabccbad')) + def test_translate_wildcards(self): + for pattern, expect in [ + ('ab*', r'(?s:ab.*)\Z'), + ('ab*cd', r'(?s:ab.*cd)\Z'), + ('ab*cd*', r'(?s:ab(?>.*?cd).*)\Z'), + ('ab*cd*12', r'(?s:ab(?>.*?cd).*12)\Z'), + ('ab*cd*12*', r'(?s:ab(?>.*?cd)(?>.*?12).*)\Z'), + ('ab*cd*12*34', r'(?s:ab(?>.*?cd)(?>.*?12).*34)\Z'), + ('ab*cd*12*34*', r'(?s:ab(?>.*?cd)(?>.*?12)(?>.*?34).*)\Z'), + ]: + with self.subTest(pattern): + translated = translate(pattern) + self.assertEqual(translated, expect, pattern) + + for pattern, expect in [ + ('*ab', r'(?s:.*ab)\Z'), + ('*ab*', r'(?s:(?>.*?ab).*)\Z'), + ('*ab*cd', r'(?s:(?>.*?ab).*cd)\Z'), + ('*ab*cd*', r'(?s:(?>.*?ab)(?>.*?cd).*)\Z'), + ('*ab*cd*12', r'(?s:(?>.*?ab)(?>.*?cd).*12)\Z'), + ('*ab*cd*12*', r'(?s:(?>.*?ab)(?>.*?cd)(?>.*?12).*)\Z'), + ('*ab*cd*12*34', r'(?s:(?>.*?ab)(?>.*?cd)(?>.*?12).*34)\Z'), + ('*ab*cd*12*34*', r'(?s:(?>.*?ab)(?>.*?cd)(?>.*?12)(?>.*?34).*)\Z'), + ]: + with self.subTest(pattern): + translated = translate(pattern) + self.assertEqual(translated, expect, pattern) + + def test_translate_expressions(self): + for pattern, expect in [ + ('[', r'(?s:\[)\Z'), + ('[!', r'(?s:\[!)\Z'), + ('[]', r'(?s:\[\])\Z'), + ('[abc', r'(?s:\[abc)\Z'), + ('[!abc', r'(?s:\[!abc)\Z'), + ('[abc]', r'(?s:[abc])\Z'), + ('[!abc]', r'(?s:[^abc])\Z'), + ('[!abc][!def]', r'(?s:[^abc][^def])\Z'), + # with [[ + ('[[', r'(?s:\[\[)\Z'), + ('[[a', r'(?s:\[\[a)\Z'), + ('[[]', r'(?s:[\[])\Z'), + ('[[]a', r'(?s:[\[]a)\Z'), + ('[[]]', r'(?s:[\[]\])\Z'), + ('[[]a]', r'(?s:[\[]a\])\Z'), + ('[[a]', r'(?s:[\[a])\Z'), + ('[[a]]', r'(?s:[\[a]\])\Z'), + ('[[a]b', r'(?s:[\[a]b)\Z'), + # backslashes + ('[\\', r'(?s:\[\\)\Z'), + (r'[\]', r'(?s:[\\])\Z'), + (r'[\\]', r'(?s:[\\\\])\Z'), + ]: + with self.subTest(pattern): + translated = translate(pattern) + self.assertEqual(translated, expect, pattern) + + def test_star_indices_locations(self): + from fnmatch import _translate + + blocks = ['a^b', '***', '?', '?', '[a-z]', '[1-9]', '*', '++', '[[a'] + parts, star_indices = _translate(''.join(blocks), '*', '.') + expect_parts = ['a', r'\^', 'b', '*', + '.', '.', '[a-z]', '[1-9]', '*', + r'\+', r'\+', r'\[', r'\[', 'a'] + self.assertListEqual(parts, expect_parts) + self.assertListEqual(star_indices, [3, 8]) + + class FilterTestCase(unittest.TestCase): def test_filter(self): diff --git a/Lib/test/test_free_threading/test_code.py b/Lib/test/test_free_threading/test_code.py new file mode 100644 index 00000000000000..a5136a3ba4edc7 --- /dev/null +++ b/Lib/test/test_free_threading/test_code.py @@ -0,0 +1,30 @@ +import unittest + +from threading import Thread +from unittest import TestCase + +from test.support import threading_helper + +@threading_helper.requires_working_threading() +class TestCode(TestCase): + def test_code_attrs(self): + """Test concurrent accesses to lazily initialized code attributes""" + code_objects = [] + for _ in range(1000): + code_objects.append(compile("a + b", "", "eval")) + + def run_in_thread(): + for code in code_objects: + self.assertIsInstance(code.co_code, bytes) + self.assertIsInstance(code.co_freevars, tuple) + self.assertIsInstance(code.co_varnames, tuple) + + threads = 
[Thread(target=run_in_thread) for _ in range(2)] + for thread in threads: + thread.start() + for thread in threads: + thread.join() + + +if __name__ == "__main__": + unittest.main() diff --git a/Lib/test/test_free_threading/test_dict.py b/Lib/test/test_free_threading/test_dict.py index 80daf0d9cae9e0..13717cb39fa35d 100644 --- a/Lib/test/test_free_threading/test_dict.py +++ b/Lib/test/test_free_threading/test_dict.py @@ -142,6 +142,70 @@ def writer_func(l): for ref in thread_list: self.assertIsNone(ref()) + def test_racing_set_object_dict(self): + """Races assigning to __dict__ should be thread safe""" + class C: pass + class MyDict(dict): pass + for cyclic in (False, True): + f = C() + f.__dict__ = {"foo": 42} + THREAD_COUNT = 10 + + def writer_func(l): + for i in range(1000): + if cyclic: + other_d = {} + d = MyDict({"foo": 100}) + if cyclic: + d["x"] = other_d + other_d["bar"] = d + l.append(weakref.ref(d)) + f.__dict__ = d + + def reader_func(): + for i in range(1000): + f.foo + + lists = [] + readers = [] + writers = [] + for x in range(THREAD_COUNT): + thread_list = [] + lists.append(thread_list) + writer = Thread(target=partial(writer_func, thread_list)) + writers.append(writer) + + for x in range(THREAD_COUNT): + reader = Thread(target=partial(reader_func)) + readers.append(reader) + + for writer in writers: + writer.start() + for reader in readers: + reader.start() + + for writer in writers: + writer.join() + + for reader in readers: + reader.join() + + f.__dict__ = {} + gc.collect() + gc.collect() + + count = 0 + ids = set() + for thread_list in lists: + for i, ref in enumerate(thread_list): + if ref() is None: + continue + count += 1 + ids.add(id(ref())) + count += 1 + + self.assertEqual(count, 0) + if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_generated_cases.py b/Lib/test/test_generated_cases.py index ff9a52b7adac8a..66862ec17cca98 100644 --- a/Lib/test/test_generated_cases.py +++ b/Lib/test/test_generated_cases.py @@ -1,9 +1,11 @@ import contextlib import os +import re import sys import tempfile import unittest +from io import StringIO from test import support from test import test_tools @@ -29,10 +31,12 @@ def skip_if_different_mount_drives(): test_tools.skip_if_missing("cases_generator") with test_tools.imports_under_tool("cases_generator"): - from analyzer import StackItem + from analyzer import analyze_forest, StackItem + from cwriter import CWriter import parser from stack import Local, Stack import tier1_generator + import opcode_metadata_generator import optimizer_generator @@ -43,6 +47,14 @@ def handle_stderr(): return support.captured_stderr() +def parse_src(src): + p = parser.Parser(src, "test.c") + nodes = [] + while node := p.definition(): + nodes.append(node) + return nodes + + class TestEffects(unittest.TestCase): def test_effect_sizes(self): stack = Stack() @@ -65,6 +77,171 @@ def test_effect_sizes(self): self.assertEqual(stack.top_offset.to_c(), "1 - oparg - oparg*2 + oparg*4") +class TestGenerateMaxStackEffect(unittest.TestCase): + def check(self, input, output): + analysis = analyze_forest(parse_src(input)) + buf = StringIO() + writer = CWriter(buf, 0, False) + opcode_metadata_generator.generate_max_stack_effect_function(analysis, writer) + buf.seek(0) + generated = buf.read() + matches = re.search(r"(case OP: {[^}]+})", generated) + if matches is None: + self.fail(f"Couldn't find case statement for OP in:\n {generated}") + self.assertEqual(output.strip(), matches.group(1)) + + def test_push_one(self): + input = """ + inst(OP, (a -- 
b, c)) { + SPAM(); + } + """ + output = """ + case OP: { + *effect = 1; + return 0; + } + """ + self.check(input, output) + + def test_cond_push(self): + input = """ + inst(OP, (a -- b, c if (oparg))) { + SPAM(); + } + """ + output = """ + case OP: { + *effect = ((oparg) ? 1 : 0); + return 0; + } + """ + self.check(input, output) + + def test_ops_pass_two(self): + input = """ + op(A, (-- val1)) { + val1 = SPAM(); + } + op(B, (-- val2)) { + val2 = SPAM(); + } + op(C, (val1, val2 --)) { + } + macro(OP) = A + B + C; + """ + output = """ + case OP: { + *effect = 2; + return 0; + } + """ + self.check(input, output) + + def test_ops_pass_two_cond_push(self): + input = """ + op(A, (-- val1, val2)) { + val1 = 0; + val2 = 1; + } + op(B, (val1, val2 -- val1, val2, val3 if (oparg))) { + val3 = SPAM(); + } + macro(OP) = A + B; + """ + output = """ + case OP: { + *effect = Py_MAX(2, 2 + ((oparg) ? 1 : 0)); + return 0; + } + """ + self.check(input, output) + + def test_pop_push_array(self): + input = """ + inst(OP, (values[oparg] -- values[oparg], above)) { + SPAM(values, oparg); + above = 0; + } + """ + output = """ + case OP: { + *effect = 1; + return 0; + } + """ + self.check(input, output) + + def test_family(self): + input = """ + op(A, (-- val1, val2)) { + val1 = 0; + val2 = 1; + } + op(B, (val1, val2 -- val3)) { + val3 = 2; + } + macro(OP1) = A + B; + + inst(OP, (-- val)) { + val = 0; + } + + family(OP, 0) = { OP1 }; + """ + output = """ + case OP: { + *effect = 2; + return 0; + } + """ + self.check(input, output) + + def test_family_intermediate_array(self): + input = """ + op(A, (-- values[oparg])) { + val1 = 0; + val2 = 1; + } + op(B, (values[oparg] -- val3)) { + val3 = 2; + } + macro(OP1) = A + B; + + inst(OP, (-- val)) { + val = 0; + } + + family(OP, 0) = { OP1 }; + """ + output = """ + case OP: { + *effect = Py_MAX(1, oparg); + return 0; + } + """ + self.check(input, output) + + def test_negative_effect(self): + input = """ + op(A, (val1 -- )) { + } + op(B, (val2 --)) { + } + op(C, (val3 --)) { + } + + macro(OP) = A + B + C; + """ + output = """ + case OP: { + *effect = -1; + return 0; + } + """ + self.check(input, output) + + class TestGeneratedCases(unittest.TestCase): def setUp(self) -> None: super().setUp() diff --git a/Lib/test/test_generators.py b/Lib/test/test_generators.py index bf2cb1160723b0..2ea6dba12effc1 100644 --- a/Lib/test/test_generators.py +++ b/Lib/test/test_generators.py @@ -758,7 +758,8 @@ def check_stack_names(self, frame, expected): while frame: name = frame.f_code.co_name # Stop checking frames when we get to our test helper. 
- if name.startswith('check_') or name.startswith('call_'): + if (name.startswith('check_') or name.startswith('call_') + or name.startswith('test')): break names.append(name) @@ -799,6 +800,25 @@ def call_throw(gen): self.check_yield_from_example(call_throw) + def test_throw_with_yield_from_custom_generator(self): + + class CustomGen: + def __init__(self, test): + self.test = test + def throw(self, *args): + self.test.check_stack_names(sys._getframe(), ['throw', 'g']) + def __iter__(self): + return self + def __next__(self): + return 42 + + def g(target): + yield from target + + gen = g(CustomGen(self)) + gen.send(None) + gen.throw(RuntimeError) + class YieldFromTests(unittest.TestCase): def test_generator_gi_yieldfrom(self): diff --git a/Lib/test/test_getpath.py b/Lib/test/test_getpath.py index d5dcdad9614ecc..7e5c4a3d14ddc5 100644 --- a/Lib/test/test_getpath.py +++ b/Lib/test/test_getpath.py @@ -92,8 +92,8 @@ def test_venv_win32(self): ]) expected = dict( executable=r"C:\venv\Scripts\python.exe", - prefix=r"C:\Python", - exec_prefix=r"C:\Python", + prefix=r"C:\venv", + exec_prefix=r"C:\venv", base_executable=r"C:\Python\python.exe", base_prefix=r"C:\Python", base_exec_prefix=r"C:\Python", @@ -339,8 +339,8 @@ def test_venv_posix(self): ]) expected = dict( executable="/venv/bin/python", - prefix="/usr", - exec_prefix="/usr", + prefix="/venv", + exec_prefix="/venv", base_executable="/usr/bin/python", base_prefix="/usr", base_exec_prefix="/usr", @@ -371,8 +371,8 @@ def test_venv_changed_name_posix(self): ]) expected = dict( executable="/venv/bin/python", - prefix="/usr", - exec_prefix="/usr", + prefix="/venv", + exec_prefix="/venv", base_executable="/usr/bin/python3", base_prefix="/usr", base_exec_prefix="/usr", @@ -404,8 +404,8 @@ def test_venv_non_installed_zip_path_posix(self): ]) expected = dict( executable="/venv/bin/python", - prefix="/path/to/non-installed", - exec_prefix="/path/to/non-installed", + prefix="/venv", + exec_prefix="/venv", base_executable="/path/to/non-installed/bin/python", base_prefix="/path/to/non-installed", base_exec_prefix="/path/to/non-installed", @@ -435,8 +435,8 @@ def test_venv_changed_name_copy_posix(self): ]) expected = dict( executable="/venv/bin/python", - prefix="/usr", - exec_prefix="/usr", + prefix="/venv", + exec_prefix="/venv", base_executable="/usr/bin/python9", base_prefix="/usr", base_exec_prefix="/usr", @@ -652,8 +652,8 @@ def test_venv_framework_macos(self): ]) expected = dict( executable=f"{venv_path}/bin/python", - prefix="/Library/Frameworks/Python.framework/Versions/9.8", - exec_prefix="/Library/Frameworks/Python.framework/Versions/9.8", + prefix=venv_path, + exec_prefix=venv_path, base_executable="/Library/Frameworks/Python.framework/Versions/9.8/bin/python9.8", base_prefix="/Library/Frameworks/Python.framework/Versions/9.8", base_exec_prefix="/Library/Frameworks/Python.framework/Versions/9.8", @@ -697,8 +697,8 @@ def test_venv_alt_framework_macos(self): ]) expected = dict( executable=f"{venv_path}/bin/python", - prefix="/Library/Frameworks/DebugPython.framework/Versions/9.8", - exec_prefix="/Library/Frameworks/DebugPython.framework/Versions/9.8", + prefix=venv_path, + exec_prefix=venv_path, base_executable="/Library/Frameworks/DebugPython.framework/Versions/9.8/bin/python9.8", base_prefix="/Library/Frameworks/DebugPython.framework/Versions/9.8", base_exec_prefix="/Library/Frameworks/DebugPython.framework/Versions/9.8", @@ -734,8 +734,8 @@ def test_venv_macos(self): ]) expected = dict( executable="/framework/Python9.8/python", - 
prefix="/usr", - exec_prefix="/usr", + prefix="/framework/Python9.8", + exec_prefix="/framework/Python9.8", base_executable="/usr/bin/python", base_prefix="/usr", base_exec_prefix="/usr", diff --git a/Lib/test/test_interpreters/test_stress.py b/Lib/test/test_interpreters/test_stress.py index e400535b2a0e4e..56bfc1721992c8 100644 --- a/Lib/test/test_interpreters/test_stress.py +++ b/Lib/test/test_interpreters/test_stress.py @@ -23,6 +23,7 @@ def test_create_many_sequential(self): alive.append(interp) @support.requires_resource('cpu') + @threading_helper.requires_working_threading() def test_create_many_threaded(self): alive = [] def task(): @@ -32,6 +33,35 @@ def task(): with threading_helper.start_threads(threads): pass + @support.requires_resource('cpu') + @threading_helper.requires_working_threading() + def test_many_threads_running_interp_in_other_interp(self): + interp = interpreters.create() + + script = f"""if True: + import _interpreters + _interpreters.run_string({interp.id}, '1') + """ + + def run(): + interp = interpreters.create() + alreadyrunning = (f'{interpreters.InterpreterError}: ' + 'interpreter already running') + success = False + while not success: + try: + interp.exec(script) + except interpreters.ExecutionFailed as exc: + if exc.excinfo.msg != 'interpreter already running': + raise # re-raise + assert exc.excinfo.type.__name__ == 'InterpreterError' + else: + success = True + + threads = (threading.Thread(target=run) for _ in range(200)) + with threading_helper.start_threads(threads): + pass + if __name__ == '__main__': # Test needs to be a package, so we can do relative imports. diff --git a/Lib/test/test_io.py b/Lib/test/test_io.py index aa1b8268592ff7..f1f8ce57668f3b 100644 --- a/Lib/test/test_io.py +++ b/Lib/test/test_io.py @@ -1148,6 +1148,21 @@ def test_disallow_instantiation(self): _io = self._io support.check_disallow_instantiation(self, _io._BytesIOBuffer) + def test_stringio_setstate(self): + # gh-127182: Calling __setstate__() with invalid arguments must not crash + obj = self._io.StringIO() + with self.assertRaisesRegex( + TypeError, + 'initial_value must be str or None, not int', + ): + obj.__setstate__((1, '', 0, {})) + + obj.__setstate__((None, '', 0, {})) # should not crash + self.assertEqual(obj.getvalue(), '') + + obj.__setstate__(('', '', 0, {})) + self.assertEqual(obj.getvalue(), '') + class PyIOTest(IOTest): pass diff --git a/Lib/test/test_mimetypes.py b/Lib/test/test_mimetypes.py index c4bb8dfb1a7422..0a5b511e75537c 100644 --- a/Lib/test/test_mimetypes.py +++ b/Lib/test/test_mimetypes.py @@ -240,9 +240,17 @@ def check_extensions(): ("font/woff", ".woff"), ("font/woff2", ".woff2"), ("image/avif", ".avif"), + ("image/emf", ".emf"), + ("image/fits", ".fits"), + ("image/g3fax", ".g3"), + ("image/jp2", ".jp2"), + ("image/jpm", ".jpm"), + ("image/t38", ".t38"), ("image/webp", ".webp"), + ("image/wmf", ".wmf"), ("image/jpeg", ".jpg"), ("image/tiff", ".tiff"), + ("image/tiff-fx", ".tfx"), ("message/rfc822", ".eml"), ("text/html", ".html"), ("text/plain", ".txt"), diff --git a/Lib/test/test_ntpath.py b/Lib/test/test_ntpath.py index 4f59184dfcfdc7..6715071af8c752 100644 --- a/Lib/test/test_ntpath.py +++ b/Lib/test/test_ntpath.py @@ -347,13 +347,18 @@ def test_normpath(self): tester("ntpath.normpath('..')", r'..') tester("ntpath.normpath('.')", r'.') + tester("ntpath.normpath('c:.')", 'c:') tester("ntpath.normpath('')", r'.') tester("ntpath.normpath('/')", '\\') tester("ntpath.normpath('c:/')", 'c:\\') tester("ntpath.normpath('/../.././..')", '\\') 
tester("ntpath.normpath('c:/../../..')", 'c:\\') + tester("ntpath.normpath('/./a/b')", r'\a\b') + tester("ntpath.normpath('c:/./a/b')", r'c:\a\b') tester("ntpath.normpath('../.././..')", r'..\..\..') tester("ntpath.normpath('K:../.././..')", r'K:..\..\..') + tester("ntpath.normpath('./a/b')", r'a\b') + tester("ntpath.normpath('c:./a/b')", r'c:a\b') tester("ntpath.normpath('C:////a/b')", r'C:\a\b') tester("ntpath.normpath('//machine/share//a/b')", r'\\machine\share\a\b') diff --git a/Lib/test/test_opcache.py b/Lib/test/test_opcache.py index 78e4bf44f7ea0c..1a6eac236009c3 100644 --- a/Lib/test/test_opcache.py +++ b/Lib/test/test_opcache.py @@ -546,7 +546,6 @@ def count_args(self, *args): @threading_helper.requires_working_threading() -@requires_specialization class TestRacesDoNotCrash(TestBase): # Careful with these. Bigger numbers have a higher chance of catching bugs, # but you can also burn through a *ton* of type/dict/function versions: @@ -588,6 +587,7 @@ def assert_races_do_not_crash( for writer in writers: writer.join() + @requires_specialization def test_binary_subscr_getitem(self): def get_items(): class C: @@ -617,6 +617,7 @@ def write(items): opname = "BINARY_SUBSCR_GETITEM" self.assert_races_do_not_crash(opname, get_items, read, write) + @requires_specialization def test_binary_subscr_list_int(self): def get_items(): items = [] @@ -640,6 +641,7 @@ def write(items): opname = "BINARY_SUBSCR_LIST_INT" self.assert_races_do_not_crash(opname, get_items, read, write) + @requires_specialization def test_for_iter_gen(self): def get_items(): def g(): @@ -671,6 +673,7 @@ def write(items): opname = "FOR_ITER_GEN" self.assert_races_do_not_crash(opname, get_items, read, write) + @requires_specialization def test_for_iter_list(self): def get_items(): items = [] @@ -692,6 +695,7 @@ def write(items): opname = "FOR_ITER_LIST" self.assert_races_do_not_crash(opname, get_items, read, write) + @requires_specialization def test_load_attr_class(self): def get_items(): class C: @@ -721,6 +725,7 @@ def write(items): opname = "LOAD_ATTR_CLASS" self.assert_races_do_not_crash(opname, get_items, read, write) + @requires_specialization def test_load_attr_getattribute_overridden(self): def get_items(): class C: @@ -750,6 +755,7 @@ def write(items): opname = "LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN" self.assert_races_do_not_crash(opname, get_items, read, write) + @requires_specialization def test_load_attr_instance_value(self): def get_items(): class C: @@ -773,6 +779,7 @@ def write(items): opname = "LOAD_ATTR_INSTANCE_VALUE" self.assert_races_do_not_crash(opname, get_items, read, write) + @requires_specialization def test_load_attr_method_lazy_dict(self): def get_items(): class C(Exception): @@ -802,6 +809,7 @@ def write(items): opname = "LOAD_ATTR_METHOD_LAZY_DICT" self.assert_races_do_not_crash(opname, get_items, read, write) + @requires_specialization def test_load_attr_method_no_dict(self): def get_items(): class C: @@ -832,6 +840,7 @@ def write(items): opname = "LOAD_ATTR_METHOD_NO_DICT" self.assert_races_do_not_crash(opname, get_items, read, write) + @requires_specialization def test_load_attr_method_with_values(self): def get_items(): class C: @@ -861,6 +870,7 @@ def write(items): opname = "LOAD_ATTR_METHOD_WITH_VALUES" self.assert_races_do_not_crash(opname, get_items, read, write) + @requires_specialization def test_load_attr_module(self): def get_items(): items = [] @@ -885,6 +895,7 @@ def write(items): opname = "LOAD_ATTR_MODULE" self.assert_races_do_not_crash(opname, get_items, read, write) + 
@requires_specialization def test_load_attr_property(self): def get_items(): class C: @@ -914,6 +925,7 @@ def write(items): opname = "LOAD_ATTR_PROPERTY" self.assert_races_do_not_crash(opname, get_items, read, write) + @requires_specialization def test_load_attr_with_hint(self): def get_items(): class C: @@ -940,6 +952,7 @@ def write(items): opname = "LOAD_ATTR_WITH_HINT" self.assert_races_do_not_crash(opname, get_items, read, write) + @requires_specialization_ft def test_load_global_module(self): def get_items(): items = [] @@ -961,6 +974,7 @@ def write(items): opname, get_items, read, write, check_items=True ) + @requires_specialization def test_store_attr_instance_value(self): def get_items(): class C: @@ -983,6 +997,7 @@ def write(items): opname = "STORE_ATTR_INSTANCE_VALUE" self.assert_races_do_not_crash(opname, get_items, read, write) + @requires_specialization def test_store_attr_with_hint(self): def get_items(): class C: @@ -1008,6 +1023,7 @@ def write(items): opname = "STORE_ATTR_WITH_HINT" self.assert_races_do_not_crash(opname, get_items, read, write) + @requires_specialization def test_store_subscr_list_int(self): def get_items(): items = [] @@ -1031,6 +1047,7 @@ def write(items): opname = "STORE_SUBSCR_LIST_INT" self.assert_races_do_not_crash(opname, get_items, read, write) + @requires_specialization def test_unpack_sequence_list(self): def get_items(): items = [] @@ -1255,7 +1272,104 @@ def g(): self.assert_specialized(g, "CONTAINS_OP_SET") self.assert_no_opcode(g, "CONTAINS_OP") + @cpython_only + @requires_specialization_ft + def test_to_bool(self): + def to_bool_bool(): + true_cnt, false_cnt = 0, 0 + elems = [e % 2 == 0 for e in range(100)] + for e in elems: + if e: + true_cnt += 1 + else: + false_cnt += 1 + self.assertEqual(true_cnt, 50) + self.assertEqual(false_cnt, 50) + + to_bool_bool() + self.assert_specialized(to_bool_bool, "TO_BOOL_BOOL") + self.assert_no_opcode(to_bool_bool, "TO_BOOL") + + def to_bool_int(): + count = 0 + for i in range(100): + if i: + count += 1 + else: + count -= 1 + self.assertEqual(count, 98) + + to_bool_int() + self.assert_specialized(to_bool_int, "TO_BOOL_INT") + self.assert_no_opcode(to_bool_int, "TO_BOOL") + + def to_bool_list(): + count = 0 + elems = [1, 2, 3] + while elems: + count += elems.pop() + self.assertEqual(elems, []) + self.assertEqual(count, 6) + + to_bool_list() + self.assert_specialized(to_bool_list, "TO_BOOL_LIST") + self.assert_no_opcode(to_bool_list, "TO_BOOL") + + def to_bool_none(): + count = 0 + elems = [None, None, None, None] + for e in elems: + if not e: + count += 1 + self.assertEqual(count, len(elems)) + + to_bool_none() + self.assert_specialized(to_bool_none, "TO_BOOL_NONE") + self.assert_no_opcode(to_bool_none, "TO_BOOL") + + def to_bool_str(): + count = 0 + elems = ["", "foo", ""] + for e in elems: + if e: + count += 1 + self.assertEqual(count, 1) + + to_bool_str() + self.assert_specialized(to_bool_str, "TO_BOOL_STR") + self.assert_no_opcode(to_bool_str, "TO_BOOL") + + @cpython_only + @requires_specialization_ft + def test_unpack_sequence(self): + def f(): + for _ in range(100): + a, b = 1, 2 + self.assertEqual(a, 1) + self.assertEqual(b, 2) + + f() + self.assert_specialized(f, "UNPACK_SEQUENCE_TWO_TUPLE") + self.assert_no_opcode(f, "UNPACK_SEQUENCE") + + def g(): + for _ in range(100): + a, = 1, + self.assertEqual(a, 1) + + g() + self.assert_specialized(g, "UNPACK_SEQUENCE_TUPLE") + self.assert_no_opcode(g, "UNPACK_SEQUENCE") + + def x(): + for _ in range(100): + a, b = [1, 2] + self.assertEqual(a, 1) + 
self.assertEqual(b, 2) + x() + self.assert_specialized(x, "UNPACK_SEQUENCE_LIST") + self.assert_no_opcode(x, "UNPACK_SEQUENCE") if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_os.py b/Lib/test/test_os.py index 9a4be78556c648..f3d2ceb263f6f4 100644 --- a/Lib/test/test_os.py +++ b/Lib/test/test_os.py @@ -2447,8 +2447,8 @@ def test_fchown(self): support.is_emscripten or support.is_wasi, "musl libc issue on Emscripten/WASI, bpo-46390" ) - @unittest.skipIf(support.is_apple_mobile, "gh-118201: Test is flaky on iOS") def test_fpathconf(self): + self.assertIn("PC_NAME_MAX", os.pathconf_names) self.check(os.pathconf, "PC_NAME_MAX") self.check(os.fpathconf, "PC_NAME_MAX") self.check_bool(os.pathconf, "PC_NAME_MAX") @@ -3967,10 +3967,10 @@ def _check_xattrs_str(self, s, getxattr, setxattr, removexattr, listxattr, **kwa xattr.remove("user.test") self.assertEqual(set(listxattr(fn)), xattr) self.assertEqual(getxattr(fn, s("user.test2"), **kwargs), b"foo") - setxattr(fn, s("user.test"), b"a"*1024, **kwargs) - self.assertEqual(getxattr(fn, s("user.test"), **kwargs), b"a"*1024) + setxattr(fn, s("user.test"), b"a"*256, **kwargs) + self.assertEqual(getxattr(fn, s("user.test"), **kwargs), b"a"*256) removexattr(fn, s("user.test"), **kwargs) - many = sorted("user.test{}".format(i) for i in range(100)) + many = sorted("user.test{}".format(i) for i in range(32)) for thing in many: setxattr(fn, thing, b"x", **kwargs) self.assertEqual(set(listxattr(fn)), set(init_xattr) | set(many)) @@ -4174,6 +4174,7 @@ def test_eventfd_select(self): os.eventfd_read(fd) @unittest.skipUnless(hasattr(os, 'timerfd_create'), 'requires os.timerfd_create') +@unittest.skipIf(sys.platform == "android", "gh-124873: Test is flaky on Android") @support.requires_linux_version(2, 6, 30) class TimerfdTests(unittest.TestCase): # 1 ms accuracy is reliably achievable on every platform except Android diff --git a/Lib/test/test_pathlib/test_pathlib.py b/Lib/test/test_pathlib/test_pathlib.py index 46966b6df2d7b0..6a994f890da616 100644 --- a/Lib/test/test_pathlib/test_pathlib.py +++ b/Lib/test/test_pathlib/test_pathlib.py @@ -1425,84 +1425,6 @@ def test_passing_kwargs_errors(self): with self.assertRaises(TypeError): self.cls(foo="bar") - def setUpWalk(self): - super().setUpWalk() - sub21_path= self.sub2_path / "SUB21" - tmp5_path = sub21_path / "tmp3" - broken_link3_path = self.sub2_path / "broken_link3" - - os.makedirs(sub21_path) - tmp5_path.write_text("I am tmp5, blame test_pathlib.") - if self.can_symlink: - os.symlink(tmp5_path, broken_link3_path) - self.sub2_tree[2].append('broken_link3') - self.sub2_tree[2].sort() - if not is_emscripten: - # Emscripten fails with inaccessible directories. 
- os.chmod(sub21_path, 0) - try: - os.listdir(sub21_path) - except PermissionError: - self.sub2_tree[1].append('SUB21') - else: - os.chmod(sub21_path, stat.S_IRWXU) - os.unlink(tmp5_path) - os.rmdir(sub21_path) - - def test_walk_bad_dir(self): - self.setUpWalk() - errors = [] - walk_it = self.walk_path.walk(on_error=errors.append) - root, dirs, files = next(walk_it) - self.assertEqual(errors, []) - dir1 = 'SUB1' - path1 = root / dir1 - path1new = (root / dir1).with_suffix(".new") - path1.rename(path1new) - try: - roots = [r for r, _, _ in walk_it] - self.assertTrue(errors) - self.assertNotIn(path1, roots) - self.assertNotIn(path1new, roots) - for dir2 in dirs: - if dir2 != dir1: - self.assertIn(root / dir2, roots) - finally: - path1new.rename(path1) - - def test_walk_many_open_files(self): - depth = 30 - base = self.cls(self.base, 'deep') - path = self.cls(base, *(['d']*depth)) - path.mkdir(parents=True) - - iters = [base.walk(top_down=False) for _ in range(100)] - for i in range(depth + 1): - expected = (path, ['d'] if i else [], []) - for it in iters: - self.assertEqual(next(it), expected) - path = path.parent - - iters = [base.walk(top_down=True) for _ in range(100)] - path = base - for i in range(depth + 1): - expected = (path, ['d'] if i < depth else [], []) - for it in iters: - self.assertEqual(next(it), expected) - path = path / 'd' - - def test_walk_above_recursion_limit(self): - recursion_limit = 40 - # directory_depth > recursion_limit - directory_depth = recursion_limit + 10 - base = self.cls(self.base, 'deep') - path = base.joinpath(*(['d'] * directory_depth)) - path.mkdir(parents=True) - - with infinite_recursion(recursion_limit): - list(base.walk()) - list(base.walk(top_down=False)) - def test_glob_empty_pattern(self): p = self.cls('') with self.assertRaisesRegex(ValueError, 'Unacceptable pattern'): @@ -1886,6 +1808,94 @@ def test_group_windows(self): P('c:/').group() +class PathWalkTest(test_pathlib_abc.DummyPathWalkTest): + cls = pathlib.Path + base = PathTest.base + can_symlink = PathTest.can_symlink + + def setUp(self): + super().setUp() + sub21_path= self.sub2_path / "SUB21" + tmp5_path = sub21_path / "tmp3" + broken_link3_path = self.sub2_path / "broken_link3" + + os.makedirs(sub21_path) + tmp5_path.write_text("I am tmp5, blame test_pathlib.") + if self.can_symlink: + os.symlink(tmp5_path, broken_link3_path) + self.sub2_tree[2].append('broken_link3') + self.sub2_tree[2].sort() + if not is_emscripten: + # Emscripten fails with inaccessible directories. 
+ os.chmod(sub21_path, 0) + try: + os.listdir(sub21_path) + except PermissionError: + self.sub2_tree[1].append('SUB21') + else: + os.chmod(sub21_path, stat.S_IRWXU) + os.unlink(tmp5_path) + os.rmdir(sub21_path) + + def tearDown(self): + if 'SUB21' in self.sub2_tree[1]: + os.chmod(self.sub2_path / "SUB21", stat.S_IRWXU) + super().tearDown() + + def test_walk_bad_dir(self): + errors = [] + walk_it = self.walk_path.walk(on_error=errors.append) + root, dirs, files = next(walk_it) + self.assertEqual(errors, []) + dir1 = 'SUB1' + path1 = root / dir1 + path1new = (root / dir1).with_suffix(".new") + path1.rename(path1new) + try: + roots = [r for r, _, _ in walk_it] + self.assertTrue(errors) + self.assertNotIn(path1, roots) + self.assertNotIn(path1new, roots) + for dir2 in dirs: + if dir2 != dir1: + self.assertIn(root / dir2, roots) + finally: + path1new.rename(path1) + + def test_walk_many_open_files(self): + depth = 30 + base = self.cls(self.base, 'deep') + path = self.cls(base, *(['d']*depth)) + path.mkdir(parents=True) + + iters = [base.walk(top_down=False) for _ in range(100)] + for i in range(depth + 1): + expected = (path, ['d'] if i else [], []) + for it in iters: + self.assertEqual(next(it), expected) + path = path.parent + + iters = [base.walk(top_down=True) for _ in range(100)] + path = base + for i in range(depth + 1): + expected = (path, ['d'] if i < depth else [], []) + for it in iters: + self.assertEqual(next(it), expected) + path = path / 'd' + + def test_walk_above_recursion_limit(self): + recursion_limit = 40 + # directory_depth > recursion_limit + directory_depth = recursion_limit + 10 + base = self.cls(self.base, 'deep') + path = base.joinpath(*(['d'] * directory_depth)) + path.mkdir(parents=True) + + with infinite_recursion(recursion_limit): + list(base.walk()) + list(base.walk(top_down=False)) + + @unittest.skipIf(os.name == 'nt', 'test requires a POSIX-compatible system') class PosixPathTest(PathTest, PurePosixPathTest): cls = pathlib.PosixPath diff --git a/Lib/test/test_pathlib/test_pathlib_abc.py b/Lib/test/test_pathlib/test_pathlib_abc.py index b69d674e1cf1ed..aaa30a17f2af14 100644 --- a/Lib/test/test_pathlib/test_pathlib_abc.py +++ b/Lib/test/test_pathlib/test_pathlib_abc.py @@ -2922,7 +2922,16 @@ def test_delete_missing(self): filename = tmp / 'foo' self.assertRaises(FileNotFoundError, filename._delete) - def setUpWalk(self): + +class DummyPathWalkTest(unittest.TestCase): + cls = DummyPath + base = DummyPathTest.base + can_symlink = False + + def setUp(self): + name = self.id().split('.')[-1] + if name in _tests_needing_symlinks and not self.can_symlink: + self.skipTest('requires symlinks') # Build: # TESTFN/ # TEST1/ a file kid and two directory kids @@ -2966,8 +2975,11 @@ def setUpWalk(self): else: self.sub2_tree = (self.sub2_path, [], ["tmp3"]) + def tearDown(self): + base = self.cls(self.base) + base._rmtree() + def test_walk_topdown(self): - self.setUpWalk() walker = self.walk_path.walk() entry = next(walker) entry[1].sort() # Ensure we visit SUB1 before SUB2 @@ -2984,7 +2996,6 @@ def test_walk_topdown(self): next(walker) def test_walk_prune(self): - self.setUpWalk() # Prune the search. 
all = [] for root, dirs, files in self.walk_path.walk(): @@ -3001,7 +3012,6 @@ def test_walk_prune(self): self.assertEqual(all[1], self.sub2_tree) def test_walk_bottom_up(self): - self.setUpWalk() seen_testfn = seen_sub1 = seen_sub11 = seen_sub2 = False for path, dirnames, filenames in self.walk_path.walk(top_down=False): if path == self.walk_path: @@ -3036,7 +3046,6 @@ def test_walk_bottom_up(self): @needs_symlinks def test_walk_follow_symlinks(self): - self.setUpWalk() walk_it = self.walk_path.walk(follow_symlinks=True) for root, dirs, files in walk_it: if root == self.link_path: @@ -3048,7 +3057,6 @@ def test_walk_follow_symlinks(self): @needs_symlinks def test_walk_symlink_location(self): - self.setUpWalk() # Tests whether symlinks end up in filenames or dirnames depending # on the `follow_symlinks` argument. walk_it = self.walk_path.walk(follow_symlinks=False) @@ -3097,5 +3105,10 @@ class DummyPathWithSymlinksTest(DummyPathTest): can_symlink = True +class DummyPathWithSymlinksWalkTest(DummyPathWalkTest): + cls = DummyPathWithSymlinks + can_symlink = True + + if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_pickletools.py b/Lib/test/test_pickletools.py index d8ff7a25cbc4b7..265dc497ccb86c 100644 --- a/Lib/test/test_pickletools.py +++ b/Lib/test/test_pickletools.py @@ -361,6 +361,88 @@ def test_annotate(self): highest protocol among opcodes = 0 ''', annotate=20) + def test_string(self): + self.check_dis(b"S'abc'\n.", '''\ + 0: S STRING 'abc' + 7: . STOP +highest protocol among opcodes = 0 +''') + self.check_dis(b'S"abc"\n.', '''\ + 0: S STRING 'abc' + 7: . STOP +highest protocol among opcodes = 0 +''') + self.check_dis(b"S'\xc3\xb5'\n.", '''\ + 0: S STRING '\\xc3\\xb5' + 6: . STOP +highest protocol among opcodes = 0 +''') + + def test_string_without_quotes(self): + self.check_dis_error(b"Sabc'\n.", '', + 'no string quotes around b"abc\'"') + self.check_dis_error(b'Sabc"\n.', '', + "no string quotes around b'abc\"'") + self.check_dis_error(b"S'abc\n.", '', + '''strinq quote b"'" not found at both ends of b"'abc"''') + self.check_dis_error(b'S"abc\n.', '', + r"""strinq quote b'"' not found at both ends of b'"abc'""") + self.check_dis_error(b"S'abc\"\n.", '', + r"""strinq quote b"'" not found at both ends of b'\\'abc"'""") + self.check_dis_error(b"S\"abc'\n.", '', + r"""strinq quote b'"' not found at both ends of b'"abc\\''""") + + def test_binstring(self): + self.check_dis(b"T\x03\x00\x00\x00abc.", '''\ + 0: T BINSTRING 'abc' + 8: . STOP +highest protocol among opcodes = 1 +''') + self.check_dis(b"T\x02\x00\x00\x00\xc3\xb5.", '''\ + 0: T BINSTRING '\\xc3\\xb5' + 7: . STOP +highest protocol among opcodes = 1 +''') + + def test_short_binstring(self): + self.check_dis(b"U\x03abc.", '''\ + 0: U SHORT_BINSTRING 'abc' + 5: . STOP +highest protocol among opcodes = 1 +''') + self.check_dis(b"U\x02\xc3\xb5.", '''\ + 0: U SHORT_BINSTRING '\\xc3\\xb5' + 4: . STOP +highest protocol among opcodes = 1 +''') + + def test_global(self): + self.check_dis(b"cmodule\nname\n.", '''\ + 0: c GLOBAL 'module name' + 13: . STOP +highest protocol among opcodes = 0 +''') + self.check_dis(b"cm\xc3\xb6dule\nn\xc3\xa4me\n.", '''\ + 0: c GLOBAL 'm\xf6dule n\xe4me' + 15: . STOP +highest protocol among opcodes = 0 +''') + + def test_inst(self): + self.check_dis(b"(imodule\nname\n.", '''\ + 0: ( MARK + 1: i INST 'module name' (MARK at 0) + 14: . STOP +highest protocol among opcodes = 0 +''') + + def test_persid(self): + self.check_dis(b"Pabc\n.", '''\ + 0: P PERSID 'abc' + 5: . 
STOP +highest protocol among opcodes = 0 +''') + class MiscTestCase(unittest.TestCase): def test__all__(self): diff --git a/Lib/test/test_posix.py b/Lib/test/test_posix.py index ef9d617f66feec..c9cbe1541e733e 100644 --- a/Lib/test/test_posix.py +++ b/Lib/test/test_posix.py @@ -568,10 +568,38 @@ def test_dup(self): @unittest.skipUnless(hasattr(posix, 'confstr'), 'test needs posix.confstr()') - @unittest.skipIf(support.is_apple_mobile, "gh-118201: Test is flaky on iOS") def test_confstr(self): - self.assertRaises(ValueError, posix.confstr, "CS_garbage") - self.assertEqual(len(posix.confstr("CS_PATH")) > 0, True) + with self.assertRaisesRegex( + ValueError, "unrecognized configuration name" + ): + posix.confstr("CS_garbage") + + with self.assertRaisesRegex( + TypeError, "configuration names must be strings or integers" + ): + posix.confstr(1.23) + + path = posix.confstr("CS_PATH") + self.assertGreater(len(path), 0) + self.assertEqual(posix.confstr(posix.confstr_names["CS_PATH"]), path) + + @unittest.skipUnless(hasattr(posix, 'sysconf'), + 'test needs posix.sysconf()') + def test_sysconf(self): + with self.assertRaisesRegex( + ValueError, "unrecognized configuration name" + ): + posix.sysconf("SC_garbage") + + with self.assertRaisesRegex( + TypeError, "configuration names must be strings or integers" + ): + posix.sysconf(1.23) + + arg_max = posix.sysconf("SC_ARG_MAX") + self.assertGreater(arg_max, 0) + self.assertEqual( + posix.sysconf(posix.sysconf_names["SC_ARG_MAX"]), arg_max) @unittest.skipUnless(hasattr(posix, 'dup2'), 'test needs posix.dup2()') diff --git a/Lib/test/test_posixpath.py b/Lib/test/test_posixpath.py index b39255ebc79ac1..43e4fbc610e5f7 100644 --- a/Lib/test/test_posixpath.py +++ b/Lib/test/test_posixpath.py @@ -379,6 +379,7 @@ def test_expanduser_pwd2(self): ("/.", "/"), ("/./", "/"), ("/.//.", "/"), + ("/./foo/bar", "/foo/bar"), ("/foo", "/foo"), ("/foo/bar", "/foo/bar"), ("//", "//"), @@ -388,6 +389,7 @@ def test_expanduser_pwd2(self): ("///..//./foo/.//bar", "/foo/bar"), (".", "."), (".//.", "."), + ("./foo/bar", "foo/bar"), ("..", ".."), ("../", ".."), ("../foo", "../foo"), diff --git a/Lib/test/test_re.py b/Lib/test/test_re.py index 7bc702ec89a4a7..0d3599be87f228 100644 --- a/Lib/test/test_re.py +++ b/Lib/test/test_re.py @@ -2640,6 +2640,12 @@ def test_bug_gh100061(self): self.assertEqual(re.match("(?>(?:ab?c){1,3})", "aca").span(), (0, 2)) self.assertEqual(re.match("(?:ab?c){1,3}+", "aca").span(), (0, 2)) + def test_bug_gh101955(self): + # Possessive quantifier with nested alternative with capture groups + self.assertEqual(re.match('((x)|y|z)*+', 'xyz').groups(), ('z', 'x')) + self.assertEqual(re.match('((x)|y|z){3}+', 'xyz').groups(), ('z', 'x')) + self.assertEqual(re.match('((x)|y|z){3,}+', 'xyz').groups(), ('z', 'x')) + @unittest.skipIf(multiprocessing is None, 'test requires multiprocessing') def test_regression_gh94675(self): pattern = re.compile(r'(?<=[({}])(((//[^\n]*)?[\n])([\000-\040])*)*' @@ -2681,6 +2687,29 @@ def test_character_set_none(self): self.assertIsNone(re.search(p, s)) self.assertIsNone(re.search('(?s:.)' + p, s)) + def check_interrupt(self, pattern, string, maxcount): + class Interrupt(Exception): + pass + p = re.compile(pattern) + for n in range(maxcount): + try: + p._fail_after(n, Interrupt) + p.match(string) + return n + except Interrupt: + pass + finally: + p._fail_after(-1, None) + + @unittest.skipUnless(hasattr(re.Pattern, '_fail_after'), 'requires debug build') + def test_memory_leaks(self): + self.check_interrupt(r'(.)*:', 'abc:', 100) + 
self.check_interrupt(r'([^:])*?:', 'abc:', 100) + self.check_interrupt(r'([^:])*+:', 'abc:', 100) + self.check_interrupt(r'(.){2,4}:', 'abc:', 100) + self.check_interrupt(r'([^:]){2,4}?:', 'abc:', 100) + self.check_interrupt(r'([^:]){2,4}+:', 'abc:', 100) + def get_debug_out(pat): with captured_stdout() as out: diff --git a/Lib/test/test_shutil.py b/Lib/test/test_shutil.py index 37e54d23b22516..1f18b1f09b5858 100644 --- a/Lib/test/test_shutil.py +++ b/Lib/test/test_shutil.py @@ -70,18 +70,17 @@ def wrap(*args, **kwargs): os.rename = builtin_rename return wrap -def write_file(path, content, binary=False): +def create_file(path, content=b''): """Write *content* to a file located at *path*. If *path* is a tuple instead of a string, os.path.join will be used to - make a path. If *binary* is true, the file will be opened in binary - mode. + make a path. """ if isinstance(path, tuple): path = os.path.join(*path) - mode = 'wb' if binary else 'w' - encoding = None if binary else "utf-8" - with open(path, mode, encoding=encoding) as fp: + if isinstance(content, str): + content = content.encode() + with open(path, 'xb') as fp: fp.write(content) def write_test_file(path, size): @@ -190,7 +189,7 @@ def test_rmtree_works_on_bytes(self): tmp = self.mkdtemp() victim = os.path.join(tmp, 'killme') os.mkdir(victim) - write_file(os.path.join(victim, 'somefile'), 'foo') + create_file(os.path.join(victim, 'somefile'), 'foo') victim = os.fsencode(victim) self.assertIsInstance(victim, bytes) shutil.rmtree(victim) @@ -242,7 +241,7 @@ def test_rmtree_works_on_symlinks(self): for d in dir1, dir2, dir3: os.mkdir(d) file1 = os.path.join(tmp, 'file1') - write_file(file1, 'foo') + create_file(file1, 'foo') link1 = os.path.join(dir1, 'link1') os.symlink(dir2, link1) link2 = os.path.join(dir1, 'link2') @@ -304,7 +303,7 @@ def test_rmtree_works_on_junctions(self): for d in dir1, dir2, dir3: os.mkdir(d) file1 = os.path.join(tmp, 'file1') - write_file(file1, 'foo') + create_file(file1, 'foo') link1 = os.path.join(dir1, 'link1') _winapi.CreateJunction(dir2, link1) link2 = os.path.join(dir1, 'link2') @@ -327,7 +326,7 @@ def test_rmtree_errors(self): # existing file tmpdir = self.mkdtemp() filename = os.path.join(tmpdir, "tstfile") - write_file(filename, "") + create_file(filename) with self.assertRaises(NotADirectoryError) as cm: shutil.rmtree(filename) self.assertEqual(cm.exception.filename, filename) @@ -347,7 +346,7 @@ def test_rmtree_errors(self): def test_rmtree_errors_onerror(self): tmpdir = self.mkdtemp() filename = os.path.join(tmpdir, "tstfile") - write_file(filename, "") + create_file(filename) errors = [] def onerror(*args): errors.append(args) @@ -365,7 +364,7 @@ def onerror(*args): def test_rmtree_errors_onexc(self): tmpdir = self.mkdtemp() filename = os.path.join(tmpdir, "tstfile") - write_file(filename, "") + create_file(filename) errors = [] def onexc(*args): errors.append(args) @@ -547,7 +546,7 @@ def raiser(fn, *args, **kwargs): os.lstat = raiser os.mkdir(TESTFN) - write_file((TESTFN, 'foo'), 'foo') + create_file((TESTFN, 'foo'), 'foo') shutil.rmtree(TESTFN) finally: os.lstat = orig_lstat @@ -618,7 +617,7 @@ def test_rmtree_with_dir_fd(self): self.addCleanup(os.close, dir_fd) os.mkdir(fullname) os.mkdir(os.path.join(fullname, 'subdir')) - write_file(os.path.join(fullname, 'subdir', 'somefile'), 'foo') + create_file(os.path.join(fullname, 'subdir', 'somefile'), 'foo') self.assertTrue(os.path.exists(fullname)) shutil.rmtree(victim, dir_fd=dir_fd) self.assertFalse(os.path.exists(fullname)) @@ -658,7 +657,7 @@ 
def test_rmtree_on_junction(self): src = os.path.join(TESTFN, 'cheese') dst = os.path.join(TESTFN, 'shop') os.mkdir(src) - open(os.path.join(src, 'spam'), 'wb').close() + create_file(os.path.join(src, 'spam')) _winapi.CreateJunction(src, dst) self.assertRaises(OSError, shutil.rmtree, dst) shutil.rmtree(dst, ignore_errors=True) @@ -718,7 +717,7 @@ def _onexc(fn, path, exc): for path in dirs: os.mkdir(path) for path in files: - write_file(path, '') + create_file(path) old_modes = [os.stat(path).st_mode for path in paths] @@ -757,9 +756,9 @@ def test_copytree_simple(self): dst_dir = os.path.join(self.mkdtemp(), 'destination') self.addCleanup(shutil.rmtree, src_dir) self.addCleanup(shutil.rmtree, os.path.dirname(dst_dir)) - write_file((src_dir, 'test.txt'), '123') + create_file((src_dir, 'test.txt'), '123') os.mkdir(os.path.join(src_dir, 'test_dir')) - write_file((src_dir, 'test_dir', 'test.txt'), '456') + create_file((src_dir, 'test_dir', 'test.txt'), '456') shutil.copytree(src_dir, dst_dir) self.assertTrue(os.path.isfile(os.path.join(dst_dir, 'test.txt'))) @@ -777,11 +776,11 @@ def test_copytree_dirs_exist_ok(self): self.addCleanup(shutil.rmtree, src_dir) self.addCleanup(shutil.rmtree, dst_dir) - write_file((src_dir, 'nonexisting.txt'), '123') + create_file((src_dir, 'nonexisting.txt'), '123') os.mkdir(os.path.join(src_dir, 'existing_dir')) os.mkdir(os.path.join(dst_dir, 'existing_dir')) - write_file((dst_dir, 'existing_dir', 'existing.txt'), 'will be replaced') - write_file((src_dir, 'existing_dir', 'existing.txt'), 'has been replaced') + create_file((dst_dir, 'existing_dir', 'existing.txt'), 'will be replaced') + create_file((src_dir, 'existing_dir', 'existing.txt'), 'has been replaced') shutil.copytree(src_dir, dst_dir, dirs_exist_ok=True) self.assertTrue(os.path.isfile(os.path.join(dst_dir, 'nonexisting.txt'))) @@ -804,7 +803,7 @@ def test_copytree_symlinks(self): sub_dir = os.path.join(src_dir, 'sub') os.mkdir(src_dir) os.mkdir(sub_dir) - write_file((src_dir, 'file.txt'), 'foo') + create_file((src_dir, 'file.txt'), 'foo') src_link = os.path.join(sub_dir, 'link') dst_link = os.path.join(dst_dir, 'sub/link') os.symlink(os.path.join(src_dir, 'file.txt'), @@ -835,16 +834,16 @@ def test_copytree_with_exclude(self): src_dir = self.mkdtemp() try: dst_dir = join(self.mkdtemp(), 'destination') - write_file((src_dir, 'test.txt'), '123') - write_file((src_dir, 'test.tmp'), '123') + create_file((src_dir, 'test.txt'), '123') + create_file((src_dir, 'test.tmp'), '123') os.mkdir(join(src_dir, 'test_dir')) - write_file((src_dir, 'test_dir', 'test.txt'), '456') + create_file((src_dir, 'test_dir', 'test.txt'), '456') os.mkdir(join(src_dir, 'test_dir2')) - write_file((src_dir, 'test_dir2', 'test.txt'), '456') + create_file((src_dir, 'test_dir2', 'test.txt'), '456') os.mkdir(join(src_dir, 'test_dir2', 'subdir')) os.mkdir(join(src_dir, 'test_dir2', 'subdir2')) - write_file((src_dir, 'test_dir2', 'subdir', 'test.txt'), '456') - write_file((src_dir, 'test_dir2', 'subdir2', 'test.py'), '456') + create_file((src_dir, 'test_dir2', 'subdir', 'test.txt'), '456') + create_file((src_dir, 'test_dir2', 'subdir2', 'test.py'), '456') # testing glob-like patterns try: @@ -903,7 +902,7 @@ def test_copytree_arg_types_of_ignore(self): os.mkdir(join(src_dir)) os.mkdir(join(src_dir, 'test_dir')) os.mkdir(os.path.join(src_dir, 'test_dir', 'subdir')) - write_file((src_dir, 'test_dir', 'subdir', 'test.txt'), '456') + create_file((src_dir, 'test_dir', 'subdir', 'test.txt'), '456') invocations = [] @@ -943,9 +942,9 @@ def 
test_copytree_retains_permissions(self): self.addCleanup(shutil.rmtree, tmp_dir) os.chmod(src_dir, 0o777) - write_file((src_dir, 'permissive.txt'), '123') + create_file((src_dir, 'permissive.txt'), '123') os.chmod(os.path.join(src_dir, 'permissive.txt'), 0o777) - write_file((src_dir, 'restrictive.txt'), '456') + create_file((src_dir, 'restrictive.txt'), '456') os.chmod(os.path.join(src_dir, 'restrictive.txt'), 0o600) restrictive_subdir = tempfile.mkdtemp(dir=src_dir) self.addCleanup(os_helper.rmtree, restrictive_subdir) @@ -988,8 +987,7 @@ def custom_cpfun(a, b): flag = [] src = self.mkdtemp() dst = tempfile.mktemp(dir=self.mkdtemp()) - with open(os.path.join(src, 'foo'), 'w', encoding='utf-8') as f: - f.close() + create_file(os.path.join(src, 'foo')) shutil.copytree(src, dst, copy_function=custom_cpfun) self.assertEqual(len(flag), 1) @@ -1024,9 +1022,9 @@ def test_copytree_named_pipe(self): def test_copytree_special_func(self): src_dir = self.mkdtemp() dst_dir = os.path.join(self.mkdtemp(), 'destination') - write_file((src_dir, 'test.txt'), '123') + create_file((src_dir, 'test.txt'), '123') os.mkdir(os.path.join(src_dir, 'test_dir')) - write_file((src_dir, 'test_dir', 'test.txt'), '456') + create_file((src_dir, 'test_dir', 'test.txt'), '456') copied = [] def _copy(src, dst): @@ -1039,7 +1037,7 @@ def _copy(src, dst): def test_copytree_dangling_symlinks(self): src_dir = self.mkdtemp() valid_file = os.path.join(src_dir, 'test.txt') - write_file(valid_file, 'abc') + create_file(valid_file, 'abc') dir_a = os.path.join(src_dir, 'dir_a') os.mkdir(dir_a) for d in src_dir, dir_a: @@ -1067,8 +1065,7 @@ def test_copytree_symlink_dir(self): src_dir = self.mkdtemp() dst_dir = os.path.join(self.mkdtemp(), 'destination') os.mkdir(os.path.join(src_dir, 'real_dir')) - with open(os.path.join(src_dir, 'real_dir', 'test.txt'), 'wb'): - pass + create_file(os.path.join(src_dir, 'real_dir', 'test.txt')) os.symlink(os.path.join(src_dir, 'real_dir'), os.path.join(src_dir, 'link_to_dir'), target_is_directory=True) @@ -1088,7 +1085,7 @@ def test_copytree_return_value(self): dst_dir = src_dir + "dest" self.addCleanup(shutil.rmtree, dst_dir, True) src = os.path.join(src_dir, 'foo') - write_file(src, 'foo') + create_file(src, 'foo') rv = shutil.copytree(src_dir, dst_dir) self.assertEqual(['foo'], os.listdir(rv)) @@ -1100,7 +1097,7 @@ def test_copytree_subdirectory(self): dst_dir = os.path.join(src_dir, "somevendor", "1.0") os.makedirs(src_dir) src = os.path.join(src_dir, 'pol') - write_file(src, 'pol') + create_file(src, 'pol') rv = shutil.copytree(src_dir, dst_dir) self.assertEqual(['pol'], os.listdir(rv)) @@ -1115,8 +1112,8 @@ def test_copymode_follow_symlinks(self): dst = os.path.join(tmp_dir, 'bar') src_link = os.path.join(tmp_dir, 'baz') dst_link = os.path.join(tmp_dir, 'quux') - write_file(src, 'foo') - write_file(dst, 'foo') + create_file(src, 'foo') + create_file(dst, 'foo') os.symlink(src, src_link) os.symlink(dst, dst_link) os.chmod(src, stat.S_IRWXU|stat.S_IRWXG) @@ -1147,8 +1144,8 @@ def test_copymode_symlink_to_symlink(self): dst = os.path.join(tmp_dir, 'bar') src_link = os.path.join(tmp_dir, 'baz') dst_link = os.path.join(tmp_dir, 'quux') - write_file(src, 'foo') - write_file(dst, 'foo') + create_file(src, 'foo') + create_file(dst, 'foo') os.symlink(src, src_link) os.symlink(dst, dst_link) os.chmod(src, stat.S_IRWXU|stat.S_IRWXG) @@ -1178,8 +1175,8 @@ def test_copymode_symlink_to_symlink_wo_lchmod(self): dst = os.path.join(tmp_dir, 'bar') src_link = os.path.join(tmp_dir, 'baz') dst_link = 
os.path.join(tmp_dir, 'quux') - write_file(src, 'foo') - write_file(dst, 'foo') + create_file(src, 'foo') + create_file(dst, 'foo') os.symlink(src, src_link) os.symlink(dst, dst_link) shutil.copymode(src_link, dst_link, follow_symlinks=False) # silent fail @@ -1193,11 +1190,11 @@ def test_copystat_symlinks(self): dst = os.path.join(tmp_dir, 'bar') src_link = os.path.join(tmp_dir, 'baz') dst_link = os.path.join(tmp_dir, 'qux') - write_file(src, 'foo') + create_file(src, 'foo') src_stat = os.stat(src) os.utime(src, (src_stat.st_atime, src_stat.st_mtime - 42.0)) # ensure different mtimes - write_file(dst, 'bar') + create_file(dst, 'bar') self.assertNotEqual(os.stat(src).st_mtime, os.stat(dst).st_mtime) os.symlink(src, src_link) os.symlink(dst, dst_link) @@ -1235,8 +1232,8 @@ def test_copystat_handles_harmless_chflags_errors(self): tmpdir = self.mkdtemp() file1 = os.path.join(tmpdir, 'file1') file2 = os.path.join(tmpdir, 'file2') - write_file(file1, 'xxx') - write_file(file2, 'xxx') + create_file(file1, 'xxx') + create_file(file2, 'xxx') def make_chflags_raiser(err): ex = OSError() @@ -1262,9 +1259,9 @@ def _chflags_raiser(path, flags, *, follow_symlinks=True): def test_copyxattr(self): tmp_dir = self.mkdtemp() src = os.path.join(tmp_dir, 'foo') - write_file(src, 'foo') + create_file(src, 'foo') dst = os.path.join(tmp_dir, 'bar') - write_file(dst, 'bar') + create_file(dst, 'bar') # no xattr == no problem shutil._copyxattr(src, dst) @@ -1278,7 +1275,7 @@ def test_copyxattr(self): os.getxattr(dst, 'user.foo')) # check errors don't affect other attrs os.remove(dst) - write_file(dst, 'bar') + create_file(dst, 'bar') os_error = OSError(errno.EPERM, 'EPERM') def _raise_on_user_foo(fname, attr, val, **kwargs): @@ -1308,15 +1305,15 @@ def _raise_on_src(fname, *, follow_symlinks=True): # test that shutil.copystat copies xattrs src = os.path.join(tmp_dir, 'the_original') srcro = os.path.join(tmp_dir, 'the_original_ro') - write_file(src, src) - write_file(srcro, srcro) + create_file(src, src) + create_file(srcro, srcro) os.setxattr(src, 'user.the_value', b'fiddly') os.setxattr(srcro, 'user.the_value', b'fiddly') os.chmod(srcro, 0o444) dst = os.path.join(tmp_dir, 'the_copy') dstro = os.path.join(tmp_dir, 'the_copy_ro') - write_file(dst, dst) - write_file(dstro, dstro) + create_file(dst, dst) + create_file(dstro, dstro) shutil.copystat(src, dst) shutil.copystat(srcro, dstro) self.assertEqual(os.getxattr(dst, 'user.the_value'), b'fiddly') @@ -1332,13 +1329,13 @@ def test_copyxattr_symlinks(self): tmp_dir = self.mkdtemp() src = os.path.join(tmp_dir, 'foo') src_link = os.path.join(tmp_dir, 'baz') - write_file(src, 'foo') + create_file(src, 'foo') os.symlink(src, src_link) os.setxattr(src, 'trusted.foo', b'42') os.setxattr(src_link, 'trusted.foo', b'43', follow_symlinks=False) dst = os.path.join(tmp_dir, 'bar') dst_link = os.path.join(tmp_dir, 'qux') - write_file(dst, 'bar') + create_file(dst, 'bar') os.symlink(dst, dst_link) shutil._copyxattr(src_link, dst_link, follow_symlinks=False) self.assertEqual(os.getxattr(dst_link, 'trusted.foo', follow_symlinks=False), b'43') @@ -1351,7 +1348,7 @@ def test_copyxattr_symlinks(self): def _copy_file(self, method): fname = 'test.txt' tmpdir = self.mkdtemp() - write_file((tmpdir, fname), 'xxx') + create_file((tmpdir, fname), 'xxx') file1 = os.path.join(tmpdir, fname) tmpdir2 = self.mkdtemp() method(file1, tmpdir2) @@ -1370,7 +1367,7 @@ def test_copy_symlinks(self): src = os.path.join(tmp_dir, 'foo') dst = os.path.join(tmp_dir, 'bar') src_link = os.path.join(tmp_dir, 
'baz') - write_file(src, 'foo') + create_file(src, 'foo') os.symlink(src, src_link) if hasattr(os, 'lchmod'): os.lchmod(src_link, stat.S_IRWXU | stat.S_IRWXO) @@ -1412,7 +1409,7 @@ def test_copy2_symlinks(self): src = os.path.join(tmp_dir, 'foo') dst = os.path.join(tmp_dir, 'bar') src_link = os.path.join(tmp_dir, 'baz') - write_file(src, 'foo') + create_file(src, 'foo') os.symlink(src, src_link) if hasattr(os, 'lchmod'): os.lchmod(src_link, stat.S_IRWXU | stat.S_IRWXO) @@ -1446,7 +1443,7 @@ def test_copy2_xattr(self): tmp_dir = self.mkdtemp() src = os.path.join(tmp_dir, 'foo') dst = os.path.join(tmp_dir, 'bar') - write_file(src, 'foo') + create_file(src, 'foo') os.setxattr(src, 'user.foo', b'42') shutil.copy2(src, dst) self.assertEqual( @@ -1460,7 +1457,7 @@ def test_copy_return_value(self): src_dir = self.mkdtemp() dst_dir = self.mkdtemp() src = os.path.join(src_dir, 'foo') - write_file(src, 'foo') + create_file(src, 'foo') rv = fn(src, dst_dir) self.assertEqual(rv, os.path.join(dst_dir, 'foo')) rv = fn(src, os.path.join(dst_dir, 'bar')) @@ -1477,7 +1474,7 @@ def _test_copy_dir(self, copy_func): src_file = os.path.join(src_dir, 'foo') dir2 = self.mkdtemp() dst = os.path.join(src_dir, 'does_not_exist/') - write_file(src_file, 'foo') + create_file(src_file, 'foo') if sys.platform == "win32": err = PermissionError else: @@ -1497,7 +1494,7 @@ def test_copyfile_symlinks(self): dst = os.path.join(tmp_dir, 'dst') dst_link = os.path.join(tmp_dir, 'dst_link') link = os.path.join(tmp_dir, 'link') - write_file(src, 'foo') + create_file(src, 'foo') os.symlink(src, link) # don't follow shutil.copyfile(link, dst_link, follow_symlinks=False) @@ -1514,8 +1511,7 @@ def test_dont_copy_file_onto_link_to_itself(self): src = os.path.join(TESTFN, 'cheese') dst = os.path.join(TESTFN, 'shop') try: - with open(src, 'w', encoding='utf-8') as f: - f.write('cheddar') + create_file(src, 'cheddar') try: os.link(src, dst) except PermissionError as e: @@ -1534,8 +1530,7 @@ def test_dont_copy_file_onto_symlink_to_itself(self): src = os.path.join(TESTFN, 'cheese') dst = os.path.join(TESTFN, 'shop') try: - with open(src, 'w', encoding='utf-8') as f: - f.write('cheddar') + create_file(src, 'cheddar') # Using `src` here would mean we end up with a symlink pointing # to TESTFN/TESTFN/cheese, while it should point at # TESTFN/cheese. @@ -1570,7 +1565,7 @@ def test_copyfile_return_value(self): dst_dir = self.mkdtemp() dst_file = os.path.join(dst_dir, 'bar') src_file = os.path.join(src_dir, 'foo') - write_file(src_file, 'foo') + create_file(src_file, 'foo') rv = shutil.copyfile(src_file, dst_file) self.assertTrue(os.path.exists(rv)) self.assertEqual(read_file(src_file), read_file(dst_file)) @@ -1580,7 +1575,7 @@ def test_copyfile_same_file(self): # are the same. src_dir = self.mkdtemp() src_file = os.path.join(src_dir, 'foo') - write_file(src_file, 'foo') + create_file(src_file, 'foo') self.assertRaises(SameFileError, shutil.copyfile, src_file, src_file) # But Error should work too, to stay backward compatible. 
self.assertRaises(Error, shutil.copyfile, src_file, src_file) @@ -1597,7 +1592,7 @@ def test_copyfile_nonexistent_dir(self): src_dir = self.mkdtemp() src_file = os.path.join(src_dir, 'foo') dst = os.path.join(src_dir, 'does_not_exist/') - write_file(src_file, 'foo') + create_file(src_file, 'foo') self.assertRaises(FileNotFoundError, shutil.copyfile, src_file, dst) def test_copyfile_copy_dir(self): @@ -1608,7 +1603,7 @@ def test_copyfile_copy_dir(self): src_file = os.path.join(src_dir, 'foo') dir2 = self.mkdtemp() dst = os.path.join(src_dir, 'does_not_exist/') - write_file(src_file, 'foo') + create_file(src_file, 'foo') if sys.platform == "win32": err = PermissionError else: @@ -1634,13 +1629,13 @@ def _create_files(self, base_dir='dist'): root_dir = self.mkdtemp() dist = os.path.join(root_dir, base_dir) os.makedirs(dist, exist_ok=True) - write_file((dist, 'file1'), 'xxx') - write_file((dist, 'file2'), 'xxx') + create_file((dist, 'file1'), 'xxx') + create_file((dist, 'file2'), 'xxx') os.mkdir(os.path.join(dist, 'sub')) - write_file((dist, 'sub', 'file3'), 'xxx') + create_file((dist, 'sub', 'file3'), 'xxx') os.mkdir(os.path.join(dist, 'sub2')) if base_dir: - write_file((root_dir, 'outer'), 'xxx') + create_file((root_dir, 'outer'), 'xxx') return root_dir, base_dir @support.requires_zlib() @@ -2221,7 +2216,7 @@ def test_chown(self): dirname = self.mkdtemp() filename = tempfile.mktemp(dir=dirname) linkname = os.path.join(dirname, "chown_link") - write_file(filename, 'testing chown function') + create_file(filename, 'testing chown function') os.symlink(filename, linkname) with self.assertRaises(ValueError): @@ -2314,37 +2309,41 @@ def check_chown(path, uid=None, gid=None): class TestWhich(BaseTest, unittest.TestCase): def setUp(self): - self.temp_dir = self.mkdtemp(prefix="Tmp") + temp_dir = self.mkdtemp(prefix="Tmp") + base_dir = os.path.join(temp_dir, TESTFN + '-basedir') + os.mkdir(base_dir) + self.dir = os.path.join(base_dir, TESTFN + '-dir') + os.mkdir(self.dir) + self.other_dir = os.path.join(base_dir, TESTFN + '-dir2') + os.mkdir(self.other_dir) # Give the temp_file an ".exe" suffix for all. # It's needed on Windows and not harmful on other platforms. - self.temp_file = tempfile.NamedTemporaryFile(dir=self.temp_dir, - prefix="Tmp", - suffix=".Exe") - os.chmod(self.temp_file.name, stat.S_IXUSR) - self.addCleanup(self.temp_file.close) - self.dir, self.file = os.path.split(self.temp_file.name) + self.file = TESTFN + '.Exe' + self.filepath = os.path.join(self.dir, self.file) + self.create_file(self.filepath) self.env_path = self.dir self.curdir = os.curdir self.ext = ".EXE" - def to_text_type(self, s): - ''' - In this class we're testing with str, so convert s to a str - ''' - if isinstance(s, bytes): - return s.decode() - return s + to_text_type = staticmethod(os.fsdecode) + + def create_file(self, path): + create_file(path) + os.chmod(path, 0o755) + + def assertNormEqual(self, actual, expected): + self.assertEqual(os.path.normcase(actual), os.path.normcase(expected)) def test_basic(self): # Given an EXE in a directory, it should be returned. rv = shutil.which(self.file, path=self.dir) - self.assertEqual(rv, self.temp_file.name) + self.assertEqual(rv, self.filepath) def test_absolute_cmd(self): # When given the fully qualified path to an executable that exists, # it should be returned. 
- rv = shutil.which(self.temp_file.name, path=self.temp_dir) - self.assertEqual(rv, self.temp_file.name) + rv = shutil.which(self.filepath, path=self.other_dir) + self.assertEqual(rv, self.filepath) def test_relative_cmd(self): # When given the relative path with a directory part to an executable @@ -2352,7 +2351,7 @@ def test_relative_cmd(self): base_dir, tail_dir = os.path.split(self.dir) relpath = os.path.join(tail_dir, self.file) with os_helper.change_cwd(path=base_dir): - rv = shutil.which(relpath, path=self.temp_dir) + rv = shutil.which(relpath, path=self.other_dir) self.assertEqual(rv, relpath) # But it shouldn't be searched in PATH directories (issue #16957). with os_helper.change_cwd(path=self.dir): @@ -2363,9 +2362,8 @@ def test_relative_cmd(self): "test is for non win32") def test_cwd_non_win32(self): # Issue #16957 - base_dir = os.path.dirname(self.dir) with os_helper.change_cwd(path=self.dir): - rv = shutil.which(self.file, path=base_dir) + rv = shutil.which(self.file, path=self.other_dir) # non-win32: shouldn't match in the current directory. self.assertIsNone(rv) @@ -2375,57 +2373,32 @@ def test_cwd_win32(self): base_dir = os.path.dirname(self.dir) with os_helper.change_cwd(path=self.dir): with unittest.mock.patch('shutil._win_path_needs_curdir', return_value=True): - rv = shutil.which(self.file, path=base_dir) + rv = shutil.which(self.file, path=self.other_dir) # Current directory implicitly on PATH self.assertEqual(rv, os.path.join(self.curdir, self.file)) with unittest.mock.patch('shutil._win_path_needs_curdir', return_value=False): - rv = shutil.which(self.file, path=base_dir) + rv = shutil.which(self.file, path=self.other_dir) # Current directory not on PATH self.assertIsNone(rv) @unittest.skipUnless(sys.platform == "win32", "test is for win32") def test_cwd_win32_added_before_all_other_path(self): - base_dir = pathlib.Path(os.fsdecode(self.dir)) - - elsewhere_in_path_dir = base_dir / 'dir1' - elsewhere_in_path_dir.mkdir() - match_elsewhere_in_path = elsewhere_in_path_dir / 'hello.exe' - match_elsewhere_in_path.touch() - - exe_in_cwd = base_dir / 'hello.exe' - exe_in_cwd.touch() - - with os_helper.change_cwd(path=base_dir): - with unittest.mock.patch('shutil._win_path_needs_curdir', return_value=True): - rv = shutil.which('hello.exe', path=elsewhere_in_path_dir) - - self.assertEqual(os.path.abspath(rv), os.path.abspath(exe_in_cwd)) - - @unittest.skipUnless(sys.platform == "win32", - "test is for win32") - def test_pathext_match_before_path_full_match(self): - base_dir = pathlib.Path(os.fsdecode(self.dir)) - dir1 = base_dir / 'dir1' - dir2 = base_dir / 'dir2' - dir1.mkdir() - dir2.mkdir() - - pathext_match = dir1 / 'hello.com.exe' - path_match = dir2 / 'hello.com' - pathext_match.touch() - path_match.touch() - - test_path = os.pathsep.join([str(dir1), str(dir2)]) - assert os.path.basename(shutil.which( - 'hello.com', path=test_path, mode = os.F_OK - )).lower() == 'hello.com.exe' + other_file_path = os.path.join(self.other_dir, self.file) + self.create_file(other_file_path) + with unittest.mock.patch('shutil._win_path_needs_curdir', return_value=True): + with os_helper.change_cwd(path=self.dir): + rv = shutil.which(self.file, path=self.other_dir) + self.assertEqual(rv, os.path.join(self.curdir, self.file)) + with os_helper.change_cwd(path=self.other_dir): + rv = shutil.which(self.file, path=self.dir) + self.assertEqual(rv, os.path.join(self.curdir, self.file)) @os_helper.skip_if_dac_override def test_non_matching_mode(self): # Set the file read-only and ask for writeable 
files. - os.chmod(self.temp_file.name, stat.S_IREAD) - if os.access(self.temp_file.name, os.W_OK): + os.chmod(self.filepath, stat.S_IREAD) + if os.access(self.filepath, os.W_OK): self.skipTest("can't set the file read-only") rv = shutil.which(self.file, path=self.dir, mode=os.W_OK) self.assertIsNone(rv) @@ -2447,13 +2420,13 @@ def test_pathext_checking(self): # Ask for the file without the ".exe" extension, then ensure that # it gets found properly with the extension. rv = shutil.which(self.file[:-4], path=self.dir) - self.assertEqual(rv, self.temp_file.name[:-4] + self.ext) + self.assertEqual(rv, self.filepath[:-4] + self.ext) def test_environ_path(self): with os_helper.EnvironmentVarGuard() as env: env['PATH'] = self.env_path rv = shutil.which(self.file) - self.assertEqual(rv, self.temp_file.name) + self.assertEqual(rv, self.filepath) def test_environ_path_empty(self): # PATH='': no match @@ -2467,12 +2440,9 @@ def test_environ_path_empty(self): self.assertIsNone(rv) def test_environ_path_cwd(self): - expected_cwd = os.path.basename(self.temp_file.name) + expected_cwd = self.file if sys.platform == "win32": - curdir = os.curdir - if isinstance(expected_cwd, bytes): - curdir = os.fsencode(curdir) - expected_cwd = os.path.join(curdir, expected_cwd) + expected_cwd = os.path.join(self.curdir, expected_cwd) # PATH=':': explicitly looks in the current directory with os_helper.EnvironmentVarGuard() as env: @@ -2497,14 +2467,14 @@ def test_environ_path_missing(self): create=True), \ support.swap_attr(os, 'defpath', self.dir): rv = shutil.which(self.file) - self.assertEqual(rv, self.temp_file.name) + self.assertEqual(rv, self.filepath) # with confstr with unittest.mock.patch('os.confstr', return_value=self.dir, \ create=True), \ support.swap_attr(os, 'defpath', ''): rv = shutil.which(self.file) - self.assertEqual(rv, self.temp_file.name) + self.assertEqual(rv, self.filepath) def test_empty_path(self): base_dir = os.path.dirname(self.dir) @@ -2522,50 +2492,88 @@ def test_empty_path_no_PATH(self): @unittest.skipUnless(sys.platform == "win32", 'test specific to Windows') def test_pathext(self): - ext = self.to_text_type(".xyz") - temp_filexyz = tempfile.NamedTemporaryFile(dir=self.temp_dir, - prefix=self.to_text_type("Tmp2"), suffix=ext) - os.chmod(temp_filexyz.name, stat.S_IXUSR) - self.addCleanup(temp_filexyz.close) - - # strip path and extension - program = os.path.basename(temp_filexyz.name) - program = os.path.splitext(program)[0] - + ext = '.xyz' + cmd = self.to_text_type(TESTFN2) + cmdext = cmd + self.to_text_type(ext) + filepath = os.path.join(self.dir, cmdext) + self.create_file(filepath) with os_helper.EnvironmentVarGuard() as env: - env['PATHEXT'] = ext if isinstance(ext, str) else ext.decode() - rv = shutil.which(program, path=self.temp_dir) - self.assertEqual(rv, temp_filexyz.name) + env['PATHEXT'] = ext + self.assertEqual(shutil.which(cmd, path=self.dir), filepath) + self.assertEqual(shutil.which(cmdext, path=self.dir), filepath) # Issue 40592: See https://bugs.python.org/issue40592 @unittest.skipUnless(sys.platform == "win32", 'test specific to Windows') def test_pathext_with_empty_str(self): - ext = self.to_text_type(".xyz") - temp_filexyz = tempfile.NamedTemporaryFile(dir=self.temp_dir, - prefix=self.to_text_type("Tmp2"), suffix=ext) - self.addCleanup(temp_filexyz.close) + ext = '.xyz' + cmd = self.to_text_type(TESTFN2) + cmdext = cmd + self.to_text_type(ext) + filepath = os.path.join(self.dir, cmdext) + self.create_file(filepath) + with os_helper.EnvironmentVarGuard() as env: + 
env['PATHEXT'] = ext + ';' # note the ; + self.assertEqual(shutil.which(cmd, path=self.dir), filepath) + self.assertEqual(shutil.which(cmdext, path=self.dir), filepath) - # strip path and extension - program = os.path.basename(temp_filexyz.name) - program = os.path.splitext(program)[0] + @unittest.skipUnless(sys.platform == "win32", 'test specific to Windows') + def test_pathext_with_multidot_extension(self): + ext = '.foo.bar' + cmd = self.to_text_type(TESTFN2) + cmdext = cmd + self.to_text_type(ext) + filepath = os.path.join(self.dir, cmdext) + self.create_file(filepath) + with os_helper.EnvironmentVarGuard() as env: + env['PATHEXT'] = ext + self.assertEqual(shutil.which(cmd, path=self.dir), filepath) + self.assertEqual(shutil.which(cmdext, path=self.dir), filepath) + @unittest.skipUnless(sys.platform == "win32", 'test specific to Windows') + def test_pathext_with_null_extension(self): + cmd = self.to_text_type(TESTFN2) + cmddot = cmd + self.to_text_type('.') + filepath = os.path.join(self.dir, cmd) + self.create_file(filepath) with os_helper.EnvironmentVarGuard() as env: - env['PATHEXT'] = f"{ext if isinstance(ext, str) else ext.decode()};" # note the ; - rv = shutil.which(program, path=self.temp_dir) - self.assertEqual(rv, temp_filexyz.name) + env['PATHEXT'] = '.xyz' + self.assertIsNone(shutil.which(cmd, path=self.dir)) + self.assertIsNone(shutil.which(cmddot, path=self.dir)) + env['PATHEXT'] = '.xyz;.' # note the . + self.assertEqual(shutil.which(cmd, path=self.dir), filepath) + self.assertEqual(shutil.which(cmddot, path=self.dir), + filepath + self.to_text_type('.')) + env['PATHEXT'] = '.xyz;..' # multiple dots + self.assertEqual(shutil.which(cmd, path=self.dir), filepath) + self.assertEqual(shutil.which(cmddot, path=self.dir), + filepath + self.to_text_type('.')) + + @unittest.skipUnless(sys.platform == "win32", 'test specific to Windows') + def test_pathext_extension_ends_with_dot(self): + ext = '.xyz' + cmd = self.to_text_type(TESTFN2) + cmdext = cmd + self.to_text_type(ext) + dot = self.to_text_type('.') + filepath = os.path.join(self.dir, cmdext) + self.create_file(filepath) + with os_helper.EnvironmentVarGuard() as env: + env['PATHEXT'] = ext + '.' 
+ self.assertEqual(shutil.which(cmd, path=self.dir), filepath) # cmd.exe hangs here + self.assertEqual(shutil.which(cmdext, path=self.dir), filepath) + self.assertIsNone(shutil.which(cmd + dot, path=self.dir)) + self.assertIsNone(shutil.which(cmdext + dot, path=self.dir)) # See GH-75586 @unittest.skipUnless(sys.platform == "win32", 'test specific to Windows') def test_pathext_applied_on_files_in_path(self): + ext = '.xyz' + cmd = self.to_text_type(TESTFN2) + cmdext = cmd + self.to_text_type(ext) + filepath = os.path.join(self.dir, cmdext) + self.create_file(filepath) with os_helper.EnvironmentVarGuard() as env: - env["PATH"] = self.temp_dir if isinstance(self.temp_dir, str) else self.temp_dir.decode() - env["PATHEXT"] = ".test" - - test_path = os.path.join(self.temp_dir, self.to_text_type("test_program.test")) - open(test_path, 'w').close() - os.chmod(test_path, 0o755) - - self.assertEqual(shutil.which(self.to_text_type("test_program")), test_path) + env["PATH"] = os.fsdecode(self.dir) + env["PATHEXT"] = ext + self.assertEqual(shutil.which(cmd), filepath) + self.assertEqual(shutil.which(cmdext), filepath) # See GH-75586 @unittest.skipUnless(sys.platform == "win32", 'test specific to Windows') @@ -2581,49 +2589,107 @@ def test_win_path_needs_curdir(self): self.assertFalse(shutil._win_path_needs_curdir('dontcare', os.X_OK)) need_curdir_mock.assert_called_once_with('dontcare') - # See GH-109590 @unittest.skipUnless(sys.platform == "win32", 'test specific to Windows') - def test_pathext_preferred_for_execute(self): - with os_helper.EnvironmentVarGuard() as env: - env["PATH"] = self.temp_dir if isinstance(self.temp_dir, str) else self.temp_dir.decode() - env["PATHEXT"] = ".test" - - exe = os.path.join(self.temp_dir, self.to_text_type("test.exe")) - open(exe, 'w').close() - os.chmod(exe, 0o755) + def test_same_dir_with_pathext_extension(self): + cmd = self.file # with .exe extension + # full match + self.assertNormEqual(shutil.which(cmd, path=self.dir), self.filepath) + self.assertNormEqual(shutil.which(cmd, path=self.dir, mode=os.F_OK), + self.filepath) + + cmd2 = cmd + self.to_text_type('.com') # with .exe.com extension + other_file_path = os.path.join(self.dir, cmd2) + self.create_file(other_file_path) + + # full match + self.assertNormEqual(shutil.which(cmd, path=self.dir), self.filepath) + self.assertNormEqual(shutil.which(cmd, path=self.dir, mode=os.F_OK), + self.filepath) + self.assertNormEqual(shutil.which(cmd2, path=self.dir), other_file_path) + self.assertNormEqual(shutil.which(cmd2, path=self.dir, mode=os.F_OK), + other_file_path) - # default behavior allows a direct match if nothing in PATHEXT matches - self.assertEqual(shutil.which(self.to_text_type("test.exe")), exe) - - dot_test = os.path.join(self.temp_dir, self.to_text_type("test.exe.test")) - open(dot_test, 'w').close() - os.chmod(dot_test, 0o755) - - # now we have a PATHEXT match, so it take precedence - self.assertEqual(shutil.which(self.to_text_type("test.exe")), dot_test) - - # but if we don't use os.X_OK we don't change the order based off PATHEXT - # and therefore get the direct match. 
- self.assertEqual(shutil.which(self.to_text_type("test.exe"), mode=os.F_OK), exe) - - # See GH-109590 @unittest.skipUnless(sys.platform == "win32", 'test specific to Windows') - def test_pathext_given_extension_preferred(self): - with os_helper.EnvironmentVarGuard() as env: - env["PATH"] = self.temp_dir if isinstance(self.temp_dir, str) else self.temp_dir.decode() - env["PATHEXT"] = ".exe2;.exe" + def test_same_dir_without_pathext_extension(self): + cmd = self.file[:-4] # without .exe extension + # pathext match + self.assertNormEqual(shutil.which(cmd, path=self.dir), self.filepath) + self.assertNormEqual(shutil.which(cmd, path=self.dir, mode=os.F_OK), + self.filepath) + + # without extension + other_file_path = os.path.join(self.dir, cmd) + self.create_file(other_file_path) + + # pathext match if mode contains X_OK + self.assertNormEqual(shutil.which(cmd, path=self.dir), self.filepath) + # full match + self.assertNormEqual(shutil.which(cmd, path=self.dir, mode=os.F_OK), + other_file_path) + self.assertNormEqual(shutil.which(self.file, path=self.dir), self.filepath) + self.assertNormEqual(shutil.which(self.file, path=self.dir, mode=os.F_OK), + self.filepath) - exe = os.path.join(self.temp_dir, self.to_text_type("test.exe")) - open(exe, 'w').close() - os.chmod(exe, 0o755) - - exe2 = os.path.join(self.temp_dir, self.to_text_type("test.exe2")) - open(exe2, 'w').close() - os.chmod(exe2, 0o755) + @unittest.skipUnless(sys.platform == "win32", 'test specific to Windows') + def test_dir_order_with_pathext_extension(self): + cmd = self.file # with .exe extension + search_path = os.pathsep.join([os.fsdecode(self.other_dir), + os.fsdecode(self.dir)]) + # full match in the second directory + self.assertNormEqual(shutil.which(cmd, path=search_path), self.filepath) + self.assertNormEqual(shutil.which(cmd, path=search_path, mode=os.F_OK), + self.filepath) + + cmd2 = cmd + self.to_text_type('.com') # with .exe.com extension + other_file_path = os.path.join(self.other_dir, cmd2) + self.create_file(other_file_path) + + # pathext match in the first directory + self.assertNormEqual(shutil.which(cmd, path=search_path), other_file_path) + self.assertNormEqual(shutil.which(cmd, path=search_path, mode=os.F_OK), + other_file_path) + # full match in the first directory + self.assertNormEqual(shutil.which(cmd2, path=search_path), other_file_path) + self.assertNormEqual(shutil.which(cmd2, path=search_path, mode=os.F_OK), + other_file_path) + + # full match in the first directory + search_path = os.pathsep.join([os.fsdecode(self.dir), + os.fsdecode(self.other_dir)]) + self.assertEqual(shutil.which(cmd, path=search_path), self.filepath) + self.assertEqual(shutil.which(cmd, path=search_path, mode=os.F_OK), + self.filepath) - # even though .exe2 is preferred in PATHEXT, we matched directly to test.exe - self.assertEqual(shutil.which(self.to_text_type("test.exe")), exe) - self.assertEqual(shutil.which(self.to_text_type("test")), exe2) + @unittest.skipUnless(sys.platform == "win32", 'test specific to Windows') + def test_dir_order_without_pathext_extension(self): + cmd = self.file[:-4] # without .exe extension + search_path = os.pathsep.join([os.fsdecode(self.other_dir), + os.fsdecode(self.dir)]) + # pathext match in the second directory + self.assertNormEqual(shutil.which(cmd, path=search_path), self.filepath) + self.assertNormEqual(shutil.which(cmd, path=search_path, mode=os.F_OK), + self.filepath) + + # without extension + other_file_path = os.path.join(self.other_dir, cmd) + self.create_file(other_file_path) + + # 
pathext match in the second directory + self.assertNormEqual(shutil.which(cmd, path=search_path), self.filepath) + # full match in the first directory + self.assertNormEqual(shutil.which(cmd, path=search_path, mode=os.F_OK), + other_file_path) + # full match in the second directory + self.assertNormEqual(shutil.which(self.file, path=search_path), self.filepath) + self.assertNormEqual(shutil.which(self.file, path=search_path, mode=os.F_OK), + self.filepath) + + # pathext match in the first directory + search_path = os.pathsep.join([os.fsdecode(self.dir), + os.fsdecode(self.other_dir)]) + self.assertNormEqual(shutil.which(cmd, path=search_path), self.filepath) + self.assertNormEqual(shutil.which(cmd, path=search_path, mode=os.F_OK), + self.filepath) class TestWhichBytes(TestWhich): @@ -2631,18 +2697,12 @@ def setUp(self): TestWhich.setUp(self) self.dir = os.fsencode(self.dir) self.file = os.fsencode(self.file) - self.temp_file.name = os.fsencode(self.temp_file.name) - self.temp_dir = os.fsencode(self.temp_dir) + self.filepath = os.fsencode(self.filepath) + self.other_dir = os.fsencode(self.other_dir) self.curdir = os.fsencode(self.curdir) self.ext = os.fsencode(self.ext) - def to_text_type(self, s): - ''' - In this class we're testing with bytes, so convert s to a bytes - ''' - if isinstance(s, str): - return s.encode() - return s + to_text_type = staticmethod(os.fsencode) class TestMove(BaseTest, unittest.TestCase): @@ -2653,8 +2713,7 @@ def setUp(self): self.dst_dir = self.mkdtemp() self.src_file = os.path.join(self.src_dir, filename) self.dst_file = os.path.join(self.dst_dir, filename) - with open(self.src_file, "wb") as f: - f.write(b"spam") + create_file(self.src_file, b"spam") def _check_move_file(self, src, dst, real_dst): with open(src, "rb") as f: @@ -2732,8 +2791,7 @@ def test_move_dir_altsep_to_dir(self): def test_existing_file_inside_dest_dir(self): # A file with the same name inside the destination dir already exists. - with open(self.dst_file, "wb"): - pass + create_file(self.dst_file) self.assertRaises(shutil.Error, shutil.move, self.src_file, self.dst_dir) def test_dont_move_dir_in_itself(self): @@ -3148,8 +3206,7 @@ def test_empty_file(self): dstname = TESTFN + 'dst' self.addCleanup(lambda: os_helper.unlink(srcname)) self.addCleanup(lambda: os_helper.unlink(dstname)) - with open(srcname, "wb"): - pass + create_file(srcname) with open(srcname, "rb") as src: with open(dstname, "wb") as dst: @@ -3272,7 +3329,7 @@ def test_blocksize_arg(self): self.assertEqual(blocksize, os.path.getsize(TESTFN)) # ...unless we're dealing with a small file. os_helper.unlink(TESTFN2) - write_file(TESTFN2, b"hello", binary=True) + create_file(TESTFN2, b"hello") self.addCleanup(os_helper.unlink, TESTFN2 + '3') self.assertRaises(ZeroDivisionError, shutil.copyfile, TESTFN2, TESTFN2 + '3') diff --git a/Lib/test/test_socket.py b/Lib/test/test_socket.py index dc8a4a44f37e65..7b3914f30e5f52 100644 --- a/Lib/test/test_socket.py +++ b/Lib/test/test_socket.py @@ -5132,6 +5132,33 @@ def _testRecv(self): # send data: recv() will no longer block self.cli.sendall(MSG) + def testLargeTimeout(self): + # gh-126876: Check that a timeout larger than INT_MAX is replaced with + # INT_MAX in the poll() code path. The following assertion must not + # fail: assert(INT_MIN <= ms && ms <= INT_MAX). 
+ large_timeout = _testcapi.INT_MAX + 1 + + # test recv() with large timeout + conn, addr = self.serv.accept() + self.addCleanup(conn.close) + try: + conn.settimeout(large_timeout) + except OverflowError: + # On Windows, settimeout() fails with OverflowError, whereas + # we want to test recv(). Just give up silently. + return + msg = conn.recv(len(MSG)) + + def _testLargeTimeout(self): + # test sendall() with large timeout + large_timeout = _testcapi.INT_MAX + 1 + self.cli.connect((HOST, self.port)) + try: + self.cli.settimeout(large_timeout) + except OverflowError: + return + self.cli.sendall(MSG) + class FileObjectClassTestCase(SocketConnectedTest): """Unit tests for the object returned by socket.makefile() diff --git a/Lib/test/test_sysconfig.py b/Lib/test/test_sysconfig.py index 1ade49281b4e26..0df1a67ea2b720 100644 --- a/Lib/test/test_sysconfig.py +++ b/Lib/test/test_sysconfig.py @@ -5,10 +5,13 @@ import os import subprocess import shutil +import json +import textwrap from copy import copy from test.support import ( captured_stdout, + is_android, is_apple_mobile, is_wasi, PythonSymlink, @@ -17,13 +20,15 @@ from test.support.import_helper import import_module from test.support.os_helper import (TESTFN, unlink, skip_unless_symlink, change_cwd) +from test.support.venv import VirtualEnvironment import sysconfig from sysconfig import (get_paths, get_platform, get_config_vars, get_path, get_path_names, _INSTALL_SCHEMES, get_default_scheme, get_scheme_names, get_config_var, - _expand_vars, _get_preferred_schemes) -from sysconfig.__main__ import _main, _parse_makefile + _expand_vars, _get_preferred_schemes, + is_python_build, _PROJECT_BASE) +from sysconfig.__main__ import _main, _parse_makefile, _get_pybuilddir, _get_json_data_name import _imp import _osx_support import _sysconfig @@ -36,6 +41,7 @@ class TestSysConfig(unittest.TestCase): def setUp(self): super(TestSysConfig, self).setUp() + self.maxDiff = None self.sys_path = sys.path[:] # patching os.uname if hasattr(os, 'uname'): @@ -101,6 +107,13 @@ def _cleanup_testfn(self): elif os.path.isdir(path): shutil.rmtree(path) + def venv(self, **venv_create_args): + return VirtualEnvironment.from_tmpdir( + prefix=f'{self.id()}-venv-', + **venv_create_args, + ) + + def test_get_path_names(self): self.assertEqual(get_path_names(), sysconfig._SCHEME_KEYS) @@ -582,6 +595,65 @@ def test_osx_ext_suffix(self): suffix = sysconfig.get_config_var('EXT_SUFFIX') self.assertTrue(suffix.endswith('-darwin.so'), suffix) + @requires_subprocess() + def test_makefile_overwrites_config_vars(self): + script = textwrap.dedent(""" + import sys, sysconfig + + data = { + 'prefix': sys.prefix, + 'exec_prefix': sys.exec_prefix, + 'base_prefix': sys.base_prefix, + 'base_exec_prefix': sys.base_exec_prefix, + 'config_vars': sysconfig.get_config_vars(), + } + + import json + print(json.dumps(data, indent=2)) + """) + + # We need to run the test inside a virtual environment so that + # sys.prefix/sys.exec_prefix have a different value from the + # prefix/exec_prefix Makefile variables. + with self.venv() as venv: + data = json.loads(venv.run('-c', script).stdout) + + # We expect sysconfig.get_config_vars to correctly reflect sys.prefix/sys.exec_prefix + self.assertEqual(data['prefix'], data['config_vars']['prefix']) + self.assertEqual(data['exec_prefix'], data['config_vars']['exec_prefix']) + # As a sanity check, just make sure sys.prefix/sys.exec_prefix really + # are different from the Makefile values. 
+ # sys.base_prefix/sys.base_exec_prefix should reflect the value of the + # prefix/exec_prefix Makefile variables, so we use them in the comparison. + self.assertNotEqual(data['prefix'], data['base_prefix']) + self.assertNotEqual(data['exec_prefix'], data['base_exec_prefix']) + + @unittest.skipIf(os.name != 'posix', '_sysconfig-vars JSON file is only available on POSIX') + @unittest.skipIf(is_wasi, "_sysconfig-vars JSON file currently isn't available on WASI") + @unittest.skipIf(is_android or is_apple_mobile, 'Android and iOS change the prefix') + def test_sysconfigdata_json(self): + if '_PYTHON_SYSCONFIGDATA_PATH' in os.environ: + data_dir = os.environ['_PYTHON_SYSCONFIGDATA_PATH'] + elif is_python_build(): + data_dir = os.path.join(_PROJECT_BASE, _get_pybuilddir()) + else: + data_dir = sys._stdlib_dir + + json_data_path = os.path.join(data_dir, _get_json_data_name()) + + with open(json_data_path) as f: + json_config_vars = json.load(f) + + system_config_vars = get_config_vars() + + # Ignore keys in the check + for key in ('projectbase', 'srcdir'): + json_config_vars.pop(key) + system_config_vars.pop(key) + + self.assertEqual(system_config_vars, json_config_vars) + + class MakefileTests(unittest.TestCase): @unittest.skipIf(sys.platform.startswith('win'), diff --git a/Lib/test/test_threading.py b/Lib/test/test_threading.py index 3ca5f5ce1b7068..b666533466e578 100644 --- a/Lib/test/test_threading.py +++ b/Lib/test/test_threading.py @@ -1171,6 +1171,40 @@ def __del__(self): self.assertEqual(out.strip(), b"OK") self.assertIn(b"can't create new thread at interpreter shutdown", err) + def test_start_new_thread_failed(self): + # gh-109746: if Python fails to start newly created thread + # due to failure of underlying PyThread_start_new_thread() call, + # its state should be removed from interpreter' thread states list + # to avoid its double cleanup + try: + from resource import setrlimit, RLIMIT_NPROC + except ImportError as err: + self.skipTest(err) # RLIMIT_NPROC is specific to Linux and BSD + code = """if 1: + import resource + import _thread + + def f(): + print("shouldn't be printed") + + limits = resource.getrlimit(resource.RLIMIT_NPROC) + [_, hard] = limits + resource.setrlimit(resource.RLIMIT_NPROC, (0, hard)) + + try: + _thread.start_new_thread(f, ()) + except RuntimeError: + print('ok') + else: + print('!skip!') + """ + _, out, err = assert_python_ok("-u", "-c", code) + out = out.strip() + if b'!skip!' 
in out: + self.skipTest('RLIMIT_NPROC had no effect; probably superuser') + self.assertEqual(out, b'ok') + self.assertEqual(err, b'') + @cpython_only def test_finalize_daemon_thread_hang(self): if support.check_sanitizer(thread=True, memory=True): diff --git a/Lib/test/test_threading_local.py b/Lib/test/test_threading_local.py index f0b829a978feb5..3a58afd8194a32 100644 --- a/Lib/test/test_threading_local.py +++ b/Lib/test/test_threading_local.py @@ -208,6 +208,21 @@ def test_threading_local_clear_race(self): _testcapi.join_temporary_c_thread() + @support.cpython_only + def test_error(self): + class Loop(self._local): + attr = 1 + + # Trick the "if name == '__dict__':" test of __setattr__() + # to always be true + class NameCompareTrue: + def __eq__(self, other): + return True + + loop = Loop() + with self.assertRaisesRegex(AttributeError, 'Loop.*read-only'): + loop.__setattr__(NameCompareTrue(), 2) + class ThreadLocalTest(unittest.TestCase, BaseLocalTest): _local = _thread._local diff --git a/Lib/test/test_tools/i18n_data/messages.pot b/Lib/test/test_tools/i18n_data/messages.pot index ddfbd18349ef4f..8d66fbc4f3a937 100644 --- a/Lib/test/test_tools/i18n_data/messages.pot +++ b/Lib/test/test_tools/i18n_data/messages.pot @@ -15,53 +15,75 @@ msgstr "" "Generated-By: pygettext.py 1.5\n" -#: messages.py:5 +#: messages.py:16 msgid "" msgstr "" -#: messages.py:8 messages.py:9 +#: messages.py:19 messages.py:20 msgid "parentheses" msgstr "" -#: messages.py:12 +#: messages.py:23 msgid "Hello, world!" msgstr "" -#: messages.py:15 +#: messages.py:26 msgid "" "Hello,\n" " multiline!\n" msgstr "" -#: messages.py:29 +#: messages.py:46 messages.py:89 messages.py:90 messages.py:93 messages.py:94 +#: messages.py:99 +msgid "foo" +msgid_plural "foos" +msgstr[0] "" +msgstr[1] "" + +#: messages.py:47 +msgid "something" +msgstr "" + +#: messages.py:50 msgid "Hello, {}!" 
msgstr "" -#: messages.py:33 +#: messages.py:54 msgid "1" msgstr "" -#: messages.py:33 +#: messages.py:54 msgid "2" msgstr "" -#: messages.py:34 messages.py:35 +#: messages.py:55 messages.py:56 msgid "A" msgstr "" -#: messages.py:34 messages.py:35 +#: messages.py:55 messages.py:56 msgid "B" msgstr "" -#: messages.py:36 +#: messages.py:57 msgid "set" msgstr "" -#: messages.py:42 +#: messages.py:63 msgid "nested string" msgstr "" -#: messages.py:47 +#: messages.py:68 msgid "baz" msgstr "" +#: messages.py:91 messages.py:92 messages.py:95 messages.py:96 +msgctxt "context" +msgid "foo" +msgid_plural "foos" +msgstr[0] "" +msgstr[1] "" + +#: messages.py:100 +msgid "domain foo" +msgstr "" + diff --git a/Lib/test/test_tools/i18n_data/messages.py b/Lib/test/test_tools/i18n_data/messages.py index f220294b8d5c67..1e03f4e556830d 100644 --- a/Lib/test/test_tools/i18n_data/messages.py +++ b/Lib/test/test_tools/i18n_data/messages.py @@ -1,5 +1,16 @@ # Test message extraction -from gettext import gettext as _ +from gettext import ( + gettext, + ngettext, + pgettext, + npgettext, + dgettext, + dngettext, + dpgettext, + dnpgettext +) + +_ = gettext # Empty string _("") @@ -21,13 +32,23 @@ _(None) _(1) _(False) -_(x="kwargs are not allowed") +_(("invalid")) +_(["invalid"]) +_({"invalid"}) +_("string"[3]) +_("string"[:3]) +_({"string": "foo"}) + +# pygettext does not allow keyword arguments, but both xgettext and pybabel do +_(x="kwargs work!") + +# Unusual, but valid arguments _("foo", "bar") _("something", x="something else") # .format() _("Hello, {}!").format("world") # valid -_("Hello, {}!".format("world")) # invalid +_("Hello, {}!".format("world")) # invalid, but xgettext and pybabel extract the first string # Nested structures _("1"), _("2") @@ -62,3 +83,28 @@ def _(x): def _(x="don't extract me"): pass + + +# Other gettext functions +gettext("foo") +ngettext("foo", "foos", 1) +pgettext("context", "foo") +npgettext("context", "foo", "foos", 1) +dgettext("domain", "foo") +dngettext("domain", "foo", "foos", 1) +dpgettext("domain", "context", "foo") +dnpgettext("domain", "context", "foo", "foos", 1) + +# Complex arguments +ngettext("foo", "foos", 42 + (10 - 20)) +dgettext(["some", {"complex"}, ("argument",)], "domain foo") + +# Invalid calls which are not extracted +gettext() +ngettext('foo') +pgettext('context') +npgettext('context', 'foo') +dgettext('domain') +dngettext('domain', 'foo') +dpgettext('domain', 'context') +dnpgettext('domain', 'context', 'foo') diff --git a/Lib/test/test_tools/test_i18n.py b/Lib/test/test_tools/test_i18n.py index 6f71f0976819f1..29c3423e234d20 100644 --- a/Lib/test/test_tools/test_i18n.py +++ b/Lib/test/test_tools/test_i18n.py @@ -332,14 +332,14 @@ def test_calls_in_fstring_with_multiple_args(self): msgids = self.extract_docstrings_from_str(dedent('''\ f"{_('foo', 'bar')}" ''')) - self.assertNotIn('foo', msgids) + self.assertIn('foo', msgids) self.assertNotIn('bar', msgids) def test_calls_in_fstring_with_keyword_args(self): msgids = self.extract_docstrings_from_str(dedent('''\ f"{_('foo', bar='baz')}" ''')) - self.assertNotIn('foo', msgids) + self.assertIn('foo', msgids) self.assertNotIn('bar', msgids) self.assertNotIn('baz', msgids) diff --git a/Lib/test/test_type_annotations.py b/Lib/test/test_type_annotations.py index 257b7fa95dcb76..7d88f4cdfa3141 100644 --- a/Lib/test/test_type_annotations.py +++ b/Lib/test/test_type_annotations.py @@ -316,7 +316,7 @@ def test_module(self): ns = run_code("x: undefined = 1") anno = ns["__annotate__"] with 
self.assertRaises(NotImplementedError): - anno(2) + anno(3) with self.assertRaises(NameError): anno(1) @@ -376,7 +376,7 @@ class X: annotate(annotationlib.Format.FORWARDREF) with self.assertRaises(NotImplementedError): annotate(annotationlib.Format.STRING) - with self.assertRaises(NotImplementedError): + with self.assertRaises(TypeError): annotate(None) self.assertEqual(annotate(annotationlib.Format.VALUE), {"x": int}) diff --git a/Lib/test/test_urllib.py b/Lib/test/test_urllib.py index 2c53ce3f99e675..00e46990c406ac 100644 --- a/Lib/test/test_urllib.py +++ b/Lib/test/test_urllib.py @@ -7,11 +7,9 @@ import email.message import io import unittest -from unittest.mock import patch from test import support from test.support import os_helper from test.support import socket_helper -from test.support import warnings_helper import os try: import ssl @@ -20,7 +18,6 @@ import sys import tempfile -from base64 import b64encode import collections @@ -35,32 +32,6 @@ def hexescape(char): hex_repr = "0%s" % hex_repr return "%" + hex_repr -# Shortcut for testing FancyURLopener -_urlopener = None - - -def urlopen(url, data=None, proxies=None): - """urlopen(url [, data]) -> open file-like object""" - global _urlopener - if proxies is not None: - opener = urllib.request.FancyURLopener(proxies=proxies) - elif not _urlopener: - opener = FancyURLopener() - _urlopener = opener - else: - opener = _urlopener - if data is None: - return opener.open(url) - else: - return opener.open(url, data) - - -def FancyURLopener(): - with warnings_helper.check_warnings( - ('FancyURLopener style of invoking requests is deprecated.', - DeprecationWarning)): - return urllib.request.FancyURLopener() - def fakehttp(fakedata, mock_close=False): class FakeSocket(io.BytesIO): @@ -119,26 +90,6 @@ def unfakehttp(self): http.client.HTTPConnection = self._connection_class -class FakeFTPMixin(object): - def fakeftp(self): - class FakeFtpWrapper(object): - def __init__(self, user, passwd, host, port, dirs, timeout=None, - persistent=True): - pass - - def retrfile(self, file, type): - return io.BytesIO(), 0 - - def close(self): - pass - - self._ftpwrapper_class = urllib.request.ftpwrapper - urllib.request.ftpwrapper = FakeFtpWrapper - - def unfakeftp(self): - urllib.request.ftpwrapper = self._ftpwrapper_class - - class urlopen_FileTests(unittest.TestCase): """Test urlopen() opening a temporary file. 
@@ -158,7 +109,7 @@ def setUp(self): f.close() self.pathname = os_helper.TESTFN self.quoted_pathname = urllib.parse.quote(self.pathname) - self.returned_obj = urlopen("file:%s" % self.quoted_pathname) + self.returned_obj = urllib.request.urlopen("file:%s" % self.quoted_pathname) def tearDown(self): """Shut down the open object""" @@ -205,7 +156,7 @@ def test_headers(self): self.assertIsInstance(self.returned_obj.headers, email.message.Message) def test_url(self): - self.assertEqual(self.returned_obj.url, self.quoted_pathname) + self.assertEqual(self.returned_obj.url, "file://" + self.quoted_pathname) def test_status(self): self.assertIsNone(self.returned_obj.status) @@ -214,7 +165,7 @@ def test_info(self): self.assertIsInstance(self.returned_obj.info(), email.message.Message) def test_geturl(self): - self.assertEqual(self.returned_obj.geturl(), self.quoted_pathname) + self.assertEqual(self.returned_obj.geturl(), "file://" + self.quoted_pathname) def test_getcode(self): self.assertIsNone(self.returned_obj.getcode()) @@ -339,13 +290,13 @@ def test_getproxies_environment_prefer_lowercase(self): self.assertEqual('http://somewhere:3128', proxies['http']) -class urlopen_HttpTests(unittest.TestCase, FakeHTTPMixin, FakeFTPMixin): +class urlopen_HttpTests(unittest.TestCase, FakeHTTPMixin): """Test urlopen() opening a fake http connection.""" def check_read(self, ver): self.fakehttp(b"HTTP/" + ver + b" 200 OK\r\n\r\nHello!") try: - fp = urlopen("http://python.org/") + fp = urllib.request.urlopen("http://python.org/") self.assertEqual(fp.readline(), b"Hello!") self.assertEqual(fp.readline(), b"") self.assertEqual(fp.geturl(), 'http://python.org/') @@ -366,8 +317,8 @@ def test_url_fragment(self): def test_willclose(self): self.fakehttp(b"HTTP/1.1 200 OK\r\n\r\nHello!") try: - resp = urlopen("http://www.python.org") - self.assertTrue(resp.fp.will_close) + resp = urllib.request.urlopen("http://www.python.org") + self.assertTrue(resp.will_close) finally: self.unfakehttp() @@ -392,9 +343,6 @@ def test_url_path_with_control_char_rejected(self): with self.assertRaisesRegex( InvalidURL, f"contain control.*{escaped_char_repr}"): urllib.request.urlopen(f"https:{schemeless_url}") - # This code path quotes the URL so there is no injection. - resp = urlopen(f"http:{schemeless_url}") - self.assertNotIn(char, resp.geturl()) finally: self.unfakehttp() @@ -416,11 +364,6 @@ def test_url_path_with_newline_header_injection_rejected(self): urllib.request.urlopen(f"http:{schemeless_url}") with self.assertRaisesRegex(InvalidURL, r"contain control.*\\n"): urllib.request.urlopen(f"https:{schemeless_url}") - # This code path quotes the URL so there is no injection. 
- resp = urlopen(f"http:{schemeless_url}") - self.assertNotIn(' ', resp.geturl()) - self.assertNotIn('\r', resp.geturl()) - self.assertNotIn('\n', resp.geturl()) finally: self.unfakehttp() @@ -435,9 +378,9 @@ def test_url_host_with_control_char_rejected(self): InvalidURL = http.client.InvalidURL with self.assertRaisesRegex( InvalidURL, f"contain control.*{escaped_char_repr}"): - urlopen(f"http:{schemeless_url}") + urllib.request.urlopen(f"http:{schemeless_url}") with self.assertRaisesRegex(InvalidURL, f"contain control.*{escaped_char_repr}"): - urlopen(f"https:{schemeless_url}") + urllib.request.urlopen(f"https:{schemeless_url}") finally: self.unfakehttp() @@ -450,9 +393,9 @@ def test_url_host_with_newline_header_injection_rejected(self): InvalidURL = http.client.InvalidURL with self.assertRaisesRegex( InvalidURL, r"contain control.*\\r"): - urlopen(f"http:{schemeless_url}") + urllib.request.urlopen(f"http:{schemeless_url}") with self.assertRaisesRegex(InvalidURL, r"contain control.*\\n"): - urlopen(f"https:{schemeless_url}") + urllib.request.urlopen(f"https:{schemeless_url}") finally: self.unfakehttp() @@ -476,7 +419,7 @@ def test_read_bogus(self): Content-Type: text/html; charset=iso-8859-1 ''', mock_close=True) try: - self.assertRaises(OSError, urlopen, "http://python.org/") + self.assertRaises(OSError, urllib.request.urlopen, "http://python.org/") finally: self.unfakehttp() @@ -492,20 +435,20 @@ def test_invalid_redirect(self): try: msg = "Redirection to url 'file:" with self.assertRaisesRegex(urllib.error.HTTPError, msg): - urlopen("http://python.org/") + urllib.request.urlopen("http://python.org/") finally: self.unfakehttp() def test_redirect_limit_independent(self): # Ticket #12923: make sure independent requests each use their # own retry limit. - for i in range(FancyURLopener().maxtries): + for i in range(urllib.request.HTTPRedirectHandler.max_redirections): self.fakehttp(b'''HTTP/1.1 302 Found Location: file://guidocomputer.athome.com:/python/license Connection: close ''', mock_close=True) try: - self.assertRaises(urllib.error.HTTPError, urlopen, + self.assertRaises(urllib.error.HTTPError, urllib.request.urlopen, "http://something") finally: self.unfakehttp() @@ -515,14 +458,14 @@ def test_empty_socket(self): # data. 
(#1680230) self.fakehttp(b'') try: - self.assertRaises(OSError, urlopen, "http://something") + self.assertRaises(OSError, urllib.request.urlopen, "http://something") finally: self.unfakehttp() def test_missing_localfile(self): # Test for #10836 with self.assertRaises(urllib.error.URLError) as e: - urlopen('file://localhost/a/file/which/doesnot/exists.py') + urllib.request.urlopen('file://localhost/a/file/which/doesnot/exists.py') self.assertTrue(e.exception.filename) self.assertTrue(e.exception.reason) @@ -531,71 +474,28 @@ def test_file_notexists(self): tmp_fileurl = 'file://localhost/' + tmp_file.replace(os.path.sep, '/') try: self.assertTrue(os.path.exists(tmp_file)) - with urlopen(tmp_fileurl) as fobj: + with urllib.request.urlopen(tmp_fileurl) as fobj: self.assertTrue(fobj) finally: os.close(fd) os.unlink(tmp_file) self.assertFalse(os.path.exists(tmp_file)) with self.assertRaises(urllib.error.URLError): - urlopen(tmp_fileurl) + urllib.request.urlopen(tmp_fileurl) def test_ftp_nohost(self): test_ftp_url = 'ftp:///path' with self.assertRaises(urllib.error.URLError) as e: - urlopen(test_ftp_url) + urllib.request.urlopen(test_ftp_url) self.assertFalse(e.exception.filename) self.assertTrue(e.exception.reason) def test_ftp_nonexisting(self): with self.assertRaises(urllib.error.URLError) as e: - urlopen('ftp://localhost/a/file/which/doesnot/exists.py') + urllib.request.urlopen('ftp://localhost/a/file/which/doesnot/exists.py') self.assertFalse(e.exception.filename) self.assertTrue(e.exception.reason) - @patch.object(urllib.request, 'MAXFTPCACHE', 0) - def test_ftp_cache_pruning(self): - self.fakeftp() - try: - urllib.request.ftpcache['test'] = urllib.request.ftpwrapper('user', 'pass', 'localhost', 21, []) - urlopen('ftp://localhost') - finally: - self.unfakeftp() - - def test_userpass_inurl(self): - self.fakehttp(b"HTTP/1.0 200 OK\r\n\r\nHello!") - try: - fp = urlopen("http://user:pass@python.org/") - self.assertEqual(fp.readline(), b"Hello!") - self.assertEqual(fp.readline(), b"") - self.assertEqual(fp.geturl(), 'http://user:pass@python.org/') - self.assertEqual(fp.getcode(), 200) - finally: - self.unfakehttp() - - def test_userpass_inurl_w_spaces(self): - self.fakehttp(b"HTTP/1.0 200 OK\r\n\r\nHello!") - try: - userpass = "a b:c d" - url = "http://{}@python.org/".format(userpass) - fakehttp_wrapper = http.client.HTTPConnection - authorization = ("Authorization: Basic %s\r\n" % - b64encode(userpass.encode("ASCII")).decode("ASCII")) - fp = urlopen(url) - # The authorization header must be in place - self.assertIn(authorization, fakehttp_wrapper.buf.decode("UTF-8")) - self.assertEqual(fp.readline(), b"Hello!") - self.assertEqual(fp.readline(), b"") - # the spaces are quoted in URL so no match - self.assertNotEqual(fp.geturl(), url) - self.assertEqual(fp.getcode(), 200) - finally: - self.unfakehttp() - - def test_URLopener_deprecation(self): - with warnings_helper.check_warnings(('',DeprecationWarning)): - urllib.request.URLopener() - class urlopen_DataTests(unittest.TestCase): """Test urlopen() opening a data URL.""" @@ -709,10 +609,6 @@ def tearDown(self): def constructLocalFileUrl(self, filePath): filePath = os.path.abspath(filePath) - try: - filePath.encode("utf-8") - except UnicodeEncodeError: - raise unittest.SkipTest("filePath is not encodable to utf8") return "file://%s" % urllib.request.pathname2url(filePath) def createNewTempFile(self, data=b""): @@ -1527,16 +1423,18 @@ def test_pathname2url_win(self): self.assertEqual(fn('\\\\?\\unc\\server\\share\\dir'), '//server/share/dir') 
self.assertEqual(fn("C:"), '///C:') self.assertEqual(fn("C:\\"), '///C:/') + self.assertEqual(fn('c:\\a\\b.c'), '///c:/a/b.c') self.assertEqual(fn('C:\\a\\b.c'), '///C:/a/b.c') self.assertEqual(fn('C:\\a\\b.c\\'), '///C:/a/b.c/') self.assertEqual(fn('C:\\a\\\\b.c'), '///C:/a//b.c') self.assertEqual(fn('C:\\a\\b%#c'), '///C:/a/b%25%23c') self.assertEqual(fn('C:\\a\\b\xe9'), '///C:/a/b%C3%A9') self.assertEqual(fn('C:\\foo\\bar\\spam.foo'), "///C:/foo/bar/spam.foo") - # Long drive letter - self.assertRaises(IOError, fn, "XX:\\") + # NTFS alternate data streams + self.assertEqual(fn('C:\\foo:bar'), '///C:/foo%3Abar') + self.assertEqual(fn('foo:bar'), 'foo%3Abar') # No drive letter - self.assertEqual(fn("\\folder\\test\\"), '/folder/test/') + self.assertEqual(fn("\\folder\\test\\"), '///folder/test/') self.assertEqual(fn("\\\\folder\\test\\"), '//folder/test/') self.assertEqual(fn("\\\\\\folder\\test\\"), '///folder/test/') self.assertEqual(fn('\\\\some\\share\\'), '//some/share/') @@ -1549,7 +1447,7 @@ def test_pathname2url_win(self): self.assertEqual(fn('//?/unc/server/share/dir'), '//server/share/dir') # Round-tripping urls = ['///C:', - '/folder/test/', + '///folder/test/', '///C:/foo/bar/spam.foo'] for url in urls: self.assertEqual(fn(urllib.request.url2pathname(url)), url) @@ -1558,9 +1456,19 @@ def test_pathname2url_win(self): 'test specific to POSIX pathnames') def test_pathname2url_posix(self): fn = urllib.request.pathname2url - self.assertEqual(fn('/'), '/') - self.assertEqual(fn('/a/b.c'), '/a/b.c') - self.assertEqual(fn('/a/b%#c'), '/a/b%25%23c') + self.assertEqual(fn('/'), '///') + self.assertEqual(fn('/a/b.c'), '///a/b.c') + self.assertEqual(fn('//a/b.c'), '////a/b.c') + self.assertEqual(fn('///a/b.c'), '/////a/b.c') + self.assertEqual(fn('////a/b.c'), '//////a/b.c') + self.assertEqual(fn('/a/b%#c'), '///a/b%25%23c') + + @unittest.skipUnless(os_helper.FS_NONASCII, 'need os_helper.FS_NONASCII') + def test_pathname2url_nonascii(self): + encoding = sys.getfilesystemencoding() + errors = sys.getfilesystemencodeerrors() + url = urllib.parse.quote(os_helper.FS_NONASCII, encoding=encoding, errors=errors) + self.assertEqual(urllib.request.pathname2url(os_helper.FS_NONASCII), url) @unittest.skipUnless(sys.platform == 'win32', 'test specific to Windows pathnames.') @@ -1576,6 +1484,7 @@ def test_url2pathname_win(self): self.assertEqual(fn("///C/test/"), '\\C\\test\\') self.assertEqual(fn("////C/test/"), '\\\\C\\test\\') # DOS drive paths + self.assertEqual(fn('c:/path/to/file'), 'c:\\path\\to\\file') self.assertEqual(fn('C:/path/to/file'), 'C:\\path\\to\\file') self.assertEqual(fn('C:/path/to/file/'), 'C:\\path\\to\\file\\') self.assertEqual(fn('C:/path/to//file'), 'C:\\path\\to\\\\file') @@ -1588,10 +1497,12 @@ def test_url2pathname_win(self): # UNC paths self.assertEqual(fn('//server/path/to/file'), '\\\\server\\path\\to\\file') self.assertEqual(fn('////server/path/to/file'), '\\\\server\\path\\to\\file') - self.assertEqual(fn('/////server/path/to/file'), '\\\\\\server\\path\\to\\file') + self.assertEqual(fn('/////server/path/to/file'), '\\\\server\\path\\to\\file') # Localhost paths self.assertEqual(fn('//localhost/C:/path/to/file'), 'C:\\path\\to\\file') self.assertEqual(fn('//localhost/C|/path/to/file'), 'C:\\path\\to\\file') + self.assertEqual(fn('//localhost/path/to/file'), '\\path\\to\\file') + self.assertEqual(fn('//localhost//server/path/to/file'), '\\\\server\\path\\to\\file') # Percent-encoded forward slashes are preserved for backwards compatibility 
self.assertEqual(fn('C:/foo%2fbar'), 'C:\\foo/bar') self.assertEqual(fn('//server/share/foo%2fbar'), '\\\\server\\share\\foo/bar') @@ -1610,7 +1521,16 @@ def test_url2pathname_posix(self): self.assertEqual(fn('//foo/bar'), '//foo/bar') self.assertEqual(fn('///foo/bar'), '/foo/bar') self.assertEqual(fn('////foo/bar'), '//foo/bar') - self.assertEqual(fn('//localhost/foo/bar'), '//localhost/foo/bar') + self.assertEqual(fn('//localhost/foo/bar'), '/foo/bar') + + @unittest.skipUnless(os_helper.FS_NONASCII, 'need os_helper.FS_NONASCII') + def test_url2pathname_nonascii(self): + encoding = sys.getfilesystemencoding() + errors = sys.getfilesystemencodeerrors() + url = os_helper.FS_NONASCII + self.assertEqual(urllib.request.url2pathname(url), os_helper.FS_NONASCII) + url = urllib.parse.quote(url, encoding=encoding, errors=errors) + self.assertEqual(urllib.request.url2pathname(url), os_helper.FS_NONASCII) class Utility_Tests(unittest.TestCase): """Testcase to test the various utility functions in the urllib.""" @@ -1620,56 +1540,6 @@ def test_thishost(self): self.assertIsInstance(urllib.request.thishost(), tuple) -class URLopener_Tests(FakeHTTPMixin, unittest.TestCase): - """Testcase to test the open method of URLopener class.""" - - def test_quoted_open(self): - class DummyURLopener(urllib.request.URLopener): - def open_spam(self, url): - return url - with warnings_helper.check_warnings( - ('DummyURLopener style of invoking requests is deprecated.', - DeprecationWarning)): - self.assertEqual(DummyURLopener().open( - 'spam://example/ /'),'//example/%20/') - - # test the safe characters are not quoted by urlopen - self.assertEqual(DummyURLopener().open( - "spam://c:|windows%/:=&?~#+!$,;'@()*[]|/path/"), - "//c:|windows%/:=&?~#+!$,;'@()*[]|/path/") - - @warnings_helper.ignore_warnings(category=DeprecationWarning) - def test_urlopener_retrieve_file(self): - with os_helper.temp_dir() as tmpdir: - fd, tmpfile = tempfile.mkstemp(dir=tmpdir) - os.close(fd) - fileurl = "file:" + urllib.request.pathname2url(tmpfile) - filename, _ = urllib.request.URLopener().retrieve(fileurl) - # Some buildbots have TEMP folder that uses a lowercase drive letter. 
- self.assertEqual(os.path.normcase(filename), os.path.normcase(tmpfile)) - - @warnings_helper.ignore_warnings(category=DeprecationWarning) - def test_urlopener_retrieve_remote(self): - url = "http://www.python.org/file.txt" - self.fakehttp(b"HTTP/1.1 200 OK\r\n\r\nHello!") - self.addCleanup(self.unfakehttp) - filename, _ = urllib.request.URLopener().retrieve(url) - self.assertEqual(os.path.splitext(filename)[1], ".txt") - - @warnings_helper.ignore_warnings(category=DeprecationWarning) - def test_local_file_open(self): - # bpo-35907, CVE-2019-9948: urllib must reject local_file:// scheme - class DummyURLopener(urllib.request.URLopener): - def open_local_file(self, url): - return url - for url in ('local_file://example', 'local-file://example'): - self.assertRaises(OSError, urllib.request.urlopen, url) - self.assertRaises(OSError, urllib.request.URLopener().open, url) - self.assertRaises(OSError, urllib.request.URLopener().retrieve, url) - self.assertRaises(OSError, DummyURLopener().open, url) - self.assertRaises(OSError, DummyURLopener().retrieve, url) - - class RequestTests(unittest.TestCase): """Unit tests for urllib.request.Request.""" diff --git a/Lib/test/test_urllib2.py b/Lib/test/test_urllib2.py index b90ccc2f125b93..99ad11cf0552eb 100644 --- a/Lib/test/test_urllib2.py +++ b/Lib/test/test_urllib2.py @@ -718,10 +718,6 @@ def test_processors(self): def sanepathname2url(path): - try: - path.encode("utf-8") - except UnicodeEncodeError: - raise unittest.SkipTest("path is not encodable to utf8") urlpath = urllib.request.pathname2url(path) if os.name == "nt" and urlpath.startswith("///"): urlpath = urlpath[2:] diff --git a/Lib/test/test_urllibnet.py b/Lib/test/test_urllibnet.py index 49a3b5afdebb2f..f824dddf711761 100644 --- a/Lib/test/test_urllibnet.py +++ b/Lib/test/test_urllibnet.py @@ -5,6 +5,7 @@ import contextlib import socket +import urllib.error import urllib.parse import urllib.request import os @@ -101,13 +102,10 @@ def test_getcode(self): # test getcode() with the fancy opener to get 404 error codes URL = self.url + "XXXinvalidXXX" with socket_helper.transient_internet(URL): - with self.assertWarns(DeprecationWarning): - open_url = urllib.request.FancyURLopener().open(URL) - try: - code = open_url.getcode() - finally: - open_url.close() - self.assertEqual(code, 404) + with self.assertRaises(urllib.error.URLError) as e: + with urllib.request.urlopen(URL): + pass + self.assertEqual(e.exception.code, 404) @support.requires_resource('walltime') def test_bad_address(self): diff --git a/Lib/test/test_winconsoleio.py b/Lib/test/test_winconsoleio.py index a10d63dfdc9753..d9076e77c158a2 100644 --- a/Lib/test/test_winconsoleio.py +++ b/Lib/test/test_winconsoleio.py @@ -142,6 +142,29 @@ def test_write_empty_data(self): with ConIO('CONOUT$', 'w') as f: self.assertEqual(f.write(b''), 0) + @requires_resource('console') + def test_write(self): + testcases = [] + with ConIO('CONOUT$', 'w') as f: + for a in [ + b'', + b'abc', + b'\xc2\xa7\xe2\x98\x83\xf0\x9f\x90\x8d', + b'\xff'*10, + ]: + for b in b'\xc2\xa7', b'\xe2\x98\x83', b'\xf0\x9f\x90\x8d': + testcases.append(a + b) + for i in range(1, len(b)): + data = a + b[:i] + testcases.append(data + b'z') + testcases.append(data + b'\xff') + # incomplete multibyte sequence + with self.subTest(data=data): + self.assertEqual(f.write(data), len(a)) + for data in testcases: + with self.subTest(data=data): + self.assertEqual(f.write(data), len(data)) + def assertStdinRoundTrip(self, text): stdin = open('CONIN$', 'r') old_stdin = sys.stdin diff --git 
a/Lib/test/test_zipfile/test_core.py b/Lib/test/test_zipfile/test_core.py index 36f7f542872897..c36228c033a414 100644 --- a/Lib/test/test_zipfile/test_core.py +++ b/Lib/test/test_zipfile/test_core.py @@ -1969,10 +1969,16 @@ def test_is_zip_valid_file(self): zip_contents = fp.read() # - passing a file-like object fp = io.BytesIO() - fp.write(zip_contents) + end = fp.write(zip_contents) + self.assertEqual(fp.tell(), end) + mid = end // 2 + fp.seek(mid, 0) self.assertTrue(zipfile.is_zipfile(fp)) - fp.seek(0, 0) + # check that the position is left unchanged after the call + # see: https://github.com/python/cpython/issues/122356 + self.assertEqual(fp.tell(), mid) self.assertTrue(zipfile.is_zipfile(fp)) + self.assertEqual(fp.tell(), mid) def test_non_existent_file_raises_OSError(self): # make sure we don't raise an AttributeError when a partially-constructed diff --git a/Lib/test/translationdata/argparse/msgids.txt b/Lib/test/translationdata/argparse/msgids.txt index 2b012906436e85..ae89ac74726ecf 100644 --- a/Lib/test/translationdata/argparse/msgids.txt +++ b/Lib/test/translationdata/argparse/msgids.txt @@ -8,6 +8,8 @@ argument %(argument_name)s: %(message)s argument '%(argument_name)s' is deprecated can't open '%(filename)s': %(error)s command '%(parser_name)s' is deprecated +conflicting option string: %s +expected %s argument expected at least one argument expected at most one argument expected one argument diff --git a/Lib/test/translationdata/getopt/msgids.txt b/Lib/test/translationdata/getopt/msgids.txt index 1ffab1f31abad5..5c0c02d452d156 100644 --- a/Lib/test/translationdata/getopt/msgids.txt +++ b/Lib/test/translationdata/getopt/msgids.txt @@ -1,6 +1,6 @@ option -%s not recognized option -%s requires argument option --%s must not have an argument -option --%s not a unique prefix +option --%s not a unique prefix; possible options: %s option --%s not recognized option --%s requires argument \ No newline at end of file diff --git a/Lib/test/translationdata/optparse/msgids.txt b/Lib/test/translationdata/optparse/msgids.txt index ac5317c736af8c..8f405a2bf26dbe 100644 --- a/Lib/test/translationdata/optparse/msgids.txt +++ b/Lib/test/translationdata/optparse/msgids.txt @@ -1,3 +1,4 @@ +%(option)s option requires %(number)d argument %prog [options] %s option does not take a value Options diff --git a/Lib/token.py b/Lib/token.py index b620317106e173..54d7cdccadc79a 100644 --- a/Lib/token.py +++ b/Lib/token.py @@ -1,7 +1,8 @@ """Token constants.""" # Auto-generated by Tools/build/generate_token.py -__all__ = ['tok_name', 'ISTERMINAL', 'ISNONTERMINAL', 'ISEOF'] +__all__ = ['tok_name', 'ISTERMINAL', 'ISNONTERMINAL', 'ISEOF', + 'EXACT_TOKEN_TYPES'] ENDMARKER = 0 NAME = 1 diff --git a/Lib/typing.py b/Lib/typing.py index 938e52922aee03..5f3aacd877221c 100644 --- a/Lib/typing.py +++ b/Lib/typing.py @@ -2936,10 +2936,13 @@ def _make_eager_annotate(types): checked_types = {key: _type_check(val, f"field {key} annotation must be a type") for key, val in types.items()} def annotate(format): - if format in (annotationlib.Format.VALUE, annotationlib.Format.FORWARDREF): - return checked_types - else: - return annotationlib.annotations_to_string(types) + match format: + case annotationlib.Format.VALUE | annotationlib.Format.FORWARDREF: + return checked_types + case annotationlib.Format.STRING: + return annotationlib.annotations_to_string(types) + case _: + raise NotImplementedError(format) return annotate @@ -3229,8 +3232,10 @@ def __annotate__(format): } elif format == annotationlib.Format.STRING: own = 
annotationlib.annotations_to_string(own_annotations) - else: + elif format in (annotationlib.Format.FORWARDREF, annotationlib.Format.VALUE): own = own_checked_annotations + else: + raise NotImplementedError(format) annos.update(own) return annos diff --git a/Lib/urllib/parse.py b/Lib/urllib/parse.py index 8d7631d5693ece..c412c729852272 100644 --- a/Lib/urllib/parse.py +++ b/Lib/urllib/parse.py @@ -247,11 +247,11 @@ def _hostinfo(self): return hostname, port -_DefragResultBase = namedtuple('DefragResult', 'url fragment') +_DefragResultBase = namedtuple('_DefragResultBase', 'url fragment') _SplitResultBase = namedtuple( - 'SplitResult', 'scheme netloc path query fragment') + '_SplitResultBase', 'scheme netloc path query fragment') _ParseResultBase = namedtuple( - 'ParseResult', 'scheme netloc path params query fragment') + '_ParseResultBase', 'scheme netloc path params query fragment') _DefragResultBase.__doc__ = """ DefragResult(url, fragment) diff --git a/Lib/urllib/request.py b/Lib/urllib/request.py index 18a837dd57ed59..1fcaa89188188d 100644 --- a/Lib/urllib/request.py +++ b/Lib/urllib/request.py @@ -83,6 +83,7 @@ import base64 import bisect +import contextlib import email import hashlib import http.client @@ -94,15 +95,13 @@ import sys import time import tempfile -import contextlib -import warnings from urllib.error import URLError, HTTPError, ContentTooShortError from urllib.parse import ( urlparse, urlsplit, urljoin, unwrap, quote, unquote, _splittype, _splithost, _splitport, _splituser, _splitpasswd, - _splitattr, _splitquery, _splitvalue, _splittag, _to_bytes, + _splitattr, _splitvalue, _splittag, unquote_to_bytes, urlunparse) from urllib.response import addinfourl, addclosehook @@ -128,7 +127,7 @@ 'urlopen', 'install_opener', 'build_opener', 'pathname2url', 'url2pathname', 'getproxies', # Legacy interface - 'urlretrieve', 'urlcleanup', 'URLopener', 'FancyURLopener', + 'urlretrieve', 'urlcleanup', ] # used in User-Agent header sent @@ -165,8 +164,7 @@ def urlopen(url, data=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, the reason phrase returned by the server --- instead of the response headers as it is specified in the documentation for HTTPResponse. - For FTP, file, and data URLs and requests explicitly handled by legacy - URLopener and FancyURLopener classes, this function returns a + For FTP, file, and data URLs, this function returns a urllib.response.addinfourl object. Note that None may be returned if no handler handles the request (though @@ -940,6 +938,7 @@ def _parse_realm(self, header): for mo in AbstractBasicAuthHandler.rx.finditer(header): scheme, quote, realm = mo.groups() if quote not in ['"', "'"]: + import warnings warnings.warn("Basic Auth Realm was unquoted", UserWarning, 3) @@ -1495,7 +1494,7 @@ def open_local_file(self, req): origurl = 'file://' + filename return addinfourl(open(localfile, 'rb'), headers, origurl) except OSError as exp: - raise URLError(exp) + raise URLError(exp, exp.filename) raise URLError('file not on local host') def _safe_gethostbyname(host): @@ -1647,8 +1646,6 @@ def data_open(self, req): # Code move from the old urllib module -MAXFTPCACHE = 10 # Trim the ftp cache beyond this size - # Helper for non-unix systems if os.name == 'nt': from nturl2path import url2pathname, pathname2url @@ -1660,684 +1657,25 @@ def url2pathname(pathname): # URL has an empty authority section, so the path begins on the # third character. pathname = pathname[2:] - return unquote(pathname) + elif pathname[:12] == '//localhost/': + # Skip past 'localhost' authority. 
+ pathname = pathname[11:] + encoding = sys.getfilesystemencoding() + errors = sys.getfilesystemencodeerrors() + return unquote(pathname, encoding=encoding, errors=errors) def pathname2url(pathname): """OS-specific conversion from a file system path to a relative URL of the 'file' scheme; not recommended for general use.""" - return quote(pathname) - - -ftpcache = {} - - -class URLopener: - """Class to open URLs. - This is a class rather than just a subroutine because we may need - more than one set of global protocol-specific options. - Note -- this is a base class for those who don't want the - automatic handling of errors type 302 (relocated) and 401 - (authorization needed).""" - - __tempfiles = None - - version = "Python-urllib/%s" % __version__ - - # Constructor - def __init__(self, proxies=None, **x509): - msg = "%(class)s style of invoking requests is deprecated. " \ - "Use newer urlopen functions/methods" % {'class': self.__class__.__name__} - warnings.warn(msg, DeprecationWarning, stacklevel=3) - if proxies is None: - proxies = getproxies() - assert hasattr(proxies, 'keys'), "proxies must be a mapping" - self.proxies = proxies - self.key_file = x509.get('key_file') - self.cert_file = x509.get('cert_file') - self.addheaders = [('User-Agent', self.version), ('Accept', '*/*')] - self.__tempfiles = [] - self.__unlink = os.unlink # See cleanup() - self.tempcache = None - # Undocumented feature: if you assign {} to tempcache, - # it is used to cache files retrieved with - # self.retrieve(). This is not enabled by default - # since it does not work for changing documents (and I - # haven't got the logic to check expiration headers - # yet). - self.ftpcache = ftpcache - # Undocumented feature: you can use a different - # ftp cache by assigning to the .ftpcache member; - # in case you want logically independent URL openers - # XXX This is not threadsafe. Bah. - - def __del__(self): - self.close() - - def close(self): - self.cleanup() - - def cleanup(self): - # This code sometimes runs when the rest of this module - # has already been deleted, so it can't use any globals - # or import anything. - if self.__tempfiles: - for file in self.__tempfiles: - try: - self.__unlink(file) - except OSError: - pass - del self.__tempfiles[:] - if self.tempcache: - self.tempcache.clear() - - def addheader(self, *args): - """Add a header to be used by the HTTP interface only - e.g. 
u.addheader('Accept', 'sound/basic')""" - self.addheaders.append(args) - - # External interface - def open(self, fullurl, data=None): - """Use URLopener().open(file) instead of open(file, 'r').""" - fullurl = unwrap(_to_bytes(fullurl)) - fullurl = quote(fullurl, safe="%/:=&?~#+!$,;'@()*[]|") - if self.tempcache and fullurl in self.tempcache: - filename, headers = self.tempcache[fullurl] - fp = open(filename, 'rb') - return addinfourl(fp, headers, fullurl) - urltype, url = _splittype(fullurl) - if not urltype: - urltype = 'file' - if urltype in self.proxies: - proxy = self.proxies[urltype] - urltype, proxyhost = _splittype(proxy) - host, selector = _splithost(proxyhost) - url = (host, fullurl) # Signal special case to open_*() - else: - proxy = None - name = 'open_' + urltype - self.type = urltype - name = name.replace('-', '_') - if not hasattr(self, name) or name == 'open_local_file': - if proxy: - return self.open_unknown_proxy(proxy, fullurl, data) - else: - return self.open_unknown(fullurl, data) - try: - if data is None: - return getattr(self, name)(url) - else: - return getattr(self, name)(url, data) - except (HTTPError, URLError): - raise - except OSError as msg: - raise OSError('socket error', msg) from msg - - def open_unknown(self, fullurl, data=None): - """Overridable interface to open unknown URL type.""" - type, url = _splittype(fullurl) - raise OSError('url error', 'unknown url type', type) - - def open_unknown_proxy(self, proxy, fullurl, data=None): - """Overridable interface to open unknown URL type.""" - type, url = _splittype(fullurl) - raise OSError('url error', 'invalid proxy for %s' % type, proxy) - - # External interface - def retrieve(self, url, filename=None, reporthook=None, data=None): - """retrieve(url) returns (filename, headers) for a local object - or (tempfilename, headers) for a remote object.""" - url = unwrap(_to_bytes(url)) - if self.tempcache and url in self.tempcache: - return self.tempcache[url] - type, url1 = _splittype(url) - if filename is None and (not type or type == 'file'): - try: - fp = self.open_local_file(url1) - hdrs = fp.info() - fp.close() - return url2pathname(_splithost(url1)[1]), hdrs - except OSError: - pass - fp = self.open(url, data) - try: - headers = fp.info() - if filename: - tfp = open(filename, 'wb') - else: - garbage, path = _splittype(url) - garbage, path = _splithost(path or "") - path, garbage = _splitquery(path or "") - path, garbage = _splitattr(path or "") - suffix = os.path.splitext(path)[1] - (fd, filename) = tempfile.mkstemp(suffix) - self.__tempfiles.append(filename) - tfp = os.fdopen(fd, 'wb') - try: - result = filename, headers - if self.tempcache is not None: - self.tempcache[url] = result - bs = 1024*8 - size = -1 - read = 0 - blocknum = 0 - if "content-length" in headers: - size = int(headers["Content-Length"]) - if reporthook: - reporthook(blocknum, bs, size) - while block := fp.read(bs): - read += len(block) - tfp.write(block) - blocknum += 1 - if reporthook: - reporthook(blocknum, bs, size) - finally: - tfp.close() - finally: - fp.close() - - # raise exception if actual size does not match content-length header - if size >= 0 and read < size: - raise ContentTooShortError( - "retrieval incomplete: got only %i out of %i bytes" - % (read, size), result) - - return result - - # Each method named open_ knows how to open that type of URL - - def _open_generic_http(self, connection_factory, url, data): - """Make an HTTP connection using connection_class. 
- - This is an internal method that should be called from - open_http() or open_https(). - - Arguments: - - connection_factory should take a host name and return an - HTTPConnection instance. - - url is the url to retrieval or a host, relative-path pair. - - data is payload for a POST request or None. - """ - - user_passwd = None - proxy_passwd= None - if isinstance(url, str): - host, selector = _splithost(url) - if host: - user_passwd, host = _splituser(host) - host = unquote(host) - realhost = host - else: - host, selector = url - # check whether the proxy contains authorization information - proxy_passwd, host = _splituser(host) - # now we proceed with the url we want to obtain - urltype, rest = _splittype(selector) - url = rest - user_passwd = None - if urltype.lower() != 'http': - realhost = None - else: - realhost, rest = _splithost(rest) - if realhost: - user_passwd, realhost = _splituser(realhost) - if user_passwd: - selector = "%s://%s%s" % (urltype, realhost, rest) - if proxy_bypass(realhost): - host = realhost - - if not host: raise OSError('http error', 'no host given') - - if proxy_passwd: - proxy_passwd = unquote(proxy_passwd) - proxy_auth = base64.b64encode(proxy_passwd.encode()).decode('ascii') - else: - proxy_auth = None - - if user_passwd: - user_passwd = unquote(user_passwd) - auth = base64.b64encode(user_passwd.encode()).decode('ascii') - else: - auth = None - http_conn = connection_factory(host) - headers = {} - if proxy_auth: - headers["Proxy-Authorization"] = "Basic %s" % proxy_auth - if auth: - headers["Authorization"] = "Basic %s" % auth - if realhost: - headers["Host"] = realhost - - # Add Connection:close as we don't support persistent connections yet. - # This helps in closing the socket and avoiding ResourceWarning - - headers["Connection"] = "close" - - for header, value in self.addheaders: - headers[header] = value - - if data is not None: - headers["Content-Type"] = "application/x-www-form-urlencoded" - http_conn.request("POST", selector, data, headers) - else: - http_conn.request("GET", selector, headers=headers) - - try: - response = http_conn.getresponse() - except http.client.BadStatusLine: - # something went wrong with the HTTP status line - raise URLError("http protocol error: bad status line") - - # According to RFC 2616, "2xx" code indicates that the client's - # request was successfully received, understood, and accepted. - if 200 <= response.status < 300: - return addinfourl(response, response.msg, "http:" + url, - response.status) - else: - return self.http_error( - url, response.fp, - response.status, response.reason, response.msg, data) - - def open_http(self, url, data=None): - """Use HTTP protocol.""" - return self._open_generic_http(http.client.HTTPConnection, url, data) - - def http_error(self, url, fp, errcode, errmsg, headers, data=None): - """Handle http errors. 
- - Derived class can override this, or provide specific handlers - named http_error_DDD where DDD is the 3-digit error code.""" - # First check if there's a specific handler for this error - name = 'http_error_%d' % errcode - if hasattr(self, name): - method = getattr(self, name) - if data is None: - result = method(url, fp, errcode, errmsg, headers) - else: - result = method(url, fp, errcode, errmsg, headers, data) - if result: return result - return self.http_error_default(url, fp, errcode, errmsg, headers) - - def http_error_default(self, url, fp, errcode, errmsg, headers): - """Default error handler: close the connection and raise OSError.""" - fp.close() - raise HTTPError(url, errcode, errmsg, headers, None) - - if _have_ssl: - def _https_connection(self, host): - if self.key_file or self.cert_file: - http_version = http.client.HTTPSConnection._http_vsn - context = http.client._create_https_context(http_version) - context.load_cert_chain(self.cert_file, self.key_file) - # cert and key file means the user wants to authenticate. - # enable TLS 1.3 PHA implicitly even for custom contexts. - if context.post_handshake_auth is not None: - context.post_handshake_auth = True - else: - context = None - return http.client.HTTPSConnection(host, context=context) - - def open_https(self, url, data=None): - """Use HTTPS protocol.""" - return self._open_generic_http(self._https_connection, url, data) - - def open_file(self, url): - """Use local file or FTP depending on form of URL.""" - if not isinstance(url, str): - raise URLError('file error: proxy support for file protocol currently not implemented') - if url[:2] == '//' and url[2:3] != '/' and url[2:12].lower() != 'localhost/': - raise ValueError("file:// scheme is supported only on localhost") - else: - return self.open_local_file(url) - - def open_local_file(self, url): - """Use local file.""" - import email.utils - import mimetypes - host, file = _splithost(url) - localname = url2pathname(file) - try: - stats = os.stat(localname) - except OSError as e: - raise URLError(e.strerror, e.filename) - size = stats.st_size - modified = email.utils.formatdate(stats.st_mtime, usegmt=True) - mtype = mimetypes.guess_type(url)[0] - headers = email.message_from_string( - 'Content-Type: %s\nContent-Length: %d\nLast-modified: %s\n' % - (mtype or 'text/plain', size, modified)) - if not host: - urlfile = file - if file[:1] == '/': - urlfile = 'file://' + file - return addinfourl(open(localname, 'rb'), headers, urlfile) - host, port = _splitport(host) - if (not port - and socket.gethostbyname(host) in ((localhost(),) + thishost())): - urlfile = file - if file[:1] == '/': - urlfile = 'file://' + file - elif file[:2] == './': - raise ValueError("local file url may start with / or file:. 
Unknown url of type: %s" % url) - return addinfourl(open(localname, 'rb'), headers, urlfile) - raise URLError('local file error: not on local host') - - def open_ftp(self, url): - """Use FTP protocol.""" - if not isinstance(url, str): - raise URLError('ftp error: proxy support for ftp protocol currently not implemented') - import mimetypes - host, path = _splithost(url) - if not host: raise URLError('ftp error: no host given') - host, port = _splitport(host) - user, host = _splituser(host) - if user: user, passwd = _splitpasswd(user) - else: passwd = None - host = unquote(host) - user = unquote(user or '') - passwd = unquote(passwd or '') - host = socket.gethostbyname(host) - if not port: - import ftplib - port = ftplib.FTP_PORT - else: - port = int(port) - path, attrs = _splitattr(path) - path = unquote(path) - dirs = path.split('/') - dirs, file = dirs[:-1], dirs[-1] - if dirs and not dirs[0]: dirs = dirs[1:] - if dirs and not dirs[0]: dirs[0] = '/' - key = user, host, port, '/'.join(dirs) - # XXX thread unsafe! - if len(self.ftpcache) > MAXFTPCACHE: - # Prune the cache, rather arbitrarily - for k in list(self.ftpcache): - if k != key: - v = self.ftpcache[k] - del self.ftpcache[k] - v.close() - try: - if key not in self.ftpcache: - self.ftpcache[key] = \ - ftpwrapper(user, passwd, host, port, dirs) - if not file: type = 'D' - else: type = 'I' - for attr in attrs: - attr, value = _splitvalue(attr) - if attr.lower() == 'type' and \ - value in ('a', 'A', 'i', 'I', 'd', 'D'): - type = value.upper() - (fp, retrlen) = self.ftpcache[key].retrfile(file, type) - mtype = mimetypes.guess_type("ftp:" + url)[0] - headers = "" - if mtype: - headers += "Content-Type: %s\n" % mtype - if retrlen is not None and retrlen >= 0: - headers += "Content-Length: %d\n" % retrlen - headers = email.message_from_string(headers) - return addinfourl(fp, headers, "ftp:" + url) - except ftperrors() as exp: - raise URLError(f'ftp error: {exp}') from exp - - def open_data(self, url, data=None): - """Use "data" URL.""" - if not isinstance(url, str): - raise URLError('data error: proxy support for data protocol currently not implemented') - # ignore POSTed data - # - # syntax of data URLs: - # dataurl := "data:" [ mediatype ] [ ";base64" ] "," data - # mediatype := [ type "/" subtype ] *( ";" parameter ) - # data := *urlchar - # parameter := attribute "=" value - try: - [type, data] = url.split(',', 1) - except ValueError: - raise OSError('data error', 'bad data URL') - if not type: - type = 'text/plain;charset=US-ASCII' - semi = type.rfind(';') - if semi >= 0 and '=' not in type[semi:]: - encoding = type[semi+1:] - type = type[:semi] - else: - encoding = '' - msg = [] - msg.append('Date: %s'%time.strftime('%a, %d %b %Y %H:%M:%S GMT', - time.gmtime(time.time()))) - msg.append('Content-type: %s' % type) - if encoding == 'base64': - # XXX is this encoding/decoding ok? 
- data = base64.decodebytes(data.encode('ascii')).decode('latin-1') - else: - data = unquote(data) - msg.append('Content-Length: %d' % len(data)) - msg.append('') - msg.append(data) - msg = '\n'.join(msg) - headers = email.message_from_string(msg) - f = io.StringIO(msg) - #f.fileno = None # needed for addinfourl - return addinfourl(f, headers, url) - - -class FancyURLopener(URLopener): - """Derived class with handlers for errors we can handle (perhaps).""" - - def __init__(self, *args, **kwargs): - URLopener.__init__(self, *args, **kwargs) - self.auth_cache = {} - self.tries = 0 - self.maxtries = 10 - - def http_error_default(self, url, fp, errcode, errmsg, headers): - """Default error handling -- don't raise an exception.""" - return addinfourl(fp, headers, "http:" + url, errcode) - - def http_error_302(self, url, fp, errcode, errmsg, headers, data=None): - """Error 302 -- relocated (temporarily).""" - self.tries += 1 - try: - if self.maxtries and self.tries >= self.maxtries: - if hasattr(self, "http_error_500"): - meth = self.http_error_500 - else: - meth = self.http_error_default - return meth(url, fp, 500, - "Internal Server Error: Redirect Recursion", - headers) - result = self.redirect_internal(url, fp, errcode, errmsg, - headers, data) - return result - finally: - self.tries = 0 - - def redirect_internal(self, url, fp, errcode, errmsg, headers, data): - if 'location' in headers: - newurl = headers['location'] - elif 'uri' in headers: - newurl = headers['uri'] - else: - return - fp.close() - - # In case the server sent a relative URL, join with original: - newurl = urljoin(self.type + ":" + url, newurl) - - urlparts = urlparse(newurl) - - # For security reasons, we don't allow redirection to anything other - # than http, https and ftp. - - # We are using newer HTTPError with older redirect_internal method - # This older method will get deprecated in 3.3 - - if urlparts.scheme not in ('http', 'https', 'ftp', ''): - raise HTTPError(newurl, errcode, - errmsg + - " Redirection to url '%s' is not allowed." % newurl, - headers, fp) - - return self.open(newurl) - - def http_error_301(self, url, fp, errcode, errmsg, headers, data=None): - """Error 301 -- also relocated (permanently).""" - return self.http_error_302(url, fp, errcode, errmsg, headers, data) - - def http_error_303(self, url, fp, errcode, errmsg, headers, data=None): - """Error 303 -- also relocated (essentially identical to 302).""" - return self.http_error_302(url, fp, errcode, errmsg, headers, data) - - def http_error_307(self, url, fp, errcode, errmsg, headers, data=None): - """Error 307 -- relocated, but turn POST into error.""" - if data is None: - return self.http_error_302(url, fp, errcode, errmsg, headers, data) - else: - return self.http_error_default(url, fp, errcode, errmsg, headers) - - def http_error_308(self, url, fp, errcode, errmsg, headers, data=None): - """Error 308 -- relocated, but turn POST into error.""" - if data is None: - return self.http_error_301(url, fp, errcode, errmsg, headers, data) - else: - return self.http_error_default(url, fp, errcode, errmsg, headers) - - def http_error_401(self, url, fp, errcode, errmsg, headers, data=None, - retry=False): - """Error 401 -- authentication required. 
- This function supports Basic authentication only.""" - if 'www-authenticate' not in headers: - URLopener.http_error_default(self, url, fp, - errcode, errmsg, headers) - stuff = headers['www-authenticate'] - match = re.match('[ \t]*([^ \t]+)[ \t]+realm="([^"]*)"', stuff) - if not match: - URLopener.http_error_default(self, url, fp, - errcode, errmsg, headers) - scheme, realm = match.groups() - if scheme.lower() != 'basic': - URLopener.http_error_default(self, url, fp, - errcode, errmsg, headers) - if not retry: - URLopener.http_error_default(self, url, fp, errcode, errmsg, - headers) - name = 'retry_' + self.type + '_basic_auth' - if data is None: - return getattr(self,name)(url, realm) - else: - return getattr(self,name)(url, realm, data) - - def http_error_407(self, url, fp, errcode, errmsg, headers, data=None, - retry=False): - """Error 407 -- proxy authentication required. - This function supports Basic authentication only.""" - if 'proxy-authenticate' not in headers: - URLopener.http_error_default(self, url, fp, - errcode, errmsg, headers) - stuff = headers['proxy-authenticate'] - match = re.match('[ \t]*([^ \t]+)[ \t]+realm="([^"]*)"', stuff) - if not match: - URLopener.http_error_default(self, url, fp, - errcode, errmsg, headers) - scheme, realm = match.groups() - if scheme.lower() != 'basic': - URLopener.http_error_default(self, url, fp, - errcode, errmsg, headers) - if not retry: - URLopener.http_error_default(self, url, fp, errcode, errmsg, - headers) - name = 'retry_proxy_' + self.type + '_basic_auth' - if data is None: - return getattr(self,name)(url, realm) - else: - return getattr(self,name)(url, realm, data) - - def retry_proxy_http_basic_auth(self, url, realm, data=None): - host, selector = _splithost(url) - newurl = 'http://' + host + selector - proxy = self.proxies['http'] - urltype, proxyhost = _splittype(proxy) - proxyhost, proxyselector = _splithost(proxyhost) - i = proxyhost.find('@') + 1 - proxyhost = proxyhost[i:] - user, passwd = self.get_user_passwd(proxyhost, realm, i) - if not (user or passwd): return None - proxyhost = "%s:%s@%s" % (quote(user, safe=''), - quote(passwd, safe=''), proxyhost) - self.proxies['http'] = 'http://' + proxyhost + proxyselector - if data is None: - return self.open(newurl) - else: - return self.open(newurl, data) - - def retry_proxy_https_basic_auth(self, url, realm, data=None): - host, selector = _splithost(url) - newurl = 'https://' + host + selector - proxy = self.proxies['https'] - urltype, proxyhost = _splittype(proxy) - proxyhost, proxyselector = _splithost(proxyhost) - i = proxyhost.find('@') + 1 - proxyhost = proxyhost[i:] - user, passwd = self.get_user_passwd(proxyhost, realm, i) - if not (user or passwd): return None - proxyhost = "%s:%s@%s" % (quote(user, safe=''), - quote(passwd, safe=''), proxyhost) - self.proxies['https'] = 'https://' + proxyhost + proxyselector - if data is None: - return self.open(newurl) - else: - return self.open(newurl, data) - - def retry_http_basic_auth(self, url, realm, data=None): - host, selector = _splithost(url) - i = host.find('@') + 1 - host = host[i:] - user, passwd = self.get_user_passwd(host, realm, i) - if not (user or passwd): return None - host = "%s:%s@%s" % (quote(user, safe=''), - quote(passwd, safe=''), host) - newurl = 'http://' + host + selector - if data is None: - return self.open(newurl) - else: - return self.open(newurl, data) - - def retry_https_basic_auth(self, url, realm, data=None): - host, selector = _splithost(url) - i = host.find('@') + 1 - host = host[i:] - user, 
passwd = self.get_user_passwd(host, realm, i) - if not (user or passwd): return None - host = "%s:%s@%s" % (quote(user, safe=''), - quote(passwd, safe=''), host) - newurl = 'https://' + host + selector - if data is None: - return self.open(newurl) - else: - return self.open(newurl, data) - - def get_user_passwd(self, host, realm, clear_cache=0): - key = realm + '@' + host.lower() - if key in self.auth_cache: - if clear_cache: - del self.auth_cache[key] - else: - return self.auth_cache[key] - user, passwd = self.prompt_user_passwd(host, realm) - if user or passwd: self.auth_cache[key] = (user, passwd) - return user, passwd - - def prompt_user_passwd(self, host, realm): - """Override this in a GUI environment!""" - import getpass - try: - user = input("Enter username for %s at %s: " % (realm, host)) - passwd = getpass.getpass("Enter password for %s in %s at %s: " % - (user, realm, host)) - return user, passwd - except KeyboardInterrupt: - print() - return None, None + if pathname[:1] == '/': + # Add explicitly empty authority to absolute path. If the path + # starts with exactly one slash then this change is mostly + # cosmetic, but if it begins with two or more slashes then this + # avoids interpreting the path as a URL authority. + pathname = '//' + pathname + encoding = sys.getfilesystemencoding() + errors = sys.getfilesystemencodeerrors() + return quote(pathname, encoding=encoding, errors=errors) # Utility functions @@ -2485,9 +1823,7 @@ def getproxies_environment(): """Return a dictionary of scheme -> proxy server URL mappings. Scan the environment for variables named _proxy; - this seems to be the standard convention. If you need a - different way, you can pass a proxies dictionary to the - [Fancy]URLopener constructor. + this seems to be the standard convention. """ # in order to prefer lowercase variables, process environment in # two passes: first matches any, second pass matches lowercase only diff --git a/Lib/zipfile/__init__.py b/Lib/zipfile/__init__.py index 08c83cfb760250..6907ae6d5b7464 100644 --- a/Lib/zipfile/__init__.py +++ b/Lib/zipfile/__init__.py @@ -241,7 +241,9 @@ def is_zipfile(filename): result = False try: if hasattr(filename, "read"): + pos = filename.tell() result = _check_zipfile(fp=filename) + filename.seek(pos) else: with open(filename, "rb") as fp: result = _check_zipfile(fp) diff --git a/Makefile.pre.in b/Makefile.pre.in index 8d94ba361fd934..724354746b8d81 100644 --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ -2645,8 +2645,8 @@ libinstall: all $(srcdir)/Modules/xxmodule.c esac; \ done; \ done - $(INSTALL_DATA) `cat pybuilddir.txt`/_sysconfigdata_$(ABIFLAGS)_$(MACHDEP)_$(MULTIARCH).py \ - $(DESTDIR)$(LIBDEST); \ + $(INSTALL_DATA) `cat pybuilddir.txt`/_sysconfigdata_$(ABIFLAGS)_$(MACHDEP)_$(MULTIARCH).py $(DESTDIR)$(LIBDEST); \ + $(INSTALL_DATA) `cat pybuilddir.txt`/_sysconfig_vars_$(ABIFLAGS)_$(MACHDEP)_$(MULTIARCH).json $(DESTDIR)$(LIBDEST); \ $(INSTALL_DATA) $(srcdir)/LICENSE $(DESTDIR)$(LIBDEST)/LICENSE.txt @ # If app store compliance has been configured, apply the patch to the @ # installed library code. 
The patch has been previously validated against diff --git a/Misc/ACKS b/Misc/ACKS index 08cd293eac3835..cd34846574b304 100644 --- a/Misc/ACKS +++ b/Misc/ACKS @@ -1154,6 +1154,7 @@ Mark Lutz Taras Lyapun Jim Lynch Mikael Lyngvig +Ilya Lyubavski Jeff MacDonald John Machin Andrew I MacIntyre diff --git a/Misc/NEWS.d/3.13.0a6.rst b/Misc/NEWS.d/3.13.0a6.rst index b9cdbc4e146d5a..2740b4f0d967ba 100644 --- a/Misc/NEWS.d/3.13.0a6.rst +++ b/Misc/NEWS.d/3.13.0a6.rst @@ -642,7 +642,7 @@ Also in the corresponding :class:`ipaddress.IPv4Network` and .. nonce: OToJnG .. section: Library -In :mod:`encodings.idna`, any capitalization of the the ACE prefix +In :mod:`encodings.idna`, any capitalization of the ACE prefix (``xn--``) is now acceptable. Patch by Pepijn de Vos and Zackery Spytz. .. diff --git a/Misc/NEWS.d/3.14.0a2.rst b/Misc/NEWS.d/3.14.0a2.rst new file mode 100644 index 00000000000000..7384ce54cb8914 --- /dev/null +++ b/Misc/NEWS.d/3.14.0a2.rst @@ -0,0 +1,1762 @@ +.. date: 2024-11-16-22-08-41 +.. gh-issue: 126911 +.. nonce: HchCZZ +.. release date: 2024-11-19 +.. section: Windows + +Update credits command output. + +.. + +.. date: 2024-11-12-22-31-13 +.. gh-issue: 118973 +.. nonce: _lfxW6 +.. section: Windows + +Ensures the experimental free-threaded install includes the ``_tkinter`` +module. The optional Tcl/Tk component must also be installed in order for +the module to work. + +.. + +.. date: 2024-11-07-20-42-31 +.. gh-issue: 126497 +.. nonce: EARpd- +.. section: Windows + +Fixes venv failure due to missing redirector executables in experimental +free-threaded installs. + +.. + +.. date: 2024-10-29-20-09-52 +.. gh-issue: 126074 +.. nonce: 83ZzZs +.. section: Windows + +Removed unnecessary DLLs from Windows embeddable package + +.. + +.. date: 2024-10-29-19-48-03 +.. gh-issue: 125315 +.. nonce: jdB9qN +.. section: Windows + +Avoid crashing in :mod:`platform` due to slow WMI calls on some Windows +machines. + +.. + +.. date: 2024-10-29-09-39-06 +.. gh-issue: 126084 +.. nonce: 3wAL8o +.. section: Windows + +Fix venvwlauncher to launch pythonw instead of python so no extra console +window is created. + +.. + +.. date: 2024-10-23-17-24-23 +.. gh-issue: 125842 +.. nonce: m3EF9E +.. section: Windows + +Fix a :exc:`SystemError` when :func:`sys.exit` is called with ``0xffffffff`` +on Windows. + +.. + +.. date: 2024-10-15-21-28-43 +.. gh-issue: 125550 +.. nonce: hmGWCP +.. section: Windows + +Enable the :ref:`launcher` to detect Python 3.14 installs from the Windows +Store. + +.. + +.. date: 2024-09-07-15-16-24 +.. gh-issue: 123803 +.. nonce: J9VNQU +.. section: Windows + +All Windows code pages are now supported as "cpXXX" codecs on Windows. + +.. + +.. date: 2024-11-13-22-23-36 +.. gh-issue: 126807 +.. nonce: vpaWuN +.. section: Tools/Demos + +Fix extraction warnings in :program:`pygettext.py` caused by mistaking +function definitions for function calls. + +.. + +.. date: 2024-10-30-13-59-07 +.. gh-issue: 126167 +.. nonce: j5cCWE +.. section: Tools/Demos + +The iOS testbed was modified so that it can be used by third-party projects +for testing purposes. + +.. + +.. date: 2024-11-17-16-56-48 +.. gh-issue: 126909 +.. nonce: 60VTxW +.. section: Tests + +Fix test_os extended attribute tests to work on filesystems with 1 KiB xattr +size limit. + +.. + +.. date: 2024-10-21-14-10-56 +.. gh-issue: 125730 +.. nonce: kcWbvI +.. section: Tests + +Change ``make test`` to not run GUI tests by default. Use ``make ci`` to run +tests with GUI tests instead. + +.. + +.. date: 2024-09-30-22-52-44 +.. 
gh-issue: 124295 +.. nonce: VZy5kx +.. section: Tests + +Add translation tests to the :mod:`argparse` module. + +.. + +.. date: 2024-11-13-11-09-12 +.. gh-issue: 126623 +.. nonce: TO7NnR +.. section: Security + +Upgrade libexpat to 2.6.4 + +.. + +.. date: 2024-11-18-15-33-25 +.. gh-issue: 85957 +.. nonce: 8gT3B- +.. section: Library + +Add missing MIME types for images with RFCs: emf, fits, g3fax, jp2, jpm, +jpx, t38, tiff-fx and wmf. Patch by Hugo van Kemenade. + +.. + +.. date: 2024-11-17-01-14-59 +.. gh-issue: 126920 +.. nonce: s8-f_L +.. section: Library + +Fix the ``prefix`` and ``exec_prefix`` keys from +:py:func:`sysconfig.get_config_vars` incorrectly having the same value as +:py:const:`sys.base_prefix` and :py:const:`sys.base_exec_prefix`, +respectively, inside virtual environments. They now accurately reflect +:py:const:`sys.prefix` and :py:const:`sys.exec_prefix`. + +.. + +.. date: 2024-11-14-22-25-49 +.. gh-issue: 67877 +.. nonce: G9hw0w +.. section: Library + +Fix memory leaks when :mod:`regular expression <re>` matching terminates +abruptly, either because of a signal or because memory allocation fails. + +.. + +.. date: 2024-11-14-13-16-20 +.. gh-issue: 125063 +.. nonce: kJ-WnH +.. section: Library + +:mod:`marshal` now supports :class:`slice` objects. The marshal format +version was increased to 5. + +.. + +.. date: 2024-11-13-22-25-57 +.. gh-issue: 126789 +.. nonce: lKzlc7 +.. section: Library + +Fixed the values of :py:func:`sysconfig.get_config_vars`, +:py:func:`sysconfig.get_paths`, and their siblings when the :py:mod:`site` +initialization happens after :py:mod:`sysconfig` has built a cache for +:py:func:`sysconfig.get_config_vars`. + +.. + +.. date: 2024-11-13-20-03-18 +.. gh-issue: 126188 +.. nonce: RJLKk- +.. section: Library + +Update bundled pip to 24.3.1 + +.. + +.. date: 2024-11-12-21-43-12 +.. gh-issue: 126766 +.. nonce: oi2KJ7 +.. section: Library + +Fix issue where :func:`urllib.request.url2pathname` failed to discard two +leading slashes introducing an empty authority section. + +.. + +.. date: 2024-11-11-14-52-21 +.. gh-issue: 126705 +.. nonce: 0W7jFW +.. section: Library + +Allow :class:`os.PathLike` to be a base for Protocols. + +.. + +.. date: 2024-11-11-13-24-22 +.. gh-issue: 126699 +.. nonce: ONGbMd +.. section: Library + +Allow :class:`collections.abc.AsyncIterator` to be a base for Protocols. + +.. + +.. date: 2024-11-11-13-00-21 +.. gh-issue: 126654 +.. nonce: 4gfP2y +.. section: Library + +Fix a crash when a non-dict was passed to several functions in the ``_interpreters`` +module. + +.. + +.. date: 2024-11-10-18-14-51 +.. gh-issue: 104745 +.. nonce: zAa5Ke +.. section: Library + +Disallow starting a patcher (from :func:`unittest.mock.patch` or +:func:`unittest.mock.patch.object`) more than once without stopping it. + +.. + +.. date: 2024-11-09-10-31-10 +.. gh-issue: 126595 +.. nonce: A-7MyC +.. section: Library + +Fix a crash when instantiating :class:`itertools.count` with an initial +count of :data:`sys.maxsize` on debug builds. Patch by Bénédikt Tran. + +.. + +.. date: 2024-11-08-17-05-10 +.. gh-issue: 120423 +.. nonce: 7rdLVV +.. section: Library + +Fix issue where :func:`urllib.request.pathname2url` mishandled Windows paths +with embedded forward slashes. + +.. + +.. date: 2024-11-08-11-06-14 +.. gh-issue: 126565 +.. nonce: dFFO22 +.. section: Library + +Improve the performance of :meth:`zipfile.Path.open` for non-reading modes. + +.. + +.. date: 2024-11-07-22-41-47 +.. gh-issue: 126505 +.. nonce: iztYE1 +..
section: Library + +Fix bugs in compiling case-insensitive :mod:`regular expressions <re>` with +character classes containing non-BMP characters: an upper-case non-BMP +character was ignored, and the ASCII flag was ignored when matching a +character range whose upper bound is beyond the BMP region. + +.. + +.. date: 2024-11-07-01-40-11 +.. gh-issue: 117378 +.. nonce: o9O5uM +.. section: Library + +Fixed the :mod:`multiprocessing` ``"forkserver"`` start method forkserver +process to correctly inherit the parent's :data:`sys.path` during the +importing of :func:`multiprocessing.set_forkserver_preload` modules in the +same manner as :data:`sys.path` is configured in workers before executing +work items. + +This bug caused some forkserver module preloading to silently fail to +preload. This manifested as a performance degradation in child processes when +the ``sys.path`` was required due to additional repeated work in every +worker. + +It could also have a side effect of ``""`` remaining in :data:`sys.path` +during forkserver preload imports instead of the absolute path from +:func:`os.getcwd` at multiprocessing import time used in the worker +``sys.path``. + +The ``sys.path`` differences between phases in the child process could +potentially have caused preload to import incorrect things from the wrong +location. We are unaware of that actually having happened in practice. + +.. + +.. date: 2024-11-06-23-40-28 +.. gh-issue: 125679 +.. nonce: Qq9xF5 +.. section: Library + +The :class:`multiprocessing.Lock` and :class:`multiprocessing.RLock` +``repr`` values no longer say "unknown" on macOS. + +.. + +.. date: 2024-11-06-18-30-50 +.. gh-issue: 126476 +.. nonce: F1wh3c +.. section: Library + +Raise :class:`calendar.IllegalMonthError` (now a subclass of +:class:`IndexError`) for :func:`calendar.month` when the input month is not +correct. + +.. + +.. date: 2024-11-06-13-41-38 +.. gh-issue: 126489 +.. nonce: toaf-0 +.. section: Library + +The Python implementation of :mod:`pickle` no longer calls +:meth:`pickle.Pickler.persistent_id` for the result of +:meth:`!persistent_id`. + +.. + +.. date: 2024-11-05-11-28-45 +.. gh-issue: 126451 +.. nonce: XJMtqz +.. section: Library + +Register the :class:`contextvars.Context` type to +:class:`collections.abc.Mapping`. + +.. + +.. date: 2024-11-05-09-54-49 +.. gh-issue: 126175 +.. nonce: spnjJr +.. section: Library + +Add ``msg``, ``doc``, ``pos``, ``lineno`` and ``colno`` attributes to +:exc:`tomllib.TOMLDecodeError`. Deprecate instantiating with free-form +arguments. + +.. + +.. date: 2024-11-04-22-53-09 +.. gh-issue: 89416 +.. nonce: YVQaas +.. section: Library + +Add :rfc:`9559` MIME types for Matroska audiovisual container formats. Patch +by Hugo van Kemenade. + +.. + +.. date: 2024-11-04-16-40-02 +.. gh-issue: 126417 +.. nonce: OWPqn0 +.. section: Library + +Register the :class:`!multiprocessing.managers.DictProxy` and +:class:`!multiprocessing.managers.ListProxy` types in +:mod:`multiprocessing.managers` to :class:`collections.abc.MutableMapping` +and :class:`collections.abc.MutableSequence`, respectively. + +.. + +.. date: 2024-11-04-13-16-18 +.. gh-issue: 126390 +.. nonce: Cxvqa5 +.. section: Library + +Add support for returning intermixed options and non-option arguments in +order in :func:`getopt.gnu_getopt`. + +.. + +.. date: 2024-11-03-23-25-07 +.. gh-issue: 126374 +.. nonce: Xu_THP +.. section: Library + +Add support for options with optional arguments in the :mod:`getopt` module. + +.. + +.. date: 2024-11-03-14-43-51 +.. gh-issue: 126363 +..
nonce: Xus7vU +.. section: Library + +Speed up pattern parsing in :meth:`pathlib.Path.glob` by skipping creation +of a :class:`pathlib.Path` object for the pattern. + +.. + +.. date: 2024-11-03-10-48-07 +.. gh-issue: 126353 +.. nonce: ChDzot +.. section: Library + +:func:`asyncio.get_event_loop` no longer implicitly creates an event +loop. It now raises a :exc:`RuntimeError` if there is no event loop set. +Patch by Kumar Aditya. + +.. + +.. date: 2024-11-03-09-42-42 +.. gh-issue: 126313 +.. nonce: EFP6Dl +.. section: Library + +Fix an issue in :func:`curses.napms` when :func:`curses.initscr` has not yet +been called. Patch by Bénédikt Tran. + +.. + +.. date: 2024-11-02-19-20-44 +.. gh-issue: 126303 +.. nonce: yVvyWB +.. section: Library + +Fix pickling and copying of :class:`os.sched_param` objects. + +.. + +.. date: 2024-11-01-14-31-41 +.. gh-issue: 126138 +.. nonce: yTniOG +.. section: Library + +Fix a use-after-free crash on :class:`asyncio.Task` objects whose underlying +coroutine yields an object that implements an evil +:meth:`~object.__getattribute__`. Patch by Nico Posada. + +.. + +.. date: 2024-11-01-10-35-49 +.. gh-issue: 120057 +.. nonce: YWy81Q +.. section: Library + +Replace the ``os.environ.refresh()`` method with a new +:func:`os.reload_environ` function. Patch by Victor Stinner. + +.. + +.. date: 2024-10-31-14-06-28 +.. gh-issue: 126220 +.. nonce: uJAJCU +.. section: Library + +Fix crash in :class:`!cProfile.Profile` and :class:`!_lsprof.Profiler` when +their callbacks were directly called with 0 arguments. + +.. + +.. date: 2024-10-30-23-59-36 +.. gh-issue: 126212 +.. nonce: _9uYjT +.. section: Library + +Fix issue where :func:`urllib.request.pathname2url` and +:func:`~urllib.request.url2pathname` removed slashes from Windows DOS drive +paths and URLs. + +.. + +.. date: 2024-10-30-23-42-44 +.. gh-issue: 126223 +.. nonce: k2qooc +.. section: Library + +Raise a :exc:`UnicodeEncodeError` instead of a :exc:`SystemError` upon +calling :func:`!_interpreters.create` with an invalid Unicode character. + +.. + +.. date: 2024-10-30-20-45-17 +.. gh-issue: 126205 +.. nonce: CHEmtx +.. section: Library + +Fix issue where :func:`urllib.request.pathname2url` generated URLs beginning +with four slashes (rather than two) when given a Windows UNC path. + +.. + +.. date: 2024-10-30-00-12-22 +.. gh-issue: 126156 +.. nonce: BOSqv0 +.. section: Library + +Improved performance of creating :py:class:`~http.cookies.Morsel` objects +by a factor of 3.8x. + +.. + +.. date: 2024-10-29-11-45-44 +.. gh-issue: 126105 +.. nonce: cOL-R6 +.. section: Library + +Fix a crash in :mod:`ast` when the :attr:`ast.AST._fields` attribute is +deleted. + +.. + +.. date: 2024-10-29-10-58-52 +.. gh-issue: 126106 +.. nonce: rlF798 +.. section: Library + +Fixes a possible ``NULL`` pointer dereference in :mod:`ssl`. + +.. + +.. date: 2024-10-29-10-38-28 +.. gh-issue: 126080 +.. nonce: qKRBuo +.. section: Library + +Fix a use-after-free crash on :class:`asyncio.Task` objects for which the +underlying event loop implements an evil :meth:`~object.__getattribute__`. +Reported by Nico-Posada. Patch by Bénédikt Tran. + +.. + +.. date: 2024-10-29-07-24-52 +.. gh-issue: 125322 +.. nonce: sstOM- +.. section: Library + +Correct detection of complex number support in libffi. + +.. + +.. date: 2024-10-28-22-35-22 +.. gh-issue: 126083 +.. nonce: TuI--n +.. section: Library + +Fixed a reference leak in :class:`asyncio.Task` objects when reinitializing +the same object with a non-``None`` context. Patch by Nico Posada. + +.. + +..
date: 2024-10-28-11-33-59 +.. gh-issue: 126068 +.. nonce: Pdznm_ +.. section: Library + +Fix exceptions in the :mod:`argparse` module so that only error messages for +ArgumentError and ArgumentTypeError are now translated. ArgumentError is now +only used for command line errors, not for logical errors in the program. +TypeError is now raised instead of ValueError for some logical errors. + +.. + +.. date: 2024-10-28-01-24-52 +.. gh-issue: 125413 +.. nonce: Jat5kq +.. section: Library + +Add the :meth:`pathlib.Path.scandir` method to efficiently fetch directory +children and their file attributes. This is a trivial wrapper of +:func:`os.scandir`. + +.. + +.. date: 2024-10-26-12-50-48 +.. gh-issue: 125984 +.. nonce: d4vp5_ +.. section: Library + +Fix use-after-free crashes on :class:`asyncio.Future` objects for which the +underlying event loop implements an evil :meth:`~object.__getattribute__`. +Reported by Nico-Posada. Patch by Bénédikt Tran. + +.. + +.. date: 2024-10-25-20-52-15 +.. gh-issue: 125926 +.. nonce: pp8rtZ +.. section: Library + +Fix :func:`urllib.parse.urljoin` for a base URI with undefined authority. +Although :rfc:`3986` only specifies reference resolution for an absolute base +URI, :func:`!urljoin` should continue to return a sensible result for a relative +base URI. + +.. + +.. date: 2024-10-25-11-13-24 +.. gh-issue: 125969 +.. nonce: YvbrTr +.. section: Library + +Fix an out-of-bounds crash when an evil :meth:`asyncio.loop.call_soon` +mutates the length of the internal callbacks list. Patch by Bénédikt Tran. + +.. + +.. date: 2024-10-25-10-53-56 +.. gh-issue: 125966 +.. nonce: eOCYU_ +.. section: Library + +Fix a use-after-free crash in :meth:`asyncio.Future.remove_done_callback`. +Patch by Bénédikt Tran. + +.. + +.. date: 2024-10-24-14-08-10 +.. gh-issue: 125789 +.. nonce: eaiAMw +.. section: Library + +Fix a possible crash when mutating the list of callbacks returned by +:attr:`!asyncio.Future._callbacks`. It now always returns a new copy in the C +implementation :mod:`!_asyncio`. Patch by Kumar Aditya. + +.. + +.. date: 2024-10-24-13-40-20 +.. gh-issue: 126916 +.. nonce: MAgz6D +.. section: Library + +Allow the *initial* parameter of :func:`functools.reduce` to be passed as a +keyword argument. Patch by Sayandip Dutta. + +.. + +.. date: 2024-10-24-10-49-47 +.. gh-issue: 124452 +.. nonce: eqTRgx +.. section: Library + +Fix an issue in :meth:`email.policy.EmailPolicy.header_source_parse` and +:meth:`email.policy.Compat32.header_source_parse` that introduced spurious +leading whitespaces into header values when the header includes a newline +character after the header name delimiter (``:``) and before the value. + +.. + +.. date: 2024-10-23-20-44-30 +.. gh-issue: 117941 +.. nonce: Y9jdlW +.. section: Library + +:class:`!argparse.BooleanOptionalAction` now rejects option names starting +with ``--no-``. + +.. + +.. date: 2024-10-23-17-45-40 +.. gh-issue: 125884 +.. nonce: 41E_PD +.. section: Library + +Fixed a bug in :mod:`pdb` where it couldn't set breakpoints on functions +with certain annotations. + +.. + +.. date: 2024-10-22-13-28-00 +.. gh-issue: 125355 +.. nonce: zssHm_ +.. section: Library + +Fix several bugs in :meth:`argparse.ArgumentParser.parse_intermixed_args`. + +* The parser no longer changes temporarily during parsing. +* Default values are not processed twice. +* Required mutually exclusive groups containing positional arguments are now supported. +* The missing arguments report now includes the names of all required optional and positional arguments.
+* Unknown options can be intermixed with positional arguments in parse_known_intermixed_args(). + +.. + +.. date: 2024-10-21-13-52-37 +.. gh-issue: 125767 +.. nonce: 0kK4lX +.. section: Library + +:class:`super` objects are now :mod:`pickleable <pickle>` and :mod:`copyable +<copy>`. + +.. + +.. date: 2024-10-21-12-06-55 +.. gh-issue: 124969 +.. nonce: xiY8UP +.. section: Library + +``locale.nl_langinfo(locale.ALT_DIGITS)`` now returns a string again. The +returned value consists of up to 100 semicolon-separated symbols. + +.. + +.. date: 2024-10-20-00-56-44 +.. gh-issue: 84850 +.. nonce: p5TeUB +.. section: Library + +Remove :class:`!URLopener` and :class:`!FancyURLopener` classes from +:mod:`urllib.request`. They had previously raised :exc:`DeprecationWarning` +since Python 3.3. + +.. + +.. date: 2024-10-19-16-06-52 +.. gh-issue: 125666 +.. nonce: jGfdCP +.. section: Library + +Avoid exiting the interpreter if a null byte is given as input in the +new REPL. + +.. + +.. date: 2024-10-19-13-37-37 +.. gh-issue: 125710 +.. nonce: FyFAAr +.. section: Library + +[Enum] Fix hashable<->non-hashable comparisons for member values. + +.. + +.. date: 2024-10-19-11-06-06 +.. gh-issue: 125631 +.. nonce: BlhVvR +.. section: Library + +Restore ability to set :attr:`~pickle.Pickler.persistent_id` and +:attr:`~pickle.Unpickler.persistent_load` attributes of instances of the +:class:`!Pickler` and :class:`!Unpickler` classes in the :mod:`pickle` +module. + +.. + +.. date: 2024-10-19-01-30-40 +.. gh-issue: 125378 +.. nonce: WTosxX +.. section: Library + +Fixed a bug in :mod:`pdb` where, after a multi-line command, an empty line +repeated the first line of the multi-line command, instead of the full +command. + +.. + +.. date: 2024-10-18-09-51-29 +.. gh-issue: 125682 +.. nonce: vsj4cU +.. section: Library + +Reject non-ASCII digits in the Python implementation of :func:`json.loads`, +conforming to the JSON specification. + +.. + +.. date: 2024-10-18-08-58-10 +.. gh-issue: 125660 +.. nonce: sDdDqO +.. section: Library + +Reject invalid Unicode escapes in the Python implementation of +:func:`json.loads`. + +.. + +.. date: 2024-10-17-20-36-06 +.. gh-issue: 52551 +.. nonce: EIVNYY +.. section: Library + +Use :c:func:`!wcsftime` to implement :func:`time.strftime` on Windows. + +.. + +.. date: 2024-10-17-16-10-29 +.. gh-issue: 125259 +.. nonce: oMew0c +.. section: Library + +Fix the notes removal logic for errors thrown in enum initialization. + +.. + +.. date: 2024-10-17-04-52-00 +.. gh-issue: 125633 +.. nonce: lMck06 +.. section: Library + +Add function :func:`inspect.ispackage` to determine whether an object is a +:term:`package` or not. + +.. + +.. date: 2024-10-16-22-45-50 +.. gh-issue: 125614 +.. nonce: 3OEo_Q +.. section: Library + +In the :data:`~annotationlib.Format.FORWARDREF` format of +:mod:`annotationlib`, fix a bug where nested expressions were not returned as +:class:`annotationlib.ForwardRef` objects. + +.. + +.. date: 2024-10-16-20-32-40 +.. gh-issue: 125590 +.. nonce: stHzOP +.. section: Library + +Allow ``FrameLocalsProxy`` to delete and pop if the key is not a fast +variable. + +.. + +.. date: 2024-10-16-15-55-50 +.. gh-issue: 125600 +.. nonce: yMsJx0 +.. section: Library + +Only show the stale code warning in :mod:`pdb` when displaying source code. + +.. + +.. date: 2024-10-16-04-50-53 +.. gh-issue: 125542 +.. nonce: vZJ-Ns +.. section: Library + +Deprecate passing the keyword-only *prefix_chars* argument to +:meth:`argparse.ArgumentParser.add_argument_group`. + +.. + +.. date: 2024-10-15-16-50-03 +..
gh-issue: 125541 +.. nonce: FfhmWo +.. section: Library + +Pressing :kbd:`Ctrl-C` while blocked in :meth:`threading.Lock.acquire`, +:meth:`threading.RLock.acquire`, and :meth:`threading.Thread.join` now +interrupts the function call and raises a :exc:`KeyboardInterrupt` exception +on Windows, similar to how those functions behave on macOS and Linux. + +.. + +.. date: 2024-10-15-14-01-03 +.. gh-issue: 125519 +.. nonce: TqGh6a +.. section: Library + +Improve the traceback if :func:`importlib.reload` is called with an object that +is not a module. Patch by Alex Waygood. + +.. + +.. date: 2024-10-14-17-29-34 +.. gh-issue: 125451 +.. nonce: fmP3T9 +.. section: Library + +Fix a deadlock when :class:`concurrent.futures.ProcessPoolExecutor` shuts down +concurrently with an error when feeding a job to a worker process. + +.. + +.. date: 2024-10-14-02-07-44 +.. gh-issue: 125115 +.. nonce: IOf3ON +.. section: Library + +Fixed a bug in :mod:`pdb` where arguments starting with ``-`` couldn't be +passed to the debugged script. + +.. + +.. date: 2024-10-13-15-04-58 +.. gh-issue: 125398 +.. nonce: UW7Ndv +.. section: Library + +Fix the conversion of the :envvar:`!VIRTUAL_ENV` path in the activate script +in :mod:`venv` when running in Git Bash for Windows. + +.. + +.. date: 2024-10-11-00-40-13 +.. gh-issue: 125245 +.. nonce: 8vReM- +.. section: Library + +Fix a race condition when importing :mod:`collections.abc`, which could +incorrectly return an empty module. + +.. + +.. date: 2024-10-09-17-07-33 +.. gh-issue: 52551 +.. nonce: PBakSY +.. section: Library + +Fix encoding issues in :func:`time.strftime`, the +:meth:`~datetime.datetime.strftime` method of the :mod:`datetime` classes +:class:`~datetime.datetime`, :class:`~datetime.date` and +:class:`~datetime.time` and formatting of these classes. Characters not +encodable in the current locale are now acceptable in the format string. +Surrogate pairs and sequences of surrogateescape-encoded bytes are no longer +recombined. An embedded null character no longer terminates the format +string. + +.. + +.. date: 2024-10-04-22-43-48 +.. gh-issue: 124984 +.. nonce: xjMv9b +.. section: Library + +Fixed thread safety in :mod:`ssl` in the free-threaded build. OpenSSL +operations are now protected by a per-object lock. + +.. + +.. date: 2024-09-28-02-03-04 +.. gh-issue: 124651 +.. nonce: bLBGtH +.. section: Library + +Properly quote template strings in :mod:`venv` activation scripts. + +.. + +.. date: 2024-09-27-15-42-55 +.. gh-issue: 124694 +.. nonce: uUy32y +.. section: Library + +We've added :class:`concurrent.futures.InterpreterPoolExecutor`, which +allows you to run code in multiple isolated interpreters. This allows you +to circumvent the limitations of CPU-bound threads (due to the GIL). Patch +by Eric Snow. + +This addition is unrelated to :pep:`734`. + +.. + +.. date: 2024-09-27-13-10-17 +.. gh-issue: 58032 +.. nonce: 0aNAQ0 +.. section: Library + +Deprecate the :class:`argparse.FileType` type converter. + +.. + +.. date: 2024-09-24-18-49-16 +.. gh-issue: 99749 +.. nonce: gBDJX7 +.. section: Library + +Adds a feature to optionally enable suggestions for argument choices and +subparser names if mistyped by the user. + +.. + +.. date: 2024-09-24-18-16-59 +.. gh-issue: 58956 +.. nonce: 0wFrBR +.. section: Library + +Fixed a bug in :mod:`pdb` where sometimes the breakpoint wouldn't trigger if it +was set on a function which was already in the call stack. + +.. + +.. date: 2024-09-17-10-38-26 +.. gh-issue: 124111 +.. nonce: Hd53VN +..
section: Library + +The tkinter module can now be built to use either the new version 9.0.0 of +Tcl/Tk or the latest release 8.6.15 of Tcl/Tk 8. Tcl/Tk 9 includes many +improvements, both to the Tcl language and to the appearance and utility of +the graphical user interface provided by Tk. + +.. + +.. date: 2024-09-07-13-57-49 +.. gh-issue: 80958 +.. nonce: fVYnqV +.. section: Library + +unittest discovery supports PEP 420 namespace packages as a start directory +again. + +.. + +.. date: 2024-08-28-19-27-35 +.. gh-issue: 123370 +.. nonce: SPZ9Ux +.. section: Library + +Fix the canvas not clearing after running the turtledemo clock. + +.. + +.. date: 2024-08-22-12-12-35 +.. gh-issue: 89083 +.. nonce: b6zFh0 +.. section: Library + +Add :func:`uuid.uuid8` for generating UUIDv8 objects as specified in +:rfc:`9562`. Patch by Bénédikt Tran. + +.. + +.. date: 2024-08-01-11-15-55 +.. gh-issue: 122549 +.. nonce: ztV4Kz +.. section: Library + +Add :func:`platform.invalidate_caches` to invalidate cached results. + +.. + +.. date: 2024-07-23-02-24-50 +.. gh-issue: 120754 +.. nonce: nHb5mG +.. section: Library + +Update unbounded ``read`` calls in :mod:`zipfile` to specify an explicit +``size``, putting a limit on how much data they may read. This also updates +handling around the ZIP max comment size to match the standard instead of +reading comments that are one byte too long. + +.. + +.. date: 2024-07-02-15-56-42 +.. gh-issue: 121267 +.. nonce: yFBWkh +.. section: Library + +Improve the performance of :mod:`tarfile` when writing files, by caching +user names and group names. + +.. + +.. date: 2024-06-06-04-06-05 +.. gh-issue: 70764 +.. nonce: 6511hw +.. section: Library + +Fixed an issue where :func:`inspect.getclosurevars` would incorrectly +classify an attribute name as a global variable when the name exists both as +an attribute name and a global variable. + +.. + +.. date: 2024-06-05-19-09-36 +.. gh-issue: 118289 +.. nonce: moL9_d +.. section: Library + +:func:`!posixpath.realpath` now raises :exc:`NotADirectoryError` when +*strict* mode is enabled and a non-directory path with a trailing slash is +supplied. + +.. + +.. date: 2024-06-02-11-48-19 +.. gh-issue: 119826 +.. nonce: N1obGa +.. section: Library + +Always return an absolute path for :func:`os.path.abspath` on Windows. + +.. + +.. date: 2024-05-28-14-35-23 +.. gh-issue: 97850 +.. nonce: dCtjel +.. section: Library + +Remove deprecated :func:`!pkgutil.get_loader` and +:func:`!pkgutil.find_loader`. + +.. + +.. date: 2024-05-13-10-09-41 +.. gh-issue: 118986 +.. nonce: -r4W9h +.. section: Library + +Add :data:`!socket.IPV6_RECVERR` constant (available since Linux 2.2). + +.. + +.. date: 2024-03-16-13-38-27 +.. gh-issue: 116897 +.. nonce: UDQTjp +.. section: Library + +Accepting objects with false values (like ``0`` and ``[]``) except empty +strings, byte-like objects and ``None`` in :mod:`urllib.parse` functions +:func:`~urllib.parse.parse_qsl` and :func:`~urllib.parse.parse_qs` is now +deprecated. + +.. + +.. date: 2023-10-26-16-36-22 +.. gh-issue: 101955 +.. nonce: Ixu3IF +.. section: Library + +Fix a SystemError when matching a regular expression pattern containing some +combination of possessive quantifiers, alternation and capture groups. + +.. + +.. date: 2022-10-15-10-18-20 +.. gh-issue: 71936 +.. nonce: MzJjc_ +.. section: Library + +Fix a race condition in :class:`multiprocessing.pool.Pool`. + +.. + +.. bpo: 46128 +.. date: 2021-12-19-10-47-24 +.. nonce: Qv3EK1 +..
section: Library + +Strip :class:`unittest.IsolatedAsyncioTestCase` stack frames from reported +stacktraces. + +.. + +.. date: 2020-05-19-01-12-47 +.. gh-issue: 84852 +.. nonce: FEjHJW +.. section: Library + +Add MIME types for MS Embedded OpenType, OpenType Layout, TrueType, WOFF 1.0 +and 2.0 fonts. Patch by Sahil Prajapati and Hugo van Kemenade. + +.. + +.. date: 2024-11-09-19-43-10 +.. gh-issue: 126622 +.. nonce: YacfDc +.. section: Documentation + +Added stub pages for removed modules explaining their removal, where to find +replacements, and linking to the last Python version that supported them. +Contributed by Ned Batchelder. + +.. + +.. date: 2024-10-10-23-46-54 +.. gh-issue: 125277 +.. nonce: QAby09 +.. section: Documentation + +Require Sphinx 7.2.6 or later to build the Python documentation. Patch by +Adam Turner. + +.. + +.. date: 2023-03-28-22-24-45 +.. gh-issue: 60712 +.. nonce: So5uad +.. section: Documentation + +Include the :class:`object` type in the lists of documented types. Change by +Furkan Onder and Martin Panter. + +.. + +.. date: 2024-11-13-17-18-13 +.. gh-issue: 126795 +.. nonce: _JBX9e +.. section: Core and Builtins + +Increase the threshold for JIT code warmup. Depending on platform and +workload, this can result in performance gains of 1-9% and memory savings of +3-5%. + +.. + +.. date: 2024-11-12-19-24-00 +.. gh-issue: 126341 +.. nonce: 5SdAe1 +.. section: Core and Builtins + +Now :exc:`ValueError` is raised instead of :exc:`SystemError` when trying to +iterate over a released :class:`memoryview` object. + +.. + +.. date: 2024-11-11-17-02-48 +.. gh-issue: 126688 +.. nonce: QiOXUi +.. section: Core and Builtins + +Fix a crash when calling :func:`os.fork` on some operating systems, +including SerenityOS. + +.. + +.. date: 2024-11-09-16-10-22 +.. gh-issue: 126066 +.. nonce: 9zs4m4 +.. section: Core and Builtins + +Fix :mod:`importlib` to not write incomplete ``.pyc`` files when a ulimit or +some other operating system mechanism prevents the write from going through +fully. + +.. + +.. date: 2024-11-06-16-34-11 +.. gh-issue: 126222 +.. nonce: 9NBfTn +.. section: Core and Builtins + +Do not include the count of "peek" items in ``_PyUop_num_popped``. This ensures +that the correct number of items are popped from the stack when a micro-op +exits with an error. + +.. + +.. date: 2024-11-03-15-15-36 +.. gh-issue: 126366 +.. nonce: 8BBdGU +.. section: Core and Builtins + +Fix a crash when using ``yield from`` on an object that raises an exception in +its ``__iter__``. + +.. + +.. date: 2024-11-02-18-01-31 +.. gh-issue: 126209 +.. nonce: 2ZIhrS +.. section: Core and Builtins + +Fix an issue with the ``skip_file_prefixes`` parameter which resulted in +inconsistent behaviour between the C and Python implementations of +:func:`warnings.warn`. Patch by Daehee Kim. + +.. + +.. date: 2024-11-02-14-43-46 +.. gh-issue: 126312 +.. nonce: LMHzLT +.. section: Core and Builtins + +Fix a crash during garbage collection on an object frozen by :func:`gc.freeze` +on the free-threaded build. + +.. + +.. date: 2024-11-01-09-58-06 +.. gh-issue: 103951 +.. nonce: 6qduwj +.. section: Core and Builtins + +Relax optimization requirements to allow fast attribute access to module +subclasses. + +.. + +.. date: 2024-10-31-21-49-00 +.. gh-issue: 126072 +.. nonce: o9k8Ns +.. section: Core and Builtins + +Following :gh:`126101`, for :ref:`codeobjects` like lambdas, annotations and +type aliases, we no longer add ``None`` to their :attr:`~codeobject.co_consts`. + +.. + +.. date: 2024-10-30-18-16-10 +..
gh-issue: 126195 +.. nonce: 6ezBpr +.. section: Core and Builtins + +Improve JIT performance by 1.4% on macOS Apple Silicon by using +platform-specific memory protection APIs. Patch by Diego Russo. + +.. + +.. date: 2024-10-29-15-17-31 +.. gh-issue: 126139 +.. nonce: B4OQ8a +.. section: Core and Builtins + +Provide better error location when attempting to use a :term:`future +statement <__future__>` with an unknown future feature. + +.. + +.. date: 2024-10-29-10-37-39 +.. gh-issue: 126072 +.. nonce: XLKlxv +.. section: Core and Builtins + +Add a new attribute in :attr:`~codeobject.co_flags` to indicate whether the +first item in :attr:`~codeobject.co_consts` is the docstring. If a code +object has no docstring, ``None`` will **NOT** be inserted. + +.. + +.. date: 2024-10-28-13-18-16 +.. gh-issue: 126076 +.. nonce: MebZuS +.. section: Core and Builtins + +Relocated objects such as ``tuple``, ``bytes`` and ``str`` objects are +properly tracked by :mod:`tracemalloc` and its associated hooks. Patch by +Pablo Galindo. + +.. + +.. date: 2024-10-27-20-31-43 +.. gh-issue: 90370 +.. nonce: IP_W3a +.. section: Core and Builtins + +Avoid temporary tuple creation for vararg in argument passing with Argument +Clinic generated code (if arguments either vararg or positional-only). + +.. + +.. date: 2024-10-26-23-50-03 +.. gh-issue: 126018 +.. nonce: Hq-qcM +.. section: Core and Builtins + +Fix a crash in :func:`sys.audit` when passing a non-string as first argument +and Python was compiled in debug mode. + +.. + +.. date: 2024-10-26-13-32-48 +.. gh-issue: 126012 +.. nonce: 2KalhG +.. section: Core and Builtins + +The :class:`memoryview` type now supports subscription, making it a +:term:`generic type`. + +.. + +.. date: 2024-10-25-15-56-14 +.. gh-issue: 125837 +.. nonce: KlCdgD +.. section: Core and Builtins + +Adds :opcode:`LOAD_SMALL_INT` and :opcode:`LOAD_CONST_IMMORTAL` +instructions. ``LOAD_SMALL_INT`` pushes a small integer equal to the +``oparg`` to the stack. ``LOAD_CONST_IMMORTAL`` does the same as +``LOAD_CONST`` but is more efficient for immortal objects. Removes +``RETURN_CONST`` instruction. + +.. + +.. date: 2024-10-24-22-43-03 +.. gh-issue: 125942 +.. nonce: 3UQht1 +.. section: Core and Builtins + +On Android, the ``errors`` setting of :any:`sys.stdout` was changed from +``surrogateescape`` to ``backslashreplace``. + +.. + +.. date: 2024-10-23-14-42-27 +.. gh-issue: 125859 +.. nonce: m3EF9E +.. section: Core and Builtins + +Fix a crash in the free threading build when :func:`gc.get_objects` or +:func:`gc.get_referrers` is called during an in-progress garbage collection. + +.. + +.. date: 2024-10-23-14-05-47 +.. gh-issue: 125868 +.. nonce: uLfXYB +.. section: Core and Builtins + +It was possible in 3.14.0a1 only for attribute lookup to give the wrong +value. This was due to an incorrect specialization in very specific +circumstances. This is fixed in 3.14.0a2. + +.. + +.. date: 2024-10-22-04-18-53 +.. gh-issue: 125498 +.. nonce: cFjPIn +.. section: Core and Builtins + +The JIT has been updated to leverage Clang 19’s new ``preserve_none`` +attribute, which supports more platforms and is more useful than LLVM's +existing ``ghccc`` calling convention. This also removes the need to +manually patch the calling convention in LLVM IR, simplifying the JIT +compilation process. + +.. + +.. date: 2024-10-18-16-00-10 +.. gh-issue: 125703 +.. nonce: QRoqMo +.. section: Core and Builtins + +Correctly honour :mod:`tracemalloc` hooks in specialized ``Py_DECREF`` +paths. Patch by Pablo Galindo + +.. + +.. 
date: 2024-10-18-10-11-43 +.. gh-issue: 125593 +.. nonce: Q97m3A +.. section: Core and Builtins + +Use color to highlight error locations in traceback from exception group + +.. + +.. date: 2024-10-16-23-06-06 +.. gh-issue: 125017 +.. nonce: fcltj0 +.. section: Core and Builtins + +Fix crash on certain accesses to the ``__annotations__`` of +:class:`staticmethod` and :class:`classmethod` objects. + +.. + +.. date: 2024-10-16-13-52-48 +.. gh-issue: 125588 +.. nonce: kCahyO +.. section: Core and Builtins + +The Python PEG generator can now use f-strings in the grammar actions. Patch +by Pablo Galindo + +.. + +.. date: 2024-10-16-12-12-39 +.. gh-issue: 125444 +.. nonce: 9tG2X6 +.. section: Core and Builtins + +Fix illegal instruction for older Arm architectures. Patch by Diego Russo, +testing by Ross Burton. + +.. + +.. date: 2024-10-14-17-13-12 +.. gh-issue: 118423 +.. nonce: SkBoda +.. section: Core and Builtins + +Add a new ``INSTRUCTION_SIZE`` macro to the cases generator which returns +the current instruction size. + +.. + +.. date: 2024-10-09-13-53-50 +.. gh-issue: 125038 +.. nonce: ffSLCz +.. section: Core and Builtins + +Fix crash when iterating over a generator expression after direct changes on +``gi_frame.f_locals``. Patch by Mikhail Efimov. + +.. + +.. date: 2024-10-01-17-31-32 +.. gh-issue: 124855 +.. nonce: sdsv_H +.. section: Core and Builtins + +Don't allow the JIT and perf support to be active at the same time. Patch by +Pablo Galindo + +.. + +.. date: 2024-09-14-20-09-39 +.. gh-issue: 123714 +.. nonce: o1mbe4 +.. section: Core and Builtins + +Update JIT compilation to use LLVM 19 + +.. + +.. date: 2024-09-11-01-32-07 +.. gh-issue: 123930 +.. nonce: BkPfB6 +.. section: Core and Builtins + +Improve the error message when a script shadowing a module from the standard +library causes :exc:`ImportError` to be raised during a "from" import. +Similarly, improve the error message when a script shadowing a third party +module attempts to "from" import an attribute from that third party module +while still initialising. + +.. + +.. date: 2024-06-13-19-12-49 +.. gh-issue: 119793 +.. nonce: FDVCDk +.. section: Core and Builtins + +The :func:`map` built-in now has an optional keyword-only *strict* flag like +:func:`zip` to check that all the iterables are of equal length. Patch by +Wannes Boeykens. + +.. + +.. date: 2024-05-12-03-10-36 +.. gh-issue: 118950 +.. nonce: 5Wc4vp +.. section: Core and Builtins + +Fix bug where SSLProtocol.connection_lost wasn't getting called when OSError +was thrown on writing to socket. + +.. + +.. date: 2023-12-30-00-21-45 +.. gh-issue: 113570 +.. nonce: _XQgsW +.. section: Core and Builtins + +Fixed a bug in ``reprlib.repr`` where it incorrectly called the repr method +on shadowed Python built-in types. + +.. + +.. date: 2024-11-07-20-24-58 +.. gh-issue: 126554 +.. nonce: ri12eb +.. section: C API + +Fix error handling in :class:`ctypes.CDLL` objects which could result in a +crash in rare situations. + +.. + +.. date: 2024-10-28-15-56-03 +.. gh-issue: 126061 +.. nonce: Py51_1 +.. section: C API + +Add :c:func:`PyLong_IsPositive`, :c:func:`PyLong_IsNegative` and +:c:func:`PyLong_IsZero` for checking if a :c:type:`PyLongObject` is +positive, negative, or zero, respectively. + +.. + +.. date: 2024-10-16-19-28-23 +.. gh-issue: 125608 +.. nonce: gTsU2g +.. 
section: C API + +Fix a bug where dictionary watchers (e.g., :c:func:`PyDict_Watch`) on an +object's attribute dictionary (:attr:`~object.__dict__`) were not triggered +when the object's attributes were modified. + +.. + +.. date: 2024-09-03-13-33-33 +.. gh-issue: 123619 +.. nonce: HhgUUI +.. section: C API + +Added the :c:func:`PyUnstable_Object_EnableDeferredRefcount` function for +enabling :pep:`703` deferred reference counting. + +.. + +.. date: 2024-07-30-14-40-08 +.. gh-issue: 121654 +.. nonce: tgGeAl +.. section: C API + +Add :c:func:`PyType_Freeze` function to make a type immutable. Patch by +Victor Stinner. + +.. + +.. date: 2024-06-04-13-38-44 +.. gh-issue: 120026 +.. nonce: uhEvJ9 +.. section: C API + +The :c:macro:`!Py_HUGE_VAL` macro is :term:`soft deprecated`. + +.. + +.. date: 2024-11-13-15-47-09 +.. gh-issue: 126691 +.. nonce: ni4K-b +.. section: Build + +Removed the ``--with-emscripten-target`` configure flag. We unified the +``node`` and ``browser`` options and the same build can now be used, +independent of target runtime. + +.. + +.. date: 2024-11-07-11-09-31 +.. gh-issue: 123877 +.. nonce: CVdd0b +.. section: Build + +Use ``wasm32-wasip1`` as the target triple for WASI instead of +``wasm32-wasi``. The latter will eventually be reclaimed for WASI 1.0 while +CPython currently only supports WASI preview1. + +.. + +.. date: 2024-11-06-11-12-04 +.. gh-issue: 126458 +.. nonce: 7vzHtx +.. section: Build + +Disable SIMD support for HACL under WASI. + +.. + +.. date: 2024-11-04-09-42-04 +.. gh-issue: 89640 +.. nonce: QBv05o +.. section: Build + +Hard-code float word ordering as little endian on WASM. + +.. + +.. date: 2024-10-31-15-37-05 +.. gh-issue: 126206 +.. nonce: oC6z2i +.. section: Build + +``make clinic`` now runs Argument Clinic using the ``--force`` option, thus +forcefully regenerating generated code. + +.. + +.. date: 2024-10-30-17-47-15 +.. gh-issue: 126187 +.. nonce: 0jFCZB +.. section: Build + +Introduced ``Tools/wasm/emscripten.py`` to simplify doing Emscripten builds. + +.. + +.. date: 2024-10-25-17-20-50 +.. gh-issue: 124932 +.. nonce: F-aNuS +.. section: Build + +For cross builds, there is now support for having a different install +``prefix`` than the ``host_prefix`` used by ``getpath.py``. This is set to +``/`` by default for Emscripten, on other platforms the default behavior is +the same as before. + +.. + +.. date: 2024-10-25-00-29-15 +.. gh-issue: 125946 +.. nonce: KPA3g0 +.. section: Build + +The minimum supported Android version is now 7.0 (API level 24). + +.. + +.. date: 2024-10-24-22-14-35 +.. gh-issue: 125940 +.. nonce: 2wMtTA +.. section: Build + +The Android build now supports `16 KB page sizes +`__. + +.. + +.. date: 2024-10-16-09-37-51 +.. gh-issue: 89640 +.. nonce: UDsW-j +.. section: Build + +Improve detection of float word ordering on Linux when link-time +optimizations are enabled. + +.. + +.. date: 2024-10-04-17-29-23 +.. gh-issue: 124928 +.. nonce: FsGffe +.. section: Build + +Emscripten builds now require node >= 18. + +.. + +.. date: 2024-03-03-20-28-23 +.. gh-issue: 115382 +.. nonce: 97hJFE +.. section: Build + +Fix cross compile failures when the host and target SOABIs match. 
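The gh-119793 entry in the changelog above adds a keyword-only *strict* flag to ``map()``, mirroring ``zip()``. A minimal usage sketch, illustrative only and not part of this patch::

    # With strict=True, map() raises ValueError if the input iterables
    # have different lengths, matching zip(strict=True).
    names = ["a", "b", "c"]
    values = [1, 2]
    try:
        list(map(lambda n, v: (n, v), names, values, strict=True))
    except ValueError as exc:
        print(exc)  # reports that the iterables have different lengths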
diff --git a/Misc/NEWS.d/next/Build/2024-03-03-20-28-23.gh-issue-115382.97hJFE.rst b/Misc/NEWS.d/next/Build/2024-03-03-20-28-23.gh-issue-115382.97hJFE.rst deleted file mode 100644 index f8d19651fc5854..00000000000000 --- a/Misc/NEWS.d/next/Build/2024-03-03-20-28-23.gh-issue-115382.97hJFE.rst +++ /dev/null @@ -1 +0,0 @@ -Fix cross compile failures when the host and target SOABIs match. diff --git a/Misc/NEWS.d/next/Build/2024-10-04-17-29-23.gh-issue-124928.FsGffe.rst b/Misc/NEWS.d/next/Build/2024-10-04-17-29-23.gh-issue-124928.FsGffe.rst deleted file mode 100644 index 291bf336ef225e..00000000000000 --- a/Misc/NEWS.d/next/Build/2024-10-04-17-29-23.gh-issue-124928.FsGffe.rst +++ /dev/null @@ -1 +0,0 @@ -Emscripten builds now require node >= 18. diff --git a/Misc/NEWS.d/next/Build/2024-10-16-09-37-51.gh-issue-89640.UDsW-j.rst b/Misc/NEWS.d/next/Build/2024-10-16-09-37-51.gh-issue-89640.UDsW-j.rst deleted file mode 100644 index 5aba2c789b6842..00000000000000 --- a/Misc/NEWS.d/next/Build/2024-10-16-09-37-51.gh-issue-89640.UDsW-j.rst +++ /dev/null @@ -1,2 +0,0 @@ -Improve detection of float word ordering on Linux when link-time optimizations -are enabled. diff --git a/Misc/NEWS.d/next/Build/2024-10-24-22-14-35.gh-issue-125940.2wMtTA.rst b/Misc/NEWS.d/next/Build/2024-10-24-22-14-35.gh-issue-125940.2wMtTA.rst deleted file mode 100644 index 2b4c1c95db8806..00000000000000 --- a/Misc/NEWS.d/next/Build/2024-10-24-22-14-35.gh-issue-125940.2wMtTA.rst +++ /dev/null @@ -1,2 +0,0 @@ -The Android build now supports `16 KB page sizes -`__. diff --git a/Misc/NEWS.d/next/Build/2024-10-25-00-29-15.gh-issue-125946.KPA3g0.rst b/Misc/NEWS.d/next/Build/2024-10-25-00-29-15.gh-issue-125946.KPA3g0.rst deleted file mode 100644 index ecab57c8111d45..00000000000000 --- a/Misc/NEWS.d/next/Build/2024-10-25-00-29-15.gh-issue-125946.KPA3g0.rst +++ /dev/null @@ -1 +0,0 @@ -The minimum supported Android version is now 7.0 (API level 24). diff --git a/Misc/NEWS.d/next/Build/2024-10-25-17-20-50.gh-issue-124932.F-aNuS.rst b/Misc/NEWS.d/next/Build/2024-10-25-17-20-50.gh-issue-124932.F-aNuS.rst deleted file mode 100644 index 10c4171dc14a64..00000000000000 --- a/Misc/NEWS.d/next/Build/2024-10-25-17-20-50.gh-issue-124932.F-aNuS.rst +++ /dev/null @@ -1,4 +0,0 @@ -For cross builds, there is now support for having a different install -``prefix`` than the ``host_prefix`` used by ``getpath.py``. This is set to ``/`` by -default for Emscripten, on other platforms the default behavior is the same -as before. diff --git a/Misc/NEWS.d/next/Build/2024-10-30-17-47-15.gh-issue-126187.0jFCZB.rst b/Misc/NEWS.d/next/Build/2024-10-30-17-47-15.gh-issue-126187.0jFCZB.rst deleted file mode 100644 index c295a91c2225a3..00000000000000 --- a/Misc/NEWS.d/next/Build/2024-10-30-17-47-15.gh-issue-126187.0jFCZB.rst +++ /dev/null @@ -1 +0,0 @@ -Introduced ``Tools/wasm/emscripten.py`` to simplify doing Emscripten builds. diff --git a/Misc/NEWS.d/next/Build/2024-10-31-15-37-05.gh-issue-126206.oC6z2i.rst b/Misc/NEWS.d/next/Build/2024-10-31-15-37-05.gh-issue-126206.oC6z2i.rst deleted file mode 100644 index 24b172e1747403..00000000000000 --- a/Misc/NEWS.d/next/Build/2024-10-31-15-37-05.gh-issue-126206.oC6z2i.rst +++ /dev/null @@ -1,2 +0,0 @@ -``make clinic`` now runs Argument Clinic using the ``--force`` option, -thus forcefully regenerating generated code. 
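The gh-126916 entry in the changelog above lets ``functools.reduce()`` accept its initial value as a keyword argument. A small sketch, assuming the keyword is spelled ``initial`` as that entry states (illustrative only, not part of this patch)::

    import functools
    import operator

    # Previously the initial value could only be supplied positionally.
    total = functools.reduce(operator.add, [1, 2, 3], initial=10)
    print(total)  # 16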
diff --git a/Misc/NEWS.d/next/Build/2024-11-04-09-42-04.gh-issue-89640.QBv05o.rst b/Misc/NEWS.d/next/Build/2024-11-04-09-42-04.gh-issue-89640.QBv05o.rst deleted file mode 100644 index 4fa44a1d6493b4..00000000000000 --- a/Misc/NEWS.d/next/Build/2024-11-04-09-42-04.gh-issue-89640.QBv05o.rst +++ /dev/null @@ -1 +0,0 @@ -Hard-code float word ordering as little endian on WASM. diff --git a/Misc/NEWS.d/next/Build/2024-11-06-11-12-04.gh-issue-126458.7vzHtx.rst b/Misc/NEWS.d/next/Build/2024-11-06-11-12-04.gh-issue-126458.7vzHtx.rst deleted file mode 100644 index cc06dd8a30e30a..00000000000000 --- a/Misc/NEWS.d/next/Build/2024-11-06-11-12-04.gh-issue-126458.7vzHtx.rst +++ /dev/null @@ -1 +0,0 @@ -Disable SIMD support for HACL under WASI. diff --git a/Misc/NEWS.d/next/Build/2024-11-07-11-09-31.gh-issue-123877.CVdd0b.rst b/Misc/NEWS.d/next/Build/2024-11-07-11-09-31.gh-issue-123877.CVdd0b.rst deleted file mode 100644 index 080d2f2ab12af9..00000000000000 --- a/Misc/NEWS.d/next/Build/2024-11-07-11-09-31.gh-issue-123877.CVdd0b.rst +++ /dev/null @@ -1,3 +0,0 @@ -Use ``wasm32-wasip1`` as the target triple for WASI instead of -``wasm32-wasi``. The latter will eventually be reclaimed for WASI 1.0 while -CPython currently only supports WASI preview1. diff --git a/Misc/NEWS.d/next/Build/2024-11-13-15-47-09.gh-issue-126691.ni4K-b.rst b/Misc/NEWS.d/next/Build/2024-11-13-15-47-09.gh-issue-126691.ni4K-b.rst deleted file mode 100644 index 9a2196dab8d711..00000000000000 --- a/Misc/NEWS.d/next/Build/2024-11-13-15-47-09.gh-issue-126691.ni4K-b.rst +++ /dev/null @@ -1,3 +0,0 @@ -Removed the ``--with-emscripten-target`` configure flag. We unified the -``node`` and ``browser`` options and the same build can now be used, independent -of target runtime. diff --git a/Misc/NEWS.d/next/Build/2024-11-20-17-12-40.gh-issue-126898.I2zILt.rst b/Misc/NEWS.d/next/Build/2024-11-20-17-12-40.gh-issue-126898.I2zILt.rst new file mode 100644 index 00000000000000..37783c4e890015 --- /dev/null +++ b/Misc/NEWS.d/next/Build/2024-11-20-17-12-40.gh-issue-126898.I2zILt.rst @@ -0,0 +1 @@ +The Emscripten build of Python is now based on ES6 modules. diff --git a/Misc/NEWS.d/next/Build/2024-11-22-08-46-46.gh-issue-115869.UVLSKd.rst b/Misc/NEWS.d/next/Build/2024-11-22-08-46-46.gh-issue-115869.UVLSKd.rst new file mode 100644 index 00000000000000..9e8a078983f20b --- /dev/null +++ b/Misc/NEWS.d/next/Build/2024-11-22-08-46-46.gh-issue-115869.UVLSKd.rst @@ -0,0 +1 @@ +Make ``jit_stencils.h`` (which is produced during JIT builds) reproducible. diff --git a/Misc/NEWS.d/next/C_API/2024-06-04-13-38-44.gh-issue-120026.uhEvJ9.rst b/Misc/NEWS.d/next/C_API/2024-06-04-13-38-44.gh-issue-120026.uhEvJ9.rst deleted file mode 100644 index d43a138724ad33..00000000000000 --- a/Misc/NEWS.d/next/C_API/2024-06-04-13-38-44.gh-issue-120026.uhEvJ9.rst +++ /dev/null @@ -1 +0,0 @@ -The :c:macro:`!Py_HUGE_VAL` macro is :term:`soft deprecated`. diff --git a/Misc/NEWS.d/next/C_API/2024-07-03-13-39-13.gh-issue-121058.MKi1MV.rst b/Misc/NEWS.d/next/C_API/2024-07-03-13-39-13.gh-issue-121058.MKi1MV.rst new file mode 100644 index 00000000000000..133d8cb6fe4b9e --- /dev/null +++ b/Misc/NEWS.d/next/C_API/2024-07-03-13-39-13.gh-issue-121058.MKi1MV.rst @@ -0,0 +1,2 @@ +``PyThreadState_Clear()`` now warns (and calls ``sys.excepthook``) if the +thread state still has an active exception. 
diff --git a/Misc/NEWS.d/next/C_API/2024-07-30-14-40-08.gh-issue-121654.tgGeAl.rst b/Misc/NEWS.d/next/C_API/2024-07-30-14-40-08.gh-issue-121654.tgGeAl.rst deleted file mode 100644 index 134d36c281ab21..00000000000000 --- a/Misc/NEWS.d/next/C_API/2024-07-30-14-40-08.gh-issue-121654.tgGeAl.rst +++ /dev/null @@ -1,2 +0,0 @@ -Add :c:func:`PyType_Freeze` function to make a type immutable. Patch by -Victor Stinner. diff --git a/Misc/NEWS.d/next/C_API/2024-09-03-13-33-33.gh-issue-123619.HhgUUI.rst b/Misc/NEWS.d/next/C_API/2024-09-03-13-33-33.gh-issue-123619.HhgUUI.rst deleted file mode 100644 index ac821b5326026e..00000000000000 --- a/Misc/NEWS.d/next/C_API/2024-09-03-13-33-33.gh-issue-123619.HhgUUI.rst +++ /dev/null @@ -1,2 +0,0 @@ -Added the :c:func:`PyUnstable_Object_EnableDeferredRefcount` function for -enabling :pep:`703` deferred reference counting. diff --git a/Misc/NEWS.d/next/C_API/2024-10-16-19-28-23.gh-issue-125608.gTsU2g.rst b/Misc/NEWS.d/next/C_API/2024-10-16-19-28-23.gh-issue-125608.gTsU2g.rst deleted file mode 100644 index e70f9f173957a2..00000000000000 --- a/Misc/NEWS.d/next/C_API/2024-10-16-19-28-23.gh-issue-125608.gTsU2g.rst +++ /dev/null @@ -1,3 +0,0 @@ -Fix a bug where dictionary watchers (e.g., :c:func:`PyDict_Watch`) on an -object's attribute dictionary (:attr:`~object.__dict__`) were not triggered -when the object's attributes were modified. diff --git a/Misc/NEWS.d/next/C_API/2024-10-28-15-56-03.gh-issue-126061.Py51_1.rst b/Misc/NEWS.d/next/C_API/2024-10-28-15-56-03.gh-issue-126061.Py51_1.rst deleted file mode 100644 index 0a4ad4ea2874cf..00000000000000 --- a/Misc/NEWS.d/next/C_API/2024-10-28-15-56-03.gh-issue-126061.Py51_1.rst +++ /dev/null @@ -1,3 +0,0 @@ -Add :c:func:`PyLong_IsPositive`, :c:func:`PyLong_IsNegative` -and :c:func:`PyLong_IsZero` for checking if a :c:type:`PyLongObject` -is positive, negative, or zero, respectively. diff --git a/Misc/NEWS.d/next/C_API/2024-11-07-20-24-58.gh-issue-126554.ri12eb.rst b/Misc/NEWS.d/next/C_API/2024-11-07-20-24-58.gh-issue-126554.ri12eb.rst deleted file mode 100644 index 6af89c7d4709ec..00000000000000 --- a/Misc/NEWS.d/next/C_API/2024-11-07-20-24-58.gh-issue-126554.ri12eb.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix error handling in :class:`ctypes.CDLL` objects -which could result in a crash in rare situations. diff --git a/Misc/NEWS.d/next/C API/README.rst b/Misc/NEWS.d/next/C_API/README.rst similarity index 100% rename from Misc/NEWS.d/next/C API/README.rst rename to Misc/NEWS.d/next/C_API/README.rst diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2023-09-22-21-01-56.gh-issue-109746.32MHt9.rst b/Misc/NEWS.d/next/Core_and_Builtins/2023-09-22-21-01-56.gh-issue-109746.32MHt9.rst new file mode 100644 index 00000000000000..2d350c33aa6975 --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2023-09-22-21-01-56.gh-issue-109746.32MHt9.rst @@ -0,0 +1 @@ +If :func:`!_thread.start_new_thread` fails to start a new thread, it deletes its state from interpreter and thus avoids its repeated cleanup on finalization. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2023-12-30-00-21-45.gh-issue-113570._XQgsW.rst b/Misc/NEWS.d/next/Core_and_Builtins/2023-12-30-00-21-45.gh-issue-113570._XQgsW.rst deleted file mode 100644 index 6e0f0afe05369b..00000000000000 --- a/Misc/NEWS.d/next/Core_and_Builtins/2023-12-30-00-21-45.gh-issue-113570._XQgsW.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed a bug in ``reprlib.repr`` where it incorrectly called the repr method on shadowed Python built-in types. 
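The ``reprlib.repr`` entry above concerns classes whose names shadow built-in type names. A minimal sketch of the situation it describes (illustrative only; the exact pre-fix output is not asserted here)::

    import reprlib

    class list:                      # shadows the built-in name, but is not a real list
        def __repr__(self):
            return "<custom list>"

    r = reprlib.Repr()
    # reprlib dispatches handlers by type name; with the fix, an instance of this
    # user-defined "list" is no longer routed to the built-in list handler.
    print(r.repr(list()))
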
diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-05-12-03-10-36.gh-issue-118950.5Wc4vp.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-05-12-03-10-36.gh-issue-118950.5Wc4vp.rst deleted file mode 100644 index 82be975f4d808d..00000000000000 --- a/Misc/NEWS.d/next/Core_and_Builtins/2024-05-12-03-10-36.gh-issue-118950.5Wc4vp.rst +++ /dev/null @@ -1 +0,0 @@ -Fix bug where SSLProtocol.connection_lost wasn't getting called when OSError was thrown on writing to socket. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-06-13-19-12-49.gh-issue-119793.FDVCDk.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-06-13-19-12-49.gh-issue-119793.FDVCDk.rst deleted file mode 100644 index 976d6712e4b6af..00000000000000 --- a/Misc/NEWS.d/next/Core_and_Builtins/2024-06-13-19-12-49.gh-issue-119793.FDVCDk.rst +++ /dev/null @@ -1,3 +0,0 @@ -The :func:`map` built-in now has an optional keyword-only *strict* flag -like :func:`zip` to check that all the iterables are of equal length. -Patch by Wannes Boeykens. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-08-03-14-02-27.gh-issue-69639.mW3iKq.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-08-03-14-02-27.gh-issue-69639.mW3iKq.rst new file mode 100644 index 00000000000000..72596b0302aa45 --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2024-08-03-14-02-27.gh-issue-69639.mW3iKq.rst @@ -0,0 +1,2 @@ +Implement mixed-mode arithmetic rules combining real and complex numbers +as specified by C standards since C99. Patch by Sergey B Kirpichev. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-09-11-01-32-07.gh-issue-123930.BkPfB6.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-09-11-01-32-07.gh-issue-123930.BkPfB6.rst deleted file mode 100644 index 3c8eb02b2dc2d6..00000000000000 --- a/Misc/NEWS.d/next/Core_and_Builtins/2024-09-11-01-32-07.gh-issue-123930.BkPfB6.rst +++ /dev/null @@ -1,4 +0,0 @@ -Improve the error message when a script shadowing a module from the standard -library causes :exc:`ImportError` to be raised during a "from" import. -Similarly, improve the error message when a script shadowing a third party module -attempts to "from" import an attribute from that third party module while still initialising. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-09-14-20-09-39.gh-issue-123714.o1mbe4.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-09-14-20-09-39.gh-issue-123714.o1mbe4.rst deleted file mode 100644 index 03a2ef63238073..00000000000000 --- a/Misc/NEWS.d/next/Core_and_Builtins/2024-09-14-20-09-39.gh-issue-123714.o1mbe4.rst +++ /dev/null @@ -1 +0,0 @@ -Update JIT compilation to use LLVM 19 diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-09-25-21-50-23.gh-issue-124470.pFr3_d.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-09-25-21-50-23.gh-issue-124470.pFr3_d.rst new file mode 100644 index 00000000000000..8f2f37146d3c13 --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2024-09-25-21-50-23.gh-issue-124470.pFr3_d.rst @@ -0,0 +1 @@ +Fix crash in free-threaded builds when replacing object dictionary while reading attribute on another thread diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-01-17-31-32.gh-issue-124855.sdsv_H.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-10-01-17-31-32.gh-issue-124855.sdsv_H.rst deleted file mode 100644 index b65a5e6ac11c76..00000000000000 --- a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-01-17-31-32.gh-issue-124855.sdsv_H.rst +++ /dev/null @@ -1,2 +0,0 @@ -Don't allow the JIT and perf support to be active at the same time. 
Patch by -Pablo Galindo diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-09-13-53-50.gh-issue-125038.ffSLCz.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-10-09-13-53-50.gh-issue-125038.ffSLCz.rst deleted file mode 100644 index 15de48ec0e4450..00000000000000 --- a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-09-13-53-50.gh-issue-125038.ffSLCz.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix crash when iterating over a generator expression after direct changes on ``gi_frame.f_locals``. -Patch by Mikhail Efimov. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-14-17-13-12.gh-issue-118423.SkBoda.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-10-14-17-13-12.gh-issue-118423.SkBoda.rst deleted file mode 100644 index 8511a8de5530d6..00000000000000 --- a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-14-17-13-12.gh-issue-118423.SkBoda.rst +++ /dev/null @@ -1,2 +0,0 @@ -Add a new ``INSTRUCTION_SIZE`` macro to the cases generator which returns -the current instruction size. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-16-12-12-39.gh-issue-125444.9tG2X6.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-10-16-12-12-39.gh-issue-125444.9tG2X6.rst deleted file mode 100644 index 13c1e745edf8d5..00000000000000 --- a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-16-12-12-39.gh-issue-125444.9tG2X6.rst +++ /dev/null @@ -1 +0,0 @@ -Fix illegal instruction for older Arm architectures. Patch by Diego Russo, testing by Ross Burton. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-16-13-52-48.gh-issue-125588.kCahyO.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-10-16-13-52-48.gh-issue-125588.kCahyO.rst deleted file mode 100644 index 1d59a9c3c205b8..00000000000000 --- a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-16-13-52-48.gh-issue-125588.kCahyO.rst +++ /dev/null @@ -1,2 +0,0 @@ -The Python PEG generator can now use f-strings in the grammar actions. Patch -by Pablo Galindo diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-16-23-06-06.gh-issue-125017.fcltj0.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-10-16-23-06-06.gh-issue-125017.fcltj0.rst deleted file mode 100644 index 11c526643c3122..00000000000000 --- a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-16-23-06-06.gh-issue-125017.fcltj0.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix crash on certain accesses to the ``__annotations__`` of -:class:`staticmethod` and :class:`classmethod` objects. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-18-10-11-43.gh-issue-125593.Q97m3A.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-10-18-10-11-43.gh-issue-125593.Q97m3A.rst deleted file mode 100644 index 220e94467af849..00000000000000 --- a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-18-10-11-43.gh-issue-125593.Q97m3A.rst +++ /dev/null @@ -1 +0,0 @@ -Use color to highlight error locations in traceback from exception group diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-18-16-00-10.gh-issue-125703.QRoqMo.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-10-18-16-00-10.gh-issue-125703.QRoqMo.rst deleted file mode 100644 index 7cbfa725e78cef..00000000000000 --- a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-18-16-00-10.gh-issue-125703.QRoqMo.rst +++ /dev/null @@ -1,2 +0,0 @@ -Correctly honour :mod:`tracemalloc` hooks in specialized ``Py_DECREF`` -paths. 
Patch by Pablo Galindo diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-22-04-18-53.gh-issue-125498.cFjPIn.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-10-22-04-18-53.gh-issue-125498.cFjPIn.rst deleted file mode 100644 index 807c2e07210e7f..00000000000000 --- a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-22-04-18-53.gh-issue-125498.cFjPIn.rst +++ /dev/null @@ -1,4 +0,0 @@ -The JIT has been updated to leverage Clang 19’s new ``preserve_none`` attribute, -which supports more platforms and is more useful than LLVM's existing ``ghccc`` -calling convention. This also removes the need to manually patch the calling -convention in LLVM IR, simplifying the JIT compilation process. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-23-14-05-47.gh-issue-125868.uLfXYB.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-10-23-14-05-47.gh-issue-125868.uLfXYB.rst deleted file mode 100644 index dea250e7166ec6..00000000000000 --- a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-23-14-05-47.gh-issue-125868.uLfXYB.rst +++ /dev/null @@ -1,3 +0,0 @@ -It was possible in 3.14.0a1 only for attribute lookup to give the wrong -value. This was due to an incorrect specialization in very specific -circumstances. This is fixed in 3.14.0a2. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-23-14-42-27.gh-issue-125859.m3EF9E.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-10-23-14-42-27.gh-issue-125859.m3EF9E.rst deleted file mode 100644 index d36aa8fbe7482f..00000000000000 --- a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-23-14-42-27.gh-issue-125859.m3EF9E.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix a crash in the free threading build when :func:`gc.get_objects` or -:func:`gc.get_referrers` is called during an in-progress garbage collection. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-24-22-43-03.gh-issue-125942.3UQht1.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-10-24-22-43-03.gh-issue-125942.3UQht1.rst deleted file mode 100644 index d1b1ecd2a724ab..00000000000000 --- a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-24-22-43-03.gh-issue-125942.3UQht1.rst +++ /dev/null @@ -1,2 +0,0 @@ -On Android, the ``errors`` setting of :any:`sys.stdout` was changed from -``surrogateescape`` to ``backslashreplace``. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-25-15-56-14.gh-issue-125837.KlCdgD.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-10-25-15-56-14.gh-issue-125837.KlCdgD.rst deleted file mode 100644 index 9538f34f969377..00000000000000 --- a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-25-15-56-14.gh-issue-125837.KlCdgD.rst +++ /dev/null @@ -1,5 +0,0 @@ -Adds :opcode:`LOAD_SMALL_INT` and :opcode:`LOAD_CONST_IMMORTAL` instructions. -``LOAD_SMALL_INT`` pushes a small integer equal to the ``oparg`` to the stack. -``LOAD_CONST_IMMORTAL`` does the same as ``LOAD_CONST`` but is more -efficient for immortal objects. -Removes ``RETURN_CONST`` instruction. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-26-13-32-48.gh-issue-126012.2KalhG.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-10-26-13-32-48.gh-issue-126012.2KalhG.rst deleted file mode 100644 index 5307920ddcf200..00000000000000 --- a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-26-13-32-48.gh-issue-126012.2KalhG.rst +++ /dev/null @@ -1,2 +0,0 @@ -The :class:`memoryview` type now supports subscription, making it a -:term:`generic type`. 
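The ``memoryview`` entry above makes the type subscriptable for use in type hints. A small sketch, assuming an interpreter that includes the change::

    from typing import get_args

    alias = memoryview[int]          # now a types.GenericAlias instead of an error
    print(alias, get_args(alias))    # memoryview[int] (<class 'int'>,)

    def checksum(view: memoryview[int]) -> int:
        return sum(view)

    print(checksum(memoryview(bytearray([1, 2, 3]))))   # 6
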
diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-26-23-50-03.gh-issue-126018.Hq-qcM.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-10-26-23-50-03.gh-issue-126018.Hq-qcM.rst deleted file mode 100644 index e019408638997b..00000000000000 --- a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-26-23-50-03.gh-issue-126018.Hq-qcM.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix a crash in :func:`sys.audit` when passing a non-string as first argument -and Python was compiled in debug mode. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-27-04-47-28.gh-issue-126024.XCQSqT.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-10-27-04-47-28.gh-issue-126024.XCQSqT.rst new file mode 100644 index 00000000000000..b41fff30433c34 --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2024-10-27-04-47-28.gh-issue-126024.XCQSqT.rst @@ -0,0 +1,2 @@ +Optimize decoding of short UTF-8 sequences containing non-ASCII characters +by approximately 15%. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-27-20-31-43.gh-issue-90370.IP_W3a.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-10-27-20-31-43.gh-issue-90370.IP_W3a.rst deleted file mode 100644 index b6a19c06a228ca..00000000000000 --- a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-27-20-31-43.gh-issue-90370.IP_W3a.rst +++ /dev/null @@ -1,2 +0,0 @@ -Avoid temporary tuple creation for vararg in argument passing with Argument -Clinic generated code (if arguments either vararg or positional-only). diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-29-10-37-39.gh-issue-126072.XLKlxv.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-10-29-10-37-39.gh-issue-126072.XLKlxv.rst deleted file mode 100644 index 4ad30e9f954ecf..00000000000000 --- a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-29-10-37-39.gh-issue-126072.XLKlxv.rst +++ /dev/null @@ -1,3 +0,0 @@ -Add a new attribute in :attr:`~codeobject.co_flags` to indicate whether the -first item in :attr:`~codeobject.co_consts` is the docstring. If a code -object has no docstring, ``None`` will **NOT** be inserted. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-29-15-17-31.gh-issue-126139.B4OQ8a.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-10-29-15-17-31.gh-issue-126139.B4OQ8a.rst deleted file mode 100644 index 278971b46d18ab..00000000000000 --- a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-29-15-17-31.gh-issue-126139.B4OQ8a.rst +++ /dev/null @@ -1,2 +0,0 @@ -Provide better error location when attempting to use a :term:`future -statement <__future__>` with an unknown future feature. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-30-18-16-10.gh-issue-126195.6ezBpr.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-10-30-18-16-10.gh-issue-126195.6ezBpr.rst deleted file mode 100644 index 01424d8a545d78..00000000000000 --- a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-30-18-16-10.gh-issue-126195.6ezBpr.rst +++ /dev/null @@ -1 +0,0 @@ -Improve JIT performance by 1.4% on macOS Apple Silicon by using platform-specific memory protection APIs. Patch by Diego Russo. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-31-21-49-00.gh-issue-126072.o9k8Ns.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-10-31-21-49-00.gh-issue-126072.o9k8Ns.rst deleted file mode 100644 index 2464ac78cf429b..00000000000000 --- a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-31-21-49-00.gh-issue-126072.o9k8Ns.rst +++ /dev/null @@ -1,2 +0,0 @@ -Following :gh:`126101`, for :ref:`codeobjects` like lambda, annotation and type alias, -we no longer add ``None`` to its :attr:`~codeobject.co_consts`. 
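A quick way to observe the ``co_consts`` change for lambdas described above (a sketch; the exact tuple contents depend on the interpreter)::

    fn = lambda x: x + 1000.5
    # On interpreters with this change the constants tuple no longer carries a
    # spurious None entry for lambdas, annotations and type aliases.
    print(fn.__code__.co_consts)
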
diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-11-01-09-58-06.gh-issue-103951.6qduwj.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-11-01-09-58-06.gh-issue-103951.6qduwj.rst deleted file mode 100644 index 39b54e0b72556e..00000000000000 --- a/Misc/NEWS.d/next/Core_and_Builtins/2024-11-01-09-58-06.gh-issue-103951.6qduwj.rst +++ /dev/null @@ -1,2 +0,0 @@ -Relax optimization requirements to allow fast attribute access to module -subclasses. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-11-02-14-43-46.gh-issue-126312.LMHzLT.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-11-02-14-43-46.gh-issue-126312.LMHzLT.rst deleted file mode 100644 index 19c8f0a348731c..00000000000000 --- a/Misc/NEWS.d/next/Core_and_Builtins/2024-11-02-14-43-46.gh-issue-126312.LMHzLT.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix crash during garbage collection on an object frozen by :func:`gc.freeze` on the -free-threaded build. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-11-02-18-01-31.gh-issue-126209.2ZIhrS.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-11-02-18-01-31.gh-issue-126209.2ZIhrS.rst deleted file mode 100644 index 727f7f8180ab22..00000000000000 --- a/Misc/NEWS.d/next/Core_and_Builtins/2024-11-02-18-01-31.gh-issue-126209.2ZIhrS.rst +++ /dev/null @@ -1,3 +0,0 @@ -Fix an issue with ``skip_file_prefixes`` parameter which resulted in an inconsistent -behaviour between the C and Python implementations of :func:`warnings.warn`. -Patch by Daehee Kim. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-11-03-15-15-36.gh-issue-126366.8BBdGU.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-11-03-15-15-36.gh-issue-126366.8BBdGU.rst deleted file mode 100644 index a47233602e4eff..00000000000000 --- a/Misc/NEWS.d/next/Core_and_Builtins/2024-11-03-15-15-36.gh-issue-126366.8BBdGU.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix crash when using ``yield from`` on an object that raises an exception in -its ``__iter__``. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-11-06-16-34-11.gh-issue-126222.9NBfTn.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-11-06-16-34-11.gh-issue-126222.9NBfTn.rst deleted file mode 100644 index ebf6673782f02c..00000000000000 --- a/Misc/NEWS.d/next/Core_and_Builtins/2024-11-06-16-34-11.gh-issue-126222.9NBfTn.rst +++ /dev/null @@ -1,3 +0,0 @@ -Do not include count of "peek" items in ``_PyUop_num_popped``. This ensures -that the correct number of items are popped from the stack when a micro-op -exits with an error. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-11-07-21-48-23.gh-issue-126091.ETaRGE.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-11-07-21-48-23.gh-issue-126091.ETaRGE.rst new file mode 100644 index 00000000000000..08118ff1af657d --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2024-11-07-21-48-23.gh-issue-126091.ETaRGE.rst @@ -0,0 +1,2 @@ +Ensure stack traces are complete when throwing into a generator chain that +ends in a custom generator. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-11-09-16-10-22.gh-issue-126066.9zs4m4.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-11-09-16-10-22.gh-issue-126066.9zs4m4.rst deleted file mode 100644 index 9c0072304ded63..00000000000000 --- a/Misc/NEWS.d/next/Core_and_Builtins/2024-11-09-16-10-22.gh-issue-126066.9zs4m4.rst +++ /dev/null @@ -1,3 +0,0 @@ -Fix :mod:`importlib` to not write an incomplete .pyc files when a ulimit or some -other operating system mechanism is preventing the write to go through -fully. 
diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-11-11-17-02-48.gh-issue-126688.QiOXUi.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-11-11-17-02-48.gh-issue-126688.QiOXUi.rst deleted file mode 100644 index 30aa5722f0ea02..00000000000000 --- a/Misc/NEWS.d/next/Core_and_Builtins/2024-11-11-17-02-48.gh-issue-126688.QiOXUi.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix a crash when calling :func:`os.fork` on some operating systems, -including SerenityOS. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-11-12-19-24-00.gh-issue-126341.5SdAe1.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-11-12-19-24-00.gh-issue-126341.5SdAe1.rst deleted file mode 100644 index c2436d2ebf4d09..00000000000000 --- a/Misc/NEWS.d/next/Core_and_Builtins/2024-11-12-19-24-00.gh-issue-126341.5SdAe1.rst +++ /dev/null @@ -1 +0,0 @@ -Now :exc:`ValueError` is raised instead of :exc:`SystemError` when trying to iterate over a released :class:`memoryview` object. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-11-15-16-39-37.gh-issue-126892.QR6Yo3.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-11-15-16-39-37.gh-issue-126892.QR6Yo3.rst new file mode 100644 index 00000000000000..db3c398e5dbdbe --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2024-11-15-16-39-37.gh-issue-126892.QR6Yo3.rst @@ -0,0 +1,2 @@ +Require cold or invalidated code to "warm up" before being JIT compiled +again. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-11-16-11-11-35.gh-issue-126881.ijofLZ.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-11-16-11-11-35.gh-issue-126881.ijofLZ.rst new file mode 100644 index 00000000000000..13381c7630d7ce --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2024-11-16-11-11-35.gh-issue-126881.ijofLZ.rst @@ -0,0 +1 @@ +Fix crash in finalization of dtoa state. Patch by Kumar Aditya. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-11-18-23-18-17.gh-issue-126980.r8QHdi.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-11-18-23-18-17.gh-issue-126980.r8QHdi.rst new file mode 100644 index 00000000000000..84484e7c3001da --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2024-11-18-23-18-17.gh-issue-126980.r8QHdi.rst @@ -0,0 +1,3 @@ +Fix :meth:`~object.__buffer__` of :class:`bytearray` crashing when +:attr:`~inspect.BufferFlags.READ` or :attr:`~inspect.BufferFlags.WRITE` are +passed as flags. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-11-19-17-17-32.gh-issue-127010.9Cl4bb.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-11-19-17-17-32.gh-issue-127010.9Cl4bb.rst new file mode 100644 index 00000000000000..36e379c88ab27e --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2024-11-19-17-17-32.gh-issue-127010.9Cl4bb.rst @@ -0,0 +1,4 @@ +Simplify GC tracking of dictionaries. All dictionaries are tracked when +created, rather than being lazily tracked when a trackable object was added +to them. This simplifies the code considerably and results in a slight +speedup. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-11-19-21-49-58.gh-issue-127020.5vvI17.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-11-19-21-49-58.gh-issue-127020.5vvI17.rst new file mode 100644 index 00000000000000..a8fd9272f5a923 --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2024-11-19-21-49-58.gh-issue-127020.5vvI17.rst @@ -0,0 +1,4 @@ +Fix a crash in the free threading build when :c:func:`PyCode_GetCode`, +:c:func:`PyCode_GetVarnames`, :c:func:`PyCode_GetCellvars`, or +:c:func:`PyCode_GetFreevars` were called from multiple threads at the same +time. 
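The released-``memoryview`` entry above changes the exception type seen by user code. A minimal illustration, assuming an interpreter with the change::

    view = memoryview(b"abc")
    view.release()
    try:
        for _ in view:
            pass
    except ValueError as exc:        # previously surfaced as SystemError
        print("released memoryview:", exc)
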
diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-11-23-04-54-42.gh-issue-127133.WMoJjF.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-11-23-04-54-42.gh-issue-127133.WMoJjF.rst new file mode 100644 index 00000000000000..56b496bdf1e310 --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2024-11-23-04-54-42.gh-issue-127133.WMoJjF.rst @@ -0,0 +1,6 @@ +Calling :meth:`argparse.ArgumentParser.add_argument_group` on an argument group, +and calling :meth:`argparse.ArgumentParser.add_argument_group` or +:meth:`argparse.ArgumentParser.add_mutually_exclusive_group` on a mutually +exclusive group now raise exceptions. This nesting was never supported, often +failed to work correctly, and was unintentionally exposed through inheritance. +This functionality has been deprecated since Python 3.11. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-11-24-07-01-28.gh-issue-113841.WFg-Bu.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-11-24-07-01-28.gh-issue-113841.WFg-Bu.rst new file mode 100644 index 00000000000000..2b07fdfcc6b527 --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2024-11-24-07-01-28.gh-issue-113841.WFg-Bu.rst @@ -0,0 +1,2 @@ +Fix possible undefined behavior division by zero in :class:`complex`'s +:c:func:`_Py_c_pow`. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-11-25-05-15-21.gh-issue-127238.O8wkH-.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-11-25-05-15-21.gh-issue-127238.O8wkH-.rst new file mode 100644 index 00000000000000..e8a274fcd31f26 --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2024-11-25-05-15-21.gh-issue-127238.O8wkH-.rst @@ -0,0 +1 @@ +Correct error message for :func:`sys.set_int_max_str_digits`. diff --git a/Misc/NEWS.d/next/Core and Builtins/README.rst b/Misc/NEWS.d/next/Core_and_Builtins/README.rst similarity index 100% rename from Misc/NEWS.d/next/Core and Builtins/README.rst rename to Misc/NEWS.d/next/Core_and_Builtins/README.rst diff --git a/Misc/NEWS.d/next/Documentation/2023-03-28-22-24-45.gh-issue-60712.So5uad.rst b/Misc/NEWS.d/next/Documentation/2023-03-28-22-24-45.gh-issue-60712.So5uad.rst deleted file mode 100644 index e401cc2535e389..00000000000000 --- a/Misc/NEWS.d/next/Documentation/2023-03-28-22-24-45.gh-issue-60712.So5uad.rst +++ /dev/null @@ -1,2 +0,0 @@ -Include the :class:`object` type in the lists of documented types. -Change by Furkan Onder and Martin Panter. diff --git a/Misc/NEWS.d/next/Documentation/2024-10-10-23-46-54.gh-issue-125277.QAby09.rst b/Misc/NEWS.d/next/Documentation/2024-10-10-23-46-54.gh-issue-125277.QAby09.rst deleted file mode 100644 index fcd6e22c27b5f4..00000000000000 --- a/Misc/NEWS.d/next/Documentation/2024-10-10-23-46-54.gh-issue-125277.QAby09.rst +++ /dev/null @@ -1,2 +0,0 @@ -Require Sphinx 7.2.6 or later to build the Python documentation. -Patch by Adam Turner. diff --git a/Misc/NEWS.d/next/Documentation/2024-11-09-19-43-10.gh-issue-126622.YacfDc.rst b/Misc/NEWS.d/next/Documentation/2024-11-09-19-43-10.gh-issue-126622.YacfDc.rst deleted file mode 100644 index a2181b5712873b..00000000000000 --- a/Misc/NEWS.d/next/Documentation/2024-11-09-19-43-10.gh-issue-126622.YacfDc.rst +++ /dev/null @@ -1,3 +0,0 @@ -Added stub pages for removed modules explaining their removal, where to find -replacements, and linking to the last Python version that supported them. -Contributed by Ned Batchelder. 
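For the ``argparse`` group-nesting entry above, a short sketch of the now-rejected pattern (the exact exception type is not asserted here)::

    import argparse

    parser = argparse.ArgumentParser()
    outer = parser.add_argument_group("outer")
    try:
        outer.add_argument_group("inner")    # nesting groups was never supported
    except Exception as exc:
        print(type(exc).__name__, exc)
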
diff --git a/Misc/NEWS.d/next/Library/2020-05-19-01-12-47.gh-issue-84852.FEjHJW.rst b/Misc/NEWS.d/next/Library/2020-05-19-01-12-47.gh-issue-84852.FEjHJW.rst deleted file mode 100644 index 2581697591af62..00000000000000 --- a/Misc/NEWS.d/next/Library/2020-05-19-01-12-47.gh-issue-84852.FEjHJW.rst +++ /dev/null @@ -1,2 +0,0 @@ -Add MIME types for MS Embedded OpenType, OpenType Layout, TrueType, -WOFF 1.0 and 2.0 fonts. Patch by Sahil Prajapati and Hugo van Kemenade. diff --git a/Misc/NEWS.d/next/Library/2021-12-19-10-47-24.bpo-46128.Qv3EK1.rst b/Misc/NEWS.d/next/Library/2021-12-19-10-47-24.bpo-46128.Qv3EK1.rst deleted file mode 100644 index 7d11d20d94e8a3..00000000000000 --- a/Misc/NEWS.d/next/Library/2021-12-19-10-47-24.bpo-46128.Qv3EK1.rst +++ /dev/null @@ -1,2 +0,0 @@ -Strip :class:`unittest.IsolatedAsyncioTestCase` stack frames from reported -stacktraces. diff --git a/Misc/NEWS.d/next/Library/2022-10-15-10-18-20.gh-issue-71936.MzJjc_.rst b/Misc/NEWS.d/next/Library/2022-10-15-10-18-20.gh-issue-71936.MzJjc_.rst deleted file mode 100644 index a0959cc086fa9e..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-10-15-10-18-20.gh-issue-71936.MzJjc_.rst +++ /dev/null @@ -1 +0,0 @@ -Fix a race condition in :class:`multiprocessing.pool.Pool`. diff --git a/Misc/NEWS.d/next/Library/2022-11-10-17-16-45.gh-issue-97514.kzA0zl.rst b/Misc/NEWS.d/next/Library/2022-11-10-17-16-45.gh-issue-97514.kzA0zl.rst new file mode 100644 index 00000000000000..10c56edb8c7303 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2022-11-10-17-16-45.gh-issue-97514.kzA0zl.rst @@ -0,0 +1,10 @@ +Authentication was added to the :mod:`multiprocessing` forkserver start +method control socket so that only processes with the authentication key +generated by the process that spawned the forkserver can control it. This +is an enhancement over the other :gh:`97514` fixes so that access is no +longer limited only by filesystem permissions. + +The file descriptor exchange of control pipes with the forked worker process +now requires an explicit acknowledgement byte to be sent over the socket after +the exchange on all forkserver supporting platforms. That makes testing the +above much easier. diff --git a/Misc/NEWS.d/next/Library/2023-02-15-23-54-42.gh-issue-88110.KU6erv.rst b/Misc/NEWS.d/next/Library/2023-02-15-23-54-42.gh-issue-88110.KU6erv.rst new file mode 100644 index 00000000000000..42a83edc3ba68d --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-02-15-23-54-42.gh-issue-88110.KU6erv.rst @@ -0,0 +1,2 @@ +Fixed :class:`multiprocessing.Process` reporting a ``.exitcode`` of 1 even on success when +using the ``"fork"`` start method while using a :class:`concurrent.futures.ThreadPoolExecutor`. diff --git a/Misc/NEWS.d/next/Library/2024-03-16-13-38-27.gh-issue-116897.UDQTjp.rst b/Misc/NEWS.d/next/Library/2024-03-16-13-38-27.gh-issue-116897.UDQTjp.rst deleted file mode 100644 index 6c8e4b16f20de8..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-03-16-13-38-27.gh-issue-116897.UDQTjp.rst +++ /dev/null @@ -1,4 +0,0 @@ -Accepting objects with false values (like ``0`` and ``[]``) except empty -strings, byte-like objects and ``None`` in :mod:`urllib.parse` functions -:func:`~urllib.parse.parse_qsl` and :func:`~urllib.parse.parse_qs` is now -deprecated. 
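For the ``urllib.parse`` deprecation entry above, a sketch of which falsy inputs remain accepted and which now warn (assuming an interpreter with the change)::

    import warnings
    from urllib.parse import parse_qs

    print(parse_qs("a=1&b=2"))   # {'a': ['1'], 'b': ['2']}
    print(parse_qs(""))          # empty string: still accepted, returns {}
    print(parse_qs(None))        # None: still accepted, returns {}

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        parse_qs(0)              # other false values such as 0 or [] now warn
    print([type(w.message).__name__ for w in caught])
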
diff --git a/Misc/NEWS.d/next/Library/2024-05-13-10-09-41.gh-issue-118986.-r4W9h.rst b/Misc/NEWS.d/next/Library/2024-05-13-10-09-41.gh-issue-118986.-r4W9h.rst deleted file mode 100644 index 196da60a950bfb..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-05-13-10-09-41.gh-issue-118986.-r4W9h.rst +++ /dev/null @@ -1 +0,0 @@ -Add :data:`!socket.IPV6_RECVERR` constant (available since Linux 2.2). diff --git a/Misc/NEWS.d/next/Library/2024-05-28-14-35-23.gh-issue-97850.dCtjel.rst b/Misc/NEWS.d/next/Library/2024-05-28-14-35-23.gh-issue-97850.dCtjel.rst deleted file mode 100644 index bb94f7d8ad124d..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-05-28-14-35-23.gh-issue-97850.dCtjel.rst +++ /dev/null @@ -1 +0,0 @@ -Remove deprecated :func:`!pkgutil.get_loader` and :func:`!pkgutil.find_loader`. diff --git a/Misc/NEWS.d/next/Library/2024-06-02-11-48-19.gh-issue-119826.N1obGa.rst b/Misc/NEWS.d/next/Library/2024-06-02-11-48-19.gh-issue-119826.N1obGa.rst deleted file mode 100644 index 6901e7475dd082..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-06-02-11-48-19.gh-issue-119826.N1obGa.rst +++ /dev/null @@ -1 +0,0 @@ -Always return an absolute path for :func:`os.path.abspath` on Windows. diff --git a/Misc/NEWS.d/next/Library/2024-06-05-19-09-36.gh-issue-118289.moL9_d.rst b/Misc/NEWS.d/next/Library/2024-06-05-19-09-36.gh-issue-118289.moL9_d.rst deleted file mode 100644 index 522572e160ba7b..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-06-05-19-09-36.gh-issue-118289.moL9_d.rst +++ /dev/null @@ -1,2 +0,0 @@ -:func:`!posixpath.realpath` now raises :exc:`NotADirectoryError` when *strict* -mode is enabled and a non-directory path with a trailing slash is supplied. diff --git a/Misc/NEWS.d/next/Library/2024-06-06-04-06-05.gh-issue-70764.6511hw.rst b/Misc/NEWS.d/next/Library/2024-06-06-04-06-05.gh-issue-70764.6511hw.rst deleted file mode 100644 index 4cfb66a6ccc6ee..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-06-06-04-06-05.gh-issue-70764.6511hw.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed an issue where :func:`inspect.getclosurevars` would incorrectly classify an attribute name as a global variable when the name exists both as an attribute name and a global variable. diff --git a/Misc/NEWS.d/next/Library/2024-07-02-15-56-42.gh-issue-121267.yFBWkh.rst b/Misc/NEWS.d/next/Library/2024-07-02-15-56-42.gh-issue-121267.yFBWkh.rst deleted file mode 100644 index 9e52405c15a82d..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-07-02-15-56-42.gh-issue-121267.yFBWkh.rst +++ /dev/null @@ -1,2 +0,0 @@ -Improve the performance of :mod:`tarfile` when writing files, by caching user names -and group names. diff --git a/Misc/NEWS.d/next/Library/2024-07-23-02-24-50.gh-issue-120754.nHb5mG.rst b/Misc/NEWS.d/next/Library/2024-07-23-02-24-50.gh-issue-120754.nHb5mG.rst deleted file mode 100644 index 6c33e7b7ec7716..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-07-23-02-24-50.gh-issue-120754.nHb5mG.rst +++ /dev/null @@ -1 +0,0 @@ -Update unbounded ``read`` calls in :mod:`zipfile` to specify an explicit ``size`` putting a limit on how much data they may read. This also updates handling around ZIP max comment size to match the standard instead of reading comments that are one byte too long. 
diff --git a/Misc/NEWS.d/next/Library/2024-07-25-18-06-51.gh-issue-122288.-_xxOR.rst b/Misc/NEWS.d/next/Library/2024-07-25-18-06-51.gh-issue-122288.-_xxOR.rst new file mode 100644 index 00000000000000..26a18afca945d9 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-07-25-18-06-51.gh-issue-122288.-_xxOR.rst @@ -0,0 +1,2 @@ +Improve the performances of :func:`fnmatch.translate` by a factor 1.7. Patch +by Bénédikt Tran. diff --git a/Misc/NEWS.d/next/Library/2024-07-29-15-20-30.gh-issue-122356.wKCmFx.rst b/Misc/NEWS.d/next/Library/2024-07-29-15-20-30.gh-issue-122356.wKCmFx.rst new file mode 100644 index 00000000000000..0a4632ca975f6b --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-07-29-15-20-30.gh-issue-122356.wKCmFx.rst @@ -0,0 +1,3 @@ +Guarantee that the position of a file-like object passed to +:func:`zipfile.is_zipfile` is left untouched after the call. +Patch by Bénédikt Tran. diff --git a/Misc/NEWS.d/next/Library/2024-08-01-11-15-55.gh-issue-122549.ztV4Kz.rst b/Misc/NEWS.d/next/Library/2024-08-01-11-15-55.gh-issue-122549.ztV4Kz.rst deleted file mode 100644 index 6b2cbc0a6c9b7a..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-08-01-11-15-55.gh-issue-122549.ztV4Kz.rst +++ /dev/null @@ -1 +0,0 @@ -Add :func:`platform.invalidate_caches` to invalidate cached results. diff --git a/Misc/NEWS.d/next/Library/2024-08-22-12-12-35.gh-issue-89083.b6zFh0.rst b/Misc/NEWS.d/next/Library/2024-08-22-12-12-35.gh-issue-89083.b6zFh0.rst deleted file mode 100644 index d37d585d51b490..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-08-22-12-12-35.gh-issue-89083.b6zFh0.rst +++ /dev/null @@ -1,2 +0,0 @@ -Add :func:`uuid.uuid8` for generating UUIDv8 objects as specified in -:rfc:`9562`. Patch by Bénédikt Tran diff --git a/Misc/NEWS.d/next/Library/2024-08-28-19-27-35.gh-issue-123370.SPZ9Ux.rst b/Misc/NEWS.d/next/Library/2024-08-28-19-27-35.gh-issue-123370.SPZ9Ux.rst deleted file mode 100644 index 1fd5cc54eaf3e7..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-08-28-19-27-35.gh-issue-123370.SPZ9Ux.rst +++ /dev/null @@ -1 +0,0 @@ -Fix the canvas not clearing after running turtledemo clock. diff --git a/Misc/NEWS.d/next/Library/2024-09-07-13-57-49.gh-issue-80958.fVYnqV.rst b/Misc/NEWS.d/next/Library/2024-09-07-13-57-49.gh-issue-80958.fVYnqV.rst deleted file mode 100644 index f0edd7b1ac6e8b..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-09-07-13-57-49.gh-issue-80958.fVYnqV.rst +++ /dev/null @@ -1 +0,0 @@ -unittest discovery supports PEP 420 namespace packages as start directory again. diff --git a/Misc/NEWS.d/next/Library/2024-09-13-18-24-27.gh-issue-124008.XaiPQx.rst b/Misc/NEWS.d/next/Library/2024-09-13-18-24-27.gh-issue-124008.XaiPQx.rst new file mode 100644 index 00000000000000..cd6dd9a7a97e90 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-09-13-18-24-27.gh-issue-124008.XaiPQx.rst @@ -0,0 +1,2 @@ +Fix possible crash (in debug build), incorrect output or returning incorrect +value from raw binary ``write()`` when writing to console on Windows. diff --git a/Misc/NEWS.d/next/Library/2024-09-17-10-38-26.gh-issue-124111.Hd53VN.rst b/Misc/NEWS.d/next/Library/2024-09-17-10-38-26.gh-issue-124111.Hd53VN.rst deleted file mode 100644 index aba082a7ac1ad4..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-09-17-10-38-26.gh-issue-124111.Hd53VN.rst +++ /dev/null @@ -1,4 +0,0 @@ -The tkinter module can now be built to use either the new version 9.0.0 of -Tcl/Tk or the latest release 8.6.15 of Tcl/Tk 8. 
Tcl/Tk 9 includes many -improvements, both to the Tcl language and to the appearance and utility of -the graphical user interface provided by Tk. diff --git a/Misc/NEWS.d/next/Library/2024-09-24-18-16-59.gh-issue-58956.0wFrBR.rst b/Misc/NEWS.d/next/Library/2024-09-24-18-16-59.gh-issue-58956.0wFrBR.rst deleted file mode 100644 index a882a632fddf1b..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-09-24-18-16-59.gh-issue-58956.0wFrBR.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed a bug in :mod:`pdb` where sometimes the breakpoint won't trigger if it was set on a function which is already in the call stack. diff --git a/Misc/NEWS.d/next/Library/2024-09-24-18-49-16.gh-issue-99749.gBDJX7.rst b/Misc/NEWS.d/next/Library/2024-09-24-18-49-16.gh-issue-99749.gBDJX7.rst deleted file mode 100644 index 3ecd75c5b551b6..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-09-24-18-49-16.gh-issue-99749.gBDJX7.rst +++ /dev/null @@ -1 +0,0 @@ -Adds a feature to optionally enable suggestions for argument choices and subparser names if mistyped by the user. diff --git a/Misc/NEWS.d/next/Library/2024-09-27-13-10-17.gh-issue-58032.0aNAQ0.rst b/Misc/NEWS.d/next/Library/2024-09-27-13-10-17.gh-issue-58032.0aNAQ0.rst deleted file mode 100644 index 278512b22a8d3f..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-09-27-13-10-17.gh-issue-58032.0aNAQ0.rst +++ /dev/null @@ -1 +0,0 @@ -Deprecate the :class:`argparse.FileType` type converter. diff --git a/Misc/NEWS.d/next/Library/2024-09-27-15-42-55.gh-issue-124694.uUy32y.rst b/Misc/NEWS.d/next/Library/2024-09-27-15-42-55.gh-issue-124694.uUy32y.rst deleted file mode 100644 index 1aa1a463b0c63a..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-09-27-15-42-55.gh-issue-124694.uUy32y.rst +++ /dev/null @@ -1,6 +0,0 @@ -We've added :class:`concurrent.futures.InterpreterPoolExecutor`, which -allows you to run code in multiple isolated interpreters. This allows you -to circumvent the limitations of CPU-bound threads (due to the GIL). Patch -by Eric Snow. - -This addition is unrelated to :pep:`734`. diff --git a/Misc/NEWS.d/next/Library/2024-09-28-02-03-04.gh-issue-124651.bLBGtH.rst b/Misc/NEWS.d/next/Library/2024-09-28-02-03-04.gh-issue-124651.bLBGtH.rst deleted file mode 100644 index 17fc9171390dd9..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-09-28-02-03-04.gh-issue-124651.bLBGtH.rst +++ /dev/null @@ -1 +0,0 @@ -Properly quote template strings in :mod:`venv` activation scripts. diff --git a/Misc/NEWS.d/next/Library/2024-10-04-22-43-48.gh-issue-124984.xjMv9b.rst b/Misc/NEWS.d/next/Library/2024-10-04-22-43-48.gh-issue-124984.xjMv9b.rst deleted file mode 100644 index dd0a55a6854c25..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-04-22-43-48.gh-issue-124984.xjMv9b.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed thread safety in :mod:`ssl` in the free-threaded build. OpenSSL operations are now protected by a per-object lock. diff --git a/Misc/NEWS.d/next/Library/2024-10-09-17-07-33.gh-issue-52551.PBakSY.rst b/Misc/NEWS.d/next/Library/2024-10-09-17-07-33.gh-issue-52551.PBakSY.rst deleted file mode 100644 index edc9ac5bb23117..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-09-17-07-33.gh-issue-52551.PBakSY.rst +++ /dev/null @@ -1,8 +0,0 @@ -Fix encoding issues in :func:`time.strftime`, the -:meth:`~datetime.datetime.strftime` method of the :mod:`datetime` classes -:class:`~datetime.datetime`, :class:`~datetime.date` and -:class:`~datetime.time` and formatting of these classes. Characters not -encodable in the current locale are now acceptable in the format string. 
-Surrogate pairs and sequence of surrogatescape-encoded bytes are no longer -recombinated. Embedded null character no longer terminates the format -string. diff --git a/Misc/NEWS.d/next/Library/2024-10-11-00-40-13.gh-issue-125245.8vReM-.rst b/Misc/NEWS.d/next/Library/2024-10-11-00-40-13.gh-issue-125245.8vReM-.rst deleted file mode 100644 index c880efe73d06b4..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-11-00-40-13.gh-issue-125245.8vReM-.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix race condition when importing :mod:`collections.abc`, which could -incorrectly return an empty module. diff --git a/Misc/NEWS.d/next/Library/2024-10-13-15-04-58.gh-issue-125398.UW7Ndv.rst b/Misc/NEWS.d/next/Library/2024-10-13-15-04-58.gh-issue-125398.UW7Ndv.rst deleted file mode 100644 index a188b35e1fbdbc..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-13-15-04-58.gh-issue-125398.UW7Ndv.rst +++ /dev/null @@ -1 +0,0 @@ -Fix the conversion of the :envvar:`!VIRTUAL_ENV` path in the activate script in :mod:`venv` when running in Git Bash for Windows. diff --git a/Misc/NEWS.d/next/Library/2024-10-14-02-07-44.gh-issue-125115.IOf3ON.rst b/Misc/NEWS.d/next/Library/2024-10-14-02-07-44.gh-issue-125115.IOf3ON.rst deleted file mode 100644 index 3583d537a6ec61..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-14-02-07-44.gh-issue-125115.IOf3ON.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed a bug in :mod:`pdb` where arguments starting with ``-`` can't be passed to the debugged script. diff --git a/Misc/NEWS.d/next/Library/2024-10-14-17-29-34.gh-issue-125451.fmP3T9.rst b/Misc/NEWS.d/next/Library/2024-10-14-17-29-34.gh-issue-125451.fmP3T9.rst deleted file mode 100644 index 589988d4d6273f..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-14-17-29-34.gh-issue-125451.fmP3T9.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix deadlock when :class:`concurrent.futures.ProcessPoolExecutor` shuts down -concurrently with an error when feeding a job to a worker process. diff --git a/Misc/NEWS.d/next/Library/2024-10-15-14-01-03.gh-issue-125519.TqGh6a.rst b/Misc/NEWS.d/next/Library/2024-10-15-14-01-03.gh-issue-125519.TqGh6a.rst deleted file mode 100644 index e6062625104590..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-15-14-01-03.gh-issue-125519.TqGh6a.rst +++ /dev/null @@ -1,2 +0,0 @@ -Improve traceback if :func:`importlib.reload` is called with an object that -is not a module. Patch by Alex Waygood. diff --git a/Misc/NEWS.d/next/Library/2024-10-15-16-50-03.gh-issue-125541.FfhmWo.rst b/Misc/NEWS.d/next/Library/2024-10-15-16-50-03.gh-issue-125541.FfhmWo.rst deleted file mode 100644 index 7a20bca1739869..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-15-16-50-03.gh-issue-125541.FfhmWo.rst +++ /dev/null @@ -1,4 +0,0 @@ -Pressing :kbd:`Ctrl-C` while blocked in :meth:`threading.Lock.acquire`, -:meth:`threading.RLock.acquire`, and :meth:`threading.Thread.join` now -interrupts the function call and raises a :exc:`KeyboardInterrupt` exception -on Windows, similar to how those functions behave on macOS and Linux. diff --git a/Misc/NEWS.d/next/Library/2024-10-16-04-50-53.gh-issue-125542.vZJ-Ns.rst b/Misc/NEWS.d/next/Library/2024-10-16-04-50-53.gh-issue-125542.vZJ-Ns.rst deleted file mode 100644 index 777920cc54ff9b..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-16-04-50-53.gh-issue-125542.vZJ-Ns.rst +++ /dev/null @@ -1,2 +0,0 @@ -Deprecate passing keyword-only *prefix_chars* argument to -:meth:`argparse.ArgumentParser.add_argument_group`. 
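For the ``add_argument_group`` deprecation entry above, a small sketch of the call pattern that now triggers a warning (assuming an interpreter with the change)::

    import argparse
    import warnings

    parser = argparse.ArgumentParser(prefix_chars="-+")
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        parser.add_argument_group("plus options", prefix_chars="+")   # deprecated
    print([type(w.message).__name__ for w in caught])
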
diff --git a/Misc/NEWS.d/next/Library/2024-10-16-15-55-50.gh-issue-125600.yMsJx0.rst b/Misc/NEWS.d/next/Library/2024-10-16-15-55-50.gh-issue-125600.yMsJx0.rst deleted file mode 100644 index 19bf4fbefb601b..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-16-15-55-50.gh-issue-125600.yMsJx0.rst +++ /dev/null @@ -1 +0,0 @@ -Only show stale code warning in :mod:`pdb` when we display source code. diff --git a/Misc/NEWS.d/next/Library/2024-10-16-20-32-40.gh-issue-125590.stHzOP.rst b/Misc/NEWS.d/next/Library/2024-10-16-20-32-40.gh-issue-125590.stHzOP.rst deleted file mode 100644 index dc6765ada641a9..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-16-20-32-40.gh-issue-125590.stHzOP.rst +++ /dev/null @@ -1 +0,0 @@ -Allow ``FrameLocalsProxy`` to delete and pop if the key is not a fast variable. diff --git a/Misc/NEWS.d/next/Library/2024-10-16-22-45-50.gh-issue-125614.3OEo_Q.rst b/Misc/NEWS.d/next/Library/2024-10-16-22-45-50.gh-issue-125614.3OEo_Q.rst deleted file mode 100644 index 5f4803c9b74578..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-16-22-45-50.gh-issue-125614.3OEo_Q.rst +++ /dev/null @@ -1,3 +0,0 @@ -In the :data:`~annotationlib.Format.FORWARDREF` format of -:mod:`annotationlib`, fix bug where nested expressions were not returned as -:class:`annotationlib.ForwardRef` format. diff --git a/Misc/NEWS.d/next/Library/2024-10-17-04-52-00.gh-issue-125633.lMck06.rst b/Misc/NEWS.d/next/Library/2024-10-17-04-52-00.gh-issue-125633.lMck06.rst deleted file mode 100644 index e816a13b75e0c7..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-17-04-52-00.gh-issue-125633.lMck06.rst +++ /dev/null @@ -1,2 +0,0 @@ -Add function :func:`inspect.ispackage` to determine whether an object is a -:term:`package` or not. diff --git a/Misc/NEWS.d/next/Library/2024-10-17-16-10-29.gh-issue-125259.oMew0c.rst b/Misc/NEWS.d/next/Library/2024-10-17-16-10-29.gh-issue-125259.oMew0c.rst deleted file mode 100644 index 4fa6330abea512..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-17-16-10-29.gh-issue-125259.oMew0c.rst +++ /dev/null @@ -1 +0,0 @@ -Fix the notes removal logic for errors thrown in enum initialization. diff --git a/Misc/NEWS.d/next/Library/2024-10-17-20-36-06.gh-issue-52551.EIVNYY.rst b/Misc/NEWS.d/next/Library/2024-10-17-20-36-06.gh-issue-52551.EIVNYY.rst deleted file mode 100644 index 48d3d93c3d72af..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-17-20-36-06.gh-issue-52551.EIVNYY.rst +++ /dev/null @@ -1 +0,0 @@ -Use :c:func:`!wcsftime` to implement :func:`time.strftime` on Windows. diff --git a/Misc/NEWS.d/next/Library/2024-10-18-08-58-10.gh-issue-125660.sDdDqO.rst b/Misc/NEWS.d/next/Library/2024-10-18-08-58-10.gh-issue-125660.sDdDqO.rst deleted file mode 100644 index 74d76c7bddae7d..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-18-08-58-10.gh-issue-125660.sDdDqO.rst +++ /dev/null @@ -1 +0,0 @@ -Reject invalid unicode escapes for Python implementation of :func:`json.loads`. diff --git a/Misc/NEWS.d/next/Library/2024-10-18-09-51-29.gh-issue-125682.vsj4cU.rst b/Misc/NEWS.d/next/Library/2024-10-18-09-51-29.gh-issue-125682.vsj4cU.rst deleted file mode 100644 index 3eb2905ad8d810..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-18-09-51-29.gh-issue-125682.vsj4cU.rst +++ /dev/null @@ -1,2 +0,0 @@ -Reject non-ASCII digits in the Python implementation of :func:`json.loads` -conforming to the JSON specification. 
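The ``json.loads`` entry above brings the pure-Python scanner in line with the JSON specification. A small sketch of the now-uniform behaviour::

    import json

    # Arabic-Indic digits are not valid JSON number characters.
    try:
        json.loads("\u0661\u0662\u0663")
    except json.JSONDecodeError as exc:
        print("rejected:", exc)
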
diff --git a/Misc/NEWS.d/next/Library/2024-10-19-01-30-40.gh-issue-125378.WTosxX.rst b/Misc/NEWS.d/next/Library/2024-10-19-01-30-40.gh-issue-125378.WTosxX.rst deleted file mode 100644 index dc76889d3b210a..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-19-01-30-40.gh-issue-125378.WTosxX.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed the bug in :mod:`pdb` where after a multi-line command, an empty line repeats the first line of the multi-line command, instead of the full command. diff --git a/Misc/NEWS.d/next/Library/2024-10-19-11-06-06.gh-issue-125631.BlhVvR.rst b/Misc/NEWS.d/next/Library/2024-10-19-11-06-06.gh-issue-125631.BlhVvR.rst deleted file mode 100644 index e870abbf87803a..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-19-11-06-06.gh-issue-125631.BlhVvR.rst +++ /dev/null @@ -1,4 +0,0 @@ -Restore ability to set :attr:`~pickle.Pickler.persistent_id` and -:attr:`~pickle.Unpickler.persistent_load` attributes of instances of the -:class:`!Pickler` and :class:`!Unpickler` classes in the :mod:`pickle` -module. diff --git a/Misc/NEWS.d/next/Library/2024-10-19-13-37-37.gh-issue-125710.FyFAAr.rst b/Misc/NEWS.d/next/Library/2024-10-19-13-37-37.gh-issue-125710.FyFAAr.rst deleted file mode 100644 index 8d5220e9889c3a..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-19-13-37-37.gh-issue-125710.FyFAAr.rst +++ /dev/null @@ -1 +0,0 @@ -[Enum] fix hashable<->nonhashable comparisons for member values diff --git a/Misc/NEWS.d/next/Library/2024-10-19-16-06-52.gh-issue-125666.jGfdCP.rst b/Misc/NEWS.d/next/Library/2024-10-19-16-06-52.gh-issue-125666.jGfdCP.rst deleted file mode 100644 index 3b4488815cced6..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-19-16-06-52.gh-issue-125666.jGfdCP.rst +++ /dev/null @@ -1 +0,0 @@ -Avoid the exiting the interpreter if a null byte is given as input in the new REPL. diff --git a/Misc/NEWS.d/next/Library/2024-10-21-12-06-55.gh-issue-124969.xiY8UP.rst b/Misc/NEWS.d/next/Library/2024-10-21-12-06-55.gh-issue-124969.xiY8UP.rst deleted file mode 100644 index c44550184e0000..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-21-12-06-55.gh-issue-124969.xiY8UP.rst +++ /dev/null @@ -1,2 +0,0 @@ -``locale.nl_langinfo(locale.ALT_DIGITS)`` now returns a string again. The -returned value consists of up to 100 semicolon-separated symbols. diff --git a/Misc/NEWS.d/next/Library/2024-10-21-13-52-37.gh-issue-125767.0kK4lX.rst b/Misc/NEWS.d/next/Library/2024-10-21-13-52-37.gh-issue-125767.0kK4lX.rst deleted file mode 100644 index bfda740a79d10e..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-21-13-52-37.gh-issue-125767.0kK4lX.rst +++ /dev/null @@ -1,2 +0,0 @@ -:class:`super` objects are now :mod:`pickleable ` and -:mod:`copyable `. diff --git a/Misc/NEWS.d/next/Library/2024-10-22-13-28-00.gh-issue-125355.zssHm_.rst b/Misc/NEWS.d/next/Library/2024-10-22-13-28-00.gh-issue-125355.zssHm_.rst deleted file mode 100644 index fd67f697641d92..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-22-13-28-00.gh-issue-125355.zssHm_.rst +++ /dev/null @@ -1,7 +0,0 @@ -Fix several bugs in :meth:`argparse.ArgumentParser.parse_intermixed_args`. - -* The parser no longer changes temporarily during parsing. -* Default values are not processed twice. -* Required mutually exclusive groups containing positional arguments are now supported. -* The missing arguments report now includes the names of all required optional and positional arguments. -* Unknown options can be intermixed with positional arguments in parse_known_intermixed_args(). 
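A minimal example of the intermixed parsing that the fixes above target (illustrative; the option and file names here are made up)::

    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument("--verbose", action="store_true")
    parser.add_argument("files", nargs="+")

    # Optionals and positionals may be interleaved on the command line.
    args = parser.parse_intermixed_args(["a.txt", "--verbose", "b.txt"])
    print(args.verbose, args.files)   # True ['a.txt', 'b.txt']
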
diff --git a/Misc/NEWS.d/next/Library/2024-10-23-17-45-40.gh-issue-125884.41E_PD.rst b/Misc/NEWS.d/next/Library/2024-10-23-17-45-40.gh-issue-125884.41E_PD.rst deleted file mode 100644 index 684b1f282b143e..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-23-17-45-40.gh-issue-125884.41E_PD.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed the bug for :mod:`pdb` where it can't set breakpoints on functions with certain annotations. diff --git a/Misc/NEWS.d/next/Library/2024-10-23-20-05-54.gh-issue-86463.jvFTI_.rst b/Misc/NEWS.d/next/Library/2024-10-23-20-05-54.gh-issue-86463.jvFTI_.rst new file mode 100644 index 00000000000000..9ac155770e2254 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-10-23-20-05-54.gh-issue-86463.jvFTI_.rst @@ -0,0 +1,2 @@ +The ``usage`` parameter of :class:`argparse.ArgumentParser` no longer +affects the default value of the ``prog`` parameter in subparsers. diff --git a/Misc/NEWS.d/next/Library/2024-10-23-20-44-30.gh-issue-117941.Y9jdlW.rst b/Misc/NEWS.d/next/Library/2024-10-23-20-44-30.gh-issue-117941.Y9jdlW.rst deleted file mode 100644 index 9c2553f0f0e8cd..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-23-20-44-30.gh-issue-117941.Y9jdlW.rst +++ /dev/null @@ -1,2 +0,0 @@ -:class:`!argparse.BooleanOptionalAction` now rejects option names starting -with ``--no-``. diff --git a/Misc/NEWS.d/next/Library/2024-10-24-13-40-20.gh-issue-126916.MAgz6D.rst b/Misc/NEWS.d/next/Library/2024-10-24-13-40-20.gh-issue-126916.MAgz6D.rst deleted file mode 100644 index cbe2fc166ba6af..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-24-13-40-20.gh-issue-126916.MAgz6D.rst +++ /dev/null @@ -1,2 +0,0 @@ -Allow the *initial* parameter of :func:`functools.reduce` to be passed as a keyword argument. -Patch by Sayandip Dutta. diff --git a/Misc/NEWS.d/next/Library/2024-10-24-14-08-10.gh-issue-125789.eaiAMw.rst b/Misc/NEWS.d/next/Library/2024-10-24-14-08-10.gh-issue-125789.eaiAMw.rst deleted file mode 100644 index 964a006bb47b7b..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-24-14-08-10.gh-issue-125789.eaiAMw.rst +++ /dev/null @@ -1 +0,0 @@ -Fix possible crash when mutating list of callbacks returned by :attr:`!asyncio.Future._callbacks`. It now always returns a new copy in C implementation :mod:`!_asyncio`. Patch by Kumar Aditya. diff --git a/Misc/NEWS.d/next/Library/2024-10-25-10-53-56.gh-issue-125966.eOCYU_.rst b/Misc/NEWS.d/next/Library/2024-10-25-10-53-56.gh-issue-125966.eOCYU_.rst deleted file mode 100644 index 9fe8795de18003..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-25-10-53-56.gh-issue-125966.eOCYU_.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix a use-after-free crash in :meth:`asyncio.Future.remove_done_callback`. -Patch by Bénédikt Tran. diff --git a/Misc/NEWS.d/next/Library/2024-10-25-11-13-24.gh-issue-125969.YvbrTr.rst b/Misc/NEWS.d/next/Library/2024-10-25-11-13-24.gh-issue-125969.YvbrTr.rst deleted file mode 100644 index dc99adff7416c5..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-25-11-13-24.gh-issue-125969.YvbrTr.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix an out-of-bounds crash when an evil :meth:`asyncio.loop.call_soon` -mutates the length of the internal callbacks list. Patch by Bénédikt Tran. 
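For the ``functools.reduce`` entry above, a one-line sketch (works only on interpreters that accept *initial* by keyword)::

    import functools
    import operator

    total = functools.reduce(operator.add, [1, 2, 3], initial=10)
    print(total)   # 16
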
diff --git a/Misc/NEWS.d/next/Library/2024-10-25-20-52-15.gh-issue-125926.pp8rtZ.rst b/Misc/NEWS.d/next/Library/2024-10-25-20-52-15.gh-issue-125926.pp8rtZ.rst deleted file mode 100644 index 7f98bcdc38e566..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-25-20-52-15.gh-issue-125926.pp8rtZ.rst +++ /dev/null @@ -1,4 +0,0 @@ -Fix :func:`urllib.parse.urljoin` for base URI with undefined authority. -Although :rfc:`3986` only specify reference resolution for absolute base -URI, :func:`!urljoin` should continue to return sensible result for relative -base URI. diff --git a/Misc/NEWS.d/next/Library/2024-10-26-12-50-48.gh-issue-125984.d4vp5_.rst b/Misc/NEWS.d/next/Library/2024-10-26-12-50-48.gh-issue-125984.d4vp5_.rst deleted file mode 100644 index 7a1d7b53b11301..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-26-12-50-48.gh-issue-125984.d4vp5_.rst +++ /dev/null @@ -1,3 +0,0 @@ -Fix use-after-free crashes on :class:`asyncio.Future` objects for which the -underlying event loop implements an evil :meth:`~object.__getattribute__`. -Reported by Nico-Posada. Patch by Bénédikt Tran. diff --git a/Misc/NEWS.d/next/Library/2024-10-28-01-24-52.gh-issue-125413.Jat5kq.rst b/Misc/NEWS.d/next/Library/2024-10-28-01-24-52.gh-issue-125413.Jat5kq.rst deleted file mode 100644 index ddf1f9725d9695..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-28-01-24-52.gh-issue-125413.Jat5kq.rst +++ /dev/null @@ -1,3 +0,0 @@ -Add :meth:`pathlib.Path.scandir` method to efficiently fetch directory -children and their file attributes. This is a trivial wrapper of -:func:`os.scandir`. diff --git a/Misc/NEWS.d/next/Library/2024-10-28-11-33-59.gh-issue-126068.Pdznm_.rst b/Misc/NEWS.d/next/Library/2024-10-28-11-33-59.gh-issue-126068.Pdznm_.rst deleted file mode 100644 index a0faf61890da17..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-28-11-33-59.gh-issue-126068.Pdznm_.rst +++ /dev/null @@ -1,5 +0,0 @@ -Fix exceptions in the :mod:`argparse` module so that only error messages for -ArgumentError and ArgumentTypeError are now translated. -ArgumentError is now only used for command line errors, not for logical -errors in the program. TypeError is now raised instead of ValueError for -some logical errors. diff --git a/Misc/NEWS.d/next/Library/2024-10-28-19-49-18.gh-issue-118201.v41XXh.rst b/Misc/NEWS.d/next/Library/2024-10-28-19-49-18.gh-issue-118201.v41XXh.rst new file mode 100644 index 00000000000000..bed4b3b5956f31 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-10-28-19-49-18.gh-issue-118201.v41XXh.rst @@ -0,0 +1,2 @@ +Fixed intermittent failures of :any:`os.confstr`, :any:`os.pathconf` and +:any:`os.sysconf` on iOS and Android. diff --git a/Misc/NEWS.d/next/Library/2024-10-28-22-35-22.gh-issue-126083.TuI--n.rst b/Misc/NEWS.d/next/Library/2024-10-28-22-35-22.gh-issue-126083.TuI--n.rst deleted file mode 100644 index d64b7dd2fedbd6..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-28-22-35-22.gh-issue-126083.TuI--n.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed a reference leak in :class:`asyncio.Task` objects when reinitializing the same object with a non-``None`` context. Patch by Nico Posada. diff --git a/Misc/NEWS.d/next/Library/2024-10-29-07-24-52.gh-issue-125322.sstOM-.rst b/Misc/NEWS.d/next/Library/2024-10-29-07-24-52.gh-issue-125322.sstOM-.rst deleted file mode 100644 index e0cc91d287f89d..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-29-07-24-52.gh-issue-125322.sstOM-.rst +++ /dev/null @@ -1 +0,0 @@ -Correct detection of complex numbers support in libffi. 
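A quick illustration of the gh-125926 entry above: even though :rfc:`3986` only defines reference resolution for absolute base URIs, :func:`urllib.parse.urljoin` keeps returning a sensible result for a relative base (the values in the comments are the expected results, not verified output)::

    from urllib.parse import urljoin

    # Relative base URI with no scheme or authority.
    print(urljoin("path/to/doc.html", "other.html"))   # path/to/other.html
    print(urljoin("path/to/doc.html", "../up.html"))   # path/up.html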
diff --git a/Misc/NEWS.d/next/Library/2024-10-29-10-38-28.gh-issue-126080.qKRBuo.rst b/Misc/NEWS.d/next/Library/2024-10-29-10-38-28.gh-issue-126080.qKRBuo.rst deleted file mode 100644 index e54ac17b217c92..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-29-10-38-28.gh-issue-126080.qKRBuo.rst +++ /dev/null @@ -1,3 +0,0 @@ -Fix a use-after-free crash on :class:`asyncio.Task` objects for which the -underlying event loop implements an evil :meth:`~object.__getattribute__`. -Reported by Nico-Posada. Patch by Bénédikt Tran. diff --git a/Misc/NEWS.d/next/Library/2024-10-29-10-58-52.gh-issue-126106.rlF798.rst b/Misc/NEWS.d/next/Library/2024-10-29-10-58-52.gh-issue-126106.rlF798.rst deleted file mode 100644 index de989007b4c35a..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-29-10-58-52.gh-issue-126106.rlF798.rst +++ /dev/null @@ -1 +0,0 @@ -Fixes a possible ``NULL`` pointer dereference in :mod:`ssl`. diff --git a/Misc/NEWS.d/next/Library/2024-10-29-11-45-44.gh-issue-126105.cOL-R6.rst b/Misc/NEWS.d/next/Library/2024-10-29-11-45-44.gh-issue-126105.cOL-R6.rst deleted file mode 100644 index 547eb3af1ca064..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-29-11-45-44.gh-issue-126105.cOL-R6.rst +++ /dev/null @@ -1 +0,0 @@ -Fix a crash in :mod:`ast` when the :attr:`ast.AST._fields` attribute is deleted. diff --git a/Misc/NEWS.d/next/Library/2024-10-30-00-12-22.gh-issue-126156.BOSqv0.rst b/Misc/NEWS.d/next/Library/2024-10-30-00-12-22.gh-issue-126156.BOSqv0.rst deleted file mode 100644 index 4fe18275ab9384..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-30-00-12-22.gh-issue-126156.BOSqv0.rst +++ /dev/null @@ -1 +0,0 @@ -Improved performances of creating :py:class:`~http.cookies.Morsel` objects by a factor of 3.8x. diff --git a/Misc/NEWS.d/next/Library/2024-10-30-20-45-17.gh-issue-126205.CHEmtx.rst b/Misc/NEWS.d/next/Library/2024-10-30-20-45-17.gh-issue-126205.CHEmtx.rst deleted file mode 100644 index c92ffb75056606..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-30-20-45-17.gh-issue-126205.CHEmtx.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix issue where :func:`urllib.request.pathname2url` generated URLs beginning -with four slashes (rather than two) when given a Windows UNC path. diff --git a/Misc/NEWS.d/next/Library/2024-10-30-23-42-44.gh-issue-126223.k2qooc.rst b/Misc/NEWS.d/next/Library/2024-10-30-23-42-44.gh-issue-126223.k2qooc.rst deleted file mode 100644 index fee391c030b941..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-30-23-42-44.gh-issue-126223.k2qooc.rst +++ /dev/null @@ -1,2 +0,0 @@ -Raise a :exc:`UnicodeEncodeError` instead of a :exc:`SystemError` upon -calling :func:`!_interpreters.create` with an invalid Unicode character. diff --git a/Misc/NEWS.d/next/Library/2024-10-30-23-59-36.gh-issue-126212._9uYjT.rst b/Misc/NEWS.d/next/Library/2024-10-30-23-59-36.gh-issue-126212._9uYjT.rst deleted file mode 100644 index 047fe0f68048b5..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-30-23-59-36.gh-issue-126212._9uYjT.rst +++ /dev/null @@ -1,3 +0,0 @@ -Fix issue where :func:`urllib.request.pathname2url` and -:func:`~urllib.request.url2pathname` removed slashes from Windows DOS drive -paths and URLs. 
diff --git a/Misc/NEWS.d/next/Library/2024-10-31-14-06-28.gh-issue-126220.uJAJCU.rst b/Misc/NEWS.d/next/Library/2024-10-31-14-06-28.gh-issue-126220.uJAJCU.rst deleted file mode 100644 index 555f2f3bafbf33..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-31-14-06-28.gh-issue-126220.uJAJCU.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix crash in :class:`!cProfile.Profile` and :class:`!_lsprof.Profiler` when their -callbacks were directly called with 0 arguments. diff --git a/Misc/NEWS.d/next/Library/2024-11-01-10-35-49.gh-issue-120057.YWy81Q.rst b/Misc/NEWS.d/next/Library/2024-11-01-10-35-49.gh-issue-120057.YWy81Q.rst deleted file mode 100644 index ded60a3f57bca3..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-11-01-10-35-49.gh-issue-120057.YWy81Q.rst +++ /dev/null @@ -1,2 +0,0 @@ -Replace the ``os.environ.refresh()`` method with a new -:func:`os.reload_environ` function. Patch by Victor Stinner. diff --git a/Misc/NEWS.d/next/Library/2024-11-01-14-31-41.gh-issue-126138.yTniOG.rst b/Misc/NEWS.d/next/Library/2024-11-01-14-31-41.gh-issue-126138.yTniOG.rst deleted file mode 100644 index 459eebc82bd42a..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-11-01-14-31-41.gh-issue-126138.yTniOG.rst +++ /dev/null @@ -1,3 +0,0 @@ -Fix a use-after-free crash on :class:`asyncio.Task` objects -whose underlying coroutine yields an object that implements -an evil :meth:`~object.__getattribute__`. Patch by Nico Posada. diff --git a/Misc/NEWS.d/next/Library/2024-11-02-19-20-44.gh-issue-126303.yVvyWB.rst b/Misc/NEWS.d/next/Library/2024-11-02-19-20-44.gh-issue-126303.yVvyWB.rst deleted file mode 100644 index 0072c97338c251..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-11-02-19-20-44.gh-issue-126303.yVvyWB.rst +++ /dev/null @@ -1 +0,0 @@ -Fix pickling and copying of :class:`os.sched_param` objects. diff --git a/Misc/NEWS.d/next/Library/2024-11-03-09-42-42.gh-issue-126313.EFP6Dl.rst b/Misc/NEWS.d/next/Library/2024-11-03-09-42-42.gh-issue-126313.EFP6Dl.rst deleted file mode 100644 index dad348d8898f13..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-11-03-09-42-42.gh-issue-126313.EFP6Dl.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix an issue in :func:`curses.napms` when :func:`curses.initscr` has not yet -been called. Patch by Bénédikt Tran. diff --git a/Misc/NEWS.d/next/Library/2024-11-03-10-48-07.gh-issue-126353.ChDzot.rst b/Misc/NEWS.d/next/Library/2024-11-03-10-48-07.gh-issue-126353.ChDzot.rst deleted file mode 100644 index 16d508b7ec6a20..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-11-03-10-48-07.gh-issue-126353.ChDzot.rst +++ /dev/null @@ -1,2 +0,0 @@ -:func:`asyncio.get_event_loop` now does not implicitly creates an event loop. -It now raises a :exc:`RuntimeError` if there is no set event loop. Patch by Kumar Aditya. diff --git a/Misc/NEWS.d/next/Library/2024-11-03-14-43-51.gh-issue-126363.Xus7vU.rst b/Misc/NEWS.d/next/Library/2024-11-03-14-43-51.gh-issue-126363.Xus7vU.rst deleted file mode 100644 index 20fea9b9ef99a0..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-11-03-14-43-51.gh-issue-126363.Xus7vU.rst +++ /dev/null @@ -1,2 +0,0 @@ -Speed up pattern parsing in :meth:`pathlib.Path.glob` by skipping creation -of a :class:`pathlib.Path` object for the pattern. 
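A short sketch of the gh-126353 entry above: with no running loop and no loop explicitly set, :func:`asyncio.get_event_loop` now raises instead of implicitly creating a loop::

    import asyncio

    try:
        loop = asyncio.get_event_loop()
    except RuntimeError as exc:
        # With this change there is no implicit loop creation; create one
        # explicitly instead, e.g. via asyncio.new_event_loop() or asyncio.run().
        print("no event loop set:", exc)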
diff --git a/Misc/NEWS.d/next/Library/2024-11-03-23-25-07.gh-issue-126374.Xu_THP.rst b/Misc/NEWS.d/next/Library/2024-11-03-23-25-07.gh-issue-126374.Xu_THP.rst deleted file mode 100644 index ad7ecfb6af9ec8..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-11-03-23-25-07.gh-issue-126374.Xu_THP.rst +++ /dev/null @@ -1 +0,0 @@ -Add support for options with optional arguments in the :mod:`getopt` module. diff --git a/Misc/NEWS.d/next/Library/2024-11-04-13-16-18.gh-issue-126390.Cxvqa5.rst b/Misc/NEWS.d/next/Library/2024-11-04-13-16-18.gh-issue-126390.Cxvqa5.rst deleted file mode 100644 index 3b32bb512f6556..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-11-04-13-16-18.gh-issue-126390.Cxvqa5.rst +++ /dev/null @@ -1,2 +0,0 @@ -Add support for returning intermixed options and non-option arguments in -order in :func:`getopt.gnu_getopt`. diff --git a/Misc/NEWS.d/next/Library/2024-11-04-16-40-02.gh-issue-126417.OWPqn0.rst b/Misc/NEWS.d/next/Library/2024-11-04-16-40-02.gh-issue-126417.OWPqn0.rst deleted file mode 100644 index c4a366343382f3..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-11-04-16-40-02.gh-issue-126417.OWPqn0.rst +++ /dev/null @@ -1,3 +0,0 @@ -Register the :class:`!multiprocessing.managers.DictProxy` and :class:`!multiprocessing.managers.ListProxy` types in -:mod:`multiprocessing.managers` to :class:`collections.abc.MutableMapping` and -:class:`collections.abc.MutableSequence`, respectively. diff --git a/Misc/NEWS.d/next/Library/2024-11-04-22-53-09.gh-issue-89416.YVQaas.rst b/Misc/NEWS.d/next/Library/2024-11-04-22-53-09.gh-issue-89416.YVQaas.rst deleted file mode 100644 index f1a2fcbaff2564..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-11-04-22-53-09.gh-issue-89416.YVQaas.rst +++ /dev/null @@ -1,2 +0,0 @@ -Add :rfc:`9559` MIME types for Matroska audiovisual container formats. Patch -by Hugo van Kemenade. diff --git a/Misc/NEWS.d/next/Library/2024-11-05-09-54-49.gh-issue-126175.spnjJr.rst b/Misc/NEWS.d/next/Library/2024-11-05-09-54-49.gh-issue-126175.spnjJr.rst deleted file mode 100644 index de7ce88c8d0f28..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-11-05-09-54-49.gh-issue-126175.spnjJr.rst +++ /dev/null @@ -1,2 +0,0 @@ -Add ``msg``, ``doc``, ``pos``, ``lineno`` and ``colno`` attributes to :exc:`tomllib.TOMLDecodeError`. -Deprecate instantiating with free-form arguments. diff --git a/Misc/NEWS.d/next/Library/2024-11-05-11-28-45.gh-issue-126451.XJMtqz.rst b/Misc/NEWS.d/next/Library/2024-11-05-11-28-45.gh-issue-126451.XJMtqz.rst deleted file mode 100644 index 563cb2515eca60..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-11-05-11-28-45.gh-issue-126451.XJMtqz.rst +++ /dev/null @@ -1,2 +0,0 @@ -Register the :class:`contextvars.Context` type to -:class:`collections.abc.Mapping`. diff --git a/Misc/NEWS.d/next/Library/2024-11-06-13-41-38.gh-issue-126489.toaf-0.rst b/Misc/NEWS.d/next/Library/2024-11-06-13-41-38.gh-issue-126489.toaf-0.rst deleted file mode 100644 index 8a6573cdea7b42..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-11-06-13-41-38.gh-issue-126489.toaf-0.rst +++ /dev/null @@ -1,3 +0,0 @@ -The Python implementation of :mod:`pickle` no longer calls -:meth:`pickle.Pickler.persistent_id` for the result of -:meth:`!persistent_id`. 
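A one-line check for the gh-126451 entry above, which registers :class:`contextvars.Context` with :class:`collections.abc.Mapping`::

    import contextvars
    from collections.abc import Mapping

    ctx = contextvars.copy_context()
    print(isinstance(ctx, Mapping))   # True once the registration is in place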
diff --git a/Misc/NEWS.d/next/Library/2024-11-06-18-30-50.gh-issue-126476.F1wh3c.rst b/Misc/NEWS.d/next/Library/2024-11-06-18-30-50.gh-issue-126476.F1wh3c.rst deleted file mode 100644 index f558c29e8b087f..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-11-06-18-30-50.gh-issue-126476.F1wh3c.rst +++ /dev/null @@ -1,2 +0,0 @@ -Raise :class:`calendar.IllegalMonthError` (now a subclass of :class:`IndexError`) for :func:`calendar.month` -when the input month is not correct. diff --git a/Misc/NEWS.d/next/Library/2024-11-06-23-40-28.gh-issue-125679.Qq9xF5.rst b/Misc/NEWS.d/next/Library/2024-11-06-23-40-28.gh-issue-125679.Qq9xF5.rst deleted file mode 100644 index ac6851e2689692..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-11-06-23-40-28.gh-issue-125679.Qq9xF5.rst +++ /dev/null @@ -1,2 +0,0 @@ -The :class:`multiprocessing.Lock` and :class:`multiprocessing.RLock` -``repr`` values no longer say "unknown" on macOS. diff --git a/Misc/NEWS.d/next/Library/2024-11-07-01-40-11.gh-issue-117378.o9O5uM.rst b/Misc/NEWS.d/next/Library/2024-11-07-01-40-11.gh-issue-117378.o9O5uM.rst deleted file mode 100644 index d7d4477ec17814..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-11-07-01-40-11.gh-issue-117378.o9O5uM.rst +++ /dev/null @@ -1,17 +0,0 @@ -Fixed the :mod:`multiprocessing` ``"forkserver"`` start method forkserver -process to correctly inherit the parent's :data:`sys.path` during the importing -of :func:`multiprocessing.set_forkserver_preload` modules in the same manner as -:data:`sys.path` is configured in workers before executing work items. - -This bug caused some forkserver module preloading to silently fail to preload. -This manifested as a performance degration in child processes when the -``sys.path`` was required due to additional repeated work in every worker. - -It could also have a side effect of ``""`` remaining in :data:`sys.path` during -forkserver preload imports instead of the absolute path from :func:`os.getcwd` -at multiprocessing import time used in the worker ``sys.path``. - -The ``sys.path`` differences between phases in the child process could -potentially have caused preload to import incorrect things from the wrong -location. We are unaware of that actually having happened in practice. - diff --git a/Misc/NEWS.d/next/Library/2024-11-07-22-41-47.gh-issue-126505.iztYE1.rst b/Misc/NEWS.d/next/Library/2024-11-07-22-41-47.gh-issue-126505.iztYE1.rst deleted file mode 100644 index 0a0f893a2688a0..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-11-07-22-41-47.gh-issue-126505.iztYE1.rst +++ /dev/null @@ -1,4 +0,0 @@ -Fix bugs in compiling case-insensitive :mod:`regular expressions ` with -character classes containing non-BMP characters: upper-case non-BMP -character did was ignored and the ASCII flag was ignored when -matching a character range whose upper bound is beyond the BMP region. diff --git a/Misc/NEWS.d/next/Library/2024-11-08-11-06-14.gh-issue-126565.dFFO22.rst b/Misc/NEWS.d/next/Library/2024-11-08-11-06-14.gh-issue-126565.dFFO22.rst deleted file mode 100644 index 22858570bbe03c..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-11-08-11-06-14.gh-issue-126565.dFFO22.rst +++ /dev/null @@ -1 +0,0 @@ -Improve performances of :meth:`zipfile.Path.open` for non-reading modes. 
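A sketch of the gh-126476 entry above: an out-of-range month raises :class:`calendar.IllegalMonthError`, which can also be caught as :class:`IndexError` now that it is a subclass::

    import calendar

    try:
        calendar.month(2024, 13)
    except IndexError as exc:   # IllegalMonthError is an IndexError subclass
        print(type(exc).__name__, exc)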
diff --git a/Misc/NEWS.d/next/Library/2024-11-08-17-05-10.gh-issue-120423.7rdLVV.rst b/Misc/NEWS.d/next/Library/2024-11-08-17-05-10.gh-issue-120423.7rdLVV.rst deleted file mode 100644 index b475257ceb6610..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-11-08-17-05-10.gh-issue-120423.7rdLVV.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix issue where :func:`urllib.request.pathname2url` mishandled Windows paths -with embedded forward slashes. diff --git a/Misc/NEWS.d/next/Library/2024-11-09-10-31-10.gh-issue-126595.A-7MyC.rst b/Misc/NEWS.d/next/Library/2024-11-09-10-31-10.gh-issue-126595.A-7MyC.rst deleted file mode 100644 index 84a5dc0b23922f..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-11-09-10-31-10.gh-issue-126595.A-7MyC.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix a crash when instantiating :class:`itertools.count` with an initial -count of :data:`sys.maxsize` on debug builds. Patch by Bénédikt Tran. diff --git a/Misc/NEWS.d/next/Library/2024-11-10-18-14-51.gh-issue-104745.zAa5Ke.rst b/Misc/NEWS.d/next/Library/2024-11-10-18-14-51.gh-issue-104745.zAa5Ke.rst deleted file mode 100644 index c83a10769820cf..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-11-10-18-14-51.gh-issue-104745.zAa5Ke.rst +++ /dev/null @@ -1,3 +0,0 @@ -Limit starting a patcher (from :func:`unittest.mock.patch` or -:func:`unittest.mock.patch.object`) more than -once without stopping it diff --git a/Misc/NEWS.d/next/Library/2024-11-11-13-00-21.gh-issue-126654.4gfP2y.rst b/Misc/NEWS.d/next/Library/2024-11-11-13-00-21.gh-issue-126654.4gfP2y.rst deleted file mode 100644 index 750158e6d4d3ae..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-11-11-13-00-21.gh-issue-126654.4gfP2y.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix crash when non-dict was passed to several functions in ``_interpreters`` -module. diff --git a/Misc/NEWS.d/next/Library/2024-11-11-13-24-22.gh-issue-126699.ONGbMd.rst b/Misc/NEWS.d/next/Library/2024-11-11-13-24-22.gh-issue-126699.ONGbMd.rst deleted file mode 100644 index 9741294487d716..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-11-11-13-24-22.gh-issue-126699.ONGbMd.rst +++ /dev/null @@ -1 +0,0 @@ -Allow :class:`collections.abc.AsyncIterator` to be a base for Protocols. diff --git a/Misc/NEWS.d/next/Library/2024-11-11-14-52-21.gh-issue-126705.0W7jFW.rst b/Misc/NEWS.d/next/Library/2024-11-11-14-52-21.gh-issue-126705.0W7jFW.rst deleted file mode 100644 index f49c9c765d778f..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-11-11-14-52-21.gh-issue-126705.0W7jFW.rst +++ /dev/null @@ -1 +0,0 @@ -Allow :class:`os.PathLike` to be a base for Protocols. diff --git a/Misc/NEWS.d/next/Library/2024-11-12-13-14-47.gh-issue-126727.5Eqfqd.rst b/Misc/NEWS.d/next/Library/2024-11-12-13-14-47.gh-issue-126727.5Eqfqd.rst new file mode 100644 index 00000000000000..7bec8a6b7a830a --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-11-12-13-14-47.gh-issue-126727.5Eqfqd.rst @@ -0,0 +1,3 @@ +``locale.nl_langinfo(locale.ERA)`` now returns multiple era description +segments separated by semicolons. Previously it only returned the first +segment on platforms with Glibc. 
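A hedged sketch of the gh-126727 entry above; it assumes the platform exposes :data:`locale.ERA` (glibc, most POSIX systems) and that a locale with era data such as ``ja_JP.UTF-8`` is installed — on most locales the era string is simply empty::

    import locale

    try:
        locale.setlocale(locale.LC_ALL, "ja_JP.UTF-8")
    except locale.Error:
        print("locale with era data not available")
    else:
        era = locale.nl_langinfo(locale.ERA)
        # With this change every era description segment is returned,
        # separated by ";", instead of only the first one on glibc.
        for segment in filter(None, era.split(";")):
            print(segment)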
diff --git a/Misc/NEWS.d/next/Library/2024-11-12-20-05-09.gh-issue-126601.Nj7bA9.rst b/Misc/NEWS.d/next/Library/2024-11-12-20-05-09.gh-issue-126601.Nj7bA9.rst new file mode 100644 index 00000000000000..11e2b7350a0e48 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-11-12-20-05-09.gh-issue-126601.Nj7bA9.rst @@ -0,0 +1,3 @@ +Fix issue where :func:`urllib.request.pathname2url` raised :exc:`OSError` +when given a Windows path containing a colon character not following a +drive letter, such as before an NTFS alternate data stream. diff --git a/Misc/NEWS.d/next/Library/2024-11-12-21-43-12.gh-issue-126766.oi2KJ7.rst b/Misc/NEWS.d/next/Library/2024-11-12-21-43-12.gh-issue-126766.oi2KJ7.rst deleted file mode 100644 index e3936305164883..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-11-12-21-43-12.gh-issue-126766.oi2KJ7.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix issue where :func:`urllib.request.url2pathname` failed to discard two -leading slashes introducing an empty authority section. diff --git a/Misc/NEWS.d/next/Library/2024-11-13-19-15-18.gh-issue-126780.ZZqJvI.rst b/Misc/NEWS.d/next/Library/2024-11-13-19-15-18.gh-issue-126780.ZZqJvI.rst new file mode 100644 index 00000000000000..93d45caf5cad72 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-11-13-19-15-18.gh-issue-126780.ZZqJvI.rst @@ -0,0 +1 @@ +Fix :func:`os.path.normpath` for drive-relative paths on Windows. diff --git a/Misc/NEWS.d/next/Library/2024-11-13-20-03-18.gh-issue-126188.RJLKk-.rst b/Misc/NEWS.d/next/Library/2024-11-13-20-03-18.gh-issue-126188.RJLKk-.rst deleted file mode 100644 index bb13662e6ae62c..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-11-13-20-03-18.gh-issue-126188.RJLKk-.rst +++ /dev/null @@ -1 +0,0 @@ -Update bundled pip to 24.3.1 diff --git a/Misc/NEWS.d/next/Library/2024-11-14-13-16-20.gh-issue-125063.kJ-WnH.rst b/Misc/NEWS.d/next/Library/2024-11-14-13-16-20.gh-issue-125063.kJ-WnH.rst deleted file mode 100644 index 5ddf41206db07e..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-11-14-13-16-20.gh-issue-125063.kJ-WnH.rst +++ /dev/null @@ -1,2 +0,0 @@ -:mod:`marshal` now supports :class:`slice` objects. The marshal format -version was increased to 5. diff --git a/Misc/NEWS.d/next/Library/2024-11-15-01-50-36.gh-issue-85168.bP8VIN.rst b/Misc/NEWS.d/next/Library/2024-11-15-01-50-36.gh-issue-85168.bP8VIN.rst new file mode 100644 index 00000000000000..abceda8f6fd707 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-11-15-01-50-36.gh-issue-85168.bP8VIN.rst @@ -0,0 +1,4 @@ +Fix issue where :func:`urllib.request.url2pathname` and +:func:`~urllib.request.pathname2url` always used UTF-8 when quoting and +unquoting file URIs. They now use the :term:`filesystem encoding and error +handler`. diff --git a/Misc/NEWS.d/next/Library/2024-11-18-16-43-11.gh-issue-126946.52Ou-B.rst b/Misc/NEWS.d/next/Library/2024-11-18-16-43-11.gh-issue-126946.52Ou-B.rst new file mode 100644 index 00000000000000..448055ccfdff40 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-11-18-16-43-11.gh-issue-126946.52Ou-B.rst @@ -0,0 +1,3 @@ +Improve the :exc:`~getopt.GetoptError` error message when a long option +prefix matches multiple accepted options in :func:`getopt.getopt` and +:func:`getopt.gnu_getopt`. 
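A minimal reproduction of the situation covered by the gh-126946 entry above: ``--ver`` is an ambiguous prefix of two accepted long options, so :func:`getopt.getopt` raises :exc:`~getopt.GetoptError` (the exact message wording is what the change improves)::

    import getopt

    try:
        getopt.getopt(["--ver"], "", ["verbose", "version"])
    except getopt.GetoptError as exc:
        print(exc)   # now names the options matching the ambiguous prefix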
diff --git a/Misc/NEWS.d/next/Library/2024-11-18-19-03-46.gh-issue-126947.NiDYUe.rst b/Misc/NEWS.d/next/Library/2024-11-18-19-03-46.gh-issue-126947.NiDYUe.rst new file mode 100644 index 00000000000000..29ba4f21454fe1 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-11-18-19-03-46.gh-issue-126947.NiDYUe.rst @@ -0,0 +1,2 @@ +Raise :exc:`TypeError` in :meth:`!_pydatetime.timedelta.__new__` if the passed arguments are not :class:`int` or :class:`float`, so that the Python +implementation is in line with the C implementation. diff --git a/Misc/NEWS.d/next/Library/2024-11-18-22-02-47.gh-issue-118761.GQKD_J.rst b/Misc/NEWS.d/next/Library/2024-11-18-22-02-47.gh-issue-118761.GQKD_J.rst new file mode 100644 index 00000000000000..ebb9fe8016de21 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-11-18-22-02-47.gh-issue-118761.GQKD_J.rst @@ -0,0 +1,2 @@ +Improve import time of :mod:`mimetypes` by around 11-16 times. Patch by Hugo +van Kemenade. diff --git a/Misc/NEWS.d/next/Library/2024-11-18-23-42-06.gh-issue-126985.7XplY9.rst b/Misc/NEWS.d/next/Library/2024-11-18-23-42-06.gh-issue-126985.7XplY9.rst new file mode 100644 index 00000000000000..c875c7b547bba9 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-11-18-23-42-06.gh-issue-126985.7XplY9.rst @@ -0,0 +1,3 @@ +When running under a virtual environment with the :mod:`site` disabled (see +:option:`-S`), :data:`sys.prefix` and :data:`sys.base_prefix` will now point +to the virtual environment, instead of the base installation. diff --git a/Misc/NEWS.d/next/Library/2024-11-19-14-34-05.gh-issue-126615.LOskwi.rst b/Misc/NEWS.d/next/Library/2024-11-19-14-34-05.gh-issue-126615.LOskwi.rst new file mode 100644 index 00000000000000..8c7a2ade03c19e --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-11-19-14-34-05.gh-issue-126615.LOskwi.rst @@ -0,0 +1,2 @@ +The :exc:`~ctypes.COMError` exception is now public. +Previously, this was private and only available in ``_ctypes``. diff --git a/Misc/NEWS.d/next/Library/2024-11-20-11-37-08.gh-issue-126316.ElkZmE.rst b/Misc/NEWS.d/next/Library/2024-11-20-11-37-08.gh-issue-126316.ElkZmE.rst new file mode 100644 index 00000000000000..d643254c5b3564 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-11-20-11-37-08.gh-issue-126316.ElkZmE.rst @@ -0,0 +1,2 @@ +:mod:`grp`: Make :func:`grp.getgrall` thread-safe by adding a mutex. Patch +by Victor Stinner. diff --git a/Misc/NEWS.d/next/Library/2024-11-20-16-58-59.gh-issue-126997.0PI41Y.rst b/Misc/NEWS.d/next/Library/2024-11-20-16-58-59.gh-issue-126997.0PI41Y.rst new file mode 100644 index 00000000000000..b85c51ef07dcbe --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-11-20-16-58-59.gh-issue-126997.0PI41Y.rst @@ -0,0 +1,3 @@ +Fix support of STRING and GLOBAL opcodes with non-ASCII arguments in +:mod:`pickletools`. :func:`pickletools.dis` now outputs non-ASCII bytes in +STRING, BINSTRING and SHORT_BINSTRING arguments as escaped (``\xXX``). diff --git a/Misc/NEWS.d/next/Library/2024-11-21-16-23-16.gh-issue-127065.cfL1zd.rst b/Misc/NEWS.d/next/Library/2024-11-21-16-23-16.gh-issue-127065.cfL1zd.rst new file mode 100644 index 00000000000000..83457da467ffa9 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-11-21-16-23-16.gh-issue-127065.cfL1zd.rst @@ -0,0 +1,2 @@ +Fix crash when calling a :func:`operator.methodcaller` instance from +multiple threads in the free threading build. 
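A small stress sketch of the scenario fixed by the gh-127065 entry above: one :func:`operator.methodcaller` instance shared by several threads, which previously could crash on the free-threaded build::

    import threading
    from operator import methodcaller

    upper = methodcaller("upper")

    def worker():
        for _ in range(10_000):
            assert upper("spam") == "SPAM"

    threads = [threading.Thread(target=worker) for _ in range(4)]
    for t in threads:
        t.start()
    for t in threads:
        t.join()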
diff --git a/Misc/NEWS.d/next/Library/2024-11-22-02-31-55.gh-issue-126766.jfkhBH.rst b/Misc/NEWS.d/next/Library/2024-11-22-02-31-55.gh-issue-126766.jfkhBH.rst new file mode 100644 index 00000000000000..998c99bf4358d5 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-11-22-02-31-55.gh-issue-126766.jfkhBH.rst @@ -0,0 +1,2 @@ +Fix issue where :func:`urllib.request.url2pathname` failed to discard any +'localhost' authority present in the URL. diff --git a/Misc/NEWS.d/next/Library/2024-11-22-03-40-02.gh-issue-127078.gI_PaP.rst b/Misc/NEWS.d/next/Library/2024-11-22-03-40-02.gh-issue-127078.gI_PaP.rst new file mode 100644 index 00000000000000..a84c06f3c7a273 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-11-22-03-40-02.gh-issue-127078.gI_PaP.rst @@ -0,0 +1,2 @@ +Fix issue where :func:`urllib.request.url2pathname` failed to discard an +extra slash before a UNC drive in the URL path on Windows. diff --git a/Misc/NEWS.d/next/Library/2024-11-22-04-49-31.gh-issue-125866.TUtvPK.rst b/Misc/NEWS.d/next/Library/2024-11-22-04-49-31.gh-issue-125866.TUtvPK.rst new file mode 100644 index 00000000000000..682e061747689b --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-11-22-04-49-31.gh-issue-125866.TUtvPK.rst @@ -0,0 +1,2 @@ +:func:`urllib.request.pathname2url` and :func:`~urllib.request.url2pathname` +no longer convert Windows drive letters to uppercase. diff --git a/Misc/NEWS.d/next/Library/2024-11-22-09-23-41.gh-issue-122273.H8M6fd.rst b/Misc/NEWS.d/next/Library/2024-11-22-09-23-41.gh-issue-122273.H8M6fd.rst new file mode 100644 index 00000000000000..99071e05377e33 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-11-22-09-23-41.gh-issue-122273.H8M6fd.rst @@ -0,0 +1 @@ +Support PyREPL history on Windows. Patch by devdanzin and Victor Stinner. diff --git a/Misc/NEWS.d/next/Library/2024-11-22-10-42-34.gh-issue-127035.UnbDlr.rst b/Misc/NEWS.d/next/Library/2024-11-22-10-42-34.gh-issue-127035.UnbDlr.rst new file mode 100644 index 00000000000000..6bb7abfdd50040 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-11-22-10-42-34.gh-issue-127035.UnbDlr.rst @@ -0,0 +1,4 @@ +Fix :mod:`shutil.which` on Windows. Now it looks at direct match if and only +if the command ends with a PATHEXT extension or X_OK is not in mode. Support +extensionless files if "." is in PATHEXT. Support PATHEXT extensions that end +with a dot. diff --git a/Misc/NEWS.d/next/Library/2024-11-23-12-25-06.gh-issue-125866.wEOP66.rst b/Misc/NEWS.d/next/Library/2024-11-23-12-25-06.gh-issue-125866.wEOP66.rst new file mode 100644 index 00000000000000..0b8ffdb3901db3 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-11-23-12-25-06.gh-issue-125866.wEOP66.rst @@ -0,0 +1,5 @@ +:func:`urllib.request.pathname2url` now adds an empty authority when +generating a URL for a path that begins with exactly one slash. For example, +the path ``/etc/hosts`` is converted to the scheme-less URL ``///etc/hosts``. +As a result of this change, URLs without authorities are only generated for +relative paths. diff --git a/Misc/NEWS.d/next/Library/2024-11-24-12-41-31.gh-issue-127217.UAXGFr.rst b/Misc/NEWS.d/next/Library/2024-11-24-12-41-31.gh-issue-127217.UAXGFr.rst new file mode 100644 index 00000000000000..3139e33302f378 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-11-24-12-41-31.gh-issue-127217.UAXGFr.rst @@ -0,0 +1,2 @@ +Fix :func:`urllib.request.pathname2url` for paths starting with multiple +slashes on Posix. 
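The expected Posix mappings from the gh-125866 entry above, shown as a sketch (the first value is taken directly from the entry; the relative-path case is inferred from its last sentence)::

    from urllib.request import pathname2url

    print(pathname2url("/etc/hosts"))   # '///etc/hosts' - empty authority added
    print(pathname2url("etc/hosts"))    # 'etc/hosts'    - relative, no authority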
diff --git a/Misc/NEWS.d/next/Library/2024-11-24-14-20-17.gh-issue-127182.WmfY2g.rst b/Misc/NEWS.d/next/Library/2024-11-24-14-20-17.gh-issue-127182.WmfY2g.rst new file mode 100644 index 00000000000000..2cc46ca3d33977 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-11-24-14-20-17.gh-issue-127182.WmfY2g.rst @@ -0,0 +1,2 @@ +Fix :meth:`!io.StringIO.__setstate__` crash, when :const:`None` was passed as +the first value. diff --git a/Misc/NEWS.d/next/Library/2024-11-25-19-04-10.gh-issue-127072.-c284K.rst b/Misc/NEWS.d/next/Library/2024-11-25-19-04-10.gh-issue-127072.-c284K.rst new file mode 100644 index 00000000000000..1bc7e1f0de9e0b --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-11-25-19-04-10.gh-issue-127072.-c284K.rst @@ -0,0 +1 @@ +Remove outdated ``socket.NETLINK_*`` constants not present in Linux kernels beyond 2.6.17. diff --git a/Misc/NEWS.d/next/Library/2024-11-26-17-42-00.gh-issue-127178.U8hxjc.rst b/Misc/NEWS.d/next/Library/2024-11-26-17-42-00.gh-issue-127178.U8hxjc.rst new file mode 100644 index 00000000000000..b703b58ea8e1d9 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-11-26-17-42-00.gh-issue-127178.U8hxjc.rst @@ -0,0 +1,4 @@ +A ``_sysconfig_vars_(...).json`` file is now shipped in the standard library +directory. It contains the output of :func:`sysconfig.get_config_vars` on +the default environment encoded as JSON data. This is an implementation +detail, and may change at any time. diff --git a/Misc/NEWS.d/next/Library/2024-11-27-14-06-35.gh-issue-123967.wxUmnW.rst b/Misc/NEWS.d/next/Library/2024-11-27-14-06-35.gh-issue-123967.wxUmnW.rst new file mode 100644 index 00000000000000..788fe0c78ef257 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-11-27-14-06-35.gh-issue-123967.wxUmnW.rst @@ -0,0 +1,2 @@ +Fix faulthandler for trampoline frames. If the top-most frame is a +trampoline frame, skip it. Patch by Victor Stinner. diff --git a/Misc/NEWS.d/next/Library/2024-11-27-14-23-02.gh-issue-127331.9sNEC9.rst b/Misc/NEWS.d/next/Library/2024-11-27-14-23-02.gh-issue-127331.9sNEC9.rst new file mode 100644 index 00000000000000..c668816955ca59 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-11-27-14-23-02.gh-issue-127331.9sNEC9.rst @@ -0,0 +1 @@ +:mod:`ssl` can show descriptions for errors added in OpenSSL 3.4. diff --git a/Misc/NEWS.d/next/Library/2024-11-27-16-06-10.gh-issue-127303.asqkgh.rst b/Misc/NEWS.d/next/Library/2024-11-27-16-06-10.gh-issue-127303.asqkgh.rst new file mode 100644 index 00000000000000..58ebf5d0abe141 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-11-27-16-06-10.gh-issue-127303.asqkgh.rst @@ -0,0 +1 @@ +Publicly expose :data:`~token.EXACT_TOKEN_TYPES` in :attr:`!token.__all__`. diff --git a/Misc/NEWS.d/next/Security/2024-11-13-11-09-12.gh-issue-126623.TO7NnR.rst b/Misc/NEWS.d/next/Security/2024-11-13-11-09-12.gh-issue-126623.TO7NnR.rst deleted file mode 100644 index f09a158af2a475..00000000000000 --- a/Misc/NEWS.d/next/Security/2024-11-13-11-09-12.gh-issue-126623.TO7NnR.rst +++ /dev/null @@ -1 +0,0 @@ -Upgrade libexpat to 2.6.4 diff --git a/Misc/NEWS.d/next/Tests/2024-09-30-22-52-44.gh-issue-124295.VZy5kx.rst b/Misc/NEWS.d/next/Tests/2024-09-30-22-52-44.gh-issue-124295.VZy5kx.rst deleted file mode 100644 index 3c2455cfc8c530..00000000000000 --- a/Misc/NEWS.d/next/Tests/2024-09-30-22-52-44.gh-issue-124295.VZy5kx.rst +++ /dev/null @@ -1 +0,0 @@ -Add translation tests to the :mod:`argparse` module. 
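A quick check of the gh-127303 entry above: :data:`token.EXACT_TOKEN_TYPES` maps operator strings to their exact token types and is now listed in ``token.__all__``::

    import token

    print("EXACT_TOKEN_TYPES" in token.__all__)        # True with this change
    print(token.EXACT_TOKEN_TYPES["+"] == token.PLUS)  # True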
diff --git a/Misc/NEWS.d/next/Tests/2024-10-21-14-10-56.gh-issue-125730.kcWbvI.rst b/Misc/NEWS.d/next/Tests/2024-10-21-14-10-56.gh-issue-125730.kcWbvI.rst deleted file mode 100644 index 061a1f9f27599c..00000000000000 --- a/Misc/NEWS.d/next/Tests/2024-10-21-14-10-56.gh-issue-125730.kcWbvI.rst +++ /dev/null @@ -1,2 +0,0 @@ -Change ``make test`` to not run GUI tests by default. Use ``make ci`` to run -tests with GUI tests instead. diff --git a/Misc/NEWS.d/next/Tests/2024-11-20-18-49-01.gh-issue-127076.DHnXxo.rst b/Misc/NEWS.d/next/Tests/2024-11-20-18-49-01.gh-issue-127076.DHnXxo.rst new file mode 100644 index 00000000000000..39323604bbef56 --- /dev/null +++ b/Misc/NEWS.d/next/Tests/2024-11-20-18-49-01.gh-issue-127076.DHnXxo.rst @@ -0,0 +1,2 @@ +Filter out memory-related ``mmap``, ``munmap``, and ``mprotect`` calls from +file-related ones when testing :mod:`io` behavior using strace. diff --git a/Misc/NEWS.d/next/Tests/2024-11-21-02-03-48.gh-issue-127076.a3avV1.rst b/Misc/NEWS.d/next/Tests/2024-11-21-02-03-48.gh-issue-127076.a3avV1.rst new file mode 100644 index 00000000000000..7dec8bd627c063 --- /dev/null +++ b/Misc/NEWS.d/next/Tests/2024-11-21-02-03-48.gh-issue-127076.a3avV1.rst @@ -0,0 +1 @@ +Disable strace based system call tests when LD_PRELOAD is set. diff --git a/Misc/NEWS.d/next/Tools-Demos/2024-11-13-22-23-36.gh-issue-126807.vpaWuN.rst b/Misc/NEWS.d/next/Tools-Demos/2024-11-13-22-23-36.gh-issue-126807.vpaWuN.rst deleted file mode 100644 index 310286ce8319ea..00000000000000 --- a/Misc/NEWS.d/next/Tools-Demos/2024-11-13-22-23-36.gh-issue-126807.vpaWuN.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix extraction warnings in :program:`pygettext.py` caused by mistaking -function definitions for function calls. diff --git a/Misc/NEWS.d/next/Tools-Demos/2024-11-16-20-47-20.gh-issue-126700.ayrHv4.rst b/Misc/NEWS.d/next/Tools-Demos/2024-11-16-20-47-20.gh-issue-126700.ayrHv4.rst new file mode 100644 index 00000000000000..c08ad9d7059904 --- /dev/null +++ b/Misc/NEWS.d/next/Tools-Demos/2024-11-16-20-47-20.gh-issue-126700.ayrHv4.rst @@ -0,0 +1 @@ +Add support for multi-argument :mod:`gettext` functions in :program:`pygettext.py`. diff --git a/Misc/NEWS.d/next/Windows/2024-10-15-21-28-43.gh-issue-125550.hmGWCP.rst b/Misc/NEWS.d/next/Windows/2024-10-15-21-28-43.gh-issue-125550.hmGWCP.rst deleted file mode 100644 index c3ae00c74b3d91..00000000000000 --- a/Misc/NEWS.d/next/Windows/2024-10-15-21-28-43.gh-issue-125550.hmGWCP.rst +++ /dev/null @@ -1,2 +0,0 @@ -Enable the :ref:`launcher` to detect Python 3.14 installs from the Windows -Store. diff --git a/Misc/NEWS.d/next/Windows/2024-10-23-17-24-23.gh-issue-125842.m3EF9E.rst b/Misc/NEWS.d/next/Windows/2024-10-23-17-24-23.gh-issue-125842.m3EF9E.rst deleted file mode 100644 index 63644721d57f5b..00000000000000 --- a/Misc/NEWS.d/next/Windows/2024-10-23-17-24-23.gh-issue-125842.m3EF9E.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix a :exc:`SystemError` when :func:`sys.exit` is called with ``0xffffffff`` -on Windows. diff --git a/Misc/NEWS.d/next/Windows/2024-10-29-09-39-06.gh-issue-126084.3wAL8o.rst b/Misc/NEWS.d/next/Windows/2024-10-29-09-39-06.gh-issue-126084.3wAL8o.rst deleted file mode 100644 index 319053d95db19e..00000000000000 --- a/Misc/NEWS.d/next/Windows/2024-10-29-09-39-06.gh-issue-126084.3wAL8o.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix venvwlauncher to launch pythonw instead of python so no extra console -window is created. 
diff --git a/Misc/NEWS.d/next/Windows/2024-10-29-19-48-03.gh-issue-125315.jdB9qN.rst b/Misc/NEWS.d/next/Windows/2024-10-29-19-48-03.gh-issue-125315.jdB9qN.rst deleted file mode 100644 index 3d813248766a5b..00000000000000 --- a/Misc/NEWS.d/next/Windows/2024-10-29-19-48-03.gh-issue-125315.jdB9qN.rst +++ /dev/null @@ -1,2 +0,0 @@ -Avoid crashing in :mod:`platform` due to slow WMI calls on some Windows -machines. diff --git a/Misc/NEWS.d/next/Windows/2024-10-29-20-09-52.gh-issue-126074.83ZzZs.rst b/Misc/NEWS.d/next/Windows/2024-10-29-20-09-52.gh-issue-126074.83ZzZs.rst deleted file mode 100644 index d4d06b090b5922..00000000000000 --- a/Misc/NEWS.d/next/Windows/2024-10-29-20-09-52.gh-issue-126074.83ZzZs.rst +++ /dev/null @@ -1 +0,0 @@ -Removed unnecessary DLLs from Windows embeddable package diff --git a/Misc/NEWS.d/next/Windows/2024-10-31-09-46-53.gh-issue-125729.KdKVLa.rst b/Misc/NEWS.d/next/Windows/2024-10-31-09-46-53.gh-issue-125729.KdKVLa.rst new file mode 100644 index 00000000000000..fbf4ab1cd1a11a --- /dev/null +++ b/Misc/NEWS.d/next/Windows/2024-10-31-09-46-53.gh-issue-125729.KdKVLa.rst @@ -0,0 +1 @@ +Makes the presence of the :mod:`turtle` module dependent on the Tcl/Tk installer option. Previously, the module was always installed but would be unusable without Tcl/Tk. diff --git a/Misc/NEWS.d/next/Windows/2024-11-07-20-42-31.gh-issue-126497.EARpd-.rst b/Misc/NEWS.d/next/Windows/2024-11-07-20-42-31.gh-issue-126497.EARpd-.rst deleted file mode 100644 index c902b9d6da8c65..00000000000000 --- a/Misc/NEWS.d/next/Windows/2024-11-07-20-42-31.gh-issue-126497.EARpd-.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fixes venv failure due to missing redirector executables in experimental -free-threaded installs. diff --git a/Misc/NEWS.d/next/Windows/2024-11-12-22-31-13.gh-issue-118973._lfxW6.rst b/Misc/NEWS.d/next/Windows/2024-11-12-22-31-13.gh-issue-118973._lfxW6.rst deleted file mode 100644 index c173c3ee081cc2..00000000000000 --- a/Misc/NEWS.d/next/Windows/2024-11-12-22-31-13.gh-issue-118973._lfxW6.rst +++ /dev/null @@ -1,3 +0,0 @@ -Ensures the experimental free-threaded install includes the ``_tkinter`` -module. The optional Tcl/Tk component must also be installed in order for -the module to work. 
diff --git a/Modules/_io/clinic/bufferedio.c.h b/Modules/_io/clinic/bufferedio.c.h index b703d7e6855398..e035bd99baca5f 100644 --- a/Modules/_io/clinic/bufferedio.c.h +++ b/Modules/_io/clinic/bufferedio.c.h @@ -330,9 +330,7 @@ _io__Buffered_simple_flush(buffered *self, PyObject *Py_UNUSED(ignored)) return return_value; } -#if defined(_io__Buffered_closed_HAS_DOCSTR) -# define _io__Buffered_closed_DOCSTR _io__Buffered_closed__doc__ -#else +#if !defined(_io__Buffered_closed_DOCSTR) # define _io__Buffered_closed_DOCSTR NULL #endif #if defined(_IO__BUFFERED_CLOSED_GETSETDEF) @@ -472,9 +470,7 @@ _io__Buffered_writable(buffered *self, PyObject *Py_UNUSED(ignored)) return return_value; } -#if defined(_io__Buffered_name_HAS_DOCSTR) -# define _io__Buffered_name_DOCSTR _io__Buffered_name__doc__ -#else +#if !defined(_io__Buffered_name_DOCSTR) # define _io__Buffered_name_DOCSTR NULL #endif #if defined(_IO__BUFFERED_NAME_GETSETDEF) @@ -499,9 +495,7 @@ _io__Buffered_name_get(buffered *self, void *Py_UNUSED(context)) return return_value; } -#if defined(_io__Buffered_mode_HAS_DOCSTR) -# define _io__Buffered_mode_DOCSTR _io__Buffered_mode__doc__ -#else +#if !defined(_io__Buffered_mode_DOCSTR) # define _io__Buffered_mode_DOCSTR NULL #endif #if defined(_IO__BUFFERED_MODE_GETSETDEF) @@ -1252,4 +1246,4 @@ _io_BufferedRandom___init__(PyObject *self, PyObject *args, PyObject *kwargs) exit: return return_value; } -/*[clinic end generated code: output=36abca5bd2f63ea1 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=8f28a97987a9fbe1 input=a9049054013a1b77]*/ diff --git a/Modules/_io/clinic/stringio.c.h b/Modules/_io/clinic/stringio.c.h index ac8b4b7b85b76d..6f9205af32f010 100644 --- a/Modules/_io/clinic/stringio.c.h +++ b/Modules/_io/clinic/stringio.c.h @@ -476,9 +476,7 @@ _io_StringIO___setstate__(stringio *self, PyObject *state) return return_value; } -#if defined(_io_StringIO_closed_HAS_DOCSTR) -# define _io_StringIO_closed_DOCSTR _io_StringIO_closed__doc__ -#else +#if !defined(_io_StringIO_closed_DOCSTR) # define _io_StringIO_closed_DOCSTR NULL #endif #if defined(_IO_STRINGIO_CLOSED_GETSETDEF) @@ -503,9 +501,7 @@ _io_StringIO_closed_get(stringio *self, void *Py_UNUSED(context)) return return_value; } -#if defined(_io_StringIO_line_buffering_HAS_DOCSTR) -# define _io_StringIO_line_buffering_DOCSTR _io_StringIO_line_buffering__doc__ -#else +#if !defined(_io_StringIO_line_buffering_DOCSTR) # define _io_StringIO_line_buffering_DOCSTR NULL #endif #if defined(_IO_STRINGIO_LINE_BUFFERING_GETSETDEF) @@ -530,9 +526,7 @@ _io_StringIO_line_buffering_get(stringio *self, void *Py_UNUSED(context)) return return_value; } -#if defined(_io_StringIO_newlines_HAS_DOCSTR) -# define _io_StringIO_newlines_DOCSTR _io_StringIO_newlines__doc__ -#else +#if !defined(_io_StringIO_newlines_DOCSTR) # define _io_StringIO_newlines_DOCSTR NULL #endif #if defined(_IO_STRINGIO_NEWLINES_GETSETDEF) @@ -556,4 +550,4 @@ _io_StringIO_newlines_get(stringio *self, void *Py_UNUSED(context)) return return_value; } -/*[clinic end generated code: output=8c8d4f8fa32986bb input=a9049054013a1b77]*/ +/*[clinic end generated code: output=9d2b092274469d42 input=a9049054013a1b77]*/ diff --git a/Modules/_io/clinic/textio.c.h b/Modules/_io/clinic/textio.c.h index c9301c5a23fa86..160f80ada43660 100644 --- a/Modules/_io/clinic/textio.c.h +++ b/Modules/_io/clinic/textio.c.h @@ -208,11 +208,9 @@ PyDoc_STRVAR(_io__TextIOBase_encoding__doc__, "Encoding of the text stream.\n" "\n" "Subclasses should override."); -#define _io__TextIOBase_encoding_HAS_DOCSTR 
+#define _io__TextIOBase_encoding_DOCSTR _io__TextIOBase_encoding__doc__ -#if defined(_io__TextIOBase_encoding_HAS_DOCSTR) -# define _io__TextIOBase_encoding_DOCSTR _io__TextIOBase_encoding__doc__ -#else +#if !defined(_io__TextIOBase_encoding_DOCSTR) # define _io__TextIOBase_encoding_DOCSTR NULL #endif #if defined(_IO__TEXTIOBASE_ENCODING_GETSETDEF) @@ -237,11 +235,9 @@ PyDoc_STRVAR(_io__TextIOBase_newlines__doc__, "Only line endings translated during reading are considered.\n" "\n" "Subclasses should override."); -#define _io__TextIOBase_newlines_HAS_DOCSTR +#define _io__TextIOBase_newlines_DOCSTR _io__TextIOBase_newlines__doc__ -#if defined(_io__TextIOBase_newlines_HAS_DOCSTR) -# define _io__TextIOBase_newlines_DOCSTR _io__TextIOBase_newlines__doc__ -#else +#if !defined(_io__TextIOBase_newlines_DOCSTR) # define _io__TextIOBase_newlines_DOCSTR NULL #endif #if defined(_IO__TEXTIOBASE_NEWLINES_GETSETDEF) @@ -264,11 +260,9 @@ PyDoc_STRVAR(_io__TextIOBase_errors__doc__, "The error setting of the decoder or encoder.\n" "\n" "Subclasses should override."); -#define _io__TextIOBase_errors_HAS_DOCSTR +#define _io__TextIOBase_errors_DOCSTR _io__TextIOBase_errors__doc__ -#if defined(_io__TextIOBase_errors_HAS_DOCSTR) -# define _io__TextIOBase_errors_DOCSTR _io__TextIOBase_errors__doc__ -#else +#if !defined(_io__TextIOBase_errors_DOCSTR) # define _io__TextIOBase_errors_DOCSTR NULL #endif #if defined(_IO__TEXTIOBASE_ERRORS_GETSETDEF) @@ -1138,9 +1132,7 @@ _io_TextIOWrapper_close(textio *self, PyObject *Py_UNUSED(ignored)) return return_value; } -#if defined(_io_TextIOWrapper_name_HAS_DOCSTR) -# define _io_TextIOWrapper_name_DOCSTR _io_TextIOWrapper_name__doc__ -#else +#if !defined(_io_TextIOWrapper_name_DOCSTR) # define _io_TextIOWrapper_name_DOCSTR NULL #endif #if defined(_IO_TEXTIOWRAPPER_NAME_GETSETDEF) @@ -1165,9 +1157,7 @@ _io_TextIOWrapper_name_get(textio *self, void *Py_UNUSED(context)) return return_value; } -#if defined(_io_TextIOWrapper_closed_HAS_DOCSTR) -# define _io_TextIOWrapper_closed_DOCSTR _io_TextIOWrapper_closed__doc__ -#else +#if !defined(_io_TextIOWrapper_closed_DOCSTR) # define _io_TextIOWrapper_closed_DOCSTR NULL #endif #if defined(_IO_TEXTIOWRAPPER_CLOSED_GETSETDEF) @@ -1192,9 +1182,7 @@ _io_TextIOWrapper_closed_get(textio *self, void *Py_UNUSED(context)) return return_value; } -#if defined(_io_TextIOWrapper_newlines_HAS_DOCSTR) -# define _io_TextIOWrapper_newlines_DOCSTR _io_TextIOWrapper_newlines__doc__ -#else +#if !defined(_io_TextIOWrapper_newlines_DOCSTR) # define _io_TextIOWrapper_newlines_DOCSTR NULL #endif #if defined(_IO_TEXTIOWRAPPER_NEWLINES_GETSETDEF) @@ -1219,9 +1207,7 @@ _io_TextIOWrapper_newlines_get(textio *self, void *Py_UNUSED(context)) return return_value; } -#if defined(_io_TextIOWrapper_errors_HAS_DOCSTR) -# define _io_TextIOWrapper_errors_DOCSTR _io_TextIOWrapper_errors__doc__ -#else +#if !defined(_io_TextIOWrapper_errors_DOCSTR) # define _io_TextIOWrapper_errors_DOCSTR NULL #endif #if defined(_IO_TEXTIOWRAPPER_ERRORS_GETSETDEF) @@ -1246,9 +1232,7 @@ _io_TextIOWrapper_errors_get(textio *self, void *Py_UNUSED(context)) return return_value; } -#if defined(_io_TextIOWrapper__CHUNK_SIZE_HAS_DOCSTR) -# define _io_TextIOWrapper__CHUNK_SIZE_DOCSTR _io_TextIOWrapper__CHUNK_SIZE__doc__ -#else +#if !defined(_io_TextIOWrapper__CHUNK_SIZE_DOCSTR) # define _io_TextIOWrapper__CHUNK_SIZE_DOCSTR NULL #endif #if defined(_IO_TEXTIOWRAPPER__CHUNK_SIZE_GETSETDEF) @@ -1273,9 +1257,7 @@ _io_TextIOWrapper__CHUNK_SIZE_get(textio *self, void *Py_UNUSED(context)) return 
return_value; } -#if defined(_IO_TEXTIOWRAPPER__CHUNK_SIZE_HAS_DOCSTR) -# define _io_TextIOWrapper__CHUNK_SIZE_DOCSTR _io_TextIOWrapper__CHUNK_SIZE__doc__ -#else +#if !defined(_io_TextIOWrapper__CHUNK_SIZE_DOCSTR) # define _io_TextIOWrapper__CHUNK_SIZE_DOCSTR NULL #endif #if defined(_IO_TEXTIOWRAPPER__CHUNK_SIZE_GETSETDEF) @@ -1299,4 +1281,4 @@ _io_TextIOWrapper__CHUNK_SIZE_set(textio *self, PyObject *value, void *Py_UNUSED return return_value; } -/*[clinic end generated code: output=459c0e50acd772b1 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=1172c500a022c65d input=a9049054013a1b77]*/ diff --git a/Modules/_io/stringio.c b/Modules/_io/stringio.c index f558613dc6233c..65e8d97aa8ac19 100644 --- a/Modules/_io/stringio.c +++ b/Modules/_io/stringio.c @@ -908,23 +908,25 @@ _io_StringIO___setstate___impl(stringio *self, PyObject *state) once by __init__. So we do not take any chance and replace object's buffer completely. */ { - PyObject *item; - Py_UCS4 *buf; - Py_ssize_t bufsize; - - item = PyTuple_GET_ITEM(state, 0); - buf = PyUnicode_AsUCS4Copy(item); - if (buf == NULL) - return NULL; - bufsize = PyUnicode_GET_LENGTH(item); + PyObject *item = PyTuple_GET_ITEM(state, 0); + if (PyUnicode_Check(item)) { + Py_UCS4 *buf = PyUnicode_AsUCS4Copy(item); + if (buf == NULL) + return NULL; + Py_ssize_t bufsize = PyUnicode_GET_LENGTH(item); - if (resize_buffer(self, bufsize) < 0) { + if (resize_buffer(self, bufsize) < 0) { + PyMem_Free(buf); + return NULL; + } + memcpy(self->buf, buf, bufsize * sizeof(Py_UCS4)); PyMem_Free(buf); - return NULL; + self->string_size = bufsize; + } + else { + assert(item == Py_None); + self->string_size = 0; } - memcpy(self->buf, buf, bufsize * sizeof(Py_UCS4)); - PyMem_Free(buf); - self->string_size = bufsize; } /* Set carefully the position value. Alternatively, we could use the seek diff --git a/Modules/_io/winconsoleio.c b/Modules/_io/winconsoleio.c index d7cb5abfdc0abd..3fa0301e337991 100644 --- a/Modules/_io/winconsoleio.c +++ b/Modules/_io/winconsoleio.c @@ -135,19 +135,67 @@ char _PyIO_get_console_type(PyObject *path_or_fd) { } static DWORD -_find_last_utf8_boundary(const char *buf, DWORD len) +_find_last_utf8_boundary(const unsigned char *buf, DWORD len) { - /* This function never returns 0, returns the original len instead */ - DWORD count = 1; - if (len == 0 || (buf[len - 1] & 0x80) == 0) { - return len; - } - for (;; count++) { - if (count > 3 || count >= len) { + for (DWORD count = 1; count < 4 && count <= len; count++) { + unsigned char c = buf[len - count]; + if (c < 0x80) { + /* No starting byte found. */ return len; } - if ((buf[len - count] & 0xc0) != 0x80) { - return len - count; + if (c >= 0xc0) { + if (c < 0xe0 /* 2-bytes sequence */ ? count < 2 : + c < 0xf0 /* 3-bytes sequence */ ? count < 3 : + c < 0xf8 /* 4-bytes sequence */) + { + /* Incomplete multibyte sequence. */ + return len - count; + } + /* Either complete or invalid sequence. */ + return len; + } + } + /* Either complete 4-bytes sequence or invalid sequence. */ + return len; +} + +/* Find the number of UTF-8 bytes that corresponds to the specified number of + * wchars. + * I.e. find x <= len so that MultiByteToWideChar(CP_UTF8, 0, s, x, NULL, 0) == n. + * + * WideCharToMultiByte() cannot be used for this, because the UTF-8 -> wchar + * conversion is not reversible (invalid UTF-8 byte produces \ufffd which + * will be converted back to 3-bytes UTF-8 sequence \xef\xbf\xbd). + * So we need to use binary search. 
+ */ +static DWORD +_wchar_to_utf8_count(const unsigned char *s, DWORD len, DWORD n) +{ + DWORD start = 0; + while (1) { + DWORD mid = 0; + for (DWORD i = len / 2; i <= len; i++) { + mid = _find_last_utf8_boundary(s, i); + if (mid != 0) { + break; + } + /* The middle could split the first multibytes sequence. */ + } + if (mid == len) { + return start + len; + } + if (mid == 0) { + mid = len > 1 ? len - 1 : 1; + } + DWORD wlen = MultiByteToWideChar(CP_UTF8, 0, s, mid, NULL, 0); + if (wlen <= n) { + s += mid; + start += mid; + len -= mid; + n -= wlen; + } + else { + len = mid; } } } @@ -563,8 +611,10 @@ read_console_w(HANDLE handle, DWORD maxlen, DWORD *readlen) { int err = 0, sig = 0; wchar_t *buf = (wchar_t*)PyMem_Malloc(maxlen * sizeof(wchar_t)); - if (!buf) + if (!buf) { + PyErr_NoMemory(); goto error; + } *readlen = 0; @@ -622,6 +672,7 @@ read_console_w(HANDLE handle, DWORD maxlen, DWORD *readlen) { Py_UNBLOCK_THREADS if (!newbuf) { sig = -1; + PyErr_NoMemory(); break; } buf = newbuf; @@ -645,8 +696,10 @@ read_console_w(HANDLE handle, DWORD maxlen, DWORD *readlen) { if (*readlen > 0 && buf[0] == L'\x1a') { PyMem_Free(buf); buf = (wchar_t *)PyMem_Malloc(sizeof(wchar_t)); - if (!buf) + if (!buf) { + PyErr_NoMemory(); goto error; + } buf[0] = L'\0'; *readlen = 0; } @@ -824,8 +877,10 @@ _io__WindowsConsoleIO_readall_impl(winconsoleio *self) bufsize = BUFSIZ; buf = (wchar_t*)PyMem_Malloc((bufsize + 1) * sizeof(wchar_t)); - if (buf == NULL) + if (buf == NULL) { + PyErr_NoMemory(); return NULL; + } while (1) { wchar_t *subbuf; @@ -847,6 +902,7 @@ _io__WindowsConsoleIO_readall_impl(winconsoleio *self) (bufsize + 1) * sizeof(wchar_t)); if (tmp == NULL) { PyMem_Free(buf); + PyErr_NoMemory(); return NULL; } buf = tmp; @@ -1022,43 +1078,49 @@ _io__WindowsConsoleIO_write_impl(winconsoleio *self, PyTypeObject *cls, len = (DWORD)b->len; Py_BEGIN_ALLOW_THREADS - wlen = MultiByteToWideChar(CP_UTF8, 0, b->buf, len, NULL, 0); - /* issue11395 there is an unspecified upper bound on how many bytes can be written at once. We cap at 32k - the caller will have to handle partial writes. Since we don't know how many input bytes are being ignored, we have to reduce and recalculate. */ - while (wlen > 32766 / sizeof(wchar_t)) { - len /= 2; + const DWORD max_wlen = 32766 / sizeof(wchar_t); + /* UTF-8 to wchar ratio is at most 3:1. */ + len = Py_MIN(len, max_wlen * 3); + while (1) { /* Fix for github issues gh-110913 and gh-82052. */ len = _find_last_utf8_boundary(b->buf, len); wlen = MultiByteToWideChar(CP_UTF8, 0, b->buf, len, NULL, 0); + if (wlen <= max_wlen) { + break; + } + len /= 2; } Py_END_ALLOW_THREADS - if (!wlen) - return PyErr_SetFromWindowsErr(0); + if (!wlen) { + return PyLong_FromLong(0); + } wbuf = (wchar_t*)PyMem_Malloc(wlen * sizeof(wchar_t)); + if (!wbuf) { + PyErr_NoMemory(); + return NULL; + } Py_BEGIN_ALLOW_THREADS wlen = MultiByteToWideChar(CP_UTF8, 0, b->buf, len, wbuf, wlen); if (wlen) { res = WriteConsoleW(handle, wbuf, wlen, &n, NULL); +#ifdef Py_DEBUG + if (res) { +#else if (res && n < wlen) { +#endif /* Wrote fewer characters than expected, which means our * len value may be wrong. So recalculate it from the - * characters that were written. As this could potentially - * result in a different value, we also validate that value. + * characters that were written. 
*/ - len = WideCharToMultiByte(CP_UTF8, 0, wbuf, n, - NULL, 0, NULL, NULL); - if (len) { - wlen = MultiByteToWideChar(CP_UTF8, 0, b->buf, len, - NULL, 0); - assert(wlen == len); - } + len = _wchar_to_utf8_count(b->buf, len, n); } } else res = 0; diff --git a/Modules/_localemodule.c b/Modules/_localemodule.c index 2a789ea74d27da..876627bcf5fad8 100644 --- a/Modules/_localemodule.c +++ b/Modules/_localemodule.c @@ -636,6 +636,37 @@ restore_locale(char *oldloc) } } +#ifdef __GLIBC__ +#if defined(ALT_DIGITS) || defined(ERA) +static PyObject * +decode_strings(const char *result, size_t max_count) +{ + /* Convert a sequence of NUL-separated C strings to a Python string + * containing semicolon separated items. */ + size_t i = 0; + size_t count = 0; + for (; count < max_count && result[i]; count++) { + i += strlen(result + i) + 1; + } + char *buf = PyMem_Malloc(i); + if (buf == NULL) { + PyErr_NoMemory(); + return NULL; + } + memcpy(buf, result, i); + /* Replace all NULs with semicolons. */ + i = 0; + while (--count) { + i += strlen(buf + i); + buf[i++] = ';'; + } + PyObject *pyresult = PyUnicode_DecodeLocale(buf, NULL); + PyMem_Free(buf); + return pyresult; +} +#endif +#endif + /*[clinic input] _locale.nl_langinfo @@ -668,32 +699,18 @@ _locale_nl_langinfo_impl(PyObject *module, int item) } PyObject *pyresult; #ifdef __GLIBC__ + /* According to the POSIX specification the result must be + * a sequence of semicolon-separated strings. + * But in Glibc they are NUL-separated. */ #ifdef ALT_DIGITS if (item == ALT_DIGITS && *result) { - /* According to the POSIX specification the result must be - * a sequence of up to 100 semicolon-separated strings. - * But in Glibc they are NUL-separated. */ - Py_ssize_t i = 0; - int count = 0; - for (; count < 100 && result[i]; count++) { - i += strlen(result + i) + 1; - } - char *buf = PyMem_Malloc(i); - if (buf == NULL) { - PyErr_NoMemory(); - pyresult = NULL; - } - else { - memcpy(buf, result, i); - /* Replace all NULs with semicolons. */ - i = 0; - while (--count) { - i += strlen(buf + i); - buf[i++] = ';'; - } - pyresult = PyUnicode_DecodeLocale(buf, NULL); - PyMem_Free(buf); - } + pyresult = decode_strings(result, 100); + } + else +#endif +#ifdef ERA + if (item == ERA && *result) { + pyresult = decode_strings(result, SIZE_MAX); } else #endif diff --git a/Modules/_operator.c b/Modules/_operator.c index 7e0d1f3df87e4d..6c1945174ab7cd 100644 --- a/Modules/_operator.c +++ b/Modules/_operator.c @@ -1602,6 +1602,7 @@ typedef struct { vectorcallfunc vectorcall; } methodcallerobject; +#ifndef Py_GIL_DISABLED static int _methodcaller_initialize_vectorcall(methodcallerobject* mc) { PyObject* args = mc->xargs; @@ -1664,6 +1665,7 @@ methodcaller_vectorcall( (PyTuple_GET_SIZE(mc->xargs)) | PY_VECTORCALL_ARGUMENTS_OFFSET, mc->vectorcall_kwnames); } +#endif /* AC 3.5: variable number of arguments, not currently support by AC */ @@ -1703,7 +1705,14 @@ methodcaller_new(PyTypeObject *type, PyObject *args, PyObject *kwds) mc->vectorcall_args = 0; +#ifdef Py_GIL_DISABLED + // gh-127065: The current implementation of methodcaller_vectorcall + // is not thread-safe because it modifies the `vectorcall_args` array, + // which is shared across calls. 
+ mc->vectorcall = NULL; +#else mc->vectorcall = (vectorcallfunc)methodcaller_vectorcall; +#endif PyObject_GC_Track(mc); return (PyObject *)mc; diff --git a/Modules/_pickle.c b/Modules/_pickle.c index 5837cd41a40cd4..2696f38046121f 100644 --- a/Modules/_pickle.c +++ b/Modules/_pickle.c @@ -6730,6 +6730,7 @@ load_build(PickleState *st, UnpicklerObject *self) } if (PyObject_SetItem(dict, d_key, d_value) < 0) { Py_DECREF(d_key); + Py_DECREF(dict); goto error; } Py_DECREF(d_key); diff --git a/Modules/_sre/clinic/sre.c.h b/Modules/_sre/clinic/sre.c.h index e287f3d5ad3991..87e4785a428468 100644 --- a/Modules/_sre/clinic/sre.c.h +++ b/Modules/_sre/clinic/sre.c.h @@ -985,6 +985,44 @@ PyDoc_STRVAR(_sre_SRE_Pattern___deepcopy____doc__, #define _SRE_SRE_PATTERN___DEEPCOPY___METHODDEF \ {"__deepcopy__", (PyCFunction)_sre_SRE_Pattern___deepcopy__, METH_O, _sre_SRE_Pattern___deepcopy____doc__}, +#if defined(Py_DEBUG) + +PyDoc_STRVAR(_sre_SRE_Pattern__fail_after__doc__, +"_fail_after($self, count, exception, /)\n" +"--\n" +"\n" +"For debugging."); + +#define _SRE_SRE_PATTERN__FAIL_AFTER_METHODDEF \ + {"_fail_after", _PyCFunction_CAST(_sre_SRE_Pattern__fail_after), METH_FASTCALL, _sre_SRE_Pattern__fail_after__doc__}, + +static PyObject * +_sre_SRE_Pattern__fail_after_impl(PatternObject *self, int count, + PyObject *exception); + +static PyObject * +_sre_SRE_Pattern__fail_after(PatternObject *self, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + int count; + PyObject *exception; + + if (!_PyArg_CheckPositional("_fail_after", nargs, 2, 2)) { + goto exit; + } + count = PyLong_AsInt(args[0]); + if (count == -1 && PyErr_Occurred()) { + goto exit; + } + exception = args[1]; + return_value = _sre_SRE_Pattern__fail_after_impl(self, count, exception); + +exit: + return return_value; +} + +#endif /* defined(Py_DEBUG) */ + PyDoc_STRVAR(_sre_compile__doc__, "compile($module, /, pattern, flags, code, groups, groupindex,\n" " indexgroup)\n" @@ -1474,4 +1512,8 @@ _sre_SRE_Scanner_search(ScannerObject *self, PyTypeObject *cls, PyObject *const } return _sre_SRE_Scanner_search_impl(self, cls); } -/*[clinic end generated code: output=afaa301d55957cb0 input=a9049054013a1b77]*/ + +#ifndef _SRE_SRE_PATTERN__FAIL_AFTER_METHODDEF + #define _SRE_SRE_PATTERN__FAIL_AFTER_METHODDEF +#endif /* !defined(_SRE_SRE_PATTERN__FAIL_AFTER_METHODDEF) */ +/*[clinic end generated code: output=f8cb77f2261f0b2e input=a9049054013a1b77]*/ diff --git a/Modules/_sre/sre.c b/Modules/_sre/sre.c index 2c86f8869d8e58..36f542ddb4df2b 100644 --- a/Modules/_sre/sre.c +++ b/Modules/_sre/sre.c @@ -267,6 +267,85 @@ data_stack_grow(SRE_STATE* state, Py_ssize_t size) return 0; } +/* memory pool functions for SRE_REPEAT, this can avoid memory + leak when SRE(match) function terminates abruptly. + state->repeat_pool_used is a doubly-linked list, so that we + can remove a SRE_REPEAT node from it. + state->repeat_pool_unused is a singly-linked list, we put/get + node at the head. 
*/ +static SRE_REPEAT * +repeat_pool_malloc(SRE_STATE *state) +{ + SRE_REPEAT *repeat; + + if (state->repeat_pool_unused) { + /* remove from unused pool (singly-linked list) */ + repeat = state->repeat_pool_unused; + state->repeat_pool_unused = repeat->pool_next; + } + else { + repeat = PyMem_Malloc(sizeof(SRE_REPEAT)); + if (!repeat) { + return NULL; + } + } + + /* add to used pool (doubly-linked list) */ + SRE_REPEAT *temp = state->repeat_pool_used; + if (temp) { + temp->pool_prev = repeat; + } + repeat->pool_prev = NULL; + repeat->pool_next = temp; + state->repeat_pool_used = repeat; + + return repeat; +} + +static void +repeat_pool_free(SRE_STATE *state, SRE_REPEAT *repeat) +{ + SRE_REPEAT *prev = repeat->pool_prev; + SRE_REPEAT *next = repeat->pool_next; + + /* remove from used pool (doubly-linked list) */ + if (prev) { + prev->pool_next = next; + } + else { + state->repeat_pool_used = next; + } + if (next) { + next->pool_prev = prev; + } + + /* add to unused pool (singly-linked list) */ + repeat->pool_next = state->repeat_pool_unused; + state->repeat_pool_unused = repeat; +} + +static void +repeat_pool_clear(SRE_STATE *state) +{ + /* clear used pool */ + SRE_REPEAT *next = state->repeat_pool_used; + state->repeat_pool_used = NULL; + while (next) { + SRE_REPEAT *temp = next; + next = temp->pool_next; + PyMem_Free(temp); + } + + /* clear unused pool */ + next = state->repeat_pool_unused; + state->repeat_pool_unused = NULL; + while (next) { + SRE_REPEAT *temp = next; + next = temp->pool_next; + PyMem_Free(temp); + } +} + /* generate 8-bit version */ #define SRE_CHAR Py_UCS1 @@ -511,6 +590,11 @@ state_init(SRE_STATE* state, PatternObject* pattern, PyObject* string, state->pos = start; state->endpos = end; +#ifdef Py_DEBUG + state->fail_after_count = pattern->fail_after_count; + state->fail_after_exc = pattern->fail_after_exc; // borrowed ref +#endif + return string; err: /* We add an explicit cast here because MSVC has a bug when @@ -533,6 +617,8 @@ state_fini(SRE_STATE* state) /* See above PyMem_Free() for why we explicitly cast here. */ PyMem_Free((void*) state->mark); state->mark = NULL; + /* SRE_REPEAT pool */ + repeat_pool_clear(state); } /* calculate offset from start of string */ @@ -619,6 +705,9 @@ pattern_traverse(PatternObject *self, visitproc visit, void *arg) Py_VISIT(self->groupindex); Py_VISIT(self->indexgroup); Py_VISIT(self->pattern); +#ifdef Py_DEBUG + Py_VISIT(self->fail_after_exc); +#endif return 0; } @@ -628,6 +717,9 @@ pattern_clear(PatternObject *self) Py_CLEAR(self->groupindex); Py_CLEAR(self->indexgroup); Py_CLEAR(self->pattern); +#ifdef Py_DEBUG + Py_CLEAR(self->fail_after_exc); +#endif return 0; } @@ -690,7 +782,7 @@ _sre_SRE_Pattern_match_impl(PatternObject *self, PyTypeObject *cls, Py_ssize_t status; PyObject *match; - if (!state_init(&state, (PatternObject *)self, string, pos, endpos)) + if (!state_init(&state, self, string, pos, endpos)) return NULL; INIT_TRACE(&state); @@ -1381,6 +1473,29 @@ _sre_SRE_Pattern___deepcopy__(PatternObject *self, PyObject *memo) return Py_NewRef(self); } +#ifdef Py_DEBUG +/*[clinic input] +_sre.SRE_Pattern._fail_after + + count: int + exception: object + / + +For debugging. 
+[clinic start generated code]*/ + +static PyObject * +_sre_SRE_Pattern__fail_after_impl(PatternObject *self, int count, + PyObject *exception) +/*[clinic end generated code: output=9a6bf12135ac50c2 input=ef80a45c66c5499d]*/ +{ + self->fail_after_count = count; + Py_INCREF(exception); + Py_XSETREF(self->fail_after_exc, exception); + Py_RETURN_NONE; +} +#endif /* Py_DEBUG */ + static PyObject * pattern_repr(PatternObject *obj) { @@ -1506,6 +1621,10 @@ _sre_compile_impl(PyObject *module, PyObject *pattern, int flags, self->pattern = NULL; self->groupindex = NULL; self->indexgroup = NULL; +#ifdef Py_DEBUG + self->fail_after_count = -1; + self->fail_after_exc = NULL; +#endif self->codesize = n; @@ -2604,7 +2723,8 @@ pattern_new_match(_sremodulestate* module_state, if (!match) return NULL; - match->pattern = (PatternObject*)Py_NewRef(pattern); + Py_INCREF(pattern); + match->pattern = pattern; match->string = Py_NewRef(state->string); @@ -2740,7 +2860,7 @@ _sre_SRE_Scanner_match_impl(ScannerObject *self, PyTypeObject *cls) return NULL; } - match = pattern_new_match(module_state, (PatternObject*) self->pattern, + match = pattern_new_match(module_state, self->pattern, state, status); if (status == 0) @@ -2790,7 +2910,7 @@ _sre_SRE_Scanner_search_impl(ScannerObject *self, PyTypeObject *cls) return NULL; } - match = pattern_new_match(module_state, (PatternObject*) self->pattern, + match = pattern_new_match(module_state, self->pattern, state, status); if (status == 0) @@ -2826,7 +2946,8 @@ pattern_scanner(_sremodulestate *module_state, return NULL; } - scanner->pattern = Py_NewRef(self); + Py_INCREF(self); + scanner->pattern = self; PyObject_GC_Track(scanner); return (PyObject*) scanner; @@ -3020,6 +3141,7 @@ static PyMethodDef pattern_methods[] = { _SRE_SRE_PATTERN_SCANNER_METHODDEF _SRE_SRE_PATTERN___COPY___METHODDEF _SRE_SRE_PATTERN___DEEPCOPY___METHODDEF + _SRE_SRE_PATTERN__FAIL_AFTER_METHODDEF {"__class_getitem__", Py_GenericAlias, METH_O|METH_CLASS, PyDoc_STR("See PEP 585")}, {NULL, NULL} diff --git a/Modules/_sre/sre.h b/Modules/_sre/sre.h index 83d89d57b11199..42681c2addf3c2 100644 --- a/Modules/_sre/sre.h +++ b/Modules/_sre/sre.h @@ -34,6 +34,11 @@ typedef struct { int flags; /* flags used when compiling pattern source */ PyObject *weakreflist; /* List of weak references */ int isbytes; /* pattern type (1 - bytes, 0 - string, -1 - None) */ +#ifdef Py_DEBUG + /* for simulation of user interruption */ + int fail_after_count; + PyObject *fail_after_exc; +#endif /* pattern code */ Py_ssize_t codesize; SRE_CODE code[1]; @@ -68,6 +73,9 @@ typedef struct SRE_REPEAT_T { const SRE_CODE* pattern; /* points to REPEAT operator arguments */ const void* last_ptr; /* helper to check for infinite loops */ struct SRE_REPEAT_T *prev; /* points to previous repeat context */ + /* for SRE_REPEAT pool */ + struct SRE_REPEAT_T *pool_prev; + struct SRE_REPEAT_T *pool_next; } SRE_REPEAT; typedef struct { @@ -95,12 +103,19 @@ typedef struct { size_t data_stack_base; /* current repeat context */ SRE_REPEAT *repeat; + /* SRE_REPEAT pool */ + SRE_REPEAT *repeat_pool_used; + SRE_REPEAT *repeat_pool_unused; unsigned int sigcount; +#ifdef Py_DEBUG + int fail_after_count; + PyObject *fail_after_exc; +#endif } SRE_STATE; typedef struct { PyObject_HEAD - PyObject* pattern; + PatternObject* pattern; SRE_STATE state; int executing; } ScannerObject; diff --git a/Modules/_sre/sre_lib.h b/Modules/_sre/sre_lib.h index 97fbb0a75e54b6..af4bfc56083bcb 100644 --- a/Modules/_sre/sre_lib.h +++ b/Modules/_sre/sre_lib.h @@ -560,13 +560,28 
@@ typedef struct { Py_ssize_t last_ctx_pos; } SRE(match_context); -#define MAYBE_CHECK_SIGNALS \ +#define _MAYBE_CHECK_SIGNALS \ do { \ if ((0 == (++sigcount & 0xfff)) && PyErr_CheckSignals()) { \ RETURN_ERROR(SRE_ERROR_INTERRUPTED); \ } \ } while (0) +#ifdef Py_DEBUG +# define MAYBE_CHECK_SIGNALS \ + do { \ + _MAYBE_CHECK_SIGNALS; \ + if (state->fail_after_count >= 0) { \ + if (state->fail_after_count-- == 0) { \ + PyErr_SetNone(state->fail_after_exc); \ + RETURN_ERROR(SRE_ERROR_INTERRUPTED); \ + } \ + } \ + } while (0) +#else +# define MAYBE_CHECK_SIGNALS _MAYBE_CHECK_SIGNALS +#endif /* Py_DEBUG */ + #ifdef HAVE_COMPUTED_GOTOS #ifndef USE_COMPUTED_GOTOS #define USE_COMPUTED_GOTOS 1 @@ -1120,12 +1135,9 @@ SRE(match)(SRE_STATE* state, const SRE_CODE* pattern, int toplevel) pattern[1], pattern[2])); /* install new repeat context */ - /* TODO(https://github.com/python/cpython/issues/67877): Fix this - * potential memory leak. */ - ctx->u.rep = (SRE_REPEAT*) PyMem_Malloc(sizeof(*ctx->u.rep)); + ctx->u.rep = repeat_pool_malloc(state); if (!ctx->u.rep) { - PyErr_NoMemory(); - RETURN_FAILURE; + RETURN_ERROR(SRE_ERROR_MEMORY); } ctx->u.rep->count = -1; ctx->u.rep->pattern = pattern; @@ -1136,7 +1148,7 @@ SRE(match)(SRE_STATE* state, const SRE_CODE* pattern, int toplevel) state->ptr = ptr; DO_JUMP(JUMP_REPEAT, jump_repeat, pattern+pattern[0]); state->repeat = ctx->u.rep->prev; - PyMem_Free(ctx->u.rep); + repeat_pool_free(state, ctx->u.rep); if (ret) { RETURN_ON_ERROR(ret); @@ -1294,6 +1306,17 @@ SRE(match)(SRE_STATE* state, const SRE_CODE* pattern, int toplevel) pointer */ state->ptr = ptr; + /* Set state->repeat to non-NULL */ + ctx->u.rep = repeat_pool_malloc(state); + if (!ctx->u.rep) { + RETURN_ERROR(SRE_ERROR_MEMORY); + } + ctx->u.rep->count = -1; + ctx->u.rep->pattern = NULL; + ctx->u.rep->prev = state->repeat; + ctx->u.rep->last_ptr = NULL; + state->repeat = ctx->u.rep; + /* Initialize Count to 0 */ ctx->count = 0; @@ -1308,6 +1331,9 @@ SRE(match)(SRE_STATE* state, const SRE_CODE* pattern, int toplevel) } else { state->ptr = ptr; + /* Restore state->repeat */ + state->repeat = ctx->u.rep->prev; + repeat_pool_free(state, ctx->u.rep); RETURN_FAILURE; } } @@ -1380,6 +1406,10 @@ SRE(match)(SRE_STATE* state, const SRE_CODE* pattern, int toplevel) } } + /* Restore state->repeat */ + state->repeat = ctx->u.rep->prev; + repeat_pool_free(state, ctx->u.rep); + /* Evaluate Tail */ /* Jump to end of pattern indicated by skip, and then skip the SUCCESS op code that follows it. */ diff --git a/Modules/_ssl.c b/Modules/_ssl.c index b6b5ebf094c938..e5b8bf21002ea5 100644 --- a/Modules/_ssl.c +++ b/Modules/_ssl.c @@ -120,8 +120,9 @@ static void _PySSLFixErrno(void) { #endif /* Include generated data (error codes) */ +/* See make_ssl_data.h for notes on adding a new version. 
*/ #if (OPENSSL_VERSION_NUMBER >= 0x30100000L) -#include "_ssl_data_31.h" +#include "_ssl_data_34.h" #elif (OPENSSL_VERSION_NUMBER >= 0x30000000L) #include "_ssl_data_300.h" #elif (OPENSSL_VERSION_NUMBER >= 0x10101000L) diff --git a/Modules/_ssl_data_111.h b/Modules/_ssl_data_111.h index 093c786e6a26f6..061fac2bd5822b 100644 --- a/Modules/_ssl_data_111.h +++ b/Modules/_ssl_data_111.h @@ -1,4 +1,6 @@ -/* File generated by Tools/ssl/make_ssl_data.py *//* Generated on 2023-06-01T02:58:04.081473 */ +/* File generated by Tools/ssl/make_ssl_data.py */ +/* Generated on 2024-11-27T12:48:46.194048+00:00 */ +/* Generated from Git commit OpenSSL_1_1_1w-0-ge04bd3433f */ static struct py_ssl_library_code library_codes[] = { #ifdef ERR_LIB_ASN1 {"ASN1", ERR_LIB_ASN1}, diff --git a/Modules/_ssl_data_300.h b/Modules/_ssl_data_300.h index dc66731f6b6093..b687ce43c77d66 100644 --- a/Modules/_ssl_data_300.h +++ b/Modules/_ssl_data_300.h @@ -1,4 +1,7 @@ -/* File generated by Tools/ssl/make_ssl_data.py *//* Generated on 2023-06-01T03:03:52.163218 */ +/* File generated by Tools/ssl/make_ssl_data.py */ +/* Generated on 2023-06-01T03:03:52.163218 */ +/* Manually edited to add definitions from 1.1.1 (GH-105174) */ + static struct py_ssl_library_code library_codes[] = { #ifdef ERR_LIB_ASN1 {"ASN1", ERR_LIB_ASN1}, diff --git a/Modules/_ssl_data_31.h b/Modules/_ssl_data_34.h similarity index 92% rename from Modules/_ssl_data_31.h rename to Modules/_ssl_data_34.h index c589c501f4e948..d4af3e1c1fa928 100644 --- a/Modules/_ssl_data_31.h +++ b/Modules/_ssl_data_34.h @@ -1,4 +1,6 @@ -/* File generated by Tools/ssl/make_ssl_data.py *//* Generated on 2023-06-01T03:04:00.275280 */ +/* File generated by Tools/ssl/make_ssl_data.py */ +/* Generated on 2024-11-27T12:35:52.276767+00:00 */ +/* Generated from Git commit openssl-3.4.0-0-g98acb6b028 */ static struct py_ssl_library_code library_codes[] = { #ifdef ERR_LIB_ASN1 {"ASN1", ERR_LIB_ASN1}, @@ -300,6 +302,11 @@ static struct py_ssl_error_code error_codes[] = { #else {"FIRST_NUM_TOO_LARGE", 13, 122}, #endif + #ifdef ASN1_R_GENERALIZEDTIME_IS_TOO_SHORT + {"GENERALIZEDTIME_IS_TOO_SHORT", ERR_LIB_ASN1, ASN1_R_GENERALIZEDTIME_IS_TOO_SHORT}, + #else + {"GENERALIZEDTIME_IS_TOO_SHORT", 13, 232}, + #endif #ifdef ASN1_R_HEADER_TOO_LONG {"HEADER_TOO_LONG", ERR_LIB_ASN1, ASN1_R_HEADER_TOO_LONG}, #else @@ -730,6 +737,11 @@ static struct py_ssl_error_code error_codes[] = { #else {"UNSUPPORTED_TYPE", 13, 196}, #endif + #ifdef ASN1_R_UTCTIME_IS_TOO_SHORT + {"UTCTIME_IS_TOO_SHORT", ERR_LIB_ASN1, ASN1_R_UTCTIME_IS_TOO_SHORT}, + #else + {"UTCTIME_IS_TOO_SHORT", 13, 233}, + #endif #ifdef ASN1_R_WRONG_INTEGER_TYPE {"WRONG_INTEGER_TYPE", ERR_LIB_ASN1, ASN1_R_WRONG_INTEGER_TYPE}, #else @@ -845,6 +857,11 @@ static struct py_ssl_error_code error_codes[] = { #else {"LISTEN_V6_ONLY", 32, 136}, #endif + #ifdef BIO_R_LOCAL_ADDR_NOT_AVAILABLE + {"LOCAL_ADDR_NOT_AVAILABLE", ERR_LIB_BIO, BIO_R_LOCAL_ADDR_NOT_AVAILABLE}, + #else + {"LOCAL_ADDR_NOT_AVAILABLE", 32, 111}, + #endif #ifdef BIO_R_LOOKUP_RETURNED_NOTHING {"LOOKUP_RETURNED_NOTHING", ERR_LIB_BIO, BIO_R_LOOKUP_RETURNED_NOTHING}, #else @@ -860,6 +877,11 @@ static struct py_ssl_error_code error_codes[] = { #else {"NBIO_CONNECT_ERROR", 32, 110}, #endif + #ifdef BIO_R_NON_FATAL + {"NON_FATAL", ERR_LIB_BIO, BIO_R_NON_FATAL}, + #else + {"NON_FATAL", 32, 112}, + #endif #ifdef BIO_R_NO_ACCEPT_ADDR_OR_SERVICE_SPECIFIED {"NO_ACCEPT_ADDR_OR_SERVICE_SPECIFIED", ERR_LIB_BIO, BIO_R_NO_ACCEPT_ADDR_OR_SERVICE_SPECIFIED}, #else @@ -880,6 +902,26 @@ static struct 
py_ssl_error_code error_codes[] = { #else {"NO_SUCH_FILE", 32, 128}, #endif + #ifdef BIO_R_PEER_ADDR_NOT_AVAILABLE + {"PEER_ADDR_NOT_AVAILABLE", ERR_LIB_BIO, BIO_R_PEER_ADDR_NOT_AVAILABLE}, + #else + {"PEER_ADDR_NOT_AVAILABLE", 32, 114}, + #endif + #ifdef BIO_R_PORT_MISMATCH + {"PORT_MISMATCH", ERR_LIB_BIO, BIO_R_PORT_MISMATCH}, + #else + {"PORT_MISMATCH", 32, 150}, + #endif + #ifdef BIO_R_TFO_DISABLED + {"TFO_DISABLED", ERR_LIB_BIO, BIO_R_TFO_DISABLED}, + #else + {"TFO_DISABLED", 32, 106}, + #endif + #ifdef BIO_R_TFO_NO_KERNEL_SUPPORT + {"TFO_NO_KERNEL_SUPPORT", ERR_LIB_BIO, BIO_R_TFO_NO_KERNEL_SUPPORT}, + #else + {"TFO_NO_KERNEL_SUPPORT", 32, 108}, + #endif #ifdef BIO_R_TRANSFER_ERROR {"TRANSFER_ERROR", ERR_LIB_BIO, BIO_R_TRANSFER_ERROR}, #else @@ -920,6 +962,11 @@ static struct py_ssl_error_code error_codes[] = { #else {"UNABLE_TO_REUSEADDR", 32, 139}, #endif + #ifdef BIO_R_UNABLE_TO_TFO + {"UNABLE_TO_TFO", ERR_LIB_BIO, BIO_R_UNABLE_TO_TFO}, + #else + {"UNABLE_TO_TFO", 32, 109}, + #endif #ifdef BIO_R_UNAVAILABLE_IP_FAMILY {"UNAVAILABLE_IP_FAMILY", ERR_LIB_BIO, BIO_R_UNAVAILABLE_IP_FAMILY}, #else @@ -1230,6 +1277,11 @@ static struct py_ssl_error_code error_codes[] = { #else {"ERROR_VALIDATING_SIGNATURE", 58, 171}, #endif + #ifdef CMP_R_EXPECTED_POLLREQ + {"EXPECTED_POLLREQ", ERR_LIB_CMP, CMP_R_EXPECTED_POLLREQ}, + #else + {"EXPECTED_POLLREQ", 58, 104}, + #endif #ifdef CMP_R_FAILED_BUILDING_OWN_CHAIN {"FAILED_BUILDING_OWN_CHAIN", ERR_LIB_CMP, CMP_R_FAILED_BUILDING_OWN_CHAIN}, #else @@ -1250,16 +1302,51 @@ static struct py_ssl_error_code error_codes[] = { #else {"FAIL_INFO_OUT_OF_RANGE", 58, 129}, #endif + #ifdef CMP_R_GENERATE_CERTREQTEMPLATE + {"GENERATE_CERTREQTEMPLATE", ERR_LIB_CMP, CMP_R_GENERATE_CERTREQTEMPLATE}, + #else + {"GENERATE_CERTREQTEMPLATE", 58, 197}, + #endif + #ifdef CMP_R_GENERATE_CRLSTATUS + {"GENERATE_CRLSTATUS", ERR_LIB_CMP, CMP_R_GENERATE_CRLSTATUS}, + #else + {"GENERATE_CRLSTATUS", 58, 198}, + #endif + #ifdef CMP_R_GETTING_GENP + {"GETTING_GENP", ERR_LIB_CMP, CMP_R_GETTING_GENP}, + #else + {"GETTING_GENP", 58, 192}, + #endif + #ifdef CMP_R_GET_ITAV + {"GET_ITAV", ERR_LIB_CMP, CMP_R_GET_ITAV}, + #else + {"GET_ITAV", 58, 199}, + #endif #ifdef CMP_R_INVALID_ARGS {"INVALID_ARGS", ERR_LIB_CMP, CMP_R_INVALID_ARGS}, #else {"INVALID_ARGS", 58, 100}, #endif + #ifdef CMP_R_INVALID_GENP + {"INVALID_GENP", ERR_LIB_CMP, CMP_R_INVALID_GENP}, + #else + {"INVALID_GENP", 58, 193}, + #endif + #ifdef CMP_R_INVALID_KEYSPEC + {"INVALID_KEYSPEC", ERR_LIB_CMP, CMP_R_INVALID_KEYSPEC}, + #else + {"INVALID_KEYSPEC", 58, 202}, + #endif #ifdef CMP_R_INVALID_OPTION {"INVALID_OPTION", ERR_LIB_CMP, CMP_R_INVALID_OPTION}, #else {"INVALID_OPTION", 58, 174}, #endif + #ifdef CMP_R_INVALID_ROOTCAKEYUPDATE + {"INVALID_ROOTCAKEYUPDATE", ERR_LIB_CMP, CMP_R_INVALID_ROOTCAKEYUPDATE}, + #else + {"INVALID_ROOTCAKEYUPDATE", 58, 195}, + #endif #ifdef CMP_R_MISSING_CERTID {"MISSING_CERTID", ERR_LIB_CMP, CMP_R_MISSING_CERTID}, #else @@ -1425,6 +1512,21 @@ static struct py_ssl_error_code error_codes[] = { #else {"TRANSFER_ERROR", 58, 159}, #endif + #ifdef CMP_R_UNCLEAN_CTX + {"UNCLEAN_CTX", ERR_LIB_CMP, CMP_R_UNCLEAN_CTX}, + #else + {"UNCLEAN_CTX", 58, 191}, + #endif + #ifdef CMP_R_UNEXPECTED_CERTPROFILE + {"UNEXPECTED_CERTPROFILE", ERR_LIB_CMP, CMP_R_UNEXPECTED_CERTPROFILE}, + #else + {"UNEXPECTED_CERTPROFILE", 58, 196}, + #endif + #ifdef CMP_R_UNEXPECTED_CRLSTATUSLIST + {"UNEXPECTED_CRLSTATUSLIST", ERR_LIB_CMP, CMP_R_UNEXPECTED_CRLSTATUSLIST}, + #else + {"UNEXPECTED_CRLSTATUSLIST", 58, 201}, + #endif #ifdef 
CMP_R_UNEXPECTED_PKIBODY {"UNEXPECTED_PKIBODY", ERR_LIB_CMP, CMP_R_UNEXPECTED_PKIBODY}, #else @@ -1435,11 +1537,21 @@ static struct py_ssl_error_code error_codes[] = { #else {"UNEXPECTED_PKISTATUS", 58, 185}, #endif + #ifdef CMP_R_UNEXPECTED_POLLREQ + {"UNEXPECTED_POLLREQ", ERR_LIB_CMP, CMP_R_UNEXPECTED_POLLREQ}, + #else + {"UNEXPECTED_POLLREQ", 58, 105}, + #endif #ifdef CMP_R_UNEXPECTED_PVNO {"UNEXPECTED_PVNO", ERR_LIB_CMP, CMP_R_UNEXPECTED_PVNO}, #else {"UNEXPECTED_PVNO", 58, 153}, #endif + #ifdef CMP_R_UNEXPECTED_SENDER + {"UNEXPECTED_SENDER", ERR_LIB_CMP, CMP_R_UNEXPECTED_SENDER}, + #else + {"UNEXPECTED_SENDER", 58, 106}, + #endif #ifdef CMP_R_UNKNOWN_ALGORITHM_ID {"UNKNOWN_ALGORITHM_ID", ERR_LIB_CMP, CMP_R_UNKNOWN_ALGORITHM_ID}, #else @@ -1450,6 +1562,11 @@ static struct py_ssl_error_code error_codes[] = { #else {"UNKNOWN_CERT_TYPE", 58, 135}, #endif + #ifdef CMP_R_UNKNOWN_CRL_ISSUER + {"UNKNOWN_CRL_ISSUER", ERR_LIB_CMP, CMP_R_UNKNOWN_CRL_ISSUER}, + #else + {"UNKNOWN_CRL_ISSUER", 58, 200}, + #endif #ifdef CMP_R_UNKNOWN_PKISTATUS {"UNKNOWN_PKISTATUS", ERR_LIB_CMP, CMP_R_UNKNOWN_PKISTATUS}, #else @@ -1465,6 +1582,11 @@ static struct py_ssl_error_code error_codes[] = { #else {"UNSUPPORTED_KEY_TYPE", 58, 137}, #endif + #ifdef CMP_R_UNSUPPORTED_PKIBODY + {"UNSUPPORTED_PKIBODY", ERR_LIB_CMP, CMP_R_UNSUPPORTED_PKIBODY}, + #else + {"UNSUPPORTED_PKIBODY", 58, 101}, + #endif #ifdef CMP_R_UNSUPPORTED_PROTECTION_ALG_DHBASEDMAC {"UNSUPPORTED_PROTECTION_ALG_DHBASEDMAC", ERR_LIB_CMP, CMP_R_UNSUPPORTED_PROTECTION_ALG_DHBASEDMAC}, #else @@ -1825,6 +1947,11 @@ static struct py_ssl_error_code error_codes[] = { #else {"NO_SIGNERS", 46, 135}, #endif + #ifdef CMS_R_OPERATION_UNSUPPORTED + {"OPERATION_UNSUPPORTED", ERR_LIB_CMS, CMS_R_OPERATION_UNSUPPORTED}, + #else + {"OPERATION_UNSUPPORTED", 46, 182}, + #endif #ifdef CMS_R_PEER_KEY_ERROR {"PEER_KEY_ERROR", ERR_LIB_CMS, CMS_R_PEER_KEY_ERROR}, #else @@ -1960,6 +2087,11 @@ static struct py_ssl_error_code error_codes[] = { #else {"UNSUPPORTED_RECIPIENT_TYPE", 46, 154}, #endif + #ifdef CMS_R_UNSUPPORTED_SIGNATURE_ALGORITHM + {"UNSUPPORTED_SIGNATURE_ALGORITHM", ERR_LIB_CMS, CMS_R_UNSUPPORTED_SIGNATURE_ALGORITHM}, + #else + {"UNSUPPORTED_SIGNATURE_ALGORITHM", 46, 195}, + #endif #ifdef CMS_R_UNSUPPORTED_TYPE {"UNSUPPORTED_TYPE", ERR_LIB_CMS, CMS_R_UNSUPPORTED_TYPE}, #else @@ -1985,6 +2117,31 @@ static struct py_ssl_error_code error_codes[] = { #else {"WRAP_ERROR", 46, 159}, #endif + #ifdef COMP_R_BROTLI_DECODE_ERROR + {"BROTLI_DECODE_ERROR", ERR_LIB_COMP, COMP_R_BROTLI_DECODE_ERROR}, + #else + {"BROTLI_DECODE_ERROR", 41, 102}, + #endif + #ifdef COMP_R_BROTLI_DEFLATE_ERROR + {"BROTLI_DEFLATE_ERROR", ERR_LIB_COMP, COMP_R_BROTLI_DEFLATE_ERROR}, + #else + {"BROTLI_DEFLATE_ERROR", 41, 103}, + #endif + #ifdef COMP_R_BROTLI_ENCODE_ERROR + {"BROTLI_ENCODE_ERROR", ERR_LIB_COMP, COMP_R_BROTLI_ENCODE_ERROR}, + #else + {"BROTLI_ENCODE_ERROR", 41, 106}, + #endif + #ifdef COMP_R_BROTLI_INFLATE_ERROR + {"BROTLI_INFLATE_ERROR", ERR_LIB_COMP, COMP_R_BROTLI_INFLATE_ERROR}, + #else + {"BROTLI_INFLATE_ERROR", 41, 104}, + #endif + #ifdef COMP_R_BROTLI_NOT_SUPPORTED + {"BROTLI_NOT_SUPPORTED", ERR_LIB_COMP, COMP_R_BROTLI_NOT_SUPPORTED}, + #else + {"BROTLI_NOT_SUPPORTED", 41, 105}, + #endif #ifdef COMP_R_ZLIB_DEFLATE_ERROR {"ZLIB_DEFLATE_ERROR", ERR_LIB_COMP, COMP_R_ZLIB_DEFLATE_ERROR}, #else @@ -2000,6 +2157,26 @@ static struct py_ssl_error_code error_codes[] = { #else {"ZLIB_NOT_SUPPORTED", 41, 101}, #endif + #ifdef COMP_R_ZSTD_COMPRESS_ERROR + {"ZSTD_COMPRESS_ERROR", ERR_LIB_COMP, 
COMP_R_ZSTD_COMPRESS_ERROR}, + #else + {"ZSTD_COMPRESS_ERROR", 41, 107}, + #endif + #ifdef COMP_R_ZSTD_DECODE_ERROR + {"ZSTD_DECODE_ERROR", ERR_LIB_COMP, COMP_R_ZSTD_DECODE_ERROR}, + #else + {"ZSTD_DECODE_ERROR", 41, 108}, + #endif + #ifdef COMP_R_ZSTD_DECOMPRESS_ERROR + {"ZSTD_DECOMPRESS_ERROR", ERR_LIB_COMP, COMP_R_ZSTD_DECOMPRESS_ERROR}, + #else + {"ZSTD_DECOMPRESS_ERROR", 41, 109}, + #endif + #ifdef COMP_R_ZSTD_NOT_SUPPORTED + {"ZSTD_NOT_SUPPORTED", ERR_LIB_COMP, COMP_R_ZSTD_NOT_SUPPORTED}, + #else + {"ZSTD_NOT_SUPPORTED", 41, 110}, + #endif #ifdef CONF_R_ERROR_LOADING_DSO {"ERROR_LOADING_DSO", ERR_LIB_CONF, CONF_R_ERROR_LOADING_DSO}, #else @@ -2085,6 +2262,11 @@ static struct py_ssl_error_code error_codes[] = { #else {"RECURSIVE_DIRECTORY_INCLUDE", 14, 111}, #endif + #ifdef CONF_R_RECURSIVE_SECTION_REFERENCE + {"RECURSIVE_SECTION_REFERENCE", ERR_LIB_CONF, CONF_R_RECURSIVE_SECTION_REFERENCE}, + #else + {"RECURSIVE_SECTION_REFERENCE", 14, 126}, + #endif #ifdef CONF_R_RELATIVE_PATH {"RELATIVE_PATH", ERR_LIB_CONF, CONF_R_RELATIVE_PATH}, #else @@ -2370,6 +2552,11 @@ static struct py_ssl_error_code error_codes[] = { #else {"TOO_MANY_BYTES", 15, 113}, #endif + #ifdef CRYPTO_R_TOO_MANY_NAMES + {"TOO_MANY_NAMES", ERR_LIB_CRYPTO, CRYPTO_R_TOO_MANY_NAMES}, + #else + {"TOO_MANY_NAMES", 15, 132}, + #endif #ifdef CRYPTO_R_TOO_MANY_RECORDS {"TOO_MANY_RECORDS", ERR_LIB_CRYPTO, CRYPTO_R_TOO_MANY_RECORDS}, #else @@ -2560,6 +2747,11 @@ static struct py_ssl_error_code error_codes[] = { #else {"INVALID_SECRET", 5, 128}, #endif + #ifdef DH_R_INVALID_SIZE + {"INVALID_SIZE", ERR_LIB_DH, DH_R_INVALID_SIZE}, + #else + {"INVALID_SIZE", 5, 129}, + #endif #ifdef DH_R_KDF_PARAMETER_ERROR {"KDF_PARAMETER_ERROR", ERR_LIB_DH, DH_R_KDF_PARAMETER_ERROR}, #else @@ -2610,6 +2802,11 @@ static struct py_ssl_error_code error_codes[] = { #else {"PEER_KEY_ERROR", 5, 111}, #endif + #ifdef DH_R_Q_TOO_LARGE + {"Q_TOO_LARGE", ERR_LIB_DH, DH_R_Q_TOO_LARGE}, + #else + {"Q_TOO_LARGE", 5, 130}, + #endif #ifdef DH_R_SHARED_INFO_ERROR {"SHARED_INFO_ERROR", ERR_LIB_DH, DH_R_SHARED_INFO_ERROR}, #else @@ -3545,6 +3742,11 @@ static struct py_ssl_error_code error_codes[] = { #else {"GENERATE_ERROR", 6, 214}, #endif + #ifdef EVP_R_GETTING_ALGORITHMIDENTIFIER_NOT_SUPPORTED + {"GETTING_ALGORITHMIDENTIFIER_NOT_SUPPORTED", ERR_LIB_EVP, EVP_R_GETTING_ALGORITHMIDENTIFIER_NOT_SUPPORTED}, + #else + {"GETTING_ALGORITHMIDENTIFIER_NOT_SUPPORTED", 6, 229}, + #endif #ifdef EVP_R_GET_RAW_KEY_FAILED {"GET_RAW_KEY_FAILED", ERR_LIB_EVP, EVP_R_GET_RAW_KEY_FAILED}, #else @@ -3745,6 +3947,11 @@ static struct py_ssl_error_code error_codes[] = { #else {"OPERATION_NOT_SUPPORTED_FOR_THIS_KEYTYPE", 6, 150}, #endif + #ifdef EVP_R_OPERATION_NOT_SUPPORTED_FOR_THIS_SIGNATURE_TYPE + {"OPERATION_NOT_SUPPORTED_FOR_THIS_SIGNATURE_TYPE", ERR_LIB_EVP, EVP_R_OPERATION_NOT_SUPPORTED_FOR_THIS_SIGNATURE_TYPE}, + #else + {"OPERATION_NOT_SUPPORTED_FOR_THIS_SIGNATURE_TYPE", 6, 226}, + #endif #ifdef EVP_R_OUTPUT_WOULD_OVERFLOW {"OUTPUT_WOULD_OVERFLOW", ERR_LIB_EVP, EVP_R_OUTPUT_WOULD_OVERFLOW}, #else @@ -3795,6 +4002,11 @@ static struct py_ssl_error_code error_codes[] = { #else {"SET_DEFAULT_PROPERTY_FAILURE", 6, 209}, #endif + #ifdef EVP_R_SIGNATURE_TYPE_AND_KEY_TYPE_INCOMPATIBLE + {"SIGNATURE_TYPE_AND_KEY_TYPE_INCOMPATIBLE", ERR_LIB_EVP, EVP_R_SIGNATURE_TYPE_AND_KEY_TYPE_INCOMPATIBLE}, + #else + {"SIGNATURE_TYPE_AND_KEY_TYPE_INCOMPATIBLE", 6, 228}, + #endif #ifdef EVP_R_TOO_MANY_RECORDS {"TOO_MANY_RECORDS", ERR_LIB_EVP, EVP_R_TOO_MANY_RECORDS}, #else @@ -3825,6 +4037,11 @@ static 
struct py_ssl_error_code error_codes[] = { #else {"UNABLE_TO_SET_CALLBACKS", 6, 217}, #endif + #ifdef EVP_R_UNKNOWN_BITS + {"UNKNOWN_BITS", ERR_LIB_EVP, EVP_R_UNKNOWN_BITS}, + #else + {"UNKNOWN_BITS", 6, 166}, + #endif #ifdef EVP_R_UNKNOWN_CIPHER {"UNKNOWN_CIPHER", ERR_LIB_EVP, EVP_R_UNKNOWN_CIPHER}, #else @@ -3840,6 +4057,11 @@ static struct py_ssl_error_code error_codes[] = { #else {"UNKNOWN_KEY_TYPE", 6, 207}, #endif + #ifdef EVP_R_UNKNOWN_MAX_SIZE + {"UNKNOWN_MAX_SIZE", ERR_LIB_EVP, EVP_R_UNKNOWN_MAX_SIZE}, + #else + {"UNKNOWN_MAX_SIZE", 6, 167}, + #endif #ifdef EVP_R_UNKNOWN_OPTION {"UNKNOWN_OPTION", ERR_LIB_EVP, EVP_R_UNKNOWN_OPTION}, #else @@ -3850,6 +4072,11 @@ static struct py_ssl_error_code error_codes[] = { #else {"UNKNOWN_PBE_ALGORITHM", 6, 121}, #endif + #ifdef EVP_R_UNKNOWN_SECURITY_BITS + {"UNKNOWN_SECURITY_BITS", ERR_LIB_EVP, EVP_R_UNKNOWN_SECURITY_BITS}, + #else + {"UNKNOWN_SECURITY_BITS", 6, 168}, + #endif #ifdef EVP_R_UNSUPPORTED_ALGORITHM {"UNSUPPORTED_ALGORITHM", ERR_LIB_EVP, EVP_R_UNSUPPORTED_ALGORITHM}, #else @@ -4040,6 +4267,11 @@ static struct py_ssl_error_code error_codes[] = { #else {"RESPONSE_PARSE_ERROR", 61, 104}, #endif + #ifdef HTTP_R_RESPONSE_TOO_MANY_HDRLINES + {"RESPONSE_TOO_MANY_HDRLINES", ERR_LIB_HTTP, HTTP_R_RESPONSE_TOO_MANY_HDRLINES}, + #else + {"RESPONSE_TOO_MANY_HDRLINES", 61, 130}, + #endif #ifdef HTTP_R_RETRY_TIMEOUT {"RETRY_TIMEOUT", ERR_LIB_HTTP, HTTP_R_RETRY_TIMEOUT}, #else @@ -4530,6 +4762,11 @@ static struct py_ssl_error_code error_codes[] = { #else {"UNSUPPORTED_PUBLIC_KEY_TYPE", 9, 110}, #endif + #ifdef PKCS12_R_CALLBACK_FAILED + {"CALLBACK_FAILED", ERR_LIB_PKCS12, PKCS12_R_CALLBACK_FAILED}, + #else + {"CALLBACK_FAILED", 35, 115}, + #endif #ifdef PKCS12_R_CANT_PACK_STRUCTURE {"CANT_PACK_STRUCTURE", ERR_LIB_PKCS12, PKCS12_R_CANT_PACK_STRUCTURE}, #else @@ -4920,6 +5157,11 @@ static struct py_ssl_error_code error_codes[] = { #else {"CIPHER_OPERATION_FAILED", 57, 102}, #endif + #ifdef PROV_R_COFACTOR_REQUIRED + {"COFACTOR_REQUIRED", ERR_LIB_PROV, PROV_R_COFACTOR_REQUIRED}, + #else + {"COFACTOR_REQUIRED", 57, 236}, + #endif #ifdef PROV_R_DERIVATION_FUNCTION_INIT_FAILED {"DERIVATION_FUNCTION_INIT_FAILED", ERR_LIB_PROV, PROV_R_DERIVATION_FUNCTION_INIT_FAILED}, #else @@ -4935,6 +5177,11 @@ static struct py_ssl_error_code error_codes[] = { #else {"EMS_NOT_ENABLED", 57, 233}, #endif + #ifdef PROV_R_ENTROPY_SOURCE_FAILED_CONTINUOUS_TESTS + {"ENTROPY_SOURCE_FAILED_CONTINUOUS_TESTS", ERR_LIB_PROV, PROV_R_ENTROPY_SOURCE_FAILED_CONTINUOUS_TESTS}, + #else + {"ENTROPY_SOURCE_FAILED_CONTINUOUS_TESTS", 57, 244}, + #endif #ifdef PROV_R_ENTROPY_SOURCE_STRENGTH_TOO_WEAK {"ENTROPY_SOURCE_STRENGTH_TOO_WEAK", ERR_LIB_PROV, PROV_R_ENTROPY_SOURCE_STRENGTH_TOO_WEAK}, #else @@ -4990,6 +5237,11 @@ static struct py_ssl_error_code error_codes[] = { #else {"FAILED_TO_SIGN", 57, 175}, #endif + #ifdef PROV_R_FINAL_CALL_OUT_OF_ORDER + {"FINAL_CALL_OUT_OF_ORDER", ERR_LIB_PROV, PROV_R_FINAL_CALL_OUT_OF_ORDER}, + #else + {"FINAL_CALL_OUT_OF_ORDER", 57, 237}, + #endif #ifdef PROV_R_FIPS_MODULE_CONDITIONAL_ERROR {"FIPS_MODULE_CONDITIONAL_ERROR", ERR_LIB_PROV, PROV_R_FIPS_MODULE_CONDITIONAL_ERROR}, #else @@ -5020,6 +5272,11 @@ static struct py_ssl_error_code error_codes[] = { #else {"INDICATOR_INTEGRITY_FAILURE", 57, 210}, #endif + #ifdef PROV_R_INIT_CALL_OUT_OF_ORDER + {"INIT_CALL_OUT_OF_ORDER", ERR_LIB_PROV, PROV_R_INIT_CALL_OUT_OF_ORDER}, + #else + {"INIT_CALL_OUT_OF_ORDER", 57, 238}, + #endif #ifdef PROV_R_INSUFFICIENT_DRBG_STRENGTH {"INSUFFICIENT_DRBG_STRENGTH", ERR_LIB_PROV, 
PROV_R_INSUFFICIENT_DRBG_STRENGTH}, #else @@ -5030,6 +5287,11 @@ static struct py_ssl_error_code error_codes[] = { #else {"INVALID_AAD", 57, 108}, #endif + #ifdef PROV_R_INVALID_AEAD + {"INVALID_AEAD", ERR_LIB_PROV, PROV_R_INVALID_AEAD}, + #else + {"INVALID_AEAD", 57, 231}, + #endif #ifdef PROV_R_INVALID_CONFIG_DATA {"INVALID_CONFIG_DATA", ERR_LIB_PROV, PROV_R_INVALID_CONFIG_DATA}, #else @@ -5070,6 +5332,11 @@ static struct py_ssl_error_code error_codes[] = { #else {"INVALID_DIGEST_SIZE", 57, 218}, #endif + #ifdef PROV_R_INVALID_EDDSA_INSTANCE_FOR_ATTEMPTED_OPERATION + {"INVALID_EDDSA_INSTANCE_FOR_ATTEMPTED_OPERATION", ERR_LIB_PROV, PROV_R_INVALID_EDDSA_INSTANCE_FOR_ATTEMPTED_OPERATION}, + #else + {"INVALID_EDDSA_INSTANCE_FOR_ATTEMPTED_OPERATION", 57, 243}, + #endif #ifdef PROV_R_INVALID_INPUT_LENGTH {"INVALID_INPUT_LENGTH", ERR_LIB_PROV, PROV_R_INVALID_INPUT_LENGTH}, #else @@ -5085,6 +5352,11 @@ static struct py_ssl_error_code error_codes[] = { #else {"INVALID_IV_LENGTH", 57, 109}, #endif + #ifdef PROV_R_INVALID_KDF + {"INVALID_KDF", ERR_LIB_PROV, PROV_R_INVALID_KDF}, + #else + {"INVALID_KDF", 57, 232}, + #endif #ifdef PROV_R_INVALID_KEY {"INVALID_KEY", ERR_LIB_PROV, PROV_R_INVALID_KEY}, #else @@ -5100,6 +5372,11 @@ static struct py_ssl_error_code error_codes[] = { #else {"INVALID_MAC", 57, 151}, #endif + #ifdef PROV_R_INVALID_MEMORY_SIZE + {"INVALID_MEMORY_SIZE", ERR_LIB_PROV, PROV_R_INVALID_MEMORY_SIZE}, + #else + {"INVALID_MEMORY_SIZE", 57, 235}, + #endif #ifdef PROV_R_INVALID_MGF1_MD {"INVALID_MGF1_MD", ERR_LIB_PROV, PROV_R_INVALID_MGF1_MD}, #else @@ -5120,6 +5397,11 @@ static struct py_ssl_error_code error_codes[] = { #else {"INVALID_PADDING_MODE", 57, 168}, #endif + #ifdef PROV_R_INVALID_PREHASHED_DIGEST_LENGTH + {"INVALID_PREHASHED_DIGEST_LENGTH", ERR_LIB_PROV, PROV_R_INVALID_PREHASHED_DIGEST_LENGTH}, + #else + {"INVALID_PREHASHED_DIGEST_LENGTH", 57, 241}, + #endif #ifdef PROV_R_INVALID_PUBINFO {"INVALID_PUBINFO", ERR_LIB_PROV, PROV_R_INVALID_PUBINFO}, #else @@ -5155,6 +5437,11 @@ static struct py_ssl_error_code error_codes[] = { #else {"INVALID_TAG_LENGTH", 57, 118}, #endif + #ifdef PROV_R_INVALID_THREAD_POOL_SIZE + {"INVALID_THREAD_POOL_SIZE", ERR_LIB_PROV, PROV_R_INVALID_THREAD_POOL_SIZE}, + #else + {"INVALID_THREAD_POOL_SIZE", 57, 234}, + #endif #ifdef PROV_R_INVALID_UKM_LENGTH {"INVALID_UKM_LENGTH", ERR_LIB_PROV, PROV_R_INVALID_UKM_LENGTH}, #else @@ -5300,6 +5587,11 @@ static struct py_ssl_error_code error_codes[] = { #else {"NOT_XOF_OR_INVALID_LENGTH", 57, 113}, #endif + #ifdef PROV_R_NO_INSTANCE_ALLOWED + {"NO_INSTANCE_ALLOWED", ERR_LIB_PROV, PROV_R_NO_INSTANCE_ALLOWED}, + #else + {"NO_INSTANCE_ALLOWED", 57, 242}, + #endif #ifdef PROV_R_NO_KEY_SET {"NO_KEY_SET", ERR_LIB_PROV, PROV_R_NO_KEY_SET}, #else @@ -5310,6 +5602,11 @@ static struct py_ssl_error_code error_codes[] = { #else {"NO_PARAMETERS_SET", 57, 177}, #endif + #ifdef PROV_R_ONESHOT_CALL_OUT_OF_ORDER + {"ONESHOT_CALL_OUT_OF_ORDER", ERR_LIB_PROV, PROV_R_ONESHOT_CALL_OUT_OF_ORDER}, + #else + {"ONESHOT_CALL_OUT_OF_ORDER", 57, 239}, + #endif #ifdef PROV_R_OPERATION_NOT_SUPPORTED_FOR_THIS_KEYTYPE {"OPERATION_NOT_SUPPORTED_FOR_THIS_KEYTYPE", ERR_LIB_PROV, PROV_R_OPERATION_NOT_SUPPORTED_FOR_THIS_KEYTYPE}, #else @@ -5460,6 +5757,11 @@ static struct py_ssl_error_code error_codes[] = { #else {"UNSUPPORTED_NUMBER_OF_ROUNDS", 57, 152}, #endif + #ifdef PROV_R_UPDATE_CALL_OUT_OF_ORDER + {"UPDATE_CALL_OUT_OF_ORDER", ERR_LIB_PROV, PROV_R_UPDATE_CALL_OUT_OF_ORDER}, + #else + {"UPDATE_CALL_OUT_OF_ORDER", 57, 240}, + #endif #ifdef 
PROV_R_URI_AUTHORITY_UNSUPPORTED {"URI_AUTHORITY_UNSUPPORTED", ERR_LIB_PROV, PROV_R_URI_AUTHORITY_UNSUPPORTED}, #else @@ -5595,6 +5897,11 @@ static struct py_ssl_error_code error_codes[] = { #else {"INTERNAL_ERROR", 36, 113}, #endif + #ifdef RAND_R_INVALID_PROPERTY_QUERY + {"INVALID_PROPERTY_QUERY", ERR_LIB_RAND, RAND_R_INVALID_PROPERTY_QUERY}, + #else + {"INVALID_PROPERTY_QUERY", 36, 137}, + #endif #ifdef RAND_R_IN_ERROR_STATE {"IN_ERROR_STATE", ERR_LIB_RAND, RAND_R_IN_ERROR_STATE}, #else @@ -6210,6 +6517,11 @@ static struct py_ssl_error_code error_codes[] = { #else {"AT_LEAST_TLS_1_2_NEEDED_IN_SUITEB_MODE", 20, 158}, #endif + #ifdef SSL_R_BAD_CERTIFICATE + {"BAD_CERTIFICATE", ERR_LIB_SSL, SSL_R_BAD_CERTIFICATE}, + #else + {"BAD_CERTIFICATE", 20, 348}, + #endif #ifdef SSL_R_BAD_CHANGE_CIPHER_SPEC {"BAD_CHANGE_CIPHER_SPEC", ERR_LIB_SSL, SSL_R_BAD_CHANGE_CIPHER_SPEC}, #else @@ -6220,6 +6532,11 @@ static struct py_ssl_error_code error_codes[] = { #else {"BAD_CIPHER", 20, 186}, #endif + #ifdef SSL_R_BAD_COMPRESSION_ALGORITHM + {"BAD_COMPRESSION_ALGORITHM", ERR_LIB_SSL, SSL_R_BAD_COMPRESSION_ALGORITHM}, + #else + {"BAD_COMPRESSION_ALGORITHM", 20, 326}, + #endif #ifdef SSL_R_BAD_DATA {"BAD_DATA", ERR_LIB_SSL, SSL_R_BAD_DATA}, #else @@ -6495,6 +6812,11 @@ static struct py_ssl_error_code error_codes[] = { #else {"CONNECTION_TYPE_NOT_SET", 20, 144}, #endif + #ifdef SSL_R_CONN_USE_ONLY + {"CONN_USE_ONLY", ERR_LIB_SSL, SSL_R_CONN_USE_ONLY}, + #else + {"CONN_USE_ONLY", 20, 356}, + #endif #ifdef SSL_R_CONTEXT_NOT_DANE_ENABLED {"CONTEXT_NOT_DANE_ENABLED", ERR_LIB_SSL, SSL_R_CONTEXT_NOT_DANE_ENABLED}, #else @@ -6635,6 +6957,11 @@ static struct py_ssl_error_code error_codes[] = { #else {"EE_KEY_TOO_SMALL", 20, 399}, #endif + #ifdef SSL_R_EMPTY_RAW_PUBLIC_KEY + {"EMPTY_RAW_PUBLIC_KEY", ERR_LIB_SSL, SSL_R_EMPTY_RAW_PUBLIC_KEY}, + #else + {"EMPTY_RAW_PUBLIC_KEY", 20, 349}, + #endif #ifdef SSL_R_EMPTY_SRTP_PROTECTION_PROFILE_LIST {"EMPTY_SRTP_PROTECTION_PROFILE_LIST", ERR_LIB_SSL, SSL_R_EMPTY_SRTP_PROTECTION_PROFILE_LIST}, #else @@ -6650,6 +6977,11 @@ static struct py_ssl_error_code error_codes[] = { #else {"ERROR_IN_RECEIVED_CIPHER_LIST", 20, 151}, #endif + #ifdef SSL_R_ERROR_IN_SYSTEM_DEFAULT_CONFIG + {"ERROR_IN_SYSTEM_DEFAULT_CONFIG", ERR_LIB_SSL, SSL_R_ERROR_IN_SYSTEM_DEFAULT_CONFIG}, + #else + {"ERROR_IN_SYSTEM_DEFAULT_CONFIG", 20, 419}, + #endif #ifdef SSL_R_ERROR_SETTING_TLSA_BASE_DOMAIN {"ERROR_SETTING_TLSA_BASE_DOMAIN", ERR_LIB_SSL, SSL_R_ERROR_SETTING_TLSA_BASE_DOMAIN}, #else @@ -6680,11 +7012,26 @@ static struct py_ssl_error_code error_codes[] = { #else {"EXT_LENGTH_MISMATCH", 20, 163}, #endif + #ifdef SSL_R_FAILED_TO_GET_PARAMETER + {"FAILED_TO_GET_PARAMETER", ERR_LIB_SSL, SSL_R_FAILED_TO_GET_PARAMETER}, + #else + {"FAILED_TO_GET_PARAMETER", 20, 316}, + #endif #ifdef SSL_R_FAILED_TO_INIT_ASYNC {"FAILED_TO_INIT_ASYNC", ERR_LIB_SSL, SSL_R_FAILED_TO_INIT_ASYNC}, #else {"FAILED_TO_INIT_ASYNC", 20, 405}, #endif + #ifdef SSL_R_FEATURE_NEGOTIATION_NOT_COMPLETE + {"FEATURE_NEGOTIATION_NOT_COMPLETE", ERR_LIB_SSL, SSL_R_FEATURE_NEGOTIATION_NOT_COMPLETE}, + #else + {"FEATURE_NEGOTIATION_NOT_COMPLETE", 20, 417}, + #endif + #ifdef SSL_R_FEATURE_NOT_RENEGOTIABLE + {"FEATURE_NOT_RENEGOTIABLE", ERR_LIB_SSL, SSL_R_FEATURE_NOT_RENEGOTIABLE}, + #else + {"FEATURE_NOT_RENEGOTIABLE", 20, 413}, + #endif #ifdef SSL_R_FRAGMENTED_CLIENT_HELLO {"FRAGMENTED_CLIENT_HELLO", ERR_LIB_SSL, SSL_R_FRAGMENTED_CLIENT_HELLO}, #else @@ -6805,6 +7152,16 @@ static struct py_ssl_error_code error_codes[] = { #else 
{"INVALID_NULL_CMD_NAME", 20, 385}, #endif + #ifdef SSL_R_INVALID_RAW_PUBLIC_KEY + {"INVALID_RAW_PUBLIC_KEY", ERR_LIB_SSL, SSL_R_INVALID_RAW_PUBLIC_KEY}, + #else + {"INVALID_RAW_PUBLIC_KEY", 20, 350}, + #endif + #ifdef SSL_R_INVALID_RECORD + {"INVALID_RECORD", ERR_LIB_SSL, SSL_R_INVALID_RECORD}, + #else + {"INVALID_RECORD", 20, 317}, + #endif #ifdef SSL_R_INVALID_SEQUENCE_NUMBER {"INVALID_SEQUENCE_NUMBER", ERR_LIB_SSL, SSL_R_INVALID_SEQUENCE_NUMBER}, #else @@ -6865,6 +7222,11 @@ static struct py_ssl_error_code error_codes[] = { #else {"LIBRARY_HAS_NO_CIPHERS", 20, 161}, #endif + #ifdef SSL_R_MAXIMUM_ENCRYPTED_PKTS_REACHED + {"MAXIMUM_ENCRYPTED_PKTS_REACHED", ERR_LIB_SSL, SSL_R_MAXIMUM_ENCRYPTED_PKTS_REACHED}, + #else + {"MAXIMUM_ENCRYPTED_PKTS_REACHED", 20, 395}, + #endif #ifdef SSL_R_MISSING_DSA_SIGNING_CERT {"MISSING_DSA_SIGNING_CERT", ERR_LIB_SSL, SSL_R_MISSING_DSA_SIGNING_CERT}, #else @@ -6925,6 +7287,11 @@ static struct py_ssl_error_code error_codes[] = { #else {"MISSING_SUPPORTED_GROUPS_EXTENSION", 20, 209}, #endif + #ifdef SSL_R_MISSING_SUPPORTED_VERSIONS_EXTENSION + {"MISSING_SUPPORTED_VERSIONS_EXTENSION", ERR_LIB_SSL, SSL_R_MISSING_SUPPORTED_VERSIONS_EXTENSION}, + #else + {"MISSING_SUPPORTED_VERSIONS_EXTENSION", 20, 420}, + #endif #ifdef SSL_R_MISSING_TMP_DH_KEY {"MISSING_TMP_DH_KEY", ERR_LIB_SSL, SSL_R_MISSING_TMP_DH_KEY}, #else @@ -7065,6 +7432,11 @@ static struct py_ssl_error_code error_codes[] = { #else {"NO_SRTP_PROFILES", 20, 359}, #endif + #ifdef SSL_R_NO_STREAM + {"NO_STREAM", ERR_LIB_SSL, SSL_R_NO_STREAM}, + #else + {"NO_STREAM", 20, 355}, + #endif #ifdef SSL_R_NO_SUITABLE_DIGEST_ALGORITHM {"NO_SUITABLE_DIGEST_ALGORITHM", ERR_LIB_SSL, SSL_R_NO_SUITABLE_DIGEST_ALGORITHM}, #else @@ -7080,6 +7452,11 @@ static struct py_ssl_error_code error_codes[] = { #else {"NO_SUITABLE_KEY_SHARE", 20, 101}, #endif + #ifdef SSL_R_NO_SUITABLE_RECORD_LAYER + {"NO_SUITABLE_RECORD_LAYER", ERR_LIB_SSL, SSL_R_NO_SUITABLE_RECORD_LAYER}, + #else + {"NO_SUITABLE_RECORD_LAYER", 20, 322}, + #endif #ifdef SSL_R_NO_SUITABLE_SIGNATURE_ALGORITHM {"NO_SUITABLE_SIGNATURE_ALGORITHM", ERR_LIB_SSL, SSL_R_NO_SUITABLE_SIGNATURE_ALGORITHM}, #else @@ -7160,6 +7537,11 @@ static struct py_ssl_error_code error_codes[] = { #else {"PIPELINE_FAILURE", 20, 406}, #endif + #ifdef SSL_R_POLL_REQUEST_NOT_SUPPORTED + {"POLL_REQUEST_NOT_SUPPORTED", ERR_LIB_SSL, SSL_R_POLL_REQUEST_NOT_SUPPORTED}, + #else + {"POLL_REQUEST_NOT_SUPPORTED", 20, 418}, + #endif #ifdef SSL_R_POST_HANDSHAKE_AUTH_ENCODING_ERR {"POST_HANDSHAKE_AUTH_ENCODING_ERR", ERR_LIB_SSL, SSL_R_POST_HANDSHAKE_AUTH_ENCODING_ERR}, #else @@ -7190,6 +7572,21 @@ static struct py_ssl_error_code error_codes[] = { #else {"PSK_NO_SERVER_CB", 20, 225}, #endif + #ifdef SSL_R_QUIC_HANDSHAKE_LAYER_ERROR + {"QUIC_HANDSHAKE_LAYER_ERROR", ERR_LIB_SSL, SSL_R_QUIC_HANDSHAKE_LAYER_ERROR}, + #else + {"QUIC_HANDSHAKE_LAYER_ERROR", 20, 393}, + #endif + #ifdef SSL_R_QUIC_NETWORK_ERROR + {"QUIC_NETWORK_ERROR", ERR_LIB_SSL, SSL_R_QUIC_NETWORK_ERROR}, + #else + {"QUIC_NETWORK_ERROR", 20, 387}, + #endif + #ifdef SSL_R_QUIC_PROTOCOL_ERROR + {"QUIC_PROTOCOL_ERROR", ERR_LIB_SSL, SSL_R_QUIC_PROTOCOL_ERROR}, + #else + {"QUIC_PROTOCOL_ERROR", 20, 382}, + #endif #ifdef SSL_R_READ_BIO_NOT_SET {"READ_BIO_NOT_SET", ERR_LIB_SSL, SSL_R_READ_BIO_NOT_SET}, #else @@ -7200,6 +7597,16 @@ static struct py_ssl_error_code error_codes[] = { #else {"READ_TIMEOUT_EXPIRED", 20, 312}, #endif + #ifdef SSL_R_RECORDS_NOT_RELEASED + {"RECORDS_NOT_RELEASED", ERR_LIB_SSL, SSL_R_RECORDS_NOT_RELEASED}, + #else + 
{"RECORDS_NOT_RELEASED", 20, 321}, + #endif + #ifdef SSL_R_RECORD_LAYER_FAILURE + {"RECORD_LAYER_FAILURE", ERR_LIB_SSL, SSL_R_RECORD_LAYER_FAILURE}, + #else + {"RECORD_LAYER_FAILURE", 20, 313}, + #endif #ifdef SSL_R_RECORD_LENGTH_MISMATCH {"RECORD_LENGTH_MISMATCH", ERR_LIB_SSL, SSL_R_RECORD_LENGTH_MISMATCH}, #else @@ -7210,6 +7617,11 @@ static struct py_ssl_error_code error_codes[] = { #else {"RECORD_TOO_SMALL", 20, 298}, #endif + #ifdef SSL_R_REMOTE_PEER_ADDRESS_NOT_SET + {"REMOTE_PEER_ADDRESS_NOT_SET", ERR_LIB_SSL, SSL_R_REMOTE_PEER_ADDRESS_NOT_SET}, + #else + {"REMOTE_PEER_ADDRESS_NOT_SET", 20, 346}, + #endif #ifdef SSL_R_RENEGOTIATE_EXT_TOO_LONG {"RENEGOTIATE_EXT_TOO_LONG", ERR_LIB_SSL, SSL_R_RENEGOTIATE_EXT_TOO_LONG}, #else @@ -7255,6 +7667,11 @@ static struct py_ssl_error_code error_codes[] = { #else {"SCT_VERIFICATION_FAILED", 20, 208}, #endif + #ifdef SSL_R_SEQUENCE_CTR_WRAPPED + {"SEQUENCE_CTR_WRAPPED", ERR_LIB_SSL, SSL_R_SEQUENCE_CTR_WRAPPED}, + #else + {"SEQUENCE_CTR_WRAPPED", 20, 327}, + #endif #ifdef SSL_R_SERVERHELLO_TLSEXT {"SERVERHELLO_TLSEXT", ERR_LIB_SSL, SSL_R_SERVERHELLO_TLSEXT}, #else @@ -7325,6 +7742,16 @@ static struct py_ssl_error_code error_codes[] = { #else {"SSLV3_ALERT_BAD_CERTIFICATE", 20, 1042}, #endif + #ifdef SSL_R_SSLV3_ALERT_BAD_CERTIFICATE + {"SSLV3_ALERT_BAD_CERTIFICATE", ERR_LIB_SSL, SSL_R_SSLV3_ALERT_BAD_CERTIFICATE}, + #else + {"SSLV3_ALERT_BAD_CERTIFICATE", 20, 1042}, + #endif + #ifdef SSL_R_SSLV3_ALERT_BAD_RECORD_MAC + {"SSLV3_ALERT_BAD_RECORD_MAC", ERR_LIB_SSL, SSL_R_SSLV3_ALERT_BAD_RECORD_MAC}, + #else + {"SSLV3_ALERT_BAD_RECORD_MAC", 20, 1020}, + #endif #ifdef SSL_R_SSLV3_ALERT_BAD_RECORD_MAC {"SSLV3_ALERT_BAD_RECORD_MAC", ERR_LIB_SSL, SSL_R_SSLV3_ALERT_BAD_RECORD_MAC}, #else @@ -7335,11 +7762,26 @@ static struct py_ssl_error_code error_codes[] = { #else {"SSLV3_ALERT_CERTIFICATE_EXPIRED", 20, 1045}, #endif + #ifdef SSL_R_SSLV3_ALERT_CERTIFICATE_EXPIRED + {"SSLV3_ALERT_CERTIFICATE_EXPIRED", ERR_LIB_SSL, SSL_R_SSLV3_ALERT_CERTIFICATE_EXPIRED}, + #else + {"SSLV3_ALERT_CERTIFICATE_EXPIRED", 20, 1045}, + #endif #ifdef SSL_R_SSLV3_ALERT_CERTIFICATE_REVOKED {"SSLV3_ALERT_CERTIFICATE_REVOKED", ERR_LIB_SSL, SSL_R_SSLV3_ALERT_CERTIFICATE_REVOKED}, #else {"SSLV3_ALERT_CERTIFICATE_REVOKED", 20, 1044}, #endif + #ifdef SSL_R_SSLV3_ALERT_CERTIFICATE_REVOKED + {"SSLV3_ALERT_CERTIFICATE_REVOKED", ERR_LIB_SSL, SSL_R_SSLV3_ALERT_CERTIFICATE_REVOKED}, + #else + {"SSLV3_ALERT_CERTIFICATE_REVOKED", 20, 1044}, + #endif + #ifdef SSL_R_SSLV3_ALERT_CERTIFICATE_UNKNOWN + {"SSLV3_ALERT_CERTIFICATE_UNKNOWN", ERR_LIB_SSL, SSL_R_SSLV3_ALERT_CERTIFICATE_UNKNOWN}, + #else + {"SSLV3_ALERT_CERTIFICATE_UNKNOWN", 20, 1046}, + #endif #ifdef SSL_R_SSLV3_ALERT_CERTIFICATE_UNKNOWN {"SSLV3_ALERT_CERTIFICATE_UNKNOWN", ERR_LIB_SSL, SSL_R_SSLV3_ALERT_CERTIFICATE_UNKNOWN}, #else @@ -7350,6 +7792,16 @@ static struct py_ssl_error_code error_codes[] = { #else {"SSLV3_ALERT_DECOMPRESSION_FAILURE", 20, 1030}, #endif + #ifdef SSL_R_SSLV3_ALERT_DECOMPRESSION_FAILURE + {"SSLV3_ALERT_DECOMPRESSION_FAILURE", ERR_LIB_SSL, SSL_R_SSLV3_ALERT_DECOMPRESSION_FAILURE}, + #else + {"SSLV3_ALERT_DECOMPRESSION_FAILURE", 20, 1030}, + #endif + #ifdef SSL_R_SSLV3_ALERT_HANDSHAKE_FAILURE + {"SSLV3_ALERT_HANDSHAKE_FAILURE", ERR_LIB_SSL, SSL_R_SSLV3_ALERT_HANDSHAKE_FAILURE}, + #else + {"SSLV3_ALERT_HANDSHAKE_FAILURE", 20, 1040}, + #endif #ifdef SSL_R_SSLV3_ALERT_HANDSHAKE_FAILURE {"SSLV3_ALERT_HANDSHAKE_FAILURE", ERR_LIB_SSL, SSL_R_SSLV3_ALERT_HANDSHAKE_FAILURE}, #else @@ -7360,11 +7812,26 @@ static struct 
py_ssl_error_code error_codes[] = { #else {"SSLV3_ALERT_ILLEGAL_PARAMETER", 20, 1047}, #endif + #ifdef SSL_R_SSLV3_ALERT_ILLEGAL_PARAMETER + {"SSLV3_ALERT_ILLEGAL_PARAMETER", ERR_LIB_SSL, SSL_R_SSLV3_ALERT_ILLEGAL_PARAMETER}, + #else + {"SSLV3_ALERT_ILLEGAL_PARAMETER", 20, 1047}, + #endif #ifdef SSL_R_SSLV3_ALERT_NO_CERTIFICATE {"SSLV3_ALERT_NO_CERTIFICATE", ERR_LIB_SSL, SSL_R_SSLV3_ALERT_NO_CERTIFICATE}, #else {"SSLV3_ALERT_NO_CERTIFICATE", 20, 1041}, #endif + #ifdef SSL_R_SSLV3_ALERT_NO_CERTIFICATE + {"SSLV3_ALERT_NO_CERTIFICATE", ERR_LIB_SSL, SSL_R_SSLV3_ALERT_NO_CERTIFICATE}, + #else + {"SSLV3_ALERT_NO_CERTIFICATE", 20, 1041}, + #endif + #ifdef SSL_R_SSLV3_ALERT_UNEXPECTED_MESSAGE + {"SSLV3_ALERT_UNEXPECTED_MESSAGE", ERR_LIB_SSL, SSL_R_SSLV3_ALERT_UNEXPECTED_MESSAGE}, + #else + {"SSLV3_ALERT_UNEXPECTED_MESSAGE", 20, 1010}, + #endif #ifdef SSL_R_SSLV3_ALERT_UNEXPECTED_MESSAGE {"SSLV3_ALERT_UNEXPECTED_MESSAGE", ERR_LIB_SSL, SSL_R_SSLV3_ALERT_UNEXPECTED_MESSAGE}, #else @@ -7375,6 +7842,11 @@ static struct py_ssl_error_code error_codes[] = { #else {"SSLV3_ALERT_UNSUPPORTED_CERTIFICATE", 20, 1043}, #endif + #ifdef SSL_R_SSLV3_ALERT_UNSUPPORTED_CERTIFICATE + {"SSLV3_ALERT_UNSUPPORTED_CERTIFICATE", ERR_LIB_SSL, SSL_R_SSLV3_ALERT_UNSUPPORTED_CERTIFICATE}, + #else + {"SSLV3_ALERT_UNSUPPORTED_CERTIFICATE", 20, 1043}, + #endif #ifdef SSL_R_SSL_COMMAND_SECTION_EMPTY {"SSL_COMMAND_SECTION_EMPTY", ERR_LIB_SSL, SSL_R_SSL_COMMAND_SECTION_EMPTY}, #else @@ -7450,6 +7922,36 @@ static struct py_ssl_error_code error_codes[] = { #else {"STILL_IN_INIT", 20, 121}, #endif + #ifdef SSL_R_STREAM_COUNT_LIMITED + {"STREAM_COUNT_LIMITED", ERR_LIB_SSL, SSL_R_STREAM_COUNT_LIMITED}, + #else + {"STREAM_COUNT_LIMITED", 20, 411}, + #endif + #ifdef SSL_R_STREAM_FINISHED + {"STREAM_FINISHED", ERR_LIB_SSL, SSL_R_STREAM_FINISHED}, + #else + {"STREAM_FINISHED", 20, 365}, + #endif + #ifdef SSL_R_STREAM_RECV_ONLY + {"STREAM_RECV_ONLY", ERR_LIB_SSL, SSL_R_STREAM_RECV_ONLY}, + #else + {"STREAM_RECV_ONLY", 20, 366}, + #endif + #ifdef SSL_R_STREAM_RESET + {"STREAM_RESET", ERR_LIB_SSL, SSL_R_STREAM_RESET}, + #else + {"STREAM_RESET", 20, 375}, + #endif + #ifdef SSL_R_STREAM_SEND_ONLY + {"STREAM_SEND_ONLY", ERR_LIB_SSL, SSL_R_STREAM_SEND_ONLY}, + #else + {"STREAM_SEND_ONLY", 20, 379}, + #endif + #ifdef SSL_R_TLSV13_ALERT_CERTIFICATE_REQUIRED + {"TLSV13_ALERT_CERTIFICATE_REQUIRED", ERR_LIB_SSL, SSL_R_TLSV13_ALERT_CERTIFICATE_REQUIRED}, + #else + {"TLSV13_ALERT_CERTIFICATE_REQUIRED", 20, 1116}, + #endif #ifdef SSL_R_TLSV13_ALERT_CERTIFICATE_REQUIRED {"TLSV13_ALERT_CERTIFICATE_REQUIRED", ERR_LIB_SSL, SSL_R_TLSV13_ALERT_CERTIFICATE_REQUIRED}, #else @@ -7460,6 +7962,16 @@ static struct py_ssl_error_code error_codes[] = { #else {"TLSV13_ALERT_MISSING_EXTENSION", 20, 1109}, #endif + #ifdef SSL_R_TLSV13_ALERT_MISSING_EXTENSION + {"TLSV13_ALERT_MISSING_EXTENSION", ERR_LIB_SSL, SSL_R_TLSV13_ALERT_MISSING_EXTENSION}, + #else + {"TLSV13_ALERT_MISSING_EXTENSION", 20, 1109}, + #endif + #ifdef SSL_R_TLSV1_ALERT_ACCESS_DENIED + {"TLSV1_ALERT_ACCESS_DENIED", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_ACCESS_DENIED}, + #else + {"TLSV1_ALERT_ACCESS_DENIED", 20, 1049}, + #endif #ifdef SSL_R_TLSV1_ALERT_ACCESS_DENIED {"TLSV1_ALERT_ACCESS_DENIED", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_ACCESS_DENIED}, #else @@ -7470,11 +7982,26 @@ static struct py_ssl_error_code error_codes[] = { #else {"TLSV1_ALERT_DECODE_ERROR", 20, 1050}, #endif + #ifdef SSL_R_TLSV1_ALERT_DECODE_ERROR + {"TLSV1_ALERT_DECODE_ERROR", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_DECODE_ERROR}, + #else + 
{"TLSV1_ALERT_DECODE_ERROR", 20, 1050}, + #endif #ifdef SSL_R_TLSV1_ALERT_DECRYPTION_FAILED {"TLSV1_ALERT_DECRYPTION_FAILED", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_DECRYPTION_FAILED}, #else {"TLSV1_ALERT_DECRYPTION_FAILED", 20, 1021}, #endif + #ifdef SSL_R_TLSV1_ALERT_DECRYPTION_FAILED + {"TLSV1_ALERT_DECRYPTION_FAILED", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_DECRYPTION_FAILED}, + #else + {"TLSV1_ALERT_DECRYPTION_FAILED", 20, 1021}, + #endif + #ifdef SSL_R_TLSV1_ALERT_DECRYPT_ERROR + {"TLSV1_ALERT_DECRYPT_ERROR", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_DECRYPT_ERROR}, + #else + {"TLSV1_ALERT_DECRYPT_ERROR", 20, 1051}, + #endif #ifdef SSL_R_TLSV1_ALERT_DECRYPT_ERROR {"TLSV1_ALERT_DECRYPT_ERROR", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_DECRYPT_ERROR}, #else @@ -7485,6 +8012,16 @@ static struct py_ssl_error_code error_codes[] = { #else {"TLSV1_ALERT_EXPORT_RESTRICTION", 20, 1060}, #endif + #ifdef SSL_R_TLSV1_ALERT_EXPORT_RESTRICTION + {"TLSV1_ALERT_EXPORT_RESTRICTION", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_EXPORT_RESTRICTION}, + #else + {"TLSV1_ALERT_EXPORT_RESTRICTION", 20, 1060}, + #endif + #ifdef SSL_R_TLSV1_ALERT_INAPPROPRIATE_FALLBACK + {"TLSV1_ALERT_INAPPROPRIATE_FALLBACK", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_INAPPROPRIATE_FALLBACK}, + #else + {"TLSV1_ALERT_INAPPROPRIATE_FALLBACK", 20, 1086}, + #endif #ifdef SSL_R_TLSV1_ALERT_INAPPROPRIATE_FALLBACK {"TLSV1_ALERT_INAPPROPRIATE_FALLBACK", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_INAPPROPRIATE_FALLBACK}, #else @@ -7495,11 +8032,36 @@ static struct py_ssl_error_code error_codes[] = { #else {"TLSV1_ALERT_INSUFFICIENT_SECURITY", 20, 1071}, #endif + #ifdef SSL_R_TLSV1_ALERT_INSUFFICIENT_SECURITY + {"TLSV1_ALERT_INSUFFICIENT_SECURITY", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_INSUFFICIENT_SECURITY}, + #else + {"TLSV1_ALERT_INSUFFICIENT_SECURITY", 20, 1071}, + #endif + #ifdef SSL_R_TLSV1_ALERT_INTERNAL_ERROR + {"TLSV1_ALERT_INTERNAL_ERROR", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_INTERNAL_ERROR}, + #else + {"TLSV1_ALERT_INTERNAL_ERROR", 20, 1080}, + #endif #ifdef SSL_R_TLSV1_ALERT_INTERNAL_ERROR {"TLSV1_ALERT_INTERNAL_ERROR", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_INTERNAL_ERROR}, #else {"TLSV1_ALERT_INTERNAL_ERROR", 20, 1080}, #endif + #ifdef SSL_R_TLSV1_ALERT_NO_APPLICATION_PROTOCOL + {"TLSV1_ALERT_NO_APPLICATION_PROTOCOL", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_NO_APPLICATION_PROTOCOL}, + #else + {"TLSV1_ALERT_NO_APPLICATION_PROTOCOL", 20, 1120}, + #endif + #ifdef SSL_R_TLSV1_ALERT_NO_APPLICATION_PROTOCOL + {"TLSV1_ALERT_NO_APPLICATION_PROTOCOL", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_NO_APPLICATION_PROTOCOL}, + #else + {"TLSV1_ALERT_NO_APPLICATION_PROTOCOL", 20, 1120}, + #endif + #ifdef SSL_R_TLSV1_ALERT_NO_RENEGOTIATION + {"TLSV1_ALERT_NO_RENEGOTIATION", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_NO_RENEGOTIATION}, + #else + {"TLSV1_ALERT_NO_RENEGOTIATION", 20, 1100}, + #endif #ifdef SSL_R_TLSV1_ALERT_NO_RENEGOTIATION {"TLSV1_ALERT_NO_RENEGOTIATION", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_NO_RENEGOTIATION}, #else @@ -7510,6 +8072,16 @@ static struct py_ssl_error_code error_codes[] = { #else {"TLSV1_ALERT_PROTOCOL_VERSION", 20, 1070}, #endif + #ifdef SSL_R_TLSV1_ALERT_PROTOCOL_VERSION + {"TLSV1_ALERT_PROTOCOL_VERSION", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_PROTOCOL_VERSION}, + #else + {"TLSV1_ALERT_PROTOCOL_VERSION", 20, 1070}, + #endif + #ifdef SSL_R_TLSV1_ALERT_RECORD_OVERFLOW + {"TLSV1_ALERT_RECORD_OVERFLOW", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_RECORD_OVERFLOW}, + #else + {"TLSV1_ALERT_RECORD_OVERFLOW", 20, 1022}, + #endif #ifdef SSL_R_TLSV1_ALERT_RECORD_OVERFLOW {"TLSV1_ALERT_RECORD_OVERFLOW", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_RECORD_OVERFLOW}, #else @@ -7520,11 
+8092,36 @@ static struct py_ssl_error_code error_codes[] = { #else {"TLSV1_ALERT_UNKNOWN_CA", 20, 1048}, #endif + #ifdef SSL_R_TLSV1_ALERT_UNKNOWN_CA + {"TLSV1_ALERT_UNKNOWN_CA", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_UNKNOWN_CA}, + #else + {"TLSV1_ALERT_UNKNOWN_CA", 20, 1048}, + #endif + #ifdef SSL_R_TLSV1_ALERT_UNKNOWN_PSK_IDENTITY + {"TLSV1_ALERT_UNKNOWN_PSK_IDENTITY", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_UNKNOWN_PSK_IDENTITY}, + #else + {"TLSV1_ALERT_UNKNOWN_PSK_IDENTITY", 20, 1115}, + #endif + #ifdef SSL_R_TLSV1_ALERT_UNKNOWN_PSK_IDENTITY + {"TLSV1_ALERT_UNKNOWN_PSK_IDENTITY", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_UNKNOWN_PSK_IDENTITY}, + #else + {"TLSV1_ALERT_UNKNOWN_PSK_IDENTITY", 20, 1115}, + #endif #ifdef SSL_R_TLSV1_ALERT_USER_CANCELLED {"TLSV1_ALERT_USER_CANCELLED", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_USER_CANCELLED}, #else {"TLSV1_ALERT_USER_CANCELLED", 20, 1090}, #endif + #ifdef SSL_R_TLSV1_ALERT_USER_CANCELLED + {"TLSV1_ALERT_USER_CANCELLED", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_USER_CANCELLED}, + #else + {"TLSV1_ALERT_USER_CANCELLED", 20, 1090}, + #endif + #ifdef SSL_R_TLSV1_BAD_CERTIFICATE_HASH_VALUE + {"TLSV1_BAD_CERTIFICATE_HASH_VALUE", ERR_LIB_SSL, SSL_R_TLSV1_BAD_CERTIFICATE_HASH_VALUE}, + #else + {"TLSV1_BAD_CERTIFICATE_HASH_VALUE", 20, 1114}, + #endif #ifdef SSL_R_TLSV1_BAD_CERTIFICATE_HASH_VALUE {"TLSV1_BAD_CERTIFICATE_HASH_VALUE", ERR_LIB_SSL, SSL_R_TLSV1_BAD_CERTIFICATE_HASH_VALUE}, #else @@ -7535,11 +8132,26 @@ static struct py_ssl_error_code error_codes[] = { #else {"TLSV1_BAD_CERTIFICATE_STATUS_RESPONSE", 20, 1113}, #endif + #ifdef SSL_R_TLSV1_BAD_CERTIFICATE_STATUS_RESPONSE + {"TLSV1_BAD_CERTIFICATE_STATUS_RESPONSE", ERR_LIB_SSL, SSL_R_TLSV1_BAD_CERTIFICATE_STATUS_RESPONSE}, + #else + {"TLSV1_BAD_CERTIFICATE_STATUS_RESPONSE", 20, 1113}, + #endif #ifdef SSL_R_TLSV1_CERTIFICATE_UNOBTAINABLE {"TLSV1_CERTIFICATE_UNOBTAINABLE", ERR_LIB_SSL, SSL_R_TLSV1_CERTIFICATE_UNOBTAINABLE}, #else {"TLSV1_CERTIFICATE_UNOBTAINABLE", 20, 1111}, #endif + #ifdef SSL_R_TLSV1_CERTIFICATE_UNOBTAINABLE + {"TLSV1_CERTIFICATE_UNOBTAINABLE", ERR_LIB_SSL, SSL_R_TLSV1_CERTIFICATE_UNOBTAINABLE}, + #else + {"TLSV1_CERTIFICATE_UNOBTAINABLE", 20, 1111}, + #endif + #ifdef SSL_R_TLSV1_UNRECOGNIZED_NAME + {"TLSV1_UNRECOGNIZED_NAME", ERR_LIB_SSL, SSL_R_TLSV1_UNRECOGNIZED_NAME}, + #else + {"TLSV1_UNRECOGNIZED_NAME", 20, 1112}, + #endif #ifdef SSL_R_TLSV1_UNRECOGNIZED_NAME {"TLSV1_UNRECOGNIZED_NAME", ERR_LIB_SSL, SSL_R_TLSV1_UNRECOGNIZED_NAME}, #else @@ -7550,6 +8162,11 @@ static struct py_ssl_error_code error_codes[] = { #else {"TLSV1_UNSUPPORTED_EXTENSION", 20, 1110}, #endif + #ifdef SSL_R_TLSV1_UNSUPPORTED_EXTENSION + {"TLSV1_UNSUPPORTED_EXTENSION", ERR_LIB_SSL, SSL_R_TLSV1_UNSUPPORTED_EXTENSION}, + #else + {"TLSV1_UNSUPPORTED_EXTENSION", 20, 1110}, + #endif #ifdef SSL_R_TLS_ILLEGAL_EXPORTER_LABEL {"TLS_ILLEGAL_EXPORTER_LABEL", ERR_LIB_SSL, SSL_R_TLS_ILLEGAL_EXPORTER_LABEL}, #else @@ -7665,6 +8282,11 @@ static struct py_ssl_error_code error_codes[] = { #else {"UNKNOWN_KEY_EXCHANGE_TYPE", 20, 250}, #endif + #ifdef SSL_R_UNKNOWN_MANDATORY_PARAMETER + {"UNKNOWN_MANDATORY_PARAMETER", ERR_LIB_SSL, SSL_R_UNKNOWN_MANDATORY_PARAMETER}, + #else + {"UNKNOWN_MANDATORY_PARAMETER", 20, 323}, + #endif #ifdef SSL_R_UNKNOWN_PKEY_TYPE {"UNKNOWN_PKEY_TYPE", ERR_LIB_SSL, SSL_R_UNKNOWN_PKEY_TYPE}, #else @@ -7700,6 +8322,21 @@ static struct py_ssl_error_code error_codes[] = { #else {"UNSUPPORTED_COMPRESSION_ALGORITHM", 20, 257}, #endif + #ifdef SSL_R_UNSUPPORTED_CONFIG_VALUE + {"UNSUPPORTED_CONFIG_VALUE", ERR_LIB_SSL, 
SSL_R_UNSUPPORTED_CONFIG_VALUE}, + #else + {"UNSUPPORTED_CONFIG_VALUE", 20, 414}, + #endif + #ifdef SSL_R_UNSUPPORTED_CONFIG_VALUE_CLASS + {"UNSUPPORTED_CONFIG_VALUE_CLASS", ERR_LIB_SSL, SSL_R_UNSUPPORTED_CONFIG_VALUE_CLASS}, + #else + {"UNSUPPORTED_CONFIG_VALUE_CLASS", 20, 415}, + #endif + #ifdef SSL_R_UNSUPPORTED_CONFIG_VALUE_OP + {"UNSUPPORTED_CONFIG_VALUE_OP", ERR_LIB_SSL, SSL_R_UNSUPPORTED_CONFIG_VALUE_OP}, + #else + {"UNSUPPORTED_CONFIG_VALUE_OP", 20, 416}, + #endif #ifdef SSL_R_UNSUPPORTED_ELLIPTIC_CURVE {"UNSUPPORTED_ELLIPTIC_CURVE", ERR_LIB_SSL, SSL_R_UNSUPPORTED_ELLIPTIC_CURVE}, #else @@ -7720,6 +8357,11 @@ static struct py_ssl_error_code error_codes[] = { #else {"UNSUPPORTED_STATUS_TYPE", 20, 329}, #endif + #ifdef SSL_R_UNSUPPORTED_WRITE_FLAG + {"UNSUPPORTED_WRITE_FLAG", ERR_LIB_SSL, SSL_R_UNSUPPORTED_WRITE_FLAG}, + #else + {"UNSUPPORTED_WRITE_FLAG", 20, 412}, + #endif #ifdef SSL_R_USE_SRTP_NOT_NEGOTIATED {"USE_SRTP_NOT_NEGOTIATED", ERR_LIB_SSL, SSL_R_USE_SRTP_NOT_NEGOTIATED}, #else @@ -7750,6 +8392,11 @@ static struct py_ssl_error_code error_codes[] = { #else {"WRONG_CURVE", 20, 378}, #endif + #ifdef SSL_R_WRONG_RPK_TYPE + {"WRONG_RPK_TYPE", ERR_LIB_SSL, SSL_R_WRONG_RPK_TYPE}, + #else + {"WRONG_RPK_TYPE", 20, 351}, + #endif #ifdef SSL_R_WRONG_SIGNATURE_LENGTH {"WRONG_SIGNATURE_LENGTH", ERR_LIB_SSL, SSL_R_WRONG_SIGNATURE_LENGTH}, #else @@ -8055,6 +8702,16 @@ static struct py_ssl_error_code error_codes[] = { #else {"BAD_OBJECT", 34, 119}, #endif + #ifdef X509V3_R_BAD_OPTION + {"BAD_OPTION", ERR_LIB_X509V3, X509V3_R_BAD_OPTION}, + #else + {"BAD_OPTION", 34, 170}, + #endif + #ifdef X509V3_R_BAD_VALUE + {"BAD_VALUE", ERR_LIB_X509V3, X509V3_R_BAD_VALUE}, + #else + {"BAD_VALUE", 34, 171}, + #endif #ifdef X509V3_R_BN_DEC2BN_ERROR {"BN_DEC2BN_ERROR", ERR_LIB_X509V3, X509V3_R_BN_DEC2BN_ERROR}, #else @@ -8370,6 +9027,11 @@ static struct py_ssl_error_code error_codes[] = { #else {"UNKNOWN_OPTION", 34, 120}, #endif + #ifdef X509V3_R_UNKNOWN_VALUE + {"UNKNOWN_VALUE", ERR_LIB_X509V3, X509V3_R_UNKNOWN_VALUE}, + #else + {"UNKNOWN_VALUE", 34, 172}, + #endif #ifdef X509V3_R_UNSUPPORTED_OPTION {"UNSUPPORTED_OPTION", ERR_LIB_X509V3, X509V3_R_UNSUPPORTED_OPTION}, #else @@ -8430,6 +9092,11 @@ static struct py_ssl_error_code error_codes[] = { #else {"CRL_VERIFY_FAILURE", 11, 131}, #endif + #ifdef X509_R_DUPLICATE_ATTRIBUTE + {"DUPLICATE_ATTRIBUTE", ERR_LIB_X509, X509_R_DUPLICATE_ATTRIBUTE}, + #else + {"DUPLICATE_ATTRIBUTE", 11, 140}, + #endif #ifdef X509_R_ERROR_GETTING_MD_BY_NID {"ERROR_GETTING_MD_BY_NID", ERR_LIB_X509, X509_R_ERROR_GETTING_MD_BY_NID}, #else @@ -8590,6 +9257,11 @@ static struct py_ssl_error_code error_codes[] = { #else {"UNSUPPORTED_ALGORITHM", 11, 111}, #endif + #ifdef X509_R_UNSUPPORTED_VERSION + {"UNSUPPORTED_VERSION", ERR_LIB_X509, X509_R_UNSUPPORTED_VERSION}, + #else + {"UNSUPPORTED_VERSION", 11, 145}, + #endif #ifdef X509_R_WRONG_LOOKUP_TYPE {"WRONG_LOOKUP_TYPE", ERR_LIB_X509, X509_R_WRONG_LOOKUP_TYPE}, #else diff --git a/Modules/_testcapi/complex.c b/Modules/_testcapi/complex.c index eceb1310bfe874..b726cd3236f179 100644 --- a/Modules/_testcapi/complex.c +++ b/Modules/_testcapi/complex.c @@ -46,21 +46,59 @@ _py_c_neg(PyObject *Py_UNUSED(module), PyObject *num) static PyObject * \ _py_c_##suffix(PyObject *Py_UNUSED(module), PyObject *args) \ { \ - Py_complex num, exp, res; \ + Py_complex a, b, res; \ \ - if (!PyArg_ParseTuple(args, "DD", &num, &exp)) { \ + if (!PyArg_ParseTuple(args, "DD", &a, &b)) { \ return NULL; \ } \ \ errno = 0; \ - res = _Py_c_##suffix(num, exp); \ + res 
= _Py_c_##suffix(a, b); \ + return Py_BuildValue("Di", &res, errno); \ + }; + +#define _PY_CR_FUNC2(suffix) \ + static PyObject * \ + _py_cr_##suffix(PyObject *Py_UNUSED(module), PyObject *args) \ + { \ + Py_complex a, res; \ + double b; \ + \ + if (!PyArg_ParseTuple(args, "Dd", &a, &b)) { \ + return NULL; \ + } \ + \ + errno = 0; \ + res = _Py_cr_##suffix(a, b); \ + return Py_BuildValue("Di", &res, errno); \ + }; + +#define _PY_RC_FUNC2(suffix) \ + static PyObject * \ + _py_rc_##suffix(PyObject *Py_UNUSED(module), PyObject *args) \ + { \ + Py_complex b, res; \ + double a; \ + \ + if (!PyArg_ParseTuple(args, "dD", &a, &b)) { \ + return NULL; \ + } \ + \ + errno = 0; \ + res = _Py_rc_##suffix(a, b); \ return Py_BuildValue("Di", &res, errno); \ }; _PY_C_FUNC2(sum) +_PY_CR_FUNC2(sum) _PY_C_FUNC2(diff) +_PY_CR_FUNC2(diff) +_PY_RC_FUNC2(diff) _PY_C_FUNC2(prod) +_PY_CR_FUNC2(prod) _PY_C_FUNC2(quot) +_PY_CR_FUNC2(quot) +_PY_RC_FUNC2(quot) _PY_C_FUNC2(pow) static PyObject* @@ -86,10 +124,16 @@ static PyMethodDef test_methods[] = { {"complex_fromccomplex", complex_fromccomplex, METH_O}, {"complex_asccomplex", complex_asccomplex, METH_O}, {"_py_c_sum", _py_c_sum, METH_VARARGS}, + {"_py_cr_sum", _py_cr_sum, METH_VARARGS}, {"_py_c_diff", _py_c_diff, METH_VARARGS}, + {"_py_cr_diff", _py_cr_diff, METH_VARARGS}, + {"_py_rc_diff", _py_rc_diff, METH_VARARGS}, {"_py_c_neg", _py_c_neg, METH_O}, {"_py_c_prod", _py_c_prod, METH_VARARGS}, + {"_py_cr_prod", _py_cr_prod, METH_VARARGS}, {"_py_c_quot", _py_c_quot, METH_VARARGS}, + {"_py_cr_quot", _py_cr_quot, METH_VARARGS}, + {"_py_rc_quot", _py_rc_quot, METH_VARARGS}, {"_py_c_pow", _py_c_pow, METH_VARARGS}, {"_py_c_abs", _py_c_abs, METH_O}, {NULL}, diff --git a/Modules/_testinternalcapi.c b/Modules/_testinternalcapi.c index b02f794d27d5bd..a925191d479bd6 100644 --- a/Modules/_testinternalcapi.c +++ b/Modules/_testinternalcapi.c @@ -2222,7 +2222,7 @@ module_exec(PyObject *module) } if (PyModule_Add(module, "TIER2_THRESHOLD", - PyLong_FromLong(JUMP_BACKWARD_INITIAL_VALUE)) < 0) { + PyLong_FromLong(JUMP_BACKWARD_INITIAL_VALUE + 1)) < 0) { return 1; } diff --git a/Modules/_threadmodule.c b/Modules/_threadmodule.c index d4408aa9e42d9d..4a45445e2f62db 100644 --- a/Modules/_threadmodule.c +++ b/Modules/_threadmodule.c @@ -421,6 +421,7 @@ ThreadHandle_start(ThreadHandle *self, PyObject *func, PyObject *args, PyThread_handle_t os_handle; if (PyThread_start_joinable_thread(thread_run, boot, &ident, &os_handle)) { PyThreadState_Clear(boot->tstate); + PyThreadState_Delete(boot->tstate); thread_bootstate_free(boot, 1); PyErr_SetString(ThreadError, "can't start new thread"); goto start_failed; @@ -1623,7 +1624,7 @@ local_setattro(localobject *self, PyObject *name, PyObject *v) } if (r == 1) { PyErr_Format(PyExc_AttributeError, - "'%.100s' object attribute '%U' is read-only", + "'%.100s' object attribute %R is read-only", Py_TYPE(self)->tp_name, name); goto err; } diff --git a/Modules/clinic/_ssl.c.h b/Modules/clinic/_ssl.c.h index 582eef16c13244..957f5ced3a2cee 100644 --- a/Modules/clinic/_ssl.c.h +++ b/Modules/clinic/_ssl.c.h @@ -258,9 +258,7 @@ _ssl__SSLSocket_compression(PySSLSocket *self, PyObject *Py_UNUSED(ignored)) return _ssl__SSLSocket_compression_impl(self); } -#if defined(_ssl__SSLSocket_context_HAS_DOCSTR) -# define _ssl__SSLSocket_context_DOCSTR _ssl__SSLSocket_context__doc__ -#else +#if !defined(_ssl__SSLSocket_context_DOCSTR) # define _ssl__SSLSocket_context_DOCSTR NULL #endif #if defined(_SSL__SSLSOCKET_CONTEXT_GETSETDEF) @@ -285,9 +283,7 @@ 
_ssl__SSLSocket_context_get(PySSLSocket *self, void *Py_UNUSED(context)) return return_value; } -#if defined(_SSL__SSLSOCKET_CONTEXT_HAS_DOCSTR) -# define _ssl__SSLSocket_context_DOCSTR _ssl__SSLSocket_context__doc__ -#else +#if !defined(_ssl__SSLSocket_context_DOCSTR) # define _ssl__SSLSocket_context_DOCSTR NULL #endif #if defined(_SSL__SSLSOCKET_CONTEXT_GETSETDEF) @@ -314,11 +310,9 @@ _ssl__SSLSocket_context_set(PySSLSocket *self, PyObject *value, void *Py_UNUSED( PyDoc_STRVAR(_ssl__SSLSocket_server_side__doc__, "Whether this is a server-side socket."); -#define _ssl__SSLSocket_server_side_HAS_DOCSTR +#define _ssl__SSLSocket_server_side_DOCSTR _ssl__SSLSocket_server_side__doc__ -#if defined(_ssl__SSLSocket_server_side_HAS_DOCSTR) -# define _ssl__SSLSocket_server_side_DOCSTR _ssl__SSLSocket_server_side__doc__ -#else +#if !defined(_ssl__SSLSocket_server_side_DOCSTR) # define _ssl__SSLSocket_server_side_DOCSTR NULL #endif #if defined(_SSL__SSLSOCKET_SERVER_SIDE_GETSETDEF) @@ -345,11 +339,9 @@ _ssl__SSLSocket_server_side_get(PySSLSocket *self, void *Py_UNUSED(context)) PyDoc_STRVAR(_ssl__SSLSocket_server_hostname__doc__, "The currently set server hostname (for SNI)."); -#define _ssl__SSLSocket_server_hostname_HAS_DOCSTR +#define _ssl__SSLSocket_server_hostname_DOCSTR _ssl__SSLSocket_server_hostname__doc__ -#if defined(_ssl__SSLSocket_server_hostname_HAS_DOCSTR) -# define _ssl__SSLSocket_server_hostname_DOCSTR _ssl__SSLSocket_server_hostname__doc__ -#else +#if !defined(_ssl__SSLSocket_server_hostname_DOCSTR) # define _ssl__SSLSocket_server_hostname_DOCSTR NULL #endif #if defined(_SSL__SSLSOCKET_SERVER_HOSTNAME_GETSETDEF) @@ -374,9 +366,7 @@ _ssl__SSLSocket_server_hostname_get(PySSLSocket *self, void *Py_UNUSED(context)) return return_value; } -#if defined(_ssl__SSLSocket_owner_HAS_DOCSTR) -# define _ssl__SSLSocket_owner_DOCSTR _ssl__SSLSocket_owner__doc__ -#else +#if !defined(_ssl__SSLSocket_owner_DOCSTR) # define _ssl__SSLSocket_owner_DOCSTR NULL #endif #if defined(_SSL__SSLSOCKET_OWNER_GETSETDEF) @@ -401,9 +391,7 @@ _ssl__SSLSocket_owner_get(PySSLSocket *self, void *Py_UNUSED(context)) return return_value; } -#if defined(_SSL__SSLSOCKET_OWNER_HAS_DOCSTR) -# define _ssl__SSLSocket_owner_DOCSTR _ssl__SSLSocket_owner__doc__ -#else +#if !defined(_ssl__SSLSocket_owner_DOCSTR) # define _ssl__SSLSocket_owner_DOCSTR NULL #endif #if defined(_SSL__SSLSOCKET_OWNER_GETSETDEF) @@ -664,9 +652,7 @@ _ssl__SSLSocket_verify_client_post_handshake(PySSLSocket *self, PyObject *Py_UNU return return_value; } -#if defined(_ssl__SSLSocket_session_HAS_DOCSTR) -# define _ssl__SSLSocket_session_DOCSTR _ssl__SSLSocket_session__doc__ -#else +#if !defined(_ssl__SSLSocket_session_DOCSTR) # define _ssl__SSLSocket_session_DOCSTR NULL #endif #if defined(_SSL__SSLSOCKET_SESSION_GETSETDEF) @@ -691,9 +677,7 @@ _ssl__SSLSocket_session_get(PySSLSocket *self, void *Py_UNUSED(context)) return return_value; } -#if defined(_SSL__SSLSOCKET_SESSION_HAS_DOCSTR) -# define _ssl__SSLSocket_session_DOCSTR _ssl__SSLSocket_session__doc__ -#else +#if !defined(_ssl__SSLSocket_session_DOCSTR) # define _ssl__SSLSocket_session_DOCSTR NULL #endif #if defined(_SSL__SSLSOCKET_SESSION_GETSETDEF) @@ -720,11 +704,9 @@ _ssl__SSLSocket_session_set(PySSLSocket *self, PyObject *value, void *Py_UNUSED( PyDoc_STRVAR(_ssl__SSLSocket_session_reused__doc__, "Was the client session reused during handshake?"); -#define _ssl__SSLSocket_session_reused_HAS_DOCSTR +#define _ssl__SSLSocket_session_reused_DOCSTR _ssl__SSLSocket_session_reused__doc__ -#if 
defined(_ssl__SSLSocket_session_reused_HAS_DOCSTR) -# define _ssl__SSLSocket_session_reused_DOCSTR _ssl__SSLSocket_session_reused__doc__ -#else +#if !defined(_ssl__SSLSocket_session_reused_DOCSTR) # define _ssl__SSLSocket_session_reused_DOCSTR NULL #endif #if defined(_SSL__SSLSOCKET_SESSION_REUSED_GETSETDEF) @@ -873,9 +855,7 @@ _ssl__SSLContext__set_alpn_protocols(PySSLContext *self, PyObject *arg) return return_value; } -#if defined(_ssl__SSLContext_verify_mode_HAS_DOCSTR) -# define _ssl__SSLContext_verify_mode_DOCSTR _ssl__SSLContext_verify_mode__doc__ -#else +#if !defined(_ssl__SSLContext_verify_mode_DOCSTR) # define _ssl__SSLContext_verify_mode_DOCSTR NULL #endif #if defined(_SSL__SSLCONTEXT_VERIFY_MODE_GETSETDEF) @@ -900,9 +880,7 @@ _ssl__SSLContext_verify_mode_get(PySSLContext *self, void *Py_UNUSED(context)) return return_value; } -#if defined(_SSL__SSLCONTEXT_VERIFY_MODE_HAS_DOCSTR) -# define _ssl__SSLContext_verify_mode_DOCSTR _ssl__SSLContext_verify_mode__doc__ -#else +#if !defined(_ssl__SSLContext_verify_mode_DOCSTR) # define _ssl__SSLContext_verify_mode_DOCSTR NULL #endif #if defined(_SSL__SSLCONTEXT_VERIFY_MODE_GETSETDEF) @@ -927,9 +905,7 @@ _ssl__SSLContext_verify_mode_set(PySSLContext *self, PyObject *value, void *Py_U return return_value; } -#if defined(_ssl__SSLContext_verify_flags_HAS_DOCSTR) -# define _ssl__SSLContext_verify_flags_DOCSTR _ssl__SSLContext_verify_flags__doc__ -#else +#if !defined(_ssl__SSLContext_verify_flags_DOCSTR) # define _ssl__SSLContext_verify_flags_DOCSTR NULL #endif #if defined(_SSL__SSLCONTEXT_VERIFY_FLAGS_GETSETDEF) @@ -954,9 +930,7 @@ _ssl__SSLContext_verify_flags_get(PySSLContext *self, void *Py_UNUSED(context)) return return_value; } -#if defined(_SSL__SSLCONTEXT_VERIFY_FLAGS_HAS_DOCSTR) -# define _ssl__SSLContext_verify_flags_DOCSTR _ssl__SSLContext_verify_flags__doc__ -#else +#if !defined(_ssl__SSLContext_verify_flags_DOCSTR) # define _ssl__SSLContext_verify_flags_DOCSTR NULL #endif #if defined(_SSL__SSLCONTEXT_VERIFY_FLAGS_GETSETDEF) @@ -981,9 +955,7 @@ _ssl__SSLContext_verify_flags_set(PySSLContext *self, PyObject *value, void *Py_ return return_value; } -#if defined(_ssl__SSLContext_minimum_version_HAS_DOCSTR) -# define _ssl__SSLContext_minimum_version_DOCSTR _ssl__SSLContext_minimum_version__doc__ -#else +#if !defined(_ssl__SSLContext_minimum_version_DOCSTR) # define _ssl__SSLContext_minimum_version_DOCSTR NULL #endif #if defined(_SSL__SSLCONTEXT_MINIMUM_VERSION_GETSETDEF) @@ -1008,9 +980,7 @@ _ssl__SSLContext_minimum_version_get(PySSLContext *self, void *Py_UNUSED(context return return_value; } -#if defined(_SSL__SSLCONTEXT_MINIMUM_VERSION_HAS_DOCSTR) -# define _ssl__SSLContext_minimum_version_DOCSTR _ssl__SSLContext_minimum_version__doc__ -#else +#if !defined(_ssl__SSLContext_minimum_version_DOCSTR) # define _ssl__SSLContext_minimum_version_DOCSTR NULL #endif #if defined(_SSL__SSLCONTEXT_MINIMUM_VERSION_GETSETDEF) @@ -1036,9 +1006,7 @@ _ssl__SSLContext_minimum_version_set(PySSLContext *self, PyObject *value, void * return return_value; } -#if defined(_ssl__SSLContext_maximum_version_HAS_DOCSTR) -# define _ssl__SSLContext_maximum_version_DOCSTR _ssl__SSLContext_maximum_version__doc__ -#else +#if !defined(_ssl__SSLContext_maximum_version_DOCSTR) # define _ssl__SSLContext_maximum_version_DOCSTR NULL #endif #if defined(_SSL__SSLCONTEXT_MAXIMUM_VERSION_GETSETDEF) @@ -1063,9 +1031,7 @@ _ssl__SSLContext_maximum_version_get(PySSLContext *self, void *Py_UNUSED(context return return_value; } -#if 
defined(_SSL__SSLCONTEXT_MAXIMUM_VERSION_HAS_DOCSTR) -# define _ssl__SSLContext_maximum_version_DOCSTR _ssl__SSLContext_maximum_version__doc__ -#else +#if !defined(_ssl__SSLContext_maximum_version_DOCSTR) # define _ssl__SSLContext_maximum_version_DOCSTR NULL #endif #if defined(_SSL__SSLCONTEXT_MAXIMUM_VERSION_GETSETDEF) @@ -1091,9 +1057,7 @@ _ssl__SSLContext_maximum_version_set(PySSLContext *self, PyObject *value, void * return return_value; } -#if defined(_ssl__SSLContext_num_tickets_HAS_DOCSTR) -# define _ssl__SSLContext_num_tickets_DOCSTR _ssl__SSLContext_num_tickets__doc__ -#else +#if !defined(_ssl__SSLContext_num_tickets_DOCSTR) # define _ssl__SSLContext_num_tickets_DOCSTR NULL #endif #if defined(_SSL__SSLCONTEXT_NUM_TICKETS_GETSETDEF) @@ -1118,9 +1082,7 @@ _ssl__SSLContext_num_tickets_get(PySSLContext *self, void *Py_UNUSED(context)) return return_value; } -#if defined(_SSL__SSLCONTEXT_NUM_TICKETS_HAS_DOCSTR) -# define _ssl__SSLContext_num_tickets_DOCSTR _ssl__SSLContext_num_tickets__doc__ -#else +#if !defined(_ssl__SSLContext_num_tickets_DOCSTR) # define _ssl__SSLContext_num_tickets_DOCSTR NULL #endif #if defined(_SSL__SSLCONTEXT_NUM_TICKETS_GETSETDEF) @@ -1145,9 +1107,7 @@ _ssl__SSLContext_num_tickets_set(PySSLContext *self, PyObject *value, void *Py_U return return_value; } -#if defined(_ssl__SSLContext_security_level_HAS_DOCSTR) -# define _ssl__SSLContext_security_level_DOCSTR _ssl__SSLContext_security_level__doc__ -#else +#if !defined(_ssl__SSLContext_security_level_DOCSTR) # define _ssl__SSLContext_security_level_DOCSTR NULL #endif #if defined(_SSL__SSLCONTEXT_SECURITY_LEVEL_GETSETDEF) @@ -1172,9 +1132,7 @@ _ssl__SSLContext_security_level_get(PySSLContext *self, void *Py_UNUSED(context) return return_value; } -#if defined(_ssl__SSLContext_options_HAS_DOCSTR) -# define _ssl__SSLContext_options_DOCSTR _ssl__SSLContext_options__doc__ -#else +#if !defined(_ssl__SSLContext_options_DOCSTR) # define _ssl__SSLContext_options_DOCSTR NULL #endif #if defined(_SSL__SSLCONTEXT_OPTIONS_GETSETDEF) @@ -1199,9 +1157,7 @@ _ssl__SSLContext_options_get(PySSLContext *self, void *Py_UNUSED(context)) return return_value; } -#if defined(_SSL__SSLCONTEXT_OPTIONS_HAS_DOCSTR) -# define _ssl__SSLContext_options_DOCSTR _ssl__SSLContext_options__doc__ -#else +#if !defined(_ssl__SSLContext_options_DOCSTR) # define _ssl__SSLContext_options_DOCSTR NULL #endif #if defined(_SSL__SSLCONTEXT_OPTIONS_GETSETDEF) @@ -1226,9 +1182,7 @@ _ssl__SSLContext_options_set(PySSLContext *self, PyObject *value, void *Py_UNUSE return return_value; } -#if defined(_ssl__SSLContext__host_flags_HAS_DOCSTR) -# define _ssl__SSLContext__host_flags_DOCSTR _ssl__SSLContext__host_flags__doc__ -#else +#if !defined(_ssl__SSLContext__host_flags_DOCSTR) # define _ssl__SSLContext__host_flags_DOCSTR NULL #endif #if defined(_SSL__SSLCONTEXT__HOST_FLAGS_GETSETDEF) @@ -1253,9 +1207,7 @@ _ssl__SSLContext__host_flags_get(PySSLContext *self, void *Py_UNUSED(context)) return return_value; } -#if defined(_SSL__SSLCONTEXT__HOST_FLAGS_HAS_DOCSTR) -# define _ssl__SSLContext__host_flags_DOCSTR _ssl__SSLContext__host_flags__doc__ -#else +#if !defined(_ssl__SSLContext__host_flags_DOCSTR) # define _ssl__SSLContext__host_flags_DOCSTR NULL #endif #if defined(_SSL__SSLCONTEXT__HOST_FLAGS_GETSETDEF) @@ -1280,9 +1232,7 @@ _ssl__SSLContext__host_flags_set(PySSLContext *self, PyObject *value, void *Py_U return return_value; } -#if defined(_ssl__SSLContext_check_hostname_HAS_DOCSTR) -# define _ssl__SSLContext_check_hostname_DOCSTR 
_ssl__SSLContext_check_hostname__doc__ -#else +#if !defined(_ssl__SSLContext_check_hostname_DOCSTR) # define _ssl__SSLContext_check_hostname_DOCSTR NULL #endif #if defined(_SSL__SSLCONTEXT_CHECK_HOSTNAME_GETSETDEF) @@ -1307,9 +1257,7 @@ _ssl__SSLContext_check_hostname_get(PySSLContext *self, void *Py_UNUSED(context) return return_value; } -#if defined(_SSL__SSLCONTEXT_CHECK_HOSTNAME_HAS_DOCSTR) -# define _ssl__SSLContext_check_hostname_DOCSTR _ssl__SSLContext_check_hostname__doc__ -#else +#if !defined(_ssl__SSLContext_check_hostname_DOCSTR) # define _ssl__SSLContext_check_hostname_DOCSTR NULL #endif #if defined(_SSL__SSLCONTEXT_CHECK_HOSTNAME_GETSETDEF) @@ -1334,9 +1282,7 @@ _ssl__SSLContext_check_hostname_set(PySSLContext *self, PyObject *value, void *P return return_value; } -#if defined(_ssl__SSLContext_protocol_HAS_DOCSTR) -# define _ssl__SSLContext_protocol_DOCSTR _ssl__SSLContext_protocol__doc__ -#else +#if !defined(_ssl__SSLContext_protocol_DOCSTR) # define _ssl__SSLContext_protocol_DOCSTR NULL #endif #if defined(_SSL__SSLCONTEXT_PROTOCOL_GETSETDEF) @@ -1799,9 +1745,7 @@ _ssl__SSLContext_set_ecdh_curve(PySSLContext *self, PyObject *name) return return_value; } -#if defined(_ssl__SSLContext_sni_callback_HAS_DOCSTR) -# define _ssl__SSLContext_sni_callback_DOCSTR _ssl__SSLContext_sni_callback__doc__ -#else +#if !defined(_ssl__SSLContext_sni_callback_DOCSTR) # define _ssl__SSLContext_sni_callback_DOCSTR NULL #endif #if defined(_SSL__SSLCONTEXT_SNI_CALLBACK_GETSETDEF) @@ -1826,9 +1770,7 @@ _ssl__SSLContext_sni_callback_get(PySSLContext *self, void *Py_UNUSED(context)) return return_value; } -#if defined(_SSL__SSLCONTEXT_SNI_CALLBACK_HAS_DOCSTR) -# define _ssl__SSLContext_sni_callback_DOCSTR _ssl__SSLContext_sni_callback__doc__ -#else +#if !defined(_ssl__SSLContext_sni_callback_DOCSTR) # define _ssl__SSLContext_sni_callback_DOCSTR NULL #endif #if defined(_SSL__SSLCONTEXT_SNI_CALLBACK_GETSETDEF) @@ -2121,9 +2063,7 @@ _ssl_MemoryBIO(PyTypeObject *type, PyObject *args, PyObject *kwargs) return return_value; } -#if defined(_ssl_MemoryBIO_pending_HAS_DOCSTR) -# define _ssl_MemoryBIO_pending_DOCSTR _ssl_MemoryBIO_pending__doc__ -#else +#if !defined(_ssl_MemoryBIO_pending_DOCSTR) # define _ssl_MemoryBIO_pending_DOCSTR NULL #endif #if defined(_SSL_MEMORYBIO_PENDING_GETSETDEF) @@ -2148,9 +2088,7 @@ _ssl_MemoryBIO_pending_get(PySSLMemoryBIO *self, void *Py_UNUSED(context)) return return_value; } -#if defined(_ssl_MemoryBIO_eof_HAS_DOCSTR) -# define _ssl_MemoryBIO_eof_DOCSTR _ssl_MemoryBIO_eof__doc__ -#else +#if !defined(_ssl_MemoryBIO_eof_DOCSTR) # define _ssl_MemoryBIO_eof_DOCSTR NULL #endif #if defined(_SSL_MEMORYBIO_EOF_GETSETDEF) @@ -2279,9 +2217,7 @@ _ssl_MemoryBIO_write_eof(PySSLMemoryBIO *self, PyObject *Py_UNUSED(ignored)) return return_value; } -#if defined(_ssl_SSLSession_time_HAS_DOCSTR) -# define _ssl_SSLSession_time_DOCSTR _ssl_SSLSession_time__doc__ -#else +#if !defined(_ssl_SSLSession_time_DOCSTR) # define _ssl_SSLSession_time_DOCSTR NULL #endif #if defined(_SSL_SSLSESSION_TIME_GETSETDEF) @@ -2306,9 +2242,7 @@ _ssl_SSLSession_time_get(PySSLSession *self, void *Py_UNUSED(context)) return return_value; } -#if defined(_ssl_SSLSession_timeout_HAS_DOCSTR) -# define _ssl_SSLSession_timeout_DOCSTR _ssl_SSLSession_timeout__doc__ -#else +#if !defined(_ssl_SSLSession_timeout_DOCSTR) # define _ssl_SSLSession_timeout_DOCSTR NULL #endif #if defined(_SSL_SSLSESSION_TIMEOUT_GETSETDEF) @@ -2333,9 +2267,7 @@ _ssl_SSLSession_timeout_get(PySSLSession *self, void *Py_UNUSED(context)) return 
return_value; } -#if defined(_ssl_SSLSession_ticket_lifetime_hint_HAS_DOCSTR) -# define _ssl_SSLSession_ticket_lifetime_hint_DOCSTR _ssl_SSLSession_ticket_lifetime_hint__doc__ -#else +#if !defined(_ssl_SSLSession_ticket_lifetime_hint_DOCSTR) # define _ssl_SSLSession_ticket_lifetime_hint_DOCSTR NULL #endif #if defined(_SSL_SSLSESSION_TICKET_LIFETIME_HINT_GETSETDEF) @@ -2360,9 +2292,7 @@ _ssl_SSLSession_ticket_lifetime_hint_get(PySSLSession *self, void *Py_UNUSED(con return return_value; } -#if defined(_ssl_SSLSession_id_HAS_DOCSTR) -# define _ssl_SSLSession_id_DOCSTR _ssl_SSLSession_id__doc__ -#else +#if !defined(_ssl_SSLSession_id_DOCSTR) # define _ssl_SSLSession_id_DOCSTR NULL #endif #if defined(_SSL_SSLSESSION_ID_GETSETDEF) @@ -2387,9 +2317,7 @@ _ssl_SSLSession_id_get(PySSLSession *self, void *Py_UNUSED(context)) return return_value; } -#if defined(_ssl_SSLSession_has_ticket_HAS_DOCSTR) -# define _ssl_SSLSession_has_ticket_DOCSTR _ssl_SSLSession_has_ticket__doc__ -#else +#if !defined(_ssl_SSLSession_has_ticket_DOCSTR) # define _ssl_SSLSession_has_ticket_DOCSTR NULL #endif #if defined(_SSL_SSLSESSION_HAS_TICKET_GETSETDEF) @@ -2839,4 +2767,4 @@ _ssl_enum_crls(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObje #ifndef _SSL_ENUM_CRLS_METHODDEF #define _SSL_ENUM_CRLS_METHODDEF #endif /* !defined(_SSL_ENUM_CRLS_METHODDEF) */ -/*[clinic end generated code: output=4c2af0c8fab7ec4e input=a9049054013a1b77]*/ +/*[clinic end generated code: output=44ab066d21277ee5 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/grpmodule.c.h b/Modules/clinic/grpmodule.c.h index cc0ad210f42743..facfa3a43e490e 100644 --- a/Modules/clinic/grpmodule.c.h +++ b/Modules/clinic/grpmodule.c.h @@ -2,6 +2,12 @@ preserve [clinic start generated code]*/ +#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) +# include "pycore_gc.h" // PyGC_Head +# include "pycore_runtime.h" // _Py_ID() +#endif +#include "pycore_modsupport.h" // _PyArg_UnpackKeywords() + PyDoc_STRVAR(grp_getgrgid__doc__, "getgrgid($module, /, id)\n" "--\n" @@ -11,21 +17,49 @@ PyDoc_STRVAR(grp_getgrgid__doc__, "If id is not valid, raise KeyError."); #define GRP_GETGRGID_METHODDEF \ - {"getgrgid", (PyCFunction)(void(*)(void))grp_getgrgid, METH_VARARGS|METH_KEYWORDS, grp_getgrgid__doc__}, + {"getgrgid", _PyCFunction_CAST(grp_getgrgid), METH_FASTCALL|METH_KEYWORDS, grp_getgrgid__doc__}, static PyObject * grp_getgrgid_impl(PyObject *module, PyObject *id); static PyObject * -grp_getgrgid(PyObject *module, PyObject *args, PyObject *kwargs) +grp_getgrgid(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; - static char *_keywords[] = {"id", NULL}; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 1 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(id), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"id", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "getgrgid", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[1]; PyObject *id; - if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O:getgrgid", _keywords, - &id)) + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, + /*minpos*/ 1, 
/*maxpos*/ 1, /*minkw*/ 0, /*varpos*/ 0, argsbuf); + if (!args) { goto exit; + } + id = args[0]; return_value = grp_getgrgid_impl(module, id); exit: @@ -41,21 +75,53 @@ PyDoc_STRVAR(grp_getgrnam__doc__, "If name is not valid, raise KeyError."); #define GRP_GETGRNAM_METHODDEF \ - {"getgrnam", (PyCFunction)(void(*)(void))grp_getgrnam, METH_VARARGS|METH_KEYWORDS, grp_getgrnam__doc__}, + {"getgrnam", _PyCFunction_CAST(grp_getgrnam), METH_FASTCALL|METH_KEYWORDS, grp_getgrnam__doc__}, static PyObject * grp_getgrnam_impl(PyObject *module, PyObject *name); static PyObject * -grp_getgrnam(PyObject *module, PyObject *args, PyObject *kwargs) +grp_getgrnam(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; - static char *_keywords[] = {"name", NULL}; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 1 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(name), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"name", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "getgrnam", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[1]; PyObject *name; - if (!PyArg_ParseTupleAndKeywords(args, kwargs, "U:getgrnam", _keywords, - &name)) + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, + /*minpos*/ 1, /*maxpos*/ 1, /*minkw*/ 0, /*varpos*/ 0, argsbuf); + if (!args) { + goto exit; + } + if (!PyUnicode_Check(args[0])) { + _PyArg_BadArgument("getgrnam", "argument 'name'", "str", args[0]); goto exit; + } + name = args[0]; return_value = grp_getgrnam_impl(module, name); exit: @@ -82,4 +148,4 @@ grp_getgrall(PyObject *module, PyObject *Py_UNUSED(ignored)) { return grp_getgrall_impl(module); } -/*[clinic end generated code: output=81f180beb67fc585 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=2154194308dab038 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/posixmodule.c.h b/Modules/clinic/posixmodule.c.h index dce0ea100ec435..cd0c4faeac83d1 100644 --- a/Modules/clinic/posixmodule.c.h +++ b/Modules/clinic/posixmodule.c.h @@ -10128,7 +10128,7 @@ os_fpathconf(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (fd < 0) { goto exit; } - if (!conv_path_confname(args[1], &name)) { + if (!conv_confname(module, args[1], &name, "pathconf_names")) { goto exit; } _return_value = os_fpathconf_impl(module, fd, name); @@ -10203,7 +10203,7 @@ os_pathconf(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject if (!path_converter(args[0], &path)) { goto exit; } - if (!conv_path_confname(args[1], &name)) { + if (!conv_confname(module, args[1], &name, "pathconf_names")) { goto exit; } _return_value = os_pathconf_impl(module, &path, name); @@ -10241,7 +10241,7 @@ os_confstr(PyObject *module, PyObject *arg) PyObject *return_value = NULL; int name; - if (!conv_confstr_confname(arg, &name)) { + if (!conv_confname(module, arg, &name, "confstr_names")) { goto exit; } return_value = os_confstr_impl(module, name); @@ -10273,7 +10273,7 @@ os_sysconf(PyObject *module, PyObject *arg) int name; long _return_value; - if (!conv_sysconf_confname(arg, &name)) { + if (!conv_confname(module, arg, &name, "sysconf_names")) { goto exit; } _return_value = 
os_sysconf_impl(module, name); @@ -13114,4 +13114,4 @@ os__create_environ(PyObject *module, PyObject *Py_UNUSED(ignored)) #ifndef OS__SUPPORTS_VIRTUAL_TERMINAL_METHODDEF #define OS__SUPPORTS_VIRTUAL_TERMINAL_METHODDEF #endif /* !defined(OS__SUPPORTS_VIRTUAL_TERMINAL_METHODDEF) */ -/*[clinic end generated code: output=5358a13b4ce6148b input=a9049054013a1b77]*/ +/*[clinic end generated code: output=7ee14f5e880092f5 input=a9049054013a1b77]*/ diff --git a/Modules/getpath.py b/Modules/getpath.py index 1f1bfcb4f64dd4..7949fd813d0d07 100644 --- a/Modules/getpath.py +++ b/Modules/getpath.py @@ -420,7 +420,7 @@ def search_up(prefix, *landmarks, test=isfile): # Only warn if the file actually exists and was unresolvable # Otherwise users who specify a fake executable may get spurious warnings. if isfile(real_executable): - warn(f'Failed to find real location of {base_executable}') + warn(f'Failed to find real location of {real_executable}') if not executable_dir and os_name == 'darwin' and library: # QUIRK: macOS checks adjacent to its library early @@ -640,11 +640,20 @@ def search_up(prefix, *landmarks, test=isfile): # For a venv, update the main prefix/exec_prefix but leave the base ones unchanged -# XXX: We currently do not update prefix here, but it happens in site.py -#if venv_prefix: -# base_prefix = prefix -# base_exec_prefix = exec_prefix -# prefix = exec_prefix = venv_prefix +if venv_prefix: + if not base_prefix: + base_prefix = prefix + if not base_exec_prefix: + base_exec_prefix = exec_prefix + prefix = exec_prefix = venv_prefix + + +# After calculating prefix and exec_prefix, use their values for base_prefix and +# base_exec_prefix if they haven't been set. +if not base_prefix: + base_prefix = prefix +if not base_exec_prefix: + base_exec_prefix = exec_prefix # ****************************************************************************** @@ -679,7 +688,7 @@ def search_up(prefix, *landmarks, test=isfile): # QUIRK: POSIX uses the default prefix when in the build directory pythonpath.append(joinpath(PREFIX, ZIP_LANDMARK)) else: - pythonpath.append(joinpath(prefix, ZIP_LANDMARK)) + pythonpath.append(joinpath(base_prefix, ZIP_LANDMARK)) if os_name == 'nt' and use_environment and winreg: # QUIRK: Windows also lists paths in the registry. Paths are stored @@ -714,13 +723,13 @@ def search_up(prefix, *landmarks, test=isfile): # Then add any entries compiled into the PYTHONPATH macro. if PYTHONPATH: for p in PYTHONPATH.split(DELIM): - pythonpath.append(joinpath(prefix, p)) + pythonpath.append(joinpath(base_prefix, p)) # Then add stdlib_dir and platstdlib_dir - if not stdlib_dir and prefix: - stdlib_dir = joinpath(prefix, STDLIB_SUBDIR) - if not platstdlib_dir and exec_prefix: - platstdlib_dir = joinpath(exec_prefix, PLATSTDLIB_LANDMARK) + if not stdlib_dir and base_prefix: + stdlib_dir = joinpath(base_prefix, STDLIB_SUBDIR) + if not platstdlib_dir and base_exec_prefix: + platstdlib_dir = joinpath(base_exec_prefix, PLATSTDLIB_LANDMARK) if os_name == 'nt': # QUIRK: Windows generates paths differently @@ -750,9 +759,13 @@ def search_up(prefix, *landmarks, test=isfile): # QUIRK: Non-Windows replaces prefix/exec_prefix with defaults when running # in build directory. This happens after pythonpath calculation. +# Virtual environments using the build directory Python still keep their prefix. 
if os_name != 'nt' and build_prefix: - prefix = config.get('prefix') or PREFIX - exec_prefix = config.get('exec_prefix') or EXEC_PREFIX or prefix + if not venv_prefix: + prefix = config.get('prefix') or PREFIX + exec_prefix = config.get('exec_prefix') or EXEC_PREFIX or prefix + base_prefix = config.get('base_prefix') or PREFIX + base_exec_prefix = config.get('base_exec_prefix') or EXEC_PREFIX or base_prefix # ****************************************************************************** @@ -788,8 +801,8 @@ def search_up(prefix, *landmarks, test=isfile): config['base_executable'] = base_executable config['prefix'] = prefix config['exec_prefix'] = exec_prefix -config['base_prefix'] = base_prefix or prefix -config['base_exec_prefix'] = base_exec_prefix or exec_prefix +config['base_prefix'] = base_prefix +config['base_exec_prefix'] = base_exec_prefix config['platlibdir'] = platlibdir # test_embed expects empty strings, not None diff --git a/Modules/grpmodule.c b/Modules/grpmodule.c index f7d3e12f347ec2..29da9936b65504 100644 --- a/Modules/grpmodule.c +++ b/Modules/grpmodule.c @@ -1,9 +1,8 @@ /* UNIX group file access module */ -// Need limited C API version 3.13 for PyMem_RawRealloc() -#include "pyconfig.h" // Py_GIL_DISABLED -#ifndef Py_GIL_DISABLED -#define Py_LIMITED_API 0x030d0000 +// Argument Clinic uses the internal C API +#ifndef Py_BUILD_CORE_BUILTIN +# define Py_BUILD_CORE_MODULE 1 #endif #include "Python.h" @@ -281,23 +280,33 @@ static PyObject * grp_getgrall_impl(PyObject *module) /*[clinic end generated code: output=585dad35e2e763d7 input=d7df76c825c367df]*/ { - PyObject *d; - struct group *p; - - if ((d = PyList_New(0)) == NULL) + PyObject *d = PyList_New(0); + if (d == NULL) { return NULL; + } + + static PyMutex getgrall_mutex = {0}; + PyMutex_Lock(&getgrall_mutex); setgrent(); + + struct group *p; while ((p = getgrent()) != NULL) { + // gh-126316: Don't release the mutex around mkgrent() since + // setgrent()/endgrent() are not reentrant / thread-safe. A deadlock + // is unlikely since mkgrent() should not be able to call arbitrary + // Python code. 
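+            // mkgrent() converts the current C `struct group` record into the
+            // Python grp.struct_group object that gets appended to the result list.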
PyObject *v = mkgrent(module, p); if (v == NULL || PyList_Append(d, v) != 0) { Py_XDECREF(v); - Py_DECREF(d); - endgrent(); - return NULL; + Py_CLEAR(d); + goto done; } Py_DECREF(v); } + +done: endgrent(); + PyMutex_Unlock(&getgrall_mutex); return d; } diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c index da7399de86f213..6eb7054b566e3f 100644 --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -3113,18 +3113,22 @@ class Py_off_t_return_converter(long_return_converter): type = 'Py_off_t' conversion_fn = 'PyLong_FromPy_off_t' -class path_confname_converter(CConverter): +class confname_converter(CConverter): type="int" - converter="conv_path_confname" + converter="conv_confname" -class confstr_confname_converter(path_confname_converter): - converter='conv_confstr_confname' + def converter_init(self, *, table): + self.table = table -class sysconf_confname_converter(path_confname_converter): - converter="conv_sysconf_confname" + def parse_arg(self, argname, displayname, *, limited_capi): + return self.format_code(""" + if (!{converter}(module, {argname}, &{paramname}, "{table}")) {{{{ + goto exit; + }}}} + """, argname=argname, converter=self.converter, table=self.table) [python start generated code]*/ -/*[python end generated code: output=da39a3ee5e6b4b0d input=1860d32584c2a539]*/ +/*[python end generated code: output=da39a3ee5e6b4b0d input=8189d5ae78244626]*/ /*[clinic input] @@ -13547,46 +13551,38 @@ struct constdef { }; static int -conv_confname(PyObject *arg, int *valuep, struct constdef *table, - size_t tablesize) +conv_confname(PyObject *module, PyObject *arg, int *valuep, const char *tablename) { - if (PyLong_Check(arg)) { - int value = PyLong_AsInt(arg); - if (value == -1 && PyErr_Occurred()) - return 0; - *valuep = value; - return 1; - } - else { - /* look up the value in the table using a binary search */ - size_t lo = 0; - size_t mid; - size_t hi = tablesize; - int cmp; - const char *confname; - if (!PyUnicode_Check(arg)) { - PyErr_SetString(PyExc_TypeError, - "configuration names must be strings or integers"); + if (PyUnicode_Check(arg)) { + PyObject *table = PyObject_GetAttrString(module, tablename); + if (table == NULL) { return 0; } - confname = PyUnicode_AsUTF8(arg); - if (confname == NULL) + + arg = PyObject_GetItem(table, arg); + Py_DECREF(table); + if (arg == NULL) { + PyErr_SetString( + PyExc_ValueError, "unrecognized configuration name"); return 0; - while (lo < hi) { - mid = (lo + hi) / 2; - cmp = strcmp(confname, table[mid].name); - if (cmp < 0) - hi = mid; - else if (cmp > 0) - lo = mid + 1; - else { - *valuep = table[mid].value; - return 1; - } } - PyErr_SetString(PyExc_ValueError, "unrecognized configuration name"); - return 0; + } else { + Py_INCREF(arg); // Match the Py_DECREF below. 
} + + int success = 0; + if (!PyLong_Check(arg)) { + PyErr_SetString(PyExc_TypeError, + "configuration names must be strings or integers"); + } else { + int value = PyLong_AsInt(arg); + if (!(value == -1 && PyErr_Occurred())) { + *valuep = value; + success = 1; + } + } + Py_DECREF(arg); + return success; } @@ -13677,14 +13673,6 @@ static struct constdef posix_constants_pathconf[] = { {"PC_TIMESTAMP_RESOLUTION", _PC_TIMESTAMP_RESOLUTION}, #endif }; - -static int -conv_path_confname(PyObject *arg, int *valuep) -{ - return conv_confname(arg, valuep, posix_constants_pathconf, - sizeof(posix_constants_pathconf) - / sizeof(struct constdef)); -} #endif @@ -13693,7 +13681,7 @@ conv_path_confname(PyObject *arg, int *valuep) os.fpathconf -> long fd: fildes - name: path_confname + name: confname(table="pathconf_names") / Return the configuration limit name for the file descriptor fd. @@ -13703,7 +13691,7 @@ If there is no limit, return -1. static long os_fpathconf_impl(PyObject *module, int fd, int name) -/*[clinic end generated code: output=d5b7042425fc3e21 input=5b8d2471cfaae186]*/ +/*[clinic end generated code: output=d5b7042425fc3e21 input=023d44589c9ed6aa]*/ { long limit; @@ -13721,7 +13709,7 @@ os_fpathconf_impl(PyObject *module, int fd, int name) /*[clinic input] os.pathconf -> long path: path_t(allow_fd='PATH_HAVE_FPATHCONF') - name: path_confname + name: confname(table="pathconf_names") Return the configuration limit name for the file or directory path. @@ -13732,7 +13720,7 @@ On some platforms, path may also be specified as an open file descriptor. static long os_pathconf_impl(PyObject *module, path_t *path, int name) -/*[clinic end generated code: output=5bedee35b293a089 input=bc3e2a985af27e5e]*/ +/*[clinic end generated code: output=5bedee35b293a089 input=6f6072f57b10c787]*/ { long limit; @@ -13909,19 +13897,11 @@ static struct constdef posix_constants_confstr[] = { #endif }; -static int -conv_confstr_confname(PyObject *arg, int *valuep) -{ - return conv_confname(arg, valuep, posix_constants_confstr, - sizeof(posix_constants_confstr) - / sizeof(struct constdef)); -} - /*[clinic input] os.confstr - name: confstr_confname + name: confname(table="confstr_names") / Return a string-valued system configuration variable. @@ -13929,7 +13909,7 @@ Return a string-valued system configuration variable. static PyObject * os_confstr_impl(PyObject *module, int name) -/*[clinic end generated code: output=bfb0b1b1e49b9383 input=18fb4d0567242e65]*/ +/*[clinic end generated code: output=bfb0b1b1e49b9383 input=4c6ffca2837ec959]*/ { PyObject *result = NULL; char buffer[255]; @@ -14466,18 +14446,10 @@ static struct constdef posix_constants_sysconf[] = { #endif }; -static int -conv_sysconf_confname(PyObject *arg, int *valuep) -{ - return conv_confname(arg, valuep, posix_constants_sysconf, - sizeof(posix_constants_sysconf) - / sizeof(struct constdef)); -} - /*[clinic input] os.sysconf -> long - name: sysconf_confname + name: confname(table="sysconf_names") / Return an integer-valued system configuration variable. @@ -14485,7 +14457,7 @@ Return an integer-valued system configuration variable. 
static long os_sysconf_impl(PyObject *module, int name) -/*[clinic end generated code: output=3662f945fc0cc756 input=279e3430a33f29e4]*/ +/*[clinic end generated code: output=3662f945fc0cc756 input=930b8f23b5d15086]*/ { long value; @@ -14498,40 +14470,15 @@ os_sysconf_impl(PyObject *module, int name) #endif /* HAVE_SYSCONF */ -/* This code is used to ensure that the tables of configuration value names - * are in sorted order as required by conv_confname(), and also to build - * the exported dictionaries that are used to publish information about the - * names available on the host platform. - * - * Sorting the table at runtime ensures that the table is properly ordered - * when used, even for platforms we're not able to test on. It also makes - * it easier to add additional entries to the tables. - */ - -static int -cmp_constdefs(const void *v1, const void *v2) -{ - const struct constdef *c1 = - (const struct constdef *) v1; - const struct constdef *c2 = - (const struct constdef *) v2; - - return strcmp(c1->name, c2->name); -} - static int setup_confname_table(struct constdef *table, size_t tablesize, const char *tablename, PyObject *module) { - PyObject *d = NULL; - size_t i; - - qsort(table, tablesize, sizeof(struct constdef), cmp_constdefs); - d = PyDict_New(); + PyObject *d = PyDict_New(); if (d == NULL) return -1; - for (i=0; i < tablesize; ++i) { + for (size_t i=0; i < tablesize; ++i) { PyObject *o = PyLong_FromLong(table[i].value); if (o == NULL || PyDict_SetItemString(d, table[i].name, o) == -1) { Py_XDECREF(o); diff --git a/Modules/socketmodule.c b/Modules/socketmodule.c index 2764bd6e2b2a47..9394f1c940bedf 100644 --- a/Modules/socketmodule.c +++ b/Modules/socketmodule.c @@ -810,7 +810,9 @@ internal_select(PySocketSockObject *s, int writing, PyTime_t interval, /* s->sock_timeout is in seconds, timeout in ms */ ms = _PyTime_AsMilliseconds(interval, _PyTime_ROUND_CEILING); - assert(ms <= INT_MAX); + if (ms > INT_MAX) { + ms = INT_MAX; + } /* On some OSes, typically BSD-based ones, the timeout parameter of the poll() syscall, when negative, must be exactly INFTIM, where defined, @@ -822,6 +824,7 @@ internal_select(PySocketSockObject *s, int writing, PyTime_t interval, ms = -1; #endif } + assert(INT_MIN <= ms && ms <= INT_MAX); Py_BEGIN_ALLOW_THREADS; n = poll(&pollfd, 1, (int)ms); @@ -7591,36 +7594,20 @@ socket_exec(PyObject *m) /* */ ADD_INT_MACRO(m, AF_NETLINK); ADD_INT_MACRO(m, NETLINK_ROUTE); -#ifdef NETLINK_SKIP - ADD_INT_MACRO(m, NETLINK_SKIP); -#endif -#ifdef NETLINK_W1 - ADD_INT_MACRO(m, NETLINK_W1); -#endif ADD_INT_MACRO(m, NETLINK_USERSOCK); ADD_INT_MACRO(m, NETLINK_FIREWALL); -#ifdef NETLINK_TCPDIAG - ADD_INT_MACRO(m, NETLINK_TCPDIAG); -#endif #ifdef NETLINK_NFLOG ADD_INT_MACRO(m, NETLINK_NFLOG); #endif #ifdef NETLINK_XFRM ADD_INT_MACRO(m, NETLINK_XFRM); #endif -#ifdef NETLINK_ARPD - ADD_INT_MACRO(m, NETLINK_ARPD); -#endif -#ifdef NETLINK_ROUTE6 - ADD_INT_MACRO(m, NETLINK_ROUTE6); -#endif +#ifdef NETLINK_IP6_FW ADD_INT_MACRO(m, NETLINK_IP6_FW); +#endif #ifdef NETLINK_DNRTMSG ADD_INT_MACRO(m, NETLINK_DNRTMSG); #endif -#ifdef NETLINK_TAPBASE - ADD_INT_MACRO(m, NETLINK_TAPBASE); -#endif #ifdef NETLINK_CRYPTO ADD_INT_MACRO(m, NETLINK_CRYPTO); #endif diff --git a/Objects/bytearrayobject.c b/Objects/bytearrayobject.c index 5a52b2f702ad0b..871f99b6f885ba 100644 --- a/Objects/bytearrayobject.c +++ b/Objects/bytearrayobject.c @@ -52,8 +52,9 @@ bytearray_getbuffer(PyObject *self, Py_buffer *view, int flags) } void *ptr = (void *) PyByteArray_AS_STRING(obj); - /* cannot fail if view 
!= NULL and readonly == 0 */ - (void)PyBuffer_FillInfo(view, (PyObject*)obj, ptr, Py_SIZE(obj), 0, flags); + if (PyBuffer_FillInfo(view, (PyObject*)obj, ptr, Py_SIZE(obj), 0, flags) < 0) { + return -1; + } obj->ob_exports++; return 0; } diff --git a/Objects/bytesobject.c b/Objects/bytesobject.c index ac02cfe7cf01c5..8c7651f0f3aa45 100644 --- a/Objects/bytesobject.c +++ b/Objects/bytesobject.c @@ -3196,6 +3196,7 @@ _PyBytes_Resize(PyObject **pv, Py_ssize_t newsize) #ifdef Py_TRACE_REFS _Py_ForgetReference(v); #endif + _PyReftracerTrack(v, PyRefTracer_DESTROY); *pv = (PyObject *) PyObject_Realloc(v, PyBytesObject_SIZE + newsize); if (*pv == NULL) { diff --git a/Objects/codeobject.c b/Objects/codeobject.c index dba43d5911da95..148350cc4b9195 100644 --- a/Objects/codeobject.c +++ b/Objects/codeobject.c @@ -302,21 +302,32 @@ validate_and_copy_tuple(PyObject *tup) } static int -init_co_cached(PyCodeObject *self) { - if (self->_co_cached == NULL) { - self->_co_cached = PyMem_New(_PyCoCached, 1); - if (self->_co_cached == NULL) { +init_co_cached(PyCodeObject *self) +{ + _PyCoCached *cached = FT_ATOMIC_LOAD_PTR(self->_co_cached); + if (cached != NULL) { + return 0; + } + + Py_BEGIN_CRITICAL_SECTION(self); + cached = self->_co_cached; + if (cached == NULL) { + cached = PyMem_New(_PyCoCached, 1); + if (cached == NULL) { PyErr_NoMemory(); - return -1; } - self->_co_cached->_co_code = NULL; - self->_co_cached->_co_cellvars = NULL; - self->_co_cached->_co_freevars = NULL; - self->_co_cached->_co_varnames = NULL; + else { + cached->_co_code = NULL; + cached->_co_cellvars = NULL; + cached->_co_freevars = NULL; + cached->_co_varnames = NULL; + FT_ATOMIC_STORE_PTR(self->_co_cached, cached); + } } - return 0; - + Py_END_CRITICAL_SECTION(); + return cached != NULL ? 0 : -1; } + /****************** * _PyCode_New() ******************/ @@ -1571,16 +1582,21 @@ get_cached_locals(PyCodeObject *co, PyObject **cached_field, { assert(cached_field != NULL); assert(co->_co_cached != NULL); - if (*cached_field != NULL) { - return Py_NewRef(*cached_field); + PyObject *varnames = FT_ATOMIC_LOAD_PTR(*cached_field); + if (varnames != NULL) { + return Py_NewRef(varnames); } - assert(*cached_field == NULL); - PyObject *varnames = get_localsplus_names(co, kind, num); + + Py_BEGIN_CRITICAL_SECTION(co); + varnames = *cached_field; if (varnames == NULL) { - return NULL; + varnames = get_localsplus_names(co, kind, num); + if (varnames != NULL) { + FT_ATOMIC_STORE_PTR(*cached_field, varnames); + } } - *cached_field = Py_NewRef(varnames); - return varnames; + Py_END_CRITICAL_SECTION(); + return Py_XNewRef(varnames); } PyObject * @@ -1674,18 +1690,26 @@ _PyCode_GetCode(PyCodeObject *co) if (init_co_cached(co)) { return NULL; } - if (co->_co_cached->_co_code != NULL) { - return Py_NewRef(co->_co_cached->_co_code); + + _PyCoCached *cached = co->_co_cached; + PyObject *code = FT_ATOMIC_LOAD_PTR(cached->_co_code); + if (code != NULL) { + return Py_NewRef(code); } - PyObject *code = PyBytes_FromStringAndSize((const char *)_PyCode_CODE(co), - _PyCode_NBYTES(co)); + + Py_BEGIN_CRITICAL_SECTION(co); + code = cached->_co_code; if (code == NULL) { - return NULL; + code = PyBytes_FromStringAndSize((const char *)_PyCode_CODE(co), + _PyCode_NBYTES(co)); + if (code != NULL) { + deopt_code(co, (_Py_CODEUNIT *)PyBytes_AS_STRING(code)); + assert(cached->_co_code == NULL); + FT_ATOMIC_STORE_PTR(cached->_co_code, code); + } } - deopt_code(co, (_Py_CODEUNIT *)PyBytes_AS_STRING(code)); - assert(co->_co_cached->_co_code == NULL); - co->_co_cached->_co_code 
= Py_NewRef(code); - return code; + Py_END_CRITICAL_SECTION(); + return Py_XNewRef(code); } PyObject * @@ -2871,20 +2895,22 @@ get_indices_in_use(PyInterpreterState *interp, struct flag_set *in_use) assert(interp->stoptheworld.world_stopped); assert(in_use->flags == NULL); int32_t max_index = 0; - for (PyThreadState *p = interp->threads.head; p != NULL; p = p->next) { + _Py_FOR_EACH_TSTATE_BEGIN(interp, p) { int32_t idx = ((_PyThreadStateImpl *) p)->tlbc_index; if (idx > max_index) { max_index = idx; } } + _Py_FOR_EACH_TSTATE_END(interp); in_use->size = (size_t) max_index + 1; in_use->flags = PyMem_Calloc(in_use->size, sizeof(*in_use->flags)); if (in_use->flags == NULL) { return -1; } - for (PyThreadState *p = interp->threads.head; p != NULL; p = p->next) { + _Py_FOR_EACH_TSTATE_BEGIN(interp, p) { in_use->flags[((_PyThreadStateImpl *) p)->tlbc_index] = 1; } + _Py_FOR_EACH_TSTATE_END(interp); return 0; } diff --git a/Objects/complexobject.c b/Objects/complexobject.c index 7b4948fc8ebe3d..8fbca3cb02d80a 100644 --- a/Objects/complexobject.c +++ b/Objects/complexobject.c @@ -8,6 +8,7 @@ #include "Python.h" #include "pycore_call.h" // _PyObject_CallNoArgs() #include "pycore_complexobject.h" // _PyComplex_FormatAdvancedWriter() +#include "pycore_floatobject.h" // _Py_convert_int_to_double() #include "pycore_long.h" // _PyLong_GetZero() #include "pycore_object.h" // _PyObject_Init() #include "pycore_pymath.h" // _Py_ADJUST_ERANGE2() @@ -34,6 +35,20 @@ _Py_c_sum(Py_complex a, Py_complex b) return r; } +Py_complex +_Py_cr_sum(Py_complex a, double b) +{ + Py_complex r = a; + r.real += b; + return r; +} + +static inline Py_complex +_Py_rc_sum(double a, Py_complex b) +{ + return _Py_cr_sum(b, a); +} + Py_complex _Py_c_diff(Py_complex a, Py_complex b) { @@ -43,6 +58,23 @@ _Py_c_diff(Py_complex a, Py_complex b) return r; } +Py_complex +_Py_cr_diff(Py_complex a, double b) +{ + Py_complex r = a; + r.real -= b; + return r; +} + +Py_complex +_Py_rc_diff(double a, Py_complex b) +{ + Py_complex r; + r.real = a - b.real; + r.imag = -b.imag; + return r; +} + Py_complex _Py_c_neg(Py_complex a) { @@ -61,6 +93,21 @@ _Py_c_prod(Py_complex a, Py_complex b) return r; } +Py_complex +_Py_cr_prod(Py_complex a, double b) +{ + Py_complex r = a; + r.real *= b; + r.imag *= b; + return r; +} + +static inline Py_complex +_Py_rc_prod(double a, Py_complex b) +{ + return _Py_cr_prod(b, a); +} + /* Avoid bad optimization on Windows ARM64 until the compiler is fixed */ #ifdef _M_ARM64 #pragma optimize("", off) @@ -143,6 +190,64 @@ _Py_c_quot(Py_complex a, Py_complex b) return r; } + +Py_complex +_Py_cr_quot(Py_complex a, double b) +{ + Py_complex r = a; + if (b) { + r.real /= b; + r.imag /= b; + } + else { + errno = EDOM; + r.real = r.imag = 0.0; + } + return r; +} + +/* an equivalent of _Py_c_quot() function, when 1st argument is real */ +Py_complex +_Py_rc_quot(double a, Py_complex b) +{ + Py_complex r; + const double abs_breal = b.real < 0 ? -b.real : b.real; + const double abs_bimag = b.imag < 0 ? 
-b.imag : b.imag; + + if (abs_breal >= abs_bimag) { + if (abs_breal == 0.0) { + errno = EDOM; + r.real = r.imag = 0.0; + } + else { + const double ratio = b.imag / b.real; + const double denom = b.real + b.imag * ratio; + r.real = a / denom; + r.imag = (-a * ratio) / denom; + } + } + else if (abs_bimag >= abs_breal) { + const double ratio = b.real / b.imag; + const double denom = b.real * ratio + b.imag; + assert(b.imag != 0.0); + r.real = (a * ratio) / denom; + r.imag = (-a) / denom; + } + else { + r.real = r.imag = Py_NAN; + } + + if (isnan(r.real) && isnan(r.imag) && isfinite(a) + && (isinf(abs_breal) || isinf(abs_bimag))) + { + const double x = copysign(isinf(b.real) ? 1.0 : 0.0, b.real); + const double y = copysign(isinf(b.imag) ? 1.0 : 0.0, b.imag); + r.real = 0.0 * (a*x); + r.imag = 0.0 * (-a*y); + } + + return r; +} #ifdef _M_ARM64 #pragma optimize("", on) #endif @@ -168,7 +273,7 @@ _Py_c_pow(Py_complex a, Py_complex b) at = atan2(a.imag, a.real); phase = at*b.real; if (b.imag != 0.0) { - len /= exp(at*b.imag); + len *= exp(-at*b.imag); phase += b.imag*log(vabs); } r.real = len*cos(phase); @@ -474,83 +579,90 @@ complex_hash(PyComplexObject *v) } /* This macro may return! */ -#define TO_COMPLEX(obj, c) \ - if (PyComplex_Check(obj)) \ - c = ((PyComplexObject *)(obj))->cval; \ - else if (to_complex(&(obj), &(c)) < 0) \ +#define TO_COMPLEX(obj, c) \ + if (PyComplex_Check(obj)) \ + c = ((PyComplexObject *)(obj))->cval; \ + else if (real_to_complex(&(obj), &(c)) < 0) \ return (obj) static int -to_complex(PyObject **pobj, Py_complex *pc) +real_to_double(PyObject **pobj, double *dbl) { PyObject *obj = *pobj; - pc->real = pc->imag = 0.0; - if (PyLong_Check(obj)) { - pc->real = PyLong_AsDouble(obj); - if (pc->real == -1.0 && PyErr_Occurred()) { - *pobj = NULL; - return -1; - } - return 0; - } if (PyFloat_Check(obj)) { - pc->real = PyFloat_AsDouble(obj); - return 0; + *dbl = PyFloat_AS_DOUBLE(obj); + } + else if (_Py_convert_int_to_double(pobj, dbl) < 0) { + return -1; } - *pobj = Py_NewRef(Py_NotImplemented); - return -1; + return 0; } - -static PyObject * -complex_add(PyObject *v, PyObject *w) +static int +real_to_complex(PyObject **pobj, Py_complex *pc) { - Py_complex result; - Py_complex a, b; - TO_COMPLEX(v, a); - TO_COMPLEX(w, b); - result = _Py_c_sum(a, b); - return PyComplex_FromCComplex(result); + pc->imag = 0.0; + return real_to_double(pobj, &(pc->real)); } -static PyObject * -complex_sub(PyObject *v, PyObject *w) -{ - Py_complex result; - Py_complex a, b; - TO_COMPLEX(v, a); - TO_COMPLEX(w, b); - result = _Py_c_diff(a, b); - return PyComplex_FromCComplex(result); -} +/* Complex arithmetic rules implement special mixed-mode case where combining + a pure-real (float or int) value and a complex value is performed directly + without first coercing the real value to a complex value. -static PyObject * -complex_mul(PyObject *v, PyObject *w) -{ - Py_complex result; - Py_complex a, b; - TO_COMPLEX(v, a); - TO_COMPLEX(w, b); - result = _Py_c_prod(a, b); - return PyComplex_FromCComplex(result); -} + Let us consider the addition as an example, assuming that ints are implicitly + converted to floats. 
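+   For instance, adding the float 1.0 to the complex value 2+3j only touches the
+   real component: the result is complex(1.0 + 2.0, 3.0), and no temporary
+   complex(1.0, 0.0) operand is created.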
We have the following rules (up to variants with changed + order of operands): -static PyObject * -complex_div(PyObject *v, PyObject *w) -{ - Py_complex quot; - Py_complex a, b; - TO_COMPLEX(v, a); - TO_COMPLEX(w, b); - errno = 0; - quot = _Py_c_quot(a, b); - if (errno == EDOM) { - PyErr_SetString(PyExc_ZeroDivisionError, "division by zero"); - return NULL; - } - return PyComplex_FromCComplex(quot); -} + complex(a, b) + complex(c, d) = complex(a + c, b + d) + float(a) + complex(b, c) = complex(a + b, c) + + Similar rules are implemented for subtraction, multiplication and division. + See C11's Annex G, sections G.5.1 and G.5.2. + */ + +#define COMPLEX_BINOP(NAME, FUNC) \ + static PyObject * \ + complex_##NAME(PyObject *v, PyObject *w) \ + { \ + Py_complex a; \ + errno = 0; \ + if (PyComplex_Check(w)) { \ + Py_complex b = ((PyComplexObject *)w)->cval; \ + if (PyComplex_Check(v)) { \ + a = ((PyComplexObject *)v)->cval; \ + a = _Py_c_##FUNC(a, b); \ + } \ + else if (real_to_double(&v, &a.real) < 0) { \ + return v; \ + } \ + else { \ + a = _Py_rc_##FUNC(a.real, b); \ + } \ + } \ + else if (!PyComplex_Check(v)) { \ + Py_RETURN_NOTIMPLEMENTED; \ + } \ + else { \ + a = ((PyComplexObject *)v)->cval; \ + double b; \ + if (real_to_double(&w, &b) < 0) { \ + return w; \ + } \ + a = _Py_cr_##FUNC(a, b); \ + } \ + if (errno == EDOM) { \ + PyErr_SetString(PyExc_ZeroDivisionError, \ + "division by zero"); \ + return NULL; \ + } \ + return PyComplex_FromCComplex(a); \ + } + +COMPLEX_BINOP(add, sum) +COMPLEX_BINOP(mul, prod) +COMPLEX_BINOP(sub, diff) +COMPLEX_BINOP(div, quot) static PyObject * complex_pow(PyObject *v, PyObject *w, PyObject *z) diff --git a/Objects/dictobject.c b/Objects/dictobject.c index 2090008055b7c0..49b213eaa817e2 100644 --- a/Objects/dictobject.c +++ b/Objects/dictobject.c @@ -883,6 +883,7 @@ new_dict(PyInterpreterState *interp, mp->ma_used = used; mp->_ma_watcher_tag = 0; ASSERT_CONSISTENT(mp); + _PyObject_GC_TRACK(mp); return (PyObject *)mp; } @@ -1284,6 +1285,20 @@ ensure_shared_on_resize(PyDictObject *mp) #endif } +static inline void +ensure_shared_on_keys_version_assignment(PyDictObject *mp) +{ + ASSERT_DICT_LOCKED((PyObject *) mp); + #ifdef Py_GIL_DISABLED + if (!IS_DICT_SHARED(mp)) { + // This ensures that a concurrent resize operation will delay + // freeing the old keys or values using QSBR, which is necessary to + // safely allow concurrent reads without locking. 
+ SET_DICT_SHARED(mp); + } + #endif +} + #ifdef Py_GIL_DISABLED static inline Py_ALWAYS_INLINE int @@ -1578,64 +1593,6 @@ _PyDict_HasOnlyStringKeys(PyObject *dict) return 1; } -#define MAINTAIN_TRACKING(mp, key, value) \ - do { \ - if (!_PyObject_GC_IS_TRACKED(mp)) { \ - if (_PyObject_GC_MAY_BE_TRACKED(key) || \ - _PyObject_GC_MAY_BE_TRACKED(value)) { \ - _PyObject_GC_TRACK(mp); \ - } \ - } \ - } while(0) - -void -_PyDict_MaybeUntrack(PyObject *op) -{ - PyDictObject *mp; - PyObject *value; - Py_ssize_t i, numentries; - - ASSERT_WORLD_STOPPED_OR_DICT_LOCKED(op); - - if (!PyDict_CheckExact(op) || !_PyObject_GC_IS_TRACKED(op)) - return; - - mp = (PyDictObject *) op; - ASSERT_CONSISTENT(mp); - numentries = mp->ma_keys->dk_nentries; - if (_PyDict_HasSplitTable(mp)) { - for (i = 0; i < numentries; i++) { - if ((value = mp->ma_values->values[i]) == NULL) - continue; - if (_PyObject_GC_MAY_BE_TRACKED(value)) { - return; - } - } - } - else { - if (DK_IS_UNICODE(mp->ma_keys)) { - PyDictUnicodeEntry *ep0 = DK_UNICODE_ENTRIES(mp->ma_keys); - for (i = 0; i < numentries; i++) { - if ((value = ep0[i].me_value) == NULL) - continue; - if (_PyObject_GC_MAY_BE_TRACKED(value)) - return; - } - } - else { - PyDictKeyEntry *ep0 = DK_ENTRIES(mp->ma_keys); - for (i = 0; i < numentries; i++) { - if ((value = ep0[i].me_value) == NULL) - continue; - if (_PyObject_GC_MAY_BE_TRACKED(value) || - _PyObject_GC_MAY_BE_TRACKED(ep0[i].me_key)) - return; - } - } - } - _PyObject_GC_UNTRACK(op); -} - void _PyDict_EnablePerThreadRefcounting(PyObject *op) { @@ -1701,7 +1658,7 @@ insert_combined_dict(PyInterpreterState *interp, PyDictObject *mp, } _PyDict_NotifyEvent(interp, PyDict_EVENT_ADDED, mp, key, value); - mp->ma_keys->dk_version = 0; + FT_ATOMIC_STORE_UINT32_RELAXED(mp->ma_keys->dk_version, 0); Py_ssize_t hashpos = find_empty_slot(mp->ma_keys, hash); dictkeys_set_index(mp->ma_keys, hashpos, mp->ma_keys->dk_nentries); @@ -1743,7 +1700,7 @@ insert_split_key(PyDictKeysObject *keys, PyObject *key, Py_hash_t hash) ix = unicodekeys_lookup_unicode(keys, key, hash); if (ix == DKIX_EMPTY && keys->dk_usable > 0) { // Insert into new slot - keys->dk_version = 0; + FT_ATOMIC_STORE_UINT32_RELAXED(keys->dk_version, 0); Py_ssize_t hashpos = find_empty_slot(keys, hash); ix = keys->dk_nentries; dictkeys_set_index(keys, hashpos, ix); @@ -1761,7 +1718,6 @@ insert_split_value(PyInterpreterState *interp, PyDictObject *mp, PyObject *key, { assert(PyUnicode_CheckExact(key)); ASSERT_DICT_LOCKED(mp); - MAINTAIN_TRACKING(mp, key, value); PyObject *old_value = mp->ma_values->values[ix]; if (old_value == NULL) { _PyDict_NotifyEvent(interp, PyDict_EVENT_ADDED, mp, key, value); @@ -1818,8 +1774,6 @@ insertdict(PyInterpreterState *interp, PyDictObject *mp, if (ix == DKIX_ERROR) goto Fail; - MAINTAIN_TRACKING(mp, key, value); - if (ix == DKIX_EMPTY) { assert(!_PyDict_HasSplitTable(mp)); /* Insert into new slot. */ @@ -1878,8 +1832,6 @@ insert_to_emptydict(PyInterpreterState *interp, PyDictObject *mp, /* We don't decref Py_EMPTY_KEYS here because it is immortal. 
*/ assert(mp->ma_values == NULL); - MAINTAIN_TRACKING(mp, key, value); - size_t hashpos = (size_t)hash & (PyDict_MINSIZE-1); dictkeys_set_index(newkeys, hashpos, 0); if (unicode) { @@ -2679,7 +2631,7 @@ delitem_common(PyDictObject *mp, Py_hash_t hash, Py_ssize_t ix, ASSERT_CONSISTENT(mp); } else { - mp->ma_keys->dk_version = 0; + FT_ATOMIC_STORE_UINT32_RELAXED(mp->ma_keys->dk_version, 0); dictkeys_set_index(mp->ma_keys, hashpos, DKIX_DUMMY); if (DK_IS_UNICODE(mp->ma_keys)) { PyDictUnicodeEntry *ep = &DK_UNICODE_ENTRIES(mp->ma_keys)[ix]; @@ -4024,8 +3976,7 @@ copy_lock_held(PyObject *o) split_copy->ma_used = mp->ma_used; split_copy->_ma_watcher_tag = 0; dictkeys_incref(mp->ma_keys); - if (_PyObject_GC_IS_TRACKED(mp)) - _PyObject_GC_TRACK(split_copy); + _PyObject_GC_TRACK(split_copy); return (PyObject *)split_copy; } @@ -4060,11 +4011,6 @@ copy_lock_held(PyObject *o) new->ma_used = mp->ma_used; ASSERT_CONSISTENT(new); - if (_PyObject_GC_IS_TRACKED(mp)) { - /* Maintain tracking. */ - _PyObject_GC_TRACK(new); - } - return (PyObject *)new; } @@ -4351,7 +4297,6 @@ dict_setdefault_ref_lock_held(PyObject *d, PyObject *key, PyObject *default_valu } } - MAINTAIN_TRACKING(mp, key, value); STORE_USED(mp, mp->ma_used + 1); assert(mp->ma_keys->dk_usable >= 0); ASSERT_CONSISTENT(mp); @@ -4498,7 +4443,7 @@ dict_popitem_impl(PyDictObject *self) return NULL; } } - self->ma_keys->dk_version = 0; + FT_ATOMIC_STORE_UINT32_RELAXED(self->ma_keys->dk_version, 0); /* Pop last item */ PyObject *key, *value; @@ -4800,16 +4745,8 @@ dict_new(PyTypeObject *type, PyObject *args, PyObject *kwds) d->ma_keys = Py_EMPTY_KEYS; d->ma_values = NULL; ASSERT_CONSISTENT(d); - - if (type != &PyDict_Type) { - // Don't track if a subclass tp_alloc is PyType_GenericAlloc() - if (!_PyObject_GC_IS_TRACKED(d)) { - _PyObject_GC_TRACK(d); - } - } - else { - // _PyType_AllocNoTrack() does not track the created object - assert(!_PyObject_GC_IS_TRACKED(d)); + if (!_PyObject_GC_IS_TRACKED(d)) { + _PyObject_GC_TRACK(d); } return self; } @@ -6746,19 +6683,14 @@ make_dict_from_instance_attributes(PyInterpreterState *interp, { dictkeys_incref(keys); Py_ssize_t used = 0; - Py_ssize_t track = 0; size_t size = shared_keys_usable_size(keys); for (size_t i = 0; i < size; i++) { PyObject *val = values->values[i]; if (val != NULL) { used += 1; - track += _PyObject_GC_MAY_BE_TRACKED(val); } } PyDictObject *res = (PyDictObject *)new_dict(interp, keys, values, used, 0); - if (track && res) { - _PyObject_GC_TRACK(res); - } return res; } @@ -7169,50 +7101,146 @@ set_dict_inline_values(PyObject *obj, PyDictObject *new_dict) } } -int -_PyObject_SetManagedDict(PyObject *obj, PyObject *new_dict) +#ifdef Py_GIL_DISABLED + +// Trys and sets the dictionary for an object in the easy case when our current +// dictionary is either completely not materialized or is a dictionary which +// does not point at the inline values. +static bool +try_set_dict_inline_only_or_other_dict(PyObject *obj, PyObject *new_dict, PyDictObject **cur_dict) +{ + bool replaced = false; + Py_BEGIN_CRITICAL_SECTION(obj); + + PyDictObject *dict = *cur_dict = _PyObject_GetManagedDict(obj); + if (dict == NULL) { + // We only have inline values, we can just completely replace them. 
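+        // (no dict has been materialized for obj yet, so the replacement values
+        // can be copied straight into the object's inline values while the
+        // per-object critical section is held)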
+ set_dict_inline_values(obj, (PyDictObject *)new_dict); + replaced = true; + goto exit_lock; + } + + if (FT_ATOMIC_LOAD_PTR_RELAXED(dict->ma_values) != _PyObject_InlineValues(obj)) { + // We have a materialized dict which doesn't point at the inline values, + // We get to simply swap dictionaries and free the old dictionary. + FT_ATOMIC_STORE_PTR(_PyObject_ManagedDictPointer(obj)->dict, + (PyDictObject *)Py_XNewRef(new_dict)); + replaced = true; + goto exit_lock; + } + else { + // We have inline values, we need to lock the dict and the object + // at the same time to safely dematerialize them. To do that while releasing + // the object lock we need a strong reference to the current dictionary. + Py_INCREF(dict); + } +exit_lock: + Py_END_CRITICAL_SECTION(); + return replaced; +} + +// Replaces a dictionary that is probably the dictionary which has been +// materialized and points at the inline values. We could have raced +// and replaced it with another dictionary though. +static int +replace_dict_probably_inline_materialized(PyObject *obj, PyDictObject *inline_dict, + PyDictObject *cur_dict, PyObject *new_dict) +{ + _Py_CRITICAL_SECTION_ASSERT_OBJECT_LOCKED(obj); + + if (cur_dict == inline_dict) { + assert(FT_ATOMIC_LOAD_PTR_RELAXED(inline_dict->ma_values) == _PyObject_InlineValues(obj)); + + int err = _PyDict_DetachFromObject(inline_dict, obj); + if (err != 0) { + assert(new_dict == NULL); + return err; + } + } + + FT_ATOMIC_STORE_PTR(_PyObject_ManagedDictPointer(obj)->dict, + (PyDictObject *)Py_XNewRef(new_dict)); + return 0; +} + +#endif + +static void +decref_maybe_delay(PyObject *obj, bool delay) +{ + if (delay) { + _PyObject_XDecRefDelayed(obj); + } + else { + Py_XDECREF(obj); + } +} + +static int +set_or_clear_managed_dict(PyObject *obj, PyObject *new_dict, bool clear) { assert(Py_TYPE(obj)->tp_flags & Py_TPFLAGS_MANAGED_DICT); +#ifndef NDEBUG + Py_BEGIN_CRITICAL_SECTION(obj); assert(_PyObject_InlineValuesConsistencyCheck(obj)); + Py_END_CRITICAL_SECTION(); +#endif int err = 0; PyTypeObject *tp = Py_TYPE(obj); if (tp->tp_flags & Py_TPFLAGS_INLINE_VALUES) { - PyDictObject *dict = _PyObject_GetManagedDict(obj); - if (dict == NULL) { #ifdef Py_GIL_DISABLED - Py_BEGIN_CRITICAL_SECTION(obj); + PyDictObject *prev_dict; + if (!try_set_dict_inline_only_or_other_dict(obj, new_dict, &prev_dict)) { + // We had a materialized dictionary which pointed at the inline + // values. We need to lock both the object and the dict at the + // same time to safely replace it. We can't merely lock the dictionary + // while the object is locked because it could suspend the object lock. + PyDictObject *cur_dict; - dict = _PyObject_ManagedDictPointer(obj)->dict; - if (dict == NULL) { - set_dict_inline_values(obj, (PyDictObject *)new_dict); - } + assert(prev_dict != NULL); + Py_BEGIN_CRITICAL_SECTION2(obj, prev_dict); - Py_END_CRITICAL_SECTION(); + // We could have had another thread race in between the call to + // try_set_dict_inline_only_or_other_dict where we locked the object + // and when we unlocked and re-locked the dictionary. 
+ cur_dict = _PyObject_GetManagedDict(obj); - if (dict == NULL) { - return 0; + err = replace_dict_probably_inline_materialized(obj, prev_dict, + cur_dict, new_dict); + + Py_END_CRITICAL_SECTION2(); + + // Decref for the dictionary we incref'd in try_set_dict_inline_only_or_other_dict + // while the object was locked + decref_maybe_delay((PyObject *)prev_dict, + !clear && prev_dict != cur_dict); + if (err != 0) { + return err; } -#else - set_dict_inline_values(obj, (PyDictObject *)new_dict); - return 0; -#endif - } - Py_BEGIN_CRITICAL_SECTION2(dict, obj); + prev_dict = cur_dict; + } - // We've locked dict, but the actual dict could have changed - // since we locked it. - dict = _PyObject_ManagedDictPointer(obj)->dict; - err = _PyDict_DetachFromObject(dict, obj); - if (err == 0) { - FT_ATOMIC_STORE_PTR(_PyObject_ManagedDictPointer(obj)->dict, - (PyDictObject *)Py_XNewRef(new_dict)); + if (prev_dict != NULL) { + // decref for the dictionary that we replaced + decref_maybe_delay((PyObject *)prev_dict, !clear); } - Py_END_CRITICAL_SECTION2(); - if (err == 0) { - Py_XDECREF(dict); + return 0; +#else + PyDictObject *dict = _PyObject_GetManagedDict(obj); + if (dict == NULL) { + set_dict_inline_values(obj, (PyDictObject *)new_dict); + return 0; } + if (_PyDict_DetachFromObject(dict, obj) == 0) { + _PyObject_ManagedDictPointer(obj)->dict = (PyDictObject *)Py_XNewRef(new_dict); + Py_DECREF(dict); + return 0; + } + assert(new_dict == NULL); + return -1; +#endif } else { PyDictObject *dict; @@ -7225,18 +7253,37 @@ _PyObject_SetManagedDict(PyObject *obj, PyObject *new_dict) (PyDictObject *)Py_XNewRef(new_dict)); Py_END_CRITICAL_SECTION(); - - Py_XDECREF(dict); + decref_maybe_delay((PyObject *)dict, !clear); } assert(_PyObject_InlineValuesConsistencyCheck(obj)); return err; } +int +_PyObject_SetManagedDict(PyObject *obj, PyObject *new_dict) +{ + return set_or_clear_managed_dict(obj, new_dict, false); +} + void PyObject_ClearManagedDict(PyObject *obj) { - if (_PyObject_SetManagedDict(obj, NULL) < 0) { + if (set_or_clear_managed_dict(obj, NULL, true) < 0) { + /* Must be out of memory */ + assert(PyErr_Occurred() == PyExc_MemoryError); PyErr_WriteUnraisable(NULL); + /* Clear the dict */ + PyDictObject *dict = _PyObject_GetManagedDict(obj); + Py_BEGIN_CRITICAL_SECTION2(dict, obj); + dict = _PyObject_ManagedDictPointer(obj)->dict; + PyInterpreterState *interp = _PyInterpreterState_GET(); + PyDictKeysObject *oldkeys = dict->ma_keys; + set_keys(dict, Py_EMPTY_KEYS); + dict->ma_values = NULL; + dictkeys_decref(interp, oldkeys, IS_DICT_SHARED(dict)); + STORE_USED(dict, 0); + set_dict_inline_values(obj, NULL); + Py_END_CRITICAL_SECTION2(); } } @@ -7261,12 +7308,6 @@ _PyDict_DetachFromObject(PyDictObject *mp, PyObject *obj) PyDictValues *values = copy_values(mp->ma_values); if (values == NULL) { - /* Out of memory. 
Clear the dict */ - PyInterpreterState *interp = _PyInterpreterState_GET(); - PyDictKeysObject *oldkeys = mp->ma_keys; - set_keys(mp, Py_EMPTY_KEYS); - dictkeys_decref(interp, oldkeys, IS_DICT_SHARED(mp)); - STORE_USED(mp, 0); PyErr_NoMemory(); return -1; } @@ -7390,20 +7431,54 @@ _PyDictKeys_DecRef(PyDictKeysObject *keys) dictkeys_decref(interp, keys, false); } -uint32_t _PyDictKeys_GetVersionForCurrentState(PyInterpreterState *interp, - PyDictKeysObject *dictkeys) +static inline uint32_t +get_next_dict_keys_version(PyInterpreterState *interp) { - if (dictkeys->dk_version != 0) { - return dictkeys->dk_version; - } +#ifdef Py_GIL_DISABLED + uint32_t v; + do { + v = _Py_atomic_load_uint32_relaxed( + &interp->dict_state.next_keys_version); + if (v == 0) { + return 0; + } + } while (!_Py_atomic_compare_exchange_uint32( + &interp->dict_state.next_keys_version, &v, v + 1)); +#else if (interp->dict_state.next_keys_version == 0) { return 0; } uint32_t v = interp->dict_state.next_keys_version++; - dictkeys->dk_version = v; +#endif return v; } +// In free-threaded builds the caller must ensure that the keys object is not +// being mutated concurrently by another thread. +uint32_t +_PyDictKeys_GetVersionForCurrentState(PyInterpreterState *interp, + PyDictKeysObject *dictkeys) +{ + uint32_t dk_version = FT_ATOMIC_LOAD_UINT32_RELAXED(dictkeys->dk_version); + if (dk_version != 0) { + return dk_version; + } + dk_version = get_next_dict_keys_version(interp); + FT_ATOMIC_STORE_UINT32_RELAXED(dictkeys->dk_version, dk_version); + return dk_version; +} + +uint32_t +_PyDict_GetKeysVersionForCurrentState(PyInterpreterState *interp, + PyDictObject *dict) +{ + ASSERT_DICT_LOCKED((PyObject *) dict); + uint32_t dk_version = + _PyDictKeys_GetVersionForCurrentState(interp, dict->ma_keys); + ensure_shared_on_keys_version_assignment(dict); + return dk_version; +} + static inline int validate_watcher_id(PyInterpreterState *interp, int watcher_id) { diff --git a/Objects/floatobject.c b/Objects/floatobject.c index f00b6a6b4b2bdc..bcc77287454768 100644 --- a/Objects/floatobject.c +++ b/Objects/floatobject.c @@ -341,16 +341,16 @@ PyFloat_AsDouble(PyObject *op) obj is not of float or int type, Py_NotImplemented is incref'ed, stored in obj, and returned from the function invoking this macro. */ -#define CONVERT_TO_DOUBLE(obj, dbl) \ - if (PyFloat_Check(obj)) \ - dbl = PyFloat_AS_DOUBLE(obj); \ - else if (convert_to_double(&(obj), &(dbl)) < 0) \ +#define CONVERT_TO_DOUBLE(obj, dbl) \ + if (PyFloat_Check(obj)) \ + dbl = PyFloat_AS_DOUBLE(obj); \ + else if (_Py_convert_int_to_double(&(obj), &(dbl)) < 0) \ return obj; /* Methods */ -static int -convert_to_double(PyObject **v, double *dbl) +int +_Py_convert_int_to_double(PyObject **v, double *dbl) { PyObject *obj = *v; diff --git a/Objects/funcobject.c b/Objects/funcobject.c index 1f2387f68440aa..4ba47285f7152f 100644 --- a/Objects/funcobject.c +++ b/Objects/funcobject.c @@ -289,12 +289,14 @@ functions is running. 
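/*
 * The lock-free dict-keys version allocation above (get_next_dict_keys_version)
 * boils down to a compare-and-swap loop on a shared counter. This standalone
 * sketch uses C11 atomics instead of CPython's _Py_atomic_* wrappers, which are
 * assumed to have equivalent semantics; a counter value of 0 means exhausted.
 */
#include <stdatomic.h>
#include <stdint.h>

static uint32_t
next_keys_version(_Atomic uint32_t *counter)
{
    uint32_t v = atomic_load_explicit(counter, memory_order_relaxed);
    do {
        if (v == 0) {
            return 0;                     /* no versions left to hand out */
        }
        /* on CAS failure, `v` is refreshed with the current counter value */
    } while (!atomic_compare_exchange_weak(counter, &v, v + 1));
    return v;                             /* this caller owns version v */
}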
*/ +#ifndef Py_GIL_DISABLED static inline struct _func_version_cache_item * get_cache_item(PyInterpreterState *interp, uint32_t version) { return interp->func_state.func_version_cache + (version % FUNC_VERSION_CACHE_SIZE); } +#endif void _PyFunction_SetVersion(PyFunctionObject *func, uint32_t version) diff --git a/Objects/genobject.c b/Objects/genobject.c index 19c2c4e3331a89..e87f199c2504ba 100644 --- a/Objects/genobject.c +++ b/Objects/genobject.c @@ -471,14 +471,14 @@ _gen_throw(PyGenObject *gen, int close_on_genexit, return gen_send_ex(gen, Py_None, 1, 0); goto throw_here; } + PyThreadState *tstate = _PyThreadState_GET(); + assert(tstate != NULL); if (PyGen_CheckExact(yf) || PyCoro_CheckExact(yf)) { /* `yf` is a generator or a coroutine. */ - PyThreadState *tstate = _PyThreadState_GET(); - /* Since we are fast-tracking things by skipping the eval loop, - we need to update the current frame so the stack trace - will be reported correctly to the user. */ - /* XXX We should probably be updating the current frame - somewhere in ceval.c. */ + + /* Link frame into the stack to enable complete backtraces. */ + /* XXX We should probably be updating the current frame somewhere in + ceval.c. */ _PyInterpreterFrame *prev = tstate->current_frame; frame->previous = prev; tstate->current_frame = frame; @@ -502,10 +502,16 @@ _gen_throw(PyGenObject *gen, int close_on_genexit, Py_DECREF(yf); goto throw_here; } + + _PyInterpreterFrame *prev = tstate->current_frame; + frame->previous = prev; + tstate->current_frame = frame; PyFrameState state = gen->gi_frame_state; gen->gi_frame_state = FRAME_EXECUTING; ret = PyObject_CallFunctionObjArgs(meth, typ, val, tb, NULL); gen->gi_frame_state = state; + tstate->current_frame = prev; + frame->previous = NULL; Py_DECREF(meth); } Py_DECREF(yf); diff --git a/Objects/lnotab_notes.txt b/Objects/lnotab_notes.txt index 0f3599340318f0..335e441cfded3d 100644 --- a/Objects/lnotab_notes.txt +++ b/Objects/lnotab_notes.txt @@ -1,7 +1,7 @@ Description of the internal format of the line number table in Python 3.10 and earlier. -(For 3.11 onwards, see Objects/locations.md) +(For 3.11 onwards, see InternalDocs/code_objects.md) Conceptually, the line number table consists of a sequence of triples: start-offset (inclusive), end-offset (exclusive), line-number. diff --git a/Objects/moduleobject.c b/Objects/moduleobject.c index 535b0d068f064f..a8d64c9aefae6b 100644 --- a/Objects/moduleobject.c +++ b/Objects/moduleobject.c @@ -107,8 +107,6 @@ static void track_module(PyModuleObject *m) { _PyDict_EnablePerThreadRefcounting(m->md_dict); - PyObject_GC_Track(m->md_dict); - _PyObject_SetDeferredRefcount((PyObject *)m); PyObject_GC_Track(m); } diff --git a/Objects/object.c b/Objects/object.c index 052dea9ad1feff..8868fa29066404 100644 --- a/Objects/object.c +++ b/Objects/object.c @@ -119,7 +119,7 @@ get_reftotal(PyInterpreterState *interp) since we can't determine which interpreter updated it. 
*/ Py_ssize_t total = REFTOTAL(interp); #ifdef Py_GIL_DISABLED - for (PyThreadState *p = interp->threads.head; p != NULL; p = p->next) { + _Py_FOR_EACH_TSTATE_UNLOCKED(interp, p) { /* This may race with other threads modifications to their reftotal */ _PyThreadStateImpl *tstate_impl = (_PyThreadStateImpl *)p; total += _Py_atomic_load_ssize_relaxed(&tstate_impl->reftotal); @@ -2457,11 +2457,7 @@ new_reference(PyObject *op) #ifdef Py_TRACE_REFS _Py_AddToAllObjects(op); #endif - struct _reftracer_runtime_state *tracer = &_PyRuntime.ref_tracer; - if (tracer->tracer_func != NULL) { - void* data = tracer->tracer_data; - tracer->tracer_func(op, PyRefTracer_CREATE, data); - } + _PyReftracerTrack(op, PyRefTracer_CREATE); } void @@ -2554,10 +2550,6 @@ _Py_ResurrectReference(PyObject *op) #ifdef Py_TRACE_REFS _Py_AddToAllObjects(op); #endif - if (_PyRuntime.ref_tracer.tracer_func != NULL) { - void* data = _PyRuntime.ref_tracer.tracer_data; - _PyRuntime.ref_tracer.tracer_func(op, PyRefTracer_CREATE, data); - } } @@ -2947,15 +2939,10 @@ _Py_Dealloc(PyObject *op) Py_INCREF(type); #endif - struct _reftracer_runtime_state *tracer = &_PyRuntime.ref_tracer; - if (tracer->tracer_func != NULL) { - void* data = tracer->tracer_data; - tracer->tracer_func(op, PyRefTracer_DESTROY, data); - } - #ifdef Py_TRACE_REFS _Py_ForgetReference(op); #endif + _PyReftracerTrack(op, PyRefTracer_DESTROY); (*dealloc)(op); #ifdef Py_DEBUG diff --git a/Objects/obmalloc.c b/Objects/obmalloc.c index dfeccfa4dd7658..2cc0377f68f990 100644 --- a/Objects/obmalloc.c +++ b/Objects/obmalloc.c @@ -1093,10 +1093,24 @@ struct _mem_work_chunk { }; static void -free_work_item(uintptr_t ptr) +free_work_item(uintptr_t ptr, delayed_dealloc_cb cb, void *state) { if (ptr & 0x01) { - PyObject_Free((char *)(ptr - 1)); + PyObject *obj = (PyObject *)(ptr - 1); +#ifdef Py_GIL_DISABLED + if (cb == NULL) { + assert(!_PyInterpreterState_GET()->stoptheworld.world_stopped); + Py_DECREF(obj); + return; + } + + Py_ssize_t refcount = _Py_ExplicitMergeRefcount(obj, -1); + if (refcount == 0) { + cb(obj, state); + } +#else + Py_DECREF(obj); +#endif } else { PyMem_Free((void *)ptr); @@ -1107,7 +1121,7 @@ static void free_delayed(uintptr_t ptr) { #ifndef Py_GIL_DISABLED - free_work_item(ptr); + free_work_item(ptr, NULL, NULL); #else PyInterpreterState *interp = _PyInterpreterState_GET(); if (_PyInterpreterState_GetFinalizing(interp) != NULL || @@ -1115,7 +1129,8 @@ free_delayed(uintptr_t ptr) { // Free immediately during interpreter shutdown or if the world is // stopped. 
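/*
 * The _PyReftracerTrack() calls introduced above funnel object creation and
 * destruction into whatever tracer was registered through the public
 * PyRefTracer API (3.13+). A hedged sketch of such a tracer; the live-object
 * counter is purely illustrative.
 */
#include <Python.h>

static Py_ssize_t live_objects = 0;

static int
count_tracer(PyObject *obj, PyRefTracerEvent event, void *data)
{
    (void)obj;
    (void)data;
    if (event == PyRefTracer_CREATE) {
        live_objects++;
    }
    else if (event == PyRefTracer_DESTROY) {
        live_objects--;
    }
    return 0;
}

/* Registered once at startup: PyRefTracer_SetTracer(count_tracer, NULL); */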
- free_work_item(ptr); + assert(!interp->stoptheworld.world_stopped || !(ptr & 0x01)); + free_work_item(ptr, NULL, NULL); return; } @@ -1142,7 +1157,8 @@ free_delayed(uintptr_t ptr) if (buf == NULL) { // failed to allocate a buffer, free immediately _PyEval_StopTheWorld(tstate->base.interp); - free_work_item(ptr); + // TODO: Fix me + free_work_item(ptr, NULL, NULL); _PyEval_StartTheWorld(tstate->base.interp); return; } @@ -1166,12 +1182,16 @@ _PyMem_FreeDelayed(void *ptr) free_delayed((uintptr_t)ptr); } +#ifdef Py_GIL_DISABLED void -_PyObject_FreeDelayed(void *ptr) +_PyObject_XDecRefDelayed(PyObject *ptr) { assert(!((uintptr_t)ptr & 0x01)); - free_delayed(((uintptr_t)ptr)|0x01); + if (ptr != NULL) { + free_delayed(((uintptr_t)ptr)|0x01); + } } +#endif static struct _mem_work_chunk * work_queue_first(struct llist_node *head) @@ -1181,7 +1201,7 @@ work_queue_first(struct llist_node *head) static void process_queue(struct llist_node *head, struct _qsbr_thread_state *qsbr, - bool keep_empty) + bool keep_empty, delayed_dealloc_cb cb, void *state) { while (!llist_empty(head)) { struct _mem_work_chunk *buf = work_queue_first(head); @@ -1192,7 +1212,7 @@ process_queue(struct llist_node *head, struct _qsbr_thread_state *qsbr, return; } - free_work_item(item->ptr); + free_work_item(item->ptr, cb, state); buf->rd_idx++; } @@ -1210,7 +1230,8 @@ process_queue(struct llist_node *head, struct _qsbr_thread_state *qsbr, static void process_interp_queue(struct _Py_mem_interp_free_queue *queue, - struct _qsbr_thread_state *qsbr) + struct _qsbr_thread_state *qsbr, delayed_dealloc_cb cb, + void *state) { if (!_Py_atomic_load_int_relaxed(&queue->has_work)) { return; @@ -1218,7 +1239,7 @@ process_interp_queue(struct _Py_mem_interp_free_queue *queue, // Try to acquire the lock, but don't block if it's already held. if (_PyMutex_LockTimed(&queue->mutex, 0, 0) == PY_LOCK_ACQUIRED) { - process_queue(&queue->head, qsbr, false); + process_queue(&queue->head, qsbr, false, cb, state); int more_work = !llist_empty(&queue->head); _Py_atomic_store_int_relaxed(&queue->has_work, more_work); @@ -1234,10 +1255,23 @@ _PyMem_ProcessDelayed(PyThreadState *tstate) _PyThreadStateImpl *tstate_impl = (_PyThreadStateImpl *)tstate; // Process thread-local work - process_queue(&tstate_impl->mem_free_queue, tstate_impl->qsbr, true); + process_queue(&tstate_impl->mem_free_queue, tstate_impl->qsbr, true, NULL, NULL); + + // Process shared interpreter work + process_interp_queue(&interp->mem_free_queue, tstate_impl->qsbr, NULL, NULL); +} + +void +_PyMem_ProcessDelayedNoDealloc(PyThreadState *tstate, delayed_dealloc_cb cb, void *state) +{ + PyInterpreterState *interp = tstate->interp; + _PyThreadStateImpl *tstate_impl = (_PyThreadStateImpl *)tstate; + + // Process thread-local work + process_queue(&tstate_impl->mem_free_queue, tstate_impl->qsbr, true, cb, state); // Process shared interpreter work - process_interp_queue(&interp->mem_free_queue, tstate_impl->qsbr); + process_interp_queue(&interp->mem_free_queue, tstate_impl->qsbr, cb, state); } void @@ -1279,7 +1313,7 @@ _PyMem_FiniDelayed(PyInterpreterState *interp) // Free the remaining items immediately. There should be no other // threads accessing the memory at this point during shutdown. 
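/*
 * Simplified model of the tagged work items processed above: the low pointer
 * bit distinguishes "object to decref" from "raw memory to free", and a
 * non-NULL callback lets the caller (e.g. the GC) take ownership of objects
 * instead of running their deallocator inline. The refcount-merging step of
 * the real free_work_item() is deliberately omitted; names are illustrative.
 */
#include <Python.h>
#include <stdint.h>

typedef void (*dealloc_cb)(PyObject *obj, void *state);

static void
handle_work_item(uintptr_t ptr, dealloc_cb cb, void *state)
{
    if (ptr & 0x01) {                         /* tagged: a PyObject */
        PyObject *obj = (PyObject *)(ptr - 1);
        if (cb == NULL) {
            Py_DECREF(obj);                   /* ordinary delayed decref */
        }
        else {
            cb(obj, state);                   /* hand the object to the caller */
        }
    }
    else {
        PyMem_Free((void *)ptr);              /* untagged: plain allocation */
    }
}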
struct _mem_work_item *item = &buf->array[buf->rd_idx]; - free_work_item(item->ptr); + free_work_item(item->ptr, NULL, NULL); buf->rd_idx++; } @@ -1405,7 +1439,7 @@ get_mimalloc_allocated_blocks(PyInterpreterState *interp) { size_t allocated_blocks = 0; #ifdef Py_GIL_DISABLED - for (PyThreadState *t = interp->threads.head; t != NULL; t = t->next) { + _Py_FOR_EACH_TSTATE_UNLOCKED(interp, t) { _PyThreadStateImpl *tstate = (_PyThreadStateImpl *)t; for (int i = 0; i < _Py_MIMALLOC_HEAP_COUNT; i++) { mi_heap_t *heap = &tstate->mimalloc.heaps[i]; diff --git a/Objects/tupleobject.c b/Objects/tupleobject.c index 193914d54bd90e..49977726eadca9 100644 --- a/Objects/tupleobject.c +++ b/Objects/tupleobject.c @@ -966,6 +966,7 @@ _PyTuple_Resize(PyObject **pv, Py_ssize_t newsize) for (i = newsize; i < oldsize; i++) { Py_CLEAR(v->ob_item[i]); } + _PyReftracerTrack((PyObject *)v, PyRefTracer_DESTROY); sv = PyObject_GC_Resize(PyTupleObject, v, newsize); if (sv == NULL) { *pv = NULL; diff --git a/Objects/typeobject.c b/Objects/typeobject.c index a6cf3da542b691..2611404a3d0d61 100644 --- a/Objects/typeobject.c +++ b/Objects/typeobject.c @@ -5645,6 +5645,24 @@ _PyType_SetFlags(PyTypeObject *self, unsigned long mask, unsigned long flags) END_TYPE_LOCK(); } +int +_PyType_Validate(PyTypeObject *ty, _py_validate_type validate, unsigned int *tp_version) +{ + int err; + BEGIN_TYPE_LOCK(); + err = validate(ty); + if (!err) { + if(assign_version_tag(_PyInterpreterState_GET(), ty)) { + *tp_version = ty->tp_version_tag; + } + else { + err = -1; + } + } + END_TYPE_LOCK(); + return err; +} + static void set_flags_recursive(PyTypeObject *self, unsigned long mask, unsigned long flags) { @@ -9314,13 +9332,13 @@ wrap_buffer(PyObject *self, PyObject *args, void *wrapped) if (flags == -1 && PyErr_Occurred()) { return NULL; } - if (flags > INT_MAX) { + if (flags > INT_MAX || flags < INT_MIN) { PyErr_SetString(PyExc_OverflowError, - "buffer flags too large"); + "buffer flags out of range"); return NULL; } - return _PyMemoryView_FromBufferProc(self, Py_SAFE_DOWNCAST(flags, Py_ssize_t, int), + return _PyMemoryView_FromBufferProc(self, (int)flags, (getbufferproc)wrapped); } diff --git a/Objects/typevarobject.c b/Objects/typevarobject.c index bacb858978c5d7..4ed40aa71a595e 100644 --- a/Objects/typevarobject.c +++ b/Objects/typevarobject.c @@ -1,6 +1,6 @@ // TypeVar, TypeVarTuple, and ParamSpec #include "Python.h" -#include "pycore_object.h" // _PyObject_GC_TRACK/UNTRACK +#include "pycore_object.h" // _PyObject_GC_TRACK/UNTRACK, PyAnnotateFormat #include "pycore_typevarobject.h" #include "pycore_unionobject.h" // _Py_union_type_or @@ -168,7 +168,7 @@ constevaluator_call(PyObject *self, PyObject *args, PyObject *kwargs) return NULL; } PyObject *value = ((constevaluatorobject *)self)->value; - if (format == 3) { // STRING + if (format == _Py_ANNOTATE_FORMAT_STRING) { PyUnicodeWriter *writer = PyUnicodeWriter_Create(5); // cannot be <5 if (writer == NULL) { return NULL; diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c index 9cd9781e412524..ab4f07ed054385 100644 --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -1129,6 +1129,7 @@ resize_compact(PyObject *unicode, Py_ssize_t length) #ifdef Py_TRACE_REFS _Py_ForgetReference(unicode); #endif + _PyReftracerTrack(unicode, PyRefTracer_DESTROY); new_unicode = (PyObject *)PyObject_Realloc(unicode, new_size); if (new_unicode == NULL) { @@ -4978,39 +4979,228 @@ PyUnicode_DecodeUTF8(const char *s, #include "stringlib/codecs.h" #include "stringlib/undef.h" +#if 
(SIZEOF_SIZE_T == 8) /* Mask to quickly check whether a C 'size_t' contains a non-ASCII, UTF8-encoded char. */ -#if (SIZEOF_SIZE_T == 8) # define ASCII_CHAR_MASK 0x8080808080808080ULL +// used to count codepoints in UTF-8 string. +# define VECTOR_0101 0x0101010101010101ULL +# define VECTOR_00FF 0x00ff00ff00ff00ffULL #elif (SIZEOF_SIZE_T == 4) # define ASCII_CHAR_MASK 0x80808080U +# define VECTOR_0101 0x01010101U +# define VECTOR_00FF 0x00ff00ffU #else # error C 'size_t' size should be either 4 or 8! #endif +#if (defined(__clang__) || defined(__GNUC__)) +#define HAVE_CTZ 1 +static inline unsigned int +ctz(size_t v) +{ + return __builtin_ctzll((unsigned long long)v); +} +#elif defined(_MSC_VER) +#define HAVE_CTZ 1 +static inline unsigned int +ctz(size_t v) +{ + unsigned long pos; +#if SIZEOF_SIZE_T == 4 + _BitScanForward(&pos, v); +#else + _BitScanForward64(&pos, v); +#endif /* SIZEOF_SIZE_T */ + return pos; +} +#endif + +#if HAVE_CTZ +// load p[0]..p[size-1] as a little-endian size_t +// without unaligned access nor read ahead. +static size_t +load_unaligned(const unsigned char *p, size_t size) +{ + assert(size <= SIZEOF_SIZE_T); + union { + size_t s; + unsigned char b[SIZEOF_SIZE_T]; + } u; + u.s = 0; + switch (size) { + case 8: + u.b[7] = p[7]; + _Py_FALLTHROUGH; + case 7: + u.b[6] = p[6]; + _Py_FALLTHROUGH; + case 6: + u.b[5] = p[5]; + _Py_FALLTHROUGH; + case 5: + u.b[4] = p[4]; + _Py_FALLTHROUGH; + case 4: + u.b[3] = p[3]; + _Py_FALLTHROUGH; + case 3: + u.b[2] = p[2]; + _Py_FALLTHROUGH; + case 2: + u.b[1] = p[1]; + _Py_FALLTHROUGH; + case 1: + u.b[0] = p[0]; + break; + case 0: + break; + default: + Py_UNREACHABLE(); + } + return u.s; +} +#endif + +/* + * Find the first non-ASCII character in a byte sequence. + * + * This function scans a range of bytes from `start` to `end` and returns the + * index of the first byte that is not an ASCII character (i.e., has the most + * significant bit set). If all characters in the range are ASCII, it returns + * `end - start`. + */ static Py_ssize_t -ascii_decode(const char *start, const char *end, Py_UCS1 *dest) +find_first_nonascii(const unsigned char *start, const unsigned char *end) { - const char *p = start; + const unsigned char *p = start; + if (end - start >= SIZEOF_SIZE_T) { + const unsigned char *p2 = _Py_ALIGN_UP(p, SIZEOF_SIZE_T); + if (p < p2) { +#if HAVE_CTZ +#if defined(_M_AMD64) || defined(_M_IX86) || defined(__x86_64__) || defined(__i386__) + // x86 and amd64 are little endian and can load unaligned memory. + size_t u = *(const size_t*)p & ASCII_CHAR_MASK; +#else + size_t u = load_unaligned(p, p2 - p) & ASCII_CHAR_MASK; +#endif + if (u) { + return p - start + (ctz(u) - 7) / 8; + } + p = p2; + } +#else + while (p < p2) { + if (*p & 0x80) { + return p - start; + } + p++; + } +#endif + const unsigned char *e = end - SIZEOF_SIZE_T; + while (p <= e) { + size_t u = (*(const size_t *)p) & ASCII_CHAR_MASK; + if (u) { +#if PY_LITTLE_ENDIAN && HAVE_CTZ + return p - start + (ctz(u) - 7) / 8; +#else + // big endian and minor compilers are difficult to test. + // fallback to per byte check. + break; +#endif + } + p += SIZEOF_SIZE_T; + } + } +#if HAVE_CTZ + // we can not use *(const size_t*)p to avoid buffer overrun. 
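/*
 * The ASCII_CHAR_MASK trick above tests SIZEOF_SIZE_T bytes at a time: a byte
 * is non-ASCII exactly when its top bit is set, and on a little-endian build
 * the index of the first such byte falls out of a count-trailing-zeros.
 * Standalone illustration for a 64-bit little-endian target, using the same
 * GCC/Clang builtin as the ctz() helper above.
 */
#include <stdint.h>

static int
first_nonascii_in_word(uint64_t word)
{
    uint64_t bits = word & UINT64_C(0x8080808080808080);
    if (bits == 0) {
        return -1;                 /* all eight bytes are ASCII */
    }
    /* the lowest set bit is bit 7 of the offending byte */
    return (__builtin_ctzll(bits) - 7) / 8;
}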
+ size_t u = load_unaligned(p, end - p) & ASCII_CHAR_MASK; + if (u) { + return p - start + (ctz(u) - 7) / 8; + } + return end - start; +#else + while (p < end) { + if (*p & 0x80) { + break; + } + p++; + } + return p - start; +#endif +} + +static inline int +scalar_utf8_start_char(unsigned int ch) +{ + // 0xxxxxxx or 11xxxxxx are first byte. + return (~ch >> 7 | ch >> 6) & 1; +} + +static inline size_t +vector_utf8_start_chars(size_t v) +{ + return ((~v >> 7) | (v >> 6)) & VECTOR_0101; +} + + +// Count the number of UTF-8 code points in a given byte sequence. +static Py_ssize_t +utf8_count_codepoints(const unsigned char *s, const unsigned char *end) +{ + Py_ssize_t len = 0; + + if (end - s >= SIZEOF_SIZE_T) { + while (!_Py_IS_ALIGNED(s, ALIGNOF_SIZE_T)) { + len += scalar_utf8_start_char(*s++); + } + + while (s + SIZEOF_SIZE_T <= end) { + const unsigned char *e = end; + if (e - s > SIZEOF_SIZE_T * 255) { + e = s + SIZEOF_SIZE_T * 255; + } + Py_ssize_t vstart = 0; + while (s + SIZEOF_SIZE_T <= e) { + size_t v = *(size_t*)s; + size_t vs = vector_utf8_start_chars(v); + vstart += vs; + s += SIZEOF_SIZE_T; + } + vstart = (vstart & VECTOR_00FF) + ((vstart >> 8) & VECTOR_00FF); + vstart += vstart >> 16; +#if SIZEOF_SIZE_T == 8 + vstart += vstart >> 32; +#endif + len += vstart & 0x7ff; + } + } + while (s < end) { + len += scalar_utf8_start_char(*s++); + } + return len; +} + +static Py_ssize_t +ascii_decode(const char *start, const char *end, Py_UCS1 *dest) +{ #if SIZEOF_SIZE_T <= SIZEOF_VOID_P - if (_Py_IS_ALIGNED(p, ALIGNOF_SIZE_T) + if (_Py_IS_ALIGNED(start, ALIGNOF_SIZE_T) && _Py_IS_ALIGNED(dest, ALIGNOF_SIZE_T)) { /* Fast path, see in STRINGLIB(utf8_decode) for an explanation. */ - /* Help allocation */ - const char *_p = p; - Py_UCS1 * q = dest; - while (_p + SIZEOF_SIZE_T <= end) { - size_t value = *(const size_t *) _p; + const char *p = start; + Py_UCS1 *q = dest; + while (p + SIZEOF_SIZE_T <= end) { + size_t value = *(const size_t *) p; if (value & ASCII_CHAR_MASK) break; *((size_t *)q) = value; - _p += SIZEOF_SIZE_T; + p += SIZEOF_SIZE_T; q += SIZEOF_SIZE_T; } - p = _p; while (p < end) { if ((unsigned char)*p & 0x80) break; @@ -5019,31 +5209,12 @@ ascii_decode(const char *start, const char *end, Py_UCS1 *dest) return p - start; } #endif - while (p < end) { - /* Fast path, see in STRINGLIB(utf8_decode) in stringlib/codecs.h - for an explanation. */ - if (_Py_IS_ALIGNED(p, ALIGNOF_SIZE_T)) { - /* Help allocation */ - const char *_p = p; - while (_p + SIZEOF_SIZE_T <= end) { - size_t value = *(const size_t *) _p; - if (value & ASCII_CHAR_MASK) - break; - _p += SIZEOF_SIZE_T; - } - p = _p; - if (_p == end) - break; - } - if ((unsigned char)*p & 0x80) - break; - ++p; - } - memcpy(dest, start, p - start); - return p - start; + Py_ssize_t pos = find_first_nonascii((const unsigned char*)start, + (const unsigned char*)end); + memcpy(dest, start, pos); + return pos; } - static int unicode_decode_utf8_impl(_PyUnicodeWriter *writer, const char *starts, const char *s, const char *end, @@ -5187,27 +5358,69 @@ unicode_decode_utf8(const char *s, Py_ssize_t size, return get_latin1_char((unsigned char)s[0]); } - // fast path: try ASCII string. - const char *starts = s; - const char *end = s + size; - PyObject *u = PyUnicode_New(size, 127); - if (u == NULL) { + // I don't know this check is necessary or not. But there is a test + // case that requires size=PY_SSIZE_T_MAX cause MemoryError. 
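/*
 * The codepoint-counting helpers above rely on one fact: every UTF-8 code
 * point contributes exactly one byte that is not a continuation byte
 * (10xxxxxx), so counting start bytes counts code points. This is the scalar
 * form only; utf8_count_codepoints() additionally batches whole words with
 * SWAR accumulation.
 */
#include <stddef.h>

static size_t
count_codepoints_scalar(const unsigned char *s, const unsigned char *end)
{
    size_t n = 0;
    while (s < end) {
        unsigned char ch = *s++;
        n += (ch & 0xC0) != 0x80;   /* 1 for 0xxxxxxx or 11xxxxxx, 0 for 10xxxxxx */
    }
    return n;
}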
+ if (PY_SSIZE_T_MAX - sizeof(PyCompactUnicodeObject) < (size_t)size) { + PyErr_NoMemory(); return NULL; } - Py_ssize_t decoded = ascii_decode(s, end, PyUnicode_1BYTE_DATA(u)); - if (decoded == size) { + + const char *starts = s; + const char *end = s + size; + + Py_ssize_t pos = find_first_nonascii((const unsigned char*)starts, (const unsigned char*)end); + if (pos == size) { // fast path: ASCII string. + PyObject *u = PyUnicode_New(size, 127); + if (u == NULL) { + return NULL; + } + memcpy(PyUnicode_1BYTE_DATA(u), s, size); if (consumed) { *consumed = size; } return u; } - s += decoded; - size -= decoded; + + int maxchr = 127; + Py_ssize_t maxsize = size; + + unsigned char ch = (unsigned char)(s[pos]); + // error handler other than strict may remove/replace the invalid byte. + // consumed != NULL allows 1~3 bytes remainings. + // 0x80 <= ch < 0xc2 is invalid start byte that cause UnicodeDecodeError. + // otherwise: check the input and decide the maxchr and maxsize to reduce + // reallocation and copy. + if (error_handler == _Py_ERROR_STRICT && !consumed && ch >= 0xc2) { + // we only calculate the number of codepoints and don't determine the exact maxchr. + // This is because writing fast and portable SIMD code to find maxchr is difficult. + // If reallocation occurs for a larger maxchar, knowing the exact number of codepoints + // means that it is no longer necessary to allocate several times the required amount + // of memory. + maxsize = utf8_count_codepoints((const unsigned char *)s, (const unsigned char *)end); + if (ch < 0xc4) { // latin1 + maxchr = 0xff; + } + else if (ch < 0xf0) { // ucs2 + maxchr = 0xffff; + } + else { // ucs4 + maxchr = 0x10ffff; + } + } + PyObject *u = PyUnicode_New(maxsize, maxchr); + if (!u) { + return NULL; + } // Use _PyUnicodeWriter after fast path is failed. _PyUnicodeWriter writer; _PyUnicodeWriter_InitWithBuffer(&writer, u); - writer.pos = decoded; + if (maxchr <= 255) { + memcpy(PyUnicode_1BYTE_DATA(u), s, pos); + s += pos; + size -= pos; + writer.pos = pos; + } if (unicode_decode_utf8_impl(&writer, starts, s, end, error_handler, errors, @@ -5267,7 +5480,9 @@ PyUnicode_DecodeUTF8Stateful(const char *s, const char *errors, Py_ssize_t *consumed) { - return unicode_decode_utf8(s, size, _Py_ERROR_UNKNOWN, errors, consumed); + return unicode_decode_utf8(s, size, + errors ? 
_Py_ERROR_UNKNOWN : _Py_ERROR_STRICT, + errors, consumed); } diff --git a/PCbuild/_elementtree.vcxproj b/PCbuild/_elementtree.vcxproj index 8c9c0e42f7fe3e..3eb9c89bcb6740 100644 --- a/PCbuild/_elementtree.vcxproj +++ b/PCbuild/_elementtree.vcxproj @@ -106,7 +106,6 @@ - diff --git a/PCbuild/_elementtree.vcxproj.filters b/PCbuild/_elementtree.vcxproj.filters index bc14e31f32b95c..a5368024ccebfd 100644 --- a/PCbuild/_elementtree.vcxproj.filters +++ b/PCbuild/_elementtree.vcxproj.filters @@ -42,9 +42,6 @@ Header Files\expat - - Header Files\expat - Header Files\expat diff --git a/PCbuild/_freeze_module.vcxproj.filters b/PCbuild/_freeze_module.vcxproj.filters index 09a5f4d30ef490..3842f52e514bb4 100644 --- a/PCbuild/_freeze_module.vcxproj.filters +++ b/PCbuild/_freeze_module.vcxproj.filters @@ -239,6 +239,8 @@ Source Files + Source Files + Source Files diff --git a/PCbuild/_testinternalcapi.vcxproj.filters b/PCbuild/_testinternalcapi.vcxproj.filters index 27429ea5833077..eb0a83ba17f815 100644 --- a/PCbuild/_testinternalcapi.vcxproj.filters +++ b/PCbuild/_testinternalcapi.vcxproj.filters @@ -21,6 +21,9 @@ Source Files + + Source Files + diff --git a/PCbuild/pcbuild.sln b/PCbuild/pcbuild.sln index d10e1c46a91480..f2a48a7cb63666 100644 --- a/PCbuild/pcbuild.sln +++ b/PCbuild/pcbuild.sln @@ -1099,30 +1099,30 @@ Global {1D4B18D3-7C12-4ECB-9179-8531FF876CE6}.Release|Win32.Build.0 = Release|Win32 {1D4B18D3-7C12-4ECB-9179-8531FF876CE6}.Release|x64.ActiveCfg = Release|x64 {1D4B18D3-7C12-4ECB-9179-8531FF876CE6}.Release|x64.Build.0 = Release|x64 - {19C0C13F-47CA-4432-AFF3-799A296A4DDC}.Debug|ARM.ActiveCfg = Debug|Win32 - {19C0C13F-47CA-4432-AFF3-799A296A4DDC}.Debug|ARM.Build.0 = Debug|Win32 - {19C0C13F-47CA-4432-AFF3-799A296A4DDC}.Debug|ARM64.ActiveCfg = Debug|x64 - {19C0C13F-47CA-4432-AFF3-799A296A4DDC}.Debug|ARM64.Build.0 = Debug|x64 + {19C0C13F-47CA-4432-AFF3-799A296A4DDC}.Debug|ARM.ActiveCfg = Debug|ARM + {19C0C13F-47CA-4432-AFF3-799A296A4DDC}.Debug|ARM.Build.0 = Debug|ARM + {19C0C13F-47CA-4432-AFF3-799A296A4DDC}.Debug|ARM64.ActiveCfg = Debug|ARM64 + {19C0C13F-47CA-4432-AFF3-799A296A4DDC}.Debug|ARM64.Build.0 = Debug|ARM64 {19C0C13F-47CA-4432-AFF3-799A296A4DDC}.Debug|Win32.ActiveCfg = Debug|Win32 {19C0C13F-47CA-4432-AFF3-799A296A4DDC}.Debug|Win32.Build.0 = Debug|Win32 {19C0C13F-47CA-4432-AFF3-799A296A4DDC}.Debug|x64.ActiveCfg = Debug|x64 {19C0C13F-47CA-4432-AFF3-799A296A4DDC}.Debug|x64.Build.0 = Debug|x64 - {19C0C13F-47CA-4432-AFF3-799A296A4DDC}.PGInstrument|ARM.ActiveCfg = Release|Win32 - {19C0C13F-47CA-4432-AFF3-799A296A4DDC}.PGInstrument|ARM.Build.0 = Release|Win32 - {19C0C13F-47CA-4432-AFF3-799A296A4DDC}.PGInstrument|ARM64.ActiveCfg = Release|x64 - {19C0C13F-47CA-4432-AFF3-799A296A4DDC}.PGInstrument|ARM64.Build.0 = Release|x64 + {19C0C13F-47CA-4432-AFF3-799A296A4DDC}.PGInstrument|ARM.ActiveCfg = Release|ARM + {19C0C13F-47CA-4432-AFF3-799A296A4DDC}.PGInstrument|ARM.Build.0 = Release|ARM + {19C0C13F-47CA-4432-AFF3-799A296A4DDC}.PGInstrument|ARM64.ActiveCfg = Release|ARM64 + {19C0C13F-47CA-4432-AFF3-799A296A4DDC}.PGInstrument|ARM64.Build.0 = Release|ARM64 {19C0C13F-47CA-4432-AFF3-799A296A4DDC}.PGInstrument|Win32.ActiveCfg = Release|Win32 {19C0C13F-47CA-4432-AFF3-799A296A4DDC}.PGInstrument|Win32.Build.0 = Release|Win32 {19C0C13F-47CA-4432-AFF3-799A296A4DDC}.PGInstrument|x64.ActiveCfg = Release|x64 {19C0C13F-47CA-4432-AFF3-799A296A4DDC}.PGInstrument|x64.Build.0 = Release|x64 - {19C0C13F-47CA-4432-AFF3-799A296A4DDC}.PGUpdate|ARM.ActiveCfg = Release|Win32 - 
{19C0C13F-47CA-4432-AFF3-799A296A4DDC}.PGUpdate|ARM64.ActiveCfg = Release|x64 + {19C0C13F-47CA-4432-AFF3-799A296A4DDC}.PGUpdate|ARM.ActiveCfg = Release|ARM + {19C0C13F-47CA-4432-AFF3-799A296A4DDC}.PGUpdate|ARM64.ActiveCfg = Release|ARM64 {19C0C13F-47CA-4432-AFF3-799A296A4DDC}.PGUpdate|Win32.ActiveCfg = Release|Win32 {19C0C13F-47CA-4432-AFF3-799A296A4DDC}.PGUpdate|x64.ActiveCfg = Release|x64 - {19C0C13F-47CA-4432-AFF3-799A296A4DDC}.Release|ARM.ActiveCfg = Release|Win32 - {19C0C13F-47CA-4432-AFF3-799A296A4DDC}.Release|ARM.Build.0 = Release|Win32 - {19C0C13F-47CA-4432-AFF3-799A296A4DDC}.Release|ARM64.ActiveCfg = Release|x64 - {19C0C13F-47CA-4432-AFF3-799A296A4DDC}.Release|ARM64.Build.0 = Release|x64 + {19C0C13F-47CA-4432-AFF3-799A296A4DDC}.Release|ARM.ActiveCfg = Release|ARM + {19C0C13F-47CA-4432-AFF3-799A296A4DDC}.Release|ARM.Build.0 = Release|ARM + {19C0C13F-47CA-4432-AFF3-799A296A4DDC}.Release|ARM64.ActiveCfg = Release|ARM64 + {19C0C13F-47CA-4432-AFF3-799A296A4DDC}.Release|ARM64.Build.0 = Release|ARM64 {19C0C13F-47CA-4432-AFF3-799A296A4DDC}.Release|Win32.ActiveCfg = Release|Win32 {19C0C13F-47CA-4432-AFF3-799A296A4DDC}.Release|Win32.Build.0 = Release|Win32 {19C0C13F-47CA-4432-AFF3-799A296A4DDC}.Release|x64.ActiveCfg = Release|x64 @@ -1441,26 +1441,26 @@ Global {12728250-16EC-4DC6-94D7-E21DD88947F8}.Release|Win32.Build.0 = Release|Win32 {12728250-16EC-4DC6-94D7-E21DD88947F8}.Release|x64.ActiveCfg = Release|x64 {12728250-16EC-4DC6-94D7-E21DD88947F8}.Release|x64.Build.0 = Release|x64 - {9DE9E23D-C8D4-4817-92A9-920A8B1FE5FF}.Debug|ARM.ActiveCfg = Debug|Win32 - {9DE9E23D-C8D4-4817-92A9-920A8B1FE5FF}.Debug|ARM64.ActiveCfg = Debug|Win32 + {9DE9E23D-C8D4-4817-92A9-920A8B1FE5FF}.Debug|ARM.ActiveCfg = Debug|ARM + {9DE9E23D-C8D4-4817-92A9-920A8B1FE5FF}.Debug|ARM64.ActiveCfg = Debug|ARM64 {9DE9E23D-C8D4-4817-92A9-920A8B1FE5FF}.Debug|Win32.ActiveCfg = Debug|Win32 {9DE9E23D-C8D4-4817-92A9-920A8B1FE5FF}.Debug|Win32.Build.0 = Debug|Win32 {9DE9E23D-C8D4-4817-92A9-920A8B1FE5FF}.Debug|x64.ActiveCfg = Debug|x64 {9DE9E23D-C8D4-4817-92A9-920A8B1FE5FF}.Debug|x64.Build.0 = Debug|x64 - {9DE9E23D-C8D4-4817-92A9-920A8B1FE5FF}.PGInstrument|ARM.ActiveCfg = PGInstrument|Win32 - {9DE9E23D-C8D4-4817-92A9-920A8B1FE5FF}.PGInstrument|ARM64.ActiveCfg = PGInstrument|Win32 + {9DE9E23D-C8D4-4817-92A9-920A8B1FE5FF}.PGInstrument|ARM.ActiveCfg = PGInstrument|ARM + {9DE9E23D-C8D4-4817-92A9-920A8B1FE5FF}.PGInstrument|ARM64.ActiveCfg = PGInstrument|ARM64 {9DE9E23D-C8D4-4817-92A9-920A8B1FE5FF}.PGInstrument|Win32.ActiveCfg = PGInstrument|Win32 {9DE9E23D-C8D4-4817-92A9-920A8B1FE5FF}.PGInstrument|Win32.Build.0 = PGInstrument|Win32 {9DE9E23D-C8D4-4817-92A9-920A8B1FE5FF}.PGInstrument|x64.ActiveCfg = PGInstrument|x64 {9DE9E23D-C8D4-4817-92A9-920A8B1FE5FF}.PGInstrument|x64.Build.0 = PGInstrument|x64 - {9DE9E23D-C8D4-4817-92A9-920A8B1FE5FF}.PGUpdate|ARM.ActiveCfg = PGUpdate|Win32 - {9DE9E23D-C8D4-4817-92A9-920A8B1FE5FF}.PGUpdate|ARM64.ActiveCfg = PGUpdate|Win32 + {9DE9E23D-C8D4-4817-92A9-920A8B1FE5FF}.PGUpdate|ARM.ActiveCfg = PGUpdate|ARM + {9DE9E23D-C8D4-4817-92A9-920A8B1FE5FF}.PGUpdate|ARM64.ActiveCfg = PGUpdate|ARM64 {9DE9E23D-C8D4-4817-92A9-920A8B1FE5FF}.PGUpdate|Win32.ActiveCfg = PGUpdate|Win32 {9DE9E23D-C8D4-4817-92A9-920A8B1FE5FF}.PGUpdate|Win32.Build.0 = PGUpdate|Win32 {9DE9E23D-C8D4-4817-92A9-920A8B1FE5FF}.PGUpdate|x64.ActiveCfg = PGUpdate|x64 {9DE9E23D-C8D4-4817-92A9-920A8B1FE5FF}.PGUpdate|x64.Build.0 = PGUpdate|x64 - {9DE9E23D-C8D4-4817-92A9-920A8B1FE5FF}.Release|ARM.ActiveCfg = Release|Win32 - 
{9DE9E23D-C8D4-4817-92A9-920A8B1FE5FF}.Release|ARM64.ActiveCfg = Release|Win32 + {9DE9E23D-C8D4-4817-92A9-920A8B1FE5FF}.Release|ARM.ActiveCfg = Release|ARM + {9DE9E23D-C8D4-4817-92A9-920A8B1FE5FF}.Release|ARM64.ActiveCfg = Release|ARM64 {9DE9E23D-C8D4-4817-92A9-920A8B1FE5FF}.Release|Win32.ActiveCfg = Release|Win32 {9DE9E23D-C8D4-4817-92A9-920A8B1FE5FF}.Release|Win32.Build.0 = Release|Win32 {9DE9E23D-C8D4-4817-92A9-920A8B1FE5FF}.Release|x64.ActiveCfg = Release|x64 @@ -1529,27 +1529,27 @@ Global {FDB84CBB-2FB6-47C8-A2D6-091E0833239D}.Release|Win32.Build.0 = Release|Win32 {FDB84CBB-2FB6-47C8-A2D6-091E0833239D}.Release|x64.ActiveCfg = Release|x64 {FDB84CBB-2FB6-47C8-A2D6-091E0833239D}.Release|x64.Build.0 = Release|x64 - {AB603547-1E2A-45B3-9E09-B04596006393}.Debug|ARM.ActiveCfg = Debug|Win32 - {AB603547-1E2A-45B3-9E09-B04596006393}.Debug|ARM.Build.0 = Debug|Win32 - {AB603547-1E2A-45B3-9E09-B04596006393}.Debug|ARM64.ActiveCfg = Debug|Win32 + {AB603547-1E2A-45B3-9E09-B04596006393}.Debug|ARM.ActiveCfg = Debug|ARM + {AB603547-1E2A-45B3-9E09-B04596006393}.Debug|ARM.Build.0 = Debug|ARM + {AB603547-1E2A-45B3-9E09-B04596006393}.Debug|ARM64.ActiveCfg = Debug|ARM64 {AB603547-1E2A-45B3-9E09-B04596006393}.Debug|Win32.ActiveCfg = Debug|Win32 {AB603547-1E2A-45B3-9E09-B04596006393}.Debug|Win32.Build.0 = Debug|Win32 {AB603547-1E2A-45B3-9E09-B04596006393}.Debug|x64.ActiveCfg = Debug|x64 {AB603547-1E2A-45B3-9E09-B04596006393}.Debug|x64.Build.0 = Debug|x64 - {AB603547-1E2A-45B3-9E09-B04596006393}.PGInstrument|ARM.ActiveCfg = PGInstrument|Win32 - {AB603547-1E2A-45B3-9E09-B04596006393}.PGInstrument|ARM64.ActiveCfg = PGInstrument|Win32 + {AB603547-1E2A-45B3-9E09-B04596006393}.PGInstrument|ARM.ActiveCfg = PGInstrument|ARM + {AB603547-1E2A-45B3-9E09-B04596006393}.PGInstrument|ARM64.ActiveCfg = PGInstrument|ARM64 {AB603547-1E2A-45B3-9E09-B04596006393}.PGInstrument|Win32.ActiveCfg = PGInstrument|Win32 {AB603547-1E2A-45B3-9E09-B04596006393}.PGInstrument|Win32.Build.0 = PGInstrument|Win32 {AB603547-1E2A-45B3-9E09-B04596006393}.PGInstrument|x64.ActiveCfg = PGInstrument|x64 {AB603547-1E2A-45B3-9E09-B04596006393}.PGInstrument|x64.Build.0 = PGInstrument|x64 - {AB603547-1E2A-45B3-9E09-B04596006393}.PGUpdate|ARM.ActiveCfg = PGUpdate|Win32 - {AB603547-1E2A-45B3-9E09-B04596006393}.PGUpdate|ARM64.ActiveCfg = PGUpdate|Win32 + {AB603547-1E2A-45B3-9E09-B04596006393}.PGUpdate|ARM.ActiveCfg = PGUpdate|ARM + {AB603547-1E2A-45B3-9E09-B04596006393}.PGUpdate|ARM64.ActiveCfg = PGUpdate|ARM64 {AB603547-1E2A-45B3-9E09-B04596006393}.PGUpdate|Win32.ActiveCfg = PGUpdate|Win32 {AB603547-1E2A-45B3-9E09-B04596006393}.PGUpdate|Win32.Build.0 = PGUpdate|Win32 {AB603547-1E2A-45B3-9E09-B04596006393}.PGUpdate|x64.ActiveCfg = PGUpdate|x64 {AB603547-1E2A-45B3-9E09-B04596006393}.PGUpdate|x64.Build.0 = PGUpdate|x64 - {AB603547-1E2A-45B3-9E09-B04596006393}.Release|ARM.ActiveCfg = Release|Win32 - {AB603547-1E2A-45B3-9E09-B04596006393}.Release|ARM64.ActiveCfg = Release|Win32 + {AB603547-1E2A-45B3-9E09-B04596006393}.Release|ARM.ActiveCfg = Release|ARM + {AB603547-1E2A-45B3-9E09-B04596006393}.Release|ARM64.ActiveCfg = Release|ARM64 {AB603547-1E2A-45B3-9E09-B04596006393}.Release|Win32.ActiveCfg = Release|Win32 {AB603547-1E2A-45B3-9E09-B04596006393}.Release|Win32.Build.0 = Release|Win32 {AB603547-1E2A-45B3-9E09-B04596006393}.Release|x64.ActiveCfg = Release|x64 diff --git a/PCbuild/pylauncher.vcxproj b/PCbuild/pylauncher.vcxproj index 35f2f7e505bf92..ca422ef4d02e93 100644 --- a/PCbuild/pylauncher.vcxproj +++ b/PCbuild/pylauncher.vcxproj @@ -103,7 +103,7 @@ - + diff 
--git a/PCbuild/pylauncher.vcxproj.filters b/PCbuild/pylauncher.vcxproj.filters index 17d0389ca50f1c..f10f6d5669e283 100644 --- a/PCbuild/pylauncher.vcxproj.filters +++ b/PCbuild/pylauncher.vcxproj.filters @@ -16,7 +16,7 @@ - + Resource Files diff --git a/PCbuild/python.vcxproj b/PCbuild/python.vcxproj index 4a99ffc677c287..70dabaa3c8bc02 100644 --- a/PCbuild/python.vcxproj +++ b/PCbuild/python.vcxproj @@ -103,7 +103,7 @@ - + diff --git a/PCbuild/python.vcxproj.filters b/PCbuild/python.vcxproj.filters index 0662a4e7f5f933..31f8ad9b77058e 100644 --- a/PCbuild/python.vcxproj.filters +++ b/PCbuild/python.vcxproj.filters @@ -9,7 +9,7 @@ - + Resource Files diff --git a/PCbuild/python_uwp.vcxproj b/PCbuild/python_uwp.vcxproj index fb27e9e71222e3..3f8772d30b4ee4 100644 --- a/PCbuild/python_uwp.vcxproj +++ b/PCbuild/python_uwp.vcxproj @@ -108,7 +108,7 @@ - + diff --git a/PCbuild/python_uwp.vcxproj.filters b/PCbuild/python_uwp.vcxproj.filters index 79e87461eb1b7c..76b7a5f29f0a5b 100644 --- a/PCbuild/python_uwp.vcxproj.filters +++ b/PCbuild/python_uwp.vcxproj.filters @@ -9,7 +9,7 @@ - + Resource Files diff --git a/PCbuild/pythoncore.vcxproj b/PCbuild/pythoncore.vcxproj index 95552cade52b75..9ebf58ae8a9bc4 100644 --- a/PCbuild/pythoncore.vcxproj +++ b/PCbuild/pythoncore.vcxproj @@ -169,9 +169,7 @@ - - @@ -220,7 +218,6 @@ - @@ -366,14 +363,12 @@ - - @@ -415,7 +410,7 @@ - + diff --git a/PCbuild/pythoncore.vcxproj.filters b/PCbuild/pythoncore.vcxproj.filters index 1708cf6e0b3a52..6c76a6ab592a84 100644 --- a/PCbuild/pythoncore.vcxproj.filters +++ b/PCbuild/pythoncore.vcxproj.filters @@ -231,9 +231,6 @@ Include - - Include - Include @@ -342,9 +339,6 @@ Include - - Modules - Parser @@ -447,15 +441,9 @@ Include\cpython - - Include - Include - - Include - Include @@ -903,7 +891,7 @@ Modules\zlib - + Modules\zlib diff --git a/PCbuild/pythonw.vcxproj b/PCbuild/pythonw.vcxproj index d08c210ef8a1dc..c6a5b8ce90a0d9 100644 --- a/PCbuild/pythonw.vcxproj +++ b/PCbuild/pythonw.vcxproj @@ -97,6 +97,9 @@ 3000000 + + + diff --git a/PCbuild/pythonw.vcxproj.filters b/PCbuild/pythonw.vcxproj.filters index 20d87051e2511d..9f92485d1a74d2 100644 --- a/PCbuild/pythonw.vcxproj.filters +++ b/PCbuild/pythonw.vcxproj.filters @@ -18,4 +18,9 @@ Resource Files + + + Resource Files + + \ No newline at end of file diff --git a/PCbuild/pythonw_uwp.vcxproj b/PCbuild/pythonw_uwp.vcxproj index e21e46a1b722ed..f548d022642890 100644 --- a/PCbuild/pythonw_uwp.vcxproj +++ b/PCbuild/pythonw_uwp.vcxproj @@ -108,7 +108,7 @@ - + diff --git a/PCbuild/pythonw_uwp.vcxproj.filters b/PCbuild/pythonw_uwp.vcxproj.filters index 2f39bdea9e910d..72756e14cdc4a9 100644 --- a/PCbuild/pythonw_uwp.vcxproj.filters +++ b/PCbuild/pythonw_uwp.vcxproj.filters @@ -9,7 +9,7 @@ - + Resource Files diff --git a/PCbuild/pywlauncher.vcxproj b/PCbuild/pywlauncher.vcxproj index e50b69aefe2b9c..1694548935a165 100644 --- a/PCbuild/pywlauncher.vcxproj +++ b/PCbuild/pywlauncher.vcxproj @@ -103,7 +103,7 @@ - + diff --git a/PCbuild/pywlauncher.vcxproj.filters b/PCbuild/pywlauncher.vcxproj.filters index 17d0389ca50f1c..f10f6d5669e283 100644 --- a/PCbuild/pywlauncher.vcxproj.filters +++ b/PCbuild/pywlauncher.vcxproj.filters @@ -16,7 +16,7 @@ - + Resource Files diff --git a/PCbuild/venvlauncher.vcxproj b/PCbuild/venvlauncher.vcxproj index 1193e032245c94..abaf3a979af268 100644 --- a/PCbuild/venvlauncher.vcxproj +++ b/PCbuild/venvlauncher.vcxproj @@ -107,7 +107,7 @@ - + diff --git a/PCbuild/venvlauncher.vcxproj.filters b/PCbuild/venvlauncher.vcxproj.filters index 
56a0f005a3fa2a..bc98687a6da0d3 100644 --- a/PCbuild/venvlauncher.vcxproj.filters +++ b/PCbuild/venvlauncher.vcxproj.filters @@ -9,7 +9,7 @@ - + Resource Files diff --git a/PCbuild/venvwlauncher.vcxproj b/PCbuild/venvwlauncher.vcxproj index db7128272f06db..c58280deb8abeb 100644 --- a/PCbuild/venvwlauncher.vcxproj +++ b/PCbuild/venvwlauncher.vcxproj @@ -107,7 +107,7 @@ - + diff --git a/PCbuild/venvwlauncher.vcxproj.filters b/PCbuild/venvwlauncher.vcxproj.filters index 61a514395e82dc..5193c38b12d53d 100644 --- a/PCbuild/venvwlauncher.vcxproj.filters +++ b/PCbuild/venvwlauncher.vcxproj.filters @@ -14,7 +14,7 @@ - + Resource Files diff --git a/Python/bltinmodule.c b/Python/bltinmodule.c index 85ebd5b00cc18b..17df9208f224f4 100644 --- a/Python/bltinmodule.c +++ b/Python/bltinmodule.c @@ -2829,7 +2829,6 @@ builtin_sum_impl(PyObject *module, PyObject *iterable, PyObject *start) double value = PyLong_AsDouble(item); if (value != -1.0 || !PyErr_Occurred()) { re_sum = cs_add(re_sum, value); - im_sum.hi += 0.0; Py_DECREF(item); continue; } @@ -2842,7 +2841,6 @@ builtin_sum_impl(PyObject *module, PyObject *iterable, PyObject *start) if (PyFloat_Check(item)) { double value = PyFloat_AS_DOUBLE(item); re_sum = cs_add(re_sum, value); - im_sum.hi += 0.0; _Py_DECREF_SPECIALIZED(item, _PyFloat_ExactDealloc); continue; } diff --git a/Python/bytecodes.c b/Python/bytecodes.c index c85b49842daf44..a14b32b8108be8 100644 --- a/Python/bytecodes.c +++ b/Python/bytecodes.c @@ -376,7 +376,7 @@ dummy_func( pure inst(UNARY_NOT, (value -- res)) { assert(PyStackRef_BoolCheck(value)); - res = PyStackRef_Is(value, PyStackRef_False) + res = PyStackRef_IsFalse(value) ? PyStackRef_True : PyStackRef_False; DEAD(value); } @@ -391,7 +391,7 @@ dummy_func( }; specializing op(_SPECIALIZE_TO_BOOL, (counter/1, value -- value)) { - #if ENABLE_SPECIALIZATION + #if ENABLE_SPECIALIZATION_FT if (ADAPTIVE_COUNTER_TRIGGERS(counter)) { next_instr = this_instr; _Py_Specialize_ToBool(value, next_instr); @@ -399,7 +399,7 @@ dummy_func( } OPCODE_DEFERRED_INC(TO_BOOL); ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter); - #endif /* ENABLE_SPECIALIZATION */ + #endif /* ENABLE_SPECIALIZATION_FT */ } op(_TO_BOOL, (value -- res)) { @@ -435,13 +435,13 @@ dummy_func( PyObject *value_o = PyStackRef_AsPyObjectBorrow(value); EXIT_IF(!PyList_CheckExact(value_o)); STAT_INC(TO_BOOL, hit); - res = Py_SIZE(value_o) ? PyStackRef_True : PyStackRef_False; + res = PyList_GET_SIZE(value_o) ? PyStackRef_True : PyStackRef_False; DECREF_INPUTS(); } inst(TO_BOOL_NONE, (unused/1, unused/2, value -- res)) { // This one is a bit weird, because we expect *some* failures: - EXIT_IF(!PyStackRef_Is(value, PyStackRef_None)); + EXIT_IF(!PyStackRef_IsNone(value)); DEAD(value); STAT_INC(TO_BOOL, hit); res = PyStackRef_False; @@ -651,9 +651,7 @@ dummy_func( // specializations, but there is no output. // At the end we just skip over the STORE_FAST. op(_BINARY_OP_INPLACE_ADD_UNICODE, (left, right --)) { - #ifndef NDEBUG PyObject *left_o = PyStackRef_AsPyObjectBorrow(left); - #endif PyObject *right_o = PyStackRef_AsPyObjectBorrow(right); int next_oparg; @@ -664,7 +662,7 @@ dummy_func( next_oparg = CURRENT_OPERAND0(); #endif _PyStackRef *target_local = &GETLOCAL(next_oparg); - DEOPT_IF(!PyStackRef_Is(*target_local, left)); + DEOPT_IF(PyStackRef_AsPyObjectBorrow(*target_local) != left_o); STAT_INC(BINARY_OP, hit); /* Handle `left = left + right` or `left += right` for str. 
* @@ -912,7 +910,7 @@ dummy_func( }; specializing op(_SPECIALIZE_STORE_SUBSCR, (counter/1, container, sub -- container, sub)) { - #if ENABLE_SPECIALIZATION + #if ENABLE_SPECIALIZATION_FT if (ADAPTIVE_COUNTER_TRIGGERS(counter)) { next_instr = this_instr; _Py_Specialize_StoreSubscr(container, sub, next_instr); @@ -920,7 +918,7 @@ dummy_func( } OPCODE_DEFERRED_INC(STORE_SUBSCR); ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter); - #endif /* ENABLE_SPECIALIZATION */ + #endif /* ENABLE_SPECIALIZATION_FT */ } op(_STORE_SUBSCR, (v, container, sub -- )) { @@ -942,13 +940,18 @@ dummy_func( // Ensure nonnegative, zero-or-one-digit ints. DEOPT_IF(!_PyLong_IsNonNegativeCompact((PyLongObject *)sub)); Py_ssize_t index = ((PyLongObject*)sub)->long_value.ob_digit[0]; + DEOPT_IF(!LOCK_OBJECT(list)); // Ensure index < len(list) - DEOPT_IF(index >= PyList_GET_SIZE(list)); + if (index >= PyList_GET_SIZE(list)) { + UNLOCK_OBJECT(list); + DEOPT_IF(true); + } STAT_INC(STORE_SUBSCR, hit); PyObject *old_value = PyList_GET_ITEM(list, index); PyList_SET_ITEM(list, index, PyStackRef_AsPyObjectSteal(value)); assert(old_value != NULL); + UNLOCK_OBJECT(list); // unlock before decrefs! Py_DECREF(old_value); PyStackRef_CLOSE_SPECIALIZED(sub_st, (destructor)PyObject_Free); DEAD(sub_st); @@ -1141,7 +1144,7 @@ dummy_func( gen_frame->previous = frame; DISPATCH_INLINED(gen_frame); } - if (PyStackRef_Is(v, PyStackRef_None) && PyIter_Check(receiver_o)) { + if (PyStackRef_IsNone(v) && PyIter_Check(receiver_o)) { retval_o = Py_TYPE(receiver_o)->tp_iternext(receiver_o); } else { @@ -1249,7 +1252,7 @@ dummy_func( inst(POP_EXCEPT, (exc_value -- )) { _PyErr_StackItem *exc_info = tstate->exc_info; Py_XSETREF(exc_info->exc_value, - PyStackRef_Is(exc_value, PyStackRef_None) + PyStackRef_IsNone(exc_value) ? 
NULL : PyStackRef_AsPyObjectSteal(exc_value)); } @@ -1381,7 +1384,7 @@ dummy_func( }; specializing op(_SPECIALIZE_UNPACK_SEQUENCE, (counter/1, seq -- seq)) { - #if ENABLE_SPECIALIZATION + #if ENABLE_SPECIALIZATION_FT if (ADAPTIVE_COUNTER_TRIGGERS(counter)) { next_instr = this_instr; _Py_Specialize_UnpackSequence(seq, next_instr, oparg); @@ -1389,7 +1392,7 @@ dummy_func( } OPCODE_DEFERRED_INC(UNPACK_SEQUENCE); ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter); - #endif /* ENABLE_SPECIALIZATION */ + #endif /* ENABLE_SPECIALIZATION_FT */ (void)seq; (void)counter; } @@ -1429,12 +1432,24 @@ dummy_func( inst(UNPACK_SEQUENCE_LIST, (unused/1, seq -- values[oparg])) { PyObject *seq_o = PyStackRef_AsPyObjectBorrow(seq); DEOPT_IF(!PyList_CheckExact(seq_o)); - DEOPT_IF(PyList_GET_SIZE(seq_o) != oparg); + #ifdef Py_GIL_DISABLED + PyCriticalSection cs; + PyCriticalSection_Begin(&cs, seq_o); + #endif + if (PyList_GET_SIZE(seq_o) != oparg) { + #ifdef Py_GIL_DISABLED + PyCriticalSection_End(&cs); + #endif + DEOPT_IF(true); + } STAT_INC(UNPACK_SEQUENCE, hit); PyObject **items = _PyList_ITEMS(seq_o); for (int i = oparg; --i >= 0; ) { *values++ = PyStackRef_FromPyObjectNew(items[i]); } + #ifdef Py_GIL_DISABLED + PyCriticalSection_End(&cs); + #endif DECREF_INPUTS(); } @@ -1569,7 +1584,7 @@ dummy_func( }; specializing op(_SPECIALIZE_LOAD_GLOBAL, (counter/1 -- )) { - #if ENABLE_SPECIALIZATION + #if ENABLE_SPECIALIZATION_FT if (ADAPTIVE_COUNTER_TRIGGERS(counter)) { PyObject *name = GETITEM(FRAME_CO_NAMES, oparg>>1); next_instr = this_instr; @@ -1578,7 +1593,7 @@ dummy_func( } OPCODE_DEFERRED_INC(LOAD_GLOBAL); ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter); - #endif /* ENABLE_SPECIALIZATION */ + #endif /* ENABLE_SPECIALIZATION_FT */ } // res[1] because we need a pointer to res to pass it to _PyEval_LoadGlobalStackRef @@ -1599,16 +1614,18 @@ dummy_func( op(_GUARD_GLOBALS_VERSION, (version/1 --)) { PyDictObject *dict = (PyDictObject *)GLOBALS(); DEOPT_IF(!PyDict_CheckExact(dict)); - DEOPT_IF(dict->ma_keys->dk_version != version); - assert(DK_IS_UNICODE(dict->ma_keys)); + PyDictKeysObject *keys = FT_ATOMIC_LOAD_PTR_ACQUIRE(dict->ma_keys); + DEOPT_IF(FT_ATOMIC_LOAD_UINT32_RELAXED(keys->dk_version) != version); + assert(DK_IS_UNICODE(keys)); } op(_GUARD_GLOBALS_VERSION_PUSH_KEYS, (version / 1 -- globals_keys: PyDictKeysObject *)) { PyDictObject *dict = (PyDictObject *)GLOBALS(); DEOPT_IF(!PyDict_CheckExact(dict)); - DEOPT_IF(dict->ma_keys->dk_version != version); - globals_keys = dict->ma_keys; + PyDictKeysObject *keys = FT_ATOMIC_LOAD_PTR_ACQUIRE(dict->ma_keys); + DEOPT_IF(FT_ATOMIC_LOAD_UINT32_RELAXED(keys->dk_version) != version); + globals_keys = keys; assert(DK_IS_UNICODE(globals_keys)); } @@ -1616,33 +1633,44 @@ dummy_func( { PyDictObject *dict = (PyDictObject *)BUILTINS(); DEOPT_IF(!PyDict_CheckExact(dict)); - DEOPT_IF(dict->ma_keys->dk_version != version); - builtins_keys = dict->ma_keys; + PyDictKeysObject *keys = FT_ATOMIC_LOAD_PTR_ACQUIRE(dict->ma_keys); + DEOPT_IF(FT_ATOMIC_LOAD_UINT32_RELAXED(keys->dk_version) != version); + builtins_keys = keys; assert(DK_IS_UNICODE(builtins_keys)); } op(_LOAD_GLOBAL_MODULE_FROM_KEYS, (index/1, globals_keys: PyDictKeysObject* -- res, null if (oparg & 1))) { PyDictUnicodeEntry *entries = DK_UNICODE_ENTRIES(globals_keys); - PyObject *res_o = entries[index].me_value; + PyObject *res_o = FT_ATOMIC_LOAD_PTR_RELAXED(entries[index].me_value); DEAD(globals_keys); SYNC_SP(); DEOPT_IF(res_o == NULL); + #if Py_GIL_DISABLED + int increfed = 
_Py_TryIncrefCompareStackRef(&entries[index].me_value, res_o, &res); + DEOPT_IF(!increfed); + #else Py_INCREF(res_o); + res = PyStackRef_FromPyObjectSteal(res_o); + #endif STAT_INC(LOAD_GLOBAL, hit); null = PyStackRef_NULL; - res = PyStackRef_FromPyObjectSteal(res_o); } op(_LOAD_GLOBAL_BUILTINS_FROM_KEYS, (index/1, builtins_keys: PyDictKeysObject* -- res, null if (oparg & 1))) { PyDictUnicodeEntry *entries = DK_UNICODE_ENTRIES(builtins_keys); - PyObject *res_o = entries[index].me_value; + PyObject *res_o = FT_ATOMIC_LOAD_PTR_RELAXED(entries[index].me_value); DEAD(builtins_keys); SYNC_SP(); DEOPT_IF(res_o == NULL); + #if Py_GIL_DISABLED + int increfed = _Py_TryIncrefCompareStackRef(&entries[index].me_value, res_o, &res); + DEOPT_IF(!increfed); + #else Py_INCREF(res_o); + res = PyStackRef_FromPyObjectSteal(res_o); + #endif STAT_INC(LOAD_GLOBAL, hit); null = PyStackRef_NULL; - res = PyStackRef_FromPyObjectSteal(res_o); } macro(LOAD_GLOBAL_MODULE) = @@ -2340,10 +2368,6 @@ dummy_func( DEOPT_IF(ep->me_key != name); PyObject *old_value = ep->me_value; DEOPT_IF(old_value == NULL); - /* Ensure dict is GC tracked if it needs to be */ - if (!_PyObject_GC_IS_TRACKED(dict) && _PyObject_GC_MAY_BE_TRACKED(PyStackRef_AsPyObjectBorrow(value))) { - _PyObject_GC_TRACK(dict); - } _PyDict_NotifyEvent(tstate->interp, PyDict_EVENT_MODIFIED, dict, name, PyStackRef_AsPyObjectBorrow(value)); ep->me_value = PyStackRef_AsPyObjectSteal(value); // old_value should be DECREFed after GC track checking is done, if not, it could raise a segmentation fault, @@ -2481,13 +2505,7 @@ dummy_func( } inst(IS_OP, (left, right -- b)) { -#ifdef Py_GIL_DISABLED - // On free-threaded builds, objects are conditionally immortalized. - // So their bits don't always compare equally. int res = Py_Is(PyStackRef_AsPyObjectBorrow(left), PyStackRef_AsPyObjectBorrow(right)) ^ oparg; -#else - int res = PyStackRef_Is(left, right) ^ oparg; -#endif DECREF_INPUTS(); b = res ? 
PyStackRef_True : PyStackRef_False; } @@ -2516,7 +2534,7 @@ dummy_func( } OPCODE_DEFERRED_INC(CONTAINS_OP); ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter); - #endif /* ENABLE_SPECIALIZATION */ + #endif /* ENABLE_SPECIALIZATION_FT */ } macro(CONTAINS_OP) = _SPECIALIZE_CONTAINS_OP + _CONTAINS_OP; @@ -2624,15 +2642,16 @@ dummy_func( } _PyExecutorObject *executor; int optimized = _PyOptimizer_Optimize(frame, start, stack_pointer, &executor, 0); - ERROR_IF(optimized < 0, error); - if (optimized) { + if (optimized <= 0) { + this_instr[1].counter = restart_backoff_counter(counter); + ERROR_IF(optimized < 0, error); + } + else { + this_instr[1].counter = initial_jump_backoff_counter(); assert(tstate->previous_executor == NULL); tstate->previous_executor = Py_None; GOTO_TIER_TWO(executor); } - else { - this_instr[1].counter = restart_backoff_counter(counter); - } } else { ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter); @@ -2693,7 +2712,7 @@ dummy_func( replaced op(_POP_JUMP_IF_FALSE, (cond -- )) { assert(PyStackRef_BoolCheck(cond)); - int flag = PyStackRef_Is(cond, PyStackRef_False); + int flag = PyStackRef_IsFalse(cond); DEAD(cond); RECORD_BRANCH_TAKEN(this_instr[1].cache, flag); JUMPBY(oparg * flag); @@ -2701,14 +2720,14 @@ dummy_func( replaced op(_POP_JUMP_IF_TRUE, (cond -- )) { assert(PyStackRef_BoolCheck(cond)); - int flag = PyStackRef_Is(cond, PyStackRef_True); + int flag = PyStackRef_IsTrue(cond); DEAD(cond); RECORD_BRANCH_TAKEN(this_instr[1].cache, flag); JUMPBY(oparg * flag); } op(_IS_NONE, (value -- b)) { - if (PyStackRef_Is(value, PyStackRef_None)) { + if (PyStackRef_IsNone(value)) { b = PyStackRef_True; DEAD(value); } @@ -3752,7 +3771,7 @@ dummy_func( inst(EXIT_INIT_CHECK, (should_be_none -- )) { assert(STACK_LEVEL() == 2); - if (!PyStackRef_Is(should_be_none, PyStackRef_None)) { + if (!PyStackRef_IsNone(should_be_none)) { PyErr_Format(PyExc_TypeError, "__init__() should return None, not '%.200s'", Py_TYPE(PyStackRef_AsPyObjectBorrow(should_be_none))->tp_name); @@ -4712,7 +4731,7 @@ dummy_func( inst(INSTRUMENTED_POP_JUMP_IF_TRUE, (unused/1 -- )) { _PyStackRef cond = POP(); assert(PyStackRef_BoolCheck(cond)); - int flag = PyStackRef_Is(cond, PyStackRef_True); + int flag = PyStackRef_IsTrue(cond); int offset = flag * oparg; RECORD_BRANCH_TAKEN(this_instr[1].cache, flag); INSTRUMENTED_JUMP(this_instr, next_instr + offset, PY_MONITORING_EVENT_BRANCH); @@ -4721,7 +4740,7 @@ dummy_func( inst(INSTRUMENTED_POP_JUMP_IF_FALSE, (unused/1 -- )) { _PyStackRef cond = POP(); assert(PyStackRef_BoolCheck(cond)); - int flag = PyStackRef_Is(cond, PyStackRef_False); + int flag = PyStackRef_IsFalse(cond); int offset = flag * oparg; RECORD_BRANCH_TAKEN(this_instr[1].cache, flag); INSTRUMENTED_JUMP(this_instr, next_instr + offset, PY_MONITORING_EVENT_BRANCH); @@ -4729,7 +4748,7 @@ dummy_func( inst(INSTRUMENTED_POP_JUMP_IF_NONE, (unused/1 -- )) { _PyStackRef value_stackref = POP(); - int flag = PyStackRef_Is(value_stackref, PyStackRef_None); + int flag = PyStackRef_IsNone(value_stackref); int offset; if (flag) { offset = oparg; @@ -4745,7 +4764,7 @@ dummy_func( inst(INSTRUMENTED_POP_JUMP_IF_NOT_NONE, (unused/1 -- )) { _PyStackRef value_stackref = POP(); int offset; - int nflag = PyStackRef_Is(value_stackref, PyStackRef_None); + int nflag = PyStackRef_IsNone(value_stackref); if (nflag) { offset = 0; } @@ -4780,21 +4799,21 @@ dummy_func( ///////// Tier-2 only opcodes ///////// op (_GUARD_IS_TRUE_POP, (flag -- )) { - int is_true = PyStackRef_Is(flag, PyStackRef_True); + int is_true = PyStackRef_IsTrue(flag); 
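/*
 * Several of the free-threaded fixes in this file (STORE_SUBSCR_LIST_INT,
 * UNPACK_SEQUENCE_LIST) keep a list's size and items consistent while they are
 * read or written. Outside the interpreter loop the same effect can be had
 * with the public critical-section API (3.13+), whose calls are no-ops on
 * non-free-threaded builds; a minimal sketch, assuming `list` is a list object.
 */
#include <Python.h>

static int
list_has_expected_size(PyObject *list, Py_ssize_t expected)
{
    int ok;
    PyCriticalSection cs;
    PyCriticalSection_Begin(&cs, list);
    ok = (PyList_GET_SIZE(list) == expected);
    /* ... copy items out here while the section is still held ... */
    PyCriticalSection_End(&cs);
    return ok;
}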
DEAD(flag); SYNC_SP(); EXIT_IF(!is_true); } op (_GUARD_IS_FALSE_POP, (flag -- )) { - int is_false = PyStackRef_Is(flag, PyStackRef_False); + int is_false = PyStackRef_IsFalse(flag); DEAD(flag); SYNC_SP(); EXIT_IF(!is_false); } op (_GUARD_IS_NONE_POP, (val -- )) { - int is_none = PyStackRef_Is(val, PyStackRef_None); + int is_none = PyStackRef_IsNone(val); if (!is_none) { PyStackRef_CLOSE(val); SYNC_SP(); @@ -4804,7 +4823,7 @@ dummy_func( } op (_GUARD_IS_NOT_NONE_POP, (val -- )) { - int is_none = PyStackRef_Is(val, PyStackRef_None); + int is_none = PyStackRef_IsNone(val); PyStackRef_CLOSE(val); SYNC_SP(); EXIT_IF(is_none); @@ -4875,6 +4894,9 @@ dummy_func( tstate->previous_executor = (PyObject *)current_executor; GOTO_TIER_ONE(target); } + else { + exit->temperature = initial_temperature_backoff_counter(); + } } exit->executor = executor; } diff --git a/Python/ceval.c b/Python/ceval.c index 9a608f06966688..eba0f233a81ef3 100644 --- a/Python/ceval.c +++ b/Python/ceval.c @@ -99,11 +99,7 @@ } \ _Py_DECREF_STAT_INC(); \ if (--op->ob_refcnt == 0) { \ - struct _reftracer_runtime_state *tracer = &_PyRuntime.ref_tracer; \ - if (tracer->tracer_func != NULL) { \ - void* data = tracer->tracer_data; \ - tracer->tracer_func(op, PyRefTracer_DESTROY, data); \ - } \ + _PyReftracerTrack(op, PyRefTracer_DESTROY); \ destructor d = (destructor)(dealloc); \ d(op); \ } \ @@ -300,11 +296,12 @@ Py_SetRecursionLimit(int new_limit) { PyInterpreterState *interp = _PyInterpreterState_GET(); interp->ceval.recursion_limit = new_limit; - for (PyThreadState *p = interp->threads.head; p != NULL; p = p->next) { + _Py_FOR_EACH_TSTATE_BEGIN(interp, p) { int depth = p->py_recursion_limit - p->py_recursion_remaining; p->py_recursion_limit = new_limit; p->py_recursion_remaining = new_limit - depth; } + _Py_FOR_EACH_TSTATE_END(interp); } /* The function _Py_EnterRecursiveCallTstate() only calls _Py_CheckRecursiveCall() @@ -1803,33 +1800,6 @@ _PyEvalFramePushAndInit(PyThreadState *tstate, _PyStackRef func, return NULL; } -static _PyInterpreterFrame * -_PyEvalFramePushAndInit_UnTagged(PyThreadState *tstate, _PyStackRef func, - PyObject *locals, PyObject *const* args, - size_t argcount, PyObject *kwnames, _PyInterpreterFrame *previous) -{ -#if defined(Py_GIL_DISABLED) - size_t kw_count = kwnames == NULL ? 0 : PyTuple_GET_SIZE(kwnames); - size_t total_argcount = argcount + kw_count; - _PyStackRef *tagged_args_buffer = PyMem_Malloc(sizeof(_PyStackRef) * total_argcount); - if (tagged_args_buffer == NULL) { - PyErr_NoMemory(); - return NULL; - } - for (size_t i = 0; i < argcount; i++) { - tagged_args_buffer[i] = PyStackRef_FromPyObjectSteal(args[i]); - } - for (size_t i = 0; i < kw_count; i++) { - tagged_args_buffer[argcount + i] = PyStackRef_FromPyObjectSteal(args[argcount + i]); - } - _PyInterpreterFrame *res = _PyEvalFramePushAndInit(tstate, func, locals, (_PyStackRef const *)tagged_args_buffer, argcount, kwnames, previous); - PyMem_Free(tagged_args_buffer); - return res; -#else - return _PyEvalFramePushAndInit(tstate, func, locals, (_PyStackRef const *)args, argcount, kwnames, previous); -#endif -} - /* Same as _PyEvalFramePushAndInit but takes an args tuple and kwargs dict. Steals references to func, callargs and kwargs. 
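/*
 * Worked example (made-up numbers) of the per-thread adjustment that
 * Py_SetRecursionLimit() now applies while walking the thread list: each
 * thread keeps its current call depth and only the ceiling moves.
 */
static void
recursion_limit_example(void)
{
    int old_limit = 1000, remaining = 970;  /* thread is 30 calls deep */
    int depth = old_limit - remaining;      /* 30 */
    int new_limit = 2000;
    remaining = new_limit - depth;          /* 1970: same depth, higher ceiling */
    (void)remaining;
}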
*/ @@ -1854,9 +1824,9 @@ _PyEvalFramePushAndInit_Ex(PyThreadState *tstate, _PyStackRef func, Py_INCREF(PyTuple_GET_ITEM(callargs, i)); } } - _PyInterpreterFrame *new_frame = _PyEvalFramePushAndInit_UnTagged( + _PyInterpreterFrame *new_frame = _PyEvalFramePushAndInit( tstate, func, locals, - newargs, nargs, kwnames, previous + (_PyStackRef const *)newargs, nargs, kwnames, previous ); if (has_dict) { _PyStack_UnpackDict_FreeNoDecRef(newargs, kwnames); @@ -1891,9 +1861,9 @@ _PyEval_Vector(PyThreadState *tstate, PyFunctionObject *func, Py_INCREF(args[i+argcount]); } } - _PyInterpreterFrame *frame = _PyEvalFramePushAndInit_UnTagged( + _PyInterpreterFrame *frame = _PyEvalFramePushAndInit( tstate, PyStackRef_FromPyObjectNew(func), locals, - args, argcount, kwnames, NULL); + (_PyStackRef const *)args, argcount, kwnames, NULL); if (frame == NULL) { return NULL; } diff --git a/Python/ceval_gil.c b/Python/ceval_gil.c index 4c9f59f837e11b..1f811e72406130 100644 --- a/Python/ceval_gil.c +++ b/Python/ceval_gil.c @@ -977,25 +977,19 @@ make_pending_calls(PyThreadState *tstate) void _Py_set_eval_breaker_bit_all(PyInterpreterState *interp, uintptr_t bit) { - _PyRuntimeState *runtime = &_PyRuntime; - - HEAD_LOCK(runtime); - for (PyThreadState *tstate = interp->threads.head; tstate != NULL; tstate = tstate->next) { + _Py_FOR_EACH_TSTATE_BEGIN(interp, tstate) { _Py_set_eval_breaker_bit(tstate, bit); } - HEAD_UNLOCK(runtime); + _Py_FOR_EACH_TSTATE_END(interp); } void _Py_unset_eval_breaker_bit_all(PyInterpreterState *interp, uintptr_t bit) { - _PyRuntimeState *runtime = &_PyRuntime; - - HEAD_LOCK(runtime); - for (PyThreadState *tstate = interp->threads.head; tstate != NULL; tstate = tstate->next) { + _Py_FOR_EACH_TSTATE_BEGIN(interp, tstate) { _Py_unset_eval_breaker_bit(tstate, bit); } - HEAD_UNLOCK(runtime); + _Py_FOR_EACH_TSTATE_END(interp); } void diff --git a/Python/ceval_macros.h b/Python/ceval_macros.h index 603b71ea938cde..9250b86e42ced1 100644 --- a/Python/ceval_macros.h +++ b/Python/ceval_macros.h @@ -284,6 +284,29 @@ GETITEM(PyObject *v, Py_ssize_t i) { } +// Try to lock an object in the free threading build, if it's not already +// locked. Use with a DEOPT_IF() to deopt if the object is already locked. +// These are no-ops in the default GIL build. The general pattern is: +// +// DEOPT_IF(!LOCK_OBJECT(op)); +// if (/* condition fails */) { +// UNLOCK_OBJECT(op); +// DEOPT_IF(true); +// } +// ... +// UNLOCK_OBJECT(op); +// +// NOTE: The object must be unlocked on every exit code path and you should +// avoid any potentially escaping calls (like PyStackRef_CLOSE) while the +// object is locked. 
+#ifdef Py_GIL_DISABLED +# define LOCK_OBJECT(op) PyMutex_LockFast(&(_PyObject_CAST(op))->ob_mutex._bits) +# define UNLOCK_OBJECT(op) PyMutex_Unlock(&(_PyObject_CAST(op))->ob_mutex) +#else +# define LOCK_OBJECT(op) (1) +# define UNLOCK_OBJECT(op) ((void)0) +#endif + #define GLOBALS() frame->f_globals #define BUILTINS() frame->f_builtins #define LOCALS() frame->f_locals diff --git a/Python/codegen.c b/Python/codegen.c index bce3b94b27a45d..a5e550cf8c947e 100644 --- a/Python/codegen.c +++ b/Python/codegen.c @@ -24,6 +24,7 @@ #include "pycore_instruction_sequence.h" // _PyInstructionSequence_NewLabel() #include "pycore_intrinsics.h" #include "pycore_long.h" // _PyLong_GetZero() +#include "pycore_object.h" // _Py_ANNOTATE_FORMAT_VALUE_WITH_FAKE_GLOBALS #include "pycore_pystate.h" // _Py_GetConfig() #include "pycore_symtable.h" // PySTEntryObject @@ -672,14 +673,16 @@ codegen_setup_annotations_scope(compiler *c, location loc, codegen_enter_scope(c, name, COMPILE_SCOPE_ANNOTATIONS, key, loc.lineno, NULL, &umd)); + // if .format > VALUE_WITH_FAKE_GLOBALS: raise NotImplementedError + PyObject *value_with_fake_globals = PyLong_FromLong(_Py_ANNOTATE_FORMAT_VALUE_WITH_FAKE_GLOBALS); assert(!SYMTABLE_ENTRY(c)->ste_has_docstring); - // if .format != 1: raise NotImplementedError _Py_DECLARE_STR(format, ".format"); ADDOP_I(c, loc, LOAD_FAST, 0); - ADDOP_LOAD_CONST(c, loc, _PyLong_GetOne()); - ADDOP_I(c, loc, COMPARE_OP, (Py_NE << 5) | compare_masks[Py_NE]); + ADDOP_LOAD_CONST(c, loc, value_with_fake_globals); + ADDOP_I(c, loc, COMPARE_OP, (Py_GT << 5) | compare_masks[Py_GT]); NEW_JUMP_TARGET_LABEL(c, body); ADDOP_JUMP(c, loc, POP_JUMP_IF_FALSE, body); + ADDOP_I(c, loc, LOAD_COMMON_CONSTANT, CONSTANT_NOTIMPLEMENTEDERROR); ADDOP_I(c, loc, RAISE_VARARGS, 1); USE_LABEL(c, body); @@ -2033,7 +2036,7 @@ codegen_async_for(compiler *c, stmt_ty s) ADDOP(c, loc, END_ASYNC_FOR); /* `else` block */ - VISIT_SEQ(c, stmt, s->v.For.orelse); + VISIT_SEQ(c, stmt, s->v.AsyncFor.orelse); USE_LABEL(c, end); return SUCCESS; diff --git a/Python/crossinterp.c b/Python/crossinterp.c index fe7d75f6b72f68..7aaa045f375cf0 100644 --- a/Python/crossinterp.c +++ b/Python/crossinterp.c @@ -983,8 +983,7 @@ _PyXI_ApplyErrorCode(_PyXI_errcode code, PyInterpreterState *interp) break; case _PyXI_ERR_ALREADY_RUNNING: assert(interp != NULL); - assert(_PyInterpreterState_IsRunningMain(interp)); - _PyInterpreterState_FailIfRunningMain(interp); + _PyErr_SetInterpreterAlreadyRunning(); break; case _PyXI_ERR_MAIN_NS_FAILURE: PyErr_SetString(PyExc_InterpreterError, diff --git a/Python/executor_cases.c.h b/Python/executor_cases.c.h index 2c2a09adf281a7..d46412a193332b 100644 --- a/Python/executor_cases.c.h +++ b/Python/executor_cases.c.h @@ -445,7 +445,7 @@ _PyStackRef res; value = stack_pointer[-1]; assert(PyStackRef_BoolCheck(value)); - res = PyStackRef_Is(value, PyStackRef_False) + res = PyStackRef_IsFalse(value) ? PyStackRef_True : PyStackRef_False; stack_pointer[-1] = res; break; @@ -508,7 +508,7 @@ JUMP_TO_JUMP_TARGET(); } STAT_INC(TO_BOOL, hit); - res = Py_SIZE(value_o) ? PyStackRef_True : PyStackRef_False; + res = PyList_GET_SIZE(value_o) ? 
PyStackRef_True : PyStackRef_False; PyStackRef_CLOSE(value); stack_pointer[-1] = res; break; @@ -519,7 +519,7 @@ _PyStackRef res; value = stack_pointer[-1]; // This one is a bit weird, because we expect *some* failures: - if (!PyStackRef_Is(value, PyStackRef_None)) { + if (!PyStackRef_IsNone(value)) { UOP_STAT_INC(uopcode, miss); JUMP_TO_JUMP_TARGET(); } @@ -822,9 +822,7 @@ _PyStackRef left; right = stack_pointer[-1]; left = stack_pointer[-2]; - #ifndef NDEBUG PyObject *left_o = PyStackRef_AsPyObjectBorrow(left); - #endif PyObject *right_o = PyStackRef_AsPyObjectBorrow(right); int next_oparg; #if TIER_ONE @@ -834,7 +832,7 @@ next_oparg = CURRENT_OPERAND0(); #endif _PyStackRef *target_local = &GETLOCAL(next_oparg); - if (!PyStackRef_Is(*target_local, left)) { + if (PyStackRef_AsPyObjectBorrow(*target_local) != left_o) { UOP_STAT_INC(uopcode, miss); JUMP_TO_JUMP_TARGET(); } @@ -1237,15 +1235,23 @@ JUMP_TO_JUMP_TARGET(); } Py_ssize_t index = ((PyLongObject*)sub)->long_value.ob_digit[0]; - // Ensure index < len(list) - if (index >= PyList_GET_SIZE(list)) { + if (!LOCK_OBJECT(list)) { UOP_STAT_INC(uopcode, miss); JUMP_TO_JUMP_TARGET(); } + // Ensure index < len(list) + if (index >= PyList_GET_SIZE(list)) { + UNLOCK_OBJECT(list); + if (true) { + UOP_STAT_INC(uopcode, miss); + JUMP_TO_JUMP_TARGET(); + } + } STAT_INC(STORE_SUBSCR, hit); PyObject *old_value = PyList_GET_ITEM(list, index); PyList_SET_ITEM(list, index, PyStackRef_AsPyObjectSteal(value)); assert(old_value != NULL); + UNLOCK_OBJECT(list); // unlock before decrefs! Py_DECREF(old_value); PyStackRef_CLOSE_SPECIALIZED(sub_st, (destructor)PyObject_Free); PyStackRef_CLOSE(list_st); @@ -1522,7 +1528,7 @@ _PyErr_StackItem *exc_info = tstate->exc_info; _PyFrame_SetStackPointer(frame, stack_pointer); Py_XSETREF(exc_info->exc_value, - PyStackRef_Is(exc_value, PyStackRef_None) + PyStackRef_IsNone(exc_value) ? 
NULL : PyStackRef_AsPyObjectSteal(exc_value)); stack_pointer = _PyFrame_GetStackPointer(frame); stack_pointer += -1; @@ -1711,15 +1717,33 @@ UOP_STAT_INC(uopcode, miss); JUMP_TO_JUMP_TARGET(); } + #ifdef Py_GIL_DISABLED + PyCriticalSection cs; + _PyFrame_SetStackPointer(frame, stack_pointer); + PyCriticalSection_Begin(&cs, seq_o); + stack_pointer = _PyFrame_GetStackPointer(frame); + #endif if (PyList_GET_SIZE(seq_o) != oparg) { - UOP_STAT_INC(uopcode, miss); - JUMP_TO_JUMP_TARGET(); + #ifdef Py_GIL_DISABLED + _PyFrame_SetStackPointer(frame, stack_pointer); + PyCriticalSection_End(&cs); + stack_pointer = _PyFrame_GetStackPointer(frame); + #endif + if (true) { + UOP_STAT_INC(uopcode, miss); + JUMP_TO_JUMP_TARGET(); + } } STAT_INC(UNPACK_SEQUENCE, hit); PyObject **items = _PyList_ITEMS(seq_o); for (int i = oparg; --i >= 0; ) { *values++ = PyStackRef_FromPyObjectNew(items[i]); } + #ifdef Py_GIL_DISABLED + _PyFrame_SetStackPointer(frame, stack_pointer); + PyCriticalSection_End(&cs); + stack_pointer = _PyFrame_GetStackPointer(frame); + #endif PyStackRef_CLOSE(seq); stack_pointer += -1 + oparg; assert(WITHIN_STACK_BOUNDS()); @@ -1870,11 +1894,12 @@ UOP_STAT_INC(uopcode, miss); JUMP_TO_JUMP_TARGET(); } - if (dict->ma_keys->dk_version != version) { + PyDictKeysObject *keys = FT_ATOMIC_LOAD_PTR_ACQUIRE(dict->ma_keys); + if (FT_ATOMIC_LOAD_UINT32_RELAXED(keys->dk_version) != version) { UOP_STAT_INC(uopcode, miss); JUMP_TO_JUMP_TARGET(); } - assert(DK_IS_UNICODE(dict->ma_keys)); + assert(DK_IS_UNICODE(keys)); break; } @@ -1886,11 +1911,12 @@ UOP_STAT_INC(uopcode, miss); JUMP_TO_JUMP_TARGET(); } - if (dict->ma_keys->dk_version != version) { + PyDictKeysObject *keys = FT_ATOMIC_LOAD_PTR_ACQUIRE(dict->ma_keys); + if (FT_ATOMIC_LOAD_UINT32_RELAXED(keys->dk_version) != version) { UOP_STAT_INC(uopcode, miss); JUMP_TO_JUMP_TARGET(); } - globals_keys = dict->ma_keys; + globals_keys = keys; assert(DK_IS_UNICODE(globals_keys)); stack_pointer[0].bits = (uintptr_t)globals_keys; stack_pointer += 1; @@ -1906,11 +1932,12 @@ UOP_STAT_INC(uopcode, miss); JUMP_TO_JUMP_TARGET(); } - if (dict->ma_keys->dk_version != version) { + PyDictKeysObject *keys = FT_ATOMIC_LOAD_PTR_ACQUIRE(dict->ma_keys); + if (FT_ATOMIC_LOAD_UINT32_RELAXED(keys->dk_version) != version) { UOP_STAT_INC(uopcode, miss); JUMP_TO_JUMP_TARGET(); } - builtins_keys = dict->ma_keys; + builtins_keys = keys; assert(DK_IS_UNICODE(builtins_keys)); stack_pointer[0].bits = (uintptr_t)builtins_keys; stack_pointer += 1; @@ -1926,17 +1953,25 @@ globals_keys = (PyDictKeysObject *)stack_pointer[-1].bits; uint16_t index = (uint16_t)CURRENT_OPERAND0(); PyDictUnicodeEntry *entries = DK_UNICODE_ENTRIES(globals_keys); - PyObject *res_o = entries[index].me_value; + PyObject *res_o = FT_ATOMIC_LOAD_PTR_RELAXED(entries[index].me_value); stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); if (res_o == NULL) { UOP_STAT_INC(uopcode, miss); JUMP_TO_JUMP_TARGET(); } + #if Py_GIL_DISABLED + int increfed = _Py_TryIncrefCompareStackRef(&entries[index].me_value, res_o, &res); + if (!increfed) { + UOP_STAT_INC(uopcode, miss); + JUMP_TO_JUMP_TARGET(); + } + #else Py_INCREF(res_o); + res = PyStackRef_FromPyObjectSteal(res_o); + #endif STAT_INC(LOAD_GLOBAL, hit); null = PyStackRef_NULL; - res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[0] = res; if (oparg & 1) stack_pointer[1] = null; stack_pointer += 1 + (oparg & 1); @@ -1952,17 +1987,25 @@ builtins_keys = (PyDictKeysObject *)stack_pointer[-1].bits; uint16_t index = (uint16_t)CURRENT_OPERAND0(); PyDictUnicodeEntry *entries = 
DK_UNICODE_ENTRIES(builtins_keys); - PyObject *res_o = entries[index].me_value; + PyObject *res_o = FT_ATOMIC_LOAD_PTR_RELAXED(entries[index].me_value); stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); if (res_o == NULL) { UOP_STAT_INC(uopcode, miss); JUMP_TO_JUMP_TARGET(); } + #if Py_GIL_DISABLED + int increfed = _Py_TryIncrefCompareStackRef(&entries[index].me_value, res_o, &res); + if (!increfed) { + UOP_STAT_INC(uopcode, miss); + JUMP_TO_JUMP_TARGET(); + } + #else Py_INCREF(res_o); + res = PyStackRef_FromPyObjectSteal(res_o); + #endif STAT_INC(LOAD_GLOBAL, hit); null = PyStackRef_NULL; - res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[0] = res; if (oparg & 1) stack_pointer[1] = null; stack_pointer += 1 + (oparg & 1); @@ -2914,10 +2957,6 @@ UOP_STAT_INC(uopcode, miss); JUMP_TO_JUMP_TARGET(); } - /* Ensure dict is GC tracked if it needs to be */ - if (!_PyObject_GC_IS_TRACKED(dict) && _PyObject_GC_MAY_BE_TRACKED(PyStackRef_AsPyObjectBorrow(value))) { - _PyObject_GC_TRACK(dict); - } _PyFrame_SetStackPointer(frame, stack_pointer); _PyDict_NotifyEvent(tstate->interp, PyDict_EVENT_MODIFIED, dict, name, PyStackRef_AsPyObjectBorrow(value)); stack_pointer = _PyFrame_GetStackPointer(frame); @@ -3077,13 +3116,7 @@ oparg = CURRENT_OPARG(); right = stack_pointer[-1]; left = stack_pointer[-2]; - #ifdef Py_GIL_DISABLED - // On free-threaded builds, objects are conditionally immortalized. - // So their bits don't always compare equally. int res = Py_Is(PyStackRef_AsPyObjectBorrow(left), PyStackRef_AsPyObjectBorrow(right)) ^ oparg; - #else - int res = PyStackRef_Is(left, right) ^ oparg; - #endif PyStackRef_CLOSE(left); PyStackRef_CLOSE(right); b = res ? PyStackRef_True : PyStackRef_False; @@ -3287,7 +3320,7 @@ _PyStackRef value; _PyStackRef b; value = stack_pointer[-1]; - if (PyStackRef_Is(value, PyStackRef_None)) { + if (PyStackRef_IsNone(value)) { b = PyStackRef_True; } else { @@ -4529,7 +4562,7 @@ _PyStackRef should_be_none; should_be_none = stack_pointer[-1]; assert(STACK_LEVEL() == 2); - if (!PyStackRef_Is(should_be_none, PyStackRef_None)) { + if (!PyStackRef_IsNone(should_be_none)) { _PyFrame_SetStackPointer(frame, stack_pointer); PyErr_Format(PyExc_TypeError, "__init__() should return None, not '%.200s'", @@ -5610,7 +5643,7 @@ case _GUARD_IS_TRUE_POP: { _PyStackRef flag; flag = stack_pointer[-1]; - int is_true = PyStackRef_Is(flag, PyStackRef_True); + int is_true = PyStackRef_IsTrue(flag); stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); if (!is_true) { @@ -5623,7 +5656,7 @@ case _GUARD_IS_FALSE_POP: { _PyStackRef flag; flag = stack_pointer[-1]; - int is_false = PyStackRef_Is(flag, PyStackRef_False); + int is_false = PyStackRef_IsFalse(flag); stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); if (!is_false) { @@ -5636,7 +5669,7 @@ case _GUARD_IS_NONE_POP: { _PyStackRef val; val = stack_pointer[-1]; - int is_none = PyStackRef_Is(val, PyStackRef_None); + int is_none = PyStackRef_IsNone(val); if (!is_none) { PyStackRef_CLOSE(val); stack_pointer += -1; @@ -5654,7 +5687,7 @@ case _GUARD_IS_NOT_NONE_POP: { _PyStackRef val; val = stack_pointer[-1]; - int is_none = PyStackRef_Is(val, PyStackRef_None); + int is_none = PyStackRef_IsNone(val); PyStackRef_CLOSE(val); stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); @@ -5750,6 +5783,9 @@ tstate->previous_executor = (PyObject *)current_executor; GOTO_TIER_ONE(target); } + else { + exit->temperature = initial_temperature_backoff_counter(); + } } exit->executor = executor; } diff --git a/Python/fileutils.c b/Python/fileutils.c index 
c9ae1b3f54e167..9529b14d377c60 100644 --- a/Python/fileutils.c +++ b/Python/fileutils.c @@ -2506,37 +2506,38 @@ _Py_normpath_and_size(wchar_t *path, Py_ssize_t size, Py_ssize_t *normsize) #endif #define SEP_OR_END(x) (IS_SEP(x) || IS_END(x)) - if (p1[0] == L'.' && IS_SEP(&p1[1])) { - // Skip leading '.\' - path = &path[2]; - while (IS_SEP(path)) { - path++; - } - p1 = p2 = minP2 = path; - lastC = SEP; - } - else { - Py_ssize_t drvsize, rootsize; - _Py_skiproot(path, size, &drvsize, &rootsize); - if (drvsize || rootsize) { - // Skip past root and update minP2 - p1 = &path[drvsize + rootsize]; + Py_ssize_t drvsize, rootsize; + _Py_skiproot(path, size, &drvsize, &rootsize); + if (drvsize || rootsize) { + // Skip past root and update minP2 + p1 = &path[drvsize + rootsize]; #ifndef ALTSEP - p2 = p1; + p2 = p1; #else - for (; p2 < p1; ++p2) { - if (*p2 == ALTSEP) { - *p2 = SEP; - } + for (; p2 < p1; ++p2) { + if (*p2 == ALTSEP) { + *p2 = SEP; } + } #endif - minP2 = p2 - 1; - lastC = *minP2; + minP2 = p2 - 1; + lastC = *minP2; #ifdef MS_WINDOWS - if (lastC != SEP) { - minP2++; - } + if (lastC != SEP) { + minP2++; + } +#endif + } + if (p1[0] == L'.' && SEP_OR_END(&p1[1])) { + // Skip leading '.\' + lastC = *++p1; +#ifdef ALTSEP + if (lastC == ALTSEP) { + lastC = SEP; + } #endif + while (IS_SEP(p1)) { + p1++; } } diff --git a/Python/flowgraph.c b/Python/flowgraph.c index 5418131950076d..b1097b64469ecd 100644 --- a/Python/flowgraph.c +++ b/Python/flowgraph.c @@ -733,7 +733,7 @@ make_cfg_traversal_stack(basicblock *entryblock) { return stack; } -/* Return the stack effect of opcode with argument oparg. +/* Compute the stack effects of opcode with argument oparg. Some opcodes have different stack effect when jump to the target and when not jump. The 'jump' parameter specifies the case: @@ -742,25 +742,42 @@ make_cfg_traversal_stack(basicblock *entryblock) { * 1 -- when jump * -1 -- maximal */ +typedef struct { + /* The stack effect of the instruction. */ + int net; + + /* The maximum stack usage of the instruction. Some instructions may + * temporarily push extra values to the stack while they are executing. + */ + int max; +} stack_effects; + Py_LOCAL(int) -stack_effect(int opcode, int oparg, int jump) +get_stack_effects(int opcode, int oparg, int jump, stack_effects *effects) { if (opcode < 0) { - return PY_INVALID_STACK_EFFECT; + return -1; } if ((opcode <= MAX_REAL_OPCODE) && (_PyOpcode_Deopt[opcode] != opcode)) { // Specialized instructions are not supported. 
- return PY_INVALID_STACK_EFFECT; + return -1; } int popped = _PyOpcode_num_popped(opcode, oparg); int pushed = _PyOpcode_num_pushed(opcode, oparg); if (popped < 0 || pushed < 0) { - return PY_INVALID_STACK_EFFECT; + return -1; } if (IS_BLOCK_PUSH_OPCODE(opcode) && !jump) { + effects->net = 0; + effects->max = 0; return 0; } - return pushed - popped; + if (_PyOpcode_max_stack_effect(opcode, oparg, &effects->max) < 0) { + return -1; + } + effects->net = pushed - popped; + assert(effects->max >= effects->net); + return 0; } Py_LOCAL_INLINE(int) @@ -807,35 +824,30 @@ calculate_stackdepth(cfg_builder *g) basicblock *next = b->b_next; for (int i = 0; i < b->b_iused; i++) { cfg_instr *instr = &b->b_instr[i]; - int effect = stack_effect(instr->i_opcode, instr->i_oparg, 0); - if (effect == PY_INVALID_STACK_EFFECT) { + stack_effects effects; + if (get_stack_effects(instr->i_opcode, instr->i_oparg, 0, &effects) < 0) { PyErr_Format(PyExc_SystemError, "Invalid stack effect for opcode=%d, arg=%i", instr->i_opcode, instr->i_oparg); goto error; } - int new_depth = depth + effect; + int new_depth = depth + effects.net; if (new_depth < 0) { - PyErr_Format(PyExc_ValueError, - "Invalid CFG, stack underflow"); - goto error; - } - if (new_depth > maxdepth) { - maxdepth = new_depth; + PyErr_Format(PyExc_ValueError, + "Invalid CFG, stack underflow"); + goto error; } + maxdepth = Py_MAX(maxdepth, depth + effects.max); if (HAS_TARGET(instr->i_opcode)) { - effect = stack_effect(instr->i_opcode, instr->i_oparg, 1); - if (effect == PY_INVALID_STACK_EFFECT) { + if (get_stack_effects(instr->i_opcode, instr->i_oparg, 1, &effects) < 0) { PyErr_Format(PyExc_SystemError, "Invalid stack effect for opcode=%d, arg=%i", instr->i_opcode, instr->i_oparg); goto error; } - int target_depth = depth + effect; + int target_depth = depth + effects.net; assert(target_depth >= 0); /* invalid code or bug in stackdepth() */ - if (target_depth > maxdepth) { - maxdepth = target_depth; - } + maxdepth = Py_MAX(maxdepth, depth + effects.max); if (stackdepth_push(&sp, instr->i_target, target_depth) < 0) { goto error; } @@ -2936,13 +2948,21 @@ _PyCfg_JumpLabelsToTargets(cfg_builder *g) int PyCompile_OpcodeStackEffectWithJump(int opcode, int oparg, int jump) { - return stack_effect(opcode, oparg, jump); + stack_effects effs; + if (get_stack_effects(opcode, oparg, jump, &effs) < 0) { + return PY_INVALID_STACK_EFFECT; + } + return effs.net; } int PyCompile_OpcodeStackEffect(int opcode, int oparg) { - return stack_effect(opcode, oparg, -1); + stack_effects effs; + if (get_stack_effects(opcode, oparg, -1, &effs) < 0) { + return PY_INVALID_STACK_EFFECT; + } + return effs.net; } /* Access to compiler optimizations for unit tests. 
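The flowgraph change above separates an instruction's net stack effect from its peak transient stack use, and calculate_stackdepth() now advances maxdepth with the peak value rather than the net one. A minimal standalone sketch of that bookkeeping, using toy instruction data rather than CPython's opcode tables:

#include <stdio.h>

typedef struct {
    int net;   /* pushed - popped once the instruction has finished     */
    int max;   /* peak extra stack use while the instruction executes   */
} stack_effects;

int main(void)
{
    /* Hypothetical instruction stream: (net, max) per instruction. */
    stack_effects code[] = {
        { 1, 1},   /* push a value                                       */
        { 1, 1},   /* push another value                                 */
        {-1, 1},   /* hypothetical op: pushes a temp before popping two  */
        {-1, 0},   /* store/pop                                          */
    };
    int depth = 0, maxdepth = 0;
    for (size_t i = 0; i < sizeof(code) / sizeof(code[0]); i++) {
        /* Peak usage is reached *during* the instruction ...            */
        int peak = depth + code[i].max;
        if (peak > maxdepth) {
            maxdepth = peak;
        }
        /* ... while the running depth only advances by the net effect.  */
        depth += code[i].net;
        if (depth < 0) {
            fprintf(stderr, "stack underflow at instruction %zu\n", i);
            return 1;
        }
    }
    printf("final depth %d, required stack %d\n", depth, maxdepth);
    return 0;
}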
diff --git a/Python/frozen.c b/Python/frozen.c index 627f2ff9413562..15d256b6743e0a 100644 --- a/Python/frozen.c +++ b/Python/frozen.c @@ -84,7 +84,6 @@ static const struct _frozen stdlib_modules[] = { {"genericpath", _Py_M__genericpath, (int)sizeof(_Py_M__genericpath), false}, {"ntpath", _Py_M__ntpath, (int)sizeof(_Py_M__ntpath), false}, {"posixpath", _Py_M__posixpath, (int)sizeof(_Py_M__posixpath), false}, - {"os.path", _Py_M__posixpath, (int)sizeof(_Py_M__posixpath), false}, {"os", _Py_M__os, (int)sizeof(_Py_M__os), false}, {"site", _Py_M__site, (int)sizeof(_Py_M__site), false}, {"stat", _Py_M__stat, (int)sizeof(_Py_M__stat), false}, @@ -116,7 +115,6 @@ const struct _frozen *_PyImport_FrozenTest = test_modules; static const struct _module_alias aliases[] = { {"_frozen_importlib", "importlib._bootstrap"}, {"_frozen_importlib_external", "importlib._bootstrap_external"}, - {"os.path", "posixpath"}, {"__hello_alias__", "__hello__"}, {"__phello_alias__", "__hello__"}, {"__phello_alias__.spam", "__hello__"}, diff --git a/Python/gc.c b/Python/gc.c index fe81ca5989c621..63adecf0e05114 100644 --- a/Python/gc.c +++ b/Python/gc.c @@ -5,7 +5,7 @@ #include "Python.h" #include "pycore_ceval.h" // _Py_set_eval_breaker_bit() #include "pycore_context.h" -#include "pycore_dict.h" // _PyDict_MaybeUntrack() +#include "pycore_dict.h" // _PyInlineValuesSize() #include "pycore_initconfig.h" #include "pycore_interp.h" // PyInterpreterState.gc #include "pycore_object.h" @@ -747,21 +747,6 @@ untrack_tuples(PyGC_Head *head) } } -/* Try to untrack all currently tracked dictionaries */ -static void -untrack_dicts(PyGC_Head *head) -{ - PyGC_Head *next, *gc = GC_NEXT(head); - while (gc != head) { - PyObject *op = FROM_GC(gc); - next = GC_NEXT(gc); - if (PyDict_CheckExact(op)) { - _PyDict_MaybeUntrack(op); - } - gc = next; - } -} - /* Return true if object has a pre-PEP 442 finalization method. 
*/ static int has_legacy_finalizer(PyObject *op) @@ -1258,15 +1243,10 @@ handle_resurrected_objects(PyGC_Head *unreachable, PyGC_Head* still_unreachable, gc_list_merge(resurrected, old_generation); } - -#define UNTRACK_TUPLES 1 -#define UNTRACK_DICTS 2 - static void gc_collect_region(PyThreadState *tstate, PyGC_Head *from, PyGC_Head *to, - int untrack, struct gc_collection_stats *stats); static inline Py_ssize_t @@ -1328,7 +1308,7 @@ gc_collect_young(PyThreadState *tstate, PyGC_Head survivors; gc_list_init(&survivors); - gc_collect_region(tstate, young, &survivors, UNTRACK_TUPLES, stats); + gc_collect_region(tstate, young, &survivors, stats); Py_ssize_t survivor_count = 0; if (gcstate->visited_space) { /* objects in visited space have bit set, so we set it here */ @@ -1394,7 +1374,6 @@ visit_add_to_container(PyObject *op, void *arg) static uintptr_t expand_region_transitively_reachable(PyGC_Head *container, PyGC_Head *gc, GCState *gcstate) { - validate_list(container, collecting_clear_unreachable_clear); struct container_and_flag arg = { .container = container, .visited_space = gcstate->visited_space, @@ -1468,10 +1447,11 @@ gc_collect_increment(PyThreadState *tstate, struct gc_collection_stats *stats) gc_set_old_space(gc, gcstate->visited_space); increment_size += expand_region_transitively_reachable(&increment, gc, gcstate); } + validate_list(&increment, collecting_clear_unreachable_clear); gc_list_validate_space(&increment, gcstate->visited_space); PyGC_Head survivors; gc_list_init(&survivors); - gc_collect_region(tstate, &increment, &survivors, UNTRACK_TUPLES, stats); + gc_collect_region(tstate, &increment, &survivors, stats); gc_list_validate_space(&survivors, gcstate->visited_space); gc_list_merge(&survivors, visited); assert(gc_list_is_empty(&increment)); @@ -1504,7 +1484,6 @@ gc_collect_full(PyThreadState *tstate, gc_list_merge(pending, visited); gc_collect_region(tstate, visited, visited, - UNTRACK_TUPLES | UNTRACK_DICTS, stats); gcstate->young.count = 0; gcstate->old[0].count = 0; @@ -1522,7 +1501,6 @@ static void gc_collect_region(PyThreadState *tstate, PyGC_Head *from, PyGC_Head *to, - int untrack, struct gc_collection_stats *stats) { PyGC_Head unreachable; /* non-problematic unreachable trash */ @@ -1536,12 +1514,7 @@ gc_collect_region(PyThreadState *tstate, gc_list_init(&unreachable); deduce_unreachable(from, &unreachable); validate_consistent_old_space(from); - if (untrack & UNTRACK_TUPLES) { - untrack_tuples(from); - } - if (untrack & UNTRACK_DICTS) { - untrack_dicts(from); - } + untrack_tuples(from); validate_consistent_old_space(to); if (from != to) { gc_list_merge(from, to); diff --git a/Python/gc_free_threading.c b/Python/gc_free_threading.c index 499ee51fdb2cd4..f7f44407494e51 100644 --- a/Python/gc_free_threading.c +++ b/Python/gc_free_threading.c @@ -3,7 +3,7 @@ #include "pycore_brc.h" // struct _brc_thread_state #include "pycore_ceval.h" // _Py_set_eval_breaker_bit() #include "pycore_context.h" -#include "pycore_dict.h" // _PyDict_MaybeUntrack() +#include "pycore_dict.h" // _PyInlineValuesSize() #include "pycore_freelist.h" // _PyObject_ClearFreeLists() #include "pycore_initconfig.h" #include "pycore_interp.h" // PyInterpreterState.gc @@ -304,7 +304,7 @@ gc_visit_heaps_lock_held(PyInterpreterState *interp, mi_block_visit_fun *visitor Py_ssize_t offset_pre = offset_base + 2 * sizeof(PyObject*); // visit each thread's heaps for GC objects - for (PyThreadState *p = interp->threads.head; p != NULL; p = p->next) { + _Py_FOR_EACH_TSTATE_UNLOCKED(interp, p) { struct 
_mimalloc_thread_state *m = &((_PyThreadStateImpl *)p)->mimalloc; if (!_Py_atomic_load_int(&m->initialized)) { // The thread may not have called tstate_mimalloc_bind() yet. @@ -374,8 +374,7 @@ gc_visit_stackref(_PyStackRef stackref) static void gc_visit_thread_stacks(PyInterpreterState *interp) { - HEAD_LOCK(&_PyRuntime); - for (PyThreadState *p = interp->threads.head; p != NULL; p = p->next) { + _Py_FOR_EACH_TSTATE_BEGIN(interp, p) { for (_PyInterpreterFrame *f = p->current_frame; f != NULL; f = f->previous) { PyObject *executable = PyStackRef_AsPyObjectBorrow(f->f_executable); if (executable == NULL || !PyCode_Check(executable)) { @@ -390,7 +389,24 @@ gc_visit_thread_stacks(PyInterpreterState *interp) } } } - HEAD_UNLOCK(&_PyRuntime); + _Py_FOR_EACH_TSTATE_END(interp); +} + +static void +queue_untracked_obj_decref(PyObject *op, struct collection_state *state) +{ + if (!_PyObject_GC_IS_TRACKED(op)) { + // GC objects with zero refcount are handled subsequently by the + // GC as if they were cyclic trash, but we have to handle dead + // non-GC objects here. Add one to the refcount so that we can + // decref and deallocate the object once we start the world again. + op->ob_ref_shared += (1 << _Py_REF_SHARED_SHIFT); +#ifdef Py_REF_DEBUG + _Py_IncRefTotal(_PyThreadState_GET()); +#endif + worklist_push(&state->objs_to_decref, op); + } + } static void @@ -404,22 +420,20 @@ merge_queued_objects(_PyThreadStateImpl *tstate, struct collection_state *state) // Subtract one when merging because the queue had a reference. Py_ssize_t refcount = merge_refcount(op, -1); - if (!_PyObject_GC_IS_TRACKED(op) && refcount == 0) { - // GC objects with zero refcount are handled subsequently by the - // GC as if they were cyclic trash, but we have to handle dead - // non-GC objects here. Add one to the refcount so that we can - // decref and deallocate the object once we start the world again. - op->ob_ref_shared += (1 << _Py_REF_SHARED_SHIFT); -#ifdef Py_REF_DEBUG - _Py_IncRefTotal(_PyThreadState_GET()); -#endif - worklist_push(&state->objs_to_decref, op); + if (refcount == 0) { + queue_untracked_obj_decref(op, state); } } } static void -process_delayed_frees(PyInterpreterState *interp) +queue_freed_object(PyObject *obj, void *arg) +{ + queue_untracked_obj_decref(obj, arg); +} + +static void +process_delayed_frees(PyInterpreterState *interp, struct collection_state *state) { // While we are in a "stop the world" pause, we can observe the latest // write sequence by advancing the write sequence immediately. @@ -429,16 +443,15 @@ process_delayed_frees(PyInterpreterState *interp) // Merge the queues from other threads into our own queue so that we can // process all of the pending delayed free requests at once. - HEAD_LOCK(&_PyRuntime); - for (PyThreadState *p = interp->threads.head; p != NULL; p = p->next) { + _Py_FOR_EACH_TSTATE_BEGIN(interp, p) { _PyThreadStateImpl *other = (_PyThreadStateImpl *)p; if (other != current_tstate) { llist_concat(¤t_tstate->mem_free_queue, &other->mem_free_queue); } } - HEAD_UNLOCK(&_PyRuntime); + _Py_FOR_EACH_TSTATE_END(interp); - _PyMem_ProcessDelayed((PyThreadState *)current_tstate); + _PyMem_ProcessDelayedNoDealloc((PyThreadState *)current_tstate, queue_freed_object, state); } // Subtract an incoming reference from the computed "gc_refs" refcount. 
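The free-threaded GC hunk above queues dead untracked objects for a deferred decref instead of deallocating them while every thread is paused: the object gets one reference back and is pushed on a worklist that is drained after the world restarts. A self-contained sketch of that pattern, with an illustrative intrusive worklist rather than the real collection_state machinery:

#include <stdio.h>
#include <stdlib.h>

typedef struct obj {
    int refcount;
    struct obj *next_in_worklist;   /* intrusive worklist link */
} obj;

typedef struct {
    obj *head;
} worklist;

static void
queue_decref(worklist *wl, obj *op)
{
    /* Keep the object alive until it is safe to run destructors. */
    op->refcount += 1;
    op->next_in_worklist = wl->head;
    wl->head = op;
}

static void
drain_worklist(worklist *wl)
{
    /* Called once the paused threads have been restarted. */
    while (wl->head != NULL) {
        obj *op = wl->head;
        wl->head = op->next_in_worklist;
        if (--op->refcount == 0) {
            free(op);
        }
    }
}

int main(void)
{
    worklist wl = { NULL };
    obj *dead = calloc(1, sizeof(obj));   /* refcount 0: already "dead" */
    queue_decref(&wl, dead);              /* world is stopped here      */
    drain_worklist(&wl);                  /* world restarted: frees it  */
    puts("drained");
    return 0;
}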
@@ -493,13 +506,6 @@ update_refs(const mi_heap_t *heap, const mi_heap_area_t *area, return true; } } - else if (PyDict_CheckExact(op)) { - _PyDict_MaybeUntrack(op); - if (!_PyObject_GC_IS_TRACKED(op)) { - gc_restore_refs(op); - return true; - } - } } // We repurpose ob_tid to compute "gc_refs", the number of external @@ -1226,8 +1232,7 @@ gc_collect_internal(PyInterpreterState *interp, struct collection_state *state, state->gcstate->old[i-1].count = 0; } - HEAD_LOCK(&_PyRuntime); - for (PyThreadState *p = interp->threads.head; p != NULL; p = p->next) { + _Py_FOR_EACH_TSTATE_BEGIN(interp, p) { _PyThreadStateImpl *tstate = (_PyThreadStateImpl *)p; // merge per-thread refcount for types into the type's actual refcount @@ -1236,9 +1241,9 @@ gc_collect_internal(PyInterpreterState *interp, struct collection_state *state, // merge refcounts for all queued objects merge_queued_objects(tstate, state); } - HEAD_UNLOCK(&_PyRuntime); + _Py_FOR_EACH_TSTATE_END(interp); - process_delayed_frees(interp); + process_delayed_frees(interp, state); // Find unreachable objects int err = deduce_unreachable_heap(interp, state); @@ -1917,13 +1922,7 @@ PyObject_GC_Del(void *op) } record_deallocation(_PyThreadState_GET()); - PyObject *self = (PyObject *)op; - if (_PyObject_GC_IS_SHARED_INLINE(self)) { - _PyObject_FreeDelayed(((char *)op)-presize); - } - else { - PyObject_Free(((char *)op)-presize); - } + PyObject_Free(((char *)op)-presize); } int @@ -1991,13 +1990,11 @@ PyUnstable_GC_VisitObjects(gcvisitobjects_t callback, void *arg) void _PyGC_ClearAllFreeLists(PyInterpreterState *interp) { - HEAD_LOCK(&_PyRuntime); - _PyThreadStateImpl *tstate = (_PyThreadStateImpl *)interp->threads.head; - while (tstate != NULL) { + _Py_FOR_EACH_TSTATE_BEGIN(interp, p) { + _PyThreadStateImpl *tstate = (_PyThreadStateImpl *)p; _PyObject_ClearFreeLists(&tstate->freelists, 0); - tstate = (_PyThreadStateImpl *)tstate->base.next; } - HEAD_UNLOCK(&_PyRuntime); + _Py_FOR_EACH_TSTATE_END(interp); } #endif // Py_GIL_DISABLED diff --git a/Python/generated_cases.c.h b/Python/generated_cases.c.h index 15308d6f1f7146..c9a5132269398c 100644 --- a/Python/generated_cases.c.h +++ b/Python/generated_cases.c.h @@ -183,9 +183,7 @@ /* Skip 1 cache entry */ // _BINARY_OP_INPLACE_ADD_UNICODE { - #ifndef NDEBUG PyObject *left_o = PyStackRef_AsPyObjectBorrow(left); - #endif PyObject *right_o = PyStackRef_AsPyObjectBorrow(right); int next_oparg; #if TIER_ONE @@ -195,7 +193,7 @@ next_oparg = CURRENT_OPERAND0(); #endif _PyStackRef *target_local = &GETLOCAL(next_oparg); - DEOPT_IF(!PyStackRef_Is(*target_local, left), BINARY_OP); + DEOPT_IF(PyStackRef_AsPyObjectBorrow(*target_local) != left_o, BINARY_OP); STAT_INC(BINARY_OP, hit); /* Handle `left = left + right` or `left += right` for str. 
* @@ -3405,7 +3403,7 @@ } OPCODE_DEFERRED_INC(CONTAINS_OP); ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter); - #endif /* ENABLE_SPECIALIZATION */ + #endif /* ENABLE_SPECIALIZATION_FT */ } // _CONTAINS_OP { @@ -3824,7 +3822,7 @@ _PyStackRef should_be_none; should_be_none = stack_pointer[-1]; assert(STACK_LEVEL() == 2); - if (!PyStackRef_Is(should_be_none, PyStackRef_None)) { + if (!PyStackRef_IsNone(should_be_none)) { _PyFrame_SetStackPointer(frame, stack_pointer); PyErr_Format(PyExc_TypeError, "__init__() should return None, not '%.200s'", @@ -4760,7 +4758,7 @@ /* Skip 1 cache entry */ _PyStackRef cond = POP(); assert(PyStackRef_BoolCheck(cond)); - int flag = PyStackRef_Is(cond, PyStackRef_False); + int flag = PyStackRef_IsFalse(cond); int offset = flag * oparg; RECORD_BRANCH_TAKEN(this_instr[1].cache, flag); INSTRUMENTED_JUMP(this_instr, next_instr + offset, PY_MONITORING_EVENT_BRANCH); @@ -4774,7 +4772,7 @@ INSTRUCTION_STATS(INSTRUMENTED_POP_JUMP_IF_NONE); /* Skip 1 cache entry */ _PyStackRef value_stackref = POP(); - int flag = PyStackRef_Is(value_stackref, PyStackRef_None); + int flag = PyStackRef_IsNone(value_stackref); int offset; if (flag) { offset = oparg; @@ -4796,7 +4794,7 @@ /* Skip 1 cache entry */ _PyStackRef value_stackref = POP(); int offset; - int nflag = PyStackRef_Is(value_stackref, PyStackRef_None); + int nflag = PyStackRef_IsNone(value_stackref); if (nflag) { offset = 0; } @@ -4819,7 +4817,7 @@ /* Skip 1 cache entry */ _PyStackRef cond = POP(); assert(PyStackRef_BoolCheck(cond)); - int flag = PyStackRef_Is(cond, PyStackRef_True); + int flag = PyStackRef_IsTrue(cond); int offset = flag * oparg; RECORD_BRANCH_TAKEN(this_instr[1].cache, flag); INSTRUMENTED_JUMP(this_instr, next_instr + offset, PY_MONITORING_EVENT_BRANCH); @@ -5040,13 +5038,7 @@ _PyStackRef b; right = stack_pointer[-1]; left = stack_pointer[-2]; - #ifdef Py_GIL_DISABLED - // On free-threaded builds, objects are conditionally immortalized. - // So their bits don't always compare equally. int res = Py_Is(PyStackRef_AsPyObjectBorrow(left), PyStackRef_AsPyObjectBorrow(right)) ^ oparg; - #else - int res = PyStackRef_Is(left, right) ^ oparg; - #endif PyStackRef_CLOSE(left); PyStackRef_CLOSE(right); b = res ? 
PyStackRef_True : PyStackRef_False; @@ -5092,15 +5084,18 @@ _PyFrame_SetStackPointer(frame, stack_pointer); int optimized = _PyOptimizer_Optimize(frame, start, stack_pointer, &executor, 0); stack_pointer = _PyFrame_GetStackPointer(frame); - if (optimized < 0) goto error; - if (optimized) { + if (optimized <= 0) { + this_instr[1].counter = restart_backoff_counter(counter); + if (optimized < 0) goto error; + } + else { + _PyFrame_SetStackPointer(frame, stack_pointer); + this_instr[1].counter = initial_jump_backoff_counter(); + stack_pointer = _PyFrame_GetStackPointer(frame); assert(tstate->previous_executor == NULL); tstate->previous_executor = Py_None; GOTO_TIER_TWO(executor); } - else { - this_instr[1].counter = restart_backoff_counter(counter); - } } else { ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter); @@ -6095,7 +6090,7 @@ { uint16_t counter = read_u16(&this_instr[1].cache); (void)counter; - #if ENABLE_SPECIALIZATION + #if ENABLE_SPECIALIZATION_FT if (ADAPTIVE_COUNTER_TRIGGERS(counter)) { PyObject *name = GETITEM(FRAME_CO_NAMES, oparg>>1); next_instr = this_instr; @@ -6106,7 +6101,7 @@ } OPCODE_DEFERRED_INC(LOAD_GLOBAL); ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter); - #endif /* ENABLE_SPECIALIZATION */ + #endif /* ENABLE_SPECIALIZATION_FT */ } /* Skip 1 cache entry */ /* Skip 1 cache entry */ @@ -6141,28 +6136,35 @@ uint16_t version = read_u16(&this_instr[2].cache); PyDictObject *dict = (PyDictObject *)GLOBALS(); DEOPT_IF(!PyDict_CheckExact(dict), LOAD_GLOBAL); - DEOPT_IF(dict->ma_keys->dk_version != version, LOAD_GLOBAL); - assert(DK_IS_UNICODE(dict->ma_keys)); + PyDictKeysObject *keys = FT_ATOMIC_LOAD_PTR_ACQUIRE(dict->ma_keys); + DEOPT_IF(FT_ATOMIC_LOAD_UINT32_RELAXED(keys->dk_version) != version, LOAD_GLOBAL); + assert(DK_IS_UNICODE(keys)); } // _GUARD_BUILTINS_VERSION_PUSH_KEYS { uint16_t version = read_u16(&this_instr[3].cache); PyDictObject *dict = (PyDictObject *)BUILTINS(); DEOPT_IF(!PyDict_CheckExact(dict), LOAD_GLOBAL); - DEOPT_IF(dict->ma_keys->dk_version != version, LOAD_GLOBAL); - builtins_keys = dict->ma_keys; + PyDictKeysObject *keys = FT_ATOMIC_LOAD_PTR_ACQUIRE(dict->ma_keys); + DEOPT_IF(FT_ATOMIC_LOAD_UINT32_RELAXED(keys->dk_version) != version, LOAD_GLOBAL); + builtins_keys = keys; assert(DK_IS_UNICODE(builtins_keys)); } // _LOAD_GLOBAL_BUILTINS_FROM_KEYS { uint16_t index = read_u16(&this_instr[4].cache); PyDictUnicodeEntry *entries = DK_UNICODE_ENTRIES(builtins_keys); - PyObject *res_o = entries[index].me_value; + PyObject *res_o = FT_ATOMIC_LOAD_PTR_RELAXED(entries[index].me_value); DEOPT_IF(res_o == NULL, LOAD_GLOBAL); + #if Py_GIL_DISABLED + int increfed = _Py_TryIncrefCompareStackRef(&entries[index].me_value, res_o, &res); + DEOPT_IF(!increfed, LOAD_GLOBAL); + #else Py_INCREF(res_o); + res = PyStackRef_FromPyObjectSteal(res_o); + #endif STAT_INC(LOAD_GLOBAL, hit); null = PyStackRef_NULL; - res = PyStackRef_FromPyObjectSteal(res_o); } stack_pointer[0] = res; if (oparg & 1) stack_pointer[1] = null; @@ -6185,8 +6187,9 @@ uint16_t version = read_u16(&this_instr[2].cache); PyDictObject *dict = (PyDictObject *)GLOBALS(); DEOPT_IF(!PyDict_CheckExact(dict), LOAD_GLOBAL); - DEOPT_IF(dict->ma_keys->dk_version != version, LOAD_GLOBAL); - globals_keys = dict->ma_keys; + PyDictKeysObject *keys = FT_ATOMIC_LOAD_PTR_ACQUIRE(dict->ma_keys); + DEOPT_IF(FT_ATOMIC_LOAD_UINT32_RELAXED(keys->dk_version) != version, LOAD_GLOBAL); + globals_keys = keys; assert(DK_IS_UNICODE(globals_keys)); } /* Skip 1 cache entry */ @@ -6194,12 +6197,17 @@ { uint16_t index = 
read_u16(&this_instr[4].cache); PyDictUnicodeEntry *entries = DK_UNICODE_ENTRIES(globals_keys); - PyObject *res_o = entries[index].me_value; + PyObject *res_o = FT_ATOMIC_LOAD_PTR_RELAXED(entries[index].me_value); DEOPT_IF(res_o == NULL, LOAD_GLOBAL); + #if Py_GIL_DISABLED + int increfed = _Py_TryIncrefCompareStackRef(&entries[index].me_value, res_o, &res); + DEOPT_IF(!increfed, LOAD_GLOBAL); + #else Py_INCREF(res_o); + res = PyStackRef_FromPyObjectSteal(res_o); + #endif STAT_INC(LOAD_GLOBAL, hit); null = PyStackRef_NULL; - res = PyStackRef_FromPyObjectSteal(res_o); } stack_pointer[0] = res; if (oparg & 1) stack_pointer[1] = null; @@ -6647,7 +6655,7 @@ _PyErr_StackItem *exc_info = tstate->exc_info; _PyFrame_SetStackPointer(frame, stack_pointer); Py_XSETREF(exc_info->exc_value, - PyStackRef_Is(exc_value, PyStackRef_None) + PyStackRef_IsNone(exc_value) ? NULL : PyStackRef_AsPyObjectSteal(exc_value)); stack_pointer = _PyFrame_GetStackPointer(frame); stack_pointer += -1; @@ -6664,7 +6672,7 @@ /* Skip 1 cache entry */ cond = stack_pointer[-1]; assert(PyStackRef_BoolCheck(cond)); - int flag = PyStackRef_Is(cond, PyStackRef_False); + int flag = PyStackRef_IsFalse(cond); RECORD_BRANCH_TAKEN(this_instr[1].cache, flag); JUMPBY(oparg * flag); stack_pointer += -1; @@ -6684,7 +6692,7 @@ // _IS_NONE { value = stack_pointer[-1]; - if (PyStackRef_Is(value, PyStackRef_None)) { + if (PyStackRef_IsNone(value)) { b = PyStackRef_True; } else { @@ -6696,7 +6704,7 @@ { cond = b; assert(PyStackRef_BoolCheck(cond)); - int flag = PyStackRef_Is(cond, PyStackRef_True); + int flag = PyStackRef_IsTrue(cond); RECORD_BRANCH_TAKEN(this_instr[1].cache, flag); JUMPBY(oparg * flag); } @@ -6717,7 +6725,7 @@ // _IS_NONE { value = stack_pointer[-1]; - if (PyStackRef_Is(value, PyStackRef_None)) { + if (PyStackRef_IsNone(value)) { b = PyStackRef_True; } else { @@ -6729,7 +6737,7 @@ { cond = b; assert(PyStackRef_BoolCheck(cond)); - int flag = PyStackRef_Is(cond, PyStackRef_False); + int flag = PyStackRef_IsFalse(cond); RECORD_BRANCH_TAKEN(this_instr[1].cache, flag); JUMPBY(oparg * flag); } @@ -6747,7 +6755,7 @@ /* Skip 1 cache entry */ cond = stack_pointer[-1]; assert(PyStackRef_BoolCheck(cond)); - int flag = PyStackRef_Is(cond, PyStackRef_True); + int flag = PyStackRef_IsTrue(cond); RECORD_BRANCH_TAKEN(this_instr[1].cache, flag); JUMPBY(oparg * flag); stack_pointer += -1; @@ -7084,7 +7092,7 @@ gen_frame->previous = frame; DISPATCH_INLINED(gen_frame); } - if (PyStackRef_Is(v, PyStackRef_None) && PyIter_Check(receiver_o)) { + if (PyStackRef_IsNone(v) && PyIter_Check(receiver_o)) { _PyFrame_SetStackPointer(frame, stack_pointer); retval_o = Py_TYPE(receiver_o)->tp_iternext(receiver_o); stack_pointer = _PyFrame_GetStackPointer(frame); @@ -7432,10 +7440,6 @@ DEOPT_IF(ep->me_key != name, STORE_ATTR); PyObject *old_value = ep->me_value; DEOPT_IF(old_value == NULL, STORE_ATTR); - /* Ensure dict is GC tracked if it needs to be */ - if (!_PyObject_GC_IS_TRACKED(dict) && _PyObject_GC_MAY_BE_TRACKED(PyStackRef_AsPyObjectBorrow(value))) { - _PyObject_GC_TRACK(dict); - } _PyFrame_SetStackPointer(frame, stack_pointer); _PyDict_NotifyEvent(tstate->interp, PyDict_EVENT_MODIFIED, dict, name, PyStackRef_AsPyObjectBorrow(value)); stack_pointer = _PyFrame_GetStackPointer(frame); @@ -7625,7 +7629,7 @@ container = stack_pointer[-2]; uint16_t counter = read_u16(&this_instr[1].cache); (void)counter; - #if ENABLE_SPECIALIZATION + #if ENABLE_SPECIALIZATION_FT if (ADAPTIVE_COUNTER_TRIGGERS(counter)) { next_instr = this_instr; _PyFrame_SetStackPointer(frame, 
stack_pointer); @@ -7635,7 +7639,7 @@ } OPCODE_DEFERRED_INC(STORE_SUBSCR); ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter); - #endif /* ENABLE_SPECIALIZATION */ + #endif /* ENABLE_SPECIALIZATION_FT */ } // _STORE_SUBSCR { @@ -7700,12 +7704,17 @@ // Ensure nonnegative, zero-or-one-digit ints. DEOPT_IF(!_PyLong_IsNonNegativeCompact((PyLongObject *)sub), STORE_SUBSCR); Py_ssize_t index = ((PyLongObject*)sub)->long_value.ob_digit[0]; + DEOPT_IF(!LOCK_OBJECT(list), STORE_SUBSCR); // Ensure index < len(list) - DEOPT_IF(index >= PyList_GET_SIZE(list), STORE_SUBSCR); + if (index >= PyList_GET_SIZE(list)) { + UNLOCK_OBJECT(list); + DEOPT_IF(true, STORE_SUBSCR); + } STAT_INC(STORE_SUBSCR, hit); PyObject *old_value = PyList_GET_ITEM(list, index); PyList_SET_ITEM(list, index, PyStackRef_AsPyObjectSteal(value)); assert(old_value != NULL); + UNLOCK_OBJECT(list); // unlock before decrefs! Py_DECREF(old_value); PyStackRef_CLOSE_SPECIALIZED(sub_st, (destructor)PyObject_Free); PyStackRef_CLOSE(list_st); @@ -7746,7 +7755,7 @@ value = stack_pointer[-1]; uint16_t counter = read_u16(&this_instr[1].cache); (void)counter; - #if ENABLE_SPECIALIZATION + #if ENABLE_SPECIALIZATION_FT if (ADAPTIVE_COUNTER_TRIGGERS(counter)) { next_instr = this_instr; _PyFrame_SetStackPointer(frame, stack_pointer); @@ -7756,7 +7765,7 @@ } OPCODE_DEFERRED_INC(TO_BOOL); ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter); - #endif /* ENABLE_SPECIALIZATION */ + #endif /* ENABLE_SPECIALIZATION_FT */ } /* Skip 2 cache entries */ // _TO_BOOL @@ -7851,7 +7860,7 @@ PyObject *value_o = PyStackRef_AsPyObjectBorrow(value); DEOPT_IF(!PyList_CheckExact(value_o), TO_BOOL); STAT_INC(TO_BOOL, hit); - res = Py_SIZE(value_o) ? PyStackRef_True : PyStackRef_False; + res = PyList_GET_SIZE(value_o) ? PyStackRef_True : PyStackRef_False; PyStackRef_CLOSE(value); stack_pointer[-1] = res; DISPATCH(); @@ -7868,7 +7877,7 @@ /* Skip 2 cache entries */ value = stack_pointer[-1]; // This one is a bit weird, because we expect *some* failures: - DEOPT_IF(!PyStackRef_Is(value, PyStackRef_None), TO_BOOL); + DEOPT_IF(!PyStackRef_IsNone(value), TO_BOOL); STAT_INC(TO_BOOL, hit); res = PyStackRef_False; stack_pointer[-1] = res; @@ -7943,7 +7952,7 @@ _PyStackRef res; value = stack_pointer[-1]; assert(PyStackRef_BoolCheck(value)); - res = PyStackRef_Is(value, PyStackRef_False) + res = PyStackRef_IsFalse(value) ? 
PyStackRef_True : PyStackRef_False; stack_pointer[-1] = res; DISPATCH(); @@ -7982,7 +7991,7 @@ seq = stack_pointer[-1]; uint16_t counter = read_u16(&this_instr[1].cache); (void)counter; - #if ENABLE_SPECIALIZATION + #if ENABLE_SPECIALIZATION_FT if (ADAPTIVE_COUNTER_TRIGGERS(counter)) { next_instr = this_instr; _PyFrame_SetStackPointer(frame, stack_pointer); @@ -7992,7 +8001,7 @@ } OPCODE_DEFERRED_INC(UNPACK_SEQUENCE); ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter); - #endif /* ENABLE_SPECIALIZATION */ + #endif /* ENABLE_SPECIALIZATION_FT */ (void)seq; (void)counter; } @@ -8023,12 +8032,30 @@ values = &stack_pointer[-1]; PyObject *seq_o = PyStackRef_AsPyObjectBorrow(seq); DEOPT_IF(!PyList_CheckExact(seq_o), UNPACK_SEQUENCE); - DEOPT_IF(PyList_GET_SIZE(seq_o) != oparg, UNPACK_SEQUENCE); + #ifdef Py_GIL_DISABLED + PyCriticalSection cs; + _PyFrame_SetStackPointer(frame, stack_pointer); + PyCriticalSection_Begin(&cs, seq_o); + stack_pointer = _PyFrame_GetStackPointer(frame); + #endif + if (PyList_GET_SIZE(seq_o) != oparg) { + #ifdef Py_GIL_DISABLED + _PyFrame_SetStackPointer(frame, stack_pointer); + PyCriticalSection_End(&cs); + stack_pointer = _PyFrame_GetStackPointer(frame); + #endif + DEOPT_IF(true, UNPACK_SEQUENCE); + } STAT_INC(UNPACK_SEQUENCE, hit); PyObject **items = _PyList_ITEMS(seq_o); for (int i = oparg; --i >= 0; ) { *values++ = PyStackRef_FromPyObjectNew(items[i]); } + #ifdef Py_GIL_DISABLED + _PyFrame_SetStackPointer(frame, stack_pointer); + PyCriticalSection_End(&cs); + stack_pointer = _PyFrame_GetStackPointer(frame); + #endif PyStackRef_CLOSE(seq); stack_pointer += -1 + oparg; assert(WITHIN_STACK_BOUNDS()); diff --git a/Python/instrumentation.c b/Python/instrumentation.c index 87c2addaf809eb..3503809e3306cb 100644 --- a/Python/instrumentation.c +++ b/Python/instrumentation.c @@ -1006,13 +1006,10 @@ set_global_version(PyThreadState *tstate, uint32_t version) #ifdef Py_GIL_DISABLED // Set the version on all threads in free-threaded builds. - _PyRuntimeState *runtime = &_PyRuntime; - HEAD_LOCK(runtime); - for (tstate = interp->threads.head; tstate; - tstate = PyThreadState_Next(tstate)) { + _Py_FOR_EACH_TSTATE_BEGIN(interp, tstate) { set_version_raw(&tstate->eval_breaker, version); }; - HEAD_UNLOCK(runtime); + _Py_FOR_EACH_TSTATE_END(interp); #else // Normal builds take the current version from instrumentation_version when // attaching a thread, so we only have to set the current thread's version. 
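Several hunks above replace open-coded HEAD_LOCK()/HEAD_UNLOCK() loops over interp->threads.head with the _Py_FOR_EACH_TSTATE_BEGIN/_Py_FOR_EACH_TSTATE_END pair. A minimal sketch of how such a lock-scoped iteration macro pair can be shaped; the toy list, mutex, and macro bodies here are assumptions for illustration, not the real CPython definitions:

#include <pthread.h>
#include <stdio.h>

/* Toy thread-state list guarded by a mutex. */
typedef struct tstate { int id; struct tstate *next; } tstate;
typedef struct { pthread_mutex_t mutex; tstate *head; } interp;

/* BEGIN takes the lock and opens the loop; END closes the scope and unlocks. */
#define FOR_EACH_TSTATE_BEGIN(I, T)                     \
    pthread_mutex_lock(&(I)->mutex);                    \
    for (tstate *T = (I)->head; T != NULL; T = T->next)
#define FOR_EACH_TSTATE_END(I)                          \
    pthread_mutex_unlock(&(I)->mutex)

int main(void)
{
    tstate c = {3, NULL}, b = {2, &c}, a = {1, &b};
    interp ip;
    ip.head = &a;
    pthread_mutex_init(&ip.mutex, NULL);

    FOR_EACH_TSTATE_BEGIN(&ip, t) {
        printf("visiting thread state %d\n", t->id);
    }
    FOR_EACH_TSTATE_END(&ip);

    pthread_mutex_destroy(&ip.mutex);
    return 0;
}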
diff --git a/Python/optimizer.c b/Python/optimizer.c index bc2ecc098b0e15..6a232218981dcd 100644 --- a/Python/optimizer.c +++ b/Python/optimizer.c @@ -205,8 +205,8 @@ _PyOptimizer_Optimize( return 1; } -_PyExecutorObject * -_Py_GetExecutor(PyCodeObject *code, int offset) +static _PyExecutorObject * +get_executor_lock_held(PyCodeObject *code, int offset) { int code_len = (int)Py_SIZE(code); for (int i = 0 ; i < code_len;) { @@ -222,6 +222,16 @@ _Py_GetExecutor(PyCodeObject *code, int offset) return NULL; } +_PyExecutorObject * +_Py_GetExecutor(PyCodeObject *code, int offset) +{ + _PyExecutorObject *executor; + Py_BEGIN_CRITICAL_SECTION(code); + executor = get_executor_lock_held(code, offset); + Py_END_CRITICAL_SECTION(); + return executor; +} + static PyObject * is_valid(PyObject *self, PyObject *Py_UNUSED(ignored)) { diff --git a/Python/pylifecycle.c b/Python/pylifecycle.c index 23882d083844ac..ceb30e9f02df2c 100644 --- a/Python/pylifecycle.c +++ b/Python/pylifecycle.c @@ -1888,7 +1888,6 @@ finalize_interp_clear(PyThreadState *tstate) _PyXI_Fini(tstate->interp); _PyExc_ClearExceptionGroupType(tstate->interp); _Py_clear_generic_types(tstate->interp); - _PyDtoa_Fini(tstate->interp); /* Clear interpreter state and all thread states */ _PyInterpreterState_Clear(tstate); @@ -1910,6 +1909,9 @@ finalize_interp_clear(PyThreadState *tstate) finalize_interp_types(tstate->interp); + /* Finalize dtoa at last so that finalizers calling repr of float doesn't crash */ + _PyDtoa_Fini(tstate->interp); + /* Free any delayed free requests immediately */ _PyMem_FiniDelayed(tstate->interp); diff --git a/Python/pystate.c b/Python/pystate.c index 24ee73c145cbcc..3ceae229f75cd0 100644 --- a/Python/pystate.c +++ b/Python/pystate.c @@ -629,6 +629,8 @@ init_interpreter(PyInterpreterState *interp, assert(next != NULL || (interp == runtime->interpreters.main)); interp->next = next; + interp->threads.preallocated = &interp->_initial_thread; + // We would call _PyObject_InitState() at this point // if interp->feature_flags were alredy set. @@ -766,7 +768,6 @@ PyInterpreterState_New(void) return interp; } - static void interpreter_clear(PyInterpreterState *interp, PyThreadState *tstate) { @@ -789,18 +790,15 @@ interpreter_clear(PyInterpreterState *interp, PyThreadState *tstate) } // Clear the current/main thread state last. - HEAD_LOCK(runtime); - PyThreadState *p = interp->threads.head; - HEAD_UNLOCK(runtime); - while (p != NULL) { + _Py_FOR_EACH_TSTATE_BEGIN(interp, p) { // See https://github.com/python/cpython/issues/102126 // Must be called without HEAD_LOCK held as it can deadlock // if any finalizer tries to acquire that lock. + HEAD_UNLOCK(runtime); PyThreadState_Clear(p); HEAD_LOCK(runtime); - p = p->next; - HEAD_UNLOCK(runtime); } + _Py_FOR_EACH_TSTATE_END(interp); if (tstate->interp == interp) { /* We fix tstate->_status below when we for sure aren't using it (e.g. no longer need the GIL). */ @@ -910,6 +908,9 @@ interpreter_clear(PyInterpreterState *interp, PyThreadState *tstate) // XXX Once we have one allocator per interpreter (i.e. // per-interpreter GC) we must ensure that all of the interpreter's // objects have been cleaned up at the point. + + // We could clear interp->threads.freelist here + // if it held more than just the initial thread state. 
} @@ -1047,10 +1048,17 @@ get_main_thread(PyInterpreterState *interp) return _Py_atomic_load_ptr_relaxed(&interp->threads.main); } +void +_PyErr_SetInterpreterAlreadyRunning(void) +{ + PyErr_SetString(PyExc_InterpreterError, "interpreter already running"); +} + int _PyInterpreterState_SetRunningMain(PyInterpreterState *interp) { - if (_PyInterpreterState_FailIfRunningMain(interp) < 0) { + if (get_main_thread(interp) != NULL) { + _PyErr_SetInterpreterAlreadyRunning(); return -1; } PyThreadState *tstate = current_fast_get(); @@ -1096,17 +1104,6 @@ _PyThreadState_IsRunningMain(PyThreadState *tstate) return get_main_thread(interp) == tstate; } -int -_PyInterpreterState_FailIfRunningMain(PyInterpreterState *interp) -{ - if (get_main_thread(interp) != NULL) { - PyErr_SetString(PyExc_InterpreterError, - "interpreter already running"); - return -1; - } - return 0; -} - void _PyInterpreterState_ReinitRunningMain(PyThreadState *tstate) { @@ -1390,22 +1387,45 @@ allocate_chunk(int size_in_bytes, _PyStackChunk* previous) return res; } +static void +reset_threadstate(_PyThreadStateImpl *tstate) +{ + // Set to _PyThreadState_INIT directly? + memcpy(tstate, + &initial._main_interpreter._initial_thread, + sizeof(*tstate)); +} + static _PyThreadStateImpl * -alloc_threadstate(void) +alloc_threadstate(PyInterpreterState *interp) { - return PyMem_RawCalloc(1, sizeof(_PyThreadStateImpl)); + _PyThreadStateImpl *tstate; + + // Try the preallocated tstate first. + tstate = _Py_atomic_exchange_ptr(&interp->threads.preallocated, NULL); + + // Fall back to the allocator. + if (tstate == NULL) { + tstate = PyMem_RawCalloc(1, sizeof(_PyThreadStateImpl)); + if (tstate == NULL) { + return NULL; + } + reset_threadstate(tstate); + } + return tstate; } static void free_threadstate(_PyThreadStateImpl *tstate) { + PyInterpreterState *interp = tstate->base.interp; // The initial thread state of the interpreter is allocated // as part of the interpreter state so should not be freed. - if (tstate == &tstate->base.interp->_initial_thread) { - // Restore to _PyThreadState_INIT. - memcpy(tstate, - &initial._main_interpreter._initial_thread, - sizeof(*tstate)); + if (tstate == &interp->_initial_thread) { + // Make it available again. + reset_threadstate(tstate); + assert(interp->threads.preallocated == NULL); + _Py_atomic_store_ptr(&interp->threads.preallocated, tstate); } else { PyMem_RawFree(tstate); @@ -1496,66 +1516,38 @@ add_threadstate(PyInterpreterState *interp, PyThreadState *tstate, static PyThreadState * new_threadstate(PyInterpreterState *interp, int whence) { - _PyThreadStateImpl *tstate; - _PyRuntimeState *runtime = interp->runtime; - // We don't need to allocate a thread state for the main interpreter - // (the common case), but doing it later for the other case revealed a - // reentrancy problem (deadlock). So for now we always allocate before - // taking the interpreters lock. See GH-96071. - _PyThreadStateImpl *new_tstate = alloc_threadstate(); - int used_newtstate; - if (new_tstate == NULL) { + // Allocate the thread state. + _PyThreadStateImpl *tstate = alloc_threadstate(interp); + if (tstate == NULL) { return NULL; } + #ifdef Py_GIL_DISABLED Py_ssize_t qsbr_idx = _Py_qsbr_reserve(interp); if (qsbr_idx < 0) { - PyMem_RawFree(new_tstate); + free_threadstate(tstate); return NULL; } int32_t tlbc_idx = _Py_ReserveTLBCIndex(interp); if (tlbc_idx < 0) { - PyMem_RawFree(new_tstate); + free_threadstate(tstate); return NULL; } #endif /* We serialize concurrent creation to protect global state. 
*/ - HEAD_LOCK(runtime); + HEAD_LOCK(interp->runtime); + // Initialize the new thread state. interp->threads.next_unique_id += 1; uint64_t id = interp->threads.next_unique_id; + init_threadstate(tstate, interp, id, whence); - // Allocate the thread state and add it to the interpreter. + // Add the new thread state to the interpreter. PyThreadState *old_head = interp->threads.head; - if (old_head == NULL) { - // It's the interpreter's initial thread state. - used_newtstate = 0; - tstate = &interp->_initial_thread; - } - // XXX Re-use interp->_initial_thread if not in use? - else { - // Every valid interpreter must have at least one thread. - assert(id > 1); - assert(old_head->prev == NULL); - used_newtstate = 1; - tstate = new_tstate; - // Set to _PyThreadState_INIT. - memcpy(tstate, - &initial._main_interpreter._initial_thread, - sizeof(*tstate)); - } - - init_threadstate(tstate, interp, id, whence); add_threadstate(interp, (PyThreadState *)tstate, old_head); - HEAD_UNLOCK(runtime); - if (!used_newtstate) { - // Must be called with lock unlocked to avoid re-entrancy deadlock. - PyMem_RawFree(new_tstate); - } - else { - } + HEAD_UNLOCK(interp->runtime); #ifdef Py_GIL_DISABLED // Must be called with lock unlocked to avoid lock ordering deadlocks. @@ -1654,6 +1646,11 @@ PyThreadState_Clear(PyThreadState *tstate) "PyThreadState_Clear: warning: thread still has a frame\n"); } + if (verbose && tstate->current_exception != NULL) { + fprintf(stderr, "PyThreadState_Clear: warning: thread has an exception set\n"); + _PyErr_Print(tstate); + } + /* At this point tstate shouldn't be used any more, neither to run Python code nor for other uses. @@ -1782,7 +1779,9 @@ tstate_delete_common(PyThreadState *tstate, int release_gil) if (tstate->_status.bound_gilstate) { unbind_gilstate_tstate(tstate); } - unbind_tstate(tstate); + if (tstate->_status.bound) { + unbind_tstate(tstate); + } // XXX Move to PyThreadState_Clear()? clear_datastack(tstate); @@ -1801,10 +1800,9 @@ tstate_delete_common(PyThreadState *tstate, int release_gil) static void zapthreads(PyInterpreterState *interp) { - PyThreadState *tstate; /* No need to lock the mutex here because this should only happen when the threads are all really dead (XXX famous last words). */ - while ((tstate = interp->threads.head) != NULL) { + _Py_FOR_EACH_TSTATE_UNLOCKED(interp, tstate) { tstate_verify_not_active(tstate); tstate_delete_common(tstate, 0); free_threadstate((_PyThreadStateImpl *)tstate); @@ -2161,7 +2159,7 @@ decrement_stoptheworld_countdown(struct _stoptheworld_state *stw) } #ifdef Py_GIL_DISABLED -// Interpreter for _Py_FOR_EACH_THREAD(). For global stop-the-world events, +// Interpreter for _Py_FOR_EACH_STW_INTERP(). For global stop-the-world events, // we start with the first interpreter and then iterate over all interpreters. // For per-interpreter stop-the-world events, we only operate on the one // interpreter. @@ -2176,10 +2174,9 @@ interp_for_stop_the_world(struct _stoptheworld_state *stw) // Loops over threads for a stop-the-world event. // For global: all threads in all interpreters // For per-interpreter: all threads in the interpreter -#define _Py_FOR_EACH_THREAD(stw, i, t) \ - for (i = interp_for_stop_the_world((stw)); \ - i != NULL; i = ((stw->is_global) ? i->next : NULL)) \ - for (t = i->threads.head; t; t = t->next) +#define _Py_FOR_EACH_STW_INTERP(stw, i) \ + for (PyInterpreterState *i = interp_for_stop_the_world((stw)); \ + i != NULL; i = ((stw->is_global) ? 
i->next : NULL)) // Try to transition threads atomically from the "detached" state to the @@ -2188,19 +2185,19 @@ static bool park_detached_threads(struct _stoptheworld_state *stw) { int num_parked = 0; - PyInterpreterState *i; - PyThreadState *t; - _Py_FOR_EACH_THREAD(stw, i, t) { - int state = _Py_atomic_load_int_relaxed(&t->state); - if (state == _Py_THREAD_DETACHED) { - // Atomically transition to "suspended" if in "detached" state. - if (_Py_atomic_compare_exchange_int(&t->state, - &state, _Py_THREAD_SUSPENDED)) { - num_parked++; + _Py_FOR_EACH_STW_INTERP(stw, i) { + _Py_FOR_EACH_TSTATE_UNLOCKED(i, t) { + int state = _Py_atomic_load_int_relaxed(&t->state); + if (state == _Py_THREAD_DETACHED) { + // Atomically transition to "suspended" if in "detached" state. + if (_Py_atomic_compare_exchange_int( + &t->state, &state, _Py_THREAD_SUSPENDED)) { + num_parked++; + } + } + else if (state == _Py_THREAD_ATTACHED && t != stw->requester) { + _Py_set_eval_breaker_bit(t, _PY_EVAL_PLEASE_STOP_BIT); } - } - else if (state == _Py_THREAD_ATTACHED && t != stw->requester) { - _Py_set_eval_breaker_bit(t, _PY_EVAL_PLEASE_STOP_BIT); } } stw->thread_countdown -= num_parked; @@ -2227,12 +2224,12 @@ stop_the_world(struct _stoptheworld_state *stw) stw->stop_event = (PyEvent){0}; // zero-initialize (unset) stw->requester = _PyThreadState_GET(); // may be NULL - PyInterpreterState *i; - PyThreadState *t; - _Py_FOR_EACH_THREAD(stw, i, t) { - if (t != stw->requester) { - // Count all the other threads (we don't wait on ourself). - stw->thread_countdown++; + _Py_FOR_EACH_STW_INTERP(stw, i) { + _Py_FOR_EACH_TSTATE_UNLOCKED(i, t) { + if (t != stw->requester) { + // Count all the other threads (we don't wait on ourself). + stw->thread_countdown++; + } } } @@ -2273,14 +2270,14 @@ start_the_world(struct _stoptheworld_state *stw) stw->requested = 0; stw->world_stopped = 0; // Switch threads back to the detached state. - PyInterpreterState *i; - PyThreadState *t; - _Py_FOR_EACH_THREAD(stw, i, t) { - if (t != stw->requester) { - assert(_Py_atomic_load_int_relaxed(&t->state) == - _Py_THREAD_SUSPENDED); - _Py_atomic_store_int(&t->state, _Py_THREAD_DETACHED); - _PyParkingLot_UnparkAll(&t->state); + _Py_FOR_EACH_STW_INTERP(stw, i) { + _Py_FOR_EACH_TSTATE_UNLOCKED(i, t) { + if (t != stw->requester) { + assert(_Py_atomic_load_int_relaxed(&t->state) == + _Py_THREAD_SUSPENDED); + _Py_atomic_store_int(&t->state, _Py_THREAD_DETACHED); + _PyParkingLot_UnparkAll(&t->state); + } } } stw->requester = NULL; @@ -2344,7 +2341,6 @@ _PyEval_StartTheWorld(PyInterpreterState *interp) int PyThreadState_SetAsyncExc(unsigned long id, PyObject *exc) { - _PyRuntimeState *runtime = &_PyRuntime; PyInterpreterState *interp = _PyInterpreterState_GET(); /* Although the GIL is held, a few C API functions can be called @@ -2353,12 +2349,16 @@ PyThreadState_SetAsyncExc(unsigned long id, PyObject *exc) * list of thread states we're traversing, so to prevent that we lock * head_mutex for the duration. 
*/ - HEAD_LOCK(runtime); - for (PyThreadState *tstate = interp->threads.head; tstate != NULL; tstate = tstate->next) { - if (tstate->thread_id != id) { - continue; + PyThreadState *tstate = NULL; + _Py_FOR_EACH_TSTATE_BEGIN(interp, t) { + if (t->thread_id == id) { + tstate = t; + break; } + } + _Py_FOR_EACH_TSTATE_END(interp); + if (tstate != NULL) { /* Tricky: we need to decref the current value * (if any) in tstate->async_exc, but that can in turn * allow arbitrary Python code to run, including @@ -2368,14 +2368,12 @@ PyThreadState_SetAsyncExc(unsigned long id, PyObject *exc) */ Py_XINCREF(exc); PyObject *old_exc = _Py_atomic_exchange_ptr(&tstate->async_exc, exc); - HEAD_UNLOCK(runtime); Py_XDECREF(old_exc); _Py_set_eval_breaker_bit(tstate, _PY_ASYNC_EXCEPTION_BIT); - return 1; } - HEAD_UNLOCK(runtime); - return 0; + + return tstate != NULL; } //--------------------------------- @@ -2515,8 +2513,7 @@ _PyThread_CurrentFrames(void) HEAD_LOCK(runtime); PyInterpreterState *i; for (i = runtime->interpreters.head; i != NULL; i = i->next) { - PyThreadState *t; - for (t = i->threads.head; t != NULL; t = t->next) { + _Py_FOR_EACH_TSTATE_UNLOCKED(i, t) { _PyInterpreterFrame *frame = t->current_frame; frame = _PyFrame_GetFirstComplete(frame); if (frame == NULL) { @@ -2581,8 +2578,7 @@ _PyThread_CurrentExceptions(void) HEAD_LOCK(runtime); PyInterpreterState *i; for (i = runtime->interpreters.head; i != NULL; i = i->next) { - PyThreadState *t; - for (t = i->threads.head; t != NULL; t = t->next) { + _Py_FOR_EACH_TSTATE_UNLOCKED(i, t) { _PyErr_StackItem *err_info = _PyErr_GetTopmostException(t); if (err_info == NULL) { continue; diff --git a/Python/specialize.c b/Python/specialize.c index 4c8cf8534b3dc7..172dae7d374602 100644 --- a/Python/specialize.c +++ b/Python/specialize.c @@ -715,11 +715,12 @@ specialize(_Py_CODEUNIT *instr, uint8_t specialized_opcode) SPEC_FAIL_OTHER); return; } + STAT_INC(_PyOpcode_Deopt[specialized_opcode], success); set_counter((_Py_BackoffCounter *)instr + 1, adaptive_counter_cooldown()); } static inline void -unspecialize(_Py_CODEUNIT *instr, int reason) +unspecialize(_Py_CODEUNIT *instr) { assert(!PyErr_Occurred()); uint8_t opcode = FT_ATOMIC_LOAD_UINT8_RELAXED(instr->op.code); @@ -729,7 +730,6 @@ unspecialize(_Py_CODEUNIT *instr, int reason) SPECIALIZATION_FAIL(generic_opcode, SPEC_FAIL_OTHER); return; } - SPECIALIZATION_FAIL(generic_opcode, reason); _Py_BackoffCounter *counter = (_Py_BackoffCounter *)instr + 1; _Py_BackoffCounter cur = load_counter(counter); set_counter(counter, adaptive_counter_backoff(cur)); @@ -1520,12 +1520,12 @@ PyObject *descr, DescriptorClassification kind, bool is_method) return 1; } -void -_Py_Specialize_LoadGlobal( +static void +specialize_load_global_lock_held( PyObject *globals, PyObject *builtins, _Py_CODEUNIT *instr, PyObject *name) { - assert(ENABLE_SPECIALIZATION); + assert(ENABLE_SPECIALIZATION_FT); assert(_PyOpcode_Caches[LOAD_GLOBAL] == INLINE_CACHE_ENTRIES_LOAD_GLOBAL); /* Use inline cache */ _PyLoadGlobalCache *cache = (_PyLoadGlobalCache *)(instr + 1); @@ -1550,8 +1550,8 @@ _Py_Specialize_LoadGlobal( SPECIALIZATION_FAIL(LOAD_GLOBAL, SPEC_FAIL_OUT_OF_RANGE); goto fail; } - uint32_t keys_version = _PyDictKeys_GetVersionForCurrentState( - interp, globals_keys); + uint32_t keys_version = _PyDict_GetKeysVersionForCurrentState( + interp, (PyDictObject*) globals); if (keys_version == 0) { SPECIALIZATION_FAIL(LOAD_GLOBAL, SPEC_FAIL_OUT_OF_VERSIONS); goto fail; @@ -1562,8 +1562,8 @@ _Py_Specialize_LoadGlobal( } cache->index = (uint16_t)index; 
cache->module_keys_version = (uint16_t)keys_version; - instr->op.code = LOAD_GLOBAL_MODULE; - goto success; + specialize(instr, LOAD_GLOBAL_MODULE); + return; } if (!PyDict_CheckExact(builtins)) { SPECIALIZATION_FAIL(LOAD_GLOBAL, SPEC_FAIL_LOAD_GLOBAL_NON_DICT); @@ -1583,8 +1583,8 @@ _Py_Specialize_LoadGlobal( SPECIALIZATION_FAIL(LOAD_GLOBAL, SPEC_FAIL_OUT_OF_RANGE); goto fail; } - uint32_t globals_version = _PyDictKeys_GetVersionForCurrentState( - interp, globals_keys); + uint32_t globals_version = _PyDict_GetKeysVersionForCurrentState( + interp, (PyDictObject*) globals); if (globals_version == 0) { SPECIALIZATION_FAIL(LOAD_GLOBAL, SPEC_FAIL_OUT_OF_VERSIONS); goto fail; @@ -1593,8 +1593,8 @@ _Py_Specialize_LoadGlobal( SPECIALIZATION_FAIL(LOAD_GLOBAL, SPEC_FAIL_OUT_OF_RANGE); goto fail; } - uint32_t builtins_version = _PyDictKeys_GetVersionForCurrentState( - interp, builtin_keys); + uint32_t builtins_version = _PyDict_GetKeysVersionForCurrentState( + interp, (PyDictObject*) builtins); if (builtins_version == 0) { SPECIALIZATION_FAIL(LOAD_GLOBAL, SPEC_FAIL_OUT_OF_VERSIONS); goto fail; @@ -1606,18 +1606,20 @@ _Py_Specialize_LoadGlobal( cache->index = (uint16_t)index; cache->module_keys_version = (uint16_t)globals_version; cache->builtin_keys_version = (uint16_t)builtins_version; - instr->op.code = LOAD_GLOBAL_BUILTIN; - goto success; -fail: - STAT_INC(LOAD_GLOBAL, failure); - assert(!PyErr_Occurred()); - instr->op.code = LOAD_GLOBAL; - cache->counter = adaptive_counter_backoff(cache->counter); + specialize(instr, LOAD_GLOBAL_BUILTIN); return; -success: - STAT_INC(LOAD_GLOBAL, success); - assert(!PyErr_Occurred()); - cache->counter = adaptive_counter_cooldown(); +fail: + unspecialize(instr); +} + +void +_Py_Specialize_LoadGlobal( + PyObject *globals, PyObject *builtins, + _Py_CODEUNIT *instr, PyObject *name) +{ + Py_BEGIN_CRITICAL_SECTION2(globals, builtins); + specialize_load_global_lock_held(globals, builtins, instr, name); + Py_END_CRITICAL_SECTION2(); } #ifdef Py_STATS @@ -1812,86 +1814,54 @@ _Py_Specialize_BinarySubscr( cache->counter = adaptive_counter_cooldown(); } -void -_Py_Specialize_StoreSubscr(_PyStackRef container_st, _PyStackRef sub_st, _Py_CODEUNIT *instr) -{ - PyObject *container = PyStackRef_AsPyObjectBorrow(container_st); - PyObject *sub = PyStackRef_AsPyObjectBorrow(sub_st); - assert(ENABLE_SPECIALIZATION); - _PyStoreSubscrCache *cache = (_PyStoreSubscrCache *)(instr + 1); - PyTypeObject *container_type = Py_TYPE(container); - if (container_type == &PyList_Type) { - if (PyLong_CheckExact(sub)) { - if (_PyLong_IsNonNegativeCompact((PyLongObject *)sub) - && ((PyLongObject *)sub)->long_value.ob_digit[0] < (size_t)PyList_GET_SIZE(container)) - { - instr->op.code = STORE_SUBSCR_LIST_INT; - goto success; - } - else { - SPECIALIZATION_FAIL(STORE_SUBSCR, SPEC_FAIL_OUT_OF_RANGE); - goto fail; - } - } - else if (PySlice_Check(sub)) { - SPECIALIZATION_FAIL(STORE_SUBSCR, SPEC_FAIL_SUBSCR_LIST_SLICE); - goto fail; - } - else { - SPECIALIZATION_FAIL(STORE_SUBSCR, SPEC_FAIL_OTHER); - goto fail; - } - } - if (container_type == &PyDict_Type) { - instr->op.code = STORE_SUBSCR_DICT; - goto success; - } #ifdef Py_STATS +static int +store_subscr_fail_kind(PyObject *container_type) +{ PyMappingMethods *as_mapping = container_type->tp_as_mapping; if (as_mapping && (as_mapping->mp_ass_subscript == PyDict_Type.tp_as_mapping->mp_ass_subscript)) { - SPECIALIZATION_FAIL(STORE_SUBSCR, SPEC_FAIL_SUBSCR_DICT_SUBCLASS_NO_OVERRIDE); - goto fail; + return SPEC_FAIL_SUBSCR_DICT_SUBCLASS_NO_OVERRIDE; } if 
(PyObject_CheckBuffer(container)) { if (PyLong_CheckExact(sub) && (!_PyLong_IsNonNegativeCompact((PyLongObject *)sub))) { - SPECIALIZATION_FAIL(STORE_SUBSCR, SPEC_FAIL_OUT_OF_RANGE); + return SPEC_FAIL_OUT_OF_RANGE; } else if (strcmp(container_type->tp_name, "array.array") == 0) { if (PyLong_CheckExact(sub)) { - SPECIALIZATION_FAIL(STORE_SUBSCR, SPEC_FAIL_SUBSCR_ARRAY_INT); + return SPEC_FAIL_SUBSCR_ARRAY_INT; } else if (PySlice_Check(sub)) { - SPECIALIZATION_FAIL(STORE_SUBSCR, SPEC_FAIL_SUBSCR_ARRAY_SLICE); + return SPEC_FAIL_SUBSCR_ARRAY_SLICE; } else { - SPECIALIZATION_FAIL(STORE_SUBSCR, SPEC_FAIL_OTHER); + return SPEC_FAIL_OTHER; } } else if (PyByteArray_CheckExact(container)) { if (PyLong_CheckExact(sub)) { - SPECIALIZATION_FAIL(STORE_SUBSCR, SPEC_FAIL_SUBSCR_BYTEARRAY_INT); + return SPEC_FAIL_SUBSCR_BYTEARRAY_INT; } else if (PySlice_Check(sub)) { - SPECIALIZATION_FAIL(STORE_SUBSCR, SPEC_FAIL_SUBSCR_BYTEARRAY_SLICE); + return SPEC_FAIL_SUBSCR_BYTEARRAY_SLICE; } else { - SPECIALIZATION_FAIL(STORE_SUBSCR, SPEC_FAIL_OTHER); + return SPEC_FAIL_OTHER; } } else { if (PyLong_CheckExact(sub)) { - SPECIALIZATION_FAIL(STORE_SUBSCR, SPEC_FAIL_SUBSCR_BUFFER_INT); + return SPEC_FAIL_SUBSCR_BUFFER_INT; } else if (PySlice_Check(sub)) { - SPECIALIZATION_FAIL(STORE_SUBSCR, SPEC_FAIL_SUBSCR_BUFFER_SLICE); + return SPEC_FAIL_SUBSCR_BUFFER_SLICE; } else { - SPECIALIZATION_FAIL(STORE_SUBSCR, SPEC_FAIL_OTHER); + return SPEC_FAIL_OTHER; } } - goto fail; + return SPEC_FAIL_OTHER; } PyObject *descriptor = _PyType_Lookup(container_type, &_Py_ID(__setitem__)); if (descriptor && Py_TYPE(descriptor) == &PyFunction_Type) { @@ -1899,25 +1869,55 @@ _Py_Specialize_StoreSubscr(_PyStackRef container_st, _PyStackRef sub_st, _Py_COD PyCodeObject *code = (PyCodeObject *)func->func_code; int kind = function_kind(code); if (kind == SIMPLE_FUNCTION) { - SPECIALIZATION_FAIL(STORE_SUBSCR, SPEC_FAIL_SUBSCR_PY_SIMPLE); + return SPEC_FAIL_SUBSCR_PY_SIMPLE; } else { - SPECIALIZATION_FAIL(STORE_SUBSCR, SPEC_FAIL_SUBSCR_PY_OTHER); + return SPEC_FAIL_SUBSCR_PY_OTHER; } - goto fail; } -#endif // Py_STATS - SPECIALIZATION_FAIL(STORE_SUBSCR, SPEC_FAIL_OTHER); -fail: - STAT_INC(STORE_SUBSCR, failure); - assert(!PyErr_Occurred()); - instr->op.code = STORE_SUBSCR; - cache->counter = adaptive_counter_backoff(cache->counter); - return; -success: - STAT_INC(STORE_SUBSCR, success); - assert(!PyErr_Occurred()); - cache->counter = adaptive_counter_cooldown(); + return SPEC_FAIL_OTHER; +} +#endif + +void +_Py_Specialize_StoreSubscr(_PyStackRef container_st, _PyStackRef sub_st, _Py_CODEUNIT *instr) +{ + PyObject *container = PyStackRef_AsPyObjectBorrow(container_st); + PyObject *sub = PyStackRef_AsPyObjectBorrow(sub_st); + + assert(ENABLE_SPECIALIZATION_FT); + PyTypeObject *container_type = Py_TYPE(container); + if (container_type == &PyList_Type) { + if (PyLong_CheckExact(sub)) { + if (_PyLong_IsNonNegativeCompact((PyLongObject *)sub) + && ((PyLongObject *)sub)->long_value.ob_digit[0] < (size_t)PyList_GET_SIZE(container)) + { + specialize(instr, STORE_SUBSCR_LIST_INT); + return; + } + else { + SPECIALIZATION_FAIL(STORE_SUBSCR, SPEC_FAIL_OUT_OF_RANGE); + unspecialize(instr); + return; + } + } + else if (PySlice_Check(sub)) { + SPECIALIZATION_FAIL(STORE_SUBSCR, SPEC_FAIL_SUBSCR_LIST_SLICE); + unspecialize(instr); + return; + } + else { + SPECIALIZATION_FAIL(STORE_SUBSCR, SPEC_FAIL_OTHER); + unspecialize(instr); + return; + } + } + if (container_type == &PyDict_Type) { + specialize(instr, STORE_SUBSCR_DICT); + return; + } + 
SPECIALIZATION_FAIL(STORE_SUBSCR, store_subscr_fail_kind(container_type)); + unspecialize(instr); } /* Returns a borrowed reference. @@ -2243,6 +2243,7 @@ _Py_Specialize_CallKw(_PyStackRef callable_st, _Py_CODEUNIT *instr, int nargs) } } +#ifdef Py_STATS static int binary_op_fail_kind(int oparg, PyObject *lhs, PyObject *rhs) { @@ -2310,6 +2311,7 @@ binary_op_fail_kind(int oparg, PyObject *lhs, PyObject *rhs) } Py_UNREACHABLE(); } +#endif void _Py_Specialize_BinaryOp(_PyStackRef lhs_st, _PyStackRef rhs_st, _Py_CODEUNIT *instr, @@ -2373,7 +2375,8 @@ _Py_Specialize_BinaryOp(_PyStackRef lhs_st, _PyStackRef rhs_st, _Py_CODEUNIT *in } break; } - unspecialize(instr, binary_op_fail_kind(oparg, lhs, rhs)); + SPECIALIZATION_FAIL(BINARY_OP, binary_op_fail_kind(oparg, lhs, rhs)); + unspecialize(instr); } @@ -2483,39 +2486,33 @@ _Py_Specialize_UnpackSequence(_PyStackRef seq_st, _Py_CODEUNIT *instr, int oparg { PyObject *seq = PyStackRef_AsPyObjectBorrow(seq_st); - assert(ENABLE_SPECIALIZATION); + assert(ENABLE_SPECIALIZATION_FT); assert(_PyOpcode_Caches[UNPACK_SEQUENCE] == INLINE_CACHE_ENTRIES_UNPACK_SEQUENCE); - _PyUnpackSequenceCache *cache = (_PyUnpackSequenceCache *)(instr + 1); if (PyTuple_CheckExact(seq)) { if (PyTuple_GET_SIZE(seq) != oparg) { SPECIALIZATION_FAIL(UNPACK_SEQUENCE, SPEC_FAIL_EXPECTED_ERROR); - goto failure; + unspecialize(instr); + return; } if (PyTuple_GET_SIZE(seq) == 2) { - instr->op.code = UNPACK_SEQUENCE_TWO_TUPLE; - goto success; + specialize(instr, UNPACK_SEQUENCE_TWO_TUPLE); + return; } - instr->op.code = UNPACK_SEQUENCE_TUPLE; - goto success; + specialize(instr, UNPACK_SEQUENCE_TUPLE); + return; } if (PyList_CheckExact(seq)) { if (PyList_GET_SIZE(seq) != oparg) { SPECIALIZATION_FAIL(UNPACK_SEQUENCE, SPEC_FAIL_EXPECTED_ERROR); - goto failure; + unspecialize(instr); + return; } - instr->op.code = UNPACK_SEQUENCE_LIST; - goto success; + specialize(instr, UNPACK_SEQUENCE_LIST); + return; } SPECIALIZATION_FAIL(UNPACK_SEQUENCE, unpack_sequence_fail_kind(seq)); -failure: - STAT_INC(UNPACK_SEQUENCE, failure); - instr->op.code = UNPACK_SEQUENCE; - cache->counter = adaptive_counter_backoff(cache->counter); - return; -success: - STAT_INC(UNPACK_SEQUENCE, success); - cache->counter = adaptive_counter_cooldown(); + unspecialize(instr); } #ifdef Py_STATS @@ -2663,103 +2660,109 @@ _Py_Specialize_Send(_PyStackRef receiver_st, _Py_CODEUNIT *instr) cache->counter = adaptive_counter_cooldown(); } +#ifdef Py_STATS +static int +to_bool_fail_kind(PyObject *value) +{ + if (PyByteArray_CheckExact(value)) { + return SPEC_FAIL_TO_BOOL_BYTEARRAY; + } + if (PyBytes_CheckExact(value)) { + return SPEC_FAIL_TO_BOOL_BYTES; + } + if (PyDict_CheckExact(value)) { + return SPEC_FAIL_TO_BOOL_DICT; + } + if (PyFloat_CheckExact(value)) { + return SPEC_FAIL_TO_BOOL_FLOAT; + } + if (PyMemoryView_Check(value)) { + return SPEC_FAIL_TO_BOOL_MEMORY_VIEW; + } + if (PyAnySet_CheckExact(value)) { + return SPEC_FAIL_TO_BOOL_SET; + } + if (PyTuple_CheckExact(value)) { + return SPEC_FAIL_TO_BOOL_TUPLE; + } + return SPEC_FAIL_OTHER; +} +#endif // Py_STATS + +static int +check_type_always_true(PyTypeObject *ty) +{ + PyNumberMethods *nb = ty->tp_as_number; + if (nb && nb->nb_bool) { + return SPEC_FAIL_TO_BOOL_NUMBER; + } + PyMappingMethods *mp = ty->tp_as_mapping; + if (mp && mp->mp_length) { + return SPEC_FAIL_TO_BOOL_MAPPING; + } + PySequenceMethods *sq = ty->tp_as_sequence; + if (sq && sq->sq_length) { + return SPEC_FAIL_TO_BOOL_SEQUENCE; + } + return 0; +} + void _Py_Specialize_ToBool(_PyStackRef value_o, _Py_CODEUNIT 
*instr) { - assert(ENABLE_SPECIALIZATION); + assert(ENABLE_SPECIALIZATION_FT); assert(_PyOpcode_Caches[TO_BOOL] == INLINE_CACHE_ENTRIES_TO_BOOL); _PyToBoolCache *cache = (_PyToBoolCache *)(instr + 1); PyObject *value = PyStackRef_AsPyObjectBorrow(value_o); + uint8_t specialized_op; if (PyBool_Check(value)) { - instr->op.code = TO_BOOL_BOOL; + specialized_op = TO_BOOL_BOOL; goto success; } if (PyLong_CheckExact(value)) { - instr->op.code = TO_BOOL_INT; + specialized_op = TO_BOOL_INT; goto success; } if (PyList_CheckExact(value)) { - instr->op.code = TO_BOOL_LIST; + specialized_op = TO_BOOL_LIST; goto success; } if (Py_IsNone(value)) { - instr->op.code = TO_BOOL_NONE; + specialized_op = TO_BOOL_NONE; goto success; } if (PyUnicode_CheckExact(value)) { - instr->op.code = TO_BOOL_STR; + specialized_op = TO_BOOL_STR; goto success; } if (PyType_HasFeature(Py_TYPE(value), Py_TPFLAGS_HEAPTYPE)) { - PyNumberMethods *nb = Py_TYPE(value)->tp_as_number; - if (nb && nb->nb_bool) { - SPECIALIZATION_FAIL(TO_BOOL, SPEC_FAIL_TO_BOOL_NUMBER); - goto failure; - } - PyMappingMethods *mp = Py_TYPE(value)->tp_as_mapping; - if (mp && mp->mp_length) { - SPECIALIZATION_FAIL(TO_BOOL, SPEC_FAIL_TO_BOOL_MAPPING); - goto failure; - } - PySequenceMethods *sq = Py_TYPE(value)->tp_as_sequence; - if (sq && sq->sq_length) { - SPECIALIZATION_FAIL(TO_BOOL, SPEC_FAIL_TO_BOOL_SEQUENCE); - goto failure; - } - if (!PyUnstable_Type_AssignVersionTag(Py_TYPE(value))) { + unsigned int version = 0; + int err = _PyType_Validate(Py_TYPE(value), check_type_always_true, &version); + if (err < 0) { SPECIALIZATION_FAIL(TO_BOOL, SPEC_FAIL_OUT_OF_VERSIONS); goto failure; } - uint32_t version = type_get_version(Py_TYPE(value), TO_BOOL); - if (version == 0) { + else if (err > 0) { + SPECIALIZATION_FAIL(TO_BOOL, err); goto failure; } - instr->op.code = TO_BOOL_ALWAYS_TRUE; - write_u32(cache->version, version); + + assert(err == 0); assert(version); + write_u32(cache->version, version); + specialized_op = TO_BOOL_ALWAYS_TRUE; goto success; } -#ifdef Py_STATS - if (PyByteArray_CheckExact(value)) { - SPECIALIZATION_FAIL(TO_BOOL, SPEC_FAIL_TO_BOOL_BYTEARRAY); - goto failure; - } - if (PyBytes_CheckExact(value)) { - SPECIALIZATION_FAIL(TO_BOOL, SPEC_FAIL_TO_BOOL_BYTES); - goto failure; - } - if (PyDict_CheckExact(value)) { - SPECIALIZATION_FAIL(TO_BOOL, SPEC_FAIL_TO_BOOL_DICT); - goto failure; - } - if (PyFloat_CheckExact(value)) { - SPECIALIZATION_FAIL(TO_BOOL, SPEC_FAIL_TO_BOOL_FLOAT); - goto failure; - } - if (PyMemoryView_Check(value)) { - SPECIALIZATION_FAIL(TO_BOOL, SPEC_FAIL_TO_BOOL_MEMORY_VIEW); - goto failure; - } - if (PyAnySet_CheckExact(value)) { - SPECIALIZATION_FAIL(TO_BOOL, SPEC_FAIL_TO_BOOL_SET); - goto failure; - } - if (PyTuple_CheckExact(value)) { - SPECIALIZATION_FAIL(TO_BOOL, SPEC_FAIL_TO_BOOL_TUPLE); - goto failure; - } - SPECIALIZATION_FAIL(TO_BOOL, SPEC_FAIL_OTHER); -#endif // Py_STATS + + SPECIALIZATION_FAIL(TO_BOOL, to_bool_fail_kind(value)); failure: - STAT_INC(TO_BOOL, failure); - instr->op.code = TO_BOOL; - cache->counter = adaptive_counter_backoff(cache->counter); + unspecialize(instr); return; success: - STAT_INC(TO_BOOL, success); - cache->counter = adaptive_counter_cooldown(); + specialize(instr, specialized_op); } +#ifdef Py_STATS static int containsop_fail_kind(PyObject *value) { if (PyUnicode_CheckExact(value)) { @@ -2776,6 +2779,7 @@ containsop_fail_kind(PyObject *value) { } return SPEC_FAIL_OTHER; } +#endif void _Py_Specialize_ContainsOp(_PyStackRef value_st, _Py_CODEUNIT *instr) @@ -2793,7 +2797,8 @@ 
_Py_Specialize_ContainsOp(_PyStackRef value_st, _Py_CODEUNIT *instr) return; } - unspecialize(instr, containsop_fail_kind(value)); + SPECIALIZATION_FAIL(CONTAINS_OP, containsop_fail_kind(value)); + unspecialize(instr); return; } diff --git a/Python/sysmodule.c b/Python/sysmodule.c index aaef5aa532412b..6df297f364c5d3 100644 --- a/Python/sysmodule.c +++ b/Python/sysmodule.c @@ -4104,7 +4104,7 @@ _PySys_SetIntMaxStrDigits(int maxdigits) { if (maxdigits != 0 && maxdigits < _PY_LONG_MAX_STR_DIGITS_THRESHOLD) { PyErr_Format( - PyExc_ValueError, "maxdigits must be 0 or larger than %d", + PyExc_ValueError, "maxdigits must be >= %d or 0 for unlimited", _PY_LONG_MAX_STR_DIGITS_THRESHOLD); return -1; } diff --git a/Python/traceback.c b/Python/traceback.c index 47b77c9108dd9a..e819909b6045c3 100644 --- a/Python/traceback.c +++ b/Python/traceback.c @@ -890,6 +890,8 @@ _Py_DumpASCII(int fd, PyObject *text) static void dump_frame(int fd, _PyInterpreterFrame *frame) { + assert(frame->owner != FRAME_OWNED_BY_CSTACK); + PyCodeObject *code =_PyFrame_GetCode(frame); PUTS(fd, " File "); if (code->co_filename != NULL @@ -963,6 +965,17 @@ dump_traceback(int fd, PyThreadState *tstate, int write_header) unsigned int depth = 0; while (1) { + if (frame->owner == FRAME_OWNED_BY_CSTACK) { + /* Trampoline frame */ + frame = frame->previous; + if (frame == NULL) { + break; + } + + /* Can't have more than one shim frame in a row */ + assert(frame->owner != FRAME_OWNED_BY_CSTACK); + } + if (MAX_FRAME_DEPTH <= depth) { if (MAX_FRAME_DEPTH < depth) { PUTS(fd, "plus "); @@ -971,20 +984,12 @@ dump_traceback(int fd, PyThreadState *tstate, int write_header) } break; } + dump_frame(fd, frame); frame = frame->previous; if (frame == NULL) { break; } - if (frame->owner == FRAME_OWNED_BY_CSTACK) { - /* Trampoline frame */ - frame = frame->previous; - } - if (frame == NULL) { - break; - } - /* Can't have more than one shim frame in a row */ - assert(frame->owner != FRAME_OWNED_BY_CSTACK); depth++; } } diff --git a/README.rst b/README.rst index 0134aafe2a969a..394cdc3638485d 100644 --- a/README.rst +++ b/README.rst @@ -1,4 +1,4 @@ -This is Python version 3.14.0 alpha 1 +This is Python version 3.14.0 alpha 2 ===================================== .. image:: https://github.com/python/cpython/actions/workflows/build.yml/badge.svg?branch=main&event=push @@ -64,7 +64,7 @@ the executable is called ``python.exe``; elsewhere it's just ``python``. Building a complete Python installation requires the use of various additional third-party libraries, depending on your build platform and configure options. Not all standard library modules are buildable or -useable on all platforms. Refer to the +usable on all platforms. Refer to the `Install dependencies `_ section of the `Developer Guide`_ for current detailed information on dependencies for various Linux distributions and macOS. diff --git a/Tools/build/freeze_modules.py b/Tools/build/freeze_modules.py index 7f1dee18319749..8f74abdc83db47 100644 --- a/Tools/build/freeze_modules.py +++ b/Tools/build/freeze_modules.py @@ -63,9 +63,6 @@ 'genericpath', 'ntpath', 'posixpath', - # We must explicitly mark os.path as a frozen module - # even though it will never be imported. 
- f'{OS_PATH} : os.path', 'os', 'site', 'stat', diff --git a/Tools/build/generate_token.py b/Tools/build/generate_token.py index 16c38841e44a4d..d32747f19945d8 100755 --- a/Tools/build/generate_token.py +++ b/Tools/build/generate_token.py @@ -226,7 +226,8 @@ def make_rst(infile, outfile='Doc/library/token-list.inc'): # {AUTO_GENERATED_BY_SCRIPT} ''' token_py_template += ''' -__all__ = ['tok_name', 'ISTERMINAL', 'ISNONTERMINAL', 'ISEOF'] +__all__ = ['tok_name', 'ISTERMINAL', 'ISNONTERMINAL', 'ISEOF', + 'EXACT_TOKEN_TYPES'] %s N_TOKENS = %d diff --git a/Tools/c-analyzer/cpython/_analyzer.py b/Tools/c-analyzer/cpython/_analyzer.py index f07fa8af495e17..6204353e9bd26a 100644 --- a/Tools/c-analyzer/cpython/_analyzer.py +++ b/Tools/c-analyzer/cpython/_analyzer.py @@ -280,12 +280,26 @@ def _is_kwlist(decl): vartype = ''.join(str(decl.vartype).split()) return vartype == 'char*[]' +def _is_local_static_mutex(decl): + if not hasattr(decl, "vartype"): + return False + + if not hasattr(decl, "parent") or decl.parent is None: + # We only want to allow local variables + return False + + vartype = decl.vartype + return (vartype.typespec == 'PyMutex') and (decl.storage == 'static') def _has_other_supported_type(decl): if hasattr(decl, 'file') and decl.file.filename.endswith('.c.h'): assert 'clinic' in decl.file.filename, (decl,) if decl.name == '_kwtuple': return True + if _is_local_static_mutex(decl): + # GH-127081: Local static mutexes are used to + # wrap libc functions that aren't thread safe + return True vartype = str(decl.vartype).split() if vartype[0] == 'struct': vartype = vartype[1:] diff --git a/Tools/c-analyzer/cpython/_parser.py b/Tools/c-analyzer/cpython/_parser.py index 21be53e78841d5..a08b32fa45db3e 100644 --- a/Tools/c-analyzer/cpython/_parser.py +++ b/Tools/c-analyzer/cpython/_parser.py @@ -70,9 +70,7 @@ def clean_lines(text): Python/thread_pthread_stubs.h # only huge constants (safe but parsing is slow) -Modules/_ssl_data_31.h -Modules/_ssl_data_300.h -Modules/_ssl_data_111.h +Modules/_ssl_data_*.h Modules/cjkcodecs/mappings_*.h Modules/unicodedata_db.h Modules/unicodename_db.h diff --git a/Tools/cases_generator/analyzer.py b/Tools/cases_generator/analyzer.py index 96b32445fb62e2..eca851e6de87ae 100644 --- a/Tools/cases_generator/analyzer.py +++ b/Tools/cases_generator/analyzer.py @@ -548,7 +548,10 @@ def has_error_without_pop(op: parser.InstDef) -> bool: "PyStackRef_FromPyObjectImmortal", "PyStackRef_FromPyObjectNew", "PyStackRef_FromPyObjectSteal", - "PyStackRef_Is", + "PyStackRef_IsExactly", + "PyStackRef_IsNone", + "PyStackRef_IsTrue", + "PyStackRef_IsFalse", "PyStackRef_IsNull", "PyStackRef_None", "PyStackRef_TYPE", @@ -623,6 +626,9 @@ def has_error_without_pop(op: parser.InstDef) -> bool: "_Py_NewRef", "_Py_SINGLETON", "_Py_STR", + "_Py_TryIncrefCompare", + "_Py_TryIncrefCompareStackRef", + "_Py_atomic_load_ptr_acquire", "_Py_atomic_load_uintptr_relaxed", "_Py_set_eval_breaker_bit", "advance_backoff_counter", diff --git a/Tools/cases_generator/opcode_metadata_generator.py b/Tools/cases_generator/opcode_metadata_generator.py index 2ad7604af9cc0d..1a9849c0cbbb25 100644 --- a/Tools/cases_generator/opcode_metadata_generator.py +++ b/Tools/cases_generator/opcode_metadata_generator.py @@ -19,8 +19,9 @@ cflags, ) from cwriter import CWriter +from dataclasses import dataclass from typing import TextIO -from stack import get_stack_effect +from stack import Stack, get_stack_effect, get_stack_effects # Constants used instead of size for macro expansions. 
# Note: 1, 2, 4 must match actual cache entry sizes. @@ -107,6 +108,101 @@ def add(inst: Instruction | PseudoInstruction) -> None: emit_stack_effect_function(out, "popped", sorted(popped_data)) emit_stack_effect_function(out, "pushed", sorted(pushed_data)) + generate_max_stack_effect_function(analysis, out) + + +def emit_max_stack_effect_function( + out: CWriter, effects: list[tuple[str, list[str]]] +) -> None: + out.emit("extern int _PyOpcode_max_stack_effect(int opcode, int oparg, int *effect);\n") + out.emit("#ifdef NEED_OPCODE_METADATA\n") + out.emit(f"int _PyOpcode_max_stack_effect(int opcode, int oparg, int *effect) {{\n") + out.emit("switch(opcode) {\n") + for name, exprs in effects: + out.emit(f"case {name}: {{\n") + if len(exprs) == 1: + out.emit(f"*effect = {exprs[0]};\n") + elif len(exprs) == 2: + out.emit(f"*effect = Py_MAX({exprs[0]}, {exprs[1]});\n") + else: + assert len(exprs) > 2 + out.emit(f"int max_eff = Py_MAX({exprs[0]}, {exprs[1]});\n") + for expr in exprs[2:]: + out.emit(f"max_eff = Py_MAX(max_eff, {expr});\n") + out.emit(f"*effect = max_eff;\n") + out.emit(f"return 0;\n") + out.emit("}\n") + out.emit("default:\n") + out.emit(" return -1;\n") + out.emit("}\n") + out.emit("}\n\n") + out.emit("#endif\n\n") + + +@dataclass +class MaxStackEffectSet: + int_effect: int | None + cond_effects: set[str] + + def __init__(self) -> None: + self.int_effect = None + self.cond_effects = set() + + def add(self, stack: Stack) -> None: + top_off = stack.top_offset + top_off_int = top_off.as_int() + if top_off_int is not None: + if self.int_effect is None or top_off_int > self.int_effect: + self.int_effect = top_off_int + else: + self.cond_effects.add(top_off.to_c()) + + def update(self, other: "MaxStackEffectSet") -> None: + if self.int_effect is None: + if other.int_effect is not None: + self.int_effect = other.int_effect + elif other.int_effect is not None: + self.int_effect = max(self.int_effect, other.int_effect) + self.cond_effects.update(other.cond_effects) + + +def generate_max_stack_effect_function(analysis: Analysis, out: CWriter) -> None: + """Generate a function that returns the maximum stack effect of an + instruction while it is executing. + + Specialized instructions that are composed of uops may have a greater stack + effect during instruction execution than the net stack effect of the + instruction if the uops pass values on the stack. 
+ """ + effects: dict[str, MaxStackEffectSet] = {} + + def add(inst: Instruction | PseudoInstruction) -> None: + inst_effect = MaxStackEffectSet() + for stack in get_stack_effects(inst): + inst_effect.add(stack) + effects[inst.name] = inst_effect + + # Collect unique stack effects for each instruction + for inst in analysis.instructions.values(): + add(inst) + for pseudo in analysis.pseudos.values(): + add(pseudo) + + # Merge the effects of all specializations in a family into the generic + # instruction + for family in analysis.families.values(): + for inst in family.members: + effects[family.name].update(effects[inst.name]) + + data: list[tuple[str, list[str]]] = [] + for name, effs in sorted(effects.items(), key=lambda kv: kv[0]): + exprs = [] + if effs.int_effect is not None: + exprs.append(str(effs.int_effect)) + exprs.extend(sorted(effs.cond_effects)) + data.append((name, exprs)) + emit_max_stack_effect_function(out, data) + def generate_is_pseudo(analysis: Analysis, out: CWriter) -> None: """Write the IS_PSEUDO_INSTR macro""" diff --git a/Tools/cases_generator/stack.py b/Tools/cases_generator/stack.py index a954bed4df073c..286f47d0cfb11b 100644 --- a/Tools/cases_generator/stack.py +++ b/Tools/cases_generator/stack.py @@ -1,8 +1,9 @@ import re from analyzer import StackItem, StackEffect, Instruction, Uop, PseudoInstruction +from collections import defaultdict from dataclasses import dataclass from cwriter import CWriter -from typing import Iterator +from typing import Iterator, Tuple UNUSED = {"unused"} @@ -385,31 +386,46 @@ def merge(self, other: "Stack", out: CWriter) -> None: self.align(other, out) +def stacks(inst: Instruction | PseudoInstruction) -> Iterator[StackEffect]: + if isinstance(inst, Instruction): + for uop in inst.parts: + if isinstance(uop, Uop): + yield uop.stack + else: + assert isinstance(inst, PseudoInstruction) + yield inst.stack + + +def apply_stack_effect(stack: Stack, effect: StackEffect) -> None: + locals: dict[str, Local] = {} + for var in reversed(effect.inputs): + _, local = stack.pop(var) + if var.name != "unused": + locals[local.name] = local + for var in effect.outputs: + if var.name in locals: + local = locals[var.name] + else: + local = Local.unused(var) + stack.push(local) + + def get_stack_effect(inst: Instruction | PseudoInstruction) -> Stack: stack = Stack() + for s in stacks(inst): + apply_stack_effect(stack, s) + return stack - def stacks(inst: Instruction | PseudoInstruction) -> Iterator[StackEffect]: - if isinstance(inst, Instruction): - for uop in inst.parts: - if isinstance(uop, Uop): - yield uop.stack - else: - assert isinstance(inst, PseudoInstruction) - yield inst.stack +def get_stack_effects(inst: Instruction | PseudoInstruction) -> list[Stack]: + """Returns a list of stack effects after each uop""" + result = [] + stack = Stack() for s in stacks(inst): - locals: dict[str, Local] = {} - for var in reversed(s.inputs): - _, local = stack.pop(var) - if var.name != "unused": - locals[local.name] = local - for var in s.outputs: - if var.name in locals: - local = locals[var.name] - else: - local = Local.unused(var) - stack.push(local) - return stack + apply_stack_effect(stack, s) + result.append(stack.copy()) + return result + @dataclass class Storage: diff --git a/Tools/clinic/libclinic/parse_args.py b/Tools/clinic/libclinic/parse_args.py index fc2d9fe987096d..a57d729bec5733 100644 --- a/Tools/clinic/libclinic/parse_args.py +++ b/Tools/clinic/libclinic/parse_args.py @@ -146,7 +146,7 @@ def declare_parser( GETSET_DOCSTRING_PROTOTYPE_STRVAR: 
Final[str] = libclinic.normalize_snippet(""" PyDoc_STRVAR({getset_basename}__doc__, {docstring}); - #define {getset_basename}_HAS_DOCSTR + #define {getset_basename}_DOCSTR {getset_basename}__doc__ """) IMPL_DEFINITION_PROTOTYPE: Final[str] = libclinic.normalize_snippet(""" static {impl_return_type} @@ -157,9 +157,7 @@ def declare_parser( {{"{name}", {methoddef_cast}{c_basename}{methoddef_cast_end}, {methoddef_flags}, {c_basename}__doc__}}, """) GETTERDEF_PROTOTYPE_DEFINE: Final[str] = libclinic.normalize_snippet(r""" - #if defined({getset_basename}_HAS_DOCSTR) - # define {getset_basename}_DOCSTR {getset_basename}__doc__ - #else + #if !defined({getset_basename}_DOCSTR) # define {getset_basename}_DOCSTR NULL #endif #if defined({getset_name}_GETSETDEF) @@ -170,9 +168,7 @@ def declare_parser( #endif """) SETTERDEF_PROTOTYPE_DEFINE: Final[str] = libclinic.normalize_snippet(r""" - #if defined({getset_name}_HAS_DOCSTR) - # define {getset_basename}_DOCSTR {getset_basename}__doc__ - #else + #if !defined({getset_basename}_DOCSTR) # define {getset_basename}_DOCSTR NULL #endif #if defined({getset_name}_GETSETDEF) diff --git a/Tools/i18n/pygettext.py b/Tools/i18n/pygettext.py index 0d16e8f7da0071..f78ff16bff9039 100755 --- a/Tools/i18n/pygettext.py +++ b/Tools/i18n/pygettext.py @@ -163,16 +163,13 @@ import time import getopt import ast -import token import tokenize +from collections import defaultdict +from dataclasses import dataclass, field +from operator import itemgetter __version__ = '1.5' -default_keywords = ['_'] -DEFAULTKEYWORDS = ', '.join(default_keywords) - -EMPTYSTRING = '' - # The normal pot-file header. msgmerge and Emacs's po-mode work better if it's # there. @@ -306,12 +303,64 @@ def getFilesForName(name): return [] +# Key is the function name, value is a dictionary mapping argument positions to the +# type of the argument. The type is one of 'msgid', 'msgid_plural', or 'msgctxt'. 
+DEFAULTKEYWORDS = { + '_': {0: 'msgid'}, + 'gettext': {0: 'msgid'}, + 'ngettext': {0: 'msgid', 1: 'msgid_plural'}, + 'pgettext': {0: 'msgctxt', 1: 'msgid'}, + 'npgettext': {0: 'msgctxt', 1: 'msgid', 2: 'msgid_plural'}, + 'dgettext': {1: 'msgid'}, + 'dngettext': {1: 'msgid', 2: 'msgid_plural'}, + 'dpgettext': {1: 'msgctxt', 2: 'msgid'}, + 'dnpgettext': {1: 'msgctxt', 2: 'msgid', 3: 'msgid_plural'}, +} + + +def matches_spec(message, spec): + """Check if a message has all the keys defined by the keyword spec.""" + return all(key in message for key in spec.values()) + + +@dataclass(frozen=True) +class Location: + filename: str + lineno: int + + def __lt__(self, other): + return (self.filename, self.lineno) < (other.filename, other.lineno) + + +@dataclass +class Message: + msgid: str + msgid_plural: str | None + msgctxt: str | None + locations: set[Location] = field(default_factory=set) + is_docstring: bool = False + + def add_location(self, filename, lineno, msgid_plural=None, *, is_docstring=False): + if self.msgid_plural is None: + self.msgid_plural = msgid_plural + self.locations.add(Location(filename, lineno)) + self.is_docstring |= is_docstring + + +def key_for(msgid, msgctxt=None): + if msgctxt is not None: + return (msgctxt, msgid) + return msgid + + class TokenEater: def __init__(self, options): self.__options = options self.__messages = {} self.__state = self.__waiting - self.__data = [] + self.__data = defaultdict(str) + self.__curr_arg = 0 + self.__curr_keyword = None self.__lineno = -1 self.__freshmodule = 1 self.__curfile = None @@ -331,7 +380,7 @@ def __waiting(self, ttype, tstring, lineno): # module docstring? if self.__freshmodule: if ttype == tokenize.STRING and is_literal_string(tstring): - self.__addentry(safe_eval(tstring), lineno, isdocstring=1) + self.__addentry({'msgid': safe_eval(tstring)}, lineno, is_docstring=True) self.__freshmodule = 0 return if ttype in (tokenize.COMMENT, tokenize.NL, tokenize.ENCODING): @@ -346,6 +395,7 @@ def __waiting(self, ttype, tstring, lineno): return if ttype == tokenize.NAME and tstring in opts.keywords: self.__state = self.__keywordseen + self.__curr_keyword = tstring return if ttype == tokenize.STRING: maybe_fstring = ast.parse(tstring, mode='eval').body @@ -397,7 +447,8 @@ def __waiting(self, ttype, tstring, lineno): }, file=sys.stderr) continue if isinstance(arg.value, str): - self.__addentry(arg.value, lineno) + self.__curr_keyword = func_name + self.__addentry({'msgid': arg.value}, lineno) def __suiteseen(self, ttype, tstring, lineno): # skip over any enclosure pairs until we see the colon @@ -413,7 +464,7 @@ def __suiteseen(self, ttype, tstring, lineno): def __suitedocstring(self, ttype, tstring, lineno): # ignore any intervening noise if ttype == tokenize.STRING and is_literal_string(tstring): - self.__addentry(safe_eval(tstring), lineno, isdocstring=1) + self.__addentry({'msgid': safe_eval(tstring)}, lineno, is_docstring=True) self.__state = self.__waiting elif ttype not in (tokenize.NEWLINE, tokenize.INDENT, tokenize.COMMENT): @@ -422,44 +473,90 @@ def __suitedocstring(self, ttype, tstring, lineno): def __keywordseen(self, ttype, tstring, lineno): if ttype == tokenize.OP and tstring == '(': - self.__data = [] + self.__data.clear() + self.__curr_arg = 0 + self.__enclosurecount = 0 self.__lineno = lineno self.__state = self.__openseen else: self.__state = self.__waiting def __openseen(self, ttype, tstring, lineno): - if ttype == tokenize.OP and tstring == ')': - # We've seen the last of the translatable strings. 
Record the - # line number of the first line of the strings and update the list - # of messages seen. Reset state for the next batch. If there - # were no strings inside _(), then just ignore this entry. - if self.__data: - self.__addentry(EMPTYSTRING.join(self.__data)) - self.__state = self.__waiting - elif ttype == tokenize.STRING and is_literal_string(tstring): - self.__data.append(safe_eval(tstring)) - elif ttype not in [tokenize.COMMENT, token.INDENT, token.DEDENT, - token.NEWLINE, tokenize.NL]: - # warn if we see anything else than STRING or whitespace - print(_( - '*** %(file)s:%(lineno)s: Seen unexpected token "%(token)s"' - ) % { - 'token': tstring, - 'file': self.__curfile, - 'lineno': self.__lineno - }, file=sys.stderr) - self.__state = self.__waiting + spec = self.__options.keywords[self.__curr_keyword] + arg_type = spec.get(self.__curr_arg) + expect_string_literal = arg_type is not None + + if ttype == tokenize.OP and self.__enclosurecount == 0: + if tstring == ')': + # We've seen the last of the translatable strings. Record the + # line number of the first line of the strings and update the list + # of messages seen. Reset state for the next batch. If there + # were no strings inside _(), then just ignore this entry. + if self.__data: + self.__addentry(self.__data) + self.__state = self.__waiting + return + elif tstring == ',': + # Advance to the next argument + self.__curr_arg += 1 + return + + if expect_string_literal: + if ttype == tokenize.STRING and is_literal_string(tstring): + self.__data[arg_type] += safe_eval(tstring) + elif ttype not in (tokenize.COMMENT, tokenize.INDENT, tokenize.DEDENT, + tokenize.NEWLINE, tokenize.NL): + # We are inside an argument which is a translatable string and + # we encountered a token that is not a string. This is an error. + self.warn_unexpected_token(tstring) + self.__enclosurecount = 0 + self.__state = self.__waiting + elif ttype == tokenize.OP: + if tstring in '([{': + self.__enclosurecount += 1 + elif tstring in ')]}': + self.__enclosurecount -= 1 def __ignorenext(self, ttype, tstring, lineno): self.__state = self.__waiting - def __addentry(self, msg, lineno=None, isdocstring=0): + def __addentry(self, msg, lineno=None, *, is_docstring=False): + msgid = msg.get('msgid') + if msgid in self.__options.toexclude: + return + if not is_docstring: + spec = self.__options.keywords[self.__curr_keyword] + if not matches_spec(msg, spec): + return if lineno is None: lineno = self.__lineno - if not msg in self.__options.toexclude: - entry = (self.__curfile, lineno) - self.__messages.setdefault(msg, {})[entry] = isdocstring + msgctxt = msg.get('msgctxt') + msgid_plural = msg.get('msgid_plural') + key = key_for(msgid, msgctxt) + if key in self.__messages: + self.__messages[key].add_location( + self.__curfile, + lineno, + msgid_plural, + is_docstring=is_docstring, + ) + else: + self.__messages[key] = Message( + msgid=msgid, + msgid_plural=msgid_plural, + msgctxt=msgctxt, + locations={Location(self.__curfile, lineno)}, + is_docstring=is_docstring, + ) + + def warn_unexpected_token(self, token): + print(_( + '*** %(file)s:%(lineno)s: Seen unexpected token "%(token)s"' + ) % { + 'token': token, + 'file': self.__curfile, + 'lineno': self.__lineno + }, file=sys.stderr) def set_filename(self, filename): self.__curfile = filename @@ -472,55 +569,54 @@ def write(self, fp): print(pot_header % {'time': timestamp, 'version': __version__, 'charset': encoding, 'encoding': '8bit'}, file=fp) - # Sort the entries. 
First sort each particular entry's keys, then - # sort all the entries by their first item. - reverse = {} - for k, v in self.__messages.items(): - keys = sorted(v.keys()) - reverse.setdefault(tuple(keys), []).append((k, v)) - rkeys = sorted(reverse.keys()) - for rkey in rkeys: - rentries = reverse[rkey] - rentries.sort() - for k, v in rentries: - # If the entry was gleaned out of a docstring, then add a - # comment stating so. This is to aid translators who may wish - # to skip translating some unimportant docstrings. - isdocstring = any(v.values()) - # k is the message string, v is a dictionary-set of (filename, - # lineno) tuples. We want to sort the entries in v first by - # file name and then by line number. - v = sorted(v.keys()) - if not options.writelocations: - pass + + # Sort locations within each message by filename and lineno + sorted_keys = [ + (key, sorted(msg.locations)) + for key, msg in self.__messages.items() + ] + # Sort messages by locations + # For example, a message with locations [('test.py', 1), ('test.py', 2)] will + # appear before a message with locations [('test.py', 1), ('test.py', 3)] + sorted_keys.sort(key=itemgetter(1)) + + for key, locations in sorted_keys: + msg = self.__messages[key] + if options.writelocations: # location comments are different b/w Solaris and GNU: - elif options.locationstyle == options.SOLARIS: - for filename, lineno in v: - d = {'filename': filename, 'lineno': lineno} - print(_( - '# File: %(filename)s, line: %(lineno)d') % d, file=fp) + if options.locationstyle == options.SOLARIS: + for location in locations: + print(f'# File: {location.filename}, line: {location.lineno}', file=fp) elif options.locationstyle == options.GNU: # fit as many locations on one line, as long as the # resulting line length doesn't exceed 'options.width' locline = '#:' - for filename, lineno in v: - d = {'filename': filename, 'lineno': lineno} - s = _(' %(filename)s:%(lineno)d') % d + for location in locations: + s = f' {location.filename}:{location.lineno}' if len(locline) + len(s) <= options.width: locline = locline + s else: print(locline, file=fp) - locline = "#:" + s + locline = f'#:{s}' if len(locline) > 2: print(locline, file=fp) - if isdocstring: - print('#, docstring', file=fp) - print('msgid', normalize(k, encoding), file=fp) + if msg.is_docstring: + # If the entry was gleaned out of a docstring, then add a + # comment stating so. This is to aid translators who may wish + # to skip translating some unimportant docstrings. 
+ print('#, docstring', file=fp) + if msg.msgctxt is not None: + print('msgctxt', normalize(msg.msgctxt, encoding), file=fp) + print('msgid', normalize(msg.msgid, encoding), file=fp) + if msg.msgid_plural is not None: + print('msgid_plural', normalize(msg.msgid_plural, encoding), file=fp) + print('msgstr[0] ""', file=fp) + print('msgstr[1] ""\n', file=fp) + else: print('msgstr ""\n', file=fp) def main(): - global default_keywords try: opts, args = getopt.getopt( sys.argv[1:], @@ -557,7 +653,7 @@ class Options: locations = {'gnu' : options.GNU, 'solaris' : options.SOLARIS, } - + no_default_keywords = False # parse options for opt, arg in opts: if opt in ('-h', '--help'): @@ -573,7 +669,7 @@ class Options: elif opt in ('-k', '--keyword'): options.keywords.append(arg) elif opt in ('-K', '--no-default-keywords'): - default_keywords = [] + no_default_keywords = True elif opt in ('-n', '--add-location'): options.writelocations = 1 elif opt in ('--no-location',): @@ -613,7 +709,9 @@ class Options: make_escapes(not options.escape) # calculate all keywords - options.keywords.extend(default_keywords) + options.keywords = {kw: {0: 'msgid'} for kw in options.keywords} + if not no_default_keywords: + options.keywords |= DEFAULTKEYWORDS # initialize list of strings to exclude if options.excludefilename: diff --git a/Tools/jit/_stencils.py b/Tools/jit/_stencils.py index 61be8fd3bbdf55..ee761a73fa808a 100644 --- a/Tools/jit/_stencils.py +++ b/Tools/jit/_stencils.py @@ -202,7 +202,8 @@ def pad(self, alignment: int) -> None: """Pad the stencil to the given alignment.""" offset = len(self.body) padding = -offset % alignment - self.disassembly.append(f"{offset:x}: {' '.join(['00'] * padding)}") + if padding: + self.disassembly.append(f"{offset:x}: {' '.join(['00'] * padding)}") self.body.extend([0] * padding) def remove_jump(self, *, alignment: int = 1) -> None: diff --git a/Tools/jit/_targets.py b/Tools/jit/_targets.py index d8dce0a905c0f8..d23ced19842347 100644 --- a/Tools/jit/_targets.py +++ b/Tools/jit/_targets.py @@ -61,10 +61,11 @@ async def _parse(self, path: pathlib.Path) -> _stencils.StencilGroup: args = ["--disassemble", "--reloc", f"{path}"] output = await _llvm.maybe_run("llvm-objdump", args, echo=self.verbose) if output is not None: + # Make sure that full paths don't leak out (for reproducibility): + long, short = str(path), str(path.name) group.code.disassembly.extend( - line.expandtabs().strip() + line.expandtabs().strip().replace(long, short) for line in output.splitlines() - if not line.isspace() ) args = [ "--elf-output-style=JSON", @@ -90,9 +91,6 @@ async def _parse(self, path: pathlib.Path) -> _stencils.StencilGroup: if group.data.body: line = f"0: {str(bytes(group.data.body)).removeprefix('b')}" group.data.disassembly.append(line) - group.process_relocations( - known_symbols=self.known_symbols, alignment=self.alignment - ) return group def _handle_section(self, section: _S, group: _stencils.StencilGroup) -> None: @@ -122,6 +120,10 @@ async def _compile( f"-I{CPYTHON / 'Tools' / 'jit'}", "-O3", "-c", + # Shorten full absolute file paths in the generated code (like the + # __FILE__ macro and assert failure messages) for reproducibility: + f"-ffile-prefix-map={CPYTHON}=.", + f"-ffile-prefix-map={tempdir}=.", # This debug info isn't necessary, and bloats out the JIT'ed code. # We *may* be able to re-enable this, process it, and JIT it for a # nicer debugging experience... 
but that needs a lot more research: @@ -167,7 +169,12 @@ async def _build_stencils(self) -> dict[str, _stencils.StencilGroup]: c.write_text(template.replace("CASE", case)) coro = self._compile(opname, c, work) tasks.append(group.create_task(coro, name=opname)) - return {task.get_name(): task.result() for task in tasks} + stencil_groups = {task.get_name(): task.result() for task in tasks} + for stencil_group in stencil_groups.values(): + stencil_group.process_relocations( + known_symbols=self.known_symbols, alignment=self.alignment + ) + return stencil_groups def build( self, out: pathlib.Path, *, comment: str = "", force: bool = False diff --git a/Tools/jit/_writer.py b/Tools/jit/_writer.py index 81a9f08db31703..5588784544ee00 100644 --- a/Tools/jit/_writer.py +++ b/Tools/jit/_writer.py @@ -77,6 +77,6 @@ def dump( groups: dict[str, _stencils.StencilGroup], symbols: dict[str, int] ) -> typing.Iterator[str]: """Yield a JIT compiler line-by-line as a C header file.""" - for opname, group in sorted(groups.items()): + for opname, group in groups.items(): yield from _dump_stencil(opname, group) yield from _dump_footer(groups, symbols) diff --git a/Tools/jit/build.py b/Tools/jit/build.py index 4a23c6f0afa74a..a8cb0f67c36363 100644 --- a/Tools/jit/build.py +++ b/Tools/jit/build.py @@ -8,7 +8,7 @@ import _targets if __name__ == "__main__": - comment = f"$ {shlex.join([sys.executable] + sys.argv)}" + comment = f"$ {shlex.join([pathlib.Path(sys.executable).name] + sys.argv)}" parser = argparse.ArgumentParser(description=__doc__) parser.add_argument( "target", type=_targets.get_target, help="a PEP 11 target triple to compile for" diff --git a/Tools/msi/bundle/Default.wxl b/Tools/msi/bundle/Default.wxl index 49f681d3e11d2e..7208d83ddae61b 100644 --- a/Tools/msi/bundle/Default.wxl +++ b/Tools/msi/bundle/Default.wxl @@ -70,8 +70,8 @@ Select Customize to review current options. Installs the Python documentation files. &pip Installs pip, which can download and install other Python packages. - tcl/tk and &IDLE - Installs tkinter and the IDLE development environment. + Tcl/Tk, turtle and &IDLE + Installs tkinter, turtle and the IDLE development environment. Python &test suite Installs the standard library test suite. py &launcher diff --git a/Tools/msi/lib/lib.wixproj b/Tools/msi/lib/lib.wixproj index 26311ea32724d1..02078e503d74a4 100644 --- a/Tools/msi/lib/lib.wixproj +++ b/Tools/msi/lib/lib.wixproj @@ -19,6 +19,7 @@ @@ -32,4 +33,4 @@ - \ No newline at end of file + diff --git a/Tools/msi/tcltk/tcltk.wixproj b/Tools/msi/tcltk/tcltk.wixproj index 218f3d15ec88fc..c8b7ab77c4dc6b 100644 --- a/Tools/msi/tcltk/tcltk.wixproj +++ b/Tools/msi/tcltk/tcltk.wixproj @@ -28,7 +28,7 @@ tcltk_lib - $(PySourcePath) !(bindpath.src) @@ -39,4 +39,4 @@ - \ No newline at end of file + diff --git a/Tools/ssl/make_ssl_data.py b/Tools/ssl/make_ssl_data.py index d24e02210d489c..da05d2bc8b9752 100755 --- a/Tools/ssl/make_ssl_data.py +++ b/Tools/ssl/make_ssl_data.py @@ -5,9 +5,28 @@ `library` and `reason` mnemonics to a more recent OpenSSL version. It takes two arguments: -- the path to the OpenSSL source tree (e.g. git checkout) +- the path to the OpenSSL git checkout - the path to the header file to be generated Modules/_ssl_data_{version}.h - error codes are version specific + +The OpenSSL git checkout should be at a specific tag, using commands like: + git tag --list 'openssl-*' + git switch --detach openssl-3.4.0 + + +After generating the definitions, compare the result with newest pre-existing file. 
+You can use a command like: + + git diff --no-index Modules/_ssl_data_31.h Modules/_ssl_data_34.h + +- If the new version *only* adds new definitions, remove the pre-existing file + and adjust the #include in _ssl.c to point to the new version. +- If the new version removes or renumbers some definitions, keep both files and + add a new #include in _ssl.c. + +A newly supported OpenSSL version should also be added to: +- Tools/ssl/multissltests.py +- .github/workflows/build.yml """ import argparse @@ -15,6 +34,7 @@ import operator import os import re +import subprocess parser = argparse.ArgumentParser( @@ -117,9 +137,17 @@ def main(): # sort by libname, numeric error code args.reasons = sorted(reasons, key=operator.itemgetter(0, 3)) + git_describe = subprocess.run( + ['git', 'describe', '--long', '--dirty'], + cwd=args.srcdir, + capture_output=True, + encoding='utf-8', + check=True, + ) lines = [ - "/* File generated by Tools/ssl/make_ssl_data.py */" - f"/* Generated on {datetime.datetime.utcnow().isoformat()} */" + "/* File generated by Tools/ssl/make_ssl_data.py */", + f"/* Generated on {datetime.datetime.now(datetime.UTC).isoformat()} */", + f"/* Generated from Git commit {git_describe.stdout.strip()} */", ] lines.extend(gen_library_codes(args)) lines.append("") diff --git a/Tools/ssl/multissltests.py b/Tools/ssl/multissltests.py index eae0e0c5e8761f..2cd0c39b5a6477 100755 --- a/Tools/ssl/multissltests.py +++ b/Tools/ssl/multissltests.py @@ -51,6 +51,8 @@ "3.1.7", "3.2.3", "3.3.2", + "3.4.0", + # See make_ssl_data.py for notes on adding a new version. ] LIBRESSL_OLD_VERSIONS = [ diff --git a/Tools/wasm/emscripten/__main__.py b/Tools/wasm/emscripten/__main__.py index 2015a3764ea8c8..9ce8dd6a364ad6 100644 --- a/Tools/wasm/emscripten/__main__.py +++ b/Tools/wasm/emscripten/__main__.py @@ -4,20 +4,22 @@ import contextlib import functools import os - -try: - from os import process_cpu_count as cpu_count -except ImportError: - from os import cpu_count -from pathlib import Path import shutil import subprocess import sys import sysconfig import tempfile +from pathlib import Path +from textwrap import dedent + +try: + from os import process_cpu_count as cpu_count +except ImportError: + from os import cpu_count -WASM_DIR = Path(__file__).parent.parent -CHECKOUT = WASM_DIR.parent.parent + +EMSCRIPTEN_DIR = Path(__file__).parent +CHECKOUT = EMSCRIPTEN_DIR.parent.parent.parent CROSS_BUILD_DIR = CHECKOUT / "cross-build" BUILD_DIR = CROSS_BUILD_DIR / "build" @@ -72,7 +74,7 @@ def wrapper(context): print("⎯" * terminal_width) print("📁", working_dir) if clean_ok and getattr(context, "clean", False) and working_dir.exists(): - print(f"🚮 Deleting directory (--clean)...") + print("🚮 Deleting directory (--clean)...") shutil.rmtree(working_dir) working_dir.mkdir(parents=True, exist_ok=True) @@ -207,9 +209,21 @@ def configure_emscripten_python(context, working_dir): quiet=context.quiet, ) - python_js = working_dir / "python.js" + shutil.copy(EMSCRIPTEN_DIR / "node_entry.mjs", working_dir / "node_entry.mjs") + + node_entry = working_dir / "node_entry.mjs" exec_script = working_dir / "python.sh" - exec_script.write_text(f'#!/bin/sh\nexec {host_runner} {python_js} "$@"\n') + exec_script.write_text( + dedent( + f"""\ + #!/bin/sh + + # We compute our own path, not following symlinks and pass it in so that + # node_entry.mjs can set sys.executable correctly. + exec {host_runner} {node_entry} "$(realpath -s $0)" "$@" + """ + ) + ) exec_script.chmod(0o755) print(f"🏃‍♀️ Created {exec_script} ... 
") sys.stdout.flush() diff --git a/Tools/wasm/emscripten/node_entry.mjs b/Tools/wasm/emscripten/node_entry.mjs new file mode 100644 index 00000000000000..cb1c6ff3cba6aa --- /dev/null +++ b/Tools/wasm/emscripten/node_entry.mjs @@ -0,0 +1,30 @@ +import EmscriptenModule from "./python.mjs"; +import { dirname } from 'node:path'; +import { fileURLToPath } from 'node:url'; + +if (process?.versions?.node) { + const nodeVersion = Number(process.versions.node.split(".", 1)[0]); + if (nodeVersion < 18) { + process.stderr.write( + `Node version must be >= 18, got version ${process.version}\n`, + ); + process.exit(1); + } +} + +const settings = { + preRun(Module) { + const __dirname = dirname(fileURLToPath(import.meta.url)); + Module.FS.mkdirTree("/lib/"); + Module.FS.mount(Module.FS.filesystems.NODEFS, { root: __dirname + "/lib/" }, "/lib/"); + }, + // The first three arguments are: "node", path to this file, path to + // python.sh. After that come the arguments the user passed to python.sh. + arguments: process.argv.slice(3), + // Ensure that sys.executable, sys._base_executable, etc point to python.sh + // not to this file. To properly handle symlinks, python.sh needs to compute + // its own path. + thisProgram: process.argv[2], +}; + +await EmscriptenModule(settings); diff --git a/Tools/wasm/emscripten/node_pre.js b/Tools/wasm/emscripten/node_pre.js deleted file mode 100644 index 54b09dc08233f3..00000000000000 --- a/Tools/wasm/emscripten/node_pre.js +++ /dev/null @@ -1,15 +0,0 @@ -// If process is undefined, we're not running in the node runtime let it go I -// guess? -if (typeof process !== "undefined") { - const nodeVersion = Number(process.versions.node.split(".", 1)[0]); - if (nodeVersion < 18) { - process.stderr.write( - `Node version must be >= 18, got version ${process.version}\n`, - ); - process.exit(1); - } - Module.preRun = () => { - FS.mkdirTree("/lib/"); - FS.mount(NODEFS, { root: __dirname + "/lib/" }, "/lib/"); - }; -} diff --git a/configure b/configure index 5b44a3d69929a0..84b74ac3584bcd 100755 --- a/configure +++ b/configure @@ -7291,7 +7291,7 @@ else $as_nop case $ac_sys_system in #( Emscripten) : - EXEEXT=.js ;; #( + EXEEXT=.mjs ;; #( WASI) : EXEEXT=.wasm ;; #( *) : @@ -9432,6 +9432,7 @@ fi as_fn_append LDFLAGS_NODIST " -sWASM_BIGINT" as_fn_append LDFLAGS_NODIST " -sFORCE_FILESYSTEM -lidbfs.js -lnodefs.js -lproxyfs.js -lworkerfs.js" + as_fn_append LDFLAGS_NODIST " -sEXPORTED_RUNTIME_METHODS=FS" if test "x$enable_wasm_dynamic_linking" = xyes then : @@ -9450,7 +9451,6 @@ then : fi as_fn_append LDFLAGS_NODIST " -sALLOW_MEMORY_GROWTH" as_fn_append LDFLAGS_NODIST " -sEXIT_RUNTIME" - as_fn_append LDFLAGS_NODIST " --pre-js=\$(srcdir)/Tools/wasm/emscripten/node_pre.js" WASM_LINKFORSHARED_DEBUG="-gseparate-dwarf --emit-symbol-map" if test "x$wasm_debug" = xyes diff --git a/configure.ac b/configure.ac index 7904f8990c48ee..8fa6cb60900ad1 100644 --- a/configure.ac +++ b/configure.ac @@ -1327,7 +1327,7 @@ AC_ARG_WITH([suffix], ) ], [ AS_CASE([$ac_sys_system], - [Emscripten], [EXEEXT=.js], + [Emscripten], [EXEEXT=.mjs], [WASI], [EXEEXT=.wasm], [EXEEXT=] ) @@ -2328,6 +2328,7 @@ AS_CASE([$ac_sys_system], dnl Include file system support AS_VAR_APPEND([LDFLAGS_NODIST], [" -sFORCE_FILESYSTEM -lidbfs.js -lnodefs.js -lproxyfs.js -lworkerfs.js"]) + AS_VAR_APPEND([LDFLAGS_NODIST], [" -sEXPORTED_RUNTIME_METHODS=FS"]) AS_VAR_IF([enable_wasm_dynamic_linking], [yes], [ AS_VAR_APPEND([LINKFORSHARED], [" -sMAIN_MODULE"]) @@ -2341,7 +2342,6 @@ AS_CASE([$ac_sys_system], AS_VAR_APPEND([LDFLAGS_NODIST], [" 
-sALLOW_MEMORY_GROWTH"]) dnl not completely sure whether or not we want -sEXIT_RUNTIME, keeping it for now. AS_VAR_APPEND([LDFLAGS_NODIST], [" -sEXIT_RUNTIME"]) - AS_VAR_APPEND([LDFLAGS_NODIST], [" --pre-js=\$(srcdir)/Tools/wasm/emscripten/node_pre.js"]) WASM_LINKFORSHARED_DEBUG="-gseparate-dwarf --emit-symbol-map" AS_VAR_IF([wasm_debug], [yes], [ diff --git a/iOS/README.rst b/iOS/README.rst index 4d7c344d5e9e17..e33455eef8f44a 100644 --- a/iOS/README.rst +++ b/iOS/README.rst @@ -351,13 +351,13 @@ Running specific tests ^^^^^^^^^^^^^^^^^^^^^^ As the test suite is being executed on an iOS simulator, it is not possible to -pass in command line arguments to configure test suite operation. To work around -this limitation, the arguments that would normally be passed as command line -arguments are configured as a static string at the start of the XCTest method -``- (void)testPython`` in ``iOSTestbedTests.m``. To pass an argument to the test -suite, add a a string to the ``argv`` definition. These arguments will be passed -to the test suite as if they had been passed to ``python -m test`` at the -command line. +pass in command line arguments to configure test suite operation. To work +around this limitation, the arguments that would normally be passed as command +line arguments are configured as part of the ``iOSTestbed-Info.plist`` file +that is used to configure the iOS testbed app. In this file, the ``TestArgs`` +key is an array containing the arguments that would be passed to ``python -m`` +on the command line (including ``test`` in position 0, the name of the test +module to be executed). Disabling automated breakpoints ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/iOS/testbed/iOSTestbed.xcodeproj/project.pbxproj b/iOS/testbed/iOSTestbed.xcodeproj/project.pbxproj index d57cfc3dbe0304..6819ac0eeed95f 100644 --- a/iOS/testbed/iOSTestbed.xcodeproj/project.pbxproj +++ b/iOS/testbed/iOSTestbed.xcodeproj/project.pbxproj @@ -17,6 +17,8 @@ 607A66502B0EFFE00010BFC8 /* Python.xcframework in Frameworks */ = {isa = PBXBuildFile; fileRef = 607A664A2B0EFB310010BFC8 /* Python.xcframework */; }; 607A66512B0EFFE00010BFC8 /* Python.xcframework in Embed Frameworks */ = {isa = PBXBuildFile; fileRef = 607A664A2B0EFB310010BFC8 /* Python.xcframework */; settings = {ATTRIBUTES = (CodeSignOnCopy, RemoveHeadersOnCopy, ); }; }; 607A66582B0F079F0010BFC8 /* dylib-Info-template.plist in Resources */ = {isa = PBXBuildFile; fileRef = 607A66572B0F079F0010BFC8 /* dylib-Info-template.plist */; }; + 608619542CB77BA900F46182 /* app_packages in Resources */ = {isa = PBXBuildFile; fileRef = 608619532CB77BA900F46182 /* app_packages */; }; + 608619562CB7819B00F46182 /* app in Resources */ = {isa = PBXBuildFile; fileRef = 608619552CB7819B00F46182 /* app */; }; /* End PBXBuildFile section */ /* Begin PBXContainerItemProxy section */ @@ -66,6 +68,8 @@ 607A664A2B0EFB310010BFC8 /* Python.xcframework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.xcframework; path = Python.xcframework; sourceTree = ""; }; 607A66572B0F079F0010BFC8 /* dylib-Info-template.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; path = "dylib-Info-template.plist"; sourceTree = ""; }; 607A66592B0F08600010BFC8 /* iOSTestbed-Info.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; path = "iOSTestbed-Info.plist"; sourceTree = ""; }; + 608619532CB77BA900F46182 /* app_packages */ = {isa = PBXFileReference; lastKnownFileType = folder; path = app_packages; sourceTree = ""; }; + 
608619552CB7819B00F46182 /* app */ = {isa = PBXFileReference; lastKnownFileType = folder; path = app; sourceTree = ""; }; /* End PBXFileReference section */ /* Begin PBXFrameworksBuildPhase section */ @@ -111,6 +115,8 @@ 607A66142B0EFA380010BFC8 /* iOSTestbed */ = { isa = PBXGroup; children = ( + 608619552CB7819B00F46182 /* app */, + 608619532CB77BA900F46182 /* app_packages */, 607A66592B0F08600010BFC8 /* iOSTestbed-Info.plist */, 607A66572B0F079F0010BFC8 /* dylib-Info-template.plist */, 607A66152B0EFA380010BFC8 /* AppDelegate.h */, @@ -223,7 +229,9 @@ files = ( 607A66252B0EFA390010BFC8 /* LaunchScreen.storyboard in Resources */, 607A66582B0F079F0010BFC8 /* dylib-Info-template.plist in Resources */, + 608619562CB7819B00F46182 /* app in Resources */, 607A66222B0EFA390010BFC8 /* Assets.xcassets in Resources */, + 608619542CB77BA900F46182 /* app_packages in Resources */, ); runOnlyForDeploymentPostprocessing = 0; }; @@ -273,7 +281,7 @@ ); runOnlyForDeploymentPostprocessing = 0; shellPath = /bin/sh; - shellScript = "set -e\n\ninstall_dylib () {\n INSTALL_BASE=$1\n FULL_EXT=$2\n\n # The name of the extension file\n EXT=$(basename \"$FULL_EXT\")\n # The location of the extension file, relative to the bundle\n RELATIVE_EXT=${FULL_EXT#$CODESIGNING_FOLDER_PATH/} \n # The path to the extension file, relative to the install base\n PYTHON_EXT=${RELATIVE_EXT/$INSTALL_BASE/}\n # The full dotted name of the extension module, constructed from the file path.\n FULL_MODULE_NAME=$(echo $PYTHON_EXT | cut -d \".\" -f 1 | tr \"/\" \".\"); \n # A bundle identifier; not actually used, but required by Xcode framework packaging\n FRAMEWORK_BUNDLE_ID=$(echo $PRODUCT_BUNDLE_IDENTIFIER.$FULL_MODULE_NAME | tr \"_\" \"-\")\n # The name of the framework folder.\n FRAMEWORK_FOLDER=\"Frameworks/$FULL_MODULE_NAME.framework\"\n\n # If the framework folder doesn't exist, create it.\n if [ ! 
-d \"$CODESIGNING_FOLDER_PATH/$FRAMEWORK_FOLDER\" ]; then\n echo \"Creating framework for $RELATIVE_EXT\" \n mkdir -p \"$CODESIGNING_FOLDER_PATH/$FRAMEWORK_FOLDER\"\n cp \"$CODESIGNING_FOLDER_PATH/dylib-Info-template.plist\" \"$CODESIGNING_FOLDER_PATH/$FRAMEWORK_FOLDER/Info.plist\"\n plutil -replace CFBundleExecutable -string \"$FULL_MODULE_NAME\" \"$CODESIGNING_FOLDER_PATH/$FRAMEWORK_FOLDER/Info.plist\"\n plutil -replace CFBundleIdentifier -string \"$FRAMEWORK_BUNDLE_ID\" \"$CODESIGNING_FOLDER_PATH/$FRAMEWORK_FOLDER/Info.plist\"\n fi\n \n echo \"Installing binary for $FRAMEWORK_FOLDER/$FULL_MODULE_NAME\" \n mv \"$FULL_EXT\" \"$CODESIGNING_FOLDER_PATH/$FRAMEWORK_FOLDER/$FULL_MODULE_NAME\"\n # Create a placeholder .fwork file where the .so was\n echo \"$FRAMEWORK_FOLDER/$FULL_MODULE_NAME\" > ${FULL_EXT%.so}.fwork\n # Create a back reference to the .so file location in the framework\n echo \"${RELATIVE_EXT%.so}.fwork\" > \"$CODESIGNING_FOLDER_PATH/$FRAMEWORK_FOLDER/$FULL_MODULE_NAME.origin\" \n}\n\nPYTHON_VER=$(ls -1 \"$CODESIGNING_FOLDER_PATH/python/lib\")\necho \"Install Python $PYTHON_VER standard library extension modules...\"\nfind \"$CODESIGNING_FOLDER_PATH/python/lib/$PYTHON_VER/lib-dynload\" -name \"*.so\" | while read FULL_EXT; do\n install_dylib python/lib/$PYTHON_VER/lib-dynload/ \"$FULL_EXT\"\ndone\n\n# Clean up dylib template \nrm -f \"$CODESIGNING_FOLDER_PATH/dylib-Info-template.plist\"\necho \"Signing frameworks as $EXPANDED_CODE_SIGN_IDENTITY_NAME ($EXPANDED_CODE_SIGN_IDENTITY)...\"\nfind \"$CODESIGNING_FOLDER_PATH/Frameworks\" -name \"*.framework\" -exec /usr/bin/codesign --force --sign \"$EXPANDED_CODE_SIGN_IDENTITY\" ${OTHER_CODE_SIGN_FLAGS:-} -o runtime --timestamp=none --preserve-metadata=identifier,entitlements,flags --generate-entitlement-der \"{}\" \\; \n"; + shellScript = "set -e\n\ninstall_dylib () {\n INSTALL_BASE=$1\n FULL_EXT=$2\n\n # The name of the extension file\n EXT=$(basename \"$FULL_EXT\")\n # The location of the extension file, relative to the bundle\n RELATIVE_EXT=${FULL_EXT#$CODESIGNING_FOLDER_PATH/} \n # The path to the extension file, relative to the install base\n PYTHON_EXT=${RELATIVE_EXT/$INSTALL_BASE/}\n # The full dotted name of the extension module, constructed from the file path.\n FULL_MODULE_NAME=$(echo $PYTHON_EXT | cut -d \".\" -f 1 | tr \"/\" \".\"); \n # A bundle identifier; not actually used, but required by Xcode framework packaging\n FRAMEWORK_BUNDLE_ID=$(echo $PRODUCT_BUNDLE_IDENTIFIER.$FULL_MODULE_NAME | tr \"_\" \"-\")\n # The name of the framework folder.\n FRAMEWORK_FOLDER=\"Frameworks/$FULL_MODULE_NAME.framework\"\n\n # If the framework folder doesn't exist, create it.\n if [ ! 
-d \"$CODESIGNING_FOLDER_PATH/$FRAMEWORK_FOLDER\" ]; then\n echo \"Creating framework for $RELATIVE_EXT\" \n mkdir -p \"$CODESIGNING_FOLDER_PATH/$FRAMEWORK_FOLDER\"\n cp \"$CODESIGNING_FOLDER_PATH/dylib-Info-template.plist\" \"$CODESIGNING_FOLDER_PATH/$FRAMEWORK_FOLDER/Info.plist\"\n plutil -replace CFBundleExecutable -string \"$FULL_MODULE_NAME\" \"$CODESIGNING_FOLDER_PATH/$FRAMEWORK_FOLDER/Info.plist\"\n plutil -replace CFBundleIdentifier -string \"$FRAMEWORK_BUNDLE_ID\" \"$CODESIGNING_FOLDER_PATH/$FRAMEWORK_FOLDER/Info.plist\"\n fi\n \n echo \"Installing binary for $FRAMEWORK_FOLDER/$FULL_MODULE_NAME\" \n mv \"$FULL_EXT\" \"$CODESIGNING_FOLDER_PATH/$FRAMEWORK_FOLDER/$FULL_MODULE_NAME\"\n # Create a placeholder .fwork file where the .so was\n echo \"$FRAMEWORK_FOLDER/$FULL_MODULE_NAME\" > ${FULL_EXT%.so}.fwork\n # Create a back reference to the .so file location in the framework\n echo \"${RELATIVE_EXT%.so}.fwork\" > \"$CODESIGNING_FOLDER_PATH/$FRAMEWORK_FOLDER/$FULL_MODULE_NAME.origin\" \n}\n\nPYTHON_VER=$(ls -1 \"$CODESIGNING_FOLDER_PATH/python/lib\")\necho \"Install Python $PYTHON_VER standard library extension modules...\"\nfind \"$CODESIGNING_FOLDER_PATH/python/lib/$PYTHON_VER/lib-dynload\" -name \"*.so\" | while read FULL_EXT; do\n install_dylib python/lib/$PYTHON_VER/lib-dynload/ \"$FULL_EXT\"\ndone\necho \"Install app package extension modules...\"\nfind \"$CODESIGNING_FOLDER_PATH/app_packages\" -name \"*.so\" | while read FULL_EXT; do\n install_dylib app_packages/ \"$FULL_EXT\"\ndone\necho \"Install app extension modules...\"\nfind \"$CODESIGNING_FOLDER_PATH/app\" -name \"*.so\" | while read FULL_EXT; do\n install_dylib app/ \"$FULL_EXT\"\ndone\n\n# Clean up dylib template \nrm -f \"$CODESIGNING_FOLDER_PATH/dylib-Info-template.plist\"\necho \"Signing frameworks as $EXPANDED_CODE_SIGN_IDENTITY_NAME ($EXPANDED_CODE_SIGN_IDENTITY)...\"\nfind \"$CODESIGNING_FOLDER_PATH/Frameworks\" -name \"*.framework\" -exec /usr/bin/codesign --force --sign \"$EXPANDED_CODE_SIGN_IDENTITY\" ${OTHER_CODE_SIGN_FLAGS:-} -o runtime --timestamp=none --preserve-metadata=identifier,entitlements,flags --generate-entitlement-der \"{}\" \\; \n"; }; /* End PBXShellScriptBuildPhase section */ diff --git a/iOS/testbed/iOSTestbed/app/README b/iOS/testbed/iOSTestbed/app/README new file mode 100644 index 00000000000000..af22c685f87976 --- /dev/null +++ b/iOS/testbed/iOSTestbed/app/README @@ -0,0 +1,7 @@ +This folder can contain any Python application code. + +During the build, any binary modules found in this folder will be processed into +iOS Framework form. + +When the test suite runs, this folder will be on the PYTHONPATH, and will be the +working directory for the test suite. diff --git a/iOS/testbed/iOSTestbed/app_packages/README b/iOS/testbed/iOSTestbed/app_packages/README new file mode 100644 index 00000000000000..42d7fdeb813250 --- /dev/null +++ b/iOS/testbed/iOSTestbed/app_packages/README @@ -0,0 +1,7 @@ +This folder can be a target for installing any Python dependencies needed by the +test suite. + +During the build, any binary modules found in this folder will be processed into +iOS Framework form. + +When the test suite runs, this folder will be on the PYTHONPATH. 
diff --git a/iOS/testbed/iOSTestbed/iOSTestbed-Info.plist b/iOS/testbed/iOSTestbed/iOSTestbed-Info.plist index e2aa460b6fd5ee..a582f42a212783 100644 --- a/iOS/testbed/iOSTestbed/iOSTestbed-Info.plist +++ b/iOS/testbed/iOSTestbed/iOSTestbed-Info.plist @@ -41,8 +41,18 @@ <string>UIInterfaceOrientationLandscapeLeft</string> <string>UIInterfaceOrientationLandscapeRight</string> - <key>MainModule</key> - <string>ios</string> + <key>TestArgs</key> + <array> + <string>test</string> + <string>-uall</string> + <string>--single-process</string> + <string>--rerun</string> + <string>-W</string> + </array> <key>UIApplicationSceneManifest</key> <key>UIApplicationSupportsMultipleScenes</key> diff --git a/iOS/testbed/iOSTestbedTests/iOSTestbedTests.m b/iOS/testbed/iOSTestbedTests/iOSTestbedTests.m index 9bf502a808eb88..db00d43da85cbc 100644 --- a/iOS/testbed/iOSTestbedTests/iOSTestbedTests.m +++ b/iOS/testbed/iOSTestbedTests/iOSTestbedTests.m @@ -9,30 +9,38 @@ @implementation iOSTestbedTests - (void)testPython { - // Arguments to pass into the test suite runner. - // argv[0] must identify the process; any subsequent arg - // will be handled as if it were an argument to `python -m test` - const char *argv[] = { - "iOSTestbed", // argv[0] is the process that is running. - "-uall", // Enable all resources - "--single-process", // always run all tests sequentially in a single process - "--rerun", // Re-run failed tests in verbose mode - "-W", // Display test output on failure - // To run a subset of tests, add the test names below; e.g., - // "test_os", - // "test_sys", - }; - - // Start a Python interpreter. + const char **argv; int exit_code; + int failed; PyStatus status; PyPreConfig preconfig; PyConfig config; + PyObject *sys_module; + PyObject *sys_path_attr; + NSArray *test_args; NSString *python_home; + NSString *path; wchar_t *wtmp_str; NSString *resourcePath = [[NSBundle mainBundle] resourcePath]; + // Disable all color, as the Xcode log can't display color + setenv("NO_COLOR", "1", true); + + // Arguments to pass into the test suite runner. + // argv[0] must identify the process; any subsequent arg + // will be handled as if it were an argument to `python -m test` + test_args = [[NSBundle mainBundle] objectForInfoDictionaryKey:@"TestArgs"]; + if (test_args == NULL) { + NSLog(@"Unable to identify test arguments."); + } + argv = malloc(sizeof(char *) * ([test_args count] + 1)); + argv[0] = "iOSTestbed"; + for (int i = 1; i < [test_args count]; i++) { + argv[i] = [[test_args objectAtIndex:i] UTF8String]; + } + NSLog(@"Test command: %@", test_args); + // Generate an isolated Python configuration. NSLog(@"Configuring isolated Python..."); PyPreConfig_InitIsolatedConfig(&preconfig); @@ -50,7 +58,7 @@ - (void)testPython { // Ensure that signal handlers are installed config.install_signal_handlers = 1; // Run the test module. - config.run_module = Py_DecodeLocale("test", NULL); + config.run_module = Py_DecodeLocale([[test_args objectAtIndex:0] UTF8String], NULL); // For debugging - enable verbose mode.
// config.verbose = 1; @@ -83,7 +91,7 @@ - (void)testPython { } NSLog(@"Configure argc/argv..."); - status = PyConfig_SetBytesArgv(&config, sizeof(argv) / sizeof(char *), (char**) argv); + status = PyConfig_SetBytesArgv(&config, [test_args count], (char**) argv); if (PyStatus_Exception(status)) { XCTFail(@"Unable to configure argc/argv: %s", status.err_msg); PyConfig_Clear(&config); @@ -98,11 +106,47 @@ - (void)testPython { return; } + sys_module = PyImport_ImportModule("sys"); + if (sys_module == NULL) { + XCTFail(@"Could not import sys module"); + return; + } + + sys_path_attr = PyObject_GetAttrString(sys_module, "path"); + if (sys_path_attr == NULL) { + XCTFail(@"Could not access sys.path"); + return; + } + + // Add the app packages path + path = [NSString stringWithFormat:@"%@/app_packages", resourcePath, nil]; + NSLog(@"App packages path: %@", path); + wtmp_str = Py_DecodeLocale([path UTF8String], NULL); + failed = PyList_Insert(sys_path_attr, 0, PyUnicode_FromString([path UTF8String])); + if (failed) { + XCTFail(@"Unable to add app packages to sys.path"); + return; + } + PyMem_RawFree(wtmp_str); + + path = [NSString stringWithFormat:@"%@/app", resourcePath, nil]; + NSLog(@"App path: %@", path); + wtmp_str = Py_DecodeLocale([path UTF8String], NULL); + failed = PyList_Insert(sys_path_attr, 0, PyUnicode_FromString([path UTF8String])); + if (failed) { + XCTFail(@"Unable to add app to sys.path"); + return; + } + PyMem_RawFree(wtmp_str); + + // Ensure the working directory is the app folder. + chdir([path UTF8String]); + // Start the test suite. Print a separator to differentiate Python startup logs from app logs NSLog(@"---------------------------------------------------------------------------"); exit_code = Py_RunMain(); - XCTAssertEqual(exit_code, 0, @"Python test suite did not pass"); + XCTAssertEqual(exit_code, 0, @"Test suite did not pass"); NSLog(@"---------------------------------------------------------------------------");
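The sys.path and working-directory setup above happens through the C API before Py_RunMain() launches the test suite. As a rough, hypothetical Python-level sketch of the same behaviour (resource_path stands in for the bundle's resourcePath; the real harness uses PyList_Insert and chdir as shown in the diff):

# Rough Python-level equivalent of the path setup performed by the test harness.
# resource_path is a placeholder for [[NSBundle mainBundle] resourcePath].
import os
import sys

resource_path = "/path/to/iOSTestbed.app"  # placeholder
sys.path.insert(0, os.path.join(resource_path, "app_packages"))
sys.path.insert(0, os.path.join(resource_path, "app"))
os.chdir(os.path.join(resource_path, "app"))  # the test suite runs from the app folder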