diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 9162f9c7bb1576..9e190d43b28ef9 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -16,6 +16,9 @@ configure* @erlend-aasland @corona10 Makefile.pre.in @erlend-aasland Modules/Setup* @erlend-aasland +# argparse +**/*argparse* @savannahostrowski + # asyncio **/*asyncio* @1st1 @asvetlov @kumaraditya303 @willingc @@ -23,7 +26,7 @@ Modules/Setup* @erlend-aasland **/*context* @1st1 **/*genobject* @markshannon **/*hamt* @1st1 -**/*jit* @brandtbucher +**/*jit* @brandtbucher @savannahostrowski Objects/set* @rhettinger Objects/dict* @methane @markshannon Objects/typevarobject.c @JelleZijlstra @@ -78,6 +81,17 @@ Programs/_bootstrap_python.c @ericsnowcurrently Programs/python.c @ericsnowcurrently Tools/build/generate_global_objects.py @ericsnowcurrently +# Initialization +Doc/library/sys_path_init.rst @FFY00 +Doc/c-api/init_config.rst @FFY00 + +# getpath +**/*getpath* @FFY00 + +# site +**/*site.py @FFY00 +Doc/library/site.rst @FFY00 + # Exceptions Lib/test/test_except*.py @iritkatriel Objects/exceptions.c @iritkatriel @@ -94,7 +108,7 @@ Modules/_hacl/** @gpshead **/*logging* @vsajip # venv -**/*venv* @vsajip +**/*venv* @vsajip @FFY00 # Launcher /PC/launcher.c @vsajip diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index f63c4606220494..1f2204d3f2c6fe 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -46,14 +46,14 @@ jobs: # reproducible: to get the same tools versions (autoconf, aclocal, ...) runs-on: ubuntu-24.04 container: - image: ghcr.io/python/autoconf:2024.10.16.11360930377 + image: ghcr.io/python/autoconf:2024.11.11.11786316759 timeout-minutes: 60 needs: check_source if: needs.check_source.outputs.run_tests == 'true' steps: - name: Install Git run: | - apt install git -yq + apt update && apt install git -yq git config --global --add safe.directory "$GITHUB_WORKSPACE" - uses: actions/checkout@v4 with: @@ -76,7 +76,7 @@ jobs: # Check for changes in regenerated files if test -n "$changes"; then echo "Generated files not up to date." - echo "Perhaps you forgot to run make regen-all or build.bat --regen. ;)" + echo "Perhaps you forgot to run make regen-configure ;)" echo "configure files must be regenerated with a specific version of autoconf." echo "$changes" echo "" diff --git a/Doc/Makefile b/Doc/Makefile index a090ee5ba92705..22e43ee3e542ee 100644 --- a/Doc/Makefile +++ b/Doc/Makefile @@ -294,7 +294,7 @@ check: _ensure-pre-commit .PHONY: serve serve: - @echo "The serve target was removed, use htmlview instead (see bpo-36329)" + @echo "The serve target was removed, use htmllive instead (see gh-80510)" # Targets for daily automated doc build # By default, Sphinx only rebuilds pages where the page content has changed. diff --git a/Doc/c-api/init.rst b/Doc/c-api/init.rst index 6e881590131cab..24d876d1f35506 100644 --- a/Doc/c-api/init.rst +++ b/Doc/c-api/init.rst @@ -1379,6 +1379,9 @@ All of the following functions must be called after :c:func:`Py_Initialize`. This function now calls the :c:member:`PyThreadState.on_delete` callback. Previously, that happened in :c:func:`PyThreadState_Delete`. + .. versionchanged:: 3.13 + The :c:member:`PyThreadState.on_delete` callback was removed. + .. c:function:: void PyThreadState_Delete(PyThreadState *tstate) diff --git a/Doc/c-api/long.rst b/Doc/c-api/long.rst index 9ff3e5265004a1..cb12d43d92026f 100644 --- a/Doc/c-api/long.rst +++ b/Doc/c-api/long.rst @@ -582,6 +582,39 @@ distinguished from a number. 
Use :c:func:`PyErr_Occurred` to disambiguate. .. versionadded:: 3.14 +.. c:function:: int PyLong_IsPositive(PyObject *obj) + + Check if the integer object *obj* is positive (``obj > 0``). + + If *obj* is an instance of :c:type:`PyLongObject` or its subtype, + return ``1`` when it's positive and ``0`` otherwise. Else set an + exception and return ``-1``. + + .. versionadded:: 3.14 + + +.. c:function:: int PyLong_IsNegative(PyObject *obj) + + Check if the integer object *obj* is negative (``obj < 0``). + + If *obj* is an instance of :c:type:`PyLongObject` or its subtype, + return ``1`` when it's negative and ``0`` otherwise. Else set an + exception and return ``-1``. + + .. versionadded:: 3.14 + + +.. c:function:: int PyLong_IsZero(PyObject *obj) + + Check if the integer object *obj* is zero. + + If *obj* is an instance of :c:type:`PyLongObject` or its subtype, + return ``1`` when it's zero and ``0`` otherwise. Else set an + exception and return ``-1``. + + .. versionadded:: 3.14 + + .. c:function:: PyObject* PyLong_GetInfo(void) On success, return a read only :term:`named tuple`, that holds diff --git a/Doc/c-api/marshal.rst b/Doc/c-api/marshal.rst index b9085ad3ec361d..61218a1bf6f171 100644 --- a/Doc/c-api/marshal.rst +++ b/Doc/c-api/marshal.rst @@ -13,11 +13,12 @@ binary mode. Numeric values are stored with the least significant byte first. -The module supports two versions of the data format: version 0 is the -historical version, version 1 shares interned strings in the file, and upon -unmarshalling. Version 2 uses a binary format for floating-point numbers. -``Py_MARSHAL_VERSION`` indicates the current file format (currently 2). +The module supports several versions of the data format; see +the :py:mod:`Python module documentation ` for details. +.. c:macro:: Py_MARSHAL_VERSION + + The current format version. See :py:data:`marshal.version`. .. c:function:: void PyMarshal_WriteLongToFile(long value, FILE *file, int version) diff --git a/Doc/c-api/object.rst b/Doc/c-api/object.rst index 630114a4339110..2ac975ff7d1a59 100644 --- a/Doc/c-api/object.rst +++ b/Doc/c-api/object.rst @@ -575,3 +575,27 @@ Object Protocol has the :c:macro:`Py_TPFLAGS_MANAGED_DICT` flag set. .. versionadded:: 3.13 + +.. c:function:: int PyUnstable_Object_EnableDeferredRefcount(PyObject *obj) + + Enable `deferred reference counting `_ on *obj*, + if supported by the runtime. In the :term:`free-threaded ` build, + this allows the interpreter to avoid reference count adjustments to *obj*, + which may improve multi-threaded performance. The tradeoff is + that *obj* will only be deallocated by the tracing garbage collector. + + This function returns ``1`` if deferred reference counting is enabled on *obj* + (including when it was enabled before the call), + and ``0`` if deferred reference counting is not supported or if the hint was + ignored by the runtime. This function is thread-safe, and cannot fail. + + This function does nothing on builds with the :term:`GIL` enabled, which do + not support deferred reference counting. This also does nothing if *obj* is not + an object tracked by the garbage collector (see :func:`gc.is_tracked` and + :c:func:`PyObject_GC_IsTracked`). + + This function is intended to be used soon after *obj* is created, + by the code that creates it. + + .. versionadded:: 3.14 + diff --git a/Doc/conf.py b/Doc/conf.py index 73d7d5db26ff7b..738c9901eef06f 100644 --- a/Doc/conf.py +++ b/Doc/conf.py @@ -67,10 +67,7 @@ # General substitutions. 
project = 'Python' -if sphinx.version_info[:2] >= (8, 1): - copyright = "2001-%Y, Python Software Foundation" -else: - copyright = f"2001-{time.strftime('%Y')}, Python Software Foundation" +copyright = "2001 Python Software Foundation" # We look for the Include/patchlevel.h file in the current Python source tree # and replace the values accordingly. diff --git a/Doc/copyright.rst b/Doc/copyright.rst index 8629ed1fc38009..9210d5f50ed841 100644 --- a/Doc/copyright.rst +++ b/Doc/copyright.rst @@ -4,7 +4,7 @@ Copyright Python and this documentation is: -Copyright © 2001-2024 Python Software Foundation. All rights reserved. +Copyright © 2001 Python Software Foundation. All rights reserved. Copyright © 2000 BeOpen.com. All rights reserved. diff --git a/Doc/data/refcounts.dat b/Doc/data/refcounts.dat index 65d48f8bea7de8..6bfcc191b2270b 100644 --- a/Doc/data/refcounts.dat +++ b/Doc/data/refcounts.dat @@ -1284,6 +1284,19 @@ PyLong_FromUnsignedLong:unsignedlong:v:: PyLong_FromVoidPtr:PyObject*::+1: PyLong_FromVoidPtr:void*:p:: +PyLong_IsPositive:int::: +PyLong_IsPositive:PyObject*:obj:0: + +PyLong_IsNegative:int::: +PyLong_IsNegative:PyObject*:obj:0: + +PyLong_IsZero:int::: +PyLong_IsZero:PyObject*:obj:0: + +PyLong_GetSign:int::: +PyLong_GetSign:PyObject*:v:0: +PyLong_GetSign:int*:sign:: + PyMapping_Check:int::: PyMapping_Check:PyObject*:o:0: diff --git a/Doc/deprecations/pending-removal-in-3.14.rst b/Doc/deprecations/pending-removal-in-3.14.rst index 1904465b856506..6159fa48848285 100644 --- a/Doc/deprecations/pending-removal-in-3.14.rst +++ b/Doc/deprecations/pending-removal-in-3.14.rst @@ -1,13 +1,6 @@ Pending removal in Python 3.14 ------------------------------ -* The import system: - - * Setting :attr:`~module.__loader__` on a module while - failing to set :attr:`__spec__.loader ` - is deprecated. In Python 3.14, :attr:`!__loader__` will cease to be set or - taken into consideration by the import system or the standard library. - * :mod:`argparse`: The *type*, *choices*, and *metavar* parameters of :class:`!argparse.BooleanOptionalAction` are deprecated and will be removed in 3.14. diff --git a/Doc/deprecations/pending-removal-in-3.16.rst b/Doc/deprecations/pending-removal-in-3.16.rst index fac500d34742ca..6f6954b783a1ae 100644 --- a/Doc/deprecations/pending-removal-in-3.16.rst +++ b/Doc/deprecations/pending-removal-in-3.16.rst @@ -1,6 +1,13 @@ Pending removal in Python 3.16 ------------------------------ +* The import system: + + * Setting :attr:`~module.__loader__` on a module while + failing to set :attr:`__spec__.loader ` + is deprecated. In Python 3.16, :attr:`!__loader__` will cease to be set or + taken into consideration by the import system or the standard library. + * :mod:`array`: * The ``'u'`` format code (:c:type:`wchar_t`) diff --git a/Doc/deprecations/pending-removal-in-future.rst b/Doc/deprecations/pending-removal-in-future.rst index 3be0dabfd1f257..ea7581bf16bd34 100644 --- a/Doc/deprecations/pending-removal-in-future.rst +++ b/Doc/deprecations/pending-removal-in-future.rst @@ -145,10 +145,6 @@ although there is currently no date scheduled for their removal. * ``splitvalue()`` * ``to_bytes()`` -* :mod:`urllib.request`: :class:`~urllib.request.URLopener` and - :class:`~urllib.request.FancyURLopener` style of invoking requests is - deprecated. Use newer :func:`~urllib.request.urlopen` functions and methods. - * :mod:`wsgiref`: ``SimpleHandler.stdout.write()`` should not do partial writes. 
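
As a rough sketch of the pattern the ``__loader__`` deprecation entry above points toward — attach the loader to the module spec so that ``__spec__.loader`` is set, rather than assigning ``__loader__`` by hand — the ``NullLoader`` class and the ``demo_mod`` module name below are invented purely for illustration::

    import importlib.abc
    import importlib.util


    class NullLoader(importlib.abc.Loader):
        # Toy loader, defined only for this sketch.
        def create_module(self, spec):
            return None              # fall back to default module creation

        def exec_module(self, module):
            module.answer = 42       # populate the new module


    # Preferred pattern: let the spec carry the loader; __loader__ is then
    # derived from module.__spec__.loader instead of being set directly.
    spec = importlib.util.spec_from_loader("demo_mod", NullLoader())
    module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(module)

    assert module.__spec__.loader is spec.loader
    print(module.answer)             # -> 42
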
diff --git a/Doc/library/aifc.rst b/Doc/library/aifc.rst new file mode 100644 index 00000000000000..a756d679036ecb --- /dev/null +++ b/Doc/library/aifc.rst @@ -0,0 +1,15 @@ +:mod:`!aifc` --- Read and write AIFF and AIFC files +=================================================== + +.. module:: aifc + :synopsis: Removed in 3.13. + :deprecated: + +.. deprecated-removed:: 3.11 3.13 + +This module is no longer part of the Python standard library. +It was :ref:`removed in Python 3.13 ` after +being deprecated in Python 3.11. The removal was decided in :pep:`594`. + +The last version of Python that provided the :mod:`!aifc` module was +`Python 3.12 `_. diff --git a/Doc/library/argparse.rst b/Doc/library/argparse.rst index 7638798ca2552f..a4695547921faa 100644 --- a/Doc/library/argparse.rst +++ b/Doc/library/argparse.rst @@ -801,7 +801,8 @@ Only actions that consume command-line arguments (e.g. ``'store'``, The recommended way to create a custom action is to extend :class:`Action`, overriding the :meth:`!__call__` method and optionally the :meth:`!__init__` and -:meth:`!format_usage` methods. +:meth:`!format_usage` methods. You can also register custom actions using the +:meth:`~ArgumentParser.register` method and reference them by their registered name. An example of a custom action:: @@ -1020,10 +1021,11 @@ necessary type-checking and type conversions to be performed. If the type_ keyword is used with the default_ keyword, the type converter is only applied if the default is a string. -The argument to ``type`` can be any callable that accepts a single string. +The argument to ``type`` can be a callable that accepts a single string or +the name of a registered type (see :meth:`~ArgumentParser.register`) If the function raises :exc:`ArgumentTypeError`, :exc:`TypeError`, or :exc:`ValueError`, the exception is caught and a nicely formatted error -message is displayed. No other exception types are handled. +message is displayed. Other exception types are not handled. Common built-in types and functions can be used as type converters: @@ -2163,6 +2165,34 @@ Intermixed parsing .. versionadded:: 3.7 +Registering custom types or actions +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +.. method:: ArgumentParser.register(registry_name, value, object) + + Sometimes it's desirable to use a custom string in error messages to provide + more user-friendly output. In these cases, :meth:`!register` can be used to + register custom actions or types with a parser and allow you to reference the + type by their registered name instead of their callable name. + + The :meth:`!register` method accepts three arguments - a *registry_name*, + specifying the internal registry where the object will be stored (e.g., + ``action``, ``type``), *value*, which is the key under which the object will + be registered, and object, the callable to be registered. 
+ + The following example shows how to register a custom type with a parser:: + + >>> import argparse + >>> parser = argparse.ArgumentParser() + >>> parser.register('type', 'hexadecimal integer', lambda s: int(s, 16)) + >>> parser.add_argument('--foo', type='hexadecimal integer') + _StoreAction(option_strings=['--foo'], dest='foo', nargs=None, const=None, default=None, type='hexadecimal integer', choices=None, required=False, help=None, metavar=None, deprecated=False) + >>> parser.parse_args(['--foo', '0xFA']) + Namespace(foo=250) + >>> parser.parse_args(['--foo', '1.2']) + usage: PROG [-h] [--foo FOO] + PROG: error: argument --foo: invalid 'hexadecimal integer' value: '1.2' + Exceptions ---------- diff --git a/Doc/library/asynchat.rst b/Doc/library/asynchat.rst new file mode 100644 index 00000000000000..5e5c3a99fe66f1 --- /dev/null +++ b/Doc/library/asynchat.rst @@ -0,0 +1,17 @@ +:mod:`!asynchat` --- Asynchronous socket command/response handler +================================================================= + +.. module:: asynchat + :synopsis: Removed in 3.12. + :deprecated: + +.. deprecated-removed:: 3.6 3.12 + +This module is no longer part of the Python standard library. +It was :ref:`removed in Python 3.12 ` after +being deprecated in Python 3.6. The removal was decided in :pep:`594`. + +Applications should use the :mod:`asyncio` module instead. + +The last version of Python that provided the :mod:`!asynchat` module was +`Python 3.11 `_. diff --git a/Doc/library/asyncio-eventloop.rst b/Doc/library/asyncio-eventloop.rst index 3ace6eda4d7f29..9f1aec148f8750 100644 --- a/Doc/library/asyncio-eventloop.rst +++ b/Doc/library/asyncio-eventloop.rst @@ -1797,7 +1797,7 @@ By default asyncio is configured to use :class:`EventLoop`. .. seealso:: `MSDN documentation on I/O Completion Ports - `_. + `_. .. class:: EventLoop diff --git a/Doc/library/asyncio-stream.rst b/Doc/library/asyncio-stream.rst index 3fdc79b3c6896c..48f2890c5eef8c 100644 --- a/Doc/library/asyncio-stream.rst +++ b/Doc/library/asyncio-stream.rst @@ -92,7 +92,8 @@ and work with streams: family=socket.AF_UNSPEC, \ flags=socket.AI_PASSIVE, sock=None, \ backlog=100, ssl=None, reuse_address=None, \ - reuse_port=None, ssl_handshake_timeout=None, \ + reuse_port=None, keep_alive=None, \ + ssl_handshake_timeout=None, \ ssl_shutdown_timeout=None, start_serving=True) Start a socket server. @@ -128,6 +129,9 @@ and work with streams: .. versionchanged:: 3.11 Added the *ssl_shutdown_timeout* parameter. + .. versionchanged:: 3.13 + Added the *keep_alive* parameter. + .. rubric:: Unix Sockets diff --git a/Doc/library/asyncore.rst b/Doc/library/asyncore.rst new file mode 100644 index 00000000000000..22c9881c3cca36 --- /dev/null +++ b/Doc/library/asyncore.rst @@ -0,0 +1,17 @@ +:mod:`!asyncore` --- Asynchronous socket handler +================================================ + +.. module:: asyncore + :synopsis: Removed in 3.12. + :deprecated: + +.. deprecated-removed:: 3.6 3.12 + +This module is no longer part of the Python standard library. +It was :ref:`removed in Python 3.12 ` after +being deprecated in Python 3.6. The removal was decided in :pep:`594`. + +Applications should use the :mod:`asyncio` module instead. + +The last version of Python that provided the :mod:`!asyncore` module was +`Python 3.11 `_. 
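
A minimal echo-server sketch exercising the *keep_alive* flag documented for the stream server above; the host, port and handler are arbitrary, and the flag is assumed simply to request TCP keep-alive on the server sockets::

    import asyncio


    async def handle_echo(reader, writer):
        data = await reader.read(100)   # echo back whatever the client sent
        writer.write(data)
        await writer.drain()
        writer.close()
        await writer.wait_closed()


    async def main():
        # keep_alive is the parameter added in 3.13 (see the versionchanged
        # note above); it enables keep-alive on the listening sockets.
        server = await asyncio.start_server(
            handle_echo, "127.0.0.1", 8888, keep_alive=True
        )
        async with server:
            await server.serve_forever()


    asyncio.run(main())
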
diff --git a/Doc/library/audioop.rst b/Doc/library/audioop.rst new file mode 100644 index 00000000000000..3bc580b0bd3433 --- /dev/null +++ b/Doc/library/audioop.rst @@ -0,0 +1,15 @@ +:mod:`!audioop` --- Manipulate raw audio data +============================================= + +.. module:: audioop + :synopsis: Removed in 3.13. + :deprecated: + +.. deprecated-removed:: 3.11 3.13 + +This module is no longer part of the Python standard library. +It was :ref:`removed in Python 3.13 ` after +being deprecated in Python 3.11. The removal was decided in :pep:`594`. + +The last version of Python that provided the :mod:`!audioop` module was +`Python 3.12 `_. diff --git a/Doc/library/cgi.rst b/Doc/library/cgi.rst new file mode 100644 index 00000000000000..f9108fa954a906 --- /dev/null +++ b/Doc/library/cgi.rst @@ -0,0 +1,19 @@ +:mod:`!cgi` --- Common Gateway Interface support +================================================ + +.. module:: cgi + :synopsis: Removed in 3.13. + :deprecated: + +.. deprecated-removed:: 3.11 3.13 + +This module is no longer part of the Python standard library. +It was :ref:`removed in Python 3.13 ` after +being deprecated in Python 3.11. The removal was decided in :pep:`594`. + +A fork of the module on PyPI can be used instead: :pypi:`legacy-cgi`. +This is a copy of the cgi module, no longer maintained or supported by the core +Python team. + +The last version of Python that provided the :mod:`!cgi` module was +`Python 3.12 `_. diff --git a/Doc/library/cgitb.rst b/Doc/library/cgitb.rst new file mode 100644 index 00000000000000..fc646aa4c48acd --- /dev/null +++ b/Doc/library/cgitb.rst @@ -0,0 +1,19 @@ +:mod:`!cgitb` --- Traceback manager for CGI scripts +=================================================== + +.. module:: cgitb + :synopsis: Removed in 3.13. + :deprecated: + +.. deprecated-removed:: 3.11 3.13 + +This module is no longer part of the Python standard library. +It was :ref:`removed in Python 3.13 ` after +being deprecated in Python 3.11. The removal was decided in :pep:`594`. + +A fork of the module on PyPI can now be used instead: :pypi:`legacy-cgi`. +This is a copy of the cgi module, no longer maintained or supported by the core +Python team. + +The last version of Python that provided the :mod:`!cgitb` module was +`Python 3.12 `_. diff --git a/Doc/library/chunk.rst b/Doc/library/chunk.rst new file mode 100644 index 00000000000000..9950a0ea70649a --- /dev/null +++ b/Doc/library/chunk.rst @@ -0,0 +1,15 @@ +:mod:`!chunk` --- Read IFF chunked data +======================================= + +.. module:: chunk + :synopsis: Removed in 3.13. + :deprecated: + +.. deprecated-removed:: 3.11 3.13 + +This module is no longer part of the Python standard library. +It was :ref:`removed in Python 3.13 ` after +being deprecated in Python 3.11. The removal was decided in :pep:`594`. + +The last version of Python that provided the :mod:`!chunk` module was +`Python 3.12 `_. diff --git a/Doc/library/codecs.rst b/Doc/library/codecs.rst index 2cfd8a1eaee806..a129a26190ba99 100644 --- a/Doc/library/codecs.rst +++ b/Doc/library/codecs.rst @@ -1042,6 +1042,10 @@ is meant to be exhaustive. Notice that spelling alternatives that only differ in case or use a hyphen instead of an underscore are also valid aliases; therefore, e.g. ``'utf-8'`` is a valid alias for the ``'utf_8'`` codec. +On Windows, ``cpXXX`` codecs are available for all code pages. +But only codecs listed in the following table are guarantead to exist on +other platforms. + .. 
impl-detail:: Some common encodings can bypass the codecs lookup machinery to @@ -1307,6 +1311,9 @@ particular, the following variants typically exist: .. versionchanged:: 3.8 ``cp65001`` is now an alias to ``utf_8``. +.. versionchanged:: 3.14 + On Windows, ``cpXXX`` codecs are now available for all code pages. + Python Specific Encodings ------------------------- diff --git a/Doc/library/concurrent.futures.rst b/Doc/library/concurrent.futures.rst index 48e027152a9851..a57120e88103de 100644 --- a/Doc/library/concurrent.futures.rst +++ b/Doc/library/concurrent.futures.rst @@ -690,7 +690,7 @@ Exception classes of a :class:`~concurrent.futures.InterpreterPoolExecutor` has failed initializing. - .. versionadded:: next + .. versionadded:: 3.14 .. exception:: ExecutionFailed @@ -699,7 +699,7 @@ Exception classes :meth:`~concurrent.futures.Executor.submit` when there's an uncaught exception from the submitted task. - .. versionadded:: next + .. versionadded:: 3.14 .. currentmodule:: concurrent.futures.process diff --git a/Doc/library/crypt.rst b/Doc/library/crypt.rst new file mode 100644 index 00000000000000..9ff37196ccf69f --- /dev/null +++ b/Doc/library/crypt.rst @@ -0,0 +1,20 @@ +:mod:`!crypt` --- Function to check Unix passwords +================================================== + +.. module:: crypt + :synopsis: Removed in 3.13. + :deprecated: + +.. deprecated-removed:: 3.11 3.13 + +This module is no longer part of the Python standard library. +It was :ref:`removed in Python 3.13 ` after +being deprecated in Python 3.11. The removal was decided in :pep:`594`. + +Applications can use the :mod:`hashlib` module from the standard library. +Other possible replacements are third-party libraries from PyPI: +:pypi:`legacycrypt`, :pypi:`bcrypt`, :pypi:`argon2-cffi`, or :pypi:`passlib`. +These are not supported or maintained by the Python core team. + +The last version of Python that provided the :mod:`!crypt` module was +`Python 3.12 `_. diff --git a/Doc/library/ctypes.rst b/Doc/library/ctypes.rst index 99909de20ef439..f490f7563b58a5 100644 --- a/Doc/library/ctypes.rst +++ b/Doc/library/ctypes.rst @@ -1413,13 +1413,15 @@ way is to instantiate one of the following classes: .. class:: OleDLL(name, mode=DEFAULT_MODE, handle=None, use_errno=False, use_last_error=False, winmode=None) - Windows only: Instances of this class represent loaded shared libraries, + Instances of this class represent loaded shared libraries, functions in these libraries use the ``stdcall`` calling convention, and are assumed to return the windows specific :class:`HRESULT` code. :class:`HRESULT` values contain information specifying whether the function call failed or succeeded, together with additional error code. If the return value signals a failure, an :class:`OSError` is automatically raised. + .. availability:: Windows + .. versionchanged:: 3.3 :exc:`WindowsError` used to be raised, which is now an alias of :exc:`OSError`. @@ -1431,14 +1433,17 @@ way is to instantiate one of the following classes: .. class:: WinDLL(name, mode=DEFAULT_MODE, handle=None, use_errno=False, use_last_error=False, winmode=None) - Windows only: Instances of this class represent loaded shared libraries, + Instances of this class represent loaded shared libraries, functions in these libraries use the ``stdcall`` calling convention, and are assumed to return :c:expr:`int` by default. + .. availability:: Windows + .. versionchanged:: 3.12 The *name* parameter can now be a :term:`path-like object`. 
+ The Python :term:`global interpreter lock` is released before calling any function exported by these libraries, and reacquired afterwards. @@ -1574,13 +1579,17 @@ These prefabricated library loaders are available: .. data:: windll :noindex: - Windows only: Creates :class:`WinDLL` instances. + Creates :class:`WinDLL` instances. + + .. availability:: Windows .. data:: oledll :noindex: - Windows only: Creates :class:`OleDLL` instances. + Creates :class:`OleDLL` instances. + + .. availability:: Windows .. data:: pydll @@ -1707,12 +1716,6 @@ in :mod:`!ctypes`) which inherits from the private :class:`_CFuncPtr` class: and raise an exception if the foreign function call failed. -.. exception:: ArgumentError - - This exception is raised when a foreign function call cannot convert one of the - passed arguments. - - .. audit-event:: ctypes.set_exception code foreign-functions On Windows, when a foreign function call raises a system exception (for @@ -1752,11 +1755,13 @@ See :ref:`ctypes-callback-functions` for examples. .. function:: WINFUNCTYPE(restype, *argtypes, use_errno=False, use_last_error=False) - Windows only: The returned function prototype creates functions that use the + The returned function prototype creates functions that use the ``stdcall`` calling convention. The function will release the GIL during the call. *use_errno* and *use_last_error* have the same meaning as above. + .. availability:: Windows + .. function:: PYFUNCTYPE(restype, *argtypes) @@ -1799,10 +1804,15 @@ different ways, depending on the type and number of the parameters in the call: integer. *name* is name of the COM method. *iid* is an optional pointer to the interface identifier which is used in extended error reporting. + If *iid* is not specified, an :exc:`OSError` is raised if the COM method + call fails. If *iid* is specified, a :exc:`~ctypes.COMError` is raised + instead. + COM methods use a special calling convention: They require a pointer to the COM interface as first argument, in addition to those parameters that are specified in the :attr:`!argtypes` tuple. + The optional *paramflags* parameter creates foreign function wrappers with much more functionality than the features described above. @@ -1982,17 +1992,21 @@ Utility functions .. function:: DllCanUnloadNow() - Windows only: This function is a hook which allows implementing in-process + This function is a hook which allows implementing in-process COM servers with ctypes. It is called from the DllCanUnloadNow function that the _ctypes extension dll exports. + .. availability:: Windows + .. function:: DllGetClassObject() - Windows only: This function is a hook which allows implementing in-process + This function is a hook which allows implementing in-process COM servers with ctypes. It is called from the DllGetClassObject function that the ``_ctypes`` extension dll exports. + .. availability:: Windows + .. function:: find_library(name) :module: ctypes.util @@ -2008,7 +2022,7 @@ Utility functions .. function:: find_msvcrt() :module: ctypes.util - Windows only: return the filename of the VC runtime library used by Python, + Returns the filename of the VC runtime library used by Python, and by the extension modules. If the name of the library cannot be determined, ``None`` is returned. @@ -2016,20 +2030,27 @@ Utility functions with a call to the ``free(void *)``, it is important that you use the function in the same library that allocated the memory. + .. availability:: Windows + .. 
function:: FormatError([code]) - Windows only: Returns a textual description of the error code *code*. If no + Returns a textual description of the error code *code*. If no error code is specified, the last error code is used by calling the Windows api function GetLastError. + .. availability:: Windows + .. function:: GetLastError() - Windows only: Returns the last error code set by Windows in the calling thread. + Returns the last error code set by Windows in the calling thread. This function calls the Windows ``GetLastError()`` function directly, it does not return the ctypes-private copy of the error code. + .. availability:: Windows + + .. function:: get_errno() Returns the current value of the ctypes-private copy of the system @@ -2039,11 +2060,14 @@ Utility functions .. function:: get_last_error() - Windows only: returns the current value of the ctypes-private copy of the system + Returns the current value of the ctypes-private copy of the system :data:`!LastError` variable in the calling thread. + .. availability:: Windows + .. audit-event:: ctypes.get_last_error "" ctypes.get_last_error + .. function:: memmove(dst, src, count) Same as the standard C memmove library function: copies *count* bytes from @@ -2092,10 +2116,12 @@ Utility functions .. function:: set_last_error(value) - Windows only: set the current value of the ctypes-private copy of the system + Sets the current value of the ctypes-private copy of the system :data:`!LastError` variable in the calling thread to *value* and return the previous value. + .. availability:: Windows + .. audit-event:: ctypes.set_last_error error ctypes.set_last_error @@ -2116,12 +2142,14 @@ Utility functions .. function:: WinError(code=None, descr=None) - Windows only: this function is probably the worst-named thing in ctypes. It + This function is probably the worst-named thing in ctypes. It creates an instance of :exc:`OSError`. If *code* is not specified, ``GetLastError`` is called to determine the error code. If *descr* is not specified, :func:`FormatError` is called to get a textual description of the error. + .. availability:: Windows + .. versionchanged:: 3.3 An instance of :exc:`WindowsError` used to be created, which is now an alias of :exc:`OSError`. @@ -2485,9 +2513,11 @@ These are the fundamental ctypes data types: .. class:: HRESULT - Windows only: Represents a :c:type:`!HRESULT` value, which contains success or + Represents a :c:type:`!HRESULT` value, which contains success or error information for a function or method call. + .. availability:: Windows + .. class:: py_object @@ -2741,3 +2771,41 @@ Arrays and pointers Returns the object to which to pointer points. Assigning to this attribute changes the pointer to point to the assigned object. + + +.. _ctypes-exceptions: + +Exceptions +^^^^^^^^^^ + +.. exception:: ArgumentError + + This exception is raised when a foreign function call cannot convert one of the + passed arguments. + + +.. exception:: COMError(hresult, text, details) + + This exception is raised when a COM method call failed. + + .. attribute:: hresult + + The integer value representing the error code. + + .. attribute:: text + + The error message. + + .. attribute:: details + + The 5-tuple ``(descr, source, helpfile, helpcontext, progid)``. + + *descr* is the textual description. *source* is the language-dependent + ``ProgID`` for the class or application that raised the error. *helpfile* + is the path of the help file. *helpcontext* is the help context + identifier. 
*progid* is the ``ProgID`` of the interface that defined the + error. + + .. availability:: Windows + + .. versionadded:: next diff --git a/Doc/library/distutils.rst b/Doc/library/distutils.rst new file mode 100644 index 00000000000000..af63e035bf3c4a --- /dev/null +++ b/Doc/library/distutils.rst @@ -0,0 +1,17 @@ +:mod:`!distutils` --- Building and installing Python modules +============================================================ + +.. module:: distutils + :synopsis: Removed in 3.12. + :deprecated: + +.. deprecated-removed:: 3.10 3.12 + +This module is no longer part of the Python standard library. +It was :ref:`removed in Python 3.12 ` after +being deprecated in Python 3.10. The removal was decided in :pep:`632`, +which has `migration advice +`_. + +The last version of Python that provided the :mod:`!distutils` module was +`Python 3.11 `_. diff --git a/Doc/library/functools.rst b/Doc/library/functools.rst index e26a2226aa947a..69d9d81c848124 100644 --- a/Doc/library/functools.rst +++ b/Doc/library/functools.rst @@ -453,7 +453,7 @@ The :mod:`functools` module defines the following functions: .. versionadded:: 3.4 -.. function:: reduce(function, iterable[, initial], /) +.. function:: reduce(function, iterable, /[, initial]) Apply *function* of two arguments cumulatively to the items of *iterable*, from left to right, so as to reduce the iterable to a single value. For example, @@ -468,7 +468,7 @@ The :mod:`functools` module defines the following functions: initial_missing = object() - def reduce(function, iterable, initial=initial_missing, /): + def reduce(function, iterable, /, initial=initial_missing): it = iter(iterable) if initial is initial_missing: value = next(it) @@ -481,6 +481,9 @@ The :mod:`functools` module defines the following functions: See :func:`itertools.accumulate` for an iterator that yields all intermediate values. + .. versionchanged:: 3.14 + *initial* is now supported as a keyword argument. + .. decorator:: singledispatch Transform a function into a :term:`single-dispatch >> gc.is_tracked({}) False >>> gc.is_tracked({"a": 1}) - False - >>> gc.is_tracked({"a": []}) True .. versionadded:: 3.1 diff --git a/Doc/library/getopt.rst b/Doc/library/getopt.rst index 3ab44b9fc56108..891885d3afbf7a 100644 --- a/Doc/library/getopt.rst +++ b/Doc/library/getopt.rst @@ -38,7 +38,8 @@ exception: be parsed, without the leading reference to the running program. Typically, this means ``sys.argv[1:]``. *shortopts* is the string of option letters that the script wants to recognize, with options that require an argument followed by a - colon (``':'``; i.e., the same format that Unix :c:func:`!getopt` uses). + colon (``':'``) and options that accept an optional argument followed by + two colons (``'::'``); i.e., the same format that Unix :c:func:`!getopt` uses. .. note:: @@ -49,8 +50,10 @@ exception: *longopts*, if specified, must be a list of strings with the names of the long options which should be supported. The leading ``'--'`` characters should not be included in the option name. Long options which require an - argument should be followed by an equal sign (``'='``). Optional arguments - are not supported. To accept only long options, *shortopts* should be an + argument should be followed by an equal sign (``'='``). + Long options which accept an optional argument should be followed by + an equal sign and question mark (``'=?'``). + To accept only long options, *shortopts* should be an empty string. 
Long options on the command line can be recognized so long as they provide a prefix of the option name that matches exactly one of the accepted options. For example, if *longopts* is ``['foo', 'frob']``, the @@ -67,6 +70,9 @@ exception: options occur in the list in the same order in which they were found, thus allowing multiple occurrences. Long and short options may be mixed. + .. versionchanged:: 3.14 + Optional arguments are supported. + .. function:: gnu_getopt(args, shortopts, longopts=[]) @@ -79,6 +85,16 @@ exception: variable :envvar:`!POSIXLY_CORRECT` is set, then option processing stops as soon as a non-option argument is encountered. + If the first character of the option string is ``'-'``, non-option arguments + that are followed by options are added to the list of option-and-value pairs + as a pair that has ``None`` as its first element and the list of non-option + arguments as its second element. + The second element of the :func:`!gnu_getopt` result is a list of + program arguments after the last option. + + .. versionchanged:: 3.14 + Support for returning intermixed options and non-option arguments in order. + .. exception:: GetoptError @@ -124,6 +140,34 @@ Using long option names is equally easy: >>> args ['a1', 'a2'] +Optional arguments should be specified explicitly: + +.. doctest:: + + >>> s = '-Con -C --color=off --color a1 a2' + >>> args = s.split() + >>> args + ['-Con', '-C', '--color=off', '--color', 'a1', 'a2'] + >>> optlist, args = getopt.getopt(args, 'C::', ['color=?']) + >>> optlist + [('-C', 'on'), ('-C', ''), ('--color', 'off'), ('--color', '')] + >>> args + ['a1', 'a2'] + +The order of options and non-option arguments can be preserved: + +.. doctest:: + + >>> s = 'a1 -x a2 a3 a4 --long a5 a6' + >>> args = s.split() + >>> args + ['a1', '-x', 'a2', 'a3', 'a4', '--long', 'a5', 'a6'] + >>> optlist, args = getopt.gnu_getopt(args, '-x:', ['long=']) + >>> optlist + [(None, ['a1']), ('-x', 'a2'), (None, ['a3', 'a4']), ('--long', 'a5')] + >>> args + ['a6'] + In a script, typical usage is something like this: .. testcode:: diff --git a/Doc/library/imghdr.rst b/Doc/library/imghdr.rst new file mode 100644 index 00000000000000..56f26355f42558 --- /dev/null +++ b/Doc/library/imghdr.rst @@ -0,0 +1,19 @@ +:mod:`!imghdr` --- Determine the type of an image +================================================= + +.. module:: imghdr + :synopsis: Removed in 3.13. + :deprecated: + +.. deprecated-removed:: 3.11 3.13 + +This module is no longer part of the Python standard library. +It was :ref:`removed in Python 3.13 ` after +being deprecated in Python 3.11. The removal was decided in :pep:`594`. + +Possible replacements are third-party libraries from PyPI: +:pypi:`filetype`, :pypi:`puremagic`, or :pypi:`python-magic`. +These are not supported or maintained by the Python core team. + +The last version of Python that provided the :mod:`!imghdr` module was +`Python 3.12 `_. diff --git a/Doc/library/imp.rst b/Doc/library/imp.rst new file mode 100644 index 00000000000000..3dc4c568b1ae2f --- /dev/null +++ b/Doc/library/imp.rst @@ -0,0 +1,18 @@ +:mod:`!imp` --- Access the import internals +=========================================== + +.. module:: imp + :synopsis: Removed in 3.12. + :deprecated: + +.. deprecated-removed:: 3.4 3.12 + +This module is no longer part of the Python standard library. +It was :ref:`removed in Python 3.12 ` after +being deprecated in Python 3.4. + +The :ref:`removal notice ` includes guidance for +migrating code from :mod:`!imp` to :mod:`importlib`. 
+ +The last version of Python that provided the :mod:`!imp` module was +`Python 3.11 `_. diff --git a/Doc/library/index.rst b/Doc/library/index.rst index 0b348ae6f5c8c0..951fbcf13fbb13 100644 --- a/Doc/library/index.rst +++ b/Doc/library/index.rst @@ -75,4 +75,5 @@ the `Python Package Index `_. unix.rst cmdline.rst superseded.rst + removed.rst security_warnings.rst diff --git a/Doc/library/locale.rst b/Doc/library/locale.rst index f172a55080efc9..426e3a06e1ef11 100644 --- a/Doc/library/locale.rst +++ b/Doc/library/locale.rst @@ -281,7 +281,8 @@ The :mod:`locale` module defines the following exception and functions: .. data:: ERA - Get a string that represents the era used in the current locale. + Get a string which describes how years are counted and displayed for + each era in a locale. Most locales do not define this value. An example of a locale which does define this value is the Japanese one. In Japan, the traditional @@ -290,9 +291,10 @@ The :mod:`locale` module defines the following exception and functions: Normally it should not be necessary to use this value directly. Specifying the ``E`` modifier in their format strings causes the :func:`time.strftime` - function to use this information. The format of the returned string is not - specified, and therefore you should not assume knowledge of it on different - systems. + function to use this information. + The format of the returned string is specified in *The Open Group Base + Specifications Issue 8*, paragraph `7.3.5.2 LC_TIME C-Language Access + `_. .. data:: ERA_D_T_FMT diff --git a/Doc/library/mailcap.rst b/Doc/library/mailcap.rst new file mode 100644 index 00000000000000..4467da146a5a05 --- /dev/null +++ b/Doc/library/mailcap.rst @@ -0,0 +1,15 @@ +:mod:`!mailcap` --- Mailcap file handling +========================================= + +.. module:: mailcap + :synopsis: Removed in 3.13. + :deprecated: + +.. deprecated-removed:: 3.11 3.13 + +This module is no longer part of the Python standard library. +It was :ref:`removed in Python 3.13 ` after +being deprecated in Python 3.11. The removal was decided in :pep:`594`. + +The last version of Python that provided the :mod:`!mailcap` module was +`Python 3.12 `_. diff --git a/Doc/library/marshal.rst b/Doc/library/marshal.rst index 9e4606df0f774e..e8e9071a5c9ef4 100644 --- a/Doc/library/marshal.rst +++ b/Doc/library/marshal.rst @@ -38,23 +38,39 @@ supports a substantially wider range of objects than marshal. maliciously constructed data. Never unmarshal data received from an untrusted or unauthenticated source. +There are functions that read/write files as well as functions operating on +bytes-like objects. + .. index:: object; code, code object Not all Python object types are supported; in general, only objects whose value is independent from a particular invocation of Python can be written and read by -this module. The following types are supported: booleans, integers, floating-point -numbers, complex numbers, strings, bytes, bytearrays, tuples, lists, sets, -frozensets, dictionaries, and code objects (if *allow_code* is true), -where it should be understood that -tuples, lists, sets, frozensets and dictionaries are only supported as long as -the values contained therein are themselves supported. The -singletons :const:`None`, :const:`Ellipsis` and :exc:`StopIteration` can also be -marshalled and unmarshalled. -For format *version* lower than 3, recursive lists, sets and dictionaries cannot -be written (see below). +this module. 
The following types are supported: + +* Numeric types: :class:`int`, :class:`bool`, :class:`float`, :class:`complex`. +* Strings (:class:`str`) and :class:`bytes`. + :term:`Bytes-like objects ` like :class:`bytearray` are + marshalled as :class:`!bytes`. +* Containers: :class:`tuple`, :class:`list`, :class:`set`, :class:`frozenset`, + and (since :data:`version` 5), :class:`slice`. + It should be understood that these are supported only if the values contained + therein are themselves supported. + Recursive containers are supported since :data:`version` 3. +* The singletons :const:`None`, :const:`Ellipsis` and :exc:`StopIteration`. +* :class:`code` objects, if *allow_code* is true. See note above about + version dependence. + +.. versionchanged:: 3.4 + + * Added format version 3, which supports marshalling recursive lists, sets + and dictionaries. + * Added format version 4, which supports efficient representations + of short strings. + +.. versionchanged:: 3.14 + + Added format version 5, which allows marshalling slices. -There are functions that read/write files as well as functions operating on -bytes-like objects. The module defines these functions: @@ -140,11 +156,24 @@ In addition, the following constants are defined: .. data:: version - Indicates the format that the module uses. Version 0 is the historical - format, version 1 shares interned strings and version 2 uses a binary format - for floating-point numbers. - Version 3 adds support for object instancing and recursion. - The current version is 4. + Indicates the format that the module uses. + Version 0 is the historical first version; subsequent versions + add new features. + Generally, a new version becomes the default when it is introduced. + + ======= =============== ==================================================== + Version Available since New features + ======= =============== ==================================================== + 1 Python 2.4 Sharing interned strings + ------- --------------- ---------------------------------------------------- + 2 Python 2.5 Binary representation of floats + ------- --------------- ---------------------------------------------------- + 3 Python 3.4 Support for object instancing and recursion + ------- --------------- ---------------------------------------------------- + 4 Python 3.4 Efficient representation of short strings + ------- --------------- ---------------------------------------------------- + 5 Python 3.14 Support for :class:`slice` objects + ======= =============== ==================================================== .. rubric:: Footnotes @@ -154,4 +183,3 @@ In addition, the following constants are defined: around in a self-contained form. Strictly speaking, "to marshal" means to convert some data from internal to external form (in an RPC buffer for instance) and "unmarshalling" for the reverse process. - diff --git a/Doc/library/math.rst b/Doc/library/math.rst index 5ce2ad2d6aec47..bf79b23a72bbf9 100644 --- a/Doc/library/math.rst +++ b/Doc/library/math.rst @@ -27,36 +27,39 @@ noted otherwise, all return values are floats. 
==================================================== ============================================ -**Number-theoretic and representation functions** +**Number-theoretic functions** -------------------------------------------------------------------------------------------------- -:func:`ceil(x) ` Ceiling of *x*, the smallest integer greater than or equal to *x* :func:`comb(n, k) ` Number of ways to choose *k* items from *n* items without repetition and without order -:func:`copysign(x, y) ` Magnitude (absolute value) of *x* with the sign of *y* -:func:`fabs(x) ` Absolute value of *x* :func:`factorial(n) ` *n* factorial -:func:`floor (x) ` Floor of *x*, the largest integer less than or equal to *x* +:func:`gcd(*integers) ` Greatest common divisor of the integer arguments +:func:`isqrt(n) ` Integer square root of a nonnegative integer *n* +:func:`lcm(*integers) ` Least common multiple of the integer arguments +:func:`perm(n, k) ` Number of ways to choose *k* items from *n* items without repetition and with order + +**Floating point arithmetic** +-------------------------------------------------------------------------------------------------- +:func:`ceil(x) ` Ceiling of *x*, the smallest integer greater than or equal to *x* +:func:`fabs(x) ` Absolute value of *x* +:func:`floor(x) ` Floor of *x*, the largest integer less than or equal to *x* :func:`fma(x, y, z) ` Fused multiply-add operation: ``(x * y) + z`` :func:`fmod(x, y) ` Remainder of division ``x / y`` +:func:`modf(x) ` Fractional and integer parts of *x* +:func:`remainder(x, y) ` Remainder of *x* with respect to *y* +:func:`trunc(x) ` Integer part of *x* + +**Floating point manipulation functions** +-------------------------------------------------------------------------------------------------- +:func:`copysign(x, y) ` Magnitude (absolute value) of *x* with the sign of *y* :func:`frexp(x) ` Mantissa and exponent of *x* -:func:`fsum(iterable) ` Sum of values in the input *iterable* -:func:`gcd(*integers) ` Greatest common divisor of the integer arguments :func:`isclose(a, b, rel_tol, abs_tol) ` Check if the values *a* and *b* are close to each other :func:`isfinite(x) ` Check if *x* is neither an infinity nor a NaN :func:`isinf(x) ` Check if *x* is a positive or negative infinity :func:`isnan(x) ` Check if *x* is a NaN (not a number) -:func:`isqrt(n) ` Integer square root of a nonnegative integer *n* -:func:`lcm(*integers) ` Least common multiple of the integer arguments :func:`ldexp(x, i) ` ``x * (2**i)``, inverse of function :func:`frexp` -:func:`modf(x) ` Fractional and integer parts of *x* :func:`nextafter(x, y, steps) ` Floating-point value *steps* steps after *x* towards *y* -:func:`perm(n, k) ` Number of ways to choose *k* items from *n* items without repetition and with order -:func:`prod(iterable, start) ` Product of elements in the input *iterable* with a *start* value -:func:`remainder(x, y) ` Remainder of *x* with respect to *y* -:func:`sumprod(p, q) ` Sum of products from two iterables *p* and *q* -:func:`trunc(x) ` Integer part of *x* :func:`ulp(x) ` Value of the least significant bit of *x* -**Power and logarithmic functions** +**Power, exponential and logarithmic functions** -------------------------------------------------------------------------------------------------- :func:`cbrt(x) ` Cube root of *x* :func:`exp(x) ` *e* raised to the power *x* @@ -69,6 +72,19 @@ noted otherwise, all return values are floats. 
:func:`pow(x, y) ` *x* raised to the power *y* :func:`sqrt(x) ` Square root of *x* +**Summation and product functions** +-------------------------------------------------------------------------------------------------- +:func:`dist(p, q) ` Euclidean distance between two points *p* and *q* given as an iterable of coordinates +:func:`fsum(iterable) ` Sum of values in the input *iterable* +:func:`hypot(*coordinates) ` Euclidean norm of an iterable of coordinates +:func:`prod(iterable, start) ` Product of elements in the input *iterable* with a *start* value +:func:`sumprod(p, q) ` Sum of products from two iterables *p* and *q* + +**Angular conversion** +-------------------------------------------------------------------------------------------------- +:func:`degrees(x) ` Convert angle *x* from radians to degrees +:func:`radians(x) ` Convert angle *x* from degrees to radians + **Trigonometric functions** -------------------------------------------------------------------------------------------------- :func:`acos(x) ` Arc cosine of *x* @@ -76,16 +92,9 @@ noted otherwise, all return values are floats. :func:`atan(x) ` Arc tangent of *x* :func:`atan2(y, x) ` ``atan(y / x)`` :func:`cos(x) ` Cosine of *x* -:func:`dist(p, q) ` Euclidean distance between two points *p* and *q* given as an iterable of coordinates -:func:`hypot(*coordinates) ` Euclidean norm of an iterable of coordinates :func:`sin(x) ` Sine of *x* :func:`tan(x) ` Tangent of *x* -**Angular conversion** --------------------------------------------------------------------------------------------------- -:func:`degrees(x) ` Convert angle *x* from radians to degrees -:func:`radians(x) ` Convert angle *x* from degrees to radians - **Hyperbolic functions** -------------------------------------------------------------------------------------------------- :func:`acosh(x) ` Inverse hyperbolic cosine of *x* @@ -112,15 +121,8 @@ noted otherwise, all return values are floats. ==================================================== ============================================ -Number-theoretic and representation functions ---------------------------------------------- - -.. function:: ceil(x) - - Return the ceiling of *x*, the smallest integer greater than or equal to *x*. - If *x* is not a float, delegates to :meth:`x.__ceil__ `, - which should return an :class:`~numbers.Integral` value. - +Number-theoretic functions +-------------------------- .. function:: comb(n, k) @@ -140,25 +142,85 @@ Number-theoretic and representation functions .. versionadded:: 3.8 -.. function:: copysign(x, y) +.. function:: factorial(n) - Return a float with the magnitude (absolute value) of *x* but the sign of - *y*. On platforms that support signed zeros, ``copysign(1.0, -0.0)`` - returns *-1.0*. + Return *n* factorial as an integer. Raises :exc:`ValueError` if *n* is not integral or + is negative. + .. versionchanged:: 3.10 + Floats with integral values (like ``5.0``) are no longer accepted. -.. function:: fabs(x) - Return the absolute value of *x*. +.. function:: gcd(*integers) + + Return the greatest common divisor of the specified integer arguments. + If any of the arguments is nonzero, then the returned value is the largest + positive integer that is a divisor of all arguments. If all arguments + are zero, then the returned value is ``0``. ``gcd()`` without arguments + returns ``0``. + .. versionadded:: 3.5 -.. function:: factorial(n) + .. versionchanged:: 3.9 + Added support for an arbitrary number of arguments. Formerly, only two + arguments were supported. 
- Return *n* factorial as an integer. Raises :exc:`ValueError` if *n* is not integral or - is negative. - .. versionchanged:: 3.10 - Floats with integral values (like ``5.0``) are no longer accepted. +.. function:: isqrt(n) + + Return the integer square root of the nonnegative integer *n*. This is the + floor of the exact square root of *n*, or equivalently the greatest integer + *a* such that *a*\ ² |nbsp| ≤ |nbsp| *n*. + + For some applications, it may be more convenient to have the least integer + *a* such that *n* |nbsp| ≤ |nbsp| *a*\ ², or in other words the ceiling of + the exact square root of *n*. For positive *n*, this can be computed using + ``a = 1 + isqrt(n - 1)``. + + .. versionadded:: 3.8 + + +.. function:: lcm(*integers) + + Return the least common multiple of the specified integer arguments. + If all arguments are nonzero, then the returned value is the smallest + positive integer that is a multiple of all arguments. If any of the arguments + is zero, then the returned value is ``0``. ``lcm()`` without arguments + returns ``1``. + + .. versionadded:: 3.9 + + +.. function:: perm(n, k=None) + + Return the number of ways to choose *k* items from *n* items + without repetition and with order. + + Evaluates to ``n! / (n - k)!`` when ``k <= n`` and evaluates + to zero when ``k > n``. + + If *k* is not specified or is ``None``, then *k* defaults to *n* + and the function returns ``n!``. + + Raises :exc:`TypeError` if either of the arguments are not integers. + Raises :exc:`ValueError` if either of the arguments are negative. + + .. versionadded:: 3.8 + + +Floating point arithmetic +------------------------- + +.. function:: ceil(x) + + Return the ceiling of *x*, the smallest integer greater than or equal to *x*. + If *x* is not a float, delegates to :meth:`x.__ceil__ `, + which should return an :class:`~numbers.Integral` value. + + +.. function:: fabs(x) + + Return the absolute value of *x*. .. function:: floor(x) @@ -199,45 +261,76 @@ Number-theoretic and representation functions floats, while Python's ``x % y`` is preferred when working with integers. -.. function:: frexp(x) +.. function:: modf(x) - Return the mantissa and exponent of *x* as the pair ``(m, e)``. *m* is a float - and *e* is an integer such that ``x == m * 2**e`` exactly. If *x* is zero, - returns ``(0.0, 0)``, otherwise ``0.5 <= abs(m) < 1``. This is used to "pick - apart" the internal representation of a float in a portable way. + Return the fractional and integer parts of *x*. Both results carry the sign + of *x* and are floats. + Note that :func:`modf` has a different call/return pattern + than its C equivalents: it takes a single argument and return a pair of + values, rather than returning its second return value through an 'output + parameter' (there is no such thing in Python). -.. function:: fsum(iterable) - Return an accurate floating-point sum of values in the iterable. Avoids - loss of precision by tracking multiple intermediate partial sums. +.. function:: remainder(x, y) - The algorithm's accuracy depends on IEEE-754 arithmetic guarantees and the - typical case where the rounding mode is half-even. On some non-Windows - builds, the underlying C library uses extended precision addition and may - occasionally double-round an intermediate sum causing it to be off in its - least significant bit. + Return the IEEE 754-style remainder of *x* with respect to *y*. 
For + finite *x* and finite nonzero *y*, this is the difference ``x - n*y``, + where ``n`` is the closest integer to the exact value of the quotient ``x / + y``. If ``x / y`` is exactly halfway between two consecutive integers, the + nearest *even* integer is used for ``n``. The remainder ``r = remainder(x, + y)`` thus always satisfies ``abs(r) <= 0.5 * abs(y)``. - For further discussion and two alternative approaches, see the `ASPN cookbook - recipes for accurate floating-point summation - `_\. + Special cases follow IEEE 754: in particular, ``remainder(x, math.inf)`` is + *x* for any finite *x*, and ``remainder(x, 0)`` and + ``remainder(math.inf, x)`` raise :exc:`ValueError` for any non-NaN *x*. + If the result of the remainder operation is zero, that zero will have + the same sign as *x*. + On platforms using IEEE 754 binary floating point, the result of this + operation is always exactly representable: no rounding error is introduced. -.. function:: gcd(*integers) + .. versionadded:: 3.7 - Return the greatest common divisor of the specified integer arguments. - If any of the arguments is nonzero, then the returned value is the largest - positive integer that is a divisor of all arguments. If all arguments - are zero, then the returned value is ``0``. ``gcd()`` without arguments - returns ``0``. - .. versionadded:: 3.5 +.. function:: trunc(x) - .. versionchanged:: 3.9 - Added support for an arbitrary number of arguments. Formerly, only two - arguments were supported. + Return *x* with the fractional part + removed, leaving the integer part. This rounds toward 0: ``trunc()`` is + equivalent to :func:`floor` for positive *x*, and equivalent to :func:`ceil` + for negative *x*. If *x* is not a float, delegates to :meth:`x.__trunc__ + `, which should return an :class:`~numbers.Integral` value. + + +For the :func:`ceil`, :func:`floor`, and :func:`modf` functions, note that *all* +floating-point numbers of sufficiently large magnitude are exact integers. +Python floats typically carry no more than 53 bits of precision (the same as the +platform C double type), in which case any float *x* with ``abs(x) >= 2**52`` +necessarily has no fractional bits. +Floating point manipulation functions +------------------------------------- + +.. function:: copysign(x, y) + + Return a float with the magnitude (absolute value) of *x* but the sign of + *y*. On platforms that support signed zeros, ``copysign(1.0, -0.0)`` + returns *-1.0*. + + +.. function:: frexp(x) + + Return the mantissa and exponent of *x* as the pair ``(m, e)``. *m* is a float + and *e* is an integer such that ``x == m * 2**e`` exactly. If *x* is zero, + returns ``(0.0, 0)``, otherwise ``0.5 <= abs(m) < 1``. This is used to "pick + apart" the internal representation of a float in a portable way. + + Note that :func:`frexp` has a different call/return pattern + than its C equivalents: it takes a single argument and return a pair of + values, rather than returning its second return value through an 'output + parameter' (there is no such thing in Python). + .. function:: isclose(a, b, *, rel_tol=1e-09, abs_tol=0.0) Return ``True`` if the values *a* and *b* are close to each other and @@ -291,43 +384,12 @@ Number-theoretic and representation functions Return ``True`` if *x* is a NaN (not a number), and ``False`` otherwise. -.. function:: isqrt(n) - - Return the integer square root of the nonnegative integer *n*. This is the - floor of the exact square root of *n*, or equivalently the greatest integer - *a* such that *a*\ ² |nbsp| ≤ |nbsp| *n*. 
- - For some applications, it may be more convenient to have the least integer - *a* such that *n* |nbsp| ≤ |nbsp| *a*\ ², or in other words the ceiling of - the exact square root of *n*. For positive *n*, this can be computed using - ``a = 1 + isqrt(n - 1)``. - - .. versionadded:: 3.8 - - -.. function:: lcm(*integers) - - Return the least common multiple of the specified integer arguments. - If all arguments are nonzero, then the returned value is the smallest - positive integer that is a multiple of all arguments. If any of the arguments - is zero, then the returned value is ``0``. ``lcm()`` without arguments - returns ``1``. - - .. versionadded:: 3.9 - - .. function:: ldexp(x, i) Return ``x * (2**i)``. This is essentially the inverse of function :func:`frexp`. -.. function:: modf(x) - - Return the fractional and integer parts of *x*. Both results carry the sign - of *x* and are floats. - - .. function:: nextafter(x, y, steps=1) Return the floating-point value *steps* steps after *x* towards *y*. @@ -348,79 +410,6 @@ Number-theoretic and representation functions .. versionchanged:: 3.12 Added the *steps* argument. -.. function:: perm(n, k=None) - - Return the number of ways to choose *k* items from *n* items - without repetition and with order. - - Evaluates to ``n! / (n - k)!`` when ``k <= n`` and evaluates - to zero when ``k > n``. - - If *k* is not specified or is ``None``, then *k* defaults to *n* - and the function returns ``n!``. - - Raises :exc:`TypeError` if either of the arguments are not integers. - Raises :exc:`ValueError` if either of the arguments are negative. - - .. versionadded:: 3.8 - - -.. function:: prod(iterable, *, start=1) - - Calculate the product of all the elements in the input *iterable*. - The default *start* value for the product is ``1``. - - When the iterable is empty, return the start value. This function is - intended specifically for use with numeric values and may reject - non-numeric types. - - .. versionadded:: 3.8 - - -.. function:: remainder(x, y) - - Return the IEEE 754-style remainder of *x* with respect to *y*. For - finite *x* and finite nonzero *y*, this is the difference ``x - n*y``, - where ``n`` is the closest integer to the exact value of the quotient ``x / - y``. If ``x / y`` is exactly halfway between two consecutive integers, the - nearest *even* integer is used for ``n``. The remainder ``r = remainder(x, - y)`` thus always satisfies ``abs(r) <= 0.5 * abs(y)``. - - Special cases follow IEEE 754: in particular, ``remainder(x, math.inf)`` is - *x* for any finite *x*, and ``remainder(x, 0)`` and - ``remainder(math.inf, x)`` raise :exc:`ValueError` for any non-NaN *x*. - If the result of the remainder operation is zero, that zero will have - the same sign as *x*. - - On platforms using IEEE 754 binary floating point, the result of this - operation is always exactly representable: no rounding error is introduced. - - .. versionadded:: 3.7 - - -.. function:: sumprod(p, q) - - Return the sum of products of values from two iterables *p* and *q*. - - Raises :exc:`ValueError` if the inputs do not have the same length. - - Roughly equivalent to:: - - sum(map(operator.mul, p, q, strict=True)) - - For float and mixed int/float inputs, the intermediate products - and sums are computed with extended precision. - - .. versionadded:: 3.12 - - -.. function:: trunc(x) - - Return *x* with the fractional part - removed, leaving the integer part. 
This rounds toward 0: ``trunc()`` is - equivalent to :func:`floor` for positive *x*, and equivalent to :func:`ceil` - for negative *x*. If *x* is not a float, delegates to :meth:`x.__trunc__ - `, which should return an :class:`~numbers.Integral` value. .. function:: ulp(x) @@ -447,20 +436,8 @@ Number-theoretic and representation functions .. versionadded:: 3.9 -Note that :func:`frexp` and :func:`modf` have a different call/return pattern -than their C equivalents: they take a single argument and return a pair of -values, rather than returning their second return value through an 'output -parameter' (there is no such thing in Python). - -For the :func:`ceil`, :func:`floor`, and :func:`modf` functions, note that *all* -floating-point numbers of sufficiently large magnitude are exact integers. -Python floats typically carry no more than 53 bits of precision (the same as the -platform C double type), in which case any float *x* with ``abs(x) >= 2**52`` -necessarily has no fractional bits. - - -Power and logarithmic functions -------------------------------- +Power, exponential and logarithmic functions +-------------------------------------------- .. function:: cbrt(x) @@ -557,41 +534,8 @@ Power and logarithmic functions Return the square root of *x*. -Trigonometric functions ------------------------ - -.. function:: acos(x) - - Return the arc cosine of *x*, in radians. The result is between ``0`` and - ``pi``. - - -.. function:: asin(x) - - Return the arc sine of *x*, in radians. The result is between ``-pi/2`` and - ``pi/2``. - - -.. function:: atan(x) - - Return the arc tangent of *x*, in radians. The result is between ``-pi/2`` and - ``pi/2``. - - -.. function:: atan2(y, x) - - Return ``atan(y / x)``, in radians. The result is between ``-pi`` and ``pi``. - The vector in the plane from the origin to point ``(x, y)`` makes this angle - with the positive X axis. The point of :func:`atan2` is that the signs of both - inputs are known to it, so it can compute the correct quadrant for the angle. - For example, ``atan(1)`` and ``atan2(1, 1)`` are both ``pi/4``, but ``atan2(-1, - -1)`` is ``-3*pi/4``. - - -.. function:: cos(x) - - Return the cosine of *x* radians. - +Summation and product functions +------------------------------- .. function:: dist(p, q) @@ -606,6 +550,22 @@ Trigonometric functions .. versionadded:: 3.8 +.. function:: fsum(iterable) + + Return an accurate floating-point sum of values in the iterable. Avoids + loss of precision by tracking multiple intermediate partial sums. + + The algorithm's accuracy depends on IEEE-754 arithmetic guarantees and the + typical case where the rounding mode is half-even. On some non-Windows + builds, the underlying C library uses extended precision addition and may + occasionally double-round an intermediate sum causing it to be off in its + least significant bit. + + For further discussion and two alternative approaches, see the `ASPN cookbook + recipes for accurate floating-point summation + `_\. + + .. function:: hypot(*coordinates) Return the Euclidean norm, ``sqrt(sum(x**2 for x in coordinates))``. @@ -626,14 +586,32 @@ Trigonometric functions is almost always correctly rounded to within 1/2 ulp. -.. function:: sin(x) +.. function:: prod(iterable, *, start=1) - Return the sine of *x* radians. + Calculate the product of all the elements in the input *iterable*. + The default *start* value for the product is ``1``. + When the iterable is empty, return the start value. 
This function is + intended specifically for use with numeric values and may reject + non-numeric types. -.. function:: tan(x) + .. versionadded:: 3.8 - Return the tangent of *x* radians. + +.. function:: sumprod(p, q) + + Return the sum of products of values from two iterables *p* and *q*. + + Raises :exc:`ValueError` if the inputs do not have the same length. + + Roughly equivalent to:: + + sum(map(operator.mul, p, q, strict=True)) + + For float and mixed int/float inputs, the intermediate products + and sums are computed with extended precision. + + .. versionadded:: 3.12 Angular conversion @@ -649,6 +627,52 @@ Angular conversion Convert angle *x* from degrees to radians. +Trigonometric functions +----------------------- + +.. function:: acos(x) + + Return the arc cosine of *x*, in radians. The result is between ``0`` and + ``pi``. + + +.. function:: asin(x) + + Return the arc sine of *x*, in radians. The result is between ``-pi/2`` and + ``pi/2``. + + +.. function:: atan(x) + + Return the arc tangent of *x*, in radians. The result is between ``-pi/2`` and + ``pi/2``. + + +.. function:: atan2(y, x) + + Return ``atan(y / x)``, in radians. The result is between ``-pi`` and ``pi``. + The vector in the plane from the origin to point ``(x, y)`` makes this angle + with the positive X axis. The point of :func:`atan2` is that the signs of both + inputs are known to it, so it can compute the correct quadrant for the angle. + For example, ``atan(1)`` and ``atan2(1, 1)`` are both ``pi/4``, but ``atan2(-1, + -1)`` is ``-3*pi/4``. + + +.. function:: cos(x) + + Return the cosine of *x* radians. + + +.. function:: sin(x) + + Return the sine of *x* radians. + + +.. function:: tan(x) + + Return the tangent of *x* radians. + + Hyperbolic functions -------------------- diff --git a/Doc/library/msilib.rst b/Doc/library/msilib.rst new file mode 100644 index 00000000000000..eb1ac551ded456 --- /dev/null +++ b/Doc/library/msilib.rst @@ -0,0 +1,15 @@ +:mod:`!msilib` --- Read and write Microsoft Installer files +=========================================================== + +.. module:: msilib + :synopsis: Removed in 3.13. + :deprecated: + +.. deprecated-removed:: 3.11 3.13 + +This module is no longer part of the Python standard library. +It was :ref:`removed in Python 3.13 ` after +being deprecated in Python 3.11. The removal was decided in :pep:`594`. + +The last version of Python that provided the :mod:`!msilib` module was +`Python 3.12 `_. diff --git a/Doc/library/nis.rst b/Doc/library/nis.rst new file mode 100644 index 00000000000000..dcc36dd43fc313 --- /dev/null +++ b/Doc/library/nis.rst @@ -0,0 +1,15 @@ +:mod:`!nis` --- Interface to Sun’s NIS (Yellow Pages) +===================================================== + +.. module:: nis + :synopsis: Removed in 3.13. + :deprecated: + +.. deprecated-removed:: 3.11 3.13 + +This module is no longer part of the Python standard library. +It was :ref:`removed in Python 3.13 ` after +being deprecated in Python 3.11. The removal was decided in :pep:`594`. + +The last version of Python that provided the :mod:`!nis` module was +`Python 3.12 `_. diff --git a/Doc/library/nntplib.rst b/Doc/library/nntplib.rst new file mode 100644 index 00000000000000..8053fe8cb8b9e1 --- /dev/null +++ b/Doc/library/nntplib.rst @@ -0,0 +1,15 @@ +:mod:`!nntplib` --- NNTP protocol client +======================================== + +.. module:: nntplib + :synopsis: Removed in 3.13. + :deprecated: + +.. deprecated-removed:: 3.11 3.13 + +This module is no longer part of the Python standard library. 
+It was :ref:`removed in Python 3.13 ` after +being deprecated in Python 3.11. The removal was decided in :pep:`594`. + +The last version of Python that provided the :mod:`!nntplib` module was +`Python 3.12 `_. diff --git a/Doc/library/os.rst b/Doc/library/os.rst index 61144256f47ddb..dfe5ef0726ff7d 100644 --- a/Doc/library/os.rst +++ b/Doc/library/os.rst @@ -261,7 +261,7 @@ process and user. :data:`os.environ` or :data:`os.environb`, or calling :func:`os.getenv` while reloading, may return an empty result. - .. versionadded:: next + .. versionadded:: 3.14 .. function:: chdir(path) diff --git a/Doc/library/ossaudiodev.rst b/Doc/library/ossaudiodev.rst new file mode 100644 index 00000000000000..320adbeff82539 --- /dev/null +++ b/Doc/library/ossaudiodev.rst @@ -0,0 +1,15 @@ +:mod:`!ossaudiodev` --- Access to OSS-compatible audio devices +============================================================== + +.. module:: ossaudiodev + :synopsis: Removed in 3.13. + :deprecated: + +.. deprecated-removed:: 3.11 3.13 + +This module is no longer part of the Python standard library. +It was :ref:`removed in Python 3.13 ` after +being deprecated in Python 3.11. The removal was decided in :pep:`594`. + +The last version of Python that provided the :mod:`!ossaudiodev` module was +`Python 3.12 `_. diff --git a/Doc/library/pipes.rst b/Doc/library/pipes.rst new file mode 100644 index 00000000000000..d9bcc3a5d99c9b --- /dev/null +++ b/Doc/library/pipes.rst @@ -0,0 +1,17 @@ +:mod:`!pipes` --- Interface to shell pipelines +============================================== + +.. module:: pipes + :synopsis: Removed in 3.13. + :deprecated: + +.. deprecated-removed:: 3.11 3.13 + +This module is no longer part of the Python standard library. +It was :ref:`removed in Python 3.13 ` after +being deprecated in Python 3.11. The removal was decided in :pep:`594`. + +Applications should use the :mod:`subprocess` module instead. + +The last version of Python that provided the :mod:`!pipes` module was +`Python 3.12 `_. diff --git a/Doc/library/platform.rst b/Doc/library/platform.rst index 1beb3b9eb89d22..cfe1e7ba48da32 100644 --- a/Doc/library/platform.rst +++ b/Doc/library/platform.rst @@ -359,3 +359,15 @@ Android Platform `__. .. versionadded:: 3.13 + + +Miscellaneous +------------- + +.. function:: invalidate_caches() + + Clear out the internal cache of information, such as the :func:`uname`. + This is typically useful when the platform's :func:`node` is changed + by an external process and one needs to retrieve the updated value. + + .. versionadded:: 3.14 diff --git a/Doc/library/pprint.rst b/Doc/library/pprint.rst index 1b3498e51f766d..2985f31bacb47a 100644 --- a/Doc/library/pprint.rst +++ b/Doc/library/pprint.rst @@ -267,7 +267,7 @@ let's fetch information about a project from `PyPI `_:: >>> import json >>> import pprint >>> from urllib.request import urlopen - >>> with urlopen('https://pypi.org/pypi/sampleproject/json') as resp: + >>> with urlopen('https://pypi.org/pypi/sampleproject/1.2.0/json') as resp: ... project_info = json.load(resp)['info'] In its basic form, :func:`~pprint.pp` shows the whole object:: diff --git a/Doc/library/removed.rst b/Doc/library/removed.rst new file mode 100644 index 00000000000000..4d75842eca1a03 --- /dev/null +++ b/Doc/library/removed.rst @@ -0,0 +1,39 @@ +:tocdepth: 1 + +.. _removed: + +*************** +Removed Modules +*************** + +The modules described in this chapter have been removed from the Python +standard library. They are documented here to help people find replacements. 
+ + +.. toctree:: + :maxdepth: 1 + + aifc.rst + asynchat.rst + asyncore.rst + audioop.rst + cgi.rst + cgitb.rst + chunk.rst + crypt.rst + distutils.rst + imghdr.rst + imp.rst + mailcap.rst + msilib.rst + nis.rst + nntplib.rst + ossaudiodev.rst + pipes.rst + smtpd.rst + sndhdr.rst + spwd.rst + sunau.rst + telnetlib.rst + uu.rst + xdrlib.rst diff --git a/Doc/library/smtpd.rst b/Doc/library/smtpd.rst new file mode 100644 index 00000000000000..c704f4a241b469 --- /dev/null +++ b/Doc/library/smtpd.rst @@ -0,0 +1,18 @@ +:mod:`!smtpd` --- SMTP Server +============================= + +.. module:: smtpd + :synopsis: Removed in 3.12. + :deprecated: + +.. deprecated-removed:: 3.6 3.12 + +This module is no longer part of the Python standard library. +It was :ref:`removed in Python 3.12 ` after +being deprecated in Python 3.6. The removal was decided in :pep:`594`. + +A possible replacement is the third-party :pypi:`aiosmtpd` library. This +library is not maintained or supported by the Python core team. + +The last version of Python that provided the :mod:`!smtpd` module was +`Python 3.11 `_. diff --git a/Doc/library/sndhdr.rst b/Doc/library/sndhdr.rst new file mode 100644 index 00000000000000..6b71db4f6338a8 --- /dev/null +++ b/Doc/library/sndhdr.rst @@ -0,0 +1,19 @@ +:mod:`!sndhdr` --- Determine type of sound file +=============================================== + +.. module:: sndhdr + :synopsis: Removed in 3.13. + :deprecated: + +.. deprecated-removed:: 3.11 3.13 + +This module is no longer part of the Python standard library. +It was :ref:`removed in Python 3.13 ` after +being deprecated in Python 3.11. The removal was decided in :pep:`594`. + +Possible replacements are third-party modules from PyPI: +:pypi:`filetype`, :pypi:`puremagic`, or :pypi:`python-magic`. +These are not supported or maintained by the Python core team. + +The last version of Python that provided the :mod:`!sndhdr` module was +`Python 3.12 `_. diff --git a/Doc/library/socket.rst b/Doc/library/socket.rst index 0c7b9328648f66..6358d140484c78 100644 --- a/Doc/library/socket.rst +++ b/Doc/library/socket.rst @@ -928,7 +928,9 @@ The :mod:`socket` module also offers various network-related services: .. versionadded:: 3.7 -.. function:: getaddrinfo(host, port, family=0, type=0, proto=0, flags=0) +.. function:: getaddrinfo(host, port, family=AF_UNSPEC, type=0, proto=0, flags=0) + + This function wraps the C function ``getaddrinfo`` of the underlying system. Translate the *host*/*port* argument into a sequence of 5-tuples that contain all the necessary arguments for creating a socket connected to that service. @@ -938,8 +940,10 @@ The :mod:`socket` module also offers various network-related services: and *port*, you can pass ``NULL`` to the underlying C API. The *family*, *type* and *proto* arguments can be optionally specified - in order to narrow the list of addresses returned. Passing zero as a - value for each of these arguments selects the full range of results. + in order to provide options and limit the list of addresses returned. + Pass their default values (:data:`AF_UNSPEC`, 0, and 0, respectively) + to not limit the results. See the note below for details. + The *flags* argument can be one or several of the ``AI_*`` constants, and will influence how results are computed and returned. 
For example, :const:`AI_NUMERICHOST` will disable domain name resolution @@ -959,6 +963,29 @@ The :mod:`socket` module also offers various network-related services: :const:`AF_INET6`), and is meant to be passed to the :meth:`socket.connect` method. + .. note:: + + If you intend to use results from :func:`!getaddrinfo` to create a socket + (rather than, for example, retrieve *canonname*), + consider limiting the results by *type* (e.g. :data:`SOCK_STREAM` or + :data:`SOCK_DGRAM`) and/or *proto* (e.g. :data:`IPPROTO_TCP` or + :data:`IPPROTO_UDP`) that your application can handle. + + The behavior with default values of *family*, *type*, *proto* + and *flags* is system-specific. + + Many systems (for example, most Linux configurations) will return a sorted + list of all matching addresses. + These addresses should generally be tried in order until a connection succeeds + (possibly tried in parallel, for example, using a `Happy Eyeballs`_ algorithm). + In these cases, limiting the *type* and/or *proto* can help eliminate + unsuccessful or unusable connecton attempts. + + Some systems will, however, only return a single address. + (For example, this was reported on Solaris and AIX configurations.) + On these systems, limiting the *type* and/or *proto* helps ensure that + this address is usable. + .. audit-event:: socket.getaddrinfo host,port,family,type,protocol socket.getaddrinfo The following example fetches address information for a hypothetical TCP @@ -978,6 +1005,8 @@ The :mod:`socket` module also offers various network-related services: for IPv6 multicast addresses, string representing an address will not contain ``%scope_id`` part. +.. _Happy Eyeballs: https://en.wikipedia.org/wiki/Happy_Eyeballs + .. function:: getfqdn([name]) Return a fully qualified domain name for *name*. If *name* is omitted or empty, diff --git a/Doc/library/spwd.rst b/Doc/library/spwd.rst new file mode 100644 index 00000000000000..c16854bb380e52 --- /dev/null +++ b/Doc/library/spwd.rst @@ -0,0 +1,18 @@ +:mod:`!spwd` --- The shadow password database +============================================= + +.. module:: spwd + :synopsis: Removed in 3.13. + :deprecated: + +.. deprecated-removed:: 3.11 3.13 + +This module is no longer part of the Python standard library. +It was :ref:`removed in Python 3.13 ` after +being deprecated in Python 3.11. The removal was decided in :pep:`594`. + +A possible replacement is the third-party library :pypi:`python-pam`. +This library is not supported or maintained by the Python core team. + +The last version of Python that provided the :mod:`!spwd` module was +`Python 3.12 `_. diff --git a/Doc/library/stdtypes.rst b/Doc/library/stdtypes.rst index a9b7662dcb212b..2347437d7273d9 100644 --- a/Doc/library/stdtypes.rst +++ b/Doc/library/stdtypes.rst @@ -3889,7 +3889,7 @@ copying. .. versionchanged:: 3.5 memoryviews can now be indexed with tuple of integers. - .. versionchanged:: next + .. versionchanged:: 3.14 memoryview is now a :term:`generic type`. :class:`memoryview` has several methods: diff --git a/Doc/library/sunau.rst b/Doc/library/sunau.rst new file mode 100644 index 00000000000000..feb7768f8bdd68 --- /dev/null +++ b/Doc/library/sunau.rst @@ -0,0 +1,15 @@ +:mod:`!sunau` --- Read and write Sun AU files +============================================= + +.. module:: sunau + :synopsis: Removed in 3.13. + :deprecated: + +.. deprecated-removed:: 3.11 3.13 + +This module is no longer part of the Python standard library. 
+It was :ref:`removed in Python 3.13 ` after +being deprecated in Python 3.11. The removal was decided in :pep:`594`. + +The last version of Python that provided the :mod:`!sunau` module was +`Python 3.12 `_. diff --git a/Doc/library/sys.rst b/Doc/library/sys.rst index d83816ec1502ca..773e5d4185dfaf 100644 --- a/Doc/library/sys.rst +++ b/Doc/library/sys.rst @@ -944,7 +944,7 @@ always available. This function should be used for specialized purposes only. It is not guaranteed to exist in all implementations of Python. - .. versionchanged:: next + .. versionchanged:: 3.14 The result may include objects from other interpreters. diff --git a/Doc/library/telnetlib.rst b/Doc/library/telnetlib.rst new file mode 100644 index 00000000000000..6971ad33ff9751 --- /dev/null +++ b/Doc/library/telnetlib.rst @@ -0,0 +1,19 @@ +:mod:`!telnetlib` --- Telnet client +=================================== + +.. module:: telnetlib + :synopsis: Removed in 3.13. + :deprecated: + +.. deprecated-removed:: 3.11 3.13 + +This module is no longer part of the Python standard library. +It was :ref:`removed in Python 3.13 ` after +being deprecated in Python 3.11. The removal was decided in :pep:`594`. + +Possible replacements are third-party libraries from PyPI: :pypi:`telnetlib3` +or :pypi:`Exscript`. These are not supported or maintained by the Python core +team. + +The last version of Python that provided the :mod:`!telnetlib` module was +`Python 3.12 `_. diff --git a/Doc/library/time.rst b/Doc/library/time.rst index 9cd5db768e9853..6265c2214eaa0d 100644 --- a/Doc/library/time.rst +++ b/Doc/library/time.rst @@ -390,7 +390,7 @@ Functions threads ready to run, the function returns immediately, and the thread continues execution. On Windows 8.1 and newer the implementation uses a `high-resolution timer - `_ + `_ which provides resolution of 100 nanoseconds. If *secs* is zero, ``Sleep(0)`` is used. Unix implementation: diff --git a/Doc/library/tomllib.rst b/Doc/library/tomllib.rst index 521a7a17fb3e8b..30d7ff50a1acc1 100644 --- a/Doc/library/tomllib.rst +++ b/Doc/library/tomllib.rst @@ -60,9 +60,36 @@ This module defines the following functions: The following exceptions are available: -.. exception:: TOMLDecodeError +.. exception:: TOMLDecodeError(msg, doc, pos) - Subclass of :exc:`ValueError`. + Subclass of :exc:`ValueError` with the following additional attributes: + + .. attribute:: msg + + The unformatted error message. + + .. attribute:: doc + + The TOML document being parsed. + + .. attribute:: pos + + The index of *doc* where parsing failed. + + .. attribute:: lineno + + The line corresponding to *pos*. + + .. attribute:: colno + + The column corresponding to *pos*. + + .. versionchanged:: 3.14 + Added the *msg*, *doc* and *pos* parameters. + Added the :attr:`msg`, :attr:`doc`, :attr:`pos`, :attr:`lineno` and :attr:`colno` attributes. + + .. deprecated:: 3.14 + Passing free-form positional arguments is deprecated. Examples diff --git a/Doc/library/typing.rst b/Doc/library/typing.rst index cd8b90854b0e94..0fee782121b0af 100644 --- a/Doc/library/typing.rst +++ b/Doc/library/typing.rst @@ -1726,11 +1726,11 @@ without the dedicated syntax, as documented below. class Sequence[T]: # T is a TypeVar ... - This syntax can also be used to create bound and constrained type + This syntax can also be used to create bounded and constrained type variables:: - class StrSequence[S: str]: # S is a TypeVar bound to str - ... + class StrSequence[S: str]: # S is a TypeVar with a `str` upper bound; + ... 
# we can say that S is "bounded by `str`" class StrOrBytesSequence[A: (str, bytes)]: # A is a TypeVar constrained to str or bytes @@ -1763,8 +1763,8 @@ without the dedicated syntax, as documented below. """Add two strings or bytes objects together.""" return x + y - Note that type variables can be *bound*, *constrained*, or neither, but - cannot be both bound *and* constrained. + Note that type variables can be *bounded*, *constrained*, or neither, but + cannot be both bounded *and* constrained. The variance of type variables is inferred by type checkers when they are created through the :ref:`type parameter syntax ` or when @@ -1774,8 +1774,8 @@ without the dedicated syntax, as documented below. By default, manually created type variables are invariant. See :pep:`484` and :pep:`695` for more details. - Bound type variables and constrained type variables have different - semantics in several important ways. Using a *bound* type variable means + Bounded type variables and constrained type variables have different + semantics in several important ways. Using a *bounded* type variable means that the ``TypeVar`` will be solved using the most specific type possible:: x = print_capitalized('a string') @@ -1789,8 +1789,8 @@ without the dedicated syntax, as documented below. z = print_capitalized(45) # error: int is not a subtype of str - Type variables can be bound to concrete types, abstract types (ABCs or - protocols), and even unions of types:: + The upper bound of a type variable can be a concrete type, abstract type + (ABC or Protocol), or even a union of types:: # Can be anything with an __abs__ method def print_abs[T: SupportsAbs](arg: T) -> None: @@ -1834,7 +1834,7 @@ without the dedicated syntax, as documented below. .. attribute:: __bound__ - The bound of the type variable, if any. + The upper bound of the type variable, if any. .. versionchanged:: 3.12 @@ -2100,7 +2100,7 @@ without the dedicated syntax, as documented below. return x + y Without ``ParamSpec``, the simplest way to annotate this previously was to - use a :class:`TypeVar` with bound ``Callable[..., Any]``. However this + use a :class:`TypeVar` with upper bound ``Callable[..., Any]``. However this causes two problems: 1. The type checker can't type check the ``inner`` function because diff --git a/Doc/library/urllib.parse.rst b/Doc/library/urllib.parse.rst index fb5353e1895bf9..44a9c79cba2216 100644 --- a/Doc/library/urllib.parse.rst +++ b/Doc/library/urllib.parse.rst @@ -239,6 +239,10 @@ or on combining URL components into a URL string. query parameter separator. This has been changed to allow only a single separator key, with ``&`` as the default separator. + .. deprecated:: 3.14 + Accepting objects with false values (like ``0`` and ``[]``) except empty + strings and byte-like objects and ``None`` is now deprecated. + .. function:: parse_qsl(qs, keep_blank_values=False, strict_parsing=False, encoding='utf-8', errors='replace', max_num_fields=None, separator='&') @@ -403,6 +407,15 @@ or on combining URL components into a URL string. If you do not want that behavior, preprocess the *url* with :func:`urlsplit` and :func:`urlunsplit`, removing possible *scheme* and *netloc* parts. + .. warning:: + + Because an absolute URL may be passed as the ``url`` parameter, it is + generally **not secure** to use ``urljoin`` with an attacker-controlled + ``url``. For example in, + ``urljoin("https://website.com/users/", username)``, if ``username`` can + contain an absolute URL, the result of ``urljoin`` will be the absolute + URL. + .. 
versionchanged:: 3.5 @@ -745,6 +758,10 @@ task isn't already covered by the URL parsing functions above. .. versionchanged:: 3.5 Added the *quote_via* parameter. + .. deprecated:: 3.14 + Accepting objects with false values (like ``0`` and ``[]``) except empty + strings and byte-like objects and ``None`` is now deprecated. + .. seealso:: diff --git a/Doc/library/urllib.request.rst b/Doc/library/urllib.request.rst index ce82552a3ae4be..e0831bf7e65ad2 100644 --- a/Doc/library/urllib.request.rst +++ b/Doc/library/urllib.request.rst @@ -67,8 +67,7 @@ The :mod:`urllib.request` module defines the following functions: the response headers as it is specified in the documentation for :class:`~http.client.HTTPResponse`. - For FTP, file, and data URLs and requests explicitly handled by legacy - :class:`URLopener` and :class:`FancyURLopener` classes, this function + For FTP, file, and data URLs, this function returns a :class:`urllib.response.addinfourl` object. Raises :exc:`~urllib.error.URLError` on protocol errors. @@ -153,6 +152,11 @@ The :mod:`urllib.request` module defines the following functions: the path component of a URL. This does not produce a complete URL. The return value will already be quoted using the :func:`~urllib.parse.quote` function. + .. versionchanged:: 3.14 + On Windows, ``:`` characters not following a drive letter are quoted. In + previous versions, :exc:`OSError` was raised if a colon character was + found in any position other than the second character. + .. function:: url2pathname(path) @@ -1339,7 +1343,7 @@ environment settings:: >>> import urllib.request >>> proxies = {'http': 'http://proxy.example.com:8080/'} - >>> opener = urllib.request.FancyURLopener(proxies) + >>> opener = urllib.request.build_opener(urllib.request.ProxyHandler(proxies)) >>> with opener.open("http://www.python.org") as f: ... f.read().decode('utf-8') ... @@ -1347,7 +1351,7 @@ environment settings:: The following example uses no proxies at all, overriding environment settings:: >>> import urllib.request - >>> opener = urllib.request.FancyURLopener({}) + >>> opener = urllib.request.build_opener(urllib.request.ProxyHandler({}})) >>> with opener.open("http://www.python.org/") as f: ... f.read().decode('utf-8') ... @@ -1412,121 +1416,6 @@ some point in the future. Cleans up temporary files that may have been left behind by previous calls to :func:`urlretrieve`. -.. class:: URLopener(proxies=None, **x509) - - .. deprecated:: 3.3 - - Base class for opening and reading URLs. Unless you need to support opening - objects using schemes other than :file:`http:`, :file:`ftp:`, or :file:`file:`, - you probably want to use :class:`FancyURLopener`. - - By default, the :class:`URLopener` class sends a :mailheader:`User-Agent` header - of ``urllib/VVV``, where *VVV* is the :mod:`urllib` version number. - Applications can define their own :mailheader:`User-Agent` header by subclassing - :class:`URLopener` or :class:`FancyURLopener` and setting the class attribute - :attr:`version` to an appropriate string value in the subclass definition. - - The optional *proxies* parameter should be a dictionary mapping scheme names to - proxy URLs, where an empty dictionary turns proxies off completely. Its default - value is ``None``, in which case environmental proxy settings will be used if - present, as discussed in the definition of :func:`urlopen`, above. - - Additional keyword parameters, collected in *x509*, may be used for - authentication of the client when using the :file:`https:` scheme. 
The keywords - *key_file* and *cert_file* are supported to provide an SSL key and certificate; - both are needed to support client authentication. - - :class:`URLopener` objects will raise an :exc:`OSError` exception if the server - returns an error code. - - .. method:: open(fullurl, data=None) - - Open *fullurl* using the appropriate protocol. This method sets up cache and - proxy information, then calls the appropriate open method with its input - arguments. If the scheme is not recognized, :meth:`open_unknown` is called. - The *data* argument has the same meaning as the *data* argument of - :func:`urlopen`. - - This method always quotes *fullurl* using :func:`~urllib.parse.quote`. - - .. method:: open_unknown(fullurl, data=None) - - Overridable interface to open unknown URL types. - - - .. method:: retrieve(url, filename=None, reporthook=None, data=None) - - Retrieves the contents of *url* and places it in *filename*. The return value - is a tuple consisting of a local filename and either an - :class:`email.message.Message` object containing the response headers (for remote - URLs) or ``None`` (for local URLs). The caller must then open and read the - contents of *filename*. If *filename* is not given and the URL refers to a - local file, the input filename is returned. If the URL is non-local and - *filename* is not given, the filename is the output of :func:`tempfile.mktemp` - with a suffix that matches the suffix of the last path component of the input - URL. If *reporthook* is given, it must be a function accepting three numeric - parameters: A chunk number, the maximum size chunks are read in and the total size of the download - (-1 if unknown). It will be called once at the start and after each chunk of data is read from the - network. *reporthook* is ignored for local URLs. - - If the *url* uses the :file:`http:` scheme identifier, the optional *data* - argument may be given to specify a ``POST`` request (normally the request type - is ``GET``). The *data* argument must in standard - :mimetype:`application/x-www-form-urlencoded` format; see the - :func:`urllib.parse.urlencode` function. - - - .. attribute:: version - - Variable that specifies the user agent of the opener object. To get - :mod:`urllib` to tell servers that it is a particular user agent, set this in a - subclass as a class variable or in the constructor before calling the base - constructor. - - -.. class:: FancyURLopener(...) - - .. deprecated:: 3.3 - - :class:`FancyURLopener` subclasses :class:`URLopener` providing default handling - for the following HTTP response codes: 301, 302, 303, 307 and 401. For the 30x - response codes listed above, the :mailheader:`Location` header is used to fetch - the actual URL. For 401 response codes (authentication required), basic HTTP - authentication is performed. For the 30x response codes, recursion is bounded - by the value of the *maxtries* attribute, which defaults to 10. - - For all other response codes, the method :meth:`~BaseHandler.http_error_default` is called - which you can override in subclasses to handle the error appropriately. - - .. note:: - - According to the letter of :rfc:`2616`, 301 and 302 responses to POST requests - must not be automatically redirected without confirmation by the user. In - reality, browsers do allow automatic redirection of these responses, changing - the POST to a GET, and :mod:`urllib` reproduces this behaviour. - - The parameters to the constructor are the same as those for :class:`URLopener`. - - .. 
note:: - - When performing basic authentication, a :class:`FancyURLopener` instance calls - its :meth:`prompt_user_passwd` method. The default implementation asks the - users for the required information on the controlling terminal. A subclass may - override this method to support more appropriate behavior if needed. - - The :class:`FancyURLopener` class offers one additional method that should be - overloaded to provide the appropriate behavior: - - .. method:: prompt_user_passwd(host, realm) - - Return information needed to authenticate the user at the given host in the - specified security realm. The return value should be a tuple, ``(user, - password)``, which can be used for basic authentication. - - The implementation prompts for this information on the terminal; an application - should override this method to use an appropriate interaction model in the local - environment. - :mod:`urllib.request` Restrictions ---------------------------------- @@ -1578,8 +1467,7 @@ some point in the future. you try to fetch a file whose read permissions make it inaccessible; the FTP code will try to read it, fail with a 550 error, and then perform a directory listing for the unreadable file. If fine-grained control is needed, consider - using the :mod:`ftplib` module, subclassing :class:`FancyURLopener`, or changing - *_urlopener* to meet your needs. + using the :mod:`ftplib` module. diff --git a/Doc/library/uu.rst b/Doc/library/uu.rst new file mode 100644 index 00000000000000..0636d180294d47 --- /dev/null +++ b/Doc/library/uu.rst @@ -0,0 +1,15 @@ +:mod:`!uu` --- Encode and decode uuencode files +=============================================== + +.. module:: uu + :synopsis: Removed in 3.13. + :deprecated: + +.. deprecated-removed:: 3.11 3.13 + +This module is no longer part of the Python standard library. +It was :ref:`removed in Python 3.13 ` after +being deprecated in Python 3.11. The removal was decided in :pep:`594`. + +The last version of Python that provided the :mod:`!uu` module was +`Python 3.12 `_. diff --git a/Doc/library/uuid.rst b/Doc/library/uuid.rst index 0f2d7820cb25c8..39e82d0e19a9ac 100644 --- a/Doc/library/uuid.rst +++ b/Doc/library/uuid.rst @@ -1,8 +1,8 @@ -:mod:`!uuid` --- UUID objects according to :rfc:`4122` +:mod:`!uuid` --- UUID objects according to :rfc:`9562` ====================================================== .. module:: uuid - :synopsis: UUID objects (universally unique identifiers) according to RFC 4122 + :synopsis: UUID objects (universally unique identifiers) according to RFC 9562 .. moduleauthor:: Ka-Ping Yee .. sectionauthor:: George Yoshida @@ -11,8 +11,9 @@ -------------- This module provides immutable :class:`UUID` objects (the :class:`UUID` class) -and the functions :func:`uuid1`, :func:`uuid3`, :func:`uuid4`, :func:`uuid5` for -generating version 1, 3, 4, and 5 UUIDs as specified in :rfc:`4122`. +and the functions :func:`uuid1`, :func:`uuid3`, :func:`uuid4`, :func:`uuid5`, +and :func:`uuid.uuid8` for generating version 1, 3, 4, 5, and 8 UUIDs as +specified in :rfc:`9562` (which supersedes :rfc:`4122`). If all you want is a unique ID, you should probably call :func:`uuid1` or :func:`uuid4`. Note that :func:`uuid1` may compromise privacy since it creates @@ -65,7 +66,7 @@ which relays any information about the UUID's safety, using this enumeration: Exactly one of *hex*, *bytes*, *bytes_le*, *fields*, or *int* must be given. 
The *version* argument is optional; if given, the resulting UUID will have its - variant and version number set according to :rfc:`4122`, overriding bits in the + variant and version number set according to :rfc:`9562`, overriding bits in the given *hex*, *bytes*, *bytes_le*, *fields*, or *int*. Comparison of UUID objects are made by way of comparing their @@ -137,7 +138,7 @@ which relays any information about the UUID's safety, using this enumeration: .. attribute:: UUID.urn - The UUID as a URN as specified in :rfc:`4122`. + The UUID as a URN as specified in :rfc:`9562`. .. attribute:: UUID.variant @@ -149,9 +150,13 @@ which relays any information about the UUID's safety, using this enumeration: .. attribute:: UUID.version - The UUID version number (1 through 5, meaningful only when the variant is + The UUID version number (1 through 8, meaningful only when the variant is :const:`RFC_4122`). + .. versionchanged:: 3.14 + Added UUID version 8. + + .. attribute:: UUID.is_safe An enumeration of :class:`SafeUUID` which indicates whether the platform @@ -216,6 +221,23 @@ The :mod:`uuid` module defines the following functions: .. index:: single: uuid5 + +.. function:: uuid8(a=None, b=None, c=None) + + Generate a pseudo-random UUID according to + :rfc:`RFC 9562, §5.8 <9562#section-5.8>`. + + When specified, the parameters *a*, *b* and *c* are expected to be + positive integers of 48, 12 and 62 bits respectively. If they exceed + their expected bit count, only their least significant bits are kept; + non-specified arguments are substituted for a pseudo-random integer of + appropriate size. + + .. versionadded:: 3.14 + +.. index:: single: uuid8 + + The :mod:`uuid` module defines the following namespace identifiers for use with :func:`uuid3` or :func:`uuid5`. @@ -252,7 +274,9 @@ of the :attr:`~UUID.variant` attribute: .. data:: RFC_4122 - Specifies the UUID layout given in :rfc:`4122`. + Specifies the UUID layout given in :rfc:`4122`. This constant is kept + for backward compatibility even though :rfc:`4122` has been superseded + by :rfc:`9562`. .. data:: RESERVED_MICROSOFT @@ -267,7 +291,7 @@ of the :attr:`~UUID.variant` attribute: .. seealso:: - :rfc:`4122` - A Universally Unique IDentifier (UUID) URN Namespace + :rfc:`9562` - A Universally Unique IDentifier (UUID) URN Namespace This specification defines a Uniform Resource Name namespace for UUIDs, the internal format of UUIDs, and methods of generating UUIDs. @@ -283,7 +307,7 @@ The :mod:`uuid` module can be executed as a script from the command line. .. code-block:: sh - python -m uuid [-h] [-u {uuid1,uuid3,uuid4,uuid5}] [-n NAMESPACE] [-N NAME] + python -m uuid [-h] [-u {uuid1,uuid3,uuid4,uuid5,uuid8}] [-n NAMESPACE] [-N NAME] The following options are accepted: @@ -299,6 +323,9 @@ The following options are accepted: Specify the function name to use to generate the uuid. By default :func:`uuid4` is used. + .. versionadded:: 3.14 + Allow generating UUID version 8. + .. option:: -n --namespace diff --git a/Doc/library/xdrlib.rst b/Doc/library/xdrlib.rst new file mode 100644 index 00000000000000..59b801c8e4072e --- /dev/null +++ b/Doc/library/xdrlib.rst @@ -0,0 +1,15 @@ +:mod:`!xdrlib` --- Encode and decode XDR data +============================================= + +.. module:: xdrlib + :synopsis: Removed in 3.13. + :deprecated: + +.. deprecated-removed:: 3.11 3.13 + +This module is no longer part of the Python standard library. +It was :ref:`removed in Python 3.13 ` after +being deprecated in Python 3.11. The removal was decided in :pep:`594`. 
+ +The last version of Python that provided the :mod:`!xdrlib` module was +`Python 3.12 `_. diff --git a/Doc/license.rst b/Doc/license.rst index 674ac5f56e6f97..428dc22b817ebe 100644 --- a/Doc/license.rst +++ b/Doc/license.rst @@ -100,7 +100,7 @@ PSF LICENSE AGREEMENT FOR PYTHON |release| analyze, test, perform and/or display publicly, prepare derivative works, distribute, and otherwise use Python |release| alone or in any derivative version, provided, however, that PSF's License Agreement and PSF's notice of - copyright, i.e., "Copyright © 2001-2024 Python Software Foundation; All Rights + copyright, i.e., "Copyright © 2001 Python Software Foundation; All Rights Reserved" are retained in Python |release| alone or in any derivative version prepared by Licensee. diff --git a/Doc/reference/compound_stmts.rst b/Doc/reference/compound_stmts.rst index 1b1e9f479cbe08..e73ce44270b082 100644 --- a/Doc/reference/compound_stmts.rst +++ b/Doc/reference/compound_stmts.rst @@ -534,18 +534,15 @@ is semantically equivalent to:: enter = type(manager).__enter__ exit = type(manager).__exit__ value = enter(manager) - hit_except = False try: TARGET = value SUITE except: - hit_except = True if not exit(manager, *sys.exc_info()): raise - finally: - if not hit_except: - exit(manager, None, None, None) + else: + exit(manager, None, None, None) With more than one item, the context managers are processed as if multiple :keyword:`with` statements were nested:: diff --git a/Doc/reference/datamodel.rst b/Doc/reference/datamodel.rst index 41133b92ed88ec..66b836eaf0008a 100644 --- a/Doc/reference/datamodel.rst +++ b/Doc/reference/datamodel.rst @@ -1028,9 +1028,9 @@ this approach. using the :class:`types.ModuleType` constructor. Previously the attribute was optional. - .. deprecated-removed:: 3.12 3.14 + .. deprecated-removed:: 3.12 3.16 Setting :attr:`!__loader__` on a module while failing to set - :attr:`!__spec__.loader` is deprecated. In Python 3.14, + :attr:`!__spec__.loader` is deprecated. In Python 3.16, :attr:`!__loader__` will cease to be set or taken into consideration by the import system or the standard library. diff --git a/Doc/reference/expressions.rst b/Doc/reference/expressions.rst index decde0d297cf59..3eaceae41f7eaf 100644 --- a/Doc/reference/expressions.rst +++ b/Doc/reference/expressions.rst @@ -1156,7 +1156,8 @@ a user-defined function: first thing the code block will do is bind the formal parameters to the arguments; this is described in section :ref:`function`. When the code block executes a :keyword:`return` statement, this specifies the return value of the - function call. + function call. If execution reaches the end of the code block without + executing a :keyword:`return` statement, the return value is ``None``. a built-in function or method: .. index:: diff --git a/Doc/reference/simple_stmts.rst b/Doc/reference/simple_stmts.rst index 24df4a6ba7b678..2a72af4e9a3299 100644 --- a/Doc/reference/simple_stmts.rst +++ b/Doc/reference/simple_stmts.rst @@ -408,9 +408,9 @@ The extended form, ``assert expression1, expression2``, is equivalent to :: These equivalences assume that :const:`__debug__` and :exc:`AssertionError` refer to the built-in variables with those names. In the current implementation, the -built-in variable :const:`__debug__` is ``True`` under normal circumstances, +built-in variable ``__debug__`` is ``True`` under normal circumstances, ``False`` when optimization is requested (command line option :option:`-O`). 
The current -code generator emits no code for an assert statement when optimization is +code generator emits no code for an :keyword:`assert` statement when optimization is requested at compile time. Note that it is unnecessary to include the source code for the expression that failed in the error message; it will be displayed as part of the stack trace. @@ -533,8 +533,8 @@ The :keyword:`!yield` statement yield_stmt: `yield_expression` A :keyword:`yield` statement is semantically equivalent to a :ref:`yield -expression `. The yield statement can be used to omit the parentheses -that would otherwise be required in the equivalent yield expression +expression `. The ``yield`` statement can be used to omit the +parentheses that would otherwise be required in the equivalent yield expression statement. For example, the yield statements :: yield @@ -546,7 +546,7 @@ are equivalent to the yield expression statements :: (yield from ) Yield expressions and statements are only used when defining a :term:`generator` -function, and are only used in the body of the generator function. Using yield +function, and are only used in the body of the generator function. Using :keyword:`yield` in a function definition is sufficient to cause that definition to create a generator function instead of a normal function. @@ -966,25 +966,14 @@ The :keyword:`!global` statement .. productionlist:: python-grammar global_stmt: "global" `identifier` ("," `identifier`)* -The :keyword:`global` statement is a declaration which holds for the entire -current code block. It means that the listed identifiers are to be interpreted -as globals. It would be impossible to assign to a global variable without +The :keyword:`global` statement causes the listed identifiers to be interpreted +as globals. It would be impossible to assign to a global variable without :keyword:`!global`, although free variables may refer to globals without being declared global. -Names listed in a :keyword:`global` statement must not be used in the same code -block textually preceding that :keyword:`!global` statement. - -Names listed in a :keyword:`global` statement must not be defined as formal -parameters, or as targets in :keyword:`with` statements or :keyword:`except` clauses, or in a :keyword:`for` target list, :keyword:`class` -definition, function definition, :keyword:`import` statement, or -:term:`variable annotations `. - -.. impl-detail:: - - The current implementation does not enforce some of these restrictions, but - programs should not abuse this freedom, as future implementations may enforce - them or silently change the meaning of the program. +The :keyword:`global` statement applies to the entire scope of a function or +class body. A :exc:`SyntaxError` is raised if a variable is used or +assigned to prior to its global declaration in the scope. .. index:: pair: built-in function; exec @@ -1020,7 +1009,7 @@ identifiers. If a name is bound in more than one nonlocal scope, the nearest binding is used. If a name is not bound in any nonlocal scope, or if there is no nonlocal scope, a :exc:`SyntaxError` is raised. -The nonlocal statement applies to the entire scope of a function or +The :keyword:`nonlocal` statement applies to the entire scope of a function or class body. A :exc:`SyntaxError` is raised if a variable is used or assigned to prior to its nonlocal declaration in the scope. 
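A minimal sketch of the declaration-order rule described in the revised ``global``/``nonlocal`` text above; the names ``counter``, ``bump``, and ``make_adder`` are illustrative only and do not come from the documentation::

    counter = 0

    def bump():
        global counter      # the declaration precedes any use of the name here;
        counter += 1        # assigning to "counter" above the global statement
                            # would instead raise SyntaxError at compile time

    def make_adder():
        total = 0
        def add(value):
            nonlocal total  # rebinds 'total' in the enclosing make_adder() scope
            total += value
            return total
        return add

    bump()
    assert counter == 1
    add_five = make_adder()
    assert add_five(5) == 5
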
diff --git a/Doc/using/configure.rst b/Doc/using/configure.rst index 83994af795e3fc..e7733a6dc11451 100644 --- a/Doc/using/configure.rst +++ b/Doc/using/configure.rst @@ -297,6 +297,19 @@ General Options .. versionadded:: 3.13 +.. option:: --enable-experimental-jit=[no|yes|yes-off|interpreter] + + Indicate how to integrate the :ref:`JIT compiler `. + + * ``no`` - build the interpreter without the JIT. + * ``yes`` - build the interpreter with the JIT. + * ``yes-off`` - build the interpreter with the JIT but disable it by default. + * ``interpreter`` - build the interpreter without the JIT, but with the tier 2 enabled interpreter. + + By convention, ``--enable-experimental-jit`` is a shorthand for ``--enable-experimental-jit=yes``. + + .. versionadded:: 3.13 + .. option:: PKG_CONFIG Path to ``pkg-config`` utility. @@ -441,15 +454,6 @@ Options for third-party dependencies WebAssembly Options ------------------- -.. option:: --with-emscripten-target=[browser|node] - - Set build flavor for ``wasm32-emscripten``. - - * ``browser`` (default): preload minimal stdlib, default MEMFS. - * ``node``: NODERAWFS and pthread support. - - .. versionadded:: 3.11 - .. option:: --enable-wasm-dynamic-linking Turn on dynamic linking support for WASM. @@ -1084,7 +1088,8 @@ CPython project) this is usually the ``all`` target. The all`` will build. The three choices are: * ``profile-opt`` (configured with ``--enable-optimizations``) -* ``build_wasm`` (configured with ``--with-emscripten-target``) +* ``build_wasm`` (chosen if the host platform matches ``wasm32-wasi*`` or + ``wasm32-emscripten``) * ``build_all`` (configured without explicitly using either of the others) Depending on the most recent source file changes, Make will rebuild diff --git a/Doc/using/windows.rst b/Doc/using/windows.rst index daaf8822af1161..1a6322d72341ff 100644 --- a/Doc/using/windows.rst +++ b/Doc/using/windows.rst @@ -435,7 +435,7 @@ When writing to the Windows Registry, the following behaviors exist: For more detail on the technical basis for these limitations, please consult Microsoft's documentation on packaged full-trust apps, currently available at `docs.microsoft.com/en-us/windows/msix/desktop/desktop-to-uwp-behind-the-scenes -`_ +`_ .. _windows-nuget: @@ -536,7 +536,7 @@ dependents, such as Idle), pip and the Python documentation are not included. .. note:: The embedded distribution does not include the `Microsoft C Runtime - `_ and it is + `_ and it is the responsibility of the application installer to provide this. The runtime may have already been installed on a user's system previously or automatically via Windows Update, and can be detected by finding @@ -679,13 +679,13 @@ System variables, you need non-restricted access to your machine .. seealso:: - https://docs.microsoft.com/en-us/windows/win32/procthread/environment-variables + https://learn.microsoft.com/windows/win32/procthread/environment-variables Overview of environment variables on Windows - https://docs.microsoft.com/en-us/windows-server/administration/windows-commands/set_1 + https://learn.microsoft.com/windows-server/administration/windows-commands/set_1 The ``set`` command, for temporarily modifying environment variables - https://docs.microsoft.com/en-us/windows-server/administration/windows-commands/setx + https://learn.microsoft.com/windows-server/administration/windows-commands/setx The ``setx`` command, for permanently modifying environment variables @@ -1291,13 +1291,13 @@ is a collection of modules for advanced Windows-specific support. 
This includes utilities for: * `Component Object Model - `_ + `_ (COM) * Win32 API calls * Registry * Event log * `Microsoft Foundation Classes - `_ + `_ (MFC) user interfaces `PythonWin ` and :mod:`copyable `. (Contributed by Serhiy Storchaka in :gh:`125767`.) @@ -225,7 +228,7 @@ argparse * Introduced the optional *suggest_on_error* parameter to :class:`argparse.ArgumentParser`, enabling suggestions for argument choices and subparser names if mistyped by the user. - (Contributed by Savannah Ostrowski in :gh:`124456`) + (Contributed by Savannah Ostrowski in :gh:`124456`.) ast --- @@ -250,6 +253,12 @@ concurrent.futures same process) to Python code. This is separate from the proposed API in :pep:`734`. (Contributed by Eric Snow in :gh:`124548`.) +* The default ``ProcessPoolExecutor`` start method (see + :ref:`multiprocessing-start-methods`) changed from *fork* to *forkserver* on + platforms other than macOS & Windows. If you require the threading + incompatible *fork* start method you must explicitly request it by + supplying a *mp_context* to :class:`concurrent.futures.ProcessPoolExecutor`. + (Contributed by Gregory P. Smith in :gh:`84559`.) ctypes ------ @@ -264,12 +273,8 @@ ctypes to help match a non-default ABI. (Contributed by Petr Viktorin in :gh:`97702`.) -decimal -------- - -* Add alternative :class:`~decimal.Decimal` constructor - :meth:`Decimal.from_number() `. - (Contributed by Serhiy Storchaka in :gh:`121798`.) +* The :exc:`~ctypes.COMError` exception is now public. + (Contributed by Jun Komoda in :gh:`126686`.) datetime -------- @@ -277,6 +282,13 @@ datetime * Add :meth:`datetime.time.strptime` and :meth:`datetime.date.strptime`. (Contributed by Wannes Boeykens in :gh:`41431`.) +decimal +------- + +* Add alternative :class:`~decimal.Decimal` constructor + :meth:`Decimal.from_number() `. + (Contributed by Serhiy Storchaka in :gh:`121798`.) + dis --- @@ -314,6 +326,19 @@ functools to reserve a place for positional arguments. (Contributed by Dominykas Grigonis in :gh:`119127`.) +* Allow the *initial* parameter of :func:`functools.reduce` to be passed + as a keyword argument. + (Contributed by Sayandip Dutta in :gh:`125916`.) + + +getopt +------ + +* Add support for options with optional arguments. + (Contributed by Serhiy Storchaka in :gh:`126374`.) + +* Add support for returning intermixed options and non-option arguments in order. + (Contributed by Serhiy Storchaka in :gh:`126390`.) http ---- @@ -352,6 +377,65 @@ json (Contributed by Trey Hunner in :gh:`122873`.) +mimetypes +--------- + +* Add MS and :rfc:`8081` MIME types for fonts: + + * Embedded OpenType: ``application/vnd.ms-fontobject`` + * OpenType Layout (OTF) ``font/otf`` + * TrueType: ``font/ttf`` + * WOFF 1.0 ``font/woff`` + * WOFF 2.0 ``font/woff2`` + + (Contributed by Sahil Prajapati and Hugo van Kemenade in :gh:`84852`.) + +* Add :rfc:`9559` MIME types for Matroska audiovisual data container + structures, containing: + + * audio with no video: ``audio/matroska`` (``.mka``) + * video: ``video/matroska`` (``.mkv``) + * stereoscopic video: ``video/matroska-3d`` (``.mk3d``) + + (Contributed by Hugo van Kemenade in :gh:`89416`.) 
+ +* Add MIME types for images with RFCs: + + * :rfc:`1494`: CCITT Group 3 (``.g3``) + * :rfc:`3362`: Real-time Facsimile, T.38 (``.t38``) + * :rfc:`3745`: JPEG 2000 (``.jp2``), extension (``.jpx``) and compound (``.jpm``) + * :rfc:`3950`: Tag Image File Format Fax eXtended, TIFF-FX (``.tfx``) + * :rfc:`4047`: Flexible Image Transport System (``.fits``) + * :rfc:`7903`: Enhanced Metafile (``.emf``) and Windows Metafile (``.wmf``) + + (Contributed by Hugo van Kemenade in :gh:`85957`.) + + +multiprocessing +--------------- + +* The default start method (see :ref:`multiprocessing-start-methods`) changed + from *fork* to *forkserver* on platforms other than macOS & Windows where + it was already *spawn*. If you require the threading incompatible *fork* + start method you must explicitly request it using a context from + :func:`multiprocessing.get_context` (preferred) or change the default via + :func:`multiprocessing.set_start_method`. + (Contributed by Gregory P. Smith in :gh:`84559`.) +* :mod:`multiprocessing`'s ``"forkserver"`` start method now authenticates + its control socket to avoid solely relying on filesystem permissions + to restrict what other processes could cause the forkserver to spawn workers + and run code. + (Contributed by Gregory P. Smith for :gh:`97514`.) +* The :ref:`multiprocessing proxy objects ` + for *list* and *dict* types gain previously overlooked missing methods: + + * :meth:`!clear` and :meth:`!copy` for proxies of :class:`list`. + * :meth:`~dict.fromkeys`, ``reversed(d)``, ``d | {}``, ``{} | d``, + ``d |= {'b': 2}`` for proxies of :class:`dict`. + + (Contributed by Roy Hyunjin Han for :gh:`103134`.) + + operator -------- @@ -406,6 +490,14 @@ pdb command when :mod:`pdb` is in ``inline`` mode. (Contributed by Tian Gao in :gh:`123757`.) +platform +-------- + +* Add :func:`platform.invalidate_caches` to invalidate the cached results. + + (Contributed by Bénédikt Tran in :gh:`122549`.) + + pickle ------ @@ -458,6 +550,14 @@ unittest (Contributed by Jacob Walls in :gh:`80958`.) +uuid +---- + +* Add support for UUID version 8 via :func:`uuid.uuid8` as specified + in :rfc:`9562`. + (Contributed by Bénédikt Tran in :gh:`89083`.) + + .. Add improved modules above alphabetically, not here at the end. Optimizations @@ -506,14 +606,6 @@ Deprecated as a single positional argument. (Contributed by Serhiy Storchaka in :gh:`109218`.) -* :mod:`multiprocessing` and :mod:`concurrent.futures`: - The default start method (see :ref:`multiprocessing-start-methods`) changed - away from *fork* to *forkserver* on platforms where it was not already - *spawn* (Windows & macOS). If you require the threading incompatible *fork* - start method you must explicitly specify it when using :mod:`multiprocessing` - or :mod:`concurrent.futures` APIs. - (Contributed by Gregory P. Smith in :gh:`84559`.) - * :mod:`os`: :term:`Soft deprecate ` :func:`os.popen` and :func:`os.spawn* ` functions. They should no longer be used to @@ -524,6 +616,13 @@ Deprecated Deprecate :meth:`symtable.Class.get_methods` due to the lack of interest. (Contributed by Bénédikt Tran in :gh:`119698`.) +* :mod:`urllib.parse`: + Accepting objects with false values (like ``0`` and ``[]``) except empty + strings, byte-like objects and ``None`` in :mod:`urllib.parse` functions + :func:`~urllib.parse.parse_qsl` and :func:`~urllib.parse.parse_qs` is now + deprecated. + (Contributed by Serhiy Storchaka in :gh:`116897`.) + .. Add deprecations above alphabetically, not here at the end. .. 
include:: ../deprecations/pending-removal-in-3.15.rst @@ -678,6 +777,17 @@ urllib * Remove deprecated :class:`!Quoter` class from :mod:`urllib.parse`. It had previously raised a :exc:`DeprecationWarning` since Python 3.11. (Contributed by Nikita Sobolev in :gh:`118827`.) +* Remove deprecated :class:`!URLopener` and :class:`!FancyURLopener` classes + from :mod:`urllib.request`. They had previously raised a + :exc:`DeprecationWarning` since Python 3.3. + + ``myopener.open()`` can be replaced with :func:`~urllib.request.urlopen`, + and ``myopener.retrieve()`` can be replaced with + :func:`~urllib.request.urlretrieve`. Customizations to the opener + classes can be replaced by passing customized handlers to + :func:`~urllib.request.build_opener`. + + (Contributed by Barney Gale in :gh:`84850`.) Others ------ @@ -714,6 +824,16 @@ Changes in the Python API Build changes ============= +PEP 761: Discontinuation of PGP signatures +------------------------------------------ + +PGP signatures will not be available for CPython 3.14 and onwards. +Users verifying artifacts must use `Sigstore verification materials`_ for +verifying CPython artifacts. This change in release process is specified +in :pep:`761`. + +.. _Sigstore verification materials: https://www.python.org/downloads/metadata/sigstore/ + C API changes ============= @@ -756,6 +876,11 @@ New features an interned string and deallocate it during module shutdown. (Contributed by Eddie Elizondo in :gh:`113601`.) +* Add :c:func:`PyLong_IsPositive`, :c:func:`PyLong_IsNegative` + and :c:func:`PyLong_IsZero` for checking if :c:type:`PyLongObject` + is positive, negative, or zero, respectively. + (Contributed by James Roy and Sergey B Kirpichev in :gh:`126061`.) + * Add new functions to convert C ```` numbers from/to Python :class:`int`: @@ -819,6 +944,9 @@ New features * Add :c:func:`PyType_Freeze` function to make a type immutable. (Contributed by Victor Stinner in :gh:`121654`.) +* Add :c:func:`PyUnstable_Object_EnableDeferredRefcount` for enabling + deferred reference counting, as outlined in :pep:`703`. + Porting to Python 3.14 ---------------------- diff --git a/Doc/whatsnew/3.8.rst b/Doc/whatsnew/3.8.rst index fc9f49e65af847..bdc4ca5cab5245 100644 --- a/Doc/whatsnew/3.8.rst +++ b/Doc/whatsnew/3.8.rst @@ -936,7 +936,7 @@ Add option ``--json-lines`` to parse every input line as a separate JSON object. logging ------- -Added a *force* keyword argument to :func:`logging.basicConfig` +Added a *force* keyword argument to :func:`logging.basicConfig`. When set to true, any existing handlers attached to the root logger are removed and closed before carrying out the configuration specified by the other arguments. 
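A brief sketch of the *force* behaviour described in the ``logging`` entry just above (illustrative only, not part of the patch):

```python
import logging
import sys

logging.basicConfig(level=logging.INFO)  # first call configures the root logger

# force=True removes and closes the existing root handlers before applying
# the new configuration, so this second call actually takes effect.
logging.basicConfig(level=logging.DEBUG, stream=sys.stderr, force=True)
logging.debug("visible: the root logger was reconfigured")
```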
diff --git a/Include/cpython/code.h b/Include/cpython/code.h index 370f1d259abe0f..3899d4269233a1 100644 --- a/Include/cpython/code.h +++ b/Include/cpython/code.h @@ -131,7 +131,8 @@ typedef struct { \ /* redundant values (derived from co_localsplusnames and \ co_localspluskinds) */ \ - int co_nlocalsplus; /* number of local + cell + free variables */ \ + int co_nlocalsplus; /* number of spaces for holding local, cell, \ + and free variables */ \ int co_framesize; /* Size of frame in words */ \ int co_nlocals; /* number of local variables */ \ int co_ncellvars; /* total number of cell variables */ \ diff --git a/Include/cpython/longobject.h b/Include/cpython/longobject.h index c1214d5e3714ea..4d6e618f831ad8 100644 --- a/Include/cpython/longobject.h +++ b/Include/cpython/longobject.h @@ -61,6 +61,24 @@ PyAPI_FUNC(PyObject*) PyLong_FromUnsignedNativeBytes(const void* buffer, PyAPI_FUNC(int) PyUnstable_Long_IsCompact(const PyLongObject* op); PyAPI_FUNC(Py_ssize_t) PyUnstable_Long_CompactValue(const PyLongObject* op); +/* PyLong_IsPositive. Check if the integer object is positive. + + - On success, return 1 if *obj is positive, and 0 otherwise. + - On failure, set an exception, and return -1. */ +PyAPI_FUNC(int) PyLong_IsPositive(PyObject *obj); + +/* PyLong_IsNegative. Check if the integer object is negative. + + - On success, return 1 if *obj is negative, and 0 otherwise. + - On failure, set an exception, and return -1. */ +PyAPI_FUNC(int) PyLong_IsNegative(PyObject *obj); + +/* PyLong_IsZero. Check if the integer object is zero. + + - On success, return 1 if *obj is zero, and 0 if it is non-zero. + - On failure, set an exception, and return -1. */ +PyAPI_FUNC(int) PyLong_IsZero(PyObject *obj); + /* PyLong_GetSign. Get the sign of an integer object: 0, -1 or +1 for zero, negative or positive integer, respectively. diff --git a/Include/cpython/object.h b/Include/cpython/object.h index f0f61796cd3ec8..e4797029da431e 100644 --- a/Include/cpython/object.h +++ b/Include/cpython/object.h @@ -527,3 +527,10 @@ typedef enum { typedef int (*PyRefTracer)(PyObject *, PyRefTracerEvent event, void *); PyAPI_FUNC(int) PyRefTracer_SetTracer(PyRefTracer tracer, void *data); PyAPI_FUNC(PyRefTracer) PyRefTracer_GetTracer(void**); + +/* Enable PEP-703 deferred reference counting on the object. + * + * Returns 1 if deferred reference counting was successfully enabled, and + * 0 if the runtime ignored it. This function cannot fail. + */ +PyAPI_FUNC(int) PyUnstable_Object_EnableDeferredRefcount(PyObject *); diff --git a/Include/internal/pycore_backoff.h b/Include/internal/pycore_backoff.h index 20436a68b69677..3e02728522828e 100644 --- a/Include/internal/pycore_backoff.h +++ b/Include/internal/pycore_backoff.h @@ -102,10 +102,9 @@ backoff_counter_triggers(_Py_BackoffCounter counter) } /* Initial JUMP_BACKWARD counter. - * This determines when we create a trace for a loop. -* Backoff sequence 16, 32, 64, 128, 256, 512, 1024, 2048, 4096. */ -#define JUMP_BACKWARD_INITIAL_VALUE 15 -#define JUMP_BACKWARD_INITIAL_BACKOFF 4 + * This determines when we create a trace for a loop. 
*/ +#define JUMP_BACKWARD_INITIAL_VALUE 4095 +#define JUMP_BACKWARD_INITIAL_BACKOFF 12 static inline _Py_BackoffCounter initial_jump_backoff_counter(void) { diff --git a/Include/internal/pycore_crossinterp.h b/Include/internal/pycore_crossinterp.h index e91e911feb38cc..69a60d73e05c26 100644 --- a/Include/internal/pycore_crossinterp.h +++ b/Include/internal/pycore_crossinterp.h @@ -11,6 +11,7 @@ extern "C" { #include "pycore_lock.h" // PyMutex #include "pycore_pyerrors.h" + /**************/ /* exceptions */ /**************/ @@ -38,14 +39,14 @@ extern int _Py_CallInInterpreterAndRawFree( /* cross-interpreter data */ /**************************/ -typedef struct _xid _PyXIData_t; -typedef PyObject *(*xid_newobjectfunc)(_PyXIData_t *); +typedef struct _xidata _PyXIData_t; +typedef PyObject *(*xid_newobjfunc)(_PyXIData_t *); typedef void (*xid_freefunc)(void *); // _PyXIData_t is similar to Py_buffer as an effectively // opaque struct that holds data outside the object machinery. This // is necessary to pass safely between interpreters in the same process. -struct _xid { +struct _xidata { // data is the cross-interpreter-safe derivation of a Python object // (see _PyObject_GetXIData). It will be NULL if the // new_object func (below) encodes the data. @@ -71,7 +72,7 @@ struct _xid { // interpreter given the data. The resulting object (a new // reference) will be equivalent to the original object. This field // is required. - xid_newobjectfunc new_object; + xid_newobjfunc new_object; // free is called when the data is released. If it is NULL then // nothing will be done to free the data. For some types this is // okay (e.g. bytes) and for those types this field should be set @@ -99,9 +100,26 @@ typedef int (*xidatafunc)(PyThreadState *tstate, PyObject *, _PyXIData_t *); typedef struct _xid_lookup_state _PyXIData_lookup_t; -PyAPI_FUNC(xidatafunc) _PyXIData_Lookup(PyObject *); -PyAPI_FUNC(int) _PyObject_CheckXIData(PyObject *); -PyAPI_FUNC(int) _PyObject_GetXIData(PyObject *, _PyXIData_t *); +typedef struct { + _PyXIData_lookup_t *global; + _PyXIData_lookup_t *local; + PyObject *PyExc_NotShareableError; +} _PyXIData_lookup_context_t; + +PyAPI_FUNC(int) _PyXIData_GetLookupContext( + PyInterpreterState *, + _PyXIData_lookup_context_t *); + +PyAPI_FUNC(xidatafunc) _PyXIData_Lookup( + _PyXIData_lookup_context_t *, + PyObject *); +PyAPI_FUNC(int) _PyObject_CheckXIData( + _PyXIData_lookup_context_t *, + PyObject *); +PyAPI_FUNC(int) _PyObject_GetXIData( + _PyXIData_lookup_context_t *, + PyObject *, + _PyXIData_t *); /* using cross-interpreter data */ @@ -116,11 +134,11 @@ PyAPI_FUNC(int) _PyXIData_ReleaseAndRawFree(_PyXIData_t *); PyAPI_FUNC(void) _PyXIData_Init( _PyXIData_t *data, PyInterpreterState *interp, void *shared, PyObject *obj, - xid_newobjectfunc new_object); + xid_newobjfunc new_object); PyAPI_FUNC(int) _PyXIData_InitWithSize( _PyXIData_t *, PyInterpreterState *interp, const size_t, PyObject *, - xid_newobjectfunc); + xid_newobjfunc); PyAPI_FUNC(void) _PyXIData_Clear( PyInterpreterState *, _PyXIData_t *); // Normally the Init* functions are sufficient. 
The only time @@ -154,25 +172,38 @@ PyAPI_FUNC(void) _PyXIData_Clear( PyInterpreterState *, _PyXIData_t *); /* runtime state & lifecycle */ /*****************************/ -struct _xi_runtime_state { +typedef struct { // builtin types _PyXIData_lookup_t data_lookup; -}; +} _PyXI_global_state_t; -struct _xi_state { +typedef struct { // heap types _PyXIData_lookup_t data_lookup; - // heap types - PyObject *PyExc_NotShareableError; -}; + struct xi_exceptions { + // static types + PyObject *PyExc_InterpreterError; + PyObject *PyExc_InterpreterNotFoundError; + // heap types + PyObject *PyExc_NotShareableError; + } exceptions; +} _PyXI_state_t; + +#define _PyXI_GET_GLOBAL_STATE(interp) (&(interp)->runtime->xi) +#define _PyXI_GET_STATE(interp) (&(interp)->xi) +#ifndef Py_BUILD_CORE_MODULE extern PyStatus _PyXI_Init(PyInterpreterState *interp); extern void _PyXI_Fini(PyInterpreterState *interp); extern PyStatus _PyXI_InitTypes(PyInterpreterState *interp); extern void _PyXI_FiniTypes(PyInterpreterState *interp); +#endif // Py_BUILD_CORE_MODULE -#define _PyInterpreterState_GetXIState(interp) (&(interp)->xi) +int _Py_xi_global_state_init(_PyXI_global_state_t *); +void _Py_xi_global_state_fini(_PyXI_global_state_t *); +int _Py_xi_state_init(_PyXI_state_t *, PyInterpreterState *); +void _Py_xi_state_fini(_PyXI_state_t *, PyInterpreterState *); /***************************/ diff --git a/Include/internal/pycore_crossinterp_data_registry.h b/Include/internal/pycore_crossinterp_data_registry.h index 2990c6af62e952..bbad4de770857f 100644 --- a/Include/internal/pycore_crossinterp_data_registry.h +++ b/Include/internal/pycore_crossinterp_data_registry.h @@ -7,30 +7,35 @@ // alternative would be to add a tp_* slot for a class's // xidatafunc. It would be simpler and more efficient. -struct _xidregitem; +struct _xid_regitem; -struct _xidregitem { - struct _xidregitem *prev; - struct _xidregitem *next; +typedef struct _xid_regitem { + struct _xid_regitem *prev; + struct _xid_regitem *next; /* This can be a dangling pointer, but only if weakref is set. */ PyTypeObject *cls; /* This is NULL for builtin types. */ PyObject *weakref; size_t refcount; xidatafunc getdata; -}; +} _PyXIData_regitem_t; -struct _xidregistry { +typedef struct { int global; /* builtin types or heap types */ int initialized; PyMutex mutex; - struct _xidregitem *head; -}; + _PyXIData_regitem_t *head; +} _PyXIData_registry_t; -PyAPI_FUNC(int) _PyXIData_RegisterClass(PyTypeObject *, xidatafunc); -PyAPI_FUNC(int) _PyXIData_UnregisterClass(PyTypeObject *); +PyAPI_FUNC(int) _PyXIData_RegisterClass( + _PyXIData_lookup_context_t *, + PyTypeObject *, + xidatafunc); +PyAPI_FUNC(int) _PyXIData_UnregisterClass( + _PyXIData_lookup_context_t *, + PyTypeObject *); struct _xid_lookup_state { // XXX Remove this field once we have a tp_* slot. 
- struct _xidregistry registry; + _PyXIData_registry_t registry; }; diff --git a/Include/internal/pycore_dict.h b/Include/internal/pycore_dict.h index c5399ad8e0497f..6e4a308226f3fe 100644 --- a/Include/internal/pycore_dict.h +++ b/Include/internal/pycore_dict.h @@ -43,8 +43,6 @@ extern int _PyDict_Next( extern int _PyDict_HasOnlyStringKeys(PyObject *mp); -extern void _PyDict_MaybeUntrack(PyObject *mp); - // Export for '_ctypes' shared extension PyAPI_FUNC(Py_ssize_t) _PyDict_SizeOf(PyDictObject *); @@ -92,6 +90,17 @@ extern PyObject *_PyDict_FromKeys(PyObject *, PyObject *, PyObject *); extern uint32_t _PyDictKeys_GetVersionForCurrentState( PyInterpreterState *interp, PyDictKeysObject *dictkeys); +/* Gets a version number unique to the current state of the keys of dict, if possible. + * + * In free-threaded builds ensures that the dict can be used for lock-free + * reads if a version was assigned. + * + * The caller must hold the per-object lock on dict. + * + * Returns the version number, or zero if it was not possible to get a version number. */ +extern uint32_t _PyDict_GetKeysVersionForCurrentState( + PyInterpreterState *interp, PyDictObject *dict); + extern size_t _PyDict_KeysSize(PyDictKeysObject *keys); extern void _PyDictKeys_DecRef(PyDictKeysObject *keys); diff --git a/Include/internal/pycore_gc.h b/Include/internal/pycore_gc.h index 38a1c56c09d9db..479fe10d00066d 100644 --- a/Include/internal/pycore_gc.h +++ b/Include/internal/pycore_gc.h @@ -50,7 +50,6 @@ static inline PyObject* _Py_FROM_GC(PyGC_Head *gc) { # define _PyGC_BITS_UNREACHABLE (4) # define _PyGC_BITS_FROZEN (8) # define _PyGC_BITS_SHARED (16) -# define _PyGC_BITS_SHARED_INLINE (32) # define _PyGC_BITS_DEFERRED (64) // Use deferred reference counting #endif @@ -119,23 +118,6 @@ static inline void _PyObject_GC_SET_SHARED(PyObject *op) { } #define _PyObject_GC_SET_SHARED(op) _PyObject_GC_SET_SHARED(_Py_CAST(PyObject*, op)) -/* True if the memory of the object is shared between multiple - * threads and needs special purpose when freeing due to - * the possibility of in-flight lock-free reads occurring. - * Objects with this bit that are GC objects will automatically - * delay-freed by PyObject_GC_Del. 
*/ -static inline int _PyObject_GC_IS_SHARED_INLINE(PyObject *op) { - return _PyObject_HAS_GC_BITS(op, _PyGC_BITS_SHARED_INLINE); -} -#define _PyObject_GC_IS_SHARED_INLINE(op) \ - _PyObject_GC_IS_SHARED_INLINE(_Py_CAST(PyObject*, op)) - -static inline void _PyObject_GC_SET_SHARED_INLINE(PyObject *op) { - _PyObject_SET_GC_BITS(op, _PyGC_BITS_SHARED_INLINE); -} -#define _PyObject_GC_SET_SHARED_INLINE(op) \ - _PyObject_GC_SET_SHARED_INLINE(_Py_CAST(PyObject*, op)) - #endif /* Bit flags for _gc_prev */ diff --git a/Include/internal/pycore_global_objects_fini_generated.h b/Include/internal/pycore_global_objects_fini_generated.h index e4f0138e17edfa..c12e242d560bde 100644 --- a/Include/internal/pycore_global_objects_fini_generated.h +++ b/Include/internal/pycore_global_objects_fini_generated.h @@ -982,6 +982,7 @@ _PyStaticObjects_CheckRefcnt(PyInterpreterState *interp) { _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(hi)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(hook)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(hour)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(id)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(ident)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(identity_hint)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(ignore)); diff --git a/Include/internal/pycore_global_strings.h b/Include/internal/pycore_global_strings.h index e70f11e2a26cd5..dfd9f2b799ec8e 100644 --- a/Include/internal/pycore_global_strings.h +++ b/Include/internal/pycore_global_strings.h @@ -471,6 +471,7 @@ struct _Py_global_strings { STRUCT_FOR_ID(hi) STRUCT_FOR_ID(hook) STRUCT_FOR_ID(hour) + STRUCT_FOR_ID(id) STRUCT_FOR_ID(ident) STRUCT_FOR_ID(identity_hint) STRUCT_FOR_ID(ignore) diff --git a/Include/internal/pycore_import.h b/Include/internal/pycore_import.h index 290ba95e1a0ad7..318c712bdfa174 100644 --- a/Include/internal/pycore_import.h +++ b/Include/internal/pycore_import.h @@ -21,6 +21,7 @@ extern int _PyImport_SetModuleString(const char *name, PyObject* module); extern void _PyImport_AcquireLock(PyInterpreterState *interp); extern void _PyImport_ReleaseLock(PyInterpreterState *interp); +extern void _PyImport_ReInitLock(PyInterpreterState *interp); // This is used exclusively for the sys and builtins modules: extern int _PyImport_FixupBuiltin( diff --git a/Include/internal/pycore_interp.h b/Include/internal/pycore_interp.h index 9e3b4299693bbc..5e4bcbf835a4d0 100644 --- a/Include/internal/pycore_interp.h +++ b/Include/internal/pycore_interp.h @@ -16,7 +16,7 @@ extern "C" { #include "pycore_code.h" // struct callable_cache #include "pycore_codecs.h" // struct codecs_state #include "pycore_context.h" // struct _Py_context_state -#include "pycore_crossinterp.h" // struct _xidregistry +#include "pycore_crossinterp.h" // _PyXI_state_t #include "pycore_dict_state.h" // struct _Py_dict_state #include "pycore_dtoa.h" // struct _dtoa_state #include "pycore_exceptions.h" // struct _Py_exc_state @@ -130,6 +130,7 @@ struct _is { uint64_t next_unique_id; /* The linked list of threads, newest first. */ PyThreadState *head; + _PyThreadStateImpl *preallocated; /* The thread currently executing in the __main__ module, if any. */ PyThreadState *main; /* Used in Modules/_threadmodule.c. 
*/ @@ -205,7 +206,7 @@ struct _is { freefunc co_extra_freefuncs[MAX_CO_EXTRA_USERS]; /* cross-interpreter data and utils */ - struct _xi_state xi; + _PyXI_state_t xi; #ifdef HAVE_FORK PyObject *before_forkers; @@ -278,9 +279,10 @@ struct _is { struct _Py_interp_cached_objects cached_objects; struct _Py_interp_static_objects static_objects; + Py_ssize_t _interactive_src_count; + /* the initial PyInterpreterState.threads.head */ _PyThreadStateImpl _initial_thread; - Py_ssize_t _interactive_src_count; }; diff --git a/Include/internal/pycore_modsupport.h b/Include/internal/pycore_modsupport.h index c661f1d82a84f6..614e9f93751834 100644 --- a/Include/internal/pycore_modsupport.h +++ b/Include/internal/pycore_modsupport.h @@ -1,8 +1,6 @@ #ifndef Py_INTERNAL_MODSUPPORT_H #define Py_INTERNAL_MODSUPPORT_H -#include "pycore_lock.h" // _PyOnceFlag - #ifdef __cplusplus extern "C" { #endif diff --git a/Include/internal/pycore_object.h b/Include/internal/pycore_object.h index c7af720b1ce43d..783d88cb51ffbd 100644 --- a/Include/internal/pycore_object.h +++ b/Include/internal/pycore_object.h @@ -14,6 +14,7 @@ extern "C" { #include "pycore_interp.h" // PyInterpreterState.gc #include "pycore_pyatomic_ft_wrappers.h" // FT_ATOMIC_STORE_PTR_RELAXED #include "pycore_pystate.h" // _PyInterpreterState_GET() +#include "pycore_stackref.h" #include "pycore_uniqueid.h" // _PyObject_ThreadIncrefSlow() // This value is added to `ob_ref_shared` for objects that use deferred @@ -94,6 +95,14 @@ PyAPI_FUNC(void) _Py_NO_RETURN _Py_FatalRefcountErrorFunc( #define _Py_FatalRefcountError(message) \ _Py_FatalRefcountErrorFunc(__func__, (message)) +#define _PyReftracerTrack(obj, operation) \ + do { \ + struct _reftracer_runtime_state *tracer = &_PyRuntime.ref_tracer; \ + if (tracer->tracer_func != NULL) { \ + void *data = tracer->tracer_data; \ + tracer->tracer_func((obj), (operation), data); \ + } \ + } while(0) #ifdef Py_REF_DEBUG /* The symbol is only exposed in the API for the sake of extensions @@ -208,11 +217,7 @@ _Py_DECREF_SPECIALIZED(PyObject *op, const destructor destruct) #ifdef Py_TRACE_REFS _Py_ForgetReference(op); #endif - struct _reftracer_runtime_state *tracer = &_PyRuntime.ref_tracer; - if (tracer->tracer_func != NULL) { - void* data = tracer->tracer_data; - tracer->tracer_func(op, PyRefTracer_DESTROY, data); - } + _PyReftracerTrack(op, PyRefTracer_DESTROY); destruct(op); } } @@ -591,6 +596,20 @@ _Py_TryIncrefCompare(PyObject **src, PyObject *op) return 1; } +static inline int +_Py_TryIncrefCompareStackRef(PyObject **src, PyObject *op, _PyStackRef *out) +{ + if (_Py_IsImmortal(op) || _PyObject_HasDeferredRefcount(op)) { + *out = (_PyStackRef){ .bits = (intptr_t)op | Py_TAG_DEFERRED }; + return 1; + } + if (_Py_TryIncrefCompare(src, op)) { + *out = PyStackRef_FromPyObjectSteal(op); + return 1; + } + return 0; +} + /* Loads and increfs an object from ptr, which may contain a NULL value. Safe with concurrent (atomic) updates to ptr. NOTE: The writer must set maybe-weakref on the stored object! 
*/ diff --git a/Include/internal/pycore_optimizer.h b/Include/internal/pycore_optimizer.h index f92c0a0cddf906..6d70b42f708854 100644 --- a/Include/internal/pycore_optimizer.h +++ b/Include/internal/pycore_optimizer.h @@ -58,7 +58,8 @@ typedef struct { uint16_t error_target; }; }; - uint64_t operand; // A cache entry + uint64_t operand0; // A cache entry + uint64_t operand1; } _PyUOpInstruction; typedef struct { diff --git a/Include/internal/pycore_pymem.h b/Include/internal/pycore_pymem.h index dd6b0762370c92..5bb34001aab1b4 100644 --- a/Include/internal/pycore_pymem.h +++ b/Include/internal/pycore_pymem.h @@ -120,11 +120,25 @@ extern int _PyMem_DebugEnabled(void); extern void _PyMem_FreeDelayed(void *ptr); // Enqueue an object to be freed possibly after some delay -extern void _PyObject_FreeDelayed(void *ptr); +#ifdef Py_GIL_DISABLED +extern void _PyObject_XDecRefDelayed(PyObject *obj); +#else +static inline void _PyObject_XDecRefDelayed(PyObject *obj) +{ + Py_XDECREF(obj); +} +#endif // Periodically process delayed free requests. extern void _PyMem_ProcessDelayed(PyThreadState *tstate); + +// Periodically process delayed free requests when the world is stopped. +// Notify of any objects which should be freed. +typedef void (*delayed_dealloc_cb)(PyObject *, void *); +extern void _PyMem_ProcessDelayedNoDealloc(PyThreadState *tstate, + delayed_dealloc_cb cb, void *state); + // Abandon all thread-local delayed free requests and push them to the // interpreter's queue. extern void _PyMem_AbandonDelayed(PyThreadState *tstate); diff --git a/Include/internal/pycore_pystate.h b/Include/internal/pycore_pystate.h index fade55945b7dbf..54d8803bc0bdb6 100644 --- a/Include/internal/pycore_pystate.h +++ b/Include/internal/pycore_pystate.h @@ -82,7 +82,7 @@ PyAPI_FUNC(PyObject *) _PyInterpreterState_GetIDObject(PyInterpreterState *); PyAPI_FUNC(int) _PyInterpreterState_SetRunningMain(PyInterpreterState *); PyAPI_FUNC(void) _PyInterpreterState_SetNotRunningMain(PyInterpreterState *); PyAPI_FUNC(int) _PyInterpreterState_IsRunningMain(PyInterpreterState *); -PyAPI_FUNC(int) _PyInterpreterState_FailIfRunningMain(PyInterpreterState *); +PyAPI_FUNC(void) _PyErr_SetInterpreterAlreadyRunning(void); extern int _PyThreadState_IsRunningMain(PyThreadState *); extern void _PyInterpreterState_ReinitRunningMain(PyThreadState *); @@ -141,6 +141,12 @@ _PyThreadState_GET(void) #endif } +static inline int +_PyThreadState_IsAttached(PyThreadState *tstate) +{ + return (_Py_atomic_load_int_relaxed(&tstate->state) == _Py_THREAD_ATTACHED); +} + // Attaches the current thread to the interpreter. // // This may block while acquiring the GIL (if the GIL is enabled) or while @@ -263,6 +269,15 @@ extern int _PyOS_InterruptOccurred(PyThreadState *tstate); #define HEAD_UNLOCK(runtime) \ PyMutex_Unlock(&(runtime)->interpreters.mutex) +#define _Py_FOR_EACH_TSTATE_UNLOCKED(interp, t) \ + for (PyThreadState *t = interp->threads.head; t; t = t->next) +#define _Py_FOR_EACH_TSTATE_BEGIN(interp, t) \ + HEAD_LOCK(interp->runtime); \ + _Py_FOR_EACH_TSTATE_UNLOCKED(interp, t) +#define _Py_FOR_EACH_TSTATE_END(interp) \ + HEAD_UNLOCK(interp->runtime) + + // Get the configuration of the current interpreter. // The caller must hold the GIL. // Export for test_peg_generator.
diff --git a/Include/internal/pycore_runtime.h b/Include/internal/pycore_runtime.h index 7f592aa6cf9f05..2f2cec22cf1589 100644 --- a/Include/internal/pycore_runtime.h +++ b/Include/internal/pycore_runtime.h @@ -11,7 +11,7 @@ extern "C" { #include "pycore_atexit.h" // struct _atexit_runtime_state #include "pycore_audit.h" // _Py_AuditHookEntry #include "pycore_ceval_state.h" // struct _ceval_runtime_state -#include "pycore_crossinterp.h" // struct _xidregistry +#include "pycore_crossinterp.h" // _PyXI_global_state_t #include "pycore_debug_offsets.h" // _Py_DebugOffsets #include "pycore_faulthandler.h" // struct _faulthandler_runtime_state #include "pycore_floatobject.h" // struct _Py_float_runtime_state @@ -106,7 +106,7 @@ typedef struct pyruntimestate { tools. */ /* cross-interpreter data and utils */ - struct _xi_runtime_state xi; + _PyXI_global_state_t xi; struct _pymem_allocators allocators; struct _obmalloc_global_state obmalloc; diff --git a/Include/internal/pycore_runtime_init.h b/Include/internal/pycore_runtime_init.h index 8a8f47695fb8b0..9f6748945bab36 100644 --- a/Include/internal/pycore_runtime_init.h +++ b/Include/internal/pycore_runtime_init.h @@ -118,6 +118,9 @@ extern PyTypeObject _PyExc_MemoryError; { \ .id_refcount = -1, \ ._whence = _PyInterpreterState_WHENCE_NOTSET, \ + .threads = { \ + .preallocated = &(INTERP)._initial_thread, \ + }, \ .imports = IMPORTS_INIT, \ .ceval = { \ .recursion_limit = Py_DEFAULT_RECURSION_LIMIT, \ diff --git a/Include/internal/pycore_runtime_init_generated.h b/Include/internal/pycore_runtime_init_generated.h index 5d404c8fd91ca6..b631382cae058a 100644 --- a/Include/internal/pycore_runtime_init_generated.h +++ b/Include/internal/pycore_runtime_init_generated.h @@ -980,6 +980,7 @@ extern "C" { INIT_ID(hi), \ INIT_ID(hook), \ INIT_ID(hour), \ + INIT_ID(id), \ INIT_ID(ident), \ INIT_ID(identity_hint), \ INIT_ID(ignore), \ diff --git a/Include/internal/pycore_typeobject.h b/Include/internal/pycore_typeobject.h index 5debdd68fe94ca..7b39d07f976ee3 100644 --- a/Include/internal/pycore_typeobject.h +++ b/Include/internal/pycore_typeobject.h @@ -269,6 +269,16 @@ extern unsigned int _PyType_GetVersionForCurrentState(PyTypeObject *tp); PyAPI_FUNC(void) _PyType_SetVersion(PyTypeObject *tp, unsigned int version); PyTypeObject *_PyType_LookupByVersion(unsigned int version); +// Function pointer type for user-defined validation function that will be +// called by _PyType_Validate(). +// It should return 0 if the validation is passed, otherwise it will return -1. +typedef int (*_py_validate_type)(PyTypeObject *); + +// It will verify the ``ty`` through user-defined validation function ``validate``, +// and if the validation is passed, it will set the ``tp_version`` as valid +// tp_version_tag from the ``ty``. 
+extern int _PyType_Validate(PyTypeObject *ty, _py_validate_type validate, unsigned int *tp_version); + #ifdef __cplusplus } #endif diff --git a/Include/internal/pycore_unicodeobject_generated.h b/Include/internal/pycore_unicodeobject_generated.h index d0bc8d7186c053..24cec3a4fded7a 100644 --- a/Include/internal/pycore_unicodeobject_generated.h +++ b/Include/internal/pycore_unicodeobject_generated.h @@ -1680,6 +1680,10 @@ _PyUnicode_InitStaticStrings(PyInterpreterState *interp) { _PyUnicode_InternStatic(interp, &string); assert(_PyUnicode_CheckConsistency(string, 1)); assert(PyUnicode_GET_LENGTH(string) != 1); + string = &_Py_ID(id); + _PyUnicode_InternStatic(interp, &string); + assert(_PyUnicode_CheckConsistency(string, 1)); + assert(PyUnicode_GET_LENGTH(string) != 1); string = &_Py_ID(ident); _PyUnicode_InternStatic(interp, &string); assert(_PyUnicode_CheckConsistency(string, 1)); diff --git a/Include/internal/pycore_uop_ids.h b/Include/internal/pycore_uop_ids.h index 55416d2aae1e1a..fab4ce6a25b347 100644 --- a/Include/internal/pycore_uop_ids.h +++ b/Include/internal/pycore_uop_ids.h @@ -63,90 +63,91 @@ extern "C" { #define _CHECK_FUNCTION 333 #define _CHECK_FUNCTION_EXACT_ARGS 334 #define _CHECK_FUNCTION_VERSION 335 -#define _CHECK_FUNCTION_VERSION_KW 336 -#define _CHECK_IS_NOT_PY_CALLABLE 337 -#define _CHECK_IS_NOT_PY_CALLABLE_KW 338 -#define _CHECK_MANAGED_OBJECT_HAS_VALUES 339 -#define _CHECK_METHOD_VERSION 340 -#define _CHECK_METHOD_VERSION_KW 341 -#define _CHECK_PEP_523 342 -#define _CHECK_PERIODIC 343 -#define _CHECK_PERIODIC_IF_NOT_YIELD_FROM 344 -#define _CHECK_STACK_SPACE 345 -#define _CHECK_STACK_SPACE_OPERAND 346 -#define _CHECK_VALIDITY 347 -#define _CHECK_VALIDITY_AND_SET_IP 348 -#define _COMPARE_OP 349 -#define _COMPARE_OP_FLOAT 350 -#define _COMPARE_OP_INT 351 -#define _COMPARE_OP_STR 352 -#define _CONTAINS_OP 353 +#define _CHECK_FUNCTION_VERSION_INLINE 336 +#define _CHECK_FUNCTION_VERSION_KW 337 +#define _CHECK_IS_NOT_PY_CALLABLE 338 +#define _CHECK_IS_NOT_PY_CALLABLE_KW 339 +#define _CHECK_MANAGED_OBJECT_HAS_VALUES 340 +#define _CHECK_METHOD_VERSION 341 +#define _CHECK_METHOD_VERSION_KW 342 +#define _CHECK_PEP_523 343 +#define _CHECK_PERIODIC 344 +#define _CHECK_PERIODIC_IF_NOT_YIELD_FROM 345 +#define _CHECK_STACK_SPACE 346 +#define _CHECK_STACK_SPACE_OPERAND 347 +#define _CHECK_VALIDITY 348 +#define _CHECK_VALIDITY_AND_SET_IP 349 +#define _COMPARE_OP 350 +#define _COMPARE_OP_FLOAT 351 +#define _COMPARE_OP_INT 352 +#define _COMPARE_OP_STR 353 +#define _CONTAINS_OP 354 #define _CONTAINS_OP_DICT CONTAINS_OP_DICT #define _CONTAINS_OP_SET CONTAINS_OP_SET #define _CONVERT_VALUE CONVERT_VALUE #define _COPY COPY #define _COPY_FREE_VARS COPY_FREE_VARS -#define _CREATE_INIT_FRAME 354 +#define _CREATE_INIT_FRAME 355 #define _DELETE_ATTR DELETE_ATTR #define _DELETE_DEREF DELETE_DEREF #define _DELETE_FAST DELETE_FAST #define _DELETE_GLOBAL DELETE_GLOBAL #define _DELETE_NAME DELETE_NAME #define _DELETE_SUBSCR DELETE_SUBSCR -#define _DEOPT 355 +#define _DEOPT 356 #define _DICT_MERGE DICT_MERGE #define _DICT_UPDATE DICT_UPDATE -#define _DO_CALL 356 -#define _DO_CALL_FUNCTION_EX 357 -#define _DO_CALL_KW 358 -#define _DYNAMIC_EXIT 359 +#define _DO_CALL 357 +#define _DO_CALL_FUNCTION_EX 358 +#define _DO_CALL_KW 359 +#define _DYNAMIC_EXIT 360 #define _END_SEND END_SEND -#define _ERROR_POP_N 360 +#define _ERROR_POP_N 361 #define _EXIT_INIT_CHECK EXIT_INIT_CHECK -#define _EXPAND_METHOD 361 -#define _EXPAND_METHOD_KW 362 -#define _FATAL_ERROR 363 +#define _EXPAND_METHOD 362 +#define 
_EXPAND_METHOD_KW 363 +#define _FATAL_ERROR 364 #define _FORMAT_SIMPLE FORMAT_SIMPLE #define _FORMAT_WITH_SPEC FORMAT_WITH_SPEC -#define _FOR_ITER 364 -#define _FOR_ITER_GEN_FRAME 365 -#define _FOR_ITER_TIER_TWO 366 +#define _FOR_ITER 365 +#define _FOR_ITER_GEN_FRAME 366 +#define _FOR_ITER_TIER_TWO 367 #define _GET_AITER GET_AITER #define _GET_ANEXT GET_ANEXT #define _GET_AWAITABLE GET_AWAITABLE #define _GET_ITER GET_ITER #define _GET_LEN GET_LEN #define _GET_YIELD_FROM_ITER GET_YIELD_FROM_ITER -#define _GUARD_BOTH_FLOAT 367 -#define _GUARD_BOTH_INT 368 -#define _GUARD_BOTH_UNICODE 369 -#define _GUARD_BUILTINS_VERSION_PUSH_KEYS 370 -#define _GUARD_DORV_NO_DICT 371 -#define _GUARD_DORV_VALUES_INST_ATTR_FROM_DICT 372 -#define _GUARD_GLOBALS_VERSION 373 -#define _GUARD_GLOBALS_VERSION_PUSH_KEYS 374 -#define _GUARD_IS_FALSE_POP 375 -#define _GUARD_IS_NONE_POP 376 -#define _GUARD_IS_NOT_NONE_POP 377 -#define _GUARD_IS_TRUE_POP 378 -#define _GUARD_KEYS_VERSION 379 -#define _GUARD_NOS_FLOAT 380 -#define _GUARD_NOS_INT 381 -#define _GUARD_NOT_EXHAUSTED_LIST 382 -#define _GUARD_NOT_EXHAUSTED_RANGE 383 -#define _GUARD_NOT_EXHAUSTED_TUPLE 384 -#define _GUARD_TOS_FLOAT 385 -#define _GUARD_TOS_INT 386 -#define _GUARD_TYPE_VERSION 387 +#define _GUARD_BOTH_FLOAT 368 +#define _GUARD_BOTH_INT 369 +#define _GUARD_BOTH_UNICODE 370 +#define _GUARD_BUILTINS_VERSION_PUSH_KEYS 371 +#define _GUARD_DORV_NO_DICT 372 +#define _GUARD_DORV_VALUES_INST_ATTR_FROM_DICT 373 +#define _GUARD_GLOBALS_VERSION 374 +#define _GUARD_GLOBALS_VERSION_PUSH_KEYS 375 +#define _GUARD_IS_FALSE_POP 376 +#define _GUARD_IS_NONE_POP 377 +#define _GUARD_IS_NOT_NONE_POP 378 +#define _GUARD_IS_TRUE_POP 379 +#define _GUARD_KEYS_VERSION 380 +#define _GUARD_NOS_FLOAT 381 +#define _GUARD_NOS_INT 382 +#define _GUARD_NOT_EXHAUSTED_LIST 383 +#define _GUARD_NOT_EXHAUSTED_RANGE 384 +#define _GUARD_NOT_EXHAUSTED_TUPLE 385 +#define _GUARD_TOS_FLOAT 386 +#define _GUARD_TOS_INT 387 +#define _GUARD_TYPE_VERSION 388 #define _IMPORT_FROM IMPORT_FROM #define _IMPORT_NAME IMPORT_NAME -#define _INIT_CALL_BOUND_METHOD_EXACT_ARGS 388 -#define _INIT_CALL_PY_EXACT_ARGS 389 -#define _INIT_CALL_PY_EXACT_ARGS_0 390 -#define _INIT_CALL_PY_EXACT_ARGS_1 391 -#define _INIT_CALL_PY_EXACT_ARGS_2 392 -#define _INIT_CALL_PY_EXACT_ARGS_3 393 -#define _INIT_CALL_PY_EXACT_ARGS_4 394 +#define _INIT_CALL_BOUND_METHOD_EXACT_ARGS 389 +#define _INIT_CALL_PY_EXACT_ARGS 390 +#define _INIT_CALL_PY_EXACT_ARGS_0 391 +#define _INIT_CALL_PY_EXACT_ARGS_1 392 +#define _INIT_CALL_PY_EXACT_ARGS_2 393 +#define _INIT_CALL_PY_EXACT_ARGS_3 394 +#define _INIT_CALL_PY_EXACT_ARGS_4 395 #define _INSTRUMENTED_CALL_FUNCTION_EX INSTRUMENTED_CALL_FUNCTION_EX #define _INSTRUMENTED_CALL_KW INSTRUMENTED_CALL_KW #define _INSTRUMENTED_FOR_ITER INSTRUMENTED_FOR_ITER @@ -158,142 +159,142 @@ extern "C" { #define _INSTRUMENTED_POP_JUMP_IF_NONE INSTRUMENTED_POP_JUMP_IF_NONE #define _INSTRUMENTED_POP_JUMP_IF_NOT_NONE INSTRUMENTED_POP_JUMP_IF_NOT_NONE #define _INSTRUMENTED_POP_JUMP_IF_TRUE INSTRUMENTED_POP_JUMP_IF_TRUE -#define _INTERNAL_INCREMENT_OPT_COUNTER 395 -#define _IS_NONE 396 +#define _INTERNAL_INCREMENT_OPT_COUNTER 396 +#define _IS_NONE 397 #define _IS_OP IS_OP -#define _ITER_CHECK_LIST 397 -#define _ITER_CHECK_RANGE 398 -#define _ITER_CHECK_TUPLE 399 -#define _ITER_JUMP_LIST 400 -#define _ITER_JUMP_RANGE 401 -#define _ITER_JUMP_TUPLE 402 -#define _ITER_NEXT_LIST 403 -#define _ITER_NEXT_RANGE 404 -#define _ITER_NEXT_TUPLE 405 -#define _JUMP_TO_TOP 406 +#define _ITER_CHECK_LIST 398 +#define _ITER_CHECK_RANGE 
399 +#define _ITER_CHECK_TUPLE 400 +#define _ITER_JUMP_LIST 401 +#define _ITER_JUMP_RANGE 402 +#define _ITER_JUMP_TUPLE 403 +#define _ITER_NEXT_LIST 404 +#define _ITER_NEXT_RANGE 405 +#define _ITER_NEXT_TUPLE 406 +#define _JUMP_TO_TOP 407 #define _LIST_APPEND LIST_APPEND #define _LIST_EXTEND LIST_EXTEND -#define _LOAD_ATTR 407 -#define _LOAD_ATTR_CLASS 408 -#define _LOAD_ATTR_CLASS_0 409 -#define _LOAD_ATTR_CLASS_1 410 +#define _LOAD_ATTR 408 +#define _LOAD_ATTR_CLASS 409 +#define _LOAD_ATTR_CLASS_0 410 +#define _LOAD_ATTR_CLASS_1 411 #define _LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN -#define _LOAD_ATTR_INSTANCE_VALUE 411 -#define _LOAD_ATTR_INSTANCE_VALUE_0 412 -#define _LOAD_ATTR_INSTANCE_VALUE_1 413 -#define _LOAD_ATTR_METHOD_LAZY_DICT 414 -#define _LOAD_ATTR_METHOD_NO_DICT 415 -#define _LOAD_ATTR_METHOD_WITH_VALUES 416 -#define _LOAD_ATTR_MODULE 417 -#define _LOAD_ATTR_NONDESCRIPTOR_NO_DICT 418 -#define _LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES 419 -#define _LOAD_ATTR_PROPERTY_FRAME 420 -#define _LOAD_ATTR_SLOT 421 -#define _LOAD_ATTR_SLOT_0 422 -#define _LOAD_ATTR_SLOT_1 423 -#define _LOAD_ATTR_WITH_HINT 424 +#define _LOAD_ATTR_INSTANCE_VALUE 412 +#define _LOAD_ATTR_INSTANCE_VALUE_0 413 +#define _LOAD_ATTR_INSTANCE_VALUE_1 414 +#define _LOAD_ATTR_METHOD_LAZY_DICT 415 +#define _LOAD_ATTR_METHOD_NO_DICT 416 +#define _LOAD_ATTR_METHOD_WITH_VALUES 417 +#define _LOAD_ATTR_MODULE 418 +#define _LOAD_ATTR_NONDESCRIPTOR_NO_DICT 419 +#define _LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES 420 +#define _LOAD_ATTR_PROPERTY_FRAME 421 +#define _LOAD_ATTR_SLOT 422 +#define _LOAD_ATTR_SLOT_0 423 +#define _LOAD_ATTR_SLOT_1 424 +#define _LOAD_ATTR_WITH_HINT 425 #define _LOAD_BUILD_CLASS LOAD_BUILD_CLASS -#define _LOAD_BYTECODE 425 +#define _LOAD_BYTECODE 426 #define _LOAD_COMMON_CONSTANT LOAD_COMMON_CONSTANT #define _LOAD_CONST LOAD_CONST #define _LOAD_CONST_IMMORTAL LOAD_CONST_IMMORTAL -#define _LOAD_CONST_INLINE 426 -#define _LOAD_CONST_INLINE_BORROW 427 -#define _LOAD_CONST_INLINE_BORROW_WITH_NULL 428 -#define _LOAD_CONST_INLINE_WITH_NULL 429 +#define _LOAD_CONST_INLINE 427 +#define _LOAD_CONST_INLINE_BORROW 428 +#define _LOAD_CONST_INLINE_BORROW_WITH_NULL 429 +#define _LOAD_CONST_INLINE_WITH_NULL 430 #define _LOAD_DEREF LOAD_DEREF -#define _LOAD_FAST 430 -#define _LOAD_FAST_0 431 -#define _LOAD_FAST_1 432 -#define _LOAD_FAST_2 433 -#define _LOAD_FAST_3 434 -#define _LOAD_FAST_4 435 -#define _LOAD_FAST_5 436 -#define _LOAD_FAST_6 437 -#define _LOAD_FAST_7 438 +#define _LOAD_FAST 431 +#define _LOAD_FAST_0 432 +#define _LOAD_FAST_1 433 +#define _LOAD_FAST_2 434 +#define _LOAD_FAST_3 435 +#define _LOAD_FAST_4 436 +#define _LOAD_FAST_5 437 +#define _LOAD_FAST_6 438 +#define _LOAD_FAST_7 439 #define _LOAD_FAST_AND_CLEAR LOAD_FAST_AND_CLEAR #define _LOAD_FAST_CHECK LOAD_FAST_CHECK #define _LOAD_FAST_LOAD_FAST LOAD_FAST_LOAD_FAST #define _LOAD_FROM_DICT_OR_DEREF LOAD_FROM_DICT_OR_DEREF #define _LOAD_FROM_DICT_OR_GLOBALS LOAD_FROM_DICT_OR_GLOBALS -#define _LOAD_GLOBAL 439 -#define _LOAD_GLOBAL_BUILTINS 440 -#define _LOAD_GLOBAL_BUILTINS_FROM_KEYS 441 -#define _LOAD_GLOBAL_MODULE 442 -#define _LOAD_GLOBAL_MODULE_FROM_KEYS 443 +#define _LOAD_GLOBAL 440 +#define _LOAD_GLOBAL_BUILTINS 441 +#define _LOAD_GLOBAL_BUILTINS_FROM_KEYS 442 +#define _LOAD_GLOBAL_MODULE 443 +#define _LOAD_GLOBAL_MODULE_FROM_KEYS 444 #define _LOAD_LOCALS LOAD_LOCALS #define _LOAD_NAME LOAD_NAME -#define _LOAD_SMALL_INT 444 -#define _LOAD_SMALL_INT_0 445 -#define _LOAD_SMALL_INT_1 446 -#define _LOAD_SMALL_INT_2 447 -#define 
_LOAD_SMALL_INT_3 448 +#define _LOAD_SMALL_INT 445 +#define _LOAD_SMALL_INT_0 446 +#define _LOAD_SMALL_INT_1 447 +#define _LOAD_SMALL_INT_2 448 +#define _LOAD_SMALL_INT_3 449 #define _LOAD_SPECIAL LOAD_SPECIAL #define _LOAD_SUPER_ATTR_ATTR LOAD_SUPER_ATTR_ATTR #define _LOAD_SUPER_ATTR_METHOD LOAD_SUPER_ATTR_METHOD -#define _MAKE_CALLARGS_A_TUPLE 449 +#define _MAKE_CALLARGS_A_TUPLE 450 #define _MAKE_CELL MAKE_CELL #define _MAKE_FUNCTION MAKE_FUNCTION -#define _MAKE_WARM 450 +#define _MAKE_WARM 451 #define _MAP_ADD MAP_ADD #define _MATCH_CLASS MATCH_CLASS #define _MATCH_KEYS MATCH_KEYS #define _MATCH_MAPPING MATCH_MAPPING #define _MATCH_SEQUENCE MATCH_SEQUENCE -#define _MAYBE_EXPAND_METHOD 451 -#define _MAYBE_EXPAND_METHOD_KW 452 -#define _MONITOR_CALL 453 -#define _MONITOR_JUMP_BACKWARD 454 -#define _MONITOR_RESUME 455 +#define _MAYBE_EXPAND_METHOD 452 +#define _MAYBE_EXPAND_METHOD_KW 453 +#define _MONITOR_CALL 454 +#define _MONITOR_JUMP_BACKWARD 455 +#define _MONITOR_RESUME 456 #define _NOP NOP #define _POP_EXCEPT POP_EXCEPT -#define _POP_JUMP_IF_FALSE 456 -#define _POP_JUMP_IF_TRUE 457 +#define _POP_JUMP_IF_FALSE 457 +#define _POP_JUMP_IF_TRUE 458 #define _POP_TOP POP_TOP -#define _POP_TOP_LOAD_CONST_INLINE_BORROW 458 +#define _POP_TOP_LOAD_CONST_INLINE_BORROW 459 #define _PUSH_EXC_INFO PUSH_EXC_INFO -#define _PUSH_FRAME 459 +#define _PUSH_FRAME 460 #define _PUSH_NULL PUSH_NULL -#define _PY_FRAME_GENERAL 460 -#define _PY_FRAME_KW 461 -#define _QUICKEN_RESUME 462 -#define _REPLACE_WITH_TRUE 463 +#define _PY_FRAME_GENERAL 461 +#define _PY_FRAME_KW 462 +#define _QUICKEN_RESUME 463 +#define _REPLACE_WITH_TRUE 464 #define _RESUME_CHECK RESUME_CHECK #define _RETURN_GENERATOR RETURN_GENERATOR #define _RETURN_VALUE RETURN_VALUE -#define _SAVE_RETURN_OFFSET 464 -#define _SEND 465 -#define _SEND_GEN_FRAME 466 +#define _SAVE_RETURN_OFFSET 465 +#define _SEND 466 +#define _SEND_GEN_FRAME 467 #define _SETUP_ANNOTATIONS SETUP_ANNOTATIONS #define _SET_ADD SET_ADD #define _SET_FUNCTION_ATTRIBUTE SET_FUNCTION_ATTRIBUTE #define _SET_UPDATE SET_UPDATE -#define _START_EXECUTOR 467 -#define _STORE_ATTR 468 -#define _STORE_ATTR_INSTANCE_VALUE 469 -#define _STORE_ATTR_SLOT 470 -#define _STORE_ATTR_WITH_HINT 471 +#define _START_EXECUTOR 468 +#define _STORE_ATTR 469 +#define _STORE_ATTR_INSTANCE_VALUE 470 +#define _STORE_ATTR_SLOT 471 +#define _STORE_ATTR_WITH_HINT 472 #define _STORE_DEREF STORE_DEREF -#define _STORE_FAST 472 -#define _STORE_FAST_0 473 -#define _STORE_FAST_1 474 -#define _STORE_FAST_2 475 -#define _STORE_FAST_3 476 -#define _STORE_FAST_4 477 -#define _STORE_FAST_5 478 -#define _STORE_FAST_6 479 -#define _STORE_FAST_7 480 +#define _STORE_FAST 473 +#define _STORE_FAST_0 474 +#define _STORE_FAST_1 475 +#define _STORE_FAST_2 476 +#define _STORE_FAST_3 477 +#define _STORE_FAST_4 478 +#define _STORE_FAST_5 479 +#define _STORE_FAST_6 480 +#define _STORE_FAST_7 481 #define _STORE_FAST_LOAD_FAST STORE_FAST_LOAD_FAST #define _STORE_FAST_STORE_FAST STORE_FAST_STORE_FAST #define _STORE_GLOBAL STORE_GLOBAL #define _STORE_NAME STORE_NAME -#define _STORE_SLICE 481 -#define _STORE_SUBSCR 482 +#define _STORE_SLICE 482 +#define _STORE_SUBSCR 483 #define _STORE_SUBSCR_DICT STORE_SUBSCR_DICT #define _STORE_SUBSCR_LIST_INT STORE_SUBSCR_LIST_INT #define _SWAP SWAP -#define _TIER2_RESUME_CHECK 483 -#define _TO_BOOL 484 +#define _TIER2_RESUME_CHECK 484 +#define _TO_BOOL 485 #define _TO_BOOL_BOOL TO_BOOL_BOOL #define _TO_BOOL_INT TO_BOOL_INT #define _TO_BOOL_LIST TO_BOOL_LIST @@ -303,13 +304,13 @@ extern "C" { #define 
_UNARY_NEGATIVE UNARY_NEGATIVE #define _UNARY_NOT UNARY_NOT #define _UNPACK_EX UNPACK_EX -#define _UNPACK_SEQUENCE 485 +#define _UNPACK_SEQUENCE 486 #define _UNPACK_SEQUENCE_LIST UNPACK_SEQUENCE_LIST #define _UNPACK_SEQUENCE_TUPLE UNPACK_SEQUENCE_TUPLE #define _UNPACK_SEQUENCE_TWO_TUPLE UNPACK_SEQUENCE_TWO_TUPLE #define _WITH_EXCEPT_START WITH_EXCEPT_START #define _YIELD_VALUE YIELD_VALUE -#define MAX_UOP_ID 485 +#define MAX_UOP_ID 486 #ifdef __cplusplus } diff --git a/Include/internal/pycore_uop_metadata.h b/Include/internal/pycore_uop_metadata.h index 98a41d1f23f569..1b2880cb6bb67e 100644 --- a/Include/internal/pycore_uop_metadata.h +++ b/Include/internal/pycore_uop_metadata.h @@ -213,6 +213,7 @@ const uint16_t _PyUop_Flags[MAX_UOP_ID+1] = { [_MAYBE_EXPAND_METHOD] = HAS_ARG_FLAG, [_PY_FRAME_GENERAL] = HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG, [_CHECK_FUNCTION_VERSION] = HAS_ARG_FLAG | HAS_EXIT_FLAG, + [_CHECK_FUNCTION_VERSION_INLINE] = HAS_EXIT_FLAG, [_CHECK_METHOD_VERSION] = HAS_ARG_FLAG | HAS_EXIT_FLAG, [_EXPAND_METHOD] = HAS_ARG_FLAG, [_CHECK_IS_NOT_PY_CALLABLE] = HAS_ARG_FLAG | HAS_EXIT_FLAG, @@ -353,6 +354,7 @@ const char *const _PyOpcode_uop_name[MAX_UOP_ID+1] = { [_CHECK_FUNCTION] = "_CHECK_FUNCTION", [_CHECK_FUNCTION_EXACT_ARGS] = "_CHECK_FUNCTION_EXACT_ARGS", [_CHECK_FUNCTION_VERSION] = "_CHECK_FUNCTION_VERSION", + [_CHECK_FUNCTION_VERSION_INLINE] = "_CHECK_FUNCTION_VERSION_INLINE", [_CHECK_FUNCTION_VERSION_KW] = "_CHECK_FUNCTION_VERSION_KW", [_CHECK_IS_NOT_PY_CALLABLE] = "_CHECK_IS_NOT_PY_CALLABLE", [_CHECK_IS_NOT_PY_CALLABLE_KW] = "_CHECK_IS_NOT_PY_CALLABLE_KW", @@ -965,6 +967,8 @@ int _PyUop_num_popped(int opcode, int oparg) return 2 + oparg; case _CHECK_FUNCTION_VERSION: return 0; + case _CHECK_FUNCTION_VERSION_INLINE: + return 0; case _CHECK_METHOD_VERSION: return 0; case _EXPAND_METHOD: diff --git a/Include/marshal.h b/Include/marshal.h index f8b0de80cfc38d..f773587bdd0429 100644 --- a/Include/marshal.h +++ b/Include/marshal.h @@ -13,7 +13,7 @@ PyAPI_FUNC(PyObject *) PyMarshal_ReadObjectFromString(const char *, Py_ssize_t); PyAPI_FUNC(PyObject *) PyMarshal_WriteObjectToString(PyObject *, int); -#define Py_MARSHAL_VERSION 4 +#define Py_MARSHAL_VERSION 5 PyAPI_FUNC(long) PyMarshal_ReadLongFromFile(FILE *); PyAPI_FUNC(int) PyMarshal_ReadShortFromFile(FILE *); diff --git a/Include/patchlevel.h b/Include/patchlevel.h index f7b33a8038570b..e99c3a66f84e4f 100644 --- a/Include/patchlevel.h +++ b/Include/patchlevel.h @@ -20,10 +20,10 @@ #define PY_MINOR_VERSION 14 #define PY_MICRO_VERSION 0 #define PY_RELEASE_LEVEL PY_RELEASE_LEVEL_ALPHA -#define PY_RELEASE_SERIAL 1 +#define PY_RELEASE_SERIAL 2 /* Version as a string */ -#define PY_VERSION "3.14.0a1+" +#define PY_VERSION "3.14.0a2+" /*--end constants--*/ /* Version as a single 4-byte hex number, e.g. 0x010502B2 == 1.5.2b2. 
diff --git a/InternalDocs/garbage_collector.md b/InternalDocs/garbage_collector.md index d624cf4befd31a..5de4aa05398b55 100644 --- a/InternalDocs/garbage_collector.md +++ b/InternalDocs/garbage_collector.md @@ -108,7 +108,7 @@ As is explained later in the [Optimization: reusing fields to save memory](#optimization-reusing-fields-to-save-memory) section, these two extra fields are normally used to keep doubly linked lists of all the objects tracked by the garbage collector (these lists are the GC generations, more on -that in the [Optimization: generations](#Optimization-generations) section), but +that in the [Optimization: incremental collection](#Optimization-incremental-collection) section), but they are also reused to fulfill other purposes when the full doubly linked list structure is not needed as a memory optimization. @@ -351,38 +351,90 @@ follows these steps in order: the reference counts fall to 0, triggering the destruction of all unreachable objects. -Optimization: generations -========================= +Optimization: incremental collection +==================================== -In order to limit the time each garbage collection takes, the GC -implementation for the default build uses a popular optimization: -generations. The main idea behind this concept is the assumption that most -objects have a very short lifespan and can thus be collected soon after their -creation. This has proven to be very close to the reality of many Python +In order to bound the length of each garbage collection pause, the GC implementation +for the default build uses incremental collection with two generations. + +Generational garbage collection takes advantage of what is known as the weak +generational hypothesis: Most objects die young. +This has proven to be very close to the reality of many Python programs as many temporary objects are created and destroyed very quickly. To take advantage of this fact, all container objects are segregated into -three spaces/generations. Every new -object starts in the first generation (generation 0). The previous algorithm is -executed only over the objects of a particular generation and if an object -survives a collection of its generation it will be moved to the next one -(generation 1), where it will be surveyed for collection less often. If -the same object survives another GC round in this new generation (generation 1) -it will be moved to the last generation (generation 2) where it will be -surveyed the least often. - -The GC implementation for the free-threaded build does not use multiple -generations. Every collection operates on the entire heap. +two generations: young and old. Every new object starts in the young generation. +Each garbage collection scans the entire young generation and part of the old generation. + +The time taken to scan the young generation can be controlled by controlling its +size, but the size of the old generation cannot be controlled. +In order to keep pause times down, scanning of the old generation of the heap +occurs in increments. + +To keep track of what has been scanned, the old generation contains two lists: + +* Those objects that have not yet been scanned, referred to as the `pending` list. +* Those objects that have been scanned, referred to as the `visited` list. + +To detect and collect all unreachable objects in the heap, the garbage collector +must scan the whole heap. This whole heap scan is called a full scavenge. + +Increments +---------- + +Each full scavenge is performed in a series of increments. 
+For each full scavenge, the combined increments will cover the whole heap. + +Each increment is made up of: + +* The young generation +* The old generation's least recently scanned objects +* All objects reachable from those objects that have not yet been scanned this full scavenge + +The surviving objects (those that are not collected) are moved to the back of the +`visited` list in the old generation. + +When a full scavenge starts, no objects in the heap are considered to have been scanned, +so all objects in the old generation must be in the `pending` space. +When all objects in the heap have been scanned, a cycle ends, and all objects are moved +to the `pending` list again. To avoid having to traverse the entire list, which list is +`pending` and which is `visited` is determined by a field in the `GCState` struct. +The `visited` and `pending` lists can be swapped by toggling this bit. + +Correctness +----------- + +The [algorithm for identifying cycles](#Identifying-reference-cycles) will find all +unreachable cycles in a list of objects, but will not find any cycles that are +even partly outside of that list. +Therefore, to be guaranteed that a full scavenge will find all unreachable cycles, +each cycle must be fully contained within a single increment. + +To make sure that no partial cycles are included in the increment, we perform a +[transitive closure](https://en.wikipedia.org/wiki/Transitive_closure) +over reachable, unscanned objects from the initial increment. +Since the transitive closure of objects reachable from an object must be a (non-strict) +superset of any unreachable cycle including that object, we are guaranteed that a +transitive closure cannot contain any partial cycles. +We can exclude scanned objects, as they must have been reachable when scanned. +If a scanned object becomes part of an unreachable cycle after being scanned, it will +not be collected at this time, but it will be collected in the next full scavenge. + +> [!NOTE] +> The GC implementation for the free-threaded build does not use incremental collection. +> Every collection operates on the entire heap. In order to decide when to run, the collector keeps track of the number of object allocations and deallocations since the last collection. When the number of -allocations minus the number of deallocations exceeds `threshold_0`, -collection starts. Initially only generation 0 is examined. If generation 0 has -been examined more than `threshold_1` times since generation 1 has been -examined, then generation 1 is examined as well. With generation 2, -things are a bit more complicated; see -[Collecting the oldest generation](#Collecting-the-oldest-generation) for -more information. These thresholds can be examined using the +allocations minus the number of deallocations exceeds `threshold0`, +collection starts. `threshold1` determines the fraction of the old +collection that is included in the increment. +The fraction is inversely proportional to `threshold1`, +as historically a larger `threshold1` meant that old generation +collections were performed less frequently. +`threshold2` is ignored. + +These thresholds can be examined using the [`gc.get_threshold()`](https://docs.python.org/3/library/gc.html#gc.get_threshold) function: @@ -402,8 +454,8 @@ specifically in a generation by calling `gc.collect(generation=NUM)`. ... pass ... - # Move everything to the last generation so it's easier to inspect - # the younger generations.
+ # Move everything to the old generation so it's easier to inspect + # the young generation. >>> gc.collect() 0 @@ -413,40 +465,24 @@ specifically in a generation by calling `gc.collect(generation=NUM)`. >>> x = MyObj() >>> x.self = x - # Initially the object is in the youngest generation. + # Initially the object is in the young generation. >>> gc.get_objects(generation=0) [..., <__main__.MyObj object at 0x7fbcc12a3400>, ...] # After a collection of the youngest generation the object - # moves to the next generation. + # moves to the old generation. >>> gc.collect(generation=0) 0 >>> gc.get_objects(generation=0) [] >>> gc.get_objects(generation=1) + [] + >>> gc.get_objects(generation=2) [..., <__main__.MyObj object at 0x7fbcc12a3400>, ...] ``` -Collecting the oldest generation --------------------------------- - -In addition to the various configurable thresholds, the GC only triggers a full -collection of the oldest generation if the ratio `long_lived_pending / long_lived_total` -is above a given value (hardwired to 25%). The reason is that, while "non-full" -collections (that is, collections of the young and middle generations) will always -examine roughly the same number of objects (determined by the aforementioned -thresholds) the cost of a full collection is proportional to the total -number of long-lived objects, which is virtually unbounded. Indeed, it has -been remarked that doing a full collection every of object -creations entails a dramatic performance degradation in workloads which consist -of creating and storing lots of long-lived objects (for example, building a large list -of GC-tracked objects would show quadratic performance, instead of linear as -expected). Using the above ratio, instead, yields amortized linear performance -in the total number of objects (the effect of which can be summarized thusly: -"each full garbage collection is more and more costly as the number of objects -grows, but we do fewer and fewer of them"). Optimization: reusing fields to save memory =========================================== @@ -496,8 +532,8 @@ of `PyGC_Head` discussed in the `Memory layout and object structure`_ section: currently in. Instead, when that's needed, ad hoc tricks (like the `NEXT_MASK_UNREACHABLE` flag) are employed. -Optimization: delay tracking containers -======================================= +Optimization: delayed untracking containers +=========================================== Certain types of containers cannot participate in a reference cycle, and so do not need to be tracked by the garbage collector. Untracking these objects @@ -510,26 +546,17 @@ a container: 2. When the container is examined by the garbage collector. As a general rule, instances of atomic types aren't tracked and instances of -non-atomic types (containers, user-defined objects...) are. However, some -type-specific optimizations can be present in order to suppress the garbage -collector footprint of simple instances. Some examples of native types that -benefit from delayed tracking: - -- Tuples containing only immutable objects (integers, strings etc, - and recursively, tuples of immutable objects) do not need to be tracked. The - interpreter creates a large number of tuples, many of which will not survive - until garbage collection. It is therefore not worthwhile to untrack eligible - tuples at creation time. Instead, all tuples except the empty tuple are tracked - when created. During garbage collection it is determined whether any surviving - tuples can be untracked. 
A tuple can be untracked if all of its contents are - already not tracked. Tuples are examined for untracking in all garbage collection - cycles. It may take more than one cycle to untrack a tuple. - -- Dictionaries containing only immutable objects also do not need to be tracked. - Dictionaries are untracked when created. If a tracked item is inserted into a - dictionary (either as a key or value), the dictionary becomes tracked. During a - full garbage collection (all generations), the collector will untrack any dictionaries - whose contents are not tracked. +non-atomic types (containers, user-defined objects...) are. + +Tuples containing only immutable objects (integers, strings etc, +and recursively, tuples of immutable objects) do not need to be tracked. The +interpreter creates a large number of tuples, many of which will not survive +until garbage collection. It is therefore not worthwhile to untrack eligible +tuples at creation time. Instead, all tuples except the empty tuple are tracked +when created. During garbage collection it is determined whether any surviving +tuples can be untracked. A tuple can be untracked if all of its contents are +already not tracked. Tuples are examined for untracking in all garbage collection +cycles. The garbage collector module provides the Python function `is_tracked(obj)`, which returns the current tracking status of the object. Subsequent garbage collections may change the @@ -542,11 +569,11 @@ tracking status of the object. False >>> gc.is_tracked([]) True - >>> gc.is_tracked({}) + >>> gc.is_tracked(()) False + >>> gc.is_tracked({}) + True >>> gc.is_tracked({"a": 1}) - False - >>> gc.is_tracked({"a": []}) True ``` @@ -588,9 +615,9 @@ heap. be more difficult. -> [!NOTE] +> [!NOTE] > **Document history** -> +> > Pablo Galindo Salgado - Original author -> +> > Irit Katriel - Convert to Markdown diff --git a/LICENSE b/LICENSE index 14603b95c2e23b..20cf39097c68ba 100644 --- a/LICENSE +++ b/LICENSE @@ -83,7 +83,7 @@ grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, analyze, test, perform and/or display publicly, prepare derivative works, distribute, and otherwise use Python alone or in any derivative version, provided, however, that PSF's License Agreement and PSF's notice of copyright, -i.e., "Copyright (c) 2001-2024 Python Software Foundation; All Rights Reserved" +i.e., "Copyright (c) 2001 Python Software Foundation; All Rights Reserved" are retained in Python alone or in any derivative version prepared by Licensee. 3. In the event Licensee prepares a derivative work that is based on diff --git a/Lib/_pydatetime.py b/Lib/_pydatetime.py index 78e03e32896740..ed01670cfece43 100644 --- a/Lib/_pydatetime.py +++ b/Lib/_pydatetime.py @@ -651,7 +651,19 @@ def __new__(cls, days=0, seconds=0, microseconds=0, # guide the C implementation; it's way more convoluted than speed- # ignoring auto-overflow-to-long idiomatic Python could be. - # XXX Check that all inputs are ints or floats. + for name, value in ( + ("days", days), + ("seconds", seconds), + ("microseconds", microseconds), + ("milliseconds", milliseconds), + ("minutes", minutes), + ("hours", hours), + ("weeks", weeks) + ): + if not isinstance(value, (int, float)): + raise TypeError( + f"unsupported type for timedelta {name} component: {type(value).__name__}" + ) # Final values, all integer. # s and us fit in 32-bit signed ints; d isn't bounded. 
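The ``_pydatetime`` hunk above tightens argument checking for ``timedelta``; here is a small usage sketch (not part of the patch, with the error message taken from the code above):

```python
from datetime import timedelta

print(timedelta(days=1, hours=2.5))  # ints and floats are still accepted

try:
    timedelta(seconds="10")  # a str component is rejected up front
except TypeError as exc:
    print(exc)  # unsupported type for timedelta seconds component: str
```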
diff --git a/Lib/_pyrepl/_minimal_curses.py b/Lib/_pyrepl/_minimal_curses.py index 849617bf7585e4..d884f880f50ac7 100644 --- a/Lib/_pyrepl/_minimal_curses.py +++ b/Lib/_pyrepl/_minimal_curses.py @@ -34,7 +34,7 @@ def _find_clib() -> str: clib.setupterm.restype = ctypes.c_int clib.tigetstr.argtypes = [ctypes.c_char_p] -clib.tigetstr.restype = ctypes.POINTER(ctypes.c_char) +clib.tigetstr.restype = ctypes.c_ssize_t clib.tparm.argtypes = [ctypes.c_char_p] + 9 * [ctypes.c_int] # type: ignore[operator] clib.tparm.restype = ctypes.c_char_p @@ -56,7 +56,7 @@ def tigetstr(cap): if not isinstance(cap, bytes): cap = cap.encode("ascii") result = clib.tigetstr(cap) - if ctypes.cast(result, ctypes.c_void_p).value == ERR: + if result == ERR: return None return ctypes.cast(result, ctypes.c_char_p).value diff --git a/Lib/argparse.py b/Lib/argparse.py index 072cd5e7dc0d06..5ecfdca17175e3 100644 --- a/Lib/argparse.py +++ b/Lib/argparse.py @@ -863,6 +863,9 @@ def __init__(self, _option_strings.append(option_string) if option_string.startswith('--'): + if option_string.startswith('--no-'): + raise ValueError(f'invalid option name {option_string!r} ' + f'for BooleanOptionalAction') option_string = '--no-' + option_string[2:] _option_strings.append(option_string) diff --git a/Lib/calendar.py b/Lib/calendar.py index 069dd5174112ae..8c1c646da46a98 100644 --- a/Lib/calendar.py +++ b/Lib/calendar.py @@ -27,7 +27,9 @@ error = ValueError # Exceptions raised for bad input -class IllegalMonthError(ValueError): +# This is trick for backward compatibility. Since 3.13, we will raise IllegalMonthError instead of +# IndexError for bad month number(out of 1-12). But we can't remove IndexError for backward compatibility. +class IllegalMonthError(ValueError, IndexError): def __init__(self, month): self.month = month def __str__(self): @@ -158,11 +160,14 @@ def weekday(year, month, day): return Day(datetime.date(year, month, day).weekday()) +def _validate_month(month): + if not 1 <= month <= 12: + raise IllegalMonthError(month) + def monthrange(year, month): """Return weekday of first day of month (0-6 ~ Mon-Sun) and number of days (28-31) for year, month.""" - if not 1 <= month <= 12: - raise IllegalMonthError(month) + _validate_month(month) day1 = weekday(year, month, 1) ndays = mdays[month] + (month == FEBRUARY and isleap(year)) return day1, ndays @@ -370,6 +375,8 @@ def formatmonthname(self, theyear, themonth, width, withyear=True): """ Return a formatted month name. """ + _validate_month(themonth) + s = month_name[themonth] if withyear: s = "%s %r" % (s, theyear) @@ -500,6 +507,7 @@ def formatmonthname(self, theyear, themonth, withyear=True): """ Return a month name as a table row. 
""" + _validate_month(themonth) if withyear: s = '%s %s' % (month_name[themonth], theyear) else: @@ -786,6 +794,8 @@ def main(args=None): if options.month is None: optdict["c"] = options.spacing optdict["m"] = options.months + if options.month is not None: + _validate_month(options.month) if options.year is None: result = cal.formatyear(datetime.date.today().year, **optdict) elif options.month is None: diff --git a/Lib/ctypes/__init__.py b/Lib/ctypes/__init__.py index 4a368f02d851c5..ac6493892068e9 100644 --- a/Lib/ctypes/__init__.py +++ b/Lib/ctypes/__init__.py @@ -19,7 +19,7 @@ raise Exception("Version number mismatch", __version__, _ctypes_version) if _os.name == "nt": - from _ctypes import FormatError + from _ctypes import COMError, FormatError DEFAULT_MODE = RTLD_LOCAL if _os.name == "posix" and _sys.platform == "darwin": diff --git a/Lib/email/__init__.py b/Lib/email/__init__.py index 9fa47783004185..6d597006e5eefe 100644 --- a/Lib/email/__init__.py +++ b/Lib/email/__init__.py @@ -1,4 +1,4 @@ -# Copyright (C) 2001-2007 Python Software Foundation +# Copyright (C) 2001 Python Software Foundation # Author: Barry Warsaw # Contact: email-sig@python.org diff --git a/Lib/email/_parseaddr.py b/Lib/email/_parseaddr.py index 36625e35ffb6a7..84917038874ba1 100644 --- a/Lib/email/_parseaddr.py +++ b/Lib/email/_parseaddr.py @@ -1,4 +1,4 @@ -# Copyright (C) 2002-2007 Python Software Foundation +# Copyright (C) 2002 Python Software Foundation # Contact: email-sig@python.org """Email address parsing code. diff --git a/Lib/email/_policybase.py b/Lib/email/_policybase.py index c7694a44e26639..4b63b97217a835 100644 --- a/Lib/email/_policybase.py +++ b/Lib/email/_policybase.py @@ -302,12 +302,12 @@ def header_source_parse(self, sourcelines): """+ The name is parsed as everything up to the ':' and returned unmodified. The value is determined by stripping leading whitespace off the - remainder of the first line, joining all subsequent lines together, and + remainder of the first line joined with all subsequent lines, and stripping any trailing carriage return or linefeed characters. 
""" name, value = sourcelines[0].split(':', 1) - value = value.lstrip(' \t') + ''.join(sourcelines[1:]) + value = ''.join((value, *sourcelines[1:])).lstrip(' \t\r\n') return (name, value.rstrip('\r\n')) def header_store_parse(self, name, value): diff --git a/Lib/email/base64mime.py b/Lib/email/base64mime.py index d440de95255bf1..a5a3f737a97b51 100644 --- a/Lib/email/base64mime.py +++ b/Lib/email/base64mime.py @@ -1,4 +1,4 @@ -# Copyright (C) 2002-2007 Python Software Foundation +# Copyright (C) 2002 Python Software Foundation # Author: Ben Gertzfield # Contact: email-sig@python.org diff --git a/Lib/email/charset.py b/Lib/email/charset.py index cfd5a0c456e497..5036c3f58a5633 100644 --- a/Lib/email/charset.py +++ b/Lib/email/charset.py @@ -1,4 +1,4 @@ -# Copyright (C) 2001-2007 Python Software Foundation +# Copyright (C) 2001 Python Software Foundation # Author: Ben Gertzfield, Barry Warsaw # Contact: email-sig@python.org diff --git a/Lib/email/encoders.py b/Lib/email/encoders.py index 17bd1ab7b19f32..55741a22a07b20 100644 --- a/Lib/email/encoders.py +++ b/Lib/email/encoders.py @@ -1,4 +1,4 @@ -# Copyright (C) 2001-2006 Python Software Foundation +# Copyright (C) 2001 Python Software Foundation # Author: Barry Warsaw # Contact: email-sig@python.org diff --git a/Lib/email/errors.py b/Lib/email/errors.py index 02aa5eced6ae46..6bc744bd59c5bb 100644 --- a/Lib/email/errors.py +++ b/Lib/email/errors.py @@ -1,4 +1,4 @@ -# Copyright (C) 2001-2006 Python Software Foundation +# Copyright (C) 2001 Python Software Foundation # Author: Barry Warsaw # Contact: email-sig@python.org diff --git a/Lib/email/feedparser.py b/Lib/email/feedparser.py index 06d6b4a3afcd07..b2bc4afc1cc26f 100644 --- a/Lib/email/feedparser.py +++ b/Lib/email/feedparser.py @@ -1,4 +1,4 @@ -# Copyright (C) 2004-2006 Python Software Foundation +# Copyright (C) 2004 Python Software Foundation # Authors: Baxter, Wouters and Warsaw # Contact: email-sig@python.org diff --git a/Lib/email/generator.py b/Lib/email/generator.py index 205caf0fe9e81d..ab5bd0653e440c 100644 --- a/Lib/email/generator.py +++ b/Lib/email/generator.py @@ -1,4 +1,4 @@ -# Copyright (C) 2001-2010 Python Software Foundation +# Copyright (C) 2001 Python Software Foundation # Author: Barry Warsaw # Contact: email-sig@python.org diff --git a/Lib/email/header.py b/Lib/email/header.py index 66a1d46db50c45..113a81f41314ec 100644 --- a/Lib/email/header.py +++ b/Lib/email/header.py @@ -1,4 +1,4 @@ -# Copyright (C) 2002-2007 Python Software Foundation +# Copyright (C) 2002 Python Software Foundation # Author: Ben Gertzfield, Barry Warsaw # Contact: email-sig@python.org diff --git a/Lib/email/iterators.py b/Lib/email/iterators.py index 2f436aefc2300b..08ede3ec679613 100644 --- a/Lib/email/iterators.py +++ b/Lib/email/iterators.py @@ -1,4 +1,4 @@ -# Copyright (C) 2001-2006 Python Software Foundation +# Copyright (C) 2001 Python Software Foundation # Author: Barry Warsaw # Contact: email-sig@python.org diff --git a/Lib/email/message.py b/Lib/email/message.py index 08192c50a8ff5c..a58afc5fe5f68e 100644 --- a/Lib/email/message.py +++ b/Lib/email/message.py @@ -1,4 +1,4 @@ -# Copyright (C) 2001-2007 Python Software Foundation +# Copyright (C) 2001 Python Software Foundation # Author: Barry Warsaw # Contact: email-sig@python.org diff --git a/Lib/email/mime/application.py b/Lib/email/mime/application.py index f67cbad3f03407..9a9d213d2a940d 100644 --- a/Lib/email/mime/application.py +++ b/Lib/email/mime/application.py @@ -1,4 +1,4 @@ -# Copyright (C) 2001-2006 Python Software Foundation 
+# Copyright (C) 2001 Python Software Foundation # Author: Keith Dart # Contact: email-sig@python.org diff --git a/Lib/email/mime/audio.py b/Lib/email/mime/audio.py index aa0c4905cbb2b4..85f4a955238c52 100644 --- a/Lib/email/mime/audio.py +++ b/Lib/email/mime/audio.py @@ -1,4 +1,4 @@ -# Copyright (C) 2001-2007 Python Software Foundation +# Copyright (C) 2001 Python Software Foundation # Author: Anthony Baxter # Contact: email-sig@python.org diff --git a/Lib/email/mime/base.py b/Lib/email/mime/base.py index f601f621cec393..da4c6e591a5cb8 100644 --- a/Lib/email/mime/base.py +++ b/Lib/email/mime/base.py @@ -1,4 +1,4 @@ -# Copyright (C) 2001-2006 Python Software Foundation +# Copyright (C) 2001 Python Software Foundation # Author: Barry Warsaw # Contact: email-sig@python.org diff --git a/Lib/email/mime/image.py b/Lib/email/mime/image.py index 4b7f2f9cbad425..dab9685848172b 100644 --- a/Lib/email/mime/image.py +++ b/Lib/email/mime/image.py @@ -1,4 +1,4 @@ -# Copyright (C) 2001-2006 Python Software Foundation +# Copyright (C) 2001 Python Software Foundation # Author: Barry Warsaw # Contact: email-sig@python.org diff --git a/Lib/email/mime/message.py b/Lib/email/mime/message.py index 61836b5a7861fc..13d9ff599f86db 100644 --- a/Lib/email/mime/message.py +++ b/Lib/email/mime/message.py @@ -1,4 +1,4 @@ -# Copyright (C) 2001-2006 Python Software Foundation +# Copyright (C) 2001 Python Software Foundation # Author: Barry Warsaw # Contact: email-sig@python.org diff --git a/Lib/email/mime/multipart.py b/Lib/email/mime/multipart.py index 47fc218e1ae032..1abb84d5fed0bb 100644 --- a/Lib/email/mime/multipart.py +++ b/Lib/email/mime/multipart.py @@ -1,4 +1,4 @@ -# Copyright (C) 2002-2006 Python Software Foundation +# Copyright (C) 2002 Python Software Foundation # Author: Barry Warsaw # Contact: email-sig@python.org diff --git a/Lib/email/mime/nonmultipart.py b/Lib/email/mime/nonmultipart.py index a41386eb148c0c..5beab3a441e2bc 100644 --- a/Lib/email/mime/nonmultipart.py +++ b/Lib/email/mime/nonmultipart.py @@ -1,4 +1,4 @@ -# Copyright (C) 2002-2006 Python Software Foundation +# Copyright (C) 2002 Python Software Foundation # Author: Barry Warsaw # Contact: email-sig@python.org diff --git a/Lib/email/mime/text.py b/Lib/email/mime/text.py index 7672b789138600..aa4da7f8217e43 100644 --- a/Lib/email/mime/text.py +++ b/Lib/email/mime/text.py @@ -1,4 +1,4 @@ -# Copyright (C) 2001-2006 Python Software Foundation +# Copyright (C) 2001 Python Software Foundation # Author: Barry Warsaw # Contact: email-sig@python.org diff --git a/Lib/email/parser.py b/Lib/email/parser.py index 475aa2b1a66680..039f03cba74fa0 100644 --- a/Lib/email/parser.py +++ b/Lib/email/parser.py @@ -1,4 +1,4 @@ -# Copyright (C) 2001-2007 Python Software Foundation +# Copyright (C) 2001 Python Software Foundation # Author: Barry Warsaw, Thomas Wouters, Anthony Baxter # Contact: email-sig@python.org diff --git a/Lib/email/policy.py b/Lib/email/policy.py index 46b7de5bb6d8ae..6e109b65011a44 100644 --- a/Lib/email/policy.py +++ b/Lib/email/policy.py @@ -119,13 +119,13 @@ def header_source_parse(self, sourcelines): """+ The name is parsed as everything up to the ':' and returned unmodified. The value is determined by stripping leading whitespace off the - remainder of the first line, joining all subsequent lines together, and + remainder of the first line joined with all subsequent lines, and stripping any trailing carriage return or linefeed characters. (This is the same as Compat32). 
""" name, value = sourcelines[0].split(':', 1) - value = value.lstrip(' \t') + ''.join(sourcelines[1:]) + value = ''.join((value, *sourcelines[1:])).lstrip(' \t\r\n') return (name, value.rstrip('\r\n')) def header_store_parse(self, name, value): diff --git a/Lib/email/quoprimime.py b/Lib/email/quoprimime.py index 500bbc5151769d..27c7ea55c7871f 100644 --- a/Lib/email/quoprimime.py +++ b/Lib/email/quoprimime.py @@ -1,4 +1,4 @@ -# Copyright (C) 2001-2006 Python Software Foundation +# Copyright (C) 2001 Python Software Foundation # Author: Ben Gertzfield # Contact: email-sig@python.org diff --git a/Lib/email/utils.py b/Lib/email/utils.py index f276303197396b..7eab74dc0db9df 100644 --- a/Lib/email/utils.py +++ b/Lib/email/utils.py @@ -1,4 +1,4 @@ -# Copyright (C) 2001-2010 Python Software Foundation +# Copyright (C) 2001 Python Software Foundation # Author: Barry Warsaw # Contact: email-sig@python.org diff --git a/Lib/encodings/__init__.py b/Lib/encodings/__init__.py index f9075b8f0d98ac..298177eb8003a7 100644 --- a/Lib/encodings/__init__.py +++ b/Lib/encodings/__init__.py @@ -156,19 +156,22 @@ def search_function(encoding): codecs.register(search_function) if sys.platform == 'win32': - # bpo-671666, bpo-46668: If Python does not implement a codec for current - # Windows ANSI code page, use the "mbcs" codec instead: - # WideCharToMultiByte() and MultiByteToWideChar() functions with CP_ACP. - # Python does not support custom code pages. - def _alias_mbcs(encoding): + from ._win_cp_codecs import create_win32_code_page_codec + + def win32_code_page_search_function(encoding): + encoding = encoding.lower() + if not encoding.startswith('cp'): + return None try: - import _winapi - ansi_code_page = "cp%s" % _winapi.GetACP() - if encoding == ansi_code_page: - import encodings.mbcs - return encodings.mbcs.getregentry() - except ImportError: - # Imports may fail while we are shutting down - pass + cp = int(encoding[2:]) + except ValueError: + return None + # Test if the code page is supported + try: + codecs.code_page_encode(cp, 'x') + except (OverflowError, OSError): + return None + + return create_win32_code_page_codec(cp) - codecs.register(_alias_mbcs) + codecs.register(win32_code_page_search_function) diff --git a/Lib/encodings/_win_cp_codecs.py b/Lib/encodings/_win_cp_codecs.py new file mode 100644 index 00000000000000..4f8eb886794404 --- /dev/null +++ b/Lib/encodings/_win_cp_codecs.py @@ -0,0 +1,36 @@ +import codecs + +def create_win32_code_page_codec(cp): + from codecs import code_page_encode, code_page_decode + + def encode(input, errors='strict'): + return code_page_encode(cp, input, errors) + + def decode(input, errors='strict'): + return code_page_decode(cp, input, errors, True) + + class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input, final=False): + return code_page_encode(cp, input, self.errors)[0] + + class IncrementalDecoder(codecs.BufferedIncrementalDecoder): + def _buffer_decode(self, input, errors, final): + return code_page_decode(cp, input, errors, final) + + class StreamWriter(codecs.StreamWriter): + def encode(self, input, errors='strict'): + return code_page_encode(cp, input, errors) + + class StreamReader(codecs.StreamReader): + def decode(self, input, errors, final): + return code_page_decode(cp, input, errors, final) + + return codecs.CodecInfo( + name=f'cp{cp}', + encode=encode, + decode=decode, + incrementalencoder=IncrementalEncoder, + incrementaldecoder=IncrementalDecoder, + streamreader=StreamReader, + streamwriter=StreamWriter, + ) diff --git 
a/Lib/ensurepip/__init__.py b/Lib/ensurepip/__init__.py index 585afc85836c06..645ad998129348 100644 --- a/Lib/ensurepip/__init__.py +++ b/Lib/ensurepip/__init__.py @@ -10,7 +10,7 @@ __all__ = ["version", "bootstrap"] -_PIP_VERSION = "24.2" +_PIP_VERSION = "24.3.1" # Directory of system wheel packages. Some Linux distribution packaging # policies recommend against bundling dependencies. For example, Fedora diff --git a/Lib/ensurepip/_bundled/pip-24.2-py3-none-any.whl b/Lib/ensurepip/_bundled/pip-24.3.1-py3-none-any.whl similarity index 84% rename from Lib/ensurepip/_bundled/pip-24.2-py3-none-any.whl rename to Lib/ensurepip/_bundled/pip-24.3.1-py3-none-any.whl index 542cdd1e7284ae..5f1d35be6dd56b 100644 Binary files a/Lib/ensurepip/_bundled/pip-24.2-py3-none-any.whl and b/Lib/ensurepip/_bundled/pip-24.3.1-py3-none-any.whl differ diff --git a/Lib/functools.py b/Lib/functools.py index 27abd622a8cff1..eff6540c7f606e 100644 --- a/Lib/functools.py +++ b/Lib/functools.py @@ -6,7 +6,7 @@ # Written by Nick Coghlan , # Raymond Hettinger , # and Łukasz Langa . -# Copyright (C) 2006-2024 Python Software Foundation. +# Copyright (C) 2006 Python Software Foundation. # See C source code for _functools credits/copyright __all__ = ['update_wrapper', 'wraps', 'WRAPPER_ASSIGNMENTS', 'WRAPPER_UPDATES', @@ -236,7 +236,7 @@ def __ge__(self, other): def reduce(function, sequence, initial=_initial_missing): """ - reduce(function, iterable[, initial], /) -> value + reduce(function, iterable, /[, initial]) -> value Apply a function of two arguments cumulatively to the items of an iterable, from left to right. diff --git a/Lib/getopt.py b/Lib/getopt.py index 1df5b96472a45c..a9c452a601ee81 100644 --- a/Lib/getopt.py +++ b/Lib/getopt.py @@ -24,10 +24,6 @@ # TODO for gnu_getopt(): # # - GNU getopt_long_only mechanism -# - allow the caller to specify ordering -# - RETURN_IN_ORDER option -# - GNU extension with '-' as first character of option string -# - optional arguments, specified by double colons # - an option string with a W followed by semicolon should # treat "-W foo" as "--foo" @@ -58,12 +54,14 @@ def getopt(args, shortopts, longopts = []): running program. Typically, this means "sys.argv[1:]". shortopts is the string of option letters that the script wants to recognize, with options that require an argument followed by a - colon (i.e., the same format that Unix getopt() uses). If + colon and options that accept an optional argument followed by + two colons (i.e., the same format that Unix getopt() uses). If specified, longopts is a list of strings with the names of the long options which should be supported. The leading '--' characters should not be included in the option name. Options which require an argument should be followed by an equal sign - ('='). + ('='). Options which accept an optional argument should be + followed by an equal sign and question mark ('=?'). The return value consists of two elements: the first is a list of (option, value) pairs; the second is the list of program arguments @@ -115,8 +113,13 @@ def gnu_getopt(args, shortopts, longopts = []): else: longopts = list(longopts) + return_in_order = False + if shortopts.startswith('-'): + shortopts = shortopts[1:] + all_options_first = False + return_in_order = True # Allow options after non-option arguments? 
- if shortopts.startswith('+'): + elif shortopts.startswith('+'): shortopts = shortopts[1:] all_options_first = True elif os.environ.get("POSIXLY_CORRECT"): @@ -130,8 +133,14 @@ def gnu_getopt(args, shortopts, longopts = []): break if args[0][:2] == '--': + if return_in_order and prog_args: + opts.append((None, prog_args)) + prog_args = [] opts, args = do_longs(opts, args[0][2:], longopts, args[1:]) elif args[0][:1] == '-' and args[0] != '-': + if return_in_order and prog_args: + opts.append((None, prog_args)) + prog_args = [] opts, args = do_shorts(opts, args[0][1:], shortopts, args[1:]) else: if all_options_first: @@ -153,7 +162,7 @@ def do_longs(opts, opt, longopts, args): has_arg, opt = long_has_args(opt, longopts) if has_arg: - if optarg is None: + if optarg is None and has_arg != '?': if not args: raise GetoptError(_('option --%s requires argument') % opt, opt) optarg, args = args[0], args[1:] @@ -174,6 +183,8 @@ def long_has_args(opt, longopts): return False, opt elif opt + '=' in possibilities: return True, opt + elif opt + '=?' in possibilities: + return '?', opt # No exact match, so better be unique. if len(possibilities) > 1: # XXX since possibilities contains all valid continuations, might be @@ -181,6 +192,8 @@ def long_has_args(opt, longopts): raise GetoptError(_('option --%s not a unique prefix') % opt, opt) assert len(possibilities) == 1 unique_match = possibilities[0] + if unique_match.endswith('=?'): + return '?', unique_match[:-2] has_arg = unique_match.endswith('=') if has_arg: unique_match = unique_match[:-1] @@ -189,8 +202,9 @@ def long_has_args(opt, longopts): def do_shorts(opts, optstring, shortopts, args): while optstring != '': opt, optstring = optstring[0], optstring[1:] - if short_has_arg(opt, shortopts): - if optstring == '': + has_arg = short_has_arg(opt, shortopts) + if has_arg: + if optstring == '' and has_arg != '?': if not args: raise GetoptError(_('option -%s requires argument') % opt, opt) @@ -204,7 +218,11 @@ def do_shorts(opts, optstring, shortopts, args): def short_has_arg(opt, shortopts): for i in range(len(shortopts)): if opt == shortopts[i] != ':': - return shortopts.startswith(':', i+1) + if not shortopts.startswith(':', i+1): + return False + if shortopts.startswith('::', i+1): + return '?' + return True raise GetoptError(_('option -%s not recognized') % opt, opt) if __name__ == '__main__': diff --git a/Lib/importlib/_bootstrap_external.py b/Lib/importlib/_bootstrap_external.py index 1b76328429f63a..fa36159711846f 100644 --- a/Lib/importlib/_bootstrap_external.py +++ b/Lib/importlib/_bootstrap_external.py @@ -209,7 +209,11 @@ def _write_atomic(path, data, mode=0o666): # We first write data to a temporary file, and then use os.replace() to # perform an atomic rename. with _io.FileIO(fd, 'wb') as file: - file.write(data) + bytes_written = file.write(data) + if bytes_written != len(data): + # Raise an OSError so the 'except' below cleans up the partially + # written file. 
+ raise OSError("os.write() didn't write the full pyc file") _os.replace(path_tmp, path) except OSError: try: diff --git a/Lib/mimetypes.py b/Lib/mimetypes.py index d7c4e8444f8dec..753238354f6d36 100644 --- a/Lib/mimetypes.py +++ b/Lib/mimetypes.py @@ -23,11 +23,6 @@ read_mime_types(file) -- parse one file, return a dictionary or None """ -import os -import sys -import posixpath -import urllib.parse - try: from _winapi import _mimetypes_read_windows_registry except ImportError: @@ -119,6 +114,10 @@ def guess_type(self, url, strict=True): Optional 'strict' argument when False adds a bunch of commonly found, but non-standard types. """ + # Lazy import to improve module import time + import os + import urllib.parse + # TODO: Deprecate accepting file paths (in particular path-like objects). url = os.fspath(url) p = urllib.parse.urlparse(url) @@ -146,6 +145,10 @@ def guess_type(self, url, strict=True): if '=' in type or '/' not in type: type = 'text/plain' return type, None # never compressed, so encoding is None + + # Lazy import to improve module import time + import posixpath + return self._guess_file_type(url, strict, posixpath.splitext) def guess_file_type(self, path, *, strict=True): @@ -153,6 +156,9 @@ def guess_file_type(self, path, *, strict=True): Similar to guess_type(), but takes file path instead of URL. """ + # Lazy import to improve module import time + import os + path = os.fsdecode(path) path = os.path.splitdrive(path)[1] return self._guess_file_type(path, strict, os.path.splitext) @@ -399,6 +405,9 @@ def init(files=None): else: db = _db + # Lazy import to improve module import time + import os + for file in files: if os.path.isfile(file): db.read(file) @@ -445,7 +454,7 @@ def _default_mime_types(): } # Before adding new types, make sure they are either registered with IANA, - # at http://www.iana.org/assignments/media-types + # at https://www.iana.org/assignments/media-types/media-types.xhtml # or extensions, i.e. using the x- prefix # If you add to these, please keep them sorted by mime type. 
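The `mimetypes` hunks above defer the `os`, `posixpath` and `urllib.parse` imports into the functions that actually need them, so importing `mimetypes` itself stays cheap. A minimal, self-contained sketch of the lazy-import pattern (the function below is purely illustrative and is not part of `mimetypes`):

```
def guess_scheme(url):
    # Lazy import: urllib.parse is only loaded the first time this function
    # runs. Later calls find it already in sys.modules, so the repeated
    # import statement is just a fast dictionary lookup.
    import urllib.parse
    return urllib.parse.urlsplit(url).scheme

print(guess_scheme("https://www.iana.org/assignments/media-types/media-types.xhtml"))
```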
@@ -479,6 +488,7 @@ def _default_mime_types(): '.m3u8' : 'application/vnd.apple.mpegurl', '.xls' : 'application/vnd.ms-excel', '.xlb' : 'application/vnd.ms-excel', + '.eot' : 'application/vnd.ms-fontobject', '.ppt' : 'application/vnd.ms-powerpoint', '.pot' : 'application/vnd.ms-powerpoint', '.ppa' : 'application/vnd.ms-powerpoint', @@ -534,6 +544,7 @@ def _default_mime_types(): '.ass' : 'audio/aac', '.au' : 'audio/basic', '.snd' : 'audio/basic', + '.mka' : 'audio/matroska', '.mp3' : 'audio/mpeg', '.mp2' : 'audio/mpeg', '.opus' : 'audio/opus', @@ -542,21 +553,34 @@ def _default_mime_types(): '.aiff' : 'audio/x-aiff', '.ra' : 'audio/x-pn-realaudio', '.wav' : 'audio/x-wav', + '.otf' : 'font/otf', + '.ttf' : 'font/ttf', + '.woff' : 'font/woff', + '.woff2' : 'font/woff2', '.avif' : 'image/avif', '.bmp' : 'image/bmp', + '.emf' : 'image/emf', + '.fits' : 'image/fits', + '.g3' : 'image/g3fax', '.gif' : 'image/gif', '.ief' : 'image/ief', + '.jp2' : 'image/jp2', '.jpg' : 'image/jpeg', '.jpe' : 'image/jpeg', '.jpeg' : 'image/jpeg', + '.jpm' : 'image/jpm', + '.jpx' : 'image/jpx', '.heic' : 'image/heic', '.heif' : 'image/heif', '.png' : 'image/png', '.svg' : 'image/svg+xml', + '.t38' : 'image/t38', '.tiff' : 'image/tiff', '.tif' : 'image/tiff', + '.tfx' : 'image/tiff-fx', '.ico' : 'image/vnd.microsoft.icon', '.webp' : 'image/webp', + '.wmf' : 'image/wmf', '.ras' : 'image/x-cmu-raster', '.pnm' : 'image/x-portable-anymap', '.pbm' : 'image/x-portable-bitmap', @@ -595,6 +619,8 @@ def _default_mime_types(): '.sgml' : 'text/x-sgml', '.vcf' : 'text/x-vcard', '.xml' : 'text/xml', + '.mkv' : 'video/matroska', + '.mk3d' : 'video/matroska-3d', '.mp4' : 'video/mp4', '.mpeg' : 'video/mpeg', '.m1v' : 'video/mpeg', @@ -629,6 +655,7 @@ def _default_mime_types(): def _main(): import getopt + import sys USAGE = """\ Usage: mimetypes.py [options] type diff --git a/Lib/multiprocessing/connection.py b/Lib/multiprocessing/connection.py index 7e901cf2fb9852..996887cb713942 100644 --- a/Lib/multiprocessing/connection.py +++ b/Lib/multiprocessing/connection.py @@ -181,6 +181,10 @@ def close(self): finally: self._handle = None + def _detach(self): + """Stop managing the underlying file descriptor or handle.""" + self._handle = None + def send_bytes(self, buf, offset=0, size=None): """Send the bytes data from a bytes-like object""" self._check_closed() diff --git a/Lib/multiprocessing/forkserver.py b/Lib/multiprocessing/forkserver.py index 53b8c492675878..df9b9be9d1898b 100644 --- a/Lib/multiprocessing/forkserver.py +++ b/Lib/multiprocessing/forkserver.py @@ -9,6 +9,7 @@ import threading import warnings +from . import AuthenticationError from . import connection from . import process from .context import reduction @@ -25,6 +26,7 @@ MAXFDS_TO_SEND = 256 SIGNED_STRUCT = struct.Struct('q') # large enough for pid_t +_AUTHKEY_LEN = 32 # <= PIPEBUF so it fits a single write to an empty pipe. # # Forkserver class @@ -33,6 +35,7 @@ class ForkServer(object): def __init__(self): + self._forkserver_authkey = None self._forkserver_address = None self._forkserver_alive_fd = None self._forkserver_pid = None @@ -59,6 +62,7 @@ def _stop_unlocked(self): if not util.is_abstract_socket_namespace(self._forkserver_address): os.unlink(self._forkserver_address) self._forkserver_address = None + self._forkserver_authkey = None def set_forkserver_preload(self, modules_names): '''Set list of module names to try to load in forkserver process.''' @@ -83,6 +87,7 @@ def connect_to_new_process(self, fds): process data. 
''' self.ensure_running() + assert self._forkserver_authkey if len(fds) + 4 >= MAXFDS_TO_SEND: raise ValueError('too many fds') with socket.socket(socket.AF_UNIX) as client: @@ -93,6 +98,18 @@ def connect_to_new_process(self, fds): resource_tracker.getfd()] allfds += fds try: + client.setblocking(True) + wrapped_client = connection.Connection(client.fileno()) + # The other side of this exchange happens in the child as + # implemented in main(). + try: + connection.answer_challenge( + wrapped_client, self._forkserver_authkey) + connection.deliver_challenge( + wrapped_client, self._forkserver_authkey) + finally: + wrapped_client._detach() + del wrapped_client reduction.sendfds(client, allfds) return parent_r, parent_w except: @@ -120,6 +137,7 @@ def ensure_running(self): return # dead, launch it again os.close(self._forkserver_alive_fd) + self._forkserver_authkey = None self._forkserver_address = None self._forkserver_alive_fd = None self._forkserver_pid = None @@ -130,9 +148,9 @@ def ensure_running(self): if self._preload_modules: desired_keys = {'main_path', 'sys_path'} data = spawn.get_preparation_data('ignore') - data = {x: y for x, y in data.items() if x in desired_keys} + main_kws = {x: y for x, y in data.items() if x in desired_keys} else: - data = {} + main_kws = {} with socket.socket(socket.AF_UNIX) as listener: address = connection.arbitrary_address('AF_UNIX') @@ -144,19 +162,31 @@ def ensure_running(self): # all client processes own the write end of the "alive" pipe; # when they all terminate the read end becomes ready. alive_r, alive_w = os.pipe() + # A short lived pipe to initialize the forkserver authkey. + authkey_r, authkey_w = os.pipe() try: - fds_to_pass = [listener.fileno(), alive_r] + fds_to_pass = [listener.fileno(), alive_r, authkey_r] + main_kws['authkey_r'] = authkey_r cmd %= (listener.fileno(), alive_r, self._preload_modules, - data) + main_kws) exe = spawn.get_executable() args = [exe] + util._args_from_interpreter_flags() args += ['-c', cmd] pid = util.spawnv_passfds(exe, args, fds_to_pass) except: os.close(alive_w) + os.close(authkey_w) raise finally: os.close(alive_r) + os.close(authkey_r) + # Authenticate our control socket to prevent access from + # processes we have not shared this key with. + try: + self._forkserver_authkey = os.urandom(_AUTHKEY_LEN) + os.write(authkey_w, self._forkserver_authkey) + finally: + os.close(authkey_w) self._forkserver_address = address self._forkserver_alive_fd = alive_w self._forkserver_pid = pid @@ -165,9 +195,21 @@ def ensure_running(self): # # -def main(listener_fd, alive_r, preload, main_path=None, sys_path=None): - '''Run forkserver.''' +def main(listener_fd, alive_r, preload, main_path=None, sys_path=None, + *, authkey_r=None): + """Run forkserver.""" + if authkey_r is not None: + try: + authkey = os.read(authkey_r, _AUTHKEY_LEN) + assert len(authkey) == _AUTHKEY_LEN, f'{len(authkey)} < {_AUTHKEY_LEN}' + finally: + os.close(authkey_r) + else: + authkey = b'' + if preload: + if sys_path is not None: + sys.path[:] = sys_path if '__main__' in preload and main_path is not None: process.current_process()._inheriting = True try: @@ -255,8 +297,24 @@ def sigchld_handler(*_unused): if listener in rfds: # Incoming fork request with listener.accept()[0] as s: - # Receive fds from client - fds = reduction.recvfds(s, MAXFDS_TO_SEND + 1) + try: + if authkey: + wrapped_s = connection.Connection(s.fileno()) + # The other side of this exchange happens in + # in connect_to_new_process(). 
+ try: + connection.deliver_challenge( + wrapped_s, authkey) + connection.answer_challenge( + wrapped_s, authkey) + finally: + wrapped_s._detach() + del wrapped_s + # Receive fds from client + fds = reduction.recvfds(s, MAXFDS_TO_SEND + 1) + except (EOFError, BrokenPipeError, AuthenticationError): + s.close() + continue if len(fds) > MAXFDS_TO_SEND: raise RuntimeError( "Too many ({0:n}) fds to send".format( diff --git a/Lib/multiprocessing/managers.py b/Lib/multiprocessing/managers.py index a5d2f53613952e..040f4674d735c0 100644 --- a/Lib/multiprocessing/managers.py +++ b/Lib/multiprocessing/managers.py @@ -759,22 +759,29 @@ class BaseProxy(object): _address_to_local = {} _mutex = util.ForkAwareThreadLock() + # Each instance gets a `_serial` number. Unlike `id(...)`, this number + # is never reused. + _next_serial = 1 + def __init__(self, token, serializer, manager=None, authkey=None, exposed=None, incref=True, manager_owned=False): with BaseProxy._mutex: - tls_idset = BaseProxy._address_to_local.get(token.address, None) - if tls_idset is None: - tls_idset = util.ForkAwareLocal(), ProcessLocalSet() - BaseProxy._address_to_local[token.address] = tls_idset + tls_serials = BaseProxy._address_to_local.get(token.address, None) + if tls_serials is None: + tls_serials = util.ForkAwareLocal(), ProcessLocalSet() + BaseProxy._address_to_local[token.address] = tls_serials + + self._serial = BaseProxy._next_serial + BaseProxy._next_serial += 1 # self._tls is used to record the connection used by this # thread to communicate with the manager at token.address - self._tls = tls_idset[0] + self._tls = tls_serials[0] - # self._idset is used to record the identities of all shared - # objects for which the current process owns references and + # self._all_serials is a set used to record the identities of all + # shared objects for which the current process owns references and # which are in the manager at token.address - self._idset = tls_idset[1] + self._all_serials = tls_serials[1] self._token = token self._id = self._token.id @@ -857,20 +864,20 @@ def _incref(self): dispatch(conn, None, 'incref', (self._id,)) util.debug('INCREF %r', self._token.id) - self._idset.add(self._id) + self._all_serials.add(self._serial) state = self._manager and self._manager._state self._close = util.Finalize( self, BaseProxy._decref, - args=(self._token, self._authkey, state, - self._tls, self._idset, self._Client), + args=(self._token, self._serial, self._authkey, state, + self._tls, self._all_serials, self._Client), exitpriority=10 ) @staticmethod - def _decref(token, authkey, state, tls, idset, _Client): - idset.discard(token.id) + def _decref(token, serial, authkey, state, tls, idset, _Client): + idset.discard(serial) # check whether manager is still alive if state is None or state.value == State.STARTED: diff --git a/Lib/multiprocessing/reduction.py b/Lib/multiprocessing/reduction.py index 5593f0682f7fce..fcccd3eef86cc7 100644 --- a/Lib/multiprocessing/reduction.py +++ b/Lib/multiprocessing/reduction.py @@ -139,15 +139,12 @@ def detach(self): __all__ += ['DupFd', 'sendfds', 'recvfds'] import array - # On MacOSX we should acknowledge receipt of fds -- see Issue14669 - ACKNOWLEDGE = sys.platform == 'darwin' - def sendfds(sock, fds): '''Send an array of fds over an AF_UNIX socket.''' fds = array.array('i', fds) msg = bytes([len(fds) % 256]) sock.sendmsg([msg], [(socket.SOL_SOCKET, socket.SCM_RIGHTS, fds)]) - if ACKNOWLEDGE and sock.recv(1) != b'A': + if sock.recv(1) != b'A': raise RuntimeError('did not receive acknowledgement 
of fd') def recvfds(sock, size): @@ -158,8 +155,11 @@ def recvfds(sock, size): if not msg and not ancdata: raise EOFError try: - if ACKNOWLEDGE: - sock.send(b'A') + # We send/recv an Ack byte after the fds to work around an old + # macOS bug; it isn't clear if this is still required but it + # makes unit testing fd sending easier. + # See: https://github.com/python/cpython/issues/58874 + sock.send(b'A') # Acknowledge if len(ancdata) != 1: raise RuntimeError('received %d items of ancdata' % len(ancdata)) diff --git a/Lib/ntpath.py b/Lib/ntpath.py index 1b1873f08b608b..5481bb8888ef59 100644 --- a/Lib/ntpath.py +++ b/Lib/ntpath.py @@ -553,28 +553,21 @@ def normpath(path): return prefix + sep.join(comps) -def _abspath_fallback(path): - """Return the absolute version of a path as a fallback function in case - `nt._getfullpathname` is not available or raises OSError. See bpo-31047 for - more. - - """ - - path = os.fspath(path) - if not isabs(path): - if isinstance(path, bytes): - cwd = os.getcwdb() - else: - cwd = os.getcwd() - path = join(cwd, path) - return normpath(path) - # Return an absolute path. try: from nt import _getfullpathname except ImportError: # not running on Windows - mock up something sensible - abspath = _abspath_fallback + def abspath(path): + """Return the absolute version of a path.""" + path = os.fspath(path) + if not isabs(path): + if isinstance(path, bytes): + cwd = os.getcwdb() + else: + cwd = os.getcwd() + path = join(cwd, path) + return normpath(path) else: # use native Windows method on Windows def abspath(path): @@ -582,7 +575,27 @@ def abspath(path): try: return _getfullpathname(normpath(path)) except (OSError, ValueError): - return _abspath_fallback(path) + # See gh-75230, handle outside for cleaner traceback + pass + path = os.fspath(path) + if not isabs(path): + if isinstance(path, bytes): + sep = b'\\' + getcwd = os.getcwdb + else: + sep = '\\' + getcwd = os.getcwd + drive, root, path = splitroot(path) + # Either drive or root can be nonempty, but not both. + if drive or root: + try: + path = join(_getfullpathname(drive + root), path) + except (OSError, ValueError): + # Drive "\0:" cannot exist; use the root directory. + path = drive + sep + path + else: + path = join(getcwd(), path) + return normpath(path) try: from nt import _findfirstfile, _getfinalpathname, readlink as _nt_readlink diff --git a/Lib/nturl2path.py b/Lib/nturl2path.py index 2f9fec7893afd1..66092e4821a0ec 100644 --- a/Lib/nturl2path.py +++ b/Lib/nturl2path.py @@ -15,15 +15,20 @@ def url2pathname(url): # become # C:\foo\bar\spam.foo import string, urllib.parse + if url[:3] == '///': + # URL has an empty authority section, so the path begins on the third + # character. + url = url[2:] + elif url[:12] == '//localhost/': + # Skip past 'localhost' authority. + url = url[11:] + if url[:3] == '///': + # Skip past extra slash before UNC drive in URL path. + url = url[1:] # Windows itself uses ":" even in URLs. url = url.replace(':', '|') if not '|' in url: # No drive specifier, just convert slashes - if url[:4] == '////': - # path is something like ////host/path/on/remote/host - # convert this to \\host\path\on\remote\host - # (notice halving of slashes at the start of the path) - url = url[2:] # make sure not to convert quoted slashes :-) return urllib.parse.unquote(url.replace('/', '\\')) comp = url.split('|') @@ -41,23 +46,22 @@ def pathname2url(p): # C:\foo\bar\spam.foo # becomes # ///C:/foo/bar/spam.foo + import ntpath import urllib.parse # First, clean up some special forms. 
We are going to sacrifice # the additional information anyway - if p[:4] == '\\\\?\\': + p = p.replace('\\', '/') + if p[:4] == '//?/': p = p[4:] - if p[:4].upper() == 'UNC\\': - p = '\\\\' + p[4:] - elif p[1:2] != ':': - raise OSError('Bad path: ' + p) - if not ':' in p: - # No drive specifier, just convert slashes and quote the name - return urllib.parse.quote(p.replace('\\', '/')) - comp = p.split(':', maxsplit=2) - if len(comp) != 2 or len(comp[0]) > 1: - error = 'Bad path: ' + p - raise OSError(error) + if p[:4].upper() == 'UNC/': + p = '//' + p[4:] + drive, tail = ntpath.splitdrive(p) + if drive[1:] == ':': + # DOS drive specified. Add three slashes to the start, producing + # an authority section with a zero-length authority, and a path + # section starting with a single slash. + drive = f'///{drive.upper()}' - drive = urllib.parse.quote(comp[0].upper()) - tail = urllib.parse.quote(comp[1].replace('\\', '/')) - return '///' + drive + ':' + tail + drive = urllib.parse.quote(drive, safe='/:') + tail = urllib.parse.quote(tail) + return drive + tail diff --git a/Lib/optparse.py b/Lib/optparse.py index 04112eca37c801..cbe3451ced8bc3 100644 --- a/Lib/optparse.py +++ b/Lib/optparse.py @@ -43,7 +43,7 @@ __copyright__ = """ Copyright (c) 2001-2006 Gregory P. Ward. All rights reserved. -Copyright (c) 2002-2006 Python Software Foundation. All rights reserved. +Copyright (c) 2002 Python Software Foundation. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are diff --git a/Lib/pathlib/_abc.py b/Lib/pathlib/_abc.py index 43e6624934b045..2c243d470d4eda 100644 --- a/Lib/pathlib/_abc.py +++ b/Lib/pathlib/_abc.py @@ -124,14 +124,12 @@ class PurePathBase: parser = ParserBase() _globber = PathGlobber - def __init__(self, arg, *args): - paths = [arg] - paths.extend(args) - for path in paths: - if not isinstance(path, str): + def __init__(self, *args): + for arg in args: + if not isinstance(arg, str): raise TypeError( - f"path should be a str, not {type(path).__name__!r}") - self._raw_paths = paths + f"argument should be a str, not {type(arg).__name__!r}") + self._raw_paths = list(args) self._resolving = False def with_segments(self, *pathsegments): @@ -270,7 +268,7 @@ def relative_to(self, other, *, walk_up=False): raise ValueError(f"'..' segment in {str(other)!r} cannot be walked") else: parts0.append('..') - return self.with_segments('', *reversed(parts0)) + return self.with_segments(*reversed(parts0)) def is_relative_to(self, other): """Return True if the path is relative to another path or False. @@ -737,7 +735,13 @@ def absolute(self): Use resolve() to resolve symlinks and remove '..' segments. """ - raise UnsupportedOperation(self._unsupported_msg('absolute()')) + if self.is_absolute(): + return self + elif self.parser is not posixpath: + raise UnsupportedOperation(self._unsupported_msg('absolute()')) + else: + # Treat the root directory as the current working directory. + return self.with_segments('/', *self._raw_paths) @classmethod def cwd(cls): @@ -746,7 +750,7 @@ def cwd(cls): # enable users to replace the implementation of 'absolute()' in a # subclass and benefit from the new behaviour here. This works because # os.path.abspath('.') == os.getcwd(). 
- return cls('').absolute() + return cls().absolute() def expanduser(self): """ Return a new path with expanded ~ and ~user constructs @@ -774,10 +778,13 @@ def resolve(self, strict=False): """ if self._resolving: return self + elif self.parser is not posixpath: + raise UnsupportedOperation(self._unsupported_msg('resolve()')) - def getcwd(): - return str(self.with_segments().absolute()) + def raise_error(*args): + raise OSError("Unsupported operation.") + getcwd = raise_error if strict or getattr(self.readlink, '_supported', True): def lstat(path_str): path = self.with_segments(path_str) @@ -792,14 +799,10 @@ def readlink(path_str): # If the user has *not* overridden the `readlink()` method, then # symlinks are unsupported and (in non-strict mode) we can improve # performance by not calling `path.lstat()`. - def skip(path_str): - # This exception will be internally consumed by `_realpath()`. - raise OSError("Operation skipped.") - - lstat = readlink = skip + lstat = readlink = raise_error return self.with_segments(posixpath._realpath( - str(self), strict, self.parser.sep, + str(self.absolute()), strict, self.parser.sep, getcwd=getcwd, lstat=lstat, readlink=readlink, maxlinks=self._max_symlinks)) diff --git a/Lib/pickletools.py b/Lib/pickletools.py index c462d26da97ce1..d9c4fb1e63e91a 100644 --- a/Lib/pickletools.py +++ b/Lib/pickletools.py @@ -312,7 +312,7 @@ def read_uint8(f): doc="Eight-byte unsigned integer, little-endian.") -def read_stringnl(f, decode=True, stripquotes=True): +def read_stringnl(f, decode=True, stripquotes=True, *, encoding='latin-1'): r""" >>> import io >>> read_stringnl(io.BytesIO(b"'abcd'\nefg\n")) @@ -356,7 +356,7 @@ def read_stringnl(f, decode=True, stripquotes=True): raise ValueError("no string quotes around %r" % data) if decode: - data = codecs.escape_decode(data)[0].decode("ascii") + data = codecs.escape_decode(data)[0].decode(encoding) return data stringnl = ArgumentDescriptor( @@ -370,7 +370,7 @@ def read_stringnl(f, decode=True, stripquotes=True): """) def read_stringnl_noescape(f): - return read_stringnl(f, stripquotes=False) + return read_stringnl(f, stripquotes=False, encoding='utf-8') stringnl_noescape = ArgumentDescriptor( name='stringnl_noescape', @@ -2509,7 +2509,10 @@ def dis(pickle, out=None, memo=None, indentlevel=4, annotate=0): # make a mild effort to align arguments line += ' ' * (10 - len(opcode.name)) if arg is not None: - line += ' ' + repr(arg) + if opcode.name in ("STRING", "BINSTRING", "SHORT_BINSTRING"): + line += ' ' + ascii(arg) + else: + line += ' ' + repr(arg) if markmsg: line += ' ' + markmsg if annotate: diff --git a/Lib/platform.py b/Lib/platform.py index d6322c9d99d2f3..239e660cd1621d 100644 --- a/Lib/platform.py +++ b/Lib/platform.py @@ -31,6 +31,7 @@ # # # +# 1.0.9 - added invalidate_caches() function to invalidate cached values # 1.0.8 - changed Windows support to read version from kernel32.dll # 1.0.7 - added DEV_NULL # 1.0.6 - added linux_distribution() @@ -109,7 +110,7 @@ """ -__version__ = '1.0.8' +__version__ = '1.0.9' import collections import os @@ -1441,6 +1442,18 @@ def freedesktop_os_release(): return _os_release_cache.copy() +def invalidate_caches(): + """Invalidate the cached results.""" + global _uname_cache + _uname_cache = None + + global _os_release_cache + _os_release_cache = None + + _sys_version_cache.clear() + _platform_cache.clear() + + ### Command line interface if __name__ == '__main__': diff --git a/Lib/posixpath.py b/Lib/posixpath.py index fccca4e066b76f..db72ded8826056 100644 --- a/Lib/posixpath.py 
+++ b/Lib/posixpath.py @@ -412,6 +412,10 @@ def _realpath(filename, strict=False, sep=sep, curdir=curdir, pardir=pardir, # very fast way of spelling list(reversed(...)). rest = filename.split(sep)[::-1] + # Number of unprocessed parts in 'rest'. This can differ from len(rest) + # later, because 'rest' might contain markers for unresolved symlinks. + part_count = len(rest) + # The resolved path, which is absolute throughout this function. # Note: getcwd() returns a normalized and symlink-free path. path = sep if filename.startswith(sep) else getcwd() @@ -426,12 +430,13 @@ def _realpath(filename, strict=False, sep=sep, curdir=curdir, pardir=pardir, # by *maxlinks*, this is used instead of *seen* to detect symlink loops. link_count = 0 - while rest: + while part_count: name = rest.pop() if name is None: # resolved symlink target seen[rest.pop()] = path continue + part_count -= 1 if not name or name == curdir: # current dir continue @@ -444,8 +449,11 @@ def _realpath(filename, strict=False, sep=sep, curdir=curdir, pardir=pardir, else: newpath = path + sep + name try: - st = lstat(newpath) - if not stat.S_ISLNK(st.st_mode): + st_mode = lstat(newpath).st_mode + if not stat.S_ISLNK(st_mode): + if strict and part_count and not stat.S_ISDIR(st_mode): + raise OSError(errno.ENOTDIR, os.strerror(errno.ENOTDIR), + newpath) path = newpath continue elif maxlinks is not None: @@ -487,7 +495,9 @@ def _realpath(filename, strict=False, sep=sep, curdir=curdir, pardir=pardir, rest.append(newpath) rest.append(None) # Push the unresolved symlink target parts onto the stack. - rest.extend(target.split(sep)[::-1]) + target_parts = target.split(sep)[::-1] + rest.extend(target_parts) + part_count += len(target_parts) return path diff --git a/Lib/pydoc_data/topics.py b/Lib/pydoc_data/topics.py index 165d3853f95e29..f73e55d77311ae 100644 --- a/Lib/pydoc_data/topics.py +++ b/Lib/pydoc_data/topics.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Autogenerated by Sphinx on Tue Oct 15 22:34:02 2024 +# Autogenerated by Sphinx on Tue Nov 19 16:52:22 2024 # as part of the release process. topics = {'assert': 'The "assert" statement\n' '**********************\n' @@ -29,13 +29,12 @@ '(command\n' 'line option "-O"). The current code generator emits no code for ' 'an\n' - 'assert statement when optimization is requested at compile time. ' - 'Note\n' - 'that it is unnecessary to include the source code for the ' - 'expression\n' - 'that failed in the error message; it will be displayed as part of ' - 'the\n' - 'stack trace.\n' + '"assert" statement when optimization is requested at compile ' + 'time.\n' + 'Note that it is unnecessary to include the source code for the\n' + 'expression that failed in the error message; it will be displayed ' + 'as\n' + 'part of the stack trace.\n' '\n' 'Assignments to "__debug__" are illegal. The value for the ' 'built-in\n' @@ -713,7 +712,8 @@ 'should either\n' ' return the (computed) attribute value or raise an ' '"AttributeError"\n' - ' exception.\n' + ' exception. The "object" class itself does not provide ' + 'this method.\n' '\n' ' Note that if the attribute is found through the ' 'normal mechanism,\n' @@ -896,7 +896,9 @@ 'parents). In the\n' 'examples below, “the attribute” refers to the attribute ' 'whose name is\n' - 'the key of the property in the owner class’ "__dict__".\n' + 'the key of the property in the owner class’ "__dict__". 
' + 'The "object"\n' + 'class itself does not implement any of these protocols.\n' '\n' 'object.__get__(self, instance, owner=None)\n' '\n' @@ -1569,7 +1571,9 @@ ' Called when the instance is “called” as a function; if ' 'this method\n' ' is defined, "x(arg1, arg2, ...)" roughly translates to\n' - ' "type(x).__call__(x, arg1, ...)".\n', + ' "type(x).__call__(x, arg1, ...)". The "object" class ' + 'itself does\n' + ' not provide this method.\n', 'calls': 'Calls\n' '*****\n' '\n' @@ -1754,6 +1758,9 @@ ' Function definitions. When the code block executes a "return"\n' ' statement, this specifies the return value of the function ' 'call.\n' + ' If execution reaches the end of the code block without executing ' + 'a\n' + ' "return" statement, the return value is "None".\n' '\n' 'a built-in function or method:\n' ' The result is up to the interpreter; see Built-in Functions for ' @@ -2802,18 +2809,15 @@ ' enter = type(manager).__enter__\n' ' exit = type(manager).__exit__\n' ' value = enter(manager)\n' - ' hit_except = False\n' '\n' ' try:\n' ' TARGET = value\n' ' SUITE\n' ' except:\n' - ' hit_except = True\n' ' if not exit(manager, *sys.exc_info()):\n' ' raise\n' - ' finally:\n' - ' if not hit_except:\n' - ' exit(manager, None, None, None)\n' + ' else:\n' + ' exit(manager, None, None, None)\n' '\n' 'With more than one item, the context managers are processed as ' 'if\n' @@ -4495,6 +4499,9 @@ '\n' 'For more information on context managers, see Context ' 'Manager Types.\n' + 'The "object" class itself does not provide the context ' + 'manager\n' + 'methods.\n' '\n' 'object.__enter__(self)\n' '\n' @@ -4764,17 +4771,20 @@ '\n' ' This is typically used for debugging, so it is important ' 'that the\n' - ' representation is information-rich and unambiguous.\n' + ' representation is information-rich and unambiguous. A ' + 'default\n' + ' implementation is provided by the "object" class ' + 'itself.\n' '\n' 'object.__str__(self)\n' '\n' - ' Called by "str(object)" and the built-in functions ' - '"format()" and\n' - ' "print()" to compute the “informal” or nicely printable ' - 'string\n' - ' representation of an object. The return value must be a ' - 'string\n' - ' object.\n' + ' Called by "str(object)", the default "__format__()" ' + 'implementation,\n' + ' and the built-in function "print()", to compute the ' + '“informal” or\n' + ' nicely printable string representation of an object. ' + 'The return\n' + ' value must be a str object.\n' '\n' ' This method differs from "object.__repr__()" in that ' 'there is no\n' @@ -4790,7 +4800,9 @@ '\n' ' Called by bytes to compute a byte-string representation ' 'of an\n' - ' object. This should return a "bytes" object.\n' + ' object. This should return a "bytes" object. The ' + '"object" class\n' + ' itself does not provide this method.\n' '\n' 'object.__format__(self, format_spec)\n' '\n' @@ -4818,6 +4830,11 @@ '\n' ' The return value must be a string object.\n' '\n' + ' The default implementation by the "object" class should ' + 'be given an\n' + ' empty *format_spec* string. 
It delegates to ' + '"__str__()".\n' + '\n' ' Changed in version 3.4: The __format__ method of ' '"object" itself\n' ' raises a "TypeError" if passed any non-empty string.\n' @@ -4875,6 +4892,16 @@ ' ordering operations from a single root operation, see\n' ' "functools.total_ordering()".\n' '\n' + ' By default, the "object" class provides implementations ' + 'consistent\n' + ' with Value comparisons: equality compares according to ' + 'object\n' + ' identity, and order comparisons raise "TypeError". Each ' + 'default\n' + ' method may generate these results directly, but may also ' + 'return\n' + ' "NotImplemented".\n' + '\n' ' See the paragraph on "__hash__()" for some important ' 'notes on\n' ' creating *hashable* objects which support custom ' @@ -4961,12 +4988,13 @@ '\n' ' User-defined classes have "__eq__()" and "__hash__()" ' 'methods by\n' - ' default; with them, all objects compare unequal (except ' - 'with\n' - ' themselves) and "x.__hash__()" returns an appropriate ' - 'value such\n' - ' that "x == y" implies both that "x is y" and "hash(x) == ' - 'hash(y)".\n' + ' default (inherited from the "object" class); with them, ' + 'all objects\n' + ' compare unequal (except with themselves) and ' + '"x.__hash__()" returns\n' + ' an appropriate value such that "x == y" implies both ' + 'that "x is y"\n' + ' and "hash(x) == hash(y)".\n' '\n' ' A class that overrides "__eq__()" and does not define ' '"__hash__()"\n' @@ -5037,9 +5065,9 @@ 'the object is\n' ' considered true if its result is nonzero. If a class ' 'defines\n' - ' neither "__len__()" nor "__bool__()", all its instances ' - 'are\n' - ' considered true.\n', + ' neither "__len__()" nor "__bool__()" (which is true of ' + 'the "object"\n' + ' class itself), all its instances are considered true.\n', 'debugger': '"pdb" — The Python Debugger\n' '***************************\n' '\n' @@ -7038,10 +7066,12 @@ 'printing fields |\n' '| | in the form ‘+000000120’. This alignment ' 'option is only |\n' - '| | valid for numeric types. It becomes the ' - 'default for |\n' - '| | numbers when ‘0’ immediately precedes the ' - 'field width. |\n' + '| | valid for numeric types, excluding "complex". ' + 'It becomes |\n' + '| | the default for numbers when ‘0’ immediately ' + 'precedes the |\n' + '| | field ' + 'width. |\n' '+-----------+------------------------------------------------------------+\n' '| "\'^\'" | Forces the field to be centered within the ' 'available |\n' @@ -7148,9 +7178,9 @@ 'field by a\n' 'zero ("\'0\'") character enables sign-aware zero-padding ' 'for numeric\n' - 'types. This is equivalent to a *fill* character of "\'0\'" ' - 'with an\n' - '*alignment* type of "\'=\'".\n' + 'types, excluding "complex". This is equivalent to a *fill* ' + 'character\n' + 'of "\'0\'" with an *alignment* type of "\'=\'".\n' '\n' 'Changed in version 3.10: Preceding the *width* field by ' '"\'0\'" no\n' @@ -7416,6 +7446,32 @@ ' ' '+-----------+------------------------------------------------------------+\n' '\n' + 'The result should be correctly rounded to a given precision ' + '"p" of\n' + 'digits after the decimal point. The rounding mode for ' + '"float" matches\n' + 'that of the "round()" builtin. For "Decimal", the rounding ' + 'mode of\n' + 'the current context will be used.\n' + '\n' + 'The available presentation types for "complex" are the same ' + 'as those\n' + 'for "float" ("\'%\'" is not allowed). 
Both the real and ' + 'imaginary\n' + 'components of a complex number are formatted as ' + 'floating-point\n' + 'numbers, according to the specified presentation type. ' + 'They are\n' + 'separated by the mandatory sign of the imaginary part, the ' + 'latter\n' + 'being terminated by a "j" suffix. If the presentation type ' + 'is\n' + 'missing, the result will match the output of "str()" ' + '(complex numbers\n' + 'with a non-zero real part are also surrounded by ' + 'parentheses),\n' + 'possibly altered by other format modifiers.\n' + '\n' '\n' 'Format examples\n' '===============\n' @@ -7800,33 +7856,17 @@ '\n' ' global_stmt ::= "global" identifier ("," identifier)*\n' '\n' - 'The "global" statement is a declaration which holds for the ' - 'entire\n' - 'current code block. It means that the listed identifiers are to ' - 'be\n' - 'interpreted as globals. It would be impossible to assign to a ' - 'global\n' - 'variable without "global", although free variables may refer to\n' - 'globals without being declared global.\n' - '\n' - 'Names listed in a "global" statement must not be used in the same ' - 'code\n' - 'block textually preceding that "global" statement.\n' - '\n' - 'Names listed in a "global" statement must not be defined as ' - 'formal\n' - 'parameters, or as targets in "with" statements or "except" ' - 'clauses, or\n' - 'in a "for" target list, "class" definition, function definition,\n' - '"import" statement, or *variable annotations*.\n' + 'The "global" statement causes the listed identifiers to be ' + 'interpreted\n' + 'as globals. It would be impossible to assign to a global variable\n' + 'without "global", although free variables may refer to globals ' + 'without\n' + 'being declared global.\n' '\n' - '**CPython implementation detail:** The current implementation does ' - 'not\n' - 'enforce some of these restrictions, but programs should not abuse ' - 'this\n' - 'freedom, as future implementations may enforce them or silently ' - 'change\n' - 'the meaning of the program.\n' + 'The "global" statement applies to the entire scope of a function ' + 'or\n' + 'class body. A "SyntaxError" is raised if a variable is used or\n' + 'assigned to prior to its global declaration in the scope.\n' '\n' '**Programmer’s note:** "global" is a directive to the parser. It\n' 'applies only to code parsed at the same time as the "global"\n' @@ -8896,8 +8936,8 @@ 'scope,\n' 'or if there is no nonlocal scope, a "SyntaxError" is raised.\n' '\n' - 'The nonlocal statement applies to the entire scope of a function ' - 'or\n' + 'The "nonlocal" statement applies to the entire scope of a ' + 'function or\n' 'class body. A "SyntaxError" is raised if a variable is used or\n' 'assigned to prior to its nonlocal declaration in the scope.\n' '\n' @@ -8996,17 +9036,21 @@ '"divmod()",\n' ' "pow()", "**", "<<", ">>", "&", "^", "|") with reflected ' '(swapped)\n' - ' operands. These functions are only called if the left ' - 'operand does\n' - ' not support the corresponding operation [3] and the ' + ' operands. These functions are only called if the ' 'operands are of\n' - ' different types. 
[4] For instance, to evaluate the ' - 'expression "x -\n' - ' y", where *y* is an instance of a class that has an ' - '"__rsub__()"\n' - ' method, "type(y).__rsub__(y, x)" is called if ' - '"type(x).__sub__(x,\n' - ' y)" returns "NotImplemented".\n' + ' different types, when the left operand does not support ' + 'the\n' + ' corresponding operation [3], or the right operand’s ' + 'class is\n' + ' derived from the left operand’s class. [4] For instance, ' + 'to\n' + ' evaluate the expression "x - y", where *y* is an ' + 'instance of a\n' + ' class that has an "__rsub__()" method, ' + '"type(y).__rsub__(y, x)" is\n' + ' called if "type(x).__sub__(x, y)" returns ' + '"NotImplemented" or\n' + ' "type(y)" is a subclass of "type(x)". [5]\n' '\n' ' Note that ternary "pow()" will not try calling ' '"__rpow__()" (the\n' @@ -9651,56 +9695,58 @@ '\n' 'The following methods can be defined to implement ' 'container objects.\n' - 'Containers usually are *sequences* (such as "lists" or ' - '"tuples") or\n' - '*mappings* (like "dictionaries"), but can represent other ' - 'containers\n' - 'as well. The first set of methods is used either to ' - 'emulate a\n' - 'sequence or to emulate a mapping; the difference is that ' - 'for a\n' - 'sequence, the allowable keys should be the integers *k* ' - 'for which "0\n' - '<= k < N" where *N* is the length of the sequence, or ' - '"slice" objects,\n' - 'which define a range of items. It is also recommended ' - 'that mappings\n' - 'provide the methods "keys()", "values()", "items()", ' - '"get()",\n' - '"clear()", "setdefault()", "pop()", "popitem()", "copy()", ' + 'None of them are provided by the "object" class itself. ' + 'Containers\n' + 'usually are *sequences* (such as "lists" or "tuples") or ' + '*mappings*\n' + '(like *dictionaries*), but can represent other containers ' + 'as well.\n' + 'The first set of methods is used either to emulate a ' + 'sequence or to\n' + 'emulate a mapping; the difference is that for a sequence, ' + 'the\n' + 'allowable keys should be the integers *k* for which "0 <= ' + 'k < N" where\n' + '*N* is the length of the sequence, or "slice" objects, ' + 'which define a\n' + 'range of items. It is also recommended that mappings ' + 'provide the\n' + 'methods "keys()", "values()", "items()", "get()", ' + '"clear()",\n' + '"setdefault()", "pop()", "popitem()", "copy()", and ' + '"update()"\n' + 'behaving similar to those for Python’s standard ' + '"dictionary" objects.\n' + 'The "collections.abc" module provides a "MutableMapping" ' + '*abstract\n' + 'base class* to help create those methods from a base set ' + 'of\n' + '"__getitem__()", "__setitem__()", "__delitem__()", and ' + '"keys()".\n' + 'Mutable sequences should provide methods "append()", ' + '"count()",\n' + '"index()", "extend()", "insert()", "pop()", "remove()", ' + '"reverse()"\n' + 'and "sort()", like Python standard "list" objects. ' + 'Finally, sequence\n' + 'types should implement addition (meaning concatenation) ' 'and\n' - '"update()" behaving similar to those for Python’s ' - 'standard\n' - '"dictionary" objects. The "collections.abc" module ' - 'provides a\n' - '"MutableMapping" *abstract base class* to help create ' - 'those methods\n' - 'from a base set of "__getitem__()", "__setitem__()", ' - '"__delitem__()",\n' - 'and "keys()". 
Mutable sequences should provide methods ' - '"append()",\n' - '"count()", "index()", "extend()", "insert()", "pop()", ' - '"remove()",\n' - '"reverse()" and "sort()", like Python standard "list" ' - 'objects.\n' - 'Finally, sequence types should implement addition ' - '(meaning\n' - 'concatenation) and multiplication (meaning repetition) by ' - 'defining the\n' - 'methods "__add__()", "__radd__()", "__iadd__()", ' - '"__mul__()",\n' - '"__rmul__()" and "__imul__()" described below; they should ' - 'not define\n' - 'other numerical operators. It is recommended that both ' - 'mappings and\n' - 'sequences implement the "__contains__()" method to allow ' - 'efficient use\n' - 'of the "in" operator; for mappings, "in" should search the ' - 'mapping’s\n' - 'keys; for sequences, it should search through the values. ' - 'It is\n' - 'further recommended that both mappings and sequences ' - 'implement the\n' + 'multiplication (meaning repetition) by defining the ' + 'methods\n' + '"__add__()", "__radd__()", "__iadd__()", "__mul__()", ' + '"__rmul__()" and\n' + '"__imul__()" described below; they should not define other ' + 'numerical\n' + 'operators. It is recommended that both mappings and ' + 'sequences\n' + 'implement the "__contains__()" method to allow efficient ' + 'use of the\n' + '"in" operator; for mappings, "in" should search the ' + 'mapping’s keys;\n' + 'for sequences, it should search through the values. It is ' + 'further\n' + 'recommended that both mappings and sequences implement ' + 'the\n' '"__iter__()" method to allow efficient iteration through ' 'the\n' 'container; for mappings, "__iter__()" should iterate ' @@ -10240,17 +10286,19 @@ '\n' ' This is typically used for debugging, so it is important ' 'that the\n' - ' representation is information-rich and unambiguous.\n' + ' representation is information-rich and unambiguous. A ' + 'default\n' + ' implementation is provided by the "object" class itself.\n' '\n' 'object.__str__(self)\n' '\n' - ' Called by "str(object)" and the built-in functions ' - '"format()" and\n' - ' "print()" to compute the “informal” or nicely printable ' - 'string\n' - ' representation of an object. The return value must be a ' - 'string\n' - ' object.\n' + ' Called by "str(object)", the default "__format__()" ' + 'implementation,\n' + ' and the built-in function "print()", to compute the ' + '“informal” or\n' + ' nicely printable string representation of an object. The ' + 'return\n' + ' value must be a str object.\n' '\n' ' This method differs from "object.__repr__()" in that ' 'there is no\n' @@ -10266,7 +10314,9 @@ '\n' ' Called by bytes to compute a byte-string representation ' 'of an\n' - ' object. This should return a "bytes" object.\n' + ' object. This should return a "bytes" object. The "object" ' + 'class\n' + ' itself does not provide this method.\n' '\n' 'object.__format__(self, format_spec)\n' '\n' @@ -10294,6 +10344,10 @@ '\n' ' The return value must be a string object.\n' '\n' + ' The default implementation by the "object" class should ' + 'be given an\n' + ' empty *format_spec* string. 
It delegates to "__str__()".\n' + '\n' ' Changed in version 3.4: The __format__ method of "object" ' 'itself\n' ' raises a "TypeError" if passed any non-empty string.\n' @@ -10351,6 +10405,16 @@ ' ordering operations from a single root operation, see\n' ' "functools.total_ordering()".\n' '\n' + ' By default, the "object" class provides implementations ' + 'consistent\n' + ' with Value comparisons: equality compares according to ' + 'object\n' + ' identity, and order comparisons raise "TypeError". Each ' + 'default\n' + ' method may generate these results directly, but may also ' + 'return\n' + ' "NotImplemented".\n' + '\n' ' See the paragraph on "__hash__()" for some important ' 'notes on\n' ' creating *hashable* objects which support custom ' @@ -10436,12 +10500,13 @@ '\n' ' User-defined classes have "__eq__()" and "__hash__()" ' 'methods by\n' - ' default; with them, all objects compare unequal (except ' - 'with\n' - ' themselves) and "x.__hash__()" returns an appropriate ' - 'value such\n' - ' that "x == y" implies both that "x is y" and "hash(x) == ' - 'hash(y)".\n' + ' default (inherited from the "object" class); with them, ' + 'all objects\n' + ' compare unequal (except with themselves) and ' + '"x.__hash__()" returns\n' + ' an appropriate value such that "x == y" implies both that ' + '"x is y"\n' + ' and "hash(x) == hash(y)".\n' '\n' ' A class that overrides "__eq__()" and does not define ' '"__hash__()"\n' @@ -10510,9 +10575,9 @@ 'object is\n' ' considered true if its result is nonzero. If a class ' 'defines\n' - ' neither "__len__()" nor "__bool__()", all its instances ' - 'are\n' - ' considered true.\n' + ' neither "__len__()" nor "__bool__()" (which is true of ' + 'the "object"\n' + ' class itself), all its instances are considered true.\n' '\n' '\n' 'Customizing attribute access\n' @@ -10536,7 +10601,8 @@ 'either\n' ' return the (computed) attribute value or raise an ' '"AttributeError"\n' - ' exception.\n' + ' exception. The "object" class itself does not provide ' + 'this method.\n' '\n' ' Note that if the attribute is found through the normal ' 'mechanism,\n' @@ -10716,7 +10782,9 @@ 'parents). In the\n' 'examples below, “the attribute” refers to the attribute ' 'whose name is\n' - 'the key of the property in the owner class’ "__dict__".\n' + 'the key of the property in the owner class’ "__dict__". The ' + '"object"\n' + 'class itself does not implement any of these protocols.\n' '\n' 'object.__get__(self, instance, owner=None)\n' '\n' @@ -11599,7 +11667,9 @@ ' Called when the instance is “called” as a function; if ' 'this method\n' ' is defined, "x(arg1, arg2, ...)" roughly translates to\n' - ' "type(x).__call__(x, arg1, ...)".\n' + ' "type(x).__call__(x, arg1, ...)". The "object" class ' + 'itself does\n' + ' not provide this method.\n' '\n' '\n' 'Emulating container types\n' @@ -11607,54 +11677,54 @@ '\n' 'The following methods can be defined to implement container ' 'objects.\n' - 'Containers usually are *sequences* (such as "lists" or ' - '"tuples") or\n' - '*mappings* (like "dictionaries"), but can represent other ' - 'containers\n' - 'as well. The first set of methods is used either to emulate ' - 'a\n' - 'sequence or to emulate a mapping; the difference is that for ' - 'a\n' - 'sequence, the allowable keys should be the integers *k* for ' - 'which "0\n' - '<= k < N" where *N* is the length of the sequence, or ' - '"slice" objects,\n' - 'which define a range of items. 
It is also recommended that ' - 'mappings\n' - 'provide the methods "keys()", "values()", "items()", ' - '"get()",\n' - '"clear()", "setdefault()", "pop()", "popitem()", "copy()", ' - 'and\n' - '"update()" behaving similar to those for Python’s standard\n' - '"dictionary" objects. The "collections.abc" module provides ' - 'a\n' - '"MutableMapping" *abstract base class* to help create those ' - 'methods\n' - 'from a base set of "__getitem__()", "__setitem__()", ' - '"__delitem__()",\n' - 'and "keys()". Mutable sequences should provide methods ' - '"append()",\n' - '"count()", "index()", "extend()", "insert()", "pop()", ' - '"remove()",\n' - '"reverse()" and "sort()", like Python standard "list" ' + 'None of them are provided by the "object" class itself. ' + 'Containers\n' + 'usually are *sequences* (such as "lists" or "tuples") or ' + '*mappings*\n' + '(like *dictionaries*), but can represent other containers as ' + 'well.\n' + 'The first set of methods is used either to emulate a ' + 'sequence or to\n' + 'emulate a mapping; the difference is that for a sequence, ' + 'the\n' + 'allowable keys should be the integers *k* for which "0 <= k ' + '< N" where\n' + '*N* is the length of the sequence, or "slice" objects, which ' + 'define a\n' + 'range of items. It is also recommended that mappings ' + 'provide the\n' + 'methods "keys()", "values()", "items()", "get()", ' + '"clear()",\n' + '"setdefault()", "pop()", "popitem()", "copy()", and ' + '"update()"\n' + 'behaving similar to those for Python’s standard "dictionary" ' 'objects.\n' - 'Finally, sequence types should implement addition (meaning\n' - 'concatenation) and multiplication (meaning repetition) by ' - 'defining the\n' - 'methods "__add__()", "__radd__()", "__iadd__()", ' - '"__mul__()",\n' - '"__rmul__()" and "__imul__()" described below; they should ' - 'not define\n' - 'other numerical operators. It is recommended that both ' - 'mappings and\n' - 'sequences implement the "__contains__()" method to allow ' - 'efficient use\n' - 'of the "in" operator; for mappings, "in" should search the ' - 'mapping’s\n' - 'keys; for sequences, it should search through the values. ' - 'It is\n' - 'further recommended that both mappings and sequences ' - 'implement the\n' + 'The "collections.abc" module provides a "MutableMapping" ' + '*abstract\n' + 'base class* to help create those methods from a base set of\n' + '"__getitem__()", "__setitem__()", "__delitem__()", and ' + '"keys()".\n' + 'Mutable sequences should provide methods "append()", ' + '"count()",\n' + '"index()", "extend()", "insert()", "pop()", "remove()", ' + '"reverse()"\n' + 'and "sort()", like Python standard "list" objects. Finally, ' + 'sequence\n' + 'types should implement addition (meaning concatenation) and\n' + 'multiplication (meaning repetition) by defining the methods\n' + '"__add__()", "__radd__()", "__iadd__()", "__mul__()", ' + '"__rmul__()" and\n' + '"__imul__()" described below; they should not define other ' + 'numerical\n' + 'operators. It is recommended that both mappings and ' + 'sequences\n' + 'implement the "__contains__()" method to allow efficient use ' + 'of the\n' + '"in" operator; for mappings, "in" should search the ' + 'mapping’s keys;\n' + 'for sequences, it should search through the values. 
It is ' + 'further\n' + 'recommended that both mappings and sequences implement the\n' '"__iter__()" method to allow efficient iteration through ' 'the\n' 'container; for mappings, "__iter__()" should iterate through ' @@ -11913,17 +11983,21 @@ '"divmod()",\n' ' "pow()", "**", "<<", ">>", "&", "^", "|") with reflected ' '(swapped)\n' - ' operands. These functions are only called if the left ' - 'operand does\n' - ' not support the corresponding operation [3] and the ' + ' operands. These functions are only called if the ' 'operands are of\n' - ' different types. [4] For instance, to evaluate the ' - 'expression "x -\n' - ' y", where *y* is an instance of a class that has an ' - '"__rsub__()"\n' - ' method, "type(y).__rsub__(y, x)" is called if ' - '"type(x).__sub__(x,\n' - ' y)" returns "NotImplemented".\n' + ' different types, when the left operand does not support ' + 'the\n' + ' corresponding operation [3], or the right operand’s class ' + 'is\n' + ' derived from the left operand’s class. [4] For instance, ' + 'to\n' + ' evaluate the expression "x - y", where *y* is an instance ' + 'of a\n' + ' class that has an "__rsub__()" method, ' + '"type(y).__rsub__(y, x)" is\n' + ' called if "type(x).__sub__(x, y)" returns ' + '"NotImplemented" or\n' + ' "type(y)" is a subclass of "type(x)". [5]\n' '\n' ' Note that ternary "pow()" will not try calling ' '"__rpow__()" (the\n' @@ -12066,6 +12140,9 @@ '\n' 'For more information on context managers, see Context ' 'Manager Types.\n' + 'The "object" class itself does not provide the context ' + 'manager\n' + 'methods.\n' '\n' 'object.__enter__(self)\n' '\n' @@ -14794,13 +14871,13 @@ ' modules created dynamically using the "types.ModuleType"\n' ' constructor. Previously the attribute was optional.\n' '\n' - ' Deprecated since version 3.12, removed in version 3.14: Setting\n' - ' "__loader__" on a module while failing to set "__spec__.loader" ' - 'is\n' - ' deprecated. In Python 3.14, "__loader__" will cease to be set ' + ' Deprecated since version 3.12, will be removed in version 3.16:\n' + ' Setting "__loader__" on a module while failing to set\n' + ' "__spec__.loader" is deprecated. In Python 3.16, "__loader__" ' + 'will\n' + ' cease to be set or taken into consideration by the import system ' 'or\n' - ' taken into consideration by the import system or the standard\n' - ' library.\n' + ' the standard library.\n' '\n' 'module.__path__\n' '\n' @@ -15311,7 +15388,7 @@ '| | version ' '3.12: This attribute of code objects is |\n' '| | deprecated, ' - 'and may be removed in Python 3.14. |\n' + 'and may be removed in Python 3.15. |\n' '+----------------------------------------------------+----------------------------------------------------+\n' '| codeobject.co_stacksize | The required ' 'stack size of the code object |\n' @@ -15345,10 +15422,9 @@ '\n' 'Other bits in "co_flags" are reserved for internal use.\n' '\n' - 'If a code object represents a function, the first item in ' - '"co_consts"\n' - 'is the documentation string of the function, or "None" if ' - 'undefined.\n' + 'If a code object represents a function and has a docstring, the ' + 'first\n' + 'item in "co_consts" is the docstring of the function.\n' '\n' '\n' 'Methods on code objects\n' @@ -15996,10 +16072,10 @@ '"keys()" method\n' ' (in which case "__getitem__()" is called with every ' 'key returned\n' - ' from the method). or an iterable of key/value pairs ' - '(as tuples\n' - ' or other iterables of length two). 
If keyword ' - 'arguments are\n' + ' from the method) or an iterable of key/value pairs (as ' + 'tuples or\n' + ' other iterables of length two). If keyword arguments ' + 'are\n' ' specified, the dictionary is then updated with those ' 'key/value\n' ' pairs: "d.update(red=1, blue=2)".\n' @@ -17310,18 +17386,15 @@ ' enter = type(manager).__enter__\n' ' exit = type(manager).__exit__\n' ' value = enter(manager)\n' - ' hit_except = False\n' '\n' ' try:\n' ' TARGET = value\n' ' SUITE\n' ' except:\n' - ' hit_except = True\n' ' if not exit(manager, *sys.exc_info()):\n' ' raise\n' - ' finally:\n' - ' if not hit_except:\n' - ' exit(manager, None, None, None)\n' + ' else:\n' + ' exit(manager, None, None, None)\n' '\n' 'With more than one item, the context managers are processed as if\n' 'multiple "with" statements were nested:\n' @@ -17362,7 +17435,8 @@ '\n' 'A "yield" statement is semantically equivalent to a yield ' 'expression.\n' - 'The yield statement can be used to omit the parentheses that would\n' + 'The "yield" statement can be used to omit the parentheses that ' + 'would\n' 'otherwise be required in the equivalent yield expression ' 'statement.\n' 'For example, the yield statements\n' @@ -17378,10 +17452,9 @@ 'Yield expressions and statements are only used when defining a\n' '*generator* function, and are only used in the body of the ' 'generator\n' - 'function. Using yield in a function definition is sufficient to ' - 'cause\n' - 'that definition to create a generator function instead of a normal\n' - 'function.\n' + 'function. Using "yield" in a function definition is sufficient to\n' + 'cause that definition to create a generator function instead of a\n' + 'normal function.\n' '\n' 'For full details of "yield" semantics, refer to the Yield ' 'expressions\n' diff --git a/Lib/re/_compiler.py b/Lib/re/_compiler.py index 29109f8812ee7b..20dd561d1c1520 100644 --- a/Lib/re/_compiler.py +++ b/Lib/re/_compiler.py @@ -255,11 +255,11 @@ def _optimize_charset(charset, iscased=None, fixup=None, fixes=None): while True: try: if op is LITERAL: - if fixup: - lo = fixup(av) - charmap[lo] = 1 - if fixes and lo in fixes: - for k in fixes[lo]: + if fixup: # IGNORECASE and not LOCALE + av = fixup(av) + charmap[av] = 1 + if fixes and av in fixes: + for k in fixes[av]: charmap[k] = 1 if not hascased and iscased(av): hascased = True @@ -267,7 +267,7 @@ def _optimize_charset(charset, iscased=None, fixup=None, fixes=None): charmap[av] = 1 elif op is RANGE: r = range(av[0], av[1]+1) - if fixup: + if fixup: # IGNORECASE and not LOCALE if fixes: for i in map(fixup, r): charmap[i] = 1 @@ -298,8 +298,7 @@ def _optimize_charset(charset, iscased=None, fixup=None, fixes=None): # Character set contains non-BMP character codes. # For range, all BMP characters in the range are already # proceeded. - if fixup: - hascased = True + if fixup: # IGNORECASE and not LOCALE # For now, IN_UNI_IGNORE+LITERAL and # IN_UNI_IGNORE+RANGE_UNI_IGNORE work for all non-BMP # characters, because two characters (at least one of @@ -310,7 +309,13 @@ def _optimize_charset(charset, iscased=None, fixup=None, fixes=None): # Also, both c.lower() and c.lower().upper() are single # characters for every non-BMP character. 
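# --- Illustrative aside (editor's sketch, not part of the patch) ---
# The branch below applies full Unicode case folding only when `fixes`
# is set, i.e. when re.ASCII is not in effect.  Observable behaviour for
# a cased non-BMP character, as I understand the intent of this hunk:
import re
# U+10400 (DESERET CAPITAL LETTER LONG I) lowercases to U+10428.
assert re.fullmatch('[\U00010400]', '\U00010428', re.IGNORECASE)                     # Unicode folding
assert re.fullmatch('[\U00010400]', '\U00010428', re.IGNORECASE | re.ASCII) is None  # ASCII-only folding
# --------------------------------------------------------------------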
if op is RANGE: - op = RANGE_UNI_IGNORE + if fixes: # not ASCII + op = RANGE_UNI_IGNORE + hascased = True + else: + assert op is LITERAL + if not hascased and iscased(av): + hascased = True tail.append((op, av)) break diff --git a/Lib/site.py b/Lib/site.py index 07a6361fad44e5..54f07ab5b4e6d5 100644 --- a/Lib/site.py +++ b/Lib/site.py @@ -442,8 +442,9 @@ def setcopyright(): """Set 'copyright' and 'credits' in builtins""" builtins.copyright = _sitebuiltins._Printer("copyright", sys.copyright) builtins.credits = _sitebuiltins._Printer("credits", """\ - Thanks to CWI, CNRI, BeOpen.com, Zope Corporation and a cast of thousands - for supporting Python development. See www.python.org for more information.""") + Thanks to CWI, CNRI, BeOpen, Zope Corporation, the Python Software + Foundation, and a cast of thousands for supporting Python + development. See www.python.org for more information.""") files, dirs = [], [] # Not all modules are required to have a __file__ attribute. See # PEP 420 for more details. diff --git a/Lib/sysconfig/__init__.py b/Lib/sysconfig/__init__.py index 43f9276799b848..67a071963d8c7d 100644 --- a/Lib/sysconfig/__init__.py +++ b/Lib/sysconfig/__init__.py @@ -173,9 +173,7 @@ def joinuser(*args): _PY_VERSION = sys.version.split()[0] _PY_VERSION_SHORT = f'{sys.version_info[0]}.{sys.version_info[1]}' _PY_VERSION_SHORT_NO_DOT = f'{sys.version_info[0]}{sys.version_info[1]}' -_PREFIX = os.path.normpath(sys.prefix) _BASE_PREFIX = os.path.normpath(sys.base_prefix) -_EXEC_PREFIX = os.path.normpath(sys.exec_prefix) _BASE_EXEC_PREFIX = os.path.normpath(sys.base_exec_prefix) # Mutex guarding initialization of _CONFIG_VARS. _CONFIG_VARS_LOCK = threading.RLock() @@ -355,7 +353,8 @@ def _init_posix(vars): else: _temp = __import__(name, globals(), locals(), ['build_time_vars'], 0) build_time_vars = _temp.build_time_vars - vars.update(build_time_vars) + # GH-126920: Make sure we don't overwrite any of the keys already set + vars.update(build_time_vars | vars) def _init_non_posix(vars): """Initialize the module as appropriate for NT""" @@ -466,8 +465,10 @@ def _init_config_vars(): # Normalized versions of prefix and exec_prefix are handy to have; # in fact, these are the standard versions used most places in the # Distutils. - _CONFIG_VARS['prefix'] = _PREFIX - _CONFIG_VARS['exec_prefix'] = _EXEC_PREFIX + _PREFIX = os.path.normpath(sys.prefix) + _EXEC_PREFIX = os.path.normpath(sys.exec_prefix) + _CONFIG_VARS['prefix'] = _PREFIX # FIXME: This gets overwriten by _init_posix. + _CONFIG_VARS['exec_prefix'] = _EXEC_PREFIX # FIXME: This gets overwriten by _init_posix. _CONFIG_VARS['py_version'] = _PY_VERSION _CONFIG_VARS['py_version_short'] = _PY_VERSION_SHORT _CONFIG_VARS['py_version_nodot'] = _PY_VERSION_SHORT_NO_DOT @@ -540,6 +541,7 @@ def get_config_vars(*args): With arguments, return a list of values that result from looking up each argument in the configuration variable dictionary. """ + global _CONFIG_VARS_INITIALIZED # Avoid claiming the lock once initialization is complete. if not _CONFIG_VARS_INITIALIZED: @@ -550,6 +552,15 @@ def get_config_vars(*args): # don't re-enter init_config_vars(). if _CONFIG_VARS is None: _init_config_vars() + else: + # If the site module initialization happened after _CONFIG_VARS was + # initialized, a virtual environment might have been activated, resulting in + # variables like sys.prefix changing their value, so we need to re-init the + # config vars (see GH-126789). 
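# --- Illustrative aside (editor's sketch, not part of the patch) ---
# The check below compares the cached 'base' config var against the live
# sys.prefix, so a virtual environment activated after the cache was first
# populated should no longer yield stale prefixes.  Roughly:
import os
import sys
import sysconfig
cached_base = sysconfig.get_config_vars()['base']   # populates the cache
assert cached_base == os.path.normpath(sys.prefix)
# If site initialization later replaces sys.prefix (venv activation), the next
# get_config_vars() call notices the mismatch and rebuilds the cache.
# --------------------------------------------------------------------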
+ if _CONFIG_VARS['base'] != os.path.normpath(sys.prefix): + with _CONFIG_VARS_LOCK: + _CONFIG_VARS_INITIALIZED = False + _init_config_vars() if args: vals = [] diff --git a/Lib/test/_test_multiprocessing.py b/Lib/test/_test_multiprocessing.py index 38ddb62c693fc0..80b08b8ac66899 100644 --- a/Lib/test/_test_multiprocessing.py +++ b/Lib/test/_test_multiprocessing.py @@ -12,6 +12,7 @@ import sys import os import gc +import importlib import errno import functools import signal @@ -20,8 +21,10 @@ import socket import random import logging +import shutil import subprocess import struct +import tempfile import operator import pickle import weakref @@ -255,6 +258,9 @@ def __call__(self, *args, **kwds): class BaseTestCase(object): ALLOWED_TYPES = ('processes', 'manager', 'threads') + # If not empty, limit which start method suites run this class. + START_METHODS: set[str] = set() + start_method = None # set by install_tests_in_module_dict() def assertTimingAlmostEqual(self, a, b): if CHECK_TIMINGS: @@ -840,8 +846,8 @@ def test_error_on_stdio_flush_2(self): finally: setattr(sys, stream_name, old_stream) - @classmethod - def _sleep_and_set_event(self, evt, delay=0.0): + @staticmethod + def _sleep_and_set_event(evt, delay=0.0): time.sleep(delay) evt.set() @@ -892,6 +898,56 @@ def test_forkserver_sigkill(self): if os.name != 'nt': self.check_forkserver_death(signal.SIGKILL) + def test_forkserver_auth_is_enabled(self): + if self.TYPE == "threads": + self.skipTest(f"test not appropriate for {self.TYPE}") + if multiprocessing.get_start_method() != "forkserver": + self.skipTest("forkserver start method specific") + + forkserver = multiprocessing.forkserver._forkserver + forkserver.ensure_running() + self.assertTrue(forkserver._forkserver_pid) + authkey = forkserver._forkserver_authkey + self.assertTrue(authkey) + self.assertGreater(len(authkey), 15) + addr = forkserver._forkserver_address + self.assertTrue(addr) + + # Demonstrate that a raw auth handshake, as Client performs, does not + # raise an error. + client = multiprocessing.connection.Client(addr, authkey=authkey) + client.close() + + # That worked, now launch a quick process. + proc = self.Process(target=sys.exit) + proc.start() + proc.join() + self.assertEqual(proc.exitcode, 0) + + def test_forkserver_without_auth_fails(self): + if self.TYPE == "threads": + self.skipTest(f"test not appropriate for {self.TYPE}") + if multiprocessing.get_start_method() != "forkserver": + self.skipTest("forkserver start method specific") + + forkserver = multiprocessing.forkserver._forkserver + forkserver.ensure_running() + self.assertTrue(forkserver._forkserver_pid) + authkey_len = len(forkserver._forkserver_authkey) + with unittest.mock.patch.object( + forkserver, '_forkserver_authkey', None): + # With an incorrect authkey we should get an auth rejection + # rather than the above protocol error. + forkserver._forkserver_authkey = b'T' * authkey_len + proc = self.Process(target=sys.exit) + with self.assertRaises(multiprocessing.AuthenticationError): + proc.start() + del proc + + # authkey restored, launching processes should work again. + proc = self.Process(target=sys.exit) + proc.start() + proc.join() # # @@ -2458,6 +2514,19 @@ def test_list_isinstance(self): a = self.list() self.assertIsInstance(a, collections.abc.MutableSequence) + # MutableSequence also has __iter__, but we can iterate over + # ListProxy using __getitem__ instead. Adding __iter__ to ListProxy + # would change the behavior of a list modified during iteration. 
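# --- Illustrative aside (editor's sketch, not part of the patch) ---
# The comment above relies on the legacy iteration protocol: an object with
# __getitem__ but no __iter__ is still iterable, with indexes tried from 0
# until IndexError.  A minimal standalone demonstration:
class _Squares:
    def __getitem__(self, index):
        if index >= 4:
            raise IndexError(index)
        return index * index

assert list(_Squares()) == [0, 1, 4, 9]   # iter() falls back to __getitem__
# --------------------------------------------------------------------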
+ mutable_sequence_methods = ( + '__contains__', '__delitem__', '__getitem__', '__iadd__', + '__len__', '__reversed__', '__setitem__', 'append', + 'clear', 'count', 'extend', 'index', 'insert', 'pop', 'remove', + 'reverse', + ) + for name in mutable_sequence_methods: + with self.subTest(name=name): + self.assertTrue(callable(getattr(a, name))) + def test_list_iter(self): a = self.list(list(range(10))) it = iter(a) @@ -2502,6 +2571,15 @@ def test_dict_isinstance(self): a = self.dict() self.assertIsInstance(a, collections.abc.MutableMapping) + mutable_mapping_methods = ( + '__contains__', '__delitem__', '__eq__', '__getitem__', '__iter__', + '__len__', '__ne__', '__setitem__', 'clear', 'get', 'items', + 'keys', 'pop', 'popitem', 'setdefault', 'update', 'values', + ) + for name in mutable_mapping_methods: + with self.subTest(name=name): + self.assertTrue(callable(getattr(a, name))) + def test_dict_iter(self): d = self.dict() indices = list(range(65, 70)) @@ -6397,6 +6475,76 @@ def test_atexit(self): self.assertEqual(f.read(), 'deadbeef') +class _TestSpawnedSysPath(BaseTestCase): + """Test that sys.path is setup in forkserver and spawn processes.""" + + ALLOWED_TYPES = {'processes'} + # Not applicable to fork which inherits everything from the process as is. + START_METHODS = {"forkserver", "spawn"} + + def setUp(self): + self._orig_sys_path = list(sys.path) + self._temp_dir = tempfile.mkdtemp(prefix="test_sys_path-") + self._mod_name = "unique_test_mod" + module_path = os.path.join(self._temp_dir, f"{self._mod_name}.py") + with open(module_path, "w", encoding="utf-8") as mod: + mod.write("# A simple test module\n") + sys.path[:] = [p for p in sys.path if p] # remove any existing ""s + sys.path.insert(0, self._temp_dir) + sys.path.insert(0, "") # Replaced with an abspath in child. + self.assertIn(self.start_method, self.START_METHODS) + self._ctx = multiprocessing.get_context(self.start_method) + + def tearDown(self): + sys.path[:] = self._orig_sys_path + shutil.rmtree(self._temp_dir, ignore_errors=True) + + @staticmethod + def enq_imported_module_names(queue): + queue.put(tuple(sys.modules)) + + def test_forkserver_preload_imports_sys_path(self): + if self._ctx.get_start_method() != "forkserver": + self.skipTest("forkserver specific test.") + self.assertNotIn(self._mod_name, sys.modules) + multiprocessing.forkserver._forkserver._stop() # Must be fresh. 
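# --- Illustrative aside (editor's sketch, not part of the patch) ---
# Typical use of the preload API outside the test suite (the forkserver start
# method is POSIX-only): module names passed to set_forkserver_preload() are
# imported once in the forkserver process, so every forked worker starts with
# them already loaded.
#
#     import multiprocessing as mp
#     ctx = mp.get_context("forkserver")
#     ctx.set_forkserver_preload(["json"])   # preload a stdlib module
#     # Workers created via ctx.Process()/ctx.Pool() inherit the preloaded state.
# --------------------------------------------------------------------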
+ self._ctx.set_forkserver_preload( + ["test.test_multiprocessing_forkserver", self._mod_name]) + q = self._ctx.Queue() + proc = self._ctx.Process( + target=self.enq_imported_module_names, args=(q,)) + proc.start() + proc.join() + child_imported_modules = q.get() + q.close() + self.assertIn(self._mod_name, child_imported_modules) + + @staticmethod + def enq_sys_path_and_import(queue, mod_name): + queue.put(sys.path) + try: + importlib.import_module(mod_name) + except ImportError as exc: + queue.put(exc) + else: + queue.put(None) + + def test_child_sys_path(self): + q = self._ctx.Queue() + proc = self._ctx.Process( + target=self.enq_sys_path_and_import, args=(q, self._mod_name)) + proc.start() + proc.join() + child_sys_path = q.get() + import_error = q.get() + q.close() + self.assertNotIn("", child_sys_path) # replaced by an abspath + self.assertIn(self._temp_dir, child_sys_path) # our addition + # ignore the first element, it is the absolute "" replacement + self.assertEqual(child_sys_path[1:], sys.path[1:]) + self.assertIsNone(import_error, msg=f"child could not import {self._mod_name}") + + class MiscTestCase(unittest.TestCase): def test__all__(self): # Just make sure names in not_exported are excluded @@ -6591,6 +6739,8 @@ def install_tests_in_module_dict(remote_globs, start_method, if base is BaseTestCase: continue assert set(base.ALLOWED_TYPES) <= ALL_TYPES, base.ALLOWED_TYPES + if base.START_METHODS and start_method not in base.START_METHODS: + continue # class not intended for this start method. for type_ in base.ALLOWED_TYPES: if only_type and type_ != only_type: continue @@ -6604,6 +6754,7 @@ class Temp(base, Mixin, unittest.TestCase): Temp = hashlib_helper.requires_hashdigest('sha256')(Temp) Temp.__name__ = Temp.__qualname__ = newname Temp.__module__ = __module__ + Temp.start_method = start_method remote_globs[newname] = Temp elif issubclass(base, unittest.TestCase): if only_type: diff --git a/Lib/test/datetimetester.py b/Lib/test/datetimetester.py index dbe25ef57dea83..25a3015c4e19ce 100644 --- a/Lib/test/datetimetester.py +++ b/Lib/test/datetimetester.py @@ -510,6 +510,7 @@ class TestTimeDelta(HarmlessMixedComparison, unittest.TestCase): def test_constructor(self): eq = self.assertEqual + ra = self.assertRaises td = timedelta # Check keyword args to constructor @@ -533,6 +534,15 @@ def test_constructor(self): eq(td(seconds=0.001), td(milliseconds=1)) eq(td(milliseconds=0.001), td(microseconds=1)) + # Check type of args to constructor + ra(TypeError, lambda: td(weeks='1')) + ra(TypeError, lambda: td(days='1')) + ra(TypeError, lambda: td(hours='1')) + ra(TypeError, lambda: td(minutes='1')) + ra(TypeError, lambda: td(seconds='1')) + ra(TypeError, lambda: td(milliseconds='1')) + ra(TypeError, lambda: td(microseconds='1')) + def test_computations(self): eq = self.assertEqual td = timedelta diff --git a/Lib/test/libregrtest/cmdline.py b/Lib/test/libregrtest/cmdline.py index 8bef04cba81138..0c94fcc1907071 100644 --- a/Lib/test/libregrtest/cmdline.py +++ b/Lib/test/libregrtest/cmdline.py @@ -148,7 +148,7 @@ def __init__(self, **kwargs) -> None: self.randomize = False self.fromfile = None self.fail_env_changed = False - self.use_resources = None + self.use_resources: list[str] = [] self.trace = False self.coverdir = 'coverage' self.runleaks = False @@ -403,8 +403,6 @@ def _parse_args(args, **kwargs): raise TypeError('%r is an invalid keyword argument ' 'for this function' % k) setattr(ns, k, v) - if ns.use_resources is None: - ns.use_resources = [] parser = _create_parser() # Issue 
#14191: argparse doesn't support "intermixed" positional and diff --git a/Lib/test/libregrtest/main.py b/Lib/test/libregrtest/main.py index 133eba8ffe8e69..49209b0cec756e 100644 --- a/Lib/test/libregrtest/main.py +++ b/Lib/test/libregrtest/main.py @@ -123,7 +123,7 @@ def __init__(self, ns: Namespace, _add_python_opts: bool = False): self.python_cmd = None self.coverage: bool = ns.trace self.coverage_dir: StrPath | None = ns.coverdir - self.tmp_dir: StrPath | None = ns.tempdir + self._tmp_dir: StrPath | None = ns.tempdir # Randomize self.randomize: bool = ns.randomize @@ -159,6 +159,8 @@ def log(self, line: str = '') -> None: self.logger.log(line) def find_tests(self, tests: TestList | None = None) -> tuple[TestTuple, TestList | None]: + if tests is None: + tests = [] if self.single_test_run: self.next_single_filename = os.path.join(self.tmp_dir, 'pynexttest') try: @@ -454,6 +456,11 @@ def finalize_tests(self, coverage: trace.CoverageResults | None) -> None: self.results.write_junit(self.junit_filename) def display_summary(self) -> None: + if self.first_runtests is None: + raise ValueError( + "Should never call `display_summary()` before calling `_run_test()`" + ) + duration = time.perf_counter() - self.logger.start_time filtered = bool(self.match_tests) @@ -708,7 +715,15 @@ def _init(self): strip_py_suffix(self.cmdline_args) - self.tmp_dir = get_temp_dir(self.tmp_dir) + self._tmp_dir = get_temp_dir(self._tmp_dir) + + @property + def tmp_dir(self) -> StrPath: + if self._tmp_dir is None: + raise ValueError( + "Should never use `.tmp_dir` before calling `.main()`" + ) + return self._tmp_dir def main(self, tests: TestList | None = None) -> NoReturn: if self.want_add_python_opts: diff --git a/Lib/test/libregrtest/mypy.ini b/Lib/test/libregrtest/mypy.ini index 22c7c7a9acef14..3fa9afcb7a4a8c 100644 --- a/Lib/test/libregrtest/mypy.ini +++ b/Lib/test/libregrtest/mypy.ini @@ -15,19 +15,12 @@ strict = True # Various stricter settings that we can't yet enable # Try to enable these in the following order: -disallow_any_generics = False disallow_incomplete_defs = False disallow_untyped_calls = False disallow_untyped_defs = False check_untyped_defs = False warn_return_any = False -disable_error_code = return - -# Enable --strict-optional for these ASAP: -[mypy-Lib.test.libregrtest.main.*,Lib.test.libregrtest.run_workers.*] -strict_optional = False - # Various internal modules that typeshed deliberately doesn't have stubs for: [mypy-_abc.*,_opcode.*,_overlapped.*,_testcapi.*,_testinternalcapi.*,test.*] ignore_missing_imports = True diff --git a/Lib/test/libregrtest/results.py b/Lib/test/libregrtest/results.py index 4f3e84282dc5dc..9eda926966dc7e 100644 --- a/Lib/test/libregrtest/results.py +++ b/Lib/test/libregrtest/results.py @@ -1,5 +1,6 @@ import sys import trace +from typing import TYPE_CHECKING from .runtests import RunTests from .result import State, TestResult, TestStats, Location @@ -7,6 +8,9 @@ StrPath, TestName, TestTuple, TestList, FilterDict, printlist, count, format_duration) +if TYPE_CHECKING: + from xml.etree.ElementTree import Element + # Python uses exit code 1 when an exception is not caught # argparse.ArgumentParser.error() uses exit code 2 @@ -34,7 +38,7 @@ def __init__(self) -> None: self.test_times: list[tuple[float, TestName]] = [] self.stats = TestStats() # used by --junit-xml - self.testsuite_xml: list = [] + self.testsuite_xml: list['Element'] = [] # used by -T with -j self.covered_lines: set[Location] = set() diff --git a/Lib/test/libregrtest/run_workers.py 
b/Lib/test/libregrtest/run_workers.py index 387ddf9614cf79..0ca86a986ea436 100644 --- a/Lib/test/libregrtest/run_workers.py +++ b/Lib/test/libregrtest/run_workers.py @@ -102,6 +102,9 @@ def __init__(self, super().__init__() +_NOT_RUNNING = "" + + class WorkerThread(threading.Thread): def __init__(self, worker_id: int, runner: "RunWorkers") -> None: super().__init__() @@ -111,8 +114,8 @@ def __init__(self, worker_id: int, runner: "RunWorkers") -> None: self.output = runner.output self.timeout = runner.worker_timeout self.log = runner.log - self.test_name: TestName | None = None - self.start_time: float | None = None + self.test_name = _NOT_RUNNING + self.start_time = time.monotonic() self._popen: subprocess.Popen[str] | None = None self._killed = False self._stopped = False @@ -129,7 +132,7 @@ def __repr__(self) -> str: popen = self._popen if popen is not None: dt = time.monotonic() - self.start_time - info.extend((f'pid={self._popen.pid}', + info.extend((f'pid={popen.pid}', f'time={format_duration(dt)}')) return '<%s>' % ' '.join(info) @@ -211,6 +214,7 @@ def _run_process(self, runtests: WorkerRunTests, output_fd: int, # on reading closed stdout raise ExitThread raise + return None except: self._kill() raise @@ -400,7 +404,7 @@ def run(self) -> None: except WorkerError as exc: mp_result = exc.mp_result finally: - self.test_name = None + self.test_name = _NOT_RUNNING mp_result.result.duration = time.monotonic() - self.start_time self.output.put((False, mp_result)) @@ -415,6 +419,9 @@ def run(self) -> None: def _wait_completed(self) -> None: popen = self._popen + # only needed for mypy: + if popen is None: + raise ValueError("Should never access `._popen` before calling `.run()`") try: popen.wait(WAIT_COMPLETED_TIMEOUT) @@ -482,7 +489,7 @@ def __init__(self, num_workers: int, runtests: RunTests, self.worker_timeout: float | None = min(self.timeout * 1.5, self.timeout + 5 * 60) else: self.worker_timeout = None - self.workers: list[WorkerThread] | None = None + self.workers: list[WorkerThread] = [] jobs = self.runtests.get_jobs() if jobs is not None: @@ -502,7 +509,7 @@ def start_workers(self) -> None: processes = plural(nworkers, "process", "processes") msg = (f"Run {tests} in parallel using " f"{nworkers} worker {processes}") - if self.timeout: + if self.timeout and self.worker_timeout is not None: msg += (" (timeout: %s, worker timeout: %s)" % (format_duration(self.timeout), format_duration(self.worker_timeout))) @@ -544,6 +551,7 @@ def _get_result(self) -> QueueOutput | None: running = get_running(self.workers) if running: self.log(running) + return None def display_result(self, mp_result: MultiprocessResult) -> None: result = mp_result.result @@ -553,7 +561,7 @@ def display_result(self, mp_result: MultiprocessResult) -> None: if mp_result.err_msg: # WORKER_BUG text += ' (%s)' % mp_result.err_msg - elif (result.duration >= PROGRESS_MIN_TIME and not pgo): + elif (result.duration and result.duration >= PROGRESS_MIN_TIME and not pgo): text += ' (%s)' % format_duration(result.duration) if not pgo: running = get_running(self.workers) diff --git a/Lib/test/libregrtest/runtests.py b/Lib/test/libregrtest/runtests.py index cd1ce8080a04df..130c036a62eefb 100644 --- a/Lib/test/libregrtest/runtests.py +++ b/Lib/test/libregrtest/runtests.py @@ -28,7 +28,7 @@ class JsonFile: file: int | None file_type: str - def configure_subprocess(self, popen_kwargs: dict) -> None: + def configure_subprocess(self, popen_kwargs: dict[str, Any]) -> None: match self.file_type: case JsonFileType.UNIX_FD: # Unix file 
descriptor diff --git a/Lib/test/libregrtest/worker.py b/Lib/test/libregrtest/worker.py index da24760a82c6c6..0c9f5bd6e42f11 100644 --- a/Lib/test/libregrtest/worker.py +++ b/Lib/test/libregrtest/worker.py @@ -20,7 +20,7 @@ def create_worker_process(runtests: WorkerRunTests, output_fd: int, - tmp_dir: StrPath | None = None) -> subprocess.Popen: + tmp_dir: StrPath | None = None) -> subprocess.Popen[str]: worker_json = runtests.as_json() cmd = runtests.create_python_cmd() diff --git a/Lib/test/pickletester.py b/Lib/test/pickletester.py index 8ef8fae44f1d25..cf020a48b81cfa 100644 --- a/Lib/test/pickletester.py +++ b/Lib/test/pickletester.py @@ -1357,6 +1357,41 @@ def test_bad_newobj_ex(self): self.check_unpickling_error(error, b'cbuiltins\nint\nN}\x92.') self.check_unpickling_error(error, b'cbuiltins\nint\n)N\x92.') + def test_bad_state(self): + c = C() + c.x = None + base = b'c__main__\nC\n)\x81' + self.assertEqual(self.loads(base + b'}X\x01\x00\x00\x00xNsb.'), c) + self.assertEqual(self.loads(base + b'N}X\x01\x00\x00\x00xNs\x86b.'), c) + # non-hashable dict key + self.check_unpickling_error(TypeError, base + b'}]Nsb.') + # state = list + error = (pickle.UnpicklingError, AttributeError) + self.check_unpickling_error(error, base + b'](}}eb.') + # state = 1-tuple + self.check_unpickling_error(error, base + b'}\x85b.') + # state = 3-tuple + self.check_unpickling_error(error, base + b'}}}\x87b.') + # non-hashable slot name + self.check_unpickling_error(TypeError, base + b'}}]Ns\x86b.') + # non-string slot name + self.check_unpickling_error(TypeError, base + b'}}NNs\x86b.') + # dict = True + self.check_unpickling_error(error, base + b'\x88}\x86b.') + # slots dict = True + self.check_unpickling_error(error, base + b'}\x88\x86b.') + + class BadKey1: + count = 1 + def __hash__(self): + if not self.count: + raise CustomError + self.count -= 1 + return 42 + __main__.BadKey1 = BadKey1 + # bad hashable dict key + self.check_unpickling_error(CustomError, base + b'}c__main__\nBadKey1\n)\x81Nsb.') + def test_bad_stack(self): badpickles = [ b'.', # STOP diff --git a/Lib/test/support/i18n_helper.py b/Lib/test/support/i18n_helper.py new file mode 100644 index 00000000000000..2e304f29e8ba7f --- /dev/null +++ b/Lib/test/support/i18n_helper.py @@ -0,0 +1,63 @@ +import re +import subprocess +import sys +import unittest +from pathlib import Path +from test.support import REPO_ROOT, TEST_HOME_DIR, requires_subprocess +from test.test_tools import skip_if_missing + + +pygettext = Path(REPO_ROOT) / 'Tools' / 'i18n' / 'pygettext.py' + +msgid_pattern = re.compile(r'msgid(.*?)(?:msgid_plural|msgctxt|msgstr)', + re.DOTALL) +msgid_string_pattern = re.compile(r'"((?:\\"|[^"])*)"') + + +def _generate_po_file(path, *, stdout_only=True): + res = subprocess.run([sys.executable, pygettext, + '--no-location', '-o', '-', path], + stdout=subprocess.PIPE, stderr=subprocess.PIPE, + text=True) + if stdout_only: + return res.stdout + return res + + +def _extract_msgids(po): + msgids = [] + for msgid in msgid_pattern.findall(po): + msgid_string = ''.join(msgid_string_pattern.findall(msgid)) + msgid_string = msgid_string.replace(r'\"', '"') + if msgid_string: + msgids.append(msgid_string) + return sorted(msgids) + + +def _get_snapshot_path(module_name): + return Path(TEST_HOME_DIR) / 'translationdata' / module_name / 'msgids.txt' + + +@requires_subprocess() +class TestTranslationsBase(unittest.TestCase): + + def assertMsgidsEqual(self, module): + '''Assert that msgids extracted from a given module match a + snapshot. 
+ + ''' + skip_if_missing('i18n') + res = _generate_po_file(module.__file__, stdout_only=False) + self.assertEqual(res.returncode, 0) + self.assertEqual(res.stderr, '') + msgids = _extract_msgids(res.stdout) + snapshot_path = _get_snapshot_path(module.__name__) + snapshot = snapshot_path.read_text().splitlines() + self.assertListEqual(msgids, snapshot) + + +def update_translation_snapshots(module): + contents = _generate_po_file(module.__file__) + msgids = _extract_msgids(contents) + snapshot_path = _get_snapshot_path(module.__name__) + snapshot_path.write_text('\n'.join(msgids)) diff --git a/Lib/test/support/os_helper.py b/Lib/test/support/os_helper.py index 891405943b78c5..8071c248b9b67e 100644 --- a/Lib/test/support/os_helper.py +++ b/Lib/test/support/os_helper.py @@ -632,8 +632,7 @@ def fd_count(): if hasattr(os, 'sysconf'): try: MAXFD = os.sysconf("SC_OPEN_MAX") - except (OSError, ValueError): - # gh-118201: ValueError is raised intermittently on iOS + except OSError: pass old_modes = None diff --git a/Lib/test/support/strace_helper.py b/Lib/test/support/strace_helper.py index 90d4b5bccb6fa3..90281b47274299 100644 --- a/Lib/test/support/strace_helper.py +++ b/Lib/test/support/strace_helper.py @@ -1,6 +1,7 @@ import re import sys import textwrap +import os import unittest from dataclasses import dataclass from functools import cache @@ -91,7 +92,10 @@ def _make_error(reason, details): res, cmd_line = run_python_until_end( "-c", textwrap.dedent(code), - __run_using_command=[_strace_binary] + strace_flags) + __run_using_command=[_strace_binary] + strace_flags, + # Don't want to trace our JIT's own mmap and mprotect calls: + PYTHON_JIT="0", + ) except OSError as err: return _make_error("Caught OSError", err) @@ -160,6 +164,13 @@ def requires_strace(): if sys.platform != "linux": return unittest.skip("Linux only, requires strace.") + if "LD_PRELOAD" in os.environ: + # Distribution packaging (ex. Debian `fakeroot` and Gentoo `sandbox`) + # use LD_PRELOAD to intercept system calls, which changes the overall + # set of system calls which breaks tests expecting a specific set of + # system calls). 
+ return unittest.skip("Not supported when LD_PRELOAD is intercepting system calls.") + if support.check_sanitizer(address=True, memory=True): return unittest.skip("LeakSanitizer does not work under ptrace (strace, gdb, etc)") diff --git a/Lib/test/support/venv.py b/Lib/test/support/venv.py new file mode 100644 index 00000000000000..78e6a51ec1815e --- /dev/null +++ b/Lib/test/support/venv.py @@ -0,0 +1,70 @@ +import contextlib +import logging +import os +import subprocess +import shlex +import sys +import sysconfig +import tempfile +import venv + + +class VirtualEnvironment: + def __init__(self, prefix, **venv_create_args): + self._logger = logging.getLogger(self.__class__.__name__) + venv.create(prefix, **venv_create_args) + self._prefix = prefix + self._paths = sysconfig.get_paths( + scheme='venv', + vars={'base': self.prefix}, + expand=True, + ) + + @classmethod + @contextlib.contextmanager + def from_tmpdir(cls, *, prefix=None, dir=None, **venv_create_args): + delete = not bool(os.environ.get('PYTHON_TESTS_KEEP_VENV')) + with tempfile.TemporaryDirectory(prefix=prefix, dir=dir, delete=delete) as tmpdir: + yield cls(tmpdir, **venv_create_args) + + @property + def prefix(self): + return self._prefix + + @property + def paths(self): + return self._paths + + @property + def interpreter(self): + return os.path.join(self.paths['scripts'], os.path.basename(sys.executable)) + + def _format_output(self, name, data, indent='\t'): + if not data: + return indent + f'{name}: (none)' + if len(data.splitlines()) == 1: + return indent + f'{name}: {data}' + else: + prefixed_lines = '\n'.join(indent + '> ' + line for line in data.splitlines()) + return indent + f'{name}:\n' + prefixed_lines + + def run(self, *args, **subprocess_args): + if subprocess_args.get('shell'): + raise ValueError('Running the subprocess in shell mode is not supported.') + default_args = { + 'capture_output': True, + 'check': True, + } + try: + result = subprocess.run([self.interpreter, *args], **default_args | subprocess_args) + except subprocess.CalledProcessError as e: + if e.returncode != 0: + self._logger.error( + f'Interpreter returned non-zero exit status {e.returncode}.\n' + + self._format_output('COMMAND', shlex.join(e.cmd)) + '\n' + + self._format_output('STDOUT', e.stdout.decode()) + '\n' + + self._format_output('STDERR', e.stderr.decode()) + '\n' + ) + raise + else: + return result diff --git a/Lib/test/test__interpreters.py b/Lib/test/test__interpreters.py index 14cd50bd30502c..bf3165e2341949 100644 --- a/Lib/test/test__interpreters.py +++ b/Lib/test/test__interpreters.py @@ -551,6 +551,24 @@ def test_still_running(self): self.assertTrue(_interpreters.is_running(interp)) +class CommonTests(TestBase): + def setUp(self): + super().setUp() + self.id = _interpreters.create() + + def test_signatures(self): + # for method in ['exec', 'run_string', 'run_func']: + msg = "expected 'shared' to be a dict" + with self.assertRaisesRegex(TypeError, msg): + _interpreters.exec(self.id, 'a', 1) + with self.assertRaisesRegex(TypeError, msg): + _interpreters.exec(self.id, 'a', shared=1) + with self.assertRaisesRegex(TypeError, msg): + _interpreters.run_string(self.id, 'a', shared=1) + with self.assertRaisesRegex(TypeError, msg): + _interpreters.run_func(self.id, lambda: None, shared=1) + + class RunStringTests(TestBase): def setUp(self): diff --git a/Lib/test/test__locale.py b/Lib/test/test__locale.py index 7e6e296c069abb..2c751033ebb3e2 100644 --- a/Lib/test/test__locale.py +++ b/Lib/test/test__locale.py @@ -90,6 +90,14 @@ def 
accept(loc): 'bn_IN': (100, {0: '\u09e6', 10: '\u09e7\u09e6', 99: '\u09ef\u09ef'}), } +known_era = { + 'C': (0, ''), + 'en_US': (0, ''), + 'ja_JP': (11, '+:1:2019/05/01:2019/12/31:令和:%EC元年'), + 'zh_TW': (3, '+:1:1912/01/01:1912/12/31:民國:%EC元年'), + 'th_TW': (1, '+:1:-543/01/01:+*:พ.ศ.:%EC %Ey'), +} + if sys.platform == 'win32': # ps_AF doesn't work on Windows: see bpo-38324 (msg361830) del known_numerics['ps_AF'] @@ -230,6 +238,43 @@ def test_alt_digits_nl_langinfo(self): if not tested: self.skipTest('no suitable locales') + @unittest.skipUnless(nl_langinfo, "nl_langinfo is not available") + @unittest.skipUnless(hasattr(locale, 'ERA'), "requires locale.ERA") + @unittest.skipIf( + support.is_emscripten or support.is_wasi, + "musl libc issue on Emscripten, bpo-46390" + ) + def test_era_nl_langinfo(self): + # Test nl_langinfo(ERA) + tested = False + for loc in candidate_locales: + with self.subTest(locale=loc): + try: + setlocale(LC_TIME, loc) + except Error: + self.skipTest(f'no locale {loc!r}') + continue + + with self.subTest(locale=loc): + era = nl_langinfo(locale.ERA) + self.assertIsInstance(era, str) + if era: + self.assertEqual(era.count(':'), (era.count(';') + 1) * 5, era) + + loc1 = loc.split('.', 1)[0] + if loc1 in known_era: + count, sample = known_era[loc1] + if count: + if not era: + self.skipTest(f'ERA is not set for locale {loc!r} on this platform') + self.assertGreaterEqual(era.count(';') + 1, count) + self.assertIn(sample, era) + else: + self.assertEqual(era, '') + tested = True + if not tested: + self.skipTest('no suitable locales') + def test_float_parsing(self): # Bug #1391872: Test whether float parsing is okay on European # locales. diff --git a/Lib/test/test_argparse.py b/Lib/test/test_argparse.py index ba9876570385d3..358cfb1c56aae4 100644 --- a/Lib/test/test_argparse.py +++ b/Lib/test/test_argparse.py @@ -7,10 +7,8 @@ import operator import os import py_compile -import re import shutil import stat -import subprocess import sys import textwrap import tempfile @@ -19,15 +17,11 @@ import warnings from enum import StrEnum -from pathlib import Path -from test.support import REPO_ROOT -from test.support import TEST_HOME_DIR from test.support import captured_stderr from test.support import import_helper from test.support import os_helper -from test.support import requires_subprocess from test.support import script_helper -from test.test_tools import skip_if_missing +from test.support.i18n_helper import TestTranslationsBase, update_translation_snapshots from unittest import mock @@ -789,6 +783,13 @@ def test_const(self): self.assertIn("got an unexpected keyword argument 'const'", str(cm.exception)) + def test_invalid_name(self): + parser = argparse.ArgumentParser() + with self.assertRaises(ValueError) as cm: + parser.add_argument('--no-foo', action=argparse.BooleanOptionalAction) + self.assertEqual(str(cm.exception), + "invalid option name '--no-foo' for BooleanOptionalAction") + class TestBooleanOptionalActionRequired(ParserTestCase): """Tests BooleanOptionalAction required""" @@ -7049,50 +7050,10 @@ def test_directory_in_zipfile_compiled(self): # Translation tests # ================= -pygettext = Path(REPO_ROOT) / 'Tools' / 'i18n' / 'pygettext.py' -snapshot_path = Path(TEST_HOME_DIR) / 'translationdata' / 'argparse' / 'msgids.txt' - -msgid_pattern = re.compile(r'msgid(.*?)(?:msgid_plural|msgctxt|msgstr)', re.DOTALL) -msgid_string_pattern = re.compile(r'"((?:\\"|[^"])*)"') - - -@requires_subprocess() -class TestTranslations(unittest.TestCase): +class 
TestTranslations(TestTranslationsBase): def test_translations(self): - # Test messages extracted from the argparse module against a snapshot - skip_if_missing('i18n') - res = generate_po_file(stdout_only=False) - self.assertEqual(res.returncode, 0) - self.assertEqual(res.stderr, '') - msgids = extract_msgids(res.stdout) - snapshot = snapshot_path.read_text().splitlines() - self.assertListEqual(msgids, snapshot) - - -def generate_po_file(*, stdout_only=True): - res = subprocess.run([sys.executable, pygettext, - '--no-location', '-o', '-', argparse.__file__], - stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True) - if stdout_only: - return res.stdout - return res - - -def extract_msgids(po): - msgids = [] - for msgid in msgid_pattern.findall(po): - msgid_string = ''.join(msgid_string_pattern.findall(msgid)) - msgid_string = msgid_string.replace(r'\"', '"') - if msgid_string: - msgids.append(msgid_string) - return sorted(msgids) - - -def update_translation_snapshots(): - contents = generate_po_file() - msgids = extract_msgids(contents) - snapshot_path.write_text('\n'.join(msgids)) + self.assertMsgidsEqual(argparse) def tearDownModule(): @@ -7104,6 +7065,6 @@ def tearDownModule(): if __name__ == '__main__': # To regenerate translation snapshots if len(sys.argv) > 1 and sys.argv[1] == '--snapshot-update': - update_translation_snapshots() + update_translation_snapshots(argparse) sys.exit(0) unittest.main() diff --git a/Lib/test/test_buffer.py b/Lib/test/test_buffer.py index ae938d12c9401b..61921e93e85e63 100644 --- a/Lib/test/test_buffer.py +++ b/Lib/test/test_buffer.py @@ -3910,6 +3910,8 @@ def test_memoryview_check_released(self): self.assertRaises(ValueError, memoryview, m) # memoryview.cast() self.assertRaises(ValueError, m.cast, 'c') + # memoryview.__iter__() + self.assertRaises(ValueError, m.__iter__) # getbuffer() self.assertRaises(ValueError, ndarray, m) # memoryview.tolist() @@ -4437,6 +4439,14 @@ def test_issue_7385(self): x = ndarray([1,2,3], shape=[3], flags=ND_GETBUF_FAIL) self.assertRaises(BufferError, memoryview, x) + def test_bytearray_release_buffer_read_flag(self): + # See https://github.com/python/cpython/issues/126980 + obj = bytearray(b'abc') + with self.assertRaises(SystemError): + obj.__buffer__(inspect.BufferFlags.READ) + with self.assertRaises(SystemError): + obj.__buffer__(inspect.BufferFlags.WRITE) + @support.cpython_only def test_pybuffer_size_from_format(self): # basic tests @@ -4444,6 +4454,21 @@ def test_pybuffer_size_from_format(self): self.assertEqual(_testcapi.PyBuffer_SizeFromFormat(format), struct.calcsize(format)) + @support.cpython_only + def test_flags_overflow(self): + # gh-126594: Check for integer overlow on large flags + try: + from _testcapi import INT_MIN, INT_MAX + except ImportError: + INT_MIN = -(2 ** 31) + INT_MAX = 2 ** 31 - 1 + + obj = b'abc' + for flags in (INT_MIN - 1, INT_MAX + 1): + with self.subTest(flags=flags): + with self.assertRaises(OverflowError): + obj.__buffer__(flags) + class TestPythonBufferProtocol(unittest.TestCase): def test_basic(self): diff --git a/Lib/test/test_calendar.py b/Lib/test/test_calendar.py index f119d89c0ec39a..073df310bb49eb 100644 --- a/Lib/test/test_calendar.py +++ b/Lib/test/test_calendar.py @@ -457,6 +457,11 @@ def test_formatmonth(self): calendar.TextCalendar().formatmonth(0, 2), result_0_02_text ) + def test_formatmonth_with_invalid_month(self): + with self.assertRaises(calendar.IllegalMonthError): + calendar.TextCalendar().formatmonth(2017, 13) + with 
self.assertRaises(calendar.IllegalMonthError): + calendar.TextCalendar().formatmonth(2017, -1) def test_formatmonthname_with_year(self): self.assertEqual( @@ -1121,7 +1126,7 @@ def test__all__(self): not_exported = { 'mdays', 'January', 'February', 'EPOCH', 'different_locale', 'c', 'prweek', 'week', 'format', - 'formatstring', 'main', 'monthlen', 'prevmonth', 'nextmonth'} + 'formatstring', 'main', 'monthlen', 'prevmonth', 'nextmonth', ""} support.check__all__(self, calendar, not_exported=not_exported) @@ -1149,6 +1154,13 @@ def test_formatmonth(self): self.assertIn('class="text-center month"', self.cal.formatmonth(2017, 5)) + def test_formatmonth_with_invalid_month(self): + with self.assertRaises(calendar.IllegalMonthError): + self.cal.formatmonth(2017, 13) + with self.assertRaises(calendar.IllegalMonthError): + self.cal.formatmonth(2017, -1) + + def test_formatweek(self): weeks = self.cal.monthdays2calendar(2017, 5) self.assertIn('class="wed text-nowrap"', self.cal.formatweek(weeks[0])) diff --git a/Lib/test/test_capi/test_long.py b/Lib/test/test_capi/test_long.py index 925fccd660bde3..a77094588a0edf 100644 --- a/Lib/test/test_capi/test_long.py +++ b/Lib/test/test_capi/test_long.py @@ -643,6 +643,51 @@ def test_long_getsign(self): # CRASHES getsign(NULL) + def test_long_ispositive(self): + # Test PyLong_IsPositive() + ispositive = _testcapi.pylong_ispositive + self.assertEqual(ispositive(1), 1) + self.assertEqual(ispositive(123), 1) + self.assertEqual(ispositive(-1), 0) + self.assertEqual(ispositive(0), 0) + self.assertEqual(ispositive(True), 1) + self.assertEqual(ispositive(False), 0) + self.assertEqual(ispositive(IntSubclass(-1)), 0) + self.assertRaises(TypeError, ispositive, 1.0) + self.assertRaises(TypeError, ispositive, Index(123)) + + # CRASHES ispositive(NULL) + + def test_long_isnegative(self): + # Test PyLong_IsNegative() + isnegative = _testcapi.pylong_isnegative + self.assertEqual(isnegative(1), 0) + self.assertEqual(isnegative(123), 0) + self.assertEqual(isnegative(-1), 1) + self.assertEqual(isnegative(0), 0) + self.assertEqual(isnegative(True), 0) + self.assertEqual(isnegative(False), 0) + self.assertEqual(isnegative(IntSubclass(-1)), 1) + self.assertRaises(TypeError, isnegative, 1.0) + self.assertRaises(TypeError, isnegative, Index(123)) + + # CRASHES isnegative(NULL) + + def test_long_iszero(self): + # Test PyLong_IsZero() + iszero = _testcapi.pylong_iszero + self.assertEqual(iszero(1), 0) + self.assertEqual(iszero(-1), 0) + self.assertEqual(iszero(0), 1) + self.assertEqual(iszero(True), 0) + self.assertEqual(iszero(False), 1) + self.assertEqual(iszero(IntSubclass(-1)), 0) + self.assertEqual(iszero(IntSubclass(0)), 1) + self.assertRaises(TypeError, iszero, 1.0) + self.assertRaises(TypeError, iszero, Index(123)) + + # CRASHES iszero(NULL) + def test_long_asint32(self): # Test PyLong_AsInt32() and PyLong_FromInt32() to_int32 = _testlimitedcapi.pylong_asint32 diff --git a/Lib/test/test_capi/test_object.py b/Lib/test/test_capi/test_object.py index cc9c9b688f00e2..b0d39937fd865f 100644 --- a/Lib/test/test_capi/test_object.py +++ b/Lib/test/test_capi/test_object.py @@ -1,10 +1,13 @@ import enum import unittest +from test import support from test.support import import_helper from test.support import os_helper +from test.support import threading_helper _testlimitedcapi = import_helper.import_module('_testlimitedcapi') _testcapi = import_helper.import_module('_testcapi') +_testinternalcapi = import_helper.import_module('_testinternalcapi') class Constant(enum.IntEnum): @@ -131,5 
+134,41 @@ def test_ClearWeakRefsNoCallbacks_no_weakref_support(self): _testcapi.pyobject_clear_weakrefs_no_callbacks(obj) +@threading_helper.requires_working_threading() +class EnableDeferredRefcountingTest(unittest.TestCase): + """Test PyUnstable_Object_EnableDeferredRefcount""" + @support.requires_resource("cpu") + def test_enable_deferred_refcount(self): + from threading import Thread + + self.assertEqual(_testcapi.pyobject_enable_deferred_refcount("not tracked"), 0) + foo = [] + self.assertEqual(_testcapi.pyobject_enable_deferred_refcount(foo), int(support.Py_GIL_DISABLED)) + + # Make sure reference counting works on foo now + self.assertEqual(foo, []) + if support.Py_GIL_DISABLED: + self.assertTrue(_testinternalcapi.has_deferred_refcount(foo)) + + # Make sure that PyUnstable_Object_EnableDeferredRefcount is thread safe + def silly_func(obj): + self.assertIn( + _testcapi.pyobject_enable_deferred_refcount(obj), + (0, 1) + ) + + silly_list = [1, 2, 3] + threads = [ + Thread(target=silly_func, args=(silly_list,)) for _ in range(4) + ] + + with threading_helper.start_threads(threads): + for i in range(10): + silly_list.append(i) + + if support.Py_GIL_DISABLED: + self.assertTrue(_testinternalcapi.has_deferred_refcount(silly_list)) + + if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_capi/test_opt.py b/Lib/test/test_capi/test_opt.py index 7b3d9e4fd1126f..4cf9b66170c055 100644 --- a/Lib/test/test_capi/test_opt.py +++ b/Lib/test/test_capi/test_opt.py @@ -62,9 +62,9 @@ def test_get_set_optimizer(self): def test_counter_optimizer(self): # Generate a new function at each call ns = {} - exec(textwrap.dedent(""" + exec(textwrap.dedent(f""" def loop(): - for _ in range(1000): + for _ in range({TIER2_THRESHOLD + 1000}): pass """), ns, ns) loop = ns['loop'] @@ -75,20 +75,19 @@ def loop(): self.assertEqual(opt.get_count(), 0) with clear_executors(loop): loop() - # Subtract because optimizer doesn't kick in sooner - self.assertEqual(opt.get_count(), 1000 - TIER2_THRESHOLD) + self.assertEqual(opt.get_count(), 1001) def test_long_loop(self): "Check that we aren't confused by EXTENDED_ARG" # Generate a new function at each call ns = {} - exec(textwrap.dedent(""" + exec(textwrap.dedent(f""" def nop(): pass def long_loop(): - for _ in range(20): + for _ in range({TIER2_THRESHOLD + 20}): nop(); nop(); nop(); nop(); nop(); nop(); nop(); nop(); nop(); nop(); nop(); nop(); nop(); nop(); nop(); nop(); nop(); nop(); nop(); nop(); nop(); nop(); nop(); nop(); @@ -103,7 +102,7 @@ def long_loop(): with temporary_optimizer(opt): self.assertEqual(opt.get_count(), 0) long_loop() - self.assertEqual(opt.get_count(), 20 - TIER2_THRESHOLD) # Need iterations to warm up + self.assertEqual(opt.get_count(), 21) # Need iterations to warm up def test_code_restore_for_ENTER_EXECUTOR(self): def testfunc(x): @@ -159,7 +158,7 @@ def test_invalidate_object(self): func_src = "\n".join( f""" def f{n}(): - for _ in range(1000): + for _ in range({TIER2_THRESHOLD}): pass """ for n in range(5) ) @@ -190,9 +189,9 @@ def f{n}(): def test_uop_optimizer_invalidation(self): # Generate a new function at each call ns = {} - exec(textwrap.dedent(""" + exec(textwrap.dedent(f""" def f(): - for i in range(1000): + for i in range({TIER2_THRESHOLD}): pass """), ns, ns) f = ns['f'] @@ -207,7 +206,7 @@ def f(): def test_sys__clear_internal_caches(self): def f(): - for _ in range(1000): + for _ in range(TIER2_THRESHOLD): pass opt = _testinternalcapi.new_uop_optimizer() with temporary_optimizer(opt): @@ -236,7 +235,7 @@ def testfunc(x): 
opt = _testinternalcapi.new_uop_optimizer() with temporary_optimizer(opt): - testfunc(1000) + testfunc(TIER2_THRESHOLD) ex = get_first_executor(testfunc) self.assertIsNotNone(ex) @@ -247,7 +246,7 @@ def testfunc(x): def test_extended_arg(self): "Check EXTENDED_ARG handling in superblock creation" ns = {} - exec(textwrap.dedent(""" + exec(textwrap.dedent(f""" def many_vars(): # 260 vars, so z9 should have index 259 a0 = a1 = a2 = a3 = a4 = a5 = a6 = a7 = a8 = a9 = 42 @@ -275,7 +274,7 @@ def many_vars(): w0 = w1 = w2 = w3 = w4 = w5 = w6 = w7 = w8 = w9 = 42 x0 = x1 = x2 = x3 = x4 = x5 = x6 = x7 = x8 = x9 = 42 y0 = y1 = y2 = y3 = y4 = y5 = y6 = y7 = y8 = y9 = 42 - z0 = z1 = z2 = z3 = z4 = z5 = z6 = z7 = z8 = z9 = 42 + z0 = z1 = z2 = z3 = z4 = z5 = z6 = z7 = z8 = z9 = {TIER2_THRESHOLD} while z9 > 0: z9 = z9 - 1 +z9 @@ -308,7 +307,7 @@ def testfunc(x): opt = _testinternalcapi.new_uop_optimizer() with temporary_optimizer(opt): - testfunc(20) + testfunc(TIER2_THRESHOLD) ex = get_first_executor(testfunc) self.assertIsNotNone(ex) @@ -323,7 +322,7 @@ def testfunc(n): opt = _testinternalcapi.new_uop_optimizer() with temporary_optimizer(opt): - testfunc(20) + testfunc(TIER2_THRESHOLD) ex = get_first_executor(testfunc) self.assertIsNotNone(ex) @@ -338,7 +337,7 @@ def testfunc(a): opt = _testinternalcapi.new_uop_optimizer() with temporary_optimizer(opt): - testfunc(range(20)) + testfunc(range(TIER2_THRESHOLD)) ex = get_first_executor(testfunc) self.assertIsNotNone(ex) @@ -355,7 +354,7 @@ def testfunc(a): opt = _testinternalcapi.new_uop_optimizer() with temporary_optimizer(opt): - testfunc(range(20)) + testfunc(range(TIER2_THRESHOLD)) ex = get_first_executor(testfunc) self.assertIsNotNone(ex) @@ -371,7 +370,7 @@ def testfunc(n): opt = _testinternalcapi.new_uop_optimizer() with temporary_optimizer(opt): - testfunc(20) + testfunc(TIER2_THRESHOLD) ex = get_first_executor(testfunc) self.assertIsNotNone(ex) @@ -386,7 +385,7 @@ def testfunc(n): opt = _testinternalcapi.new_uop_optimizer() with temporary_optimizer(opt): - testfunc(20) + testfunc(TIER2_THRESHOLD) ex = get_first_executor(testfunc) self.assertIsNotNone(ex) @@ -406,7 +405,7 @@ def testfunc(n): opt = _testinternalcapi.new_uop_optimizer() with temporary_optimizer(opt): - testfunc(20) + testfunc(TIER2_THRESHOLD) ex = get_first_executor(testfunc) self.assertIsNotNone(ex) @@ -424,8 +423,8 @@ def testfunc(n): opt = _testinternalcapi.new_uop_optimizer() with temporary_optimizer(opt): - total = testfunc(20) - self.assertEqual(total, 190) + total = testfunc(TIER2_THRESHOLD) + self.assertEqual(total, sum(range(TIER2_THRESHOLD))) ex = get_first_executor(testfunc) self.assertIsNotNone(ex) @@ -445,9 +444,9 @@ def testfunc(a): opt = _testinternalcapi.new_uop_optimizer() with temporary_optimizer(opt): - a = list(range(20)) + a = list(range(TIER2_THRESHOLD)) total = testfunc(a) - self.assertEqual(total, 190) + self.assertEqual(total, sum(a)) ex = get_first_executor(testfunc) self.assertIsNotNone(ex) @@ -467,9 +466,9 @@ def testfunc(a): opt = _testinternalcapi.new_uop_optimizer() with temporary_optimizer(opt): - a = tuple(range(20)) + a = tuple(range(TIER2_THRESHOLD)) total = testfunc(a) - self.assertEqual(total, 190) + self.assertEqual(total, sum(a)) ex = get_first_executor(testfunc) self.assertIsNotNone(ex) @@ -503,7 +502,7 @@ def dummy(x): opt = _testinternalcapi.new_uop_optimizer() with temporary_optimizer(opt): - testfunc(20) + testfunc(TIER2_THRESHOLD) ex = get_first_executor(testfunc) self.assertIsNotNone(ex) @@ -521,7 +520,7 @@ def testfunc(n): opt = 
_testinternalcapi.new_uop_optimizer() with temporary_optimizer(opt): - testfunc(20) + testfunc(TIER2_THRESHOLD) ex = get_first_executor(testfunc) self.assertIsNotNone(ex) @@ -549,9 +548,9 @@ def testfunc(n, m): opt = _testinternalcapi.new_uop_optimizer() with temporary_optimizer(opt): - x = testfunc(10, 10) + x = testfunc(TIER2_THRESHOLD, TIER2_THRESHOLD) - self.assertEqual(x, sum(range(10)) * 10010) + self.assertEqual(x, sum(range(TIER2_THRESHOLD)) * TIER2_THRESHOLD * 1001) ex = get_first_executor(testfunc) self.assertIsNotNone(ex) @@ -572,15 +571,13 @@ def testfunc(n): bits += 1 if i&0x10: bits += 1 - if i&0x20: - bits += 1 return bits opt = _testinternalcapi.new_uop_optimizer() with temporary_optimizer(opt): - x = testfunc(20) + x = testfunc(TIER2_THRESHOLD * 2) - self.assertEqual(x, 40) + self.assertEqual(x, TIER2_THRESHOLD * 5) ex = get_first_executor(testfunc) self.assertIsNotNone(ex) ops = list(iter_opnames(ex)) @@ -615,9 +612,9 @@ def testfunc(loops): num += 1 return a - res, ex = self._run_with_optimizer(testfunc, 32) + res, ex = self._run_with_optimizer(testfunc, TIER2_THRESHOLD) self.assertIsNotNone(ex) - self.assertEqual(res, 63) + self.assertEqual(res, (TIER2_THRESHOLD - 1) * 2 + 1) binop_count = [opname for opname in iter_opnames(ex) if opname == "_BINARY_OP_ADD_INT"] guard_both_int_count = [opname for opname in iter_opnames(ex) if opname == "_GUARD_BOTH_INT"] self.assertGreaterEqual(len(binop_count), 3) @@ -637,11 +634,11 @@ def testfunc(loops): opt = _testinternalcapi.new_uop_optimizer() res = None with temporary_optimizer(opt): - res = testfunc(32) + res = testfunc(TIER2_THRESHOLD) ex = get_first_executor(testfunc) self.assertIsNotNone(ex) - self.assertEqual(res, 124) + self.assertEqual(res, (TIER2_THRESHOLD - 1) * 4) binop_count = [opname for opname in iter_opnames(ex) if opname == "_BINARY_OP_ADD_INT"] guard_both_int_count = [opname for opname in iter_opnames(ex) if opname == "_GUARD_BOTH_INT"] self.assertGreaterEqual(len(binop_count), 3) @@ -661,11 +658,11 @@ def testfunc(loops): opt = _testinternalcapi.new_uop_optimizer() res = None with temporary_optimizer(opt): - res = testfunc(32) + res = testfunc(TIER2_THRESHOLD) ex = get_first_executor(testfunc) self.assertIsNotNone(ex) - self.assertEqual(res, 124) + self.assertEqual(res, (TIER2_THRESHOLD - 1) * 4) binop_count = [opname for opname in iter_opnames(ex) if opname == "_BINARY_OP_ADD_INT"] guard_both_int_count = [opname for opname in iter_opnames(ex) if opname == "_GUARD_BOTH_INT"] self.assertGreaterEqual(len(binop_count), 3) @@ -682,7 +679,7 @@ def testfunc(loops): num += 1 return a - res, ex = self._run_with_optimizer(testfunc, 64) + res, ex = self._run_with_optimizer(testfunc, TIER2_THRESHOLD) self.assertIsNotNone(ex) binop_count = [opname for opname in iter_opnames(ex) if opname == "_BINARY_OP_ADD_INT"] self.assertGreaterEqual(len(binop_count), 3) @@ -694,7 +691,7 @@ def dummy(x): for i in range(n): dummy(i) - res, ex = self._run_with_optimizer(testfunc, 32) + res, ex = self._run_with_optimizer(testfunc, TIER2_THRESHOLD) self.assertIsNotNone(ex) uops = get_opnames(ex) self.assertIn("_PUSH_FRAME", uops) @@ -708,8 +705,8 @@ def testfunc(n): x = i + i return x - res, ex = self._run_with_optimizer(testfunc, 32) - self.assertEqual(res, 62) + res, ex = self._run_with_optimizer(testfunc, TIER2_THRESHOLD) + self.assertEqual(res, (TIER2_THRESHOLD - 1) * 2) self.assertIsNotNone(ex) uops = get_opnames(ex) self.assertNotIn("_GUARD_BOTH_INT", uops) @@ -726,7 +723,7 @@ def testfunc(n): res = x + z + a + b return res - res, ex = 
self._run_with_optimizer(testfunc, 32) + res, ex = self._run_with_optimizer(testfunc, TIER2_THRESHOLD) self.assertEqual(res, 4) self.assertIsNotNone(ex) uops = get_opnames(ex) @@ -739,8 +736,8 @@ def testfunc(n): for _ in range(n): return [i for i in range(n)] - res, ex = self._run_with_optimizer(testfunc, 32) - self.assertEqual(res, list(range(32))) + res, ex = self._run_with_optimizer(testfunc, TIER2_THRESHOLD) + self.assertEqual(res, list(range(TIER2_THRESHOLD))) self.assertIsNotNone(ex) uops = get_opnames(ex) self.assertNotIn("_BINARY_OP_ADD_INT", uops) @@ -797,7 +794,7 @@ def testfunc(n): opt = _testinternalcapi.new_uop_optimizer() _testinternalcapi.set_optimizer(opt) - testfunc(64) + testfunc(_testinternalcapi.TIER2_THRESHOLD) ex = get_first_executor(testfunc) assert ex is not None @@ -817,8 +814,8 @@ def testfunc(n): a = a + 0.25 return a - res, ex = self._run_with_optimizer(testfunc, 32) - self.assertAlmostEqual(res, 33.0) + res, ex = self._run_with_optimizer(testfunc, TIER2_THRESHOLD) + self.assertAlmostEqual(res, TIER2_THRESHOLD + 1) self.assertIsNotNone(ex) uops = get_opnames(ex) guard_both_float_count = [opname for opname in iter_opnames(ex) if opname == "_GUARD_BOTH_FLOAT"] @@ -837,8 +834,8 @@ def testfunc(n): a = a - 0.25 return a - res, ex = self._run_with_optimizer(testfunc, 32) - self.assertAlmostEqual(res, -31.0) + res, ex = self._run_with_optimizer(testfunc, TIER2_THRESHOLD) + self.assertAlmostEqual(res, -TIER2_THRESHOLD + 1) self.assertIsNotNone(ex) uops = get_opnames(ex) guard_both_float_count = [opname for opname in iter_opnames(ex) if opname == "_GUARD_BOTH_FLOAT"] @@ -857,7 +854,7 @@ def testfunc(n): a = a * 1.0 return a - res, ex = self._run_with_optimizer(testfunc, 32) + res, ex = self._run_with_optimizer(testfunc, TIER2_THRESHOLD) self.assertAlmostEqual(res, 1.0) self.assertIsNotNone(ex) uops = get_opnames(ex) @@ -877,7 +874,7 @@ def testfunc(n): a + a return a - res, ex = self._run_with_optimizer(testfunc, 32) + res, ex = self._run_with_optimizer(testfunc, TIER2_THRESHOLD) self.assertEqual(res, "") self.assertIsNotNone(ex) uops = get_opnames(ex) @@ -895,7 +892,7 @@ def testfunc(n): x = a == a return x - res, ex = self._run_with_optimizer(testfunc, 32) + res, ex = self._run_with_optimizer(testfunc, TIER2_THRESHOLD) self.assertTrue(res) self.assertIsNotNone(ex) uops = get_opnames(ex) @@ -913,7 +910,7 @@ def testfunc(n): x = a == a return x - res, ex = self._run_with_optimizer(testfunc, 32) + res, ex = self._run_with_optimizer(testfunc, TIER2_THRESHOLD) self.assertTrue(res) self.assertIsNotNone(ex) uops = get_opnames(ex) @@ -931,7 +928,7 @@ def testfunc(n): x = 1 return x - res, ex = self._run_with_optimizer(testfunc, 32) + res, ex = self._run_with_optimizer(testfunc, TIER2_THRESHOLD) self.assertEqual(res, 1) self.assertIsNotNone(ex) uops = get_opnames(ex) @@ -951,7 +948,7 @@ def testfunc(n): x = 1 return x - res, ex = self._run_with_optimizer(testfunc, 32) + res, ex = self._run_with_optimizer(testfunc, TIER2_THRESHOLD) self.assertEqual(res, 1) self.assertIsNotNone(ex) uops = get_opnames(ex) @@ -971,7 +968,7 @@ def testfunc(n): x = a == a return x - res, ex = self._run_with_optimizer(testfunc, 32) + res, ex = self._run_with_optimizer(testfunc, TIER2_THRESHOLD) self.assertTrue(res) self.assertIsNotNone(ex) uops = get_opnames(ex) @@ -989,10 +986,10 @@ def testfunc(n): exec(src, ns, ns) testfunc = ns['testfunc'] ns['_test_global'] = 0 - _, ex = self._run_with_optimizer(testfunc, TIER2_THRESHOLD) + _, ex = self._run_with_optimizer(testfunc, TIER2_THRESHOLD - 1) 
self.assertIsNone(ex) ns['_test_global'] = 1 - _, ex = self._run_with_optimizer(testfunc, TIER2_THRESHOLD) + _, ex = self._run_with_optimizer(testfunc, TIER2_THRESHOLD - 1) self.assertIsNotNone(ex) uops = get_opnames(ex) self.assertNotIn("_GUARD_BOTH_INT", uops) @@ -1003,10 +1000,10 @@ def testfunc(n): exec(src, ns, ns) testfunc = ns['testfunc'] ns['_test_global'] = 0 - _, ex = self._run_with_optimizer(testfunc, TIER2_THRESHOLD) + _, ex = self._run_with_optimizer(testfunc, TIER2_THRESHOLD - 1) self.assertIsNone(ex) ns['_test_global'] = 3.14 - _, ex = self._run_with_optimizer(testfunc, TIER2_THRESHOLD) + _, ex = self._run_with_optimizer(testfunc, TIER2_THRESHOLD - 1) self.assertIsNone(ex) def test_combine_stack_space_checks_sequential(self): @@ -1023,8 +1020,8 @@ def testfunc(n): a += b + c + d return a - res, ex = self._run_with_optimizer(testfunc, 32) - self.assertEqual(res, 832) + res, ex = self._run_with_optimizer(testfunc, TIER2_THRESHOLD) + self.assertEqual(res, TIER2_THRESHOLD * 26) self.assertIsNotNone(ex) uops_and_operands = [(opcode, operand) for opcode, _, _, operand in ex] @@ -1050,8 +1047,8 @@ def testfunc(n): a += b + c return a - res, ex = self._run_with_optimizer(testfunc, 32) - self.assertEqual(res, 224) + res, ex = self._run_with_optimizer(testfunc, TIER2_THRESHOLD) + self.assertEqual(res, TIER2_THRESHOLD * 7) self.assertIsNotNone(ex) uops_and_operands = [(opcode, operand) for opcode, _, _, operand in ex] @@ -1085,8 +1082,8 @@ def testfunc(n): a += b + c + d + e return a - res, ex = self._run_with_optimizer(testfunc, 32) - self.assertEqual(res, 800) + res, ex = self._run_with_optimizer(testfunc, TIER2_THRESHOLD) + self.assertEqual(res, TIER2_THRESHOLD * 25) self.assertIsNotNone(ex) uops_and_operands = [(opcode, operand) for opcode, _, _, operand in ex] @@ -1121,8 +1118,8 @@ def testfunc(n): a += b + c + d + e return a - res, ex = self._run_with_optimizer(testfunc, 32) - self.assertEqual(res, 800) + res, ex = self._run_with_optimizer(testfunc, TIER2_THRESHOLD) + self.assertEqual(res, TIER2_THRESHOLD * 25) self.assertIsNotNone(ex) uops_and_operands = [(opcode, operand) for opcode, _, _, operand in ex] @@ -1157,16 +1154,16 @@ def dummy6(x): z = dummy0(y) return dummy4(z) def testfunc(n): - a = 0; - for _ in range(32): + a = 0 + for _ in range(n): b = dummy5(1) c = dummy0(1) d = dummy6(1) a += b + c + d return a - res, ex = self._run_with_optimizer(testfunc, 32) - self.assertEqual(res, 96) + res, ex = self._run_with_optimizer(testfunc, TIER2_THRESHOLD) + self.assertEqual(res, TIER2_THRESHOLD * 3) self.assertIsNotNone(ex) uops_and_operands = [(opcode, operand) for opcode, _, _, operand in ex] @@ -1225,8 +1222,8 @@ def testfunc(n): b += dummy15(7) return b - res, ex = self._run_with_optimizer(testfunc, 32) - self.assertEqual(res, 32 * (repetitions + 9)) + res, ex = self._run_with_optimizer(testfunc, TIER2_THRESHOLD) + self.assertEqual(res, TIER2_THRESHOLD * (repetitions + 9)) self.assertIsNotNone(ex) uops_and_operands = [(opcode, operand) for opcode, _, _, operand in ex] @@ -1259,8 +1256,13 @@ def testfunc(n): a += dummy15(n) return a - res, ex = self._run_with_optimizer(testfunc, 32) - self.assertEqual(res, 42 * 32) + recursion_limit = sys.getrecursionlimit() + try: + sys.setrecursionlimit(TIER2_THRESHOLD + recursion_limit) + res, ex = self._run_with_optimizer(testfunc, TIER2_THRESHOLD) + finally: + sys.setrecursionlimit(recursion_limit) + self.assertEqual(res, TIER2_THRESHOLD * 42) self.assertIsNotNone(ex) uops_and_operands = [(opcode, operand) for opcode, _, _, operand in ex] 
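# A minimal, self-contained sketch of the pattern used just above: temporarily
# raising the recursion limit so a deeply recursive call can run to completion,
# then restoring the old limit in a finally block. `deep_call` and
# `depth_headroom` are hypothetical placeholders, not names from this patch.
import sys

def run_with_raised_recursion_limit(deep_call, depth_headroom):
    old_limit = sys.getrecursionlimit()
    try:
        # Keep the existing headroom and add what the call is expected to need.
        sys.setrecursionlimit(old_limit + depth_headroom)
        return deep_call()
    finally:
        # Always restore the previous limit, even if deep_call() raises.
        sys.setrecursionlimit(old_limit)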
@@ -1307,8 +1309,8 @@ def testfunc(n): for i in range(n): gen() return i - res, ex = self._run_with_optimizer(testfunc, 20) - self.assertEqual(res, 19) + res, ex = self._run_with_optimizer(testfunc, TIER2_THRESHOLD) + self.assertEqual(res, TIER2_THRESHOLD - 1) self.assertIsNotNone(ex) self.assertIn("_RETURN_GENERATOR", get_opnames(ex)) @@ -1322,8 +1324,8 @@ def testfunc(n): for i in g: s += i return s - res, ex = self._run_with_optimizer(testfunc, 20) - self.assertEqual(res, 190) + res, ex = self._run_with_optimizer(testfunc, TIER2_THRESHOLD) + self.assertEqual(res, sum(range(TIER2_THRESHOLD))) self.assertIsNotNone(ex) self.assertIn("_FOR_ITER_GEN_FRAME", get_opnames(ex)) @@ -1342,7 +1344,7 @@ def test_modified_local_is_seen_by_optimized_code(self): def test_guard_type_version_removed(self): def thing(a): x = 0 - for _ in range(100): + for _ in range(TIER2_THRESHOLD): x += a.attr x += a.attr return x @@ -1353,7 +1355,7 @@ class Foo: res, ex = self._run_with_optimizer(thing, Foo()) opnames = list(iter_opnames(ex)) self.assertIsNotNone(ex) - self.assertEqual(res, 200) + self.assertEqual(res, TIER2_THRESHOLD * 2) guard_type_version_count = opnames.count("_GUARD_TYPE_VERSION") self.assertEqual(guard_type_version_count, 1) @@ -1367,7 +1369,7 @@ def fn(): def thing(a): x = 0 - for _ in range(100): + for _ in range(TIER2_THRESHOLD): x += a.attr fn() x += a.attr @@ -1379,24 +1381,21 @@ class Foo: res, ex = self._run_with_optimizer(thing, Foo()) opnames = list(iter_opnames(ex)) self.assertIsNotNone(ex) - self.assertEqual(res, 200) + self.assertEqual(res, TIER2_THRESHOLD * 2) guard_type_version_count = opnames.count("_GUARD_TYPE_VERSION") self.assertEqual(guard_type_version_count, 1) - def test_guard_type_version_not_removed(self): - """ - Verify that the guard type version is not removed if we modify the class - """ + def test_guard_type_version_removed_invalidation(self): def thing(a): x = 0 - for i in range(100): + for i in range(TIER2_THRESHOLD * 2 + 1): x += a.attr - # for the first 90 iterations we set the attribute on this dummy function which shouldn't - # trigger the type watcher - # then after 90 it should trigger it and stop optimizing - # Note that the code needs to be in this weird form so it's optimized inline without any control flow - setattr((Foo, Bar)[i < 90], "attr", 2) + # The first TIER2_THRESHOLD iterations we set the attribute on + # this dummy class, which shouldn't trigger the type watcher. 
+ # Note that the code needs to be in this weird form so it's + # optimized inline without any control flow: + setattr((Bar, Foo)[i == TIER2_THRESHOLD + 1], "attr", 2) x += a.attr return x @@ -1408,24 +1407,21 @@ class Bar: res, ex = self._run_with_optimizer(thing, Foo()) opnames = list(iter_opnames(ex)) - self.assertIsNotNone(ex) - self.assertEqual(res, 219) - guard_type_version_count = opnames.count("_GUARD_TYPE_VERSION") - self.assertEqual(guard_type_version_count, 2) + self.assertEqual(res, TIER2_THRESHOLD * 6 + 1) + call = opnames.index("_CALL_BUILTIN_FAST") + load_attr_top = opnames.index("_LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES", 0, call) + load_attr_bottom = opnames.index("_LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES", call) + self.assertEqual(opnames[:load_attr_top].count("_GUARD_TYPE_VERSION"), 1) + self.assertEqual(opnames[call:load_attr_bottom].count("_CHECK_VALIDITY"), 1) - - @unittest.expectedFailure - def test_guard_type_version_not_removed_escaping(self): - """ - Verify that the guard type version is not removed if have an escaping function - """ + def test_guard_type_version_removed_escaping(self): def thing(a): x = 0 - for i in range(100): + for i in range(TIER2_THRESHOLD): x += a.attr - # eval should be escaping and so should cause optimization to stop and preserve both type versions + # eval should be escaping eval("None") x += a.attr return x @@ -1435,12 +1431,12 @@ class Foo: res, ex = self._run_with_optimizer(thing, Foo()) opnames = list(iter_opnames(ex)) self.assertIsNotNone(ex) - self.assertEqual(res, 200) - guard_type_version_count = opnames.count("_GUARD_TYPE_VERSION") - # Note: This will actually be 1 for noe - # https://github.com/python/cpython/pull/119365#discussion_r1626220129 - self.assertEqual(guard_type_version_count, 2) - + self.assertEqual(res, TIER2_THRESHOLD * 2) + call = opnames.index("_CALL_BUILTIN_FAST_WITH_KEYWORDS") + load_attr_top = opnames.index("_LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES", 0, call) + load_attr_bottom = opnames.index("_LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES", call) + self.assertEqual(opnames[:load_attr_top].count("_GUARD_TYPE_VERSION"), 1) + self.assertEqual(opnames[call:load_attr_bottom].count("_CHECK_VALIDITY"), 1) def test_guard_type_version_executor_invalidated(self): """ @@ -1449,7 +1445,7 @@ def test_guard_type_version_executor_invalidated(self): def thing(a): x = 0 - for i in range(100): + for i in range(TIER2_THRESHOLD): x += a.attr x += a.attr return x @@ -1458,7 +1454,7 @@ class Foo: attr = 1 res, ex = self._run_with_optimizer(thing, Foo()) - self.assertEqual(res, 200) + self.assertEqual(res, TIER2_THRESHOLD * 2) self.assertIsNotNone(ex) self.assertEqual(list(iter_opnames(ex)).count("_GUARD_TYPE_VERSION"), 1) self.assertTrue(ex.is_valid()) @@ -1486,6 +1482,26 @@ def fn(a): fn(A()) + def test_func_guards_removed_or_reduced(self): + def testfunc(n): + for i in range(n): + # Only works on functions promoted to constants + global_identity(i) + + opt = _testinternalcapi.new_uop_optimizer() + with temporary_optimizer(opt): + testfunc(TIER2_THRESHOLD) + + ex = get_first_executor(testfunc) + self.assertIsNotNone(ex) + uops = get_opnames(ex) + self.assertIn("_PUSH_FRAME", uops) + # Strength reduced version + self.assertIn("_CHECK_FUNCTION_VERSION_INLINE", uops) + self.assertNotIn("_CHECK_FUNCTION_VERSION", uops) + # Removed guard + self.assertNotIn("_CHECK_FUNCTION_EXACT_ARGS", uops) + def test_jit_error_pops(self): """ Tests that the correct number of pops are inserted into the @@ -1495,5 +1511,9 @@ def test_jit_error_pops(self): with 
self.assertRaises(TypeError): {item for item in items} + +def global_identity(x): + return x + if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_codecs.py b/Lib/test/test_codecs.py index 290656f070503a..e51f7e0ee12b1f 100644 --- a/Lib/test/test_codecs.py +++ b/Lib/test/test_codecs.py @@ -3256,7 +3256,11 @@ def test_code_page_name(self): codecs.code_page_decode, self.CP_UTF8, b'\xff', 'strict', True) def check_decode(self, cp, tests): - for raw, errors, expected in tests: + for raw, errors, expected, *rest in tests: + if rest: + altexpected, = rest + else: + altexpected = expected if expected is not None: try: decoded = codecs.code_page_decode(cp, raw, errors, True) @@ -3273,8 +3277,21 @@ def check_decode(self, cp, tests): self.assertRaises(UnicodeDecodeError, codecs.code_page_decode, cp, raw, errors, True) + if altexpected is not None: + decoded = raw.decode(f'cp{cp}', errors) + self.assertEqual(decoded, altexpected, + '%a.decode("cp%s", %r)=%a != %a' + % (raw, cp, errors, decoded, altexpected)) + else: + self.assertRaises(UnicodeDecodeError, + raw.decode, f'cp{cp}', errors) + def check_encode(self, cp, tests): - for text, errors, expected in tests: + for text, errors, expected, *rest in tests: + if rest: + altexpected, = rest + else: + altexpected = expected if expected is not None: try: encoded = codecs.code_page_encode(cp, text, errors) @@ -3285,18 +3302,26 @@ def check_encode(self, cp, tests): '%a.encode("cp%s", %r)=%a != %a' % (text, cp, errors, encoded[0], expected)) self.assertEqual(encoded[1], len(text)) + + encoded = text.encode(f'cp{cp}', errors) + self.assertEqual(encoded, altexpected, + '%a.encode("cp%s", %r)=%a != %a' + % (text, cp, errors, encoded, altexpected)) else: self.assertRaises(UnicodeEncodeError, codecs.code_page_encode, cp, text, errors) + self.assertRaises(UnicodeEncodeError, + text.encode, f'cp{cp}', errors) def test_cp932(self): self.check_encode(932, ( ('abc', 'strict', b'abc'), ('\uff44\u9a3e', 'strict', b'\x82\x84\xe9\x80'), + ('\uf8f3', 'strict', b'\xff'), # test error handlers ('\xff', 'strict', None), ('[\xff]', 'ignore', b'[]'), - ('[\xff]', 'replace', b'[y]'), + ('[\xff]', 'replace', b'[y]', b'[?]'), ('[\u20ac]', 'replace', b'[?]'), ('[\xff]', 'backslashreplace', b'[\\xff]'), ('[\xff]', 'namereplace', @@ -3310,12 +3335,12 @@ def test_cp932(self): (b'abc', 'strict', 'abc'), (b'\x82\x84\xe9\x80', 'strict', '\uff44\u9a3e'), # invalid bytes - (b'[\xff]', 'strict', None), - (b'[\xff]', 'ignore', '[]'), - (b'[\xff]', 'replace', '[\ufffd]'), - (b'[\xff]', 'backslashreplace', '[\\xff]'), - (b'[\xff]', 'surrogateescape', '[\udcff]'), - (b'[\xff]', 'surrogatepass', None), + (b'[\xff]', 'strict', None, '[\uf8f3]'), + (b'[\xff]', 'ignore', '[]', '[\uf8f3]'), + (b'[\xff]', 'replace', '[\ufffd]', '[\uf8f3]'), + (b'[\xff]', 'backslashreplace', '[\\xff]', '[\uf8f3]'), + (b'[\xff]', 'surrogateescape', '[\udcff]', '[\uf8f3]'), + (b'[\xff]', 'surrogatepass', None, '[\uf8f3]'), (b'\x81\x00abc', 'strict', None), (b'\x81\x00abc', 'ignore', '\x00abc'), (b'\x81\x00abc', 'replace', '\ufffd\x00abc'), @@ -3330,7 +3355,7 @@ def test_cp1252(self): # test error handlers ('\u0141', 'strict', None), ('\u0141', 'ignore', b''), - ('\u0141', 'replace', b'L'), + ('\u0141', 'replace', b'L', b'?'), ('\udc98', 'surrogateescape', b'\x98'), ('\udc98', 'surrogatepass', None), )) @@ -3340,6 +3365,59 @@ def test_cp1252(self): (b'\xff', 'strict', '\xff'), )) + def test_cp708(self): + self.check_encode(708, ( + ('abc2%', 'strict', b'abc2%'), + ('\u060c\u0621\u064a', 'strict', 
b'\xac\xc1\xea'), + ('\u2562\xe7\xa0', 'strict', b'\x86\x87\xff'), + ('\x9a\x9f', 'strict', b'\x9a\x9f'), + ('\u256b', 'strict', b'\xc0'), + # test error handlers + ('[\u0662]', 'strict', None), + ('[\u0662]', 'ignore', b'[]'), + ('[\u0662]', 'replace', b'[?]'), + ('\udca0', 'surrogateescape', b'\xa0'), + ('\udca0', 'surrogatepass', None), + )) + self.check_decode(708, ( + (b'abc2%', 'strict', 'abc2%'), + (b'\xac\xc1\xea', 'strict', '\u060c\u0621\u064a'), + (b'\x86\x87\xff', 'strict', '\u2562\xe7\xa0'), + (b'\x9a\x9f', 'strict', '\x9a\x9f'), + (b'\xc0', 'strict', '\u256b'), + # test error handlers + (b'\xa0', 'strict', None), + (b'[\xa0]', 'ignore', '[]'), + (b'[\xa0]', 'replace', '[\ufffd]'), + (b'[\xa0]', 'backslashreplace', '[\\xa0]'), + (b'[\xa0]', 'surrogateescape', '[\udca0]'), + (b'[\xa0]', 'surrogatepass', None), + )) + + def test_cp20106(self): + self.check_encode(20106, ( + ('abc', 'strict', b'abc'), + ('\xa7\xc4\xdf', 'strict', b'@[~'), + # test error handlers + ('@', 'strict', None), + ('@', 'ignore', b''), + ('@', 'replace', b'?'), + ('\udcbf', 'surrogateescape', b'\xbf'), + ('\udcbf', 'surrogatepass', None), + )) + self.check_decode(20106, ( + (b'abc', 'strict', 'abc'), + (b'@[~', 'strict', '\xa7\xc4\xdf'), + (b'\xe1\xfe', 'strict', 'a\xdf'), + # test error handlers + (b'(\xbf)', 'strict', None), + (b'(\xbf)', 'ignore', '()'), + (b'(\xbf)', 'replace', '(\ufffd)'), + (b'(\xbf)', 'backslashreplace', '(\\xbf)'), + (b'(\xbf)', 'surrogateescape', '(\udcbf)'), + (b'(\xbf)', 'surrogatepass', None), + )) + def test_cp_utf7(self): cp = 65000 self.check_encode(cp, ( @@ -3412,17 +3490,15 @@ def test_incremental(self): False) self.assertEqual(decoded, ('abc', 3)) - def test_mbcs_alias(self): - # Check that looking up our 'default' codepage will return - # mbcs when we don't have a more specific one available - code_page = 99_999 - name = f'cp{code_page}' - with mock.patch('_winapi.GetACP', return_value=code_page): - try: - codec = codecs.lookup(name) - self.assertEqual(codec.name, 'mbcs') - finally: - codecs.unregister(name) + def test_mbcs_code_page(self): + # Check that codec for the current Windows (ANSII) code page is + # always available. + try: + from _winapi import GetACP + except ImportError: + self.skipTest('requires _winapi.GetACP') + cp = GetACP() + codecs.lookup(f'cp{cp}') @support.bigmemtest(size=2**31, memuse=7, dry_run=False) def test_large_input(self, size): diff --git a/Lib/test/test_csv.py b/Lib/test/test_csv.py index ce5c03659f1979..4af8f7f480e759 100644 --- a/Lib/test/test_csv.py +++ b/Lib/test/test_csv.py @@ -1,4 +1,4 @@ -# Copyright (C) 2001,2002 Python Software Foundation +# Copyright (C) 2001 Python Software Foundation # csv package unit tests import copy diff --git a/Lib/test/test_ctypes/test_dlerror.py b/Lib/test/test_ctypes/test_dlerror.py new file mode 100644 index 00000000000000..4441e30cd7a2a7 --- /dev/null +++ b/Lib/test/test_ctypes/test_dlerror.py @@ -0,0 +1,123 @@ +import os +import sys +import unittest +import platform + +FOO_C = r""" +#include + +/* This is a 'GNU indirect function' (IFUNC) that will be called by + dlsym() to resolve the symbol "foo" to an address. Typically, such + a function would return the address of an actual function, but it + can also just return NULL. For some background on IFUNCs, see + https://willnewton.name/uncategorized/using-gnu-indirect-functions. + + Adapted from Michael Kerrisk's answer: https://stackoverflow.com/a/53590014. 
+*/ + +asm (".type foo STT_GNU_IFUNC"); + +void *foo(void) +{ + write($DESCRIPTOR, "OK", 2); + return NULL; +} +""" + + +@unittest.skipUnless(sys.platform.startswith('linux'), + 'Test only valid for Linux') +class TestNullDlsym(unittest.TestCase): + """GH-126554: Ensure that we catch NULL dlsym return values + + In rare cases, such as when using GNU IFUNCs, dlsym(), + the C function that ctypes' CDLL uses to get the address + of symbols, can return NULL. + + The objective way of telling if an error during symbol + lookup happened is to call glibc's dlerror() and check + for a non-NULL return value. + + However, there can be cases where dlsym() returns NULL + and dlerror() is also NULL, meaning that glibc did not + encounter any error. + + In the case of ctypes, we subjectively treat that as + an error, and throw a relevant exception. + + This test case ensures that we correctly enforce + this 'dlsym returned NULL -> throw Error' rule. + """ + + def test_null_dlsym(self): + import subprocess + import tempfile + + # To avoid ImportErrors on Windows, where _ctypes does not have + # dlopen and dlsym, + # import here, i.e., inside the test function. + # The skipUnless('linux') decorator ensures that we're on linux + # if we're executing these statements. + from ctypes import CDLL, c_int + from _ctypes import dlopen, dlsym + + retcode = subprocess.call(["gcc", "--version"], + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL) + if retcode != 0: + self.skipTest("gcc is missing") + + pipe_r, pipe_w = os.pipe() + self.addCleanup(os.close, pipe_r) + self.addCleanup(os.close, pipe_w) + + with tempfile.TemporaryDirectory() as d: + # Create a C file with a GNU Indirect Function (FOO_C) + # and compile it into a shared library. + srcname = os.path.join(d, 'foo.c') + dstname = os.path.join(d, 'libfoo.so') + with open(srcname, 'w') as f: + f.write(FOO_C.replace('$DESCRIPTOR', str(pipe_w))) + args = ['gcc', '-fPIC', '-shared', '-o', dstname, srcname] + p = subprocess.run(args, capture_output=True) + + if p.returncode != 0: + # IFUNC is not supported on all architectures. + if platform.machine() == 'x86_64': + # It should be supported here. Something else went wrong. + p.check_returncode() + else: + # IFUNC might not be supported on this machine. + self.skipTest(f"could not compile indirect function: {p}") + + # Case #1: Test 'PyCFuncPtr_FromDll' from Modules/_ctypes/_ctypes.c + L = CDLL(dstname) + with self.assertRaisesRegex(AttributeError, "function 'foo' not found"): + # Try accessing the 'foo' symbol. + # It should resolve via dlsym() to NULL, + # and since we subjectively treat NULL + # addresses as errors, we should get + # an error. 
+ L.foo + + # Assert that the IFUNC was called + self.assertEqual(os.read(pipe_r, 2), b'OK') + + # Case #2: Test 'CDataType_in_dll_impl' from Modules/_ctypes/_ctypes.c + with self.assertRaisesRegex(ValueError, "symbol 'foo' not found"): + c_int.in_dll(L, "foo") + + # Assert that the IFUNC was called + self.assertEqual(os.read(pipe_r, 2), b'OK') + + # Case #3: Test 'py_dl_sym' from Modules/_ctypes/callproc.c + L = dlopen(dstname) + with self.assertRaisesRegex(OSError, "symbol 'foo' not found"): + dlsym(L, "foo") + + # Assert that the IFUNC was called + self.assertEqual(os.read(pipe_r, 2), b'OK') + + +if __name__ == "__main__": + unittest.main() diff --git a/Lib/test/test_ctypes/test_win32.py b/Lib/test/test_ctypes/test_win32.py index 31919118670613..54b47dc28fbc73 100644 --- a/Lib/test/test_ctypes/test_win32.py +++ b/Lib/test/test_ctypes/test_win32.py @@ -65,15 +65,16 @@ def test_PARAM(self): sizeof(c_void_p)) def test_COMError(self): - from _ctypes import COMError + from ctypes import COMError if support.HAVE_DOCSTRINGS: self.assertEqual(COMError.__doc__, "Raised when a COM method call failed.") - ex = COMError(-1, "text", ("details",)) + ex = COMError(-1, "text", ("descr", "source", "helpfile", 0, "progid")) self.assertEqual(ex.hresult, -1) self.assertEqual(ex.text, "text") - self.assertEqual(ex.details, ("details",)) + self.assertEqual(ex.details, + ("descr", "source", "helpfile", 0, "progid")) self.assertEqual(COMError.mro(), [COMError, Exception, BaseException, object]) diff --git a/Lib/test/test_dict.py b/Lib/test/test_dict.py index 4030716efb51f9..c94dc2df4f0a7f 100644 --- a/Lib/test/test_dict.py +++ b/Lib/test/test_dict.py @@ -880,115 +880,6 @@ class C(object): gc.collect() self.assertIs(ref(), None, "Cycle was not collected") - def _not_tracked(self, t): - # Nested containers can take several collections to untrack - gc.collect() - gc.collect() - self.assertFalse(gc.is_tracked(t), t) - - def _tracked(self, t): - self.assertTrue(gc.is_tracked(t), t) - gc.collect() - gc.collect() - self.assertTrue(gc.is_tracked(t), t) - - def test_string_keys_can_track_values(self): - # Test that this doesn't leak. - for i in range(10): - d = {} - for j in range(10): - d[str(j)] = j - d["foo"] = d - - @support.cpython_only - def test_track_literals(self): - # Test GC-optimization of dict literals - x, y, z, w = 1.5, "a", (1, None), [] - - self._not_tracked({}) - self._not_tracked({x:(), y:x, z:1}) - self._not_tracked({1: "a", "b": 2}) - self._not_tracked({1: 2, (None, True, False, ()): int}) - self._not_tracked({1: object()}) - - # Dicts with mutable elements are always tracked, even if those - # elements are not tracked right now. - self._tracked({1: []}) - self._tracked({1: ([],)}) - self._tracked({1: {}}) - self._tracked({1: set()}) - - @support.cpython_only - def test_track_dynamic(self): - # Test GC-optimization of dynamically-created dicts - class MyObject(object): - pass - x, y, z, w, o = 1.5, "a", (1, object()), [], MyObject() - - d = dict() - self._not_tracked(d) - d[1] = "a" - self._not_tracked(d) - d[y] = 2 - self._not_tracked(d) - d[z] = 3 - self._not_tracked(d) - self._not_tracked(d.copy()) - d[4] = w - self._tracked(d) - self._tracked(d.copy()) - d[4] = None - self._not_tracked(d) - self._not_tracked(d.copy()) - - # dd isn't tracked right now, but it may mutate and therefore d - # which contains it must be tracked. 
- d = dict() - dd = dict() - d[1] = dd - self._not_tracked(dd) - self._tracked(d) - dd[1] = d - self._tracked(dd) - - d = dict.fromkeys([x, y, z]) - self._not_tracked(d) - dd = dict() - dd.update(d) - self._not_tracked(dd) - d = dict.fromkeys([x, y, z, o]) - self._tracked(d) - dd = dict() - dd.update(d) - self._tracked(dd) - - d = dict(x=x, y=y, z=z) - self._not_tracked(d) - d = dict(x=x, y=y, z=z, w=w) - self._tracked(d) - d = dict() - d.update(x=x, y=y, z=z) - self._not_tracked(d) - d.update(w=w) - self._tracked(d) - - d = dict([(x, y), (z, 1)]) - self._not_tracked(d) - d = dict([(x, y), (z, w)]) - self._tracked(d) - d = dict() - d.update([(x, y), (z, 1)]) - self._not_tracked(d) - d.update([(x, y), (z, w)]) - self._tracked(d) - - @support.cpython_only - def test_track_subtypes(self): - # Dict subtypes are always tracked - class MyDict(dict): - pass - self._tracked(MyDict()) - def make_shared_key_dict(self, n): class C: pass diff --git a/Lib/test/test_email/test_asian_codecs.py b/Lib/test/test_email/test_asian_codecs.py index 1e0caeeaed0810..ca44f54c69b39b 100644 --- a/Lib/test/test_email/test_asian_codecs.py +++ b/Lib/test/test_email/test_asian_codecs.py @@ -1,4 +1,4 @@ -# Copyright (C) 2002-2006 Python Software Foundation +# Copyright (C) 2002 Python Software Foundation # Contact: email-sig@python.org # email package unit tests for (optional) Asian codecs diff --git a/Lib/test/test_email/test_email.py b/Lib/test/test_email/test_email.py index 65ddbabcaa1997..abe9ef2e94409f 100644 --- a/Lib/test/test_email/test_email.py +++ b/Lib/test/test_email/test_email.py @@ -1,4 +1,4 @@ -# Copyright (C) 2001-2010 Python Software Foundation +# Copyright (C) 2001 Python Software Foundation # Contact: email-sig@python.org # email package unit tests diff --git a/Lib/test/test_email/test_message.py b/Lib/test/test_email/test_message.py index 034f7626c1fc7c..96979db27f3a21 100644 --- a/Lib/test/test_email/test_message.py +++ b/Lib/test/test_email/test_message.py @@ -1,6 +1,6 @@ -import unittest import textwrap -from email import policy, message_from_string +import unittest +from email import message_from_bytes, message_from_string, policy from email.message import EmailMessage, MIMEPart from test.test_email import TestEmailBase, parameterize @@ -958,6 +958,52 @@ def test_folding_with_utf8_encoding_8(self): b'123456789-123456789\n 123456789 Hello ' b'=?utf-8?q?W=C3=B6rld!?= 123456789 123456789\n\n') + def test_folding_with_short_nospace_1(self): + # bpo-36520 + # + # Fold a line that contains a long whitespace after + # the fold point. + + m = EmailMessage(policy.default) + m['Message-ID'] = '123456789' * 3 + parsed_msg = message_from_bytes(m.as_bytes(), policy=policy.default) + self.assertEqual(parsed_msg['Message-ID'], m['Message-ID']) + + def test_folding_with_long_nospace_default_policy_1(self): + # Fixed: https://github.com/python/cpython/issues/124452 + # + # When the value is too long, it should be converted back + # to its original form without any modifications. 
+ + m = EmailMessage(policy.default) + message = '123456789' * 10 + m['Message-ID'] = message + self.assertEqual(m.as_bytes(), + f'Message-ID:\n {message}\n\n'.encode()) + parsed_msg = message_from_bytes(m.as_bytes(), policy=policy.default) + self.assertEqual(parsed_msg['Message-ID'], m['Message-ID']) + + def test_folding_with_long_nospace_compat32_policy_1(self): + m = EmailMessage(policy.compat32) + message = '123456789' * 10 + m['Message-ID'] = message + parsed_msg = message_from_bytes(m.as_bytes(), policy=policy.default) + self.assertEqual(parsed_msg['Message-ID'], m['Message-ID']) + + def test_folding_with_long_nospace_smtp_policy_1(self): + m = EmailMessage(policy.SMTP) + message = '123456789' * 10 + m['Message-ID'] = message + parsed_msg = message_from_bytes(m.as_bytes(), policy=policy.default) + self.assertEqual(parsed_msg['Message-ID'], m['Message-ID']) + + def test_folding_with_long_nospace_http_policy_1(self): + m = EmailMessage(policy.HTTP) + message = '123456789' * 10 + m['Message-ID'] = message + parsed_msg = message_from_bytes(m.as_bytes(), policy=policy.default) + self.assertEqual(parsed_msg['Message-ID'], m['Message-ID']) + def test_get_body_malformed(self): """test for bpo-42892""" msg = textwrap.dedent("""\ diff --git a/Lib/test/test_email/torture_test.py b/Lib/test/test_email/torture_test.py index 9cf9362c9b77e0..d15948a38b25dd 100644 --- a/Lib/test/test_email/torture_test.py +++ b/Lib/test/test_email/torture_test.py @@ -1,4 +1,4 @@ -# Copyright (C) 2002-2004 Python Software Foundation +# Copyright (C) 2002 Python Software Foundation # # A torture test of the email package. This should not be run as part of the # standard Python test suite since it requires several meg of email messages diff --git a/Lib/test/test_free_threading/test_code.py b/Lib/test/test_free_threading/test_code.py new file mode 100644 index 00000000000000..a5136a3ba4edc7 --- /dev/null +++ b/Lib/test/test_free_threading/test_code.py @@ -0,0 +1,30 @@ +import unittest + +from threading import Thread +from unittest import TestCase + +from test.support import threading_helper + +@threading_helper.requires_working_threading() +class TestCode(TestCase): + def test_code_attrs(self): + """Test concurrent accesses to lazily initialized code attributes""" + code_objects = [] + for _ in range(1000): + code_objects.append(compile("a + b", "", "eval")) + + def run_in_thread(): + for code in code_objects: + self.assertIsInstance(code.co_code, bytes) + self.assertIsInstance(code.co_freevars, tuple) + self.assertIsInstance(code.co_varnames, tuple) + + threads = [Thread(target=run_in_thread) for _ in range(2)] + for thread in threads: + thread.start() + for thread in threads: + thread.join() + + +if __name__ == "__main__": + unittest.main() diff --git a/Lib/test/test_free_threading/test_dict.py b/Lib/test/test_free_threading/test_dict.py index 80daf0d9cae9e0..13717cb39fa35d 100644 --- a/Lib/test/test_free_threading/test_dict.py +++ b/Lib/test/test_free_threading/test_dict.py @@ -142,6 +142,70 @@ def writer_func(l): for ref in thread_list: self.assertIsNone(ref()) + def test_racing_set_object_dict(self): + """Races assigning to __dict__ should be thread safe""" + class C: pass + class MyDict(dict): pass + for cyclic in (False, True): + f = C() + f.__dict__ = {"foo": 42} + THREAD_COUNT = 10 + + def writer_func(l): + for i in range(1000): + if cyclic: + other_d = {} + d = MyDict({"foo": 100}) + if cyclic: + d["x"] = other_d + other_d["bar"] = d + l.append(weakref.ref(d)) + f.__dict__ = d + + def reader_func(): + for i in 
range(1000): + f.foo + + lists = [] + readers = [] + writers = [] + for x in range(THREAD_COUNT): + thread_list = [] + lists.append(thread_list) + writer = Thread(target=partial(writer_func, thread_list)) + writers.append(writer) + + for x in range(THREAD_COUNT): + reader = Thread(target=partial(reader_func)) + readers.append(reader) + + for writer in writers: + writer.start() + for reader in readers: + reader.start() + + for writer in writers: + writer.join() + + for reader in readers: + reader.join() + + f.__dict__ = {} + gc.collect() + gc.collect() + + count = 0 + ids = set() + for thread_list in lists: + for i, ref in enumerate(thread_list): + if ref() is None: + continue + count += 1 + ids.add(id(ref())) + count += 1 + + self.assertEqual(count, 0) + if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_functools.py b/Lib/test/test_functools.py index d590af090abc6e..6d60f6941c4c5d 100644 --- a/Lib/test/test_functools.py +++ b/Lib/test/test_functools.py @@ -1005,6 +1005,29 @@ def __getitem__(self, i): d = {"one": 1, "two": 2, "three": 3} self.assertEqual(self.reduce(add, d), "".join(d.keys())) + # test correctness of keyword usage of `initial` in `reduce` + def test_initial_keyword(self): + def add(x, y): + return x + y + self.assertEqual( + self.reduce(add, ['a', 'b', 'c'], ''), + self.reduce(add, ['a', 'b', 'c'], initial=''), + ) + self.assertEqual( + self.reduce(add, [['a', 'c'], [], ['d', 'w']], []), + self.reduce(add, [['a', 'c'], [], ['d', 'w']], initial=[]), + ) + self.assertEqual( + self.reduce(lambda x, y: x*y, range(2,8), 1), + self.reduce(lambda x, y: x*y, range(2,8), initial=1), + ) + self.assertEqual( + self.reduce(lambda x, y: x*y, range(2,21), 1), + self.reduce(lambda x, y: x*y, range(2,21), initial=1), + ) + self.assertRaises(TypeError, self.reduce, add, [0, 1], initial="") + self.assertEqual(self.reduce(42, "", initial="1"), "1") # func is never called with one item + @unittest.skipUnless(c_functools, 'requires the C _functools module') class TestReduceC(TestReduce, unittest.TestCase): diff --git a/Lib/test/test_gc.py b/Lib/test/test_gc.py index 2b3c0d3baddeaf..0372815b9bfd27 100644 --- a/Lib/test/test_gc.py +++ b/Lib/test/test_gc.py @@ -1082,6 +1082,44 @@ def __del__(self): gc.collect() self.assertTrue(collected) + def test_traverse_frozen_objects(self): + # See GH-126312: Objects that were not frozen could traverse over + # a frozen object on the free-threaded build, which would cause + # a negative reference count. + x = [1, 2, 3] + gc.freeze() + y = [x] + y.append(y) + del y + gc.collect() + gc.unfreeze() + + def test_deferred_refcount_frozen(self): + # Also from GH-126312: objects that use deferred reference counting + # weren't ignored if they were frozen. Unfortunately, it's pretty + # difficult to come up with a case that triggers this. + # + # Calling gc.collect() while the garbage collector is frozen doesn't + # trigger this normally, but it *does* if it's inside unittest for whatever + # reason. We can't call unittest from inside a test, so it has to be + # in a subprocess. 
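# A rough sketch of what "run it in a subprocess" can look like using only the
# standard library, for readers unfamiliar with the assert_python_ok helper
# used below; the snippet and variable names here are illustrative only.
import subprocess
import sys
import textwrap

snippet = textwrap.dedent("""
    import gc
    gc.freeze()
    gc.collect()
    gc.unfreeze()
""")
# -c runs the snippet in a fresh interpreter; a non-zero return code or any
# stderr output indicates the child process failed.
result = subprocess.run([sys.executable, "-c", snippet],
                        capture_output=True, text=True)
assert result.returncode == 0, result.stderr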
+ source = textwrap.dedent(""" + import gc + import unittest + + + class Test(unittest.TestCase): + def test_something(self): + gc.freeze() + gc.collect() + gc.unfreeze() + + + if __name__ == "__main__": + unittest.main() + """) + assert_python_ok("-c", source) + class IncrementalGCTests(unittest.TestCase): diff --git a/Lib/test/test_generators.py b/Lib/test/test_generators.py index bf2cb1160723b0..2ea6dba12effc1 100644 --- a/Lib/test/test_generators.py +++ b/Lib/test/test_generators.py @@ -758,7 +758,8 @@ def check_stack_names(self, frame, expected): while frame: name = frame.f_code.co_name # Stop checking frames when we get to our test helper. - if name.startswith('check_') or name.startswith('call_'): + if (name.startswith('check_') or name.startswith('call_') + or name.startswith('test')): break names.append(name) @@ -799,6 +800,25 @@ def call_throw(gen): self.check_yield_from_example(call_throw) + def test_throw_with_yield_from_custom_generator(self): + + class CustomGen: + def __init__(self, test): + self.test = test + def throw(self, *args): + self.test.check_stack_names(sys._getframe(), ['throw', 'g']) + def __iter__(self): + return self + def __next__(self): + return 42 + + def g(target): + yield from target + + gen = g(CustomGen(self)) + gen.send(None) + gen.throw(RuntimeError) + class YieldFromTests(unittest.TestCase): def test_generator_gi_yieldfrom(self): diff --git a/Lib/test/test_getopt.py b/Lib/test/test_getopt.py index c8b3442de4aa77..ed967ad27619ae 100644 --- a/Lib/test/test_getopt.py +++ b/Lib/test/test_getopt.py @@ -1,11 +1,12 @@ # test_getopt.py # David Goodger 2000-08-19 -from test.support.os_helper import EnvironmentVarGuard import doctest -import unittest - import getopt +import sys +import unittest +from test.support.i18n_helper import TestTranslationsBase, update_translation_snapshots +from test.support.os_helper import EnvironmentVarGuard sentinel = object() @@ -19,21 +20,34 @@ def assertError(self, *args, **kwargs): self.assertRaises(getopt.GetoptError, *args, **kwargs) def test_short_has_arg(self): - self.assertTrue(getopt.short_has_arg('a', 'a:')) - self.assertFalse(getopt.short_has_arg('a', 'a')) + self.assertIs(getopt.short_has_arg('a', 'a:'), True) + self.assertIs(getopt.short_has_arg('a', 'a'), False) + self.assertEqual(getopt.short_has_arg('a', 'a::'), '?') self.assertError(getopt.short_has_arg, 'a', 'b') def test_long_has_args(self): has_arg, option = getopt.long_has_args('abc', ['abc=']) - self.assertTrue(has_arg) + self.assertIs(has_arg, True) self.assertEqual(option, 'abc') has_arg, option = getopt.long_has_args('abc', ['abc']) - self.assertFalse(has_arg) + self.assertIs(has_arg, False) self.assertEqual(option, 'abc') + has_arg, option = getopt.long_has_args('abc', ['abc=?']) + self.assertEqual(has_arg, '?') + self.assertEqual(option, 'abc') + + has_arg, option = getopt.long_has_args('abc', ['abcd=']) + self.assertIs(has_arg, True) + self.assertEqual(option, 'abcd') + has_arg, option = getopt.long_has_args('abc', ['abcd']) - self.assertFalse(has_arg) + self.assertIs(has_arg, False) + self.assertEqual(option, 'abcd') + + has_arg, option = getopt.long_has_args('abc', ['abcd=?']) + self.assertEqual(has_arg, '?') self.assertEqual(option, 'abcd') self.assertError(getopt.long_has_args, 'abc', ['def']) @@ -49,9 +63,9 @@ def test_do_shorts(self): self.assertEqual(opts, [('-a', '1')]) self.assertEqual(args, []) - #opts, args = getopt.do_shorts([], 'a=1', 'a:', []) - #self.assertEqual(opts, [('-a', '1')]) - #self.assertEqual(args, []) + opts, args = 
getopt.do_shorts([], 'a=1', 'a:', []) + self.assertEqual(opts, [('-a', '=1')]) + self.assertEqual(args, []) opts, args = getopt.do_shorts([], 'a', 'a:', ['1']) self.assertEqual(opts, [('-a', '1')]) @@ -61,6 +75,14 @@ def test_do_shorts(self): self.assertEqual(opts, [('-a', '1')]) self.assertEqual(args, ['2']) + opts, args = getopt.do_shorts([], 'a', 'a::', ['1']) + self.assertEqual(opts, [('-a', '')]) + self.assertEqual(args, ['1']) + + opts, args = getopt.do_shorts([], 'a1', 'a::', []) + self.assertEqual(opts, [('-a', '1')]) + self.assertEqual(args, []) + self.assertError(getopt.do_shorts, [], 'a1', 'a', []) self.assertError(getopt.do_shorts, [], 'a', 'a:', []) @@ -77,6 +99,22 @@ def test_do_longs(self): self.assertEqual(opts, [('--abcd', '1')]) self.assertEqual(args, []) + opts, args = getopt.do_longs([], 'abc', ['abc=?'], ['1']) + self.assertEqual(opts, [('--abc', '')]) + self.assertEqual(args, ['1']) + + opts, args = getopt.do_longs([], 'abc', ['abcd=?'], ['1']) + self.assertEqual(opts, [('--abcd', '')]) + self.assertEqual(args, ['1']) + + opts, args = getopt.do_longs([], 'abc=1', ['abc=?'], []) + self.assertEqual(opts, [('--abc', '1')]) + self.assertEqual(args, []) + + opts, args = getopt.do_longs([], 'abc=1', ['abcd=?'], []) + self.assertEqual(opts, [('--abcd', '1')]) + self.assertEqual(args, []) + opts, args = getopt.do_longs([], 'abc', ['ab', 'abc', 'abcd'], []) self.assertEqual(opts, [('--abc', '')]) self.assertEqual(args, []) @@ -95,7 +133,7 @@ def test_getopt(self): # note: the empty string between '-a' and '--beta' is significant: # it simulates an empty string option argument ('-a ""') on the # command line. - cmdline = ['-a', '1', '-b', '--alpha=2', '--beta', '-a', '3', '-a', + cmdline = ['-a1', '-b', '--alpha=2', '--beta', '-a', '3', '-a', '', '--beta', 'arg1', 'arg2'] opts, args = getopt.getopt(cmdline, 'a:b', ['alpha=', 'beta']) @@ -106,33 +144,53 @@ def test_getopt(self): # accounted for in the code that calls getopt(). self.assertEqual(args, ['arg1', 'arg2']) + cmdline = ['-a1', '--alpha=2', '--alpha=', '-a', '--alpha', 'arg1', 'arg2'] + opts, args = getopt.getopt(cmdline, 'a::', ['alpha=?']) + self.assertEqual(opts, [('-a', '1'), ('--alpha', '2'), ('--alpha', ''), + ('-a', ''), ('--alpha', '')]) + self.assertEqual(args, ['arg1', 'arg2']) + self.assertError(getopt.getopt, cmdline, 'a:b', ['alpha', 'beta']) def test_gnu_getopt(self): # Test handling of GNU style scanning mode. - cmdline = ['-a', 'arg1', '-b', '1', '--alpha', '--beta=2'] + cmdline = ['-a', 'arg1', '-b', '1', '--alpha', '--beta=2', '--beta', + '3', 'arg2'] # GNU style opts, args = getopt.gnu_getopt(cmdline, 'ab:', ['alpha', 'beta=']) - self.assertEqual(args, ['arg1']) - self.assertEqual(opts, [('-a', ''), ('-b', '1'), - ('--alpha', ''), ('--beta', '2')]) + self.assertEqual(args, ['arg1', 'arg2']) + self.assertEqual(opts, [('-a', ''), ('-b', '1'), ('--alpha', ''), + ('--beta', '2'), ('--beta', '3')]) + + opts, args = getopt.gnu_getopt(cmdline, 'ab::', ['alpha', 'beta=?']) + self.assertEqual(args, ['arg1', '1', '3', 'arg2']) + self.assertEqual(opts, [('-a', ''), ('-b', ''), ('--alpha', ''), + ('--beta', '2'), ('--beta', '')]) # recognize "-" as an argument opts, args = getopt.gnu_getopt(['-a', '-', '-b', '-'], 'ab:', []) self.assertEqual(args, ['-']) self.assertEqual(opts, [('-a', ''), ('-b', '-')]) + # Return positional arguments intermixed with options. 
+ opts, args = getopt.gnu_getopt(cmdline, '-ab:', ['alpha', 'beta=']) + self.assertEqual(args, ['arg2']) + self.assertEqual(opts, [('-a', ''), (None, ['arg1']), ('-b', '1'), ('--alpha', ''), + ('--beta', '2'), ('--beta', '3')]) + # Posix style via + opts, args = getopt.gnu_getopt(cmdline, '+ab:', ['alpha', 'beta=']) self.assertEqual(opts, [('-a', '')]) - self.assertEqual(args, ['arg1', '-b', '1', '--alpha', '--beta=2']) + self.assertEqual(args, ['arg1', '-b', '1', '--alpha', '--beta=2', + '--beta', '3', 'arg2']) # Posix style via POSIXLY_CORRECT self.env["POSIXLY_CORRECT"] = "1" opts, args = getopt.gnu_getopt(cmdline, 'ab:', ['alpha', 'beta=']) self.assertEqual(opts, [('-a', '')]) - self.assertEqual(args, ['arg1', '-b', '1', '--alpha', '--beta=2']) + self.assertEqual(args, ['arg1', '-b', '1', '--alpha', '--beta=2', + '--beta', '3', 'arg2']) def test_issue4629(self): longopts, shortopts = getopt.getopt(['--help='], '', ['help=']) @@ -173,10 +231,20 @@ def test_libref_examples(): ['a1', 'a2'] """ + +class TestTranslations(TestTranslationsBase): + def test_translations(self): + self.assertMsgidsEqual(getopt) + + def load_tests(loader, tests, pattern): tests.addTest(doctest.DocTestSuite()) return tests -if __name__ == "__main__": +if __name__ == '__main__': + # To regenerate translation snapshots + if len(sys.argv) > 1 and sys.argv[1] == '--snapshot-update': + update_translation_snapshots(getopt) + sys.exit(0) unittest.main() diff --git a/Lib/test/test_global.py b/Lib/test/test_global.py index f5b38c25ea0728..11d0bd54e8b69b 100644 --- a/Lib/test/test_global.py +++ b/Lib/test/test_global.py @@ -1,7 +1,19 @@ -"""Verify that warnings are issued for global statements following use.""" +"""This module includes tests for syntax errors that occur when a name +declared as `global` is used in ways that violate the language +specification, such as after assignment, usage, or annotation. The tests +verify that syntax errors are correctly raised for improper `global` +statements following variable use or assignment within functions. +Additionally, it tests various name-binding scenarios for global +variables to ensure correct behavior. +See `test_scope.py` for additional related behavioral tests covering +variable scoping and usage in different contexts. 
+""" + +import contextlib from test.support import check_syntax_error from test.support.warnings_helper import check_warnings +from types import SimpleNamespace import unittest import warnings @@ -12,40 +24,185 @@ def setUp(self): self.enterContext(check_warnings()) warnings.filterwarnings("error", module="") - def test1(self): - prog_text_1 = """\ -def wrong1(): - a = 1 - b = 2 - global a - global b + ###################################################### + ### Syntax error cases as covered in Python/symtable.c + ###################################################### + + def test_name_param(self): + prog_text = """\ +def fn(name_param): + global name_param """ - check_syntax_error(self, prog_text_1, lineno=4, offset=5) + check_syntax_error(self, prog_text, lineno=2, offset=5) - def test2(self): - prog_text_2 = """\ -def wrong2(): - print(x) - global x + def test_name_after_assign(self): + prog_text = """\ +def fn(): + name_assign = 1 + global name_assign """ - check_syntax_error(self, prog_text_2, lineno=3, offset=5) + check_syntax_error(self, prog_text, lineno=3, offset=5) - def test3(self): - prog_text_3 = """\ -def wrong3(): - print(x) - x = 2 - global x + def test_name_after_use(self): + prog_text = """\ +def fn(): + print(name_use) + global name_use """ - check_syntax_error(self, prog_text_3, lineno=4, offset=5) + check_syntax_error(self, prog_text, lineno=3, offset=5) - def test4(self): - prog_text_4 = """\ -global x -x = 2 + def test_name_annot(self): + prog_text_3 = """\ +def fn(): + name_annot: int + global name_annot """ - # this should work - compile(prog_text_4, "", "exec") + check_syntax_error(self, prog_text_3, lineno=3, offset=5) + + ############################################################# + ### Tests for global variables across all name binding cases, + ### as described in executionmodel.rst + ############################################################# + + def test_assignment_statement(self): + global name_assignment_statement + value = object() + name_assignment_statement = value + self.assertIs(globals()["name_assignment_statement"], value) + del name_assignment_statement + + def test_unpacking_assignment(self): + global name_unpacking_assignment + value = object() + _, name_unpacking_assignment = [None, value] + self.assertIs(globals()["name_unpacking_assignment"], value) + del name_unpacking_assignment + + def test_assignment_expression(self): + global name_assignment_expression + value = object() + if name_assignment_expression := value: + pass + self.assertIs(globals()["name_assignment_expression"], value) + del name_assignment_expression + + def test_iteration_variable(self): + global name_iteration_variable + value = object() + for name_iteration_variable in [value]: + pass + self.assertIs(globals()["name_iteration_variable"], value) + del name_iteration_variable + + def test_func_def(self): + global name_func_def + + def name_func_def(): + pass + + value = name_func_def + self.assertIs(globals()["name_func_def"], value) + del name_func_def + + def test_class_def(self): + global name_class_def + + class name_class_def: + pass + + value = name_class_def + self.assertIs(globals()["name_class_def"], value) + del name_class_def + + def test_type_alias(self): + global name_type_alias + type name_type_alias = tuple[int, int] + value = name_type_alias + self.assertIs(globals()["name_type_alias"], value) + del name_type_alias + + def test_caught_exception(self): + global name_caught_exc + + try: + 1 / 0 + except ZeroDivisionError as name_caught_exc: + value = 
name_caught_exc + # `name_caught_exc` is cleared automatically after the except block + self.assertIs(globals()["name_caught_exc"], value) + + def test_caught_exception_group(self): + global name_caught_exc_group + try: + try: + 1 / 0 + except ZeroDivisionError as exc: + raise ExceptionGroup("eg", [exc]) + except* ZeroDivisionError as name_caught_exc_group: + value = name_caught_exc_group + # `name_caught_exc` is cleared automatically after the except block + self.assertIs(globals()["name_caught_exc_group"], value) + + def test_enter_result(self): + global name_enter_result + value = object() + with contextlib.nullcontext(value) as name_enter_result: + pass + self.assertIs(globals()["name_enter_result"], value) + del name_enter_result + + def test_import_result(self): + global name_import_result + value = contextlib + import contextlib as name_import_result + + self.assertIs(globals()["name_import_result"], value) + del name_import_result + + def test_match(self): + global name_match + value = object() + match value: + case name_match: + pass + self.assertIs(globals()["name_match"], value) + del name_match + + def test_match_as(self): + global name_match_as + value = object() + match value: + case _ as name_match_as: + pass + self.assertIs(globals()["name_match_as"], value) + del name_match_as + + def test_match_seq(self): + global name_match_seq + value = object() + match (None, value): + case (_, name_match_seq): + pass + self.assertIs(globals()["name_match_seq"], value) + del name_match_seq + + def test_match_map(self): + global name_match_map + value = object() + match {"key": value}: + case {"key": name_match_map}: + pass + self.assertIs(globals()["name_match_map"], value) + del name_match_map + + def test_match_attr(self): + global name_match_attr + value = object() + match SimpleNamespace(key=value): + case SimpleNamespace(key=name_match_attr): + pass + self.assertIs(globals()["name_match_attr"], value) + del name_match_attr def setUpModule(): diff --git a/Lib/test/test_importlib/test_util.py b/Lib/test/test_importlib/test_util.py index 668042782bdc5f..0bdd1b4b82e544 100644 --- a/Lib/test/test_importlib/test_util.py +++ b/Lib/test/test_importlib/test_util.py @@ -6,12 +6,14 @@ importlib_util = util.import_importlib('importlib.util') import importlib.util +from importlib import _bootstrap_external import os import pathlib import re import string import sys from test import support +from test.support import os_helper import textwrap import types import unittest @@ -775,5 +777,35 @@ def test_complete_multi_phase_init_module(self): self.run_with_own_gil(script) +class MiscTests(unittest.TestCase): + def test_atomic_write_should_notice_incomplete_writes(self): + import _pyio + + oldwrite = os.write + seen_write = False + + truncate_at_length = 100 + + # Emulate an os.write that only writes partial data. + def write(fd, data): + nonlocal seen_write + seen_write = True + return oldwrite(fd, data[:truncate_at_length]) + + # Need to patch _io to be _pyio, so that io.FileIO is affected by the + # os.write patch. + with (support.swap_attr(_bootstrap_external, '_io', _pyio), + support.swap_attr(os, 'write', write)): + with self.assertRaises(OSError): + # Make sure we write something longer than the point where we + # truncate. + content = b'x' * (truncate_at_length * 2) + _bootstrap_external._write_atomic(os_helper.TESTFN, content) + assert seen_write + + with self.assertRaises(OSError): + os.stat(support.os_helper.TESTFN) # Check that the file did not get written. 
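# The test above forces os.write() to return a short count and expects the
# atomic-write helper to fail without leaving a partially written file behind.
# Below is a minimal, illustrative sketch of the write-to-temp-then-rename
# pattern such helpers follow; `write_atomic` is a hypothetical name used for
# illustration only, not CPython's private
# importlib._bootstrap_external._write_atomic implementation.
import os
import tempfile

def write_atomic(path, data):
    # Write to a temporary file in the same directory, then atomically
    # replace the target, so readers never observe a half-written file and
    # a failed write leaves nothing behind at the destination.
    dirname = os.path.dirname(os.path.abspath(path))
    fd, tmp = tempfile.mkstemp(dir=dirname)
    try:
        with os.fdopen(fd, 'wb') as f:
            f.write(data)
        os.replace(tmp, path)  # atomic rename on POSIX and Windows
    except BaseException:
        os.unlink(tmp)
        raise

# Usage sketch: after the call, the file is either fully written or absent.
# write_atomic('spam.bin', b'x' * 1024)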
+ + if __name__ == '__main__': unittest.main() diff --git a/Lib/test/test_interpreters/test_stress.py b/Lib/test/test_interpreters/test_stress.py index e400535b2a0e4e..56bfc1721992c8 100644 --- a/Lib/test/test_interpreters/test_stress.py +++ b/Lib/test/test_interpreters/test_stress.py @@ -23,6 +23,7 @@ def test_create_many_sequential(self): alive.append(interp) @support.requires_resource('cpu') + @threading_helper.requires_working_threading() def test_create_many_threaded(self): alive = [] def task(): @@ -32,6 +33,35 @@ def task(): with threading_helper.start_threads(threads): pass + @support.requires_resource('cpu') + @threading_helper.requires_working_threading() + def test_many_threads_running_interp_in_other_interp(self): + interp = interpreters.create() + + script = f"""if True: + import _interpreters + _interpreters.run_string({interp.id}, '1') + """ + + def run(): + interp = interpreters.create() + alreadyrunning = (f'{interpreters.InterpreterError}: ' + 'interpreter already running') + success = False + while not success: + try: + interp.exec(script) + except interpreters.ExecutionFailed as exc: + if exc.excinfo.msg != 'interpreter already running': + raise # re-raise + assert exc.excinfo.type.__name__ == 'InterpreterError' + else: + success = True + + threads = (threading.Thread(target=run) for _ in range(200)) + with threading_helper.start_threads(threads): + pass + if __name__ == '__main__': # Test needs to be a package, so we can do relative imports. diff --git a/Lib/test/test_itertools.py b/Lib/test/test_itertools.py index a52e1d3fa142d9..b94d688738f9e8 100644 --- a/Lib/test/test_itertools.py +++ b/Lib/test/test_itertools.py @@ -494,6 +494,8 @@ def test_count(self): self.assertEqual(take(2, zip('abc',count(-3))), [('a', -3), ('b', -2)]) self.assertRaises(TypeError, count, 2, 3, 4) self.assertRaises(TypeError, count, 'a') + self.assertEqual(take(3, count(maxsize)), + [maxsize, maxsize + 1, maxsize + 2]) self.assertEqual(take(10, count(maxsize-5)), list(range(maxsize-5, maxsize+5))) self.assertEqual(take(10, count(-maxsize-5)), @@ -540,6 +542,12 @@ def test_count_with_step(self): self.assertEqual(take(20, count(-maxsize-15, 3)), take(20, range(-maxsize-15,-maxsize+100, 3))) self.assertEqual(take(3, count(10, maxsize+5)), list(range(10, 10+3*(maxsize+5), maxsize+5))) + self.assertEqual(take(3, count(maxsize, 2)), + [maxsize, maxsize + 2, maxsize + 4]) + self.assertEqual(take(3, count(maxsize, maxsize)), + [maxsize, 2 * maxsize, 3 * maxsize]) + self.assertEqual(take(3, count(-maxsize, maxsize)), + [-maxsize, 0, maxsize]) self.assertEqual(take(3, count(2, 1.25)), [2, 3.25, 4.5]) self.assertEqual(take(3, count(2, 3.25-4j)), [2, 5.25-4j, 8.5-8j]) self.assertEqual(take(3, count(Decimal('1.1'), Decimal('.1'))), diff --git a/Lib/test/test_marshal.py b/Lib/test/test_marshal.py index 64ee1ba867d592..93b8684c725d24 100644 --- a/Lib/test/test_marshal.py +++ b/Lib/test/test_marshal.py @@ -28,6 +28,13 @@ def helper(self, sample, *extra): finally: os_helper.unlink(os_helper.TESTFN) +def omit_last_byte(data): + """return data[:-1]""" + # This file's code is used in CompatibilityTestCase, + # but slices need marshal version 5. + # Avoid the slice literal. + return data[slice(0, -1)] + class IntTestCase(unittest.TestCase, HelperMixin): def test_ints(self): # Test a range of Python ints larger than the machine word size. 
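# The omit_last_byte() helper added above exists so these tests can hand
# marshal a truncated stream without using a slice literal. A small
# standalone sketch of the behaviour being exercised: loading truncated
# marshal data must raise rather than silently return a partial object.
import marshal

data = marshal.dumps(2**65)
try:
    marshal.loads(data[:-1])   # drop the final byte
except Exception as exc:       # EOFError or ValueError, depending on where it breaks
    print("truncated stream rejected:", type(exc).__name__)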
@@ -241,7 +248,8 @@ def test_bug_5888452(self): def test_patch_873224(self): self.assertRaises(Exception, marshal.loads, b'0') self.assertRaises(Exception, marshal.loads, b'f') - self.assertRaises(Exception, marshal.loads, marshal.dumps(2**65)[:-1]) + self.assertRaises(Exception, marshal.loads, + omit_last_byte(marshal.dumps(2**65))) def test_version_argument(self): # Python 2.4.0 crashes for any call to marshal.dumps(x, y) @@ -594,6 +602,19 @@ def testNoIntern(self): s2 = sys.intern(s) self.assertNotEqual(id(s2), id(s)) +class SliceTestCase(unittest.TestCase, HelperMixin): + def test_slice(self): + for obj in ( + slice(None), slice(1), slice(1, 2), slice(1, 2, 3), + slice({'set'}, ('tuple', {'with': 'dict'}, ), self.helper.__code__) + ): + with self.subTest(obj=str(obj)): + self.helper(obj) + + for version in range(4): + with self.assertRaises(ValueError): + marshal.dumps(obj, version) + @support.cpython_only @unittest.skipUnless(_testcapi, 'requires _testcapi') class CAPI_TestCase(unittest.TestCase, HelperMixin): @@ -654,7 +675,7 @@ def test_read_last_object_from_file(self): self.assertEqual(r, obj) with open(os_helper.TESTFN, 'wb') as f: - f.write(data[:1]) + f.write(omit_last_byte(data)) with self.assertRaises(EOFError): _testcapi.pymarshal_read_last_object_from_file(os_helper.TESTFN) os_helper.unlink(os_helper.TESTFN) @@ -671,7 +692,7 @@ def test_read_object_from_file(self): self.assertEqual(p, len(data)) with open(os_helper.TESTFN, 'wb') as f: - f.write(data[:1]) + f.write(omit_last_byte(data)) with self.assertRaises(EOFError): _testcapi.pymarshal_read_object_from_file(os_helper.TESTFN) os_helper.unlink(os_helper.TESTFN) diff --git a/Lib/test/test_mimetypes.py b/Lib/test/test_mimetypes.py index 58f6a4dfae08ba..0a5b511e75537c 100644 --- a/Lib/test/test_mimetypes.py +++ b/Lib/test/test_mimetypes.py @@ -223,26 +223,46 @@ def test_guess_known_extensions(self): def test_preferred_extension(self): def check_extensions(): - self.assertEqual(mimetypes.guess_extension('application/octet-stream'), '.bin') - self.assertEqual(mimetypes.guess_extension('application/postscript'), '.ps') - self.assertEqual(mimetypes.guess_extension('application/vnd.apple.mpegurl'), '.m3u') - self.assertEqual(mimetypes.guess_extension('application/vnd.ms-excel'), '.xls') - self.assertEqual(mimetypes.guess_extension('application/vnd.ms-powerpoint'), '.ppt') - self.assertEqual(mimetypes.guess_extension('application/x-texinfo'), '.texi') - self.assertEqual(mimetypes.guess_extension('application/x-troff'), '.roff') - self.assertEqual(mimetypes.guess_extension('application/xml'), '.xsl') - self.assertEqual(mimetypes.guess_extension('audio/mpeg'), '.mp3') - self.assertEqual(mimetypes.guess_extension('image/avif'), '.avif') - self.assertEqual(mimetypes.guess_extension('image/webp'), '.webp') - self.assertEqual(mimetypes.guess_extension('image/jpeg'), '.jpg') - self.assertEqual(mimetypes.guess_extension('image/tiff'), '.tiff') - self.assertEqual(mimetypes.guess_extension('message/rfc822'), '.eml') - self.assertEqual(mimetypes.guess_extension('text/html'), '.html') - self.assertEqual(mimetypes.guess_extension('text/plain'), '.txt') - self.assertEqual(mimetypes.guess_extension('text/rtf'), '.rtf') - self.assertEqual(mimetypes.guess_extension('text/x-rst'), '.rst') - self.assertEqual(mimetypes.guess_extension('video/mpeg'), '.mpeg') - self.assertEqual(mimetypes.guess_extension('video/quicktime'), '.mov') + for mime_type, ext in ( + ("application/octet-stream", ".bin"), + ("application/postscript", ".ps"), + 
("application/vnd.apple.mpegurl", ".m3u"), + ("application/vnd.ms-excel", ".xls"), + ("application/vnd.ms-fontobject", ".eot"), + ("application/vnd.ms-powerpoint", ".ppt"), + ("application/x-texinfo", ".texi"), + ("application/x-troff", ".roff"), + ("application/xml", ".xsl"), + ("audio/matroska", ".mka"), + ("audio/mpeg", ".mp3"), + ("font/otf", ".otf"), + ("font/ttf", ".ttf"), + ("font/woff", ".woff"), + ("font/woff2", ".woff2"), + ("image/avif", ".avif"), + ("image/emf", ".emf"), + ("image/fits", ".fits"), + ("image/g3fax", ".g3"), + ("image/jp2", ".jp2"), + ("image/jpm", ".jpm"), + ("image/t38", ".t38"), + ("image/webp", ".webp"), + ("image/wmf", ".wmf"), + ("image/jpeg", ".jpg"), + ("image/tiff", ".tiff"), + ("image/tiff-fx", ".tfx"), + ("message/rfc822", ".eml"), + ("text/html", ".html"), + ("text/plain", ".txt"), + ("text/rtf", ".rtf"), + ("text/x-rst", ".rst"), + ("video/matroska", ".mkv"), + ("video/matroska-3d", ".mk3d"), + ("video/mpeg", ".mpeg"), + ("video/quicktime", ".mov"), + ): + with self.subTest(mime_type=mime_type, ext=ext): + self.assertEqual(mimetypes.guess_extension(mime_type), ext) check_extensions() mimetypes.init() diff --git a/Lib/test/test_ntpath.py b/Lib/test/test_ntpath.py index 64cbfaaaaa0690..6715071af8c752 100644 --- a/Lib/test/test_ntpath.py +++ b/Lib/test/test_ntpath.py @@ -347,13 +347,18 @@ def test_normpath(self): tester("ntpath.normpath('..')", r'..') tester("ntpath.normpath('.')", r'.') + tester("ntpath.normpath('c:.')", 'c:') tester("ntpath.normpath('')", r'.') tester("ntpath.normpath('/')", '\\') tester("ntpath.normpath('c:/')", 'c:\\') tester("ntpath.normpath('/../.././..')", '\\') tester("ntpath.normpath('c:/../../..')", 'c:\\') + tester("ntpath.normpath('/./a/b')", r'\a\b') + tester("ntpath.normpath('c:/./a/b')", r'c:\a\b') tester("ntpath.normpath('../.././..')", r'..\..\..') tester("ntpath.normpath('K:../.././..')", r'K:..\..\..') + tester("ntpath.normpath('./a/b')", r'a\b') + tester("ntpath.normpath('c:./a/b')", r'c:a\b') tester("ntpath.normpath('C:////a/b')", r'C:\a\b') tester("ntpath.normpath('//machine/share//a/b')", r'\\machine\share\a\b') @@ -806,6 +811,9 @@ def test_abspath(self): tester('ntpath.abspath("C:\\spam. . .")', "C:\\spam") tester('ntpath.abspath("C:/nul")', "\\\\.\\nul") tester('ntpath.abspath("C:\\nul")', "\\\\.\\nul") + self.assertTrue(ntpath.isabs(ntpath.abspath("C:spam"))) + self.assertEqual(ntpath.abspath("C:\x00"), ntpath.join(ntpath.abspath("C:"), "\x00")) + self.assertEqual(ntpath.abspath("\x00:spam"), "\x00:\\spam") tester('ntpath.abspath("//..")', "\\\\") tester('ntpath.abspath("//../")', "\\\\..\\") tester('ntpath.abspath("//../..")', "\\\\..\\") diff --git a/Lib/test/test_opcache.py b/Lib/test/test_opcache.py index 78e4bf44f7ea0c..a0292b31af1be5 100644 --- a/Lib/test/test_opcache.py +++ b/Lib/test/test_opcache.py @@ -546,7 +546,6 @@ def count_args(self, *args): @threading_helper.requires_working_threading() -@requires_specialization class TestRacesDoNotCrash(TestBase): # Careful with these. 
Bigger numbers have a higher chance of catching bugs, # but you can also burn through a *ton* of type/dict/function versions: @@ -588,6 +587,7 @@ def assert_races_do_not_crash( for writer in writers: writer.join() + @requires_specialization def test_binary_subscr_getitem(self): def get_items(): class C: @@ -617,6 +617,7 @@ def write(items): opname = "BINARY_SUBSCR_GETITEM" self.assert_races_do_not_crash(opname, get_items, read, write) + @requires_specialization def test_binary_subscr_list_int(self): def get_items(): items = [] @@ -640,6 +641,7 @@ def write(items): opname = "BINARY_SUBSCR_LIST_INT" self.assert_races_do_not_crash(opname, get_items, read, write) + @requires_specialization def test_for_iter_gen(self): def get_items(): def g(): @@ -671,6 +673,7 @@ def write(items): opname = "FOR_ITER_GEN" self.assert_races_do_not_crash(opname, get_items, read, write) + @requires_specialization def test_for_iter_list(self): def get_items(): items = [] @@ -692,6 +695,7 @@ def write(items): opname = "FOR_ITER_LIST" self.assert_races_do_not_crash(opname, get_items, read, write) + @requires_specialization def test_load_attr_class(self): def get_items(): class C: @@ -721,6 +725,7 @@ def write(items): opname = "LOAD_ATTR_CLASS" self.assert_races_do_not_crash(opname, get_items, read, write) + @requires_specialization def test_load_attr_getattribute_overridden(self): def get_items(): class C: @@ -750,6 +755,7 @@ def write(items): opname = "LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN" self.assert_races_do_not_crash(opname, get_items, read, write) + @requires_specialization def test_load_attr_instance_value(self): def get_items(): class C: @@ -773,6 +779,7 @@ def write(items): opname = "LOAD_ATTR_INSTANCE_VALUE" self.assert_races_do_not_crash(opname, get_items, read, write) + @requires_specialization def test_load_attr_method_lazy_dict(self): def get_items(): class C(Exception): @@ -802,6 +809,7 @@ def write(items): opname = "LOAD_ATTR_METHOD_LAZY_DICT" self.assert_races_do_not_crash(opname, get_items, read, write) + @requires_specialization def test_load_attr_method_no_dict(self): def get_items(): class C: @@ -832,6 +840,7 @@ def write(items): opname = "LOAD_ATTR_METHOD_NO_DICT" self.assert_races_do_not_crash(opname, get_items, read, write) + @requires_specialization def test_load_attr_method_with_values(self): def get_items(): class C: @@ -861,6 +870,7 @@ def write(items): opname = "LOAD_ATTR_METHOD_WITH_VALUES" self.assert_races_do_not_crash(opname, get_items, read, write) + @requires_specialization def test_load_attr_module(self): def get_items(): items = [] @@ -885,6 +895,7 @@ def write(items): opname = "LOAD_ATTR_MODULE" self.assert_races_do_not_crash(opname, get_items, read, write) + @requires_specialization def test_load_attr_property(self): def get_items(): class C: @@ -914,6 +925,7 @@ def write(items): opname = "LOAD_ATTR_PROPERTY" self.assert_races_do_not_crash(opname, get_items, read, write) + @requires_specialization def test_load_attr_with_hint(self): def get_items(): class C: @@ -940,6 +952,7 @@ def write(items): opname = "LOAD_ATTR_WITH_HINT" self.assert_races_do_not_crash(opname, get_items, read, write) + @requires_specialization_ft def test_load_global_module(self): def get_items(): items = [] @@ -961,6 +974,7 @@ def write(items): opname, get_items, read, write, check_items=True ) + @requires_specialization def test_store_attr_instance_value(self): def get_items(): class C: @@ -983,6 +997,7 @@ def write(items): opname = "STORE_ATTR_INSTANCE_VALUE" self.assert_races_do_not_crash(opname, get_items, 
read, write) + @requires_specialization def test_store_attr_with_hint(self): def get_items(): class C: @@ -1008,6 +1023,7 @@ def write(items): opname = "STORE_ATTR_WITH_HINT" self.assert_races_do_not_crash(opname, get_items, read, write) + @requires_specialization def test_store_subscr_list_int(self): def get_items(): items = [] @@ -1031,6 +1047,7 @@ def write(items): opname = "STORE_SUBSCR_LIST_INT" self.assert_races_do_not_crash(opname, get_items, read, write) + @requires_specialization def test_unpack_sequence_list(self): def get_items(): items = [] @@ -1255,6 +1272,72 @@ def g(): self.assert_specialized(g, "CONTAINS_OP_SET") self.assert_no_opcode(g, "CONTAINS_OP") + @cpython_only + @requires_specialization_ft + def test_to_bool(self): + def to_bool_bool(): + true_cnt, false_cnt = 0, 0 + elems = [e % 2 == 0 for e in range(100)] + for e in elems: + if e: + true_cnt += 1 + else: + false_cnt += 1 + self.assertEqual(true_cnt, 50) + self.assertEqual(false_cnt, 50) + + to_bool_bool() + self.assert_specialized(to_bool_bool, "TO_BOOL_BOOL") + self.assert_no_opcode(to_bool_bool, "TO_BOOL") + + def to_bool_int(): + count = 0 + for i in range(100): + if i: + count += 1 + else: + count -= 1 + self.assertEqual(count, 98) + + to_bool_int() + self.assert_specialized(to_bool_int, "TO_BOOL_INT") + self.assert_no_opcode(to_bool_int, "TO_BOOL") + + def to_bool_list(): + count = 0 + elems = [1, 2, 3] + while elems: + count += elems.pop() + self.assertEqual(elems, []) + self.assertEqual(count, 6) + + to_bool_list() + self.assert_specialized(to_bool_list, "TO_BOOL_LIST") + self.assert_no_opcode(to_bool_list, "TO_BOOL") + + def to_bool_none(): + count = 0 + elems = [None, None, None, None] + for e in elems: + if not e: + count += 1 + self.assertEqual(count, len(elems)) + + to_bool_none() + self.assert_specialized(to_bool_none, "TO_BOOL_NONE") + self.assert_no_opcode(to_bool_none, "TO_BOOL") + + def to_bool_str(): + count = 0 + elems = ["", "foo", ""] + for e in elems: + if e: + count += 1 + self.assertEqual(count, 1) + + to_bool_str() + self.assert_specialized(to_bool_str, "TO_BOOL_STR") + self.assert_no_opcode(to_bool_str, "TO_BOOL") if __name__ == "__main__": diff --git a/Lib/test/test_optparse.py b/Lib/test/test_optparse.py index 28b274462388ed..8655a0537a5e56 100644 --- a/Lib/test/test_optparse.py +++ b/Lib/test/test_optparse.py @@ -15,7 +15,7 @@ from io import StringIO from test import support from test.support import os_helper - +from test.support.i18n_helper import TestTranslationsBase, update_translation_snapshots import optparse from optparse import make_option, Option, \ @@ -1656,5 +1656,14 @@ def test__all__(self): support.check__all__(self, optparse, not_exported=not_exported) +class TestTranslations(TestTranslationsBase): + def test_translations(self): + self.assertMsgidsEqual(optparse) + + if __name__ == '__main__': + # To regenerate translation snapshots + if len(sys.argv) > 1 and sys.argv[1] == '--snapshot-update': + update_translation_snapshots(optparse) + sys.exit(0) unittest.main() diff --git a/Lib/test/test_os.py b/Lib/test/test_os.py index 9a4be78556c648..99515dfc71f9ba 100644 --- a/Lib/test/test_os.py +++ b/Lib/test/test_os.py @@ -2447,8 +2447,8 @@ def test_fchown(self): support.is_emscripten or support.is_wasi, "musl libc issue on Emscripten/WASI, bpo-46390" ) - @unittest.skipIf(support.is_apple_mobile, "gh-118201: Test is flaky on iOS") def test_fpathconf(self): + self.assertIn("PC_NAME_MAX", os.pathconf_names) self.check(os.pathconf, "PC_NAME_MAX") self.check(os.fpathconf, 
"PC_NAME_MAX") self.check_bool(os.pathconf, "PC_NAME_MAX") @@ -3967,10 +3967,10 @@ def _check_xattrs_str(self, s, getxattr, setxattr, removexattr, listxattr, **kwa xattr.remove("user.test") self.assertEqual(set(listxattr(fn)), xattr) self.assertEqual(getxattr(fn, s("user.test2"), **kwargs), b"foo") - setxattr(fn, s("user.test"), b"a"*1024, **kwargs) - self.assertEqual(getxattr(fn, s("user.test"), **kwargs), b"a"*1024) + setxattr(fn, s("user.test"), b"a"*256, **kwargs) + self.assertEqual(getxattr(fn, s("user.test"), **kwargs), b"a"*256) removexattr(fn, s("user.test"), **kwargs) - many = sorted("user.test{}".format(i) for i in range(100)) + many = sorted("user.test{}".format(i) for i in range(32)) for thing in many: setxattr(fn, thing, b"x", **kwargs) self.assertEqual(set(listxattr(fn)), set(init_xattr) | set(many)) @@ -4177,9 +4177,9 @@ def test_eventfd_select(self): @support.requires_linux_version(2, 6, 30) class TimerfdTests(unittest.TestCase): # 1 ms accuracy is reliably achievable on every platform except Android - # emulators, where we allow 10 ms (gh-108277). + # emulators, where we allow 100 ms (gh-124873). if sys.platform == "android" and platform.android_ver().is_emulator: - CLOCK_RES_PLACES = 2 + CLOCK_RES_PLACES = 1 else: CLOCK_RES_PLACES = 3 diff --git a/Lib/test/test_pathlib/test_pathlib.py b/Lib/test/test_pathlib/test_pathlib.py index c7104bfda90f6c..46966b6df2d7b0 100644 --- a/Lib/test/test_pathlib/test_pathlib.py +++ b/Lib/test/test_pathlib/test_pathlib.py @@ -861,6 +861,28 @@ def test_move_into_other_os(self): def test_move_into_empty_name_other_os(self): self.test_move_into_empty_name() + def _check_complex_symlinks(self, link0_target): + super()._check_complex_symlinks(link0_target) + P = self.cls(self.base) + # Resolve relative paths. + old_path = os.getcwd() + os.chdir(self.base) + try: + p = self.cls('link0').resolve() + self.assertEqual(p, P) + self.assertEqualNormCase(str(p), self.base) + p = self.cls('link1').resolve() + self.assertEqual(p, P) + self.assertEqualNormCase(str(p), self.base) + p = self.cls('link2').resolve() + self.assertEqual(p, P) + self.assertEqualNormCase(str(p), self.base) + p = self.cls('link3').resolve() + self.assertEqual(p, P) + self.assertEqualNormCase(str(p), self.base) + finally: + os.chdir(old_path) + def test_resolve_nonexist_relative_issue38671(self): p = self.cls('non', 'exist') diff --git a/Lib/test/test_pathlib/test_pathlib_abc.py b/Lib/test/test_pathlib/test_pathlib_abc.py index d155e7c5bb9935..b69d674e1cf1ed 100644 --- a/Lib/test/test_pathlib/test_pathlib_abc.py +++ b/Lib/test/test_pathlib/test_pathlib_abc.py @@ -148,6 +148,7 @@ def test_constructor_common(self): P = self.cls p = P('a') self.assertIsInstance(p, P) + P() P('a', 'b', 'c') P('/a', 'b', 'c') P('a/b/c') @@ -2492,6 +2493,23 @@ def test_glob_long_symlink(self): bad_link.symlink_to("bad" * 200) self.assertEqual(sorted(base.glob('**/*')), [bad_link]) + @needs_posix + def test_absolute_posix(self): + P = self.cls + # The default implementation uses '/' as the current directory + self.assertEqual(str(P('').absolute()), '/') + self.assertEqual(str(P('a').absolute()), '/a') + self.assertEqual(str(P('a/b').absolute()), '/a/b') + + self.assertEqual(str(P('/').absolute()), '/') + self.assertEqual(str(P('/a').absolute()), '/a') + self.assertEqual(str(P('/a/b').absolute()), '/a/b') + + # '//'-prefixed absolute path (supported by POSIX). 
+ self.assertEqual(str(P('//').absolute()), '//') + self.assertEqual(str(P('//a').absolute()), '//a') + self.assertEqual(str(P('//a/b').absolute()), '//a/b') + @needs_symlinks def test_readlink(self): P = self.cls(self.base) @@ -2809,29 +2827,6 @@ def _check_complex_symlinks(self, link0_target): self.assertEqual(p, P) self.assertEqualNormCase(str(p), self.base) - # Resolve relative paths. - try: - self.cls('').absolute() - except UnsupportedOperation: - return - old_path = os.getcwd() - os.chdir(self.base) - try: - p = self.cls('link0').resolve() - self.assertEqual(p, P) - self.assertEqualNormCase(str(p), self.base) - p = self.cls('link1').resolve() - self.assertEqual(p, P) - self.assertEqualNormCase(str(p), self.base) - p = self.cls('link2').resolve() - self.assertEqual(p, P) - self.assertEqualNormCase(str(p), self.base) - p = self.cls('link3').resolve() - self.assertEqual(p, P) - self.assertEqualNormCase(str(p), self.base) - finally: - os.chdir(old_path) - @needs_symlinks def test_complex_symlinks_absolute(self): self._check_complex_symlinks(self.base) diff --git a/Lib/test/test_pickletools.py b/Lib/test/test_pickletools.py index d8ff7a25cbc4b7..265dc497ccb86c 100644 --- a/Lib/test/test_pickletools.py +++ b/Lib/test/test_pickletools.py @@ -361,6 +361,88 @@ def test_annotate(self): highest protocol among opcodes = 0 ''', annotate=20) + def test_string(self): + self.check_dis(b"S'abc'\n.", '''\ + 0: S STRING 'abc' + 7: . STOP +highest protocol among opcodes = 0 +''') + self.check_dis(b'S"abc"\n.', '''\ + 0: S STRING 'abc' + 7: . STOP +highest protocol among opcodes = 0 +''') + self.check_dis(b"S'\xc3\xb5'\n.", '''\ + 0: S STRING '\\xc3\\xb5' + 6: . STOP +highest protocol among opcodes = 0 +''') + + def test_string_without_quotes(self): + self.check_dis_error(b"Sabc'\n.", '', + 'no string quotes around b"abc\'"') + self.check_dis_error(b'Sabc"\n.', '', + "no string quotes around b'abc\"'") + self.check_dis_error(b"S'abc\n.", '', + '''strinq quote b"'" not found at both ends of b"'abc"''') + self.check_dis_error(b'S"abc\n.', '', + r"""strinq quote b'"' not found at both ends of b'"abc'""") + self.check_dis_error(b"S'abc\"\n.", '', + r"""strinq quote b"'" not found at both ends of b'\\'abc"'""") + self.check_dis_error(b"S\"abc'\n.", '', + r"""strinq quote b'"' not found at both ends of b'"abc\\''""") + + def test_binstring(self): + self.check_dis(b"T\x03\x00\x00\x00abc.", '''\ + 0: T BINSTRING 'abc' + 8: . STOP +highest protocol among opcodes = 1 +''') + self.check_dis(b"T\x02\x00\x00\x00\xc3\xb5.", '''\ + 0: T BINSTRING '\\xc3\\xb5' + 7: . STOP +highest protocol among opcodes = 1 +''') + + def test_short_binstring(self): + self.check_dis(b"U\x03abc.", '''\ + 0: U SHORT_BINSTRING 'abc' + 5: . STOP +highest protocol among opcodes = 1 +''') + self.check_dis(b"U\x02\xc3\xb5.", '''\ + 0: U SHORT_BINSTRING '\\xc3\\xb5' + 4: . STOP +highest protocol among opcodes = 1 +''') + + def test_global(self): + self.check_dis(b"cmodule\nname\n.", '''\ + 0: c GLOBAL 'module name' + 13: . STOP +highest protocol among opcodes = 0 +''') + self.check_dis(b"cm\xc3\xb6dule\nn\xc3\xa4me\n.", '''\ + 0: c GLOBAL 'm\xf6dule n\xe4me' + 15: . STOP +highest protocol among opcodes = 0 +''') + + def test_inst(self): + self.check_dis(b"(imodule\nname\n.", '''\ + 0: ( MARK + 1: i INST 'module name' (MARK at 0) + 14: . STOP +highest protocol among opcodes = 0 +''') + + def test_persid(self): + self.check_dis(b"Pabc\n.", '''\ + 0: P PERSID 'abc' + 5: . 
STOP +highest protocol among opcodes = 0 +''') + class MiscTestCase(unittest.TestCase): def test__all__(self): diff --git a/Lib/test/test_platform.py b/Lib/test/test_platform.py index 40d5fb338ce563..e04ad142061ad3 100644 --- a/Lib/test/test_platform.py +++ b/Lib/test/test_platform.py @@ -83,6 +83,38 @@ def clear_caches(self): platform._uname_cache = None platform._os_release_cache = None + def test_invalidate_caches(self): + self.clear_caches() + + self.assertDictEqual(platform._platform_cache, {}) + self.assertDictEqual(platform._sys_version_cache, {}) + self.assertIsNone(platform._uname_cache) + self.assertIsNone(platform._os_release_cache) + + # fill the cached entries (some have side effects on others) + platform.platform() # for platform._platform_cache + platform.python_implementation() # for platform._sys_version_cache + platform.uname() # for platform._uname_cache + + # check that the cache are filled + self.assertNotEqual(platform._platform_cache, {}) + self.assertNotEqual(platform._sys_version_cache, {}) + self.assertIsNotNone(platform._uname_cache) + + try: + platform.freedesktop_os_release() + except OSError: + self.assertIsNone(platform._os_release_cache) + else: + self.assertIsNotNone(platform._os_release_cache) + + with self.subTest('clear platform caches'): + platform.invalidate_caches() + self.assertDictEqual(platform._platform_cache, {}) + self.assertDictEqual(platform._sys_version_cache, {}) + self.assertIsNone(platform._uname_cache) + self.assertIsNone(platform._os_release_cache) + def test_architecture(self): res = platform.architecture() diff --git a/Lib/test/test_plistlib.py b/Lib/test/test_plistlib.py index b231b05f864ab9..a0c76e5dec5ebe 100644 --- a/Lib/test/test_plistlib.py +++ b/Lib/test/test_plistlib.py @@ -1,4 +1,4 @@ -# Copyright (C) 2003-2013 Python Software Foundation +# Copyright (C) 2003 Python Software Foundation import copy import operator import pickle diff --git a/Lib/test/test_posix.py b/Lib/test/test_posix.py index ef9d617f66feec..c9cbe1541e733e 100644 --- a/Lib/test/test_posix.py +++ b/Lib/test/test_posix.py @@ -568,10 +568,38 @@ def test_dup(self): @unittest.skipUnless(hasattr(posix, 'confstr'), 'test needs posix.confstr()') - @unittest.skipIf(support.is_apple_mobile, "gh-118201: Test is flaky on iOS") def test_confstr(self): - self.assertRaises(ValueError, posix.confstr, "CS_garbage") - self.assertEqual(len(posix.confstr("CS_PATH")) > 0, True) + with self.assertRaisesRegex( + ValueError, "unrecognized configuration name" + ): + posix.confstr("CS_garbage") + + with self.assertRaisesRegex( + TypeError, "configuration names must be strings or integers" + ): + posix.confstr(1.23) + + path = posix.confstr("CS_PATH") + self.assertGreater(len(path), 0) + self.assertEqual(posix.confstr(posix.confstr_names["CS_PATH"]), path) + + @unittest.skipUnless(hasattr(posix, 'sysconf'), + 'test needs posix.sysconf()') + def test_sysconf(self): + with self.assertRaisesRegex( + ValueError, "unrecognized configuration name" + ): + posix.sysconf("SC_garbage") + + with self.assertRaisesRegex( + TypeError, "configuration names must be strings or integers" + ): + posix.sysconf(1.23) + + arg_max = posix.sysconf("SC_ARG_MAX") + self.assertGreater(arg_max, 0) + self.assertEqual( + posix.sysconf(posix.sysconf_names["SC_ARG_MAX"]), arg_max) @unittest.skipUnless(hasattr(posix, 'dup2'), 'test needs posix.dup2()') diff --git a/Lib/test/test_posixpath.py b/Lib/test/test_posixpath.py index ca5cf42f8fcd71..43e4fbc610e5f7 100644 --- a/Lib/test/test_posixpath.py +++ 
b/Lib/test/test_posixpath.py @@ -379,6 +379,7 @@ def test_expanduser_pwd2(self): ("/.", "/"), ("/./", "/"), ("/.//.", "/"), + ("/./foo/bar", "/foo/bar"), ("/foo", "/foo"), ("/foo/bar", "/foo/bar"), ("//", "//"), @@ -388,6 +389,7 @@ def test_expanduser_pwd2(self): ("///..//./foo/.//bar", "/foo/bar"), (".", "."), (".//.", "."), + ("./foo/bar", "foo/bar"), ("..", ".."), ("../", ".."), ("../foo", "../foo"), @@ -695,6 +697,65 @@ def test_realpath_unreadable_symlink(self): os.chmod(ABSTFN, 0o755, follow_symlinks=False) os.unlink(ABSTFN) + @skip_if_ABSTFN_contains_backslash + def test_realpath_nonterminal_file(self): + try: + with open(ABSTFN, 'w') as f: + f.write('test_posixpath wuz ere') + self.assertEqual(realpath(ABSTFN, strict=False), ABSTFN) + self.assertEqual(realpath(ABSTFN, strict=True), ABSTFN) + self.assertEqual(realpath(ABSTFN + "/", strict=False), ABSTFN) + self.assertRaises(NotADirectoryError, realpath, ABSTFN + "/", strict=True) + self.assertEqual(realpath(ABSTFN + "/.", strict=False), ABSTFN) + self.assertRaises(NotADirectoryError, realpath, ABSTFN + "/.", strict=True) + self.assertEqual(realpath(ABSTFN + "/..", strict=False), dirname(ABSTFN)) + self.assertRaises(NotADirectoryError, realpath, ABSTFN + "/..", strict=True) + self.assertEqual(realpath(ABSTFN + "/subdir", strict=False), ABSTFN + "/subdir") + self.assertRaises(NotADirectoryError, realpath, ABSTFN + "/subdir", strict=True) + finally: + os_helper.unlink(ABSTFN) + + @os_helper.skip_unless_symlink + @skip_if_ABSTFN_contains_backslash + def test_realpath_nonterminal_symlink_to_file(self): + try: + with open(ABSTFN + "1", 'w') as f: + f.write('test_posixpath wuz ere') + os.symlink(ABSTFN + "1", ABSTFN) + self.assertEqual(realpath(ABSTFN, strict=False), ABSTFN + "1") + self.assertEqual(realpath(ABSTFN, strict=True), ABSTFN + "1") + self.assertEqual(realpath(ABSTFN + "/", strict=False), ABSTFN + "1") + self.assertRaises(NotADirectoryError, realpath, ABSTFN + "/", strict=True) + self.assertEqual(realpath(ABSTFN + "/.", strict=False), ABSTFN + "1") + self.assertRaises(NotADirectoryError, realpath, ABSTFN + "/.", strict=True) + self.assertEqual(realpath(ABSTFN + "/..", strict=False), dirname(ABSTFN)) + self.assertRaises(NotADirectoryError, realpath, ABSTFN + "/..", strict=True) + self.assertEqual(realpath(ABSTFN + "/subdir", strict=False), ABSTFN + "1/subdir") + self.assertRaises(NotADirectoryError, realpath, ABSTFN + "/subdir", strict=True) + finally: + os_helper.unlink(ABSTFN) + + @os_helper.skip_unless_symlink + @skip_if_ABSTFN_contains_backslash + def test_realpath_nonterminal_symlink_to_symlinks_to_file(self): + try: + with open(ABSTFN + "2", 'w') as f: + f.write('test_posixpath wuz ere') + os.symlink(ABSTFN + "2", ABSTFN + "1") + os.symlink(ABSTFN + "1", ABSTFN) + self.assertEqual(realpath(ABSTFN, strict=False), ABSTFN + "2") + self.assertEqual(realpath(ABSTFN, strict=True), ABSTFN + "2") + self.assertEqual(realpath(ABSTFN + "/", strict=False), ABSTFN + "2") + self.assertRaises(NotADirectoryError, realpath, ABSTFN + "/", strict=True) + self.assertEqual(realpath(ABSTFN + "/.", strict=False), ABSTFN + "2") + self.assertRaises(NotADirectoryError, realpath, ABSTFN + "/.", strict=True) + self.assertEqual(realpath(ABSTFN + "/..", strict=False), dirname(ABSTFN)) + self.assertRaises(NotADirectoryError, realpath, ABSTFN + "/..", strict=True) + self.assertEqual(realpath(ABSTFN + "/subdir", strict=False), ABSTFN + "2/subdir") + self.assertRaises(NotADirectoryError, realpath, ABSTFN + "/subdir", strict=True) + finally: + 
os_helper.unlink(ABSTFN) + def test_relpath(self): (real_getcwd, os.getcwd) = (os.getcwd, lambda: r"/home/user/bar") try: diff --git a/Lib/test/test_re.py b/Lib/test/test_re.py index ff95f54026e172..0d3599be87f228 100644 --- a/Lib/test/test_re.py +++ b/Lib/test/test_re.py @@ -1136,6 +1136,39 @@ def test_ignore_case_set(self): self.assertTrue(re.match(br'[19a]', b'a', re.I)) self.assertTrue(re.match(br'[19a]', b'A', re.I)) self.assertTrue(re.match(br'[19A]', b'a', re.I)) + self.assertTrue(re.match(r'[19\xc7]', '\xc7', re.I)) + self.assertTrue(re.match(r'[19\xc7]', '\xe7', re.I)) + self.assertTrue(re.match(r'[19\xe7]', '\xc7', re.I)) + self.assertTrue(re.match(r'[19\xe7]', '\xe7', re.I)) + self.assertTrue(re.match(r'[19\u0400]', '\u0400', re.I)) + self.assertTrue(re.match(r'[19\u0400]', '\u0450', re.I)) + self.assertTrue(re.match(r'[19\u0450]', '\u0400', re.I)) + self.assertTrue(re.match(r'[19\u0450]', '\u0450', re.I)) + self.assertTrue(re.match(r'[19\U00010400]', '\U00010400', re.I)) + self.assertTrue(re.match(r'[19\U00010400]', '\U00010428', re.I)) + self.assertTrue(re.match(r'[19\U00010428]', '\U00010400', re.I)) + self.assertTrue(re.match(r'[19\U00010428]', '\U00010428', re.I)) + + self.assertTrue(re.match(br'[19A]', b'A', re.I)) + self.assertTrue(re.match(br'[19a]', b'a', re.I)) + self.assertTrue(re.match(br'[19a]', b'A', re.I)) + self.assertTrue(re.match(br'[19A]', b'a', re.I)) + self.assertTrue(re.match(r'[19A]', 'A', re.I|re.A)) + self.assertTrue(re.match(r'[19a]', 'a', re.I|re.A)) + self.assertTrue(re.match(r'[19a]', 'A', re.I|re.A)) + self.assertTrue(re.match(r'[19A]', 'a', re.I|re.A)) + self.assertTrue(re.match(r'[19\xc7]', '\xc7', re.I|re.A)) + self.assertIsNone(re.match(r'[19\xc7]', '\xe7', re.I|re.A)) + self.assertIsNone(re.match(r'[19\xe7]', '\xc7', re.I|re.A)) + self.assertTrue(re.match(r'[19\xe7]', '\xe7', re.I|re.A)) + self.assertTrue(re.match(r'[19\u0400]', '\u0400', re.I|re.A)) + self.assertIsNone(re.match(r'[19\u0400]', '\u0450', re.I|re.A)) + self.assertIsNone(re.match(r'[19\u0450]', '\u0400', re.I|re.A)) + self.assertTrue(re.match(r'[19\u0450]', '\u0450', re.I|re.A)) + self.assertTrue(re.match(r'[19\U00010400]', '\U00010400', re.I|re.A)) + self.assertIsNone(re.match(r'[19\U00010400]', '\U00010428', re.I|re.A)) + self.assertIsNone(re.match(r'[19\U00010428]', '\U00010400', re.I|re.A)) + self.assertTrue(re.match(r'[19\U00010428]', '\U00010428', re.I|re.A)) # Two different characters have the same lowercase. 
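# The assertions above exercise full Unicode case folding inside character
# sets under re.IGNORECASE, and the ASCII-only folding that adding re.ASCII
# enforces. A tiny standalone illustration with U+00C7/U+00E7 (C with cedilla):
import re

print(bool(re.match(r'[\xc7]', '\xe7', re.IGNORECASE)))             # True: Unicode folding
print(bool(re.match(r'[\xc7]', '\xe7', re.IGNORECASE | re.ASCII)))  # False: ASCII-only folding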
assert 'K'.lower() == '\u212a'.lower() == 'k' # 'K' @@ -1172,8 +1205,10 @@ def test_ignore_case_range(self): self.assertTrue(re.match(br'[9-a]', b'_', re.I)) self.assertIsNone(re.match(br'[9-A]', b'_', re.I)) self.assertTrue(re.match(r'[\xc0-\xde]', '\xd7', re.I)) + self.assertTrue(re.match(r'[\xc0-\xde]', '\xe7', re.I)) self.assertIsNone(re.match(r'[\xc0-\xde]', '\xf7', re.I)) self.assertTrue(re.match(r'[\xe0-\xfe]', '\xf7', re.I)) + self.assertTrue(re.match(r'[\xe0-\xfe]', '\xc7', re.I)) self.assertIsNone(re.match(r'[\xe0-\xfe]', '\xd7', re.I)) self.assertTrue(re.match(r'[\u0430-\u045f]', '\u0450', re.I)) self.assertTrue(re.match(r'[\u0430-\u045f]', '\u0400', re.I)) @@ -1184,6 +1219,26 @@ def test_ignore_case_range(self): self.assertTrue(re.match(r'[\U00010400-\U00010427]', '\U00010428', re.I)) self.assertTrue(re.match(r'[\U00010400-\U00010427]', '\U00010400', re.I)) + self.assertTrue(re.match(r'[\xc0-\xde]', '\xd7', re.I|re.A)) + self.assertIsNone(re.match(r'[\xc0-\xde]', '\xe7', re.I|re.A)) + self.assertTrue(re.match(r'[\xe0-\xfe]', '\xf7', re.I|re.A)) + self.assertIsNone(re.match(r'[\xe0-\xfe]', '\xc7', re.I|re.A)) + self.assertTrue(re.match(r'[\u0430-\u045f]', '\u0450', re.I|re.A)) + self.assertIsNone(re.match(r'[\u0430-\u045f]', '\u0400', re.I|re.A)) + self.assertIsNone(re.match(r'[\u0400-\u042f]', '\u0450', re.I|re.A)) + self.assertTrue(re.match(r'[\u0400-\u042f]', '\u0400', re.I|re.A)) + self.assertTrue(re.match(r'[\U00010428-\U0001044f]', '\U00010428', re.I|re.A)) + self.assertIsNone(re.match(r'[\U00010428-\U0001044f]', '\U00010400', re.I|re.A)) + self.assertIsNone(re.match(r'[\U00010400-\U00010427]', '\U00010428', re.I|re.A)) + self.assertTrue(re.match(r'[\U00010400-\U00010427]', '\U00010400', re.I|re.A)) + + self.assertTrue(re.match(r'[N-\x7f]', 'A', re.I|re.A)) + self.assertTrue(re.match(r'[n-\x7f]', 'Z', re.I|re.A)) + self.assertTrue(re.match(r'[N-\uffff]', 'A', re.I|re.A)) + self.assertTrue(re.match(r'[n-\uffff]', 'Z', re.I|re.A)) + self.assertTrue(re.match(r'[N-\U00010000]', 'A', re.I|re.A)) + self.assertTrue(re.match(r'[n-\U00010000]', 'Z', re.I|re.A)) + # Two different characters have the same lowercase. 
assert 'K'.lower() == '\u212a'.lower() == 'k' # 'K' self.assertTrue(re.match(r'[J-M]', '\u212a', re.I)) @@ -2585,6 +2640,12 @@ def test_bug_gh100061(self): self.assertEqual(re.match("(?>(?:ab?c){1,3})", "aca").span(), (0, 2)) self.assertEqual(re.match("(?:ab?c){1,3}+", "aca").span(), (0, 2)) + def test_bug_gh101955(self): + # Possessive quantifier with nested alternative with capture groups + self.assertEqual(re.match('((x)|y|z)*+', 'xyz').groups(), ('z', 'x')) + self.assertEqual(re.match('((x)|y|z){3}+', 'xyz').groups(), ('z', 'x')) + self.assertEqual(re.match('((x)|y|z){3,}+', 'xyz').groups(), ('z', 'x')) + @unittest.skipIf(multiprocessing is None, 'test requires multiprocessing') def test_regression_gh94675(self): pattern = re.compile(r'(?<=[({}])(((//[^\n]*)?[\n])([\000-\040])*)*' @@ -2626,6 +2687,29 @@ def test_character_set_none(self): self.assertIsNone(re.search(p, s)) self.assertIsNone(re.search('(?s:.)' + p, s)) + def check_interrupt(self, pattern, string, maxcount): + class Interrupt(Exception): + pass + p = re.compile(pattern) + for n in range(maxcount): + try: + p._fail_after(n, Interrupt) + p.match(string) + return n + except Interrupt: + pass + finally: + p._fail_after(-1, None) + + @unittest.skipUnless(hasattr(re.Pattern, '_fail_after'), 'requires debug build') + def test_memory_leaks(self): + self.check_interrupt(r'(.)*:', 'abc:', 100) + self.check_interrupt(r'([^:])*?:', 'abc:', 100) + self.check_interrupt(r'([^:])*+:', 'abc:', 100) + self.check_interrupt(r'(.){2,4}:', 'abc:', 100) + self.check_interrupt(r'([^:]){2,4}?:', 'abc:', 100) + self.check_interrupt(r'([^:]){2,4}+:', 'abc:', 100) + def get_debug_out(pat): with captured_stdout() as out: diff --git a/Lib/test/test_socket.py b/Lib/test/test_socket.py index f2bc52ba6e8701..7b3914f30e5f52 100644 --- a/Lib/test/test_socket.py +++ b/Lib/test/test_socket.py @@ -5132,6 +5132,33 @@ def _testRecv(self): # send data: recv() will no longer block self.cli.sendall(MSG) + def testLargeTimeout(self): + # gh-126876: Check that a timeout larger than INT_MAX is replaced with + # INT_MAX in the poll() code path. The following assertion must not + # fail: assert(INT_MIN <= ms && ms <= INT_MAX). + large_timeout = _testcapi.INT_MAX + 1 + + # test recv() with large timeout + conn, addr = self.serv.accept() + self.addCleanup(conn.close) + try: + conn.settimeout(large_timeout) + except OverflowError: + # On Windows, settimeout() fails with OverflowError, whereas + # we want to test recv(). Just give up silently. 
+ return + msg = conn.recv(len(MSG)) + + def _testLargeTimeout(self): + # test sendall() with large timeout + large_timeout = _testcapi.INT_MAX + 1 + self.cli.connect((HOST, self.port)) + try: + self.cli.settimeout(large_timeout) + except OverflowError: + return + self.cli.sendall(MSG) + class FileObjectClassTestCase(SocketConnectedTest): """Unit tests for the object returned by socket.makefile() @@ -5334,6 +5361,8 @@ def _testMakefileClose(self): self.write_file.write(self.write_msg) self.write_file.flush() + @unittest.skipUnless(hasattr(sys, 'getrefcount'), + 'test needs sys.getrefcount()') def testMakefileCloseSocketDestroy(self): refcount_before = sys.getrefcount(self.cli_conn) self.read_file.close() diff --git a/Lib/test/test_sysconfig.py b/Lib/test/test_sysconfig.py index 1ade49281b4e26..9bbf8d0c6cf2da 100644 --- a/Lib/test/test_sysconfig.py +++ b/Lib/test/test_sysconfig.py @@ -5,6 +5,8 @@ import os import subprocess import shutil +import json +import textwrap from copy import copy from test.support import ( @@ -17,6 +19,7 @@ from test.support.import_helper import import_module from test.support.os_helper import (TESTFN, unlink, skip_unless_symlink, change_cwd) +from test.support.venv import VirtualEnvironment import sysconfig from sysconfig import (get_paths, get_platform, get_config_vars, @@ -101,6 +104,12 @@ def _cleanup_testfn(self): elif os.path.isdir(path): shutil.rmtree(path) + def venv(self, **venv_create_args): + return VirtualEnvironment.from_tmpdir( + prefix=f'{self.id()}-venv-', + **venv_create_args, + ) + def test_get_path_names(self): self.assertEqual(get_path_names(), sysconfig._SCHEME_KEYS) @@ -582,6 +591,105 @@ def test_osx_ext_suffix(self): suffix = sysconfig.get_config_var('EXT_SUFFIX') self.assertTrue(suffix.endswith('-darwin.so'), suffix) + @requires_subprocess() + def test_config_vars_depend_on_site_initialization(self): + script = textwrap.dedent(""" + import sysconfig + + config_vars = sysconfig.get_config_vars() + + import json + print(json.dumps(config_vars, indent=2)) + """) + + with self.venv() as venv: + site_config_vars = json.loads(venv.run('-c', script).stdout) + no_site_config_vars = json.loads(venv.run('-S', '-c', script).stdout) + + self.assertNotEqual(site_config_vars, no_site_config_vars) + # With the site initialization, the virtual environment should be enabled. + self.assertEqual(site_config_vars['base'], venv.prefix) + self.assertEqual(site_config_vars['platbase'], venv.prefix) + #self.assertEqual(site_config_vars['prefix'], venv.prefix) # # FIXME: prefix gets overwriten by _init_posix + # Without the site initialization, the virtual environment should be disabled. 
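# The sysconfig checks around here compare the interpreter's view with and
# without site initialization: with site, the 'base'/'platbase' scheme vars
# point at the virtual environment, while under -S they fall back to the
# installed base. A small sketch of the venv-vs-base distinction involved
# (PEP 405); run it inside an activated venv to see the values diverge:
import sys
import sysconfig

print("running in a venv:", sys.prefix != sys.base_prefix)
print("scheme base:      ", sysconfig.get_config_var('base'))
print("installed base:   ", sysconfig.get_config_var('installed_base'))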
+ self.assertEqual(no_site_config_vars['base'], site_config_vars['installed_base']) + self.assertEqual(no_site_config_vars['platbase'], site_config_vars['installed_platbase']) + + @requires_subprocess() + def test_config_vars_recalculation_after_site_initialization(self): + script = textwrap.dedent(""" + import sysconfig + + before = sysconfig.get_config_vars() + + import site + site.main() + + after = sysconfig.get_config_vars() + + import json + print(json.dumps({'before': before, 'after': after}, indent=2)) + """) + + with self.venv() as venv: + config_vars = json.loads(venv.run('-S', '-c', script).stdout) + + self.assertNotEqual(config_vars['before'], config_vars['after']) + self.assertEqual(config_vars['after']['base'], venv.prefix) + #self.assertEqual(config_vars['after']['prefix'], venv.prefix) # FIXME: prefix gets overwriten by _init_posix + #self.assertEqual(config_vars['after']['exec_prefix'], venv.prefix) # FIXME: exec_prefix gets overwriten by _init_posix + + @requires_subprocess() + def test_paths_depend_on_site_initialization(self): + script = textwrap.dedent(""" + import sysconfig + + paths = sysconfig.get_paths() + + import json + print(json.dumps(paths, indent=2)) + """) + + with self.venv() as venv: + site_paths = json.loads(venv.run('-c', script).stdout) + no_site_paths = json.loads(venv.run('-S', '-c', script).stdout) + + self.assertNotEqual(site_paths, no_site_paths) + + @requires_subprocess() + def test_makefile_overwrites_config_vars(self): + script = textwrap.dedent(""" + import sys, sysconfig + + data = { + 'prefix': sys.prefix, + 'exec_prefix': sys.exec_prefix, + 'base_prefix': sys.base_prefix, + 'base_exec_prefix': sys.base_exec_prefix, + 'config_vars': sysconfig.get_config_vars(), + } + + import json + print(json.dumps(data, indent=2)) + """) + + # We need to run the test inside a virtual environment so that + # sys.prefix/sys.exec_prefix have a different value from the + # prefix/exec_prefix Makefile variables. + with self.venv() as venv: + data = json.loads(venv.run('-c', script).stdout) + + # We expect sysconfig.get_config_vars to correctly reflect sys.prefix/sys.exec_prefix + self.assertEqual(data['prefix'], data['config_vars']['prefix']) + self.assertEqual(data['exec_prefix'], data['config_vars']['exec_prefix']) + # As a sanity check, just make sure sys.prefix/sys.exec_prefix really + # are different from the Makefile values. + # sys.base_prefix/sys.base_exec_prefix should reflect the value of the + # prefix/exec_prefix Makefile variables, so we use them in the comparison. 
+ self.assertNotEqual(data['prefix'], data['base_prefix']) + self.assertNotEqual(data['exec_prefix'], data['base_exec_prefix']) + + class MakefileTests(unittest.TestCase): @unittest.skipIf(sys.platform.startswith('win'), diff --git a/Lib/test/test_tkinter/test_misc.py b/Lib/test/test_tkinter/test_misc.py index b0b9ed60040443..579ce2af9fa0bf 100644 --- a/Lib/test/test_tkinter/test_misc.py +++ b/Lib/test/test_tkinter/test_misc.py @@ -66,9 +66,10 @@ def test_tk_busy(self): f.tk_busy_forget() self.assertFalse(f.tk_busy_status()) self.assertFalse(f.tk_busy_current()) - with self.assertRaisesRegex(TclError, "can't find busy window"): + errmsg = r"can(no|')t find busy window.*" + with self.assertRaisesRegex(TclError, errmsg): f.tk_busy_configure() - with self.assertRaisesRegex(TclError, "can't find busy window"): + with self.assertRaisesRegex(TclError, errmsg): f.tk_busy_forget() @requires_tk(8, 6, 6) @@ -87,7 +88,8 @@ def test_tk_busy_with_cursor(self): self.assertEqual(f.tk_busy_configure('cursor')[4], 'heart') f.tk_busy_forget() - with self.assertRaisesRegex(TclError, "can't find busy window"): + errmsg = r"can(no|')t find busy window.*" + with self.assertRaisesRegex(TclError, errmsg): f.tk_busy_cget('cursor') def test_tk_setPalette(self): diff --git a/Lib/test/test_tkinter/test_widgets.py b/Lib/test/test_tkinter/test_widgets.py index 9ea764ca2a39d8..f6e77973061956 100644 --- a/Lib/test/test_tkinter/test_widgets.py +++ b/Lib/test/test_tkinter/test_widgets.py @@ -7,9 +7,13 @@ from test.test_tkinter.support import (requires_tk, tk_version, get_tk_patchlevel, widget_eq, AbstractDefaultRootTest) + from test.test_tkinter.widget_tests import ( - add_standard_options, - AbstractWidgetTest, StandardOptionsTests, IntegerSizeTests, PixelSizeTests) + add_configure_tests, + AbstractWidgetTest, + StandardOptionsTests, + IntegerSizeTests, + PixelSizeTests) requires('gui') @@ -20,9 +24,17 @@ def float_round(x): return float(round(x)) - class AbstractToplevelTest(AbstractWidgetTest, PixelSizeTests): - _conv_pad_pixels = False + if tk_version < (9, 0): + _no_round = {'padx', 'pady'} + else: + _no_round = {'borderwidth', 'height', 'highlightthickness', 'padx', + 'pady', 'width'} + if tk_version < (9, 0): + _clipped = {'highlightthickness'} + else: + _clipped = {'borderwidth', 'height', 'highlightthickness', 'padx', + 'pady', 'width'} def test_configure_class(self): widget = self.create() @@ -58,7 +70,7 @@ def test_configure_visual(self): self.assertEqual(widget2['visual'], 'default') -@add_standard_options(StandardOptionsTests) +@add_configure_tests(StandardOptionsTests) class ToplevelTest(AbstractToplevelTest, unittest.TestCase): OPTIONS = ( 'background', 'backgroundimage', 'borderwidth', @@ -101,7 +113,7 @@ def test_configure_use(self): self.assertEqual(widget2['use'], wid) -@add_standard_options(StandardOptionsTests) +@add_configure_tests(StandardOptionsTests) class FrameTest(AbstractToplevelTest, unittest.TestCase): OPTIONS = ( 'background', 'backgroundimage', 'borderwidth', @@ -109,12 +121,17 @@ class FrameTest(AbstractToplevelTest, unittest.TestCase): 'highlightbackground', 'highlightcolor', 'highlightthickness', 'padx', 'pady', 'relief', 'takefocus', 'tile', 'visual', 'width', ) + if tk_version < (9, 0): + _no_round = {'padx', 'pady'} + else: + _no_round = {'borderwidth', 'height', 'highlightthickness', 'padx', + 'pady', 'width'} def create(self, **kwargs): return tkinter.Frame(self.root, **kwargs) -@add_standard_options(StandardOptionsTests) +@add_configure_tests(StandardOptionsTests) class 
LabelFrameTest(AbstractToplevelTest, unittest.TestCase): OPTIONS = ( 'background', 'borderwidth', @@ -124,6 +141,11 @@ class LabelFrameTest(AbstractToplevelTest, unittest.TestCase): 'labelanchor', 'labelwidget', 'padx', 'pady', 'relief', 'takefocus', 'text', 'visual', 'width', ) + if tk_version < (9, 0): + _no_round = {'padx', 'pady'} + else: + _no_round = {'borderwidth', 'height', 'highlightthickness', 'padx', + 'pady', 'width'} def create(self, **kwargs): return tkinter.LabelFrame(self.root, **kwargs) @@ -141,15 +163,16 @@ def test_configure_labelwidget(self): self.checkParam(widget, 'labelwidget', label, expected='.foo') label.destroy() - +# Label, Button, Checkbutton, Radiobutton, MenuButton class AbstractLabelTest(AbstractWidgetTest, IntegerSizeTests): - _conv_pixels = False - _clip_highlightthickness = tk_version >= (8, 7) - _clip_pad = tk_version >= (8, 7) - _clip_borderwidth = tk_version >= (8, 7) - - -@add_standard_options(StandardOptionsTests) + _rounds_pixels = False + if tk_version < (9, 0): + _clipped = {} + else: + _clipped = {'borderwidth', 'insertborderwidth', 'highlightthickness', + 'padx', 'pady'} + +@add_configure_tests(StandardOptionsTests) class LabelTest(AbstractLabelTest, unittest.TestCase): OPTIONS = ( 'activebackground', 'activeforeground', 'anchor', @@ -165,7 +188,7 @@ def create(self, **kwargs): return tkinter.Label(self.root, **kwargs) -@add_standard_options(StandardOptionsTests) +@add_configure_tests(StandardOptionsTests) class ButtonTest(AbstractLabelTest, unittest.TestCase): OPTIONS = ( 'activebackground', 'activeforeground', 'anchor', @@ -186,7 +209,7 @@ def test_configure_default(self): self.checkEnumParam(widget, 'default', 'active', 'disabled', 'normal') -@add_standard_options(StandardOptionsTests) +@add_configure_tests(StandardOptionsTests) class CheckbuttonTest(AbstractLabelTest, unittest.TestCase): OPTIONS = ( 'activebackground', 'activeforeground', 'anchor', @@ -240,8 +263,7 @@ def test_same_name(self): b2.deselect() self.assertEqual(v.get(), 0) - -@add_standard_options(StandardOptionsTests) +@add_configure_tests(StandardOptionsTests) class RadiobuttonTest(AbstractLabelTest, unittest.TestCase): OPTIONS = ( 'activebackground', 'activeforeground', 'anchor', @@ -264,7 +286,7 @@ def test_configure_value(self): self.checkParams(widget, 'value', 1, 2.3, '', 'any string') -@add_standard_options(StandardOptionsTests) +@add_configure_tests(StandardOptionsTests) class MenubuttonTest(AbstractLabelTest, unittest.TestCase): OPTIONS = ( 'activebackground', 'activeforeground', 'anchor', @@ -277,10 +299,11 @@ class MenubuttonTest(AbstractLabelTest, unittest.TestCase): 'takefocus', 'text', 'textvariable', 'underline', 'width', 'wraplength', ) - _conv_pixels = round - _clip_highlightthickness = True - _clip_pad = True - _clip_borderwidth = False + _rounds_pixels = (tk_version < (9, 0)) + if tk_version < (9, 0): + _clipped = {'highlightthickness', 'padx', 'pady'} + else: + _clipped ={ 'insertborderwidth', 'highlightthickness', 'padx', 'pady'} def create(self, **kwargs): return tkinter.Menubutton(self.root, **kwargs) @@ -298,7 +321,10 @@ def test_configure_image(self): widget = self.create() image = tkinter.PhotoImage(master=self.root, name='image1') self.checkParam(widget, 'image', image, conv=str) - errmsg = 'image "spam" doesn\'t exist' + if tk_version < (9, 0): + errmsg = 'image "spam" doesn\'t exist' + else: + errmsg = 'image "spam" does not exist' with self.assertRaises(tkinter.TclError) as cm: widget['image'] = 'spam' if errmsg is not None: @@ -328,9 +354,15 @@ 
def test_bad_kwarg(self): with self.assertRaisesRegex(TclError, r"^unknown option -image$"): tkinter.OptionMenu(self.root, None, 'b', image='') - -@add_standard_options(IntegerSizeTests, StandardOptionsTests) +@add_configure_tests(IntegerSizeTests, StandardOptionsTests) class EntryTest(AbstractWidgetTest, unittest.TestCase): + _rounds_pixels = (tk_version < (9, 0)) + if tk_version < (9, 0): + _clipped = {'highlightthickness'} + else: + _clipped = {'highlightthickness', 'borderwidth', 'insertborderwidth', + 'selectborderwidth'} + OPTIONS = ( 'background', 'borderwidth', 'cursor', 'disabledbackground', 'disabledforeground', @@ -355,16 +387,23 @@ def test_configure_disabledbackground(self): def test_configure_insertborderwidth(self): widget = self.create(insertwidth=100) self.checkPixelsParam(widget, 'insertborderwidth', - 0, 1.3, 2.6, 6, -2, '10p') + 0, 1.3, 2.6, 6, '10p') + self.checkParam(widget, 'insertborderwidth', -2) # insertborderwidth is bounded above by a half of insertwidth. - self.checkParam(widget, 'insertborderwidth', 60, expected=100//2) + expected = 100 // 2 if tk_version < (9, 0) else 60 + self.checkParam(widget, 'insertborderwidth', 60, expected=expected) def test_configure_insertwidth(self): widget = self.create() self.checkPixelsParam(widget, 'insertwidth', 1.3, 3.6, '10p') - self.checkParam(widget, 'insertwidth', 0.1, expected=2) - self.checkParam(widget, 'insertwidth', -2, expected=2) - self.checkParam(widget, 'insertwidth', 0.9, expected=1) + if tk_version < (9, 0): + self.checkParam(widget, 'insertwidth', 0.1, expected=2) + self.checkParam(widget, 'insertwidth', -2, expected=2) + self.checkParam(widget, 'insertwidth', 0.9, expected=1) + else: + self.checkParam(widget, 'insertwidth', 0.1) + self.checkParam(widget, 'insertwidth', -2, expected=0) + self.checkParam(widget, 'insertwidth', 0.9) def test_configure_invalidcommand(self): widget = self.create() @@ -422,7 +461,7 @@ def test_selection_methods(self): widget.selection_adjust(0) -@add_standard_options(StandardOptionsTests) +@add_configure_tests(StandardOptionsTests) class SpinboxTest(EntryTest, unittest.TestCase): OPTIONS = ( 'activebackground', 'background', 'borderwidth', @@ -559,7 +598,7 @@ def test_selection_element(self): self.assertEqual(widget.selection_element(), "buttondown") -@add_standard_options(StandardOptionsTests) +@add_configure_tests(StandardOptionsTests) class TextTest(AbstractWidgetTest, unittest.TestCase): OPTIONS = ( 'autoseparators', 'background', 'blockcursor', 'borderwidth', @@ -574,6 +613,9 @@ class TextTest(AbstractWidgetTest, unittest.TestCase): 'tabs', 'tabstyle', 'takefocus', 'undo', 'width', 'wrap', 'xscrollcommand', 'yscrollcommand', ) + _rounds_pixels = (tk_version < (9, 0)) + _no_round = {'selectborderwidth'} + _clipped = {'highlightthickness'} def create(self, **kwargs): return tkinter.Text(self.root, **kwargs) @@ -602,8 +644,10 @@ def test_configure_endline(self): def test_configure_height(self): widget = self.create() self.checkPixelsParam(widget, 'height', 100, 101.2, 102.6, '3c') - self.checkParam(widget, 'height', -100, expected=1) - self.checkParam(widget, 'height', 0, expected=1) + self.checkParam(widget, 'height', -100, + expected=1 if tk_version < (9, 0) else -100) + self.checkParam(widget, 'height', 0, + expected=1 if tk_version < (9, 0) else 0 ) def test_configure_maxundo(self): widget = self.create() @@ -696,7 +740,7 @@ def test_bbox(self): self.assertRaises(TypeError, widget.bbox, '1.1', 'end') -@add_standard_options(PixelSizeTests, StandardOptionsTests) 
+@add_configure_tests(PixelSizeTests, StandardOptionsTests)
 class CanvasTest(AbstractWidgetTest, unittest.TestCase):
     OPTIONS = (
         'background', 'borderwidth',
@@ -710,8 +754,15 @@ class CanvasTest(AbstractWidgetTest, unittest.TestCase):
         'xscrollcommand', 'xscrollincrement',
         'yscrollcommand', 'yscrollincrement', 'width',
     )
-
-    _conv_pixels = round
+    _rounds_pixels = True
+    if tk_version < (9, 0):
+        _no_round = {}
+        _clipped = {'highlightthickness'}
+    else:
+        _no_round = {'borderwidth', 'height', 'highlightthickness', 'width',
+                     'xscrollincrement', 'yscrollincrement'}
+        _clipped = {'borderwidth', 'height', 'highlightthickness', 'width',
+                    'xscrollincrement', 'yscrollincrement'}
     _stringify = True
 
     def create(self, **kwargs):
@@ -953,7 +1004,7 @@ def test_moveto(self):
         self.assertEqual(y2_2 - y1_2, y2_3 - y1_3)
 
 
-@add_standard_options(IntegerSizeTests, StandardOptionsTests)
+@add_configure_tests(IntegerSizeTests, StandardOptionsTests)
 class ListboxTest(AbstractWidgetTest, unittest.TestCase):
     OPTIONS = (
         'activestyle', 'background', 'borderwidth', 'cursor',
@@ -965,6 +1016,11 @@ class ListboxTest(AbstractWidgetTest, unittest.TestCase):
         'selectmode', 'setgrid', 'state', 'takefocus', 'width',
         'xscrollcommand', 'yscrollcommand',
     )
+    _rounds_pixels = (tk_version < (9, 0))
+    if tk_version < (9, 0):
+        _clipped = {'highlightthickness'}
+    else:
+        _clipped = {'borderwidth', 'highlightthickness', 'selectborderwidth'}
 
     def create(self, **kwargs):
         return tkinter.Listbox(self.root, **kwargs)
@@ -1091,7 +1147,7 @@ def test_get(self):
         self.assertRaises(TclError, lb.get, 2.4)
 
 
-@add_standard_options(PixelSizeTests, StandardOptionsTests)
+@add_configure_tests(PixelSizeTests, StandardOptionsTests)
 class ScaleTest(AbstractWidgetTest, unittest.TestCase):
     OPTIONS = (
         'activebackground', 'background', 'bigincrement', 'borderwidth',
@@ -1102,6 +1158,8 @@ class ScaleTest(AbstractWidgetTest, unittest.TestCase):
         'resolution', 'showvalue', 'sliderlength', 'sliderrelief',
         'state', 'takefocus', 'tickinterval', 'to', 'troughcolor', 'variable', 'width',
     )
+    _rounds_pixels = (tk_version < (9, 0))
+    _clipped = {'highlightthickness'}
     default_orient = 'vertical'
 
     def create(self, **kwargs):
@@ -1159,7 +1217,7 @@ def test_configure_to(self):
                         conv=float_round)
 
 
-@add_standard_options(PixelSizeTests, StandardOptionsTests)
+@add_configure_tests(PixelSizeTests, StandardOptionsTests)
 class ScrollbarTest(AbstractWidgetTest, unittest.TestCase):
     OPTIONS = (
         'activebackground', 'activerelief',
@@ -1170,7 +1228,14 @@ class ScrollbarTest(AbstractWidgetTest, unittest.TestCase):
         'repeatdelay', 'repeatinterval',
         'takefocus', 'troughcolor', 'width',
     )
-    _conv_pixels = round
+    _rounds_pixels = True
+    if tk_version >= (9, 0):
+        _no_round = {'borderwidth', 'elementborderwidth', 'highlightthickness',
+                     'width'}
+    if tk_version < (9, 0):
+        _clipped = {'highlightthickness'}
+    else:
+        _clipped = {'borderwidth', 'highlightthickness', 'width'}
     _stringify = True
     default_orient = 'vertical'
 
@@ -1208,7 +1273,7 @@ def test_set(self):
         self.assertRaises(TypeError, sb.set, 0.6, 0.7, 0.8)
 
 
-@add_standard_options(StandardOptionsTests)
+@add_configure_tests(StandardOptionsTests)
 class PanedWindowTest(AbstractWidgetTest, unittest.TestCase):
     OPTIONS = (
         'background', 'borderwidth', 'cursor',
@@ -1219,6 +1284,15 @@ class PanedWindowTest(AbstractWidgetTest, unittest.TestCase):
         'sashcursor', 'sashpad', 'sashrelief', 'sashwidth',
         'showhandle', 'width',
     )
+    _rounds_pixels = True
+    if tk_version < (9, 0):
+        _no_round = {'handlesize', 'height', 'proxyborderwidth', 'sashwidth',
'selectborderwidth', 'width'} + else: + _no_round = {'borderwidth', 'handlepad', 'handlesize', 'height', + 'proxyborderwidth', 'sashpad', 'sashwidth', + 'selectborderwidth', 'width'} + _clipped = {} default_orient = 'horizontal' def create(self, **kwargs): @@ -1347,13 +1421,13 @@ def test_paneconfigure_minsize(self): def test_paneconfigure_padx(self): p, b, c = self.create2() - self.check_paneconfigure(p, b, 'padx', 1.3, 1) + self.check_paneconfigure(p, b, 'padx', 1.3, 1 if tk_version < (9, 0) else 1.3) self.check_paneconfigure_bad(p, b, 'padx', EXPECTED_SCREEN_DISTANCE_ERRMSG.format('badValue')) def test_paneconfigure_pady(self): p, b, c = self.create2() - self.check_paneconfigure(p, b, 'pady', 1.3, 1) + self.check_paneconfigure(p, b, 'pady', 1.3, 1 if tk_version < (9, 0) else 1.3) self.check_paneconfigure_bad(p, b, 'pady', EXPECTED_SCREEN_DISTANCE_ERRMSG.format('badValue')) @@ -1379,17 +1453,17 @@ def test_paneconfigure_width(self): EXPECTED_SCREEN_DISTANCE_OR_EMPTY_ERRMSG.format('badValue')) -@add_standard_options(StandardOptionsTests) +@add_configure_tests(StandardOptionsTests) class MenuTest(AbstractWidgetTest, unittest.TestCase): OPTIONS = ( 'activebackground', 'activeborderwidth', 'activeforeground', - 'activerelief', - 'background', 'borderwidth', 'cursor', + 'activerelief', 'background', 'borderwidth', 'cursor', 'disabledforeground', 'font', 'foreground', 'postcommand', 'relief', 'selectcolor', 'takefocus', 'tearoff', 'tearoffcommand', 'title', 'type', ) - _conv_pixels = False + _rounds_pixels = False + _clipped = {} def create(self, **kwargs): return tkinter.Menu(self.root, **kwargs) @@ -1458,7 +1532,7 @@ def test_entryconfigure_variable(self): self.assertEqual(str(m1.entrycget(1, 'variable')), str(v2)) -@add_standard_options(PixelSizeTests, StandardOptionsTests) +@add_configure_tests(PixelSizeTests, StandardOptionsTests) class MessageTest(AbstractWidgetTest, unittest.TestCase): OPTIONS = ( 'anchor', 'aspect', 'background', 'borderwidth', @@ -1467,11 +1541,12 @@ class MessageTest(AbstractWidgetTest, unittest.TestCase): 'justify', 'padx', 'pady', 'relief', 'takefocus', 'text', 'textvariable', 'width', ) - _conv_pad_pixels = False - if tk_version >= (8, 7): - _conv_pixels = False - _clip_pad = tk_version >= (8, 7) - _clip_borderwidth = tk_version >= (8, 7) + _rounds_pixels = (tk_version < (9, 0)) + _no_round = {'padx', 'pady'} + if tk_version < (9, 0): + _clipped = {'highlightthickness'} + else: + _clipped = {'borderwidth', 'highlightthickness', 'padx', 'pady'} def create(self, **kwargs): return tkinter.Message(self.root, **kwargs) @@ -1482,16 +1557,14 @@ def test_configure_aspect(self): def test_configure_padx(self): widget = self.create() - self.checkPixelsParam(widget, 'padx', 3, 4.4, 5.6, '12m', - conv=self._conv_pad_pixels) - expected = self._default_pixels if self._clip_pad else -2 + self.checkPixelsParam(widget, 'padx', 3, 4.4, 5.6, '12m') + expected = -2 if tk_version < (9, 0) else self._default_pixels self.checkParam(widget, 'padx', -2, expected=expected) def test_configure_pady(self): widget = self.create() - self.checkPixelsParam(widget, 'pady', 3, 4.4, 5.6, '12m', - conv=self._conv_pad_pixels) - expected = self._default_pixels if self._clip_pad else -2 + self.checkPixelsParam(widget, 'pady', 3, 4.4, 5.6, '12m') + expected = -2 if tk_version < (9, 0) else self._default_pixels self.checkParam(widget, 'pady', -2, expected=expected) def test_configure_width(self): diff --git a/Lib/test/test_tkinter/widget_tests.py b/Lib/test/test_tkinter/widget_tests.py index 
8ab2f74245095d..ac7fb5977e04fc 100644 --- a/Lib/test/test_tkinter/widget_tests.py +++ b/Lib/test/test_tkinter/widget_tests.py @@ -6,17 +6,16 @@ pixels_conv, tcl_obj_eq) import test.support - _sentinel = object() +# Options which accept all values allowed by Tk_GetPixels +# borderwidth = bd + class AbstractWidgetTest(AbstractTkTest): - _default_pixels = '' if tk_version >= (9, 0) else -1 if tk_version >= (8, 7) else '' - _conv_pixels = round - _conv_pad_pixels = None - _stringify = False - _clip_highlightthickness = True - _clip_pad = False - _clip_borderwidth = False + _default_pixels = '' # Value for unset pixel options. + _rounds_pixels = True # True if some pixel options are rounded. + _no_round = {} # Pixel options which are not rounded nonetheless + _stringify = False # Whether to convert tuples to strings _allow_empty_justify = False @property @@ -44,6 +43,9 @@ def checkParam(self, widget, name, value, *, expected=_sentinel, widget[name] = value if expected is _sentinel: expected = value + if name in self._clipped: + if not isinstance(expected, str): + expected = max(expected, 0) if conv: expected = conv(expected) if self._stringify or not self.wantobjects: @@ -140,14 +142,17 @@ def checkEnumParam(self, widget, name, *values, errmsg = 'bad' + errmsg2 self.checkInvalidParam(widget, name, 'spam', errmsg=errmsg) - def checkPixelsParam(self, widget, name, *values, - conv=None, **kwargs): - if conv is None: - conv = self._conv_pixels + def checkPixelsParam(self, widget, name, *values, conv=None, **kwargs): + if not self._rounds_pixels or name in self._no_round: + conv = False + elif conv != str: + conv = round for value in values: expected = _sentinel conv1 = conv if isinstance(value, str): + if not getattr(self, '_converts_pixels', True): + conv1 = str if conv1 and conv1 is not str: expected = pixels_conv(value) * self.scaling conv1 = round @@ -172,8 +177,12 @@ def checkReliefParam(self, widget, name, *, allow_empty=False): def checkImageParam(self, widget, name): image = tkinter.PhotoImage(master=self.root, name='image1') self.checkParam(widget, name, image, conv=str) + if tk_version < (9, 0): + errmsg = 'image "spam" doesn\'t exist' + else: + errmsg = 'image "spam" does not exist' self.checkInvalidParam(widget, name, 'spam', - errmsg='image "spam" doesn\'t exist') + errmsg=errmsg) widget[name] = '' def checkVariableParam(self, widget, name, var): @@ -215,31 +224,80 @@ def test_keys(self): print('%s.OPTIONS doesn\'t contain "%s"' % (self.__class__.__name__, k)) +class PixelOptionsTests: + """Standard options that accept all formats acceptable to Tk_GetPixels. 
-class StandardOptionsTests:
-    STANDARD_OPTIONS = (
-        'activebackground', 'activeborderwidth', 'activeforeground', 'anchor',
-        'background', 'bitmap', 'borderwidth', 'compound', 'cursor',
-        'disabledforeground', 'exportselection', 'font', 'foreground',
-        'highlightbackground', 'highlightcolor', 'highlightthickness',
-        'image', 'insertbackground', 'insertborderwidth',
-        'insertofftime', 'insertontime', 'insertwidth',
-        'jump', 'justify', 'orient', 'padx', 'pady', 'relief',
-        'repeatdelay', 'repeatinterval',
-        'selectbackground', 'selectborderwidth', 'selectforeground',
-        'setgrid', 'takefocus', 'text', 'textvariable', 'troughcolor',
-        'underline', 'wraplength', 'xscrollcommand', 'yscrollcommand',
-    )
-
-    def test_configure_activebackground(self):
-        widget = self.create()
-        self.checkColorParam(widget, 'activebackground')
+    In addition to numbers, these options can be set with distances
+    specified as a string consisting of a number followed by a single
+    character giving the unit of distance. The allowed units are:
+    millimeters ('m'), centimeters ('c'), inches ('i') or points ('p').
+    In Tk 9 a cget call for one of these options returns a Tcl_Obj of
+    type "pixels", whose string representation is the distance string
+    passed to configure.
+    """
+    PIXEL_OPTIONS = ('activeborderwidth', 'borderwidth', 'highlightthickness',
+        'insertborderwidth', 'insertwidth', 'padx', 'pady', 'selectborderwidth')
 
     def test_configure_activeborderwidth(self):
         widget = self.create()
         self.checkPixelsParam(widget, 'activeborderwidth',
                               0, 1.3, 2.9, 6, -2, '10p')
 
+    def test_configure_borderwidth(self):
+        widget = self.create()
+        self.checkPixelsParam(widget, 'borderwidth',
+                              0, 1.3, 2.6, 6, '10p')
+        self.checkParam(widget, 'borderwidth', -2)
+        if 'bd' in self.OPTIONS:
+            self.checkPixelsParam(widget, 'bd', 0, 1.3, 2.6, 6, '10p')
+            self.checkParam(widget, 'bd', -2)
+
+    def test_configure_highlightthickness(self):
+        widget = self.create()
+        self.checkPixelsParam(widget, 'highlightthickness',
+                              0, 1.3, 2.6, 6, '10p')
+        self.checkParam(widget, 'highlightthickness', -2)
+
+    def test_configure_insertborderwidth(self):
+        widget = self.create()
+        self.checkPixelsParam(widget, 'insertborderwidth',
+                              0, 1.3, 2.6, 6, '10p')
+        self.checkParam(widget, 'insertborderwidth', -2)
+
+    def test_configure_insertwidth(self):
+        widget = self.create()
+        self.checkPixelsParam(widget, 'insertwidth', 1.3, 2.6, -2, '10p')
+
+    def test_configure_padx(self):
+        widget = self.create()
+        self.checkPixelsParam(widget, 'padx', 3, 4.4, 5.6, '12m')
+        self.checkParam(widget, 'padx', -2)
+
+    def test_configure_pady(self):
+        widget = self.create()
+        self.checkPixelsParam(widget, 'pady', 3, 4.4, 5.6, '12m')
+        self.checkParam(widget, 'pady', -2)
+
+    def test_configure_selectborderwidth(self):
+        widget = self.create()
+        self.checkPixelsParam(widget, 'selectborderwidth', 1.3, 2.6, -2, '10p')
+
+class StandardOptionsTests(PixelOptionsTests):
+
+    STANDARD_OPTIONS = ( 'activebackground', 'activeforeground',
+        'anchor', 'background', 'bitmap', 'compound', 'cursor',
+        'disabledforeground', 'exportselection', 'font', 'foreground',
+        'highlightbackground', 'highlightcolor', 'image',
+        'insertbackground', 'insertofftime', 'insertontime', 'jump',
+        'justify', 'orient', 'relief', 'repeatdelay', 'repeatinterval',
+        'selectbackground', 'selectforeground', 'setgrid', 'takefocus',
+        'text', 'textvariable', 'troughcolor', 'underline', 'wraplength',
+        'xscrollcommand', 'yscrollcommand', ) + PixelOptionsTests.PIXEL_OPTIONS
+
+    def 
test_configure_activebackground(self): + widget = self.create() + self.checkColorParam(widget, 'activebackground') + def test_configure_activeforeground(self): widget = self.create() self.checkColorParam(widget, 'activeforeground') @@ -277,18 +335,6 @@ def test_configure_bitmap(self): self.checkInvalidParam(widget, 'bitmap', 'spam', errmsg='bitmap "spam" not defined') - def test_configure_borderwidth(self): - widget = self.create() - self.checkPixelsParam(widget, 'borderwidth', - 0, 1.3, 2.6, 6, '10p') - expected = 0 if self._clip_borderwidth else -2 - self.checkParam(widget, 'borderwidth', -2, expected=expected, - conv=self._conv_pixels) - if 'bd' in self.OPTIONS: - self.checkPixelsParam(widget, 'bd', 0, 1.3, 2.6, 6, '10p') - self.checkParam(widget, 'bd', -2, expected=expected, - conv=self._conv_pixels) - def test_configure_compound(self): widget = self.create() self.checkEnumParam(widget, 'compound', @@ -312,8 +358,8 @@ def test_configure_font(self): '-Adobe-Helvetica-Medium-R-Normal--*-120-*-*-*-*-*-*') is_ttk = widget.__class__.__module__ == 'tkinter.ttk' if not is_ttk: - self.checkInvalidParam(widget, 'font', '', - errmsg='font "" doesn\'t exist') + errmsg = 'font "" does ?n[o\']t exist' + self.checkInvalidParam(widget, 'font', '', errmsg=errmsg) def test_configure_foreground(self): widget = self.create() @@ -329,14 +375,6 @@ def test_configure_highlightcolor(self): widget = self.create() self.checkColorParam(widget, 'highlightcolor') - def test_configure_highlightthickness(self): - widget = self.create() - self.checkPixelsParam(widget, 'highlightthickness', - 0, 1.3, 2.6, 6, '10p') - expected = 0 if self._clip_highlightthickness else -2 - self.checkParam(widget, 'highlightthickness', -2, expected=expected, - conv=self._conv_pixels) - def test_configure_image(self): widget = self.create() self.checkImageParam(widget, 'image') @@ -345,11 +383,6 @@ def test_configure_insertbackground(self): widget = self.create() self.checkColorParam(widget, 'insertbackground') - def test_configure_insertborderwidth(self): - widget = self.create() - self.checkPixelsParam(widget, 'insertborderwidth', - 0, 1.3, 2.6, 6, -2, '10p') - def test_configure_insertofftime(self): widget = self.create() self.checkIntegerParam(widget, 'insertofftime', 100) @@ -358,10 +391,6 @@ def test_configure_insertontime(self): widget = self.create() self.checkIntegerParam(widget, 'insertontime', 100) - def test_configure_insertwidth(self): - widget = self.create() - self.checkPixelsParam(widget, 'insertwidth', 1.3, 2.6, -2, '10p') - def test_configure_jump(self): widget = self.create() self.checkBooleanParam(widget, 'jump') @@ -379,22 +408,6 @@ def test_configure_orient(self): self.assertEqual(str(widget['orient']), self.default_orient) self.checkEnumParam(widget, 'orient', 'horizontal', 'vertical') - def test_configure_padx(self): - widget = self.create() - self.checkPixelsParam(widget, 'padx', 3, 4.4, 5.6, '12m', - conv=self._conv_pad_pixels) - expected = 0 if self._clip_pad else -2 - self.checkParam(widget, 'padx', -2, expected=expected, - conv=self._conv_pad_pixels) - - def test_configure_pady(self): - widget = self.create() - self.checkPixelsParam(widget, 'pady', 3, 4.4, 5.6, '12m', - conv=self._conv_pad_pixels) - expected = 0 if self._clip_pad else -2 - self.checkParam(widget, 'pady', -2, expected=expected, - conv=self._conv_pad_pixels) - @requires_tk(8, 7) def test_configure_placeholder(self): widget = self.create() @@ -421,10 +434,6 @@ def test_configure_selectbackground(self): widget = self.create() 
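The PixelOptionsTests docstring above describes the Tk_GetPixels formats these options accept; a minimal sketch of what that means in practice (hypothetical snippet, not part of the patch):

    import tkinter

    root = tkinter.Tk()
    label = tkinter.Label(root)
    # Plain numbers are pixel counts; floats are accepted.
    label['borderwidth'] = 2.6
    # Distance strings combine a number with a unit suffix:
    # 'm' millimeters, 'c' centimeters, 'i' inches, 'p' points.
    label['borderwidth'] = '10p'
    # Whether cget returns a rounded pixel count or the original distance
    # depends on the widget, the option and the Tk version (what the
    # _rounds_pixels/_no_round attributes in these tests encode); on Tk 9
    # the value comes back as a "pixels" object whose str() is '10p'.
    print(label['borderwidth'])
    root.destroy()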
self.checkColorParam(widget, 'selectbackground') - def test_configure_selectborderwidth(self): - widget = self.create() - self.checkPixelsParam(widget, 'selectborderwidth', 1.3, 2.6, -2, '10p') - def test_configure_selectforeground(self): widget = self.create() self.checkColorParam(widget, 'selectforeground') @@ -534,6 +543,7 @@ def test_configure_variable(self): class IntegerSizeTests: + """ Tests widgets which only accept integral width and height.""" def test_configure_height(self): widget = self.create() self.checkIntegerParam(widget, 'height', 100, -100, 0) @@ -544,6 +554,7 @@ def test_configure_width(self): class PixelSizeTests: + """ Tests widgets which accept screen distances for width and height.""" def test_configure_height(self): widget = self.create() self.checkPixelsParam(widget, 'height', 100, 101.2, 102.6, -100, 0, '3c') @@ -553,7 +564,7 @@ def test_configure_width(self): self.checkPixelsParam(widget, 'width', 402, 403.4, 404.6, -402, 0, '5i') -def add_standard_options(*source_classes): +def add_configure_tests(*source_classes): # This decorator adds test_configure_xxx methods from source classes for # every xxx option in the OPTIONS class attribute if they are not defined # explicitly. diff --git a/Lib/test/test_tomllib/test_error.py b/Lib/test/test_tomllib/test_error.py index d2ef59a29ca350..3a8587492859ca 100644 --- a/Lib/test/test_tomllib/test_error.py +++ b/Lib/test/test_tomllib/test_error.py @@ -49,7 +49,9 @@ def test_type_error(self): self.assertEqual(str(exc_info.exception), "Expected str object, not 'bool'") def test_module_name(self): - self.assertEqual(tomllib.TOMLDecodeError().__module__, tomllib.__name__) + self.assertEqual( + tomllib.TOMLDecodeError("", "", 0).__module__, tomllib.__name__ + ) def test_invalid_parse_float(self): def dict_returner(s: str) -> dict: @@ -64,3 +66,33 @@ def list_returner(s: str) -> list: self.assertEqual( str(exc_info.exception), "parse_float must not return dicts or lists" ) + + def test_deprecated_tomldecodeerror(self): + for args in [ + (), + ("err msg",), + (None,), + (None, "doc"), + ("err msg", None), + (None, "doc", None), + ("err msg", "doc", None), + ("one", "two", "three", "four"), + ("one", "two", 3, "four", "five"), + ]: + with self.assertWarns(DeprecationWarning): + e = tomllib.TOMLDecodeError(*args) # type: ignore[arg-type] + self.assertEqual(e.args, args) + + def test_tomldecodeerror(self): + msg = "error parsing" + doc = "v=1\n[table]\nv='val'" + pos = 13 + formatted_msg = "error parsing (at line 3, column 2)" + e = tomllib.TOMLDecodeError(msg, doc, pos) + self.assertEqual(e.args, (formatted_msg,)) + self.assertEqual(str(e), formatted_msg) + self.assertEqual(e.msg, msg) + self.assertEqual(e.doc, doc) + self.assertEqual(e.pos, pos) + self.assertEqual(e.lineno, 3) + self.assertEqual(e.colno, 2) diff --git a/Lib/test/test_tools/test_i18n.py b/Lib/test/test_tools/test_i18n.py index 21dead8f943bb7..6f71f0976819f1 100644 --- a/Lib/test/test_tools/test_i18n.py +++ b/Lib/test/test_tools/test_i18n.py @@ -87,17 +87,23 @@ def assert_POT_equal(self, expected, actual): self.maxDiff = None self.assertEqual(normalize_POT_file(expected), normalize_POT_file(actual)) - def extract_docstrings_from_str(self, module_content): - """ utility: return all msgids extracted from module_content """ - filename = 'test_docstrings.py' - with temp_cwd(None) as cwd: + def extract_from_str(self, module_content, *, args=(), strict=True): + """Return all msgids extracted from module_content.""" + filename = 'test.py' + with temp_cwd(None): with 
open(filename, 'w', encoding='utf-8') as fp: fp.write(module_content) - assert_python_ok('-Xutf8', self.script, '-D', filename) + res = assert_python_ok('-Xutf8', self.script, *args, filename) + if strict: + self.assertEqual(res.err, b'') with open('messages.pot', encoding='utf-8') as fp: data = fp.read() return self.get_msgids(data) + def extract_docstrings_from_str(self, module_content): + """Return all docstrings extracted from module_content.""" + return self.extract_from_str(module_content, args=('--docstrings',), strict=False) + def test_header(self): """Make sure the required fields are in the header, according to: http://www.gnu.org/software/gettext/manual/gettext.html#Header-Entry @@ -344,6 +350,23 @@ def test_calls_in_fstring_with_partially_wrong_expression(self): self.assertNotIn('foo', msgids) self.assertIn('bar', msgids) + def test_function_and_class_names(self): + """Test that function and class names are not mistakenly extracted.""" + msgids = self.extract_from_str(dedent('''\ + def _(x): + pass + + def _(x="foo"): + pass + + async def _(x): + pass + + class _(object): + pass + ''')) + self.assertEqual(msgids, ['']) + def test_pygettext_output(self): """Test that the pygettext output exactly matches snapshots.""" for input_file in DATA_DIR.glob('*.py'): diff --git a/Lib/test/test_ttk/test_style.py b/Lib/test/test_ttk/test_style.py index eeaf5de2e303f6..19918772514ad4 100644 --- a/Lib/test/test_ttk/test_style.py +++ b/Lib/test/test_ttk/test_style.py @@ -205,7 +205,8 @@ def test_element_create_from_errors(self): style = self.style with self.assertRaises(IndexError): style.element_create('plain.newelem', 'from') - with self.assertRaisesRegex(TclError, 'theme "spam" doesn\'t exist'): + with self.assertRaisesRegex(TclError, + 'theme "spam" (does not|doesn\'t) exist'): style.element_create('plain.newelem', 'from', 'spam') def test_element_create_image(self): diff --git a/Lib/test/test_ttk/test_widgets.py b/Lib/test/test_ttk/test_widgets.py index 10bec33be617a1..d5620becfa7187 100644 --- a/Lib/test/test_ttk/test_widgets.py +++ b/Lib/test/test_ttk/test_widgets.py @@ -8,7 +8,7 @@ from test.test_tkinter.support import ( AbstractTkTest, requires_tk, tk_version, get_tk_patchlevel, simulate_mouse_click, AbstractDefaultRootTest) -from test.test_tkinter.widget_tests import (add_standard_options, +from test.test_tkinter.widget_tests import (add_configure_tests, AbstractWidgetTest, StandardOptionsTests, IntegerSizeTests, PixelSizeTests) requires('gui') @@ -125,10 +125,11 @@ def test_cb(arg1, **kw): class AbstractToplevelTest(AbstractWidgetTest, PixelSizeTests): - _conv_pixels = False + _rounds_pixels = False + _clipped = {} -@add_standard_options(StandardTtkOptionsTests) +@add_configure_tests(StandardTtkOptionsTests) class FrameTest(AbstractToplevelTest, unittest.TestCase): OPTIONS = ( 'borderwidth', 'class', 'cursor', 'height', @@ -140,7 +141,7 @@ def create(self, **kwargs): return ttk.Frame(self.root, **kwargs) -@add_standard_options(StandardTtkOptionsTests) +@add_configure_tests(StandardTtkOptionsTests) class LabelFrameTest(AbstractToplevelTest, unittest.TestCase): OPTIONS = ( 'borderwidth', 'class', 'cursor', 'height', @@ -168,6 +169,8 @@ def test_configure_labelwidget(self): class AbstractLabelTest(AbstractWidgetTest): _allow_empty_justify = True + _rounds_pixels = False + _clipped = {} def checkImageParam(self, widget, name): image = tkinter.PhotoImage(master=self.root, name='image1') @@ -179,8 +182,11 @@ def checkImageParam(self, widget, name): expected=('image1', 'active', 'image2')) 
self.checkParam(widget, name, 'image1 active image2', expected=('image1', 'active', 'image2')) - self.checkInvalidParam(widget, name, 'spam', - errmsg='image "spam" doesn\'t exist') + if tk_version < (9, 0): + errmsg = 'image "spam" doesn\'t exist' + else: + errmsg = 'image "spam" does not exist' + self.checkInvalidParam(widget, name, 'spam', errmsg=errmsg) def test_configure_compound(self): values = ('none', 'text', 'image', 'center', 'top', 'bottom', 'left', 'right') @@ -196,7 +202,7 @@ def test_configure_width(self): self.checkParams(widget, 'width', 402, -402, 0) -@add_standard_options(StandardTtkOptionsTests) +@add_configure_tests(StandardTtkOptionsTests) class LabelTest(AbstractLabelTest, unittest.TestCase): OPTIONS = ( 'anchor', 'background', 'borderwidth', @@ -214,7 +220,7 @@ def create(self, **kwargs): test_configure_justify = StandardOptionsTests.test_configure_justify -@add_standard_options(StandardTtkOptionsTests) +@add_configure_tests(StandardTtkOptionsTests) class ButtonTest(AbstractLabelTest, unittest.TestCase): OPTIONS = ( 'class', 'command', 'compound', 'cursor', 'default', @@ -239,7 +245,7 @@ def test_invoke(self): self.assertTrue(success) -@add_standard_options(StandardTtkOptionsTests) +@add_configure_tests(StandardTtkOptionsTests) class CheckbuttonTest(AbstractLabelTest, unittest.TestCase): OPTIONS = ( 'class', 'command', 'compound', 'cursor', @@ -326,7 +332,7 @@ def test_unique_variables2(self): self.assertEqual(len(set(variables)), len(buttons), variables) -@add_standard_options(IntegerSizeTests, StandardTtkOptionsTests) +@add_configure_tests(IntegerSizeTests, StandardTtkOptionsTests) class EntryTest(AbstractWidgetTest, unittest.TestCase): OPTIONS = ( 'background', 'class', 'cursor', @@ -336,6 +342,8 @@ class EntryTest(AbstractWidgetTest, unittest.TestCase): 'show', 'state', 'style', 'takefocus', 'textvariable', 'validate', 'validatecommand', 'width', 'xscrollcommand', ) + _rounds_pixels = False + _clipped = {} # bpo-27313: macOS Tk/Tcl may or may not report 'Entry.field'. 
IDENTIFY_AS = {'Entry.field', 'textarea'} @@ -371,8 +379,12 @@ def test_bbox(self): self.assertRaises(tkinter.TclError, self.entry.bbox, None) def test_identify(self): + if (tk_version >= (9, 0) and sys.platform == 'darwin' + and isinstance(self.entry, ttk.Combobox)): + self.skipTest('Test does not work on macOS Tk 9.') + # https://core.tcl-lang.org/tk/tktview/8b49e9cfa6 self.entry.pack() - self.entry.update() + self.root.update() self.assertIn(self.entry.identify(5, 5), self.IDENTIFY_AS) self.assertEqual(self.entry.identify(-1, -1), "") @@ -450,7 +462,7 @@ def validate(content): self.assertEqual(self.entry.state(), ()) -@add_standard_options(IntegerSizeTests, StandardTtkOptionsTests) +@add_configure_tests(IntegerSizeTests, StandardTtkOptionsTests) class ComboboxTest(EntryTest, unittest.TestCase): OPTIONS = ( 'background', 'class', 'cursor', 'exportselection', @@ -479,11 +491,14 @@ def _show_drop_down_listbox(self): x, y = width - 5, 5 if sys.platform != 'darwin': # there's no down arrow on macOS self.assertRegex(self.combo.identify(x, y), r'.*downarrow\Z') - self.combo.event_generate('', x=x, y=y) + self.combo.event_generate('', x=x, y=y) self.combo.event_generate('', x=x, y=y) - self.combo.update_idletasks() def test_virtual_event(self): + if (tk_version >= (9, 0) and sys.platform == 'darwin' + and isinstance(self.entry, ttk.Combobox)): + self.skipTest('Test does not work on macOS Tk 9.') + # https://core.tcl-lang.org/tk/tktview/8b49e9cfa6 success = [] self.combo['values'] = [1] @@ -501,6 +516,10 @@ def test_virtual_event(self): self.assertTrue(success) def test_configure_postcommand(self): + if (tk_version >= (9, 0) and sys.platform == 'darwin' + and isinstance(self.entry, ttk.Combobox)): + self.skipTest('Test does not work on macOS Tk 9.') + # https://core.tcl-lang.org/tk/tktview/8b49e9cfa6 success = [] self.combo['postcommand'] = lambda: success.append(True) @@ -576,12 +595,14 @@ def check_get_current(getval, currval): combo2.destroy() -@add_standard_options(IntegerSizeTests, StandardTtkOptionsTests) +@add_configure_tests(IntegerSizeTests, StandardTtkOptionsTests) class PanedWindowTest(AbstractWidgetTest, unittest.TestCase): OPTIONS = ( 'class', 'cursor', 'height', 'orient', 'style', 'takefocus', 'width', ) + _rounds_pixels = False + _clipped = {} def setUp(self): super().setUp() @@ -712,7 +733,7 @@ def test_sashpos(self): self.assertIsInstance(self.paned.sashpos(0), int) -@add_standard_options(StandardTtkOptionsTests) +@add_configure_tests(StandardTtkOptionsTests) class RadiobuttonTest(AbstractLabelTest, unittest.TestCase): OPTIONS = ( 'class', 'command', 'compound', 'cursor', @@ -791,13 +812,14 @@ def test_configure_menu(self): menu.destroy() -@add_standard_options(StandardTtkOptionsTests) +@add_configure_tests(StandardTtkOptionsTests) class ScaleTest(AbstractWidgetTest, unittest.TestCase): OPTIONS = ( 'class', 'command', 'cursor', 'from', 'length', 'orient', 'state', 'style', 'takefocus', 'to', 'value', 'variable', ) - _conv_pixels = False + _rounds_pixels = False + _clipped = {} default_orient = 'horizontal' def setUp(self): @@ -899,7 +921,7 @@ def test_set(self): self.assertRaises(tkinter.TclError, self.scale.set, None) -@add_standard_options(StandardTtkOptionsTests) +@add_configure_tests(StandardTtkOptionsTests) class ProgressbarTest(AbstractWidgetTest, unittest.TestCase): OPTIONS = ( 'anchor', 'class', 'cursor', 'font', 'foreground', 'justify', @@ -907,7 +929,8 @@ class ProgressbarTest(AbstractWidgetTest, unittest.TestCase): 'mode', 'maximum', 'phase', 'text', 'wraplength', 
'style', 'takefocus', 'value', 'variable', ) - _conv_pixels = False + _rounds_pixels = False + _clipped = {} _allow_empty_justify = True default_orient = 'horizontal' @@ -952,24 +975,27 @@ def test_configure_value(self): @unittest.skipIf(sys.platform == 'darwin', 'ttk.Scrollbar is special on MacOSX') -@add_standard_options(StandardTtkOptionsTests) +@add_configure_tests(StandardTtkOptionsTests) class ScrollbarTest(AbstractWidgetTest, unittest.TestCase): OPTIONS = ( 'class', 'command', 'cursor', 'orient', 'style', 'takefocus', ) + _rounds_pixels = False + _clipped = {} default_orient = 'vertical' def create(self, **kwargs): return ttk.Scrollbar(self.root, **kwargs) -@add_standard_options(StandardTtkOptionsTests) +@add_configure_tests(StandardTtkOptionsTests) class NotebookTest(AbstractWidgetTest, unittest.TestCase): OPTIONS = ( 'class', 'cursor', 'height', 'padding', 'style', 'takefocus', 'width', ) - if tk_version >= (8, 7): - _conv_pixels = False + _rounds_pixels = (tk_version < (9,0)) + _converts_pixels = False + _clipped = {} def setUp(self): super().setUp() @@ -987,14 +1013,14 @@ def test_configure_height(self): if get_tk_patchlevel(self.root) < (8, 6, 15): self.checkIntegerParam(widget, 'height', 402, -402, 0) else: - self.checkPixelsParam(widget, 'height', '10c', 402, -402, 0, conv=False) + self.checkPixelsParam(widget, 'height', '10c', 402, -402, 0) def test_configure_width(self): widget = self.create() if get_tk_patchlevel(self.root) < (8, 6, 15): self.checkIntegerParam(widget, 'width', 402, -402, 0) else: - self.checkPixelsParam(widget, 'width', '10c', 402, -402, 0, conv=False) + self.checkPixelsParam(widget, 'width', '10c', 402, -402, 0) def test_tab_identifiers(self): self.nb.forget(0) @@ -1160,7 +1186,12 @@ def test_traversal(self): self.nb.select(0) - focus_identify_as = 'focus' if sys.platform != 'darwin' else '' + if sys.platform == 'darwin': + focus_identify_as = '' + elif sys.platform == 'win32': + focus_identify_as = 'focus' + else: + focus_identify_as = 'focus' if tk_version < (9,0) else 'padding' self.assertEqual(self.nb.identify(5, 5), focus_identify_as) simulate_mouse_click(self.nb, 5, 5) self.nb.focus_force() @@ -1193,7 +1224,7 @@ def test_traversal(self): self.assertEqual(self.nb.select(), str(self.child2)) -@add_standard_options(IntegerSizeTests, StandardTtkOptionsTests) +@add_configure_tests(IntegerSizeTests, StandardTtkOptionsTests) class SpinboxTest(EntryTest, unittest.TestCase): OPTIONS = ( 'background', 'class', 'command', 'cursor', 'exportselection', @@ -1370,7 +1401,7 @@ def test_configure_values(self): spin2.destroy() -@add_standard_options(StandardTtkOptionsTests) +@add_configure_tests(StandardTtkOptionsTests) class TreeviewTest(AbstractWidgetTest, unittest.TestCase): OPTIONS = ( 'class', 'columns', 'cursor', 'displaycolumns', @@ -1378,6 +1409,8 @@ class TreeviewTest(AbstractWidgetTest, unittest.TestCase): 'style', 'takefocus', 'titlecolumns', 'titleitems', 'xscrollcommand', 'yscrollcommand', ) + _rounds_pixels = False + _clipped = {} def setUp(self): super().setUp() @@ -1413,8 +1446,10 @@ def test_configure_displaycolumns(self): def test_configure_height(self): widget = self.create() - self.checkPixelsParam(widget, 'height', 100, -100, 0, '3c', conv=False) - self.checkPixelsParam(widget, 'height', 101.2, 102.6, conv=False) + self.checkPixelsParam(widget, 'height', 100, -100, 0, '3c', + conv=False) + self.checkPixelsParam(widget, 'height', 101.2, 102.6, '3c', + conv=False) def test_configure_selectmode(self): widget = self.create() @@ -1936,24 +1971,28 @@ 
def test_tag_has(self): self.assertEqual(self.tv.tag_has('tag3'), ()) -@add_standard_options(StandardTtkOptionsTests) +@add_configure_tests(StandardTtkOptionsTests) class SeparatorTest(AbstractWidgetTest, unittest.TestCase): OPTIONS = ( 'class', 'cursor', 'orient', 'style', 'takefocus', # 'state'? ) + _rounds_pixels = False + _clipped = {} default_orient = 'horizontal' def create(self, **kwargs): return ttk.Separator(self.root, **kwargs) -@add_standard_options(StandardTtkOptionsTests) +@add_configure_tests(StandardTtkOptionsTests) class SizegripTest(AbstractWidgetTest, unittest.TestCase): OPTIONS = ( 'class', 'cursor', 'style', 'takefocus', # 'state'? ) + _rounds_pixels = False + _clipped = {} def create(self, **kwargs): return ttk.Sizegrip(self.root, **kwargs) diff --git a/Lib/test/test_typing.py b/Lib/test/test_typing.py index 2f1f9e86a0bce4..aa42beca5f9256 100644 --- a/Lib/test/test_typing.py +++ b/Lib/test/test_typing.py @@ -8,6 +8,7 @@ import inspect import itertools import operator +import os import pickle import re import sys @@ -4252,9 +4253,15 @@ def test_builtin_protocol_allowlist(self): class CustomProtocol(TestCase, Protocol): pass + class CustomPathLikeProtocol(os.PathLike, Protocol): + pass + class CustomContextManager(typing.ContextManager, Protocol): pass + class CustomAsyncIterator(typing.AsyncIterator, Protocol): + pass + def test_non_runtime_protocol_isinstance_check(self): class P(Protocol): x: int diff --git a/Lib/test/test_unittest/testmock/testpatch.py b/Lib/test/test_unittest/testmock/testpatch.py index f26e74ce0bc1ba..037c021e6eafcf 100644 --- a/Lib/test/test_unittest/testmock/testpatch.py +++ b/Lib/test/test_unittest/testmock/testpatch.py @@ -745,6 +745,54 @@ def test_stop_idempotent(self): self.assertIsNone(patcher.stop()) + def test_exit_idempotent(self): + patcher = patch(foo_name, 'bar', 3) + with patcher: + patcher.stop() + + + def test_second_start_failure(self): + patcher = patch(foo_name, 'bar', 3) + patcher.start() + try: + self.assertRaises(RuntimeError, patcher.start) + finally: + patcher.stop() + + + def test_second_enter_failure(self): + patcher = patch(foo_name, 'bar', 3) + with patcher: + self.assertRaises(RuntimeError, patcher.start) + + + def test_second_start_after_stop(self): + patcher = patch(foo_name, 'bar', 3) + patcher.start() + patcher.stop() + patcher.start() + patcher.stop() + + + def test_property_setters(self): + mock_object = Mock() + mock_bar = mock_object.bar + patcher = patch.object(mock_object, 'bar', 'x') + with patcher: + self.assertEqual(patcher.is_local, False) + self.assertIs(patcher.target, mock_object) + self.assertEqual(patcher.temp_original, mock_bar) + patcher.is_local = True + patcher.target = mock_bar + patcher.temp_original = mock_object + self.assertEqual(patcher.is_local, True) + self.assertIs(patcher.target, mock_bar) + self.assertEqual(patcher.temp_original, mock_object) + # if changes are left intact, they may lead to disruption as shown below (it might be what someone needs though) + self.assertEqual(mock_bar.bar, mock_object) + self.assertEqual(mock_object.bar, 'x') + + def test_patchobject_start_stop(self): original = something patcher = patch.object(PTModule, 'something', 'foo') @@ -1098,7 +1146,7 @@ def test_new_callable_patch(self): self.assertIsNot(m1, m2) for mock in m1, m2: - self.assertNotCallable(m1) + self.assertNotCallable(mock) def test_new_callable_patch_object(self): @@ -1111,7 +1159,7 @@ def test_new_callable_patch_object(self): self.assertIsNot(m1, m2) for mock in m1, m2: - 
self.assertNotCallable(m1) + self.assertNotCallable(mock) def test_new_callable_keyword_arguments(self): diff --git a/Lib/test/test_urllib.py b/Lib/test/test_urllib.py index 28369b21db06d4..a204ef41c3ce90 100644 --- a/Lib/test/test_urllib.py +++ b/Lib/test/test_urllib.py @@ -7,11 +7,9 @@ import email.message import io import unittest -from unittest.mock import patch from test import support from test.support import os_helper from test.support import socket_helper -from test.support import warnings_helper import os try: import ssl @@ -20,7 +18,6 @@ import sys import tempfile -from base64 import b64encode import collections @@ -35,32 +32,6 @@ def hexescape(char): hex_repr = "0%s" % hex_repr return "%" + hex_repr -# Shortcut for testing FancyURLopener -_urlopener = None - - -def urlopen(url, data=None, proxies=None): - """urlopen(url [, data]) -> open file-like object""" - global _urlopener - if proxies is not None: - opener = urllib.request.FancyURLopener(proxies=proxies) - elif not _urlopener: - opener = FancyURLopener() - _urlopener = opener - else: - opener = _urlopener - if data is None: - return opener.open(url) - else: - return opener.open(url, data) - - -def FancyURLopener(): - with warnings_helper.check_warnings( - ('FancyURLopener style of invoking requests is deprecated.', - DeprecationWarning)): - return urllib.request.FancyURLopener() - def fakehttp(fakedata, mock_close=False): class FakeSocket(io.BytesIO): @@ -119,26 +90,6 @@ def unfakehttp(self): http.client.HTTPConnection = self._connection_class -class FakeFTPMixin(object): - def fakeftp(self): - class FakeFtpWrapper(object): - def __init__(self, user, passwd, host, port, dirs, timeout=None, - persistent=True): - pass - - def retrfile(self, file, type): - return io.BytesIO(), 0 - - def close(self): - pass - - self._ftpwrapper_class = urllib.request.ftpwrapper - urllib.request.ftpwrapper = FakeFtpWrapper - - def unfakeftp(self): - urllib.request.ftpwrapper = self._ftpwrapper_class - - class urlopen_FileTests(unittest.TestCase): """Test urlopen() opening a temporary file. 
@@ -158,7 +109,7 @@ def setUp(self): f.close() self.pathname = os_helper.TESTFN self.quoted_pathname = urllib.parse.quote(self.pathname) - self.returned_obj = urlopen("file:%s" % self.quoted_pathname) + self.returned_obj = urllib.request.urlopen("file:%s" % self.quoted_pathname) def tearDown(self): """Shut down the open object""" @@ -205,7 +156,7 @@ def test_headers(self): self.assertIsInstance(self.returned_obj.headers, email.message.Message) def test_url(self): - self.assertEqual(self.returned_obj.url, self.quoted_pathname) + self.assertEqual(self.returned_obj.url, "file://" + self.quoted_pathname) def test_status(self): self.assertIsNone(self.returned_obj.status) @@ -214,7 +165,7 @@ def test_info(self): self.assertIsInstance(self.returned_obj.info(), email.message.Message) def test_geturl(self): - self.assertEqual(self.returned_obj.geturl(), self.quoted_pathname) + self.assertEqual(self.returned_obj.geturl(), "file://" + self.quoted_pathname) def test_getcode(self): self.assertIsNone(self.returned_obj.getcode()) @@ -339,13 +290,13 @@ def test_getproxies_environment_prefer_lowercase(self): self.assertEqual('http://somewhere:3128', proxies['http']) -class urlopen_HttpTests(unittest.TestCase, FakeHTTPMixin, FakeFTPMixin): +class urlopen_HttpTests(unittest.TestCase, FakeHTTPMixin): """Test urlopen() opening a fake http connection.""" def check_read(self, ver): self.fakehttp(b"HTTP/" + ver + b" 200 OK\r\n\r\nHello!") try: - fp = urlopen("http://python.org/") + fp = urllib.request.urlopen("http://python.org/") self.assertEqual(fp.readline(), b"Hello!") self.assertEqual(fp.readline(), b"") self.assertEqual(fp.geturl(), 'http://python.org/') @@ -366,8 +317,8 @@ def test_url_fragment(self): def test_willclose(self): self.fakehttp(b"HTTP/1.1 200 OK\r\n\r\nHello!") try: - resp = urlopen("http://www.python.org") - self.assertTrue(resp.fp.will_close) + resp = urllib.request.urlopen("http://www.python.org") + self.assertTrue(resp.will_close) finally: self.unfakehttp() @@ -392,9 +343,6 @@ def test_url_path_with_control_char_rejected(self): with self.assertRaisesRegex( InvalidURL, f"contain control.*{escaped_char_repr}"): urllib.request.urlopen(f"https:{schemeless_url}") - # This code path quotes the URL so there is no injection. - resp = urlopen(f"http:{schemeless_url}") - self.assertNotIn(char, resp.geturl()) finally: self.unfakehttp() @@ -416,11 +364,6 @@ def test_url_path_with_newline_header_injection_rejected(self): urllib.request.urlopen(f"http:{schemeless_url}") with self.assertRaisesRegex(InvalidURL, r"contain control.*\\n"): urllib.request.urlopen(f"https:{schemeless_url}") - # This code path quotes the URL so there is no injection. 
- resp = urlopen(f"http:{schemeless_url}") - self.assertNotIn(' ', resp.geturl()) - self.assertNotIn('\r', resp.geturl()) - self.assertNotIn('\n', resp.geturl()) finally: self.unfakehttp() @@ -435,9 +378,9 @@ def test_url_host_with_control_char_rejected(self): InvalidURL = http.client.InvalidURL with self.assertRaisesRegex( InvalidURL, f"contain control.*{escaped_char_repr}"): - urlopen(f"http:{schemeless_url}") + urllib.request.urlopen(f"http:{schemeless_url}") with self.assertRaisesRegex(InvalidURL, f"contain control.*{escaped_char_repr}"): - urlopen(f"https:{schemeless_url}") + urllib.request.urlopen(f"https:{schemeless_url}") finally: self.unfakehttp() @@ -450,9 +393,9 @@ def test_url_host_with_newline_header_injection_rejected(self): InvalidURL = http.client.InvalidURL with self.assertRaisesRegex( InvalidURL, r"contain control.*\\r"): - urlopen(f"http:{schemeless_url}") + urllib.request.urlopen(f"http:{schemeless_url}") with self.assertRaisesRegex(InvalidURL, r"contain control.*\\n"): - urlopen(f"https:{schemeless_url}") + urllib.request.urlopen(f"https:{schemeless_url}") finally: self.unfakehttp() @@ -476,7 +419,7 @@ def test_read_bogus(self): Content-Type: text/html; charset=iso-8859-1 ''', mock_close=True) try: - self.assertRaises(OSError, urlopen, "http://python.org/") + self.assertRaises(OSError, urllib.request.urlopen, "http://python.org/") finally: self.unfakehttp() @@ -492,20 +435,20 @@ def test_invalid_redirect(self): try: msg = "Redirection to url 'file:" with self.assertRaisesRegex(urllib.error.HTTPError, msg): - urlopen("http://python.org/") + urllib.request.urlopen("http://python.org/") finally: self.unfakehttp() def test_redirect_limit_independent(self): # Ticket #12923: make sure independent requests each use their # own retry limit. - for i in range(FancyURLopener().maxtries): + for i in range(urllib.request.HTTPRedirectHandler.max_redirections): self.fakehttp(b'''HTTP/1.1 302 Found Location: file://guidocomputer.athome.com:/python/license Connection: close ''', mock_close=True) try: - self.assertRaises(urllib.error.HTTPError, urlopen, + self.assertRaises(urllib.error.HTTPError, urllib.request.urlopen, "http://something") finally: self.unfakehttp() @@ -515,14 +458,14 @@ def test_empty_socket(self): # data. 
(#1680230) self.fakehttp(b'') try: - self.assertRaises(OSError, urlopen, "http://something") + self.assertRaises(OSError, urllib.request.urlopen, "http://something") finally: self.unfakehttp() def test_missing_localfile(self): # Test for #10836 with self.assertRaises(urllib.error.URLError) as e: - urlopen('file://localhost/a/file/which/doesnot/exists.py') + urllib.request.urlopen('file://localhost/a/file/which/doesnot/exists.py') self.assertTrue(e.exception.filename) self.assertTrue(e.exception.reason) @@ -531,71 +474,28 @@ def test_file_notexists(self): tmp_fileurl = 'file://localhost/' + tmp_file.replace(os.path.sep, '/') try: self.assertTrue(os.path.exists(tmp_file)) - with urlopen(tmp_fileurl) as fobj: + with urllib.request.urlopen(tmp_fileurl) as fobj: self.assertTrue(fobj) finally: os.close(fd) os.unlink(tmp_file) self.assertFalse(os.path.exists(tmp_file)) with self.assertRaises(urllib.error.URLError): - urlopen(tmp_fileurl) + urllib.request.urlopen(tmp_fileurl) def test_ftp_nohost(self): test_ftp_url = 'ftp:///path' with self.assertRaises(urllib.error.URLError) as e: - urlopen(test_ftp_url) + urllib.request.urlopen(test_ftp_url) self.assertFalse(e.exception.filename) self.assertTrue(e.exception.reason) def test_ftp_nonexisting(self): with self.assertRaises(urllib.error.URLError) as e: - urlopen('ftp://localhost/a/file/which/doesnot/exists.py') + urllib.request.urlopen('ftp://localhost/a/file/which/doesnot/exists.py') self.assertFalse(e.exception.filename) self.assertTrue(e.exception.reason) - @patch.object(urllib.request, 'MAXFTPCACHE', 0) - def test_ftp_cache_pruning(self): - self.fakeftp() - try: - urllib.request.ftpcache['test'] = urllib.request.ftpwrapper('user', 'pass', 'localhost', 21, []) - urlopen('ftp://localhost') - finally: - self.unfakeftp() - - def test_userpass_inurl(self): - self.fakehttp(b"HTTP/1.0 200 OK\r\n\r\nHello!") - try: - fp = urlopen("http://user:pass@python.org/") - self.assertEqual(fp.readline(), b"Hello!") - self.assertEqual(fp.readline(), b"") - self.assertEqual(fp.geturl(), 'http://user:pass@python.org/') - self.assertEqual(fp.getcode(), 200) - finally: - self.unfakehttp() - - def test_userpass_inurl_w_spaces(self): - self.fakehttp(b"HTTP/1.0 200 OK\r\n\r\nHello!") - try: - userpass = "a b:c d" - url = "http://{}@python.org/".format(userpass) - fakehttp_wrapper = http.client.HTTPConnection - authorization = ("Authorization: Basic %s\r\n" % - b64encode(userpass.encode("ASCII")).decode("ASCII")) - fp = urlopen(url) - # The authorization header must be in place - self.assertIn(authorization, fakehttp_wrapper.buf.decode("UTF-8")) - self.assertEqual(fp.readline(), b"Hello!") - self.assertEqual(fp.readline(), b"") - # the spaces are quoted in URL so no match - self.assertNotEqual(fp.geturl(), url) - self.assertEqual(fp.getcode(), 200) - finally: - self.unfakehttp() - - def test_URLopener_deprecation(self): - with warnings_helper.check_warnings(('',DeprecationWarning)): - urllib.request.URLopener() - class urlopen_DataTests(unittest.TestCase): """Test urlopen() opening a data URL.""" @@ -709,10 +609,6 @@ def tearDown(self): def constructLocalFileUrl(self, filePath): filePath = os.path.abspath(filePath) - try: - filePath.encode("utf-8") - except UnicodeEncodeError: - raise unittest.SkipTest("filePath is not encodable to utf8") return "file://%s" % urllib.request.pathname2url(filePath) def createNewTempFile(self, data=b""): @@ -1533,8 +1429,9 @@ def test_pathname2url_win(self): self.assertEqual(fn('C:\\a\\b%#c'), '///C:/a/b%25%23c') 
self.assertEqual(fn('C:\\a\\b\xe9'), '///C:/a/b%C3%A9') self.assertEqual(fn('C:\\foo\\bar\\spam.foo'), "///C:/foo/bar/spam.foo") - # Long drive letter - self.assertRaises(IOError, fn, "XX:\\") + # NTFS alternate data streams + self.assertEqual(fn('C:\\foo:bar'), '///C:/foo%3Abar') + self.assertEqual(fn('foo:bar'), 'foo%3Abar') # No drive letter self.assertEqual(fn("\\folder\\test\\"), '/folder/test/') self.assertEqual(fn("\\\\folder\\test\\"), '//folder/test/') @@ -1542,9 +1439,14 @@ def test_pathname2url_win(self): self.assertEqual(fn('\\\\some\\share\\'), '//some/share/') self.assertEqual(fn('\\\\some\\share\\a\\b.c'), '//some/share/a/b.c') self.assertEqual(fn('\\\\some\\share\\a\\b%#c\xe9'), '//some/share/a/b%25%23c%C3%A9') + # Alternate path separator + self.assertEqual(fn('C:/a/b.c'), '///C:/a/b.c') + self.assertEqual(fn('//some/share/a/b.c'), '//some/share/a/b.c') + self.assertEqual(fn('//?/C:/dir'), '///C:/dir') + self.assertEqual(fn('//?/unc/server/share/dir'), '//server/share/dir') # Round-tripping urls = ['///C:', - '///folder/test/', + '/folder/test/', '///C:/foo/bar/spam.foo'] for url in urls: self.assertEqual(fn(urllib.request.url2pathname(url)), url) @@ -1557,6 +1459,13 @@ def test_pathname2url_posix(self): self.assertEqual(fn('/a/b.c'), '/a/b.c') self.assertEqual(fn('/a/b%#c'), '/a/b%25%23c') + @unittest.skipUnless(os_helper.FS_NONASCII, 'need os_helper.FS_NONASCII') + def test_pathname2url_nonascii(self): + encoding = sys.getfilesystemencoding() + errors = sys.getfilesystemencodeerrors() + url = urllib.parse.quote(os_helper.FS_NONASCII, encoding=encoding, errors=errors) + self.assertEqual(urllib.request.pathname2url(os_helper.FS_NONASCII), url) + @unittest.skipUnless(sys.platform == 'win32', 'test specific to Windows pathnames.') def test_url2pathname_win(self): @@ -1568,7 +1477,7 @@ def test_url2pathname_win(self): self.assertEqual(fn('/C|//'), 'C:\\\\') self.assertEqual(fn('///C|/path'), 'C:\\path') # No DOS drive - self.assertEqual(fn("///C/test/"), '\\\\\\C\\test\\') + self.assertEqual(fn("///C/test/"), '\\C\\test\\') self.assertEqual(fn("////C/test/"), '\\\\C\\test\\') # DOS drive paths self.assertEqual(fn('C:/path/to/file'), 'C:\\path\\to\\file') @@ -1583,16 +1492,18 @@ def test_url2pathname_win(self): # UNC paths self.assertEqual(fn('//server/path/to/file'), '\\\\server\\path\\to\\file') self.assertEqual(fn('////server/path/to/file'), '\\\\server\\path\\to\\file') - self.assertEqual(fn('/////server/path/to/file'), '\\\\\\server\\path\\to\\file') + self.assertEqual(fn('/////server/path/to/file'), '\\\\server\\path\\to\\file') # Localhost paths self.assertEqual(fn('//localhost/C:/path/to/file'), 'C:\\path\\to\\file') self.assertEqual(fn('//localhost/C|/path/to/file'), 'C:\\path\\to\\file') + self.assertEqual(fn('//localhost/path/to/file'), '\\path\\to\\file') + self.assertEqual(fn('//localhost//server/path/to/file'), '\\\\server\\path\\to\\file') # Percent-encoded forward slashes are preserved for backwards compatibility self.assertEqual(fn('C:/foo%2fbar'), 'C:\\foo/bar') self.assertEqual(fn('//server/share/foo%2fbar'), '\\\\server\\share\\foo/bar') # Round-tripping paths = ['C:', - r'\\\C\test\\', + r'\C\test\\', r'C:\foo\bar\spam.foo'] for path in paths: self.assertEqual(fn(urllib.request.pathname2url(path)), path) @@ -1603,9 +1514,18 @@ def test_url2pathname_posix(self): fn = urllib.request.url2pathname self.assertEqual(fn('/foo/bar'), '/foo/bar') self.assertEqual(fn('//foo/bar'), '//foo/bar') - self.assertEqual(fn('///foo/bar'), '///foo/bar') - 
self.assertEqual(fn('////foo/bar'), '////foo/bar') - self.assertEqual(fn('//localhost/foo/bar'), '//localhost/foo/bar') + self.assertEqual(fn('///foo/bar'), '/foo/bar') + self.assertEqual(fn('////foo/bar'), '//foo/bar') + self.assertEqual(fn('//localhost/foo/bar'), '/foo/bar') + + @unittest.skipUnless(os_helper.FS_NONASCII, 'need os_helper.FS_NONASCII') + def test_url2pathname_nonascii(self): + encoding = sys.getfilesystemencoding() + errors = sys.getfilesystemencodeerrors() + url = os_helper.FS_NONASCII + self.assertEqual(urllib.request.url2pathname(url), os_helper.FS_NONASCII) + url = urllib.parse.quote(url, encoding=encoding, errors=errors) + self.assertEqual(urllib.request.url2pathname(url), os_helper.FS_NONASCII) class Utility_Tests(unittest.TestCase): """Testcase to test the various utility functions in the urllib.""" @@ -1615,56 +1535,6 @@ def test_thishost(self): self.assertIsInstance(urllib.request.thishost(), tuple) -class URLopener_Tests(FakeHTTPMixin, unittest.TestCase): - """Testcase to test the open method of URLopener class.""" - - def test_quoted_open(self): - class DummyURLopener(urllib.request.URLopener): - def open_spam(self, url): - return url - with warnings_helper.check_warnings( - ('DummyURLopener style of invoking requests is deprecated.', - DeprecationWarning)): - self.assertEqual(DummyURLopener().open( - 'spam://example/ /'),'//example/%20/') - - # test the safe characters are not quoted by urlopen - self.assertEqual(DummyURLopener().open( - "spam://c:|windows%/:=&?~#+!$,;'@()*[]|/path/"), - "//c:|windows%/:=&?~#+!$,;'@()*[]|/path/") - - @warnings_helper.ignore_warnings(category=DeprecationWarning) - def test_urlopener_retrieve_file(self): - with os_helper.temp_dir() as tmpdir: - fd, tmpfile = tempfile.mkstemp(dir=tmpdir) - os.close(fd) - fileurl = "file:" + urllib.request.pathname2url(tmpfile) - filename, _ = urllib.request.URLopener().retrieve(fileurl) - # Some buildbots have TEMP folder that uses a lowercase drive letter. 
- self.assertEqual(os.path.normcase(filename), os.path.normcase(tmpfile)) - - @warnings_helper.ignore_warnings(category=DeprecationWarning) - def test_urlopener_retrieve_remote(self): - url = "http://www.python.org/file.txt" - self.fakehttp(b"HTTP/1.1 200 OK\r\n\r\nHello!") - self.addCleanup(self.unfakehttp) - filename, _ = urllib.request.URLopener().retrieve(url) - self.assertEqual(os.path.splitext(filename)[1], ".txt") - - @warnings_helper.ignore_warnings(category=DeprecationWarning) - def test_local_file_open(self): - # bpo-35907, CVE-2019-9948: urllib must reject local_file:// scheme - class DummyURLopener(urllib.request.URLopener): - def open_local_file(self, url): - return url - for url in ('local_file://example', 'local-file://example'): - self.assertRaises(OSError, urllib.request.urlopen, url) - self.assertRaises(OSError, urllib.request.URLopener().open, url) - self.assertRaises(OSError, urllib.request.URLopener().retrieve, url) - self.assertRaises(OSError, DummyURLopener().open, url) - self.assertRaises(OSError, DummyURLopener().retrieve, url) - - class RequestTests(unittest.TestCase): """Unit tests for urllib.request.Request.""" diff --git a/Lib/test/test_urllib2.py b/Lib/test/test_urllib2.py index b90ccc2f125b93..99ad11cf0552eb 100644 --- a/Lib/test/test_urllib2.py +++ b/Lib/test/test_urllib2.py @@ -718,10 +718,6 @@ def test_processors(self): def sanepathname2url(path): - try: - path.encode("utf-8") - except UnicodeEncodeError: - raise unittest.SkipTest("path is not encodable to utf8") urlpath = urllib.request.pathname2url(path) if os.name == "nt" and urlpath.startswith("///"): urlpath = urlpath[2:] diff --git a/Lib/test/test_urllibnet.py b/Lib/test/test_urllibnet.py index 49a3b5afdebb2f..f824dddf711761 100644 --- a/Lib/test/test_urllibnet.py +++ b/Lib/test/test_urllibnet.py @@ -5,6 +5,7 @@ import contextlib import socket +import urllib.error import urllib.parse import urllib.request import os @@ -101,13 +102,10 @@ def test_getcode(self): # test getcode() with the fancy opener to get 404 error codes URL = self.url + "XXXinvalidXXX" with socket_helper.transient_internet(URL): - with self.assertWarns(DeprecationWarning): - open_url = urllib.request.FancyURLopener().open(URL) - try: - code = open_url.getcode() - finally: - open_url.close() - self.assertEqual(code, 404) + with self.assertRaises(urllib.error.URLError) as e: + with urllib.request.urlopen(URL): + pass + self.assertEqual(e.exception.code, 404) @support.requires_resource('walltime') def test_bad_address(self): diff --git a/Lib/test/test_urlparse.py b/Lib/test/test_urlparse.py index 297fb4831c16bf..4516bdea6adb19 100644 --- a/Lib/test/test_urlparse.py +++ b/Lib/test/test_urlparse.py @@ -1314,9 +1314,17 @@ def test_parse_qsl_bytes(self): def test_parse_qsl_false_value(self): kwargs = dict(keep_blank_values=True, strict_parsing=True) - for x in '', b'', None, 0, 0.0, [], {}, memoryview(b''): + for x in '', b'', None, memoryview(b''): self.assertEqual(urllib.parse.parse_qsl(x, **kwargs), []) self.assertRaises(ValueError, urllib.parse.parse_qsl, x, separator=1) + for x in 0, 0.0, [], {}: + with self.assertWarns(DeprecationWarning) as cm: + self.assertEqual(urllib.parse.parse_qsl(x, **kwargs), []) + self.assertEqual(cm.filename, __file__) + with self.assertWarns(DeprecationWarning) as cm: + self.assertEqual(urllib.parse.parse_qs(x, **kwargs), {}) + self.assertEqual(cm.filename, __file__) + self.assertRaises(ValueError, urllib.parse.parse_qsl, x, separator=1) def test_parse_qsl_errors(self): self.assertRaises(TypeError, 
urllib.parse.parse_qsl, list(b'a=b')) diff --git a/Lib/test/test_uuid.py b/Lib/test/test_uuid.py index e177464c00f7a6..7bd26a8ca34b62 100755 --- a/Lib/test/test_uuid.py +++ b/Lib/test/test_uuid.py @@ -8,8 +8,10 @@ import io import os import pickle +import random import sys import weakref +from itertools import product from unittest import mock py_uuid = import_helper.import_fresh_module('uuid', blocked=['_uuid']) @@ -267,7 +269,7 @@ def test_exceptions(self): # Version number out of range. badvalue(lambda: self.uuid.UUID('00'*16, version=0)) - badvalue(lambda: self.uuid.UUID('00'*16, version=6)) + badvalue(lambda: self.uuid.UUID('00'*16, version=42)) # Integer value out of range. badvalue(lambda: self.uuid.UUID(int=-1)) @@ -681,6 +683,37 @@ def test_uuid5(self): equal(u, self.uuid.UUID(v)) equal(str(u), v) + def test_uuid8(self): + equal = self.assertEqual + u = self.uuid.uuid8() + + equal(u.variant, self.uuid.RFC_4122) + equal(u.version, 8) + + for (_, hi, mid, lo) in product( + range(10), # repeat 10 times + [None, 0, random.getrandbits(48)], + [None, 0, random.getrandbits(12)], + [None, 0, random.getrandbits(62)], + ): + u = self.uuid.uuid8(hi, mid, lo) + equal(u.variant, self.uuid.RFC_4122) + equal(u.version, 8) + if hi is not None: + equal((u.int >> 80) & 0xffffffffffff, hi) + if mid is not None: + equal((u.int >> 64) & 0xfff, mid) + if lo is not None: + equal(u.int & 0x3fffffffffffffff, lo) + + def test_uuid8_uniqueness(self): + # Test that UUIDv8-generated values are unique + # (up to a negligible probability of failure). + u1 = self.uuid.uuid8() + u2 = self.uuid.uuid8() + self.assertNotEqual(u1.int, u2.int) + self.assertEqual(u1.version, u2.version) + @support.requires_fork() def testIssue8621(self): # On at least some versions of OSX self.uuid.uuid4 generates diff --git a/Lib/test/test_warnings/__init__.py b/Lib/test/test_warnings/__init__.py index 8b59630717e790..4e3c877896f295 100644 --- a/Lib/test/test_warnings/__init__.py +++ b/Lib/test/test_warnings/__init__.py @@ -533,6 +533,18 @@ def test_skip_file_prefixes(self): warning_tests.package("prefix02", stacklevel=3) self.assertIn("unittest", w[-1].filename) + def test_skip_file_prefixes_file_path(self): + # see: gh-126209 + with warnings_state(self.module): + skipped = warning_tests.__file__ + with original_warnings.catch_warnings( + record=True, module=self.module, + ) as w: + warning_tests.outer("msg", skip_file_prefixes=(skipped,)) + + self.assertEqual(len(w), 1) + self.assertNotEqual(w[-1].filename, skipped) + def test_skip_file_prefixes_type_errors(self): with warnings_state(self.module): warn = warning_tests.warnings.warn diff --git a/Lib/test/test_warnings/data/stacklevel.py b/Lib/test/test_warnings/data/stacklevel.py index c6dd24733b3b74..fe36242d3d20c2 100644 --- a/Lib/test/test_warnings/data/stacklevel.py +++ b/Lib/test/test_warnings/data/stacklevel.py @@ -4,11 +4,13 @@ import warnings from test.test_warnings.data import package_helper -def outer(message, stacklevel=1): - inner(message, stacklevel) -def inner(message, stacklevel=1): - warnings.warn(message, stacklevel=stacklevel) +def outer(message, stacklevel=1, skip_file_prefixes=()): + inner(message, stacklevel, skip_file_prefixes) + +def inner(message, stacklevel=1, skip_file_prefixes=()): + warnings.warn(message, stacklevel=stacklevel, + skip_file_prefixes=skip_file_prefixes) def package(message, *, stacklevel): package_helper.inner_api(message, stacklevel=stacklevel, diff --git a/Lib/test/translationdata/getopt/msgids.txt 
b/Lib/test/translationdata/getopt/msgids.txt new file mode 100644 index 00000000000000..1ffab1f31abad5 --- /dev/null +++ b/Lib/test/translationdata/getopt/msgids.txt @@ -0,0 +1,6 @@ +option -%s not recognized +option -%s requires argument +option --%s must not have an argument +option --%s not a unique prefix +option --%s not recognized +option --%s requires argument \ No newline at end of file diff --git a/Lib/test/translationdata/optparse/msgids.txt b/Lib/test/translationdata/optparse/msgids.txt new file mode 100644 index 00000000000000..ac5317c736af8c --- /dev/null +++ b/Lib/test/translationdata/optparse/msgids.txt @@ -0,0 +1,14 @@ +%prog [options] +%s option does not take a value +Options +Usage +Usage: %s\n +ambiguous option: %s (%s?) +complex +floating-point +integer +no such option: %s +option %s: invalid %s value: %r +option %s: invalid choice: %r (choose from %s) +show program's version number and exit +show this help message and exit \ No newline at end of file diff --git a/Lib/textwrap.py b/Lib/textwrap.py index 7ca393d1c371aa..1bf07aa46cad99 100644 --- a/Lib/textwrap.py +++ b/Lib/textwrap.py @@ -2,7 +2,7 @@ """ # Copyright (C) 1999-2001 Gregory P. Ward. -# Copyright (C) 2002, 2003 Python Software Foundation. +# Copyright (C) 2002 Python Software Foundation. # Written by Greg Ward import re diff --git a/Lib/tkinter/ttk.py b/Lib/tkinter/ttk.py index 073b3ae20797c3..8ddb7f97e3b233 100644 --- a/Lib/tkinter/ttk.py +++ b/Lib/tkinter/ttk.py @@ -321,6 +321,8 @@ def _tclobj_to_py(val): elif hasattr(val, 'typename'): # some other (single) Tcl object val = _convert_stringval(val) + if isinstance(val, tuple) and len(val) == 0: + return '' return val def tclobjs_to_py(adict): diff --git a/Lib/tomllib/_parser.py b/Lib/tomllib/_parser.py index 5671326646ca5a..4d208bcfb4a9a6 100644 --- a/Lib/tomllib/_parser.py +++ b/Lib/tomllib/_parser.py @@ -8,6 +8,7 @@ import string from types import MappingProxyType from typing import Any, BinaryIO, NamedTuple +import warnings from ._re import ( RE_DATETIME, @@ -50,8 +51,68 @@ ) +class DEPRECATED_DEFAULT: + """Sentinel to be used as default arg during deprecation + period of TOMLDecodeError's free-form arguments.""" + + class TOMLDecodeError(ValueError): - """An error raised if a document is not valid TOML.""" + """An error raised if a document is not valid TOML. + + Adds the following attributes to ValueError: + msg: The unformatted error message + doc: The TOML document being parsed + pos: The index of doc where parsing failed + lineno: The line corresponding to pos + colno: The column corresponding to pos + """ + + def __init__( + self, + msg: str = DEPRECATED_DEFAULT, # type: ignore[assignment] + doc: str = DEPRECATED_DEFAULT, # type: ignore[assignment] + pos: Pos = DEPRECATED_DEFAULT, # type: ignore[assignment] + *args: Any, + ): + if ( + args + or not isinstance(msg, str) + or not isinstance(doc, str) + or not isinstance(pos, int) + ): + warnings.warn( + "Free-form arguments for TOMLDecodeError are deprecated. 
" + "Please set 'msg' (str), 'doc' (str) and 'pos' (int) arguments only.", + DeprecationWarning, + stacklevel=2, + ) + if pos is not DEPRECATED_DEFAULT: # type: ignore[comparison-overlap] + args = pos, *args + if doc is not DEPRECATED_DEFAULT: # type: ignore[comparison-overlap] + args = doc, *args + if msg is not DEPRECATED_DEFAULT: # type: ignore[comparison-overlap] + args = msg, *args + ValueError.__init__(self, *args) + return + + lineno = doc.count("\n", 0, pos) + 1 + if lineno == 1: + colno = pos + 1 + else: + colno = pos - doc.rindex("\n", 0, pos) + + if pos >= len(doc): + coord_repr = "end of document" + else: + coord_repr = f"line {lineno}, column {colno}" + errmsg = f"{msg} (at {coord_repr})" + ValueError.__init__(self, errmsg) + + self.msg = msg + self.doc = doc + self.pos = pos + self.lineno = lineno + self.colno = colno def load(fp: BinaryIO, /, *, parse_float: ParseFloat = float) -> dict[str, Any]: @@ -118,7 +179,7 @@ def loads(s: str, /, *, parse_float: ParseFloat = float) -> dict[str, Any]: # n pos, header = create_dict_rule(src, pos, out) pos = skip_chars(src, pos, TOML_WS) elif char != "#": - raise suffixed_err(src, pos, "Invalid statement") + raise TOMLDecodeError("Invalid statement", src, pos) # 3. Skip comment pos = skip_comment(src, pos) @@ -129,8 +190,8 @@ def loads(s: str, /, *, parse_float: ParseFloat = float) -> dict[str, Any]: # n except IndexError: break if char != "\n": - raise suffixed_err( - src, pos, "Expected newline or end of document after a statement" + raise TOMLDecodeError( + "Expected newline or end of document after a statement", src, pos ) pos += 1 @@ -256,12 +317,12 @@ def skip_until( except ValueError: new_pos = len(src) if error_on_eof: - raise suffixed_err(src, new_pos, f"Expected {expect!r}") from None + raise TOMLDecodeError(f"Expected {expect!r}", src, new_pos) from None if not error_on.isdisjoint(src[pos:new_pos]): while src[pos] not in error_on: pos += 1 - raise suffixed_err(src, pos, f"Found invalid character {src[pos]!r}") + raise TOMLDecodeError(f"Found invalid character {src[pos]!r}", src, pos) return new_pos @@ -292,15 +353,17 @@ def create_dict_rule(src: str, pos: Pos, out: Output) -> tuple[Pos, Key]: pos, key = parse_key(src, pos) if out.flags.is_(key, Flags.EXPLICIT_NEST) or out.flags.is_(key, Flags.FROZEN): - raise suffixed_err(src, pos, f"Cannot declare {key} twice") + raise TOMLDecodeError(f"Cannot declare {key} twice", src, pos) out.flags.set(key, Flags.EXPLICIT_NEST, recursive=False) try: out.data.get_or_create_nest(key) except KeyError: - raise suffixed_err(src, pos, "Cannot overwrite a value") from None + raise TOMLDecodeError("Cannot overwrite a value", src, pos) from None if not src.startswith("]", pos): - raise suffixed_err(src, pos, "Expected ']' at the end of a table declaration") + raise TOMLDecodeError( + "Expected ']' at the end of a table declaration", src, pos + ) return pos + 1, key @@ -310,7 +373,7 @@ def create_list_rule(src: str, pos: Pos, out: Output) -> tuple[Pos, Key]: pos, key = parse_key(src, pos) if out.flags.is_(key, Flags.FROZEN): - raise suffixed_err(src, pos, f"Cannot mutate immutable namespace {key}") + raise TOMLDecodeError(f"Cannot mutate immutable namespace {key}", src, pos) # Free the namespace now that it points to another empty list item... 
out.flags.unset_all(key) # ...but this key precisely is still prohibited from table declaration @@ -318,10 +381,12 @@ def create_list_rule(src: str, pos: Pos, out: Output) -> tuple[Pos, Key]: try: out.data.append_nest_to_list(key) except KeyError: - raise suffixed_err(src, pos, "Cannot overwrite a value") from None + raise TOMLDecodeError("Cannot overwrite a value", src, pos) from None if not src.startswith("]]", pos): - raise suffixed_err(src, pos, "Expected ']]' at the end of an array declaration") + raise TOMLDecodeError( + "Expected ']]' at the end of an array declaration", src, pos + ) return pos + 2, key @@ -336,22 +401,22 @@ def key_value_rule( for cont_key in relative_path_cont_keys: # Check that dotted key syntax does not redefine an existing table if out.flags.is_(cont_key, Flags.EXPLICIT_NEST): - raise suffixed_err(src, pos, f"Cannot redefine namespace {cont_key}") + raise TOMLDecodeError(f"Cannot redefine namespace {cont_key}", src, pos) # Containers in the relative path can't be opened with the table syntax or # dotted key/value syntax in following table sections. out.flags.add_pending(cont_key, Flags.EXPLICIT_NEST) if out.flags.is_(abs_key_parent, Flags.FROZEN): - raise suffixed_err( - src, pos, f"Cannot mutate immutable namespace {abs_key_parent}" + raise TOMLDecodeError( + f"Cannot mutate immutable namespace {abs_key_parent}", src, pos ) try: nest = out.data.get_or_create_nest(abs_key_parent) except KeyError: - raise suffixed_err(src, pos, "Cannot overwrite a value") from None + raise TOMLDecodeError("Cannot overwrite a value", src, pos) from None if key_stem in nest: - raise suffixed_err(src, pos, "Cannot overwrite a value") + raise TOMLDecodeError("Cannot overwrite a value", src, pos) # Mark inline table and array namespaces recursively immutable if isinstance(value, (dict, list)): out.flags.set(header + key, Flags.FROZEN, recursive=True) @@ -368,7 +433,7 @@ def parse_key_value_pair( except IndexError: char = None if char != "=": - raise suffixed_err(src, pos, "Expected '=' after a key in a key/value pair") + raise TOMLDecodeError("Expected '=' after a key in a key/value pair", src, pos) pos += 1 pos = skip_chars(src, pos, TOML_WS) pos, value = parse_value(src, pos, parse_float) @@ -406,7 +471,7 @@ def parse_key_part(src: str, pos: Pos) -> tuple[Pos, str]: return parse_literal_str(src, pos) if char == '"': return parse_one_line_basic_str(src, pos) - raise suffixed_err(src, pos, "Invalid initial character for a key part") + raise TOMLDecodeError("Invalid initial character for a key part", src, pos) def parse_one_line_basic_str(src: str, pos: Pos) -> tuple[Pos, str]: @@ -430,7 +495,7 @@ def parse_array(src: str, pos: Pos, parse_float: ParseFloat) -> tuple[Pos, list] if c == "]": return pos + 1, array if c != ",": - raise suffixed_err(src, pos, "Unclosed array") + raise TOMLDecodeError("Unclosed array", src, pos) pos += 1 pos = skip_comments_and_array_ws(src, pos) @@ -450,20 +515,20 @@ def parse_inline_table(src: str, pos: Pos, parse_float: ParseFloat) -> tuple[Pos pos, key, value = parse_key_value_pair(src, pos, parse_float) key_parent, key_stem = key[:-1], key[-1] if flags.is_(key, Flags.FROZEN): - raise suffixed_err(src, pos, f"Cannot mutate immutable namespace {key}") + raise TOMLDecodeError(f"Cannot mutate immutable namespace {key}", src, pos) try: nest = nested_dict.get_or_create_nest(key_parent, access_lists=False) except KeyError: - raise suffixed_err(src, pos, "Cannot overwrite a value") from None + raise TOMLDecodeError("Cannot overwrite a value", src, pos) from 
None if key_stem in nest: - raise suffixed_err(src, pos, f"Duplicate inline table key {key_stem!r}") + raise TOMLDecodeError(f"Duplicate inline table key {key_stem!r}", src, pos) nest[key_stem] = value pos = skip_chars(src, pos, TOML_WS) c = src[pos : pos + 1] if c == "}": return pos + 1, nested_dict.dict if c != ",": - raise suffixed_err(src, pos, "Unclosed inline table") + raise TOMLDecodeError("Unclosed inline table", src, pos) if isinstance(value, (dict, list)): flags.set(key, Flags.FROZEN, recursive=True) pos += 1 @@ -485,7 +550,7 @@ def parse_basic_str_escape( except IndexError: return pos, "" if char != "\n": - raise suffixed_err(src, pos, "Unescaped '\\' in a string") + raise TOMLDecodeError("Unescaped '\\' in a string", src, pos) pos += 1 pos = skip_chars(src, pos, TOML_WS_AND_NEWLINE) return pos, "" @@ -496,7 +561,7 @@ def parse_basic_str_escape( try: return pos, BASIC_STR_ESCAPE_REPLACEMENTS[escape_id] except KeyError: - raise suffixed_err(src, pos, "Unescaped '\\' in a string") from None + raise TOMLDecodeError("Unescaped '\\' in a string", src, pos) from None def parse_basic_str_escape_multiline(src: str, pos: Pos) -> tuple[Pos, str]: @@ -506,11 +571,13 @@ def parse_basic_str_escape_multiline(src: str, pos: Pos) -> tuple[Pos, str]: def parse_hex_char(src: str, pos: Pos, hex_len: int) -> tuple[Pos, str]: hex_str = src[pos : pos + hex_len] if len(hex_str) != hex_len or not HEXDIGIT_CHARS.issuperset(hex_str): - raise suffixed_err(src, pos, "Invalid hex value") + raise TOMLDecodeError("Invalid hex value", src, pos) pos += hex_len hex_int = int(hex_str, 16) if not is_unicode_scalar_value(hex_int): - raise suffixed_err(src, pos, "Escaped character is not a Unicode scalar value") + raise TOMLDecodeError( + "Escaped character is not a Unicode scalar value", src, pos + ) return pos, chr(hex_int) @@ -567,7 +634,7 @@ def parse_basic_str(src: str, pos: Pos, *, multiline: bool) -> tuple[Pos, str]: try: char = src[pos] except IndexError: - raise suffixed_err(src, pos, "Unterminated string") from None + raise TOMLDecodeError("Unterminated string", src, pos) from None if char == '"': if not multiline: return pos + 1, result + src[start_pos:pos] @@ -582,7 +649,7 @@ def parse_basic_str(src: str, pos: Pos, *, multiline: bool) -> tuple[Pos, str]: start_pos = pos continue if char in error_on: - raise suffixed_err(src, pos, f"Illegal character {char!r}") + raise TOMLDecodeError(f"Illegal character {char!r}", src, pos) pos += 1 @@ -630,7 +697,7 @@ def parse_value( # noqa: C901 try: datetime_obj = match_to_datetime(datetime_match) except ValueError as e: - raise suffixed_err(src, pos, "Invalid date or datetime") from e + raise TOMLDecodeError("Invalid date or datetime", src, pos) from e return datetime_match.end(), datetime_obj localtime_match = RE_LOCALTIME.match(src, pos) if localtime_match: @@ -651,24 +718,7 @@ def parse_value( # noqa: C901 if first_four in {"-inf", "+inf", "-nan", "+nan"}: return pos + 4, parse_float(first_four) - raise suffixed_err(src, pos, "Invalid value") - - -def suffixed_err(src: str, pos: Pos, msg: str) -> TOMLDecodeError: - """Return a `TOMLDecodeError` where error message is suffixed with - coordinates in source.""" - - def coord_repr(src: str, pos: Pos) -> str: - if pos >= len(src): - return "end of document" - line = src.count("\n", 0, pos) + 1 - if line == 1: - column = pos + 1 - else: - column = pos - src.rindex("\n", 0, pos) - return f"line {line}, column {column}" - - return TOMLDecodeError(f"{msg} (at {coord_repr(src, pos)})") + raise TOMLDecodeError("Invalid 
value", src, pos) def is_unicode_scalar_value(codepoint: int) -> bool: diff --git a/Lib/typing.py b/Lib/typing.py index c924c767042552..938e52922aee03 100644 --- a/Lib/typing.py +++ b/Lib/typing.py @@ -1940,9 +1940,11 @@ def _allow_reckless_class_checks(depth=2): _PROTO_ALLOWLIST = { 'collections.abc': [ 'Callable', 'Awaitable', 'Iterable', 'Iterator', 'AsyncIterable', - 'Hashable', 'Sized', 'Container', 'Collection', 'Reversible', 'Buffer', + 'AsyncIterator', 'Hashable', 'Sized', 'Container', 'Collection', + 'Reversible', 'Buffer', ], 'contextlib': ['AbstractContextManager', 'AbstractAsyncContextManager'], + 'os': ['PathLike'], } diff --git a/Lib/unittest/__init__.py b/Lib/unittest/__init__.py index 324e5d038aef03..78ff6bb4fdcce5 100644 --- a/Lib/unittest/__init__.py +++ b/Lib/unittest/__init__.py @@ -27,7 +27,7 @@ def testMultiply(self): http://docs.python.org/library/unittest.html Copyright (c) 1999-2003 Steve Purcell -Copyright (c) 2003-2010 Python Software Foundation +Copyright (c) 2003 Python Software Foundation This module is free software, and you may redistribute it and/or modify it under the same terms as Python itself, so long as this copyright message and disclaimer are retained in their original form. diff --git a/Lib/unittest/async_case.py b/Lib/unittest/async_case.py index bd06eb3207697a..6000af1cef0a78 100644 --- a/Lib/unittest/async_case.py +++ b/Lib/unittest/async_case.py @@ -5,6 +5,7 @@ from .case import TestCase +__unittest = True class IsolatedAsyncioTestCase(TestCase): # Names intentionally have a long prefix diff --git a/Lib/unittest/mock.py b/Lib/unittest/mock.py index 21ca061a77c26f..55cb4b1f6aff90 100644 --- a/Lib/unittest/mock.py +++ b/Lib/unittest/mock.py @@ -1360,6 +1360,7 @@ def __init__( self.autospec = autospec self.kwargs = kwargs self.additional_patchers = [] + self.is_started = False def copy(self): @@ -1472,6 +1473,9 @@ def get_original(self): def __enter__(self): """Perform the patch.""" + if self.is_started: + raise RuntimeError("Patch is already started") + new, spec, spec_set = self.new, self.spec, self.spec_set autospec, kwargs = self.autospec, self.kwargs new_callable = self.new_callable @@ -1603,6 +1607,7 @@ def __enter__(self): self.temp_original = original self.is_local = local self._exit_stack = contextlib.ExitStack() + self.is_started = True try: setattr(self.target, self.attribute, new_attr) if self.attribute_name is not None: @@ -1622,6 +1627,9 @@ def __enter__(self): def __exit__(self, *exc_info): """Undo the patch.""" + if not self.is_started: + return + if self.is_local and self.temp_original is not DEFAULT: setattr(self.target, self.attribute, self.temp_original) else: @@ -1638,6 +1646,7 @@ def __exit__(self, *exc_info): del self.target exit_stack = self._exit_stack del self._exit_stack + self.is_started = False return exit_stack.__exit__(*exc_info) diff --git a/Lib/urllib/parse.py b/Lib/urllib/parse.py index a721d777c82f82..8d7631d5693ece 100644 --- a/Lib/urllib/parse.py +++ b/Lib/urllib/parse.py @@ -753,7 +753,8 @@ def parse_qs(qs, keep_blank_values=False, strict_parsing=False, parsed_result = {} pairs = parse_qsl(qs, keep_blank_values, strict_parsing, encoding=encoding, errors=errors, - max_num_fields=max_num_fields, separator=separator) + max_num_fields=max_num_fields, separator=separator, + _stacklevel=2) for name, value in pairs: if name in parsed_result: parsed_result[name].append(value) @@ -763,7 +764,7 @@ def parse_qs(qs, keep_blank_values=False, strict_parsing=False, def parse_qsl(qs, keep_blank_values=False, 
strict_parsing=False, - encoding='utf-8', errors='replace', max_num_fields=None, separator='&'): + encoding='utf-8', errors='replace', max_num_fields=None, separator='&', *, _stacklevel=1): """Parse a query given as a string argument. Arguments: @@ -791,7 +792,6 @@ def parse_qsl(qs, keep_blank_values=False, strict_parsing=False, Returns a list, as G-d intended. """ - if not separator or not isinstance(separator, (str, bytes)): raise ValueError("Separator must be of type string or bytes.") if isinstance(qs, str): @@ -800,12 +800,21 @@ def parse_qsl(qs, keep_blank_values=False, strict_parsing=False, eq = '=' def _unquote(s): return unquote_plus(s, encoding=encoding, errors=errors) + elif qs is None: + return [] else: - if not qs: - return [] - # Use memoryview() to reject integers and iterables, - # acceptable by the bytes constructor. - qs = bytes(memoryview(qs)) + try: + # Use memoryview() to reject integers and iterables, + # acceptable by the bytes constructor. + qs = bytes(memoryview(qs)) + except TypeError: + if not qs: + warnings.warn(f"Accepting {type(qs).__name__} objects with " + f"false value in urllib.parse.parse_qsl() is " + f"deprecated as of 3.14", + DeprecationWarning, stacklevel=_stacklevel + 1) + return [] + raise if isinstance(separator, str): separator = bytes(separator, 'ascii') eq = b'=' diff --git a/Lib/urllib/request.py b/Lib/urllib/request.py index bc35d8a80e5d03..80be65c613e971 100644 --- a/Lib/urllib/request.py +++ b/Lib/urllib/request.py @@ -83,6 +83,7 @@ import base64 import bisect +import contextlib import email import hashlib import http.client @@ -94,15 +95,13 @@ import sys import time import tempfile -import contextlib -import warnings from urllib.error import URLError, HTTPError, ContentTooShortError from urllib.parse import ( urlparse, urlsplit, urljoin, unwrap, quote, unquote, _splittype, _splithost, _splitport, _splituser, _splitpasswd, - _splitattr, _splitquery, _splitvalue, _splittag, _to_bytes, + _splitattr, _splitvalue, _splittag, unquote_to_bytes, urlunparse) from urllib.response import addinfourl, addclosehook @@ -128,7 +127,7 @@ 'urlopen', 'install_opener', 'build_opener', 'pathname2url', 'url2pathname', 'getproxies', # Legacy interface - 'urlretrieve', 'urlcleanup', 'URLopener', 'FancyURLopener', + 'urlretrieve', 'urlcleanup', ] # used in User-Agent header sent @@ -165,8 +164,7 @@ def urlopen(url, data=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, the reason phrase returned by the server --- instead of the response headers as it is specified in the documentation for HTTPResponse. - For FTP, file, and data URLs and requests explicitly handled by legacy - URLopener and FancyURLopener classes, this function returns a + For FTP, file, and data URLs, this function returns a urllib.response.addinfourl object. 
Note that None may be returned if no handler handles the request (though @@ -940,6 +938,7 @@ def _parse_realm(self, header): for mo in AbstractBasicAuthHandler.rx.finditer(header): scheme, quote, realm = mo.groups() if quote not in ['"', "'"]: + import warnings warnings.warn("Basic Auth Realm was unquoted", UserWarning, 3) @@ -1495,7 +1494,7 @@ def open_local_file(self, req): origurl = 'file://' + filename return addinfourl(open(localfile, 'rb'), headers, origurl) except OSError as exp: - raise URLError(exp) + raise URLError(exp, exp.filename) raise URLError('file not on local host') def _safe_gethostbyname(host): @@ -1647,8 +1646,6 @@ def data_open(self, req): # Code move from the old urllib module -MAXFTPCACHE = 10 # Trim the ftp cache beyond this size - # Helper for non-unix systems if os.name == 'nt': from nturl2path import url2pathname, pathname2url @@ -1656,684 +1653,23 @@ def data_open(self, req): def url2pathname(pathname): """OS-specific conversion from a relative URL of the 'file' scheme to a file system path; not recommended for general use.""" - return unquote(pathname) + if pathname[:3] == '///': + # URL has an empty authority section, so the path begins on the + # third character. + pathname = pathname[2:] + elif pathname[:12] == '//localhost/': + # Skip past 'localhost' authority. + pathname = pathname[11:] + encoding = sys.getfilesystemencoding() + errors = sys.getfilesystemencodeerrors() + return unquote(pathname, encoding=encoding, errors=errors) def pathname2url(pathname): """OS-specific conversion from a file system path to a relative URL of the 'file' scheme; not recommended for general use.""" - return quote(pathname) - - -ftpcache = {} - - -class URLopener: - """Class to open URLs. - This is a class rather than just a subroutine because we may need - more than one set of global protocol-specific options. - Note -- this is a base class for those who don't want the - automatic handling of errors type 302 (relocated) and 401 - (authorization needed).""" - - __tempfiles = None - - version = "Python-urllib/%s" % __version__ - - # Constructor - def __init__(self, proxies=None, **x509): - msg = "%(class)s style of invoking requests is deprecated. " \ - "Use newer urlopen functions/methods" % {'class': self.__class__.__name__} - warnings.warn(msg, DeprecationWarning, stacklevel=3) - if proxies is None: - proxies = getproxies() - assert hasattr(proxies, 'keys'), "proxies must be a mapping" - self.proxies = proxies - self.key_file = x509.get('key_file') - self.cert_file = x509.get('cert_file') - self.addheaders = [('User-Agent', self.version), ('Accept', '*/*')] - self.__tempfiles = [] - self.__unlink = os.unlink # See cleanup() - self.tempcache = None - # Undocumented feature: if you assign {} to tempcache, - # it is used to cache files retrieved with - # self.retrieve(). This is not enabled by default - # since it does not work for changing documents (and I - # haven't got the logic to check expiration headers - # yet). - self.ftpcache = ftpcache - # Undocumented feature: you can use a different - # ftp cache by assigning to the .ftpcache member; - # in case you want logically independent URL openers - # XXX This is not threadsafe. Bah. - - def __del__(self): - self.close() - - def close(self): - self.cleanup() - - def cleanup(self): - # This code sometimes runs when the rest of this module - # has already been deleted, so it can't use any globals - # or import anything. 
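For context, a minimal sketch of the new POSIX ``url2pathname``/``pathname2url`` behaviour added above (assuming this patched ``urllib.request``; the paths are invented for illustration only):

    from urllib.request import pathname2url, url2pathname

    # An empty or 'localhost' authority is now stripped before unquoting.
    assert url2pathname('///tmp/x') == '/tmp/x'
    assert url2pathname('//localhost/tmp/x') == '/tmp/x'
    # Round-trips now use the filesystem encoding on both sides.
    assert url2pathname(pathname2url('/tmp/some file')) == '/tmp/some file'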
- if self.__tempfiles: - for file in self.__tempfiles: - try: - self.__unlink(file) - except OSError: - pass - del self.__tempfiles[:] - if self.tempcache: - self.tempcache.clear() - - def addheader(self, *args): - """Add a header to be used by the HTTP interface only - e.g. u.addheader('Accept', 'sound/basic')""" - self.addheaders.append(args) - - # External interface - def open(self, fullurl, data=None): - """Use URLopener().open(file) instead of open(file, 'r').""" - fullurl = unwrap(_to_bytes(fullurl)) - fullurl = quote(fullurl, safe="%/:=&?~#+!$,;'@()*[]|") - if self.tempcache and fullurl in self.tempcache: - filename, headers = self.tempcache[fullurl] - fp = open(filename, 'rb') - return addinfourl(fp, headers, fullurl) - urltype, url = _splittype(fullurl) - if not urltype: - urltype = 'file' - if urltype in self.proxies: - proxy = self.proxies[urltype] - urltype, proxyhost = _splittype(proxy) - host, selector = _splithost(proxyhost) - url = (host, fullurl) # Signal special case to open_*() - else: - proxy = None - name = 'open_' + urltype - self.type = urltype - name = name.replace('-', '_') - if not hasattr(self, name) or name == 'open_local_file': - if proxy: - return self.open_unknown_proxy(proxy, fullurl, data) - else: - return self.open_unknown(fullurl, data) - try: - if data is None: - return getattr(self, name)(url) - else: - return getattr(self, name)(url, data) - except (HTTPError, URLError): - raise - except OSError as msg: - raise OSError('socket error', msg) from msg - - def open_unknown(self, fullurl, data=None): - """Overridable interface to open unknown URL type.""" - type, url = _splittype(fullurl) - raise OSError('url error', 'unknown url type', type) - - def open_unknown_proxy(self, proxy, fullurl, data=None): - """Overridable interface to open unknown URL type.""" - type, url = _splittype(fullurl) - raise OSError('url error', 'invalid proxy for %s' % type, proxy) - - # External interface - def retrieve(self, url, filename=None, reporthook=None, data=None): - """retrieve(url) returns (filename, headers) for a local object - or (tempfilename, headers) for a remote object.""" - url = unwrap(_to_bytes(url)) - if self.tempcache and url in self.tempcache: - return self.tempcache[url] - type, url1 = _splittype(url) - if filename is None and (not type or type == 'file'): - try: - fp = self.open_local_file(url1) - hdrs = fp.info() - fp.close() - return url2pathname(_splithost(url1)[1]), hdrs - except OSError: - pass - fp = self.open(url, data) - try: - headers = fp.info() - if filename: - tfp = open(filename, 'wb') - else: - garbage, path = _splittype(url) - garbage, path = _splithost(path or "") - path, garbage = _splitquery(path or "") - path, garbage = _splitattr(path or "") - suffix = os.path.splitext(path)[1] - (fd, filename) = tempfile.mkstemp(suffix) - self.__tempfiles.append(filename) - tfp = os.fdopen(fd, 'wb') - try: - result = filename, headers - if self.tempcache is not None: - self.tempcache[url] = result - bs = 1024*8 - size = -1 - read = 0 - blocknum = 0 - if "content-length" in headers: - size = int(headers["Content-Length"]) - if reporthook: - reporthook(blocknum, bs, size) - while block := fp.read(bs): - read += len(block) - tfp.write(block) - blocknum += 1 - if reporthook: - reporthook(blocknum, bs, size) - finally: - tfp.close() - finally: - fp.close() - - # raise exception if actual size does not match content-length header - if size >= 0 and read < size: - raise ContentTooShortError( - "retrieval incomplete: got only %i out of %i bytes" - % (read, 
size), result) - - return result - - # Each method named open_ knows how to open that type of URL - - def _open_generic_http(self, connection_factory, url, data): - """Make an HTTP connection using connection_class. - - This is an internal method that should be called from - open_http() or open_https(). - - Arguments: - - connection_factory should take a host name and return an - HTTPConnection instance. - - url is the url to retrieval or a host, relative-path pair. - - data is payload for a POST request or None. - """ - - user_passwd = None - proxy_passwd= None - if isinstance(url, str): - host, selector = _splithost(url) - if host: - user_passwd, host = _splituser(host) - host = unquote(host) - realhost = host - else: - host, selector = url - # check whether the proxy contains authorization information - proxy_passwd, host = _splituser(host) - # now we proceed with the url we want to obtain - urltype, rest = _splittype(selector) - url = rest - user_passwd = None - if urltype.lower() != 'http': - realhost = None - else: - realhost, rest = _splithost(rest) - if realhost: - user_passwd, realhost = _splituser(realhost) - if user_passwd: - selector = "%s://%s%s" % (urltype, realhost, rest) - if proxy_bypass(realhost): - host = realhost - - if not host: raise OSError('http error', 'no host given') - - if proxy_passwd: - proxy_passwd = unquote(proxy_passwd) - proxy_auth = base64.b64encode(proxy_passwd.encode()).decode('ascii') - else: - proxy_auth = None - - if user_passwd: - user_passwd = unquote(user_passwd) - auth = base64.b64encode(user_passwd.encode()).decode('ascii') - else: - auth = None - http_conn = connection_factory(host) - headers = {} - if proxy_auth: - headers["Proxy-Authorization"] = "Basic %s" % proxy_auth - if auth: - headers["Authorization"] = "Basic %s" % auth - if realhost: - headers["Host"] = realhost - - # Add Connection:close as we don't support persistent connections yet. - # This helps in closing the socket and avoiding ResourceWarning - - headers["Connection"] = "close" - - for header, value in self.addheaders: - headers[header] = value - - if data is not None: - headers["Content-Type"] = "application/x-www-form-urlencoded" - http_conn.request("POST", selector, data, headers) - else: - http_conn.request("GET", selector, headers=headers) - - try: - response = http_conn.getresponse() - except http.client.BadStatusLine: - # something went wrong with the HTTP status line - raise URLError("http protocol error: bad status line") - - # According to RFC 2616, "2xx" code indicates that the client's - # request was successfully received, understood, and accepted. - if 200 <= response.status < 300: - return addinfourl(response, response.msg, "http:" + url, - response.status) - else: - return self.http_error( - url, response.fp, - response.status, response.reason, response.msg, data) - - def open_http(self, url, data=None): - """Use HTTP protocol.""" - return self._open_generic_http(http.client.HTTPConnection, url, data) - - def http_error(self, url, fp, errcode, errmsg, headers, data=None): - """Handle http errors. 
- - Derived class can override this, or provide specific handlers - named http_error_DDD where DDD is the 3-digit error code.""" - # First check if there's a specific handler for this error - name = 'http_error_%d' % errcode - if hasattr(self, name): - method = getattr(self, name) - if data is None: - result = method(url, fp, errcode, errmsg, headers) - else: - result = method(url, fp, errcode, errmsg, headers, data) - if result: return result - return self.http_error_default(url, fp, errcode, errmsg, headers) - - def http_error_default(self, url, fp, errcode, errmsg, headers): - """Default error handler: close the connection and raise OSError.""" - fp.close() - raise HTTPError(url, errcode, errmsg, headers, None) - - if _have_ssl: - def _https_connection(self, host): - if self.key_file or self.cert_file: - http_version = http.client.HTTPSConnection._http_vsn - context = http.client._create_https_context(http_version) - context.load_cert_chain(self.cert_file, self.key_file) - # cert and key file means the user wants to authenticate. - # enable TLS 1.3 PHA implicitly even for custom contexts. - if context.post_handshake_auth is not None: - context.post_handshake_auth = True - else: - context = None - return http.client.HTTPSConnection(host, context=context) - - def open_https(self, url, data=None): - """Use HTTPS protocol.""" - return self._open_generic_http(self._https_connection, url, data) - - def open_file(self, url): - """Use local file or FTP depending on form of URL.""" - if not isinstance(url, str): - raise URLError('file error: proxy support for file protocol currently not implemented') - if url[:2] == '//' and url[2:3] != '/' and url[2:12].lower() != 'localhost/': - raise ValueError("file:// scheme is supported only on localhost") - else: - return self.open_local_file(url) - - def open_local_file(self, url): - """Use local file.""" - import email.utils - import mimetypes - host, file = _splithost(url) - localname = url2pathname(file) - try: - stats = os.stat(localname) - except OSError as e: - raise URLError(e.strerror, e.filename) - size = stats.st_size - modified = email.utils.formatdate(stats.st_mtime, usegmt=True) - mtype = mimetypes.guess_type(url)[0] - headers = email.message_from_string( - 'Content-Type: %s\nContent-Length: %d\nLast-modified: %s\n' % - (mtype or 'text/plain', size, modified)) - if not host: - urlfile = file - if file[:1] == '/': - urlfile = 'file://' + file - return addinfourl(open(localname, 'rb'), headers, urlfile) - host, port = _splitport(host) - if (not port - and socket.gethostbyname(host) in ((localhost(),) + thishost())): - urlfile = file - if file[:1] == '/': - urlfile = 'file://' + file - elif file[:2] == './': - raise ValueError("local file url may start with / or file:. 
Unknown url of type: %s" % url) - return addinfourl(open(localname, 'rb'), headers, urlfile) - raise URLError('local file error: not on local host') - - def open_ftp(self, url): - """Use FTP protocol.""" - if not isinstance(url, str): - raise URLError('ftp error: proxy support for ftp protocol currently not implemented') - import mimetypes - host, path = _splithost(url) - if not host: raise URLError('ftp error: no host given') - host, port = _splitport(host) - user, host = _splituser(host) - if user: user, passwd = _splitpasswd(user) - else: passwd = None - host = unquote(host) - user = unquote(user or '') - passwd = unquote(passwd or '') - host = socket.gethostbyname(host) - if not port: - import ftplib - port = ftplib.FTP_PORT - else: - port = int(port) - path, attrs = _splitattr(path) - path = unquote(path) - dirs = path.split('/') - dirs, file = dirs[:-1], dirs[-1] - if dirs and not dirs[0]: dirs = dirs[1:] - if dirs and not dirs[0]: dirs[0] = '/' - key = user, host, port, '/'.join(dirs) - # XXX thread unsafe! - if len(self.ftpcache) > MAXFTPCACHE: - # Prune the cache, rather arbitrarily - for k in list(self.ftpcache): - if k != key: - v = self.ftpcache[k] - del self.ftpcache[k] - v.close() - try: - if key not in self.ftpcache: - self.ftpcache[key] = \ - ftpwrapper(user, passwd, host, port, dirs) - if not file: type = 'D' - else: type = 'I' - for attr in attrs: - attr, value = _splitvalue(attr) - if attr.lower() == 'type' and \ - value in ('a', 'A', 'i', 'I', 'd', 'D'): - type = value.upper() - (fp, retrlen) = self.ftpcache[key].retrfile(file, type) - mtype = mimetypes.guess_type("ftp:" + url)[0] - headers = "" - if mtype: - headers += "Content-Type: %s\n" % mtype - if retrlen is not None and retrlen >= 0: - headers += "Content-Length: %d\n" % retrlen - headers = email.message_from_string(headers) - return addinfourl(fp, headers, "ftp:" + url) - except ftperrors() as exp: - raise URLError(f'ftp error: {exp}') from exp - - def open_data(self, url, data=None): - """Use "data" URL.""" - if not isinstance(url, str): - raise URLError('data error: proxy support for data protocol currently not implemented') - # ignore POSTed data - # - # syntax of data URLs: - # dataurl := "data:" [ mediatype ] [ ";base64" ] "," data - # mediatype := [ type "/" subtype ] *( ";" parameter ) - # data := *urlchar - # parameter := attribute "=" value - try: - [type, data] = url.split(',', 1) - except ValueError: - raise OSError('data error', 'bad data URL') - if not type: - type = 'text/plain;charset=US-ASCII' - semi = type.rfind(';') - if semi >= 0 and '=' not in type[semi:]: - encoding = type[semi+1:] - type = type[:semi] - else: - encoding = '' - msg = [] - msg.append('Date: %s'%time.strftime('%a, %d %b %Y %H:%M:%S GMT', - time.gmtime(time.time()))) - msg.append('Content-type: %s' % type) - if encoding == 'base64': - # XXX is this encoding/decoding ok? 
- data = base64.decodebytes(data.encode('ascii')).decode('latin-1') - else: - data = unquote(data) - msg.append('Content-Length: %d' % len(data)) - msg.append('') - msg.append(data) - msg = '\n'.join(msg) - headers = email.message_from_string(msg) - f = io.StringIO(msg) - #f.fileno = None # needed for addinfourl - return addinfourl(f, headers, url) - - -class FancyURLopener(URLopener): - """Derived class with handlers for errors we can handle (perhaps).""" - - def __init__(self, *args, **kwargs): - URLopener.__init__(self, *args, **kwargs) - self.auth_cache = {} - self.tries = 0 - self.maxtries = 10 - - def http_error_default(self, url, fp, errcode, errmsg, headers): - """Default error handling -- don't raise an exception.""" - return addinfourl(fp, headers, "http:" + url, errcode) - - def http_error_302(self, url, fp, errcode, errmsg, headers, data=None): - """Error 302 -- relocated (temporarily).""" - self.tries += 1 - try: - if self.maxtries and self.tries >= self.maxtries: - if hasattr(self, "http_error_500"): - meth = self.http_error_500 - else: - meth = self.http_error_default - return meth(url, fp, 500, - "Internal Server Error: Redirect Recursion", - headers) - result = self.redirect_internal(url, fp, errcode, errmsg, - headers, data) - return result - finally: - self.tries = 0 - - def redirect_internal(self, url, fp, errcode, errmsg, headers, data): - if 'location' in headers: - newurl = headers['location'] - elif 'uri' in headers: - newurl = headers['uri'] - else: - return - fp.close() - - # In case the server sent a relative URL, join with original: - newurl = urljoin(self.type + ":" + url, newurl) - - urlparts = urlparse(newurl) - - # For security reasons, we don't allow redirection to anything other - # than http, https and ftp. - - # We are using newer HTTPError with older redirect_internal method - # This older method will get deprecated in 3.3 - - if urlparts.scheme not in ('http', 'https', 'ftp', ''): - raise HTTPError(newurl, errcode, - errmsg + - " Redirection to url '%s' is not allowed." % newurl, - headers, fp) - - return self.open(newurl) - - def http_error_301(self, url, fp, errcode, errmsg, headers, data=None): - """Error 301 -- also relocated (permanently).""" - return self.http_error_302(url, fp, errcode, errmsg, headers, data) - - def http_error_303(self, url, fp, errcode, errmsg, headers, data=None): - """Error 303 -- also relocated (essentially identical to 302).""" - return self.http_error_302(url, fp, errcode, errmsg, headers, data) - - def http_error_307(self, url, fp, errcode, errmsg, headers, data=None): - """Error 307 -- relocated, but turn POST into error.""" - if data is None: - return self.http_error_302(url, fp, errcode, errmsg, headers, data) - else: - return self.http_error_default(url, fp, errcode, errmsg, headers) - - def http_error_308(self, url, fp, errcode, errmsg, headers, data=None): - """Error 308 -- relocated, but turn POST into error.""" - if data is None: - return self.http_error_301(url, fp, errcode, errmsg, headers, data) - else: - return self.http_error_default(url, fp, errcode, errmsg, headers) - - def http_error_401(self, url, fp, errcode, errmsg, headers, data=None, - retry=False): - """Error 401 -- authentication required. 
- This function supports Basic authentication only.""" - if 'www-authenticate' not in headers: - URLopener.http_error_default(self, url, fp, - errcode, errmsg, headers) - stuff = headers['www-authenticate'] - match = re.match('[ \t]*([^ \t]+)[ \t]+realm="([^"]*)"', stuff) - if not match: - URLopener.http_error_default(self, url, fp, - errcode, errmsg, headers) - scheme, realm = match.groups() - if scheme.lower() != 'basic': - URLopener.http_error_default(self, url, fp, - errcode, errmsg, headers) - if not retry: - URLopener.http_error_default(self, url, fp, errcode, errmsg, - headers) - name = 'retry_' + self.type + '_basic_auth' - if data is None: - return getattr(self,name)(url, realm) - else: - return getattr(self,name)(url, realm, data) - - def http_error_407(self, url, fp, errcode, errmsg, headers, data=None, - retry=False): - """Error 407 -- proxy authentication required. - This function supports Basic authentication only.""" - if 'proxy-authenticate' not in headers: - URLopener.http_error_default(self, url, fp, - errcode, errmsg, headers) - stuff = headers['proxy-authenticate'] - match = re.match('[ \t]*([^ \t]+)[ \t]+realm="([^"]*)"', stuff) - if not match: - URLopener.http_error_default(self, url, fp, - errcode, errmsg, headers) - scheme, realm = match.groups() - if scheme.lower() != 'basic': - URLopener.http_error_default(self, url, fp, - errcode, errmsg, headers) - if not retry: - URLopener.http_error_default(self, url, fp, errcode, errmsg, - headers) - name = 'retry_proxy_' + self.type + '_basic_auth' - if data is None: - return getattr(self,name)(url, realm) - else: - return getattr(self,name)(url, realm, data) - - def retry_proxy_http_basic_auth(self, url, realm, data=None): - host, selector = _splithost(url) - newurl = 'http://' + host + selector - proxy = self.proxies['http'] - urltype, proxyhost = _splittype(proxy) - proxyhost, proxyselector = _splithost(proxyhost) - i = proxyhost.find('@') + 1 - proxyhost = proxyhost[i:] - user, passwd = self.get_user_passwd(proxyhost, realm, i) - if not (user or passwd): return None - proxyhost = "%s:%s@%s" % (quote(user, safe=''), - quote(passwd, safe=''), proxyhost) - self.proxies['http'] = 'http://' + proxyhost + proxyselector - if data is None: - return self.open(newurl) - else: - return self.open(newurl, data) - - def retry_proxy_https_basic_auth(self, url, realm, data=None): - host, selector = _splithost(url) - newurl = 'https://' + host + selector - proxy = self.proxies['https'] - urltype, proxyhost = _splittype(proxy) - proxyhost, proxyselector = _splithost(proxyhost) - i = proxyhost.find('@') + 1 - proxyhost = proxyhost[i:] - user, passwd = self.get_user_passwd(proxyhost, realm, i) - if not (user or passwd): return None - proxyhost = "%s:%s@%s" % (quote(user, safe=''), - quote(passwd, safe=''), proxyhost) - self.proxies['https'] = 'https://' + proxyhost + proxyselector - if data is None: - return self.open(newurl) - else: - return self.open(newurl, data) - - def retry_http_basic_auth(self, url, realm, data=None): - host, selector = _splithost(url) - i = host.find('@') + 1 - host = host[i:] - user, passwd = self.get_user_passwd(host, realm, i) - if not (user or passwd): return None - host = "%s:%s@%s" % (quote(user, safe=''), - quote(passwd, safe=''), host) - newurl = 'http://' + host + selector - if data is None: - return self.open(newurl) - else: - return self.open(newurl, data) - - def retry_https_basic_auth(self, url, realm, data=None): - host, selector = _splithost(url) - i = host.find('@') + 1 - host = host[i:] - user, 
passwd = self.get_user_passwd(host, realm, i) - if not (user or passwd): return None - host = "%s:%s@%s" % (quote(user, safe=''), - quote(passwd, safe=''), host) - newurl = 'https://' + host + selector - if data is None: - return self.open(newurl) - else: - return self.open(newurl, data) - - def get_user_passwd(self, host, realm, clear_cache=0): - key = realm + '@' + host.lower() - if key in self.auth_cache: - if clear_cache: - del self.auth_cache[key] - else: - return self.auth_cache[key] - user, passwd = self.prompt_user_passwd(host, realm) - if user or passwd: self.auth_cache[key] = (user, passwd) - return user, passwd - - def prompt_user_passwd(self, host, realm): - """Override this in a GUI environment!""" - import getpass - try: - user = input("Enter username for %s at %s: " % (realm, host)) - passwd = getpass.getpass("Enter password for %s in %s at %s: " % - (user, realm, host)) - return user, passwd - except KeyboardInterrupt: - print() - return None, None + encoding = sys.getfilesystemencoding() + errors = sys.getfilesystemencodeerrors() + return quote(pathname, encoding=encoding, errors=errors) # Utility functions @@ -2481,9 +1817,7 @@ def getproxies_environment(): """Return a dictionary of scheme -> proxy server URL mappings. Scan the environment for variables named _proxy; - this seems to be the standard convention. If you need a - different way, you can pass a proxies dictionary to the - [Fancy]URLopener constructor. + this seems to be the standard convention. """ # in order to prefer lowercase variables, process environment in # two passes: first matches any, second pass matches lowercase only diff --git a/Lib/uuid.py b/Lib/uuid.py index 4d4f06cfc9ebbe..9c6ad9643cf6d5 100644 --- a/Lib/uuid.py +++ b/Lib/uuid.py @@ -1,8 +1,8 @@ -r"""UUID objects (universally unique identifiers) according to RFC 4122. +r"""UUID objects (universally unique identifiers) according to RFC 4122/9562. This module provides immutable UUID objects (class UUID) and the functions -uuid1(), uuid3(), uuid4(), uuid5() for generating version 1, 3, 4, and 5 -UUIDs as specified in RFC 4122. +uuid1(), uuid3(), uuid4(), uuid5(), and uuid8() for generating version 1, 3, +4, 5, and 8 UUIDs as specified in RFC 4122/9562. If all you want is a unique ID, you should probably call uuid1() or uuid4(). Note that uuid1() may compromise privacy since it creates a UUID containing @@ -124,12 +124,12 @@ class UUID: int the UUID as a 128-bit integer - urn the UUID as a URN as specified in RFC 4122 + urn the UUID as a URN as specified in RFC 4122/9562 variant the UUID variant (one of the constants RESERVED_NCS, RFC_4122, RESERVED_MICROSOFT, or RESERVED_FUTURE) - version the UUID version number (1 through 5, meaningful only + version the UUID version number (1 through 8, meaningful only when the variant is RFC_4122) is_safe An enum indicating whether the UUID has been generated in @@ -214,9 +214,9 @@ def __init__(self, hex=None, bytes=None, bytes_le=None, fields=None, if not 0 <= int < 1<<128: raise ValueError('int is out of range (need a 128-bit value)') if version is not None: - if not 1 <= version <= 5: + if not 1 <= version <= 8: raise ValueError('illegal version number') - # Set the variant to RFC 4122. + # Set the variant to RFC 4122/9562. int &= ~(0xc000 << 48) int |= 0x8000 << 48 # Set the version number. @@ -355,7 +355,7 @@ def variant(self): @property def version(self): - # The version bits are only meaningful for RFC 4122 UUIDs. + # The version bits are only meaningful for RFC 4122/9562 UUIDs. 
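As a rough usage sketch of the ``uuid8()`` constructor added further down in this file (assuming a Python 3.14 build containing this patch):

    import uuid

    u = uuid.uuid8()                      # all three blocks pseudo-random
    v = uuid.uuid8(a=0x1234_5678_9abc)    # pin only the 48-bit 'a' block
    assert u.version == v.version == 8
    assert v.variant == uuid.RFC_4122
    assert (v.int >> 80) & 0xffff_ffff_ffff == 0x1234_5678_9abc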
if self.variant == RFC_4122: return int((self.int >> 76) & 0xf) @@ -719,6 +719,28 @@ def uuid5(namespace, name): hash = sha1(namespace.bytes + name).digest() return UUID(bytes=hash[:16], version=5) +def uuid8(a=None, b=None, c=None): + """Generate a UUID from three custom blocks. + + * 'a' is the first 48-bit chunk of the UUID (octets 0-5); + * 'b' is the mid 12-bit chunk (octets 6-7); + * 'c' is the last 62-bit chunk (octets 8-15). + + When a value is not specified, a pseudo-random value is generated. + """ + if a is None: + import random + a = random.getrandbits(48) + if b is None: + import random + b = random.getrandbits(12) + if c is None: + import random + c = random.getrandbits(62) + int_uuid_8 = (a & 0xffff_ffff_ffff) << 80 + int_uuid_8 |= (b & 0xfff) << 64 + int_uuid_8 |= c & 0x3fff_ffff_ffff_ffff + return UUID(int=int_uuid_8, version=8) def main(): """Run the uuid command line interface.""" @@ -726,7 +748,8 @@ def main(): "uuid1": uuid1, "uuid3": uuid3, "uuid4": uuid4, - "uuid5": uuid5 + "uuid5": uuid5, + "uuid8": uuid8, } uuid_namespace_funcs = ("uuid3", "uuid5") namespaces = { diff --git a/Lib/wsgiref/headers.py b/Lib/wsgiref/headers.py index 05d2ba4c664e5e..c78879f80c7df2 100644 --- a/Lib/wsgiref/headers.py +++ b/Lib/wsgiref/headers.py @@ -1,7 +1,7 @@ """Manage HTTP Response Headers Much of this module is red-handedly pilfered from email.message in the stdlib, -so portions are Copyright (C) 2001,2002 Python Software Foundation, and were +so portions are Copyright (C) 2001 Python Software Foundation, and were written by Barry Warsaw. """ diff --git a/Lib/zipfile/_path/__init__.py b/Lib/zipfile/_path/__init__.py index c0e53e273cfaac..5ae16ec970dda4 100644 --- a/Lib/zipfile/_path/__init__.py +++ b/Lib/zipfile/_path/__init__.py @@ -339,7 +339,7 @@ def open(self, mode='r', *args, pwd=None, **kwargs): if self.is_dir(): raise IsADirectoryError(self) zip_mode = mode[0] - if not self.exists() and zip_mode == 'r': + if zip_mode == 'r' and not self.exists(): raise FileNotFoundError(self) stream = self.root.open(self.at, zip_mode, pwd=pwd) if 'b' in mode: diff --git a/Mac/BuildScript/resources/License.rtf b/Mac/BuildScript/resources/License.rtf index 1255d1ce48ed6c..b5cb8ec41c86e2 100644 --- a/Mac/BuildScript/resources/License.rtf +++ b/Mac/BuildScript/resources/License.rtf @@ -64,7 +64,7 @@ Some software incorporated into Python is under different licenses. The licenses \f1\b0 \ 1. This LICENSE AGREEMENT is between the Python Software Foundation ("PSF"), and the Individual or Organization ("Licensee") accessing and otherwise using this software ("Python") in source or binary form and its associated documentation.\ \ -2. Subject to the terms and conditions of this License Agreement, PSF hereby grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, analyze, test, perform and/or display publicly, prepare derivative works, distribute, and otherwise use Python alone or in any derivative version, provided, however, that PSF's License Agreement and PSF's notice of copyright, i.e., "Copyright \'a9 2001-2020 Python Software Foundation; All Rights Reserved" are retained in Python alone or in any derivative version prepared by Licensee.\ +2. 
Subject to the terms and conditions of this License Agreement, PSF hereby grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, analyze, test, perform and/or display publicly, prepare derivative works, distribute, and otherwise use Python alone or in any derivative version, provided, however, that PSF's License Agreement and PSF's notice of copyright, i.e., "Copyright \'a9 2001 Python Software Foundation; All Rights Reserved" are retained in Python alone or in any derivative version prepared by Licensee.\ \ 3. In the event Licensee prepares a derivative work that is based on or incorporates Python or any part thereof, and wants to make the derivative work available to others as provided herein, then Licensee hereby agrees to include in any such work a brief summary of the changes made to Python.\ \ diff --git a/Mac/PythonLauncher/Info.plist.in b/Mac/PythonLauncher/Info.plist.in index 233694788ac2b7..ce8f27cd7d4de7 100644 --- a/Mac/PythonLauncher/Info.plist.in +++ b/Mac/PythonLauncher/Info.plist.in @@ -40,9 +40,9 @@ CFBundleExecutable Python Launcher NSHumanReadableCopyright - Copyright © 2001-2024 Python Software Foundation + Copyright © 2001 Python Software Foundation CFBundleGetInfoString - %VERSION%, © 2001-2024 Python Software Foundation + %VERSION%, © 2001 Python Software Foundation CFBundleIconFile PythonLauncher.icns CFBundleIdentifier diff --git a/Makefile.pre.in b/Makefile.pre.in index a337223d4d8608..8d94ba361fd934 100644 --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ -2567,6 +2567,8 @@ TESTSUBDIRS= idlelib/idle_test \ test/tracedmodules \ test/translationdata \ test/translationdata/argparse \ + test/translationdata/getopt \ + test/translationdata/optparse \ test/typinganndata \ test/wheeldata \ test/xmltestdata \ diff --git a/Misc/ACKS b/Misc/ACKS index 1a25088052f4e1..08cd293eac3835 100644 --- a/Misc/ACKS +++ b/Misc/ACKS @@ -485,6 +485,7 @@ Luke Dunstan Virgil Dupras Bruno Dupuis Andy Dustman +Sayandip Dutta Gary Duzan Eugene Dvurechenski Karmen Dykstra @@ -732,6 +733,7 @@ Larry Hastings Tim Hatch Zac Hatfield-Dodds Shane Hathaway +Akinori Hattori Michael Haubenwallner Janko Hauser Flavian Hautbois @@ -953,6 +955,7 @@ Sanyam Khurana Tyler Kieft Mads Kiilerich Jason Killen +Beomsoo Bombs Kim Derek D. Kim Gihwan Kim Jan Kim diff --git a/Misc/NEWS.d/3.14.0a2.rst b/Misc/NEWS.d/3.14.0a2.rst new file mode 100644 index 00000000000000..7384ce54cb8914 --- /dev/null +++ b/Misc/NEWS.d/3.14.0a2.rst @@ -0,0 +1,1762 @@ +.. date: 2024-11-16-22-08-41 +.. gh-issue: 126911 +.. nonce: HchCZZ +.. release date: 2024-11-19 +.. section: Windows + +Update credits command output. + +.. + +.. date: 2024-11-12-22-31-13 +.. gh-issue: 118973 +.. nonce: _lfxW6 +.. section: Windows + +Ensures the experimental free-threaded install includes the ``_tkinter`` +module. The optional Tcl/Tk component must also be installed in order for +the module to work. + +.. + +.. date: 2024-11-07-20-42-31 +.. gh-issue: 126497 +.. nonce: EARpd- +.. section: Windows + +Fixes venv failure due to missing redirector executables in experimental +free-threaded installs. + +.. + +.. date: 2024-10-29-20-09-52 +.. gh-issue: 126074 +.. nonce: 83ZzZs +.. section: Windows + +Removed unnecessary DLLs from Windows embeddable package + +.. + +.. date: 2024-10-29-19-48-03 +.. gh-issue: 125315 +.. nonce: jdB9qN +.. section: Windows + +Avoid crashing in :mod:`platform` due to slow WMI calls on some Windows +machines. + +.. + +.. date: 2024-10-29-09-39-06 +.. gh-issue: 126084 +.. nonce: 3wAL8o +.. 
section: Windows + +Fix venvwlauncher to launch pythonw instead of python so no extra console +window is created. + +.. + +.. date: 2024-10-23-17-24-23 +.. gh-issue: 125842 +.. nonce: m3EF9E +.. section: Windows + +Fix a :exc:`SystemError` when :func:`sys.exit` is called with ``0xffffffff`` +on Windows. + +.. + +.. date: 2024-10-15-21-28-43 +.. gh-issue: 125550 +.. nonce: hmGWCP +.. section: Windows + +Enable the :ref:`launcher` to detect Python 3.14 installs from the Windows +Store. + +.. + +.. date: 2024-09-07-15-16-24 +.. gh-issue: 123803 +.. nonce: J9VNQU +.. section: Windows + +All Windows code pages are now supported as "cpXXX" codecs on Windows. + +.. + +.. date: 2024-11-13-22-23-36 +.. gh-issue: 126807 +.. nonce: vpaWuN +.. section: Tools/Demos + +Fix extraction warnings in :program:`pygettext.py` caused by mistaking +function definitions for function calls. + +.. + +.. date: 2024-10-30-13-59-07 +.. gh-issue: 126167 +.. nonce: j5cCWE +.. section: Tools/Demos + +The iOS testbed was modified so that it can be used by third-party projects +for testing purposes. + +.. + +.. date: 2024-11-17-16-56-48 +.. gh-issue: 126909 +.. nonce: 60VTxW +.. section: Tests + +Fix test_os extended attribute tests to work on filesystems with 1 KiB xattr +size limit. + +.. + +.. date: 2024-10-21-14-10-56 +.. gh-issue: 125730 +.. nonce: kcWbvI +.. section: Tests + +Change ``make test`` to not run GUI tests by default. Use ``make ci`` to run +tests with GUI tests instead. + +.. + +.. date: 2024-09-30-22-52-44 +.. gh-issue: 124295 +.. nonce: VZy5kx +.. section: Tests + +Add translation tests to the :mod:`argparse` module. + +.. + +.. date: 2024-11-13-11-09-12 +.. gh-issue: 126623 +.. nonce: TO7NnR +.. section: Security + +Upgrade libexpat to 2.6.4 + +.. + +.. date: 2024-11-18-15-33-25 +.. gh-issue: 85957 +.. nonce: 8gT3B- +.. section: Library + +Add missing MIME types for images with RFCs: emf, fits, g3fax, jp2, jpm, +jpx, t38, tiff-fx and wmf. Patch by Hugo van Kemenade. + +.. + +.. date: 2024-11-17-01-14-59 +.. gh-issue: 126920 +.. nonce: s8-f_L +.. section: Library + +Fix the ``prefix`` and ``exec_prefix`` keys from +:py:func:`sysconfig.get_config_vars` incorrectly having the same value as +:py:const:`sys.base_prefix` and :py:const:`sys.base_exec_prefix`, +respectively, inside virtual environments. They now accurately reflect +:py:const:`sys.prefix` and :py:const:`sys.exec_prefix`. + +.. + +.. date: 2024-11-14-22-25-49 +.. gh-issue: 67877 +.. nonce: G9hw0w +.. section: Library + +Fix memory leaks when :mod:`regular expression ` matching terminates +abruptly, either because of a signal or because memory allocation fails. + +.. + +.. date: 2024-11-14-13-16-20 +.. gh-issue: 125063 +.. nonce: kJ-WnH +.. section: Library + +:mod:`marshal` now supports :class:`slice` objects. The marshal format +version was increased to 5. + +.. + +.. date: 2024-11-13-22-25-57 +.. gh-issue: 126789 +.. nonce: lKzlc7 +.. section: Library + +Fixed the values of :py:func:`sysconfig.get_config_vars`, +:py:func:`sysconfig.get_paths`, and their siblings when the :py:mod:`site` +initialization happens after :py:mod:`sysconfig` has built a cache for +:py:func:`sysconfig.get_config_vars`. + +.. + +.. date: 2024-11-13-20-03-18 +.. gh-issue: 126188 +.. nonce: RJLKk- +.. section: Library + +Update bundled pip to 24.3.1 + +.. + +.. date: 2024-11-12-21-43-12 +.. gh-issue: 126766 +.. nonce: oi2KJ7 +.. 
section: Library + +Fix issue where :func:`urllib.request.url2pathname` failed to discard two +leading slashes introducing an empty authority section. + +.. + +.. date: 2024-11-11-14-52-21 +.. gh-issue: 126705 +.. nonce: 0W7jFW +.. section: Library + +Allow :class:`os.PathLike` to be a base for Protocols. + +.. + +.. date: 2024-11-11-13-24-22 +.. gh-issue: 126699 +.. nonce: ONGbMd +.. section: Library + +Allow :class:`collections.abc.AsyncIterator` to be a base for Protocols. + +.. + +.. date: 2024-11-11-13-00-21 +.. gh-issue: 126654 +.. nonce: 4gfP2y +.. section: Library + +Fix crash when non-dict was passed to several functions in ``_interpreters`` +module. + +.. + +.. date: 2024-11-10-18-14-51 +.. gh-issue: 104745 +.. nonce: zAa5Ke +.. section: Library + +Limit starting a patcher (from :func:`unittest.mock.patch` or +:func:`unittest.mock.patch.object`) more than once without stopping it. + +.. + +.. date: 2024-11-09-10-31-10 +.. gh-issue: 126595 +.. nonce: A-7MyC +.. section: Library + +Fix a crash when instantiating :class:`itertools.count` with an initial +count of :data:`sys.maxsize` on debug builds. Patch by Bénédikt Tran. + +.. + +.. date: 2024-11-08-17-05-10 +.. gh-issue: 120423 +.. nonce: 7rdLVV +.. section: Library + +Fix issue where :func:`urllib.request.pathname2url` mishandled Windows paths +with embedded forward slashes. + +.. + +.. date: 2024-11-08-11-06-14 +.. gh-issue: 126565 +.. nonce: dFFO22 +.. section: Library + +Improve performance of :meth:`zipfile.Path.open` for non-reading modes. + +.. + +.. date: 2024-11-07-22-41-47 +.. gh-issue: 126505 +.. nonce: iztYE1 +.. section: Library + +Fix bugs in compiling case-insensitive :mod:`regular expressions <re>` with +character classes containing non-BMP characters: an upper-case non-BMP +character was ignored and the ASCII flag was ignored when matching a +character range whose upper bound is beyond the BMP region. + +.. + +.. date: 2024-11-07-01-40-11 +.. gh-issue: 117378 +.. nonce: o9O5uM +.. section: Library + +Fixed the :mod:`multiprocessing` ``"forkserver"`` start method forkserver +process to correctly inherit the parent's :data:`sys.path` during the +importing of :func:`multiprocessing.set_forkserver_preload` modules in the +same manner as :data:`sys.path` is configured in workers before executing +work items. + +This bug caused some forkserver module preloading to silently fail to +preload. This manifested as a performance degradation in child processes when +the ``sys.path`` was required due to additional repeated work in every +worker. + +It could also have a side effect of ``""`` remaining in :data:`sys.path` +during forkserver preload imports instead of the absolute path from +:func:`os.getcwd` at multiprocessing import time used in the worker +``sys.path``. + +The ``sys.path`` differences between phases in the child process could +potentially have caused preload to import incorrect things from the wrong +location. We are unaware of that actually having happened in practice. + +.. + +.. date: 2024-11-06-23-40-28 +.. gh-issue: 125679 +.. nonce: Qq9xF5 +.. section: Library + +The :class:`multiprocessing.Lock` and :class:`multiprocessing.RLock` +``repr`` values no longer say "unknown" on macOS. + +.. + +.. date: 2024-11-06-18-30-50 +.. gh-issue: 126476 +.. nonce: F1wh3c +.. section: Library + +Raise :class:`calendar.IllegalMonthError` (now a subclass of +:class:`IndexError`) for :func:`calendar.month` when the input month is not +correct. + +.. + +.. date: 2024-11-06-13-41-38 +.. gh-issue: 126489 +.. nonce: toaf-0 +..
section: Library + +The Python implementation of :mod:`pickle` no longer calls +:meth:`pickle.Pickler.persistent_id` for the result of +:meth:`!persistent_id`. + +.. + +.. date: 2024-11-05-11-28-45 +.. gh-issue: 126451 +.. nonce: XJMtqz +.. section: Library + +Register the :class:`contextvars.Context` type to +:class:`collections.abc.Mapping`. + +.. + +.. date: 2024-11-05-09-54-49 +.. gh-issue: 126175 +.. nonce: spnjJr +.. section: Library + +Add ``msg``, ``doc``, ``pos``, ``lineno`` and ``colno`` attributes to +:exc:`tomllib.TOMLDecodeError`. Deprecate instantiating with free-form +arguments. + +.. + +.. date: 2024-11-04-22-53-09 +.. gh-issue: 89416 +.. nonce: YVQaas +.. section: Library + +Add :rfc:`9559` MIME types for Matroska audiovisual container formats. Patch +by Hugo van Kemenade. + +.. + +.. date: 2024-11-04-16-40-02 +.. gh-issue: 126417 +.. nonce: OWPqn0 +.. section: Library + +Register the :class:`!multiprocessing.managers.DictProxy` and +:class:`!multiprocessing.managers.ListProxy` types in +:mod:`multiprocessing.managers` to :class:`collections.abc.MutableMapping` +and :class:`collections.abc.MutableSequence`, respectively. + +.. + +.. date: 2024-11-04-13-16-18 +.. gh-issue: 126390 +.. nonce: Cxvqa5 +.. section: Library + +Add support for returning intermixed options and non-option arguments in +order in :func:`getopt.gnu_getopt`. + +.. + +.. date: 2024-11-03-23-25-07 +.. gh-issue: 126374 +.. nonce: Xu_THP +.. section: Library + +Add support for options with optional arguments in the :mod:`getopt` module. + +.. + +.. date: 2024-11-03-14-43-51 +.. gh-issue: 126363 +.. nonce: Xus7vU +.. section: Library + +Speed up pattern parsing in :meth:`pathlib.Path.glob` by skipping creation +of a :class:`pathlib.Path` object for the pattern. + +.. + +.. date: 2024-11-03-10-48-07 +.. gh-issue: 126353 +.. nonce: ChDzot +.. section: Library + +:func:`asyncio.get_event_loop` no longer implicitly creates an event +loop. It now raises a :exc:`RuntimeError` if no event loop is currently set. +Patch by Kumar Aditya. + +.. + +.. date: 2024-11-03-09-42-42 +.. gh-issue: 126313 +.. nonce: EFP6Dl +.. section: Library + +Fix an issue in :func:`curses.napms` when :func:`curses.initscr` has not yet +been called. Patch by Bénédikt Tran. + +.. + +.. date: 2024-11-02-19-20-44 +.. gh-issue: 126303 +.. nonce: yVvyWB +.. section: Library + +Fix pickling and copying of :class:`os.sched_param` objects. + +.. + +.. date: 2024-11-01-14-31-41 +.. gh-issue: 126138 +.. nonce: yTniOG +.. section: Library + +Fix a use-after-free crash on :class:`asyncio.Task` objects whose underlying +coroutine yields an object that implements an evil +:meth:`~object.__getattribute__`. Patch by Nico Posada. + +.. + +.. date: 2024-11-01-10-35-49 +.. gh-issue: 120057 +.. nonce: YWy81Q +.. section: Library + +Replace the ``os.environ.refresh()`` method with a new +:func:`os.reload_environ` function. Patch by Victor Stinner. + +.. + +.. date: 2024-10-31-14-06-28 +.. gh-issue: 126220 +.. nonce: uJAJCU +.. section: Library + +Fix crash in :class:`!cProfile.Profile` and :class:`!_lsprof.Profiler` when +their callbacks were called directly with 0 arguments. + +.. + +.. date: 2024-10-30-23-59-36 +.. gh-issue: 126212 +.. nonce: _9uYjT +.. section: Library + +Fix issue where :func:`urllib.request.pathname2url` and +:func:`~urllib.request.url2pathname` removed slashes from Windows DOS drive +paths and URLs. + +.. + +.. date: 2024-10-30-23-42-44 +.. gh-issue: 126223 +.. nonce: k2qooc +.. 
section: Library + +Raise a :exc:`UnicodeEncodeError` instead of a :exc:`SystemError` upon +calling :func:`!_interpreters.create` with an invalid Unicode character. + +.. + +.. date: 2024-10-30-20-45-17 +.. gh-issue: 126205 +.. nonce: CHEmtx +.. section: Library + +Fix issue where :func:`urllib.request.pathname2url` generated URLs beginning +with four slashes (rather than two) when given a Windows UNC path. + +.. + +.. date: 2024-10-30-00-12-22 +.. gh-issue: 126156 +.. nonce: BOSqv0 +.. section: Library + +Improved the performance of creating :py:class:`~http.cookies.Morsel` objects +by a factor of 3.8. + +.. + +.. date: 2024-10-29-11-45-44 +.. gh-issue: 126105 +.. nonce: cOL-R6 +.. section: Library + +Fix a crash in :mod:`ast` when the :attr:`ast.AST._fields` attribute is +deleted. + +.. + +.. date: 2024-10-29-10-58-52 +.. gh-issue: 126106 +.. nonce: rlF798 +.. section: Library + +Fixes a possible ``NULL`` pointer dereference in :mod:`ssl`. + +.. + +.. date: 2024-10-29-10-38-28 +.. gh-issue: 126080 +.. nonce: qKRBuo +.. section: Library + +Fix a use-after-free crash on :class:`asyncio.Task` objects for which the +underlying event loop implements an evil :meth:`~object.__getattribute__`. +Reported by Nico-Posada. Patch by Bénédikt Tran. + +.. + +.. date: 2024-10-29-07-24-52 +.. gh-issue: 125322 +.. nonce: sstOM- +.. section: Library + +Correct detection of complex number support in libffi. + +.. + +.. date: 2024-10-28-22-35-22 +.. gh-issue: 126083 +.. nonce: TuI--n +.. section: Library + +Fixed a reference leak in :class:`asyncio.Task` objects when reinitializing +the same object with a non-``None`` context. Patch by Nico Posada. + +.. + +.. date: 2024-10-28-11-33-59 +.. gh-issue: 126068 +.. nonce: Pdznm_ +.. section: Library + +Fix exceptions in the :mod:`argparse` module so that only error messages for +ArgumentError and ArgumentTypeError are translated. ArgumentError is now +only used for command line errors, not for logical errors in the program. +TypeError is now raised instead of ValueError for some logical errors. + +.. + +.. date: 2024-10-28-01-24-52 +.. gh-issue: 125413 +.. nonce: Jat5kq +.. section: Library + +Add :meth:`pathlib.Path.scandir` method to efficiently fetch directory +children and their file attributes. This is a trivial wrapper of +:func:`os.scandir`. + +.. + +.. date: 2024-10-26-12-50-48 +.. gh-issue: 125984 +.. nonce: d4vp5_ +.. section: Library + +Fix use-after-free crashes on :class:`asyncio.Future` objects for which the +underlying event loop implements an evil :meth:`~object.__getattribute__`. +Reported by Nico-Posada. Patch by Bénédikt Tran. + +.. + +.. date: 2024-10-25-20-52-15 +.. gh-issue: 125926 +.. nonce: pp8rtZ +.. section: Library + +Fix :func:`urllib.parse.urljoin` for a base URI with an undefined authority. +Although :rfc:`3986` only specifies reference resolution for an absolute base +URI, :func:`!urljoin` should continue to return a sensible result for a +relative base URI. + +.. + +.. date: 2024-10-25-11-13-24 +.. gh-issue: 125969 +.. nonce: YvbrTr +.. section: Library + +Fix an out-of-bounds crash when an evil :meth:`asyncio.loop.call_soon` +mutates the length of the internal callbacks list. Patch by Bénédikt Tran. + +.. + +.. date: 2024-10-25-10-53-56 +.. gh-issue: 125966 +.. nonce: eOCYU_ +.. section: Library + +Fix a use-after-free crash in :meth:`asyncio.Future.remove_done_callback`. +Patch by Bénédikt Tran. + +.. + +.. date: 2024-10-24-14-08-10 +.. gh-issue: 125789 +.. nonce: eaiAMw +.. 
section: Library + +Fix a possible crash when mutating the list of callbacks returned by +:attr:`!asyncio.Future._callbacks`. It now always returns a new copy in the C +implementation :mod:`!_asyncio`. Patch by Kumar Aditya. + +.. + +.. date: 2024-10-24-13-40-20 +.. gh-issue: 126916 +.. nonce: MAgz6D +.. section: Library + +Allow the *initial* parameter of :func:`functools.reduce` to be passed as a +keyword argument. Patch by Sayandip Dutta. + +.. + +.. date: 2024-10-24-10-49-47 +.. gh-issue: 124452 +.. nonce: eqTRgx +.. section: Library + +Fix an issue in :meth:`email.policy.EmailPolicy.header_source_parse` and +:meth:`email.policy.Compat32.header_source_parse` that introduced spurious +leading whitespace into header values when the header includes a newline +character after the header name delimiter (``:``) and before the value. + +.. + +.. date: 2024-10-23-20-44-30 +.. gh-issue: 117941 +.. nonce: Y9jdlW +.. section: Library + +:class:`!argparse.BooleanOptionalAction` now rejects option names starting +with ``--no-``. + +.. + +.. date: 2024-10-23-17-45-40 +.. gh-issue: 125884 +.. nonce: 41E_PD +.. section: Library + +Fixed a bug in :mod:`pdb` where it could not set breakpoints on functions +with certain annotations. + +.. + +.. date: 2024-10-22-13-28-00 +.. gh-issue: 125355 +.. nonce: zssHm_ +.. section: Library + +Fix several bugs in :meth:`argparse.ArgumentParser.parse_intermixed_args`. + +* The parser no longer changes temporarily during parsing. +* Default values are not processed twice. +* Required mutually exclusive groups containing positional arguments are now supported. +* The missing arguments report now includes the names of all required optional and positional arguments. +* Unknown options can be intermixed with positional arguments in parse_known_intermixed_args(). + +.. + +.. date: 2024-10-21-13-52-37 +.. gh-issue: 125767 +.. nonce: 0kK4lX +.. section: Library + +:class:`super` objects are now :mod:`pickleable <pickle>` and :mod:`copyable +<copy>`. + +.. + +.. date: 2024-10-21-12-06-55 +.. gh-issue: 124969 +.. nonce: xiY8UP +.. section: Library + +``locale.nl_langinfo(locale.ALT_DIGITS)`` now returns a string again. The +returned value consists of up to 100 semicolon-separated symbols. + +.. + +.. date: 2024-10-20-00-56-44 +.. gh-issue: 84850 +.. nonce: p5TeUB +.. section: Library + +Remove :class:`!URLopener` and :class:`!FancyURLopener` classes from +:mod:`urllib.request`. They had previously raised :exc:`DeprecationWarning` +since Python 3.3. + +.. + +.. date: 2024-10-19-16-06-52 +.. gh-issue: 125666 +.. nonce: jGfdCP +.. section: Library + +Avoid exiting the interpreter if a null byte is given as input in the +new REPL. + +.. + +.. date: 2024-10-19-13-37-37 +.. gh-issue: 125710 +.. nonce: FyFAAr +.. section: Library + +[Enum] Fix hashable<->nonhashable comparisons for member values. + +.. + +.. date: 2024-10-19-11-06-06 +.. gh-issue: 125631 +.. nonce: BlhVvR +.. section: Library + +Restore ability to set :attr:`~pickle.Pickler.persistent_id` and +:attr:`~pickle.Unpickler.persistent_load` attributes of instances of the +:class:`!Pickler` and :class:`!Unpickler` classes in the :mod:`pickle` +module. + +.. + +.. date: 2024-10-19-01-30-40 +.. gh-issue: 125378 +.. nonce: WTosxX +.. section: Library + +Fixed a bug in :mod:`pdb` where, after a multi-line command, an empty line +repeated the first line of the multi-line command instead of the full +command. + +.. + +.. date: 2024-10-18-09-51-29 +.. gh-issue: 125682 +.. nonce: vsj4cU +.. 
section: Library + +Reject non-ASCII digits in the Python implementation of :func:`json.loads`, +conforming to the JSON specification. + +.. + +.. date: 2024-10-18-08-58-10 +.. gh-issue: 125660 +.. nonce: sDdDqO +.. section: Library + +Reject invalid Unicode escapes in the Python implementation of +:func:`json.loads`. + +.. + +.. date: 2024-10-17-20-36-06 +.. gh-issue: 52551 +.. nonce: EIVNYY +.. section: Library + +Use :c:func:`!wcsftime` to implement :func:`time.strftime` on Windows. + +.. + +.. date: 2024-10-17-16-10-29 +.. gh-issue: 125259 +.. nonce: oMew0c +.. section: Library + +Fix the notes removal logic for errors thrown in enum initialization. + +.. + +.. date: 2024-10-17-04-52-00 +.. gh-issue: 125633 +.. nonce: lMck06 +.. section: Library + +Add function :func:`inspect.ispackage` to determine whether an object is a +:term:`package` or not. + +.. + +.. date: 2024-10-16-22-45-50 +.. gh-issue: 125614 +.. nonce: 3OEo_Q +.. section: Library + +In the :data:`~annotationlib.Format.FORWARDREF` format of +:mod:`annotationlib`, fix a bug where nested expressions were not returned as +:class:`annotationlib.ForwardRef` objects. + +.. + +.. date: 2024-10-16-20-32-40 +.. gh-issue: 125590 +.. nonce: stHzOP +.. section: Library + +Allow ``FrameLocalsProxy`` to delete and pop keys that are not fast +variables. + +.. + +.. date: 2024-10-16-15-55-50 +.. gh-issue: 125600 +.. nonce: yMsJx0 +.. section: Library + +Only show the stale code warning in :mod:`pdb` when source code is displayed. + +.. + +.. date: 2024-10-16-04-50-53 +.. gh-issue: 125542 +.. nonce: vZJ-Ns +.. section: Library + +Deprecate passing the keyword-only *prefix_chars* argument to +:meth:`argparse.ArgumentParser.add_argument_group`. + +.. + +.. date: 2024-10-15-16-50-03 +.. gh-issue: 125541 +.. nonce: FfhmWo +.. section: Library + +Pressing :kbd:`Ctrl-C` while blocked in :meth:`threading.Lock.acquire`, +:meth:`threading.RLock.acquire`, and :meth:`threading.Thread.join` now +interrupts the function call and raises a :exc:`KeyboardInterrupt` exception +on Windows, similar to how those functions behave on macOS and Linux. + +.. + +.. date: 2024-10-15-14-01-03 +.. gh-issue: 125519 +.. nonce: TqGh6a +.. section: Library + +Improve traceback if :func:`importlib.reload` is called with an object that +is not a module. Patch by Alex Waygood. + +.. + +.. date: 2024-10-14-17-29-34 +.. gh-issue: 125451 +.. nonce: fmP3T9 +.. section: Library + +Fix deadlock when :class:`concurrent.futures.ProcessPoolExecutor` shuts down +concurrently with an error when feeding a job to a worker process. + +.. + +.. date: 2024-10-14-02-07-44 +.. gh-issue: 125115 +.. nonce: IOf3ON +.. section: Library + +Fixed a bug in :mod:`pdb` where arguments starting with ``-`` could not be +passed to the debugged script. + +.. + +.. date: 2024-10-13-15-04-58 +.. gh-issue: 125398 +.. nonce: UW7Ndv +.. section: Library + +Fix the conversion of the :envvar:`!VIRTUAL_ENV` path in the activate script +in :mod:`venv` when running in Git Bash for Windows. + +.. + +.. date: 2024-10-11-00-40-13 +.. gh-issue: 125245 +.. nonce: 8vReM- +.. section: Library + +Fix race condition when importing :mod:`collections.abc`, which could +incorrectly return an empty module. + +.. + +.. date: 2024-10-09-17-07-33 +.. gh-issue: 52551 +.. nonce: PBakSY +.. section: Library + +Fix encoding issues in :func:`time.strftime`, the +:meth:`~datetime.datetime.strftime` method of the :mod:`datetime` classes +:class:`~datetime.datetime`, :class:`~datetime.date` and +:class:`~datetime.time` and formatting of these classes. 
Characters not +encodable in the current locale are now acceptable in the format string. +Surrogate pairs and sequences of surrogateescape-encoded bytes are no longer +recombined. An embedded null character no longer terminates the format +string. + +.. + +.. date: 2024-10-04-22-43-48 +.. gh-issue: 124984 +.. nonce: xjMv9b +.. section: Library + +Fixed thread safety in :mod:`ssl` in the free-threaded build. OpenSSL +operations are now protected by a per-object lock. + +.. + +.. date: 2024-09-28-02-03-04 +.. gh-issue: 124651 +.. nonce: bLBGtH +.. section: Library + +Properly quote template strings in :mod:`venv` activation scripts. + +.. + +.. date: 2024-09-27-15-42-55 +.. gh-issue: 124694 +.. nonce: uUy32y +.. section: Library + +We've added :class:`concurrent.futures.InterpreterPoolExecutor`, which +allows you to run code in multiple isolated interpreters. This allows you +to circumvent the limitations of CPU-bound threads (due to the GIL). Patch +by Eric Snow. + +This addition is unrelated to :pep:`734`. + +.. + +.. date: 2024-09-27-13-10-17 +.. gh-issue: 58032 +.. nonce: 0aNAQ0 +.. section: Library + +Deprecate the :class:`argparse.FileType` type converter. + +.. + +.. date: 2024-09-24-18-49-16 +.. gh-issue: 99749 +.. nonce: gBDJX7 +.. section: Library + +Adds a feature to optionally enable suggestions for argument choices and +subparser names if mistyped by the user. + +.. + +.. date: 2024-09-24-18-16-59 +.. gh-issue: 58956 +.. nonce: 0wFrBR +.. section: Library + +Fixed a bug in :mod:`pdb` where sometimes the breakpoint would not trigger if it +was set on a function which is already in the call stack. + +.. + +.. date: 2024-09-17-10-38-26 +.. gh-issue: 124111 +.. nonce: Hd53VN +.. section: Library + +The tkinter module can now be built to use either the new version 9.0.0 of +Tcl/Tk or the latest release 8.6.15 of Tcl/Tk 8. Tcl/Tk 9 includes many +improvements, both to the Tcl language and to the appearance and utility of +the graphical user interface provided by Tk. + +.. + +.. date: 2024-09-07-13-57-49 +.. gh-issue: 80958 +.. nonce: fVYnqV +.. section: Library + +unittest discovery supports PEP 420 namespace packages as the start directory +again. + +.. + +.. date: 2024-08-28-19-27-35 +.. gh-issue: 123370 +.. nonce: SPZ9Ux +.. section: Library + +Fix the canvas not clearing after running the turtledemo clock. + +.. + +.. date: 2024-08-22-12-12-35 +.. gh-issue: 89083 +.. nonce: b6zFh0 +.. section: Library + +Add :func:`uuid.uuid8` for generating UUIDv8 objects as specified in +:rfc:`9562`. Patch by Bénédikt Tran. + +.. + +.. date: 2024-08-01-11-15-55 +.. gh-issue: 122549 +.. nonce: ztV4Kz +.. section: Library + +Add :func:`platform.invalidate_caches` to invalidate cached results. + +.. + +.. date: 2024-07-23-02-24-50 +.. gh-issue: 120754 +.. nonce: nHb5mG +.. section: Library + +Update unbounded ``read`` calls in :mod:`zipfile` to specify an explicit +``size`` putting a limit on how much data they may read. This also updates +handling around ZIP max comment size to match the standard instead of +reading comments that are one byte too long. + +.. + +.. date: 2024-07-02-15-56-42 +.. gh-issue: 121267 +.. nonce: yFBWkh +.. section: Library + +Improve the performance of :mod:`tarfile` when writing files, by caching +user names and group names. + +.. + +.. date: 2024-06-06-04-06-05 +.. gh-issue: 70764 +.. nonce: 6511hw +.. 
section: Library + +Fixed an issue where :func:`inspect.getclosurevars` would incorrectly +classify an attribute name as a global variable when the name exists both as +an attribute name and a global variable. + +.. + +.. date: 2024-06-05-19-09-36 +.. gh-issue: 118289 +.. nonce: moL9_d +.. section: Library + +:func:`!posixpath.realpath` now raises :exc:`NotADirectoryError` when +*strict* mode is enabled and a non-directory path with a trailing slash is +supplied. + +.. + +.. date: 2024-06-02-11-48-19 +.. gh-issue: 119826 +.. nonce: N1obGa +.. section: Library + +Always return an absolute path for :func:`os.path.abspath` on Windows. + +.. + +.. date: 2024-05-28-14-35-23 +.. gh-issue: 97850 +.. nonce: dCtjel +.. section: Library + +Remove deprecated :func:`!pkgutil.get_loader` and +:func:`!pkgutil.find_loader`. + +.. + +.. date: 2024-05-13-10-09-41 +.. gh-issue: 118986 +.. nonce: -r4W9h +.. section: Library + +Add :data:`!socket.IPV6_RECVERR` constant (available since Linux 2.2). + +.. + +.. date: 2024-03-16-13-38-27 +.. gh-issue: 116897 +.. nonce: UDQTjp +.. section: Library + +Accepting objects with false values (like ``0`` and ``[]``) except empty +strings, bytes-like objects and ``None`` in :mod:`urllib.parse` functions +:func:`~urllib.parse.parse_qsl` and :func:`~urllib.parse.parse_qs` is now +deprecated. + +.. + +.. date: 2023-10-26-16-36-22 +.. gh-issue: 101955 +.. nonce: Ixu3IF +.. section: Library + +Fix a SystemError when matching a regular expression pattern containing some +combination of possessive quantifier, alternative and capture group. + +.. + +.. date: 2022-10-15-10-18-20 +.. gh-issue: 71936 +.. nonce: MzJjc_ +.. section: Library + +Fix a race condition in :class:`multiprocessing.pool.Pool`. + +.. + +.. bpo: 46128 +.. date: 2021-12-19-10-47-24 +.. nonce: Qv3EK1 +.. section: Library + +Strip :class:`unittest.IsolatedAsyncioTestCase` stack frames from reported +stacktraces. + +.. + +.. date: 2020-05-19-01-12-47 +.. gh-issue: 84852 +.. nonce: FEjHJW +.. section: Library + +Add MIME types for MS Embedded OpenType, OpenType Layout, TrueType, WOFF 1.0 +and 2.0 fonts. Patch by Sahil Prajapati and Hugo van Kemenade. + +.. + +.. date: 2024-11-09-19-43-10 +.. gh-issue: 126622 +.. nonce: YacfDc +.. section: Documentation + +Added stub pages for removed modules explaining their removal, where to find +replacements, and linking to the last Python version that supported them. +Contributed by Ned Batchelder. + +.. + +.. date: 2024-10-10-23-46-54 +.. gh-issue: 125277 +.. nonce: QAby09 +.. section: Documentation + +Require Sphinx 7.2.6 or later to build the Python documentation. Patch by +Adam Turner. + +.. + +.. date: 2023-03-28-22-24-45 +.. gh-issue: 60712 +.. nonce: So5uad +.. section: Documentation + +Include the :class:`object` type in the lists of documented types. Change by +Furkan Onder and Martin Panter. + +.. + +.. date: 2024-11-13-17-18-13 +.. gh-issue: 126795 +.. nonce: _JBX9e +.. section: Core and Builtins + +Increase the threshold for JIT code warmup. Depending on platform and +workload, this can result in performance gains of 1-9% and memory savings of +3-5%. + +.. + +.. date: 2024-11-12-19-24-00 +.. gh-issue: 126341 +.. nonce: 5SdAe1 +.. section: Core and Builtins + +Now :exc:`ValueError` is raised instead of :exc:`SystemError` when trying to +iterate over a released :class:`memoryview` object. + +.. + +.. date: 2024-11-11-17-02-48 +.. gh-issue: 126688 +.. nonce: QiOXUi +.. 
section: Core and Builtins + +Fix a crash when calling :func:`os.fork` on some operating systems, +including SerenityOS. + +.. + +.. date: 2024-11-09-16-10-22 +.. gh-issue: 126066 +.. nonce: 9zs4m4 +.. section: Core and Builtins + +Fix :mod:`importlib` to not write an incomplete .pyc file when a ulimit or +some other operating system mechanism prevents the write from completing +fully. + +.. + +.. date: 2024-11-06-16-34-11 +.. gh-issue: 126222 +.. nonce: 9NBfTn +.. section: Core and Builtins + +Do not include count of "peek" items in ``_PyUop_num_popped``. This ensures +that the correct number of items are popped from the stack when a micro-op +exits with an error. + +.. + +.. date: 2024-11-03-15-15-36 +.. gh-issue: 126366 +.. nonce: 8BBdGU +.. section: Core and Builtins + +Fix crash when using ``yield from`` on an object that raises an exception in +its ``__iter__``. + +.. + +.. date: 2024-11-02-18-01-31 +.. gh-issue: 126209 +.. nonce: 2ZIhrS +.. section: Core and Builtins + +Fix an issue with the ``skip_file_prefixes`` parameter which resulted in +inconsistent behaviour between the C and Python implementations of +:func:`warnings.warn`. Patch by Daehee Kim. + +.. + +.. date: 2024-11-02-14-43-46 +.. gh-issue: 126312 +.. nonce: LMHzLT +.. section: Core and Builtins + +Fix crash during garbage collection on an object frozen by :func:`gc.freeze` +on the free-threaded build. + +.. + +.. date: 2024-11-01-09-58-06 +.. gh-issue: 103951 +.. nonce: 6qduwj +.. section: Core and Builtins + +Relax optimization requirements to allow fast attribute access to module +subclasses. + +.. + +.. date: 2024-10-31-21-49-00 +.. gh-issue: 126072 +.. nonce: o9k8Ns +.. section: Core and Builtins + +Following :gh:`126101`, for :ref:`codeobjects` like lambda, annotation and +type alias, we no longer add ``None`` to their :attr:`~codeobject.co_consts`. + +.. + +.. date: 2024-10-30-18-16-10 +.. gh-issue: 126195 +.. nonce: 6ezBpr +.. section: Core and Builtins + +Improve JIT performance by 1.4% on macOS Apple Silicon by using +platform-specific memory protection APIs. Patch by Diego Russo. + +.. + +.. date: 2024-10-29-15-17-31 +.. gh-issue: 126139 +.. nonce: B4OQ8a +.. section: Core and Builtins + +Provide better error location when attempting to use a :term:`future +statement <__future__>` with an unknown future feature. + +.. + +.. date: 2024-10-29-10-37-39 +.. gh-issue: 126072 +.. nonce: XLKlxv +.. section: Core and Builtins + +Add a new attribute in :attr:`~codeobject.co_flags` to indicate whether the +first item in :attr:`~codeobject.co_consts` is the docstring. If a code +object has no docstring, ``None`` will **NOT** be inserted. + +.. + +.. date: 2024-10-28-13-18-16 +.. gh-issue: 126076 +.. nonce: MebZuS +.. section: Core and Builtins + +Relocated objects such as ``tuple``, ``bytes`` and ``str`` instances are +properly tracked by :mod:`tracemalloc` and its associated hooks. Patch by +Pablo Galindo. + +.. + +.. date: 2024-10-27-20-31-43 +.. gh-issue: 90370 +.. nonce: IP_W3a +.. section: Core and Builtins + +Avoid temporary tuple creation for vararg in argument passing with Argument +Clinic generated code (if arguments are either vararg or positional-only). + +.. + +.. date: 2024-10-26-23-50-03 +.. gh-issue: 126018 +.. nonce: Hq-qcM +.. section: Core and Builtins + +Fix a crash in :func:`sys.audit` when passing a non-string as first argument +and Python was compiled in debug mode. + +.. + +.. date: 2024-10-26-13-32-48 +.. gh-issue: 126012 +.. nonce: 2KalhG +.. 
section: Core and Builtins + +The :class:`memoryview` type now supports subscription, making it a +:term:`generic type`. + +.. + +.. date: 2024-10-25-15-56-14 +.. gh-issue: 125837 +.. nonce: KlCdgD +.. section: Core and Builtins + +Adds :opcode:`LOAD_SMALL_INT` and :opcode:`LOAD_CONST_IMMORTAL` +instructions. ``LOAD_SMALL_INT`` pushes a small integer equal to the +``oparg`` to the stack. ``LOAD_CONST_IMMORTAL`` does the same as +``LOAD_CONST`` but is more efficient for immortal objects. Removes +``RETURN_CONST`` instruction. + +.. + +.. date: 2024-10-24-22-43-03 +.. gh-issue: 125942 +.. nonce: 3UQht1 +.. section: Core and Builtins + +On Android, the ``errors`` setting of :any:`sys.stdout` was changed from +``surrogateescape`` to ``backslashreplace``. + +.. + +.. date: 2024-10-23-14-42-27 +.. gh-issue: 125859 +.. nonce: m3EF9E +.. section: Core and Builtins + +Fix a crash in the free threading build when :func:`gc.get_objects` or +:func:`gc.get_referrers` is called during an in-progress garbage collection. + +.. + +.. date: 2024-10-23-14-05-47 +.. gh-issue: 125868 +.. nonce: uLfXYB +.. section: Core and Builtins + +It was possible in 3.14.0a1 only for attribute lookup to give the wrong +value. This was due to an incorrect specialization in very specific +circumstances. This is fixed in 3.14.0a2. + +.. + +.. date: 2024-10-22-04-18-53 +.. gh-issue: 125498 +.. nonce: cFjPIn +.. section: Core and Builtins + +The JIT has been updated to leverage Clang 19’s new ``preserve_none`` +attribute, which supports more platforms and is more useful than LLVM's +existing ``ghccc`` calling convention. This also removes the need to +manually patch the calling convention in LLVM IR, simplifying the JIT +compilation process. + +.. + +.. date: 2024-10-18-16-00-10 +.. gh-issue: 125703 +.. nonce: QRoqMo +.. section: Core and Builtins + +Correctly honour :mod:`tracemalloc` hooks in specialized ``Py_DECREF`` +paths. Patch by Pablo Galindo + +.. + +.. date: 2024-10-18-10-11-43 +.. gh-issue: 125593 +.. nonce: Q97m3A +.. section: Core and Builtins + +Use color to highlight error locations in traceback from exception group + +.. + +.. date: 2024-10-16-23-06-06 +.. gh-issue: 125017 +.. nonce: fcltj0 +.. section: Core and Builtins + +Fix crash on certain accesses to the ``__annotations__`` of +:class:`staticmethod` and :class:`classmethod` objects. + +.. + +.. date: 2024-10-16-13-52-48 +.. gh-issue: 125588 +.. nonce: kCahyO +.. section: Core and Builtins + +The Python PEG generator can now use f-strings in the grammar actions. Patch +by Pablo Galindo + +.. + +.. date: 2024-10-16-12-12-39 +.. gh-issue: 125444 +.. nonce: 9tG2X6 +.. section: Core and Builtins + +Fix illegal instruction for older Arm architectures. Patch by Diego Russo, +testing by Ross Burton. + +.. + +.. date: 2024-10-14-17-13-12 +.. gh-issue: 118423 +.. nonce: SkBoda +.. section: Core and Builtins + +Add a new ``INSTRUCTION_SIZE`` macro to the cases generator which returns +the current instruction size. + +.. + +.. date: 2024-10-09-13-53-50 +.. gh-issue: 125038 +.. nonce: ffSLCz +.. section: Core and Builtins + +Fix crash when iterating over a generator expression after direct changes on +``gi_frame.f_locals``. Patch by Mikhail Efimov. + +.. + +.. date: 2024-10-01-17-31-32 +.. gh-issue: 124855 +.. nonce: sdsv_H +.. section: Core and Builtins + +Don't allow the JIT and perf support to be active at the same time. Patch by +Pablo Galindo + +.. + +.. date: 2024-09-14-20-09-39 +.. gh-issue: 123714 +.. nonce: o1mbe4 +.. 
section: Core and Builtins + +Update JIT compilation to use LLVM 19 + +.. + +.. date: 2024-09-11-01-32-07 +.. gh-issue: 123930 +.. nonce: BkPfB6 +.. section: Core and Builtins + +Improve the error message when a script shadowing a module from the standard +library causes :exc:`ImportError` to be raised during a "from" import. +Similarly, improve the error message when a script shadowing a third party +module attempts to "from" import an attribute from that third party module +while still initialising. + +.. + +.. date: 2024-06-13-19-12-49 +.. gh-issue: 119793 +.. nonce: FDVCDk +.. section: Core and Builtins + +The :func:`map` built-in now has an optional keyword-only *strict* flag like +:func:`zip` to check that all the iterables are of equal length. Patch by +Wannes Boeykens. + +.. + +.. date: 2024-05-12-03-10-36 +.. gh-issue: 118950 +.. nonce: 5Wc4vp +.. section: Core and Builtins + +Fix bug where SSLProtocol.connection_lost wasn't getting called when OSError +was thrown on writing to socket. + +.. + +.. date: 2023-12-30-00-21-45 +.. gh-issue: 113570 +.. nonce: _XQgsW +.. section: Core and Builtins + +Fixed a bug in ``reprlib.repr`` where it incorrectly called the repr method +on shadowed Python built-in types. + +.. + +.. date: 2024-11-07-20-24-58 +.. gh-issue: 126554 +.. nonce: ri12eb +.. section: C API + +Fix error handling in :class:`ctypes.CDLL` objects which could result in a +crash in rare situations. + +.. + +.. date: 2024-10-28-15-56-03 +.. gh-issue: 126061 +.. nonce: Py51_1 +.. section: C API + +Add :c:func:`PyLong_IsPositive`, :c:func:`PyLong_IsNegative` and +:c:func:`PyLong_IsZero` for checking if a :c:type:`PyLongObject` is +positive, negative, or zero, respectively. + +.. + +.. date: 2024-10-16-19-28-23 +.. gh-issue: 125608 +.. nonce: gTsU2g +.. section: C API + +Fix a bug where dictionary watchers (e.g., :c:func:`PyDict_Watch`) on an +object's attribute dictionary (:attr:`~object.__dict__`) were not triggered +when the object's attributes were modified. + +.. + +.. date: 2024-09-03-13-33-33 +.. gh-issue: 123619 +.. nonce: HhgUUI +.. section: C API + +Added the :c:func:`PyUnstable_Object_EnableDeferredRefcount` function for +enabling :pep:`703` deferred reference counting. + +.. + +.. date: 2024-07-30-14-40-08 +.. gh-issue: 121654 +.. nonce: tgGeAl +.. section: C API + +Add :c:func:`PyType_Freeze` function to make a type immutable. Patch by +Victor Stinner. + +.. + +.. date: 2024-06-04-13-38-44 +.. gh-issue: 120026 +.. nonce: uhEvJ9 +.. section: C API + +The :c:macro:`!Py_HUGE_VAL` macro is :term:`soft deprecated`. + +.. + +.. date: 2024-11-13-15-47-09 +.. gh-issue: 126691 +.. nonce: ni4K-b +.. section: Build + +Removed the ``--with-emscripten-target`` configure flag. We unified the +``node`` and ``browser`` options and the same build can now be used, +independent of target runtime. + +.. + +.. date: 2024-11-07-11-09-31 +.. gh-issue: 123877 +.. nonce: CVdd0b +.. section: Build + +Use ``wasm32-wasip1`` as the target triple for WASI instead of +``wasm32-wasi``. The latter will eventually be reclaimed for WASI 1.0 while +CPython currently only supports WASI preview1. + +.. + +.. date: 2024-11-06-11-12-04 +.. gh-issue: 126458 +.. nonce: 7vzHtx +.. section: Build + +Disable SIMD support for HACL under WASI. + +.. + +.. date: 2024-11-04-09-42-04 +.. gh-issue: 89640 +.. nonce: QBv05o +.. section: Build + +Hard-code float word ordering as little endian on WASM. + +.. + +.. date: 2024-10-31-15-37-05 +.. gh-issue: 126206 +.. nonce: oC6z2i +.. 
section: Build + +``make clinic`` now runs Argument Clinic using the ``--force`` option, thus +forcefully regenerating generated code. + +.. + +.. date: 2024-10-30-17-47-15 +.. gh-issue: 126187 +.. nonce: 0jFCZB +.. section: Build + +Introduced ``Tools/wasm/emscripten.py`` to simplify doing Emscripten builds. + +.. + +.. date: 2024-10-25-17-20-50 +.. gh-issue: 124932 +.. nonce: F-aNuS +.. section: Build + +For cross builds, there is now support for having a different install +``prefix`` than the ``host_prefix`` used by ``getpath.py``. This is set to +``/`` by default for Emscripten, on other platforms the default behavior is +the same as before. + +.. + +.. date: 2024-10-25-00-29-15 +.. gh-issue: 125946 +.. nonce: KPA3g0 +.. section: Build + +The minimum supported Android version is now 7.0 (API level 24). + +.. + +.. date: 2024-10-24-22-14-35 +.. gh-issue: 125940 +.. nonce: 2wMtTA +.. section: Build + +The Android build now supports `16 KB page sizes +`__. + +.. + +.. date: 2024-10-16-09-37-51 +.. gh-issue: 89640 +.. nonce: UDsW-j +.. section: Build + +Improve detection of float word ordering on Linux when link-time +optimizations are enabled. + +.. + +.. date: 2024-10-04-17-29-23 +.. gh-issue: 124928 +.. nonce: FsGffe +.. section: Build + +Emscripten builds now require node >= 18. + +.. + +.. date: 2024-03-03-20-28-23 +.. gh-issue: 115382 +.. nonce: 97hJFE +.. section: Build + +Fix cross compile failures when the host and target SOABIs match. diff --git a/Misc/NEWS.d/next/Build/2024-03-03-20-28-23.gh-issue-115382.97hJFE.rst b/Misc/NEWS.d/next/Build/2024-03-03-20-28-23.gh-issue-115382.97hJFE.rst deleted file mode 100644 index f8d19651fc5854..00000000000000 --- a/Misc/NEWS.d/next/Build/2024-03-03-20-28-23.gh-issue-115382.97hJFE.rst +++ /dev/null @@ -1 +0,0 @@ -Fix cross compile failures when the host and target SOABIs match. diff --git a/Misc/NEWS.d/next/Build/2024-10-04-17-29-23.gh-issue-124928.FsGffe.rst b/Misc/NEWS.d/next/Build/2024-10-04-17-29-23.gh-issue-124928.FsGffe.rst deleted file mode 100644 index 291bf336ef225e..00000000000000 --- a/Misc/NEWS.d/next/Build/2024-10-04-17-29-23.gh-issue-124928.FsGffe.rst +++ /dev/null @@ -1 +0,0 @@ -Emscripten builds now require node >= 18. diff --git a/Misc/NEWS.d/next/Build/2024-10-16-09-37-51.gh-issue-89640.UDsW-j.rst b/Misc/NEWS.d/next/Build/2024-10-16-09-37-51.gh-issue-89640.UDsW-j.rst deleted file mode 100644 index 5aba2c789b6842..00000000000000 --- a/Misc/NEWS.d/next/Build/2024-10-16-09-37-51.gh-issue-89640.UDsW-j.rst +++ /dev/null @@ -1,2 +0,0 @@ -Improve detection of float word ordering on Linux when link-time optimizations -are enabled. diff --git a/Misc/NEWS.d/next/Build/2024-10-24-22-14-35.gh-issue-125940.2wMtTA.rst b/Misc/NEWS.d/next/Build/2024-10-24-22-14-35.gh-issue-125940.2wMtTA.rst deleted file mode 100644 index 2b4c1c95db8806..00000000000000 --- a/Misc/NEWS.d/next/Build/2024-10-24-22-14-35.gh-issue-125940.2wMtTA.rst +++ /dev/null @@ -1,2 +0,0 @@ -The Android build now supports `16 KB page sizes -`__. diff --git a/Misc/NEWS.d/next/Build/2024-10-25-00-29-15.gh-issue-125946.KPA3g0.rst b/Misc/NEWS.d/next/Build/2024-10-25-00-29-15.gh-issue-125946.KPA3g0.rst deleted file mode 100644 index ecab57c8111d45..00000000000000 --- a/Misc/NEWS.d/next/Build/2024-10-25-00-29-15.gh-issue-125946.KPA3g0.rst +++ /dev/null @@ -1 +0,0 @@ -The minimum supported Android version is now 7.0 (API level 24). 
diff --git a/Misc/NEWS.d/next/Build/2024-10-25-17-20-50.gh-issue-124932.F-aNuS.rst b/Misc/NEWS.d/next/Build/2024-10-25-17-20-50.gh-issue-124932.F-aNuS.rst deleted file mode 100644 index 10c4171dc14a64..00000000000000 --- a/Misc/NEWS.d/next/Build/2024-10-25-17-20-50.gh-issue-124932.F-aNuS.rst +++ /dev/null @@ -1,4 +0,0 @@ -For cross builds, there is now support for having a different install -``prefix`` than the ``host_prefix`` used by ``getpath.py``. This is set to ``/`` by -default for Emscripten, on other platforms the default behavior is the same -as before. diff --git a/Misc/NEWS.d/next/Build/2024-10-30-17-47-15.gh-issue-126187.0jFCZB.rst b/Misc/NEWS.d/next/Build/2024-10-30-17-47-15.gh-issue-126187.0jFCZB.rst deleted file mode 100644 index c295a91c2225a3..00000000000000 --- a/Misc/NEWS.d/next/Build/2024-10-30-17-47-15.gh-issue-126187.0jFCZB.rst +++ /dev/null @@ -1 +0,0 @@ -Introduced ``Tools/wasm/emscripten.py`` to simplify doing Emscripten builds. diff --git a/Misc/NEWS.d/next/Build/2024-10-31-15-37-05.gh-issue-126206.oC6z2i.rst b/Misc/NEWS.d/next/Build/2024-10-31-15-37-05.gh-issue-126206.oC6z2i.rst deleted file mode 100644 index 24b172e1747403..00000000000000 --- a/Misc/NEWS.d/next/Build/2024-10-31-15-37-05.gh-issue-126206.oC6z2i.rst +++ /dev/null @@ -1,2 +0,0 @@ -``make clinic`` now runs Argument Clinic using the ``--force`` option, -thus forcefully regenerating generated code. diff --git a/Misc/NEWS.d/next/Build/2024-11-04-09-42-04.gh-issue-89640.QBv05o.rst b/Misc/NEWS.d/next/Build/2024-11-04-09-42-04.gh-issue-89640.QBv05o.rst deleted file mode 100644 index 4fa44a1d6493b4..00000000000000 --- a/Misc/NEWS.d/next/Build/2024-11-04-09-42-04.gh-issue-89640.QBv05o.rst +++ /dev/null @@ -1 +0,0 @@ -Hard-code float word ordering as little endian on WASM. diff --git a/Misc/NEWS.d/next/Build/2024-11-06-11-12-04.gh-issue-126458.7vzHtx.rst b/Misc/NEWS.d/next/Build/2024-11-06-11-12-04.gh-issue-126458.7vzHtx.rst deleted file mode 100644 index cc06dd8a30e30a..00000000000000 --- a/Misc/NEWS.d/next/Build/2024-11-06-11-12-04.gh-issue-126458.7vzHtx.rst +++ /dev/null @@ -1 +0,0 @@ -Disable SIMD support for HACL under WASI. diff --git a/Misc/NEWS.d/next/Build/2024-11-07-11-09-31.gh-issue-123877.CVdd0b.rst b/Misc/NEWS.d/next/Build/2024-11-07-11-09-31.gh-issue-123877.CVdd0b.rst deleted file mode 100644 index 080d2f2ab12af9..00000000000000 --- a/Misc/NEWS.d/next/Build/2024-11-07-11-09-31.gh-issue-123877.CVdd0b.rst +++ /dev/null @@ -1,3 +0,0 @@ -Use ``wasm32-wasip1`` as the target triple for WASI instead of -``wasm32-wasi``. The latter will eventually be reclaimed for WASI 1.0 while -CPython currently only supports WASI preview1. diff --git a/Misc/NEWS.d/next/Build/2024-11-20-17-12-40.gh-issue-126898.I2zILt.rst b/Misc/NEWS.d/next/Build/2024-11-20-17-12-40.gh-issue-126898.I2zILt.rst new file mode 100644 index 00000000000000..37783c4e890015 --- /dev/null +++ b/Misc/NEWS.d/next/Build/2024-11-20-17-12-40.gh-issue-126898.I2zILt.rst @@ -0,0 +1 @@ +The Emscripten build of Python is now based on ES6 modules. diff --git a/Misc/NEWS.d/next/C_API/2024-06-04-13-38-44.gh-issue-120026.uhEvJ9.rst b/Misc/NEWS.d/next/C_API/2024-06-04-13-38-44.gh-issue-120026.uhEvJ9.rst deleted file mode 100644 index d43a138724ad33..00000000000000 --- a/Misc/NEWS.d/next/C_API/2024-06-04-13-38-44.gh-issue-120026.uhEvJ9.rst +++ /dev/null @@ -1 +0,0 @@ -The :c:macro:`!Py_HUGE_VAL` macro is :term:`soft deprecated`. 
diff --git a/Misc/NEWS.d/next/C_API/2024-07-03-13-39-13.gh-issue-121058.MKi1MV.rst b/Misc/NEWS.d/next/C_API/2024-07-03-13-39-13.gh-issue-121058.MKi1MV.rst new file mode 100644 index 00000000000000..133d8cb6fe4b9e --- /dev/null +++ b/Misc/NEWS.d/next/C_API/2024-07-03-13-39-13.gh-issue-121058.MKi1MV.rst @@ -0,0 +1,2 @@ +``PyThreadState_Clear()`` now warns (and calls ``sys.excepthook``) if the +thread state still has an active exception. diff --git a/Misc/NEWS.d/next/C_API/2024-07-30-14-40-08.gh-issue-121654.tgGeAl.rst b/Misc/NEWS.d/next/C_API/2024-07-30-14-40-08.gh-issue-121654.tgGeAl.rst deleted file mode 100644 index 134d36c281ab21..00000000000000 --- a/Misc/NEWS.d/next/C_API/2024-07-30-14-40-08.gh-issue-121654.tgGeAl.rst +++ /dev/null @@ -1,2 +0,0 @@ -Add :c:func:`PyType_Freeze` function to make a type immutable. Patch by -Victor Stinner. diff --git a/Misc/NEWS.d/next/C_API/2024-10-16-19-28-23.gh-issue-125608.gTsU2g.rst b/Misc/NEWS.d/next/C_API/2024-10-16-19-28-23.gh-issue-125608.gTsU2g.rst deleted file mode 100644 index e70f9f173957a2..00000000000000 --- a/Misc/NEWS.d/next/C_API/2024-10-16-19-28-23.gh-issue-125608.gTsU2g.rst +++ /dev/null @@ -1,3 +0,0 @@ -Fix a bug where dictionary watchers (e.g., :c:func:`PyDict_Watch`) on an -object's attribute dictionary (:attr:`~object.__dict__`) were not triggered -when the object's attributes were modified. diff --git a/Misc/NEWS.d/next/C API/README.rst b/Misc/NEWS.d/next/C_API/README.rst similarity index 100% rename from Misc/NEWS.d/next/C API/README.rst rename to Misc/NEWS.d/next/C_API/README.rst diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2023-12-30-00-21-45.gh-issue-113570._XQgsW.rst b/Misc/NEWS.d/next/Core_and_Builtins/2023-12-30-00-21-45.gh-issue-113570._XQgsW.rst deleted file mode 100644 index 6e0f0afe05369b..00000000000000 --- a/Misc/NEWS.d/next/Core_and_Builtins/2023-12-30-00-21-45.gh-issue-113570._XQgsW.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed a bug in ``reprlib.repr`` where it incorrectly called the repr method on shadowed Python built-in types. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-05-12-03-10-36.gh-issue-118950.5Wc4vp.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-05-12-03-10-36.gh-issue-118950.5Wc4vp.rst deleted file mode 100644 index 82be975f4d808d..00000000000000 --- a/Misc/NEWS.d/next/Core_and_Builtins/2024-05-12-03-10-36.gh-issue-118950.5Wc4vp.rst +++ /dev/null @@ -1 +0,0 @@ -Fix bug where SSLProtocol.connection_lost wasn't getting called when OSError was thrown on writing to socket. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-06-13-19-12-49.gh-issue-119793.FDVCDk.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-06-13-19-12-49.gh-issue-119793.FDVCDk.rst deleted file mode 100644 index 976d6712e4b6af..00000000000000 --- a/Misc/NEWS.d/next/Core_and_Builtins/2024-06-13-19-12-49.gh-issue-119793.FDVCDk.rst +++ /dev/null @@ -1,3 +0,0 @@ -The :func:`map` built-in now has an optional keyword-only *strict* flag -like :func:`zip` to check that all the iterables are of equal length. -Patch by Wannes Boeykens. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-09-11-01-32-07.gh-issue-123930.BkPfB6.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-09-11-01-32-07.gh-issue-123930.BkPfB6.rst deleted file mode 100644 index 3c8eb02b2dc2d6..00000000000000 --- a/Misc/NEWS.d/next/Core_and_Builtins/2024-09-11-01-32-07.gh-issue-123930.BkPfB6.rst +++ /dev/null @@ -1,4 +0,0 @@ -Improve the error message when a script shadowing a module from the standard -library causes :exc:`ImportError` to be raised during a "from" import. 
-Similarly, improve the error message when a script shadowing a third party module -attempts to "from" import an attribute from that third party module while still initialising. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-09-14-20-09-39.gh-issue-123714.o1mbe4.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-09-14-20-09-39.gh-issue-123714.o1mbe4.rst deleted file mode 100644 index 03a2ef63238073..00000000000000 --- a/Misc/NEWS.d/next/Core_and_Builtins/2024-09-14-20-09-39.gh-issue-123714.o1mbe4.rst +++ /dev/null @@ -1 +0,0 @@ -Update JIT compilation to use LLVM 19 diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-09-25-21-50-23.gh-issue-124470.pFr3_d.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-09-25-21-50-23.gh-issue-124470.pFr3_d.rst new file mode 100644 index 00000000000000..8f2f37146d3c13 --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2024-09-25-21-50-23.gh-issue-124470.pFr3_d.rst @@ -0,0 +1 @@ +Fix crash in free-threaded builds when replacing object dictionary while reading attribute on another thread diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-01-17-31-32.gh-issue-124855.sdsv_H.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-10-01-17-31-32.gh-issue-124855.sdsv_H.rst deleted file mode 100644 index b65a5e6ac11c76..00000000000000 --- a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-01-17-31-32.gh-issue-124855.sdsv_H.rst +++ /dev/null @@ -1,2 +0,0 @@ -Don't allow the JIT and perf support to be active at the same time. Patch by -Pablo Galindo diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-09-13-53-50.gh-issue-125038.ffSLCz.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-10-09-13-53-50.gh-issue-125038.ffSLCz.rst deleted file mode 100644 index 15de48ec0e4450..00000000000000 --- a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-09-13-53-50.gh-issue-125038.ffSLCz.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix crash when iterating over a generator expression after direct changes on ``gi_frame.f_locals``. -Patch by Mikhail Efimov. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-14-17-13-12.gh-issue-118423.SkBoda.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-10-14-17-13-12.gh-issue-118423.SkBoda.rst deleted file mode 100644 index 8511a8de5530d6..00000000000000 --- a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-14-17-13-12.gh-issue-118423.SkBoda.rst +++ /dev/null @@ -1,2 +0,0 @@ -Add a new ``INSTRUCTION_SIZE`` macro to the cases generator which returns -the current instruction size. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-16-12-12-39.gh-issue-125444.9tG2X6.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-10-16-12-12-39.gh-issue-125444.9tG2X6.rst deleted file mode 100644 index 13c1e745edf8d5..00000000000000 --- a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-16-12-12-39.gh-issue-125444.9tG2X6.rst +++ /dev/null @@ -1 +0,0 @@ -Fix illegal instruction for older Arm architectures. Patch by Diego Russo, testing by Ross Burton. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-16-13-52-48.gh-issue-125588.kCahyO.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-10-16-13-52-48.gh-issue-125588.kCahyO.rst deleted file mode 100644 index 1d59a9c3c205b8..00000000000000 --- a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-16-13-52-48.gh-issue-125588.kCahyO.rst +++ /dev/null @@ -1,2 +0,0 @@ -The Python PEG generator can now use f-strings in the grammar actions. 
Patch -by Pablo Galindo diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-16-23-06-06.gh-issue-125017.fcltj0.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-10-16-23-06-06.gh-issue-125017.fcltj0.rst deleted file mode 100644 index 11c526643c3122..00000000000000 --- a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-16-23-06-06.gh-issue-125017.fcltj0.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix crash on certain accesses to the ``__annotations__`` of -:class:`staticmethod` and :class:`classmethod` objects. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-18-10-11-43.gh-issue-125593.Q97m3A.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-10-18-10-11-43.gh-issue-125593.Q97m3A.rst deleted file mode 100644 index 220e94467af849..00000000000000 --- a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-18-10-11-43.gh-issue-125593.Q97m3A.rst +++ /dev/null @@ -1 +0,0 @@ -Use color to highlight error locations in traceback from exception group diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-18-16-00-10.gh-issue-125703.QRoqMo.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-10-18-16-00-10.gh-issue-125703.QRoqMo.rst deleted file mode 100644 index 7cbfa725e78cef..00000000000000 --- a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-18-16-00-10.gh-issue-125703.QRoqMo.rst +++ /dev/null @@ -1,2 +0,0 @@ -Correctly honour :mod:`tracemalloc` hooks in specialized ``Py_DECREF`` -paths. Patch by Pablo Galindo diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-22-04-18-53.gh-issue-125498.cFjPIn.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-10-22-04-18-53.gh-issue-125498.cFjPIn.rst deleted file mode 100644 index 807c2e07210e7f..00000000000000 --- a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-22-04-18-53.gh-issue-125498.cFjPIn.rst +++ /dev/null @@ -1,4 +0,0 @@ -The JIT has been updated to leverage Clang 19’s new ``preserve_none`` attribute, -which supports more platforms and is more useful than LLVM's existing ``ghccc`` -calling convention. This also removes the need to manually patch the calling -convention in LLVM IR, simplifying the JIT compilation process. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-23-14-05-47.gh-issue-125868.uLfXYB.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-10-23-14-05-47.gh-issue-125868.uLfXYB.rst deleted file mode 100644 index dea250e7166ec6..00000000000000 --- a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-23-14-05-47.gh-issue-125868.uLfXYB.rst +++ /dev/null @@ -1,3 +0,0 @@ -It was possible in 3.14.0a1 only for attribute lookup to give the wrong -value. This was due to an incorrect specialization in very specific -circumstances. This is fixed in 3.14.0a2. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-23-14-42-27.gh-issue-125859.m3EF9E.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-10-23-14-42-27.gh-issue-125859.m3EF9E.rst deleted file mode 100644 index d36aa8fbe7482f..00000000000000 --- a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-23-14-42-27.gh-issue-125859.m3EF9E.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix a crash in the free threading build when :func:`gc.get_objects` or -:func:`gc.get_referrers` is called during an in-progress garbage collection. 
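For the gh-125017 entry above (crash on certain accesses to the ``__annotations__`` of :class:`staticmethod` and :class:`classmethod` objects), a minimal sketch of the kind of access path involved; this is an illustrative pattern, not the exact failing case from the issue::

    class C:
        @staticmethod
        def f(x: int) -> str:
            return str(x)

        @classmethod
        def g(cls, n: int) -> int:
            return n

    # Read the annotations through the descriptor objects stored on the class
    # (the staticmethod/classmethod wrappers), not through the plain functions.
    print(C.__dict__["f"].__annotations__)   # staticmethod object
    print(C.__dict__["g"].__annotations__)   # classmethod object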
diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-24-22-43-03.gh-issue-125942.3UQht1.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-10-24-22-43-03.gh-issue-125942.3UQht1.rst deleted file mode 100644 index d1b1ecd2a724ab..00000000000000 --- a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-24-22-43-03.gh-issue-125942.3UQht1.rst +++ /dev/null @@ -1,2 +0,0 @@ -On Android, the ``errors`` setting of :any:`sys.stdout` was changed from -``surrogateescape`` to ``backslashreplace``. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-25-15-56-14.gh-issue-125837.KlCdgD.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-10-25-15-56-14.gh-issue-125837.KlCdgD.rst deleted file mode 100644 index 9538f34f969377..00000000000000 --- a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-25-15-56-14.gh-issue-125837.KlCdgD.rst +++ /dev/null @@ -1,5 +0,0 @@ -Adds :opcode:`LOAD_SMALL_INT` and :opcode:`LOAD_CONST_IMMORTAL` instructions. -``LOAD_SMALL_INT`` pushes a small integer equal to the ``oparg`` to the stack. -``LOAD_CONST_IMMORTAL`` does the same as ``LOAD_CONST`` but is more -efficient for immortal objects. -Removes ``RETURN_CONST`` instruction. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-26-13-32-48.gh-issue-126012.2KalhG.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-10-26-13-32-48.gh-issue-126012.2KalhG.rst deleted file mode 100644 index 5307920ddcf200..00000000000000 --- a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-26-13-32-48.gh-issue-126012.2KalhG.rst +++ /dev/null @@ -1,2 +0,0 @@ -The :class:`memoryview` type now supports subscription, making it a -:term:`generic type`. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-26-23-50-03.gh-issue-126018.Hq-qcM.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-10-26-23-50-03.gh-issue-126018.Hq-qcM.rst deleted file mode 100644 index e019408638997b..00000000000000 --- a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-26-23-50-03.gh-issue-126018.Hq-qcM.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix a crash in :func:`sys.audit` when passing a non-string as first argument -and Python was compiled in debug mode. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-27-20-31-43.gh-issue-90370.IP_W3a.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-10-27-20-31-43.gh-issue-90370.IP_W3a.rst deleted file mode 100644 index b6a19c06a228ca..00000000000000 --- a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-27-20-31-43.gh-issue-90370.IP_W3a.rst +++ /dev/null @@ -1,2 +0,0 @@ -Avoid temporary tuple creation for vararg in argument passing with Argument -Clinic generated code (if arguments either vararg or positional-only). diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-29-10-37-39.gh-issue-126072.XLKlxv.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-10-29-10-37-39.gh-issue-126072.XLKlxv.rst deleted file mode 100644 index 4ad30e9f954ecf..00000000000000 --- a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-29-10-37-39.gh-issue-126072.XLKlxv.rst +++ /dev/null @@ -1,3 +0,0 @@ -Add a new attribute in :attr:`~codeobject.co_flags` to indicate whether the -first item in :attr:`~codeobject.co_consts` is the docstring. If a code -object has no docstring, ``None`` will **NOT** be inserted. 
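Relatedly, for the gh-126012 entry (``memoryview`` now supports subscription as a generic type), a small sketch of what the new spelling enables; it assumes an interpreter that already includes the change, and the ``int`` element type is only an illustrative choice::

    from typing import get_args

    Alias = memoryview[int]          # now a types.GenericAlias instead of a TypeError
    print(Alias, get_args(Alias))

    def first_byte(view: memoryview[int]) -> int:
        # Subscription only affects typing; runtime behaviour is unchanged.
        return view[0]

    print(first_byte(memoryview(b"\x07abc")))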
diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-29-15-17-31.gh-issue-126139.B4OQ8a.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-10-29-15-17-31.gh-issue-126139.B4OQ8a.rst deleted file mode 100644 index 278971b46d18ab..00000000000000 --- a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-29-15-17-31.gh-issue-126139.B4OQ8a.rst +++ /dev/null @@ -1,2 +0,0 @@ -Provide better error location when attempting to use a :term:`future -statement <__future__>` with an unknown future feature. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-31-21-49-00.gh-issue-126072.o9k8Ns.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-10-31-21-49-00.gh-issue-126072.o9k8Ns.rst deleted file mode 100644 index 2464ac78cf429b..00000000000000 --- a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-31-21-49-00.gh-issue-126072.o9k8Ns.rst +++ /dev/null @@ -1,2 +0,0 @@ -Following :gh:`126101`, for :ref:`codeobjects` like lambda, annotation and type alias, -we no longer add ``None`` to its :attr:`~codeobject.co_consts`. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-11-03-15-15-36.gh-issue-126366.8BBdGU.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-11-03-15-15-36.gh-issue-126366.8BBdGU.rst deleted file mode 100644 index a47233602e4eff..00000000000000 --- a/Misc/NEWS.d/next/Core_and_Builtins/2024-11-03-15-15-36.gh-issue-126366.8BBdGU.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix crash when using ``yield from`` on an object that raises an exception in -its ``__iter__``. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-11-06-16-34-11.gh-issue-126222.9NBfTn.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-11-06-16-34-11.gh-issue-126222.9NBfTn.rst deleted file mode 100644 index ebf6673782f02c..00000000000000 --- a/Misc/NEWS.d/next/Core_and_Builtins/2024-11-06-16-34-11.gh-issue-126222.9NBfTn.rst +++ /dev/null @@ -1,3 +0,0 @@ -Do not include count of "peek" items in ``_PyUop_num_popped``. This ensures -that the correct number of items are popped from the stack when a micro-op -exits with an error. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-11-07-21-48-23.gh-issue-126091.ETaRGE.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-11-07-21-48-23.gh-issue-126091.ETaRGE.rst new file mode 100644 index 00000000000000..08118ff1af657d --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2024-11-07-21-48-23.gh-issue-126091.ETaRGE.rst @@ -0,0 +1,2 @@ +Ensure stack traces are complete when throwing into a generator chain that +ends in a custom generator. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-11-15-16-39-37.gh-issue-126892.QR6Yo3.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-11-15-16-39-37.gh-issue-126892.QR6Yo3.rst new file mode 100644 index 00000000000000..db3c398e5dbdbe --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2024-11-15-16-39-37.gh-issue-126892.QR6Yo3.rst @@ -0,0 +1,2 @@ +Require cold or invalidated code to "warm up" before being JIT compiled +again. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-11-18-23-18-17.gh-issue-126980.r8QHdi.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-11-18-23-18-17.gh-issue-126980.r8QHdi.rst new file mode 100644 index 00000000000000..84484e7c3001da --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2024-11-18-23-18-17.gh-issue-126980.r8QHdi.rst @@ -0,0 +1,3 @@ +Fix :meth:`~object.__buffer__` of :class:`bytearray` crashing when +:attr:`~inspect.BufferFlags.READ` or :attr:`~inspect.BufferFlags.WRITE` are +passed as flags. 
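As a companion to the gh-126980 entry above, a hedged sketch of the call path it concerns; whether a ``READ`` request succeeds or is rejected after the fix is an assumption here, the point is only that it no longer crashes the interpreter::

    import inspect

    ba = bytearray(b"data")

    # An ordinary buffer request works as before.
    with ba.__buffer__(inspect.BufferFlags.SIMPLE) as view:
        print(bytes(view))

    # READ and WRITE are the flag values whose handling the fix hardened.
    try:
        ba.__buffer__(inspect.BufferFlags.READ)
        print("READ request accepted")
    except Exception as exc:
        print("READ request rejected:", type(exc).__name__)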
diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-11-19-17-17-32.gh-issue-127010.9Cl4bb.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-11-19-17-17-32.gh-issue-127010.9Cl4bb.rst new file mode 100644 index 00000000000000..36e379c88ab27e --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2024-11-19-17-17-32.gh-issue-127010.9Cl4bb.rst @@ -0,0 +1,4 @@ +Simplify GC tracking of dictionaries. All dictionaries are tracked when +created, rather than being lazily tracked when a trackable object was added +to them. This simplifies the code considerably and results in a slight +speedup. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-11-19-21-49-58.gh-issue-127020.5vvI17.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-11-19-21-49-58.gh-issue-127020.5vvI17.rst new file mode 100644 index 00000000000000..a8fd9272f5a923 --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2024-11-19-21-49-58.gh-issue-127020.5vvI17.rst @@ -0,0 +1,4 @@ +Fix a crash in the free threading build when :c:func:`PyCode_GetCode`, +:c:func:`PyCode_GetVarnames`, :c:func:`PyCode_GetCellvars`, or +:c:func:`PyCode_GetFreevars` were called from multiple threads at the same +time. diff --git a/Misc/NEWS.d/next/Core and Builtins/README.rst b/Misc/NEWS.d/next/Core_and_Builtins/README.rst similarity index 100% rename from Misc/NEWS.d/next/Core and Builtins/README.rst rename to Misc/NEWS.d/next/Core_and_Builtins/README.rst diff --git a/Misc/NEWS.d/next/Documentation/2023-03-28-22-24-45.gh-issue-60712.So5uad.rst b/Misc/NEWS.d/next/Documentation/2023-03-28-22-24-45.gh-issue-60712.So5uad.rst deleted file mode 100644 index e401cc2535e389..00000000000000 --- a/Misc/NEWS.d/next/Documentation/2023-03-28-22-24-45.gh-issue-60712.So5uad.rst +++ /dev/null @@ -1,2 +0,0 @@ -Include the :class:`object` type in the lists of documented types. -Change by Furkan Onder and Martin Panter. diff --git a/Misc/NEWS.d/next/Documentation/2024-10-10-23-46-54.gh-issue-125277.QAby09.rst b/Misc/NEWS.d/next/Documentation/2024-10-10-23-46-54.gh-issue-125277.QAby09.rst deleted file mode 100644 index fcd6e22c27b5f4..00000000000000 --- a/Misc/NEWS.d/next/Documentation/2024-10-10-23-46-54.gh-issue-125277.QAby09.rst +++ /dev/null @@ -1,2 +0,0 @@ -Require Sphinx 7.2.6 or later to build the Python documentation. -Patch by Adam Turner. diff --git a/Misc/NEWS.d/next/Library/2022-11-10-17-16-45.gh-issue-97514.kzA0zl.rst b/Misc/NEWS.d/next/Library/2022-11-10-17-16-45.gh-issue-97514.kzA0zl.rst new file mode 100644 index 00000000000000..10c56edb8c7303 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2022-11-10-17-16-45.gh-issue-97514.kzA0zl.rst @@ -0,0 +1,10 @@ +Authentication was added to the :mod:`multiprocessing` forkserver start +method control socket so that only processes with the authentication key +generated by the process that spawned the forkserver can control it. This +is an enhancement over the other :gh:`97514` fixes so that access is no +longer limited only by filesystem permissions. + +The file descriptor exchange of control pipes with the forked worker process +now requires an explicit acknowledgement byte to be sent over the socket after +the exchange on all forkserver supporting platforms. That makes testing the +above much easier. 
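The gh-97514 hardening above is internal to the ``forkserver`` start method's control socket; user code selects the start method the usual way and needs no changes. A minimal sketch (POSIX-only, since ``forkserver`` is not available on Windows)::

    import multiprocessing as mp

    def square(x):
        return x * x

    if __name__ == "__main__":
        ctx = mp.get_context("forkserver")   # the start method the change applies to
        with ctx.Pool(processes=2) as pool:
            print(pool.map(square, range(5)))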
diff --git a/Misc/NEWS.d/next/Library/2024-05-13-10-09-41.gh-issue-118986.-r4W9h.rst b/Misc/NEWS.d/next/Library/2024-05-13-10-09-41.gh-issue-118986.-r4W9h.rst deleted file mode 100644 index 196da60a950bfb..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-05-13-10-09-41.gh-issue-118986.-r4W9h.rst +++ /dev/null @@ -1 +0,0 @@ -Add :data:`!socket.IPV6_RECVERR` constant (available since Linux 2.2). diff --git a/Misc/NEWS.d/next/Library/2024-05-28-14-35-23.gh-issue-97850.dCtjel.rst b/Misc/NEWS.d/next/Library/2024-05-28-14-35-23.gh-issue-97850.dCtjel.rst deleted file mode 100644 index bb94f7d8ad124d..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-05-28-14-35-23.gh-issue-97850.dCtjel.rst +++ /dev/null @@ -1 +0,0 @@ -Remove deprecated :func:`!pkgutil.get_loader` and :func:`!pkgutil.find_loader`. diff --git a/Misc/NEWS.d/next/Library/2024-06-06-04-06-05.gh-issue-70764.6511hw.rst b/Misc/NEWS.d/next/Library/2024-06-06-04-06-05.gh-issue-70764.6511hw.rst deleted file mode 100644 index 4cfb66a6ccc6ee..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-06-06-04-06-05.gh-issue-70764.6511hw.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed an issue where :func:`inspect.getclosurevars` would incorrectly classify an attribute name as a global variable when the name exists both as an attribute name and a global variable. diff --git a/Misc/NEWS.d/next/Library/2024-07-02-15-56-42.gh-issue-121267.yFBWkh.rst b/Misc/NEWS.d/next/Library/2024-07-02-15-56-42.gh-issue-121267.yFBWkh.rst deleted file mode 100644 index 9e52405c15a82d..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-07-02-15-56-42.gh-issue-121267.yFBWkh.rst +++ /dev/null @@ -1,2 +0,0 @@ -Improve the performance of :mod:`tarfile` when writing files, by caching user names -and group names. diff --git a/Misc/NEWS.d/next/Library/2024-07-23-02-24-50.gh-issue-120754.nHb5mG.rst b/Misc/NEWS.d/next/Library/2024-07-23-02-24-50.gh-issue-120754.nHb5mG.rst deleted file mode 100644 index 6c33e7b7ec7716..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-07-23-02-24-50.gh-issue-120754.nHb5mG.rst +++ /dev/null @@ -1 +0,0 @@ -Update unbounded ``read`` calls in :mod:`zipfile` to specify an explicit ``size`` putting a limit on how much data they may read. This also updates handling around ZIP max comment size to match the standard instead of reading comments that are one byte too long. diff --git a/Misc/NEWS.d/next/Library/2024-08-28-19-27-35.gh-issue-123370.SPZ9Ux.rst b/Misc/NEWS.d/next/Library/2024-08-28-19-27-35.gh-issue-123370.SPZ9Ux.rst deleted file mode 100644 index 1fd5cc54eaf3e7..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-08-28-19-27-35.gh-issue-123370.SPZ9Ux.rst +++ /dev/null @@ -1 +0,0 @@ -Fix the canvas not clearing after running turtledemo clock. diff --git a/Misc/NEWS.d/next/Library/2024-09-07-13-57-49.gh-issue-80958.fVYnqV.rst b/Misc/NEWS.d/next/Library/2024-09-07-13-57-49.gh-issue-80958.fVYnqV.rst deleted file mode 100644 index f0edd7b1ac6e8b..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-09-07-13-57-49.gh-issue-80958.fVYnqV.rst +++ /dev/null @@ -1 +0,0 @@ -unittest discovery supports PEP 420 namespace packages as start directory again. 
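The gh-70764 entry above concerns names used both as a global and as an attribute. A rough sketch of the shape of code the fix targets; treating the attribute-only use as *not* a global is the behaviour the entry implies::

    import inspect

    box = "module-level value"       # a global named 'box' exists...

    def read_attr(obj):
        return obj.box               # ...but here 'box' is only an attribute name

    cv = inspect.getclosurevars(read_attr)
    # With the fix, 'box' should no longer be reported in cv.globals merely
    # because a same-named global happens to exist.
    print(cv.globals, cv.unbound)
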
diff --git a/Misc/NEWS.d/next/Library/2024-09-24-18-16-59.gh-issue-58956.0wFrBR.rst b/Misc/NEWS.d/next/Library/2024-09-24-18-16-59.gh-issue-58956.0wFrBR.rst deleted file mode 100644 index a882a632fddf1b..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-09-24-18-16-59.gh-issue-58956.0wFrBR.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed a bug in :mod:`pdb` where sometimes the breakpoint won't trigger if it was set on a function which is already in the call stack. diff --git a/Misc/NEWS.d/next/Library/2024-09-24-18-49-16.gh-issue-99749.gBDJX7.rst b/Misc/NEWS.d/next/Library/2024-09-24-18-49-16.gh-issue-99749.gBDJX7.rst deleted file mode 100644 index 3ecd75c5b551b6..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-09-24-18-49-16.gh-issue-99749.gBDJX7.rst +++ /dev/null @@ -1 +0,0 @@ -Adds a feature to optionally enable suggestions for argument choices and subparser names if mistyped by the user. diff --git a/Misc/NEWS.d/next/Library/2024-09-27-13-10-17.gh-issue-58032.0aNAQ0.rst b/Misc/NEWS.d/next/Library/2024-09-27-13-10-17.gh-issue-58032.0aNAQ0.rst deleted file mode 100644 index 278512b22a8d3f..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-09-27-13-10-17.gh-issue-58032.0aNAQ0.rst +++ /dev/null @@ -1 +0,0 @@ -Deprecate the :class:`argparse.FileType` type converter. diff --git a/Misc/NEWS.d/next/Library/2024-09-27-15-42-55.gh-issue-124694.uUy32y.rst b/Misc/NEWS.d/next/Library/2024-09-27-15-42-55.gh-issue-124694.uUy32y.rst deleted file mode 100644 index 1aa1a463b0c63a..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-09-27-15-42-55.gh-issue-124694.uUy32y.rst +++ /dev/null @@ -1,6 +0,0 @@ -We've added :class:`concurrent.futures.InterpreterPoolExecutor`, which -allows you to run code in multiple isolated interpreters. This allows you -to circumvent the limitations of CPU-bound threads (due to the GIL). Patch -by Eric Snow. - -This addition is unrelated to :pep:`734`. diff --git a/Misc/NEWS.d/next/Library/2024-09-28-02-03-04.gh-issue-124651.bLBGtH.rst b/Misc/NEWS.d/next/Library/2024-09-28-02-03-04.gh-issue-124651.bLBGtH.rst deleted file mode 100644 index 17fc9171390dd9..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-09-28-02-03-04.gh-issue-124651.bLBGtH.rst +++ /dev/null @@ -1 +0,0 @@ -Properly quote template strings in :mod:`venv` activation scripts. diff --git a/Misc/NEWS.d/next/Library/2024-10-04-22-43-48.gh-issue-124984.xjMv9b.rst b/Misc/NEWS.d/next/Library/2024-10-04-22-43-48.gh-issue-124984.xjMv9b.rst deleted file mode 100644 index dd0a55a6854c25..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-04-22-43-48.gh-issue-124984.xjMv9b.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed thread safety in :mod:`ssl` in the free-threaded build. OpenSSL operations are now protected by a per-object lock. diff --git a/Misc/NEWS.d/next/Library/2024-10-09-17-07-33.gh-issue-52551.PBakSY.rst b/Misc/NEWS.d/next/Library/2024-10-09-17-07-33.gh-issue-52551.PBakSY.rst deleted file mode 100644 index edc9ac5bb23117..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-09-17-07-33.gh-issue-52551.PBakSY.rst +++ /dev/null @@ -1,8 +0,0 @@ -Fix encoding issues in :func:`time.strftime`, the -:meth:`~datetime.datetime.strftime` method of the :mod:`datetime` classes -:class:`~datetime.datetime`, :class:`~datetime.date` and -:class:`~datetime.time` and formatting of these classes. Characters not -encodable in the current locale are now acceptable in the format string. -Surrogate pairs and sequence of surrogatescape-encoded bytes are no longer -recombinated. Embedded null character no longer terminates the format -string. 
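The gh-124694 entry above introduces ``concurrent.futures.InterpreterPoolExecutor``. A minimal sketch assuming the familiar executor interface; what may be submitted to isolated interpreters is more restricted than with threads, so treat this as illustrative only::

    from concurrent.futures import InterpreterPoolExecutor

    def busy(n):
        return sum(i * i for i in range(n))

    if __name__ == "__main__":
        # Each worker runs in its own isolated interpreter, so CPU-bound work
        # is not serialised by a single GIL.
        with InterpreterPoolExecutor(max_workers=4) as pool:
            print(list(pool.map(busy, [200_000] * 4)))
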
diff --git a/Misc/NEWS.d/next/Library/2024-10-11-00-40-13.gh-issue-125245.8vReM-.rst b/Misc/NEWS.d/next/Library/2024-10-11-00-40-13.gh-issue-125245.8vReM-.rst deleted file mode 100644 index c880efe73d06b4..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-11-00-40-13.gh-issue-125245.8vReM-.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix race condition when importing :mod:`collections.abc`, which could -incorrectly return an empty module. diff --git a/Misc/NEWS.d/next/Library/2024-10-13-15-04-58.gh-issue-125398.UW7Ndv.rst b/Misc/NEWS.d/next/Library/2024-10-13-15-04-58.gh-issue-125398.UW7Ndv.rst deleted file mode 100644 index a188b35e1fbdbc..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-13-15-04-58.gh-issue-125398.UW7Ndv.rst +++ /dev/null @@ -1 +0,0 @@ -Fix the conversion of the :envvar:`!VIRTUAL_ENV` path in the activate script in :mod:`venv` when running in Git Bash for Windows. diff --git a/Misc/NEWS.d/next/Library/2024-10-14-02-07-44.gh-issue-125115.IOf3ON.rst b/Misc/NEWS.d/next/Library/2024-10-14-02-07-44.gh-issue-125115.IOf3ON.rst deleted file mode 100644 index 3583d537a6ec61..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-14-02-07-44.gh-issue-125115.IOf3ON.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed a bug in :mod:`pdb` where arguments starting with ``-`` can't be passed to the debugged script. diff --git a/Misc/NEWS.d/next/Library/2024-10-14-17-29-34.gh-issue-125451.fmP3T9.rst b/Misc/NEWS.d/next/Library/2024-10-14-17-29-34.gh-issue-125451.fmP3T9.rst deleted file mode 100644 index 589988d4d6273f..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-14-17-29-34.gh-issue-125451.fmP3T9.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix deadlock when :class:`concurrent.futures.ProcessPoolExecutor` shuts down -concurrently with an error when feeding a job to a worker process. diff --git a/Misc/NEWS.d/next/Library/2024-10-15-14-01-03.gh-issue-125519.TqGh6a.rst b/Misc/NEWS.d/next/Library/2024-10-15-14-01-03.gh-issue-125519.TqGh6a.rst deleted file mode 100644 index e6062625104590..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-15-14-01-03.gh-issue-125519.TqGh6a.rst +++ /dev/null @@ -1,2 +0,0 @@ -Improve traceback if :func:`importlib.reload` is called with an object that -is not a module. Patch by Alex Waygood. diff --git a/Misc/NEWS.d/next/Library/2024-10-15-16-50-03.gh-issue-125541.FfhmWo.rst b/Misc/NEWS.d/next/Library/2024-10-15-16-50-03.gh-issue-125541.FfhmWo.rst deleted file mode 100644 index 7a20bca1739869..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-15-16-50-03.gh-issue-125541.FfhmWo.rst +++ /dev/null @@ -1,4 +0,0 @@ -Pressing :kbd:`Ctrl-C` while blocked in :meth:`threading.Lock.acquire`, -:meth:`threading.RLock.acquire`, and :meth:`threading.Thread.join` now -interrupts the function call and raises a :exc:`KeyboardInterrupt` exception -on Windows, similar to how those functions behave on macOS and Linux. diff --git a/Misc/NEWS.d/next/Library/2024-10-16-04-50-53.gh-issue-125542.vZJ-Ns.rst b/Misc/NEWS.d/next/Library/2024-10-16-04-50-53.gh-issue-125542.vZJ-Ns.rst deleted file mode 100644 index 777920cc54ff9b..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-16-04-50-53.gh-issue-125542.vZJ-Ns.rst +++ /dev/null @@ -1,2 +0,0 @@ -Deprecate passing keyword-only *prefix_chars* argument to -:meth:`argparse.ArgumentParser.add_argument_group`. 
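The gh-125542 entry above deprecates the keyword-only *prefix_chars* argument of ``add_argument_group``. A short sketch of the deprecated spelling; the parser-level *prefix_chars* is the supported place for it::

    import argparse

    parser = argparse.ArgumentParser(prefix_chars="-+")
    # Passing prefix_chars to the group itself is the deprecated spelling.
    group = parser.add_argument_group("tuning", prefix_chars="+")
    group.add_argument("+fast", action="store_true")
    print(parser.parse_args(["+fast"]).fast)   # True
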
diff --git a/Misc/NEWS.d/next/Library/2024-10-16-15-55-50.gh-issue-125600.yMsJx0.rst b/Misc/NEWS.d/next/Library/2024-10-16-15-55-50.gh-issue-125600.yMsJx0.rst deleted file mode 100644 index 19bf4fbefb601b..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-16-15-55-50.gh-issue-125600.yMsJx0.rst +++ /dev/null @@ -1 +0,0 @@ -Only show stale code warning in :mod:`pdb` when we display source code. diff --git a/Misc/NEWS.d/next/Library/2024-10-16-20-32-40.gh-issue-125590.stHzOP.rst b/Misc/NEWS.d/next/Library/2024-10-16-20-32-40.gh-issue-125590.stHzOP.rst deleted file mode 100644 index dc6765ada641a9..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-16-20-32-40.gh-issue-125590.stHzOP.rst +++ /dev/null @@ -1 +0,0 @@ -Allow ``FrameLocalsProxy`` to delete and pop if the key is not a fast variable. diff --git a/Misc/NEWS.d/next/Library/2024-10-16-22-45-50.gh-issue-125614.3OEo_Q.rst b/Misc/NEWS.d/next/Library/2024-10-16-22-45-50.gh-issue-125614.3OEo_Q.rst deleted file mode 100644 index 5f4803c9b74578..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-16-22-45-50.gh-issue-125614.3OEo_Q.rst +++ /dev/null @@ -1,3 +0,0 @@ -In the :data:`~annotationlib.Format.FORWARDREF` format of -:mod:`annotationlib`, fix bug where nested expressions were not returned as -:class:`annotationlib.ForwardRef` format. diff --git a/Misc/NEWS.d/next/Library/2024-10-17-04-52-00.gh-issue-125633.lMck06.rst b/Misc/NEWS.d/next/Library/2024-10-17-04-52-00.gh-issue-125633.lMck06.rst deleted file mode 100644 index e816a13b75e0c7..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-17-04-52-00.gh-issue-125633.lMck06.rst +++ /dev/null @@ -1,2 +0,0 @@ -Add function :func:`inspect.ispackage` to determine whether an object is a -:term:`package` or not. diff --git a/Misc/NEWS.d/next/Library/2024-10-17-16-10-29.gh-issue-125259.oMew0c.rst b/Misc/NEWS.d/next/Library/2024-10-17-16-10-29.gh-issue-125259.oMew0c.rst deleted file mode 100644 index 4fa6330abea512..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-17-16-10-29.gh-issue-125259.oMew0c.rst +++ /dev/null @@ -1 +0,0 @@ -Fix the notes removal logic for errors thrown in enum initialization. diff --git a/Misc/NEWS.d/next/Library/2024-10-17-20-36-06.gh-issue-52551.EIVNYY.rst b/Misc/NEWS.d/next/Library/2024-10-17-20-36-06.gh-issue-52551.EIVNYY.rst deleted file mode 100644 index 48d3d93c3d72af..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-17-20-36-06.gh-issue-52551.EIVNYY.rst +++ /dev/null @@ -1 +0,0 @@ -Use :c:func:`!wcsftime` to implement :func:`time.strftime` on Windows. diff --git a/Misc/NEWS.d/next/Library/2024-10-18-08-58-10.gh-issue-125660.sDdDqO.rst b/Misc/NEWS.d/next/Library/2024-10-18-08-58-10.gh-issue-125660.sDdDqO.rst deleted file mode 100644 index 74d76c7bddae7d..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-18-08-58-10.gh-issue-125660.sDdDqO.rst +++ /dev/null @@ -1 +0,0 @@ -Reject invalid unicode escapes for Python implementation of :func:`json.loads`. diff --git a/Misc/NEWS.d/next/Library/2024-10-18-09-51-29.gh-issue-125682.vsj4cU.rst b/Misc/NEWS.d/next/Library/2024-10-18-09-51-29.gh-issue-125682.vsj4cU.rst deleted file mode 100644 index 3eb2905ad8d810..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-18-09-51-29.gh-issue-125682.vsj4cU.rst +++ /dev/null @@ -1,2 +0,0 @@ -Reject non-ASCII digits in the Python implementation of :func:`json.loads` -conforming to the JSON specification. 
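The gh-125633 entry above adds :func:`inspect.ispackage`. A tiny sketch of the expected answers::

    import inspect
    import collections    # a package (it has a __path__)
    import math           # a plain (extension) module

    print(inspect.ispackage(collections))    # True
    print(inspect.ispackage(math))           # False
    print(inspect.ispackage("collections"))  # False: not a module object
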
diff --git a/Misc/NEWS.d/next/Library/2024-10-19-01-30-40.gh-issue-125378.WTosxX.rst b/Misc/NEWS.d/next/Library/2024-10-19-01-30-40.gh-issue-125378.WTosxX.rst deleted file mode 100644 index dc76889d3b210a..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-19-01-30-40.gh-issue-125378.WTosxX.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed the bug in :mod:`pdb` where after a multi-line command, an empty line repeats the first line of the multi-line command, instead of the full command. diff --git a/Misc/NEWS.d/next/Library/2024-10-19-11-06-06.gh-issue-125631.BlhVvR.rst b/Misc/NEWS.d/next/Library/2024-10-19-11-06-06.gh-issue-125631.BlhVvR.rst deleted file mode 100644 index e870abbf87803a..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-19-11-06-06.gh-issue-125631.BlhVvR.rst +++ /dev/null @@ -1,4 +0,0 @@ -Restore ability to set :attr:`~pickle.Pickler.persistent_id` and -:attr:`~pickle.Unpickler.persistent_load` attributes of instances of the -:class:`!Pickler` and :class:`!Unpickler` classes in the :mod:`pickle` -module. diff --git a/Misc/NEWS.d/next/Library/2024-10-19-13-37-37.gh-issue-125710.FyFAAr.rst b/Misc/NEWS.d/next/Library/2024-10-19-13-37-37.gh-issue-125710.FyFAAr.rst deleted file mode 100644 index 8d5220e9889c3a..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-19-13-37-37.gh-issue-125710.FyFAAr.rst +++ /dev/null @@ -1 +0,0 @@ -[Enum] fix hashable<->nonhashable comparisons for member values diff --git a/Misc/NEWS.d/next/Library/2024-10-19-16-06-52.gh-issue-125666.jGfdCP.rst b/Misc/NEWS.d/next/Library/2024-10-19-16-06-52.gh-issue-125666.jGfdCP.rst deleted file mode 100644 index 3b4488815cced6..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-19-16-06-52.gh-issue-125666.jGfdCP.rst +++ /dev/null @@ -1 +0,0 @@ -Avoid the exiting the interpreter if a null byte is given as input in the new REPL. diff --git a/Misc/NEWS.d/next/Library/2024-10-21-12-06-55.gh-issue-124969.xiY8UP.rst b/Misc/NEWS.d/next/Library/2024-10-21-12-06-55.gh-issue-124969.xiY8UP.rst deleted file mode 100644 index c44550184e0000..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-21-12-06-55.gh-issue-124969.xiY8UP.rst +++ /dev/null @@ -1,2 +0,0 @@ -``locale.nl_langinfo(locale.ALT_DIGITS)`` now returns a string again. The -returned value consists of up to 100 semicolon-separated symbols. diff --git a/Misc/NEWS.d/next/Library/2024-10-21-13-52-37.gh-issue-125767.0kK4lX.rst b/Misc/NEWS.d/next/Library/2024-10-21-13-52-37.gh-issue-125767.0kK4lX.rst deleted file mode 100644 index bfda740a79d10e..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-21-13-52-37.gh-issue-125767.0kK4lX.rst +++ /dev/null @@ -1,2 +0,0 @@ -:class:`super` objects are now :mod:`pickleable ` and -:mod:`copyable `. diff --git a/Misc/NEWS.d/next/Library/2024-10-22-13-28-00.gh-issue-125355.zssHm_.rst b/Misc/NEWS.d/next/Library/2024-10-22-13-28-00.gh-issue-125355.zssHm_.rst deleted file mode 100644 index fd67f697641d92..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-22-13-28-00.gh-issue-125355.zssHm_.rst +++ /dev/null @@ -1,7 +0,0 @@ -Fix several bugs in :meth:`argparse.ArgumentParser.parse_intermixed_args`. - -* The parser no longer changes temporarily during parsing. -* Default values are not processed twice. -* Required mutually exclusive groups containing positional arguments are now supported. -* The missing arguments report now includes the names of all required optional and positional arguments. -* Unknown options can be intermixed with positional arguments in parse_known_intermixed_args(). 
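The gh-125355 entry above fixes several ``parse_intermixed_args`` bugs. For context, a minimal sketch of the intermixed command lines that method accepts (optionals and positionals interleaved)::

    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument("--verbose", action="store_true")
    parser.add_argument("files", nargs="*")

    ns = parser.parse_intermixed_args(["a.txt", "--verbose", "b.txt"])
    print(ns.verbose, ns.files)    # True ['a.txt', 'b.txt']
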
diff --git a/Misc/NEWS.d/next/Library/2024-10-23-17-45-40.gh-issue-125884.41E_PD.rst b/Misc/NEWS.d/next/Library/2024-10-23-17-45-40.gh-issue-125884.41E_PD.rst deleted file mode 100644 index 684b1f282b143e..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-23-17-45-40.gh-issue-125884.41E_PD.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed the bug for :mod:`pdb` where it can't set breakpoints on functions with certain annotations. diff --git a/Misc/NEWS.d/next/Library/2024-10-24-14-08-10.gh-issue-125789.eaiAMw.rst b/Misc/NEWS.d/next/Library/2024-10-24-14-08-10.gh-issue-125789.eaiAMw.rst deleted file mode 100644 index 964a006bb47b7b..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-24-14-08-10.gh-issue-125789.eaiAMw.rst +++ /dev/null @@ -1 +0,0 @@ -Fix possible crash when mutating list of callbacks returned by :attr:`!asyncio.Future._callbacks`. It now always returns a new copy in C implementation :mod:`!_asyncio`. Patch by Kumar Aditya. diff --git a/Misc/NEWS.d/next/Library/2024-10-25-10-53-56.gh-issue-125966.eOCYU_.rst b/Misc/NEWS.d/next/Library/2024-10-25-10-53-56.gh-issue-125966.eOCYU_.rst deleted file mode 100644 index 9fe8795de18003..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-25-10-53-56.gh-issue-125966.eOCYU_.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix a use-after-free crash in :meth:`asyncio.Future.remove_done_callback`. -Patch by Bénédikt Tran. diff --git a/Misc/NEWS.d/next/Library/2024-10-25-11-13-24.gh-issue-125969.YvbrTr.rst b/Misc/NEWS.d/next/Library/2024-10-25-11-13-24.gh-issue-125969.YvbrTr.rst deleted file mode 100644 index dc99adff7416c5..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-25-11-13-24.gh-issue-125969.YvbrTr.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix an out-of-bounds crash when an evil :meth:`asyncio.loop.call_soon` -mutates the length of the internal callbacks list. Patch by Bénédikt Tran. diff --git a/Misc/NEWS.d/next/Library/2024-10-25-20-52-15.gh-issue-125926.pp8rtZ.rst b/Misc/NEWS.d/next/Library/2024-10-25-20-52-15.gh-issue-125926.pp8rtZ.rst deleted file mode 100644 index 7f98bcdc38e566..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-25-20-52-15.gh-issue-125926.pp8rtZ.rst +++ /dev/null @@ -1,4 +0,0 @@ -Fix :func:`urllib.parse.urljoin` for base URI with undefined authority. -Although :rfc:`3986` only specify reference resolution for absolute base -URI, :func:`!urljoin` should continue to return sensible result for relative -base URI. diff --git a/Misc/NEWS.d/next/Library/2024-10-26-12-50-48.gh-issue-125984.d4vp5_.rst b/Misc/NEWS.d/next/Library/2024-10-26-12-50-48.gh-issue-125984.d4vp5_.rst deleted file mode 100644 index 7a1d7b53b11301..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-26-12-50-48.gh-issue-125984.d4vp5_.rst +++ /dev/null @@ -1,3 +0,0 @@ -Fix use-after-free crashes on :class:`asyncio.Future` objects for which the -underlying event loop implements an evil :meth:`~object.__getattribute__`. -Reported by Nico-Posada. Patch by Bénédikt Tran. diff --git a/Misc/NEWS.d/next/Library/2024-10-28-01-24-52.gh-issue-125413.Jat5kq.rst b/Misc/NEWS.d/next/Library/2024-10-28-01-24-52.gh-issue-125413.Jat5kq.rst deleted file mode 100644 index ddf1f9725d9695..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-28-01-24-52.gh-issue-125413.Jat5kq.rst +++ /dev/null @@ -1,3 +0,0 @@ -Add :meth:`pathlib.Path.scandir` method to efficiently fetch directory -children and their file attributes. This is a trivial wrapper of -:func:`os.scandir`. 
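The gh-125413 entry above adds ``Path.scandir`` as a thin wrapper over :func:`os.scandir`. A short sketch, assuming it mirrors ``os.scandir``'s context-manager and iterator behaviour::

    from pathlib import Path

    with Path(".").scandir() as entries:
        names = sorted(entry.name for entry in entries if entry.is_file())
    print(names)
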
diff --git a/Misc/NEWS.d/next/Library/2024-10-28-11-33-59.gh-issue-126068.Pdznm_.rst b/Misc/NEWS.d/next/Library/2024-10-28-11-33-59.gh-issue-126068.Pdznm_.rst deleted file mode 100644 index a0faf61890da17..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-28-11-33-59.gh-issue-126068.Pdznm_.rst +++ /dev/null @@ -1,5 +0,0 @@ -Fix exceptions in the :mod:`argparse` module so that only error messages for -ArgumentError and ArgumentTypeError are now translated. -ArgumentError is now only used for command line errors, not for logical -errors in the program. TypeError is now raised instead of ValueError for -some logical errors. diff --git a/Misc/NEWS.d/next/Library/2024-10-28-19-49-18.gh-issue-118201.v41XXh.rst b/Misc/NEWS.d/next/Library/2024-10-28-19-49-18.gh-issue-118201.v41XXh.rst new file mode 100644 index 00000000000000..bed4b3b5956f31 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-10-28-19-49-18.gh-issue-118201.v41XXh.rst @@ -0,0 +1,2 @@ +Fixed intermittent failures of :any:`os.confstr`, :any:`os.pathconf` and +:any:`os.sysconf` on iOS and Android. diff --git a/Misc/NEWS.d/next/Library/2024-10-28-22-35-22.gh-issue-126083.TuI--n.rst b/Misc/NEWS.d/next/Library/2024-10-28-22-35-22.gh-issue-126083.TuI--n.rst deleted file mode 100644 index d64b7dd2fedbd6..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-28-22-35-22.gh-issue-126083.TuI--n.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed a reference leak in :class:`asyncio.Task` objects when reinitializing the same object with a non-``None`` context. Patch by Nico Posada. diff --git a/Misc/NEWS.d/next/Library/2024-10-29-07-24-52.gh-issue-125322.sstOM-.rst b/Misc/NEWS.d/next/Library/2024-10-29-07-24-52.gh-issue-125322.sstOM-.rst deleted file mode 100644 index e0cc91d287f89d..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-29-07-24-52.gh-issue-125322.sstOM-.rst +++ /dev/null @@ -1 +0,0 @@ -Correct detection of complex numbers support in libffi. diff --git a/Misc/NEWS.d/next/Library/2024-10-29-10-38-28.gh-issue-126080.qKRBuo.rst b/Misc/NEWS.d/next/Library/2024-10-29-10-38-28.gh-issue-126080.qKRBuo.rst deleted file mode 100644 index e54ac17b217c92..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-29-10-38-28.gh-issue-126080.qKRBuo.rst +++ /dev/null @@ -1,3 +0,0 @@ -Fix a use-after-free crash on :class:`asyncio.Task` objects for which the -underlying event loop implements an evil :meth:`~object.__getattribute__`. -Reported by Nico-Posada. Patch by Bénédikt Tran. diff --git a/Misc/NEWS.d/next/Library/2024-10-29-10-58-52.gh-issue-126106.rlF798.rst b/Misc/NEWS.d/next/Library/2024-10-29-10-58-52.gh-issue-126106.rlF798.rst deleted file mode 100644 index de989007b4c35a..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-29-10-58-52.gh-issue-126106.rlF798.rst +++ /dev/null @@ -1 +0,0 @@ -Fixes a possible ``NULL`` pointer dereference in :mod:`ssl`. diff --git a/Misc/NEWS.d/next/Library/2024-10-29-11-45-44.gh-issue-126105.cOL-R6.rst b/Misc/NEWS.d/next/Library/2024-10-29-11-45-44.gh-issue-126105.cOL-R6.rst deleted file mode 100644 index 547eb3af1ca064..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-29-11-45-44.gh-issue-126105.cOL-R6.rst +++ /dev/null @@ -1 +0,0 @@ -Fix a crash in :mod:`ast` when the :attr:`ast.AST._fields` attribute is deleted. 
diff --git a/Misc/NEWS.d/next/Library/2024-10-30-00-12-22.gh-issue-126156.BOSqv0.rst b/Misc/NEWS.d/next/Library/2024-10-30-00-12-22.gh-issue-126156.BOSqv0.rst deleted file mode 100644 index 4fe18275ab9384..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-30-00-12-22.gh-issue-126156.BOSqv0.rst +++ /dev/null @@ -1 +0,0 @@ -Improved performances of creating :py:class:`~http.cookies.Morsel` objects by a factor of 3.8x. diff --git a/Misc/NEWS.d/next/Library/2024-10-30-20-45-17.gh-issue-126205.CHEmtx.rst b/Misc/NEWS.d/next/Library/2024-10-30-20-45-17.gh-issue-126205.CHEmtx.rst deleted file mode 100644 index c92ffb75056606..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-30-20-45-17.gh-issue-126205.CHEmtx.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix issue where :func:`urllib.request.pathname2url` generated URLs beginning -with four slashes (rather than two) when given a Windows UNC path. diff --git a/Misc/NEWS.d/next/Library/2024-10-30-23-42-44.gh-issue-126223.k2qooc.rst b/Misc/NEWS.d/next/Library/2024-10-30-23-42-44.gh-issue-126223.k2qooc.rst deleted file mode 100644 index fee391c030b941..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-30-23-42-44.gh-issue-126223.k2qooc.rst +++ /dev/null @@ -1,2 +0,0 @@ -Raise a :exc:`UnicodeEncodeError` instead of a :exc:`SystemError` upon -calling :func:`!_interpreters.create` with an invalid Unicode character. diff --git a/Misc/NEWS.d/next/Library/2024-10-30-23-59-36.gh-issue-126212._9uYjT.rst b/Misc/NEWS.d/next/Library/2024-10-30-23-59-36.gh-issue-126212._9uYjT.rst deleted file mode 100644 index 047fe0f68048b5..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-30-23-59-36.gh-issue-126212._9uYjT.rst +++ /dev/null @@ -1,3 +0,0 @@ -Fix issue where :func:`urllib.request.pathname2url` and -:func:`~urllib.request.url2pathname` removed slashes from Windows DOS drive -paths and URLs. diff --git a/Misc/NEWS.d/next/Library/2024-10-31-14-06-28.gh-issue-126220.uJAJCU.rst b/Misc/NEWS.d/next/Library/2024-10-31-14-06-28.gh-issue-126220.uJAJCU.rst deleted file mode 100644 index 555f2f3bafbf33..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-10-31-14-06-28.gh-issue-126220.uJAJCU.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix crash in :class:`!cProfile.Profile` and :class:`!_lsprof.Profiler` when their -callbacks were directly called with 0 arguments. diff --git a/Misc/NEWS.d/next/Library/2024-11-01-10-35-49.gh-issue-120057.YWy81Q.rst b/Misc/NEWS.d/next/Library/2024-11-01-10-35-49.gh-issue-120057.YWy81Q.rst deleted file mode 100644 index ded60a3f57bca3..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-11-01-10-35-49.gh-issue-120057.YWy81Q.rst +++ /dev/null @@ -1,2 +0,0 @@ -Replace the ``os.environ.refresh()`` method with a new -:func:`os.reload_environ` function. Patch by Victor Stinner. diff --git a/Misc/NEWS.d/next/Library/2024-11-01-14-31-41.gh-issue-126138.yTniOG.rst b/Misc/NEWS.d/next/Library/2024-11-01-14-31-41.gh-issue-126138.yTniOG.rst deleted file mode 100644 index 459eebc82bd42a..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-11-01-14-31-41.gh-issue-126138.yTniOG.rst +++ /dev/null @@ -1,3 +0,0 @@ -Fix a use-after-free crash on :class:`asyncio.Task` objects -whose underlying coroutine yields an object that implements -an evil :meth:`~object.__getattribute__`. Patch by Nico Posada. 
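The gh-120057 entry above replaces ``os.environ.refresh()`` with a module-level function. A minimal sketch of the new spelling; the environment change is assumed to happen behind ``os.environ``'s back (here via :func:`os.putenv`, on a platform that supports it)::

    import os

    os.putenv("DEMO_VAR", "set without touching os.environ")
    os.reload_environ()                    # re-read the process environment
    print(os.environ.get("DEMO_VAR"))
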
diff --git a/Misc/NEWS.d/next/Library/2024-11-02-19-20-44.gh-issue-126303.yVvyWB.rst b/Misc/NEWS.d/next/Library/2024-11-02-19-20-44.gh-issue-126303.yVvyWB.rst deleted file mode 100644 index 0072c97338c251..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-11-02-19-20-44.gh-issue-126303.yVvyWB.rst +++ /dev/null @@ -1 +0,0 @@ -Fix pickling and copying of :class:`os.sched_param` objects. diff --git a/Misc/NEWS.d/next/Library/2024-11-03-09-42-42.gh-issue-126313.EFP6Dl.rst b/Misc/NEWS.d/next/Library/2024-11-03-09-42-42.gh-issue-126313.EFP6Dl.rst deleted file mode 100644 index dad348d8898f13..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-11-03-09-42-42.gh-issue-126313.EFP6Dl.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix an issue in :func:`curses.napms` when :func:`curses.initscr` has not yet -been called. Patch by Bénédikt Tran. diff --git a/Misc/NEWS.d/next/Library/2024-11-03-10-48-07.gh-issue-126353.ChDzot.rst b/Misc/NEWS.d/next/Library/2024-11-03-10-48-07.gh-issue-126353.ChDzot.rst deleted file mode 100644 index 16d508b7ec6a20..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-11-03-10-48-07.gh-issue-126353.ChDzot.rst +++ /dev/null @@ -1,2 +0,0 @@ -:func:`asyncio.get_event_loop` now does not implicitly creates an event loop. -It now raises a :exc:`RuntimeError` if there is no set event loop. Patch by Kumar Aditya. diff --git a/Misc/NEWS.d/next/Library/2024-11-03-14-43-51.gh-issue-126363.Xus7vU.rst b/Misc/NEWS.d/next/Library/2024-11-03-14-43-51.gh-issue-126363.Xus7vU.rst deleted file mode 100644 index 20fea9b9ef99a0..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-11-03-14-43-51.gh-issue-126363.Xus7vU.rst +++ /dev/null @@ -1,2 +0,0 @@ -Speed up pattern parsing in :meth:`pathlib.Path.glob` by skipping creation -of a :class:`pathlib.Path` object for the pattern. diff --git a/Misc/NEWS.d/next/Library/2024-11-04-16-40-02.gh-issue-126417.OWPqn0.rst b/Misc/NEWS.d/next/Library/2024-11-04-16-40-02.gh-issue-126417.OWPqn0.rst deleted file mode 100644 index c4a366343382f3..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-11-04-16-40-02.gh-issue-126417.OWPqn0.rst +++ /dev/null @@ -1,3 +0,0 @@ -Register the :class:`!multiprocessing.managers.DictProxy` and :class:`!multiprocessing.managers.ListProxy` types in -:mod:`multiprocessing.managers` to :class:`collections.abc.MutableMapping` and -:class:`collections.abc.MutableSequence`, respectively. diff --git a/Misc/NEWS.d/next/Library/2024-11-05-11-28-45.gh-issue-126451.XJMtqz.rst b/Misc/NEWS.d/next/Library/2024-11-05-11-28-45.gh-issue-126451.XJMtqz.rst deleted file mode 100644 index 563cb2515eca60..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-11-05-11-28-45.gh-issue-126451.XJMtqz.rst +++ /dev/null @@ -1,2 +0,0 @@ -Register the :class:`contextvars.Context` type to -:class:`collections.abc.Mapping`. diff --git a/Misc/NEWS.d/next/Library/2024-11-06-13-41-38.gh-issue-126489.toaf-0.rst b/Misc/NEWS.d/next/Library/2024-11-06-13-41-38.gh-issue-126489.toaf-0.rst deleted file mode 100644 index 8a6573cdea7b42..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-11-06-13-41-38.gh-issue-126489.toaf-0.rst +++ /dev/null @@ -1,3 +0,0 @@ -The Python implementation of :mod:`pickle` no longer calls -:meth:`pickle.Pickler.persistent_id` for the result of -:meth:`!persistent_id`. 
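The gh-126353 entry above removes the implicit loop creation from :func:`asyncio.get_event_loop`. A sketch of the documented outcome, assuming no loop is running and none has been set for the current thread::

    import asyncio

    try:
        asyncio.get_event_loop()       # no running or set loop: now an error
    except RuntimeError as exc:
        print("RuntimeError:", exc)

    # Creating or running a loop explicitly remains the supported pattern:
    asyncio.run(asyncio.sleep(0))
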
diff --git a/Misc/NEWS.d/next/Library/2024-11-06-23-40-28.gh-issue-125679.Qq9xF5.rst b/Misc/NEWS.d/next/Library/2024-11-06-23-40-28.gh-issue-125679.Qq9xF5.rst deleted file mode 100644 index ac6851e2689692..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-11-06-23-40-28.gh-issue-125679.Qq9xF5.rst +++ /dev/null @@ -1,2 +0,0 @@ -The :class:`multiprocessing.Lock` and :class:`multiprocessing.RLock` -``repr`` values no longer say "unknown" on macOS. diff --git a/Misc/NEWS.d/next/Library/2024-11-12-13-14-47.gh-issue-126727.5Eqfqd.rst b/Misc/NEWS.d/next/Library/2024-11-12-13-14-47.gh-issue-126727.5Eqfqd.rst new file mode 100644 index 00000000000000..7bec8a6b7a830a --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-11-12-13-14-47.gh-issue-126727.5Eqfqd.rst @@ -0,0 +1,3 @@ +``locale.nl_langinfo(locale.ERA)`` now returns multiple era description +segments separated by semicolons. Previously it only returned the first +segment on platforms with Glibc. diff --git a/Misc/NEWS.d/next/Library/2024-11-12-20-05-09.gh-issue-126601.Nj7bA9.rst b/Misc/NEWS.d/next/Library/2024-11-12-20-05-09.gh-issue-126601.Nj7bA9.rst new file mode 100644 index 00000000000000..11e2b7350a0e48 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-11-12-20-05-09.gh-issue-126601.Nj7bA9.rst @@ -0,0 +1,3 @@ +Fix issue where :func:`urllib.request.pathname2url` raised :exc:`OSError` +when given a Windows path containing a colon character not following a +drive letter, such as before an NTFS alternate data stream. diff --git a/Misc/NEWS.d/next/Library/2024-11-13-19-15-18.gh-issue-126780.ZZqJvI.rst b/Misc/NEWS.d/next/Library/2024-11-13-19-15-18.gh-issue-126780.ZZqJvI.rst new file mode 100644 index 00000000000000..93d45caf5cad72 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-11-13-19-15-18.gh-issue-126780.ZZqJvI.rst @@ -0,0 +1 @@ +Fix :func:`os.path.normpath` for drive-relative paths on Windows. diff --git a/Misc/NEWS.d/next/Library/2024-11-15-01-50-36.gh-issue-85168.bP8VIN.rst b/Misc/NEWS.d/next/Library/2024-11-15-01-50-36.gh-issue-85168.bP8VIN.rst new file mode 100644 index 00000000000000..abceda8f6fd707 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-11-15-01-50-36.gh-issue-85168.bP8VIN.rst @@ -0,0 +1,4 @@ +Fix issue where :func:`urllib.request.url2pathname` and +:func:`~urllib.request.pathname2url` always used UTF-8 when quoting and +unquoting file URIs. They now use the :term:`filesystem encoding and error +handler`. diff --git a/Misc/NEWS.d/next/Library/2024-11-18-19-03-46.gh-issue-126947.NiDYUe.rst b/Misc/NEWS.d/next/Library/2024-11-18-19-03-46.gh-issue-126947.NiDYUe.rst new file mode 100644 index 00000000000000..29ba4f21454fe1 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-11-18-19-03-46.gh-issue-126947.NiDYUe.rst @@ -0,0 +1,2 @@ +Raise :exc:`TypeError` in :meth:`!_pydatetime.timedelta.__new__` if the passed arguments are not :class:`int` or :class:`float`, so that the Python +implementation is in line with the C implementation. diff --git a/Misc/NEWS.d/next/Library/2024-11-18-22-02-47.gh-issue-118761.GQKD_J.rst b/Misc/NEWS.d/next/Library/2024-11-18-22-02-47.gh-issue-118761.GQKD_J.rst new file mode 100644 index 00000000000000..ebb9fe8016de21 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-11-18-22-02-47.gh-issue-118761.GQKD_J.rst @@ -0,0 +1,2 @@ +Improve import time of :mod:`mimetypes` by around 11-16 times. Patch by Hugo +van Kemenade. 
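The gh-126780 entry above fixes :func:`os.path.normpath` for drive-relative Windows paths (``C:spam`` as opposed to ``C:\spam``). A small sketch using ``ntpath`` so it runs anywhere; keeping the drive while collapsing the relative part is the behaviour the entry implies::

    import ntpath

    print(ntpath.normpath("C:spam\\..\\eggs"))    # drive-relative: 'C:eggs' expected
    print(ntpath.normpath("C:\\spam\\..\\eggs"))  # drive-absolute: 'C:\\eggs'
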
diff --git a/Misc/NEWS.d/next/Library/2024-11-19-14-34-05.gh-issue-126615.LOskwi.rst b/Misc/NEWS.d/next/Library/2024-11-19-14-34-05.gh-issue-126615.LOskwi.rst new file mode 100644 index 00000000000000..8c7a2ade03c19e --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-11-19-14-34-05.gh-issue-126615.LOskwi.rst @@ -0,0 +1,2 @@ +The :exc:`~ctypes.COMError` exception is now public. +Previously, this was private and only available in ``_ctypes``. diff --git a/Misc/NEWS.d/next/Library/2024-11-20-11-37-08.gh-issue-126316.ElkZmE.rst b/Misc/NEWS.d/next/Library/2024-11-20-11-37-08.gh-issue-126316.ElkZmE.rst new file mode 100644 index 00000000000000..d643254c5b3564 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-11-20-11-37-08.gh-issue-126316.ElkZmE.rst @@ -0,0 +1,2 @@ +:mod:`grp`: Make :func:`grp.getgrall` thread-safe by adding a mutex. Patch +by Victor Stinner. diff --git a/Misc/NEWS.d/next/Library/2024-11-20-16-58-59.gh-issue-126997.0PI41Y.rst b/Misc/NEWS.d/next/Library/2024-11-20-16-58-59.gh-issue-126997.0PI41Y.rst new file mode 100644 index 00000000000000..b85c51ef07dcbe --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-11-20-16-58-59.gh-issue-126997.0PI41Y.rst @@ -0,0 +1,3 @@ +Fix support of STRING and GLOBAL opcodes with non-ASCII arguments in +:mod:`pickletools`. :func:`pickletools.dis` now outputs non-ASCII bytes in +STRING, BINSTRING and SHORT_BINSTRING arguments as escaped (``\xXX``). diff --git a/Misc/NEWS.d/next/Library/2024-11-22-02-31-55.gh-issue-126766.jfkhBH.rst b/Misc/NEWS.d/next/Library/2024-11-22-02-31-55.gh-issue-126766.jfkhBH.rst new file mode 100644 index 00000000000000..998c99bf4358d5 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-11-22-02-31-55.gh-issue-126766.jfkhBH.rst @@ -0,0 +1,2 @@ +Fix issue where :func:`urllib.request.url2pathname` failed to discard any +'localhost' authority present in the URL. diff --git a/Misc/NEWS.d/next/Library/2024-11-22-03-40-02.gh-issue-127078.gI_PaP.rst b/Misc/NEWS.d/next/Library/2024-11-22-03-40-02.gh-issue-127078.gI_PaP.rst new file mode 100644 index 00000000000000..a84c06f3c7a273 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-11-22-03-40-02.gh-issue-127078.gI_PaP.rst @@ -0,0 +1,2 @@ +Fix issue where :func:`urllib.request.url2pathname` failed to discard an +extra slash before a UNC drive in the URL path on Windows. diff --git a/Misc/NEWS.d/next/Tests/2024-09-30-22-52-44.gh-issue-124295.VZy5kx.rst b/Misc/NEWS.d/next/Tests/2024-09-30-22-52-44.gh-issue-124295.VZy5kx.rst deleted file mode 100644 index 3c2455cfc8c530..00000000000000 --- a/Misc/NEWS.d/next/Tests/2024-09-30-22-52-44.gh-issue-124295.VZy5kx.rst +++ /dev/null @@ -1 +0,0 @@ -Add translation tests to the :mod:`argparse` module. diff --git a/Misc/NEWS.d/next/Tests/2024-10-21-14-10-56.gh-issue-125730.kcWbvI.rst b/Misc/NEWS.d/next/Tests/2024-10-21-14-10-56.gh-issue-125730.kcWbvI.rst deleted file mode 100644 index 061a1f9f27599c..00000000000000 --- a/Misc/NEWS.d/next/Tests/2024-10-21-14-10-56.gh-issue-125730.kcWbvI.rst +++ /dev/null @@ -1,2 +0,0 @@ -Change ``make test`` to not run GUI tests by default. Use ``make ci`` to run -tests with GUI tests instead. diff --git a/Misc/NEWS.d/next/Tests/2024-11-21-02-03-48.gh-issue-127076.a3avV1.rst b/Misc/NEWS.d/next/Tests/2024-11-21-02-03-48.gh-issue-127076.a3avV1.rst new file mode 100644 index 00000000000000..7dec8bd627c063 --- /dev/null +++ b/Misc/NEWS.d/next/Tests/2024-11-21-02-03-48.gh-issue-127076.a3avV1.rst @@ -0,0 +1 @@ +Disable strace based system call tests when LD_PRELOAD is set. 
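The gh-126615 entry above makes the Windows-only ``COMError`` importable from :mod:`ctypes` itself. A two-line sketch of the new public import path (previously the name was only available from the private ``_ctypes`` module)::

    from ctypes import COMError              # Windows only

    print(issubclass(COMError, Exception))   # True
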
diff --git a/Misc/NEWS.d/next/Windows/2024-10-15-21-28-43.gh-issue-125550.hmGWCP.rst b/Misc/NEWS.d/next/Windows/2024-10-15-21-28-43.gh-issue-125550.hmGWCP.rst deleted file mode 100644 index c3ae00c74b3d91..00000000000000 --- a/Misc/NEWS.d/next/Windows/2024-10-15-21-28-43.gh-issue-125550.hmGWCP.rst +++ /dev/null @@ -1,2 +0,0 @@ -Enable the :ref:`launcher` to detect Python 3.14 installs from the Windows -Store. diff --git a/Misc/NEWS.d/next/Windows/2024-10-23-17-24-23.gh-issue-125842.m3EF9E.rst b/Misc/NEWS.d/next/Windows/2024-10-23-17-24-23.gh-issue-125842.m3EF9E.rst deleted file mode 100644 index 63644721d57f5b..00000000000000 --- a/Misc/NEWS.d/next/Windows/2024-10-23-17-24-23.gh-issue-125842.m3EF9E.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix a :exc:`SystemError` when :func:`sys.exit` is called with ``0xffffffff`` -on Windows. diff --git a/Misc/NEWS.d/next/Windows/2024-10-29-09-39-06.gh-issue-126084.3wAL8o.rst b/Misc/NEWS.d/next/Windows/2024-10-29-09-39-06.gh-issue-126084.3wAL8o.rst deleted file mode 100644 index 319053d95db19e..00000000000000 --- a/Misc/NEWS.d/next/Windows/2024-10-29-09-39-06.gh-issue-126084.3wAL8o.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix venvwlauncher to launch pythonw instead of python so no extra console -window is created. diff --git a/Misc/NEWS.d/next/Windows/2024-10-29-19-48-03.gh-issue-125315.jdB9qN.rst b/Misc/NEWS.d/next/Windows/2024-10-29-19-48-03.gh-issue-125315.jdB9qN.rst deleted file mode 100644 index 3d813248766a5b..00000000000000 --- a/Misc/NEWS.d/next/Windows/2024-10-29-19-48-03.gh-issue-125315.jdB9qN.rst +++ /dev/null @@ -1,2 +0,0 @@ -Avoid crashing in :mod:`platform` due to slow WMI calls on some Windows -machines. diff --git a/Misc/NEWS.d/next/Windows/2024-10-29-20-09-52.gh-issue-126074.83ZzZs.rst b/Misc/NEWS.d/next/Windows/2024-10-29-20-09-52.gh-issue-126074.83ZzZs.rst deleted file mode 100644 index d4d06b090b5922..00000000000000 --- a/Misc/NEWS.d/next/Windows/2024-10-29-20-09-52.gh-issue-126074.83ZzZs.rst +++ /dev/null @@ -1 +0,0 @@ -Removed unnecessary DLLs from Windows embeddable package diff --git a/Misc/NEWS.d/next/Windows/2024-10-31-09-46-53.gh-issue-125729.KdKVLa.rst b/Misc/NEWS.d/next/Windows/2024-10-31-09-46-53.gh-issue-125729.KdKVLa.rst new file mode 100644 index 00000000000000..fbf4ab1cd1a11a --- /dev/null +++ b/Misc/NEWS.d/next/Windows/2024-10-31-09-46-53.gh-issue-125729.KdKVLa.rst @@ -0,0 +1 @@ +Makes the presence of the :mod:`turtle` module dependent on the Tcl/Tk installer option. Previously, the module was always installed but would be unusable without Tcl/Tk. diff --git a/Misc/NEWS.d/next/Windows/2024-11-07-20-42-31.gh-issue-126497.EARpd-.rst b/Misc/NEWS.d/next/Windows/2024-11-07-20-42-31.gh-issue-126497.EARpd-.rst deleted file mode 100644 index c902b9d6da8c65..00000000000000 --- a/Misc/NEWS.d/next/Windows/2024-11-07-20-42-31.gh-issue-126497.EARpd-.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fixes venv failure due to missing redirector executables in experimental -free-threaded installs. 
diff --git a/Misc/sbom.spdx.json b/Misc/sbom.spdx.json index cc73e93009b43f..739e005646ba97 100644 --- a/Misc/sbom.spdx.json +++ b/Misc/sbom.spdx.json @@ -48,11 +48,11 @@ "checksums": [ { "algorithm": "SHA1", - "checksumValue": "6aaee1b194bea30f0a60d1cce71eada8b14d3526" + "checksumValue": "373cc00d87782a736970644d863ff2ebbd0e4886" }, { "algorithm": "SHA256", - "checksumValue": "7bd4e53a8015534b5bbb58afe1a131b3989d3d4fca29bca685c44d34bcaa2555" + "checksumValue": "0f750bc336e510d14ac9a3e63fc2399f60f3f04f0061c426e86751ed5fba90e4" } ], "fileName": "Modules/expat/expat.h" @@ -62,11 +62,11 @@ "checksums": [ { "algorithm": "SHA1", - "checksumValue": "b70ce53fdc25ae482681ae2f6623c3c8edc9c1b7" + "checksumValue": "9e615c6e5c3ba00670f674a6b071bb855b0b563d" }, { "algorithm": "SHA256", - "checksumValue": "86afb425ec9999eb4f1ec9ab2fb41c58c4aa5cb9bf934b8c94264670fc5a961d" + "checksumValue": "3d90a4b65c40a3f848c36100f4d73b933a015c7b7cd85c28e4331a6b845c1ad0" } ], "fileName": "Modules/expat/expat_external.h" @@ -128,18 +128,18 @@ "fileName": "Modules/expat/nametab.h" }, { - "SPDXID": "SPDXRef-FILE-Modules-expat-pyexpatns.h", + "SPDXID": "SPDXRef-FILE-Modules-expat-refresh.sh", "checksums": [ { "algorithm": "SHA1", - "checksumValue": "f50c899172acd93fc539007bfb43315b83d407e4" + "checksumValue": "a9b0a33b8359cfe94b23972a1605daf8dcc605d9" }, { "algorithm": "SHA256", - "checksumValue": "d571b8258cfaa067a20adef553e5fcedd6671ca4a8841483496de031bd904567" + "checksumValue": "19eb541460bc2ca8b87118acd3c048f6af77affbf8719ac29aa7b6c8d70f83fd" } ], - "fileName": "Modules/expat/pyexpatns.h" + "fileName": "Modules/expat/refresh.sh" }, { "SPDXID": "SPDXRef-FILE-Modules-expat-siphash.h", @@ -188,11 +188,11 @@ "checksums": [ { "algorithm": "SHA1", - "checksumValue": "b2ec0ad170ccc21e63fbcfc8d7404cdd756eedd3" + "checksumValue": "3199fbd38b6fb158f73d5c8de6b6e6e3812ef803" }, { "algorithm": "SHA256", - "checksumValue": "92159d4e17393e56ee85f47d9fb31348695a58589899aa01e7536cdc88f60b85" + "checksumValue": "c1518244dd5ea397e345d00e12cc45d42f43453ed208218559c981c97a0583e2" } ], "fileName": "Modules/expat/xmlparse.c" @@ -300,11 +300,11 @@ "checksums": [ { "algorithm": "SHA1", - "checksumValue": "a34e821b68ef5334eccf4f729b28bb7bb65b965e" + "checksumValue": "1cd3cda98e0e6882a13a59268b88640c542350fd" }, { "algorithm": "SHA256", - "checksumValue": "4582db9143c0810b98838a5357c577e0b32ae77f3018486159df4e0dfd3fce3c" + "checksumValue": "41a420bc9355e451720e60e9536e66f04dc6e416ca9217c4ab18d827887a2e08" } ], "fileName": "Modules/_hacl/Hacl_Hash_Blake2b.c" @@ -328,11 +328,11 @@ "checksums": [ { "algorithm": "SHA1", - "checksumValue": "0ffe60c6d5eed5dd222515e820d461d319d16b1f" + "checksumValue": "0ceef306590ec12251db03a31fc08ecba697486d" }, { "algorithm": "SHA256", - "checksumValue": "4804cb3ce68bfdcf98853d6f1d77b4a844a3c2796f776b39770ba327e400d402" + "checksumValue": "1575a23b21319e55e670f74194fc2dfd1777eb5a3816cad43750e03da6e44db9" } ], "fileName": "Modules/_hacl/Hacl_Hash_Blake2b_Simd256.c" @@ -370,11 +370,11 @@ "checksums": [ { "algorithm": "SHA1", - "checksumValue": "cf035ffeff875bc74345a47373ce25dc408ea9dc" + "checksumValue": "9616a9f8d795d64487bf86a96719f943729621e2" }, { "algorithm": "SHA256", - "checksumValue": "579059b002c45fab0fed6381e85c3f5eaf1d959400ca64b103542ac6c35bade3" + "checksumValue": "5ecde5ddc8ec073cffe64d60e868535d995f33fb0f87f9b50e68bd2a694b7434" } ], "fileName": "Modules/_hacl/Hacl_Hash_Blake2s.c" @@ -398,11 +398,11 @@ "checksums": [ { "algorithm": "SHA1", - "checksumValue": "9bb53022d158a9c349edb52a8def8aac7d098a4e" + 
"checksumValue": "5b950ce0a5c8f0c2c56b4ac96e1943b504255d45" }, { "algorithm": "SHA256", - "checksumValue": "2abde0c6b5da0402e91b4bedfe786c24b908fbdc04e08e74651c7624729254d9" + "checksumValue": "5a5f5d8e376dc30d89fd6c6c435157fe9ffa5308030e7abb1256afaee0765536" } ], "fileName": "Modules/_hacl/Hacl_Hash_Blake2s_Simd128.c" @@ -496,11 +496,11 @@ "checksums": [ { "algorithm": "SHA1", - "checksumValue": "f2aa3ed6acce621c162bc3a0592780ce5aa3bc4d" + "checksumValue": "b0aa8810339adb09623ffa429246b4324fac4565" }, { "algorithm": "SHA256", - "checksumValue": "30638efb75c8b185bb09c3df6977e3f3c5d21a1e696218cf7ade6bc4d5201b31" + "checksumValue": "2288f8f860efe80eed4f1e14ef570079b7459aeb41f87e94e691d7cf5e0e7adb" } ], "fileName": "Modules/_hacl/Hacl_Hash_SHA2.c" @@ -524,11 +524,11 @@ "checksums": [ { "algorithm": "SHA1", - "checksumValue": "fc2c3ef83a71bef42eb3f73b78e4ef6642a4634e" + "checksumValue": "ef374b9d0951ebb38006af944dd4b38a6cf3abb2" }, { "algorithm": "SHA256", - "checksumValue": "e4f3ed9d1e8f661482cbd2d04b197e15cc3b698c5ef2ddedf0eb65df320dbbc4" + "checksumValue": "164df19f229143006c5f9a3c0bd771415f152bfbc7efb61c337fa0f903003eb3" } ], "fileName": "Modules/_hacl/Hacl_Hash_SHA3.c" @@ -566,11 +566,11 @@ "checksums": [ { "algorithm": "SHA1", - "checksumValue": "47ce34375d43a27312e1fffb96b8965610b05855" + "checksumValue": "2e08072c0c57dac02b67f3f71d77068c537ac02e" }, { "algorithm": "SHA256", - "checksumValue": "8affd767d7644150064d8bccd05d7bf4c4ae41fd4bb5bf5b8e943eabf09f3d74" + "checksumValue": "e69fd3e84f77873ecb414f5300761b686321d01f5710ccf2517765236b08fc25" } ], "fileName": "Modules/_hacl/Lib_Memzero0.c" @@ -748,11 +748,11 @@ "checksums": [ { "algorithm": "SHA1", - "checksumValue": "0018e084339058dd454b4e49d10d236b4f896bf8" + "checksumValue": "2e9ae174142fc491f20567ab8b5c08cef9b07cfe" }, { "algorithm": "SHA256", - "checksumValue": "10e959a92b3288a6165a404c8fae2bbcd7fb00a9abbae2b7809fa55d6fe9068d" + "checksumValue": "07100964adcf4b5f8bd4773e25f475b34cd180b90df8b1c0052e55c008b7cc49" } ], "fileName": "Modules/_hacl/internal/Hacl_Hash_SHA2.h" @@ -1640,14 +1640,14 @@ "checksums": [ { "algorithm": "SHA256", - "checksumValue": "935ae51d0ff0bf1403f0ecc1ff02b8f685d09053618558c07fbe4bd2abbc5dd1" + "checksumValue": "40de5297b032d2676fc0039049b4e8dab1f2730eebb5ecff6a40c04fa0356339" } ], - "downloadLocation": "https://github.com/hacl-star/hacl-star/archive/315a9e491d2bc347b9dae99e0ea506995ea84d9d.zip", + "downloadLocation": "https://github.com/hacl-star/hacl-star/archive/f218923ef2417d963d7efc7951593ae6aef613f7.zip", "externalRefs": [ { "referenceCategory": "SECURITY", - "referenceLocator": "cpe:2.3:a:hacl-star:hacl-star:315a9e491d2bc347b9dae99e0ea506995ea84d9d:*:*:*:*:*:*:*", + "referenceLocator": "cpe:2.3:a:hacl-star:hacl-star:f218923ef2417d963d7efc7951593ae6aef613f7:*:*:*:*:*:*:*", "referenceType": "cpe23Type" } ], @@ -1655,7 +1655,7 @@ "name": "hacl-star", "originator": "Organization: HACL* Developers", "primaryPackagePurpose": "SOURCE", - "versionInfo": "315a9e491d2bc347b9dae99e0ea506995ea84d9d" + "versionInfo": "f218923ef2417d963d7efc7951593ae6aef613f7" }, { "SPDXID": "SPDXRef-PACKAGE-macholib", @@ -1749,7 +1749,7 @@ "spdxElementId": "SPDXRef-PACKAGE-expat" }, { - "relatedSpdxElement": "SPDXRef-FILE-Modules-expat-pyexpatns.h", + "relatedSpdxElement": "SPDXRef-FILE-Modules-expat-refresh.sh", "relationshipType": "CONTAINS", "spdxElementId": "SPDXRef-PACKAGE-expat" }, diff --git a/Modules/_asynciomodule.c b/Modules/_asynciomodule.c index 617a3dca35d9c2..f883125a2c70b2 100644 --- 
a/Modules/_asynciomodule.c +++ b/Modules/_asynciomodule.c @@ -1017,8 +1017,10 @@ _asyncio_Future_remove_done_callback_impl(FutureObj *self, PyTypeObject *cls, if (len == 1) { PyObject *cb_tup = PyList_GET_ITEM(self->fut_callbacks, 0); + Py_INCREF(cb_tup); int cmp = PyObject_RichCompareBool( PyTuple_GET_ITEM(cb_tup, 0), fn, Py_EQ); + Py_DECREF(cb_tup); if (cmp == -1) { return NULL; } diff --git a/Modules/_ctypes/_ctypes.c b/Modules/_ctypes/_ctypes.c index eae69e484e1660..34529bce496d88 100644 --- a/Modules/_ctypes/_ctypes.c +++ b/Modules/_ctypes/_ctypes.c @@ -956,32 +956,48 @@ CDataType_in_dll_impl(PyObject *type, PyTypeObject *cls, PyObject *dll, return NULL; } +#undef USE_DLERROR #ifdef MS_WIN32 Py_BEGIN_ALLOW_THREADS address = (void *)GetProcAddress(handle, name); Py_END_ALLOW_THREADS - if (!address) { - PyErr_Format(PyExc_ValueError, - "symbol '%s' not found", - name); - return NULL; - } #else + #ifdef __CYGWIN__ + // dlerror() isn't very helpful on cygwin + #else + #define USE_DLERROR + /* dlerror() always returns the latest error. + * + * Clear the previous value before calling dlsym(), + * to ensure we can tell if our call resulted in an error. + */ + (void)dlerror(); + #endif address = (void *)dlsym(handle, name); - if (!address) { -#ifdef __CYGWIN__ -/* dlerror() isn't very helpful on cygwin */ - PyErr_Format(PyExc_ValueError, - "symbol '%s' not found", - name); -#else - PyErr_SetString(PyExc_ValueError, dlerror()); #endif - return NULL; + + if (address) { + ctypes_state *st = get_module_state_by_def(Py_TYPE(type)); + return PyCData_AtAddress(st, type, address); } -#endif - ctypes_state *st = get_module_state_by_def(Py_TYPE(type)); - return PyCData_AtAddress(st, type, address); + + #ifdef USE_DLERROR + const char *dlerr = dlerror(); + if (dlerr) { + PyObject *message = PyUnicode_DecodeLocale(dlerr, "surrogateescape"); + if (message) { + PyErr_SetObject(PyExc_ValueError, message); + Py_DECREF(message); + return NULL; + } + // Ignore errors from PyUnicode_DecodeLocale, + // fall back to the generic error below. + PyErr_Clear(); + } + #endif +#undef USE_DLERROR + PyErr_Format(PyExc_ValueError, "symbol '%s' not found", name); + return NULL; } /*[clinic input] @@ -3759,6 +3775,7 @@ PyCFuncPtr_FromDll(PyTypeObject *type, PyObject *args, PyObject *kwds) return NULL; } +#undef USE_DLERROR #ifdef MS_WIN32 address = FindAddress(handle, name, (PyObject *)type); if (!address) { @@ -3774,20 +3791,41 @@ PyCFuncPtr_FromDll(PyTypeObject *type, PyObject *args, PyObject *kwds) return NULL; } #else + #ifdef __CYGWIN__ + //dlerror() isn't very helpful on cygwin */ + #else + #define USE_DLERROR + /* dlerror() always returns the latest error. + * + * Clear the previous value before calling dlsym(), + * to ensure we can tell if our call resulted in an error. + */ + (void)dlerror(); + #endif address = (PPROC)dlsym(handle, name); + if (!address) { -#ifdef __CYGWIN__ -/* dlerror() isn't very helpful on cygwin */ - PyErr_Format(PyExc_AttributeError, - "function '%s' not found", - name); -#else - PyErr_SetString(PyExc_AttributeError, dlerror()); -#endif + #ifdef USE_DLERROR + const char *dlerr = dlerror(); + if (dlerr) { + PyObject *message = PyUnicode_DecodeLocale(dlerr, "surrogateescape"); + if (message) { + PyErr_SetObject(PyExc_AttributeError, message); + Py_DECREF(ftuple); + Py_DECREF(message); + return NULL; + } + // Ignore errors from PyUnicode_DecodeLocale, + // fall back to the generic error below. 
+ PyErr_Clear(); + } + #endif + PyErr_Format(PyExc_AttributeError, "function '%s' not found", name); Py_DECREF(ftuple); return NULL; } #endif +#undef USE_DLERROR ctypes_state *st = get_module_state_by_def(Py_TYPE(type)); if (!_validate_paramflags(st, type, paramflags)) { Py_DECREF(ftuple); diff --git a/Modules/_ctypes/callproc.c b/Modules/_ctypes/callproc.c index 5ac9cf16681645..218c3a9c81e05f 100644 --- a/Modules/_ctypes/callproc.c +++ b/Modules/_ctypes/callproc.c @@ -1623,13 +1623,39 @@ static PyObject *py_dl_sym(PyObject *self, PyObject *args) if (PySys_Audit("ctypes.dlsym/handle", "O", args) < 0) { return NULL; } +#undef USE_DLERROR + #ifdef __CYGWIN__ + // dlerror() isn't very helpful on cygwin + #else + #define USE_DLERROR + /* dlerror() always returns the latest error. + * + * Clear the previous value before calling dlsym(), + * to ensure we can tell if our call resulted in an error. + */ + (void)dlerror(); + #endif ptr = dlsym((void*)handle, name); - if (!ptr) { - PyErr_SetString(PyExc_OSError, - dlerror()); - return NULL; + if (ptr) { + return PyLong_FromVoidPtr(ptr); + } + #ifdef USE_DLERROR + const char *dlerr = dlerror(); + if (dlerr) { + PyObject *message = PyUnicode_DecodeLocale(dlerr, "surrogateescape"); + if (message) { + PyErr_SetObject(PyExc_OSError, message); + Py_DECREF(message); + return NULL; + } + // Ignore errors from PyUnicode_DecodeLocale, + // fall back to the generic error below. + PyErr_Clear(); } - return PyLong_FromVoidPtr(ptr); + #endif + #undef USE_DLERROR + PyErr_Format(PyExc_OSError, "symbol '%s' not found", name); + return NULL; } #endif diff --git a/Modules/_decimal/docstrings.h b/Modules/_decimal/docstrings.h index b34bff83d3f4e9..5abd7b9d807e19 100644 --- a/Modules/_decimal/docstrings.h +++ b/Modules/_decimal/docstrings.h @@ -1,5 +1,5 @@ /* - * Copyright (c) 2001-2012 Python Software Foundation. All Rights Reserved. + * Copyright (c) 2001 Python Software Foundation. All Rights Reserved. * Modified and extended by Stefan Krah. */ diff --git a/Modules/_decimal/tests/bench.py b/Modules/_decimal/tests/bench.py index 640290f2ec7962..6605e9a92e2dde 100644 --- a/Modules/_decimal/tests/bench.py +++ b/Modules/_decimal/tests/bench.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2001-2012 Python Software Foundation. All Rights Reserved. +# Copyright (C) 2001 Python Software Foundation. All Rights Reserved. # Modified and extended by Stefan Krah. # diff --git a/Modules/_functoolsmodule.c b/Modules/_functoolsmodule.c index d2afe1a1bea018..24b38063dde9e5 100644 --- a/Modules/_functoolsmodule.c +++ b/Modules/_functoolsmodule.c @@ -18,7 +18,7 @@ class _functools._lru_cache_wrapper "PyObject *" "&lru_cache_type_spec" /* _functools module written and maintained by Hye-Shik Chang with adaptations by Raymond Hettinger - Copyright (c) 2004, 2005, 2006 Python Software Foundation. + Copyright (c) 2004 Python Software Foundation. All rights reserved. */ @@ -937,8 +937,8 @@ _functools.reduce function as func: object iterable as seq: object - initial as result: object = NULL / + initial as result: object = NULL Apply a function of two arguments cumulatively to the items of an iterable, from left to right. @@ -953,7 +953,7 @@ calculates ((((1 + 2) + 3) + 4) + 5). 
static PyObject * _functools_reduce_impl(PyObject *module, PyObject *func, PyObject *seq, PyObject *result) -/*[clinic end generated code: output=30d898fe1267c79d input=d233c2670cba7f66]*/ +/*[clinic end generated code: output=30d898fe1267c79d input=1511e9a8c38581ac]*/ { PyObject *args, *it; diff --git a/Modules/_hacl/Hacl_Hash_Blake2b.c b/Modules/_hacl/Hacl_Hash_Blake2b.c index cd3b9777e09f6c..1bab75e6aaf2ab 100644 --- a/Modules/_hacl/Hacl_Hash_Blake2b.c +++ b/Modules/_hacl/Hacl_Hash_Blake2b.c @@ -1029,7 +1029,8 @@ Hacl_Hash_Blake2b_reset_with_key_and_params( uint8_t *k ) { - index_of_state(s); + Hacl_Hash_Blake2b_index i1 = index_of_state(s); + KRML_MAYBE_UNUSED_VAR(i1); reset_raw(s, ((Hacl_Hash_Blake2b_params_and_key){ .fst = p, .snd = k })); } diff --git a/Modules/_hacl/Hacl_Hash_Blake2b_Simd256.c b/Modules/_hacl/Hacl_Hash_Blake2b_Simd256.c index 92b2e8f539041b..19234ab9d7f9b2 100644 --- a/Modules/_hacl/Hacl_Hash_Blake2b_Simd256.c +++ b/Modules/_hacl/Hacl_Hash_Blake2b_Simd256.c @@ -855,7 +855,8 @@ Hacl_Hash_Blake2b_Simd256_reset_with_key_and_params( uint8_t *k ) { - index_of_state(s); + Hacl_Hash_Blake2b_index i1 = index_of_state(s); + KRML_MAYBE_UNUSED_VAR(i1); reset_raw(s, ((Hacl_Hash_Blake2b_params_and_key){ .fst = p, .snd = k })); } diff --git a/Modules/_hacl/Hacl_Hash_Blake2s.c b/Modules/_hacl/Hacl_Hash_Blake2s.c index e5e0ecd0bfde7e..ceb7385072e048 100644 --- a/Modules/_hacl/Hacl_Hash_Blake2s.c +++ b/Modules/_hacl/Hacl_Hash_Blake2s.c @@ -1011,7 +1011,8 @@ Hacl_Hash_Blake2s_reset_with_key_and_params( uint8_t *k ) { - index_of_state(s); + Hacl_Hash_Blake2b_index i1 = index_of_state(s); + KRML_MAYBE_UNUSED_VAR(i1); reset_raw(s, ((Hacl_Hash_Blake2b_params_and_key){ .fst = p, .snd = k })); } diff --git a/Modules/_hacl/Hacl_Hash_Blake2s_Simd128.c b/Modules/_hacl/Hacl_Hash_Blake2s_Simd128.c index f675a7f14f192f..3b68783bfad9b4 100644 --- a/Modules/_hacl/Hacl_Hash_Blake2s_Simd128.c +++ b/Modules/_hacl/Hacl_Hash_Blake2s_Simd128.c @@ -842,7 +842,8 @@ Hacl_Hash_Blake2s_Simd128_reset_with_key_and_params( uint8_t *k ) { - index_of_state(s); + Hacl_Hash_Blake2b_index i1 = index_of_state(s); + KRML_MAYBE_UNUSED_VAR(i1); reset_raw(s, ((Hacl_Hash_Blake2b_params_and_key){ .fst = p, .snd = k })); } diff --git a/Modules/_hacl/Hacl_Hash_SHA2.c b/Modules/_hacl/Hacl_Hash_SHA2.c index 4b6af5fc78c680..cc930bbc89e8ad 100644 --- a/Modules/_hacl/Hacl_Hash_SHA2.c +++ b/Modules/_hacl/Hacl_Hash_SHA2.c @@ -211,7 +211,7 @@ void Hacl_Hash_SHA2_sha224_init(uint32_t *hash) os[i] = x;); } -static inline void sha224_update_nblocks(uint32_t len, uint8_t *b, uint32_t *st) +void Hacl_Hash_SHA2_sha224_update_nblocks(uint32_t len, uint8_t *b, uint32_t *st) { Hacl_Hash_SHA2_sha256_update_nblocks(len, b, st); } @@ -825,7 +825,7 @@ void Hacl_Hash_SHA2_digest_224(Hacl_Streaming_MD_state_32 *state, uint8_t *outpu } uint8_t *buf_last = buf_1 + r - ite; uint8_t *buf_multi = buf_1; - sha224_update_nblocks(0U, buf_multi, tmp_block_state); + Hacl_Hash_SHA2_sha224_update_nblocks(0U, buf_multi, tmp_block_state); uint64_t prev_len_last = total_len - (uint64_t)r; Hacl_Hash_SHA2_sha224_update_last(prev_len_last + (uint64_t)r, r, buf_last, tmp_block_state); Hacl_Hash_SHA2_sha224_finish(tmp_block_state, output); @@ -847,7 +847,7 @@ void Hacl_Hash_SHA2_hash_224(uint8_t *output, uint8_t *input, uint32_t input_len Hacl_Hash_SHA2_sha224_init(st); uint32_t rem = input_len % 64U; uint64_t len_ = (uint64_t)input_len; - sha224_update_nblocks(input_len, ib, st); + Hacl_Hash_SHA2_sha224_update_nblocks(input_len, ib, st); uint32_t rem1 = input_len % 
64U; uint8_t *b0 = ib; uint8_t *lb = b0 + input_len - rem1; diff --git a/Modules/_hacl/Hacl_Hash_SHA3.c b/Modules/_hacl/Hacl_Hash_SHA3.c index 9cf5abb330b180..b964e1d9c0aa69 100644 --- a/Modules/_hacl/Hacl_Hash_SHA3.c +++ b/Modules/_hacl/Hacl_Hash_SHA3.c @@ -251,7 +251,8 @@ Hacl_Hash_SHA3_update_multi_sha3( uint8_t *bl0 = b_; uint8_t *uu____0 = b0 + i * block_len(a); memcpy(bl0, uu____0, block_len(a) * sizeof (uint8_t)); - block_len(a); + uint32_t unused = block_len(a); + KRML_MAYBE_UNUSED_VAR(unused); absorb_inner_32(b_, s); } } diff --git a/Modules/_hacl/Lib_Memzero0.c b/Modules/_hacl/Lib_Memzero0.c index 5b1a2f7797db76..5c269d231de82f 100644 --- a/Modules/_hacl/Lib_Memzero0.c +++ b/Modules/_hacl/Lib_Memzero0.c @@ -36,7 +36,7 @@ void Lib_Memzero0_memzero0(void *dst, uint64_t len) { size_t len_ = (size_t) len; #ifdef _WIN32 - SecureZeroMemory(dst, len); + SecureZeroMemory(dst, len_); #elif defined(__APPLE__) && defined(__MACH__) memset_s(dst, len_, 0, len_); #elif (defined(__linux__) && !defined(LINUX_NO_EXPLICIT_BZERO)) || defined(__FreeBSD__) diff --git a/Modules/_hacl/internal/Hacl_Hash_SHA2.h b/Modules/_hacl/internal/Hacl_Hash_SHA2.h index 0127f4373fb1a1..cb60f9e9bd4df6 100644 --- a/Modules/_hacl/internal/Hacl_Hash_SHA2.h +++ b/Modules/_hacl/internal/Hacl_Hash_SHA2.h @@ -123,6 +123,8 @@ void Hacl_Hash_SHA2_sha256_finish(uint32_t *st, uint8_t *h); void Hacl_Hash_SHA2_sha224_init(uint32_t *hash); +void Hacl_Hash_SHA2_sha224_update_nblocks(uint32_t len, uint8_t *b, uint32_t *st); + void Hacl_Hash_SHA2_sha224_update_last(uint64_t totlen, uint32_t len, uint8_t *b, uint32_t *st); diff --git a/Modules/_hacl/refresh.sh b/Modules/_hacl/refresh.sh index 6234fea9f17bc7..4147ab302fe146 100755 --- a/Modules/_hacl/refresh.sh +++ b/Modules/_hacl/refresh.sh @@ -22,7 +22,7 @@ fi # Update this when updating to a new version after verifying that the changes # the update brings in are good. -expected_hacl_star_rev=315a9e491d2bc347b9dae99e0ea506995ea84d9d +expected_hacl_star_rev=f218923ef2417d963d7efc7951593ae6aef613f7 hacl_dir="$(realpath "$1")" cd "$(dirname "$0")" diff --git a/Modules/_interpchannelsmodule.c b/Modules/_interpchannelsmodule.c index b8d7dfb87cce0e..75d69ade1d3c9b 100644 --- a/Modules/_interpchannelsmodule.c +++ b/Modules/_interpchannelsmodule.c @@ -63,7 +63,7 @@ _globals (static struct globals): data (void *) obj (PyObject *) interpid (int64_t) - new_object (xid_newobjectfunc) + new_object (xid_newobjfunc) free (xid_freefunc) last (struct _channelitem *): ... @@ -1758,6 +1758,11 @@ channel_send(_channels *channels, int64_t cid, PyObject *obj, } int64_t interpid = PyInterpreterState_GetID(interp); + _PyXIData_lookup_context_t ctx; + if (_PyXIData_GetLookupContext(interp, &ctx) < 0) { + return -1; + } + // Look up the channel. 
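/*
 * Standalone sketch (invented names, not the CPython API): the _interpchannels,
 * _interpqueues and _testinternalcapi hunks in this area all follow the same
 * shape -- resolve a _PyXIData_lookup_context_t once per operation with
 * _PyXIData_GetLookupContext() and pass it explicitly to _PyObject_GetXIData(),
 * instead of letting the helper re-derive global state. The sketch below shows
 * that "resolve the context once, thread it through the helpers" pattern.
 */
#include <stdio.h>

/* Stand-in for _PyXIData_lookup_context_t. */
typedef struct {
    int interp_id;
    const char *not_shareable_error;   /* stands in for ctx.PyExc_NotShareableError */
} lookup_context;

/* Resolve the context once per operation; may fail. */
static int
get_lookup_context(int interp_id, lookup_context *ctx)
{
    if (interp_id < 0) {
        return -1;                      /* mirrors the early "return -1" checks in the hunks */
    }
    ctx->interp_id = interp_id;
    ctx->not_shareable_error = "NotShareableError";
    return 0;
}

/* Helpers take the context explicitly instead of re-deriving it. */
static int
convert_object(const lookup_context *ctx, const char *obj)
{
    printf("interp %d: converting %s (failures would raise %s)\n",
           ctx->interp_id, obj, ctx->not_shareable_error);
    return 0;
}

int
main(void)
{
    lookup_context ctx;
    if (get_lookup_context(0, &ctx) < 0) {
        return 1;
    }
    return convert_object(&ctx, "b'data'");
}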
PyThread_type_lock mutex = NULL; _channel_state *chan = NULL; @@ -1779,7 +1784,7 @@ channel_send(_channels *channels, int64_t cid, PyObject *obj, PyThread_release_lock(mutex); return -1; } - if (_PyObject_GetXIData(obj, data) != 0) { + if (_PyObject_GetXIData(&ctx, obj, data) != 0) { PyThread_release_lock(mutex); GLOBAL_FREE(data); return -1; diff --git a/Modules/_interpqueuesmodule.c b/Modules/_interpqueuesmodule.c index 8d0e223db7ff19..808938a9e8cd16 100644 --- a/Modules/_interpqueuesmodule.c +++ b/Modules/_interpqueuesmodule.c @@ -1127,6 +1127,12 @@ queue_destroy(_queues *queues, int64_t qid) static int queue_put(_queues *queues, int64_t qid, PyObject *obj, int fmt, int unboundop) { + PyInterpreterState *interp = PyInterpreterState_Get(); + _PyXIData_lookup_context_t ctx; + if (_PyXIData_GetLookupContext(interp, &ctx) < 0) { + return -1; + } + // Look up the queue. _queue *queue = NULL; int err = _queues_lookup(queues, qid, &queue); @@ -1141,13 +1147,12 @@ queue_put(_queues *queues, int64_t qid, PyObject *obj, int fmt, int unboundop) _queue_unmark_waiter(queue, queues->mutex); return -1; } - if (_PyObject_GetXIData(obj, data) != 0) { + if (_PyObject_GetXIData(&ctx, obj, data) != 0) { _queue_unmark_waiter(queue, queues->mutex); GLOBAL_FREE(data); return -1; } - assert(_PyXIData_INTERPID(data) == \ - PyInterpreterState_GetID(PyInterpreterState_Get())); + assert(_PyXIData_INTERPID(data) == PyInterpreterState_GetID(interp)); // Add the data to the queue. int64_t interpid = -1; // _queueitem_init() will set it. diff --git a/Modules/_interpreters_common.h b/Modules/_interpreters_common.h index b0e31a33734dab..a6c639feea5d14 100644 --- a/Modules/_interpreters_common.h +++ b/Modules/_interpreters_common.h @@ -8,15 +8,24 @@ static int ensure_xid_class(PyTypeObject *cls, xidatafunc getdata) { - //assert(cls->tp_flags & Py_TPFLAGS_HEAPTYPE); - return _PyXIData_RegisterClass(cls, getdata); + PyInterpreterState *interp = PyInterpreterState_Get(); + _PyXIData_lookup_context_t ctx; + if (_PyXIData_GetLookupContext(interp, &ctx) < 0) { + return -1; + } + return _PyXIData_RegisterClass(&ctx, cls, getdata); } #ifdef REGISTERS_HEAP_TYPES static int clear_xid_class(PyTypeObject *cls) { - return _PyXIData_UnregisterClass(cls); + PyInterpreterState *interp = PyInterpreterState_Get(); + _PyXIData_lookup_context_t ctx; + if (_PyXIData_GetLookupContext(interp, &ctx) < 0) { + return -1; + } + return _PyXIData_UnregisterClass(&ctx, cls); } #endif diff --git a/Modules/_interpretersmodule.c b/Modules/_interpretersmodule.c index 95acdd69e53260..a36823c4bb982b 100644 --- a/Modules/_interpretersmodule.c +++ b/Modules/_interpretersmodule.c @@ -936,6 +936,11 @@ static int _interp_exec(PyObject *self, PyInterpreterState *interp, PyObject *code_arg, PyObject *shared_arg, PyObject **p_excinfo) { + if (shared_arg != NULL && !PyDict_CheckExact(shared_arg)) { + PyErr_SetString(PyExc_TypeError, "expected 'shared' to be a dict"); + return -1; + } + // Extract code. 
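/*
 * The _json.c hunks a little further down replace the per-level
 * newline_indent/separator_indent concatenation with a single indent_cache list
 * whose layout the added comment describes: even slots hold '\n' + indent * k
 * (written after an opening and before a closing bracket), odd slots hold
 * item_separator followed by that string (written between items), so the list
 * size stays odd. The standalone sketch below builds the same layout with plain
 * C strings to make the indexing concrete; the helper names are invented and
 * this is an illustration, not the CPython code.
 */
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/* Build cache[0 .. 2*levels]:
 *   cache[2*k]   = "\n" + indent repeated k times
 *   cache[2*k-1] = separator + cache[2*k]
 * so the array always has an odd number of entries. */
static char **
build_indent_cache(const char *indent, const char *separator, int levels)
{
    size_t n = 2 * (size_t)levels + 1;
    char **cache = calloc(n, sizeof(*cache));
    if (!cache) {
        return NULL;
    }
    for (int k = 0; k <= levels; k++) {
        size_t len = 1 + (size_t)k * strlen(indent);
        char *s = malloc(len + 1);
        s[0] = '\n';
        s[1] = '\0';
        for (int i = 0; i < k; i++) {
            strcat(s, indent);
        }
        cache[2 * k] = s;
        if (k > 0) {
            char *sep = malloc(strlen(separator) + len + 1);
            strcpy(sep, separator);
            strcat(sep, s);
            cache[2 * k - 1] = sep;
        }
    }
    return cache;
}

int
main(void)
{
    char **cache = build_indent_cache("  ", ",", 2);
    for (int i = 0; i < 5; i++) {
        printf("cache[%d] = \"%s\"\n", i, cache[i]);  /* the newline prints literally */
        free(cache[i]);
    }
    free(cache);
    return 0;
}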
Py_ssize_t codestrlen = -1; PyObject *bytes_obj = NULL; @@ -1181,7 +1186,13 @@ object_is_shareable(PyObject *self, PyObject *args, PyObject *kwds) return NULL; } - if (_PyObject_CheckXIData(obj) == 0) { + PyInterpreterState *interp = PyInterpreterState_Get(); + _PyXIData_lookup_context_t ctx; + if (_PyXIData_GetLookupContext(interp, &ctx) < 0) { + return NULL; + } + + if (_PyObject_CheckXIData(&ctx, obj) == 0) { Py_RETURN_TRUE; } PyErr_Clear(); @@ -1480,6 +1491,11 @@ module_exec(PyObject *mod) PyInterpreterState *interp = PyInterpreterState_Get(); module_state *state = get_module_state(mod); + _PyXIData_lookup_context_t ctx; + if (_PyXIData_GetLookupContext(interp, &ctx) < 0) { + return -1; + } + #define ADD_WHENCE(NAME) \ if (PyModule_AddIntConstant(mod, "WHENCE_" #NAME, \ _PyInterpreterState_WHENCE_##NAME) < 0) \ @@ -1501,9 +1517,7 @@ module_exec(PyObject *mod) if (PyModule_AddType(mod, (PyTypeObject *)PyExc_InterpreterNotFoundError) < 0) { goto error; } - PyObject *PyExc_NotShareableError = \ - _PyInterpreterState_GetXIState(interp)->PyExc_NotShareableError; - if (PyModule_AddType(mod, (PyTypeObject *)PyExc_NotShareableError) < 0) { + if (PyModule_AddType(mod, (PyTypeObject *)ctx.PyExc_NotShareableError) < 0) { goto error; } diff --git a/Modules/_json.c b/Modules/_json.c index ce0093ab431d05..a99abbe72bf7a0 100644 --- a/Modules/_json.c +++ b/Modules/_json.c @@ -86,11 +86,11 @@ encoder_dealloc(PyObject *self); static int encoder_clear(PyEncoderObject *self); static int -encoder_listencode_list(PyEncoderObject *s, PyUnicodeWriter *writer, PyObject *seq, PyObject *newline_indent); +encoder_listencode_list(PyEncoderObject *s, PyUnicodeWriter *writer, PyObject *seq, Py_ssize_t indent_level, PyObject *indent_cache); static int -encoder_listencode_obj(PyEncoderObject *s, PyUnicodeWriter *writer, PyObject *obj, PyObject *newline_indent); +encoder_listencode_obj(PyEncoderObject *s, PyUnicodeWriter *writer, PyObject *obj, Py_ssize_t indent_level, PyObject *indent_cache); static int -encoder_listencode_dict(PyEncoderObject *s, PyUnicodeWriter *writer, PyObject *dct, PyObject *newline_indent); +encoder_listencode_dict(PyEncoderObject *s, PyUnicodeWriter *writer, PyObject *dct, Py_ssize_t indent_level, PyObject *indent_cache); static PyObject * _encoded_const(PyObject *obj); static void @@ -1252,17 +1252,92 @@ encoder_new(PyTypeObject *type, PyObject *args, PyObject *kwds) return (PyObject *)s; } + +/* indent_cache is a list that contains intermixed values at even and odd + * positions: + * + * 2*k : '\n' + indent * (k + initial_indent_level) + * strings written after opening and before closing brackets + * 2*k-1 : item_separator + '\n' + indent * (k + initial_indent_level) + * strings written between items + * + * Its size is always an odd number. + */ static PyObject * -_create_newline_indent(PyObject *indent, Py_ssize_t indent_level) +create_indent_cache(PyEncoderObject *s, Py_ssize_t indent_level) { PyObject *newline_indent = PyUnicode_FromOrdinal('\n'); if (newline_indent != NULL && indent_level) { PyUnicode_AppendAndDel(&newline_indent, - PySequence_Repeat(indent, indent_level)); + PySequence_Repeat(s->indent, indent_level)); + } + if (newline_indent == NULL) { + return NULL; + } + PyObject *indent_cache = PyList_New(1); + if (indent_cache == NULL) { + Py_DECREF(newline_indent); + return NULL; } - return newline_indent; + PyList_SET_ITEM(indent_cache, 0, newline_indent); + return indent_cache; +} + +/* Extend indent_cache by adding values for the next level. 
+ * It should have values for the indent_level-1 level before the call. + */ +static int +update_indent_cache(PyEncoderObject *s, + Py_ssize_t indent_level, PyObject *indent_cache) +{ + assert(indent_level * 2 == PyList_GET_SIZE(indent_cache) + 1); + assert(indent_level > 0); + PyObject *newline_indent = PyList_GET_ITEM(indent_cache, (indent_level - 1)*2); + newline_indent = PyUnicode_Concat(newline_indent, s->indent); + if (newline_indent == NULL) { + return -1; + } + PyObject *separator_indent = PyUnicode_Concat(s->item_separator, newline_indent); + if (separator_indent == NULL) { + Py_DECREF(newline_indent); + return -1; + } + + if (PyList_Append(indent_cache, separator_indent) < 0 || + PyList_Append(indent_cache, newline_indent) < 0) + { + Py_DECREF(separator_indent); + Py_DECREF(newline_indent); + return -1; + } + Py_DECREF(separator_indent); + Py_DECREF(newline_indent); + return 0; } +static PyObject * +get_item_separator(PyEncoderObject *s, + Py_ssize_t indent_level, PyObject *indent_cache) +{ + assert(indent_level > 0); + if (indent_level * 2 > PyList_GET_SIZE(indent_cache)) { + if (update_indent_cache(s, indent_level, indent_cache) < 0) { + return NULL; + } + } + assert(indent_level * 2 < PyList_GET_SIZE(indent_cache)); + return PyList_GET_ITEM(indent_cache, indent_level * 2 - 1); +} + +static int +write_newline_indent(PyUnicodeWriter *writer, + Py_ssize_t indent_level, PyObject *indent_cache) +{ + PyObject *newline_indent = PyList_GET_ITEM(indent_cache, indent_level * 2); + return PyUnicodeWriter_WriteStr(writer, newline_indent); +} + + static PyObject * encoder_call(PyEncoderObject *self, PyObject *args, PyObject *kwds) { @@ -1280,20 +1355,20 @@ encoder_call(PyEncoderObject *self, PyObject *args, PyObject *kwds) return NULL; } - PyObject *newline_indent = NULL; + PyObject *indent_cache = NULL; if (self->indent != Py_None) { - newline_indent = _create_newline_indent(self->indent, indent_level); - if (newline_indent == NULL) { + indent_cache = create_indent_cache(self, indent_level); + if (indent_cache == NULL) { PyUnicodeWriter_Discard(writer); return NULL; } } - if (encoder_listencode_obj(self, writer, obj, newline_indent)) { + if (encoder_listencode_obj(self, writer, obj, indent_level, indent_cache)) { PyUnicodeWriter_Discard(writer); - Py_XDECREF(newline_indent); + Py_XDECREF(indent_cache); return NULL; } - Py_XDECREF(newline_indent); + Py_XDECREF(indent_cache); PyObject *str = PyUnicodeWriter_Finish(writer); if (str == NULL) { @@ -1381,7 +1456,8 @@ _steal_accumulate(PyUnicodeWriter *writer, PyObject *stolen) static int encoder_listencode_obj(PyEncoderObject *s, PyUnicodeWriter *writer, - PyObject *obj, PyObject *newline_indent) + PyObject *obj, + Py_ssize_t indent_level, PyObject *indent_cache) { /* Encode Python object obj to a JSON term */ PyObject *newobj; @@ -1421,14 +1497,14 @@ encoder_listencode_obj(PyEncoderObject *s, PyUnicodeWriter *writer, else if (PyList_Check(obj) || PyTuple_Check(obj)) { if (_Py_EnterRecursiveCall(" while encoding a JSON object")) return -1; - rv = encoder_listencode_list(s, writer, obj, newline_indent); + rv = encoder_listencode_list(s, writer, obj, indent_level, indent_cache); _Py_LeaveRecursiveCall(); return rv; } else if (PyDict_Check(obj)) { if (_Py_EnterRecursiveCall(" while encoding a JSON object")) return -1; - rv = encoder_listencode_dict(s, writer, obj, newline_indent); + rv = encoder_listencode_dict(s, writer, obj, indent_level, indent_cache); _Py_LeaveRecursiveCall(); return rv; } @@ -1462,7 +1538,7 @@ 
encoder_listencode_obj(PyEncoderObject *s, PyUnicodeWriter *writer, Py_XDECREF(ident); return -1; } - rv = encoder_listencode_obj(s, writer, newobj, newline_indent); + rv = encoder_listencode_obj(s, writer, newobj, indent_level, indent_cache); _Py_LeaveRecursiveCall(); Py_DECREF(newobj); @@ -1485,7 +1561,7 @@ encoder_listencode_obj(PyEncoderObject *s, PyUnicodeWriter *writer, static int encoder_encode_key_value(PyEncoderObject *s, PyUnicodeWriter *writer, bool *first, PyObject *dct, PyObject *key, PyObject *value, - PyObject *newline_indent, + Py_ssize_t indent_level, PyObject *indent_cache, PyObject *item_separator) { PyObject *keystr = NULL; @@ -1541,7 +1617,7 @@ encoder_encode_key_value(PyEncoderObject *s, PyUnicodeWriter *writer, bool *firs if (PyUnicodeWriter_WriteStr(writer, s->key_separator) < 0) { return -1; } - if (encoder_listencode_obj(s, writer, value, newline_indent) < 0) { + if (encoder_listencode_obj(s, writer, value, indent_level, indent_cache) < 0) { _PyErr_FormatNote("when serializing %T item %R", dct, key); return -1; } @@ -1550,15 +1626,14 @@ encoder_encode_key_value(PyEncoderObject *s, PyUnicodeWriter *writer, bool *firs static int encoder_listencode_dict(PyEncoderObject *s, PyUnicodeWriter *writer, - PyObject *dct, PyObject *newline_indent) + PyObject *dct, + Py_ssize_t indent_level, PyObject *indent_cache) { /* Encode Python dict dct a JSON term */ PyObject *ident = NULL; PyObject *items = NULL; PyObject *key, *value; bool first = true; - PyObject *new_newline_indent = NULL; - PyObject *separator_indent = NULL; if (PyDict_GET_SIZE(dct) == 0) { /* Fast path */ @@ -1585,19 +1660,13 @@ encoder_listencode_dict(PyEncoderObject *s, PyUnicodeWriter *writer, goto bail; } - PyObject *current_item_separator = s->item_separator; // borrowed reference + PyObject *separator = s->item_separator; // borrowed reference if (s->indent != Py_None) { - new_newline_indent = PyUnicode_Concat(newline_indent, s->indent); - if (new_newline_indent == NULL) { - goto bail; - } - separator_indent = PyUnicode_Concat(current_item_separator, new_newline_indent); - if (separator_indent == NULL) { - goto bail; - } - // update item separator with a borrowed reference - current_item_separator = separator_indent; - if (PyUnicodeWriter_WriteStr(writer, new_newline_indent) < 0) { + indent_level++; + separator = get_item_separator(s, indent_level, indent_cache); + if (separator == NULL || + write_newline_indent(writer, indent_level, indent_cache) < 0) + { goto bail; } } @@ -1618,8 +1687,8 @@ encoder_listencode_dict(PyEncoderObject *s, PyUnicodeWriter *writer, key = PyTuple_GET_ITEM(item, 0); value = PyTuple_GET_ITEM(item, 1); if (encoder_encode_key_value(s, writer, &first, dct, key, value, - new_newline_indent, - current_item_separator) < 0) + indent_level, indent_cache, + separator) < 0) goto bail; } Py_CLEAR(items); @@ -1628,8 +1697,8 @@ encoder_listencode_dict(PyEncoderObject *s, PyUnicodeWriter *writer, Py_ssize_t pos = 0; while (PyDict_Next(dct, &pos, &key, &value)) { if (encoder_encode_key_value(s, writer, &first, dct, key, value, - new_newline_indent, - current_item_separator) < 0) + indent_level, indent_cache, + separator) < 0) goto bail; } } @@ -1640,10 +1709,8 @@ encoder_listencode_dict(PyEncoderObject *s, PyUnicodeWriter *writer, Py_CLEAR(ident); } if (s->indent != Py_None) { - Py_CLEAR(new_newline_indent); - Py_CLEAR(separator_indent); - - if (PyUnicodeWriter_WriteStr(writer, newline_indent) < 0) { + indent_level--; + if (write_newline_indent(writer, indent_level, indent_cache) < 0) { goto 
bail; } } @@ -1656,20 +1723,17 @@ encoder_listencode_dict(PyEncoderObject *s, PyUnicodeWriter *writer, bail: Py_XDECREF(items); Py_XDECREF(ident); - Py_XDECREF(separator_indent); - Py_XDECREF(new_newline_indent); return -1; } static int encoder_listencode_list(PyEncoderObject *s, PyUnicodeWriter *writer, - PyObject *seq, PyObject *newline_indent) + PyObject *seq, + Py_ssize_t indent_level, PyObject *indent_cache) { PyObject *ident = NULL; PyObject *s_fast = NULL; Py_ssize_t i; - PyObject *new_newline_indent = NULL; - PyObject *separator_indent = NULL; ident = NULL; s_fast = PySequence_Fast(seq, "_iterencode_list needs a sequence"); @@ -1702,20 +1766,13 @@ encoder_listencode_list(PyEncoderObject *s, PyUnicodeWriter *writer, PyObject *separator = s->item_separator; // borrowed reference if (s->indent != Py_None) { - new_newline_indent = PyUnicode_Concat(newline_indent, s->indent); - if (new_newline_indent == NULL) { - goto bail; - } - - if (PyUnicodeWriter_WriteStr(writer, new_newline_indent) < 0) { - goto bail; - } - - separator_indent = PyUnicode_Concat(separator, new_newline_indent); - if (separator_indent == NULL) { + indent_level++; + separator = get_item_separator(s, indent_level, indent_cache); + if (separator == NULL || + write_newline_indent(writer, indent_level, indent_cache) < 0) + { goto bail; } - separator = separator_indent; // assign separator with borrowed reference } for (i = 0; i < PySequence_Fast_GET_SIZE(s_fast); i++) { PyObject *obj = PySequence_Fast_GET_ITEM(s_fast, i); @@ -1723,7 +1780,7 @@ encoder_listencode_list(PyEncoderObject *s, PyUnicodeWriter *writer, if (PyUnicodeWriter_WriteStr(writer, separator) < 0) goto bail; } - if (encoder_listencode_obj(s, writer, obj, new_newline_indent)) { + if (encoder_listencode_obj(s, writer, obj, indent_level, indent_cache)) { _PyErr_FormatNote("when serializing %T item %zd", seq, i); goto bail; } @@ -1735,9 +1792,8 @@ encoder_listencode_list(PyEncoderObject *s, PyUnicodeWriter *writer, } if (s->indent != Py_None) { - Py_CLEAR(new_newline_indent); - Py_CLEAR(separator_indent); - if (PyUnicodeWriter_WriteStr(writer, newline_indent) < 0) { + indent_level--; + if (write_newline_indent(writer, indent_level, indent_cache) < 0) { goto bail; } } @@ -1751,8 +1807,6 @@ encoder_listencode_list(PyEncoderObject *s, PyUnicodeWriter *writer, bail: Py_XDECREF(ident); Py_DECREF(s_fast); - Py_XDECREF(separator_indent); - Py_XDECREF(new_newline_indent); return -1; } diff --git a/Modules/_localemodule.c b/Modules/_localemodule.c index 2a789ea74d27da..876627bcf5fad8 100644 --- a/Modules/_localemodule.c +++ b/Modules/_localemodule.c @@ -636,6 +636,37 @@ restore_locale(char *oldloc) } } +#ifdef __GLIBC__ +#if defined(ALT_DIGITS) || defined(ERA) +static PyObject * +decode_strings(const char *result, size_t max_count) +{ + /* Convert a sequence of NUL-separated C strings to a Python string + * containing semicolon separated items. */ + size_t i = 0; + size_t count = 0; + for (; count < max_count && result[i]; count++) { + i += strlen(result + i) + 1; + } + char *buf = PyMem_Malloc(i); + if (buf == NULL) { + PyErr_NoMemory(); + return NULL; + } + memcpy(buf, result, i); + /* Replace all NULs with semicolons. 
*/ + i = 0; + while (--count) { + i += strlen(buf + i); + buf[i++] = ';'; + } + PyObject *pyresult = PyUnicode_DecodeLocale(buf, NULL); + PyMem_Free(buf); + return pyresult; +} +#endif +#endif + /*[clinic input] _locale.nl_langinfo @@ -668,32 +699,18 @@ _locale_nl_langinfo_impl(PyObject *module, int item) } PyObject *pyresult; #ifdef __GLIBC__ + /* According to the POSIX specification the result must be + * a sequence of semicolon-separated strings. + * But in Glibc they are NUL-separated. */ #ifdef ALT_DIGITS if (item == ALT_DIGITS && *result) { - /* According to the POSIX specification the result must be - * a sequence of up to 100 semicolon-separated strings. - * But in Glibc they are NUL-separated. */ - Py_ssize_t i = 0; - int count = 0; - for (; count < 100 && result[i]; count++) { - i += strlen(result + i) + 1; - } - char *buf = PyMem_Malloc(i); - if (buf == NULL) { - PyErr_NoMemory(); - pyresult = NULL; - } - else { - memcpy(buf, result, i); - /* Replace all NULs with semicolons. */ - i = 0; - while (--count) { - i += strlen(buf + i); - buf[i++] = ';'; - } - pyresult = PyUnicode_DecodeLocale(buf, NULL); - PyMem_Free(buf); - } + pyresult = decode_strings(result, 100); + } + else +#endif +#ifdef ERA + if (item == ERA && *result) { + pyresult = decode_strings(result, SIZE_MAX); } else #endif diff --git a/Modules/_pickle.c b/Modules/_pickle.c index 5837cd41a40cd4..2696f38046121f 100644 --- a/Modules/_pickle.c +++ b/Modules/_pickle.c @@ -6730,6 +6730,7 @@ load_build(PickleState *st, UnpicklerObject *self) } if (PyObject_SetItem(dict, d_key, d_value) < 0) { Py_DECREF(d_key); + Py_DECREF(dict); goto error; } Py_DECREF(d_key); diff --git a/Modules/_sre/clinic/sre.c.h b/Modules/_sre/clinic/sre.c.h index e287f3d5ad3991..87e4785a428468 100644 --- a/Modules/_sre/clinic/sre.c.h +++ b/Modules/_sre/clinic/sre.c.h @@ -985,6 +985,44 @@ PyDoc_STRVAR(_sre_SRE_Pattern___deepcopy____doc__, #define _SRE_SRE_PATTERN___DEEPCOPY___METHODDEF \ {"__deepcopy__", (PyCFunction)_sre_SRE_Pattern___deepcopy__, METH_O, _sre_SRE_Pattern___deepcopy____doc__}, +#if defined(Py_DEBUG) + +PyDoc_STRVAR(_sre_SRE_Pattern__fail_after__doc__, +"_fail_after($self, count, exception, /)\n" +"--\n" +"\n" +"For debugging."); + +#define _SRE_SRE_PATTERN__FAIL_AFTER_METHODDEF \ + {"_fail_after", _PyCFunction_CAST(_sre_SRE_Pattern__fail_after), METH_FASTCALL, _sre_SRE_Pattern__fail_after__doc__}, + +static PyObject * +_sre_SRE_Pattern__fail_after_impl(PatternObject *self, int count, + PyObject *exception); + +static PyObject * +_sre_SRE_Pattern__fail_after(PatternObject *self, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + int count; + PyObject *exception; + + if (!_PyArg_CheckPositional("_fail_after", nargs, 2, 2)) { + goto exit; + } + count = PyLong_AsInt(args[0]); + if (count == -1 && PyErr_Occurred()) { + goto exit; + } + exception = args[1]; + return_value = _sre_SRE_Pattern__fail_after_impl(self, count, exception); + +exit: + return return_value; +} + +#endif /* defined(Py_DEBUG) */ + PyDoc_STRVAR(_sre_compile__doc__, "compile($module, /, pattern, flags, code, groups, groupindex,\n" " indexgroup)\n" @@ -1474,4 +1512,8 @@ _sre_SRE_Scanner_search(ScannerObject *self, PyTypeObject *cls, PyObject *const } return _sre_SRE_Scanner_search_impl(self, cls); } -/*[clinic end generated code: output=afaa301d55957cb0 input=a9049054013a1b77]*/ + +#ifndef _SRE_SRE_PATTERN__FAIL_AFTER_METHODDEF + #define _SRE_SRE_PATTERN__FAIL_AFTER_METHODDEF +#endif /* !defined(_SRE_SRE_PATTERN__FAIL_AFTER_METHODDEF) */ 
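/*
 * The sre.c hunks just below add a small pool for SRE_REPEAT nodes: a doubly
 * linked "used" list (so a node can be unlinked from the middle) plus a singly
 * linked "unused" free list (nodes are pushed and popped at the head), with
 * repeat_pool_clear() releasing both lists in one sweep so nothing leaks when
 * SRE(match) bails out early. The standalone sketch below shows the same
 * bookkeeping on a generic node type; the names are illustrative, not CPython's.
 */
#include <stdio.h>
#include <stdlib.h>

typedef struct node {
    int payload;
    struct node *pool_prev;   /* doubly linked while in the "used" list */
    struct node *pool_next;   /* next in "used" or "unused" list */
} node;

typedef struct {
    node *used;      /* doubly linked: nodes currently handed out */
    node *unused;    /* singly linked free list: recycled nodes */
} pool;

static node *
pool_malloc(pool *p)
{
    node *n;
    if (p->unused) {                  /* reuse a recycled node if possible */
        n = p->unused;
        p->unused = n->pool_next;
    }
    else {
        n = malloc(sizeof(*n));
        if (!n) {
            return NULL;
        }
    }
    n->pool_prev = NULL;              /* push onto the head of "used" */
    n->pool_next = p->used;
    if (p->used) {
        p->used->pool_prev = n;
    }
    p->used = n;
    return n;
}

static void
pool_free(pool *p, node *n)
{
    /* unlink from "used" ... */
    if (n->pool_prev) n->pool_prev->pool_next = n->pool_next;
    else              p->used = n->pool_next;
    if (n->pool_next) n->pool_next->pool_prev = n->pool_prev;
    /* ... and push onto "unused" for reuse */
    n->pool_next = p->unused;
    p->unused = n;
}

static void
pool_clear(pool *p)
{
    /* Release everything, used or not: this single sweep is what prevents
       leaks when the caller stops early without freeing each node. */
    for (node *n = p->used; n; ) { node *t = n; n = n->pool_next; free(t); }
    for (node *n = p->unused; n; ) { node *t = n; n = n->pool_next; free(t); }
    p->used = p->unused = NULL;
}

int
main(void)
{
    pool p = {0};
    node *a = pool_malloc(&p);
    node *b = pool_malloc(&p);
    a->payload = 1; b->payload = 2;
    pool_free(&p, a);                 /* a becomes recyclable */
    node *c = pool_malloc(&p);        /* reuses a's storage */
    printf("recycled: %s\n", c == a ? "yes" : "no");
    pool_clear(&p);                   /* frees the remaining nodes in one sweep */
    return 0;
}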
+/*[clinic end generated code: output=f8cb77f2261f0b2e input=a9049054013a1b77]*/ diff --git a/Modules/_sre/sre.c b/Modules/_sre/sre.c index 2c86f8869d8e58..36f542ddb4df2b 100644 --- a/Modules/_sre/sre.c +++ b/Modules/_sre/sre.c @@ -267,6 +267,85 @@ data_stack_grow(SRE_STATE* state, Py_ssize_t size) return 0; } +/* memory pool functions for SRE_REPEAT, this can avoid memory + leak when SRE(match) function terminates abruptly. + state->repeat_pool_used is a doubly-linked list, so that we + can remove a SRE_REPEAT node from it. + state->repeat_pool_unused is a singly-linked list, we put/get + node at the head. */ +static SRE_REPEAT * +repeat_pool_malloc(SRE_STATE *state) +{ + SRE_REPEAT *repeat; + + if (state->repeat_pool_unused) { + /* remove from unused pool (singly-linked list) */ + repeat = state->repeat_pool_unused; + state->repeat_pool_unused = repeat->pool_next; + } + else { + repeat = PyMem_Malloc(sizeof(SRE_REPEAT)); + if (!repeat) { + return NULL; + } + } + + /* add to used pool (doubly-linked list) */ + SRE_REPEAT *temp = state->repeat_pool_used; + if (temp) { + temp->pool_prev = repeat; + } + repeat->pool_prev = NULL; + repeat->pool_next = temp; + state->repeat_pool_used = repeat; + + return repeat; +} + +static void +repeat_pool_free(SRE_STATE *state, SRE_REPEAT *repeat) +{ + SRE_REPEAT *prev = repeat->pool_prev; + SRE_REPEAT *next = repeat->pool_next; + + /* remove from used pool (doubly-linked list) */ + if (prev) { + prev->pool_next = next; + } + else { + state->repeat_pool_used = next; + } + if (next) { + next->pool_prev = prev; + } + + /* add to unused pool (singly-linked list) */ + repeat->pool_next = state->repeat_pool_unused; + state->repeat_pool_unused = repeat; +} + +static void +repeat_pool_clear(SRE_STATE *state) +{ + /* clear used pool */ + SRE_REPEAT *next = state->repeat_pool_used; + state->repeat_pool_used = NULL; + while (next) { + SRE_REPEAT *temp = next; + next = temp->pool_next; + PyMem_Free(temp); + } + + /* clear unused pool */ + next = state->repeat_pool_unused; + state->repeat_pool_unused = NULL; + while (next) { + SRE_REPEAT *temp = next; + next = temp->pool_next; + PyMem_Free(temp); + } +} + /* generate 8-bit version */ #define SRE_CHAR Py_UCS1 @@ -511,6 +590,11 @@ state_init(SRE_STATE* state, PatternObject* pattern, PyObject* string, state->pos = start; state->endpos = end; +#ifdef Py_DEBUG + state->fail_after_count = pattern->fail_after_count; + state->fail_after_exc = pattern->fail_after_exc; // borrowed ref +#endif + return string; err: /* We add an explicit cast here because MSVC has a bug when @@ -533,6 +617,8 @@ state_fini(SRE_STATE* state) /* See above PyMem_Free() for why we explicitly cast here. 
*/ PyMem_Free((void*) state->mark); state->mark = NULL; + /* SRE_REPEAT pool */ + repeat_pool_clear(state); } /* calculate offset from start of string */ @@ -619,6 +705,9 @@ pattern_traverse(PatternObject *self, visitproc visit, void *arg) Py_VISIT(self->groupindex); Py_VISIT(self->indexgroup); Py_VISIT(self->pattern); +#ifdef Py_DEBUG + Py_VISIT(self->fail_after_exc); +#endif return 0; } @@ -628,6 +717,9 @@ pattern_clear(PatternObject *self) Py_CLEAR(self->groupindex); Py_CLEAR(self->indexgroup); Py_CLEAR(self->pattern); +#ifdef Py_DEBUG + Py_CLEAR(self->fail_after_exc); +#endif return 0; } @@ -690,7 +782,7 @@ _sre_SRE_Pattern_match_impl(PatternObject *self, PyTypeObject *cls, Py_ssize_t status; PyObject *match; - if (!state_init(&state, (PatternObject *)self, string, pos, endpos)) + if (!state_init(&state, self, string, pos, endpos)) return NULL; INIT_TRACE(&state); @@ -1381,6 +1473,29 @@ _sre_SRE_Pattern___deepcopy__(PatternObject *self, PyObject *memo) return Py_NewRef(self); } +#ifdef Py_DEBUG +/*[clinic input] +_sre.SRE_Pattern._fail_after + + count: int + exception: object + / + +For debugging. +[clinic start generated code]*/ + +static PyObject * +_sre_SRE_Pattern__fail_after_impl(PatternObject *self, int count, + PyObject *exception) +/*[clinic end generated code: output=9a6bf12135ac50c2 input=ef80a45c66c5499d]*/ +{ + self->fail_after_count = count; + Py_INCREF(exception); + Py_XSETREF(self->fail_after_exc, exception); + Py_RETURN_NONE; +} +#endif /* Py_DEBUG */ + static PyObject * pattern_repr(PatternObject *obj) { @@ -1506,6 +1621,10 @@ _sre_compile_impl(PyObject *module, PyObject *pattern, int flags, self->pattern = NULL; self->groupindex = NULL; self->indexgroup = NULL; +#ifdef Py_DEBUG + self->fail_after_count = -1; + self->fail_after_exc = NULL; +#endif self->codesize = n; @@ -2604,7 +2723,8 @@ pattern_new_match(_sremodulestate* module_state, if (!match) return NULL; - match->pattern = (PatternObject*)Py_NewRef(pattern); + Py_INCREF(pattern); + match->pattern = pattern; match->string = Py_NewRef(state->string); @@ -2740,7 +2860,7 @@ _sre_SRE_Scanner_match_impl(ScannerObject *self, PyTypeObject *cls) return NULL; } - match = pattern_new_match(module_state, (PatternObject*) self->pattern, + match = pattern_new_match(module_state, self->pattern, state, status); if (status == 0) @@ -2790,7 +2910,7 @@ _sre_SRE_Scanner_search_impl(ScannerObject *self, PyTypeObject *cls) return NULL; } - match = pattern_new_match(module_state, (PatternObject*) self->pattern, + match = pattern_new_match(module_state, self->pattern, state, status); if (status == 0) @@ -2826,7 +2946,8 @@ pattern_scanner(_sremodulestate *module_state, return NULL; } - scanner->pattern = Py_NewRef(self); + Py_INCREF(self); + scanner->pattern = self; PyObject_GC_Track(scanner); return (PyObject*) scanner; @@ -3020,6 +3141,7 @@ static PyMethodDef pattern_methods[] = { _SRE_SRE_PATTERN_SCANNER_METHODDEF _SRE_SRE_PATTERN___COPY___METHODDEF _SRE_SRE_PATTERN___DEEPCOPY___METHODDEF + _SRE_SRE_PATTERN__FAIL_AFTER_METHODDEF {"__class_getitem__", Py_GenericAlias, METH_O|METH_CLASS, PyDoc_STR("See PEP 585")}, {NULL, NULL} diff --git a/Modules/_sre/sre.h b/Modules/_sre/sre.h index 83d89d57b11199..42681c2addf3c2 100644 --- a/Modules/_sre/sre.h +++ b/Modules/_sre/sre.h @@ -34,6 +34,11 @@ typedef struct { int flags; /* flags used when compiling pattern source */ PyObject *weakreflist; /* List of weak references */ int isbytes; /* pattern type (1 - bytes, 0 - string, -1 - None) */ +#ifdef Py_DEBUG + /* for simulation of user 
interruption */ + int fail_after_count; + PyObject *fail_after_exc; +#endif /* pattern code */ Py_ssize_t codesize; SRE_CODE code[1]; @@ -68,6 +73,9 @@ typedef struct SRE_REPEAT_T { const SRE_CODE* pattern; /* points to REPEAT operator arguments */ const void* last_ptr; /* helper to check for infinite loops */ struct SRE_REPEAT_T *prev; /* points to previous repeat context */ + /* for SRE_REPEAT pool */ + struct SRE_REPEAT_T *pool_prev; + struct SRE_REPEAT_T *pool_next; } SRE_REPEAT; typedef struct { @@ -95,12 +103,19 @@ typedef struct { size_t data_stack_base; /* current repeat context */ SRE_REPEAT *repeat; + /* SRE_REPEAT pool */ + SRE_REPEAT *repeat_pool_used; + SRE_REPEAT *repeat_pool_unused; unsigned int sigcount; +#ifdef Py_DEBUG + int fail_after_count; + PyObject *fail_after_exc; +#endif } SRE_STATE; typedef struct { PyObject_HEAD - PyObject* pattern; + PatternObject* pattern; SRE_STATE state; int executing; } ScannerObject; diff --git a/Modules/_sre/sre_lib.h b/Modules/_sre/sre_lib.h index 97fbb0a75e54b6..af4bfc56083bcb 100644 --- a/Modules/_sre/sre_lib.h +++ b/Modules/_sre/sre_lib.h @@ -560,13 +560,28 @@ typedef struct { Py_ssize_t last_ctx_pos; } SRE(match_context); -#define MAYBE_CHECK_SIGNALS \ +#define _MAYBE_CHECK_SIGNALS \ do { \ if ((0 == (++sigcount & 0xfff)) && PyErr_CheckSignals()) { \ RETURN_ERROR(SRE_ERROR_INTERRUPTED); \ } \ } while (0) +#ifdef Py_DEBUG +# define MAYBE_CHECK_SIGNALS \ + do { \ + _MAYBE_CHECK_SIGNALS; \ + if (state->fail_after_count >= 0) { \ + if (state->fail_after_count-- == 0) { \ + PyErr_SetNone(state->fail_after_exc); \ + RETURN_ERROR(SRE_ERROR_INTERRUPTED); \ + } \ + } \ + } while (0) +#else +# define MAYBE_CHECK_SIGNALS _MAYBE_CHECK_SIGNALS +#endif /* Py_DEBUG */ + #ifdef HAVE_COMPUTED_GOTOS #ifndef USE_COMPUTED_GOTOS #define USE_COMPUTED_GOTOS 1 @@ -1120,12 +1135,9 @@ SRE(match)(SRE_STATE* state, const SRE_CODE* pattern, int toplevel) pattern[1], pattern[2])); /* install new repeat context */ - /* TODO(https://github.com/python/cpython/issues/67877): Fix this - * potential memory leak. 
*/ - ctx->u.rep = (SRE_REPEAT*) PyMem_Malloc(sizeof(*ctx->u.rep)); + ctx->u.rep = repeat_pool_malloc(state); if (!ctx->u.rep) { - PyErr_NoMemory(); - RETURN_FAILURE; + RETURN_ERROR(SRE_ERROR_MEMORY); } ctx->u.rep->count = -1; ctx->u.rep->pattern = pattern; @@ -1136,7 +1148,7 @@ SRE(match)(SRE_STATE* state, const SRE_CODE* pattern, int toplevel) state->ptr = ptr; DO_JUMP(JUMP_REPEAT, jump_repeat, pattern+pattern[0]); state->repeat = ctx->u.rep->prev; - PyMem_Free(ctx->u.rep); + repeat_pool_free(state, ctx->u.rep); if (ret) { RETURN_ON_ERROR(ret); @@ -1294,6 +1306,17 @@ SRE(match)(SRE_STATE* state, const SRE_CODE* pattern, int toplevel) pointer */ state->ptr = ptr; + /* Set state->repeat to non-NULL */ + ctx->u.rep = repeat_pool_malloc(state); + if (!ctx->u.rep) { + RETURN_ERROR(SRE_ERROR_MEMORY); + } + ctx->u.rep->count = -1; + ctx->u.rep->pattern = NULL; + ctx->u.rep->prev = state->repeat; + ctx->u.rep->last_ptr = NULL; + state->repeat = ctx->u.rep; + /* Initialize Count to 0 */ ctx->count = 0; @@ -1308,6 +1331,9 @@ SRE(match)(SRE_STATE* state, const SRE_CODE* pattern, int toplevel) } else { state->ptr = ptr; + /* Restore state->repeat */ + state->repeat = ctx->u.rep->prev; + repeat_pool_free(state, ctx->u.rep); RETURN_FAILURE; } } @@ -1380,6 +1406,10 @@ SRE(match)(SRE_STATE* state, const SRE_CODE* pattern, int toplevel) } } + /* Restore state->repeat */ + state->repeat = ctx->u.rep->prev; + repeat_pool_free(state, ctx->u.rep); + /* Evaluate Tail */ /* Jump to end of pattern indicated by skip, and then skip the SUCCESS op code that follows it. */ diff --git a/Modules/_testcapi/long.c b/Modules/_testcapi/long.c index 2b5e85d5707522..ebea09080ef11c 100644 --- a/Modules/_testcapi/long.c +++ b/Modules/_testcapi/long.c @@ -105,6 +105,30 @@ pylong_getsign(PyObject *module, PyObject *arg) } +static PyObject * +pylong_ispositive(PyObject *module, PyObject *arg) +{ + NULLABLE(arg); + RETURN_INT(PyLong_IsPositive(arg)); +} + + +static PyObject * +pylong_isnegative(PyObject *module, PyObject *arg) +{ + NULLABLE(arg); + RETURN_INT(PyLong_IsNegative(arg)); +} + + +static PyObject * +pylong_iszero(PyObject *module, PyObject *arg) +{ + NULLABLE(arg); + RETURN_INT(PyLong_IsZero(arg)); +} + + static PyObject * pylong_aspid(PyObject *module, PyObject *arg) { @@ -124,6 +148,9 @@ static PyMethodDef test_methods[] = { {"pylong_fromnativebytes", pylong_fromnativebytes, METH_VARARGS}, {"pylong_getsign", pylong_getsign, METH_O}, {"pylong_aspid", pylong_aspid, METH_O}, + {"pylong_ispositive", pylong_ispositive, METH_O}, + {"pylong_isnegative", pylong_isnegative, METH_O}, + {"pylong_iszero", pylong_iszero, METH_O}, {NULL}, }; diff --git a/Modules/_testcapi/object.c b/Modules/_testcapi/object.c index 1c76e766a790f0..3af5429ef00985 100644 --- a/Modules/_testcapi/object.c +++ b/Modules/_testcapi/object.c @@ -124,13 +124,20 @@ pyobject_clear_weakrefs_no_callbacks(PyObject *self, PyObject *obj) Py_RETURN_NONE; } +static PyObject * +pyobject_enable_deferred_refcount(PyObject *self, PyObject *obj) +{ + int result = PyUnstable_Object_EnableDeferredRefcount(obj); + return PyLong_FromLong(result); +} + static PyMethodDef test_methods[] = { {"call_pyobject_print", call_pyobject_print, METH_VARARGS}, {"pyobject_print_null", pyobject_print_null, METH_VARARGS}, {"pyobject_print_noref_object", pyobject_print_noref_object, METH_VARARGS}, {"pyobject_print_os_error", pyobject_print_os_error, METH_VARARGS}, {"pyobject_clear_weakrefs_no_callbacks", pyobject_clear_weakrefs_no_callbacks, METH_O}, - + 
{"pyobject_enable_deferred_refcount", pyobject_enable_deferred_refcount, METH_O}, {NULL}, }; diff --git a/Modules/_testinternalcapi.c b/Modules/_testinternalcapi.c index 327a077671047c..a925191d479bd6 100644 --- a/Modules/_testinternalcapi.c +++ b/Modules/_testinternalcapi.c @@ -1797,6 +1797,12 @@ _xid_capsule_destructor(PyObject *capsule) static PyObject * get_crossinterp_data(PyObject *self, PyObject *args) { + PyInterpreterState *interp = PyInterpreterState_Get(); + _PyXIData_lookup_context_t ctx; + if (_PyXIData_GetLookupContext(interp, &ctx) < 0) { + return NULL; + } + PyObject *obj = NULL; if (!PyArg_ParseTuple(args, "O:get_crossinterp_data", &obj)) { return NULL; @@ -1806,7 +1812,7 @@ get_crossinterp_data(PyObject *self, PyObject *args) if (data == NULL) { return NULL; } - if (_PyObject_GetXIData(obj, data) != 0) { + if (_PyObject_GetXIData(&ctx, obj, data) != 0) { _PyXIData_Free(data); return NULL; } @@ -2063,6 +2069,14 @@ identify_type_slot_wrappers(PyObject *self, PyObject *Py_UNUSED(ignored)) return _PyType_GetSlotWrapperNames(); } + +static PyObject * +has_deferred_refcount(PyObject *self, PyObject *op) +{ + return PyBool_FromLong(_PyObject_HasDeferredRefcount(op)); +} + + static PyMethodDef module_functions[] = { {"get_configs", get_configs, METH_NOARGS}, {"get_recursion_depth", get_recursion_depth, METH_NOARGS}, @@ -2159,6 +2173,7 @@ static PyMethodDef module_functions[] = { GH_119213_GETARGS_METHODDEF {"get_static_builtin_types", get_static_builtin_types, METH_NOARGS}, {"identify_type_slot_wrappers", identify_type_slot_wrappers, METH_NOARGS}, + {"has_deferred_refcount", has_deferred_refcount, METH_O}, {NULL, NULL} /* sentinel */ }; @@ -2207,7 +2222,7 @@ module_exec(PyObject *module) } if (PyModule_Add(module, "TIER2_THRESHOLD", - PyLong_FromLong(JUMP_BACKWARD_INITIAL_VALUE)) < 0) { + PyLong_FromLong(JUMP_BACKWARD_INITIAL_VALUE + 1)) < 0) { return 1; } diff --git a/Modules/_tkinter.c b/Modules/_tkinter.c index b0b70ccb8cc3d3..45897817a56051 100644 --- a/Modules/_tkinter.c +++ b/Modules/_tkinter.c @@ -325,6 +325,7 @@ typedef struct { const Tcl_ObjType *ListType; const Tcl_ObjType *StringType; const Tcl_ObjType *UTF32StringType; + const Tcl_ObjType *PixelType; } TkappObject; #define Tkapp_Interp(v) (((TkappObject *) (v))->interp) @@ -637,6 +638,7 @@ Tkapp_New(const char *screenName, const char *className, v->ListType = Tcl_GetObjType("list"); v->StringType = Tcl_GetObjType("string"); v->UTF32StringType = Tcl_GetObjType("utf32string"); + v->PixelType = Tcl_GetObjType("pixel"); /* Delete the 'exit' command, which can screw things up */ Tcl_DeleteCommand(v->interp, "exit"); @@ -1236,7 +1238,8 @@ FromObj(TkappObject *tkapp, Tcl_Obj *value) } if (value->typePtr == tkapp->StringType || - value->typePtr == tkapp->UTF32StringType) + value->typePtr == tkapp->UTF32StringType || + value->typePtr == tkapp->PixelType) { return unicodeFromTclObj(tkapp, value); } diff --git a/Modules/clinic/_functoolsmodule.c.h b/Modules/clinic/_functoolsmodule.c.h index 760877928db60d..afd5eb4eb12b78 100644 --- a/Modules/clinic/_functoolsmodule.c.h +++ b/Modules/clinic/_functoolsmodule.c.h @@ -69,7 +69,7 @@ _functools_cmp_to_key(PyObject *module, PyObject *const *args, Py_ssize_t nargs, } PyDoc_STRVAR(_functools_reduce__doc__, -"reduce($module, function, iterable, initial=, /)\n" +"reduce($module, function, iterable, /, initial=)\n" "--\n" "\n" "Apply a function of two arguments cumulatively to the items of an iterable, from left to right.\n" @@ -82,30 +82,59 @@ PyDoc_STRVAR(_functools_reduce__doc__, 
"calculates ((((1 + 2) + 3) + 4) + 5)."); #define _FUNCTOOLS_REDUCE_METHODDEF \ - {"reduce", _PyCFunction_CAST(_functools_reduce), METH_FASTCALL, _functools_reduce__doc__}, + {"reduce", _PyCFunction_CAST(_functools_reduce), METH_FASTCALL|METH_KEYWORDS, _functools_reduce__doc__}, static PyObject * _functools_reduce_impl(PyObject *module, PyObject *func, PyObject *seq, PyObject *result); static PyObject * -_functools_reduce(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +_functools_reduce(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 1 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(initial), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"", "", "initial", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "reduce", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[3]; + Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 2; PyObject *func; PyObject *seq; PyObject *result = NULL; - if (!_PyArg_CheckPositional("reduce", nargs, 2, 3)) { + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, + /*minpos*/ 2, /*maxpos*/ 3, /*minkw*/ 0, /*varpos*/ 0, argsbuf); + if (!args) { goto exit; } func = args[0]; seq = args[1]; - if (nargs < 3) { - goto skip_optional; + if (!noptargs) { + goto skip_optional_pos; } result = args[2]; -skip_optional: +skip_optional_pos: return_value = _functools_reduce_impl(module, func, seq, result); exit: @@ -159,4 +188,4 @@ _functools__lru_cache_wrapper_cache_clear(PyObject *self, PyObject *Py_UNUSED(ig return return_value; } -/*[clinic end generated code: output=0c3df7e5131200b7 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=e6edcc01f0720daf input=a9049054013a1b77]*/ diff --git a/Modules/clinic/grpmodule.c.h b/Modules/clinic/grpmodule.c.h index cc0ad210f42743..facfa3a43e490e 100644 --- a/Modules/clinic/grpmodule.c.h +++ b/Modules/clinic/grpmodule.c.h @@ -2,6 +2,12 @@ preserve [clinic start generated code]*/ +#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) +# include "pycore_gc.h" // PyGC_Head +# include "pycore_runtime.h" // _Py_ID() +#endif +#include "pycore_modsupport.h" // _PyArg_UnpackKeywords() + PyDoc_STRVAR(grp_getgrgid__doc__, "getgrgid($module, /, id)\n" "--\n" @@ -11,21 +17,49 @@ PyDoc_STRVAR(grp_getgrgid__doc__, "If id is not valid, raise KeyError."); #define GRP_GETGRGID_METHODDEF \ - {"getgrgid", (PyCFunction)(void(*)(void))grp_getgrgid, METH_VARARGS|METH_KEYWORDS, grp_getgrgid__doc__}, + {"getgrgid", _PyCFunction_CAST(grp_getgrgid), METH_FASTCALL|METH_KEYWORDS, grp_getgrgid__doc__}, static PyObject * grp_getgrgid_impl(PyObject *module, PyObject *id); static PyObject * -grp_getgrgid(PyObject *module, PyObject *args, PyObject *kwargs) +grp_getgrgid(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; - static char *_keywords[] = {"id", NULL}; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 1 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject 
*ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(id), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"id", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "getgrgid", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[1]; PyObject *id; - if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O:getgrgid", _keywords, - &id)) + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, + /*minpos*/ 1, /*maxpos*/ 1, /*minkw*/ 0, /*varpos*/ 0, argsbuf); + if (!args) { goto exit; + } + id = args[0]; return_value = grp_getgrgid_impl(module, id); exit: @@ -41,21 +75,53 @@ PyDoc_STRVAR(grp_getgrnam__doc__, "If name is not valid, raise KeyError."); #define GRP_GETGRNAM_METHODDEF \ - {"getgrnam", (PyCFunction)(void(*)(void))grp_getgrnam, METH_VARARGS|METH_KEYWORDS, grp_getgrnam__doc__}, + {"getgrnam", _PyCFunction_CAST(grp_getgrnam), METH_FASTCALL|METH_KEYWORDS, grp_getgrnam__doc__}, static PyObject * grp_getgrnam_impl(PyObject *module, PyObject *name); static PyObject * -grp_getgrnam(PyObject *module, PyObject *args, PyObject *kwargs) +grp_getgrnam(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; - static char *_keywords[] = {"name", NULL}; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 1 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(name), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"name", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "getgrnam", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[1]; PyObject *name; - if (!PyArg_ParseTupleAndKeywords(args, kwargs, "U:getgrnam", _keywords, - &name)) + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, + /*minpos*/ 1, /*maxpos*/ 1, /*minkw*/ 0, /*varpos*/ 0, argsbuf); + if (!args) { + goto exit; + } + if (!PyUnicode_Check(args[0])) { + _PyArg_BadArgument("getgrnam", "argument 'name'", "str", args[0]); goto exit; + } + name = args[0]; return_value = grp_getgrnam_impl(module, name); exit: @@ -82,4 +148,4 @@ grp_getgrall(PyObject *module, PyObject *Py_UNUSED(ignored)) { return grp_getgrall_impl(module); } -/*[clinic end generated code: output=81f180beb67fc585 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=2154194308dab038 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/posixmodule.c.h b/Modules/clinic/posixmodule.c.h index dce0ea100ec435..cd0c4faeac83d1 100644 --- a/Modules/clinic/posixmodule.c.h +++ b/Modules/clinic/posixmodule.c.h @@ -10128,7 +10128,7 @@ os_fpathconf(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (fd < 0) { goto exit; } - if (!conv_path_confname(args[1], &name)) { + if (!conv_confname(module, args[1], &name, "pathconf_names")) { goto exit; } _return_value = os_fpathconf_impl(module, fd, name); @@ -10203,7 +10203,7 @@ os_pathconf(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject if (!path_converter(args[0], &path)) { goto exit; } - if 
(!conv_path_confname(args[1], &name)) { + if (!conv_confname(module, args[1], &name, "pathconf_names")) { goto exit; } _return_value = os_pathconf_impl(module, &path, name); @@ -10241,7 +10241,7 @@ os_confstr(PyObject *module, PyObject *arg) PyObject *return_value = NULL; int name; - if (!conv_confstr_confname(arg, &name)) { + if (!conv_confname(module, arg, &name, "confstr_names")) { goto exit; } return_value = os_confstr_impl(module, name); @@ -10273,7 +10273,7 @@ os_sysconf(PyObject *module, PyObject *arg) int name; long _return_value; - if (!conv_sysconf_confname(arg, &name)) { + if (!conv_confname(module, arg, &name, "sysconf_names")) { goto exit; } _return_value = os_sysconf_impl(module, name); @@ -13114,4 +13114,4 @@ os__create_environ(PyObject *module, PyObject *Py_UNUSED(ignored)) #ifndef OS__SUPPORTS_VIRTUAL_TERMINAL_METHODDEF #define OS__SUPPORTS_VIRTUAL_TERMINAL_METHODDEF #endif /* !defined(OS__SUPPORTS_VIRTUAL_TERMINAL_METHODDEF) */ -/*[clinic end generated code: output=5358a13b4ce6148b input=a9049054013a1b77]*/ +/*[clinic end generated code: output=7ee14f5e880092f5 input=a9049054013a1b77]*/ diff --git a/Modules/expat/expat.h b/Modules/expat/expat.h index d0d6015a66283f..523b37d8d5787d 100644 --- a/Modules/expat/expat.h +++ b/Modules/expat/expat.h @@ -130,7 +130,9 @@ enum XML_Error { /* Added in 2.3.0. */ XML_ERROR_NO_BUFFER, /* Added in 2.4.0. */ - XML_ERROR_AMPLIFICATION_LIMIT_BREACH + XML_ERROR_AMPLIFICATION_LIMIT_BREACH, + /* Added in 2.6.4. */ + XML_ERROR_NOT_STARTED, }; enum XML_Content_Type { @@ -1066,7 +1068,7 @@ XML_SetReparseDeferralEnabled(XML_Parser parser, XML_Bool enabled); */ #define XML_MAJOR_VERSION 2 #define XML_MINOR_VERSION 6 -#define XML_MICRO_VERSION 3 +#define XML_MICRO_VERSION 4 #ifdef __cplusplus } diff --git a/Modules/expat/expat_external.h b/Modules/expat/expat_external.h index 12c560e14716ff..567872b09836e1 100644 --- a/Modules/expat/expat_external.h +++ b/Modules/expat/expat_external.h @@ -40,6 +40,10 @@ #ifndef Expat_External_INCLUDED #define Expat_External_INCLUDED 1 +/* Namespace external symbols to allow multiple libexpat version to + co-exist. */ +#include "pyexpatns.h" + /* External API definitions */ /* Expat tries very hard to make the API boundary very specifically @@ -64,11 +68,6 @@ compiled with the cdecl calling convention as the default since system headers may assume the cdecl convention. */ - -/* Namespace external symbols to allow multiple libexpat version to - co-exist. */ -#include "pyexpatns.h" - #ifndef XMLCALL # if defined(_MSC_VER) # define XMLCALL __cdecl diff --git a/Modules/expat/refresh.sh b/Modules/expat/refresh.sh new file mode 100755 index 00000000000000..82a9dbc23ad26b --- /dev/null +++ b/Modules/expat/refresh.sh @@ -0,0 +1,57 @@ +#!/usr/bin/env bash +# +# Use this script to update libexpat + +set -e +set -o pipefail + +if [[ "${BASH_VERSINFO[0]}" -lt 4 ]]; then + echo "A bash version >= 4 required. Got: $BASH_VERSION" >&2 + exit 1 +fi + +# Update this when updating to a new version after verifying that the changes +# the update brings in are good. These values are used for verifying the SBOM, too. 
+expected_libexpat_tag="R_2_6_4" +expected_libexpat_version="2.6.4" +expected_libexpat_sha256="fd03b7172b3bd7427a3e7a812063f74754f24542429b634e0db6511b53fb2278" + +expat_dir="$(realpath "$(dirname -- "${BASH_SOURCE[0]}")")" +cd ${expat_dir} + +# Step 1: download and copy files +curl --location "https://github.com/libexpat/libexpat/releases/download/${expected_libexpat_tag}/expat-${expected_libexpat_version}.tar.gz" > libexpat.tar.gz +echo "${expected_libexpat_sha256} libexpat.tar.gz" | sha256sum --check + +# Step 2: Pull files from the libexpat distribution +declare -a lib_files +lib_files=( + ascii.h + asciitab.h + expat.h + expat_external.h + iasciitab.h + internal.h + latin1tab.h + nametab.h + siphash.h + utf8tab.h + winconfig.h + xmlparse.c + xmlrole.c + xmlrole.h + xmltok.c + xmltok.h + xmltok_impl.c + xmltok_impl.h + xmltok_ns.c +) +for f in "${lib_files[@]}"; do + tar xzvf libexpat.tar.gz "expat-${expected_libexpat_version}/lib/${f}" --strip-components 2 +done +rm libexpat.tar.gz + +# Step 3: Add the namespacing include to expat_external.h +sed -i 's/#define Expat_External_INCLUDED 1/&\n\n\/* Namespace external symbols to allow multiple libexpat version to\n co-exist. \*\/\n#include "pyexpatns.h"/' expat_external.h + +echo "Updated; verify all is okay using git diff and git status." diff --git a/Modules/expat/xmlparse.c b/Modules/expat/xmlparse.c index d9285b213b38bd..a4e091e7c33c0a 100644 --- a/Modules/expat/xmlparse.c +++ b/Modules/expat/xmlparse.c @@ -1,4 +1,4 @@ -/* ba4cdf9bdb534f355a9def4c9e25d20ee8e72f95b0a4d930be52e563f5080196 (2.6.3+) +/* c5625880f4bf417c1463deee4eb92d86ff413f802048621c57e25fe483eb59e4 (2.6.4+) __ __ _ ___\ \/ /_ __ __ _| |_ / _ \\ /| '_ \ / _` | __| @@ -40,6 +40,7 @@ Copyright (c) 2023 Owain Davies Copyright (c) 2023-2024 Sony Corporation / Snild Dolkow Copyright (c) 2024 Berkay Eren Ürün + Copyright (c) 2024 Hanno Böck Licensed under the MIT license: Permission is hereby granted, free of charge, to any person obtaining @@ -2234,6 +2235,9 @@ XML_StopParser(XML_Parser parser, XML_Bool resumable) { if (parser == NULL) return XML_STATUS_ERROR; switch (parser->m_parsingStatus.parsing) { + case XML_INITIALIZED: + parser->m_errorCode = XML_ERROR_NOT_STARTED; + return XML_STATUS_ERROR; case XML_SUSPENDED: if (resumable) { parser->m_errorCode = XML_ERROR_SUSPENDED; @@ -2244,7 +2248,7 @@ XML_StopParser(XML_Parser parser, XML_Bool resumable) { case XML_FINISHED: parser->m_errorCode = XML_ERROR_FINISHED; return XML_STATUS_ERROR; - default: + case XML_PARSING: if (resumable) { #ifdef XML_DTD if (parser->m_isParamEntity) { @@ -2255,6 +2259,9 @@ XML_StopParser(XML_Parser parser, XML_Bool resumable) { parser->m_parsingStatus.parsing = XML_SUSPENDED; } else parser->m_parsingStatus.parsing = XML_FINISHED; + break; + default: + assert(0); } return XML_STATUS_OK; } @@ -2519,6 +2526,9 @@ XML_ErrorString(enum XML_Error code) { case XML_ERROR_AMPLIFICATION_LIMIT_BREACH: return XML_L( "limit on input amplification factor (from DTD and entities) breached"); + /* Added in 2.6.4. */ + case XML_ERROR_NOT_STARTED: + return XML_L("parser not started"); } return NULL; } @@ -7856,7 +7866,7 @@ accountingReportDiff(XML_Parser rootParser, assert(! rootParser->m_parentParser); fprintf(stderr, - " (+" EXPAT_FMT_PTRDIFF_T("6") " bytes %s|%d, xmlparse.c:%d) %*s\"", + " (+" EXPAT_FMT_PTRDIFF_T("6") " bytes %s|%u, xmlparse.c:%d) %*s\"", bytesMore, (account == XML_ACCOUNT_DIRECT) ? 
"DIR" : "EXP", levelsAwayFromRootParser, source_line, 10, ""); @@ -7969,7 +7979,7 @@ entityTrackingReportStats(XML_Parser rootParser, ENTITY *entity, fprintf( stderr, - "expat: Entities(%p): Count %9d, depth %2d/%2d %*s%s%s; %s length %d (xmlparse.c:%d)\n", + "expat: Entities(%p): Count %9u, depth %2u/%2u %*s%s%s; %s length %d (xmlparse.c:%d)\n", (void *)rootParser, rootParser->m_entity_stats.countEverOpened, rootParser->m_entity_stats.currentDepth, rootParser->m_entity_stats.maximumDepthSeen, diff --git a/Modules/getpath.py b/Modules/getpath.py index 1f1bfcb4f64dd4..1c1eb6cbf3ee22 100644 --- a/Modules/getpath.py +++ b/Modules/getpath.py @@ -420,7 +420,7 @@ def search_up(prefix, *landmarks, test=isfile): # Only warn if the file actually exists and was unresolvable # Otherwise users who specify a fake executable may get spurious warnings. if isfile(real_executable): - warn(f'Failed to find real location of {base_executable}') + warn(f'Failed to find real location of {real_executable}') if not executable_dir and os_name == 'darwin' and library: # QUIRK: macOS checks adjacent to its library early diff --git a/Modules/grpmodule.c b/Modules/grpmodule.c index f7d3e12f347ec2..29da9936b65504 100644 --- a/Modules/grpmodule.c +++ b/Modules/grpmodule.c @@ -1,9 +1,8 @@ /* UNIX group file access module */ -// Need limited C API version 3.13 for PyMem_RawRealloc() -#include "pyconfig.h" // Py_GIL_DISABLED -#ifndef Py_GIL_DISABLED -#define Py_LIMITED_API 0x030d0000 +// Argument Clinic uses the internal C API +#ifndef Py_BUILD_CORE_BUILTIN +# define Py_BUILD_CORE_MODULE 1 #endif #include "Python.h" @@ -281,23 +280,33 @@ static PyObject * grp_getgrall_impl(PyObject *module) /*[clinic end generated code: output=585dad35e2e763d7 input=d7df76c825c367df]*/ { - PyObject *d; - struct group *p; - - if ((d = PyList_New(0)) == NULL) + PyObject *d = PyList_New(0); + if (d == NULL) { return NULL; + } + + static PyMutex getgrall_mutex = {0}; + PyMutex_Lock(&getgrall_mutex); setgrent(); + + struct group *p; while ((p = getgrent()) != NULL) { + // gh-126316: Don't release the mutex around mkgrent() since + // setgrent()/endgrent() are not reentrant / thread-safe. A deadlock + // is unlikely since mkgrent() should not be able to call arbitrary + // Python code. 
PyObject *v = mkgrent(module, p); if (v == NULL || PyList_Append(d, v) != 0) { Py_XDECREF(v); - Py_DECREF(d); - endgrent(); - return NULL; + Py_CLEAR(d); + goto done; } Py_DECREF(v); } + +done: endgrent(); + PyMutex_Unlock(&getgrall_mutex); return d; } diff --git a/Modules/itertoolsmodule.c b/Modules/itertoolsmodule.c index 1201fa094902d7..78fbdcdf77a923 100644 --- a/Modules/itertoolsmodule.c +++ b/Modules/itertoolsmodule.c @@ -3291,6 +3291,9 @@ itertools_count_impl(PyTypeObject *type, PyObject *long_cnt, PyErr_Clear(); fast_mode = 0; } + else if (cnt == PY_SSIZE_T_MAX) { + fast_mode = 0; + } } } else { cnt = 0; diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c index 1ce2baecb8a964..6eb7054b566e3f 100644 --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -678,6 +678,7 @@ PyOS_AfterFork_Child(void) _PyEval_StartTheWorldAll(&_PyRuntime); _PyThreadState_DeleteList(list); + _PyImport_ReInitLock(tstate->interp); _PyImport_ReleaseLock(tstate->interp); _PySignal_AfterFork(); @@ -3112,18 +3113,22 @@ class Py_off_t_return_converter(long_return_converter): type = 'Py_off_t' conversion_fn = 'PyLong_FromPy_off_t' -class path_confname_converter(CConverter): +class confname_converter(CConverter): type="int" - converter="conv_path_confname" + converter="conv_confname" -class confstr_confname_converter(path_confname_converter): - converter='conv_confstr_confname' + def converter_init(self, *, table): + self.table = table -class sysconf_confname_converter(path_confname_converter): - converter="conv_sysconf_confname" + def parse_arg(self, argname, displayname, *, limited_capi): + return self.format_code(""" + if (!{converter}(module, {argname}, &{paramname}, "{table}")) {{{{ + goto exit; + }}}} + """, argname=argname, converter=self.converter, table=self.table) [python start generated code]*/ -/*[python end generated code: output=da39a3ee5e6b4b0d input=1860d32584c2a539]*/ +/*[python end generated code: output=da39a3ee5e6b4b0d input=8189d5ae78244626]*/ /*[clinic input] @@ -13546,46 +13551,38 @@ struct constdef { }; static int -conv_confname(PyObject *arg, int *valuep, struct constdef *table, - size_t tablesize) +conv_confname(PyObject *module, PyObject *arg, int *valuep, const char *tablename) { - if (PyLong_Check(arg)) { - int value = PyLong_AsInt(arg); - if (value == -1 && PyErr_Occurred()) - return 0; - *valuep = value; - return 1; - } - else { - /* look up the value in the table using a binary search */ - size_t lo = 0; - size_t mid; - size_t hi = tablesize; - int cmp; - const char *confname; - if (!PyUnicode_Check(arg)) { - PyErr_SetString(PyExc_TypeError, - "configuration names must be strings or integers"); + if (PyUnicode_Check(arg)) { + PyObject *table = PyObject_GetAttrString(module, tablename); + if (table == NULL) { return 0; } - confname = PyUnicode_AsUTF8(arg); - if (confname == NULL) + + arg = PyObject_GetItem(table, arg); + Py_DECREF(table); + if (arg == NULL) { + PyErr_SetString( + PyExc_ValueError, "unrecognized configuration name"); return 0; - while (lo < hi) { - mid = (lo + hi) / 2; - cmp = strcmp(confname, table[mid].name); - if (cmp < 0) - hi = mid; - else if (cmp > 0) - lo = mid + 1; - else { - *valuep = table[mid].value; - return 1; - } } - PyErr_SetString(PyExc_ValueError, "unrecognized configuration name"); - return 0; + } else { + Py_INCREF(arg); // Match the Py_DECREF below. 
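/*
 * The Py_INCREF(arg) on the borrowed-reference branch above exists so that
 * conv_confname() can end with one unconditional Py_DECREF(arg), whether arg
 * came back as a new reference from PyObject_GetItem() or as the caller's
 * borrowed argument. The embedded-CPython sketch below shows that "normalize
 * ownership so a single cleanup path works" idiom; pick_value() is an invented
 * helper, and the sketch assumes an embeddable CPython to compile and link
 * against (e.g. via python3-config).
 */
#include <Python.h>

/* Return a value that is sometimes a NEW reference and sometimes a BORROWED
   one, normalized so the caller always owns it: incref in the borrowed branch
   so a single Py_DECREF works afterwards. */
static PyObject *
pick_value(PyObject *dict, const char *key)
{
    PyObject *value = PyDict_GetItemString(dict, key);  /* borrowed or NULL */
    if (value != NULL) {
        Py_INCREF(value);              /* match the caller's single Py_DECREF */
        return value;
    }
    return PyLong_FromLong(-1);        /* new reference */
}

int
main(void)
{
    Py_Initialize();
    PyObject *d = PyDict_New();
    PyObject *one = PyLong_FromLong(1);
    PyDict_SetItemString(d, "present", one);
    Py_DECREF(one);

    const char *keys[] = {"present", "missing"};
    for (int i = 0; i < 2; i++) {
        PyObject *v = pick_value(d, keys[i]);
        printf("%s -> %ld\n", keys[i], PyLong_AsLong(v));
        Py_DECREF(v);                  /* one cleanup path for both branches */
    }
    Py_DECREF(d);
    return Py_FinalizeEx() < 0 ? 1 : 0;
}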
} + + int success = 0; + if (!PyLong_Check(arg)) { + PyErr_SetString(PyExc_TypeError, + "configuration names must be strings or integers"); + } else { + int value = PyLong_AsInt(arg); + if (!(value == -1 && PyErr_Occurred())) { + *valuep = value; + success = 1; + } + } + Py_DECREF(arg); + return success; } @@ -13676,14 +13673,6 @@ static struct constdef posix_constants_pathconf[] = { {"PC_TIMESTAMP_RESOLUTION", _PC_TIMESTAMP_RESOLUTION}, #endif }; - -static int -conv_path_confname(PyObject *arg, int *valuep) -{ - return conv_confname(arg, valuep, posix_constants_pathconf, - sizeof(posix_constants_pathconf) - / sizeof(struct constdef)); -} #endif @@ -13692,7 +13681,7 @@ conv_path_confname(PyObject *arg, int *valuep) os.fpathconf -> long fd: fildes - name: path_confname + name: confname(table="pathconf_names") / Return the configuration limit name for the file descriptor fd. @@ -13702,7 +13691,7 @@ If there is no limit, return -1. static long os_fpathconf_impl(PyObject *module, int fd, int name) -/*[clinic end generated code: output=d5b7042425fc3e21 input=5b8d2471cfaae186]*/ +/*[clinic end generated code: output=d5b7042425fc3e21 input=023d44589c9ed6aa]*/ { long limit; @@ -13720,7 +13709,7 @@ os_fpathconf_impl(PyObject *module, int fd, int name) /*[clinic input] os.pathconf -> long path: path_t(allow_fd='PATH_HAVE_FPATHCONF') - name: path_confname + name: confname(table="pathconf_names") Return the configuration limit name for the file or directory path. @@ -13731,7 +13720,7 @@ On some platforms, path may also be specified as an open file descriptor. static long os_pathconf_impl(PyObject *module, path_t *path, int name) -/*[clinic end generated code: output=5bedee35b293a089 input=bc3e2a985af27e5e]*/ +/*[clinic end generated code: output=5bedee35b293a089 input=6f6072f57b10c787]*/ { long limit; @@ -13908,19 +13897,11 @@ static struct constdef posix_constants_confstr[] = { #endif }; -static int -conv_confstr_confname(PyObject *arg, int *valuep) -{ - return conv_confname(arg, valuep, posix_constants_confstr, - sizeof(posix_constants_confstr) - / sizeof(struct constdef)); -} - /*[clinic input] os.confstr - name: confstr_confname + name: confname(table="confstr_names") / Return a string-valued system configuration variable. @@ -13928,7 +13909,7 @@ Return a string-valued system configuration variable. static PyObject * os_confstr_impl(PyObject *module, int name) -/*[clinic end generated code: output=bfb0b1b1e49b9383 input=18fb4d0567242e65]*/ +/*[clinic end generated code: output=bfb0b1b1e49b9383 input=4c6ffca2837ec959]*/ { PyObject *result = NULL; char buffer[255]; @@ -14465,18 +14446,10 @@ static struct constdef posix_constants_sysconf[] = { #endif }; -static int -conv_sysconf_confname(PyObject *arg, int *valuep) -{ - return conv_confname(arg, valuep, posix_constants_sysconf, - sizeof(posix_constants_sysconf) - / sizeof(struct constdef)); -} - /*[clinic input] os.sysconf -> long - name: sysconf_confname + name: confname(table="sysconf_names") / Return an integer-valued system configuration variable. @@ -14484,7 +14457,7 @@ Return an integer-valued system configuration variable. 
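/* Illustrative usage (assuming the platform provides SC_PAGE_SIZE):
   os.sysconf("SC_PAGE_SIZE") is equivalent to
   os.sysconf(os.sysconf_names["SC_PAGE_SIZE"]); an unrecognized name string
   raises ValueError, and anything other than a str or int raises TypeError. */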
static long os_sysconf_impl(PyObject *module, int name) -/*[clinic end generated code: output=3662f945fc0cc756 input=279e3430a33f29e4]*/ +/*[clinic end generated code: output=3662f945fc0cc756 input=930b8f23b5d15086]*/ { long value; @@ -14497,40 +14470,15 @@ os_sysconf_impl(PyObject *module, int name) #endif /* HAVE_SYSCONF */ -/* This code is used to ensure that the tables of configuration value names - * are in sorted order as required by conv_confname(), and also to build - * the exported dictionaries that are used to publish information about the - * names available on the host platform. - * - * Sorting the table at runtime ensures that the table is properly ordered - * when used, even for platforms we're not able to test on. It also makes - * it easier to add additional entries to the tables. - */ - -static int -cmp_constdefs(const void *v1, const void *v2) -{ - const struct constdef *c1 = - (const struct constdef *) v1; - const struct constdef *c2 = - (const struct constdef *) v2; - - return strcmp(c1->name, c2->name); -} - static int setup_confname_table(struct constdef *table, size_t tablesize, const char *tablename, PyObject *module) { - PyObject *d = NULL; - size_t i; - - qsort(table, tablesize, sizeof(struct constdef), cmp_constdefs); - d = PyDict_New(); + PyObject *d = PyDict_New(); if (d == NULL) return -1; - for (i=0; i < tablesize; ++i) { + for (size_t i=0; i < tablesize; ++i) { PyObject *o = PyLong_FromLong(table[i].value); if (o == NULL || PyDict_SetItemString(d, table[i].name, o) == -1) { Py_XDECREF(o); diff --git a/Modules/socketmodule.c b/Modules/socketmodule.c index 2764bd6e2b2a47..06be822c9555f9 100644 --- a/Modules/socketmodule.c +++ b/Modules/socketmodule.c @@ -810,7 +810,9 @@ internal_select(PySocketSockObject *s, int writing, PyTime_t interval, /* s->sock_timeout is in seconds, timeout in ms */ ms = _PyTime_AsMilliseconds(interval, _PyTime_ROUND_CEILING); - assert(ms <= INT_MAX); + if (ms > INT_MAX) { + ms = INT_MAX; + } /* On some OSes, typically BSD-based ones, the timeout parameter of the poll() syscall, when negative, must be exactly INFTIM, where defined, @@ -822,6 +824,7 @@ internal_select(PySocketSockObject *s, int writing, PyTime_t interval, ms = -1; #endif } + assert(INT_MIN <= ms && ms <= INT_MAX); Py_BEGIN_ALLOW_THREADS; n = poll(&pollfd, 1, (int)ms); diff --git a/Objects/bytearrayobject.c b/Objects/bytearrayobject.c index 5a52b2f702ad0b..871f99b6f885ba 100644 --- a/Objects/bytearrayobject.c +++ b/Objects/bytearrayobject.c @@ -52,8 +52,9 @@ bytearray_getbuffer(PyObject *self, Py_buffer *view, int flags) } void *ptr = (void *) PyByteArray_AS_STRING(obj); - /* cannot fail if view != NULL and readonly == 0 */ - (void)PyBuffer_FillInfo(view, (PyObject*)obj, ptr, Py_SIZE(obj), 0, flags); + if (PyBuffer_FillInfo(view, (PyObject*)obj, ptr, Py_SIZE(obj), 0, flags) < 0) { + return -1; + } obj->ob_exports++; return 0; } diff --git a/Objects/bytesobject.c b/Objects/bytesobject.c index ac02cfe7cf01c5..8c7651f0f3aa45 100644 --- a/Objects/bytesobject.c +++ b/Objects/bytesobject.c @@ -3196,6 +3196,7 @@ _PyBytes_Resize(PyObject **pv, Py_ssize_t newsize) #ifdef Py_TRACE_REFS _Py_ForgetReference(v); #endif + _PyReftracerTrack(v, PyRefTracer_DESTROY); *pv = (PyObject *) PyObject_Realloc(v, PyBytesObject_SIZE + newsize); if (*pv == NULL) { diff --git a/Objects/codeobject.c b/Objects/codeobject.c index dba43d5911da95..148350cc4b9195 100644 --- a/Objects/codeobject.c +++ b/Objects/codeobject.c @@ -302,21 +302,32 @@ validate_and_copy_tuple(PyObject *tup) } static int 
-init_co_cached(PyCodeObject *self) { - if (self->_co_cached == NULL) { - self->_co_cached = PyMem_New(_PyCoCached, 1); - if (self->_co_cached == NULL) { +init_co_cached(PyCodeObject *self) +{ + _PyCoCached *cached = FT_ATOMIC_LOAD_PTR(self->_co_cached); + if (cached != NULL) { + return 0; + } + + Py_BEGIN_CRITICAL_SECTION(self); + cached = self->_co_cached; + if (cached == NULL) { + cached = PyMem_New(_PyCoCached, 1); + if (cached == NULL) { PyErr_NoMemory(); - return -1; } - self->_co_cached->_co_code = NULL; - self->_co_cached->_co_cellvars = NULL; - self->_co_cached->_co_freevars = NULL; - self->_co_cached->_co_varnames = NULL; + else { + cached->_co_code = NULL; + cached->_co_cellvars = NULL; + cached->_co_freevars = NULL; + cached->_co_varnames = NULL; + FT_ATOMIC_STORE_PTR(self->_co_cached, cached); + } } - return 0; - + Py_END_CRITICAL_SECTION(); + return cached != NULL ? 0 : -1; } + /****************** * _PyCode_New() ******************/ @@ -1571,16 +1582,21 @@ get_cached_locals(PyCodeObject *co, PyObject **cached_field, { assert(cached_field != NULL); assert(co->_co_cached != NULL); - if (*cached_field != NULL) { - return Py_NewRef(*cached_field); + PyObject *varnames = FT_ATOMIC_LOAD_PTR(*cached_field); + if (varnames != NULL) { + return Py_NewRef(varnames); } - assert(*cached_field == NULL); - PyObject *varnames = get_localsplus_names(co, kind, num); + + Py_BEGIN_CRITICAL_SECTION(co); + varnames = *cached_field; if (varnames == NULL) { - return NULL; + varnames = get_localsplus_names(co, kind, num); + if (varnames != NULL) { + FT_ATOMIC_STORE_PTR(*cached_field, varnames); + } } - *cached_field = Py_NewRef(varnames); - return varnames; + Py_END_CRITICAL_SECTION(); + return Py_XNewRef(varnames); } PyObject * @@ -1674,18 +1690,26 @@ _PyCode_GetCode(PyCodeObject *co) if (init_co_cached(co)) { return NULL; } - if (co->_co_cached->_co_code != NULL) { - return Py_NewRef(co->_co_cached->_co_code); + + _PyCoCached *cached = co->_co_cached; + PyObject *code = FT_ATOMIC_LOAD_PTR(cached->_co_code); + if (code != NULL) { + return Py_NewRef(code); } - PyObject *code = PyBytes_FromStringAndSize((const char *)_PyCode_CODE(co), - _PyCode_NBYTES(co)); + + Py_BEGIN_CRITICAL_SECTION(co); + code = cached->_co_code; if (code == NULL) { - return NULL; + code = PyBytes_FromStringAndSize((const char *)_PyCode_CODE(co), + _PyCode_NBYTES(co)); + if (code != NULL) { + deopt_code(co, (_Py_CODEUNIT *)PyBytes_AS_STRING(code)); + assert(cached->_co_code == NULL); + FT_ATOMIC_STORE_PTR(cached->_co_code, code); + } } - deopt_code(co, (_Py_CODEUNIT *)PyBytes_AS_STRING(code)); - assert(co->_co_cached->_co_code == NULL); - co->_co_cached->_co_code = Py_NewRef(code); - return code; + Py_END_CRITICAL_SECTION(); + return Py_XNewRef(code); } PyObject * @@ -2871,20 +2895,22 @@ get_indices_in_use(PyInterpreterState *interp, struct flag_set *in_use) assert(interp->stoptheworld.world_stopped); assert(in_use->flags == NULL); int32_t max_index = 0; - for (PyThreadState *p = interp->threads.head; p != NULL; p = p->next) { + _Py_FOR_EACH_TSTATE_BEGIN(interp, p) { int32_t idx = ((_PyThreadStateImpl *) p)->tlbc_index; if (idx > max_index) { max_index = idx; } } + _Py_FOR_EACH_TSTATE_END(interp); in_use->size = (size_t) max_index + 1; in_use->flags = PyMem_Calloc(in_use->size, sizeof(*in_use->flags)); if (in_use->flags == NULL) { return -1; } - for (PyThreadState *p = interp->threads.head; p != NULL; p = p->next) { + _Py_FOR_EACH_TSTATE_BEGIN(interp, p) { in_use->flags[((_PyThreadStateImpl *) p)->tlbc_index] = 1; } + 
_Py_FOR_EACH_TSTATE_END(interp); return 0; } diff --git a/Objects/dictobject.c b/Objects/dictobject.c index 2090008055b7c0..49b213eaa817e2 100644 --- a/Objects/dictobject.c +++ b/Objects/dictobject.c @@ -883,6 +883,7 @@ new_dict(PyInterpreterState *interp, mp->ma_used = used; mp->_ma_watcher_tag = 0; ASSERT_CONSISTENT(mp); + _PyObject_GC_TRACK(mp); return (PyObject *)mp; } @@ -1284,6 +1285,20 @@ ensure_shared_on_resize(PyDictObject *mp) #endif } +static inline void +ensure_shared_on_keys_version_assignment(PyDictObject *mp) +{ + ASSERT_DICT_LOCKED((PyObject *) mp); + #ifdef Py_GIL_DISABLED + if (!IS_DICT_SHARED(mp)) { + // This ensures that a concurrent resize operation will delay + // freeing the old keys or values using QSBR, which is necessary to + // safely allow concurrent reads without locking. + SET_DICT_SHARED(mp); + } + #endif +} + #ifdef Py_GIL_DISABLED static inline Py_ALWAYS_INLINE int @@ -1578,64 +1593,6 @@ _PyDict_HasOnlyStringKeys(PyObject *dict) return 1; } -#define MAINTAIN_TRACKING(mp, key, value) \ - do { \ - if (!_PyObject_GC_IS_TRACKED(mp)) { \ - if (_PyObject_GC_MAY_BE_TRACKED(key) || \ - _PyObject_GC_MAY_BE_TRACKED(value)) { \ - _PyObject_GC_TRACK(mp); \ - } \ - } \ - } while(0) - -void -_PyDict_MaybeUntrack(PyObject *op) -{ - PyDictObject *mp; - PyObject *value; - Py_ssize_t i, numentries; - - ASSERT_WORLD_STOPPED_OR_DICT_LOCKED(op); - - if (!PyDict_CheckExact(op) || !_PyObject_GC_IS_TRACKED(op)) - return; - - mp = (PyDictObject *) op; - ASSERT_CONSISTENT(mp); - numentries = mp->ma_keys->dk_nentries; - if (_PyDict_HasSplitTable(mp)) { - for (i = 0; i < numentries; i++) { - if ((value = mp->ma_values->values[i]) == NULL) - continue; - if (_PyObject_GC_MAY_BE_TRACKED(value)) { - return; - } - } - } - else { - if (DK_IS_UNICODE(mp->ma_keys)) { - PyDictUnicodeEntry *ep0 = DK_UNICODE_ENTRIES(mp->ma_keys); - for (i = 0; i < numentries; i++) { - if ((value = ep0[i].me_value) == NULL) - continue; - if (_PyObject_GC_MAY_BE_TRACKED(value)) - return; - } - } - else { - PyDictKeyEntry *ep0 = DK_ENTRIES(mp->ma_keys); - for (i = 0; i < numentries; i++) { - if ((value = ep0[i].me_value) == NULL) - continue; - if (_PyObject_GC_MAY_BE_TRACKED(value) || - _PyObject_GC_MAY_BE_TRACKED(ep0[i].me_key)) - return; - } - } - } - _PyObject_GC_UNTRACK(op); -} - void _PyDict_EnablePerThreadRefcounting(PyObject *op) { @@ -1701,7 +1658,7 @@ insert_combined_dict(PyInterpreterState *interp, PyDictObject *mp, } _PyDict_NotifyEvent(interp, PyDict_EVENT_ADDED, mp, key, value); - mp->ma_keys->dk_version = 0; + FT_ATOMIC_STORE_UINT32_RELAXED(mp->ma_keys->dk_version, 0); Py_ssize_t hashpos = find_empty_slot(mp->ma_keys, hash); dictkeys_set_index(mp->ma_keys, hashpos, mp->ma_keys->dk_nentries); @@ -1743,7 +1700,7 @@ insert_split_key(PyDictKeysObject *keys, PyObject *key, Py_hash_t hash) ix = unicodekeys_lookup_unicode(keys, key, hash); if (ix == DKIX_EMPTY && keys->dk_usable > 0) { // Insert into new slot - keys->dk_version = 0; + FT_ATOMIC_STORE_UINT32_RELAXED(keys->dk_version, 0); Py_ssize_t hashpos = find_empty_slot(keys, hash); ix = keys->dk_nentries; dictkeys_set_index(keys, hashpos, ix); @@ -1761,7 +1718,6 @@ insert_split_value(PyInterpreterState *interp, PyDictObject *mp, PyObject *key, { assert(PyUnicode_CheckExact(key)); ASSERT_DICT_LOCKED(mp); - MAINTAIN_TRACKING(mp, key, value); PyObject *old_value = mp->ma_values->values[ix]; if (old_value == NULL) { _PyDict_NotifyEvent(interp, PyDict_EVENT_ADDED, mp, key, value); @@ -1818,8 +1774,6 @@ insertdict(PyInterpreterState *interp, PyDictObject *mp, 
if (ix == DKIX_ERROR) goto Fail; - MAINTAIN_TRACKING(mp, key, value); - if (ix == DKIX_EMPTY) { assert(!_PyDict_HasSplitTable(mp)); /* Insert into new slot. */ @@ -1878,8 +1832,6 @@ insert_to_emptydict(PyInterpreterState *interp, PyDictObject *mp, /* We don't decref Py_EMPTY_KEYS here because it is immortal. */ assert(mp->ma_values == NULL); - MAINTAIN_TRACKING(mp, key, value); - size_t hashpos = (size_t)hash & (PyDict_MINSIZE-1); dictkeys_set_index(newkeys, hashpos, 0); if (unicode) { @@ -2679,7 +2631,7 @@ delitem_common(PyDictObject *mp, Py_hash_t hash, Py_ssize_t ix, ASSERT_CONSISTENT(mp); } else { - mp->ma_keys->dk_version = 0; + FT_ATOMIC_STORE_UINT32_RELAXED(mp->ma_keys->dk_version, 0); dictkeys_set_index(mp->ma_keys, hashpos, DKIX_DUMMY); if (DK_IS_UNICODE(mp->ma_keys)) { PyDictUnicodeEntry *ep = &DK_UNICODE_ENTRIES(mp->ma_keys)[ix]; @@ -4024,8 +3976,7 @@ copy_lock_held(PyObject *o) split_copy->ma_used = mp->ma_used; split_copy->_ma_watcher_tag = 0; dictkeys_incref(mp->ma_keys); - if (_PyObject_GC_IS_TRACKED(mp)) - _PyObject_GC_TRACK(split_copy); + _PyObject_GC_TRACK(split_copy); return (PyObject *)split_copy; } @@ -4060,11 +4011,6 @@ copy_lock_held(PyObject *o) new->ma_used = mp->ma_used; ASSERT_CONSISTENT(new); - if (_PyObject_GC_IS_TRACKED(mp)) { - /* Maintain tracking. */ - _PyObject_GC_TRACK(new); - } - return (PyObject *)new; } @@ -4351,7 +4297,6 @@ dict_setdefault_ref_lock_held(PyObject *d, PyObject *key, PyObject *default_valu } } - MAINTAIN_TRACKING(mp, key, value); STORE_USED(mp, mp->ma_used + 1); assert(mp->ma_keys->dk_usable >= 0); ASSERT_CONSISTENT(mp); @@ -4498,7 +4443,7 @@ dict_popitem_impl(PyDictObject *self) return NULL; } } - self->ma_keys->dk_version = 0; + FT_ATOMIC_STORE_UINT32_RELAXED(self->ma_keys->dk_version, 0); /* Pop last item */ PyObject *key, *value; @@ -4800,16 +4745,8 @@ dict_new(PyTypeObject *type, PyObject *args, PyObject *kwds) d->ma_keys = Py_EMPTY_KEYS; d->ma_values = NULL; ASSERT_CONSISTENT(d); - - if (type != &PyDict_Type) { - // Don't track if a subclass tp_alloc is PyType_GenericAlloc() - if (!_PyObject_GC_IS_TRACKED(d)) { - _PyObject_GC_TRACK(d); - } - } - else { - // _PyType_AllocNoTrack() does not track the created object - assert(!_PyObject_GC_IS_TRACKED(d)); + if (!_PyObject_GC_IS_TRACKED(d)) { + _PyObject_GC_TRACK(d); } return self; } @@ -6746,19 +6683,14 @@ make_dict_from_instance_attributes(PyInterpreterState *interp, { dictkeys_incref(keys); Py_ssize_t used = 0; - Py_ssize_t track = 0; size_t size = shared_keys_usable_size(keys); for (size_t i = 0; i < size; i++) { PyObject *val = values->values[i]; if (val != NULL) { used += 1; - track += _PyObject_GC_MAY_BE_TRACKED(val); } } PyDictObject *res = (PyDictObject *)new_dict(interp, keys, values, used, 0); - if (track && res) { - _PyObject_GC_TRACK(res); - } return res; } @@ -7169,50 +7101,146 @@ set_dict_inline_values(PyObject *obj, PyDictObject *new_dict) } } -int -_PyObject_SetManagedDict(PyObject *obj, PyObject *new_dict) +#ifdef Py_GIL_DISABLED + +// Trys and sets the dictionary for an object in the easy case when our current +// dictionary is either completely not materialized or is a dictionary which +// does not point at the inline values. +static bool +try_set_dict_inline_only_or_other_dict(PyObject *obj, PyObject *new_dict, PyDictObject **cur_dict) +{ + bool replaced = false; + Py_BEGIN_CRITICAL_SECTION(obj); + + PyDictObject *dict = *cur_dict = _PyObject_GetManagedDict(obj); + if (dict == NULL) { + // We only have inline values, we can just completely replace them. 
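+ // (No dict object has been materialized for this instance yet, so there
+ // is nothing that needs to be detached from the inline values first.)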
+ set_dict_inline_values(obj, (PyDictObject *)new_dict); + replaced = true; + goto exit_lock; + } + + if (FT_ATOMIC_LOAD_PTR_RELAXED(dict->ma_values) != _PyObject_InlineValues(obj)) { + // We have a materialized dict which doesn't point at the inline values, + // We get to simply swap dictionaries and free the old dictionary. + FT_ATOMIC_STORE_PTR(_PyObject_ManagedDictPointer(obj)->dict, + (PyDictObject *)Py_XNewRef(new_dict)); + replaced = true; + goto exit_lock; + } + else { + // We have inline values, we need to lock the dict and the object + // at the same time to safely dematerialize them. To do that while releasing + // the object lock we need a strong reference to the current dictionary. + Py_INCREF(dict); + } +exit_lock: + Py_END_CRITICAL_SECTION(); + return replaced; +} + +// Replaces a dictionary that is probably the dictionary which has been +// materialized and points at the inline values. We could have raced +// and replaced it with another dictionary though. +static int +replace_dict_probably_inline_materialized(PyObject *obj, PyDictObject *inline_dict, + PyDictObject *cur_dict, PyObject *new_dict) +{ + _Py_CRITICAL_SECTION_ASSERT_OBJECT_LOCKED(obj); + + if (cur_dict == inline_dict) { + assert(FT_ATOMIC_LOAD_PTR_RELAXED(inline_dict->ma_values) == _PyObject_InlineValues(obj)); + + int err = _PyDict_DetachFromObject(inline_dict, obj); + if (err != 0) { + assert(new_dict == NULL); + return err; + } + } + + FT_ATOMIC_STORE_PTR(_PyObject_ManagedDictPointer(obj)->dict, + (PyDictObject *)Py_XNewRef(new_dict)); + return 0; +} + +#endif + +static void +decref_maybe_delay(PyObject *obj, bool delay) +{ + if (delay) { + _PyObject_XDecRefDelayed(obj); + } + else { + Py_XDECREF(obj); + } +} + +static int +set_or_clear_managed_dict(PyObject *obj, PyObject *new_dict, bool clear) { assert(Py_TYPE(obj)->tp_flags & Py_TPFLAGS_MANAGED_DICT); +#ifndef NDEBUG + Py_BEGIN_CRITICAL_SECTION(obj); assert(_PyObject_InlineValuesConsistencyCheck(obj)); + Py_END_CRITICAL_SECTION(); +#endif int err = 0; PyTypeObject *tp = Py_TYPE(obj); if (tp->tp_flags & Py_TPFLAGS_INLINE_VALUES) { - PyDictObject *dict = _PyObject_GetManagedDict(obj); - if (dict == NULL) { #ifdef Py_GIL_DISABLED - Py_BEGIN_CRITICAL_SECTION(obj); + PyDictObject *prev_dict; + if (!try_set_dict_inline_only_or_other_dict(obj, new_dict, &prev_dict)) { + // We had a materialized dictionary which pointed at the inline + // values. We need to lock both the object and the dict at the + // same time to safely replace it. We can't merely lock the dictionary + // while the object is locked because it could suspend the object lock. + PyDictObject *cur_dict; - dict = _PyObject_ManagedDictPointer(obj)->dict; - if (dict == NULL) { - set_dict_inline_values(obj, (PyDictObject *)new_dict); - } + assert(prev_dict != NULL); + Py_BEGIN_CRITICAL_SECTION2(obj, prev_dict); - Py_END_CRITICAL_SECTION(); + // We could have had another thread race in between the call to + // try_set_dict_inline_only_or_other_dict where we locked the object + // and when we unlocked and re-locked the dictionary. 
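+ // If that raced, cur_dict will no longer match prev_dict;
+ // replace_dict_probably_inline_materialized() only detaches the inline
+ // values when the two are still the same dictionary.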
+ cur_dict = _PyObject_GetManagedDict(obj); - if (dict == NULL) { - return 0; + err = replace_dict_probably_inline_materialized(obj, prev_dict, + cur_dict, new_dict); + + Py_END_CRITICAL_SECTION2(); + + // Decref for the dictionary we incref'd in try_set_dict_inline_only_or_other_dict + // while the object was locked + decref_maybe_delay((PyObject *)prev_dict, + !clear && prev_dict != cur_dict); + if (err != 0) { + return err; } -#else - set_dict_inline_values(obj, (PyDictObject *)new_dict); - return 0; -#endif - } - Py_BEGIN_CRITICAL_SECTION2(dict, obj); + prev_dict = cur_dict; + } - // We've locked dict, but the actual dict could have changed - // since we locked it. - dict = _PyObject_ManagedDictPointer(obj)->dict; - err = _PyDict_DetachFromObject(dict, obj); - if (err == 0) { - FT_ATOMIC_STORE_PTR(_PyObject_ManagedDictPointer(obj)->dict, - (PyDictObject *)Py_XNewRef(new_dict)); + if (prev_dict != NULL) { + // decref for the dictionary that we replaced + decref_maybe_delay((PyObject *)prev_dict, !clear); } - Py_END_CRITICAL_SECTION2(); - if (err == 0) { - Py_XDECREF(dict); + return 0; +#else + PyDictObject *dict = _PyObject_GetManagedDict(obj); + if (dict == NULL) { + set_dict_inline_values(obj, (PyDictObject *)new_dict); + return 0; } + if (_PyDict_DetachFromObject(dict, obj) == 0) { + _PyObject_ManagedDictPointer(obj)->dict = (PyDictObject *)Py_XNewRef(new_dict); + Py_DECREF(dict); + return 0; + } + assert(new_dict == NULL); + return -1; +#endif } else { PyDictObject *dict; @@ -7225,18 +7253,37 @@ _PyObject_SetManagedDict(PyObject *obj, PyObject *new_dict) (PyDictObject *)Py_XNewRef(new_dict)); Py_END_CRITICAL_SECTION(); - - Py_XDECREF(dict); + decref_maybe_delay((PyObject *)dict, !clear); } assert(_PyObject_InlineValuesConsistencyCheck(obj)); return err; } +int +_PyObject_SetManagedDict(PyObject *obj, PyObject *new_dict) +{ + return set_or_clear_managed_dict(obj, new_dict, false); +} + void PyObject_ClearManagedDict(PyObject *obj) { - if (_PyObject_SetManagedDict(obj, NULL) < 0) { + if (set_or_clear_managed_dict(obj, NULL, true) < 0) { + /* Must be out of memory */ + assert(PyErr_Occurred() == PyExc_MemoryError); PyErr_WriteUnraisable(NULL); + /* Clear the dict */ + PyDictObject *dict = _PyObject_GetManagedDict(obj); + Py_BEGIN_CRITICAL_SECTION2(dict, obj); + dict = _PyObject_ManagedDictPointer(obj)->dict; + PyInterpreterState *interp = _PyInterpreterState_GET(); + PyDictKeysObject *oldkeys = dict->ma_keys; + set_keys(dict, Py_EMPTY_KEYS); + dict->ma_values = NULL; + dictkeys_decref(interp, oldkeys, IS_DICT_SHARED(dict)); + STORE_USED(dict, 0); + set_dict_inline_values(obj, NULL); + Py_END_CRITICAL_SECTION2(); } } @@ -7261,12 +7308,6 @@ _PyDict_DetachFromObject(PyDictObject *mp, PyObject *obj) PyDictValues *values = copy_values(mp->ma_values); if (values == NULL) { - /* Out of memory. 
Clear the dict */ - PyInterpreterState *interp = _PyInterpreterState_GET(); - PyDictKeysObject *oldkeys = mp->ma_keys; - set_keys(mp, Py_EMPTY_KEYS); - dictkeys_decref(interp, oldkeys, IS_DICT_SHARED(mp)); - STORE_USED(mp, 0); PyErr_NoMemory(); return -1; } @@ -7390,20 +7431,54 @@ _PyDictKeys_DecRef(PyDictKeysObject *keys) dictkeys_decref(interp, keys, false); } -uint32_t _PyDictKeys_GetVersionForCurrentState(PyInterpreterState *interp, - PyDictKeysObject *dictkeys) +static inline uint32_t +get_next_dict_keys_version(PyInterpreterState *interp) { - if (dictkeys->dk_version != 0) { - return dictkeys->dk_version; - } +#ifdef Py_GIL_DISABLED + uint32_t v; + do { + v = _Py_atomic_load_uint32_relaxed( + &interp->dict_state.next_keys_version); + if (v == 0) { + return 0; + } + } while (!_Py_atomic_compare_exchange_uint32( + &interp->dict_state.next_keys_version, &v, v + 1)); +#else if (interp->dict_state.next_keys_version == 0) { return 0; } uint32_t v = interp->dict_state.next_keys_version++; - dictkeys->dk_version = v; +#endif return v; } +// In free-threaded builds the caller must ensure that the keys object is not +// being mutated concurrently by another thread. +uint32_t +_PyDictKeys_GetVersionForCurrentState(PyInterpreterState *interp, + PyDictKeysObject *dictkeys) +{ + uint32_t dk_version = FT_ATOMIC_LOAD_UINT32_RELAXED(dictkeys->dk_version); + if (dk_version != 0) { + return dk_version; + } + dk_version = get_next_dict_keys_version(interp); + FT_ATOMIC_STORE_UINT32_RELAXED(dictkeys->dk_version, dk_version); + return dk_version; +} + +uint32_t +_PyDict_GetKeysVersionForCurrentState(PyInterpreterState *interp, + PyDictObject *dict) +{ + ASSERT_DICT_LOCKED((PyObject *) dict); + uint32_t dk_version = + _PyDictKeys_GetVersionForCurrentState(interp, dict->ma_keys); + ensure_shared_on_keys_version_assignment(dict); + return dk_version; +} + static inline int validate_watcher_id(PyInterpreterState *interp, int watcher_id) { diff --git a/Objects/funcobject.c b/Objects/funcobject.c index 1f2387f68440aa..4ba47285f7152f 100644 --- a/Objects/funcobject.c +++ b/Objects/funcobject.c @@ -289,12 +289,14 @@ functions is running. */ +#ifndef Py_GIL_DISABLED static inline struct _func_version_cache_item * get_cache_item(PyInterpreterState *interp, uint32_t version) { return interp->func_state.func_version_cache + (version % FUNC_VERSION_CACHE_SIZE); } +#endif void _PyFunction_SetVersion(PyFunctionObject *func, uint32_t version) diff --git a/Objects/genobject.c b/Objects/genobject.c index 19c2c4e3331a89..e87f199c2504ba 100644 --- a/Objects/genobject.c +++ b/Objects/genobject.c @@ -471,14 +471,14 @@ _gen_throw(PyGenObject *gen, int close_on_genexit, return gen_send_ex(gen, Py_None, 1, 0); goto throw_here; } + PyThreadState *tstate = _PyThreadState_GET(); + assert(tstate != NULL); if (PyGen_CheckExact(yf) || PyCoro_CheckExact(yf)) { /* `yf` is a generator or a coroutine. */ - PyThreadState *tstate = _PyThreadState_GET(); - /* Since we are fast-tracking things by skipping the eval loop, - we need to update the current frame so the stack trace - will be reported correctly to the user. */ - /* XXX We should probably be updating the current frame - somewhere in ceval.c. */ + + /* Link frame into the stack to enable complete backtraces. */ + /* XXX We should probably be updating the current frame somewhere in + ceval.c. 
*/ _PyInterpreterFrame *prev = tstate->current_frame; frame->previous = prev; tstate->current_frame = frame; @@ -502,10 +502,16 @@ _gen_throw(PyGenObject *gen, int close_on_genexit, Py_DECREF(yf); goto throw_here; } + + _PyInterpreterFrame *prev = tstate->current_frame; + frame->previous = prev; + tstate->current_frame = frame; PyFrameState state = gen->gi_frame_state; gen->gi_frame_state = FRAME_EXECUTING; ret = PyObject_CallFunctionObjArgs(meth, typ, val, tb, NULL); gen->gi_frame_state = state; + tstate->current_frame = prev; + frame->previous = NULL; Py_DECREF(meth); } Py_DECREF(yf); diff --git a/Objects/longobject.c b/Objects/longobject.c index b4c0f63a9843ce..4aa35685b509f2 100644 --- a/Objects/longobject.c +++ b/Objects/longobject.c @@ -784,6 +784,39 @@ PyLong_AsUnsignedLongMask(PyObject *op) return val; } +int +PyLong_IsPositive(PyObject *obj) +{ + assert(obj != NULL); + if (!PyLong_Check(obj)) { + PyErr_Format(PyExc_TypeError, "expected int, got %T", obj); + return -1; + } + return _PyLong_IsPositive((PyLongObject *)obj); +} + +int +PyLong_IsNegative(PyObject *obj) +{ + assert(obj != NULL); + if (!PyLong_Check(obj)) { + PyErr_Format(PyExc_TypeError, "expected int, got %T", obj); + return -1; + } + return _PyLong_IsNegative((PyLongObject *)obj); +} + +int +PyLong_IsZero(PyObject *obj) +{ + assert(obj != NULL); + if (!PyLong_Check(obj)) { + PyErr_Format(PyExc_TypeError, "expected int, got %T", obj); + return -1; + } + return _PyLong_IsZero((PyLongObject *)obj); +} + int _PyLong_Sign(PyObject *vv) { diff --git a/Objects/memoryobject.c b/Objects/memoryobject.c index d4672e8198cb24..25634f997ac66b 100644 --- a/Objects/memoryobject.c +++ b/Objects/memoryobject.c @@ -3356,6 +3356,7 @@ memory_iter(PyObject *seq) PyErr_BadInternalCall(); return NULL; } + CHECK_RELEASED(seq); PyMemoryViewObject *obj = (PyMemoryViewObject *)seq; int ndims = obj->view.ndim; if (ndims == 0) { diff --git a/Objects/moduleobject.c b/Objects/moduleobject.c index 535b0d068f064f..a8d64c9aefae6b 100644 --- a/Objects/moduleobject.c +++ b/Objects/moduleobject.c @@ -107,8 +107,6 @@ static void track_module(PyModuleObject *m) { _PyDict_EnablePerThreadRefcounting(m->md_dict); - PyObject_GC_Track(m->md_dict); - _PyObject_SetDeferredRefcount((PyObject *)m); PyObject_GC_Track(m); } diff --git a/Objects/object.c b/Objects/object.c index 7cc74a8dc0d8eb..8868fa29066404 100644 --- a/Objects/object.c +++ b/Objects/object.c @@ -119,7 +119,7 @@ get_reftotal(PyInterpreterState *interp) since we can't determine which interpreter updated it. */ Py_ssize_t total = REFTOTAL(interp); #ifdef Py_GIL_DISABLED - for (PyThreadState *p = interp->threads.head; p != NULL; p = p->next) { + _Py_FOR_EACH_TSTATE_UNLOCKED(interp, p) { /* This may race with other threads modifications to their reftotal */ _PyThreadStateImpl *tstate_impl = (_PyThreadStateImpl *)p; total += _Py_atomic_load_ssize_relaxed(&tstate_impl->reftotal); @@ -2457,11 +2457,7 @@ new_reference(PyObject *op) #ifdef Py_TRACE_REFS _Py_AddToAllObjects(op); #endif - struct _reftracer_runtime_state *tracer = &_PyRuntime.ref_tracer; - if (tracer->tracer_func != NULL) { - void* data = tracer->tracer_data; - tracer->tracer_func(op, PyRefTracer_CREATE, data); - } + _PyReftracerTrack(op, PyRefTracer_CREATE); } void @@ -2519,16 +2515,41 @@ _PyObject_SetDeferredRefcount(PyObject *op) #endif } +int +PyUnstable_Object_EnableDeferredRefcount(PyObject *op) +{ +#ifdef Py_GIL_DISABLED + if (!PyType_IS_GC(Py_TYPE(op))) { + // Deferred reference counting doesn't work + // on untracked types. 
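+ // The hint is simply ignored for such objects, hence the 0 return value.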
+ return 0; + } + + uint8_t bits = _Py_atomic_load_uint8(&op->ob_gc_bits); + if ((bits & _PyGC_BITS_DEFERRED) != 0) + { + // Nothing to do. + return 0; + } + + if (_Py_atomic_compare_exchange_uint8(&op->ob_gc_bits, &bits, bits | _PyGC_BITS_DEFERRED) == 0) + { + // Someone beat us to it! + return 0; + } + _Py_atomic_add_ssize(&op->ob_ref_shared, _Py_REF_SHARED(_Py_REF_DEFERRED, 0)); + return 1; +#else + return 0; +#endif +} + void _Py_ResurrectReference(PyObject *op) { #ifdef Py_TRACE_REFS _Py_AddToAllObjects(op); #endif - if (_PyRuntime.ref_tracer.tracer_func != NULL) { - void* data = _PyRuntime.ref_tracer.tracer_data; - _PyRuntime.ref_tracer.tracer_func(op, PyRefTracer_CREATE, data); - } } @@ -2918,15 +2939,10 @@ _Py_Dealloc(PyObject *op) Py_INCREF(type); #endif - struct _reftracer_runtime_state *tracer = &_PyRuntime.ref_tracer; - if (tracer->tracer_func != NULL) { - void* data = tracer->tracer_data; - tracer->tracer_func(op, PyRefTracer_DESTROY, data); - } - #ifdef Py_TRACE_REFS _Py_ForgetReference(op); #endif + _PyReftracerTrack(op, PyRefTracer_DESTROY); (*dealloc)(op); #ifdef Py_DEBUG diff --git a/Objects/obmalloc.c b/Objects/obmalloc.c index dfeccfa4dd7658..2cc0377f68f990 100644 --- a/Objects/obmalloc.c +++ b/Objects/obmalloc.c @@ -1093,10 +1093,24 @@ struct _mem_work_chunk { }; static void -free_work_item(uintptr_t ptr) +free_work_item(uintptr_t ptr, delayed_dealloc_cb cb, void *state) { if (ptr & 0x01) { - PyObject_Free((char *)(ptr - 1)); + PyObject *obj = (PyObject *)(ptr - 1); +#ifdef Py_GIL_DISABLED + if (cb == NULL) { + assert(!_PyInterpreterState_GET()->stoptheworld.world_stopped); + Py_DECREF(obj); + return; + } + + Py_ssize_t refcount = _Py_ExplicitMergeRefcount(obj, -1); + if (refcount == 0) { + cb(obj, state); + } +#else + Py_DECREF(obj); +#endif } else { PyMem_Free((void *)ptr); @@ -1107,7 +1121,7 @@ static void free_delayed(uintptr_t ptr) { #ifndef Py_GIL_DISABLED - free_work_item(ptr); + free_work_item(ptr, NULL, NULL); #else PyInterpreterState *interp = _PyInterpreterState_GET(); if (_PyInterpreterState_GetFinalizing(interp) != NULL || @@ -1115,7 +1129,8 @@ free_delayed(uintptr_t ptr) { // Free immediately during interpreter shutdown or if the world is // stopped. 
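+ // In both cases there is no need to wait for a QSBR grace period before
+ // the memory can be reused.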
- free_work_item(ptr); + assert(!interp->stoptheworld.world_stopped || !(ptr & 0x01)); + free_work_item(ptr, NULL, NULL); return; } @@ -1142,7 +1157,8 @@ free_delayed(uintptr_t ptr) if (buf == NULL) { // failed to allocate a buffer, free immediately _PyEval_StopTheWorld(tstate->base.interp); - free_work_item(ptr); + // TODO: Fix me + free_work_item(ptr, NULL, NULL); _PyEval_StartTheWorld(tstate->base.interp); return; } @@ -1166,12 +1182,16 @@ _PyMem_FreeDelayed(void *ptr) free_delayed((uintptr_t)ptr); } +#ifdef Py_GIL_DISABLED void -_PyObject_FreeDelayed(void *ptr) +_PyObject_XDecRefDelayed(PyObject *ptr) { assert(!((uintptr_t)ptr & 0x01)); - free_delayed(((uintptr_t)ptr)|0x01); + if (ptr != NULL) { + free_delayed(((uintptr_t)ptr)|0x01); + } } +#endif static struct _mem_work_chunk * work_queue_first(struct llist_node *head) @@ -1181,7 +1201,7 @@ work_queue_first(struct llist_node *head) static void process_queue(struct llist_node *head, struct _qsbr_thread_state *qsbr, - bool keep_empty) + bool keep_empty, delayed_dealloc_cb cb, void *state) { while (!llist_empty(head)) { struct _mem_work_chunk *buf = work_queue_first(head); @@ -1192,7 +1212,7 @@ process_queue(struct llist_node *head, struct _qsbr_thread_state *qsbr, return; } - free_work_item(item->ptr); + free_work_item(item->ptr, cb, state); buf->rd_idx++; } @@ -1210,7 +1230,8 @@ process_queue(struct llist_node *head, struct _qsbr_thread_state *qsbr, static void process_interp_queue(struct _Py_mem_interp_free_queue *queue, - struct _qsbr_thread_state *qsbr) + struct _qsbr_thread_state *qsbr, delayed_dealloc_cb cb, + void *state) { if (!_Py_atomic_load_int_relaxed(&queue->has_work)) { return; @@ -1218,7 +1239,7 @@ process_interp_queue(struct _Py_mem_interp_free_queue *queue, // Try to acquire the lock, but don't block if it's already held. if (_PyMutex_LockTimed(&queue->mutex, 0, 0) == PY_LOCK_ACQUIRED) { - process_queue(&queue->head, qsbr, false); + process_queue(&queue->head, qsbr, false, cb, state); int more_work = !llist_empty(&queue->head); _Py_atomic_store_int_relaxed(&queue->has_work, more_work); @@ -1234,10 +1255,23 @@ _PyMem_ProcessDelayed(PyThreadState *tstate) _PyThreadStateImpl *tstate_impl = (_PyThreadStateImpl *)tstate; // Process thread-local work - process_queue(&tstate_impl->mem_free_queue, tstate_impl->qsbr, true); + process_queue(&tstate_impl->mem_free_queue, tstate_impl->qsbr, true, NULL, NULL); + + // Process shared interpreter work + process_interp_queue(&interp->mem_free_queue, tstate_impl->qsbr, NULL, NULL); +} + +void +_PyMem_ProcessDelayedNoDealloc(PyThreadState *tstate, delayed_dealloc_cb cb, void *state) +{ + PyInterpreterState *interp = tstate->interp; + _PyThreadStateImpl *tstate_impl = (_PyThreadStateImpl *)tstate; + + // Process thread-local work + process_queue(&tstate_impl->mem_free_queue, tstate_impl->qsbr, true, cb, state); // Process shared interpreter work - process_interp_queue(&interp->mem_free_queue, tstate_impl->qsbr); + process_interp_queue(&interp->mem_free_queue, tstate_impl->qsbr, cb, state); } void @@ -1279,7 +1313,7 @@ _PyMem_FiniDelayed(PyInterpreterState *interp) // Free the remaining items immediately. There should be no other // threads accessing the memory at this point during shutdown. 
struct _mem_work_item *item = &buf->array[buf->rd_idx]; - free_work_item(item->ptr); + free_work_item(item->ptr, NULL, NULL); buf->rd_idx++; } @@ -1405,7 +1439,7 @@ get_mimalloc_allocated_blocks(PyInterpreterState *interp) { size_t allocated_blocks = 0; #ifdef Py_GIL_DISABLED - for (PyThreadState *t = interp->threads.head; t != NULL; t = t->next) { + _Py_FOR_EACH_TSTATE_UNLOCKED(interp, t) { _PyThreadStateImpl *tstate = (_PyThreadStateImpl *)t; for (int i = 0; i < _Py_MIMALLOC_HEAP_COUNT; i++) { mi_heap_t *heap = &tstate->mimalloc.heaps[i]; diff --git a/Objects/tupleobject.c b/Objects/tupleobject.c index 193914d54bd90e..49977726eadca9 100644 --- a/Objects/tupleobject.c +++ b/Objects/tupleobject.c @@ -966,6 +966,7 @@ _PyTuple_Resize(PyObject **pv, Py_ssize_t newsize) for (i = newsize; i < oldsize; i++) { Py_CLEAR(v->ob_item[i]); } + _PyReftracerTrack((PyObject *)v, PyRefTracer_DESTROY); sv = PyObject_GC_Resize(PyTupleObject, v, newsize); if (sv == NULL) { *pv = NULL; diff --git a/Objects/typeobject.c b/Objects/typeobject.c index 4af7f0273aae91..2611404a3d0d61 100644 --- a/Objects/typeobject.c +++ b/Objects/typeobject.c @@ -4761,10 +4761,10 @@ PyType_FromMetaclass( if (strcmp(memb->name, "__weaklistoffset__") == 0) { weaklistoffset_member = memb; } - if (strcmp(memb->name, "__dictoffset__") == 0) { + else if (strcmp(memb->name, "__dictoffset__") == 0) { dictoffset_member = memb; } - if (strcmp(memb->name, "__vectorcalloffset__") == 0) { + else if (strcmp(memb->name, "__vectorcalloffset__") == 0) { vectorcalloffset_member = memb; } } @@ -5645,6 +5645,24 @@ _PyType_SetFlags(PyTypeObject *self, unsigned long mask, unsigned long flags) END_TYPE_LOCK(); } +int +_PyType_Validate(PyTypeObject *ty, _py_validate_type validate, unsigned int *tp_version) +{ + int err; + BEGIN_TYPE_LOCK(); + err = validate(ty); + if (!err) { + if(assign_version_tag(_PyInterpreterState_GET(), ty)) { + *tp_version = ty->tp_version_tag; + } + else { + err = -1; + } + } + END_TYPE_LOCK(); + return err; +} + static void set_flags_recursive(PyTypeObject *self, unsigned long mask, unsigned long flags) { @@ -9314,13 +9332,13 @@ wrap_buffer(PyObject *self, PyObject *args, void *wrapped) if (flags == -1 && PyErr_Occurred()) { return NULL; } - if (flags > INT_MAX) { + if (flags > INT_MAX || flags < INT_MIN) { PyErr_SetString(PyExc_OverflowError, - "buffer flags too large"); + "buffer flags out of range"); return NULL; } - return _PyMemoryView_FromBufferProc(self, Py_SAFE_DOWNCAST(flags, Py_ssize_t, int), + return _PyMemoryView_FromBufferProc(self, (int)flags, (getbufferproc)wrapped); } diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c index 9cd9781e412524..562e3312b63e9a 100644 --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -1129,6 +1129,7 @@ resize_compact(PyObject *unicode, Py_ssize_t length) #ifdef Py_TRACE_REFS _Py_ForgetReference(unicode); #endif + _PyReftracerTrack(unicode, PyRefTracer_DESTROY); new_unicode = (PyObject *)PyObject_Realloc(unicode, new_size); if (new_unicode == NULL) { diff --git a/PC/python_ver_rc.h b/PC/python_ver_rc.h index 08509f96ed1db8..ee867fe41224c3 100644 --- a/PC/python_ver_rc.h +++ b/PC/python_ver_rc.h @@ -5,7 +5,7 @@ #include "winver.h" #define PYTHON_COMPANY "Python Software Foundation" -#define PYTHON_COPYRIGHT "Copyright \xA9 2001-2024 Python Software Foundation. Copyright \xA9 2000 BeOpen.com. Copyright \xA9 1995-2001 CNRI. Copyright \xA9 1991-1995 SMC." +#define PYTHON_COPYRIGHT "Copyright \xA9 2001 Python Software Foundation. Copyright \xA9 2000 BeOpen.com. 
Copyright \xA9 1995-2001 CNRI. Copyright \xA9 1991-1995 SMC." #define MS_WINDOWS #include "modsupport.h" diff --git a/PC/store_info.txt b/PC/store_info.txt index f6a85cb8ebec1f..d150ba17cbe62d 100644 --- a/PC/store_info.txt +++ b/PC/store_info.txt @@ -109,7 +109,7 @@ PSF LICENSE AGREEMENT FOR PYTHON 3.9 analyze, test, perform and/or display publicly, prepare derivative works, distribute, and otherwise use Python 3.9 alone or in any derivative version, provided, however, that PSF's License Agreement and PSF's notice of - copyright, i.e., "Copyright © 2001-2018 Python Software Foundation; All Rights + copyright, i.e., "Copyright © 2001 Python Software Foundation; All Rights Reserved" are retained in Python 3.9 alone or in any derivative version prepared by Licensee. diff --git a/PCbuild/_tkinter.vcxproj b/PCbuild/_tkinter.vcxproj index 117488a01621cc..87f6005fffc7c5 100644 --- a/PCbuild/_tkinter.vcxproj +++ b/PCbuild/_tkinter.vcxproj @@ -94,6 +94,7 @@ $(tcltkDir)include;%(AdditionalIncludeDirectories) + TCL_WITH_EXTERNAL_TOMMATH;%(PreprocessorDefinitions) WITH_APPINIT;%(PreprocessorDefinitions) Py_TCLTK_DIR="$(tcltkDir.TrimEnd('\').Replace('\', '\\'))";%(PreprocessorDefinitions) @@ -109,9 +110,10 @@ - <_TclTkDLL Include="$(tcltkdir)\bin\$(tclDllName)" /> - <_TclTkDLL Include="$(tcltkdir)\bin\$(tkDllName)" /> - <_TclTkDLL Include="$(tcltkdir)\bin\$(tclZlibDllName)" /> + <_TclTkDLL Include="$(tcltkdir)\bin\$(tclDLLName)" /> + <_TclTkDLL Include="$(tcltkdir)\bin\$(tkDLLName)" /> + <_TclTkDLL Include="$(tcltkdir)\bin\$(tclZlibDLLName)" /> + <_TclTkDLL Include="$(tcltkdir)\bin\$(tommathDLLName)" Condition="$(tommathDLLName) != ''"/> @@ -134,4 +136,4 @@ - \ No newline at end of file + diff --git a/PCbuild/build.bat b/PCbuild/build.bat index abe649553756a7..6d3ce81651ade5 100644 --- a/PCbuild/build.bat +++ b/PCbuild/build.bat @@ -11,7 +11,7 @@ echo.directly to MSBuild may be passed. If the argument contains an '=', the echo.entire argument must be quoted (e.g. `%~nx0 "/p:PlatformToolset=v141"`). echo.Alternatively you can put extra flags for MSBuild in a file named echo.`msbuild.rsp` in the `PCbuild` directory, one flag per line. This file -echo.will be picked automatically by MSBuild. Flags put in this file does not +echo.will be picked automatically by MSBuild. Flags put in this file do not echo.need to be quoted. You can still use environment variables inside the echo.response file. echo. 
@@ -196,4 +196,4 @@ rem Display the current build version information call "%dir%find_msbuild.bat" %MSBUILD% if ERRORLEVEL 1 (echo Cannot locate MSBuild.exe on PATH or as MSBUILD variable & exit /b 2) %MSBUILD% "%dir%pythoncore.vcxproj" /t:ShowVersionInfo /v:m /nologo %1 %2 %3 %4 %5 %6 %7 %8 %9 -if ERRORLEVEL 1 exit /b 3 \ No newline at end of file +if ERRORLEVEL 1 exit /b 3 diff --git a/PCbuild/tcltk.props b/PCbuild/tcltk.props index b4cb401609d409..d26b36ba98e493 100644 --- a/PCbuild/tcltk.props +++ b/PCbuild/tcltk.props @@ -17,15 +17,21 @@ $(ExternalsDir)tcltk-$(TclVersion)\$(ArchName)\ $(tcltkDir)\bin\tclsh$(TclMajorVersion)$(TclMinorVersion)t.exe $(tcltkDir)\..\win32\bin\tclsh$(TclMajorVersion)$(TclMinorVersion)t.exe + TCL_WITH_EXTERNAL_TOMMATH; - tcl$(TclMajorVersion)$(TclMinorVersion)t$(TclDebugExt).dll - tcl$(TclMajorVersion)$(TclMinorVersion)t$(TclDebugExt).lib - tclsh$(TclMajorVersion)$(TclMinorVersion)t$(TclDebugExt).exe - tk$(TkMajorVersion)$(TkMinorVersion)t$(TclDebugExt).dll - tk$(TkMajorVersion)$(TkMinorVersion)t$(TclDebugExt).lib + t + tcl9 + tcl$(TclMajorVersion)$(TclMinorVersion)$(tcltkSuffix)$(TclDebugExt).dll + tcl$(TclMajorVersion)$(TclMinorVersion)$(tcltkSuffix)$(TclDebugExt).lib + tclsh$(TclMajorVersion)$(TclMinorVersion)$(tcltkSuffix)$(TclDebugExt).exe + $(tkPrefix)tk$(TkMajorVersion)$(TkMinorVersion)$(tcltkSuffix)$(TclDebugExt).dll + $(tkPrefix)tk$(TclMajorVersion)$(TclMinorVersion)$(tcltkSuffix)$(TclDebugExt).lib zlib1.dll - $(tcltkDir)lib\tcl$(TclMajorVersion)$(TclMinorVersion)t$(TclDebugExt).lib;$(tcltkDir)lib\tk$(TkMajorVersion)$(TkMinorVersion)t$(TclDebugExt).lib + libtommath.dll + tommath.lib + $(tcltkDir)lib\$(TclLibName);$(tcltkDir)lib\$(TkLibName); + $(tcltkLib);$(tcltkDir)lib\$(tommathLibName) IX86 AMD64 ARM64 diff --git a/Programs/_freeze_module.c b/Programs/_freeze_module.c index 891e4256e897ab..06d1ee016dc2a8 100644 --- a/Programs/_freeze_module.c +++ b/Programs/_freeze_module.c @@ -121,6 +121,7 @@ compile_and_marshal(const char *name, const char *text) return NULL; } + assert(Py_MARSHAL_VERSION >= 5); PyObject *marshalled = PyMarshal_WriteObjectToString(code, Py_MARSHAL_VERSION); Py_CLEAR(code); if (marshalled == NULL) { diff --git a/Python/_warnings.c b/Python/_warnings.c index 3f9e73b5376223..e05ba99e8eaec4 100644 --- a/Python/_warnings.c +++ b/Python/_warnings.c @@ -803,7 +803,8 @@ is_filename_to_skip(PyObject *filename, PyTupleObject *skip_file_prefixes) for (Py_ssize_t idx = 0; idx < prefixes; ++idx) { PyObject *prefix = PyTuple_GET_ITEM(skip_file_prefixes, idx); - Py_ssize_t found = PyUnicode_Tailmatch(filename, prefix, 0, -1, -1); + Py_ssize_t found = PyUnicode_Tailmatch(filename, prefix, + 0, PY_SSIZE_T_MAX, -1); if (found == 1) { return true; } diff --git a/Python/bltinmodule.c b/Python/bltinmodule.c index a3f41190261a05..85ebd5b00cc18b 100644 --- a/Python/bltinmodule.c +++ b/Python/bltinmodule.c @@ -3336,7 +3336,7 @@ _PyBuiltin_Init(PyInterpreterState *interp) SETBUILTIN("False", Py_False); SETBUILTIN("True", Py_True); SETBUILTIN("bool", &PyBool_Type); - SETBUILTIN("memoryview", &PyMemoryView_Type); + SETBUILTIN("memoryview", &PyMemoryView_Type); SETBUILTIN("bytearray", &PyByteArray_Type); SETBUILTIN("bytes", &PyBytes_Type); SETBUILTIN("classmethod", &PyClassMethod_Type); diff --git a/Python/bytecodes.c b/Python/bytecodes.c index 7ae0f20369641a..6ee886c2ba0fc8 100644 --- a/Python/bytecodes.c +++ b/Python/bytecodes.c @@ -391,7 +391,7 @@ dummy_func( }; specializing op(_SPECIALIZE_TO_BOOL, (counter/1, value -- value)) { - #if 
ENABLE_SPECIALIZATION + #if ENABLE_SPECIALIZATION_FT if (ADAPTIVE_COUNTER_TRIGGERS(counter)) { next_instr = this_instr; _Py_Specialize_ToBool(value, next_instr); @@ -399,7 +399,7 @@ dummy_func( } OPCODE_DEFERRED_INC(TO_BOOL); ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter); - #endif /* ENABLE_SPECIALIZATION */ + #endif /* ENABLE_SPECIALIZATION_FT */ } op(_TO_BOOL, (value -- res)) { @@ -435,7 +435,7 @@ dummy_func( PyObject *value_o = PyStackRef_AsPyObjectBorrow(value); EXIT_IF(!PyList_CheckExact(value_o)); STAT_INC(TO_BOOL, hit); - res = Py_SIZE(value_o) ? PyStackRef_True : PyStackRef_False; + res = PyList_GET_SIZE(value_o) ? PyStackRef_True : PyStackRef_False; DECREF_INPUTS(); } @@ -661,7 +661,7 @@ dummy_func( assert(next_instr->op.code == STORE_FAST); next_oparg = next_instr->op.arg; #else - next_oparg = CURRENT_OPERAND(); + next_oparg = CURRENT_OPERAND0(); #endif _PyStackRef *target_local = &GETLOCAL(next_oparg); DEOPT_IF(!PyStackRef_Is(*target_local, left)); @@ -1569,7 +1569,7 @@ dummy_func( }; specializing op(_SPECIALIZE_LOAD_GLOBAL, (counter/1 -- )) { - #if ENABLE_SPECIALIZATION + #if ENABLE_SPECIALIZATION_FT if (ADAPTIVE_COUNTER_TRIGGERS(counter)) { PyObject *name = GETITEM(FRAME_CO_NAMES, oparg>>1); next_instr = this_instr; @@ -1578,7 +1578,7 @@ dummy_func( } OPCODE_DEFERRED_INC(LOAD_GLOBAL); ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter); - #endif /* ENABLE_SPECIALIZATION */ + #endif /* ENABLE_SPECIALIZATION_FT */ } // res[1] because we need a pointer to res to pass it to _PyEval_LoadGlobalStackRef @@ -1599,16 +1599,18 @@ dummy_func( op(_GUARD_GLOBALS_VERSION, (version/1 --)) { PyDictObject *dict = (PyDictObject *)GLOBALS(); DEOPT_IF(!PyDict_CheckExact(dict)); - DEOPT_IF(dict->ma_keys->dk_version != version); - assert(DK_IS_UNICODE(dict->ma_keys)); + PyDictKeysObject *keys = FT_ATOMIC_LOAD_PTR_ACQUIRE(dict->ma_keys); + DEOPT_IF(FT_ATOMIC_LOAD_UINT32_RELAXED(keys->dk_version) != version); + assert(DK_IS_UNICODE(keys)); } op(_GUARD_GLOBALS_VERSION_PUSH_KEYS, (version / 1 -- globals_keys: PyDictKeysObject *)) { PyDictObject *dict = (PyDictObject *)GLOBALS(); DEOPT_IF(!PyDict_CheckExact(dict)); - DEOPT_IF(dict->ma_keys->dk_version != version); - globals_keys = dict->ma_keys; + PyDictKeysObject *keys = FT_ATOMIC_LOAD_PTR_ACQUIRE(dict->ma_keys); + DEOPT_IF(FT_ATOMIC_LOAD_UINT32_RELAXED(keys->dk_version) != version); + globals_keys = keys; assert(DK_IS_UNICODE(globals_keys)); } @@ -1616,33 +1618,44 @@ dummy_func( { PyDictObject *dict = (PyDictObject *)BUILTINS(); DEOPT_IF(!PyDict_CheckExact(dict)); - DEOPT_IF(dict->ma_keys->dk_version != version); - builtins_keys = dict->ma_keys; + PyDictKeysObject *keys = FT_ATOMIC_LOAD_PTR_ACQUIRE(dict->ma_keys); + DEOPT_IF(FT_ATOMIC_LOAD_UINT32_RELAXED(keys->dk_version) != version); + builtins_keys = keys; assert(DK_IS_UNICODE(builtins_keys)); } op(_LOAD_GLOBAL_MODULE_FROM_KEYS, (index/1, globals_keys: PyDictKeysObject* -- res, null if (oparg & 1))) { PyDictUnicodeEntry *entries = DK_UNICODE_ENTRIES(globals_keys); - PyObject *res_o = entries[index].me_value; + PyObject *res_o = FT_ATOMIC_LOAD_PTR_RELAXED(entries[index].me_value); DEAD(globals_keys); SYNC_SP(); DEOPT_IF(res_o == NULL); + #if Py_GIL_DISABLED + int increfed = _Py_TryIncrefCompareStackRef(&entries[index].me_value, res_o, &res); + DEOPT_IF(!increfed); + #else Py_INCREF(res_o); + res = PyStackRef_FromPyObjectSteal(res_o); + #endif STAT_INC(LOAD_GLOBAL, hit); null = PyStackRef_NULL; - res = PyStackRef_FromPyObjectSteal(res_o); } op(_LOAD_GLOBAL_BUILTINS_FROM_KEYS, (index/1, builtins_keys: 
PyDictKeysObject* -- res, null if (oparg & 1))) { PyDictUnicodeEntry *entries = DK_UNICODE_ENTRIES(builtins_keys); - PyObject *res_o = entries[index].me_value; + PyObject *res_o = FT_ATOMIC_LOAD_PTR_RELAXED(entries[index].me_value); DEAD(builtins_keys); SYNC_SP(); DEOPT_IF(res_o == NULL); + #if Py_GIL_DISABLED + int increfed = _Py_TryIncrefCompareStackRef(&entries[index].me_value, res_o, &res); + DEOPT_IF(!increfed); + #else Py_INCREF(res_o); + res = PyStackRef_FromPyObjectSteal(res_o); + #endif STAT_INC(LOAD_GLOBAL, hit); null = PyStackRef_NULL; - res = PyStackRef_FromPyObjectSteal(res_o); } macro(LOAD_GLOBAL_MODULE) = @@ -2132,7 +2145,7 @@ dummy_func( op(_CHECK_ATTR_MODULE, (dict_version/2, owner -- owner)) { PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner); - DEOPT_IF(!PyModule_CheckExact(owner_o)); + DEOPT_IF(Py_TYPE(owner_o)->tp_getattro != PyModule_Type.tp_getattro); PyDictObject *dict = (PyDictObject *)((PyModuleObject *)owner_o)->md_dict; assert(dict != NULL); DEOPT_IF(dict->ma_keys->dk_version != dict_version); @@ -2340,10 +2353,6 @@ dummy_func( DEOPT_IF(ep->me_key != name); PyObject *old_value = ep->me_value; DEOPT_IF(old_value == NULL); - /* Ensure dict is GC tracked if it needs to be */ - if (!_PyObject_GC_IS_TRACKED(dict) && _PyObject_GC_MAY_BE_TRACKED(PyStackRef_AsPyObjectBorrow(value))) { - _PyObject_GC_TRACK(dict); - } _PyDict_NotifyEvent(tstate->interp, PyDict_EVENT_MODIFIED, dict, name, PyStackRef_AsPyObjectBorrow(value)); ep->me_value = PyStackRef_AsPyObjectSteal(value); // old_value should be DECREFed after GC track checking is done, if not, it could raise a segmentation fault, @@ -2624,15 +2633,16 @@ dummy_func( } _PyExecutorObject *executor; int optimized = _PyOptimizer_Optimize(frame, start, stack_pointer, &executor, 0); - ERROR_IF(optimized < 0, error); - if (optimized) { + if (optimized <= 0) { + this_instr[1].counter = restart_backoff_counter(counter); + ERROR_IF(optimized < 0, error); + } + else { + this_instr[1].counter = initial_jump_backoff_counter(); assert(tstate->previous_executor == NULL); tstate->previous_executor = Py_None; GOTO_TIER_TWO(executor); } - else { - this_instr[1].counter = restart_backoff_counter(counter); - } } else { ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter); @@ -3463,6 +3473,12 @@ dummy_func( EXIT_IF(func->func_version != func_version); } + tier2 op(_CHECK_FUNCTION_VERSION_INLINE, (func_version/2, callable_o/4 --)) { + assert(PyFunction_Check(callable_o)); + PyFunctionObject *func = (PyFunctionObject *)callable_o; + EXIT_IF(func->func_version != func_version); + } + macro(CALL_PY_GENERAL) = unused/1 + // Skip over the counter _CHECK_PEP_523 + @@ -4869,6 +4885,9 @@ dummy_func( tstate->previous_executor = (PyObject *)current_executor; GOTO_TIER_ONE(target); } + else { + exit->temperature = initial_temperature_backoff_counter(); + } } exit->executor = executor; } diff --git a/Python/ceval.c b/Python/ceval.c index 9a608f06966688..2a3938572c1569 100644 --- a/Python/ceval.c +++ b/Python/ceval.c @@ -99,11 +99,7 @@ } \ _Py_DECREF_STAT_INC(); \ if (--op->ob_refcnt == 0) { \ - struct _reftracer_runtime_state *tracer = &_PyRuntime.ref_tracer; \ - if (tracer->tracer_func != NULL) { \ - void* data = tracer->tracer_data; \ - tracer->tracer_func(op, PyRefTracer_DESTROY, data); \ - } \ + _PyReftracerTrack(op, PyRefTracer_DESTROY); \ destructor d = (destructor)(dealloc); \ d(op); \ } \ @@ -300,11 +296,12 @@ Py_SetRecursionLimit(int new_limit) { PyInterpreterState *interp = _PyInterpreterState_GET(); interp->ceval.recursion_limit = new_limit; - 
for (PyThreadState *p = interp->threads.head; p != NULL; p = p->next) { + _Py_FOR_EACH_TSTATE_BEGIN(interp, p) { int depth = p->py_recursion_limit - p->py_recursion_remaining; p->py_recursion_limit = new_limit; p->py_recursion_remaining = new_limit - depth; } + _Py_FOR_EACH_TSTATE_END(interp); } /* The function _Py_EnterRecursiveCallTstate() only calls _Py_CheckRecursiveCall() diff --git a/Python/ceval_gil.c b/Python/ceval_gil.c index 4c9f59f837e11b..1f811e72406130 100644 --- a/Python/ceval_gil.c +++ b/Python/ceval_gil.c @@ -977,25 +977,19 @@ make_pending_calls(PyThreadState *tstate) void _Py_set_eval_breaker_bit_all(PyInterpreterState *interp, uintptr_t bit) { - _PyRuntimeState *runtime = &_PyRuntime; - - HEAD_LOCK(runtime); - for (PyThreadState *tstate = interp->threads.head; tstate != NULL; tstate = tstate->next) { + _Py_FOR_EACH_TSTATE_BEGIN(interp, tstate) { _Py_set_eval_breaker_bit(tstate, bit); } - HEAD_UNLOCK(runtime); + _Py_FOR_EACH_TSTATE_END(interp); } void _Py_unset_eval_breaker_bit_all(PyInterpreterState *interp, uintptr_t bit) { - _PyRuntimeState *runtime = &_PyRuntime; - - HEAD_LOCK(runtime); - for (PyThreadState *tstate = interp->threads.head; tstate != NULL; tstate = tstate->next) { + _Py_FOR_EACH_TSTATE_BEGIN(interp, tstate) { _Py_unset_eval_breaker_bit(tstate, bit); } - HEAD_UNLOCK(runtime); + _Py_FOR_EACH_TSTATE_END(interp); } void diff --git a/Python/ceval_macros.h b/Python/ceval_macros.h index 5df55813a0ddeb..603b71ea938cde 100644 --- a/Python/ceval_macros.h +++ b/Python/ceval_macros.h @@ -413,7 +413,8 @@ do { \ #define CURRENT_OPARG() (next_uop[-1].oparg) -#define CURRENT_OPERAND() (next_uop[-1].operand) +#define CURRENT_OPERAND0() (next_uop[-1].operand0) +#define CURRENT_OPERAND1() (next_uop[-1].operand1) #define JUMP_TO_JUMP_TARGET() goto jump_to_jump_target #define JUMP_TO_ERROR() goto jump_to_error_target diff --git a/Python/crossinterp.c b/Python/crossinterp.c index 2daba99988c12a..7aaa045f375cf0 100644 --- a/Python/crossinterp.c +++ b/Python/crossinterp.c @@ -9,19 +9,15 @@ #include "pycore_pyerrors.h" // _PyErr_Clear() -#define _PyXI_GET_GLOBAL_STATE(interp) (&(interp)->runtime->xi) -#define _PyXI_GET_STATE(interp) (&(interp)->xi) - - /**************/ /* exceptions */ /**************/ -static int init_exceptions(PyInterpreterState *); -static void fini_exceptions(PyInterpreterState *); -static int _init_not_shareable_error_type(PyInterpreterState *); -static void _fini_not_shareable_error_type(PyInterpreterState *); -static PyObject * _get_not_shareable_error_type(PyInterpreterState *); +typedef struct xi_exceptions exceptions_t; +static int init_static_exctypes(exceptions_t *, PyInterpreterState *); +static void fini_static_exctypes(exceptions_t *, PyInterpreterState *); +static int init_heap_exctypes(exceptions_t *); +static void fini_heap_exctypes(exceptions_t *); #include "crossinterp_exceptions.h" @@ -68,7 +64,7 @@ _Py_CallInInterpreterAndRawFree(PyInterpreterState *interp, static void xid_lookup_init(_PyXIData_lookup_t *); static void xid_lookup_fini(_PyXIData_lookup_t *); -static xidatafunc lookup_getdata(PyInterpreterState *, PyObject *); +static xidatafunc lookup_getdata(_PyXIData_lookup_context_t *, PyObject *); #include "crossinterp_data_lookup.h" @@ -126,7 +122,7 @@ void _PyXIData_Init(_PyXIData_t *data, PyInterpreterState *interp, void *shared, PyObject *obj, - xid_newobjectfunc new_object) + xid_newobjfunc new_object) { assert(data != NULL); assert(new_object != NULL); @@ -150,7 +146,7 @@ int _PyXIData_InitWithSize(_PyXIData_t *data, 
PyInterpreterState *interp, const size_t size, PyObject *obj, - xid_newobjectfunc new_object) + xid_newobjfunc new_object) { assert(size > 0); // For now we always free the shared data in the same interpreter @@ -202,10 +198,9 @@ _check_xidata(PyThreadState *tstate, _PyXIData_t *data) } static inline void -_set_xid_lookup_failure(PyInterpreterState *interp, - PyObject *obj, const char *msg) +_set_xid_lookup_failure(dlcontext_t *ctx, PyObject *obj, const char *msg) { - PyObject *exctype = _get_not_shareable_error_type(interp); + PyObject *exctype = ctx->PyExc_NotShareableError; assert(exctype != NULL); if (msg != NULL) { assert(obj == NULL); @@ -222,13 +217,12 @@ _set_xid_lookup_failure(PyInterpreterState *interp, } int -_PyObject_CheckXIData(PyObject *obj) +_PyObject_CheckXIData(_PyXIData_lookup_context_t *ctx, PyObject *obj) { - PyInterpreterState *interp = PyInterpreterState_Get(); - xidatafunc getdata = lookup_getdata(interp, obj); + xidatafunc getdata = lookup_getdata(ctx, obj); if (getdata == NULL) { if (!PyErr_Occurred()) { - _set_xid_lookup_failure(interp, obj, NULL); + _set_xid_lookup_failure(ctx, obj, NULL); } return -1; } @@ -236,7 +230,8 @@ _PyObject_CheckXIData(PyObject *obj) } int -_PyObject_GetXIData(PyObject *obj, _PyXIData_t *data) +_PyObject_GetXIData(_PyXIData_lookup_context_t *ctx, + PyObject *obj, _PyXIData_t *data) { PyThreadState *tstate = PyThreadState_Get(); PyInterpreterState *interp = tstate->interp; @@ -247,11 +242,11 @@ _PyObject_GetXIData(PyObject *obj, _PyXIData_t *data) // Call the "getdata" func for the object. Py_INCREF(obj); - xidatafunc getdata = lookup_getdata(interp, obj); + xidatafunc getdata = lookup_getdata(ctx, obj); if (getdata == NULL) { Py_DECREF(obj); if (!PyErr_Occurred()) { - _set_xid_lookup_failure(interp, obj, NULL); + _set_xid_lookup_failure(ctx, obj, NULL); } return -1; } @@ -968,6 +963,8 @@ _PyXI_ClearExcInfo(_PyXI_excinfo *info) static int _PyXI_ApplyErrorCode(_PyXI_errcode code, PyInterpreterState *interp) { + dlcontext_t ctx; + assert(!PyErr_Occurred()); switch (code) { case _PyXI_ERR_NO_ERROR: _Py_FALLTHROUGH; @@ -986,8 +983,7 @@ _PyXI_ApplyErrorCode(_PyXI_errcode code, PyInterpreterState *interp) break; case _PyXI_ERR_ALREADY_RUNNING: assert(interp != NULL); - assert(_PyInterpreterState_IsRunningMain(interp)); - _PyInterpreterState_FailIfRunningMain(interp); + _PyErr_SetInterpreterAlreadyRunning(); break; case _PyXI_ERR_MAIN_NS_FAILURE: PyErr_SetString(PyExc_InterpreterError, @@ -998,7 +994,10 @@ _PyXI_ApplyErrorCode(_PyXI_errcode code, PyInterpreterState *interp) "failed to apply namespace to __main__"); break; case _PyXI_ERR_NOT_SHAREABLE: - _set_xid_lookup_failure(interp, NULL, NULL); + if (_PyXIData_GetLookupContext(interp, &ctx) < 0) { + return -1; + } + _set_xid_lookup_failure(&ctx, NULL, NULL); break; default: #ifdef Py_DEBUG @@ -1060,7 +1059,11 @@ _PyXI_ApplyError(_PyXI_error *error) } else if (error->code == _PyXI_ERR_NOT_SHAREABLE) { // Propagate the exception directly. - _set_xid_lookup_failure(error->interp, NULL, error->uncaught.msg); + dlcontext_t ctx; + if (_PyXIData_GetLookupContext(error->interp, &ctx) < 0) { + return NULL; + } + _set_xid_lookup_failure(&ctx, NULL, error->uncaught.msg); } else { // Raise an exception corresponding to the code. 
@@ -1147,7 +1150,12 @@ _sharednsitem_set_value(_PyXI_namespace_item *item, PyObject *value) PyErr_NoMemory(); return -1; } - if (_PyObject_GetXIData(value, item->data) != 0) { + PyInterpreterState *interp = PyInterpreterState_Get(); + dlcontext_t ctx; + if (_PyXIData_GetLookupContext(interp, &ctx) < 0) { + return -1; + } + if (_PyObject_GetXIData(&ctx, value, item->data) != 0) { PyMem_RawFree(item->data); item->data = NULL; // The caller may want to propagate PyExc_NotShareableError @@ -1605,7 +1613,13 @@ _propagate_not_shareable_error(_PyXI_session *session) return; } PyInterpreterState *interp = PyInterpreterState_Get(); - if (PyErr_ExceptionMatches(_get_not_shareable_error_type(interp))) { + dlcontext_t ctx; + if (_PyXIData_GetLookupContext(interp, &ctx) < 0) { + PyErr_FormatUnraisable( + "Exception ignored while propagating not shareable error"); + return; + } + if (PyErr_ExceptionMatches(ctx.PyExc_NotShareableError)) { // We want to propagate the exception directly. session->_error_override = _PyXI_ERR_NOT_SHAREABLE; session->error_override = &session->_error_override; @@ -1773,18 +1787,87 @@ _PyXI_Exit(_PyXI_session *session) /* runtime lifecycle */ /*********************/ +int +_Py_xi_global_state_init(_PyXI_global_state_t *state) +{ + assert(state != NULL); + xid_lookup_init(&state->data_lookup); + return 0; +} + +void +_Py_xi_global_state_fini(_PyXI_global_state_t *state) +{ + assert(state != NULL); + xid_lookup_fini(&state->data_lookup); +} + +int +_Py_xi_state_init(_PyXI_state_t *state, PyInterpreterState *interp) +{ + assert(state != NULL); + assert(interp == NULL || state == _PyXI_GET_STATE(interp)); + + xid_lookup_init(&state->data_lookup); + + // Initialize exceptions. + if (interp != NULL) { + if (init_static_exctypes(&state->exceptions, interp) < 0) { + fini_heap_exctypes(&state->exceptions); + return -1; + } + } + if (init_heap_exctypes(&state->exceptions) < 0) { + return -1; + } + + return 0; +} + +void +_Py_xi_state_fini(_PyXI_state_t *state, PyInterpreterState *interp) +{ + assert(state != NULL); + assert(interp == NULL || state == _PyXI_GET_STATE(interp)); + + fini_heap_exctypes(&state->exceptions); + if (interp != NULL) { + fini_static_exctypes(&state->exceptions, interp); + } + + xid_lookup_fini(&state->data_lookup); +} + + PyStatus _PyXI_Init(PyInterpreterState *interp) { - // Initialize the XID lookup state (e.g. registry). if (_Py_IsMainInterpreter(interp)) { - xid_lookup_init(&_PyXI_GET_GLOBAL_STATE(interp)->data_lookup); + _PyXI_global_state_t *global_state = _PyXI_GET_GLOBAL_STATE(interp); + if (global_state == NULL) { + PyErr_PrintEx(0); + return _PyStatus_ERR( + "failed to get global cross-interpreter state"); + } + if (_Py_xi_global_state_init(global_state) < 0) { + PyErr_PrintEx(0); + return _PyStatus_ERR( + "failed to initialize global cross-interpreter state"); + } } - xid_lookup_init(&_PyXI_GET_STATE(interp)->data_lookup); - // Initialize exceptions (heap types). - if (_init_not_shareable_error_type(interp) < 0) { - return _PyStatus_ERR("failed to initialize NotShareableError"); + _PyXI_state_t *state = _PyXI_GET_STATE(interp); + if (state == NULL) { + PyErr_PrintEx(0); + return _PyStatus_ERR( + "failed to get interpreter's cross-interpreter state"); + } + // The static types were already initialized in _PyXI_InitTypes(), + // so we pass in NULL here to avoid initializing them again. 
+ if (_Py_xi_state_init(state, NULL) < 0) { + PyErr_PrintEx(0); + return _PyStatus_ERR( + "failed to initialize interpreter's cross-interpreter state"); } return _PyStatus_OK(); } @@ -1796,30 +1879,42 @@ _PyXI_Init(PyInterpreterState *interp) void _PyXI_Fini(PyInterpreterState *interp) { - // Finalize exceptions (heap types). - _fini_not_shareable_error_type(interp); + _PyXI_state_t *state = _PyXI_GET_STATE(interp); +#ifndef NDEBUG + if (state == NULL) { + PyErr_PrintEx(0); + return; + } +#endif + // The static types will be finalized soon in _PyXI_FiniTypes(), + // so we pass in NULL here to avoid finalizing them right now. + _Py_xi_state_fini(state, NULL); - // Finalize the XID lookup state (e.g. registry). - xid_lookup_fini(&_PyXI_GET_STATE(interp)->data_lookup); if (_Py_IsMainInterpreter(interp)) { - xid_lookup_fini(&_PyXI_GET_GLOBAL_STATE(interp)->data_lookup); + _PyXI_global_state_t *global_state = _PyXI_GET_GLOBAL_STATE(interp); + _Py_xi_global_state_fini(global_state); } } PyStatus _PyXI_InitTypes(PyInterpreterState *interp) { - if (init_exceptions(interp) < 0) { + if (init_static_exctypes(&_PyXI_GET_STATE(interp)->exceptions, interp) < 0) { PyErr_PrintEx(0); - return _PyStatus_ERR("failed to initialize an exception type"); + return _PyStatus_ERR( + "failed to initialize the cross-interpreter exception types"); } + // We would initialize heap types here too but that leads to ref leaks. + // Instead, we initialize them in _PyXI_Init(). return _PyStatus_OK(); } void _PyXI_FiniTypes(PyInterpreterState *interp) { - fini_exceptions(interp); + // We would finalize heap types here too but that leads to ref leaks. + // Instead, we finalize them in _PyXI_Fini(). fini_static_exctypes(&_PyXI_GET_STATE(interp)->exceptions, interp); } diff --git a/Python/crossinterp_data_lookup.h b/Python/crossinterp_data_lookup.h index 88c662a3df00d6..48e5d9762cd697 100644 --- a/Python/crossinterp_data_lookup.h +++ b/Python/crossinterp_data_lookup.h @@ -1,14 +1,15 @@ #include "pycore_weakref.h" // _PyWeakref_GET_REF() -typedef struct _xidregistry dlregistry_t; -typedef struct _xidregitem dlregitem_t; +typedef _PyXIData_lookup_context_t dlcontext_t; +typedef _PyXIData_registry_t dlregistry_t; +typedef _PyXIData_regitem_t dlregitem_t; // forward static void _xidregistry_init(dlregistry_t *); static void _xidregistry_fini(dlregistry_t *); -static xidatafunc _lookup_getdata_from_registry(PyInterpreterState *, PyObject *); +static xidatafunc _lookup_getdata_from_registry(dlcontext_t *, PyObject *); /* used in crossinterp.c */ @@ -26,22 +27,43 @@ xid_lookup_fini(_PyXIData_lookup_t *state) } static xidatafunc -lookup_getdata(PyInterpreterState *interp, PyObject *obj) +lookup_getdata(dlcontext_t *ctx, PyObject *obj) { /* Cross-interpreter objects are looked up by exact match on the class. We can reassess this policy when we move from a global registry to a tp_* slot.
*/ - return _lookup_getdata_from_registry(interp, obj); + return _lookup_getdata_from_registry(ctx, obj); } /* exported API */ +int +_PyXIData_GetLookupContext(PyInterpreterState *interp, + _PyXIData_lookup_context_t *res) +{ + _PyXI_global_state_t *global = _PyXI_GET_GLOBAL_STATE(interp); + if (global == NULL) { + assert(PyErr_Occurred()); + return -1; + } + _PyXI_state_t *local = _PyXI_GET_STATE(interp); + if (local == NULL) { + assert(PyErr_Occurred()); + return -1; + } + *res = (dlcontext_t){ + .global = &global->data_lookup, + .local = &local->data_lookup, + .PyExc_NotShareableError = local->exceptions.PyExc_NotShareableError, + }; + return 0; +} + xidatafunc -_PyXIData_Lookup(PyObject *obj) +_PyXIData_Lookup(_PyXIData_lookup_context_t *ctx, PyObject *obj) { - PyInterpreterState *interp = PyInterpreterState_Get(); - return lookup_getdata(interp, obj); + return lookup_getdata(ctx, obj); } @@ -110,25 +132,12 @@ _xidregistry_unlock(dlregistry_t *registry) /* accessing the registry */ static inline dlregistry_t * -_get_global_xidregistry(_PyRuntimeState *runtime) +_get_xidregistry_for_type(dlcontext_t *ctx, PyTypeObject *cls) { - return &runtime->xi.data_lookup.registry; -} - -static inline dlregistry_t * -_get_xidregistry(PyInterpreterState *interp) -{ - return &interp->xi.data_lookup.registry; -} - -static inline dlregistry_t * -_get_xidregistry_for_type(PyInterpreterState *interp, PyTypeObject *cls) -{ - dlregistry_t *registry = _get_global_xidregistry(interp->runtime); if (cls->tp_flags & Py_TPFLAGS_HEAPTYPE) { - registry = _get_xidregistry(interp); + return &ctx->local->registry; } - return registry; + return &ctx->global->registry; } static dlregitem_t* _xidregistry_remove_entry(dlregistry_t *, dlregitem_t *); @@ -160,11 +169,11 @@ _xidregistry_find_type(dlregistry_t *xidregistry, PyTypeObject *cls) } static xidatafunc -_lookup_getdata_from_registry(PyInterpreterState *interp, PyObject *obj) +_lookup_getdata_from_registry(dlcontext_t *ctx, PyObject *obj) { PyTypeObject *cls = Py_TYPE(obj); - dlregistry_t *xidregistry = _get_xidregistry_for_type(interp, cls); + dlregistry_t *xidregistry = _get_xidregistry_for_type(ctx, cls); _xidregistry_lock(xidregistry); dlregitem_t *matched = _xidregistry_find_type(xidregistry, cls); @@ -241,7 +250,8 @@ _xidregistry_clear(dlregistry_t *xidregistry) } int -_PyXIData_RegisterClass(PyTypeObject *cls, xidatafunc getdata) +_PyXIData_RegisterClass(_PyXIData_lookup_context_t *ctx, + PyTypeObject *cls, xidatafunc getdata) { if (!PyType_Check(cls)) { PyErr_Format(PyExc_ValueError, "only classes may be registered"); @@ -253,8 +263,7 @@ _PyXIData_RegisterClass(PyTypeObject *cls, xidatafunc getdata) } int res = 0; - PyInterpreterState *interp = _PyInterpreterState_GET(); - dlregistry_t *xidregistry = _get_xidregistry_for_type(interp, cls); + dlregistry_t *xidregistry = _get_xidregistry_for_type(ctx, cls); _xidregistry_lock(xidregistry); dlregitem_t *matched = _xidregistry_find_type(xidregistry, cls); @@ -272,11 +281,10 @@ _PyXIData_RegisterClass(PyTypeObject *cls, xidatafunc getdata) } int -_PyXIData_UnregisterClass(PyTypeObject *cls) +_PyXIData_UnregisterClass(_PyXIData_lookup_context_t *ctx, PyTypeObject *cls) { int res = 0; - PyInterpreterState *interp = _PyInterpreterState_GET(); - dlregistry_t *xidregistry = _get_xidregistry_for_type(interp, cls); + dlregistry_t *xidregistry = _get_xidregistry_for_type(ctx, cls); _xidregistry_lock(xidregistry); dlregitem_t *matched = _xidregistry_find_type(xidregistry, cls); @@ -500,6 +508,11 @@ _tuple_shared_free(void* 
data) static int _tuple_shared(PyThreadState *tstate, PyObject *obj, _PyXIData_t *data) { + dlcontext_t ctx; + if (_PyXIData_GetLookupContext(tstate->interp, &ctx) < 0) { + return -1; + } + Py_ssize_t len = PyTuple_GET_SIZE(obj); if (len < 0) { return -1; @@ -526,7 +539,7 @@ _tuple_shared(PyThreadState *tstate, PyObject *obj, _PyXIData_t *data) int res = -1; if (!_Py_EnterRecursiveCallTstate(tstate, " while sharing a tuple")) { - res = _PyObject_GetXIData(item, data); + res = _PyObject_GetXIData(&ctx, item, data); _Py_LeaveRecursiveCallTstate(tstate); } if (res < 0) { diff --git a/Python/crossinterp_exceptions.h b/Python/crossinterp_exceptions.h index 278511da615c75..3cb45d2067710b 100644 --- a/Python/crossinterp_exceptions.h +++ b/Python/crossinterp_exceptions.h @@ -25,71 +25,78 @@ static PyTypeObject _PyExc_InterpreterNotFoundError = { }; PyObject *PyExc_InterpreterNotFoundError = (PyObject *)&_PyExc_InterpreterNotFoundError; -/* NotShareableError extends ValueError */ - -static int -_init_not_shareable_error_type(PyInterpreterState *interp) -{ - const char *name = "interpreters.NotShareableError"; - PyObject *base = PyExc_ValueError; - PyObject *ns = NULL; - PyObject *exctype = PyErr_NewException(name, base, ns); - if (exctype == NULL) { - return -1; - } - - _PyInterpreterState_GetXIState(interp)->PyExc_NotShareableError = exctype; - return 0; -} - -static void -_fini_not_shareable_error_type(PyInterpreterState *interp) -{ - Py_CLEAR(_PyInterpreterState_GetXIState(interp)->PyExc_NotShareableError); -} - -static PyObject * -_get_not_shareable_error_type(PyInterpreterState *interp) -{ - assert(_PyInterpreterState_GetXIState(interp)->PyExc_NotShareableError != NULL); - return _PyInterpreterState_GetXIState(interp)->PyExc_NotShareableError; -} - /* lifecycle */ static int -init_exceptions(PyInterpreterState *interp) +init_static_exctypes(exceptions_t *state, PyInterpreterState *interp) { + assert(state == &_PyXI_GET_STATE(interp)->exceptions); PyTypeObject *base = (PyTypeObject *)PyExc_Exception; - // builtin static types - + // PyExc_InterpreterError _PyExc_InterpreterError.tp_base = base; _PyExc_InterpreterError.tp_traverse = base->tp_traverse; _PyExc_InterpreterError.tp_clear = base->tp_clear; if (_PyStaticType_InitBuiltin(interp, &_PyExc_InterpreterError) < 0) { - return -1; + goto error; } + state->PyExc_InterpreterError = (PyObject *)&_PyExc_InterpreterError; + // PyExc_InterpreterNotFoundError _PyExc_InterpreterNotFoundError.tp_traverse = base->tp_traverse; _PyExc_InterpreterNotFoundError.tp_clear = base->tp_clear; if (_PyStaticType_InitBuiltin(interp, &_PyExc_InterpreterNotFoundError) < 0) { - return -1; + goto error; } + state->PyExc_InterpreterNotFoundError = + (PyObject *)&_PyExc_InterpreterNotFoundError; - // heap types + return 0; - // We would call _init_not_shareable_error_type() here too, - // but that leads to ref leaks +error: + fini_static_exctypes(state, interp); + return -1; +} + +static void +fini_static_exctypes(exceptions_t *state, PyInterpreterState *interp) +{ + assert(state == &_PyXI_GET_STATE(interp)->exceptions); + if (state->PyExc_InterpreterNotFoundError != NULL) { + state->PyExc_InterpreterNotFoundError = NULL; + _PyStaticType_FiniBuiltin(interp, &_PyExc_InterpreterNotFoundError); + } + if (state->PyExc_InterpreterError != NULL) { + state->PyExc_InterpreterError = NULL; + _PyStaticType_FiniBuiltin(interp, &_PyExc_InterpreterError); + } +} + +static int +init_heap_exctypes(exceptions_t *state) +{ + PyObject *exctype; + + /* NotShareableError extends ValueError 
*/ + const char *name = "interpreters.NotShareableError"; + PyObject *base = PyExc_ValueError; + PyObject *ns = NULL; + exctype = PyErr_NewException(name, base, ns); + if (exctype == NULL) { + goto error; + } + state->PyExc_NotShareableError = exctype; return 0; + +error: + fini_heap_exctypes(state); + return -1; } static void -fini_exceptions(PyInterpreterState *interp) +fini_heap_exctypes(exceptions_t *state) { - // Likewise with _fini_not_shareable_error_type(). - _PyStaticType_FiniBuiltin(interp, &_PyExc_InterpreterNotFoundError); - _PyStaticType_FiniBuiltin(interp, &_PyExc_InterpreterError); + Py_CLEAR(state->PyExc_NotShareableError); } diff --git a/Python/executor_cases.c.h b/Python/executor_cases.c.h index 1d63402214db5d..5c7138a94214a8 100644 --- a/Python/executor_cases.c.h +++ b/Python/executor_cases.c.h @@ -508,7 +508,7 @@ JUMP_TO_JUMP_TARGET(); } STAT_INC(TO_BOOL, hit); - res = Py_SIZE(value_o) ? PyStackRef_True : PyStackRef_False; + res = PyList_GET_SIZE(value_o) ? PyStackRef_True : PyStackRef_False; PyStackRef_CLOSE(value); stack_pointer[-1] = res; break; @@ -831,7 +831,7 @@ assert(next_instr->op.code == STORE_FAST); next_oparg = next_instr->op.arg; #else - next_oparg = CURRENT_OPERAND(); + next_oparg = CURRENT_OPERAND0(); #endif _PyStackRef *target_local = &GETLOCAL(next_oparg); if (!PyStackRef_Is(*target_local, left)) { @@ -1864,33 +1864,35 @@ } case _GUARD_GLOBALS_VERSION: { - uint16_t version = (uint16_t)CURRENT_OPERAND(); + uint16_t version = (uint16_t)CURRENT_OPERAND0(); PyDictObject *dict = (PyDictObject *)GLOBALS(); if (!PyDict_CheckExact(dict)) { UOP_STAT_INC(uopcode, miss); JUMP_TO_JUMP_TARGET(); } - if (dict->ma_keys->dk_version != version) { + PyDictKeysObject *keys = FT_ATOMIC_LOAD_PTR_ACQUIRE(dict->ma_keys); + if (FT_ATOMIC_LOAD_UINT32_RELAXED(keys->dk_version) != version) { UOP_STAT_INC(uopcode, miss); JUMP_TO_JUMP_TARGET(); } - assert(DK_IS_UNICODE(dict->ma_keys)); + assert(DK_IS_UNICODE(keys)); break; } case _GUARD_GLOBALS_VERSION_PUSH_KEYS: { PyDictKeysObject *globals_keys; - uint16_t version = (uint16_t)CURRENT_OPERAND(); + uint16_t version = (uint16_t)CURRENT_OPERAND0(); PyDictObject *dict = (PyDictObject *)GLOBALS(); if (!PyDict_CheckExact(dict)) { UOP_STAT_INC(uopcode, miss); JUMP_TO_JUMP_TARGET(); } - if (dict->ma_keys->dk_version != version) { + PyDictKeysObject *keys = FT_ATOMIC_LOAD_PTR_ACQUIRE(dict->ma_keys); + if (FT_ATOMIC_LOAD_UINT32_RELAXED(keys->dk_version) != version) { UOP_STAT_INC(uopcode, miss); JUMP_TO_JUMP_TARGET(); } - globals_keys = dict->ma_keys; + globals_keys = keys; assert(DK_IS_UNICODE(globals_keys)); stack_pointer[0].bits = (uintptr_t)globals_keys; stack_pointer += 1; @@ -1900,17 +1902,18 @@ case _GUARD_BUILTINS_VERSION_PUSH_KEYS: { PyDictKeysObject *builtins_keys; - uint16_t version = (uint16_t)CURRENT_OPERAND(); + uint16_t version = (uint16_t)CURRENT_OPERAND0(); PyDictObject *dict = (PyDictObject *)BUILTINS(); if (!PyDict_CheckExact(dict)) { UOP_STAT_INC(uopcode, miss); JUMP_TO_JUMP_TARGET(); } - if (dict->ma_keys->dk_version != version) { + PyDictKeysObject *keys = FT_ATOMIC_LOAD_PTR_ACQUIRE(dict->ma_keys); + if (FT_ATOMIC_LOAD_UINT32_RELAXED(keys->dk_version) != version) { UOP_STAT_INC(uopcode, miss); JUMP_TO_JUMP_TARGET(); } - builtins_keys = dict->ma_keys; + builtins_keys = keys; assert(DK_IS_UNICODE(builtins_keys)); stack_pointer[0].bits = (uintptr_t)builtins_keys; stack_pointer += 1; @@ -1924,19 +1927,27 @@ _PyStackRef null = PyStackRef_NULL; oparg = CURRENT_OPARG(); globals_keys = (PyDictKeysObject 
*)stack_pointer[-1].bits; - uint16_t index = (uint16_t)CURRENT_OPERAND(); + uint16_t index = (uint16_t)CURRENT_OPERAND0(); PyDictUnicodeEntry *entries = DK_UNICODE_ENTRIES(globals_keys); - PyObject *res_o = entries[index].me_value; + PyObject *res_o = FT_ATOMIC_LOAD_PTR_RELAXED(entries[index].me_value); stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); if (res_o == NULL) { UOP_STAT_INC(uopcode, miss); JUMP_TO_JUMP_TARGET(); } + #if Py_GIL_DISABLED + int increfed = _Py_TryIncrefCompareStackRef(&entries[index].me_value, res_o, &res); + if (!increfed) { + UOP_STAT_INC(uopcode, miss); + JUMP_TO_JUMP_TARGET(); + } + #else Py_INCREF(res_o); + res = PyStackRef_FromPyObjectSteal(res_o); + #endif STAT_INC(LOAD_GLOBAL, hit); null = PyStackRef_NULL; - res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[0] = res; if (oparg & 1) stack_pointer[1] = null; stack_pointer += 1 + (oparg & 1); @@ -1950,19 +1961,27 @@ _PyStackRef null = PyStackRef_NULL; oparg = CURRENT_OPARG(); builtins_keys = (PyDictKeysObject *)stack_pointer[-1].bits; - uint16_t index = (uint16_t)CURRENT_OPERAND(); + uint16_t index = (uint16_t)CURRENT_OPERAND0(); PyDictUnicodeEntry *entries = DK_UNICODE_ENTRIES(builtins_keys); - PyObject *res_o = entries[index].me_value; + PyObject *res_o = FT_ATOMIC_LOAD_PTR_RELAXED(entries[index].me_value); stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); if (res_o == NULL) { UOP_STAT_INC(uopcode, miss); JUMP_TO_JUMP_TARGET(); } + #if Py_GIL_DISABLED + int increfed = _Py_TryIncrefCompareStackRef(&entries[index].me_value, res_o, &res); + if (!increfed) { + UOP_STAT_INC(uopcode, miss); + JUMP_TO_JUMP_TARGET(); + } + #else Py_INCREF(res_o); + res = PyStackRef_FromPyObjectSteal(res_o); + #endif STAT_INC(LOAD_GLOBAL, hit); null = PyStackRef_NULL; - res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[0] = res; if (oparg & 1) stack_pointer[1] = null; stack_pointer += 1 + (oparg & 1); @@ -2523,7 +2542,7 @@ case _GUARD_TYPE_VERSION: { _PyStackRef owner; owner = stack_pointer[-1]; - uint32_t type_version = (uint32_t)CURRENT_OPERAND(); + uint32_t type_version = (uint32_t)CURRENT_OPERAND0(); PyTypeObject *tp = Py_TYPE(PyStackRef_AsPyObjectBorrow(owner)); assert(type_version != 0); if (tp->tp_version_tag != type_version) { @@ -2552,7 +2571,7 @@ _PyStackRef null = PyStackRef_NULL; (void)null; owner = stack_pointer[-1]; - uint16_t offset = (uint16_t)CURRENT_OPERAND(); + uint16_t offset = (uint16_t)CURRENT_OPERAND0(); PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner); PyObject **value_ptr = (PyObject**)(((char *)owner_o) + offset); PyObject *attr_o = *value_ptr; @@ -2575,7 +2594,7 @@ _PyStackRef null = PyStackRef_NULL; (void)null; owner = stack_pointer[-1]; - uint16_t offset = (uint16_t)CURRENT_OPERAND(); + uint16_t offset = (uint16_t)CURRENT_OPERAND0(); PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner); PyObject **value_ptr = (PyObject**)(((char *)owner_o) + offset); PyObject *attr_o = *value_ptr; @@ -2600,9 +2619,9 @@ case _CHECK_ATTR_MODULE: { _PyStackRef owner; owner = stack_pointer[-1]; - uint32_t dict_version = (uint32_t)CURRENT_OPERAND(); + uint32_t dict_version = (uint32_t)CURRENT_OPERAND0(); PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner); - if (!PyModule_CheckExact(owner_o)) { + if (Py_TYPE(owner_o)->tp_getattro != PyModule_Type.tp_getattro) { UOP_STAT_INC(uopcode, miss); JUMP_TO_JUMP_TARGET(); } @@ -2621,7 +2640,7 @@ _PyStackRef null = PyStackRef_NULL; oparg = CURRENT_OPARG(); owner = stack_pointer[-1]; - uint16_t index = (uint16_t)CURRENT_OPERAND(); + uint16_t index = 
(uint16_t)CURRENT_OPERAND0(); PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner); PyDictObject *dict = (PyDictObject *)((PyModuleObject *)owner_o)->md_dict; assert(dict->ma_keys->dk_kind == DICT_KEYS_UNICODE); @@ -2664,7 +2683,7 @@ _PyStackRef null = PyStackRef_NULL; oparg = CURRENT_OPARG(); owner = stack_pointer[-1]; - uint16_t hint = (uint16_t)CURRENT_OPERAND(); + uint16_t hint = (uint16_t)CURRENT_OPERAND0(); PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner); PyObject *attr_o; PyDictObject *dict = _PyObject_GetManagedDict(owner_o); @@ -2705,7 +2724,7 @@ _PyStackRef null = PyStackRef_NULL; (void)null; owner = stack_pointer[-1]; - uint16_t index = (uint16_t)CURRENT_OPERAND(); + uint16_t index = (uint16_t)CURRENT_OPERAND0(); PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner); char *addr = (char *)owner_o + index; PyObject *attr_o = *(PyObject **)addr; @@ -2727,7 +2746,7 @@ _PyStackRef null = PyStackRef_NULL; (void)null; owner = stack_pointer[-1]; - uint16_t index = (uint16_t)CURRENT_OPERAND(); + uint16_t index = (uint16_t)CURRENT_OPERAND0(); PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner); char *addr = (char *)owner_o + index; PyObject *attr_o = *(PyObject **)addr; @@ -2751,7 +2770,7 @@ case _CHECK_ATTR_CLASS: { _PyStackRef owner; owner = stack_pointer[-1]; - uint32_t type_version = (uint32_t)CURRENT_OPERAND(); + uint32_t type_version = (uint32_t)CURRENT_OPERAND0(); PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner); if (!PyType_Check(owner_o)) { UOP_STAT_INC(uopcode, miss); @@ -2771,7 +2790,7 @@ _PyStackRef null = PyStackRef_NULL; (void)null; owner = stack_pointer[-1]; - PyObject *descr = (PyObject *)CURRENT_OPERAND(); + PyObject *descr = (PyObject *)CURRENT_OPERAND0(); STAT_INC(LOAD_ATTR, hit); assert(descr != NULL); attr = PyStackRef_FromPyObjectNew(descr); @@ -2787,7 +2806,7 @@ _PyStackRef null = PyStackRef_NULL; (void)null; owner = stack_pointer[-1]; - PyObject *descr = (PyObject *)CURRENT_OPERAND(); + PyObject *descr = (PyObject *)CURRENT_OPERAND0(); STAT_INC(LOAD_ATTR, hit); assert(descr != NULL); attr = PyStackRef_FromPyObjectNew(descr); @@ -2807,7 +2826,7 @@ _PyInterpreterFrame *new_frame; oparg = CURRENT_OPARG(); owner = stack_pointer[-1]; - PyObject *fget = (PyObject *)CURRENT_OPERAND(); + PyObject *fget = (PyObject *)CURRENT_OPERAND0(); assert((oparg & 1) == 0); assert(Py_IS_TYPE(fget, &PyFunction_Type)); PyFunctionObject *f = (PyFunctionObject *)fget; @@ -2859,7 +2878,7 @@ _PyStackRef value; owner = stack_pointer[-1]; value = stack_pointer[-2]; - uint16_t offset = (uint16_t)CURRENT_OPERAND(); + uint16_t offset = (uint16_t)CURRENT_OPERAND0(); PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner); STAT_INC(STORE_ATTR, hit); assert(_PyObject_GetManagedDict(owner_o) == NULL); @@ -2886,7 +2905,7 @@ oparg = CURRENT_OPARG(); owner = stack_pointer[-1]; value = stack_pointer[-2]; - uint16_t hint = (uint16_t)CURRENT_OPERAND(); + uint16_t hint = (uint16_t)CURRENT_OPERAND0(); PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner); assert(Py_TYPE(owner_o)->tp_flags & Py_TPFLAGS_MANAGED_DICT); PyDictObject *dict = _PyObject_GetManagedDict(owner_o); @@ -2914,10 +2933,6 @@ UOP_STAT_INC(uopcode, miss); JUMP_TO_JUMP_TARGET(); } - /* Ensure dict is GC tracked if it needs to be */ - if (!_PyObject_GC_IS_TRACKED(dict) && _PyObject_GC_MAY_BE_TRACKED(PyStackRef_AsPyObjectBorrow(value))) { - _PyObject_GC_TRACK(dict); - } _PyFrame_SetStackPointer(frame, stack_pointer); _PyDict_NotifyEvent(tstate->interp, PyDict_EVENT_MODIFIED, dict, name, PyStackRef_AsPyObjectBorrow(value)); 
stack_pointer = _PyFrame_GetStackPointer(frame); @@ -2937,7 +2952,7 @@ _PyStackRef value; owner = stack_pointer[-1]; value = stack_pointer[-2]; - uint16_t index = (uint16_t)CURRENT_OPERAND(); + uint16_t index = (uint16_t)CURRENT_OPERAND0(); PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner); char *addr = (char *)owner_o + index; STAT_INC(STORE_ATTR, hit); @@ -3780,7 +3795,7 @@ case _GUARD_KEYS_VERSION: { _PyStackRef owner; owner = stack_pointer[-1]; - uint32_t keys_version = (uint32_t)CURRENT_OPERAND(); + uint32_t keys_version = (uint32_t)CURRENT_OPERAND0(); PyTypeObject *owner_cls = Py_TYPE(PyStackRef_AsPyObjectBorrow(owner)); PyHeapTypeObject *owner_heap_type = (PyHeapTypeObject *)owner_cls; if (owner_heap_type->ht_cached_keys->dk_version != keys_version) { @@ -3796,7 +3811,7 @@ _PyStackRef self = PyStackRef_NULL; oparg = CURRENT_OPARG(); owner = stack_pointer[-1]; - PyObject *descr = (PyObject *)CURRENT_OPERAND(); + PyObject *descr = (PyObject *)CURRENT_OPERAND0(); assert(oparg & 1); /* Cached method object */ STAT_INC(LOAD_ATTR, hit); @@ -3817,7 +3832,7 @@ _PyStackRef self = PyStackRef_NULL; oparg = CURRENT_OPARG(); owner = stack_pointer[-1]; - PyObject *descr = (PyObject *)CURRENT_OPERAND(); + PyObject *descr = (PyObject *)CURRENT_OPERAND0(); assert(oparg & 1); assert(Py_TYPE(PyStackRef_AsPyObjectBorrow(owner))->tp_dictoffset == 0); STAT_INC(LOAD_ATTR, hit); @@ -3837,7 +3852,7 @@ _PyStackRef attr; oparg = CURRENT_OPARG(); owner = stack_pointer[-1]; - PyObject *descr = (PyObject *)CURRENT_OPERAND(); + PyObject *descr = (PyObject *)CURRENT_OPERAND0(); assert((oparg & 1) == 0); STAT_INC(LOAD_ATTR, hit); assert(descr != NULL); @@ -3852,7 +3867,7 @@ _PyStackRef attr; oparg = CURRENT_OPARG(); owner = stack_pointer[-1]; - PyObject *descr = (PyObject *)CURRENT_OPERAND(); + PyObject *descr = (PyObject *)CURRENT_OPERAND0(); assert((oparg & 1) == 0); assert(Py_TYPE(PyStackRef_AsPyObjectBorrow(owner))->tp_dictoffset == 0); STAT_INC(LOAD_ATTR, hit); @@ -3866,7 +3881,7 @@ case _CHECK_ATTR_METHOD_LAZY_DICT: { _PyStackRef owner; owner = stack_pointer[-1]; - uint16_t dictoffset = (uint16_t)CURRENT_OPERAND(); + uint16_t dictoffset = (uint16_t)CURRENT_OPERAND0(); char *ptr = ((char *)PyStackRef_AsPyObjectBorrow(owner)) + MANAGED_DICT_OFFSET + dictoffset; PyObject *dict = *(PyObject **)ptr; /* This object has a __dict__, just not yet created */ @@ -3883,7 +3898,7 @@ _PyStackRef self = PyStackRef_NULL; oparg = CURRENT_OPARG(); owner = stack_pointer[-1]; - PyObject *descr = (PyObject *)CURRENT_OPERAND(); + PyObject *descr = (PyObject *)CURRENT_OPERAND0(); assert(oparg & 1); STAT_INC(LOAD_ATTR, hit); assert(descr != NULL); @@ -3967,7 +3982,7 @@ _PyStackRef *callable; oparg = CURRENT_OPARG(); callable = &stack_pointer[-2 - oparg]; - uint32_t func_version = (uint32_t)CURRENT_OPERAND(); + uint32_t func_version = (uint32_t)CURRENT_OPERAND0(); PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]); if (!PyFunction_Check(callable_o)) { UOP_STAT_INC(uopcode, miss); @@ -3981,13 +3996,25 @@ break; } + case _CHECK_FUNCTION_VERSION_INLINE: { + uint32_t func_version = (uint32_t)CURRENT_OPERAND0(); + PyObject *callable_o = (PyObject *)CURRENT_OPERAND1(); + assert(PyFunction_Check(callable_o)); + PyFunctionObject *func = (PyFunctionObject *)callable_o; + if (func->func_version != func_version) { + UOP_STAT_INC(uopcode, miss); + JUMP_TO_JUMP_TARGET(); + } + break; + } + case _CHECK_METHOD_VERSION: { _PyStackRef *null; _PyStackRef *callable; oparg = CURRENT_OPARG(); null = &stack_pointer[-1 - oparg]; callable 
= &stack_pointer[-2 - oparg]; - uint32_t func_version = (uint32_t)CURRENT_OPERAND(); + uint32_t func_version = (uint32_t)CURRENT_OPERAND0(); PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]); if (Py_TYPE(callable_o) != &PyMethod_Type) { UOP_STAT_INC(uopcode, miss); @@ -4443,7 +4470,7 @@ callable = &stack_pointer[-2 - oparg]; init = &stack_pointer[-2 - oparg]; self = &stack_pointer[-1 - oparg]; - uint32_t type_version = (uint32_t)CURRENT_OPERAND(); + uint32_t type_version = (uint32_t)CURRENT_OPERAND0(); PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]); if (!PyStackRef_IsNull(null[0])) { UOP_STAT_INC(uopcode, miss); @@ -5201,7 +5228,7 @@ _PyStackRef *callable; oparg = CURRENT_OPARG(); callable = &stack_pointer[-3 - oparg]; - uint32_t func_version = (uint32_t)CURRENT_OPERAND(); + uint32_t func_version = (uint32_t)CURRENT_OPERAND0(); PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]); if (!PyFunction_Check(callable_o)) { UOP_STAT_INC(uopcode, miss); @@ -5221,7 +5248,7 @@ oparg = CURRENT_OPARG(); null = &stack_pointer[-2 - oparg]; callable = &stack_pointer[-3 - oparg]; - uint32_t func_version = (uint32_t)CURRENT_OPERAND(); + uint32_t func_version = (uint32_t)CURRENT_OPERAND0(); PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]); if (Py_TYPE(callable_o) != &PyMethod_Type) { UOP_STAT_INC(uopcode, miss); @@ -5659,13 +5686,13 @@ } case _SET_IP: { - PyObject *instr_ptr = (PyObject *)CURRENT_OPERAND(); + PyObject *instr_ptr = (PyObject *)CURRENT_OPERAND0(); frame->instr_ptr = (_Py_CODEUNIT *)instr_ptr; break; } case _CHECK_STACK_SPACE_OPERAND: { - uint32_t framesize = (uint32_t)CURRENT_OPERAND(); + uint32_t framesize = (uint32_t)CURRENT_OPERAND0(); assert(framesize <= INT_MAX); if (!_PyThreadState_HasStackSpace(tstate, framesize)) { UOP_STAT_INC(uopcode, miss); @@ -5690,7 +5717,7 @@ } case _EXIT_TRACE: { - PyObject *exit_p = (PyObject *)CURRENT_OPERAND(); + PyObject *exit_p = (PyObject *)CURRENT_OPERAND0(); _PyExitData *exit = (_PyExitData *)exit_p; PyCodeObject *code = _PyFrame_GetCode(frame); _PyFrame_SetStackPointer(frame, stack_pointer); @@ -5738,6 +5765,9 @@ tstate->previous_executor = (PyObject *)current_executor; GOTO_TIER_ONE(target); } + else { + exit->temperature = initial_temperature_backoff_counter(); + } } exit->executor = executor; } @@ -5757,7 +5787,7 @@ case _LOAD_CONST_INLINE: { _PyStackRef value; - PyObject *ptr = (PyObject *)CURRENT_OPERAND(); + PyObject *ptr = (PyObject *)CURRENT_OPERAND0(); value = PyStackRef_FromPyObjectNew(ptr); stack_pointer[0] = value; stack_pointer += 1; @@ -5767,7 +5797,7 @@ case _LOAD_CONST_INLINE_BORROW: { _PyStackRef value; - PyObject *ptr = (PyObject *)CURRENT_OPERAND(); + PyObject *ptr = (PyObject *)CURRENT_OPERAND0(); value = PyStackRef_FromPyObjectImmortal(ptr); stack_pointer[0] = value; stack_pointer += 1; @@ -5779,7 +5809,7 @@ _PyStackRef pop; _PyStackRef value; pop = stack_pointer[-1]; - PyObject *ptr = (PyObject *)CURRENT_OPERAND(); + PyObject *ptr = (PyObject *)CURRENT_OPERAND0(); PyStackRef_CLOSE(pop); value = PyStackRef_FromPyObjectImmortal(ptr); stack_pointer[-1] = value; @@ -5789,7 +5819,7 @@ case _LOAD_CONST_INLINE_WITH_NULL: { _PyStackRef value; _PyStackRef null; - PyObject *ptr = (PyObject *)CURRENT_OPERAND(); + PyObject *ptr = (PyObject *)CURRENT_OPERAND0(); value = PyStackRef_FromPyObjectNew(ptr); null = PyStackRef_NULL; stack_pointer[0] = value; @@ -5802,7 +5832,7 @@ case _LOAD_CONST_INLINE_BORROW_WITH_NULL: { _PyStackRef value; _PyStackRef null; - PyObject *ptr = (PyObject 
*)CURRENT_OPERAND(); + PyObject *ptr = (PyObject *)CURRENT_OPERAND0(); value = PyStackRef_FromPyObjectImmortal(ptr); null = PyStackRef_NULL; stack_pointer[0] = value; @@ -5813,7 +5843,7 @@ } case _CHECK_FUNCTION: { - uint32_t func_version = (uint32_t)CURRENT_OPERAND(); + uint32_t func_version = (uint32_t)CURRENT_OPERAND0(); assert(PyStackRef_FunctionCheck(frame->f_funcobj)); PyFunctionObject *func = (PyFunctionObject *)PyStackRef_AsPyObjectBorrow(frame->f_funcobj); if (func->func_version != func_version) { @@ -5827,7 +5857,7 @@ _PyStackRef res; _PyStackRef null = PyStackRef_NULL; oparg = CURRENT_OPARG(); - uint16_t index = (uint16_t)CURRENT_OPERAND(); + uint16_t index = (uint16_t)CURRENT_OPERAND0(); PyDictObject *dict = (PyDictObject *)GLOBALS(); PyDictUnicodeEntry *entries = DK_UNICODE_ENTRIES(dict->ma_keys); PyObject *res_o = entries[index].me_value; @@ -5849,7 +5879,7 @@ _PyStackRef res; _PyStackRef null = PyStackRef_NULL; oparg = CURRENT_OPARG(); - uint16_t index = (uint16_t)CURRENT_OPERAND(); + uint16_t index = (uint16_t)CURRENT_OPERAND0(); PyDictObject *dict = (PyDictObject *)BUILTINS(); PyDictUnicodeEntry *entries = DK_UNICODE_ENTRIES(dict->ma_keys); PyObject *res_o = entries[index].me_value; @@ -5878,7 +5908,7 @@ } case _DYNAMIC_EXIT: { - PyObject *exit_p = (PyObject *)CURRENT_OPERAND(); + PyObject *exit_p = (PyObject *)CURRENT_OPERAND0(); tstate->previous_executor = (PyObject *)current_executor; _PyExitData *exit = (_PyExitData *)exit_p; _Py_CODEUNIT *target = frame->instr_ptr; @@ -5925,7 +5955,7 @@ } case _START_EXECUTOR: { - PyObject *executor = (PyObject *)CURRENT_OPERAND(); + PyObject *executor = (PyObject *)CURRENT_OPERAND0(); Py_DECREF(tstate->previous_executor); tstate->previous_executor = NULL; #ifndef _Py_JIT @@ -5951,7 +5981,7 @@ } case _CHECK_VALIDITY_AND_SET_IP: { - PyObject *instr_ptr = (PyObject *)CURRENT_OPERAND(); + PyObject *instr_ptr = (PyObject *)CURRENT_OPERAND0(); if (!current_executor->vm_data.valid) { UOP_STAT_INC(uopcode, miss); JUMP_TO_JUMP_TARGET(); @@ -5967,7 +5997,7 @@ case _ERROR_POP_N: { oparg = CURRENT_OPARG(); - uint32_t target = (uint32_t)CURRENT_OPERAND(); + uint32_t target = (uint32_t)CURRENT_OPERAND0(); stack_pointer += -oparg; assert(WITHIN_STACK_BOUNDS()); _PyFrame_SetStackPointer(frame, stack_pointer); diff --git a/Python/fileutils.c b/Python/fileutils.c index c9ae1b3f54e167..9529b14d377c60 100644 --- a/Python/fileutils.c +++ b/Python/fileutils.c @@ -2506,37 +2506,38 @@ _Py_normpath_and_size(wchar_t *path, Py_ssize_t size, Py_ssize_t *normsize) #endif #define SEP_OR_END(x) (IS_SEP(x) || IS_END(x)) - if (p1[0] == L'.' && IS_SEP(&p1[1])) { - // Skip leading '.\' - path = &path[2]; - while (IS_SEP(path)) { - path++; - } - p1 = p2 = minP2 = path; - lastC = SEP; - } - else { - Py_ssize_t drvsize, rootsize; - _Py_skiproot(path, size, &drvsize, &rootsize); - if (drvsize || rootsize) { - // Skip past root and update minP2 - p1 = &path[drvsize + rootsize]; + Py_ssize_t drvsize, rootsize; + _Py_skiproot(path, size, &drvsize, &rootsize); + if (drvsize || rootsize) { + // Skip past root and update minP2 + p1 = &path[drvsize + rootsize]; #ifndef ALTSEP - p2 = p1; + p2 = p1; #else - for (; p2 < p1; ++p2) { - if (*p2 == ALTSEP) { - *p2 = SEP; - } + for (; p2 < p1; ++p2) { + if (*p2 == ALTSEP) { + *p2 = SEP; } + } #endif - minP2 = p2 - 1; - lastC = *minP2; + minP2 = p2 - 1; + lastC = *minP2; #ifdef MS_WINDOWS - if (lastC != SEP) { - minP2++; - } + if (lastC != SEP) { + minP2++; + } +#endif + } + if (p1[0] == L'.' 
&& SEP_OR_END(&p1[1])) { + // Skip leading '.\' + lastC = *++p1; +#ifdef ALTSEP + if (lastC == ALTSEP) { + lastC = SEP; + } #endif + while (IS_SEP(p1)) { + p1++; } } diff --git a/Python/gc.c b/Python/gc.c index 028657eb8999c1..63adecf0e05114 100644 --- a/Python/gc.c +++ b/Python/gc.c @@ -5,7 +5,7 @@ #include "Python.h" #include "pycore_ceval.h" // _Py_set_eval_breaker_bit() #include "pycore_context.h" -#include "pycore_dict.h" // _PyDict_MaybeUntrack() +#include "pycore_dict.h" // _PyInlineValuesSize() #include "pycore_initconfig.h" #include "pycore_interp.h" // PyInterpreterState.gc #include "pycore_object.h" @@ -23,6 +23,10 @@ typedef struct _gc_runtime_state GCState; # define GC_DEBUG #endif +// Define this when debugging the GC +// #define GC_EXTRA_DEBUG + + #define GC_NEXT _PyGCHead_NEXT #define GC_PREV _PyGCHead_PREV @@ -421,6 +425,11 @@ validate_list(PyGC_Head *head, enum flagstates flags) assert(prev == GC_PREV(head)); } +#else +#define validate_list(x, y) do{}while(0) +#endif + +#ifdef GC_EXTRA_DEBUG static void validate_old(GCState *gcstate) { @@ -464,7 +473,6 @@ gc_list_validate_space(PyGC_Head *head, int space) { } #else -#define validate_list(x, y) do{}while(0) #define validate_old(g) do{}while(0) #define validate_consistent_old_space(l) do{}while(0) #define gc_list_validate_space(l, s) do{}while(0) @@ -739,21 +747,6 @@ untrack_tuples(PyGC_Head *head) } } -/* Try to untrack all currently tracked dictionaries */ -static void -untrack_dicts(PyGC_Head *head) -{ - PyGC_Head *next, *gc = GC_NEXT(head); - while (gc != head) { - PyObject *op = FROM_GC(gc); - next = GC_NEXT(gc); - if (PyDict_CheckExact(op)) { - _PyDict_MaybeUntrack(op); - } - gc = next; - } -} - /* Return true if object has a pre-PEP 442 finalization method. */ static int has_legacy_finalizer(PyObject *op) @@ -1250,15 +1243,10 @@ handle_resurrected_objects(PyGC_Head *unreachable, PyGC_Head* still_unreachable, gc_list_merge(resurrected, old_generation); } - -#define UNTRACK_TUPLES 1 -#define UNTRACK_DICTS 2 - static void gc_collect_region(PyThreadState *tstate, PyGC_Head *from, PyGC_Head *to, - int untrack, struct gc_collection_stats *stats); static inline Py_ssize_t @@ -1320,7 +1308,7 @@ gc_collect_young(PyThreadState *tstate, PyGC_Head survivors; gc_list_init(&survivors); - gc_collect_region(tstate, young, &survivors, UNTRACK_TUPLES, stats); + gc_collect_region(tstate, young, &survivors, stats); Py_ssize_t survivor_count = 0; if (gcstate->visited_space) { /* objects in visited space have bit set, so we set it here */ @@ -1386,7 +1374,6 @@ visit_add_to_container(PyObject *op, void *arg) static uintptr_t expand_region_transitively_reachable(PyGC_Head *container, PyGC_Head *gc, GCState *gcstate) { - validate_list(container, collecting_clear_unreachable_clear); struct container_and_flag arg = { .container = container, .visited_space = gcstate->visited_space, @@ -1460,10 +1447,11 @@ gc_collect_increment(PyThreadState *tstate, struct gc_collection_stats *stats) gc_set_old_space(gc, gcstate->visited_space); increment_size += expand_region_transitively_reachable(&increment, gc, gcstate); } + validate_list(&increment, collecting_clear_unreachable_clear); gc_list_validate_space(&increment, gcstate->visited_space); PyGC_Head survivors; gc_list_init(&survivors); - gc_collect_region(tstate, &increment, &survivors, UNTRACK_TUPLES, stats); + gc_collect_region(tstate, &increment, &survivors, stats); gc_list_validate_space(&survivors, gcstate->visited_space); gc_list_merge(&survivors, visited); assert(gc_list_is_empty(&increment)); 
@@ -1496,7 +1484,6 @@ gc_collect_full(PyThreadState *tstate, gc_list_merge(pending, visited); gc_collect_region(tstate, visited, visited, - UNTRACK_TUPLES | UNTRACK_DICTS, stats); gcstate->young.count = 0; gcstate->old[0].count = 0; @@ -1514,7 +1501,6 @@ static void gc_collect_region(PyThreadState *tstate, PyGC_Head *from, PyGC_Head *to, - int untrack, struct gc_collection_stats *stats) { PyGC_Head unreachable; /* non-problematic unreachable trash */ @@ -1528,12 +1514,7 @@ gc_collect_region(PyThreadState *tstate, gc_list_init(&unreachable); deduce_unreachable(from, &unreachable); validate_consistent_old_space(from); - if (untrack & UNTRACK_TUPLES) { - untrack_tuples(from); - } - if (untrack & UNTRACK_DICTS) { - untrack_dicts(from); - } + untrack_tuples(from); validate_consistent_old_space(to); if (from != to) { gc_list_merge(from, to); diff --git a/Python/gc_free_threading.c b/Python/gc_free_threading.c index 986d80c18d36c8..f7f44407494e51 100644 --- a/Python/gc_free_threading.c +++ b/Python/gc_free_threading.c @@ -3,7 +3,7 @@ #include "pycore_brc.h" // struct _brc_thread_state #include "pycore_ceval.h" // _Py_set_eval_breaker_bit() #include "pycore_context.h" -#include "pycore_dict.h" // _PyDict_MaybeUntrack() +#include "pycore_dict.h" // _PyInlineValuesSize() #include "pycore_freelist.h" // _PyObject_ClearFreeLists() #include "pycore_initconfig.h" #include "pycore_interp.h" // PyInterpreterState.gc @@ -113,6 +113,12 @@ worklist_remove(struct worklist_iter *iter) iter->next = iter->ptr; } +static inline int +gc_is_frozen(PyObject *op) +{ + return (op->ob_gc_bits & _PyGC_BITS_FROZEN) != 0; +} + static inline int gc_is_unreachable(PyObject *op) { @@ -277,7 +283,7 @@ op_from_block(void *block, void *arg, bool include_frozen) if (!_PyObject_GC_IS_TRACKED(op)) { return NULL; } - if (!include_frozen && (op->ob_gc_bits & _PyGC_BITS_FROZEN) != 0) { + if (!include_frozen && gc_is_frozen(op)) { return NULL; } return op; @@ -298,7 +304,7 @@ gc_visit_heaps_lock_held(PyInterpreterState *interp, mi_block_visit_fun *visitor Py_ssize_t offset_pre = offset_base + 2 * sizeof(PyObject*); // visit each thread's heaps for GC objects - for (PyThreadState *p = interp->threads.head; p != NULL; p = p->next) { + _Py_FOR_EACH_TSTATE_UNLOCKED(interp, p) { struct _mimalloc_thread_state *m = &((_PyThreadStateImpl *)p)->mimalloc; if (!_Py_atomic_load_int(&m->initialized)) { // The thread may not have called tstate_mimalloc_bind() yet. @@ -358,7 +364,7 @@ gc_visit_stackref(_PyStackRef stackref) // being dead already. 
if (PyStackRef_IsDeferred(stackref) && !PyStackRef_IsNull(stackref)) { PyObject *obj = PyStackRef_AsPyObjectBorrow(stackref); - if (_PyObject_GC_IS_TRACKED(obj)) { + if (_PyObject_GC_IS_TRACKED(obj) && !gc_is_frozen(obj)) { gc_add_refs(obj, 1); } } @@ -368,8 +374,7 @@ gc_visit_stackref(_PyStackRef stackref) static void gc_visit_thread_stacks(PyInterpreterState *interp) { - HEAD_LOCK(&_PyRuntime); - for (PyThreadState *p = interp->threads.head; p != NULL; p = p->next) { + _Py_FOR_EACH_TSTATE_BEGIN(interp, p) { for (_PyInterpreterFrame *f = p->current_frame; f != NULL; f = f->previous) { PyObject *executable = PyStackRef_AsPyObjectBorrow(f->f_executable); if (executable == NULL || !PyCode_Check(executable)) { @@ -384,7 +389,24 @@ gc_visit_thread_stacks(PyInterpreterState *interp) } } } - HEAD_UNLOCK(&_PyRuntime); + _Py_FOR_EACH_TSTATE_END(interp); +} + +static void +queue_untracked_obj_decref(PyObject *op, struct collection_state *state) +{ + if (!_PyObject_GC_IS_TRACKED(op)) { + // GC objects with zero refcount are handled subsequently by the + // GC as if they were cyclic trash, but we have to handle dead + // non-GC objects here. Add one to the refcount so that we can + // decref and deallocate the object once we start the world again. + op->ob_ref_shared += (1 << _Py_REF_SHARED_SHIFT); +#ifdef Py_REF_DEBUG + _Py_IncRefTotal(_PyThreadState_GET()); +#endif + worklist_push(&state->objs_to_decref, op); + } + } static void @@ -398,22 +420,20 @@ merge_queued_objects(_PyThreadStateImpl *tstate, struct collection_state *state) // Subtract one when merging because the queue had a reference. Py_ssize_t refcount = merge_refcount(op, -1); - if (!_PyObject_GC_IS_TRACKED(op) && refcount == 0) { - // GC objects with zero refcount are handled subsequently by the - // GC as if they were cyclic trash, but we have to handle dead - // non-GC objects here. Add one to the refcount so that we can - // decref and deallocate the object once we start the world again. - op->ob_ref_shared += (1 << _Py_REF_SHARED_SHIFT); -#ifdef Py_REF_DEBUG - _Py_IncRefTotal(_PyThreadState_GET()); -#endif - worklist_push(&state->objs_to_decref, op); + if (refcount == 0) { + queue_untracked_obj_decref(op, state); } } } static void -process_delayed_frees(PyInterpreterState *interp) +queue_freed_object(PyObject *obj, void *arg) +{ + queue_untracked_obj_decref(obj, arg); +} + +static void +process_delayed_frees(PyInterpreterState *interp, struct collection_state *state) { // While we are in a "stop the world" pause, we can observe the latest // write sequence by advancing the write sequence immediately. @@ -423,23 +443,25 @@ process_delayed_frees(PyInterpreterState *interp) // Merge the queues from other threads into our own queue so that we can // process all of the pending delayed free requests at once. - HEAD_LOCK(&_PyRuntime); - for (PyThreadState *p = interp->threads.head; p != NULL; p = p->next) { + _Py_FOR_EACH_TSTATE_BEGIN(interp, p) { _PyThreadStateImpl *other = (_PyThreadStateImpl *)p; if (other != current_tstate) { llist_concat(¤t_tstate->mem_free_queue, &other->mem_free_queue); } } - HEAD_UNLOCK(&_PyRuntime); + _Py_FOR_EACH_TSTATE_END(interp); - _PyMem_ProcessDelayed((PyThreadState *)current_tstate); + _PyMem_ProcessDelayedNoDealloc((PyThreadState *)current_tstate, queue_freed_object, state); } // Subtract an incoming reference from the computed "gc_refs" refcount. 
static int visit_decref(PyObject *op, void *arg) { - if (_PyObject_GC_IS_TRACKED(op) && !_Py_IsImmortal(op)) { + if (_PyObject_GC_IS_TRACKED(op) + && !_Py_IsImmortal(op) + && !gc_is_frozen(op)) + { // If update_refs hasn't reached this object yet, mark it // as (tentatively) unreachable and initialize ob_tid to zero. gc_maybe_init_refs(op); @@ -484,13 +506,6 @@ update_refs(const mi_heap_t *heap, const mi_heap_area_t *area, return true; } } - else if (PyDict_CheckExact(op)) { - _PyDict_MaybeUntrack(op); - if (!_PyObject_GC_IS_TRACKED(op)) { - gc_restore_refs(op); - return true; - } - } } // We repurpose ob_tid to compute "gc_refs", the number of external @@ -1217,8 +1232,7 @@ gc_collect_internal(PyInterpreterState *interp, struct collection_state *state, state->gcstate->old[i-1].count = 0; } - HEAD_LOCK(&_PyRuntime); - for (PyThreadState *p = interp->threads.head; p != NULL; p = p->next) { + _Py_FOR_EACH_TSTATE_BEGIN(interp, p) { _PyThreadStateImpl *tstate = (_PyThreadStateImpl *)p; // merge per-thread refcount for types into the type's actual refcount @@ -1227,9 +1241,9 @@ gc_collect_internal(PyInterpreterState *interp, struct collection_state *state, // merge refcounts for all queued objects merge_queued_objects(tstate, state); } - HEAD_UNLOCK(&_PyRuntime); + _Py_FOR_EACH_TSTATE_END(interp); - process_delayed_frees(interp); + process_delayed_frees(interp, state); // Find unreachable objects int err = deduce_unreachable_heap(interp, state); @@ -1539,7 +1553,7 @@ visit_freeze(const mi_heap_t *heap, const mi_heap_area_t *area, void *block, size_t block_size, void *args) { PyObject *op = op_from_block(block, args, true); - if (op != NULL) { + if (op != NULL && !gc_is_unreachable(op)) { op->ob_gc_bits |= _PyGC_BITS_FROZEN; } return true; @@ -1584,7 +1598,7 @@ visit_count_frozen(const mi_heap_t *heap, const mi_heap_area_t *area, void *block, size_t block_size, void *args) { PyObject *op = op_from_block(block, args, true); - if (op != NULL && (op->ob_gc_bits & _PyGC_BITS_FROZEN) != 0) { + if (op != NULL && gc_is_frozen(op)) { struct count_frozen_args *arg = (struct count_frozen_args *)args; arg->count++; } @@ -1908,13 +1922,7 @@ PyObject_GC_Del(void *op) } record_deallocation(_PyThreadState_GET()); - PyObject *self = (PyObject *)op; - if (_PyObject_GC_IS_SHARED_INLINE(self)) { - _PyObject_FreeDelayed(((char *)op)-presize); - } - else { - PyObject_Free(((char *)op)-presize); - } + PyObject_Free(((char *)op)-presize); } int @@ -1982,13 +1990,11 @@ PyUnstable_GC_VisitObjects(gcvisitobjects_t callback, void *arg) void _PyGC_ClearAllFreeLists(PyInterpreterState *interp) { - HEAD_LOCK(&_PyRuntime); - _PyThreadStateImpl *tstate = (_PyThreadStateImpl *)interp->threads.head; - while (tstate != NULL) { + _Py_FOR_EACH_TSTATE_BEGIN(interp, p) { + _PyThreadStateImpl *tstate = (_PyThreadStateImpl *)p; _PyObject_ClearFreeLists(&tstate->freelists, 0); - tstate = (_PyThreadStateImpl *)tstate->base.next; } - HEAD_UNLOCK(&_PyRuntime); + _Py_FOR_EACH_TSTATE_END(interp); } #endif // Py_GIL_DISABLED diff --git a/Python/generated_cases.c.h b/Python/generated_cases.c.h index 03b4d2224922f0..13947849942cd4 100644 --- a/Python/generated_cases.c.h +++ b/Python/generated_cases.c.h @@ -192,7 +192,7 @@ assert(next_instr->op.code == STORE_FAST); next_oparg = next_instr->op.arg; #else - next_oparg = CURRENT_OPERAND(); + next_oparg = CURRENT_OPERAND0(); #endif _PyStackRef *target_local = &GETLOCAL(next_oparg); DEOPT_IF(!PyStackRef_Is(*target_local, left), BINARY_OP); @@ -5092,15 +5092,18 @@ _PyFrame_SetStackPointer(frame, 
stack_pointer); int optimized = _PyOptimizer_Optimize(frame, start, stack_pointer, &executor, 0); stack_pointer = _PyFrame_GetStackPointer(frame); - if (optimized < 0) goto error; - if (optimized) { + if (optimized <= 0) { + this_instr[1].counter = restart_backoff_counter(counter); + if (optimized < 0) goto error; + } + else { + _PyFrame_SetStackPointer(frame, stack_pointer); + this_instr[1].counter = initial_jump_backoff_counter(); + stack_pointer = _PyFrame_GetStackPointer(frame); assert(tstate->previous_executor == NULL); tstate->previous_executor = Py_None; GOTO_TIER_TWO(executor); } - else { - this_instr[1].counter = restart_backoff_counter(counter); - } } else { ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter); @@ -5561,7 +5564,7 @@ owner = stack_pointer[-1]; uint32_t dict_version = read_u32(&this_instr[2].cache); PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner); - DEOPT_IF(!PyModule_CheckExact(owner_o), LOAD_ATTR); + DEOPT_IF(Py_TYPE(owner_o)->tp_getattro != PyModule_Type.tp_getattro, LOAD_ATTR); PyDictObject *dict = (PyDictObject *)((PyModuleObject *)owner_o)->md_dict; assert(dict != NULL); DEOPT_IF(dict->ma_keys->dk_version != dict_version, LOAD_ATTR); @@ -6095,7 +6098,7 @@ { uint16_t counter = read_u16(&this_instr[1].cache); (void)counter; - #if ENABLE_SPECIALIZATION + #if ENABLE_SPECIALIZATION_FT if (ADAPTIVE_COUNTER_TRIGGERS(counter)) { PyObject *name = GETITEM(FRAME_CO_NAMES, oparg>>1); next_instr = this_instr; @@ -6106,7 +6109,7 @@ } OPCODE_DEFERRED_INC(LOAD_GLOBAL); ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter); - #endif /* ENABLE_SPECIALIZATION */ + #endif /* ENABLE_SPECIALIZATION_FT */ } /* Skip 1 cache entry */ /* Skip 1 cache entry */ @@ -6141,28 +6144,35 @@ uint16_t version = read_u16(&this_instr[2].cache); PyDictObject *dict = (PyDictObject *)GLOBALS(); DEOPT_IF(!PyDict_CheckExact(dict), LOAD_GLOBAL); - DEOPT_IF(dict->ma_keys->dk_version != version, LOAD_GLOBAL); - assert(DK_IS_UNICODE(dict->ma_keys)); + PyDictKeysObject *keys = FT_ATOMIC_LOAD_PTR_ACQUIRE(dict->ma_keys); + DEOPT_IF(FT_ATOMIC_LOAD_UINT32_RELAXED(keys->dk_version) != version, LOAD_GLOBAL); + assert(DK_IS_UNICODE(keys)); } // _GUARD_BUILTINS_VERSION_PUSH_KEYS { uint16_t version = read_u16(&this_instr[3].cache); PyDictObject *dict = (PyDictObject *)BUILTINS(); DEOPT_IF(!PyDict_CheckExact(dict), LOAD_GLOBAL); - DEOPT_IF(dict->ma_keys->dk_version != version, LOAD_GLOBAL); - builtins_keys = dict->ma_keys; + PyDictKeysObject *keys = FT_ATOMIC_LOAD_PTR_ACQUIRE(dict->ma_keys); + DEOPT_IF(FT_ATOMIC_LOAD_UINT32_RELAXED(keys->dk_version) != version, LOAD_GLOBAL); + builtins_keys = keys; assert(DK_IS_UNICODE(builtins_keys)); } // _LOAD_GLOBAL_BUILTINS_FROM_KEYS { uint16_t index = read_u16(&this_instr[4].cache); PyDictUnicodeEntry *entries = DK_UNICODE_ENTRIES(builtins_keys); - PyObject *res_o = entries[index].me_value; + PyObject *res_o = FT_ATOMIC_LOAD_PTR_RELAXED(entries[index].me_value); DEOPT_IF(res_o == NULL, LOAD_GLOBAL); + #if Py_GIL_DISABLED + int increfed = _Py_TryIncrefCompareStackRef(&entries[index].me_value, res_o, &res); + DEOPT_IF(!increfed, LOAD_GLOBAL); + #else Py_INCREF(res_o); + res = PyStackRef_FromPyObjectSteal(res_o); + #endif STAT_INC(LOAD_GLOBAL, hit); null = PyStackRef_NULL; - res = PyStackRef_FromPyObjectSteal(res_o); } stack_pointer[0] = res; if (oparg & 1) stack_pointer[1] = null; @@ -6185,8 +6195,9 @@ uint16_t version = read_u16(&this_instr[2].cache); PyDictObject *dict = (PyDictObject *)GLOBALS(); DEOPT_IF(!PyDict_CheckExact(dict), LOAD_GLOBAL); - 
DEOPT_IF(dict->ma_keys->dk_version != version, LOAD_GLOBAL); - globals_keys = dict->ma_keys; + PyDictKeysObject *keys = FT_ATOMIC_LOAD_PTR_ACQUIRE(dict->ma_keys); + DEOPT_IF(FT_ATOMIC_LOAD_UINT32_RELAXED(keys->dk_version) != version, LOAD_GLOBAL); + globals_keys = keys; assert(DK_IS_UNICODE(globals_keys)); } /* Skip 1 cache entry */ @@ -6194,12 +6205,17 @@ { uint16_t index = read_u16(&this_instr[4].cache); PyDictUnicodeEntry *entries = DK_UNICODE_ENTRIES(globals_keys); - PyObject *res_o = entries[index].me_value; + PyObject *res_o = FT_ATOMIC_LOAD_PTR_RELAXED(entries[index].me_value); DEOPT_IF(res_o == NULL, LOAD_GLOBAL); + #if Py_GIL_DISABLED + int increfed = _Py_TryIncrefCompareStackRef(&entries[index].me_value, res_o, &res); + DEOPT_IF(!increfed, LOAD_GLOBAL); + #else Py_INCREF(res_o); + res = PyStackRef_FromPyObjectSteal(res_o); + #endif STAT_INC(LOAD_GLOBAL, hit); null = PyStackRef_NULL; - res = PyStackRef_FromPyObjectSteal(res_o); } stack_pointer[0] = res; if (oparg & 1) stack_pointer[1] = null; @@ -7432,10 +7448,6 @@ DEOPT_IF(ep->me_key != name, STORE_ATTR); PyObject *old_value = ep->me_value; DEOPT_IF(old_value == NULL, STORE_ATTR); - /* Ensure dict is GC tracked if it needs to be */ - if (!_PyObject_GC_IS_TRACKED(dict) && _PyObject_GC_MAY_BE_TRACKED(PyStackRef_AsPyObjectBorrow(value))) { - _PyObject_GC_TRACK(dict); - } _PyFrame_SetStackPointer(frame, stack_pointer); _PyDict_NotifyEvent(tstate->interp, PyDict_EVENT_MODIFIED, dict, name, PyStackRef_AsPyObjectBorrow(value)); stack_pointer = _PyFrame_GetStackPointer(frame); @@ -7746,7 +7758,7 @@ value = stack_pointer[-1]; uint16_t counter = read_u16(&this_instr[1].cache); (void)counter; - #if ENABLE_SPECIALIZATION + #if ENABLE_SPECIALIZATION_FT if (ADAPTIVE_COUNTER_TRIGGERS(counter)) { next_instr = this_instr; _PyFrame_SetStackPointer(frame, stack_pointer); @@ -7756,7 +7768,7 @@ } OPCODE_DEFERRED_INC(TO_BOOL); ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter); - #endif /* ENABLE_SPECIALIZATION */ + #endif /* ENABLE_SPECIALIZATION_FT */ } /* Skip 2 cache entries */ // _TO_BOOL @@ -7851,7 +7863,7 @@ PyObject *value_o = PyStackRef_AsPyObjectBorrow(value); DEOPT_IF(!PyList_CheckExact(value_o), TO_BOOL); STAT_INC(TO_BOOL, hit); - res = Py_SIZE(value_o) ? PyStackRef_True : PyStackRef_False; + res = PyList_GET_SIZE(value_o) ? PyStackRef_True : PyStackRef_False; PyStackRef_CLOSE(value); stack_pointer[-1] = res; DISPATCH(); diff --git a/Python/getcopyright.c b/Python/getcopyright.c index 066c2ed66acddf..964584ddf7998e 100644 --- a/Python/getcopyright.c +++ b/Python/getcopyright.c @@ -4,7 +4,7 @@ static const char cprt[] = "\ -Copyright (c) 2001-2024 Python Software Foundation.\n\ +Copyright (c) 2001 Python Software Foundation.\n\ All Rights Reserved.\n\ \n\ Copyright (c) 2000 BeOpen.com.\n\ diff --git a/Python/import.c b/Python/import.c index 29bd8bf68ff5e1..09fe95fa1fb647 100644 --- a/Python/import.c +++ b/Python/import.c @@ -122,6 +122,13 @@ _PyImport_ReleaseLock(PyInterpreterState *interp) _PyRecursiveMutex_Unlock(&IMPORT_LOCK(interp)); } +void +_PyImport_ReInitLock(PyInterpreterState *interp) +{ + // gh-126688: Thread id may change after fork() on some operating systems. 
+ IMPORT_LOCK(interp).thread = PyThread_get_thread_ident_ex(); +} + /***************/ /* sys.modules */ diff --git a/Python/instrumentation.c b/Python/instrumentation.c index 87c2addaf809eb..3503809e3306cb 100644 --- a/Python/instrumentation.c +++ b/Python/instrumentation.c @@ -1006,13 +1006,10 @@ set_global_version(PyThreadState *tstate, uint32_t version) #ifdef Py_GIL_DISABLED // Set the version on all threads in free-threaded builds. - _PyRuntimeState *runtime = &_PyRuntime; - HEAD_LOCK(runtime); - for (tstate = interp->threads.head; tstate; - tstate = PyThreadState_Next(tstate)) { + _Py_FOR_EACH_TSTATE_BEGIN(interp, tstate) { set_version_raw(&tstate->eval_breaker, version); }; - HEAD_UNLOCK(runtime); + _Py_FOR_EACH_TSTATE_END(interp); #else // Normal builds take the current version from instrumentation_version when // attaching a thread, so we only have to set the current thread's version. diff --git a/Python/jit.c b/Python/jit.c index 90f693dfb7c41b..7dd0da7a45055a 100644 --- a/Python/jit.c +++ b/Python/jit.c @@ -58,7 +58,12 @@ jit_alloc(size_t size) int failed = memory == NULL; #else int flags = MAP_ANONYMOUS | MAP_PRIVATE; - unsigned char *memory = mmap(NULL, size, PROT_READ | PROT_WRITE, flags, -1, 0); + int prot = PROT_READ | PROT_WRITE; +# ifdef MAP_JIT + flags |= MAP_JIT; + prot |= PROT_EXEC; +# endif + unsigned char *memory = mmap(NULL, size, prot, flags, -1, 0); int failed = memory == MAP_FAILED; #endif if (failed) { @@ -102,8 +107,11 @@ mark_executable(unsigned char *memory, size_t size) int old; int failed = !VirtualProtect(memory, size, PAGE_EXECUTE_READ, &old); #else + int failed = 0; __builtin___clear_cache((char *)memory, (char *)memory + size); - int failed = mprotect(memory, size, PROT_EXEC | PROT_READ); +#ifndef MAP_JIT + failed = mprotect(memory, size, PROT_EXEC | PROT_READ); +#endif #endif if (failed) { jit_error("unable to protect executable memory"); @@ -499,6 +507,9 @@ _PyJIT_Compile(_PyExecutorObject *executor, const _PyUOpInstruction trace[], siz if (memory == NULL) { return -1; } +#ifdef MAP_JIT + pthread_jit_write_protect_np(0); +#endif // Update the offsets of each instruction: for (size_t i = 0; i < length; i++) { state.instruction_starts[i] += (uintptr_t)memory; @@ -529,6 +540,9 @@ _PyJIT_Compile(_PyExecutorObject *executor, const _PyUOpInstruction trace[], siz data += group->data_size; assert(code == memory + code_size); assert(data == memory + code_size + data_size); +#ifdef MAP_JIT + pthread_jit_write_protect_np(1); +#endif if (mark_executable(memory, total_size)) { jit_free(memory, total_size); return -1; diff --git a/Python/marshal.c b/Python/marshal.c index a280fbfd078f41..72afa4ff89432c 100644 --- a/Python/marshal.c +++ b/Python/marshal.c @@ -50,41 +50,52 @@ module marshal # define MAX_MARSHAL_STACK_DEPTH 2000 #endif +/* Supported types */ #define TYPE_NULL '0' #define TYPE_NONE 'N' #define TYPE_FALSE 'F' #define TYPE_TRUE 'T' #define TYPE_STOPITER 'S' #define TYPE_ELLIPSIS '.' -#define TYPE_INT 'i' -/* TYPE_INT64 is not generated anymore. - Supported for backward compatibility only. */ -#define TYPE_INT64 'I' -#define TYPE_FLOAT 'f' -#define TYPE_BINARY_FLOAT 'g' -#define TYPE_COMPLEX 'x' -#define TYPE_BINARY_COMPLEX 'y' -#define TYPE_LONG 'l' -#define TYPE_STRING 's' -#define TYPE_INTERNED 't' -#define TYPE_REF 'r' -#define TYPE_TUPLE '(' +#define TYPE_BINARY_FLOAT 'g' // Version 0 uses TYPE_FLOAT instead. +#define TYPE_BINARY_COMPLEX 'y' // Version 0 uses TYPE_COMPLEX instead. +#define TYPE_LONG 'l' // See also TYPE_INT. 
+#define TYPE_STRING 's' // Bytes. (Name comes from Python 2.) +#define TYPE_TUPLE '(' // See also TYPE_SMALL_TUPLE. #define TYPE_LIST '[' #define TYPE_DICT '{' #define TYPE_CODE 'c' #define TYPE_UNICODE 'u' #define TYPE_UNKNOWN '?' +// added in version 2: #define TYPE_SET '<' #define TYPE_FROZENSET '>' +// added in version 5: #define TYPE_SLICE ':' -#define FLAG_REF '\x80' /* with a type, add obj to index */ +// Remember to update the version and documentation when adding new types. +/* Special cases for unicode strings (added in version 4) */ +#define TYPE_INTERNED 't' // Version 1+ #define TYPE_ASCII 'a' #define TYPE_ASCII_INTERNED 'A' -#define TYPE_SMALL_TUPLE ')' #define TYPE_SHORT_ASCII 'z' #define TYPE_SHORT_ASCII_INTERNED 'Z' +/* Special cases for small objects */ +#define TYPE_INT 'i' // All versions. 32-bit encoding. +#define TYPE_SMALL_TUPLE ')' // Version 4+ + +/* Supported for backwards compatibility */ +#define TYPE_COMPLEX 'x' // Generated for version 0 only. +#define TYPE_FLOAT 'f' // Generated for version 0 only. +#define TYPE_INT64 'I' // Not generated any more. + +/* References (added in version 3) */ +#define TYPE_REF 'r' +#define FLAG_REF '\x80' /* with a type, add obj to index */ + + +// Error codes: #define WFERR_OK 0 #define WFERR_UNMARSHALLABLE 1 #define WFERR_NESTEDTOODEEP 2 @@ -615,6 +626,11 @@ w_complex_object(PyObject *v, char flag, WFILE *p) PyBuffer_Release(&view); } else if (PySlice_Check(v)) { + if (p->version < 5) { + w_byte(TYPE_UNKNOWN, p); + p->error = WFERR_UNMARSHALLABLE; + return; + } PySliceObject *slice = (PySliceObject *)v; W_TYPE(TYPE_SLICE, p); w_object(slice->start, p); diff --git a/Python/optimizer.c b/Python/optimizer.c index b876b6c2bd72fd..6a232218981dcd 100644 --- a/Python/optimizer.c +++ b/Python/optimizer.c @@ -205,8 +205,8 @@ _PyOptimizer_Optimize( return 1; } -_PyExecutorObject * -_Py_GetExecutor(PyCodeObject *code, int offset) +static _PyExecutorObject * +get_executor_lock_held(PyCodeObject *code, int offset) { int code_len = (int)Py_SIZE(code); for (int i = 0 ; i < code_len;) { @@ -222,6 +222,16 @@ _Py_GetExecutor(PyCodeObject *code, int offset) return NULL; } +_PyExecutorObject * +_Py_GetExecutor(PyCodeObject *code, int offset) +{ + _PyExecutorObject *executor; + Py_BEGIN_CRITICAL_SECTION(code); + executor = get_executor_lock_held(code, offset); + Py_END_CRITICAL_SECTION(); + return executor; +} + static PyObject * is_valid(PyObject *self, PyObject *Py_UNUSED(ignored)) { @@ -288,13 +298,13 @@ _PyUOpPrint(const _PyUOpInstruction *uop) printf(" (%d, target=%d, operand=%#" PRIx64, uop->oparg, uop->target, - (uint64_t)uop->operand); + (uint64_t)uop->operand0); break; case UOP_FORMAT_JUMP: printf(" (%d, jump_target=%d, operand=%#" PRIx64, uop->oparg, uop->jump_target, - (uint64_t)uop->operand); + (uint64_t)uop->operand0); break; default: printf(" (%d, Unknown format)", uop->oparg); @@ -340,7 +350,7 @@ uop_item(_PyExecutorObject *self, Py_ssize_t index) Py_DECREF(oname); return NULL; } - PyObject *operand = PyLong_FromUnsignedLongLong(self->trace[index].operand); + PyObject *operand = PyLong_FromUnsignedLongLong(self->trace[index].operand0); if (operand == NULL) { Py_DECREF(target); Py_DECREF(oparg); @@ -463,7 +473,7 @@ add_to_trace( trace[trace_length].format = UOP_FORMAT_TARGET; trace[trace_length].target = target; trace[trace_length].oparg = oparg; - trace[trace_length].operand = operand; + trace[trace_length].operand0 = operand; return trace_length + 1; } @@ -970,7 +980,7 @@ static void make_exit(_PyUOpInstruction *inst, int opcode, 
int target) { inst->opcode = opcode; inst->oparg = 0; - inst->operand = 0; + inst->operand0 = 0; inst->format = UOP_FORMAT_TARGET; inst->target = target; } @@ -1033,7 +1043,7 @@ prepare_for_execution(_PyUOpInstruction *buffer, int length) current_error_target = target; make_exit(&buffer[next_spare], _ERROR_POP_N, 0); buffer[next_spare].oparg = popped; - buffer[next_spare].operand = target; + buffer[next_spare].operand0 = target; next_spare++; } buffer[i].error_target = current_error; @@ -1150,7 +1160,7 @@ make_executor_from_uops(_PyUOpInstruction *buffer, int length, const _PyBloomFil int next_exit = exit_count-1; _PyUOpInstruction *dest = (_PyUOpInstruction *)&executor->trace[length]; assert(buffer[0].opcode == _START_EXECUTOR); - buffer[0].operand = (uint64_t)executor; + buffer[0].operand0 = (uint64_t)executor; for (int i = length-1; i >= 0; i--) { int opcode = buffer[i].opcode; dest--; @@ -1159,13 +1169,13 @@ make_executor_from_uops(_PyUOpInstruction *buffer, int length, const _PyBloomFil if (opcode == _EXIT_TRACE) { _PyExitData *exit = &executor->exits[next_exit]; exit->target = buffer[i].target; - dest->operand = (uint64_t)exit; + dest->operand0 = (uint64_t)exit; next_exit--; } if (opcode == _DYNAMIC_EXIT) { _PyExitData *exit = &executor->exits[next_exit]; exit->target = 0; - dest->operand = (uint64_t)exit; + dest->operand0 = (uint64_t)exit; next_exit--; } } @@ -1312,7 +1322,7 @@ _PyOptimizer_NewUOpOptimizer(void) static void counter_dealloc(_PyExecutorObject *self) { /* The optimizer is the operand of the second uop. */ - PyObject *opt = (PyObject *)self->trace[1].operand; + PyObject *opt = (PyObject *)self->trace[1].operand0; Py_DECREF(opt); uop_dealloc(self); } @@ -1352,7 +1362,7 @@ counter_optimize( _Py_CODEUNIT *target = instr + 1 + _PyOpcode_Caches[JUMP_BACKWARD] - oparg; _PyUOpInstruction buffer[4] = { { .opcode = _START_EXECUTOR, .jump_target = 3, .format=UOP_FORMAT_JUMP }, - { .opcode = _LOAD_CONST_INLINE, .operand = (uintptr_t)self }, + { .opcode = _LOAD_CONST_INLINE, .operand0 = (uintptr_t)self }, { .opcode = _INTERNAL_INCREMENT_OPT_COUNTER }, { .opcode = _EXIT_TRACE, .target = (uint32_t)(target - _PyCode_CODE(code)), .format=UOP_FORMAT_TARGET } }; diff --git a/Python/optimizer_analysis.c b/Python/optimizer_analysis.c index 25166bc2dc5c02..a4a0472b64e57c 100644 --- a/Python/optimizer_analysis.c +++ b/Python/optimizer_analysis.c @@ -100,11 +100,11 @@ convert_global_to_const(_PyUOpInstruction *inst, PyObject *obj) PyDictObject *dict = (PyDictObject *)obj; assert(dict->ma_keys->dk_kind == DICT_KEYS_UNICODE); PyDictUnicodeEntry *entries = DK_UNICODE_ENTRIES(dict->ma_keys); - assert(inst->operand <= UINT16_MAX); - if ((int)inst->operand >= dict->ma_keys->dk_nentries) { + assert(inst->operand0 <= UINT16_MAX); + if ((int)inst->operand0 >= dict->ma_keys->dk_nentries) { return NULL; } - PyObject *res = entries[inst->operand].me_value; + PyObject *res = entries[inst->operand0].me_value; if (res == NULL) { return NULL; } @@ -114,7 +114,7 @@ convert_global_to_const(_PyUOpInstruction *inst, PyObject *obj) else { inst->opcode = (inst->oparg & 1) ? 
_LOAD_CONST_INLINE_WITH_NULL : _LOAD_CONST_INLINE; } - inst->operand = (uint64_t)res; + inst->operand0 = (uint64_t)res; return res; } @@ -125,7 +125,7 @@ incorrect_keys(_PyUOpInstruction *inst, PyObject *obj) return 1; } PyDictObject *dict = (PyDictObject *)obj; - if (dict->ma_keys->dk_version != inst->operand) { + if (dict->ma_keys->dk_version != inst->operand0) { return 1; } return 0; @@ -215,7 +215,7 @@ remove_globals(_PyInterpreterFrame *frame, _PyUOpInstruction *buffer, } else { buffer[pc].opcode = _CHECK_FUNCTION; - buffer[pc].operand = function_version; + buffer[pc].operand0 = function_version; function_checked |= 1; } // We're no longer pushing the builtins keys; rewrite the @@ -248,7 +248,7 @@ remove_globals(_PyInterpreterFrame *frame, _PyUOpInstruction *buffer, } else { buffer[pc].opcode = _CHECK_FUNCTION; - buffer[pc].operand = function_version; + buffer[pc].operand0 = function_version; function_checked |= 1; } if (opcode == _GUARD_GLOBALS_VERSION_PUSH_KEYS) { @@ -273,7 +273,7 @@ remove_globals(_PyInterpreterFrame *frame, _PyUOpInstruction *buffer, builtins_watched <<= 1; globals_watched <<= 1; function_checked <<= 1; - uint64_t operand = buffer[pc].operand; + uint64_t operand = buffer[pc].operand0; if (operand == 0 || (operand & 1)) { // It's either a code object or NULL, so bail return 1; @@ -301,7 +301,7 @@ remove_globals(_PyInterpreterFrame *frame, _PyUOpInstruction *buffer, builtins_watched >>= 1; globals_watched >>= 1; function_checked >>= 1; - uint64_t operand = buffer[pc].operand; + uint64_t operand = buffer[pc].operand0; if (operand == 0 || (operand & 1)) { // It's either a code object or NULL, so bail return 1; @@ -317,7 +317,7 @@ remove_globals(_PyInterpreterFrame *frame, _PyUOpInstruction *buffer, break; } case _CHECK_FUNCTION_EXACT_ARGS: - prechecked_function_version = (uint32_t)buffer[pc].operand; + prechecked_function_version = (uint32_t)buffer[pc].operand0; break; default: if (is_terminator(inst)) { @@ -343,7 +343,7 @@ remove_globals(_PyInterpreterFrame *frame, _PyUOpInstruction *buffer, #define REPLACE_OP(INST, OP, ARG, OPERAND) \ INST->opcode = OP; \ INST->oparg = ARG; \ - INST->operand = OPERAND; + INST->operand0 = OPERAND; /* Shortened forms for convenience, used in optimizer_bytecodes.c */ #define sym_is_not_null _Py_uop_sym_is_not_null @@ -409,7 +409,7 @@ get_code(_PyUOpInstruction *op) { assert(op->opcode == _PUSH_FRAME || op->opcode == _RETURN_VALUE || op->opcode == _RETURN_GENERATOR); PyCodeObject *co = NULL; - uint64_t operand = op->operand; + uint64_t operand = op->operand0; if (operand == 0) { return NULL; } @@ -429,7 +429,7 @@ static PyCodeObject * get_code_with_logging(_PyUOpInstruction *op) { PyCodeObject *co = NULL; - uint64_t push_operand = op->operand; + uint64_t push_operand = op->operand0; if (push_operand & 1) { co = (PyCodeObject *)(push_operand & ~1); DPRINTF(3, "code=%p ", co); @@ -534,7 +534,7 @@ optimize_uops( assert(max_space <= INT_MAX); assert(max_space <= INT32_MAX); first_valid_check_stack->opcode = _CHECK_STACK_SPACE_OPERAND; - first_valid_check_stack->operand = max_space; + first_valid_check_stack->operand0 = max_space; } return trace_len; diff --git a/Python/optimizer_bytecodes.c b/Python/optimizer_bytecodes.c index 71904c1bc73f88..42bdbd9ca8d0cd 100644 --- a/Python/optimizer_bytecodes.c +++ b/Python/optimizer_bytecodes.c @@ -346,7 +346,7 @@ dummy_func(void) { res = sym_new_type(ctx, &PyUnicode_Type); } // _STORE_FAST: - GETLOCAL(this_instr->operand) = res; + GETLOCAL(this_instr->operand0) = res; } op(_BINARY_SUBSCR_INIT_CALL, 
(container, sub -- new_frame: _Py_UOpsAbstractFrame *)) { @@ -589,8 +589,27 @@ dummy_func(void) { self = sym_new_not_null(ctx); } - op(_CHECK_FUNCTION_EXACT_ARGS, (callable, self_or_null, unused[oparg] -- callable, self_or_null, unused[oparg])) { + op(_CHECK_FUNCTION_VERSION, (func_version/2, callable, self_or_null, unused[oparg] -- callable, self_or_null, unused[oparg])) { + (void)self_or_null; + if (sym_is_const(callable) && sym_matches_type(callable, &PyFunction_Type)) { + assert(PyFunction_Check(sym_get_const(callable))); + REPLACE_OP(this_instr, _CHECK_FUNCTION_VERSION_INLINE, 0, func_version); + this_instr->operand1 = (uintptr_t)sym_get_const(callable); + } sym_set_type(callable, &PyFunction_Type); + } + + op(_CHECK_FUNCTION_EXACT_ARGS, (callable, self_or_null, unused[oparg] -- callable, self_or_null, unused[oparg])) { + assert(sym_matches_type(callable, &PyFunction_Type)); + if (sym_is_const(callable)) { + if (sym_is_null(self_or_null) || sym_is_not_null(self_or_null)) { + PyFunctionObject *func = (PyFunctionObject *)sym_get_const(callable); + PyCodeObject *co = (PyCodeObject *)func->func_code; + if (co->co_argcount == oparg + !sym_is_null(self_or_null)) { + REPLACE_OP(this_instr, _NOP, 0 ,0); + } + } + } (void)self_or_null; } diff --git a/Python/optimizer_cases.c.h b/Python/optimizer_cases.c.h index 54821b23716eeb..f77a5aa35bdf82 100644 --- a/Python/optimizer_cases.c.h +++ b/Python/optimizer_cases.c.h @@ -525,7 +525,7 @@ res = sym_new_type(ctx, &PyUnicode_Type); } // _STORE_FAST: - GETLOCAL(this_instr->operand) = res; + GETLOCAL(this_instr->operand0) = res; stack_pointer += -2; assert(WITHIN_STACK_BOUNDS()); break; @@ -888,7 +888,7 @@ case _GUARD_GLOBALS_VERSION_PUSH_KEYS: { _Py_UopsSymbol *globals_keys; - uint16_t version = (uint16_t)this_instr->operand; + uint16_t version = (uint16_t)this_instr->operand0; globals_keys = sym_new_unknown(ctx); (void)version; stack_pointer[0] = globals_keys; @@ -899,7 +899,7 @@ case _GUARD_BUILTINS_VERSION_PUSH_KEYS: { _Py_UopsSymbol *builtins_keys; - uint16_t version = (uint16_t)this_instr->operand; + uint16_t version = (uint16_t)this_instr->operand0; builtins_keys = sym_new_unknown(ctx); (void)version; stack_pointer[0] = builtins_keys; @@ -1090,7 +1090,7 @@ case _GUARD_TYPE_VERSION: { _Py_UopsSymbol *owner; owner = stack_pointer[-1]; - uint32_t type_version = (uint32_t)this_instr->operand; + uint32_t type_version = (uint32_t)this_instr->operand0; assert(type_version); if (sym_matches_type_version(owner, type_version)) { REPLACE_OP(this_instr, _NOP, 0, 0); @@ -1122,7 +1122,7 @@ _Py_UopsSymbol *attr; _Py_UopsSymbol *null = NULL; owner = stack_pointer[-1]; - uint16_t offset = (uint16_t)this_instr->operand; + uint16_t offset = (uint16_t)this_instr->operand0; attr = sym_new_not_null(ctx); null = sym_new_null(ctx); (void)offset; @@ -1137,7 +1137,7 @@ case _CHECK_ATTR_MODULE: { _Py_UopsSymbol *owner; owner = stack_pointer[-1]; - uint32_t dict_version = (uint32_t)this_instr->operand; + uint32_t dict_version = (uint32_t)this_instr->operand0; (void)dict_version; if (sym_is_const(owner)) { PyObject *cnst = sym_get_const(owner); @@ -1160,7 +1160,7 @@ _Py_UopsSymbol *attr; _Py_UopsSymbol *null = NULL; owner = stack_pointer[-1]; - uint16_t index = (uint16_t)this_instr->operand; + uint16_t index = (uint16_t)this_instr->operand0; (void)index; null = sym_new_null(ctx); attr = NULL; @@ -1202,7 +1202,7 @@ _Py_UopsSymbol *attr; _Py_UopsSymbol *null = NULL; owner = stack_pointer[-1]; - uint16_t hint = (uint16_t)this_instr->operand; + uint16_t hint = 
(uint16_t)this_instr->operand0; attr = sym_new_not_null(ctx); null = sym_new_null(ctx); (void)hint; @@ -1219,7 +1219,7 @@ _Py_UopsSymbol *attr; _Py_UopsSymbol *null = NULL; owner = stack_pointer[-1]; - uint16_t index = (uint16_t)this_instr->operand; + uint16_t index = (uint16_t)this_instr->operand0; attr = sym_new_not_null(ctx); null = sym_new_null(ctx); (void)index; @@ -1240,7 +1240,7 @@ _Py_UopsSymbol *attr; _Py_UopsSymbol *null = NULL; owner = stack_pointer[-1]; - PyObject *descr = (PyObject *)this_instr->operand; + PyObject *descr = (PyObject *)this_instr->operand0; attr = sym_new_not_null(ctx); null = sym_new_null(ctx); (void)descr; @@ -1256,7 +1256,7 @@ _Py_UopsSymbol *owner; _Py_UOpsAbstractFrame *new_frame; owner = stack_pointer[-1]; - PyObject *fget = (PyObject *)this_instr->operand; + PyObject *fget = (PyObject *)this_instr->operand0; (void)fget; (void)owner; new_frame = NULL; @@ -1639,7 +1639,7 @@ _Py_UopsSymbol *attr; _Py_UopsSymbol *self = NULL; owner = stack_pointer[-1]; - PyObject *descr = (PyObject *)this_instr->operand; + PyObject *descr = (PyObject *)this_instr->operand0; (void)descr; attr = sym_new_not_null(ctx); self = owner; @@ -1655,7 +1655,7 @@ _Py_UopsSymbol *attr; _Py_UopsSymbol *self = NULL; owner = stack_pointer[-1]; - PyObject *descr = (PyObject *)this_instr->operand; + PyObject *descr = (PyObject *)this_instr->operand0; (void)descr; attr = sym_new_not_null(ctx); self = owner; @@ -1689,7 +1689,7 @@ _Py_UopsSymbol *attr; _Py_UopsSymbol *self = NULL; owner = stack_pointer[-1]; - PyObject *descr = (PyObject *)this_instr->operand; + PyObject *descr = (PyObject *)this_instr->operand0; (void)descr; attr = sym_new_not_null(ctx); self = owner; @@ -1749,6 +1749,22 @@ } case _CHECK_FUNCTION_VERSION: { + _Py_UopsSymbol *self_or_null; + _Py_UopsSymbol *callable; + self_or_null = stack_pointer[-1 - oparg]; + callable = stack_pointer[-2 - oparg]; + uint32_t func_version = (uint32_t)this_instr->operand0; + (void)self_or_null; + if (sym_is_const(callable) && sym_matches_type(callable, &PyFunction_Type)) { + assert(PyFunction_Check(sym_get_const(callable))); + REPLACE_OP(this_instr, _CHECK_FUNCTION_VERSION_INLINE, 0, func_version); + this_instr->operand1 = (uintptr_t)sym_get_const(callable); + } + sym_set_type(callable, &PyFunction_Type); + break; + } + + case _CHECK_FUNCTION_VERSION_INLINE: { break; } @@ -1816,7 +1832,16 @@ _Py_UopsSymbol *callable; self_or_null = stack_pointer[-1 - oparg]; callable = stack_pointer[-2 - oparg]; - sym_set_type(callable, &PyFunction_Type); + assert(sym_matches_type(callable, &PyFunction_Type)); + if (sym_is_const(callable)) { + if (sym_is_null(self_or_null) || sym_is_not_null(self_or_null)) { + PyFunctionObject *func = (PyFunctionObject *)sym_get_const(callable); + PyCodeObject *co = (PyCodeObject *)func->func_code; + if (co->co_argcount == oparg + !sym_is_null(self_or_null)) { + REPLACE_OP(this_instr, _NOP, 0 ,0); + } + } + } (void)self_or_null; break; } @@ -1939,7 +1964,7 @@ null = stack_pointer[-1 - oparg]; callable = stack_pointer[-2 - oparg]; args = &stack_pointer[-oparg]; - uint32_t type_version = (uint32_t)this_instr->operand; + uint32_t type_version = (uint32_t)this_instr->operand0; (void)type_version; (void)callable; (void)null; @@ -2399,7 +2424,7 @@ } case _CHECK_STACK_SPACE_OPERAND: { - uint32_t framesize = (uint32_t)this_instr->operand; + uint32_t framesize = (uint32_t)this_instr->operand0; (void)framesize; /* We should never see _CHECK_STACK_SPACE_OPERANDs. * They are only created at the end of this pass. 
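             * (They are synthesized by optimize_uops() itself, which rewrites the
             *  first _CHECK_STACK_SPACE into _CHECK_STACK_SPACE_OPERAND with the
             *  required stack space in operand0; see the optimizer_analysis.c hunk
             *  above.)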
*/ @@ -2412,7 +2437,7 @@ } case _EXIT_TRACE: { - PyObject *exit_p = (PyObject *)this_instr->operand; + PyObject *exit_p = (PyObject *)this_instr->operand0; (void)exit_p; ctx->done = true; break; @@ -2424,7 +2449,7 @@ case _LOAD_CONST_INLINE: { _Py_UopsSymbol *value; - PyObject *ptr = (PyObject *)this_instr->operand; + PyObject *ptr = (PyObject *)this_instr->operand0; value = sym_new_const(ctx, ptr); stack_pointer[0] = value; stack_pointer += 1; @@ -2434,7 +2459,7 @@ case _LOAD_CONST_INLINE_BORROW: { _Py_UopsSymbol *value; - PyObject *ptr = (PyObject *)this_instr->operand; + PyObject *ptr = (PyObject *)this_instr->operand0; value = sym_new_const(ctx, ptr); stack_pointer[0] = value; stack_pointer += 1; @@ -2452,7 +2477,7 @@ case _LOAD_CONST_INLINE_WITH_NULL: { _Py_UopsSymbol *value; _Py_UopsSymbol *null; - PyObject *ptr = (PyObject *)this_instr->operand; + PyObject *ptr = (PyObject *)this_instr->operand0; value = sym_new_const(ctx, ptr); null = sym_new_null(ctx); stack_pointer[0] = value; @@ -2465,7 +2490,7 @@ case _LOAD_CONST_INLINE_BORROW_WITH_NULL: { _Py_UopsSymbol *value; _Py_UopsSymbol *null; - PyObject *ptr = (PyObject *)this_instr->operand; + PyObject *ptr = (PyObject *)this_instr->operand0; value = sym_new_const(ctx, ptr); null = sym_new_null(ctx); stack_pointer[0] = value; diff --git a/Python/parking_lot.c b/Python/parking_lot.c index bffc959e5d0978..8edf43235942ab 100644 --- a/Python/parking_lot.c +++ b/Python/parking_lot.c @@ -221,8 +221,7 @@ _PySemaphore_Wait(_PySemaphore *sema, PyTime_t timeout, int detach) PyThreadState *tstate = NULL; if (detach) { tstate = _PyThreadState_GET(); - if (tstate && _Py_atomic_load_int_relaxed(&tstate->state) == - _Py_THREAD_ATTACHED) { + if (tstate && _PyThreadState_IsAttached(tstate)) { // Only detach if we are attached PyEval_ReleaseThread(tstate); } diff --git a/Python/pystate.c b/Python/pystate.c index 24ee73c145cbcc..975eb6d4fbd0f2 100644 --- a/Python/pystate.c +++ b/Python/pystate.c @@ -629,6 +629,8 @@ init_interpreter(PyInterpreterState *interp, assert(next != NULL || (interp == runtime->interpreters.main)); interp->next = next; + interp->threads.preallocated = &interp->_initial_thread; + // We would call _PyObject_InitState() at this point // if interp->feature_flags were alredy set. @@ -766,7 +768,6 @@ PyInterpreterState_New(void) return interp; } - static void interpreter_clear(PyInterpreterState *interp, PyThreadState *tstate) { @@ -789,18 +790,15 @@ interpreter_clear(PyInterpreterState *interp, PyThreadState *tstate) } // Clear the current/main thread state last. - HEAD_LOCK(runtime); - PyThreadState *p = interp->threads.head; - HEAD_UNLOCK(runtime); - while (p != NULL) { + _Py_FOR_EACH_TSTATE_BEGIN(interp, p) { // See https://github.com/python/cpython/issues/102126 // Must be called without HEAD_LOCK held as it can deadlock // if any finalizer tries to acquire that lock. + HEAD_UNLOCK(runtime); PyThreadState_Clear(p); HEAD_LOCK(runtime); - p = p->next; - HEAD_UNLOCK(runtime); } + _Py_FOR_EACH_TSTATE_END(interp); if (tstate->interp == interp) { /* We fix tstate->_status below when we for sure aren't using it (e.g. no longer need the GIL). */ @@ -910,6 +908,9 @@ interpreter_clear(PyInterpreterState *interp, PyThreadState *tstate) // XXX Once we have one allocator per interpreter (i.e. // per-interpreter GC) we must ensure that all of the interpreter's // objects have been cleaned up at the point. + + // We could clear interp->threads.freelist here + // if it held more than just the initial thread state. 
} @@ -1047,10 +1048,17 @@ get_main_thread(PyInterpreterState *interp) return _Py_atomic_load_ptr_relaxed(&interp->threads.main); } +void +_PyErr_SetInterpreterAlreadyRunning(void) +{ + PyErr_SetString(PyExc_InterpreterError, "interpreter already running"); +} + int _PyInterpreterState_SetRunningMain(PyInterpreterState *interp) { - if (_PyInterpreterState_FailIfRunningMain(interp) < 0) { + if (get_main_thread(interp) != NULL) { + _PyErr_SetInterpreterAlreadyRunning(); return -1; } PyThreadState *tstate = current_fast_get(); @@ -1096,17 +1104,6 @@ _PyThreadState_IsRunningMain(PyThreadState *tstate) return get_main_thread(interp) == tstate; } -int -_PyInterpreterState_FailIfRunningMain(PyInterpreterState *interp) -{ - if (get_main_thread(interp) != NULL) { - PyErr_SetString(PyExc_InterpreterError, - "interpreter already running"); - return -1; - } - return 0; -} - void _PyInterpreterState_ReinitRunningMain(PyThreadState *tstate) { @@ -1390,22 +1387,45 @@ allocate_chunk(int size_in_bytes, _PyStackChunk* previous) return res; } +static void +reset_threadstate(_PyThreadStateImpl *tstate) +{ + // Set to _PyThreadState_INIT directly? + memcpy(tstate, + &initial._main_interpreter._initial_thread, + sizeof(*tstate)); +} + static _PyThreadStateImpl * -alloc_threadstate(void) +alloc_threadstate(PyInterpreterState *interp) { - return PyMem_RawCalloc(1, sizeof(_PyThreadStateImpl)); + _PyThreadStateImpl *tstate; + + // Try the preallocated tstate first. + tstate = _Py_atomic_exchange_ptr(&interp->threads.preallocated, NULL); + + // Fall back to the allocator. + if (tstate == NULL) { + tstate = PyMem_RawCalloc(1, sizeof(_PyThreadStateImpl)); + if (tstate == NULL) { + return NULL; + } + reset_threadstate(tstate); + } + return tstate; } static void free_threadstate(_PyThreadStateImpl *tstate) { + PyInterpreterState *interp = tstate->base.interp; // The initial thread state of the interpreter is allocated // as part of the interpreter state so should not be freed. - if (tstate == &tstate->base.interp->_initial_thread) { - // Restore to _PyThreadState_INIT. - memcpy(tstate, - &initial._main_interpreter._initial_thread, - sizeof(*tstate)); + if (tstate == &interp->_initial_thread) { + // Make it available again. + reset_threadstate(tstate); + assert(interp->threads.preallocated == NULL); + _Py_atomic_store_ptr(&interp->threads.preallocated, tstate); } else { PyMem_RawFree(tstate); @@ -1496,66 +1516,38 @@ add_threadstate(PyInterpreterState *interp, PyThreadState *tstate, static PyThreadState * new_threadstate(PyInterpreterState *interp, int whence) { - _PyThreadStateImpl *tstate; - _PyRuntimeState *runtime = interp->runtime; - // We don't need to allocate a thread state for the main interpreter - // (the common case), but doing it later for the other case revealed a - // reentrancy problem (deadlock). So for now we always allocate before - // taking the interpreters lock. See GH-96071. - _PyThreadStateImpl *new_tstate = alloc_threadstate(); - int used_newtstate; - if (new_tstate == NULL) { + // Allocate the thread state. + _PyThreadStateImpl *tstate = alloc_threadstate(interp); + if (tstate == NULL) { return NULL; } + #ifdef Py_GIL_DISABLED Py_ssize_t qsbr_idx = _Py_qsbr_reserve(interp); if (qsbr_idx < 0) { - PyMem_RawFree(new_tstate); + free_threadstate(tstate); return NULL; } int32_t tlbc_idx = _Py_ReserveTLBCIndex(interp); if (tlbc_idx < 0) { - PyMem_RawFree(new_tstate); + free_threadstate(tstate); return NULL; } #endif /* We serialize concurrent creation to protect global state. 
*/ - HEAD_LOCK(runtime); + HEAD_LOCK(interp->runtime); + // Initialize the new thread state. interp->threads.next_unique_id += 1; uint64_t id = interp->threads.next_unique_id; + init_threadstate(tstate, interp, id, whence); - // Allocate the thread state and add it to the interpreter. + // Add the new thread state to the interpreter. PyThreadState *old_head = interp->threads.head; - if (old_head == NULL) { - // It's the interpreter's initial thread state. - used_newtstate = 0; - tstate = &interp->_initial_thread; - } - // XXX Re-use interp->_initial_thread if not in use? - else { - // Every valid interpreter must have at least one thread. - assert(id > 1); - assert(old_head->prev == NULL); - used_newtstate = 1; - tstate = new_tstate; - // Set to _PyThreadState_INIT. - memcpy(tstate, - &initial._main_interpreter._initial_thread, - sizeof(*tstate)); - } - - init_threadstate(tstate, interp, id, whence); add_threadstate(interp, (PyThreadState *)tstate, old_head); - HEAD_UNLOCK(runtime); - if (!used_newtstate) { - // Must be called with lock unlocked to avoid re-entrancy deadlock. - PyMem_RawFree(new_tstate); - } - else { - } + HEAD_UNLOCK(interp->runtime); #ifdef Py_GIL_DISABLED // Must be called with lock unlocked to avoid lock ordering deadlocks. @@ -1654,6 +1646,11 @@ PyThreadState_Clear(PyThreadState *tstate) "PyThreadState_Clear: warning: thread still has a frame\n"); } + if (verbose && tstate->current_exception != NULL) { + fprintf(stderr, "PyThreadState_Clear: warning: thread has an exception set\n"); + _PyErr_Print(tstate); + } + /* At this point tstate shouldn't be used any more, neither to run Python code nor for other uses. @@ -1801,10 +1798,9 @@ tstate_delete_common(PyThreadState *tstate, int release_gil) static void zapthreads(PyInterpreterState *interp) { - PyThreadState *tstate; /* No need to lock the mutex here because this should only happen when the threads are all really dead (XXX famous last words). */ - while ((tstate = interp->threads.head) != NULL) { + _Py_FOR_EACH_TSTATE_UNLOCKED(interp, tstate) { tstate_verify_not_active(tstate); tstate_delete_common(tstate, 0); free_threadstate((_PyThreadStateImpl *)tstate); @@ -2161,7 +2157,7 @@ decrement_stoptheworld_countdown(struct _stoptheworld_state *stw) } #ifdef Py_GIL_DISABLED -// Interpreter for _Py_FOR_EACH_THREAD(). For global stop-the-world events, +// Interpreter for _Py_FOR_EACH_STW_INTERP(). For global stop-the-world events, // we start with the first interpreter and then iterate over all interpreters. // For per-interpreter stop-the-world events, we only operate on the one // interpreter. @@ -2176,10 +2172,9 @@ interp_for_stop_the_world(struct _stoptheworld_state *stw) // Loops over threads for a stop-the-world event. // For global: all threads in all interpreters // For per-interpreter: all threads in the interpreter -#define _Py_FOR_EACH_THREAD(stw, i, t) \ - for (i = interp_for_stop_the_world((stw)); \ - i != NULL; i = ((stw->is_global) ? i->next : NULL)) \ - for (t = i->threads.head; t; t = t->next) +#define _Py_FOR_EACH_STW_INTERP(stw, i) \ + for (PyInterpreterState *i = interp_for_stop_the_world((stw)); \ + i != NULL; i = ((stw->is_global) ? 
i->next : NULL)) // Try to transition threads atomically from the "detached" state to the @@ -2188,19 +2183,19 @@ static bool park_detached_threads(struct _stoptheworld_state *stw) { int num_parked = 0; - PyInterpreterState *i; - PyThreadState *t; - _Py_FOR_EACH_THREAD(stw, i, t) { - int state = _Py_atomic_load_int_relaxed(&t->state); - if (state == _Py_THREAD_DETACHED) { - // Atomically transition to "suspended" if in "detached" state. - if (_Py_atomic_compare_exchange_int(&t->state, - &state, _Py_THREAD_SUSPENDED)) { - num_parked++; + _Py_FOR_EACH_STW_INTERP(stw, i) { + _Py_FOR_EACH_TSTATE_UNLOCKED(i, t) { + int state = _Py_atomic_load_int_relaxed(&t->state); + if (state == _Py_THREAD_DETACHED) { + // Atomically transition to "suspended" if in "detached" state. + if (_Py_atomic_compare_exchange_int( + &t->state, &state, _Py_THREAD_SUSPENDED)) { + num_parked++; + } + } + else if (state == _Py_THREAD_ATTACHED && t != stw->requester) { + _Py_set_eval_breaker_bit(t, _PY_EVAL_PLEASE_STOP_BIT); } - } - else if (state == _Py_THREAD_ATTACHED && t != stw->requester) { - _Py_set_eval_breaker_bit(t, _PY_EVAL_PLEASE_STOP_BIT); } } stw->thread_countdown -= num_parked; @@ -2227,12 +2222,12 @@ stop_the_world(struct _stoptheworld_state *stw) stw->stop_event = (PyEvent){0}; // zero-initialize (unset) stw->requester = _PyThreadState_GET(); // may be NULL - PyInterpreterState *i; - PyThreadState *t; - _Py_FOR_EACH_THREAD(stw, i, t) { - if (t != stw->requester) { - // Count all the other threads (we don't wait on ourself). - stw->thread_countdown++; + _Py_FOR_EACH_STW_INTERP(stw, i) { + _Py_FOR_EACH_TSTATE_UNLOCKED(i, t) { + if (t != stw->requester) { + // Count all the other threads (we don't wait on ourself). + stw->thread_countdown++; + } } } @@ -2273,14 +2268,14 @@ start_the_world(struct _stoptheworld_state *stw) stw->requested = 0; stw->world_stopped = 0; // Switch threads back to the detached state. - PyInterpreterState *i; - PyThreadState *t; - _Py_FOR_EACH_THREAD(stw, i, t) { - if (t != stw->requester) { - assert(_Py_atomic_load_int_relaxed(&t->state) == - _Py_THREAD_SUSPENDED); - _Py_atomic_store_int(&t->state, _Py_THREAD_DETACHED); - _PyParkingLot_UnparkAll(&t->state); + _Py_FOR_EACH_STW_INTERP(stw, i) { + _Py_FOR_EACH_TSTATE_UNLOCKED(i, t) { + if (t != stw->requester) { + assert(_Py_atomic_load_int_relaxed(&t->state) == + _Py_THREAD_SUSPENDED); + _Py_atomic_store_int(&t->state, _Py_THREAD_DETACHED); + _PyParkingLot_UnparkAll(&t->state); + } } } stw->requester = NULL; @@ -2344,7 +2339,6 @@ _PyEval_StartTheWorld(PyInterpreterState *interp) int PyThreadState_SetAsyncExc(unsigned long id, PyObject *exc) { - _PyRuntimeState *runtime = &_PyRuntime; PyInterpreterState *interp = _PyInterpreterState_GET(); /* Although the GIL is held, a few C API functions can be called @@ -2353,12 +2347,16 @@ PyThreadState_SetAsyncExc(unsigned long id, PyObject *exc) * list of thread states we're traversing, so to prevent that we lock * head_mutex for the duration. 
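     * (With this change the list is locked by the _Py_FOR_EACH_TSTATE_BEGIN/END
     *  pair used below, rather than by an explicit HEAD_LOCK/HEAD_UNLOCK.)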
*/ - HEAD_LOCK(runtime); - for (PyThreadState *tstate = interp->threads.head; tstate != NULL; tstate = tstate->next) { - if (tstate->thread_id != id) { - continue; + PyThreadState *tstate = NULL; + _Py_FOR_EACH_TSTATE_BEGIN(interp, t) { + if (t->thread_id == id) { + tstate = t; + break; } + } + _Py_FOR_EACH_TSTATE_END(interp); + if (tstate != NULL) { /* Tricky: we need to decref the current value * (if any) in tstate->async_exc, but that can in turn * allow arbitrary Python code to run, including @@ -2368,14 +2366,12 @@ PyThreadState_SetAsyncExc(unsigned long id, PyObject *exc) */ Py_XINCREF(exc); PyObject *old_exc = _Py_atomic_exchange_ptr(&tstate->async_exc, exc); - HEAD_UNLOCK(runtime); Py_XDECREF(old_exc); _Py_set_eval_breaker_bit(tstate, _PY_ASYNC_EXCEPTION_BIT); - return 1; } - HEAD_UNLOCK(runtime); - return 0; + + return tstate != NULL; } //--------------------------------- @@ -2515,8 +2511,7 @@ _PyThread_CurrentFrames(void) HEAD_LOCK(runtime); PyInterpreterState *i; for (i = runtime->interpreters.head; i != NULL; i = i->next) { - PyThreadState *t; - for (t = i->threads.head; t != NULL; t = t->next) { + _Py_FOR_EACH_TSTATE_UNLOCKED(i, t) { _PyInterpreterFrame *frame = t->current_frame; frame = _PyFrame_GetFirstComplete(frame); if (frame == NULL) { @@ -2581,8 +2576,7 @@ _PyThread_CurrentExceptions(void) HEAD_LOCK(runtime); PyInterpreterState *i; for (i = runtime->interpreters.head; i != NULL; i = i->next) { - PyThreadState *t; - for (t = i->threads.head; t != NULL; t = t->next) { + _Py_FOR_EACH_TSTATE_UNLOCKED(i, t) { _PyErr_StackItem *err_info = _PyErr_GetTopmostException(t); if (err_info == NULL) { continue; diff --git a/Python/specialize.c b/Python/specialize.c index 0699e7be5e6b9c..c69f61c8b449a1 100644 --- a/Python/specialize.c +++ b/Python/specialize.c @@ -719,7 +719,7 @@ specialize(_Py_CODEUNIT *instr, uint8_t specialized_opcode) } static inline void -unspecialize(_Py_CODEUNIT *instr, int reason) +unspecialize(_Py_CODEUNIT *instr) { assert(!PyErr_Occurred()); uint8_t opcode = FT_ATOMIC_LOAD_UINT8_RELAXED(instr->op.code); @@ -729,7 +729,6 @@ unspecialize(_Py_CODEUNIT *instr, int reason) SPECIALIZATION_FAIL(generic_opcode, SPEC_FAIL_OTHER); return; } - SPECIALIZATION_FAIL(generic_opcode, reason); _Py_BackoffCounter *counter = (_Py_BackoffCounter *)instr + 1; _Py_BackoffCounter cur = load_counter(counter); set_counter(counter, adaptive_counter_backoff(cur)); @@ -1219,7 +1218,7 @@ _Py_Specialize_LoadAttr(_PyStackRef owner_st, _Py_CODEUNIT *instr, PyObject *nam SPECIALIZATION_FAIL(LOAD_ATTR, SPEC_FAIL_OTHER); fail = true; } - else if (PyModule_CheckExact(owner)) { + else if (Py_TYPE(owner)->tp_getattro == PyModule_Type.tp_getattro) { fail = specialize_module_load_attr(owner, instr, name); } else if (PyType_Check(owner)) { @@ -1520,12 +1519,12 @@ PyObject *descr, DescriptorClassification kind, bool is_method) return 1; } -void -_Py_Specialize_LoadGlobal( +static void +specialize_load_global_lock_held( PyObject *globals, PyObject *builtins, _Py_CODEUNIT *instr, PyObject *name) { - assert(ENABLE_SPECIALIZATION); + assert(ENABLE_SPECIALIZATION_FT); assert(_PyOpcode_Caches[LOAD_GLOBAL] == INLINE_CACHE_ENTRIES_LOAD_GLOBAL); /* Use inline cache */ _PyLoadGlobalCache *cache = (_PyLoadGlobalCache *)(instr + 1); @@ -1550,8 +1549,8 @@ _Py_Specialize_LoadGlobal( SPECIALIZATION_FAIL(LOAD_GLOBAL, SPEC_FAIL_OUT_OF_RANGE); goto fail; } - uint32_t keys_version = _PyDictKeys_GetVersionForCurrentState( - interp, globals_keys); + uint32_t keys_version = _PyDict_GetKeysVersionForCurrentState( + interp, 
(PyDictObject*) globals); if (keys_version == 0) { SPECIALIZATION_FAIL(LOAD_GLOBAL, SPEC_FAIL_OUT_OF_VERSIONS); goto fail; @@ -1562,8 +1561,8 @@ _Py_Specialize_LoadGlobal( } cache->index = (uint16_t)index; cache->module_keys_version = (uint16_t)keys_version; - instr->op.code = LOAD_GLOBAL_MODULE; - goto success; + specialize(instr, LOAD_GLOBAL_MODULE); + return; } if (!PyDict_CheckExact(builtins)) { SPECIALIZATION_FAIL(LOAD_GLOBAL, SPEC_FAIL_LOAD_GLOBAL_NON_DICT); @@ -1583,8 +1582,8 @@ _Py_Specialize_LoadGlobal( SPECIALIZATION_FAIL(LOAD_GLOBAL, SPEC_FAIL_OUT_OF_RANGE); goto fail; } - uint32_t globals_version = _PyDictKeys_GetVersionForCurrentState( - interp, globals_keys); + uint32_t globals_version = _PyDict_GetKeysVersionForCurrentState( + interp, (PyDictObject*) globals); if (globals_version == 0) { SPECIALIZATION_FAIL(LOAD_GLOBAL, SPEC_FAIL_OUT_OF_VERSIONS); goto fail; @@ -1593,8 +1592,8 @@ _Py_Specialize_LoadGlobal( SPECIALIZATION_FAIL(LOAD_GLOBAL, SPEC_FAIL_OUT_OF_RANGE); goto fail; } - uint32_t builtins_version = _PyDictKeys_GetVersionForCurrentState( - interp, builtin_keys); + uint32_t builtins_version = _PyDict_GetKeysVersionForCurrentState( + interp, (PyDictObject*) builtins); if (builtins_version == 0) { SPECIALIZATION_FAIL(LOAD_GLOBAL, SPEC_FAIL_OUT_OF_VERSIONS); goto fail; @@ -1606,18 +1605,20 @@ _Py_Specialize_LoadGlobal( cache->index = (uint16_t)index; cache->module_keys_version = (uint16_t)globals_version; cache->builtin_keys_version = (uint16_t)builtins_version; - instr->op.code = LOAD_GLOBAL_BUILTIN; - goto success; -fail: - STAT_INC(LOAD_GLOBAL, failure); - assert(!PyErr_Occurred()); - instr->op.code = LOAD_GLOBAL; - cache->counter = adaptive_counter_backoff(cache->counter); + specialize(instr, LOAD_GLOBAL_BUILTIN); return; -success: - STAT_INC(LOAD_GLOBAL, success); - assert(!PyErr_Occurred()); - cache->counter = adaptive_counter_cooldown(); +fail: + unspecialize(instr); +} + +void +_Py_Specialize_LoadGlobal( + PyObject *globals, PyObject *builtins, + _Py_CODEUNIT *instr, PyObject *name) +{ + Py_BEGIN_CRITICAL_SECTION2(globals, builtins); + specialize_load_global_lock_held(globals, builtins, instr, name); + Py_END_CRITICAL_SECTION2(); } #ifdef Py_STATS @@ -2243,6 +2244,7 @@ _Py_Specialize_CallKw(_PyStackRef callable_st, _Py_CODEUNIT *instr, int nargs) } } +#ifdef Py_STATS static int binary_op_fail_kind(int oparg, PyObject *lhs, PyObject *rhs) { @@ -2310,6 +2312,7 @@ binary_op_fail_kind(int oparg, PyObject *lhs, PyObject *rhs) } Py_UNREACHABLE(); } +#endif void _Py_Specialize_BinaryOp(_PyStackRef lhs_st, _PyStackRef rhs_st, _Py_CODEUNIT *instr, @@ -2373,7 +2376,8 @@ _Py_Specialize_BinaryOp(_PyStackRef lhs_st, _PyStackRef rhs_st, _Py_CODEUNIT *in } break; } - unspecialize(instr, binary_op_fail_kind(oparg, lhs, rhs)); + SPECIALIZATION_FAIL(BINARY_OP, binary_op_fail_kind(oparg, lhs, rhs)); + unspecialize(instr); } @@ -2663,103 +2667,109 @@ _Py_Specialize_Send(_PyStackRef receiver_st, _Py_CODEUNIT *instr) cache->counter = adaptive_counter_cooldown(); } +#ifdef Py_STATS +static int +to_bool_fail_kind(PyObject *value) +{ + if (PyByteArray_CheckExact(value)) { + return SPEC_FAIL_TO_BOOL_BYTEARRAY; + } + if (PyBytes_CheckExact(value)) { + return SPEC_FAIL_TO_BOOL_BYTES; + } + if (PyDict_CheckExact(value)) { + return SPEC_FAIL_TO_BOOL_DICT; + } + if (PyFloat_CheckExact(value)) { + return SPEC_FAIL_TO_BOOL_FLOAT; + } + if (PyMemoryView_Check(value)) { + return SPEC_FAIL_TO_BOOL_MEMORY_VIEW; + } + if (PyAnySet_CheckExact(value)) { + return SPEC_FAIL_TO_BOOL_SET; + } + if 
(PyTuple_CheckExact(value)) { + return SPEC_FAIL_TO_BOOL_TUPLE; + } + return SPEC_FAIL_OTHER; +} +#endif // Py_STATS + +static int +check_type_always_true(PyTypeObject *ty) +{ + PyNumberMethods *nb = ty->tp_as_number; + if (nb && nb->nb_bool) { + return SPEC_FAIL_TO_BOOL_NUMBER; + } + PyMappingMethods *mp = ty->tp_as_mapping; + if (mp && mp->mp_length) { + return SPEC_FAIL_TO_BOOL_MAPPING; + } + PySequenceMethods *sq = ty->tp_as_sequence; + if (sq && sq->sq_length) { + return SPEC_FAIL_TO_BOOL_SEQUENCE; + } + return 0; +} + void _Py_Specialize_ToBool(_PyStackRef value_o, _Py_CODEUNIT *instr) { - assert(ENABLE_SPECIALIZATION); + assert(ENABLE_SPECIALIZATION_FT); assert(_PyOpcode_Caches[TO_BOOL] == INLINE_CACHE_ENTRIES_TO_BOOL); _PyToBoolCache *cache = (_PyToBoolCache *)(instr + 1); PyObject *value = PyStackRef_AsPyObjectBorrow(value_o); + uint8_t specialized_op; if (PyBool_Check(value)) { - instr->op.code = TO_BOOL_BOOL; + specialized_op = TO_BOOL_BOOL; goto success; } if (PyLong_CheckExact(value)) { - instr->op.code = TO_BOOL_INT; + specialized_op = TO_BOOL_INT; goto success; } if (PyList_CheckExact(value)) { - instr->op.code = TO_BOOL_LIST; + specialized_op = TO_BOOL_LIST; goto success; } if (Py_IsNone(value)) { - instr->op.code = TO_BOOL_NONE; + specialized_op = TO_BOOL_NONE; goto success; } if (PyUnicode_CheckExact(value)) { - instr->op.code = TO_BOOL_STR; + specialized_op = TO_BOOL_STR; goto success; } if (PyType_HasFeature(Py_TYPE(value), Py_TPFLAGS_HEAPTYPE)) { - PyNumberMethods *nb = Py_TYPE(value)->tp_as_number; - if (nb && nb->nb_bool) { - SPECIALIZATION_FAIL(TO_BOOL, SPEC_FAIL_TO_BOOL_NUMBER); - goto failure; - } - PyMappingMethods *mp = Py_TYPE(value)->tp_as_mapping; - if (mp && mp->mp_length) { - SPECIALIZATION_FAIL(TO_BOOL, SPEC_FAIL_TO_BOOL_MAPPING); - goto failure; - } - PySequenceMethods *sq = Py_TYPE(value)->tp_as_sequence; - if (sq && sq->sq_length) { - SPECIALIZATION_FAIL(TO_BOOL, SPEC_FAIL_TO_BOOL_SEQUENCE); - goto failure; - } - if (!PyUnstable_Type_AssignVersionTag(Py_TYPE(value))) { + unsigned int version = 0; + int err = _PyType_Validate(Py_TYPE(value), check_type_always_true, &version); + if (err < 0) { SPECIALIZATION_FAIL(TO_BOOL, SPEC_FAIL_OUT_OF_VERSIONS); goto failure; } - uint32_t version = type_get_version(Py_TYPE(value), TO_BOOL); - if (version == 0) { + else if (err > 0) { + SPECIALIZATION_FAIL(TO_BOOL, err); goto failure; } - instr->op.code = TO_BOOL_ALWAYS_TRUE; - write_u32(cache->version, version); + + assert(err == 0); assert(version); + write_u32(cache->version, version); + specialized_op = TO_BOOL_ALWAYS_TRUE; goto success; } -#ifdef Py_STATS - if (PyByteArray_CheckExact(value)) { - SPECIALIZATION_FAIL(TO_BOOL, SPEC_FAIL_TO_BOOL_BYTEARRAY); - goto failure; - } - if (PyBytes_CheckExact(value)) { - SPECIALIZATION_FAIL(TO_BOOL, SPEC_FAIL_TO_BOOL_BYTES); - goto failure; - } - if (PyDict_CheckExact(value)) { - SPECIALIZATION_FAIL(TO_BOOL, SPEC_FAIL_TO_BOOL_DICT); - goto failure; - } - if (PyFloat_CheckExact(value)) { - SPECIALIZATION_FAIL(TO_BOOL, SPEC_FAIL_TO_BOOL_FLOAT); - goto failure; - } - if (PyMemoryView_Check(value)) { - SPECIALIZATION_FAIL(TO_BOOL, SPEC_FAIL_TO_BOOL_MEMORY_VIEW); - goto failure; - } - if (PyAnySet_CheckExact(value)) { - SPECIALIZATION_FAIL(TO_BOOL, SPEC_FAIL_TO_BOOL_SET); - goto failure; - } - if (PyTuple_CheckExact(value)) { - SPECIALIZATION_FAIL(TO_BOOL, SPEC_FAIL_TO_BOOL_TUPLE); - goto failure; - } - SPECIALIZATION_FAIL(TO_BOOL, SPEC_FAIL_OTHER); -#endif // Py_STATS + + SPECIALIZATION_FAIL(TO_BOOL, to_bool_fail_kind(value)); 
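+    /* Fall through to the shared failure path: unspecialize() applies the
+     * adaptive back-off counter so specialization of this TO_BOOL instruction
+     * will be attempted again later. */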
failure: - STAT_INC(TO_BOOL, failure); - instr->op.code = TO_BOOL; - cache->counter = adaptive_counter_backoff(cache->counter); + unspecialize(instr); return; success: - STAT_INC(TO_BOOL, success); - cache->counter = adaptive_counter_cooldown(); + specialize(instr, specialized_op); } +#ifdef Py_STATS static int containsop_fail_kind(PyObject *value) { if (PyUnicode_CheckExact(value)) { @@ -2776,6 +2786,7 @@ containsop_fail_kind(PyObject *value) { } return SPEC_FAIL_OTHER; } +#endif void _Py_Specialize_ContainsOp(_PyStackRef value_st, _Py_CODEUNIT *instr) @@ -2793,7 +2804,8 @@ _Py_Specialize_ContainsOp(_PyStackRef value_st, _Py_CODEUNIT *instr) return; } - unspecialize(instr, containsop_fail_kind(value)); + SPECIALIZATION_FAIL(CONTAINS_OP, containsop_fail_kind(value)); + unspecialize(instr); return; } diff --git a/README.rst b/README.rst index 3f694771e090cb..29db15b41d2c72 100644 --- a/README.rst +++ b/README.rst @@ -1,4 +1,4 @@ -This is Python version 3.14.0 alpha 1 +This is Python version 3.14.0 alpha 2 ===================================== .. image:: https://github.com/python/cpython/actions/workflows/build.yml/badge.svg?branch=main&event=push @@ -14,7 +14,7 @@ This is Python version 3.14.0 alpha 1 :target: https://discuss.python.org/ -Copyright © 2001-2024 Python Software Foundation. All rights reserved. +Copyright © 2001 Python Software Foundation. All rights reserved. See the end of this file for further copyright and license information. @@ -215,7 +215,7 @@ Copyright and License Information --------------------------------- -Copyright © 2001-2024 Python Software Foundation. All rights reserved. +Copyright © 2001 Python Software Foundation. All rights reserved. Copyright © 2000 BeOpen.com. All rights reserved. diff --git a/Tools/build/generate_sbom.py b/Tools/build/generate_sbom.py index 020f874cffeaef..5c4a725102d79a 100644 --- a/Tools/build/generate_sbom.py +++ b/Tools/build/generate_sbom.py @@ -59,6 +59,8 @@ class PackageFiles(typing.NamedTuple): include=["Modules/expat/**"], exclude=[ "Modules/expat/expat_config.h", + "Modules/expat/pyexpatns.h", + "Modules/expat/refresh.sh", ] ), "macholib": PackageFiles( @@ -218,6 +220,32 @@ def check_sbom_packages(sbom_data: dict[str, typing.Any]) -> None: "HACL* SBOM version doesn't match value in 'Modules/_hacl/refresh.sh'" ) + # libexpat specifies its expected rev in a refresh script. + if package["name"] == "libexpat": + libexpat_refresh_sh = (CPYTHON_ROOT_DIR / "Modules/expat/refresh.sh").read_text() + libexpat_expected_version_match = re.search( + r"expected_libexpat_version=\"([0-9]+\.[0-9]+\.[0-9]+)\"", + libexpat_refresh_sh + ) + libexpat_expected_sha256_match = re.search( + r"expected_libexpat_sha256=\"([a-f0-9]{64})\"", + libexpat_refresh_sh + ) + libexpat_expected_version = libexpat_expected_version_match and libexpat_expected_version_match.group(1) + libexpat_expected_sha256 = libexpat_expected_sha256_match and libexpat_expected_sha256_match.group(1) + + error_if( + libexpat_expected_version != version, + "libexpat SBOM version doesn't match value in 'Modules/expat/refresh.sh'" + ) + error_if( + package["checksums"] != [{ + "algorithm": "SHA256", + "checksumValue": libexpat_expected_sha256 + }], + "libexpat SBOM checksum doesn't match value in 'Modules/expat/refresh.sh'" + ) + # License must be on the approved list for SPDX.
license_concluded = package["licenseConcluded"] error_if( diff --git a/Tools/build/regen-configure.sh b/Tools/build/regen-configure.sh index e1ecefddeb8732..d2a613b1e40dc1 100755 --- a/Tools/build/regen-configure.sh +++ b/Tools/build/regen-configure.sh @@ -5,7 +5,7 @@ set -e -x # The check_autoconf_regen job of .github/workflows/build.yml must kept in # sync with this script. Use the same container image than the job so the job # doesn't need to run autoreconf in a container. -IMAGE="ghcr.io/python/autoconf:2024.10.16.11360930377" +IMAGE="ghcr.io/python/autoconf:2024.11.11.11786316759" AUTORECONF="autoreconf -ivf -Werror" WORK_DIR="/src" diff --git a/Tools/c-analyzer/cpython/_analyzer.py b/Tools/c-analyzer/cpython/_analyzer.py index f07fa8af495e17..6204353e9bd26a 100644 --- a/Tools/c-analyzer/cpython/_analyzer.py +++ b/Tools/c-analyzer/cpython/_analyzer.py @@ -280,12 +280,26 @@ def _is_kwlist(decl): vartype = ''.join(str(decl.vartype).split()) return vartype == 'char*[]' +def _is_local_static_mutex(decl): + if not hasattr(decl, "vartype"): + return False + + if not hasattr(decl, "parent") or decl.parent is None: + # We only want to allow local variables + return False + + vartype = decl.vartype + return (vartype.typespec == 'PyMutex') and (decl.storage == 'static') def _has_other_supported_type(decl): if hasattr(decl, 'file') and decl.file.filename.endswith('.c.h'): assert 'clinic' in decl.file.filename, (decl,) if decl.name == '_kwtuple': return True + if _is_local_static_mutex(decl): + # GH-127081: Local static mutexes are used to + # wrap libc functions that aren't thread safe + return True vartype = str(decl.vartype).split() if vartype[0] == 'struct': vartype = vartype[1:] diff --git a/Tools/cases_generator/analyzer.py b/Tools/cases_generator/analyzer.py index a725ec10d4e52a..e02e07ec748231 100644 --- a/Tools/cases_generator/analyzer.py +++ b/Tools/cases_generator/analyzer.py @@ -200,7 +200,7 @@ def why_not_viable(self) -> str | None: return "has tier 1 control flow" if self.properties.needs_this: return "uses the 'this_instr' variable" - if len([c for c in self.caches if c.name != "unused"]) > 1: + if len([c for c in self.caches if c.name != "unused"]) > 2: return "has unused cache entries" if self.properties.error_with_pop and self.properties.error_without_pop: return "has both popping and not-popping errors" @@ -623,6 +623,9 @@ def has_error_without_pop(op: parser.InstDef) -> bool: "_Py_NewRef", "_Py_SINGLETON", "_Py_STR", + "_Py_TryIncrefCompare", + "_Py_TryIncrefCompareStackRef", + "_Py_atomic_load_ptr_acquire", "_Py_atomic_load_uintptr_relaxed", "_Py_set_eval_breaker_bit", "advance_backoff_counter", diff --git a/Tools/cases_generator/optimizer_generator.py b/Tools/cases_generator/optimizer_generator.py index 7a1dfe1b85bf1a..d08b621aed552b 100644 --- a/Tools/cases_generator/optimizer_generator.py +++ b/Tools/cases_generator/optimizer_generator.py @@ -143,7 +143,7 @@ def write_uop( else: type = f"uint{cache.size*16}_t " cast = f"uint{cache.size*16}_t" - out.emit(f"{type}{cache.name} = ({cast})this_instr->operand;\n") + out.emit(f"{type}{cache.name} = ({cast})this_instr->operand0;\n") if override: emitter = OptimizerEmitter(out) # No reference management of inputs needed. 
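Context for the operand renames in the hunks above and below: a _PyUOpInstruction now carries two 64-bit payload slots, operand0 (the old operand) and operand1 (new, used for example by _CHECK_FUNCTION_VERSION_INLINE and surfaced to the JIT as _JIT_OPERAND1 in the Tools/jit changes further down). The following is an illustrative sketch only, not code from this patch; the field and opcode names come from the hunks above, while the local variables and values are hypothetical:

    /* Hypothetical sketch; assumes CPython's internal _PyUOpInstruction type
     * and a constant PyFunctionObject* obtained elsewhere (cached_callable). */
    uint32_t func_version = 42;                  /* hypothetical version number */
    _PyUOpInstruction inst;
    inst.opcode   = _CHECK_FUNCTION_VERSION_INLINE;
    inst.oparg    = 0;
    inst.operand0 = (uint64_t)func_version;      /* version checked at run time */
    inst.operand1 = (uintptr_t)cached_callable;  /* function whose version is checked */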
diff --git a/Tools/cases_generator/tier2_generator.py b/Tools/cases_generator/tier2_generator.py index ce761495cca435..dd16a1a7eb28b5 100644 --- a/Tools/cases_generator/tier2_generator.py +++ b/Tools/cases_generator/tier2_generator.py @@ -181,14 +181,14 @@ def write_uop(uop: Uop, emitter: Emitter, stack: Stack) -> Stack: code_list, storage = Storage.for_uop(stack, uop) for code in code_list: emitter.emit(code) - for cache in uop.caches: + for idx, cache in enumerate(uop.caches): if cache.name != "unused": if cache.size == 4: type = cast = "PyObject *" else: type = f"uint{cache.size*16}_t " cast = f"uint{cache.size*16}_t" - emitter.emit(f"{type}{cache.name} = ({cast})CURRENT_OPERAND();\n") + emitter.emit(f"{type}{cache.name} = ({cast})CURRENT_OPERAND{idx}();\n") storage = emitter.emit_tokens(uop, storage, None) except StackError as ex: raise analysis_error(ex.args[0], uop.body[0]) from None diff --git a/Tools/i18n/pygettext.py b/Tools/i18n/pygettext.py index 3a0b27ba420e7a..0d16e8f7da0071 100755 --- a/Tools/i18n/pygettext.py +++ b/Tools/i18n/pygettext.py @@ -341,6 +341,9 @@ def __waiting(self, ttype, tstring, lineno): if ttype == tokenize.NAME and tstring in ('class', 'def'): self.__state = self.__suiteseen return + if ttype == tokenize.NAME and tstring in ('class', 'def'): + self.__state = self.__ignorenext + return if ttype == tokenize.NAME and tstring in opts.keywords: self.__state = self.__keywordseen return @@ -448,6 +451,9 @@ def __openseen(self, ttype, tstring, lineno): }, file=sys.stderr) self.__state = self.__waiting + def __ignorenext(self, ttype, tstring, lineno): + self.__state = self.__waiting + def __addentry(self, msg, lineno=None, isdocstring=0): if lineno is None: lineno = self.__lineno diff --git a/Tools/jit/_stencils.py b/Tools/jit/_stencils.py index 2cd051b0a77b8d..61be8fd3bbdf55 100644 --- a/Tools/jit/_stencils.py +++ b/Tools/jit/_stencils.py @@ -29,11 +29,16 @@ class HoleValue(enum.Enum): GOT = enum.auto() # The current uop's oparg (exposed as _JIT_OPARG): OPARG = enum.auto() - # The current uop's operand on 64-bit platforms (exposed as _JIT_OPERAND): - OPERAND = enum.auto() - # The current uop's operand on 32-bit platforms (exposed as _JIT_OPERAND_HI/LO): - OPERAND_HI = enum.auto() - OPERAND_LO = enum.auto() + # The current uop's operand0 on 64-bit platforms (exposed as _JIT_OPERAND0): + OPERAND0 = enum.auto() + # The current uop's operand0 on 32-bit platforms (exposed as _JIT_OPERAND0_HI/LO): + OPERAND0_HI = enum.auto() + OPERAND0_LO = enum.auto() + # The current uop's operand1 on 64-bit platforms (exposed as _JIT_OPERAND1): + OPERAND1 = enum.auto() + # The current uop's operand1 on 32-bit platforms (exposed as _JIT_OPERAND1_HI/LO): + OPERAND1_HI = enum.auto() + OPERAND1_LO = enum.auto() # The current uop's target (exposed as _JIT_TARGET): TARGET = enum.auto() # The base address of the machine code for the jump target (exposed as _JIT_JUMP_TARGET): @@ -99,9 +104,12 @@ class HoleValue(enum.Enum): # These should all have been turned into DATA values by process_relocations: # HoleValue.GOT: "", HoleValue.OPARG: "instruction->oparg", - HoleValue.OPERAND: "instruction->operand", - HoleValue.OPERAND_HI: "(instruction->operand >> 32)", - HoleValue.OPERAND_LO: "(instruction->operand & UINT32_MAX)", + HoleValue.OPERAND0: "instruction->operand0", + HoleValue.OPERAND0_HI: "(instruction->operand0 >> 32)", + HoleValue.OPERAND0_LO: "(instruction->operand0 & UINT32_MAX)", + HoleValue.OPERAND1: "instruction->operand1", + HoleValue.OPERAND1_HI: "(instruction->operand1 >> 32)", + 
HoleValue.OPERAND1_LO: "(instruction->operand1 & UINT32_MAX)", HoleValue.TARGET: "instruction->target", HoleValue.JUMP_TARGET: "state->instruction_starts[instruction->jump_target]", HoleValue.ERROR_TARGET: "state->instruction_starts[instruction->error_target]", diff --git a/Tools/jit/template.c b/Tools/jit/template.c index 57c1006ab423e9..95c90bda70f352 100644 --- a/Tools/jit/template.c +++ b/Tools/jit/template.c @@ -26,8 +26,11 @@ #undef CURRENT_OPARG #define CURRENT_OPARG() (_oparg) -#undef CURRENT_OPERAND -#define CURRENT_OPERAND() (_operand) +#undef CURRENT_OPERAND0 +#define CURRENT_OPERAND0() (_operand0) + +#undef CURRENT_OPERAND1 +#define CURRENT_OPERAND1() (_operand1) #undef DEOPT_IF #define DEOPT_IF(COND, INSTNAME) \ @@ -99,12 +102,17 @@ _JIT_ENTRY(_PyInterpreterFrame *frame, _PyStackRef *stack_pointer, PyThreadState // Other stuff we need handy: PATCH_VALUE(uint16_t, _oparg, _JIT_OPARG) #if SIZEOF_VOID_P == 8 - PATCH_VALUE(uint64_t, _operand, _JIT_OPERAND) + PATCH_VALUE(uint64_t, _operand0, _JIT_OPERAND0) + PATCH_VALUE(uint64_t, _operand1, _JIT_OPERAND1) #else assert(SIZEOF_VOID_P == 4); - PATCH_VALUE(uint32_t, _operand_hi, _JIT_OPERAND_HI) - PATCH_VALUE(uint32_t, _operand_lo, _JIT_OPERAND_LO) - uint64_t _operand = ((uint64_t)_operand_hi << 32) | _operand_lo; + PATCH_VALUE(uint32_t, _operand0_hi, _JIT_OPERAND0_HI) + PATCH_VALUE(uint32_t, _operand0_lo, _JIT_OPERAND0_LO) + uint64_t _operand0 = ((uint64_t)_operand0_hi << 32) | _operand0_lo; + + PATCH_VALUE(uint32_t, _operand1_hi, _JIT_OPERAND1_HI) + PATCH_VALUE(uint32_t, _operand1_lo, _JIT_OPERAND1_LO) + uint64_t _operand1 = ((uint64_t)_operand1_hi << 32) | _operand1_lo; #endif PATCH_VALUE(uint32_t, _target, _JIT_TARGET) diff --git a/Tools/msi/bundle/Default.wxl b/Tools/msi/bundle/Default.wxl index 49f681d3e11d2e..7208d83ddae61b 100644 --- a/Tools/msi/bundle/Default.wxl +++ b/Tools/msi/bundle/Default.wxl @@ -70,8 +70,8 @@ Select Customize to review current options. Installs the Python documentation files. &pip Installs pip, which can download and install other Python packages. - tcl/tk and &IDLE - Installs tkinter and the IDLE development environment. + Tcl/Tk, turtle and &IDLE + Installs tkinter, turtle and the IDLE development environment. Python &test suite Installs the standard library test suite. 
py &launcher diff --git a/Tools/msi/freethreaded/freethreaded_files.wxs b/Tools/msi/freethreaded/freethreaded_files.wxs index 367fd978efd484..b3ce28e7aedc84 100644 --- a/Tools/msi/freethreaded/freethreaded_files.wxs +++ b/Tools/msi/freethreaded/freethreaded_files.wxs @@ -103,7 +103,7 @@ - + diff --git a/Tools/msi/lib/lib.wixproj b/Tools/msi/lib/lib.wixproj index 26311ea32724d1..02078e503d74a4 100644 --- a/Tools/msi/lib/lib.wixproj +++ b/Tools/msi/lib/lib.wixproj @@ -19,6 +19,7 @@ @@ -32,4 +33,4 @@ - \ No newline at end of file + diff --git a/Tools/msi/tcltk/tcltk.wixproj b/Tools/msi/tcltk/tcltk.wixproj index 218f3d15ec88fc..c8b7ab77c4dc6b 100644 --- a/Tools/msi/tcltk/tcltk.wixproj +++ b/Tools/msi/tcltk/tcltk.wixproj @@ -28,7 +28,7 @@ tcltk_lib - $(PySourcePath) !(bindpath.src) @@ -39,4 +39,4 @@ - \ No newline at end of file + diff --git a/Tools/unicode/makeunicodedata.py b/Tools/unicode/makeunicodedata.py index c94de7f9377b74..889ae8fc869b8a 100644 --- a/Tools/unicode/makeunicodedata.py +++ b/Tools/unicode/makeunicodedata.py @@ -35,7 +35,7 @@ from textwrap import dedent from typing import Iterator, List, Optional, Set, Tuple -SCRIPT = sys.argv[0] +SCRIPT = os.path.normpath(sys.argv[0]) VERSION = "3.3" # The Unicode Database diff --git a/Tools/wasm/README.md b/Tools/wasm/README.md index 4c9a643b0d9d74..3f4211fb1dfb28 100644 --- a/Tools/wasm/README.md +++ b/Tools/wasm/README.md @@ -21,12 +21,6 @@ https://github.com/psf/webassembly for more information. ### Build -For now the build system has two target flavors. The ``Emscripten/browser`` -target (``--with-emscripten-target=browser``) is optimized for browsers. -It comes with a reduced and preloaded stdlib without tests and threading -support. The ``Emscripten/node`` target has threading enabled and can -access the file system directly. - To cross compile to the ``wasm32-emscripten`` platform you need [the Emscripten compiler toolchain](https://emscripten.org/), a Python interpreter, and an installation of Node version 18 or newer. 
Emscripten diff --git a/Tools/wasm/emscripten/__main__.py b/Tools/wasm/emscripten/__main__.py index 2015a3764ea8c8..9ce8dd6a364ad6 100644 --- a/Tools/wasm/emscripten/__main__.py +++ b/Tools/wasm/emscripten/__main__.py @@ -4,20 +4,22 @@ import contextlib import functools import os - -try: - from os import process_cpu_count as cpu_count -except ImportError: - from os import cpu_count -from pathlib import Path import shutil import subprocess import sys import sysconfig import tempfile +from pathlib import Path +from textwrap import dedent + +try: + from os import process_cpu_count as cpu_count +except ImportError: + from os import cpu_count -WASM_DIR = Path(__file__).parent.parent -CHECKOUT = WASM_DIR.parent.parent + +EMSCRIPTEN_DIR = Path(__file__).parent +CHECKOUT = EMSCRIPTEN_DIR.parent.parent.parent CROSS_BUILD_DIR = CHECKOUT / "cross-build" BUILD_DIR = CROSS_BUILD_DIR / "build" @@ -72,7 +74,7 @@ def wrapper(context): print("⎯" * terminal_width) print("📁", working_dir) if clean_ok and getattr(context, "clean", False) and working_dir.exists(): - print(f"🚮 Deleting directory (--clean)...") + print("🚮 Deleting directory (--clean)...") shutil.rmtree(working_dir) working_dir.mkdir(parents=True, exist_ok=True) @@ -207,9 +209,21 @@ def configure_emscripten_python(context, working_dir): quiet=context.quiet, ) - python_js = working_dir / "python.js" + shutil.copy(EMSCRIPTEN_DIR / "node_entry.mjs", working_dir / "node_entry.mjs") + + node_entry = working_dir / "node_entry.mjs" exec_script = working_dir / "python.sh" - exec_script.write_text(f'#!/bin/sh\nexec {host_runner} {python_js} "$@"\n') + exec_script.write_text( + dedent( + f"""\ + #!/bin/sh + + # We compute our own path, not following symlinks and pass it in so that + # node_entry.mjs can set sys.executable correctly. + exec {host_runner} {node_entry} "$(realpath -s $0)" "$@" + """ + ) + ) exec_script.chmod(0o755) print(f"🏃‍♀️ Created {exec_script} ... ") sys.stdout.flush() diff --git a/Tools/wasm/emscripten/node_entry.mjs b/Tools/wasm/emscripten/node_entry.mjs new file mode 100644 index 00000000000000..cb1c6ff3cba6aa --- /dev/null +++ b/Tools/wasm/emscripten/node_entry.mjs @@ -0,0 +1,30 @@ +import EmscriptenModule from "./python.mjs"; +import { dirname } from 'node:path'; +import { fileURLToPath } from 'node:url'; + +if (process?.versions?.node) { + const nodeVersion = Number(process.versions.node.split(".", 1)[0]); + if (nodeVersion < 18) { + process.stderr.write( + `Node version must be >= 18, got version ${process.version}\n`, + ); + process.exit(1); + } +} + +const settings = { + preRun(Module) { + const __dirname = dirname(fileURLToPath(import.meta.url)); + Module.FS.mkdirTree("/lib/"); + Module.FS.mount(Module.FS.filesystems.NODEFS, { root: __dirname + "/lib/" }, "/lib/"); + }, + // The first three arguments are: "node", path to this file, path to + // python.sh. After that come the arguments the user passed to python.sh. + arguments: process.argv.slice(3), + // Ensure that sys.executable, sys._base_executable, etc point to python.sh + // not to this file. To properly handle symlinks, python.sh needs to compute + // its own path. + thisProgram: process.argv[2], +}; + +await EmscriptenModule(settings); diff --git a/Tools/wasm/emscripten/node_pre.js b/Tools/wasm/emscripten/node_pre.js deleted file mode 100644 index 3490d3ca591ef6..00000000000000 --- a/Tools/wasm/emscripten/node_pre.js +++ /dev/null @@ -1,9 +0,0 @@ -// If process is undefined, we're not running in the node runtime let it go I -// guess? 
-if (typeof process !== "undefined") { - const nodeVersion = Number(process.versions.node.split('.',1)[0]); - if (nodeVersion < 18) { - process.stderr.write(`Node version must be >= 18, got version ${process.version}\n`); - process.exit(1); - } -} diff --git a/aclocal.m4 b/aclocal.m4 index b082a5b1bc5e07..920c2b38560faa 100644 --- a/aclocal.m4 +++ b/aclocal.m4 @@ -91,7 +91,7 @@ m4_ifndef([AC_CONFIG_MACRO_DIRS], [m4_defun([_AM_CONFIG_MACRO_DIRS], [])m4_defun # and this notice are preserved. This file is offered as-is, without any # warranty. -#serial 12 +#serial 14 AC_DEFUN([AX_C_FLOAT_WORDS_BIGENDIAN], [AC_CACHE_CHECK(whether float word ordering is bigendian, @@ -112,10 +112,10 @@ int main (int argc, char *argv[]) ]])], [ -if grep noonsees conftest$EXEEXT >/dev/null ; then +if grep noonsees conftest* > /dev/null ; then ax_cv_c_float_words_bigendian=yes fi -if grep seesnoon conftest$EXEEXT >/dev/null ; then +if grep seesnoon conftest* >/dev/null ; then if test "$ax_cv_c_float_words_bigendian" = unknown; then ax_cv_c_float_words_bigendian=no else @@ -398,7 +398,7 @@ AC_DEFUN([AX_CHECK_OPENSSL], [ AC_SUBST([OPENSSL_LDFLAGS]) ]) -# pkg.m4 - Macros to locate and utilise pkg-config. -*- Autoconf -*- +# pkg.m4 - Macros to locate and use pkg-config. -*- Autoconf -*- # serial 12 (pkg-config-0.29.2) dnl Copyright © 2004 Scott James Remnant . @@ -486,7 +486,7 @@ dnl Check to see whether a particular set of modules exists. Similar to dnl PKG_CHECK_MODULES(), but does not set variables or print errors. dnl dnl Please remember that m4 expands AC_REQUIRE([PKG_PROG_PKG_CONFIG]) -dnl only at the first occurence in configure.ac, so if the first place +dnl only at the first occurrence in configure.ac, so if the first place dnl it's called might be skipped (such as if it is within an "if", you dnl have to call PKG_CHECK_EXISTS manually AC_DEFUN([PKG_CHECK_EXISTS], @@ -555,14 +555,14 @@ if test $pkg_failed = yes; then AC_MSG_RESULT([no]) _PKG_SHORT_ERRORS_SUPPORTED if test $_pkg_short_errors_supported = yes; then - $1[]_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "$2" 2>&1` + $1[]_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "$2" 2>&1` else - $1[]_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "$2" 2>&1` + $1[]_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "$2" 2>&1` fi - # Put the nasty error message in config.log where it belongs - echo "$$1[]_PKG_ERRORS" >&AS_MESSAGE_LOG_FD + # Put the nasty error message in config.log where it belongs + echo "$$1[]_PKG_ERRORS" >&AS_MESSAGE_LOG_FD - m4_default([$4], [AC_MSG_ERROR( + m4_default([$4], [AC_MSG_ERROR( [Package requirements ($2) were not met: $$1_PKG_ERRORS @@ -574,7 +574,7 @@ _PKG_TEXT])[]dnl ]) elif test $pkg_failed = untried; then AC_MSG_RESULT([no]) - m4_default([$4], [AC_MSG_FAILURE( + m4_default([$4], [AC_MSG_FAILURE( [The pkg-config script could not be found or is too old. Make sure it is in your PATH or set the PKG_CONFIG environment variable to the full path to pkg-config. 
@@ -584,10 +584,10 @@ _PKG_TEXT To get pkg-config, see .])[]dnl ]) else - $1[]_CFLAGS=$pkg_cv_[]$1[]_CFLAGS - $1[]_LIBS=$pkg_cv_[]$1[]_LIBS + $1[]_CFLAGS=$pkg_cv_[]$1[]_CFLAGS + $1[]_LIBS=$pkg_cv_[]$1[]_LIBS AC_MSG_RESULT([yes]) - $3 + $3 fi[]dnl ])dnl PKG_CHECK_MODULES @@ -674,6 +674,74 @@ AS_VAR_COPY([$1], [pkg_cv_][$1]) AS_VAR_IF([$1], [""], [$5], [$4])dnl ])dnl PKG_CHECK_VAR +dnl PKG_WITH_MODULES(VARIABLE-PREFIX, MODULES, +dnl [ACTION-IF-FOUND],[ACTION-IF-NOT-FOUND], +dnl [DESCRIPTION], [DEFAULT]) +dnl ------------------------------------------ +dnl +dnl Prepare a "--with-" configure option using the lowercase +dnl [VARIABLE-PREFIX] name, merging the behaviour of AC_ARG_WITH and +dnl PKG_CHECK_MODULES in a single macro. +AC_DEFUN([PKG_WITH_MODULES], +[ +m4_pushdef([with_arg], m4_tolower([$1])) + +m4_pushdef([description], + [m4_default([$5], [build with ]with_arg[ support])]) + +m4_pushdef([def_arg], [m4_default([$6], [auto])]) +m4_pushdef([def_action_if_found], [AS_TR_SH([with_]with_arg)=yes]) +m4_pushdef([def_action_if_not_found], [AS_TR_SH([with_]with_arg)=no]) + +m4_case(def_arg, + [yes],[m4_pushdef([with_without], [--without-]with_arg)], + [m4_pushdef([with_without],[--with-]with_arg)]) + +AC_ARG_WITH(with_arg, + AS_HELP_STRING(with_without, description[ @<:@default=]def_arg[@:>@]),, + [AS_TR_SH([with_]with_arg)=def_arg]) + +AS_CASE([$AS_TR_SH([with_]with_arg)], + [yes],[PKG_CHECK_MODULES([$1],[$2],$3,$4)], + [auto],[PKG_CHECK_MODULES([$1],[$2], + [m4_n([def_action_if_found]) $3], + [m4_n([def_action_if_not_found]) $4])]) + +m4_popdef([with_arg]) +m4_popdef([description]) +m4_popdef([def_arg]) + +])dnl PKG_WITH_MODULES + +dnl PKG_HAVE_WITH_MODULES(VARIABLE-PREFIX, MODULES, +dnl [DESCRIPTION], [DEFAULT]) +dnl ----------------------------------------------- +dnl +dnl Convenience macro to trigger AM_CONDITIONAL after PKG_WITH_MODULES +dnl check._[VARIABLE-PREFIX] is exported as make variable. +AC_DEFUN([PKG_HAVE_WITH_MODULES], +[ +PKG_WITH_MODULES([$1],[$2],,,[$3],[$4]) + +AM_CONDITIONAL([HAVE_][$1], + [test "$AS_TR_SH([with_]m4_tolower([$1]))" = "yes"]) +])dnl PKG_HAVE_WITH_MODULES + +dnl PKG_HAVE_DEFINE_WITH_MODULES(VARIABLE-PREFIX, MODULES, +dnl [DESCRIPTION], [DEFAULT]) +dnl ------------------------------------------------------ +dnl +dnl Convenience macro to run AM_CONDITIONAL and AC_DEFINE after +dnl PKG_WITH_MODULES check. HAVE_[VARIABLE-PREFIX] is exported as make +dnl and preprocessor variable. +AC_DEFUN([PKG_HAVE_DEFINE_WITH_MODULES], +[ +PKG_HAVE_WITH_MODULES([$1],[$2],[$3],[$4]) + +AS_IF([test "$AS_TR_SH([with_]m4_tolower([$1]))" = "yes"], + [AC_DEFINE([HAVE_][$1], 1, [Enable ]m4_tolower([$1])[ support])]) +])dnl PKG_HAVE_DEFINE_WITH_MODULES + # AM_CONDITIONAL -*- Autoconf -*- # Copyright (C) 1997-2021 Free Software Foundation, Inc. diff --git a/configure b/configure index 7a9d9627e50dfc..84b74ac3584bcd 100755 --- a/configure +++ b/configure @@ -1082,7 +1082,6 @@ with_universal_archs with_framework_name enable_framework with_app_store_compliance -with_emscripten_target enable_wasm_dynamic_linking enable_wasm_pthreads with_suffix @@ -1868,8 +1867,6 @@ Optional Packages: Enable any patches required for compiliance with app stores. Optional PATCH-FILE specifies the custom patch to apply. 
- --with-emscripten-target=[browser|node] - Emscripten platform --with-suffix=SUFFIX set executable suffix to SUFFIX (default is empty, yes is mapped to '.exe') --without-static-libpython @@ -7223,48 +7220,6 @@ case $ac_sys_system/$ac_sys_release in #( ;; esac -{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for --with-emscripten-target" >&5 -printf %s "checking for --with-emscripten-target... " >&6; } - -# Check whether --with-emscripten-target was given. -if test ${with_emscripten_target+y} -then : - withval=$with_emscripten_target; - if test "x$ac_sys_system" = xEmscripten -then : - - case $with_emscripten_target in #( - browser) : - ac_sys_emscripten_target=browser ;; #( - node) : - ac_sys_emscripten_target=node ;; #( - browser-debug) : - ac_sys_emscripten_target=browser-debug ;; #( - node-debug) : - ac_sys_emscripten_target=node-debug ;; #( - *) : - as_fn_error $? "Invalid argument: --with-emscripten-target=browser|node" "$LINENO" 5 - ;; -esac - -else $as_nop - - as_fn_error $? "--with-emscripten-target only applies to Emscripten" "$LINENO" 5 - -fi - -else $as_nop - - if test "x$ac_sys_system" = xEmscripten -then : - ac_sys_emscripten_target=browser -fi - -fi - -{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_sys_emscripten_target" >&5 -printf "%s\n" "$ac_sys_emscripten_target" >&6; } - { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for --enable-wasm-dynamic-linking" >&5 printf %s "checking for --enable-wasm-dynamic-linking... " >&6; } # Check whether --enable-wasm-dynamic-linking was given. @@ -7334,12 +7289,10 @@ esac else $as_nop - case $ac_sys_system/$ac_sys_emscripten_target in #( - Emscripten/browser*) : - EXEEXT=.js ;; #( - Emscripten/node*) : - EXEEXT=.js ;; #( - WASI/*) : + case $ac_sys_system in #( + Emscripten) : + EXEEXT=.mjs ;; #( + WASI) : EXEEXT=.wasm ;; #( *) : EXEEXT= @@ -7674,8 +7627,8 @@ printf %s "checking HOSTRUNNER... " >&6; } if test -z "$HOSTRUNNER" then - case $ac_sys_system/$ac_sys_emscripten_target in #( - Emscripten/node*) : + case $ac_sys_system in #( + Emscripten) : if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}node", so it can be a program name with args. 
@@ -7791,7 +7744,7 @@ then : as_fn_append HOSTRUNNER " --experimental-wasm-memory64" fi ;; #( - WASI/*) : + WASI) : HOSTRUNNER='wasmtime run --wasm max-wasm-stack=16777216 --wasi preview2=n --env PYTHONPATH=/$(shell realpath --relative-to $(abs_srcdir) $(abs_builddir))/$(shell cat pybuilddir.txt):/Lib --dir $(srcdir)::/' ;; #( *) : HOSTRUNNER='' @@ -7807,13 +7760,8 @@ if test -n "$HOSTRUNNER"; then fi # LIBRARY_DEPS, LINK_PYTHON_OBJS and LINK_PYTHON_DEPS variable -case $ac_sys_system/$ac_sys_emscripten_target in #( - Emscripten/browser*) : - LIBRARY_DEPS='$(PY3LIBRARY) $(WASM_STDLIB) python.html python.worker.js' ;; #( - *) : - LIBRARY_DEPS='$(PY3LIBRARY) $(EXPORTSYMS)' - ;; -esac +LIBRARY_DEPS='$(PY3LIBRARY) $(EXPORTSYMS)' + LINK_PYTHON_DEPS='$(LIBRARY_DEPS)' if test "$PY_ENABLE_SHARED" = 1 || test "$enable_framework" ; then LIBRARY_DEPS="\$(LDLIBRARY) $LIBRARY_DEPS" @@ -9484,6 +9432,7 @@ fi as_fn_append LDFLAGS_NODIST " -sWASM_BIGINT" as_fn_append LDFLAGS_NODIST " -sFORCE_FILESYSTEM -lidbfs.js -lnodefs.js -lproxyfs.js -lworkerfs.js" + as_fn_append LDFLAGS_NODIST " -sEXPORTED_RUNTIME_METHODS=FS" if test "x$enable_wasm_dynamic_linking" = xyes then : @@ -9500,34 +9449,9 @@ then : as_fn_append LINKFORSHARED " -sPROXY_TO_PTHREAD" fi - - case $ac_sys_emscripten_target in #( - browser*) : - - if test "x$ac_sys_emscripten_target" = xbrowser-debug -then : - wasm_debug=yes -fi - as_fn_append LINKFORSHARED " --preload-file=\$(WASM_ASSETS_DIR)" - WASM_ASSETS_DIR=".\$(prefix)" - WASM_STDLIB="\$(WASM_ASSETS_DIR)/local/lib/python\$(VERSION)/os.py" - WASM_LINKFORSHARED_DEBUG="-gsource-map --emit-symbol-map" - ;; #( - node*) : - - if test "x$ac_sys_emscripten_target" = xnode-debug -then : - wasm_debug=yes -fi - as_fn_append LDFLAGS_NODIST " --pre-js=\$(srcdir)/Tools/wasm/emscripten/node_pre.js" - as_fn_append LDFLAGS_NODIST " -sALLOW_MEMORY_GROWTH -sNODERAWFS" - as_fn_append LINKFORSHARED " -sEXIT_RUNTIME" - WASM_LINKFORSHARED_DEBUG="-gseparate-dwarf --emit-symbol-map" - - ;; #( - *) : - ;; -esac + as_fn_append LDFLAGS_NODIST " -sALLOW_MEMORY_GROWTH" + as_fn_append LDFLAGS_NODIST " -sEXIT_RUNTIME" + WASM_LINKFORSHARED_DEBUG="-gseparate-dwarf --emit-symbol-map" if test "x$wasm_debug" = xyes then : @@ -13717,12 +13641,12 @@ else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then - LIBUUID_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "uuid >= 2.20" 2>&1` + LIBUUID_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "uuid >= 2.20" 2>&1` else - LIBUUID_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "uuid >= 2.20" 2>&1` + LIBUUID_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "uuid >= 2.20" 2>&1` fi - # Put the nasty error message in config.log where it belongs - echo "$LIBUUID_PKG_ERRORS" >&5 + # Put the nasty error message in config.log where it belongs + echo "$LIBUUID_PKG_ERRORS" >&5 save_CFLAGS=$CFLAGS @@ -13974,11 +13898,11 @@ LIBS=$save_LIBS else - LIBUUID_CFLAGS=$pkg_cv_LIBUUID_CFLAGS - LIBUUID_LIBS=$pkg_cv_LIBUUID_LIBS + LIBUUID_CFLAGS=$pkg_cv_LIBUUID_CFLAGS + LIBUUID_LIBS=$pkg_cv_LIBUUID_LIBS { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: yes" >&5 printf "%s\n" "yes" >&6; } - have_uuid=yes + have_uuid=yes printf "%s\n" "#define HAVE_UUID_H 1" >>confdefs.h printf "%s\n" "#define HAVE_UUID_GENERATE_TIME_SAFE 1" >>confdefs.h @@ -14666,12 +14590,12 @@ else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then - LIBFFI_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags 
--libs "libffi" 2>&1` + LIBFFI_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "libffi" 2>&1` else - LIBFFI_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "libffi" 2>&1` + LIBFFI_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "libffi" 2>&1` fi - # Put the nasty error message in config.log where it belongs - echo "$LIBFFI_PKG_ERRORS" >&5 + # Put the nasty error message in config.log where it belongs + echo "$LIBFFI_PKG_ERRORS" >&5 save_CFLAGS=$CFLAGS @@ -14817,11 +14741,11 @@ LIBS=$save_LIBS else - LIBFFI_CFLAGS=$pkg_cv_LIBFFI_CFLAGS - LIBFFI_LIBS=$pkg_cv_LIBFFI_LIBS + LIBFFI_CFLAGS=$pkg_cv_LIBFFI_CFLAGS + LIBFFI_LIBS=$pkg_cv_LIBFFI_LIBS { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: yes" >&5 printf "%s\n" "yes" >&6; } - have_libffi=yes + have_libffi=yes fi fi @@ -15143,25 +15067,25 @@ else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then - LIBMPDEC_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "libmpdec >= 2.5.0" 2>&1` + LIBMPDEC_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "libmpdec >= 2.5.0" 2>&1` else - LIBMPDEC_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "libmpdec >= 2.5.0" 2>&1` + LIBMPDEC_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "libmpdec >= 2.5.0" 2>&1` fi - # Put the nasty error message in config.log where it belongs - echo "$LIBMPDEC_PKG_ERRORS" >&5 + # Put the nasty error message in config.log where it belongs + echo "$LIBMPDEC_PKG_ERRORS" >&5 - LIBMPDEC_CFLAGS=${LIBMPDEC_CFLAGS-""} + LIBMPDEC_CFLAGS=${LIBMPDEC_CFLAGS-""} LIBMPDEC_LIBS=${LIBMPDEC_LIBS-"-lmpdec -lm"} LIBMPDEC_INTERNAL= elif test $pkg_failed = untried; then { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 printf "%s\n" "no" >&6; } - LIBMPDEC_CFLAGS=${LIBMPDEC_CFLAGS-""} + LIBMPDEC_CFLAGS=${LIBMPDEC_CFLAGS-""} LIBMPDEC_LIBS=${LIBMPDEC_LIBS-"-lmpdec -lm"} LIBMPDEC_INTERNAL= else - LIBMPDEC_CFLAGS=$pkg_cv_LIBMPDEC_CFLAGS - LIBMPDEC_LIBS=$pkg_cv_LIBMPDEC_LIBS + LIBMPDEC_CFLAGS=$pkg_cv_LIBMPDEC_CFLAGS + LIBMPDEC_LIBS=$pkg_cv_LIBMPDEC_LIBS { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: yes" >&5 printf "%s\n" "yes" >&6; } @@ -15412,12 +15336,12 @@ else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then - LIBSQLITE3_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "sqlite3 >= 3.15.2" 2>&1` + LIBSQLITE3_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "sqlite3 >= 3.15.2" 2>&1` else - LIBSQLITE3_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "sqlite3 >= 3.15.2" 2>&1` + LIBSQLITE3_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "sqlite3 >= 3.15.2" 2>&1` fi - # Put the nasty error message in config.log where it belongs - echo "$LIBSQLITE3_PKG_ERRORS" >&5 + # Put the nasty error message in config.log where it belongs + echo "$LIBSQLITE3_PKG_ERRORS" >&5 LIBSQLITE3_CFLAGS=${LIBSQLITE3_CFLAGS-""} @@ -15433,8 +15357,8 @@ printf "%s\n" "no" >&6; } else - LIBSQLITE3_CFLAGS=$pkg_cv_LIBSQLITE3_CFLAGS - LIBSQLITE3_LIBS=$pkg_cv_LIBSQLITE3_LIBS + LIBSQLITE3_CFLAGS=$pkg_cv_LIBSQLITE3_CFLAGS + LIBSQLITE3_LIBS=$pkg_cv_LIBSQLITE3_LIBS { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: yes" >&5 printf "%s\n" "yes" >&6; } @@ -16176,24 +16100,24 @@ else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then - TCLTK_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "$_QUERY" 2>&1` + TCLTK_PKG_ERRORS=`$PKG_CONFIG --short-errors 
--print-errors --cflags --libs "$_QUERY" 2>&1` else - TCLTK_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "$_QUERY" 2>&1` + TCLTK_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "$_QUERY" 2>&1` fi - # Put the nasty error message in config.log where it belongs - echo "$TCLTK_PKG_ERRORS" >&5 + # Put the nasty error message in config.log where it belongs + echo "$TCLTK_PKG_ERRORS" >&5 - found_tcltk=no + found_tcltk=no elif test $pkg_failed = untried; then { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 printf "%s\n" "no" >&6; } - found_tcltk=no + found_tcltk=no else - TCLTK_CFLAGS=$pkg_cv_TCLTK_CFLAGS - TCLTK_LIBS=$pkg_cv_TCLTK_LIBS + TCLTK_CFLAGS=$pkg_cv_TCLTK_CFLAGS + TCLTK_LIBS=$pkg_cv_TCLTK_LIBS { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: yes" >&5 printf "%s\n" "yes" >&6; } - found_tcltk=yes + found_tcltk=yes fi fi @@ -16273,14 +16197,14 @@ else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then - X11_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "x11" 2>&1` + X11_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "x11" 2>&1` else - X11_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "x11" 2>&1` + X11_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "x11" 2>&1` fi - # Put the nasty error message in config.log where it belongs - echo "$X11_PKG_ERRORS" >&5 + # Put the nasty error message in config.log where it belongs + echo "$X11_PKG_ERRORS" >&5 - as_fn_error $? "Package requirements (x11) were not met: + as_fn_error $? "Package requirements (x11) were not met: $X11_PKG_ERRORS @@ -16293,7 +16217,7 @@ See the pkg-config man page for more details." "$LINENO" 5 elif test $pkg_failed = untried; then { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 printf "%s\n" "no" >&6; } - { { printf "%s\n" "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 + { { printf "%s\n" "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 printf "%s\n" "$as_me: error: in \`$ac_pwd':" >&2;} as_fn_error $? "The pkg-config script could not be found or is too old. Make sure it is in your PATH or set the PKG_CONFIG environment variable to the full @@ -16306,8 +16230,8 @@ See the pkg-config man page for more details. To get pkg-config, see . 
See \`config.log' for more details" "$LINENO" 5; } else - X11_CFLAGS=$pkg_cv_X11_CFLAGS - X11_LIBS=$pkg_cv_X11_LIBS + X11_CFLAGS=$pkg_cv_X11_CFLAGS + X11_LIBS=$pkg_cv_X11_LIBS { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: yes" >&5 printf "%s\n" "yes" >&6; } @@ -20712,12 +20636,12 @@ else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then - ZLIB_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "zlib >= 1.2.0" 2>&1` + ZLIB_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "zlib >= 1.2.0" 2>&1` else - ZLIB_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "zlib >= 1.2.0" 2>&1` + ZLIB_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "zlib >= 1.2.0" 2>&1` fi - # Put the nasty error message in config.log where it belongs - echo "$ZLIB_PKG_ERRORS" >&5 + # Put the nasty error message in config.log where it belongs + echo "$ZLIB_PKG_ERRORS" >&5 save_CFLAGS=$CFLAGS @@ -20975,8 +20899,8 @@ LIBS=$save_LIBS else - ZLIB_CFLAGS=$pkg_cv_ZLIB_CFLAGS - ZLIB_LIBS=$pkg_cv_ZLIB_LIBS + ZLIB_CFLAGS=$pkg_cv_ZLIB_CFLAGS + ZLIB_LIBS=$pkg_cv_ZLIB_LIBS { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: yes" >&5 printf "%s\n" "yes" >&6; } @@ -21060,12 +20984,12 @@ else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then - BZIP2_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "bzip2" 2>&1` + BZIP2_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "bzip2" 2>&1` else - BZIP2_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "bzip2" 2>&1` + BZIP2_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "bzip2" 2>&1` fi - # Put the nasty error message in config.log where it belongs - echo "$BZIP2_PKG_ERRORS" >&5 + # Put the nasty error message in config.log where it belongs + echo "$BZIP2_PKG_ERRORS" >&5 save_CFLAGS=$CFLAGS @@ -21229,11 +21153,11 @@ LIBS=$save_LIBS else - BZIP2_CFLAGS=$pkg_cv_BZIP2_CFLAGS - BZIP2_LIBS=$pkg_cv_BZIP2_LIBS + BZIP2_CFLAGS=$pkg_cv_BZIP2_CFLAGS + BZIP2_LIBS=$pkg_cv_BZIP2_LIBS { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: yes" >&5 printf "%s\n" "yes" >&6; } - have_bzip2=yes + have_bzip2=yes fi @@ -21288,12 +21212,12 @@ else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then - LIBLZMA_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "liblzma" 2>&1` + LIBLZMA_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "liblzma" 2>&1` else - LIBLZMA_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "liblzma" 2>&1` + LIBLZMA_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "liblzma" 2>&1` fi - # Put the nasty error message in config.log where it belongs - echo "$LIBLZMA_PKG_ERRORS" >&5 + # Put the nasty error message in config.log where it belongs + echo "$LIBLZMA_PKG_ERRORS" >&5 save_CFLAGS=$CFLAGS @@ -21457,11 +21381,11 @@ LIBS=$save_LIBS else - LIBLZMA_CFLAGS=$pkg_cv_LIBLZMA_CFLAGS - LIBLZMA_LIBS=$pkg_cv_LIBLZMA_LIBS + LIBLZMA_CFLAGS=$pkg_cv_LIBLZMA_CFLAGS + LIBLZMA_LIBS=$pkg_cv_LIBLZMA_LIBS { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: yes" >&5 printf "%s\n" "yes" >&6; } - have_liblzma=yes + have_liblzma=yes fi @@ -24174,10 +24098,10 @@ if ac_fn_c_try_link "$LINENO" then : -if grep noonsees conftest$EXEEXT >/dev/null ; then +if grep noonsees conftest* > /dev/null ; then ax_cv_c_float_words_bigendian=yes fi -if grep seesnoon conftest$EXEEXT >/dev/null ; then +if grep seesnoon conftest* >/dev/null ; then if test 
"$ax_cv_c_float_words_bigendian" = unknown; then ax_cv_c_float_words_bigendian=no else @@ -24213,10 +24137,6 @@ printf "%s\n" "#define DOUBLE_IS_LITTLE_ENDIAN_IEEE754 1" >>confdefs.h # but if it's not big or little, then it must be this? printf "%s\n" "#define DOUBLE_IS_ARM_MIXED_ENDIAN_IEEE754 1" >>confdefs.h - ;; #( - wasm*) : - -printf "%s\n" "#define DOUBLE_IS_LITTLE_ENDIAN_IEEE754 1" >>confdefs.h ;; #( *) : as_fn_error $? "Unknown float word ordering. You need to manually preset ax_cv_c_float_words_bigendian=no (or yes) according to your system." "$LINENO" 5 ;; @@ -25296,12 +25216,12 @@ else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then - LIBREADLINE_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "readline" 2>&1` + LIBREADLINE_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "readline" 2>&1` else - LIBREADLINE_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "readline" 2>&1` + LIBREADLINE_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "readline" 2>&1` fi - # Put the nasty error message in config.log where it belongs - echo "$LIBREADLINE_PKG_ERRORS" >&5 + # Put the nasty error message in config.log where it belongs + echo "$LIBREADLINE_PKG_ERRORS" >&5 save_CFLAGS=$CFLAGS @@ -25459,8 +25379,8 @@ LIBS=$save_LIBS else - LIBREADLINE_CFLAGS=$pkg_cv_LIBREADLINE_CFLAGS - LIBREADLINE_LIBS=$pkg_cv_LIBREADLINE_LIBS + LIBREADLINE_CFLAGS=$pkg_cv_LIBREADLINE_CFLAGS + LIBREADLINE_LIBS=$pkg_cv_LIBREADLINE_LIBS { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: yes" >&5 printf "%s\n" "yes" >&6; } @@ -25527,12 +25447,12 @@ else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then - LIBEDIT_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "libedit" 2>&1` + LIBEDIT_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "libedit" 2>&1` else - LIBEDIT_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "libedit" 2>&1` + LIBEDIT_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "libedit" 2>&1` fi - # Put the nasty error message in config.log where it belongs - echo "$LIBEDIT_PKG_ERRORS" >&5 + # Put the nasty error message in config.log where it belongs + echo "$LIBEDIT_PKG_ERRORS" >&5 save_CFLAGS=$CFLAGS @@ -25694,8 +25614,8 @@ LIBS=$save_LIBS else - LIBEDIT_CFLAGS=$pkg_cv_LIBEDIT_CFLAGS - LIBEDIT_LIBS=$pkg_cv_LIBEDIT_LIBS + LIBEDIT_CFLAGS=$pkg_cv_LIBEDIT_CFLAGS + LIBEDIT_LIBS=$pkg_cv_LIBEDIT_LIBS { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: yes" >&5 printf "%s\n" "yes" >&6; } @@ -26556,21 +26476,21 @@ else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then - CURSES_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "ncursesw" 2>&1` + CURSES_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "ncursesw" 2>&1` else - CURSES_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "ncursesw" 2>&1` + CURSES_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "ncursesw" 2>&1` fi - # Put the nasty error message in config.log where it belongs - echo "$CURSES_PKG_ERRORS" >&5 + # Put the nasty error message in config.log where it belongs + echo "$CURSES_PKG_ERRORS" >&5 - have_curses=no + have_curses=no elif test $pkg_failed = untried; then { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 printf "%s\n" "no" >&6; } - have_curses=no + have_curses=no else - CURSES_CFLAGS=$pkg_cv_CURSES_CFLAGS - CURSES_LIBS=$pkg_cv_CURSES_LIBS + 
CURSES_CFLAGS=$pkg_cv_CURSES_CFLAGS + CURSES_LIBS=$pkg_cv_CURSES_LIBS { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: yes" >&5 printf "%s\n" "yes" >&6; } @@ -26629,21 +26549,21 @@ else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then - PANEL_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "panelw" 2>&1` + PANEL_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "panelw" 2>&1` else - PANEL_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "panelw" 2>&1` + PANEL_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "panelw" 2>&1` fi - # Put the nasty error message in config.log where it belongs - echo "$PANEL_PKG_ERRORS" >&5 + # Put the nasty error message in config.log where it belongs + echo "$PANEL_PKG_ERRORS" >&5 - have_panel=no + have_panel=no elif test $pkg_failed = untried; then { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 printf "%s\n" "no" >&6; } - have_panel=no + have_panel=no else - PANEL_CFLAGS=$pkg_cv_PANEL_CFLAGS - PANEL_LIBS=$pkg_cv_PANEL_LIBS + PANEL_CFLAGS=$pkg_cv_PANEL_CFLAGS + PANEL_LIBS=$pkg_cv_PANEL_LIBS { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: yes" >&5 printf "%s\n" "yes" >&6; } @@ -26710,21 +26630,21 @@ else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then - CURSES_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "ncurses" 2>&1` + CURSES_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "ncurses" 2>&1` else - CURSES_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "ncurses" 2>&1` + CURSES_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "ncurses" 2>&1` fi - # Put the nasty error message in config.log where it belongs - echo "$CURSES_PKG_ERRORS" >&5 + # Put the nasty error message in config.log where it belongs + echo "$CURSES_PKG_ERRORS" >&5 - have_curses=no + have_curses=no elif test $pkg_failed = untried; then { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 printf "%s\n" "no" >&6; } - have_curses=no + have_curses=no else - CURSES_CFLAGS=$pkg_cv_CURSES_CFLAGS - CURSES_LIBS=$pkg_cv_CURSES_LIBS + CURSES_CFLAGS=$pkg_cv_CURSES_CFLAGS + CURSES_LIBS=$pkg_cv_CURSES_LIBS { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: yes" >&5 printf "%s\n" "yes" >&6; } @@ -26783,21 +26703,21 @@ else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then - PANEL_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "panel" 2>&1` + PANEL_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "panel" 2>&1` else - PANEL_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "panel" 2>&1` + PANEL_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "panel" 2>&1` fi - # Put the nasty error message in config.log where it belongs - echo "$PANEL_PKG_ERRORS" >&5 + # Put the nasty error message in config.log where it belongs + echo "$PANEL_PKG_ERRORS" >&5 - have_panel=no + have_panel=no elif test $pkg_failed = untried; then { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 printf "%s\n" "no" >&6; } - have_panel=no + have_panel=no else - PANEL_CFLAGS=$pkg_cv_PANEL_CFLAGS - PANEL_LIBS=$pkg_cv_PANEL_LIBS + PANEL_CFLAGS=$pkg_cv_PANEL_CFLAGS + PANEL_LIBS=$pkg_cv_PANEL_LIBS { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: yes" >&5 printf "%s\n" "yes" >&6; } @@ -29062,15 +28982,7 @@ else $as_nop fi else $as_nop - - case $ac_sys_system/$ac_sys_emscripten_target in #( - Emscripten/browser*) : - TEST_MODULES=no ;; 
#( - *) : - TEST_MODULES=yes - ;; -esac - + TEST_MODULES=yes fi { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $TEST_MODULES" >&5 diff --git a/configure.ac b/configure.ac index bc3d2d0e63b77a..8fa6cb60900ad1 100644 --- a/configure.ac +++ b/configure.ac @@ -1282,30 +1282,6 @@ AS_CASE([$ac_sys_system/$ac_sys_release], ] ) -AC_MSG_CHECKING([for --with-emscripten-target]) -AC_ARG_WITH([emscripten-target], - [AS_HELP_STRING([--with-emscripten-target=@<:@browser|node@:>@], [Emscripten platform])], -[ - AS_VAR_IF([ac_sys_system], [Emscripten], [ - AS_CASE([$with_emscripten_target], - [browser], [ac_sys_emscripten_target=browser], - [node], [ac_sys_emscripten_target=node], -dnl Debug builds with source map / dwarf symbols. Py_DEBUG builds easily -dnl run out of stack space. Detached sybmols and map prohibit some -dnl optimizations and increase file size. Options are undocumented so we -dnl are free to remove them in the future. - [browser-debug], [ac_sys_emscripten_target=browser-debug], - [node-debug], [ac_sys_emscripten_target=node-debug], - [AC_MSG_ERROR([Invalid argument: --with-emscripten-target=browser|node])] - ) - ], [ - AC_MSG_ERROR([--with-emscripten-target only applies to Emscripten]) - ]) -], [ - AS_VAR_IF([ac_sys_system], [Emscripten], [ac_sys_emscripten_target=browser]) -]) -AC_MSG_RESULT([$ac_sys_emscripten_target]) - dnl On Emscripten dlopen() requires -s MAIN_MODULE and -fPIC. The flags dnl disables dead code elimination and increases the size of the WASM module dnl by about 1.5 to 2MB. MAIN_MODULE defines __wasm_mutable_globals__. @@ -1350,10 +1326,9 @@ AC_ARG_WITH([suffix], [EXEEXT=$with_suffix] ) ], [ - AS_CASE([$ac_sys_system/$ac_sys_emscripten_target], - [Emscripten/browser*], [EXEEXT=.js], - [Emscripten/node*], [EXEEXT=.js], - [WASI/*], [EXEEXT=.wasm], + AS_CASE([$ac_sys_system], + [Emscripten], [EXEEXT=.mjs], + [WASI], [EXEEXT=.wasm], [EXEEXT=] ) ]) @@ -1638,8 +1613,8 @@ AC_MSG_CHECKING([HOSTRUNNER]) AC_ARG_VAR([HOSTRUNNER], [Program to run CPython for the host platform]) if test -z "$HOSTRUNNER" then - AS_CASE([$ac_sys_system/$ac_sys_emscripten_target], - [Emscripten/node*], [ + AS_CASE([$ac_sys_system], + [Emscripten], [ AC_PATH_TOOL([NODE], [node], [node]) HOSTRUNNER="$NODE" AS_VAR_IF([host_cpu], [wasm64], [AS_VAR_APPEND([HOSTRUNNER], [" --experimental-wasm-memory64"])]) @@ -1647,7 +1622,7 @@ then dnl TODO: support other WASI runtimes dnl wasmtime starts the process with "/" as CWD. For OOT builds add the dnl directory containing _sysconfigdata to PYTHONPATH. 
- [WASI/*], [HOSTRUNNER='wasmtime run --wasm max-wasm-stack=16777216 --wasi preview2=n --env PYTHONPATH=/$(shell realpath --relative-to $(abs_srcdir) $(abs_builddir))/$(shell cat pybuilddir.txt):/Lib --dir $(srcdir)::/'], + [WASI], [HOSTRUNNER='wasmtime run --wasm max-wasm-stack=16777216 --wasi preview2=n --env PYTHONPATH=/$(shell realpath --relative-to $(abs_srcdir) $(abs_builddir))/$(shell cat pybuilddir.txt):/Lib --dir $(srcdir)::/'], [HOSTRUNNER=''] ) fi @@ -1660,10 +1635,8 @@ if test -n "$HOSTRUNNER"; then fi # LIBRARY_DEPS, LINK_PYTHON_OBJS and LINK_PYTHON_DEPS variable -AS_CASE([$ac_sys_system/$ac_sys_emscripten_target], - [Emscripten/browser*], [LIBRARY_DEPS='$(PY3LIBRARY) $(WASM_STDLIB) python.html python.worker.js'], - [LIBRARY_DEPS='$(PY3LIBRARY) $(EXPORTSYMS)'] -) +LIBRARY_DEPS='$(PY3LIBRARY) $(EXPORTSYMS)' + LINK_PYTHON_DEPS='$(LIBRARY_DEPS)' if test "$PY_ENABLE_SHARED" = 1 || test "$enable_framework" ; then LIBRARY_DEPS="\$(LDLIBRARY) $LIBRARY_DEPS" @@ -2355,6 +2328,7 @@ AS_CASE([$ac_sys_system], dnl Include file system support AS_VAR_APPEND([LDFLAGS_NODIST], [" -sFORCE_FILESYSTEM -lidbfs.js -lnodefs.js -lproxyfs.js -lworkerfs.js"]) + AS_VAR_APPEND([LDFLAGS_NODIST], [" -sEXPORTED_RUNTIME_METHODS=FS"]) AS_VAR_IF([enable_wasm_dynamic_linking], [yes], [ AS_VAR_APPEND([LINKFORSHARED], [" -sMAIN_MODULE"]) @@ -2365,24 +2339,10 @@ AS_CASE([$ac_sys_system], AS_VAR_APPEND([LDFLAGS_NODIST], [" -sUSE_PTHREADS"]) AS_VAR_APPEND([LINKFORSHARED], [" -sPROXY_TO_PTHREAD"]) ]) - - AS_CASE([$ac_sys_emscripten_target], - [browser*], [ - AS_VAR_IF([ac_sys_emscripten_target], [browser-debug], [wasm_debug=yes]) - AS_VAR_APPEND([LINKFORSHARED], [" --preload-file=\$(WASM_ASSETS_DIR)"]) - WASM_ASSETS_DIR=".\$(prefix)" - WASM_STDLIB="\$(WASM_ASSETS_DIR)/local/lib/python\$(VERSION)/os.py" - dnl separate-dwarf does not seem to work in Chrome DevTools Support. - WASM_LINKFORSHARED_DEBUG="-gsource-map --emit-symbol-map" - ], - [node*], [ - AS_VAR_IF([ac_sys_emscripten_target], [node-debug], [wasm_debug=yes]) - AS_VAR_APPEND([LDFLAGS_NODIST], [" --pre-js=\$(srcdir)/Tools/wasm/emscripten/node_pre.js"]) - AS_VAR_APPEND([LDFLAGS_NODIST], [" -sALLOW_MEMORY_GROWTH -sNODERAWFS"]) - AS_VAR_APPEND([LINKFORSHARED], [" -sEXIT_RUNTIME"]) - WASM_LINKFORSHARED_DEBUG="-gseparate-dwarf --emit-symbol-map" - ] - ) + AS_VAR_APPEND([LDFLAGS_NODIST], [" -sALLOW_MEMORY_GROWTH"]) + dnl not completely sure whether or not we want -sEXIT_RUNTIME, keeping it for now. + AS_VAR_APPEND([LDFLAGS_NODIST], [" -sEXIT_RUNTIME"]) + WASM_LINKFORSHARED_DEBUG="-gseparate-dwarf --emit-symbol-map" AS_VAR_IF([wasm_debug], [yes], [ AS_VAR_APPEND([LDFLAGS_NODIST], [" -sASSERTIONS"]) @@ -5918,9 +5878,6 @@ AX_C_FLOAT_WORDS_BIGENDIAN( AC_DEFINE([DOUBLE_IS_ARM_MIXED_ENDIAN_IEEE754], [1], [Define if C doubles are 64-bit IEEE 754 binary format, stored in ARM mixed-endian order (byte order 45670123)])], - [wasm*], [AC_DEFINE([DOUBLE_IS_LITTLE_ENDIAN_IEEE754], [1], - [Define if C doubles are 64-bit IEEE 754 binary format, - stored with the least significant byte first])], [AC_MSG_ERROR([m4_normalize([ Unknown float word ordering. 
You need to manually preset ax_cv_c_float_words_bigendian=no (or yes) @@ -7466,12 +7423,7 @@ AC_MSG_CHECKING([for --disable-test-modules]) AC_ARG_ENABLE([test-modules], [AS_HELP_STRING([--disable-test-modules], [don't build nor install test modules])], [ AS_VAR_IF([enable_test_modules], [yes], [TEST_MODULES=yes], [TEST_MODULES=no]) -], [ - AS_CASE([$ac_sys_system/$ac_sys_emscripten_target], - [Emscripten/browser*], [TEST_MODULES=no], - [TEST_MODULES=yes] - ) -]) +], [TEST_MODULES=yes]) AC_MSG_RESULT([$TEST_MODULES]) AC_SUBST([TEST_MODULES]) diff --git a/iOS/README.rst b/iOS/README.rst index 4d7c344d5e9e17..e33455eef8f44a 100644 --- a/iOS/README.rst +++ b/iOS/README.rst @@ -351,13 +351,13 @@ Running specific tests ^^^^^^^^^^^^^^^^^^^^^^ As the test suite is being executed on an iOS simulator, it is not possible to -pass in command line arguments to configure test suite operation. To work around -this limitation, the arguments that would normally be passed as command line -arguments are configured as a static string at the start of the XCTest method -``- (void)testPython`` in ``iOSTestbedTests.m``. To pass an argument to the test -suite, add a a string to the ``argv`` definition. These arguments will be passed -to the test suite as if they had been passed to ``python -m test`` at the -command line. +pass in command line arguments to configure test suite operation. To work +around this limitation, the arguments that would normally be passed as command +line arguments are configured as part of the ``iOSTestbed-Info.plist`` file +that is used to configure the iOS testbed app. In this file, the ``TestArgs`` +key is an array containing the arguments that would be passed to ``python -m`` +on the command line (including ``test`` in position 0, the name of the test +module to be executed). 
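(Illustrative sketch, not part of this change: the TestArgs array can be edited in Xcode's plist editor, or rewritten programmatically with plistlib as below. The trailing test names are placeholders for whatever subset you want to run.)

```python
# Sketch: rewrite the TestArgs array in the testbed's Info.plist so the run
# covers only a couple of named tests instead of the whole suite.
import plistlib
from pathlib import Path

PLIST = Path("iOS/testbed/iOSTestbed/iOSTestbed-Info.plist")

with PLIST.open("rb") as f:
    info = plistlib.load(f)

# Position 0 is the module handed to "python -m"; the rest are its arguments.
info["TestArgs"] = ["test", "-uall", "--single-process", "test_os", "test_sys"]

with PLIST.open("wb") as f:
    plistlib.dump(info, f)
```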
Disabling automated breakpoints ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/iOS/testbed/iOSTestbed.xcodeproj/project.pbxproj b/iOS/testbed/iOSTestbed.xcodeproj/project.pbxproj index d57cfc3dbe0304..6819ac0eeed95f 100644 --- a/iOS/testbed/iOSTestbed.xcodeproj/project.pbxproj +++ b/iOS/testbed/iOSTestbed.xcodeproj/project.pbxproj @@ -17,6 +17,8 @@ 607A66502B0EFFE00010BFC8 /* Python.xcframework in Frameworks */ = {isa = PBXBuildFile; fileRef = 607A664A2B0EFB310010BFC8 /* Python.xcframework */; }; 607A66512B0EFFE00010BFC8 /* Python.xcframework in Embed Frameworks */ = {isa = PBXBuildFile; fileRef = 607A664A2B0EFB310010BFC8 /* Python.xcframework */; settings = {ATTRIBUTES = (CodeSignOnCopy, RemoveHeadersOnCopy, ); }; }; 607A66582B0F079F0010BFC8 /* dylib-Info-template.plist in Resources */ = {isa = PBXBuildFile; fileRef = 607A66572B0F079F0010BFC8 /* dylib-Info-template.plist */; }; + 608619542CB77BA900F46182 /* app_packages in Resources */ = {isa = PBXBuildFile; fileRef = 608619532CB77BA900F46182 /* app_packages */; }; + 608619562CB7819B00F46182 /* app in Resources */ = {isa = PBXBuildFile; fileRef = 608619552CB7819B00F46182 /* app */; }; /* End PBXBuildFile section */ /* Begin PBXContainerItemProxy section */ @@ -66,6 +68,8 @@ 607A664A2B0EFB310010BFC8 /* Python.xcframework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.xcframework; path = Python.xcframework; sourceTree = ""; }; 607A66572B0F079F0010BFC8 /* dylib-Info-template.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; path = "dylib-Info-template.plist"; sourceTree = ""; }; 607A66592B0F08600010BFC8 /* iOSTestbed-Info.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; path = "iOSTestbed-Info.plist"; sourceTree = ""; }; + 608619532CB77BA900F46182 /* app_packages */ = {isa = PBXFileReference; lastKnownFileType = folder; path = app_packages; sourceTree = ""; }; + 608619552CB7819B00F46182 /* app */ = {isa = PBXFileReference; lastKnownFileType = folder; path = app; sourceTree = ""; }; /* End PBXFileReference section */ /* Begin PBXFrameworksBuildPhase section */ @@ -111,6 +115,8 @@ 607A66142B0EFA380010BFC8 /* iOSTestbed */ = { isa = PBXGroup; children = ( + 608619552CB7819B00F46182 /* app */, + 608619532CB77BA900F46182 /* app_packages */, 607A66592B0F08600010BFC8 /* iOSTestbed-Info.plist */, 607A66572B0F079F0010BFC8 /* dylib-Info-template.plist */, 607A66152B0EFA380010BFC8 /* AppDelegate.h */, @@ -223,7 +229,9 @@ files = ( 607A66252B0EFA390010BFC8 /* LaunchScreen.storyboard in Resources */, 607A66582B0F079F0010BFC8 /* dylib-Info-template.plist in Resources */, + 608619562CB7819B00F46182 /* app in Resources */, 607A66222B0EFA390010BFC8 /* Assets.xcassets in Resources */, + 608619542CB77BA900F46182 /* app_packages in Resources */, ); runOnlyForDeploymentPostprocessing = 0; }; @@ -273,7 +281,7 @@ ); runOnlyForDeploymentPostprocessing = 0; shellPath = /bin/sh; - shellScript = "set -e\n\ninstall_dylib () {\n INSTALL_BASE=$1\n FULL_EXT=$2\n\n # The name of the extension file\n EXT=$(basename \"$FULL_EXT\")\n # The location of the extension file, relative to the bundle\n RELATIVE_EXT=${FULL_EXT#$CODESIGNING_FOLDER_PATH/} \n # The path to the extension file, relative to the install base\n PYTHON_EXT=${RELATIVE_EXT/$INSTALL_BASE/}\n # The full dotted name of the extension module, constructed from the file path.\n FULL_MODULE_NAME=$(echo $PYTHON_EXT | cut -d \".\" -f 1 | tr \"/\" \".\"); \n # A bundle identifier; not actually used, but required by Xcode framework 
packaging\n FRAMEWORK_BUNDLE_ID=$(echo $PRODUCT_BUNDLE_IDENTIFIER.$FULL_MODULE_NAME | tr \"_\" \"-\")\n # The name of the framework folder.\n FRAMEWORK_FOLDER=\"Frameworks/$FULL_MODULE_NAME.framework\"\n\n # If the framework folder doesn't exist, create it.\n if [ ! -d \"$CODESIGNING_FOLDER_PATH/$FRAMEWORK_FOLDER\" ]; then\n echo \"Creating framework for $RELATIVE_EXT\" \n mkdir -p \"$CODESIGNING_FOLDER_PATH/$FRAMEWORK_FOLDER\"\n cp \"$CODESIGNING_FOLDER_PATH/dylib-Info-template.plist\" \"$CODESIGNING_FOLDER_PATH/$FRAMEWORK_FOLDER/Info.plist\"\n plutil -replace CFBundleExecutable -string \"$FULL_MODULE_NAME\" \"$CODESIGNING_FOLDER_PATH/$FRAMEWORK_FOLDER/Info.plist\"\n plutil -replace CFBundleIdentifier -string \"$FRAMEWORK_BUNDLE_ID\" \"$CODESIGNING_FOLDER_PATH/$FRAMEWORK_FOLDER/Info.plist\"\n fi\n \n echo \"Installing binary for $FRAMEWORK_FOLDER/$FULL_MODULE_NAME\" \n mv \"$FULL_EXT\" \"$CODESIGNING_FOLDER_PATH/$FRAMEWORK_FOLDER/$FULL_MODULE_NAME\"\n # Create a placeholder .fwork file where the .so was\n echo \"$FRAMEWORK_FOLDER/$FULL_MODULE_NAME\" > ${FULL_EXT%.so}.fwork\n # Create a back reference to the .so file location in the framework\n echo \"${RELATIVE_EXT%.so}.fwork\" > \"$CODESIGNING_FOLDER_PATH/$FRAMEWORK_FOLDER/$FULL_MODULE_NAME.origin\" \n}\n\nPYTHON_VER=$(ls -1 \"$CODESIGNING_FOLDER_PATH/python/lib\")\necho \"Install Python $PYTHON_VER standard library extension modules...\"\nfind \"$CODESIGNING_FOLDER_PATH/python/lib/$PYTHON_VER/lib-dynload\" -name \"*.so\" | while read FULL_EXT; do\n install_dylib python/lib/$PYTHON_VER/lib-dynload/ \"$FULL_EXT\"\ndone\n\n# Clean up dylib template \nrm -f \"$CODESIGNING_FOLDER_PATH/dylib-Info-template.plist\"\necho \"Signing frameworks as $EXPANDED_CODE_SIGN_IDENTITY_NAME ($EXPANDED_CODE_SIGN_IDENTITY)...\"\nfind \"$CODESIGNING_FOLDER_PATH/Frameworks\" -name \"*.framework\" -exec /usr/bin/codesign --force --sign \"$EXPANDED_CODE_SIGN_IDENTITY\" ${OTHER_CODE_SIGN_FLAGS:-} -o runtime --timestamp=none --preserve-metadata=identifier,entitlements,flags --generate-entitlement-der \"{}\" \\; \n"; + shellScript = "set -e\n\ninstall_dylib () {\n INSTALL_BASE=$1\n FULL_EXT=$2\n\n # The name of the extension file\n EXT=$(basename \"$FULL_EXT\")\n # The location of the extension file, relative to the bundle\n RELATIVE_EXT=${FULL_EXT#$CODESIGNING_FOLDER_PATH/} \n # The path to the extension file, relative to the install base\n PYTHON_EXT=${RELATIVE_EXT/$INSTALL_BASE/}\n # The full dotted name of the extension module, constructed from the file path.\n FULL_MODULE_NAME=$(echo $PYTHON_EXT | cut -d \".\" -f 1 | tr \"/\" \".\"); \n # A bundle identifier; not actually used, but required by Xcode framework packaging\n FRAMEWORK_BUNDLE_ID=$(echo $PRODUCT_BUNDLE_IDENTIFIER.$FULL_MODULE_NAME | tr \"_\" \"-\")\n # The name of the framework folder.\n FRAMEWORK_FOLDER=\"Frameworks/$FULL_MODULE_NAME.framework\"\n\n # If the framework folder doesn't exist, create it.\n if [ ! 
-d \"$CODESIGNING_FOLDER_PATH/$FRAMEWORK_FOLDER\" ]; then\n echo \"Creating framework for $RELATIVE_EXT\" \n mkdir -p \"$CODESIGNING_FOLDER_PATH/$FRAMEWORK_FOLDER\"\n cp \"$CODESIGNING_FOLDER_PATH/dylib-Info-template.plist\" \"$CODESIGNING_FOLDER_PATH/$FRAMEWORK_FOLDER/Info.plist\"\n plutil -replace CFBundleExecutable -string \"$FULL_MODULE_NAME\" \"$CODESIGNING_FOLDER_PATH/$FRAMEWORK_FOLDER/Info.plist\"\n plutil -replace CFBundleIdentifier -string \"$FRAMEWORK_BUNDLE_ID\" \"$CODESIGNING_FOLDER_PATH/$FRAMEWORK_FOLDER/Info.plist\"\n fi\n \n echo \"Installing binary for $FRAMEWORK_FOLDER/$FULL_MODULE_NAME\" \n mv \"$FULL_EXT\" \"$CODESIGNING_FOLDER_PATH/$FRAMEWORK_FOLDER/$FULL_MODULE_NAME\"\n # Create a placeholder .fwork file where the .so was\n echo \"$FRAMEWORK_FOLDER/$FULL_MODULE_NAME\" > ${FULL_EXT%.so}.fwork\n # Create a back reference to the .so file location in the framework\n echo \"${RELATIVE_EXT%.so}.fwork\" > \"$CODESIGNING_FOLDER_PATH/$FRAMEWORK_FOLDER/$FULL_MODULE_NAME.origin\" \n}\n\nPYTHON_VER=$(ls -1 \"$CODESIGNING_FOLDER_PATH/python/lib\")\necho \"Install Python $PYTHON_VER standard library extension modules...\"\nfind \"$CODESIGNING_FOLDER_PATH/python/lib/$PYTHON_VER/lib-dynload\" -name \"*.so\" | while read FULL_EXT; do\n install_dylib python/lib/$PYTHON_VER/lib-dynload/ \"$FULL_EXT\"\ndone\necho \"Install app package extension modules...\"\nfind \"$CODESIGNING_FOLDER_PATH/app_packages\" -name \"*.so\" | while read FULL_EXT; do\n install_dylib app_packages/ \"$FULL_EXT\"\ndone\necho \"Install app extension modules...\"\nfind \"$CODESIGNING_FOLDER_PATH/app\" -name \"*.so\" | while read FULL_EXT; do\n install_dylib app/ \"$FULL_EXT\"\ndone\n\n# Clean up dylib template \nrm -f \"$CODESIGNING_FOLDER_PATH/dylib-Info-template.plist\"\necho \"Signing frameworks as $EXPANDED_CODE_SIGN_IDENTITY_NAME ($EXPANDED_CODE_SIGN_IDENTITY)...\"\nfind \"$CODESIGNING_FOLDER_PATH/Frameworks\" -name \"*.framework\" -exec /usr/bin/codesign --force --sign \"$EXPANDED_CODE_SIGN_IDENTITY\" ${OTHER_CODE_SIGN_FLAGS:-} -o runtime --timestamp=none --preserve-metadata=identifier,entitlements,flags --generate-entitlement-der \"{}\" \\; \n"; }; /* End PBXShellScriptBuildPhase section */ diff --git a/iOS/testbed/iOSTestbed/app/README b/iOS/testbed/iOSTestbed/app/README new file mode 100644 index 00000000000000..af22c685f87976 --- /dev/null +++ b/iOS/testbed/iOSTestbed/app/README @@ -0,0 +1,7 @@ +This folder can contain any Python application code. + +During the build, any binary modules found in this folder will be processed into +iOS Framework form. + +When the test suite runs, this folder will be on the PYTHONPATH, and will be the +working directory for the test suite. diff --git a/iOS/testbed/iOSTestbed/app_packages/README b/iOS/testbed/iOSTestbed/app_packages/README new file mode 100644 index 00000000000000..42d7fdeb813250 --- /dev/null +++ b/iOS/testbed/iOSTestbed/app_packages/README @@ -0,0 +1,7 @@ +This folder can be a target for installing any Python dependencies needed by the +test suite. + +During the build, any binary modules found in this folder will be processed into +iOS Framework form. + +When the test suite runs, this folder will be on the PYTHONPATH. 
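(One possible way to use the app_packages folder, not part of this patch: stage a pure-Python dependency into it before building the testbed. The helper and package name below are hypothetical; pip's --target option installs into an arbitrary directory.)

```python
# Hypothetical helper: install a test-only dependency into the testbed's
# app_packages folder so it is importable while the suite runs on the simulator.
import subprocess
import sys
from pathlib import Path

APP_PACKAGES = Path("iOS/testbed/iOSTestbed/app_packages")


def stage(package: str) -> None:
    subprocess.check_call(
        [sys.executable, "-m", "pip", "install", "--target", str(APP_PACKAGES), package]
    )


if __name__ == "__main__":
    stage("attrs")  # example package; any pure-Python wheel would work
```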
diff --git a/iOS/testbed/iOSTestbed/iOSTestbed-Info.plist b/iOS/testbed/iOSTestbed/iOSTestbed-Info.plist index e2aa460b6fd5ee..a582f42a212783 100644 --- a/iOS/testbed/iOSTestbed/iOSTestbed-Info.plist +++ b/iOS/testbed/iOSTestbed/iOSTestbed-Info.plist @@ -41,8 +41,18 @@ UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight - MainModule - ios + TestArgs + + test + -uall + --single-process + --rerun + -W + + UIApplicationSceneManifest UIApplicationSupportsMultipleScenes diff --git a/iOS/testbed/iOSTestbedTests/iOSTestbedTests.m b/iOS/testbed/iOSTestbedTests/iOSTestbedTests.m index 9bf502a808eb88..db00d43da85cbc 100644 --- a/iOS/testbed/iOSTestbedTests/iOSTestbedTests.m +++ b/iOS/testbed/iOSTestbedTests/iOSTestbedTests.m @@ -9,30 +9,38 @@ @implementation iOSTestbedTests - (void)testPython { - // Arguments to pass into the test suite runner. - // argv[0] must identify the process; any subsequent arg - // will be handled as if it were an argument to `python -m test` - const char *argv[] = { - "iOSTestbed", // argv[0] is the process that is running. - "-uall", // Enable all resources - "--single-process", // always run all tests sequentially in a single process - "--rerun", // Re-run failed tests in verbose mode - "-W", // Display test output on failure - // To run a subset of tests, add the test names below; e.g., - // "test_os", - // "test_sys", - }; - - // Start a Python interpreter. + const char **argv; int exit_code; + int failed; PyStatus status; PyPreConfig preconfig; PyConfig config; + PyObject *sys_module; + PyObject *sys_path_attr; + NSArray *test_args; NSString *python_home; + NSString *path; wchar_t *wtmp_str; NSString *resourcePath = [[NSBundle mainBundle] resourcePath]; + // Disable all color, as the Xcode log can't display color + setenv("NO_COLOR", "1", true); + + // Arguments to pass into the test suite runner. + // argv[0] must identify the process; any subsequent arg + // will be handled as if it were an argument to `python -m test` + test_args = [[NSBundle mainBundle] objectForInfoDictionaryKey:@"TestArgs"]; + if (test_args == NULL) { + NSLog(@"Unable to identify test arguments."); + } + argv = malloc(sizeof(char *) * ([test_args count] + 1)); + argv[0] = "iOSTestbed"; + for (int i = 1; i < [test_args count]; i++) { + argv[i] = [[test_args objectAtIndex:i] UTF8String]; + } + NSLog(@"Test command: %@", test_args); + // Generate an isolated Python configuration. NSLog(@"Configuring isolated Python..."); PyPreConfig_InitIsolatedConfig(&preconfig); @@ -50,7 +58,7 @@ - (void)testPython { // Ensure that signal handlers are installed config.install_signal_handlers = 1; // Run the test module. - config.run_module = Py_DecodeLocale("test", NULL); + config.run_module = Py_DecodeLocale([[test_args objectAtIndex:0] UTF8String], NULL); // For debugging - enable verbose mode. 
// config.verbose = 1; @@ -83,7 +91,7 @@ - (void)testPython { } NSLog(@"Configure argc/argv..."); - status = PyConfig_SetBytesArgv(&config, sizeof(argv) / sizeof(char *), (char**) argv); + status = PyConfig_SetBytesArgv(&config, [test_args count], (char**) argv); if (PyStatus_Exception(status)) { XCTFail(@"Unable to configure argc/argv: %s", status.err_msg); PyConfig_Clear(&config); @@ -98,11 +106,47 @@ - (void)testPython { return; } + sys_module = PyImport_ImportModule("sys"); + if (sys_module == NULL) { + XCTFail(@"Could not import sys module"); + return; + } + + sys_path_attr = PyObject_GetAttrString(sys_module, "path"); + if (sys_path_attr == NULL) { + XCTFail(@"Could not access sys.path"); + return; + } + + // Add the app packages path + path = [NSString stringWithFormat:@"%@/app_packages", resourcePath, nil]; + NSLog(@"App packages path: %@", path); + wtmp_str = Py_DecodeLocale([path UTF8String], NULL); + failed = PyList_Insert(sys_path_attr, 0, PyUnicode_FromString([path UTF8String])); + if (failed) { + XCTFail(@"Unable to add app packages to sys.path"); + return; + } + PyMem_RawFree(wtmp_str); + + path = [NSString stringWithFormat:@"%@/app", resourcePath, nil]; + NSLog(@"App path: %@", path); + wtmp_str = Py_DecodeLocale([path UTF8String], NULL); + failed = PyList_Insert(sys_path_attr, 0, PyUnicode_FromString([path UTF8String])); + if (failed) { + XCTFail(@"Unable to add app to sys.path"); + return; + } + PyMem_RawFree(wtmp_str); + + // Ensure the working directory is the app folder. + chdir([path UTF8String]); + // Start the test suite. Print a separator to differentiate Python startup logs from app logs NSLog(@"---------------------------------------------------------------------------"); exit_code = Py_RunMain(); - XCTAssertEqual(exit_code, 0, @"Python test suite did not pass"); + XCTAssertEqual(exit_code, 0, @"Test suite did not pass"); NSLog(@"---------------------------------------------------------------------------");