diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile
index ce8967337b02f9..efbdcd402cdf67 100644
--- a/.devcontainer/Dockerfile
+++ b/.devcontainer/Dockerfile
@@ -9,8 +9,8 @@ ENV WASMTIME_HOME=/opt/wasmtime
ENV WASMTIME_VERSION=7.0.0
ENV WASMTIME_CPU_ARCH=x86_64
-RUN dnf -y --nodocs install git clang xz python3-blurb dnf-plugins-core && \
- dnf -y --nodocs builddep python3 && \
+RUN dnf -y --nodocs --setopt=install_weak_deps=False install /usr/bin/{blurb,clang,curl,git,ln,tar,xz} 'dnf-command(builddep)' && \
+ dnf -y --nodocs --setopt=install_weak_deps=False builddep python3 && \
dnf -y clean all
RUN mkdir ${WASI_SDK_PATH} && \
diff --git a/.gitattributes b/.gitattributes
index cb1cf8bcc7c877..4ed95069442f3d 100644
--- a/.gitattributes
+++ b/.gitattributes
@@ -34,6 +34,7 @@ Lib/test/xmltestdata/* noeol
# Shell scripts should have LF even on Windows because of Cygwin
Lib/venv/scripts/common/activate text eol=lf
+Lib/venv/scripts/posix/* text eol=lf
# CRLF files
[attr]dos text eol=crlf
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index 9149b38d87601c..3422ef835279bc 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -11,7 +11,7 @@
configure* @erlend-aasland @corona10
# asyncio
-**/*asyncio* @1st1 @asvetlov @gvanrossum @kumaraditya303
+**/*asyncio* @1st1 @asvetlov @gvanrossum @kumaraditya303 @willingc
# Core
**/*context* @1st1
@@ -25,6 +25,8 @@ Objects/frameobject.c @markshannon
Objects/call.c @markshannon
Python/ceval.c @markshannon
Python/compile.c @markshannon @iritkatriel
+Python/assemble.c @markshannon @iritkatriel
+Python/flowgraph.c @markshannon @iritkatriel
Python/ast_opt.c @isidentical
Lib/test/test_patma.py @brandtbucher
Lib/test/test_peepholer.py @brandtbucher
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 4e5328282f1224..df0f107a541614 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -33,6 +33,7 @@ jobs:
check_source:
name: 'Check for source changes'
runs-on: ubuntu-latest
+ timeout-minutes: 10
outputs:
run_tests: ${{ steps.check.outputs.run_tests }}
steps:
@@ -63,6 +64,7 @@ jobs:
check_generated_files:
name: 'Check if generated files are up to date'
runs-on: ubuntu-latest
+ timeout-minutes: 60
needs: check_source
if: needs.check_source.outputs.run_tests == 'true'
steps:
@@ -118,6 +120,7 @@ jobs:
build_win32:
name: 'Windows (x86)'
runs-on: windows-latest
+ timeout-minutes: 60
needs: check_source
if: needs.check_source.outputs.run_tests == 'true'
env:
@@ -126,7 +129,6 @@ jobs:
- uses: actions/checkout@v3
- name: Build CPython
run: .\PCbuild\build.bat -e -d -p Win32
- timeout-minutes: 30
- name: Display build info
run: .\python.bat -m test.pythoninfo
- name: Tests
@@ -135,6 +137,7 @@ jobs:
build_win_amd64:
name: 'Windows (x64)'
runs-on: windows-latest
+ timeout-minutes: 60
needs: check_source
if: needs.check_source.outputs.run_tests == 'true'
env:
@@ -145,7 +148,6 @@ jobs:
run: echo "::add-matcher::.github/problem-matchers/msvc.json"
- name: Build CPython
run: .\PCbuild\build.bat -e -d -p x64
- timeout-minutes: 30
- name: Display build info
run: .\python.bat -m test.pythoninfo
- name: Tests
@@ -154,6 +156,7 @@ jobs:
build_macos:
name: 'macOS'
runs-on: macos-latest
+ timeout-minutes: 60
needs: check_source
if: needs.check_source.outputs.run_tests == 'true'
env:
@@ -184,6 +187,7 @@ jobs:
build_ubuntu:
name: 'Ubuntu'
runs-on: ubuntu-20.04
+ timeout-minutes: 60
needs: check_source
if: needs.check_source.outputs.run_tests == 'true'
env:
@@ -241,6 +245,7 @@ jobs:
build_ubuntu_ssltests:
name: 'Ubuntu SSL tests with OpenSSL'
runs-on: ubuntu-20.04
+ timeout-minutes: 60
needs: check_source
if: needs.check_source.outputs.run_tests == 'true'
strategy:
@@ -290,6 +295,7 @@ jobs:
build_asan:
name: 'Address sanitizer'
runs-on: ubuntu-20.04
+ timeout-minutes: 60
needs: check_source
if: needs.check_source.outputs.run_tests == 'true'
env:
@@ -302,6 +308,10 @@ jobs:
run: echo "::add-matcher::.github/problem-matchers/gcc.json"
- name: Install Dependencies
run: sudo ./.github/workflows/posix-deps-apt.sh
+ - name: Set up GCC-10 for ASAN
+ uses: egor-tensin/setup-gcc@v1
+ with:
+ version: 10
- name: Configure OpenSSL env vars
run: |
echo "MULTISSL_DIR=${GITHUB_WORKSPACE}/multissl" >> $GITHUB_ENV
diff --git a/.github/workflows/build_msi.yml b/.github/workflows/build_msi.yml
index 5f1dcae190efbc..2bed09014e0ff2 100644
--- a/.github/workflows/build_msi.yml
+++ b/.github/workflows/build_msi.yml
@@ -26,6 +26,7 @@ jobs:
build:
name: Windows Installer
runs-on: windows-latest
+ timeout-minutes: 60
strategy:
matrix:
type: [x86, x64, arm64]
diff --git a/.github/workflows/doc.yml b/.github/workflows/doc.yml
index 314a7da647ff70..3f7550cc72943b 100644
--- a/.github/workflows/doc.yml
+++ b/.github/workflows/doc.yml
@@ -36,6 +36,7 @@ jobs:
build_doc:
name: 'Docs'
runs-on: ubuntu-latest
+ timeout-minutes: 60
steps:
- uses: actions/checkout@v3
- name: Register Sphinx problem matcher
@@ -55,11 +56,13 @@ jobs:
# Add pull request annotations for Sphinx nitpicks (missing references)
- name: 'Get list of changed files'
+ if: github.event_name == 'pull_request'
id: changed_files
uses: Ana06/get-changed-files@v2.2.0
with:
filter: "Doc/**"
- name: 'Build changed files in nit-picky mode'
+ if: github.event_name == 'pull_request'
continue-on-error: true
run: |
# Mark files the pull request modified
@@ -76,10 +79,31 @@ jobs:
# Build docs with the '-n' (nit-picky) option, convert warnings to errors (-W)
make -C Doc/ PYTHON=../python SPHINXOPTS="-q -n -W --keep-going" html 2>&1
+ # This build doesn't use problem matchers or check annotations
+ # It also does not run 'make check', as sphinx-lint is not installed into the
+ # environment.
+ build_doc_oldest_supported_sphinx:
+ name: 'Docs (Oldest Sphinx)'
+ runs-on: ubuntu-latest
+ timeout-minutes: 60
+ steps:
+ - uses: actions/checkout@v3
+ - name: 'Set up Python'
+ uses: actions/setup-python@v4
+ with:
+ python-version: '3.11' # known to work with Sphinx 3.2
+ cache: 'pip'
+ cache-dependency-path: 'Doc/requirements-oldest-sphinx.txt'
+ - name: 'Install build dependencies'
+ run: make -C Doc/ venv REQUIREMENTS="requirements-oldest-sphinx.txt"
+ - name: 'Build HTML documentation'
+ run: make -C Doc/ SPHINXOPTS="-q" SPHINXERRORHANDLING="-W --keep-going" html
+
# Run "doctest" on HEAD as new syntax doesn't exist in the latest stable release
doctest:
name: 'Doctest'
runs-on: ubuntu-latest
+ timeout-minutes: 60
steps:
- uses: actions/checkout@v3
- name: Register Sphinx problem matcher
diff --git a/.github/workflows/new-bugs-announce-notifier.yml b/.github/workflows/new-bugs-announce-notifier.yml
index b2a76ef7d36153..73806c5d6d58af 100644
--- a/.github/workflows/new-bugs-announce-notifier.yml
+++ b/.github/workflows/new-bugs-announce-notifier.yml
@@ -11,6 +11,7 @@ permissions:
jobs:
notify-new-bugs-announce:
runs-on: ubuntu-latest
+ timeout-minutes: 10
steps:
- uses: actions/setup-node@v3
with:
diff --git a/.github/workflows/project-updater.yml b/.github/workflows/project-updater.yml
index 99c7a05ae8cab0..7574bfc208ff76 100644
--- a/.github/workflows/project-updater.yml
+++ b/.github/workflows/project-updater.yml
@@ -13,16 +13,15 @@ jobs:
add-to-project:
name: Add issues to projects
runs-on: ubuntu-latest
+ timeout-minutes: 10
strategy:
matrix:
include:
# if an issue has any of these labels, it will be added
# to the corresponding project
- { project: 2, label: "release-blocker, deferred-blocker" }
- - { project: 3, label: expert-subinterpreters }
- - { project: 29, label: expert-asyncio }
- { project: 32, label: sprint }
-
+
steps:
- uses: actions/add-to-project@v0.1.0
with:
diff --git a/.github/workflows/require-pr-label.yml b/.github/workflows/require-pr-label.yml
index e847bae155e216..916bbeb4352734 100644
--- a/.github/workflows/require-pr-label.yml
+++ b/.github/workflows/require-pr-label.yml
@@ -6,12 +6,13 @@ on:
jobs:
label:
- name: DO-NOT-MERGE
+ name: DO-NOT-MERGE / unresolved review
runs-on: ubuntu-latest
+ timeout-minutes: 10
steps:
- uses: mheap/github-action-required-labels@v4
with:
mode: exactly
count: 0
- labels: "DO-NOT-MERGE"
+ labels: "DO-NOT-MERGE, awaiting changes, awaiting change review"
diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml
index d79e856c87e78d..94676f5ee5fffc 100644
--- a/.github/workflows/stale.yml
+++ b/.github/workflows/stale.yml
@@ -12,6 +12,7 @@ jobs:
if: github.repository_owner == 'python'
runs-on: ubuntu-latest
+ timeout-minutes: 10
steps:
- name: "Check PRs"
diff --git a/.github/workflows/verify-ensurepip-wheels.yml b/.github/workflows/verify-ensurepip-wheels.yml
index 969515ed287b55..17d841f1f1c54a 100644
--- a/.github/workflows/verify-ensurepip-wheels.yml
+++ b/.github/workflows/verify-ensurepip-wheels.yml
@@ -1,4 +1,4 @@
-name: Verify bundled pip and setuptools
+name: Verify bundled wheels
on:
workflow_dispatch:
@@ -23,10 +23,11 @@ concurrency:
jobs:
verify:
runs-on: ubuntu-latest
+ timeout-minutes: 10
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: '3'
- - name: Compare checksums of bundled pip and setuptools to ones published on PyPI
+ - name: Compare checksum of bundled wheels to the ones published on PyPI
run: ./Tools/build/verify_ensurepip_wheels.py
diff --git a/Doc/Makefile b/Doc/Makefile
index ebe7f3698000fb..c11ea6ce03e8a4 100644
--- a/Doc/Makefile
+++ b/Doc/Makefile
@@ -13,6 +13,7 @@ JOBS = auto
PAPER =
SOURCES =
DISTVERSION = $(shell $(PYTHON) tools/extensions/patchlevel.py)
+REQUIREMENTS = requirements.txt
SPHINXERRORHANDLING = -W
# Internal variables.
@@ -154,8 +155,8 @@ venv:
echo "To recreate it, remove it first with \`make clean-venv'."; \
else \
$(PYTHON) -m venv $(VENVDIR); \
- $(VENVDIR)/bin/python3 -m pip install -U pip setuptools; \
- $(VENVDIR)/bin/python3 -m pip install -r requirements.txt; \
+ $(VENVDIR)/bin/python3 -m pip install --upgrade pip; \
+ $(VENVDIR)/bin/python3 -m pip install -r $(REQUIREMENTS); \
echo "The venv has been created in the $(VENVDIR) directory"; \
fi
diff --git a/Doc/c-api/import.rst b/Doc/c-api/import.rst
index a51619db6d3d97..474a64800044d0 100644
--- a/Doc/c-api/import.rst
+++ b/Doc/c-api/import.rst
@@ -188,6 +188,8 @@ Importing Modules
.. versionchanged:: 3.3
Uses :func:`imp.source_from_cache()` in calculating the source path if
only the bytecode path is provided.
+ .. versionchanged:: 3.12
+ No longer uses the removed ``imp`` module.
.. c:function:: long PyImport_GetMagicNumber()
diff --git a/Doc/c-api/type.rst b/Doc/c-api/type.rst
index 7b5d1fac40ed87..69b15296993301 100644
--- a/Doc/c-api/type.rst
+++ b/Doc/c-api/type.rst
@@ -232,6 +232,15 @@ Type Objects
.. versionadded:: 3.11
+.. c:function:: int PyUnstable_Type_AssignVersionTag(PyTypeObject *type)
+
+ Attempt to assign a version tag to the given type.
+
+ Returns 1 if the type already had a valid version tag or a new one was
+ assigned, or 0 if a new tag could not be assigned.
+
+ .. versionadded:: 3.12
+
Creating Heap-Allocated Types
.............................
diff --git a/Doc/c-api/typeobj.rst b/Doc/c-api/typeobj.rst
index fd8f49ccb1caab..e963b90628aa49 100644
--- a/Doc/c-api/typeobj.rst
+++ b/Doc/c-api/typeobj.rst
@@ -1145,7 +1145,7 @@ and :c:type:`PyType_Type` effectively act as defaults.)
.. data:: Py_TPFLAGS_MANAGED_DICT
- This bit indicates that instances of the class have a ``__dict___``
+ This bit indicates that instances of the class have a ``__dict__``
attribute, and that the space for the dictionary is managed by the VM.
If this flag is set, :const:`Py_TPFLAGS_HAVE_GC` should also be set.
diff --git a/Doc/c-api/unicode.rst b/Doc/c-api/unicode.rst
index f062f14e9a7561..ab3a2e274d9395 100644
--- a/Doc/c-api/unicode.rst
+++ b/Doc/c-api/unicode.rst
@@ -509,6 +509,15 @@ APIs:
arguments.
+.. c:function:: PyObject* PyUnicode_FromObject(PyObject *obj)
+
+ Copy an instance of a Unicode subtype to a new true Unicode object if
+ necessary. If *obj* is already a true Unicode object (not a subtype),
+ return the reference with incremented refcount.
+
+ Objects other than Unicode or its subtypes will cause a :exc:`TypeError`.
+
+
.. c:function:: PyObject* PyUnicode_FromEncodedObject(PyObject *obj, \
const char *encoding, const char *errors)
@@ -616,15 +625,6 @@ APIs:
.. versionadded:: 3.3
-.. c:function:: PyObject* PyUnicode_FromObject(PyObject *obj)
-
- Copy an instance of a Unicode subtype to a new true Unicode object if
- necessary. If *obj* is already a true Unicode object (not a subtype),
- return the reference with incremented refcount.
-
- Objects other than Unicode or its subtypes will cause a :exc:`TypeError`.
-
-
Locale Encoding
"""""""""""""""
diff --git a/Doc/conf.py b/Doc/conf.py
index e99b801d0ae87a..42c23bf77c7034 100644
--- a/Doc/conf.py
+++ b/Doc/conf.py
@@ -76,6 +76,13 @@
if venvdir is not None:
exclude_patterns.append(venvdir + '/*')
+nitpick_ignore = [
+ # Do not error nit-picky mode builds when _SubParsersAction.add_parser cannot
+ # be resolved, as the method is currently undocumented. For context, see
+ # https://github.com/python/cpython/pull/103289.
+ ('py:meth', '_SubParsersAction.add_parser'),
+]
+
# Disable Docutils smartquotes for several translations
smartquotes_excludes = {
'languages': ['ja', 'fr', 'zh_TW', 'zh_CN'], 'builders': ['man', 'text'],
@@ -254,8 +261,31 @@
# Options for the link checker
# ----------------------------
-# Ignore certain URLs.
-linkcheck_ignore = [r'https://bugs.python.org/(issue)?\d+']
+linkcheck_allowed_redirects = {
+ # bpo-NNNN -> BPO -> GH Issues
+ r'https://bugs.python.org/issue\?@action=redirect&bpo=\d+': 'https://github.com/python/cpython/issues/\d+',
+ # GH-NNNN used to refer to pull requests
+ r'https://github.com/python/cpython/issues/\d+': 'https://github.com/python/cpython/pull/\d+',
+ # :source:`something` linking files in the repository
+ r'https://github.com/python/cpython/tree/.*': 'https://github.com/python/cpython/blob/.*'
+}
+
+linkcheck_anchors_ignore = [
+ # ignore anchors that start with a '/', e.g. Wikipedia media files:
+ # https://en.wikipedia.org/wiki/Walrus#/media/File:Pacific_Walrus_-_Bull_(8247646168).jpg
+ r'\/.*',
+]
+
+linkcheck_ignore = [
+ # The crawler gets "Anchor not found"
+ r'https://developer.apple.com/documentation/.+?#.*',
+ r'https://devguide.python.org.+?/#.*',
+ r'https://github.com.+?#.*',
+ # Robot crawlers not allowed: "403 Client Error: Forbidden"
+ r'https://support.enthought.com/hc/.*',
+ # SSLError CertificateError, even though it is valid
+ r'https://unix.org/version2/whatsnew/lp64_wp.html',
+]
# Options for extensions
diff --git a/Doc/constraints.txt b/Doc/constraints.txt
new file mode 100644
index 00000000000000..66c748eb092d83
--- /dev/null
+++ b/Doc/constraints.txt
@@ -0,0 +1,29 @@
+# We have upper bounds on our transitive dependencies here
+# To avoid new releases unexpectedly breaking our build.
+# This file can be updated on an ad-hoc basis,
+# though it will probably have to be updated
+# whenever Doc/requirements.txt is updated.
+
+# Direct dependencies of Sphinx
+babel<3
+colorama<0.5
+imagesize<1.5
+Jinja2<3.2
+packaging<24
+# Pygments==2.15.0 breaks CI
+Pygments<2.16,!=2.15.0
+requests<3
+snowballstemmer<3
+sphinxcontrib-applehelp<1.1
+sphinxcontrib-devhelp<1.1
+sphinxcontrib-htmlhelp<2.1
+sphinxcontrib-jsmath<1.1
+sphinxcontrib-qthelp<1.1
+sphinxcontrib-serializinghtml<1.2
+
+# Direct dependencies of Jinja2 (Jinja is a dependency of Sphinx, see above)
+MarkupSafe<2.2
+
+# Direct dependencies of sphinx-lint
+polib<1.3
+regex<2024
diff --git a/Doc/distributing/index.rst b/Doc/distributing/index.rst
index 21389adedf9c15..d237f8f082d87b 100644
--- a/Doc/distributing/index.rst
+++ b/Doc/distributing/index.rst
@@ -129,14 +129,10 @@ involved in creating and publishing a project:
* `Uploading the project to the Python Package Index`_
* `The .pypirc file`_
-.. _Project structure: \
- https://packaging.python.org/tutorials/packaging-projects/#packaging-python-projects
-.. _Building and packaging the project: \
- https://packaging.python.org/tutorials/packaging-projects/#creating-the-package-files
-.. _Uploading the project to the Python Package Index: \
- https://packaging.python.org/tutorials/packaging-projects/#uploading-the-distribution-archives
-.. _The .pypirc file: \
- https://packaging.python.org/specifications/pypirc/
+.. _Project structure: https://packaging.python.org/tutorials/packaging-projects/#packaging-python-projects
+.. _Building and packaging the project: https://packaging.python.org/tutorials/packaging-projects/#creating-the-package-files
+.. _Uploading the project to the Python Package Index: https://packaging.python.org/tutorials/packaging-projects/#uploading-the-distribution-archives
+.. _The .pypirc file: https://packaging.python.org/specifications/pypirc/
How do I...?
diff --git a/Doc/extending/newtypes.rst b/Doc/extending/newtypes.rst
index 80a1387db200c2..56b40acdb69fed 100644
--- a/Doc/extending/newtypes.rst
+++ b/Doc/extending/newtypes.rst
@@ -337,7 +337,7 @@ Here is an example::
}
PyErr_Format(PyExc_AttributeError,
- "'%.50s' object has no attribute '%.400s'",
+ "'%.100s' object has no attribute '%.400s'",
tp->tp_name, name);
return NULL;
}
diff --git a/Doc/faq/library.rst b/Doc/faq/library.rst
index a9cde456575020..597caaa778e1c8 100644
--- a/Doc/faq/library.rst
+++ b/Doc/faq/library.rst
@@ -780,7 +780,7 @@ socket to :meth:`select.select` to check if it's writable.
The :mod:`asyncio` module provides a general purpose single-threaded and
concurrent asynchronous library, which can be used for writing non-blocking
network code.
- The third-party `Twisted <https://twistedmatrix.com/trac/>`_ library is
+ The third-party `Twisted <https://twisted.org/>`_ library is
a popular and feature-rich alternative.
diff --git a/Doc/howto/argparse.rst b/Doc/howto/argparse.rst
index f682587488a227..52e98fa9620194 100644
--- a/Doc/howto/argparse.rst
+++ b/Doc/howto/argparse.rst
@@ -1,10 +1,12 @@
+.. _argparse-tutorial:
+
*****************
Argparse Tutorial
*****************
:author: Tshepang Mbambo
-.. _argparse-tutorial:
+.. currentmodule:: argparse
This tutorial is intended to be a gentle introduction to :mod:`argparse`, the
recommended command-line parsing module in the Python standard library.
@@ -12,7 +14,7 @@ recommended command-line parsing module in the Python standard library.
.. note::
There are two other modules that fulfill the same task, namely
- :mod:`getopt` (an equivalent for :c:func:`getopt` from the C
+ :mod:`getopt` (an equivalent for ``getopt()`` from the C
language) and the deprecated :mod:`optparse`.
Note also that :mod:`argparse` is based on :mod:`optparse`,
and therefore very similar in terms of usage.
@@ -137,13 +139,13 @@ And running the code:
Here is what's happening:
-* We've added the :meth:`add_argument` method, which is what we use to specify
+* We've added the :meth:`~ArgumentParser.add_argument` method, which is what we use to specify
which command-line options the program is willing to accept. In this case,
I've named it ``echo`` so that it's in line with its function.
* Calling our program now requires us to specify an option.
-* The :meth:`parse_args` method actually returns some data from the
+* The :meth:`~ArgumentParser.parse_args` method actually returns some data from the
options specified, in this case, ``echo``.
* The variable is some form of 'magic' that :mod:`argparse` performs for free
@@ -256,7 +258,7 @@ Here is what is happening:
* To show that the option is actually optional, there is no error when running
the program without it. Note that by default, if an optional argument isn't
- used, the relevant variable, in this case :attr:`args.verbosity`, is
+ used, the relevant variable, in this case ``args.verbosity``, is
given ``None`` as a value, which is the reason it fails the truth
test of the :keyword:`if` statement.
@@ -299,7 +301,7 @@ Here is what is happening:
We even changed the name of the option to match that idea.
Note that we now specify a new keyword, ``action``, and give it the value
``"store_true"``. This means that, if the option is specified,
- assign the value ``True`` to :data:`args.verbose`.
+ assign the value ``True`` to ``args.verbose``.
Not specifying it implies ``False``.
* It complains when you specify a value, in true spirit of what flags
@@ -698,7 +700,7 @@ Conflicting options
So far, we have been working with two methods of an
:class:`argparse.ArgumentParser` instance. Let's introduce a third one,
-:meth:`add_mutually_exclusive_group`. It allows for us to specify options that
+:meth:`~ArgumentParser.add_mutually_exclusive_group`. It allows for us to specify options that
conflict with each other. Let's also change the rest of the program so that
the new functionality makes more sense:
we'll introduce the ``--quiet`` option,
diff --git a/Doc/howto/descriptor.rst b/Doc/howto/descriptor.rst
index 74710d9b3fc2ed..3688c47f0d6ec9 100644
--- a/Doc/howto/descriptor.rst
+++ b/Doc/howto/descriptor.rst
@@ -1273,11 +1273,14 @@ Using the non-data descriptor protocol, a pure Python version of
.. testcode::
+ import functools
+
class StaticMethod:
"Emulate PyStaticMethod_Type() in Objects/funcobject.c"
def __init__(self, f):
self.f = f
+ functools.update_wrapper(self, f)
def __get__(self, obj, objtype=None):
return self.f
@@ -1285,13 +1288,19 @@ Using the non-data descriptor protocol, a pure Python version of
def __call__(self, *args, **kwds):
return self.f(*args, **kwds)
+The :func:`functools.update_wrapper` call adds a ``__wrapped__`` attribute
+that refers to the underlying function. Also it carries forward
+the attributes necessary to make the wrapper look like the wrapped
+function: ``__name__``, ``__qualname__``, ``__doc__``, and ``__annotations__``.
+
.. testcode::
:hide:
class E_sim:
@StaticMethod
- def f(x):
- return x * 10
+ def f(x: int) -> str:
+ "Simple function example"
+ return "!" * x
wrapped_ord = StaticMethod(ord)
@@ -1299,11 +1308,51 @@ Using the non-data descriptor protocol, a pure Python version of
:hide:
>>> E_sim.f(3)
- 30
+ '!!!'
>>> E_sim().f(3)
- 30
+ '!!!'
+
+ >>> sm = vars(E_sim)['f']
+ >>> type(sm).__name__
+ 'StaticMethod'
+ >>> f = E_sim.f
+ >>> type(f).__name__
+ 'function'
+ >>> sm.__name__
+ 'f'
+ >>> f.__name__
+ 'f'
+ >>> sm.__qualname__
+ 'E_sim.f'
+ >>> f.__qualname__
+ 'E_sim.f'
+ >>> sm.__doc__
+ 'Simple function example'
+ >>> f.__doc__
+ 'Simple function example'
+ >>> sm.__annotations__
+ {'x': <class 'int'>, 'return': <class 'str'>}
+ >>> f.__annotations__
+ {'x': <class 'int'>, 'return': <class 'str'>}
+ >>> sm.__module__ == f.__module__
+ True
+ >>> sm(3)
+ '!!!'
+ >>> f(3)
+ '!!!'
+
>>> wrapped_ord('A')
65
+ >>> wrapped_ord.__module__ == ord.__module__
+ True
+ >>> wrapped_ord.__wrapped__ == ord
+ True
+ >>> wrapped_ord.__name__ == ord.__name__
+ True
+ >>> wrapped_ord.__qualname__ == ord.__qualname__
+ True
+ >>> wrapped_ord.__doc__ == ord.__doc__
+ True
Class methods
@@ -1359,11 +1408,14 @@ Using the non-data descriptor protocol, a pure Python version of
.. testcode::
+ import functools
+
class ClassMethod:
"Emulate PyClassMethod_Type() in Objects/funcobject.c"
def __init__(self, f):
self.f = f
+ functools.update_wrapper(self, f)
def __get__(self, obj, cls=None):
if cls is None:
@@ -1380,8 +1432,9 @@ Using the non-data descriptor protocol, a pure Python version of
# Verify the emulation works
class T:
@ClassMethod
- def cm(cls, x, y):
- return (cls, x, y)
+ def cm(cls, x: int, y: str) -> tuple[str, int, str]:
+ "Class method that returns a tuple"
+ return (cls.__name__, x, y)
@ClassMethod
@property
@@ -1393,17 +1446,40 @@ Using the non-data descriptor protocol, a pure Python version of
:hide:
>>> T.cm(11, 22)
- (<class 'T'>, 11, 22)
+ ('T', 11, 22)
# Also call it from an instance
>>> t = T()
>>> t.cm(11, 22)
- (<class 'T'>, 11, 22)
+ ('T', 11, 22)
# Check the alternate path for chained descriptors
>>> T.__doc__
"A doc for 'T'"
+ # Verify that T uses our emulation
+ >>> type(vars(T)['cm']).__name__
+ 'ClassMethod'
+
+ # Verify that update_wrapper() correctly copied attributes
+ >>> T.cm.__name__
+ 'cm'
+ >>> T.cm.__qualname__
+ 'T.cm'
+ >>> T.cm.__doc__
+ 'Class method that returns a tuple'
+ >>> T.cm.__annotations__
+ {'x': <class 'int'>, 'y': <class 'str'>, 'return': tuple[str, int, str]}
+
+ # Verify that __wrapped__ was added and works correctly
+ >>> f = vars(T)['cm'].__wrapped__
+ >>> type(f).__name__
+ 'function'
+ >>> f.__name__
+ 'cm'
+ >>> f(T, 11, 22)
+ ('T', 11, 22)
+
The code path for ``hasattr(type(self.f), '__get__')`` was added in
Python 3.9 and makes it possible for :func:`classmethod` to support
@@ -1423,6 +1499,12 @@ chained together. In Python 3.11, this functionality was deprecated.
>>> G.__doc__
"A doc for 'G'"
+The :func:`functools.update_wrapper` call in ``ClassMethod`` adds a
+``__wrapped__`` attribute that refers to the underlying function. Also
+it carries forward the attributes necessary to make the wrapper look
+like the wrapped function: ``__name__``, ``__qualname__``, ``__doc__``,
+and ``__annotations__``.
+
Member objects and __slots__
----------------------------
diff --git a/Doc/howto/enum.rst b/Doc/howto/enum.rst
index 56391a026cf889..68b75c529e92c7 100644
--- a/Doc/howto/enum.rst
+++ b/Doc/howto/enum.rst
@@ -36,8 +36,10 @@ inherits from :class:`Enum` itself.
.. note:: Case of Enum Members
- Because Enums are used to represent constants we recommend using
- UPPER_CASE names for members, and will be using that style in our examples.
+ Because Enums are used to represent constants, and to help avoid issues
+ with name clashes between mixin-class methods/attributes and enum names,
+ we strongly recommend using UPPER_CASE names for members, and will be using
+ that style in our examples.
Depending on the nature of the enum a member's value may or may not be
important, but either way that value can be used to get the corresponding
@@ -490,6 +492,10 @@ the :meth:`~Enum.__repr__` omits the inherited class' name. For example::
Use the :func:`!dataclass` argument ``repr=False``
to use the standard :func:`repr`.
+.. versionchanged:: 3.12
+ Only the dataclass fields are shown in the value area, not the dataclass'
+ name.
+
Pickling
--------
@@ -992,7 +998,9 @@ but remain normal attributes.
Enum members are instances of their enum class, and are normally accessed as
``EnumClass.member``. In certain situations, such as writing custom enum
behavior, being able to access one member directly from another is useful,
-and is supported.
+and is supported; however, in order to avoid name clashes between member names
+and attributes/methods from mixed-in classes, upper-case names are strongly
+recommended.
.. versionchanged:: 3.5
diff --git a/Doc/howto/functional.rst b/Doc/howto/functional.rst
index 38a651b0f964a6..5cf12cc52bde4e 100644
--- a/Doc/howto/functional.rst
+++ b/Doc/howto/functional.rst
@@ -1208,8 +1208,8 @@ General
-------
**Structure and Interpretation of Computer Programs**, by Harold Abelson and
-Gerald Jay Sussman with Julie Sussman. Full text at
-https://mitpress.mit.edu/sicp/. In this classic textbook of computer science,
+Gerald Jay Sussman with Julie Sussman. The book can be found at
+https://mitpress.mit.edu/sicp. In this classic textbook of computer science,
chapters 2 and 3 discuss the use of sequences and streams to organize the data
flow inside a program. The book uses Scheme for its examples, but many of the
design approaches described in these chapters are applicable to functional-style
diff --git a/Doc/howto/isolating-extensions.rst b/Doc/howto/isolating-extensions.rst
index 2eddb582da7c24..0262054ae2b4a0 100644
--- a/Doc/howto/isolating-extensions.rst
+++ b/Doc/howto/isolating-extensions.rst
@@ -372,7 +372,7 @@ To save a some tedious error-handling boilerplate code, you can combine
these two steps with :c:func:`PyType_GetModuleState`, resulting in::
my_struct *state = (my_struct*)PyType_GetModuleState(type);
- if (state === NULL) {
+ if (state == NULL) {
return NULL;
}
@@ -435,7 +435,7 @@ For example::
PyObject *kwnames)
{
my_struct *state = (my_struct*)PyType_GetModuleState(defining_class);
- if (state === NULL) {
+ if (state == NULL) {
return NULL;
}
... // rest of logic
@@ -479,7 +479,7 @@ to get the state::
PyObject *module = PyType_GetModuleByDef(Py_TYPE(self), &module_def);
my_struct *state = (my_struct*)PyModule_GetState(module);
- if (state === NULL) {
+ if (state == NULL) {
return NULL;
}
diff --git a/Doc/howto/perf_profiling.rst b/Doc/howto/perf_profiling.rst
index ad2eb7b4d58aa5..6af5536166f58a 100644
--- a/Doc/howto/perf_profiling.rst
+++ b/Doc/howto/perf_profiling.rst
@@ -15,9 +15,9 @@ information about the performance of your application.
that aid with the analysis of the data that it produces.
The main problem with using the ``perf`` profiler with Python applications is that
-``perf`` only allows to get information about native symbols, this is, the names of
-the functions and procedures written in C. This means that the names and file names
-of the Python functions in your code will not appear in the output of the ``perf``.
+``perf`` only gets information about native symbols, that is, the names of
+functions and procedures written in C. This means that the names and file names
+of Python functions in your code will not appear in the output of ``perf``.
Since Python 3.12, the interpreter can run in a special mode that allows Python
functions to appear in the output of the ``perf`` profiler. When this mode is
@@ -28,8 +28,8 @@ relationship between this piece of code and the associated Python function using
.. note::
- Support for the ``perf`` profiler is only currently available for Linux on
- selected architectures. Check the output of the configure build step or
+ Support for the ``perf`` profiler is currently only available for Linux on
+ select architectures. Check the output of the ``configure`` build step or
check the output of ``python -m sysconfig | grep HAVE_PERF_TRAMPOLINE``
to see if your system is supported.
@@ -52,11 +52,11 @@ For example, consider the following script:
if __name__ == "__main__":
baz(1000000)
-We can run ``perf`` to sample CPU stack traces at 9999 Hertz::
+We can run ``perf`` to sample CPU stack traces at 9999 hertz::
$ perf record -F 9999 -g -o perf.data python my_script.py
-Then we can use ``perf`` report to analyze the data:
+Then we can use ``perf report`` to analyze the data:
.. code-block:: shell-session
@@ -97,7 +97,7 @@ Then we can use ``perf`` report to analyze the data:
| | | | | |--2.97%--_PyObject_Malloc
...
-As you can see here, the Python functions are not shown in the output, only ``_Py_Eval_EvalFrameDefault`` appears
+As you can see, the Python functions are not shown in the output, only ``_Py_Eval_EvalFrameDefault``
(the function that evaluates the Python bytecode) shows up. Unfortunately that's not very useful because all Python
functions use the same C function to evaluate bytecode so we cannot know which Python function corresponds to which
bytecode-evaluating function.
@@ -151,7 +151,7 @@ Instead, if we run the same experiment with ``perf`` support enabled we get:
How to enable ``perf`` profiling support
----------------------------------------
-``perf`` profiling support can either be enabled from the start using
+``perf`` profiling support can be enabled either from the start using
the environment variable :envvar:`PYTHONPERFSUPPORT` or the
:option:`-X perf <-X>` option,
or dynamically using :func:`sys.activate_stack_trampoline` and
@@ -192,7 +192,7 @@ Example, using the :mod:`sys` APIs in file :file:`example.py`:
How to obtain the best results
------------------------------
-For the best results, Python should be compiled with
+For best results, Python should be compiled with
``CFLAGS="-fno-omit-frame-pointer -mno-omit-leaf-frame-pointer"`` as this allows
profilers to unwind using only the frame pointer and not on DWARF debug
information. This is because as the code that is interposed to allow ``perf``
diff --git a/Doc/howto/urllib2.rst b/Doc/howto/urllib2.rst
index 69af3c3a85c5d6..61ba6bd7224fcc 100644
--- a/Doc/howto/urllib2.rst
+++ b/Doc/howto/urllib2.rst
@@ -86,7 +86,7 @@ response::
import urllib.request
- req = urllib.request.Request('http://www.voidspace.org.uk')
+ req = urllib.request.Request('http://python.org/')
with urllib.request.urlopen(req) as response:
the_page = response.read()
@@ -458,7 +458,7 @@ To illustrate creating and installing a handler we will use the
``HTTPBasicAuthHandler``. For a more detailed discussion of this subject --
including an explanation of how Basic Authentication works - see the `Basic
Authentication Tutorial
-`_.
+`__.
When authentication is required, the server sends a header (as well as the 401
error code) requesting authentication. This specifies the authentication scheme
diff --git a/Doc/library/__main__.rst b/Doc/library/__main__.rst
index 761c88710f9891..d29cbdff7830c8 100644
--- a/Doc/library/__main__.rst
+++ b/Doc/library/__main__.rst
@@ -124,7 +124,7 @@ This is where using the ``if __name__ == '__main__'`` code block comes in
handy. Code within this block won't run unless the module is executed in the
top-level environment.
-Putting as few statements as possible in the block below ``if __name___ ==
+Putting as few statements as possible in the block below ``if __name__ ==
'__main__'`` can improve code clarity and correctness. Most often, a function
named ``main`` encapsulates the program's primary behavior::
diff --git a/Doc/library/argparse.rst b/Doc/library/argparse.rst
index ee68ac58d3de75..33e367f3ccda89 100644
--- a/Doc/library/argparse.rst
+++ b/Doc/library/argparse.rst
@@ -67,7 +67,7 @@ default_ Default value used when an argument is not provided
dest_ Specify the attribute name used in the result namespace
help_ Help message for an argument
metavar_ Alternate display name for the argument as shown in help
-nargs_ Number of times the argument can be used :class:`int`, ``'?'``, ``'*'``, ``'+'``, or ``argparse.REMAINDER``
+nargs_ Number of times the argument can be used :class:`int`, ``'?'``, ``'*'``, or ``'+'``
required_ Indicate whether an argument is required or optional ``True`` or ``False``
type_ Automatically convert an argument to the given type :class:`int`, :class:`float`, ``argparse.FileType('w')``, or callable function
====================== =========================================================== ==========================================================================================================================
@@ -585,7 +585,7 @@ arguments will never be treated as file references.
.. versionchanged:: 3.12
:class:`ArgumentParser` changed encoding and errors to read arguments files
- from default (e.g. :func:`locale.getpreferredencoding(False)` and
+ from default (e.g. :func:`locale.getpreferredencoding(False) ` and
``"strict"``) to :term:`filesystem encoding and error handler`.
Arguments file should be encoded in UTF-8 instead of ANSI Codepage on Windows.
@@ -1191,7 +1191,7 @@ done downstream after the arguments are parsed.
For example, JSON or YAML conversions have complex error cases that require
better reporting than can be given by the ``type`` keyword. A
:exc:`~json.JSONDecodeError` would not be well formatted and a
-:exc:`FileNotFound` exception would not be handled at all.
+:exc:`FileNotFoundError` exception would not be handled at all.
Even :class:`~argparse.FileType` has its limitations for use with the ``type``
keyword. If one argument uses *FileType* and then a subsequent argument fails,
@@ -1445,7 +1445,7 @@ Action classes
Action classes implement the Action API, a callable which returns a callable
which processes arguments from the command-line. Any object which follows
this API may be passed as the ``action`` parameter to
-:meth:`add_argument`.
+:meth:`~ArgumentParser.add_argument`.
.. class:: Action(option_strings, dest, nargs=None, const=None, default=None, \
type=None, choices=None, required=False, help=None, \
@@ -1723,7 +1723,7 @@ Sub-commands
:class:`ArgumentParser` supports the creation of such sub-commands with the
:meth:`add_subparsers` method. The :meth:`add_subparsers` method is normally
called with no arguments and returns a special action object. This object
- has a single method, :meth:`~ArgumentParser.add_parser`, which takes a
+ has a single method, :meth:`~_SubParsersAction.add_parser`, which takes a
command name and any :class:`ArgumentParser` constructor arguments, and
returns an :class:`ArgumentParser` object that can be modified as usual.
@@ -1789,7 +1789,7 @@ Sub-commands
for that particular parser will be printed. The help message will not
include parent parser or sibling parser messages. (A help message for each
subparser command, however, can be given by supplying the ``help=`` argument
- to :meth:`add_parser` as above.)
+ to :meth:`~_SubParsersAction.add_parser` as above.)
::
@@ -2157,7 +2157,7 @@ the populated namespace and the list of remaining argument strings.
.. warning::
:ref:`Prefix matching ` rules apply to
- :meth:`parse_known_args`. The parser may consume an option even if it's just
+ :meth:`~ArgumentParser.parse_known_args`. The parser may consume an option even if it's just
a prefix of one of its known options, instead of leaving it in the remaining
arguments list.
@@ -2218,7 +2218,7 @@ support this parsing style.
These parsers do not support all the argparse features, and will raise
exceptions if unsupported features are used. In particular, subparsers,
-``argparse.REMAINDER``, and mutually exclusive groups that include both
+and mutually exclusive groups that include both
optionals and positionals are not supported.
The following example shows the difference between
@@ -2295,3 +2295,17 @@ A partial upgrade path from :mod:`optparse` to :mod:`argparse`:
* Replace the OptionParser constructor ``version`` argument with a call to
``parser.add_argument('--version', action='version', version='')``.
+
+Exceptions
+----------
+
+.. exception:: ArgumentError
+
+ An error from creating or using an argument (optional or positional).
+
+ The string value of this exception is the message, augmented with
+ information about the argument that caused it.
+
+.. exception:: ArgumentTypeError
+
+ Raised when something goes wrong converting a command line string to a type.
diff --git a/Doc/library/asyncio-eventloop.rst b/Doc/library/asyncio-eventloop.rst
index 5138afc2bbe47b..e982cc166a3f2d 100644
--- a/Doc/library/asyncio-eventloop.rst
+++ b/Doc/library/asyncio-eventloop.rst
@@ -1438,9 +1438,7 @@ async/await code consider using the high-level
* *stdin* can be any of these:
- * a file-like object representing a pipe to be connected to the
- subprocess's standard input stream using
- :meth:`~loop.connect_write_pipe`
+ * a file-like object
* the :const:`subprocess.PIPE` constant (default) which will create a new
pipe and connect it,
* the value ``None`` which will make the subprocess inherit the file
@@ -1450,9 +1448,7 @@ async/await code consider using the high-level
* *stdout* can be any of these:
- * a file-like object representing a pipe to be connected to the
- subprocess's standard output stream using
- :meth:`~loop.connect_write_pipe`
+ * a file-like object
* the :const:`subprocess.PIPE` constant (default) which will create a new
pipe and connect it,
* the value ``None`` which will make the subprocess inherit the file
@@ -1462,9 +1458,7 @@ async/await code consider using the high-level
* *stderr* can be any of these:
- * a file-like object representing a pipe to be connected to the
- subprocess's standard error stream using
- :meth:`~loop.connect_write_pipe`
+ * a file-like object
* the :const:`subprocess.PIPE` constant (default) which will create a new
pipe and connect it,
* the value ``None`` which will make the subprocess inherit the file
@@ -1483,6 +1477,11 @@ async/await code consider using the high-level
as text. :func:`bytes.decode` can be used to convert the bytes returned
from the stream to text.
+ If a file-like object passed as *stdin*, *stdout* or *stderr* represents a
+ pipe, then the other side of this pipe should be registered with
+ :meth:`~loop.connect_write_pipe` or :meth:`~loop.connect_read_pipe` for use
+ with the event loop.
+
See the constructor of the :class:`subprocess.Popen` class
for documentation on other arguments.
@@ -1571,7 +1570,7 @@ Server objects are created by :meth:`loop.create_server`,
:meth:`loop.create_unix_server`, :func:`start_server`,
and :func:`start_unix_server` functions.
-Do not instantiate the class directly.
+Do not instantiate the :class:`Server` class directly.
.. class:: Server
@@ -1662,7 +1661,8 @@ Do not instantiate the class directly.
.. attribute:: sockets
- List of :class:`socket.socket` objects the server is listening on.
+ List of socket-like objects, ``asyncio.trsock.TransportSocket``, which
+ the server is listening on.
.. versionchanged:: 3.7
Prior to Python 3.7 ``Server.sockets`` used to return an
diff --git a/Doc/library/asyncio-subprocess.rst b/Doc/library/asyncio-subprocess.rst
index 4274638c5e8625..b7c83aa04c09f1 100644
--- a/Doc/library/asyncio-subprocess.rst
+++ b/Doc/library/asyncio-subprocess.rst
@@ -207,8 +207,9 @@ their completion.
Interact with process:
1. send data to *stdin* (if *input* is not ``None``);
- 2. read data from *stdout* and *stderr*, until EOF is reached;
- 3. wait for process to terminate.
+ 2. close *stdin*;
+ 3. read data from *stdout* and *stderr*, until EOF is reached;
+ 4. wait for process to terminate.
The optional *input* argument is the data (:class:`bytes` object)
that will be sent to the child process.
@@ -229,6 +230,10 @@ their completion.
Note, that the data read is buffered in memory, so do not use
this method if the data size is large or unlimited.
+ .. versionchanged:: 3.12
+
+ *stdin* gets closed when ``input=None`` too.
+
.. method:: send_signal(signal)
Sends the signal *signal* to the child process.
diff --git a/Doc/library/asyncio-task.rst b/Doc/library/asyncio-task.rst
index 41d09e1e79705c..ba0f909c405a34 100644
--- a/Doc/library/asyncio-task.rst
+++ b/Doc/library/asyncio-task.rst
@@ -256,8 +256,9 @@ Creating Tasks
.. note::
- :meth:`asyncio.TaskGroup.create_task` is a newer alternative
- that allows for convenient waiting for a group of related tasks.
+ :meth:`asyncio.TaskGroup.create_task` is a new alternative
+ leveraging structural concurrency; it allows for waiting
+ for a group of related tasks with strong safety guarantees.
.. important::
@@ -340,7 +341,7 @@ Example::
async with asyncio.TaskGroup() as tg:
task1 = tg.create_task(some_coro(...))
task2 = tg.create_task(another_coro(...))
- print("Both tasks have completed now.")
+ print(f"Both tasks have completed now: {task1.result()}, {task2.result()}")
The ``async with`` statement will wait for all tasks in the group to finish.
While waiting, new tasks may still be added to the group
@@ -459,8 +460,12 @@ Running Tasks Concurrently
Tasks/Futures to be cancelled.
.. note::
- A more modern way to create and run tasks concurrently and
- wait for their completion is :class:`asyncio.TaskGroup`.
+ A new alternative to create and run tasks concurrently and
+ wait for their completion is :class:`asyncio.TaskGroup`. *TaskGroup*
+ provides stronger safety guarantees than *gather* for scheduling a nesting of subtasks:
+ if a task (or a subtask, a task scheduled by a task)
+ raises an exception, *TaskGroup* will, while *gather* will not,
+ cancel the remaining scheduled tasks.
.. _asyncio_example_gather:
@@ -829,6 +834,9 @@ Waiting Primitives
Deprecation warning is emitted if not all awaitable objects in the *aws*
iterable are Future-like objects and there is no running event loop.
+ .. versionchanged:: 3.12
+ Added support for generators yielding tasks.
+
Running in Threads
==================
diff --git a/Doc/library/calendar.rst b/Doc/library/calendar.rst
index 66f59f0e2ced27..07d04a1c7b582a 100644
--- a/Doc/library/calendar.rst
+++ b/Doc/library/calendar.rst
@@ -28,6 +28,58 @@ interpreted as prescribed by the ISO 8601 standard. Year 0 is 1 BC, year -1 is
2 BC, and so on.
+.. class:: Day
+
+ Enumeration defining the days of the week as integer constants, from 0 to 6.
+
+ .. attribute:: MONDAY
+
+ .. attribute:: TUESDAY
+
+ .. attribute:: WEDNESDAY
+
+ .. attribute:: THURSDAY
+
+ .. attribute:: FRIDAY
+
+ .. attribute:: SATURDAY
+
+ .. attribute:: SUNDAY
+
+ .. versionadded:: 3.12
+
+
+.. class:: Month
+
+ Enumeration defining months of the year as integer constants, from 1 to 12.
+
+ .. attribute:: JANUARY
+
+ .. attribute:: FEBRUARY
+
+ .. attribute:: MARCH
+
+ .. attribute:: APRIL
+
+ .. attribute:: MAY
+
+ .. attribute:: JUNE
+
+ .. attribute:: JULY
+
+ .. attribute:: AUGUST
+
+ .. attribute:: SEPTEMBER
+
+ .. attribute:: OCTOBER
+
+ .. attribute:: NOVEMBER
+
+ .. attribute:: DECEMBER
+
+ .. versionadded:: 3.12
+
+
.. class:: Calendar(firstweekday=0)
Creates a :class:`Calendar` object. *firstweekday* is an integer specifying the
diff --git a/Doc/library/contextlib.rst b/Doc/library/contextlib.rst
index 1b55868c3aa62f..7cd081d1f54f43 100644
--- a/Doc/library/contextlib.rst
+++ b/Doc/library/contextlib.rst
@@ -304,8 +304,15 @@ Functions and classes provided:
This context manager is :ref:`reentrant `.
+ If the code within the :keyword:`!with` block raises an
+ :exc:`ExceptionGroup`, suppressed exceptions are removed from the
+ group. If any exceptions in the group are not suppressed, a group containing them is re-raised.
+
.. versionadded:: 3.4
+ .. versionchanged:: 3.12
+ ``suppress`` now supports suppressing exceptions raised as
+ part of an :exc:`ExceptionGroup`.
.. function:: redirect_stdout(new_target)
diff --git a/Doc/library/copyreg.rst b/Doc/library/copyreg.rst
index 866b180f4bc3b8..2107215c0c1967 100644
--- a/Doc/library/copyreg.rst
+++ b/Doc/library/copyreg.rst
@@ -28,8 +28,8 @@ Such constructors may be factory functions or class instances.
.. function:: pickle(type, function, constructor_ob=None)
Declares that *function* should be used as a "reduction" function for objects
- of type *type*. *function* should return either a string or a tuple
- containing two or three elements. See the :attr:`~pickle.Pickler.dispatch_table`
+ of type *type*. *function* must return either a string or a tuple
+ containing two to five elements. See the :attr:`~pickle.Pickler.dispatch_table`
for more details on the interface of *function*.
The *constructor_ob* parameter is a legacy feature and is now ignored, but if
diff --git a/Doc/library/dataclasses.rst b/Doc/library/dataclasses.rst
index 5f4dc25bfd7877..a5b20149921042 100644
--- a/Doc/library/dataclasses.rst
+++ b/Doc/library/dataclasses.rst
@@ -12,8 +12,8 @@
--------------
This module provides a decorator and functions for automatically
-adding generated :term:`special method`\s such as :meth:`__init__` and
-:meth:`__repr__` to user-defined classes. It was originally described
+adding generated :term:`special method`\s such as :meth:`~object.__init__` and
+:meth:`~object.__repr__` to user-defined classes. It was originally described
in :pep:`557`.
The member variables to use in these generated methods are defined
@@ -31,7 +31,7 @@ using :pep:`526` type annotations. For example, this code::
def total_cost(self) -> float:
return self.unit_price * self.quantity_on_hand
-will add, among other things, a :meth:`__init__` that looks like::
+will add, among other things, a :meth:`~object.__init__` that looks like::
def __init__(self, name: str, unit_price: float, quantity_on_hand: int = 0):
self.name = name
@@ -86,86 +86,86 @@ Module contents
The parameters to :func:`dataclass` are:
- - ``init``: If true (the default), a :meth:`__init__` method will be
+ - ``init``: If true (the default), a :meth:`~object.__init__` method will be
generated.
- If the class already defines :meth:`__init__`, this parameter is
+ If the class already defines :meth:`~object.__init__`, this parameter is
ignored.
- - ``repr``: If true (the default), a :meth:`__repr__` method will be
+ - ``repr``: If true (the default), a :meth:`~object.__repr__` method will be
generated. The generated repr string will have the class name and
the name and repr of each field, in the order they are defined in
the class. Fields that are marked as being excluded from the repr
are not included. For example:
``InventoryItem(name='widget', unit_price=3.0, quantity_on_hand=10)``.
- If the class already defines :meth:`__repr__`, this parameter is
+ If the class already defines :meth:`~object.__repr__`, this parameter is
ignored.
- - ``eq``: If true (the default), an :meth:`__eq__` method will be
+ - ``eq``: If true (the default), an :meth:`~object.__eq__` method will be
generated. This method compares the class as if it were a tuple
of its fields, in order. Both instances in the comparison must
be of the identical type.
- If the class already defines :meth:`__eq__`, this parameter is
+ If the class already defines :meth:`~object.__eq__`, this parameter is
ignored.
- - ``order``: If true (the default is ``False``), :meth:`__lt__`,
- :meth:`__le__`, :meth:`__gt__`, and :meth:`__ge__` methods will be
+ - ``order``: If true (the default is ``False``), :meth:`~object.__lt__`,
+ :meth:`~object.__le__`, :meth:`~object.__gt__`, and :meth:`~object.__ge__` methods will be
generated. These compare the class as if it were a tuple of its
fields, in order. Both instances in the comparison must be of the
identical type. If ``order`` is true and ``eq`` is false, a
:exc:`ValueError` is raised.
- If the class already defines any of :meth:`__lt__`,
- :meth:`__le__`, :meth:`__gt__`, or :meth:`__ge__`, then
+ If the class already defines any of :meth:`~object.__lt__`,
+ :meth:`~object.__le__`, :meth:`~object.__gt__`, or :meth:`~object.__ge__`, then
:exc:`TypeError` is raised.
- - ``unsafe_hash``: If ``False`` (the default), a :meth:`__hash__` method
+ - ``unsafe_hash``: If ``False`` (the default), a :meth:`~object.__hash__` method
is generated according to how ``eq`` and ``frozen`` are set.
- :meth:`__hash__` is used by built-in :meth:`hash()`, and when objects are
+ :meth:`~object.__hash__` is used by built-in :meth:`hash()`, and when objects are
added to hashed collections such as dictionaries and sets. Having a
- :meth:`__hash__` implies that instances of the class are immutable.
+ :meth:`~object.__hash__` implies that instances of the class are immutable.
Mutability is a complicated property that depends on the programmer's
- intent, the existence and behavior of :meth:`__eq__`, and the values of
+ intent, the existence and behavior of :meth:`~object.__eq__`, and the values of
the ``eq`` and ``frozen`` flags in the :func:`dataclass` decorator.
- By default, :func:`dataclass` will not implicitly add a :meth:`__hash__`
+ By default, :func:`dataclass` will not implicitly add a :meth:`~object.__hash__`
method unless it is safe to do so. Neither will it add or change an
- existing explicitly defined :meth:`__hash__` method. Setting the class
+ existing explicitly defined :meth:`~object.__hash__` method. Setting the class
attribute ``__hash__ = None`` has a specific meaning to Python, as
- described in the :meth:`__hash__` documentation.
+ described in the :meth:`~object.__hash__` documentation.
- If :meth:`__hash__` is not explicitly defined, or if it is set to ``None``,
- then :func:`dataclass` *may* add an implicit :meth:`__hash__` method.
+ If :meth:`~object.__hash__` is not explicitly defined, or if it is set to ``None``,
+ then :func:`dataclass` *may* add an implicit :meth:`~object.__hash__` method.
Although not recommended, you can force :func:`dataclass` to create a
- :meth:`__hash__` method with ``unsafe_hash=True``. This might be the case
+ :meth:`~object.__hash__` method with ``unsafe_hash=True``. This might be the case
if your class is logically immutable but can nonetheless be mutated.
This is a specialized use case and should be considered carefully.
- Here are the rules governing implicit creation of a :meth:`__hash__`
- method. Note that you cannot both have an explicit :meth:`__hash__`
+ Here are the rules governing implicit creation of a :meth:`~object.__hash__`
+ method. Note that you cannot both have an explicit :meth:`~object.__hash__`
method in your dataclass and set ``unsafe_hash=True``; this will result
in a :exc:`TypeError`.
If ``eq`` and ``frozen`` are both true, by default :func:`dataclass` will
- generate a :meth:`__hash__` method for you. If ``eq`` is true and
- ``frozen`` is false, :meth:`__hash__` will be set to ``None``, marking it
+ generate a :meth:`~object.__hash__` method for you. If ``eq`` is true and
+ ``frozen`` is false, :meth:`~object.__hash__` will be set to ``None``, marking it
unhashable (which it is, since it is mutable). If ``eq`` is false,
- :meth:`__hash__` will be left untouched meaning the :meth:`__hash__`
+ :meth:`~object.__hash__` will be left untouched meaning the :meth:`~object.__hash__`
method of the superclass will be used (if the superclass is
:class:`object`, this means it will fall back to id-based hashing).
- ``frozen``: If true (the default is ``False``), assigning to fields will
generate an exception. This emulates read-only frozen instances. If
- :meth:`__setattr__` or :meth:`__delattr__` is defined in the class, then
+ :meth:`~object.__setattr__` or :meth:`~object.__delattr__` is defined in the class, then
:exc:`TypeError` is raised. See the discussion below.
- ``match_args``: If true (the default is ``True``), the
``__match_args__`` tuple will be created from the list of
- parameters to the generated :meth:`__init__` method (even if
- :meth:`__init__` is not generated, see above). If false, or if
+ parameters to the generated :meth:`~object.__init__` method (even if
+ :meth:`~object.__init__` is not generated, see above). If false, or if
``__match_args__`` is already defined in the class, then
``__match_args__`` will not be generated.
@@ -173,18 +173,18 @@ Module contents
- ``kw_only``: If true (the default value is ``False``), then all
fields will be marked as keyword-only. If a field is marked as
- keyword-only, then the only effect is that the :meth:`__init__`
+ keyword-only, then the only effect is that the :meth:`~object.__init__`
parameter generated from a keyword-only field must be specified
- with a keyword when :meth:`__init__` is called. There is no
+ with a keyword when :meth:`~object.__init__` is called. There is no
effect on any other aspect of dataclasses. See the
:term:`parameter` glossary entry for details. Also see the
:const:`KW_ONLY` section.
.. versionadded:: 3.10
- - ``slots``: If true (the default is ``False``), :attr:`__slots__` attribute
+ - ``slots``: If true (the default is ``False``), :attr:`~object.__slots__` attribute
will be generated and new class will be returned instead of the original one.
- If :attr:`__slots__` is already defined in the class, then :exc:`TypeError`
+ If :attr:`~object.__slots__` is already defined in the class, then :exc:`TypeError`
is raised.
.. versionadded:: 3.10
@@ -215,7 +215,7 @@ Module contents
b: int = 0 # assign a default value for 'b'
In this example, both ``a`` and ``b`` will be included in the added
- :meth:`__init__` method, which will be defined as::
+ :meth:`~object.__init__` method, which will be defined as::
def __init__(self, a: int, b: int = 0):
@@ -256,13 +256,13 @@ Module contents
error to specify both ``default`` and ``default_factory``.
- ``init``: If true (the default), this field is included as a
- parameter to the generated :meth:`__init__` method.
+ parameter to the generated :meth:`~object.__init__` method.
- ``repr``: If true (the default), this field is included in the
- string returned by the generated :meth:`__repr__` method.
+ string returned by the generated :meth:`~object.__repr__` method.
- ``hash``: This can be a bool or ``None``. If true, this field is
- included in the generated :meth:`__hash__` method. If ``None`` (the
+ included in the generated :meth:`~object.__hash__` method. If ``None`` (the
default), use the value of ``compare``: this would normally be
the expected behavior. A field should be considered in the hash
if it's used for comparisons. Setting this value to anything
@@ -275,8 +275,8 @@ Module contents
is excluded from the hash, it will still be used for comparisons.
- ``compare``: If true (the default), this field is included in the
- generated equality and comparison methods (:meth:`__eq__`,
- :meth:`__gt__`, et al.).
+ generated equality and comparison methods (:meth:`~object.__eq__`,
+ :meth:`~object.__gt__`, et al.).
- ``metadata``: This can be a mapping or None. None is treated as
an empty dict. This value is wrapped in
@@ -287,7 +287,7 @@ Module contents
namespace in the metadata.
- ``kw_only``: If true, this field will be marked as keyword-only.
- This is used when the generated :meth:`__init__` method's
+ This is used when the generated :meth:`~object.__init__` method's
parameters are computed.
.. versionadded:: 3.10
@@ -435,13 +435,13 @@ Module contents
Class, raises :exc:`TypeError`. If values in ``changes`` do not
specify fields, raises :exc:`TypeError`.
- The newly returned object is created by calling the :meth:`__init__`
+ The newly returned object is created by calling the :meth:`~object.__init__`
method of the dataclass. This ensures that
:meth:`__post_init__`, if present, is also called.
Init-only variables without default values, if any exist, must be
specified on the call to :func:`replace` so that they can be passed to
- :meth:`__init__` and :meth:`__post_init__`.
+ :meth:`~object.__init__` and :meth:`__post_init__`.
It is an error for ``changes`` to contain any fields that are
defined as having ``init=False``. A :exc:`ValueError` will be raised
@@ -480,7 +480,7 @@ Module contents
:const:`KW_ONLY` is otherwise completely ignored. This includes the
name of such a field. By convention, a name of ``_`` is used for a
:const:`KW_ONLY` field. Keyword-only fields signify
- :meth:`__init__` parameters that must be specified as keywords when
+ :meth:`~object.__init__` parameters that must be specified as keywords when
the class is instantiated.
In this example, the fields ``y`` and ``z`` will be marked as keyword-only fields::
@@ -501,35 +501,38 @@ Module contents
.. exception:: FrozenInstanceError
- Raised when an implicitly defined :meth:`__setattr__` or
- :meth:`__delattr__` is called on a dataclass which was defined with
+ Raised when an implicitly defined :meth:`~object.__setattr__` or
+ :meth:`~object.__delattr__` is called on a dataclass which was defined with
``frozen=True``. It is a subclass of :exc:`AttributeError`.
+.. _post-init-processing:
+
Post-init processing
--------------------
-The generated :meth:`__init__` code will call a method named
-:meth:`__post_init__`, if :meth:`__post_init__` is defined on the
-class. It will normally be called as ``self.__post_init__()``.
-However, if any ``InitVar`` fields are defined, they will also be
-passed to :meth:`__post_init__` in the order they were defined in the
-class. If no :meth:`__init__` method is generated, then
-:meth:`__post_init__` will not automatically be called.
+.. function:: __post_init__()
-Among other uses, this allows for initializing field values that
-depend on one or more other fields. For example::
+ When defined on the class, it will be called by the generated
+ :meth:`~object.__init__`, normally as ``self.__post_init__()``.
+ However, if any ``InitVar`` fields are defined, they will also be
+ passed to :meth:`__post_init__` in the order they were defined in the
+ class. If no :meth:`~object.__init__` method is generated, then
+ :meth:`__post_init__` will not automatically be called.
- @dataclass
- class C:
- a: float
- b: float
- c: float = field(init=False)
+ Among other uses, this allows for initializing field values that
+ depend on one or more other fields. For example::
- def __post_init__(self):
- self.c = self.a + self.b
+ @dataclass
+ class C:
+ a: float
+ b: float
+ c: float = field(init=False)
+
+ def __post_init__(self):
+ self.c = self.a + self.b
-The :meth:`__init__` method generated by :func:`dataclass` does not call base
-class :meth:`__init__` methods. If the base class has an :meth:`__init__` method
+The :meth:`~object.__init__` method generated by :func:`dataclass` does not call base
+class :meth:`~object.__init__` methods. If the base class has an :meth:`~object.__init__` method
that has to be called, it is common to call this method in a
:meth:`__post_init__` method::
@@ -545,7 +548,7 @@ that has to be called, it is common to call this method in a
def __post_init__(self):
super().__init__(self.side, self.side)
-Note, however, that in general the dataclass-generated :meth:`__init__` methods
+Note, however, that in general the dataclass-generated :meth:`~object.__init__` methods
don't need to be called, since the derived dataclass will take care of
initializing all fields of any base class that is a dataclass itself.
@@ -573,7 +576,7 @@ if the type of a field is of type ``dataclasses.InitVar``. If a field
is an ``InitVar``, it is considered a pseudo-field called an init-only
field. As it is not a true field, it is not returned by the
module-level :func:`fields` function. Init-only fields are added as
-parameters to the generated :meth:`__init__` method, and are passed to
+parameters to the generated :meth:`~object.__init__` method, and are passed to
the optional :meth:`__post_init__` method. They are not otherwise used
by dataclasses.
@@ -601,12 +604,12 @@ Frozen instances
It is not possible to create truly immutable Python objects. However,
by passing ``frozen=True`` to the :meth:`dataclass` decorator you can
emulate immutability. In that case, dataclasses will add
-:meth:`__setattr__` and :meth:`__delattr__` methods to the class. These
+:meth:`~object.__setattr__` and :meth:`~object.__delattr__` methods to the class. These
methods will raise a :exc:`FrozenInstanceError` when invoked.
There is a tiny performance penalty when using ``frozen=True``:
-:meth:`__init__` cannot use simple assignment to initialize fields, and
-must use :meth:`object.__setattr__`.
+:meth:`~object.__init__` cannot use simple assignment to initialize fields, and
+must use :meth:`~object.__setattr__`.
Inheritance
-----------
@@ -634,14 +637,14 @@ example::
The final list of fields is, in order, ``x``, ``y``, ``z``. The final
type of ``x`` is ``int``, as specified in class ``C``.
-The generated :meth:`__init__` method for ``C`` will look like::
+The generated :meth:`~object.__init__` method for ``C`` will look like::
def __init__(self, x: int = 15, y: int = 0, z: int = 10):
-Re-ordering of keyword-only parameters in :meth:`__init__`
-----------------------------------------------------------
+Re-ordering of keyword-only parameters in :meth:`~object.__init__`
+------------------------------------------------------------------
-After the parameters needed for :meth:`__init__` are computed, any
+After the parameters needed for :meth:`~object.__init__` are computed, any
keyword-only parameters are moved to come after all regular
(non-keyword-only) parameters. This is a requirement of how
keyword-only parameters are implemented in Python: they must come
@@ -662,7 +665,7 @@ fields, and ``Base.x`` and ``D.z`` are regular fields::
z: int = 10
t: int = field(kw_only=True, default=0)
-The generated :meth:`__init__` method for ``D`` will look like::
+The generated :meth:`~object.__init__` method for ``D`` will look like::
def __init__(self, x: Any = 15.0, z: int = 10, *, y: int = 0, w: int = 1, t: int = 0):
@@ -671,7 +674,7 @@ the list of fields: parameters derived from regular fields are
followed by parameters derived from keyword-only fields.
The relative ordering of keyword-only parameters is maintained in the
-re-ordered :meth:`__init__` parameter list.
+re-ordered :meth:`~object.__init__` parameter list.
Default factory functions
@@ -683,10 +686,10 @@ example, to create a new instance of a list, use::
mylist: list = field(default_factory=list)
-If a field is excluded from :meth:`__init__` (using ``init=False``)
+If a field is excluded from :meth:`~object.__init__` (using ``init=False``)
and the field also specifies ``default_factory``, then the default
factory function will always be called from the generated
-:meth:`__init__` function. This happens because there is no other
+:meth:`~object.__init__` function. This happens because there is no other
way to give the field an initial value.
Mutable default values
@@ -714,7 +717,7 @@ Using dataclasses, *if* this code was valid::
@dataclass
class D:
- x: List = []
+ x: list = [] # This code raises ValueError
def add(self, element):
self.x += element
diff --git a/Doc/library/datetime.rst b/Doc/library/datetime.rst
index 761f5f04b9b288..bed19ad145a20c 100644
--- a/Doc/library/datetime.rst
+++ b/Doc/library/datetime.rst
@@ -896,6 +896,10 @@ Other constructors, all class methods:
in UTC. As such, the recommended way to create an object representing the
current time in UTC is by calling ``datetime.now(timezone.utc)``.
+ .. deprecated:: 3.12
+
+ Use :meth:`datetime.now` with :attr:`UTC` instead.
+
.. classmethod:: datetime.fromtimestamp(timestamp, tz=None)
@@ -964,6 +968,10 @@ Other constructors, all class methods:
:c:func:`gmtime` function. Raise :exc:`OSError` instead of
:exc:`ValueError` on :c:func:`gmtime` failure.
+ .. deprecated:: 3.12
+
+ Use :meth:`datetime.fromtimestamp` with :attr:`UTC` instead.
+
.. classmethod:: datetime.fromordinal(ordinal)
@@ -1043,7 +1051,7 @@ Other constructors, all class methods:
Return a :class:`.datetime` corresponding to *date_string*, parsed according to
*format*.
- This is equivalent to::
+ If *format* does not contain microseconds or timezone information, this is equivalent to::
datetime(*(time.strptime(date_string, format)[0:6]))
@@ -2510,10 +2518,7 @@ Notes:
Because the format depends on the current locale, care should be taken when
making assumptions about the output value. Field orderings will vary (for
example, "month/day/year" versus "day/month/year"), and the output may
- contain Unicode characters encoded using the locale's default encoding (for
- example, if the current locale is ``ja_JP``, the default encoding could be
- any one of ``eucJP``, ``SJIS``, or ``utf-8``; use :meth:`locale.getlocale`
- to determine the current locale's encoding).
+ contain non-ASCII characters.
(2)
The :meth:`strptime` method can parse years in the full [1, 9999] range, but
diff --git a/Doc/library/decimal.rst b/Doc/library/decimal.rst
index 6187098a752053..0b4a4973cb4da0 100644
--- a/Doc/library/decimal.rst
+++ b/Doc/library/decimal.rst
@@ -926,7 +926,7 @@ Each thread has its own current context which is accessed or changed using the
You can also use the :keyword:`with` statement and the :func:`localcontext`
function to temporarily change the active context.
-.. function:: localcontext(ctx=None, \*\*kwargs)
+.. function:: localcontext(ctx=None, **kwargs)
Return a context manager that will set the current context for the active thread
to a copy of *ctx* on entry to the with-statement and restore the previous context
diff --git a/Doc/library/dis.rst b/Doc/library/dis.rst
index 19c08b225ef2bc..6c3f436ddb1494 100644
--- a/Doc/library/dis.rst
+++ b/Doc/library/dis.rst
@@ -402,7 +402,7 @@ The Python compiler currently generates the following bytecode instructions.
**General instructions**
-In the following, We will refer to the interpreter stack as STACK and describe
+In the following, we will refer to the interpreter stack as ``STACK`` and describe
operations on it as if it was a Python list. The top of the stack corresponds to
``STACK[-1]`` in this language.
@@ -414,7 +414,7 @@ operations on it as if it was a Python list. The top of the stack corresponds to
.. opcode:: POP_TOP
- Removes the top-of-stack item.::
+ Removes the top-of-stack item::
STACK.pop()
@@ -422,7 +422,7 @@ operations on it as if it was a Python list. The top of the stack corresponds to
.. opcode:: END_FOR
Removes the top two values from the stack.
- Equivalent to POP_TOP; POP_TOP.
+ Equivalent to ``POP_TOP``; ``POP_TOP``.
Used to clean up at the end of loops, hence the name.
.. versionadded:: 3.12
@@ -431,7 +431,7 @@ operations on it as if it was a Python list. The top of the stack corresponds to
.. opcode:: COPY (i)
Push the i-th item to the top of the stack without removing it from its original
- location.::
+ location::
assert i > 0
STACK.append(STACK[-i])
@@ -441,7 +441,7 @@ operations on it as if it was a Python list. The top of the stack corresponds to
.. opcode:: SWAP (i)
- Swap the top of the stack with the i-th element.::
+ Swap the top of the stack with the i-th element::
STACK[-i], STACK[-1] = stack[-1], STACK[-i]
@@ -513,7 +513,7 @@ not have to be) the original ``STACK[-2]``.
.. opcode:: BINARY_OP (op)
Implements the binary and in-place operators (depending on the value of
- *op*).::
+ *op*)::
rhs = STACK.pop()
lhs = STACK.pop()
@@ -580,14 +580,14 @@ not have to be) the original ``STACK[-2]``.
Implements ``STACK[-1] = get_awaitable(STACK[-1])``, where ``get_awaitable(o)``
returns ``o`` if ``o`` is a coroutine object or a generator object with
- the CO_ITERABLE_COROUTINE flag, or resolves
+ the :data:`~inspect.CO_ITERABLE_COROUTINE` flag, or resolves
``o.__await__``.
If the ``where`` operand is nonzero, it indicates where the instruction
occurs:
- * ``1`` After a call to ``__aenter__``
- * ``2`` After a call to ``__aexit__``
+ * ``1``: After a call to ``__aenter__``
+ * ``2``: After a call to ``__aexit__``
.. versionadded:: 3.5
@@ -652,6 +652,7 @@ not have to be) the original ``STACK[-2]``.
.. opcode:: SET_ADD (i)
Implements::
+
item = STACK.pop()
set.add(STACK[-i], item)
@@ -705,11 +706,11 @@ iterations of the loop.
Yields ``STACK.pop()`` from a :term:`generator`.
- .. versionchanged:: 3.11
- oparg set to be the stack depth.
+ .. versionchanged:: 3.11
+ oparg set to be the stack depth.
- .. versionchanged:: 3.12
- oparg set to be the exception block depth, for efficient closing of generators.
+ .. versionchanged:: 3.12
+ oparg set to be the exception block depth, for efficient closing of generators.
.. opcode:: SETUP_ANNOTATIONS
@@ -726,32 +727,32 @@ iterations of the loop.
Pops a value from the stack, which is used to restore the exception state.
- .. versionchanged:: 3.11
- Exception representation on the stack now consist of one, not three, items.
+ .. versionchanged:: 3.11
+ Exception representation on the stack now consist of one, not three, items.
.. opcode:: RERAISE
- Re-raises the exception currently on top of the stack. If oparg is non-zero,
- pops an additional value from the stack which is used to set ``f_lasti``
- of the current frame.
+ Re-raises the exception currently on top of the stack. If oparg is non-zero,
+ pops an additional value from the stack which is used to set ``f_lasti``
+ of the current frame.
- .. versionadded:: 3.9
+ .. versionadded:: 3.9
- .. versionchanged:: 3.11
- Exception representation on the stack now consist of one, not three, items.
+ .. versionchanged:: 3.11
+ Exception representation on the stack now consist of one, not three, items.
.. opcode:: PUSH_EXC_INFO
- Pops a value from the stack. Pushes the current exception to the top of the stack.
- Pushes the value originally popped back to the stack.
- Used in exception handlers.
+ Pops a value from the stack. Pushes the current exception to the top of the stack.
+ Pushes the value originally popped back to the stack.
+ Used in exception handlers.
- .. versionadded:: 3.11
+ .. versionadded:: 3.11
.. opcode:: CHECK_EXC_MATCH
Performs exception matching for ``except``. Tests whether the ``STACK[-2]``
- is an exception matching ``STACK[-1]``. Pops STACK[-1] and pushes the boolean
+ is an exception matching ``STACK[-1]``. Pops ``STACK[-1]`` and pushes the boolean
result of the test.
.. versionadded:: 3.11
@@ -770,16 +771,16 @@ iterations of the loop.
.. opcode:: WITH_EXCEPT_START
- Calls the function in position 4 on the stack with arguments (type, val, tb)
- representing the exception at the top of the stack.
- Used to implement the call ``context_manager.__exit__(*exc_info())`` when an exception
- has occurred in a :keyword:`with` statement.
+ Calls the function in position 4 on the stack with arguments (type, val, tb)
+ representing the exception at the top of the stack.
+ Used to implement the call ``context_manager.__exit__(*exc_info())`` when an exception
+ has occurred in a :keyword:`with` statement.
- .. versionadded:: 3.9
+ .. versionadded:: 3.9
- .. versionchanged:: 3.11
- The ``__exit__`` function is in position 4 of the stack rather than 7.
- Exception representation on the stack now consist of one, not three, items.
+ .. versionchanged:: 3.11
+ The ``__exit__`` function is in position 4 of the stack rather than 7.
+ Exception representation on the stack now consist of one, not three, items.
.. opcode:: LOAD_ASSERTION_ERROR
@@ -863,7 +864,7 @@ iterations of the loop.
.. opcode:: UNPACK_SEQUENCE (count)
Unpacks ``STACK[-1]`` into *count* individual values, which are put onto the stack
- right-to-left.::
+ right-to-left::
STACK.extend(STACK.pop()[:count:-1])
@@ -1028,7 +1029,7 @@ iterations of the loop.
This bytecode distinguishes two cases: if ``STACK[-1]`` has a method with the
correct name, the bytecode pushes the unbound method and ``STACK[-1]``.
``STACK[-1]`` will be used as the first argument (``self``) by :opcode:`CALL`
- when calling the unbound method. Otherwise, ``NULL`` and the object return by
+ when calling the unbound method. Otherwise, ``NULL`` and the object returned by
the attribute lookup are pushed.
.. versionchanged:: 3.12
@@ -1036,6 +1037,24 @@ iterations of the loop.
pushed to the stack before the attribute or unbound method respectively.
+.. opcode:: LOAD_SUPER_ATTR (namei)
+
+ This opcode implements :func:`super` (e.g. ``super().method()`` and
+ ``super().attr``). It works the same as :opcode:`LOAD_ATTR`, except that
+ ``namei`` is shifted left by 2 bits instead of 1, and instead of expecting a
+ single receiver on the stack, it expects three objects (from top of stack
+ down): ``self`` (the first argument to the current method), ``cls`` (the
+ class within which the current method was defined), and the global ``super``.
+
+ The low bit of ``namei`` signals to attempt a method load, as with
+ :opcode:`LOAD_ATTR`.
+
+ The second-low bit of ``namei``, if set, means that this was a two-argument
+ call to :func:`super` (unset means zero-argument).
+
+ .. versionadded:: 3.12
+
+
.. opcode:: COMPARE_OP (opname)
Performs a Boolean operation. The operation name can be found in
@@ -1189,7 +1208,7 @@ iterations of the loop.
.. opcode:: MAKE_CELL (i)
- Creates a new cell in slot ``i``. If that slot is empty then
+ Creates a new cell in slot ``i``. If that slot is nonempty then
that value is stored into the new cell.
.. versionadded:: 3.11
@@ -1314,9 +1333,9 @@ iterations of the loop.
.. opcode:: PUSH_NULL
- Pushes a ``NULL`` to the stack.
- Used in the call sequence to match the ``NULL`` pushed by
- :opcode:`LOAD_METHOD` for non-method calls.
+ Pushes a ``NULL`` to the stack.
+ Used in the call sequence to match the ``NULL`` pushed by
+ :opcode:`LOAD_METHOD` for non-method calls.
.. versionadded:: 3.11
@@ -1416,38 +1435,38 @@ iterations of the loop.
.. opcode:: RESUME (where)
- A no-op. Performs internal tracing, debugging and optimization checks.
+ A no-op. Performs internal tracing, debugging and optimization checks.
- The ``where`` operand marks where the ``RESUME`` occurs:
+ The ``where`` operand marks where the ``RESUME`` occurs:
- * ``0`` The start of a function, which is neither a generator, coroutine
- nor an async generator
- * ``1`` After a ``yield`` expression
- * ``2`` After a ``yield from`` expression
- * ``3`` After an ``await`` expression
+ * ``0`` The start of a function, which is neither a generator, coroutine
+ nor an async generator
+ * ``1`` After a ``yield`` expression
+ * ``2`` After a ``yield from`` expression
+ * ``3`` After an ``await`` expression
.. versionadded:: 3.11
.. opcode:: RETURN_GENERATOR
- Create a generator, coroutine, or async generator from the current frame.
- Used as first opcode of in code object for the above mentioned callables.
- Clear the current frame and return the newly created generator.
+ Create a generator, coroutine, or async generator from the current frame.
+ Used as the first opcode in a code object for the above mentioned callables.
+ Clear the current frame and return the newly created generator.
- .. versionadded:: 3.11
+ .. versionadded:: 3.11
.. opcode:: SEND (delta)
- Equivalent to ``STACK[-1] = STACK[-2].send(STACK[-1])``. Used in ``yield from``
- and ``await`` statements.
+ Equivalent to ``STACK[-1] = STACK[-2].send(STACK[-1])``. Used in ``yield from``
+ and ``await`` statements.
- If the call raises :exc:`StopIteration`, pop both items, push the
- exception's ``value`` attribute, and increment the bytecode counter by
- *delta*.
+ If the call raises :exc:`StopIteration`, pop both items, push the
+ exception's ``value`` attribute, and increment the bytecode counter by
+ *delta*.
- .. versionadded:: 3.11
+ .. versionadded:: 3.11
.. opcode:: HAVE_ARGUMENT
@@ -1475,15 +1494,15 @@ iterations of the loop.
argument and sets ``STACK[-1]`` to the result. Used to implement
functionality that is necessary but not performance critical.
- The operand determines which intrinsic function is called:
+ The operand determines which intrinsic function is called:
- * ``0`` Not valid
- * ``1`` Prints the argument to standard out. Used in the REPL.
- * ``2`` Performs ``import *`` for the named module.
- * ``3`` Extracts the return value from a ``StopIteration`` exception.
- * ``4`` Wraps an aync generator value
- * ``5`` Performs the unary ``+`` operation
- * ``6`` Converts a list to a tuple
+ * ``0`` Not valid
+ * ``1`` Prints the argument to standard out. Used in the REPL.
+ * ``2`` Performs ``import *`` for the named module.
+ * ``3`` Extracts the return value from a ``StopIteration`` exception.
+ * ``4`` Wraps an async generator value
+ * ``5`` Performs the unary ``+`` operation
+ * ``6`` Converts a list to a tuple
.. versionadded:: 3.12
@@ -1493,17 +1512,17 @@ iterations of the loop.
arguments and sets ``STACK[-1]`` to the result. Used to implement functionality that is
necessary but not performance critical.
- The operand determines which intrinsic function is called:
+ The operand determines which intrinsic function is called:
- * ``0`` Not valid
- * ``1`` Calculates the :exc:`ExceptionGroup` to raise from a ``try-except*``.
+ * ``0`` Not valid
+ * ``1`` Calculates the :exc:`ExceptionGroup` to raise from a ``try-except*``.
.. versionadded:: 3.12
**Pseudo-instructions**
-These opcodes do not appear in python bytecode, they are used by the compiler
+These opcodes do not appear in Python bytecode. They are used by the compiler
but are replaced by real opcodes or removed before bytecode is generated.
.. opcode:: SETUP_FINALLY (target)
@@ -1515,7 +1534,7 @@ but are replaced by real opcodes or removed before bytecode is generated.
.. opcode:: SETUP_CLEANUP (target)
- Like ``SETUP_FINALLY``, but in case of exception also pushes the last
+ Like ``SETUP_FINALLY``, but in case of an exception also pushes the last
instruction (``lasti``) to the stack so that ``RERAISE`` can restore it.
If an exception occurs, the value stack level and the last instruction on
the frame are restored to their current state, and control is transferred
@@ -1524,7 +1543,7 @@ but are replaced by real opcodes or removed before bytecode is generated.
.. opcode:: SETUP_WITH (target)
- Like ``SETUP_CLEANUP``, but in case of exception one more item is popped
+ Like ``SETUP_CLEANUP``, but in case of an exception one more item is popped
from the stack before control is transferred to the exception handler at
``target``.
@@ -1558,9 +1577,9 @@ Opcode collections
These collections are provided for automatic introspection of bytecode
instructions:
- .. versionchanged:: 3.12
- The collections now contain pseudo instructions as well. These are
- opcodes with values ``>= MIN_PSEUDO_OPCODE``.
+.. versionchanged:: 3.12
+ The collections now contain pseudo instructions as well. These are
+ opcodes with values ``>= MIN_PSEUDO_OPCODE``.
.. data:: opname
@@ -1581,7 +1600,7 @@ instructions:
Sequence of bytecodes that use their argument.
- .. versionadded:: 3.12
+ .. versionadded:: 3.12
.. data:: hasconst
@@ -1591,10 +1610,10 @@ instructions:
.. data:: hasfree
- Sequence of bytecodes that access a free variable (note that 'free' in this
+ Sequence of bytecodes that access a free variable. 'free' in this
context refers to names in the current scope that are referenced by inner
scopes or names in outer scopes that are referenced from this scope. It does
- *not* include references to global or builtin scopes).
+ *not* include references to global or builtin scopes.
.. data:: hasname
@@ -1625,4 +1644,4 @@ instructions:
Sequence of bytecodes that set an exception handler.
- .. versionadded:: 3.12
+ .. versionadded:: 3.12
diff --git a/Doc/library/enum.rst b/Doc/library/enum.rst
index 07acf9da33e275..582e06261afd72 100644
--- a/Doc/library/enum.rst
+++ b/Doc/library/enum.rst
@@ -119,7 +119,8 @@ Module Contents
:func:`~enum.property`
Allows :class:`Enum` members to have attributes without conflicting with
- member names.
+ member names. The ``value`` and ``name`` attributes are implemented this
+ way.
:func:`unique`
@@ -169,7 +170,7 @@ Data Types
final *enum*, as well as creating the enum members, properly handling
duplicates, providing iteration over the enum class, etc.
- .. method:: EnumType.__call__(cls, value, names=None, *, module=None, qualname=None, type=None, start=1, boundary=None)
+ .. method:: EnumType.__call__(cls, value, names=None, \*, module=None, qualname=None, type=None, start=1, boundary=None)
This method is called in two different ways:
@@ -317,7 +318,7 @@ Data Types
>>> PowersOfThree.SECOND.value
9
- .. method:: Enum.__init_subclass__(cls, **kwds)
+ .. method:: Enum.__init_subclass__(cls, \**kwds)
A *classmethod* that is used to further configure subsequent subclasses.
By default, does nothing.
diff --git a/Doc/library/functions.rst b/Doc/library/functions.rst
index 7792e598c1155c..a5e86ef0f9eb59 100644
--- a/Doc/library/functions.rst
+++ b/Doc/library/functions.rst
@@ -1987,7 +1987,6 @@ are always available. They are listed here in alphabetical order.
.. index::
statement: import
- module: imp
.. note::
diff --git a/Doc/library/functools.rst b/Doc/library/functools.rst
index d467e50bc7a424..29cbc87bf66d12 100644
--- a/Doc/library/functools.rst
+++ b/Doc/library/functools.rst
@@ -49,8 +49,13 @@ The :mod:`functools` module defines the following functions:
>>> factorial(12) # makes two new recursive calls, the other 10 are cached
479001600
- The cache is threadsafe so the wrapped function can be used in multiple
- threads.
+ The cache is threadsafe so that the wrapped function can be used in
+ multiple threads. This means that the underlying data structure will
+ remain coherent during concurrent updates.
+
+ It is possible for the wrapped function to be called more than once if
+ another thread makes an additional call before the initial call has been
+ completed and cached.
.. versionadded:: 3.9
@@ -118,6 +123,7 @@ The :mod:`functools` module defines the following functions:
def stdev(self):
return statistics.stdev(self._data)
+ .. versionadded:: 3.8
.. versionchanged:: 3.12
Prior to Python 3.12, ``cached_property`` included an undocumented lock to
@@ -126,8 +132,6 @@ The :mod:`functools` module defines the following functions:
per-instance, which could result in unacceptably high lock contention. In
Python 3.12+ this locking is removed.
- .. versionadded:: 3.8
-
.. function:: cmp_to_key(func)
@@ -159,8 +163,13 @@ The :mod:`functools` module defines the following functions:
*maxsize* most recent calls. It can save time when an expensive or I/O bound
function is periodically called with the same arguments.
- The cache is threadsafe so the wrapped function can be used in multiple
- threads.
+ The cache is threadsafe so that the wrapped function can be used in
+ multiple threads. This means that the underlying data structure will
+ remain coherent during concurrent updates.
+
+ It is possible for the wrapped function to be called more than once if
+ another thread makes an additional call before the initial call has been
+ completed and cached.
Since a dictionary is used to cache results, the positional and keyword
arguments to the function must be :term:`hashable`.
@@ -233,7 +242,7 @@ The :mod:`functools` module defines the following functions:
@lru_cache(maxsize=32)
def get_pep(num):
'Retrieve text of a Python Enhancement Proposal'
- resource = 'https://peps.python.org/pep-%04d/' % num
+ resource = f'https://peps.python.org/pep-{num:04d}/'
try:
with urllib.request.urlopen(resource) as s:
return s.read()
diff --git a/Doc/library/imp.rst b/Doc/library/imp.rst
deleted file mode 100644
index 000793a7e66cae..00000000000000
--- a/Doc/library/imp.rst
+++ /dev/null
@@ -1,411 +0,0 @@
-:mod:`imp` --- Access the :ref:`import ` internals
-================================================================
-
-.. module:: imp
- :synopsis: Access the implementation of the import statement.
- :deprecated:
-
-**Source code:** :source:`Lib/imp.py`
-
-.. deprecated-removed:: 3.4 3.12
- The :mod:`imp` module is deprecated in favor of :mod:`importlib`.
-
-.. index:: statement: import
-
---------------
-
-This module provides an interface to the mechanisms used to implement the
-:keyword:`import` statement. It defines the following constants and functions:
-
-
-.. function:: get_magic()
-
- .. index:: pair: file; byte-code
-
- Return the magic string value used to recognize byte-compiled code files
- (:file:`.pyc` files). (This value may be different for each Python version.)
-
- .. deprecated:: 3.4
- Use :attr:`importlib.util.MAGIC_NUMBER` instead.
-
-
-.. function:: get_suffixes()
-
- Return a list of 3-element tuples, each describing a particular type of
- module. Each triple has the form ``(suffix, mode, type)``, where *suffix* is
- a string to be appended to the module name to form the filename to search
- for, *mode* is the mode string to pass to the built-in :func:`open` function
- to open the file (this can be ``'r'`` for text files or ``'rb'`` for binary
- files), and *type* is the file type, which has one of the values
- :const:`PY_SOURCE`, :const:`PY_COMPILED`, or :const:`C_EXTENSION`, described
- below.
-
- .. deprecated:: 3.3
- Use the constants defined on :mod:`importlib.machinery` instead.
-
-
-.. function:: find_module(name[, path])
-
- Try to find the module *name*. If *path* is omitted or ``None``, the list of
- directory names given by ``sys.path`` is searched, but first a few special
- places are searched: the function tries to find a built-in module with the
- given name (:const:`C_BUILTIN`), then a frozen module (:const:`PY_FROZEN`),
- and on some systems some other places are looked in as well (on Windows, it
- looks in the registry which may point to a specific file).
-
- Otherwise, *path* must be a list of directory names; each directory is
- searched for files with any of the suffixes returned by :func:`get_suffixes`
- above. Invalid names in the list are silently ignored (but all list items
- must be strings).
-
- If search is successful, the return value is a 3-element tuple ``(file,
- pathname, description)``:
-
- *file* is an open :term:`file object` positioned at the beginning, *pathname*
- is the pathname of the file found, and *description* is a 3-element tuple as
- contained in the list returned by :func:`get_suffixes` describing the kind of
- module found.
-
- If the module is built-in or frozen then *file* and *pathname* are both ``None``
- and the *description* tuple contains empty strings for its suffix and mode;
- the module type is indicated as given in parentheses above. If the search
- is unsuccessful, :exc:`ImportError` is raised. Other exceptions indicate
- problems with the arguments or environment.
-
- If the module is a package, *file* is ``None``, *pathname* is the package
- path and the last item in the *description* tuple is :const:`PKG_DIRECTORY`.
-
- This function does not handle hierarchical module names (names containing
- dots). In order to find *P.M*, that is, submodule *M* of package *P*, use
- :func:`find_module` and :func:`load_module` to find and load package *P*, and
- then use :func:`find_module` with the *path* argument set to ``P.__path__``.
- When *P* itself has a dotted name, apply this recipe recursively.
-
- .. deprecated:: 3.3
- Use :func:`importlib.util.find_spec` instead unless Python 3.3
- compatibility is required, in which case use
- :func:`importlib.find_loader`. For example usage of the former case,
- see the :ref:`importlib-examples` section of the :mod:`importlib`
- documentation.
-
-
-.. function:: load_module(name, file, pathname, description)
-
- Load a module that was previously found by :func:`find_module` (or by an
- otherwise conducted search yielding compatible results). This function does
- more than importing the module: if the module was already imported, it will
- reload the module! The *name* argument indicates the full
- module name (including the package name, if this is a submodule of a
- package). The *file* argument is an open file, and *pathname* is the
- corresponding file name; these can be ``None`` and ``''``, respectively, when
- the module is a package or not being loaded from a file. The *description*
- argument is a tuple, as would be returned by :func:`get_suffixes`, describing
- what kind of module must be loaded.
-
- If the load is successful, the return value is the module object; otherwise,
- an exception (usually :exc:`ImportError`) is raised.
-
- **Important:** the caller is responsible for closing the *file* argument, if
- it was not ``None``, even when an exception is raised. This is best done
- using a :keyword:`try` ... :keyword:`finally` statement.
-
- .. deprecated:: 3.3
- If previously used in conjunction with :func:`imp.find_module` then
- consider using :func:`importlib.import_module`, otherwise use the loader
- returned by the replacement you chose for :func:`imp.find_module`. If you
- called :func:`imp.load_module` and related functions directly with file
- path arguments then use a combination of
- :func:`importlib.util.spec_from_file_location` and
- :func:`importlib.util.module_from_spec`. See the :ref:`importlib-examples`
- section of the :mod:`importlib` documentation for details of the various
- approaches.
-
-
-.. function:: new_module(name)
-
- Return a new empty module object called *name*. This object is *not* inserted
- in ``sys.modules``.
-
- .. deprecated:: 3.4
- Use :func:`importlib.util.module_from_spec` instead.
-
-
-.. function:: reload(module)
-
- Reload a previously imported *module*. The argument must be a module object, so
- it must have been successfully imported before. This is useful if you have
- edited the module source file using an external editor and want to try out the
- new version without leaving the Python interpreter. The return value is the
- module object (the same as the *module* argument).
-
- When ``reload(module)`` is executed:
-
- * Python modules' code is recompiled and the module-level code reexecuted,
- defining a new set of objects which are bound to names in the module's
- dictionary. The ``init`` function of extension modules is not called a second
- time.
-
- * As with all other objects in Python the old objects are only reclaimed after
- their reference counts drop to zero.
-
- * The names in the module namespace are updated to point to any new or changed
- objects.
-
- * Other references to the old objects (such as names external to the module) are
- not rebound to refer to the new objects and must be updated in each namespace
- where they occur if that is desired.
-
- There are a number of other caveats:
-
- When a module is reloaded, its dictionary (containing the module's global
- variables) is retained. Redefinitions of names will override the old
- definitions, so this is generally not a problem. If the new version of a module
- does not define a name that was defined by the old version, the old definition
- remains. This feature can be used to the module's advantage if it maintains a
- global table or cache of objects --- with a :keyword:`try` statement it can test
- for the table's presence and skip its initialization if desired::
-
- try:
- cache
- except NameError:
- cache = {}
-
- It is legal though generally not very useful to reload built-in or dynamically
- loaded modules, except for :mod:`sys`, :mod:`__main__` and :mod:`builtins`.
- In many cases, however, extension modules are not designed to be initialized
- more than once, and may fail in arbitrary ways when reloaded.
-
- If a module imports objects from another module using :keyword:`from` ...
- :keyword:`import` ..., calling :func:`reload` for the other module does not
- redefine the objects imported from it --- one way around this is to re-execute
- the :keyword:`!from` statement, another is to use :keyword:`!import` and qualified
- names (*module*.*name*) instead.
-
- If a module instantiates instances of a class, reloading the module that defines
- the class does not affect the method definitions of the instances --- they
- continue to use the old class definition. The same is true for derived classes.
-
- .. versionchanged:: 3.3
- Relies on both ``__name__`` and ``__loader__`` being defined on the module
- being reloaded instead of just ``__name__``.
-
- .. deprecated:: 3.4
- Use :func:`importlib.reload` instead.
-
-
-The following functions are conveniences for handling :pep:`3147` byte-compiled
-file paths.
-
-.. versionadded:: 3.2
-
-.. function:: cache_from_source(path, debug_override=None)
-
- Return the :pep:`3147` path to the byte-compiled file associated with the
- source *path*. For example, if *path* is ``/foo/bar/baz.py`` the return
- value would be ``/foo/bar/__pycache__/baz.cpython-32.pyc`` for Python 3.2.
- The ``cpython-32`` string comes from the current magic tag (see
- :func:`get_tag`; if :attr:`sys.implementation.cache_tag` is not defined then
- :exc:`NotImplementedError` will be raised). By passing in ``True`` or
- ``False`` for *debug_override* you can override the system's value for
- ``__debug__``, leading to optimized bytecode.
-
- *path* need not exist.
-
- .. versionchanged:: 3.3
- If :attr:`sys.implementation.cache_tag` is ``None``, then
- :exc:`NotImplementedError` is raised.
-
- .. deprecated:: 3.4
- Use :func:`importlib.util.cache_from_source` instead.
-
- .. versionchanged:: 3.5
- The *debug_override* parameter no longer creates a ``.pyo`` file.
-
-
-.. function:: source_from_cache(path)
-
- Given the *path* to a :pep:`3147` file name, return the associated source code
- file path. For example, if *path* is
- ``/foo/bar/__pycache__/baz.cpython-32.pyc`` the returned path would be
- ``/foo/bar/baz.py``. *path* need not exist, however if it does not conform
- to :pep:`3147` format, a :exc:`ValueError` is raised. If
- :attr:`sys.implementation.cache_tag` is not defined,
- :exc:`NotImplementedError` is raised.
-
- .. versionchanged:: 3.3
- Raise :exc:`NotImplementedError` when
- :attr:`sys.implementation.cache_tag` is not defined.
-
- .. deprecated:: 3.4
- Use :func:`importlib.util.source_from_cache` instead.
-
-
-.. function:: get_tag()
-
- Return the :pep:`3147` magic tag string matching this version of Python's
- magic number, as returned by :func:`get_magic`.
-
- .. deprecated:: 3.4
- Use :attr:`sys.implementation.cache_tag` directly starting
- in Python 3.3.
-
-
-The following functions help interact with the import system's internal
-locking mechanism. Locking semantics of imports are an implementation
-detail which may vary from release to release. However, Python ensures
-that circular imports work without any deadlocks.
-
-
-.. function:: lock_held()
-
- Return ``True`` if the global import lock is currently held, else
- ``False``. On platforms without threads, always return ``False``.
-
- On platforms with threads, a thread executing an import first holds a
- global import lock, then sets up a per-module lock for the rest of the
- import. This blocks other threads from importing the same module until
- the original import completes, preventing other threads from seeing
- incomplete module objects constructed by the original thread. An
- exception is made for circular imports, which by construction have to
- expose an incomplete module object at some point.
-
- .. versionchanged:: 3.3
- The locking scheme has changed to per-module locks for
- the most part. A global import lock is kept for some critical tasks,
- such as initializing the per-module locks.
-
- .. deprecated:: 3.4
-
-
-.. function:: acquire_lock()
-
- Acquire the interpreter's global import lock for the current thread.
- This lock should be used by import hooks to ensure thread-safety when
- importing modules.
-
- Once a thread has acquired the import lock, the same thread may acquire it
- again without blocking; the thread must release it once for each time it has
- acquired it.
-
- On platforms without threads, this function does nothing.
-
- .. versionchanged:: 3.3
- The locking scheme has changed to per-module locks for
- the most part. A global import lock is kept for some critical tasks,
- such as initializing the per-module locks.
-
- .. deprecated:: 3.4
-
-
-.. function:: release_lock()
-
- Release the interpreter's global import lock. On platforms without
- threads, this function does nothing.
-
- .. versionchanged:: 3.3
- The locking scheme has changed to per-module locks for
- the most part. A global import lock is kept for some critical tasks,
- such as initializing the per-module locks.
-
- .. deprecated:: 3.4
-
-
-The following constants with integer values, defined in this module, are used
-to indicate the search result of :func:`find_module`.
-
-
-.. data:: PY_SOURCE
-
- The module was found as a source file.
-
- .. deprecated:: 3.3
-
-
-.. data:: PY_COMPILED
-
- The module was found as a compiled code object file.
-
- .. deprecated:: 3.3
-
-
-.. data:: C_EXTENSION
-
- The module was found as dynamically loadable shared library.
-
- .. deprecated:: 3.3
-
-
-.. data:: PKG_DIRECTORY
-
- The module was found as a package directory.
-
- .. deprecated:: 3.3
-
-
-.. data:: C_BUILTIN
-
- The module was found as a built-in module.
-
- .. deprecated:: 3.3
-
-
-.. data:: PY_FROZEN
-
- The module was found as a frozen module.
-
- .. deprecated:: 3.3
-
-
-.. class:: NullImporter(path_string)
-
- The :class:`NullImporter` type is a :pep:`302` import hook that handles
- non-directory path strings by failing to find any modules. Calling this type
- with an existing directory or empty string raises :exc:`ImportError`.
- Otherwise, a :class:`NullImporter` instance is returned.
-
- Instances have only one method:
-
- .. method:: NullImporter.find_module(fullname [, path])
-
- This method always returns ``None``, indicating that the requested module could
- not be found.
-
- .. versionchanged:: 3.3
- ``None`` is inserted into ``sys.path_importer_cache`` instead of an
- instance of :class:`NullImporter`.
-
- .. deprecated:: 3.4
- Insert ``None`` into ``sys.path_importer_cache`` instead.
-
-
-.. _examples-imp:
-
-Examples
---------
-
-The following function emulates what was the standard import statement up to
-Python 1.4 (no hierarchical module names). (This *implementation* wouldn't work
-in that version, since :func:`find_module` has been extended and
-:func:`load_module` has been added in 1.4.) ::
-
- import imp
- import sys
-
- def __import__(name, globals=None, locals=None, fromlist=None):
- # Fast path: see if the module has already been imported.
- try:
- return sys.modules[name]
- except KeyError:
- pass
-
- # If any of the following calls raises an exception,
- # there's a problem we can't handle -- let the caller handle it.
-
- fp, pathname, description = imp.find_module(name)
-
- try:
- return imp.load_module(name, fp, pathname, description)
- finally:
- # Since we may exit via an exception, close fp explicitly.
- if fp:
- fp.close()
diff --git a/Doc/library/importlib.metadata.rst b/Doc/library/importlib.metadata.rst
index 6e084101995e25..b306d5f55a714f 100644
--- a/Doc/library/importlib.metadata.rst
+++ b/Doc/library/importlib.metadata.rst
@@ -308,6 +308,10 @@ Python module or `Import Package >> packages_distributions()
{'importlib_metadata': ['importlib-metadata'], 'yaml': ['PyYAML'], 'jaraco': ['jaraco.classes', 'jaraco.functools'], ...}
+Some editable installs, `do not supply top-level names
+`_, and thus this
+function is not reliable with such installs.
+
.. versionadded:: 3.10
.. _distributions:
diff --git a/Doc/library/itertools.rst b/Doc/library/itertools.rst
index e57c393a6b370b..a0d794017e2602 100644
--- a/Doc/library/itertools.rst
+++ b/Doc/library/itertools.rst
@@ -789,6 +789,7 @@ which incur interpreter overhead.
.. testcode::
import collections
+ import functools
import math
import operator
import random
@@ -1082,7 +1083,7 @@ The following recipes have a more mathematical flavor:
# convolve(data, [1, -2, 1]) --> 2nd finite difference (2nd derivative)
kernel = tuple(kernel)[::-1]
n = len(kernel)
- padded_signal = chain(repeat(0, n-1), signal, [0] * (n-1))
+ padded_signal = chain(repeat(0, n-1), signal, repeat(0, n-1))
for window in sliding_window(padded_signal, n):
yield math.sumprod(kernel, window)
@@ -1092,10 +1093,8 @@ The following recipes have a more mathematical flavor:
(x - 5) (x + 4) (x - 3) expands to: x³ -4x² -17x + 60
"""
# polynomial_from_roots([5, -4, 3]) --> [1, -4, -17, 60]
- expansion = [1]
- for r in roots:
- expansion = convolve(expansion, (1, -r))
- return list(expansion)
+ factors = zip(repeat(1), map(operator.neg, roots))
+ return list(functools.reduce(convolve, factors, [1]))
def polynomial_eval(coefficients, x):
"""Evaluate a polynomial at a specific value.
diff --git a/Doc/library/multiprocessing.rst b/Doc/library/multiprocessing.rst
index 84e309f1bc8326..8454296b815b41 100644
--- a/Doc/library/multiprocessing.rst
+++ b/Doc/library/multiprocessing.rst
@@ -452,9 +452,7 @@ process which created it.
importable by the children. This is covered in :ref:`multiprocessing-programming`
however it is worth pointing out here. This means that some examples, such
as the :class:`multiprocessing.pool.Pool` examples will not work in the
- interactive interpreter. For example:
-
- .. code-block:: text
+ interactive interpreter. For example::
>>> from multiprocessing import Pool
>>> p = Pool(5)
diff --git a/Doc/library/optparse.rst b/Doc/library/optparse.rst
index 3e29fed0175e04..5c02d8bc8835bf 100644
--- a/Doc/library/optparse.rst
+++ b/Doc/library/optparse.rst
@@ -954,7 +954,16 @@ The canonical way to create an :class:`Option` instance is with the
As you can see, most actions involve storing or updating a value somewhere.
:mod:`optparse` always creates a special object for this, conventionally called
-``options`` (it happens to be an instance of :class:`optparse.Values`). Option
+``options``, which is an instance of :class:`optparse.Values`.
+
+.. class:: Values
+
+ An object holding parsed argument names and values as attributes.
+ Normally created by calling :meth:`OptionParser.parse_args`,
+ and can be overridden by a custom subclass passed to the *values* argument of
+ :meth:`OptionParser.parse_args` (as described in :ref:`optparse-parsing-arguments`).
+
+Option
arguments (and various other values) are stored as attributes of this object,
according to the :attr:`~Option.dest` (destination) option attribute.
@@ -991,6 +1000,14 @@ one that makes sense for *all* options.
Option attributes
^^^^^^^^^^^^^^^^^
+.. class:: Option
+
+ A single command line argument,
+ with various attributes passed by keyword to the constructor.
+ Normally created with :meth:`OptionParser.add_option` rather than directly,
+ and can be overridden by a custom class via the *option_class* argument
+ to :class:`OptionParser`.
+
The following option attributes may be passed as keyword arguments to
:meth:`OptionParser.add_option`. If you pass an option attribute that is not
relevant to a particular option, or fail to pass a required option attribute,
@@ -2027,7 +2044,7 @@ Features of note:
values.ensure_value(attr, value)
If the ``attr`` attribute of ``values`` doesn't exist or is ``None``, then
- ensure_value() first sets it to ``value``, and then returns 'value. This is
+ ensure_value() first sets it to ``value``, and then returns ``value``. This is
very handy for actions like ``"extend"``, ``"append"``, and ``"count"``, all
of which accumulate data in a variable and expect that variable to be of a
certain type (a list for the first two, an integer for the latter). Using
@@ -2035,3 +2052,27 @@ Features of note:
about setting a default value for the option destinations in question; they
can just leave the default as ``None`` and :meth:`ensure_value` will take care of
getting it right when it's needed.
+
+Exceptions
+----------
+
+.. exception:: OptionError
+
+ Raised if an :class:`Option` instance is created with invalid or
+ inconsistent arguments.
+
+.. exception:: OptionConflictError
+
+ Raised if conflicting options are added to an :class:`OptionParser`.
+
+.. exception:: OptionValueError
+
+ Raised if an invalid option value is encountered on the command line.
+
+.. exception:: BadOptionError
+
+ Raised if an invalid option is passed on the command line.
+
+.. exception:: AmbiguousOptionError
+
+ Raised if an ambiguous option is passed on the command line.
diff --git a/Doc/library/os.rst b/Doc/library/os.rst
index 7bb501c5946817..50e951c631fa88 100644
--- a/Doc/library/os.rst
+++ b/Doc/library/os.rst
@@ -3919,7 +3919,8 @@ to be ignored.
the :envvar:`PATH` variable. The other variants, :func:`execl`, :func:`execle`,
:func:`execv`, and :func:`execve`, will not use the :envvar:`PATH` variable to
locate the executable; *path* must contain an appropriate absolute or relative
- path.
+ path. Relative paths must include at least one slash, even on Windows, as
+ plain names will not be resolved.
For :func:`execle`, :func:`execlpe`, :func:`execve`, and :func:`execvpe` (note
that these all end in "e"), the *env* parameter must be a mapping which is
diff --git a/Doc/library/pkgutil.rst b/Doc/library/pkgutil.rst
index 788a02dcb8922f..64e617b82b48bc 100644
--- a/Doc/library/pkgutil.rst
+++ b/Doc/library/pkgutil.rst
@@ -25,9 +25,9 @@ support.
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
- This will add to the package's ``__path__`` all subdirectories of directories
- on :data:`sys.path` named after the package. This is useful if one wants to
- distribute different parts of a single logical package as multiple
+ For each directory on :data:`sys.path` that has a subdirectory that matches the
+ package name, add the subdirectory to the package's :attr:`__path__`. This is useful
+ if one wants to distribute different parts of a single logical package as multiple
directories.
It also looks for :file:`\*.pkg` files beginning where ``*`` matches the
@@ -82,7 +82,7 @@ support.
This is a backwards compatibility wrapper around
:func:`importlib.util.find_spec` that converts most failures to
:exc:`ImportError` and only returns the loader rather than the full
- :class:`ModuleSpec`.
+ :class:`importlib.machinery.ModuleSpec`.
.. versionchanged:: 3.3
Updated to be based directly on :mod:`importlib` rather than relying
diff --git a/Doc/library/readline.rst b/Doc/library/readline.rst
index 4d485d25b54020..8fb0eca8df74d8 100644
--- a/Doc/library/readline.rst
+++ b/Doc/library/readline.rst
@@ -19,7 +19,7 @@ function.
Readline keybindings may be configured via an initialization file, typically
``.inputrc`` in your home directory. See `Readline Init File
-`_
+`_
in the GNU Readline manual for information about the format and
allowable constructs of that file, and the capabilities of the
Readline library in general.
diff --git a/Doc/library/shutil.rst b/Doc/library/shutil.rst
index 373cc7d6072031..7f408be2336824 100644
--- a/Doc/library/shutil.rst
+++ b/Doc/library/shutil.rst
@@ -662,7 +662,7 @@ provided. They rely on the :mod:`zipfile` and :mod:`tarfile` modules.
Remove the archive format *name* from the list of supported formats.
-.. function:: unpack_archive(filename[, extract_dir[, format]])
+.. function:: unpack_archive(filename[, extract_dir[, format[, filter]]])
Unpack an archive. *filename* is the full path of the archive.
@@ -676,6 +676,14 @@ provided. They rely on the :mod:`zipfile` and :mod:`tarfile` modules.
registered for that extension. In case none is found,
a :exc:`ValueError` is raised.
+ The keyword-only *filter* argument is passed to the underlying unpacking
+ function. For zip files, *filter* is not accepted.
+ For tar files, it is recommended to set it to ``'data'``,
+ unless using features specific to tar and UNIX-like filesystems.
+ (See :ref:`tarfile-extraction-filter` for details.)
+ The ``'data'`` filter will become the default for tar files
+ in Python 3.14.
+
.. audit-event:: shutil.unpack_archive filename,extract_dir,format shutil.unpack_archive
.. warning::
@@ -688,6 +696,9 @@ provided. They rely on the :mod:`zipfile` and :mod:`tarfile` modules.
.. versionchanged:: 3.7
Accepts a :term:`path-like object` for *filename* and *extract_dir*.
+ .. versionchanged:: 3.12
+ Added the *filter* argument.
+
.. function:: register_unpack_format(name, extensions, function[, extra_args[, description]])
Registers an unpack format. *name* is the name of the format and
@@ -695,11 +706,14 @@ provided. They rely on the :mod:`zipfile` and :mod:`tarfile` modules.
``.zip`` for Zip files.
*function* is the callable that will be used to unpack archives. The
- callable will receive the path of the archive, followed by the directory
- the archive must be extracted to.
-
- When provided, *extra_args* is a sequence of ``(name, value)`` tuples that
- will be passed as keywords arguments to the callable.
+ callable will receive:
+
+ - the path of the archive, as a positional argument;
+ - the directory the archive must be extracted to, as a positional argument;
+ - possibly a *filter* keyword argument, if it was given to
+ :func:`unpack_archive`;
+ - additional keyword arguments, specified by *extra_args* as a sequence
+ of ``(name, value)`` tuples.
*description* can be provided to describe the format, and will be returned
by the :func:`get_unpack_formats` function.
diff --git a/Doc/library/smtplib.rst b/Doc/library/smtplib.rst
index 2539c3d3883298..4686232b09ac47 100644
--- a/Doc/library/smtplib.rst
+++ b/Doc/library/smtplib.rst
@@ -25,7 +25,7 @@ Protocol) and :rfc:`1869` (SMTP Service Extensions).
An :class:`SMTP` instance encapsulates an SMTP connection. It has methods
that support a full repertoire of SMTP and ESMTP operations. If the optional
- host and port parameters are given, the SMTP :meth:`connect` method is
+ *host* and *port* parameters are given, the SMTP :meth:`connect` method is
called with those parameters during initialization. If specified,
*local_hostname* is used as the FQDN of the local host in the HELO/EHLO
command. Otherwise, the local hostname is found using
@@ -34,12 +34,12 @@ Protocol) and :rfc:`1869` (SMTP Service Extensions).
*timeout* parameter specifies a timeout in seconds for blocking operations
like the connection attempt (if not specified, the global default timeout
setting will be used). If the timeout expires, :exc:`TimeoutError` is
- raised. The optional source_address parameter allows binding
+ raised. The optional *source_address* parameter allows binding
to some specific source address in a machine with multiple network
interfaces, and/or to some specific source TCP port. It takes a 2-tuple
- (host, port), for the socket to bind to as its source address before
- connecting. If omitted (or if host or port are ``''`` and/or 0 respectively)
- the OS default behavior will be used.
+ ``(host, port)``, for the socket to bind to as its source address before
+ connecting. If omitted (or if *host* or *port* are ``''`` and/or ``0``
+ respectively) the OS default behavior will be used.
For normal use, you should only require the initialization/connect,
:meth:`sendmail`, and :meth:`SMTP.quit` methods.
diff --git a/Doc/library/socketserver.rst b/Doc/library/socketserver.rst
index ceb962e860042d..d65e9fe81acf8b 100644
--- a/Doc/library/socketserver.rst
+++ b/Doc/library/socketserver.rst
@@ -140,9 +140,16 @@ server is the address family.
ForkingUDPServer
ThreadingTCPServer
ThreadingUDPServer
+ ForkingUnixStreamServer
+ ForkingUnixDatagramServer
+ ThreadingUnixStreamServer
+ ThreadingUnixDatagramServer
These classes are pre-defined using the mix-in classes.
+.. versionadded:: 3.12
+ The ``ForkingUnixStreamServer`` and ``ForkingUnixDatagramServer`` classes
+ were added.
To implement a service, you must derive a class from :class:`BaseRequestHandler`
and redefine its :meth:`~BaseRequestHandler.handle` method.
diff --git a/Doc/library/sqlite3.rst b/Doc/library/sqlite3.rst
index 51146e00999659..89673b8006ae77 100644
--- a/Doc/library/sqlite3.rst
+++ b/Doc/library/sqlite3.rst
@@ -310,7 +310,7 @@ Module functions
to avoid data corruption.
See :attr:`threadsafety` for more information.
- :param Connection factory:
+ :param ~sqlite3.Connection factory:
A custom subclass of :class:`Connection` to create the connection with,
if not the default :class:`Connection` class.
@@ -337,7 +337,7 @@ Module functions
The default will change to ``False`` in a future Python release.
:type autocommit: bool
- :rtype: Connection
+ :rtype: ~sqlite3.Connection
.. audit-event:: sqlite3.connect database sqlite3.connect
.. audit-event:: sqlite3.connect/handle connection_handle sqlite3.connect
@@ -573,6 +573,38 @@ Module constants
package, a third-party library which used to upstream changes to
:mod:`!sqlite3`. Today, it carries no meaning or practical value.
+.. _sqlite3-dbconfig-constants:
+
+.. data:: SQLITE_DBCONFIG_DEFENSIVE
+ SQLITE_DBCONFIG_DQS_DDL
+ SQLITE_DBCONFIG_DQS_DML
+ SQLITE_DBCONFIG_ENABLE_FKEY
+ SQLITE_DBCONFIG_ENABLE_FTS3_TOKENIZER
+ SQLITE_DBCONFIG_ENABLE_LOAD_EXTENSION
+ SQLITE_DBCONFIG_ENABLE_QPSG
+ SQLITE_DBCONFIG_ENABLE_TRIGGER
+ SQLITE_DBCONFIG_ENABLE_VIEW
+ SQLITE_DBCONFIG_LEGACY_ALTER_TABLE
+ SQLITE_DBCONFIG_LEGACY_FILE_FORMAT
+ SQLITE_DBCONFIG_NO_CKPT_ON_CLOSE
+ SQLITE_DBCONFIG_RESET_DATABASE
+ SQLITE_DBCONFIG_TRIGGER_EQP
+ SQLITE_DBCONFIG_TRUSTED_SCHEMA
+ SQLITE_DBCONFIG_WRITABLE_SCHEMA
+
+ These constants are used for the :meth:`Connection.setconfig`
+ and :meth:`~Connection.getconfig` methods.
+
+ The availability of these constants varies depending on the version of SQLite
+ Python was compiled with.
+
+ .. versionadded:: 3.12
+
+ .. seealso::
+
+ https://www.sqlite.org/c3ref/c_dbconfig_defensive.html
+ SQLite docs: Database Connection Configuration Options
+
.. _sqlite3-connection-objects:
@@ -1041,12 +1073,25 @@ Connection objects
(2, 'broccoli pie', 'broccoli cheese onions flour')
(3, 'pumpkin pie', 'pumpkin sugar flour butter')
- .. method:: load_extension(path, /)
+ .. method:: load_extension(path, /, *, entrypoint=None)
- Load an SQLite extension from a shared library located at *path*.
+ Load an SQLite extension from a shared library.
Enable extension loading with :meth:`enable_load_extension` before
calling this method.
+ :param str path:
+
+ The path to the SQLite extension.
+
+ :param entrypoint:
+
+ Entry point name.
+ If ``None`` (the default),
+ SQLite will come up with an entry point name of its own;
+ see the SQLite docs `Loading an Extension`_ for details.
+
+ :type entrypoint: str | None
+
.. audit-event:: sqlite3.load_extension connection,path sqlite3.Connection.load_extension
.. versionadded:: 3.2
@@ -1054,6 +1099,11 @@ Connection objects
.. versionchanged:: 3.10
Added the ``sqlite3.load_extension`` auditing event.
+ .. versionadded:: 3.12
+ The *entrypoint* parameter.
+
+ .. _Loading an Extension: https://www.sqlite.org/loadext.html#loading_an_extension_
+
.. method:: iterdump
Return an :term:`iterator` to dump the database as SQL source code.
@@ -1079,7 +1129,7 @@ Connection objects
Works even if the database is being accessed by other clients
or concurrently by the same connection.
- :param Connection target:
+ :param ~sqlite3.Connection target:
The database connection to save the backup to.
:param int pages:
@@ -1201,6 +1251,30 @@ Connection objects
.. _SQLite limit category: https://www.sqlite.org/c3ref/c_limit_attached.html
+ .. method:: getconfig(op, /)
+
+ Query a boolean connection configuration option.
+
+ :param int op:
+ A :ref:`SQLITE_DBCONFIG code <sqlite3-dbconfig-constants>`.
+
+ :rtype: bool
+
+ .. versionadded:: 3.12
+
+ .. method:: setconfig(op, enable=True, /)
+
+ Set a boolean connection configuration option.
+
+ :param int op:
+ A :ref:`SQLITE_DBCONFIG code <sqlite3-dbconfig-constants>`.
+
+ :param bool enable:
+ ``True`` if the configuration option should be enabled (default);
+ ``False`` if it should be disabled.
+
+ .. versionadded:: 3.12
+
.. method:: serialize(*, name="main")
Serialize a database into a :class:`bytes` object. For an
@@ -1456,12 +1530,12 @@ Cursor objects
For every item in *parameters*,
repeatedly execute the :ref:`parameterized `
- SQL statement *sql*.
+ :abbr:`DML (Data Manipulation Language)` SQL statement *sql*.
Uses the same implicit transaction handling as :meth:`~Cursor.execute`.
:param str sql:
- A single SQL :abbr:`DML (Data Manipulation Language)` statement.
+ A single SQL DML statement.
:param parameters:
An :term:`!iterable` of parameters to bind with
@@ -1484,6 +1558,13 @@ Cursor objects
# cur is an sqlite3.Cursor object
cur.executemany("INSERT INTO data VALUES(?)", rows)
+ .. note::
+
+ Any resulting rows are discarded,
+ including DML statements with `RETURNING clauses`_.
+
+ .. _RETURNING clauses: https://www.sqlite.org/lang_returning.html
+
.. deprecated-removed:: 3.12 3.14
:exc:`DeprecationWarning` is emitted if
diff --git a/Doc/library/stdtypes.rst b/Doc/library/stdtypes.rst
index bcfc6e5cfce611..2360472b31f175 100644
--- a/Doc/library/stdtypes.rst
+++ b/Doc/library/stdtypes.rst
@@ -1605,8 +1605,8 @@ expression support in the :mod:`re` module).
converts it to ``"ss"``.
The casefolding algorithm is
- `described in section 3.13 of the Unicode Standard
- `__.
+ `described in section 3.13 'Default Case Folding' of the Unicode Standard
+ `__.
.. versionadded:: 3.3
@@ -1768,8 +1768,9 @@ expression support in the :mod:`re` module).
one character, ``False`` otherwise. Alphabetic characters are those characters defined
in the Unicode character database as "Letter", i.e., those with general category
property being one of "Lm", "Lt", "Lu", "Ll", or "Lo". Note that this is different
- from the `Alphabetic property defined in the Unicode Standard
- `_.
+ from the `Alphabetic property defined in the section 4.10 'Letters, Alphabetic, and
+ Ideographic' of the Unicode Standard
+ `_.
.. method:: str.isascii()
@@ -1904,8 +1905,8 @@ expression support in the :mod:`re` module).
lowercase.
The lowercasing algorithm used is
- `described in section 3.13 of the Unicode Standard
- `__.
+ `described in section 3.13 'Default Case Folding' of the Unicode Standard
+ `__.
.. method:: str.lstrip([chars])
@@ -2250,8 +2251,8 @@ expression support in the :mod:`re` module).
titlecase).
The uppercasing algorithm used is
- `described in section 3.13 of the Unicode Standard
- `__.
+ `described in section 3.13 'Default Case Folding' of the Unicode Standard
+ `__.
.. method:: str.zfill(width)
@@ -3714,12 +3715,15 @@ copying.
types such as :class:`bytes` and :class:`bytearray`, an element is a single
byte, but other types such as :class:`array.array` may have bigger elements.
- ``len(view)`` is equal to the length of :class:`~memoryview.tolist`.
- If ``view.ndim = 0``, the length is 1. If ``view.ndim = 1``, the length
- is equal to the number of elements in the view. For higher dimensions,
- the length is equal to the length of the nested list representation of
- the view. The :class:`~memoryview.itemsize` attribute will give you the
- number of bytes in a single element.
+ ``len(view)`` is equal to the length of :class:`~memoryview.tolist`, which
+ is the nested list representation of the view. If ``view.ndim = 1``,
+ this is equal to the number of elements in the view.
+
+ .. versionchanged:: 3.12
+ If ``view.ndim == 0``, ``len(view)`` now raises :exc:`TypeError` instead of returning 1.
+
+ The :class:`~memoryview.itemsize` attribute will give you the number of
+ bytes in a single element.
A :class:`memoryview` supports slicing and indexing to expose its data.
One-dimensional slicing will result in a subview::
diff --git a/Doc/library/string.rst b/Doc/library/string.rst
index f55074cc582718..26b3f5000634f5 100644
--- a/Doc/library/string.rst
+++ b/Doc/library/string.rst
@@ -254,10 +254,10 @@ Some simple format string examples::
"Units destroyed: {players[0]}" # First element of keyword argument 'players'.
The *conversion* field causes a type coercion before formatting. Normally, the
-job of formatting a value is done by the :meth:`__format__` method of the value
+job of formatting a value is done by the :meth:`~object.__format__` method of the value
itself. However, in some cases it is desirable to force a type to be formatted
as a string, overriding its own definition of formatting. By converting the
-value to a string before calling :meth:`__format__`, the normal formatting logic
+value to a string before calling :meth:`~object.__format__`, the normal formatting logic
is bypassed.
Three conversion flags are currently supported: ``'!s'`` which calls :func:`str`
diff --git a/Doc/library/subprocess.rst b/Doc/library/subprocess.rst
index 2b5a82e0107fb6..53dfbf827260c9 100644
--- a/Doc/library/subprocess.rst
+++ b/Doc/library/subprocess.rst
@@ -919,9 +919,12 @@ Reassigning them to new values is unsupported:
.. attribute:: Popen.returncode
- The child return code, set by :meth:`poll` and :meth:`wait` (and indirectly
- by :meth:`communicate`). A ``None`` value indicates that the process
- hasn't terminated yet.
+ The child return code. Initially ``None``, :attr:`returncode` is set by
+ a call to the :meth:`poll`, :meth:`wait`, or :meth:`communicate` methods
+ if they detect that the process has terminated.
+
+ A ``None`` value indicates that the process hadn't yet terminated at the
+ time of the last method call.
A negative value ``-N`` indicates that the child was terminated by signal
``N`` (POSIX only).
diff --git a/Doc/library/superseded.rst b/Doc/library/superseded.rst
index 8786e227be9182..aaf66ea121d39c 100644
--- a/Doc/library/superseded.rst
+++ b/Doc/library/superseded.rst
@@ -17,7 +17,6 @@ backwards compatibility. They have been superseded by other modules.
chunk.rst
crypt.rst
imghdr.rst
- imp.rst
mailcap.rst
msilib.rst
nis.rst
diff --git a/Doc/library/sys.rst b/Doc/library/sys.rst
index e37d57edce515f..7c0e85142e7716 100644
--- a/Doc/library/sys.rst
+++ b/Doc/library/sys.rst
@@ -670,6 +670,13 @@ always available.
.. versionadded:: 3.4
+.. function:: getunicodeinternedsize()
+
+ Return the number of unicode objects that have been interned.
+
+ .. versionadded:: 3.12
+
+
.. function:: getandroidapilevel()
Return the build time API version of Android as an integer.
@@ -1246,10 +1253,6 @@ always available.
Originally specified in :pep:`302`.
- .. versionchanged:: 3.3
- ``None`` is stored instead of :class:`imp.NullImporter` when no finder
- is found.
-
.. data:: platform
diff --git a/Doc/library/tarfile.rst b/Doc/library/tarfile.rst
index 741d40da152101..891af1bcf7edff 100644
--- a/Doc/library/tarfile.rst
+++ b/Doc/library/tarfile.rst
@@ -36,6 +36,13 @@ Some facts and figures:
.. versionchanged:: 3.3
Added support for :mod:`lzma` compression.
+.. versionchanged:: 3.12
+ Archives are extracted using a :ref:`filter <tarfile-extraction-filter>`,
+ which makes it possible to either limit surprising/dangerous features,
+ or to acknowledge that they are expected and the archive is fully trusted.
+ By default, archives are fully trusted, but this default is deprecated
+ and slated to change in Python 3.14.
+
.. function:: open(name=None, mode='r', fileobj=None, bufsize=10240, **kwargs)
@@ -209,6 +216,38 @@ The :mod:`tarfile` module defines the following exceptions:
Is raised by :meth:`TarInfo.frombuf` if the buffer it gets is invalid.
+.. exception:: FilterError
+
+ Base class for members :ref:`refused ` by
+ filters.
+
+ .. attribute:: tarinfo
+
+ Information about the member that the filter refused to extract,
+ as :ref:`TarInfo `.
+
+.. exception:: AbsolutePathError
+
+ Raised to refuse extracting a member with an absolute path.
+
+.. exception:: OutsideDestinationError
+
+ Raised to refuse extracting a member outside the destination directory.
+
+.. exception:: SpecialFileError
+
+ Raised to refuse extracting a special file (e.g. a device or pipe).
+
+.. exception:: AbsoluteLinkError
+
+ Raised to refuse extracting a symbolic link with an absolute path.
+
+.. exception:: LinkOutsideDestinationError
+
+ Raised to refuse extracting a symbolic link pointing outside the destination
+ directory.
+
+
The following constants are available at the module level:
.. data:: ENCODING
@@ -319,11 +358,8 @@ be finalized; only the internally used file object will be closed. See the
*debug* can be set from ``0`` (no debug messages) up to ``3`` (all debug
messages). The messages are written to ``sys.stderr``.
- If *errorlevel* is ``0``, all errors are ignored when using :meth:`TarFile.extract`.
- Nevertheless, they appear as error messages in the debug output, when debugging
- is enabled. If ``1``, all *fatal* errors are raised as :exc:`OSError`
- exceptions. If ``2``, all *non-fatal* errors are raised as :exc:`TarError`
- exceptions as well.
+ *errorlevel* controls how extraction errors are handled,
+ see :attr:`the corresponding attribute <TarFile.errorlevel>`.
The *encoding* and *errors* arguments define the character encoding to be
used for reading or writing the archive and how conversion errors are going
@@ -390,7 +426,7 @@ be finalized; only the internally used file object will be closed. See the
available.
-.. method:: TarFile.extractall(path=".", members=None, *, numeric_owner=False)
+.. method:: TarFile.extractall(path=".", members=None, *, numeric_owner=False, filter=None)
Extract all members from the archive to the current working directory or
directory *path*. If optional *members* is given, it must be a subset of the
@@ -404,6 +440,12 @@ be finalized; only the internally used file object will be closed. See the
are used to set the owner/group for the extracted files. Otherwise, the named
values from the tarfile are used.
+ The *filter* argument specifies how ``members`` are modified or rejected
+ before extraction.
+ See :ref:`tarfile-extraction-filter` for details.
+ It is recommended to set this explicitly depending on which *tar* features
+ you need to support.
+
.. warning::
Never extract archives from untrusted sources without prior inspection.
@@ -411,14 +453,20 @@ be finalized; only the internally used file object will be closed. See the
that have absolute filenames starting with ``"/"`` or filenames with two
dots ``".."``.
+ Set ``filter='data'`` to prevent the most dangerous security issues,
+ and read the :ref:`tarfile-extraction-filter` section for details.
+
.. versionchanged:: 3.5
Added the *numeric_owner* parameter.
.. versionchanged:: 3.6
The *path* parameter accepts a :term:`path-like object`.
+ .. versionchanged:: 3.12
+ Added the *filter* parameter.
-.. method:: TarFile.extract(member, path="", set_attrs=True, *, numeric_owner=False)
+
+.. method:: TarFile.extract(member, path="", set_attrs=True, *, numeric_owner=False, filter=None)
Extract a member from the archive to the current working directory, using its
full name. Its file information is extracted as accurately as possible. *member*
@@ -426,9 +474,8 @@ be finalized; only the internally used file object will be closed. See the
directory using *path*. *path* may be a :term:`path-like object`.
File attributes (owner, mtime, mode) are set unless *set_attrs* is false.
- If *numeric_owner* is :const:`True`, the uid and gid numbers from the tarfile
- are used to set the owner/group for the extracted files. Otherwise, the named
- values from the tarfile are used.
+ The *numeric_owner* and *filter* arguments are the same as
+ for :meth:`extractall`.
.. note::
@@ -439,6 +486,9 @@ be finalized; only the internally used file object will be closed. See the
See the warning for :meth:`extractall`.
+ Set ``filter='data'`` to prevent the most dangerous security issues,
+ and read the :ref:`tarfile-extraction-filter` section for details.
+
.. versionchanged:: 3.2
Added the *set_attrs* parameter.
@@ -448,6 +498,9 @@ be finalized; only the internally used file object will be closed. See the
.. versionchanged:: 3.6
The *path* parameter accepts a :term:`path-like object`.
+ .. versionchanged:: 3.12
+ Added the *filter* parameter.
+
.. method:: TarFile.extractfile(member)
@@ -460,6 +513,55 @@ be finalized; only the internally used file object will be closed. See the
.. versionchanged:: 3.3
Return an :class:`io.BufferedReader` object.
+.. attribute:: TarFile.errorlevel
+ :type: int
+
+ If *errorlevel* is ``0``, errors are ignored when using :meth:`TarFile.extract`
+ and :meth:`TarFile.extractall`.
+ Nevertheless, they appear as error messages in the debug output when
+ *debug* is greater than 0.
+ If ``1`` (the default), all *fatal* errors are raised as :exc:`OSError` or
+ :exc:`FilterError` exceptions. If ``2``, all *non-fatal* errors are raised
+ as :exc:`TarError` exceptions as well.
+
+ Some exceptions, e.g. ones caused by wrong argument types or data
+ corruption, are always raised.
+
+ Custom :ref:`extraction filters <tarfile-extraction-filter>`
+ should raise :exc:`FilterError` for *fatal* errors
+ and :exc:`ExtractError` for *non-fatal* ones.
+
+ Note that when an exception is raised, the archive may be partially
+ extracted. It is the user’s responsibility to clean up.
+
+.. attribute:: TarFile.extraction_filter
+
+ .. versionadded:: 3.12
+
+ The :ref:`extraction filter <tarfile-extraction-filter>` used
+ as a default for the *filter* argument of :meth:`~TarFile.extract`
+ and :meth:`~TarFile.extractall`.
+
+ The attribute may be ``None`` or a callable.
+ String names are not allowed for this attribute, unlike the *filter*
+ argument to :meth:`~TarFile.extract`.
+
+ If ``extraction_filter`` is ``None`` (the default),
+ calling an extraction method without a *filter* argument will raise a
+ ``DeprecationWarning``,
+ and fall back to the :func:`fully_trusted <fully_trusted_filter>` filter,
+ whose dangerous behavior matches previous versions of Python.
+
+ In Python 3.14+, leaving ``extraction_filter=None`` will cause
+ extraction methods to use the :func:`data <data_filter>` filter by default.
+
+ The attribute may be set on instances or overridden in subclasses.
+ It also is possible to set it on the ``TarFile`` class itself to set a
+ global default, although, since it affects all uses of *tarfile*,
+ it is best practice to only do so in top-level applications or
+ :mod:`site configuration <site>`.
+ To set a global default this way, a filter function needs to be wrapped in
+ :func:`staticmethod()` to prevent injection of a ``self`` argument.
.. method:: TarFile.add(name, arcname=None, recursive=True, *, filter=None)
@@ -535,8 +637,23 @@ permissions, owner etc.), it provides some useful methods to determine its type.
It does *not* contain the file's data itself.
:class:`TarInfo` objects are returned by :class:`TarFile`'s methods
-:meth:`getmember`, :meth:`getmembers` and :meth:`gettarinfo`.
+:meth:`~TarFile.getmember`, :meth:`~TarFile.getmembers` and
+:meth:`~TarFile.gettarinfo`.
+Modifying the objects returned by :meth:`~!TarFile.getmember` or
+:meth:`~!TarFile.getmembers` will affect all subsequent
+operations on the archive.
+For cases where this is unwanted, you can use :mod:`copy.copy() <copy>` or
+call the :meth:`~TarInfo.replace` method to create a modified copy in one step.
+
+Several attributes can be set to ``None`` to indicate that a piece of metadata
+is unused or unknown.
+Different :class:`TarInfo` methods handle ``None`` differently:
+
+- The :meth:`~TarFile.extract` or :meth:`~TarFile.extractall` methods will
+ ignore the corresponding metadata, leaving it set to a default.
+- :meth:`~TarFile.addfile` will fail.
+- :meth:`~TarFile.list` will print a placeholder string.
.. class:: TarInfo(name="")
@@ -569,24 +686,39 @@ A ``TarInfo`` object has the following public data attributes:
.. attribute:: TarInfo.name
+ :type: str
Name of the archive member.
.. attribute:: TarInfo.size
+ :type: int
Size in bytes.
.. attribute:: TarInfo.mtime
+ :type: int | float
- Time of last modification.
+ Time of last modification in seconds since the :ref:`epoch `,
+ as in :attr:`os.stat_result.st_mtime`.
+
+ .. versionchanged:: 3.12
+ Can be set to ``None`` for :meth:`~TarFile.extract` and
+ :meth:`~TarFile.extractall`, causing extraction to skip applying this
+ attribute.
.. attribute:: TarInfo.mode
+ :type: int
- Permission bits.
+ Permission bits, as for :func:`os.chmod`.
+ .. versionchanged:: 3.12
+
+ Can be set to ``None`` for :meth:`~TarFile.extract` and
+ :meth:`~TarFile.extractall`, causing extraction to skip applying this
+ attribute.
.. attribute:: TarInfo.type
@@ -598,35 +730,76 @@ A ``TarInfo`` object has the following public data attributes:
.. attribute:: TarInfo.linkname
+ :type: str
Name of the target file name, which is only present in :class:`TarInfo` objects
of type :const:`LNKTYPE` and :const:`SYMTYPE`.
.. attribute:: TarInfo.uid
+ :type: int
User ID of the user who originally stored this member.
+ .. versionchanged:: 3.12
+
+ Can be set to ``None`` for :meth:`~TarFile.extract` and
+ :meth:`~TarFile.extractall`, causing extraction to skip applying this
+ attribute.
.. attribute:: TarInfo.gid
+ :type: int
Group ID of the user who originally stored this member.
+ .. versionchanged:: 3.12
+
+ Can be set to ``None`` for :meth:`~TarFile.extract` and
+ :meth:`~TarFile.extractall`, causing extraction to skip applying this
+ attribute.
.. attribute:: TarInfo.uname
+ :type: str
User name.
+ .. versionchanged:: 3.12
+
+ Can be set to ``None`` for :meth:`~TarFile.extract` and
+ :meth:`~TarFile.extractall`, causing extraction to skip applying this
+ attribute.
.. attribute:: TarInfo.gname
+ :type: str
Group name.
+ .. versionchanged:: 3.12
+
+ Can be set to ``None`` for :meth:`~TarFile.extract` and
+ :meth:`~TarFile.extractall`, causing extraction to skip applying this
+ attribute.
.. attribute:: TarInfo.pax_headers
+ :type: dict
A dictionary containing key-value pairs of an associated pax extended header.
+.. method:: TarInfo.replace(name=..., mtime=..., mode=..., linkname=...,
+ uid=..., gid=..., uname=..., gname=...,
+ deep=True)
+
+ .. versionadded:: 3.12
+
+ Return a *new* copy of the :class:`!TarInfo` object with the given attributes
+ changed. For example, to return a ``TarInfo`` with the group name set to
+ ``'staff'``, use::
+
+ new_tarinfo = old_tarinfo.replace(gname='staff')
+
+ By default, a deep copy is made.
+ If *deep* is false, the copy is shallow, i.e. ``pax_headers``
+ and any custom attributes are shared with the original ``TarInfo`` object.
A :class:`TarInfo` object also provides some convenient query methods:
@@ -676,9 +849,258 @@ A :class:`TarInfo` object also provides some convenient query methods:
Return :const:`True` if it is one of character device, block device or FIFO.
+.. _tarfile-extraction-filter:
+
+Extraction filters
+------------------
+
+.. versionadded:: 3.12
+
+The *tar* format is designed to capture all details of a UNIX-like filesystem,
+which makes it very powerful.
+Unfortunately, the features make it easy to create tar files that have
+unintended -- and possibly malicious -- effects when extracted.
+For example, extracting a tar file can overwrite arbitrary files in various
+ways (e.g. by using absolute paths, ``..`` path components, or symlinks that
+affect later members).
+
+In most cases, the full functionality is not needed.
+Therefore, *tarfile* supports extraction filters: a mechanism to limit
+functionality, and thus mitigate some of the security issues.
+
+.. seealso::
+
+ :pep:`706`
+ Contains further motivation and rationale behind the design.
+
+The *filter* argument to :meth:`TarFile.extract` or :meth:`~TarFile.extractall`
+can be:
+
+* the string ``'fully_trusted'``: Honor all metadata as specified in the
+ archive.
+ Should be used if the user trusts the archive completely, or implements
+ their own complex verification.
+
+* the string ``'tar'``: Honor most *tar*-specific features (i.e. features of
+ UNIX-like filesystems), but block features that are very likely to be
+ surprising or malicious. See :func:`tar_filter` for details.
+
+* the string ``'data'``: Ignore or block most features specific to UNIX-like
+ filesystems. Intended for extracting cross-platform data archives.
+ See :func:`data_filter` for details.
+
+* ``None`` (default): Use :attr:`TarFile.extraction_filter`.
+
+ If that is also ``None`` (the default), raise a ``DeprecationWarning``,
+ and fall back to the ``'fully_trusted'`` filter, whose dangerous behavior
+ matches previous versions of Python.
+
+ In Python 3.14, the ``'data'`` filter will become the default instead.
+ It's possible to switch earlier; see :attr:`TarFile.extraction_filter`.
+
+* A callable which will be called for each extracted member with a
+  :ref:`TarInfo <tarinfo-objects>` describing the member and the destination
+ path to where the archive is extracted (i.e. the same path is used for all
+ members)::
+
+ filter(/, member: TarInfo, path: str) -> TarInfo | None
+
+ The callable is called just before each member is extracted, so it can
+ take the current state of the disk into account.
+ It can:
+
+ - return a :class:`TarInfo` object which will be used instead of the metadata
+ in the archive, or
+ - return ``None``, in which case the member will be skipped, or
+ - raise an exception to abort the operation or skip the member,
+ depending on :attr:`~TarFile.errorlevel`.
+ Note that when extraction is aborted, :meth:`~TarFile.extractall` may leave
+ the archive partially extracted. It does not attempt to clean up.
+
+Default named filters
+~~~~~~~~~~~~~~~~~~~~~
+
+The pre-defined, named filters are available as functions, so they can be
+reused in custom filters:
+
+.. function:: fully_trusted_filter(/, member, path)
+
+ Return *member* unchanged.
+
+ This implements the ``'fully_trusted'`` filter.
+
+.. function:: tar_filter(/, member, path)
+
+ Implements the ``'tar'`` filter.
+
+ - Strip leading slashes (``/`` and :attr:`os.sep`) from filenames.
+ - :ref:`Refuse ` to extract files with absolute
+ paths (in case the name is absolute
+ even after stripping slashes, e.g. ``C:/foo`` on Windows).
+ This raises :class:`~tarfile.AbsolutePathError`.
+ - :ref:`Refuse ` to extract files whose absolute
+ path (after following symlinks) would end up outside the destination.
+ This raises :class:`~tarfile.OutsideDestinationError`.
+ - Clear high mode bits (setuid, setgid, sticky) and group/other write bits
+ (:attr:`~stat.S_IWGRP`|:attr:`~stat.S_IWOTH`).
+
+ Return the modified ``TarInfo`` member.
+
+.. function:: data_filter(/, member, path)
+
+ Implements the ``'data'`` filter.
+ In addition to what ``tar_filter`` does:
+
+ - :ref:`Refuse ` to extract links (hard or soft)
+ that link to absolute paths, or ones that link outside the destination.
+
+ This raises :class:`~tarfile.AbsoluteLinkError` or
+ :class:`~tarfile.LinkOutsideDestinationError`.
+
+ Note that such files are refused even on platforms that do not support
+ symbolic links.
+
+ - :ref:`Refuse ` to extract device files
+ (including pipes).
+ This raises :class:`~tarfile.SpecialFileError`.
+
+ - For regular files, including hard links:
+
+ - Set the owner read and write permissions
+ (:attr:`~stat.S_IRUSR`|:attr:`~stat.S_IWUSR`).
+ - Remove the group & other executable permission
+ (:attr:`~stat.S_IXGRP`|:attr:`~stat.S_IXOTH`)
+ if the owner doesn’t have it (:attr:`~stat.S_IXUSR`).
+
+ - For other files (directories), set ``mode`` to ``None``, so
+ that extraction methods skip applying permission bits.
+ - Set user and group info (``uid``, ``gid``, ``uname``, ``gname``)
+ to ``None``, so that extraction methods skip setting it.
+
+ Return the modified ``TarInfo`` member.
+
+
+.. _tarfile-extraction-refuse:
+
+Filter errors
+~~~~~~~~~~~~~
+
+When a filter refuses to extract a file, it will raise an appropriate exception,
+a subclass of :class:`~tarfile.FilterError`.
+This will abort the extraction if :attr:`TarFile.errorlevel` is 1 or more.
+With ``errorlevel=0`` the error will be logged and the member will be skipped,
+but extraction will continue.
+
+
+Hints for further verification
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Even with ``filter='data'``, *tarfile* is not suited for extracting untrusted
+files without prior inspection.
+Among other issues, the pre-defined filters do not prevent denial-of-service
+attacks. Users should do additional checks.
+
+Here is an incomplete list of things to consider:
+
+* Extract to a :func:`new temporary directory <tempfile.mkdtemp>`
+ to prevent e.g. exploiting pre-existing links, and to make it easier to
+ clean up after a failed extraction.
+* When working with untrusted data, use external (e.g. OS-level) limits on
+ disk, memory and CPU usage.
+* Check filenames against an allow-list of characters
+ (to filter out control characters, confusables, foreign path separators,
+ etc.).
+* Check that filenames have expected extensions (discouraging files that
+ execute when you “click on them”, or extension-less files like Windows special device names).
+* Limit the number of extracted files, total size of extracted data,
+ filename length (including symlink length), and size of individual files.
+* Check for files that would be shadowed on case-insensitive filesystems.
+
+Also note that:
+
+* Tar files may contain multiple versions of the same file.
+ Later ones are expected to overwrite any earlier ones.
+ This feature is crucial to allow updating tape archives, but can be abused
+ maliciously.
+* *tarfile* does not protect against issues with “live” data,
+ e.g. an attacker tinkering with the destination (or source) directory while
+ extraction (or archiving) is in progress.
+
+
+Supporting older Python versions
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Extraction filters were added to Python 3.12, but may be backported to older
+versions as security updates.
+To check whether the feature is available, use e.g.
+``hasattr(tarfile, 'data_filter')`` rather than checking the Python version.
+
+The following examples show how to support Python versions with and without
+the feature.
+Note that setting ``extraction_filter`` will affect any subsequent operations.
+
+* Fully trusted archive::
+
+ my_tarfile.extraction_filter = (lambda member, path: member)
+ my_tarfile.extractall()
+
+* Use the ``'data'`` filter if available, but revert to Python 3.11 behavior
+ (``'fully_trusted'``) if this feature is not available::
+
+ my_tarfile.extraction_filter = getattr(tarfile, 'data_filter',
+ (lambda member, path: member))
+ my_tarfile.extractall()
+
+* Use the ``'data'`` filter; *fail* if it is not available::
+
+ my_tarfile.extractall(filter=tarfile.data_filter)
+
+ or::
+
+ my_tarfile.extraction_filter = tarfile.data_filter
+ my_tarfile.extractall()
+
+* Use the ``'data'`` filter; *warn* if it is not available::
+
+ if hasattr(tarfile, 'data_filter'):
+ my_tarfile.extractall(filter='data')
+ else:
+ # remove this when no longer needed
+ warn_the_user('Extracting may be unsafe; consider updating Python')
+ my_tarfile.extractall()
+
+
+Stateful extraction filter example
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+While *tarfile*'s extraction methods take a simple *filter* callable,
+custom filters may be more complex objects with an internal state.
+It may be useful to write these as context managers, to be used like this::
+
+ with StatefulFilter() as filter_func:
+ tar.extractall(path, filter=filter_func)
+
+Such a filter can be written as, for example::
+
+ class StatefulFilter:
+ def __init__(self):
+ self.file_count = 0
+
+ def __enter__(self):
+ return self
+
+ def __call__(self, member, path):
+ self.file_count += 1
+ return member
+
+ def __exit__(self, *exc_info):
+ print(f'{self.file_count} files extracted')
+
+
.. _tarfile-commandline:
.. program:: tarfile
+
Command-Line Interface
----------------------
@@ -748,6 +1170,13 @@ Command-line options
Verbose output.
+.. cmdoption:: --filter
+
+ Specifies the *filter* for ``--extract``.
+ See :ref:`tarfile-extraction-filter` for details.
+ Only string names are accepted (that is, ``fully_trusted``, ``tar``,
+ and ``data``).
+
.. _tar-examples:
Examples
@@ -757,7 +1186,7 @@ How to extract an entire tar archive to the current working directory::
import tarfile
tar = tarfile.open("sample.tar.gz")
- tar.extractall()
+ tar.extractall(filter='data')
tar.close()
How to extract a subset of a tar archive with :meth:`TarFile.extractall` using
diff --git a/Doc/library/tempfile.rst b/Doc/library/tempfile.rst
index 61358eb76925b2..fd4c294613fd31 100644
--- a/Doc/library/tempfile.rst
+++ b/Doc/library/tempfile.rst
@@ -292,6 +292,9 @@ The module defines the following user-callable items:
.. versionchanged:: 3.6
The *dir* parameter now accepts a :term:`path-like object`.
+ .. versionchanged:: 3.12
+ :func:`mkdtemp` now always returns an absolute path, even if *dir* is relative.
+
.. function:: gettempdir()
diff --git a/Doc/library/token-list.inc b/Doc/library/token-list.inc
index 2739d5bfc1dfa2..3b345099bf54b5 100644
--- a/Doc/library/token-list.inc
+++ b/Doc/library/token-list.inc
@@ -201,6 +201,10 @@
Token value for ``":="``.
+.. data:: EXCLAMATION
+
+ Token value for ``"!"``.
+
.. data:: OP
.. data:: AWAIT
@@ -213,6 +217,12 @@
.. data:: SOFT_KEYWORD
+.. data:: FSTRING_START
+
+.. data:: FSTRING_MIDDLE
+
+.. data:: FSTRING_END
+
.. data:: ERRORTOKEN
.. data:: N_TOKENS
diff --git a/Doc/library/types.rst b/Doc/library/types.rst
index 27b9846325914d..a15fb5cfa49473 100644
--- a/Doc/library/types.rst
+++ b/Doc/library/types.rst
@@ -82,6 +82,46 @@ Dynamic Type Creation
.. versionadded:: 3.7
+.. function:: get_original_bases(cls, /)
+
+ Return the tuple of objects originally given as the bases of *cls* before
+ the :meth:`~object.__mro_entries__` method has been called on any bases
+ (following the mechanisms laid out in :pep:`560`). This is useful for
+ introspecting :ref:`Generics `.
+
+ For classes that have an ``__orig_bases__`` attribute, this
+ function returns the value of ``cls.__orig_bases__``.
+ For classes without the ``__orig_bases__`` attribute, ``cls.__bases__`` is
+ returned.
+
+ Examples::
+
+ from typing import TypeVar, Generic, NamedTuple, TypedDict
+
+ T = TypeVar("T")
+ class Foo(Generic[T]): ...
+ class Bar(Foo[int], float): ...
+ class Baz(list[str]): ...
+ Eggs = NamedTuple("Eggs", [("a", int), ("b", str)])
+ Spam = TypedDict("Spam", {"a": int, "b": str})
+
+ assert Bar.__bases__ == (Foo, float)
+ assert get_original_bases(Bar) == (Foo[int], float)
+
+ assert Baz.__bases__ == (list,)
+ assert get_original_bases(Baz) == (list[str],)
+
+ assert Eggs.__bases__ == (tuple,)
+ assert get_original_bases(Eggs) == (NamedTuple,)
+
+ assert Spam.__bases__ == (dict,)
+ assert get_original_bases(Spam) == (TypedDict,)
+
+ assert int.__bases__ == (object,)
+ assert get_original_bases(int) == (object,)
+
+ .. versionadded:: 3.12
+
.. seealso::
:pep:`560` - Core support for typing module and generic types
@@ -311,6 +351,13 @@ Standard names are defined for the following types:
.. versionchanged:: 3.9.2
This type can now be subclassed.
+ .. seealso::
+
+ :ref:`Generic Alias Types <types-genericalias>`
+ In-depth documentation on instances of :class:`!types.GenericAlias`
+
+ :pep:`585` - Type Hinting Generics In Standard Collections
+ Introducing the :class:`!types.GenericAlias` class
.. class:: UnionType
diff --git a/Doc/library/typing.rst b/Doc/library/typing.rst
index 15bab7775eadd8..409a95d528b5d3 100644
--- a/Doc/library/typing.rst
+++ b/Doc/library/typing.rst
@@ -98,6 +98,9 @@ annotations. These include:
*Introducing* :data:`LiteralString`
* :pep:`681`: Data Class Transforms
*Introducing* the :func:`@dataclass_transform` decorator
+* :pep:`692`: Using ``TypedDict`` for more precise ``**kwargs`` typing
+ *Introducing* a new way of typing ``**kwargs`` with :data:`Unpack` and
+ :data:`TypedDict`
* :pep:`698`: Adding an override decorator to typing
*Introducing* the :func:`@override` decorator
@@ -1417,8 +1420,10 @@ These are not used in annotations. They are building blocks for creating generic
tup: tuple[Unpack[Ts]]
In fact, ``Unpack`` can be used interchangeably with ``*`` in the context
- of types. You might see ``Unpack`` being used explicitly in older versions
- of Python, where ``*`` couldn't be used in certain places::
+ of :class:`typing.TypeVarTuple ` and
+ :class:`builtins.tuple ` types. You might see ``Unpack`` being used
+ explicitly in older versions of Python, where ``*`` couldn't be used in
+ certain places::
# In older versions of Python, TypeVarTuple and Unpack
# are located in the `typing_extensions` backports package.
@@ -1428,6 +1433,21 @@ These are not used in annotations. They are building blocks for creating generic
tup: tuple[*Ts] # Syntax error on Python <= 3.10!
tup: tuple[Unpack[Ts]] # Semantically equivalent, and backwards-compatible
+ ``Unpack`` can also be used along with :class:`typing.TypedDict` for typing
+ ``**kwargs`` in a function signature::
+
+ from typing import TypedDict, Unpack
+
+ class Movie(TypedDict):
+ name: str
+ year: int
+
+ # This function expects two keyword arguments - `name` of type `str`
+ # and `year` of type `int`.
+ def foo(**kwargs: Unpack[Movie]): ...
+
+ See :pep:`692` for more details on using ``Unpack`` for ``**kwargs`` typing.
+
.. versionadded:: 3.11
.. class:: ParamSpec(name, *, bound=None, covariant=False, contravariant=False)
diff --git a/Doc/library/unittest.rst b/Doc/library/unittest.rst
index d1a977fd7da6a5..c70153dfcd69e1 100644
--- a/Doc/library/unittest.rst
+++ b/Doc/library/unittest.rst
@@ -2191,10 +2191,6 @@ Loading and running tests
.. versionadded:: 3.12
Added *durations* keyword argument.
- .. versionchanged:: 3.12
- Subclasses should accept ``**kwargs`` to ensure compatibility as the
- interface changes.
-
.. data:: defaultTestLoader
Instance of the :class:`TestLoader` class intended to be shared. If no
@@ -2285,7 +2281,8 @@ Loading and running tests
The *testRunner* argument can either be a test runner class or an already
created instance of it. By default ``main`` calls :func:`sys.exit` with
- an exit code indicating success or failure of the tests run.
+ an exit code indicating success (0) or failure (1) of the tests run.
+ An exit code of 5 indicates that no tests were run.
The *testLoader* argument has to be a :class:`TestLoader` instance,
and defaults to :data:`defaultTestLoader`.
diff --git a/Doc/library/urllib.request.rst b/Doc/library/urllib.request.rst
index 64cc9c388ec30d..1b05458280d896 100644
--- a/Doc/library/urllib.request.rst
+++ b/Doc/library/urllib.request.rst
@@ -28,8 +28,8 @@ The :mod:`urllib.request` module defines the following functions:
.. function:: urlopen(url, data=None[, timeout], *, cafile=None, capath=None, cadefault=False, context=None)
- Open the URL *url*, which can be either a string or a
- :class:`Request` object.
+ Open *url*, which can be either a string containing a valid, properly
+ encoded URL, or a :class:`Request` object.
*data* must be an object specifying additional data to be sent to the
server, or ``None`` if no such data is needed. See :class:`Request`
@@ -192,7 +192,7 @@ The following classes are provided:
This class is an abstraction of a URL request.
- *url* should be a string containing a valid URL.
+ *url* should be a string containing a valid, properly encoded URL.
*data* must be an object specifying additional data to send to the
server, or ``None`` if no such data is needed. Currently HTTP
diff --git a/Doc/library/venv.rst b/Doc/library/venv.rst
index 240ab139838db9..52bf99e5bb0f67 100644
--- a/Doc/library/venv.rst
+++ b/Doc/library/venv.rst
@@ -284,11 +284,14 @@ creation according to their needs, the :class:`EnvBuilder` class.
.. method:: upgrade_dependencies(context)
- Upgrades the core venv dependency packages (currently ``pip`` and
- ``setuptools``) in the environment. This is done by shelling out to the
+ Upgrades the core venv dependency packages (currently ``pip``)
+ in the environment. This is done by shelling out to the
``pip`` executable in the environment.
.. versionadded:: 3.9
+ .. versionchanged:: 3.12
+
+ ``setuptools`` is no longer a core venv dependency.
.. method:: post_setup(context)
diff --git a/Doc/library/zipfile.rst b/Doc/library/zipfile.rst
index e2a085d6e98e67..6f4826cb065c64 100644
--- a/Doc/library/zipfile.rst
+++ b/Doc/library/zipfile.rst
@@ -7,7 +7,7 @@
.. moduleauthor:: James C. Ahlstrom
.. sectionauthor:: James C. Ahlstrom
-**Source code:** :source:`Lib/zipfile.py`
+**Source code:** :source:`Lib/zipfile/`
--------------
diff --git a/Doc/reference/datamodel.rst b/Doc/reference/datamodel.rst
index 9f91ade35e50dc..55431f1951e50d 100644
--- a/Doc/reference/datamodel.rst
+++ b/Doc/reference/datamodel.rst
@@ -2102,6 +2102,10 @@ Resolving MRO entries
:func:`types.resolve_bases`
Dynamically resolve bases that are not instances of :class:`type`.
+ :func:`types.get_original_bases`
+ Retrieve a class's "original bases" prior to modifications by
+ :meth:`~object.__mro_entries__`.
+
:pep:`560`
Core support for typing module and generic types.
diff --git a/Doc/reference/import.rst b/Doc/reference/import.rst
index b22b5251f1de46..57eb5403243eef 100644
--- a/Doc/reference/import.rst
+++ b/Doc/reference/import.rst
@@ -1077,4 +1077,5 @@ methods to finders and loaders.
.. [#fnpic] In legacy code, it is possible to find instances of
:class:`imp.NullImporter` in the :data:`sys.path_importer_cache`. It
is recommended that code be changed to use ``None`` instead. See
- :ref:`portingpythoncode` for more details.
+ :ref:`portingpythoncode` for more details. Note that the ``imp`` module
+ was removed in Python 3.12.
diff --git a/Doc/requirements-oldest-sphinx.txt b/Doc/requirements-oldest-sphinx.txt
new file mode 100644
index 00000000000000..d0390a04ea6dd8
--- /dev/null
+++ b/Doc/requirements-oldest-sphinx.txt
@@ -0,0 +1,38 @@
+# Requirements to build the Python documentation, for the oldest supported
+# Sphinx version.
+#
+# We pin Sphinx and all of its dependencies to ensure a consistent environment.
+
+blurb
+python-docs-theme>=2022.1
+
+# Generated from:
+# pip install "Sphinx~=3.2.0" "docutils<0.17" "Jinja2<3" "MarkupSafe<2"
+# pip freeze
+#
+# Sphinx 3.2 comes from ``needs_sphinx = '3.2'`` in ``Doc/conf.py``.
+# Docutils<0.17, Jinja2<3, and MarkupSafe<2 are additionally specified as
+# Sphinx 3.2 is incompatible with newer releases of these packages.
+
+Sphinx==3.2.1
+alabaster==0.7.13
+Babel==2.12.1
+certifi==2022.12.7
+charset-normalizer==3.1.0
+colorama==0.4.6
+docutils==0.16
+idna==3.4
+imagesize==1.4.1
+Jinja2==2.11.3
+MarkupSafe==1.1.1
+packaging==23.1
+Pygments==2.15.1
+requests==2.29.0
+snowballstemmer==2.2.0
+sphinxcontrib-applehelp==1.0.4
+sphinxcontrib-devhelp==1.0.2
+sphinxcontrib-htmlhelp==2.0.1
+sphinxcontrib-jsmath==1.0.1
+sphinxcontrib-qthelp==1.0.3
+sphinxcontrib-serializinghtml==1.1.5
+urllib3==1.26.15
diff --git a/Doc/requirements.txt b/Doc/requirements.txt
index 71d3cd61e53877..9cbd15c2209dc6 100644
--- a/Doc/requirements.txt
+++ b/Doc/requirements.txt
@@ -1,4 +1,7 @@
# Requirements to build the Python documentation
+#
+# Note that when updating this file, you will likely also have to update
+# the Doc/constraints.txt file.
# Sphinx version is pinned so that new versions that introduce new warnings
# won't suddenly cause build failures. Updating the version is fine as long
@@ -13,3 +16,5 @@ sphinxext-opengraph==0.7.5
# The theme used by the documentation is stored separately, so we need
# to install that as well.
python-docs-theme>=2022.1
+
+-c constraints.txt
diff --git a/Doc/tools/.nitignore b/Doc/tools/.nitignore
index f6fe8df97810ff..1d3503bf06f085 100644
--- a/Doc/tools/.nitignore
+++ b/Doc/tools/.nitignore
@@ -59,7 +59,6 @@ Doc/faq/gui.rst
Doc/faq/library.rst
Doc/faq/programming.rst
Doc/glossary.rst
-Doc/howto/argparse.rst
Doc/howto/curses.rst
Doc/howto/descriptor.rst
Doc/howto/enum.rst
@@ -78,7 +77,6 @@ Doc/library/__future__.rst
Doc/library/_thread.rst
Doc/library/abc.rst
Doc/library/aifc.rst
-Doc/library/argparse.rst
Doc/library/ast.rst
Doc/library/asyncio-dev.rst
Doc/library/asyncio-eventloop.rst
@@ -113,7 +111,6 @@ Doc/library/csv.rst
Doc/library/ctypes.rst
Doc/library/curses.ascii.rst
Doc/library/curses.rst
-Doc/library/dataclasses.rst
Doc/library/datetime.rst
Doc/library/dbm.rst
Doc/library/decimal.rst
@@ -152,7 +149,6 @@ Doc/library/http.cookiejar.rst
Doc/library/http.cookies.rst
Doc/library/http.server.rst
Doc/library/idle.rst
-Doc/library/imp.rst
Doc/library/importlib.resources.abc.rst
Doc/library/importlib.resources.rst
Doc/library/importlib.rst
@@ -180,7 +176,6 @@ Doc/library/os.rst
Doc/library/ossaudiodev.rst
Doc/library/pickle.rst
Doc/library/pickletools.rst
-Doc/library/pkgutil.rst
Doc/library/platform.rst
Doc/library/plistlib.rst
Doc/library/poplib.rst
diff --git a/Doc/using/unix.rst b/Doc/using/unix.rst
index 067ff4cce5e48d..0044eb07f56eec 100644
--- a/Doc/using/unix.rst
+++ b/Doc/using/unix.rst
@@ -54,13 +54,6 @@ On FreeBSD and OpenBSD
pkg_add ftp://ftp.openbsd.org/pub/OpenBSD/4.2/packages/i386/python-2.5.1p2.tgz
-On OpenSolaris
---------------
-
-You can get Python from `OpenCSW `_. Various versions
-of Python are available and can be installed with e.g. ``pkgutil -i python27``.
-
-
.. _building-python-on-unix:
Building Python
diff --git a/Doc/using/venv-create.inc b/Doc/using/venv-create.inc
index 43ee6b7807d57e..2fc90126482268 100644
--- a/Doc/using/venv-create.inc
+++ b/Doc/using/venv-create.inc
@@ -61,12 +61,16 @@ The command, if run with ``-h``, will show the available options::
environment (pip is bootstrapped by default)
--prompt PROMPT Provides an alternative prompt prefix for this
environment.
- --upgrade-deps Upgrade core dependencies: pip setuptools to the
+ --upgrade-deps Upgrade core dependencies (pip) to the
latest version in PyPI
Once an environment has been created, you may wish to activate it, e.g. by
sourcing an activate script in its bin directory.
+.. versionchanged:: 3.12
+
+ ``setuptools`` is no longer a core venv dependency.
+
.. versionchanged:: 3.9
Add ``--upgrade-deps`` option to upgrade pip + setuptools to the latest on PyPI
@@ -104,4 +108,3 @@ invoked to bootstrap ``pip`` into the virtual environment.
Multiple paths can be given to ``venv``, in which case an identical virtual
environment will be created, according to the given options, at each provided
path.
-
diff --git a/Doc/using/windows.rst b/Doc/using/windows.rst
index 1c4e41c0e0e239..380950eb507ffb 100644
--- a/Doc/using/windows.rst
+++ b/Doc/using/windows.rst
@@ -470,7 +470,7 @@ user's system, including environment variables, system registry settings, and
installed packages. The standard library is included as pre-compiled and
optimized ``.pyc`` files in a ZIP, and ``python3.dll``, ``python37.dll``,
``python.exe`` and ``pythonw.exe`` are all provided. Tcl/tk (including all
-dependants, such as Idle), pip and the Python documentation are not included.
+dependents, such as Idle), pip and the Python documentation are not included.
.. note::
diff --git a/Doc/whatsnew/2.6.rst b/Doc/whatsnew/2.6.rst
index 34f2656f765c7d..4ee2aacb108a36 100644
--- a/Doc/whatsnew/2.6.rst
+++ b/Doc/whatsnew/2.6.rst
@@ -172,7 +172,7 @@ this edition of "What's New in Python" links to the bug/patch
item for each change.
Hosting of the Python bug tracker is kindly provided by
-`Upfront Systems `__
+`Upfront Systems <https://www.upfrontsystems.co.za/>`__
of Stellenbosch, South Africa. Martin von Löwis put a
lot of effort into importing existing bugs and patches from
SourceForge; his scripts for this import operation are at
diff --git a/Doc/whatsnew/2.7.rst b/Doc/whatsnew/2.7.rst
index 810a2cd2537c34..36afcb163f1afc 100644
--- a/Doc/whatsnew/2.7.rst
+++ b/Doc/whatsnew/2.7.rst
@@ -2104,7 +2104,7 @@ Changes to Python's build process and to the C API include:
* The latest release of the GNU Debugger, GDB 7, can be `scripted
using Python
- `__.
+ `__.
When you begin debugging an executable program P, GDB will look for
a file named ``P-gdb.py`` and automatically read it. Dave Malcolm
contributed a :file:`python-gdb.py` that adds a number of
diff --git a/Doc/whatsnew/3.12.rst b/Doc/whatsnew/3.12.rst
index 4165b16ba76441..f4ee30b0d4d9eb 100644
--- a/Doc/whatsnew/3.12.rst
+++ b/Doc/whatsnew/3.12.rst
@@ -66,6 +66,10 @@ Summary -- Release highlights
.. PEP-sized items next.
+New typing features:
+
+* :ref:`whatsnew312-pep692`
+
Important deprecations, removals or restrictions:
* :pep:`623`, Remove wstr from Unicode
@@ -137,6 +141,43 @@ New Features
(Design by Pablo Galindo. Contributed by Pablo Galindo and Christian Heimes
with contributions from Gregory P. Smith [Google] and Mark Shannon
in :gh:`96123`.)
+* The extraction methods in :mod:`tarfile`, and :func:`shutil.unpack_archive`,
+ have a new a *filter* argument that allows limiting tar features than may be
+ surprising or dangerous, such as creating files outside the destination
+ directory.
+ See :ref:`tarfile-extraction-filter` for details.
+ In Python 3.14, the default will switch to ``'data'``.
+ (Contributed by Petr Viktorin in :pep:`706`.)
+
+New Features Related to Type Hints
+==================================
+
+This section covers major changes affecting :pep:`484` type hints and
+the :mod:`typing` module.
+
+.. _whatsnew312-pep692:
+
+PEP 692: Using ``TypedDict`` for more precise ``**kwargs`` typing
+-----------------------------------------------------------------
+
+Typing ``**kwargs`` in a function signature as introduced by :pep:`484` allowed
+for valid annotations only in cases where all of the ``**kwargs`` were of the
+same type.
+
+This PEP specifies a more precise way of typing ``**kwargs`` by relying on
+typed dictionaries::
+
+ from typing import TypedDict, Unpack
+
+ class Movie(TypedDict):
+ name: str
+ year: int
+
+ def foo(**kwargs: Unpack[Movie]): ...
+
+See :pep:`692` for more details.
+
+(PEP written by Franek Magiera)
Other Language Changes
@@ -196,6 +237,11 @@ Other Language Changes
wrapped by a :exc:`RuntimeError`. Context information is added to the
exception as a :pep:`678` note. (Contributed by Irit Katriel in :gh:`77757`.)
+* When a ``try-except*`` construct handles the entire :exc:`ExceptionGroup`
+ and raises one other exception, that exception is no longer wrapped in an
+ :exc:`ExceptionGroup`. (Contributed by Irit Katriel in :gh:`103590`.)
+
+
New Modules
===========
@@ -250,15 +296,22 @@ asyncio
:mod:`asyncio` does not support legacy generator-based coroutines.
(Contributed by Kumar Aditya in :gh:`102748`.)
-* :func:`asyncio.wait` now accepts generators yielding tasks.
+* :func:`asyncio.wait` and :func:`asyncio.as_completed` now accepts generators
+ yielding tasks.
(Contributed by Kumar Aditya in :gh:`78530`.)
+calendar
+--------
+
+* Add enums :data:`~calendar.Month` and :data:`~calendar.Day`.
+ (Contributed by Prince Roshan in :gh:`103636`.)
+
csv
---
* Add :data:`~csv.QUOTE_NOTNULL` and :data:`~csv.QUOTE_STRINGS` flags to
provide finer grained control of ``None`` and empty strings by
- :class:`~csv.reader` and :class:`~csv.writer` objects.
+ :class:`~csv.writer` objects.
inspect
-------
@@ -308,6 +361,13 @@ fractions
* Objects of type :class:`fractions.Fraction` now support float-style
formatting. (Contributed by Mark Dickinson in :gh:`100161`.)
+itertools
+---------
+
+* Added :class:`itertools.batched()` for collecting into even-sized
+ tuples where the last batch may be shorter than the rest.
+ (Contributed by Raymond Hettinger in :gh:`98363`.)
+
math
----
@@ -391,6 +451,16 @@ sqlite3
:ref:`transaction handling `.
(Contributed by Erlend E. Aasland in :gh:`83638`.)
+* Add *entrypoint* keyword-only parameter to
+ :meth:`~sqlite3.Connection.load_extension`,
+ for overriding the SQLite extension entry point.
+ (Contributed by Erlend E. Aasland in :gh:`103015`.)
+
+* Add :meth:`~sqlite3.Connection.getconfig` and
+ :meth:`~sqlite3.Connection.setconfig` to :class:`~sqlite3.Connection`
+ to make configuration changes to a database connection.
+ (Contributed by Erlend E. Aasland in :gh:`103489`.)
+
threading
---------
@@ -399,6 +469,13 @@ threading
profiling functions in all running threads in addition to the calling one.
(Contributed by Pablo Galindo in :gh:`93503`.)
+types
+-----
+
+* Add :func:`types.get_original_bases` to allow for further introspection of
+ :ref:`user-defined-generics` when subclassed. (Contributed by
+ James Hilton-Balfe and Alex Waygood in :gh:`101827`.)
+
unicodedata
-----------
@@ -435,8 +512,10 @@ uuid
tempfile
--------
-The :class:`tempfile.NamedTemporaryFile` function has a new optional parameter
-*delete_on_close* (Contributed by Evgeny Zorin in :gh:`58451`.)
+* The :class:`tempfile.NamedTemporaryFile` function has a new optional parameter
+ *delete_on_close* (Contributed by Evgeny Zorin in :gh:`58451`.)
+* :func:`tempfile.mkdtemp` now always returns an absolute path, even if the
+ argument provided to the *dir* parameter is a relative path.
.. _whatsnew-typing-py312:
@@ -531,6 +610,9 @@ Optimizations
replacement strings containing group references by 2--3 times.
(Contributed by Serhiy Storchaka in :gh:`91524`.)
+* Speed up :class:`asyncio.Task` creation by deferring expensive string formatting.
+ (Contributed by Itamar O in :gh:`103793`.)
+
CPython bytecode changes
========================
@@ -615,6 +697,13 @@ Deprecated
* The *onerror* argument of :func:`shutil.rmtree` is deprecated as will be removed
in Python 3.14. Use *onexc* instead. (Contributed by Irit Katriel in :gh:`102828`.)
+* Extracting tar archives without specifying *filter* is deprecated until
+ Python 3.14, when ``'data'`` filter will become the default.
+ See :ref:`tarfile-extraction-filter` for details.
+
+* ``calendar.January`` and ``calendar.February`` constants are deprecated and
+ replaced by :data:`calendar.Month.JANUARY` and :data:`calendar.Month.FEBRUARY`.
+ (Contributed by Prince Roshan in :gh:`103636`.)
Pending Removal in Python 3.13
------------------------------
@@ -731,6 +820,24 @@ Removed
project can be installed: it still provides ``distutils``.
(Contributed by Victor Stinner in :gh:`92584`.)
+* Remove the bundled setuptools wheel from :mod:`ensurepip`,
+ and stop installing setuptools in environments created by :mod:`venv`.
+
+ ``pip (>= 22.1)`` does not require setuptools to be installed in the
+ environment. ``setuptools``-based (and ``distutils``-based) packages
+ can still be used with ``pip install``, since pip will provide
+ ``setuptools`` in the build environment it uses for building a
+ package.
+
+ ``easy_install``, ``pkg_resources``, ``setuptools`` and ``distutils``
+ are no longer provided by default in environments created with
+ ``venv`` or bootstrapped with ``ensurepip``, since they are part of
+ the ``setuptools`` package. For projects relying on these at runtime,
+ the ``setuptools`` project should be declared as a dependency and
+ installed separately (typically, using pip).
+
+ (Contributed by Pradyun Gedam in :gh:`95299`.)
+
* Removed many old deprecated :mod:`unittest` features:
- A number of :class:`~unittest.TestCase` method aliases:
@@ -866,11 +973,14 @@ Removed
completed:
* References to, and support for ``module_repr()`` has been eradicated.
-
+ (Contributed by Barry Warsaw in :gh:`97850`.)
* ``importlib.util.set_package`` has been removed.
(Contributed by Brett Cannon in :gh:`65961`.)
+* The ``imp`` module has been removed. (Contributed by Barry Warsaw in
+ :gh:`98040`.)
+
* Removed the ``suspicious`` rule from the documentation Makefile, and
removed ``Doc/tools/rstlint.py``, both in favor of `sphinx-lint
`_.
@@ -959,6 +1069,10 @@ Changes in the Python API
exception instance, rather than to a ``(typ, exc, tb)`` tuple.
(Contributed by Irit Katriel in :gh:`103176`.)
+* When extracting tar files using :mod:`tarfile` or
+ :func:`shutil.unpack_archive`, pass the *filter* argument to limit features
+ that may be surprising or dangerous.
+ See :ref:`tarfile-extraction-filter` for details.
Build Changes
=============
@@ -1103,6 +1217,24 @@ New Features
to replace the legacy-api :c:func:`!PyErr_Display`. (Contributed by
Irit Katriel in :gh:`102755`).
+* :pep:`683`: Introduced Immortal Objects to Python which allows objects
+ to bypass reference counts and introduced changes to the C-API:
+
+ - ``_Py_IMMORTAL_REFCNT``: The reference count that defines an object
+ as immortal.
+ - ``_Py_IsImmortal`` Checks if an object has the immortal reference count.
+ - ``PyObject_HEAD_INIT`` This will now initialize reference count to
+ ``_Py_IMMORTAL_REFCNT`` when used with ``Py_BUILD_CORE``.
+ - ``SSTATE_INTERNED_IMMORTAL`` An identifier for interned unicode objects
+ that are immortal.
+ - ``SSTATE_INTERNED_IMMORTAL_STATIC`` An identifier for interned unicode
+ objects that are immortal and static
+ - ``sys.getunicodeinternedsize`` This returns the total number of unicode
+ objects that have been interned. This is now needed for refleak.py to
+ correctly track reference counts and allocated blocks
+
+ (Contributed by Eddie Elizondo in :gh:`84436`.)
+
Porting to Python 3.12
----------------------
@@ -1267,8 +1399,7 @@ Removed
* :c:func:`!PyUnicode_GetSize`
* :c:func:`!PyUnicode_GET_DATA_SIZE`
-* Remove the ``PyUnicode_InternImmortal()`` function and the
- ``SSTATE_INTERNED_IMMORTAL`` macro.
+* Remove the ``PyUnicode_InternImmortal()`` function macro.
(Contributed by Victor Stinner in :gh:`85858`.)
* Remove ``Jython`` compatibility hacks from several stdlib modules and tests.
diff --git a/Grammar/Tokens b/Grammar/Tokens
index 1f3e3b09913653..096876fdd130f8 100644
--- a/Grammar/Tokens
+++ b/Grammar/Tokens
@@ -53,6 +53,7 @@ ATEQUAL '@='
RARROW '->'
ELLIPSIS '...'
COLONEQUAL ':='
+EXCLAMATION '!'
OP
AWAIT
@@ -60,6 +61,9 @@ ASYNC
TYPE_IGNORE
TYPE_COMMENT
SOFT_KEYWORD
+FSTRING_START
+FSTRING_MIDDLE
+FSTRING_END
ERRORTOKEN
# These aren't used by the C tokenizer but are needed for tokenize.py
diff --git a/Grammar/python.gram b/Grammar/python.gram
index 2498251293e80e..6361dcd0985b99 100644
--- a/Grammar/python.gram
+++ b/Grammar/python.gram
@@ -194,7 +194,7 @@ yield_stmt[stmt_ty]: y=yield_expr { _PyAST_Expr(y, EXTRA) }
assert_stmt[stmt_ty]: 'assert' a=expression b=[',' z=expression { z }] { _PyAST_Assert(a, b, EXTRA) }
-import_stmt[stmt_ty]:
+import_stmt[stmt_ty]:
| invalid_import
| import_name
| import_from
@@ -415,8 +415,8 @@ try_stmt[stmt_ty]:
| invalid_try_stmt
| 'try' &&':' b=block f=finally_block { _PyAST_Try(b, NULL, NULL, f, EXTRA) }
| 'try' &&':' b=block ex[asdl_excepthandler_seq*]=except_block+ el=[else_block] f=[finally_block] { _PyAST_Try(b, ex, el, f, EXTRA) }
- | 'try' &&':' b=block ex[asdl_excepthandler_seq*]=except_star_block+ el=[else_block] f=[finally_block] {
- CHECK_VERSION(stmt_ty, 11, "Exception groups are",
+ | 'try' &&':' b=block ex[asdl_excepthandler_seq*]=except_star_block+ el=[else_block] f=[finally_block] {
+ CHECK_VERSION(stmt_ty, 11, "Exception groups are",
_PyAST_TryStar(b, ex, el, f, EXTRA)) }
@@ -807,7 +807,7 @@ atom[expr_ty]:
| 'True' { _PyAST_Constant(Py_True, NULL, EXTRA) }
| 'False' { _PyAST_Constant(Py_False, NULL, EXTRA) }
| 'None' { _PyAST_Constant(Py_None, NULL, EXTRA) }
- | &STRING strings
+ | &(STRING|FSTRING_START) strings
| NUMBER
| &'(' (tuple | group | genexp)
| &'[' (list | listcomp)
@@ -877,7 +877,25 @@ lambda_param[arg_ty]: a=NAME { _PyAST_arg(a->v.Name.id, NULL, NULL, EXTRA) }
# LITERALS
# ========
-strings[expr_ty] (memo): a=STRING+ { _PyPegen_concatenate_strings(p, a) }
+fstring_middle[expr_ty]:
+ | fstring_replacement_field
+ | t=FSTRING_MIDDLE { _PyPegen_constant_from_token(p, t) }
+fstring_replacement_field[expr_ty]:
+ | '{' a=(yield_expr | star_expressions) debug_expr="="? conversion=[fstring_conversion] format=[fstring_full_format_spec] rbrace='}' {
+ _PyPegen_formatted_value(p, a, debug_expr, conversion, format, rbrace, EXTRA) }
+ | invalid_replacement_field
+fstring_conversion[ResultTokenWithMetadata*]:
+ | conv_token="!" conv=NAME { _PyPegen_check_fstring_conversion(p, conv_token, conv) }
+fstring_full_format_spec[ResultTokenWithMetadata*]:
+ | colon=':' spec=fstring_format_spec* { _PyPegen_setup_full_format_spec(p, colon, (asdl_expr_seq *) spec, EXTRA) }
+fstring_format_spec[expr_ty]:
+ | t=FSTRING_MIDDLE { _PyPegen_constant_from_token(p, t) }
+ | fstring_replacement_field
+fstring[expr_ty]:
+ | a=FSTRING_START b=fstring_middle* c=FSTRING_END { _PyPegen_joined_str(p, a, (asdl_expr_seq*)b, c) }
+
+string[expr_ty]: s[Token*]=STRING { _PyPegen_constant_from_string(p, s) }
+strings[expr_ty] (memo): a[asdl_expr_seq*]=(fstring|string)+ { _PyPegen_concatenate_strings(p, a, EXTRA) }
list[expr_ty]:
| '[' a=[star_named_expressions] ']' { _PyAST_List(a, Load, EXTRA) }
@@ -1118,6 +1136,8 @@ invalid_expression:
_PyPegen_check_legacy_stmt(p, a) ? NULL : p->tokens[p->mark-1]->level == 0 ? NULL :
RAISE_SYNTAX_ERROR_KNOWN_RANGE(a, b, "invalid syntax. Perhaps you forgot a comma?") }
| a=disjunction 'if' b=disjunction !('else'|':') { RAISE_SYNTAX_ERROR_KNOWN_RANGE(a, b, "expected 'else' after 'if' expression") }
+ | a='lambda' [lambda_params] b=':' &(FSTRING_MIDDLE | fstring_replacement_field) {
+ RAISE_SYNTAX_ERROR_KNOWN_RANGE(a, b, "f-string: lambda expressions are not allowed without parentheses") }
invalid_named_expression(memo):
| a=expression ':=' expression {
@@ -1241,7 +1261,7 @@ invalid_group:
invalid_import:
| a='import' dotted_name 'from' dotted_name {
RAISE_SYNTAX_ERROR_STARTING_FROM(a, "Did you mean to use 'from ... import ...' instead?") }
-
+
invalid_import_from_targets:
| import_from_as_names ',' NEWLINE {
RAISE_SYNTAX_ERROR("trailing comma not allowed without surrounding parentheses") }
@@ -1335,3 +1355,24 @@ invalid_kvpair:
| expression a=':' &('}'|',') {RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "expression expected after dictionary key and ':'") }
invalid_starred_expression:
| a='*' expression '=' b=expression { RAISE_SYNTAX_ERROR_KNOWN_RANGE(a, b, "cannot assign to iterable argument unpacking") }
+invalid_replacement_field:
+ | '{' a='=' { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "f-string: valid expression required before '='") }
+ | '{' a='!' { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "f-string: valid expression required before '!'") }
+ | '{' a=':' { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "f-string: valid expression required before ':'") }
+ | '{' a='}' { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "f-string: valid expression required before '}'") }
+ | '{' !(yield_expr | star_expressions) { RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN("f-string: expecting a valid expression after '{'")}
+ | '{' (yield_expr | star_expressions) !('=' | '!' | ':' | '}') {
+ PyErr_Occurred() ? NULL : RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN("f-string: expecting '=', or '!', or ':', or '}'") }
+ | '{' (yield_expr | star_expressions) '=' !('!' | ':' | '}') {
+ PyErr_Occurred() ? NULL : RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN("f-string: expecting '!', or ':', or '}'") }
+ | '{' (yield_expr | star_expressions) '='? invalid_conversion_character
+ | '{' (yield_expr | star_expressions) '='? ['!' NAME] !(':' | '}') {
+ PyErr_Occurred() ? NULL : RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN("f-string: expecting ':' or '}'") }
+ | '{' (yield_expr | star_expressions) '='? ['!' NAME] ':' fstring_format_spec* !'}' {
+ PyErr_Occurred() ? NULL : RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN("f-string: expecting '}', or format specs") }
+ | '{' (yield_expr | star_expressions) '='? ['!' NAME] !'}' {
+ PyErr_Occurred() ? NULL : RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN("f-string: expecting '}'") }
+
+invalid_conversion_character:
+ | '!' &(':' | '}') { RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN("f-string: missing conversion character") }
+ | '!' !NAME { RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN("f-string: invalid conversion character") }
diff --git a/Include/boolobject.h b/Include/boolobject.h
index ca21fbfad8e827..976fa35201d035 100644
--- a/Include/boolobject.h
+++ b/Include/boolobject.h
@@ -11,8 +11,7 @@ PyAPI_DATA(PyTypeObject) PyBool_Type;
#define PyBool_Check(x) Py_IS_TYPE((x), &PyBool_Type)
-/* Py_False and Py_True are the only two bools in existence.
-Don't forget to apply Py_INCREF() when returning either!!! */
+/* Py_False and Py_True are the only two bools in existence. */
/* Don't use these directly */
PyAPI_DATA(PyLongObject) _Py_FalseStruct;
@@ -31,8 +30,8 @@ PyAPI_FUNC(int) Py_IsFalse(PyObject *x);
#define Py_IsFalse(x) Py_Is((x), Py_False)
/* Macros for returning Py_True or Py_False, respectively */
-#define Py_RETURN_TRUE return Py_NewRef(Py_True)
-#define Py_RETURN_FALSE return Py_NewRef(Py_False)
+#define Py_RETURN_TRUE return Py_True
+#define Py_RETURN_FALSE return Py_False
/* Function to return a bool from a C long */
PyAPI_FUNC(PyObject *) PyBool_FromLong(long);
diff --git a/Include/cpython/initconfig.h b/Include/cpython/initconfig.h
index 8bc681b1a93f5c..79c1023baa9a0f 100644
--- a/Include/cpython/initconfig.h
+++ b/Include/cpython/initconfig.h
@@ -245,6 +245,8 @@ PyAPI_FUNC(PyStatus) PyConfig_SetWideStringList(PyConfig *config,
/* --- PyInterpreterConfig ------------------------------------ */
typedef struct {
+ // XXX "allow_object_sharing"? "own_objects"?
+ int use_main_obmalloc;
int allow_fork;
int allow_exec;
int allow_threads;
@@ -254,6 +256,7 @@ typedef struct {
#define _PyInterpreterConfig_INIT \
{ \
+ .use_main_obmalloc = 0, \
.allow_fork = 0, \
.allow_exec = 0, \
.allow_threads = 1, \
@@ -263,6 +266,7 @@ typedef struct {
#define _PyInterpreterConfig_LEGACY_INIT \
{ \
+ .use_main_obmalloc = 1, \
.allow_fork = 1, \
.allow_exec = 1, \
.allow_threads = 1, \
diff --git a/Include/cpython/object.h b/Include/cpython/object.h
index 98cc51cd7fee49..ce4d13cd9c28fe 100644
--- a/Include/cpython/object.h
+++ b/Include/cpython/object.h
@@ -564,3 +564,10 @@ PyAPI_FUNC(int) PyType_AddWatcher(PyType_WatchCallback callback);
PyAPI_FUNC(int) PyType_ClearWatcher(int watcher_id);
PyAPI_FUNC(int) PyType_Watch(int watcher_id, PyObject *type);
PyAPI_FUNC(int) PyType_Unwatch(int watcher_id, PyObject *type);
+
+/* Attempt to assign a version tag to the given type.
+ *
+ * Returns 1 if the type already had a valid version tag or a new one was
+ * assigned, or 0 if a new tag could not be assigned.
+ */
+PyAPI_FUNC(int) PyUnstable_Type_AssignVersionTag(PyTypeObject *type);
diff --git a/Include/cpython/pystate.h b/Include/cpython/pystate.h
index ea6ed8d2bc4a4c..f33c72d4cf4d2a 100644
--- a/Include/cpython/pystate.h
+++ b/Include/cpython/pystate.h
@@ -11,6 +11,10 @@ is available in a given context. For example, forking the process
might not be allowed in the current interpreter (i.e. os.fork() would fail).
*/
+/* Set if the interpreter share obmalloc runtime state
+ with the main interpreter. */
+#define Py_RTFLAGS_USE_MAIN_OBMALLOC (1UL << 5)
+
/* Set if import should check a module for subinterpreter support. */
#define Py_RTFLAGS_MULTI_INTERP_EXTENSIONS (1UL << 8)
diff --git a/Include/cpython/unicodeobject.h b/Include/cpython/unicodeobject.h
index 75a74ffa2f9dff..3394726dfffd72 100644
--- a/Include/cpython/unicodeobject.h
+++ b/Include/cpython/unicodeobject.h
@@ -98,9 +98,16 @@ typedef struct {
Py_ssize_t length; /* Number of code points in the string */
Py_hash_t hash; /* Hash value; -1 if not set */
struct {
- /* If interned is set, the two references from the
- dictionary to this object are *not* counted in ob_refcnt. */
- unsigned int interned:1;
+ /* If interned is non-zero, the two references from the
+ dictionary to this object are *not* counted in ob_refcnt.
+ The possible values here are:
+ 0: Not Interned
+ 1: Interned
+ 2: Interned and Immortal
+ 3: Interned, Immortal, and Static
+ This categorization allows the runtime to determine the right
+ cleanup mechanism at runtime shutdown. */
+ unsigned int interned:2;
/* Character size:
- PyUnicode_1BYTE_KIND (1):
@@ -135,7 +142,7 @@ typedef struct {
unsigned int ascii:1;
/* Padding to ensure that PyUnicode_DATA() is always aligned to
4 bytes (see issue #19537 on m68k). */
- unsigned int :26;
+ unsigned int :25;
} state;
} PyASCIIObject;
@@ -183,6 +190,8 @@ PyAPI_FUNC(int) _PyUnicode_CheckConsistency(
/* Interning state. */
#define SSTATE_NOT_INTERNED 0
#define SSTATE_INTERNED_MORTAL 1
+#define SSTATE_INTERNED_IMMORTAL 2
+#define SSTATE_INTERNED_IMMORTAL_STATIC 3
/* Use only if you know it's a string */
static inline unsigned int PyUnicode_CHECK_INTERNED(PyObject *op) {
diff --git a/Include/internal/pycore_bytesobject.h b/Include/internal/pycore_bytesobject.h
index 9173a4f105f800..d36fa9569d64a5 100644
--- a/Include/internal/pycore_bytesobject.h
+++ b/Include/internal/pycore_bytesobject.h
@@ -9,11 +9,6 @@ extern "C" {
#endif
-/* runtime lifecycle */
-
-extern PyStatus _PyBytes_InitTypes(PyInterpreterState *);
-
-
/* Substring Search.
Returns the index of the first occurrence of
diff --git a/Include/internal/pycore_code.h b/Include/internal/pycore_code.h
index d32f37ac44d83c..86fd48b63ef8e4 100644
--- a/Include/internal/pycore_code.h
+++ b/Include/internal/pycore_code.h
@@ -51,6 +51,15 @@ typedef struct {
#define INLINE_CACHE_ENTRIES_BINARY_SUBSCR CACHE_ENTRIES(_PyBinarySubscrCache)
+typedef struct {
+ uint16_t counter;
+ uint16_t class_version[2];
+ uint16_t self_type_version[2];
+ uint16_t method[4];
+} _PySuperAttrCache;
+
+#define INLINE_CACHE_ENTRIES_LOAD_SUPER_ATTR CACHE_ENTRIES(_PySuperAttrCache)
+
typedef struct {
uint16_t counter;
uint16_t version[2];
@@ -217,6 +226,8 @@ extern int _PyLineTable_PreviousAddressRange(PyCodeAddressRange *range);
/* Specialization functions */
+extern void _Py_Specialize_LoadSuperAttr(PyObject *global_super, PyObject *cls, PyObject *self,
+ _Py_CODEUNIT *instr, PyObject *name, int load_method);
extern void _Py_Specialize_LoadAttr(PyObject *owner, _Py_CODEUNIT *instr,
PyObject *name);
extern void _Py_Specialize_StoreAttr(PyObject *owner, _Py_CODEUNIT *instr,
diff --git a/Include/internal/pycore_compile.h b/Include/internal/pycore_compile.h
index f85240c48a89b0..1a032f652dddaf 100644
--- a/Include/internal/pycore_compile.h
+++ b/Include/internal/pycore_compile.h
@@ -19,6 +19,7 @@ PyAPI_FUNC(PyCodeObject*) _PyAST_Compile(
int optimize,
struct _arena *arena);
+static const _PyCompilerSrcLocation NO_LOCATION = {-1, -1, -1, -1};
typedef struct {
int optimize;
@@ -33,15 +34,21 @@ extern int _PyAST_Optimize(
struct _arena *arena,
_PyASTOptimizeState *state);
+typedef struct {
+ int h_offset;
+ int h_startdepth;
+ int h_preserve_lasti;
+} _PyCompile_ExceptHandlerInfo;
typedef struct {
int i_opcode;
int i_oparg;
_PyCompilerSrcLocation i_loc;
-} _PyCompilerInstruction;
+ _PyCompile_ExceptHandlerInfo i_except_handler_info;
+} _PyCompile_Instruction;
typedef struct {
- _PyCompilerInstruction *s_instrs;
+ _PyCompile_Instruction *s_instrs;
int s_allocated;
int s_used;
@@ -82,6 +89,8 @@ int _PyCompile_EnsureArrayLargeEnough(
int _PyCompile_ConstCacheMergeOne(PyObject *const_cache, PyObject **obj);
+int _PyCompile_InstrSize(int opcode, int oparg);
+
/* Access compiler internals for unit testing */
PyAPI_FUNC(PyObject*) _PyCompile_CodeGen(
diff --git a/Include/internal/pycore_fileutils_windows.h b/Include/internal/pycore_fileutils_windows.h
index 9bc7feb8cecd01..e804d385e76708 100644
--- a/Include/internal/pycore_fileutils_windows.h
+++ b/Include/internal/pycore_fileutils_windows.h
@@ -75,6 +75,24 @@ static inline BOOL _Py_GetFileInformationByName(
return GetFileInformationByName(FileName, FileInformationClass, FileInfoBuffer, FileInfoBufferSize);
}
+static inline BOOL _Py_GetFileInformationByName_ErrorIsTrustworthy(int error)
+{
+ switch(error) {
+ case ERROR_FILE_NOT_FOUND:
+ case ERROR_PATH_NOT_FOUND:
+ case ERROR_NOT_READY:
+ case ERROR_BAD_NET_NAME:
+ case ERROR_BAD_NETPATH:
+ case ERROR_BAD_PATHNAME:
+ case ERROR_INVALID_NAME:
+ case ERROR_FILENAME_EXCED_RANGE:
+ return TRUE;
+ case ERROR_NOT_SUPPORTED:
+ return FALSE;
+ }
+ return FALSE;
+}
+
#endif
#endif
diff --git a/Include/internal/pycore_flowgraph.h b/Include/internal/pycore_flowgraph.h
index f470dad3aaa459..883334f4b182eb 100644
--- a/Include/internal/pycore_flowgraph.h
+++ b/Include/internal/pycore_flowgraph.h
@@ -11,7 +11,6 @@ extern "C" {
#include "pycore_opcode_utils.h"
#include "pycore_compile.h"
-static const _PyCompilerSrcLocation NO_LOCATION = {-1, -1, -1, -1};
typedef struct {
int i_opcode;
@@ -97,7 +96,6 @@ int _PyCfg_OptimizeCodeUnit(_PyCfgBuilder *g, PyObject *consts, PyObject *const_
int _PyCfg_Stackdepth(_PyCfgBasicblock *entryblock, int code_flags);
void _PyCfg_ConvertExceptionHandlersToNops(_PyCfgBasicblock *entryblock);
int _PyCfg_ResolveJumps(_PyCfgBuilder *g);
-int _PyCfg_InstrSize(_PyCfgInstruction *instruction);
static inline int
@@ -113,7 +111,7 @@ basicblock_nofallthrough(const _PyCfgBasicblock *b) {
PyCodeObject *
_PyAssemble_MakeCodeObject(_PyCompile_CodeUnitMetadata *u, PyObject *const_cache,
- PyObject *consts, int maxdepth, _PyCfgBasicblock *entryblock,
+ PyObject *consts, int maxdepth, _PyCompile_InstructionSequence *instrs,
int nlocalsplus, int code_flags, PyObject *filename);
#ifdef __cplusplus
diff --git a/Include/internal/pycore_frame.h b/Include/internal/pycore_frame.h
index 20d48d20362571..d8d7fe9ef2ebde 100644
--- a/Include/internal/pycore_frame.h
+++ b/Include/internal/pycore_frame.h
@@ -145,9 +145,9 @@ _PyFrame_GetLocalsArray(_PyInterpreterFrame *frame)
}
/* Fetches the stack pointer, and sets stacktop to -1.
- Having stacktop <= 0 ensures that invalid
- values are not visible to the cycle GC.
- We choose -1 rather than 0 to assist debugging. */
+ Having stacktop <= 0 ensures that invalid
+ values are not visible to the cycle GC.
+ We choose -1 rather than 0 to assist debugging. */
static inline PyObject**
_PyFrame_GetStackPointer(_PyInterpreterFrame *frame)
{
diff --git a/Include/internal/pycore_global_objects_fini_generated.h b/Include/internal/pycore_global_objects_fini_generated.h
index 14dfd9ea5823ed..4fa15d74b3ad64 100644
--- a/Include/internal/pycore_global_objects_fini_generated.h
+++ b/Include/internal/pycore_global_objects_fini_generated.h
@@ -8,15 +8,13 @@ extern "C" {
# error "this header requires Py_BUILD_CORE define"
#endif
-#include "pycore_object.h" // _PyObject_IMMORTAL_REFCNT
-
#ifdef Py_DEBUG
static inline void
_PyStaticObject_CheckRefcnt(PyObject *obj) {
- if (Py_REFCNT(obj) < _PyObject_IMMORTAL_REFCNT) {
+ if (Py_REFCNT(obj) < _Py_IMMORTAL_REFCNT) {
_PyObject_ASSERT_FAILED_MSG(obj,
"immortal object has less refcnt than expected "
- "_PyObject_IMMORTAL_REFCNT");
+ "_Py_IMMORTAL_REFCNT");
}
}
#endif
@@ -892,6 +890,7 @@ _PyStaticObjects_CheckRefcnt(PyInterpreterState *interp) {
_PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(end_lineno));
_PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(end_offset));
_PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(endpos));
+ _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(entrypoint));
_PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(env));
_PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(errors));
_PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(event));
diff --git a/Include/internal/pycore_global_strings.h b/Include/internal/pycore_global_strings.h
index 6f430bb25eb8d3..e19d8ff1b50468 100644
--- a/Include/internal/pycore_global_strings.h
+++ b/Include/internal/pycore_global_strings.h
@@ -378,6 +378,7 @@ struct _Py_global_strings {
STRUCT_FOR_ID(end_lineno)
STRUCT_FOR_ID(end_offset)
STRUCT_FOR_ID(endpos)
+ STRUCT_FOR_ID(entrypoint)
STRUCT_FOR_ID(env)
STRUCT_FOR_ID(errors)
STRUCT_FOR_ID(event)
diff --git a/Include/internal/pycore_import.h b/Include/internal/pycore_import.h
index 7a78a91aa617e6..0a9f24efbdb908 100644
--- a/Include/internal/pycore_import.h
+++ b/Include/internal/pycore_import.h
@@ -19,6 +19,8 @@ struct _import_runtime_state {
used exclusively for when the extensions dict is access/modified
from an arbitrary thread. */
PyThreadState main_tstate;
+ /* A lock to guard the dict. */
+ PyThread_type_lock mutex;
/* A dict mapping (filename, name) to PyModuleDef for modules.
Only legacy (single-phase init) extension modules are added
and only if they support multiple initialization (m_size >- 0)
diff --git a/Include/internal/pycore_interp.h b/Include/internal/pycore_interp.h
index 86ae3d8dfc1860..7276ce35ba68f0 100644
--- a/Include/internal/pycore_interp.h
+++ b/Include/internal/pycore_interp.h
@@ -23,11 +23,12 @@ extern "C" {
#include "pycore_function.h" // FUNC_MAX_WATCHERS
#include "pycore_genobject.h" // struct _Py_async_gen_state
#include "pycore_gc.h" // struct _gc_runtime_state
+#include "pycore_global_objects.h" // struct _Py_interp_static_objects
#include "pycore_import.h" // struct _import_state
#include "pycore_instruments.h" // PY_MONITORING_EVENTS
#include "pycore_list.h" // struct _Py_list_state
-#include "pycore_global_objects.h" // struct _Py_interp_static_objects
#include "pycore_object_state.h" // struct _py_object_state
+#include "pycore_obmalloc.h" // struct obmalloc_state
#include "pycore_tuple.h" // struct _Py_tuple_state
#include "pycore_typeobject.h" // struct type_cache
#include "pycore_unicodeobject.h" // struct _Py_unicode_state
@@ -82,6 +83,8 @@ struct _is {
int _initialized;
int finalizing;
+ struct _obmalloc_state obmalloc;
+
struct _ceval_state ceval;
struct _gc_runtime_state gc;
diff --git a/Include/internal/pycore_long.h b/Include/internal/pycore_long.h
index 137a0465d5ec60..fe86581e81f6b5 100644
--- a/Include/internal/pycore_long.h
+++ b/Include/internal/pycore_long.h
@@ -245,7 +245,7 @@ _PyLong_FlipSign(PyLongObject *op) {
#define _PyLong_DIGIT_INIT(val) \
{ \
- .ob_base = _PyObject_IMMORTAL_INIT(&PyLong_Type), \
+ .ob_base = _PyObject_HEAD_INIT(&PyLong_Type) \
.long_value = { \
.lv_tag = TAG_FROM_SIGN_AND_SIZE( \
(val) == 0 ? 0 : ((val) < 0 ? -1 : 1), \
diff --git a/Include/internal/pycore_object.h b/Include/internal/pycore_object.h
index b3d496ed6fc240..2ca047846e0935 100644
--- a/Include/internal/pycore_object.h
+++ b/Include/internal/pycore_object.h
@@ -14,21 +14,25 @@ extern "C" {
#include "pycore_pystate.h" // _PyInterpreterState_GET()
#include "pycore_runtime.h" // _PyRuntime
-/* This value provides *effective* immortality, meaning the object should never
- be deallocated (until runtime finalization). See PEP 683 for more details about
- immortality, as well as a proposed mechanism for proper immortality. */
-#define _PyObject_IMMORTAL_REFCNT 999999999
-
-#define _PyObject_IMMORTAL_INIT(type) \
- { \
- .ob_refcnt = _PyObject_IMMORTAL_REFCNT, \
- .ob_type = (type), \
- }
-#define _PyVarObject_IMMORTAL_INIT(type, size) \
- { \
- .ob_base = _PyObject_IMMORTAL_INIT(type), \
- .ob_size = size, \
- }
+/* We need to maintain an internal copy of Py{Var}Object_HEAD_INIT to avoid
+ designated initializer conflicts in C++20. If we use the deinition in
+ object.h, we will be mixing designated and non-designated initializers in
+ pycore objects which is forbiddent in C++20. However, if we then use
+ designated initializers in object.h then Extensions without designated break.
+ Furthermore, we can't use designated initializers in Extensions since these
+ are not supported pre-C++20. Thus, keeping an internal copy here is the most
+ backwards compatible solution */
+#define _PyObject_HEAD_INIT(type) \
+ { \
+ _PyObject_EXTRA_INIT \
+ .ob_refcnt = _Py_IMMORTAL_REFCNT, \
+ .ob_type = (type) \
+ },
+#define _PyVarObject_HEAD_INIT(type, size) \
+ { \
+ .ob_base = _PyObject_HEAD_INIT(type) \
+ .ob_size = size \
+ },
PyAPI_FUNC(void) _Py_NO_RETURN _Py_FatalRefcountErrorFunc(
const char *func,
@@ -61,9 +65,20 @@ static inline void _Py_RefcntAdd(PyObject* op, Py_ssize_t n)
}
#define _Py_RefcntAdd(op, n) _Py_RefcntAdd(_PyObject_CAST(op), n)
+static inline void _Py_SetImmortal(PyObject *op)
+{
+ if (op) {
+ op->ob_refcnt = _Py_IMMORTAL_REFCNT;
+ }
+}
+#define _Py_SetImmortal(op) _Py_SetImmortal(_PyObject_CAST(op))
+
static inline void
_Py_DECREF_SPECIALIZED(PyObject *op, const destructor destruct)
{
+ if (_Py_IsImmortal(op)) {
+ return;
+ }
_Py_DECREF_STAT_INC();
#ifdef Py_REF_DEBUG
_Py_DEC_REFTOTAL(_PyInterpreterState_GET());
@@ -82,6 +97,9 @@ _Py_DECREF_SPECIALIZED(PyObject *op, const destructor destruct)
static inline void
_Py_DECREF_NO_DEALLOC(PyObject *op)
{
+ if (_Py_IsImmortal(op)) {
+ return;
+ }
_Py_DECREF_STAT_INC();
#ifdef Py_REF_DEBUG
_Py_DEC_REFTOTAL(_PyInterpreterState_GET());
diff --git a/Include/internal/pycore_obmalloc.h b/Include/internal/pycore_obmalloc.h
index a5c7f4528f9126..ca2a0419b4f038 100644
--- a/Include/internal/pycore_obmalloc.h
+++ b/Include/internal/pycore_obmalloc.h
@@ -657,8 +657,12 @@ struct _obmalloc_usage {
#endif /* WITH_PYMALLOC_RADIX_TREE */
-struct _obmalloc_state {
+struct _obmalloc_global_state {
int dump_debug_stats;
+ Py_ssize_t interpreter_leaks;
+};
+
+struct _obmalloc_state {
struct _obmalloc_pools pools;
struct _obmalloc_mgmt mgmt;
struct _obmalloc_usage usage;
@@ -675,7 +679,11 @@ void _PyObject_VirtualFree(void *, size_t size);
/* This function returns the number of allocated memory blocks, regardless of size */
-PyAPI_FUNC(Py_ssize_t) _Py_GetAllocatedBlocks(void);
+extern Py_ssize_t _Py_GetGlobalAllocatedBlocks(void);
+#define _Py_GetAllocatedBlocks() \
+ _Py_GetGlobalAllocatedBlocks()
+extern Py_ssize_t _PyInterpreterState_GetAllocatedBlocks(PyInterpreterState *);
+extern void _PyInterpreterState_FinalizeAllocatedBlocks(PyInterpreterState *);
#ifdef WITH_PYMALLOC
diff --git a/Include/internal/pycore_obmalloc_init.h b/Include/internal/pycore_obmalloc_init.h
index c9f197e72de9f5..8ee72ff2d4126f 100644
--- a/Include/internal/pycore_obmalloc_init.h
+++ b/Include/internal/pycore_obmalloc_init.h
@@ -54,9 +54,13 @@ extern "C" {
# error "NB_SMALL_SIZE_CLASSES should be less than 64"
#endif
-#define _obmalloc_state_INIT(obmalloc) \
+#define _obmalloc_global_state_INIT \
{ \
.dump_debug_stats = -1, \
+ }
+
+#define _obmalloc_state_INIT(obmalloc) \
+ { \
.pools = { \
.used = _obmalloc_pools_INIT(obmalloc.pools), \
}, \
diff --git a/Include/internal/pycore_opcode.h b/Include/internal/pycore_opcode.h
index c039d712dc0ba1..a82885463ab2e9 100644
--- a/Include/internal/pycore_opcode.h
+++ b/Include/internal/pycore_opcode.h
@@ -42,6 +42,7 @@ const uint8_t _PyOpcode_Caches[256] = {
[LOAD_GLOBAL] = 4,
[BINARY_OP] = 1,
[SEND] = 1,
+ [LOAD_SUPER_ATTR] = 9,
[CALL] = 3,
};
@@ -179,6 +180,8 @@ const uint8_t _PyOpcode_Deopt[256] = {
[LOAD_GLOBAL_BUILTIN] = LOAD_GLOBAL,
[LOAD_GLOBAL_MODULE] = LOAD_GLOBAL,
[LOAD_NAME] = LOAD_NAME,
+ [LOAD_SUPER_ATTR] = LOAD_SUPER_ATTR,
+ [LOAD_SUPER_ATTR_METHOD] = LOAD_SUPER_ATTR,
[MAKE_CELL] = MAKE_CELL,
[MAKE_FUNCTION] = MAKE_FUNCTION,
[MAP_ADD] = MAP_ADD,
@@ -236,7 +239,7 @@ const uint8_t _PyOpcode_Deopt[256] = {
#endif // NEED_OPCODE_TABLES
#ifdef Py_DEBUG
-static const char *const _PyOpcode_OpName[263] = {
+static const char *const _PyOpcode_OpName[266] = {
[CACHE] = "CACHE",
[POP_TOP] = "POP_TOP",
[PUSH_NULL] = "PUSH_NULL",
@@ -303,29 +306,29 @@ static const char *const _PyOpcode_OpName[263] = {
[FOR_ITER_TUPLE] = "FOR_ITER_TUPLE",
[FOR_ITER_RANGE] = "FOR_ITER_RANGE",
[FOR_ITER_GEN] = "FOR_ITER_GEN",
+ [LOAD_SUPER_ATTR_METHOD] = "LOAD_SUPER_ATTR_METHOD",
[LOAD_ATTR_CLASS] = "LOAD_ATTR_CLASS",
- [LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN] = "LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN",
[GET_ITER] = "GET_ITER",
[GET_YIELD_FROM_ITER] = "GET_YIELD_FROM_ITER",
- [LOAD_ATTR_INSTANCE_VALUE] = "LOAD_ATTR_INSTANCE_VALUE",
+ [LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN] = "LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN",
[LOAD_BUILD_CLASS] = "LOAD_BUILD_CLASS",
+ [LOAD_ATTR_INSTANCE_VALUE] = "LOAD_ATTR_INSTANCE_VALUE",
[LOAD_ATTR_MODULE] = "LOAD_ATTR_MODULE",
- [LOAD_ATTR_PROPERTY] = "LOAD_ATTR_PROPERTY",
[LOAD_ASSERTION_ERROR] = "LOAD_ASSERTION_ERROR",
[RETURN_GENERATOR] = "RETURN_GENERATOR",
+ [LOAD_ATTR_PROPERTY] = "LOAD_ATTR_PROPERTY",
[LOAD_ATTR_SLOT] = "LOAD_ATTR_SLOT",
[LOAD_ATTR_WITH_HINT] = "LOAD_ATTR_WITH_HINT",
[LOAD_ATTR_METHOD_LAZY_DICT] = "LOAD_ATTR_METHOD_LAZY_DICT",
[LOAD_ATTR_METHOD_NO_DICT] = "LOAD_ATTR_METHOD_NO_DICT",
[LOAD_ATTR_METHOD_WITH_VALUES] = "LOAD_ATTR_METHOD_WITH_VALUES",
[LOAD_CONST__LOAD_FAST] = "LOAD_CONST__LOAD_FAST",
- [LOAD_FAST__LOAD_CONST] = "LOAD_FAST__LOAD_CONST",
[RETURN_VALUE] = "RETURN_VALUE",
- [LOAD_FAST__LOAD_FAST] = "LOAD_FAST__LOAD_FAST",
+ [LOAD_FAST__LOAD_CONST] = "LOAD_FAST__LOAD_CONST",
[SETUP_ANNOTATIONS] = "SETUP_ANNOTATIONS",
+ [LOAD_FAST__LOAD_FAST] = "LOAD_FAST__LOAD_FAST",
[LOAD_GLOBAL_BUILTIN] = "LOAD_GLOBAL_BUILTIN",
[LOAD_GLOBAL_MODULE] = "LOAD_GLOBAL_MODULE",
- [STORE_ATTR_INSTANCE_VALUE] = "STORE_ATTR_INSTANCE_VALUE",
[POP_EXCEPT] = "POP_EXCEPT",
[STORE_NAME] = "STORE_NAME",
[DELETE_NAME] = "DELETE_NAME",
@@ -348,9 +351,9 @@ static const char *const _PyOpcode_OpName[263] = {
[IMPORT_NAME] = "IMPORT_NAME",
[IMPORT_FROM] = "IMPORT_FROM",
[JUMP_FORWARD] = "JUMP_FORWARD",
+ [STORE_ATTR_INSTANCE_VALUE] = "STORE_ATTR_INSTANCE_VALUE",
[STORE_ATTR_SLOT] = "STORE_ATTR_SLOT",
[STORE_ATTR_WITH_HINT] = "STORE_ATTR_WITH_HINT",
- [STORE_FAST__LOAD_FAST] = "STORE_FAST__LOAD_FAST",
[POP_JUMP_IF_FALSE] = "POP_JUMP_IF_FALSE",
[POP_JUMP_IF_TRUE] = "POP_JUMP_IF_TRUE",
[LOAD_GLOBAL] = "LOAD_GLOBAL",
@@ -378,9 +381,9 @@ static const char *const _PyOpcode_OpName[263] = {
[STORE_DEREF] = "STORE_DEREF",
[DELETE_DEREF] = "DELETE_DEREF",
[JUMP_BACKWARD] = "JUMP_BACKWARD",
- [STORE_FAST__STORE_FAST] = "STORE_FAST__STORE_FAST",
+ [LOAD_SUPER_ATTR] = "LOAD_SUPER_ATTR",
[CALL_FUNCTION_EX] = "CALL_FUNCTION_EX",
- [STORE_SUBSCR_DICT] = "STORE_SUBSCR_DICT",
+ [STORE_FAST__LOAD_FAST] = "STORE_FAST__LOAD_FAST",
[EXTENDED_ARG] = "EXTENDED_ARG",
[LIST_APPEND] = "LIST_APPEND",
[SET_ADD] = "SET_ADD",
@@ -390,20 +393,20 @@ static const char *const _PyOpcode_OpName[263] = {
[YIELD_VALUE] = "YIELD_VALUE",
[RESUME] = "RESUME",
[MATCH_CLASS] = "MATCH_CLASS",
- [STORE_SUBSCR_LIST_INT] = "STORE_SUBSCR_LIST_INT",
- [UNPACK_SEQUENCE_LIST] = "UNPACK_SEQUENCE_LIST",
+ [STORE_FAST__STORE_FAST] = "STORE_FAST__STORE_FAST",
+ [STORE_SUBSCR_DICT] = "STORE_SUBSCR_DICT",
[FORMAT_VALUE] = "FORMAT_VALUE",
[BUILD_CONST_KEY_MAP] = "BUILD_CONST_KEY_MAP",
[BUILD_STRING] = "BUILD_STRING",
+ [STORE_SUBSCR_LIST_INT] = "STORE_SUBSCR_LIST_INT",
+ [UNPACK_SEQUENCE_LIST] = "UNPACK_SEQUENCE_LIST",
[UNPACK_SEQUENCE_TUPLE] = "UNPACK_SEQUENCE_TUPLE",
[UNPACK_SEQUENCE_TWO_TUPLE] = "UNPACK_SEQUENCE_TWO_TUPLE",
- [SEND_GEN] = "SEND_GEN",
- [161] = "<161>",
[LIST_EXTEND] = "LIST_EXTEND",
[SET_UPDATE] = "SET_UPDATE",
[DICT_MERGE] = "DICT_MERGE",
[DICT_UPDATE] = "DICT_UPDATE",
- [166] = "<166>",
+ [SEND_GEN] = "SEND_GEN",
[167] = "<167>",
[168] = "<168>",
[169] = "<169>",
@@ -500,12 +503,13 @@ static const char *const _PyOpcode_OpName[263] = {
[JUMP] = "JUMP",
[JUMP_NO_INTERRUPT] = "JUMP_NO_INTERRUPT",
[LOAD_METHOD] = "LOAD_METHOD",
+ [LOAD_SUPER_METHOD] = "LOAD_SUPER_METHOD",
+ [LOAD_ZERO_SUPER_METHOD] = "LOAD_ZERO_SUPER_METHOD",
+ [LOAD_ZERO_SUPER_ATTR] = "LOAD_ZERO_SUPER_ATTR",
};
#endif
#define EXTRA_CASES \
- case 161: \
- case 166: \
case 167: \
case 168: \
case 169: \
diff --git a/Include/internal/pycore_pylifecycle.h b/Include/internal/pycore_pylifecycle.h
index a899e848bb8b3c..f96261a650dac7 100644
--- a/Include/internal/pycore_pylifecycle.h
+++ b/Include/internal/pycore_pylifecycle.h
@@ -64,6 +64,7 @@ extern void _PyAtExit_Fini(PyInterpreterState *interp);
extern void _PyThread_FiniType(PyInterpreterState *interp);
extern void _Py_Deepfreeze_Fini(void);
extern void _PyArg_Fini(void);
+extern void _Py_FinalizeAllocatedBlocks(_PyRuntimeState *);
extern PyStatus _PyGILState_Init(PyInterpreterState *interp);
extern PyStatus _PyGILState_SetTstate(PyThreadState *tstate);
diff --git a/Include/internal/pycore_pystate.h b/Include/internal/pycore_pystate.h
index 6e5f2289cb6b95..180ea676bc22eb 100644
--- a/Include/internal/pycore_pystate.h
+++ b/Include/internal/pycore_pystate.h
@@ -33,6 +33,13 @@ _Py_IsMainInterpreter(PyInterpreterState *interp)
return (interp == _PyInterpreterState_Main());
}
+static inline int
+_Py_IsMainInterpreterFinalizing(PyInterpreterState *interp)
+{
+ return (_PyRuntimeState_GetFinalizing(interp->runtime) != NULL &&
+ interp == &interp->runtime->_main_interpreter);
+}
+
static inline const PyConfig *
_Py_GetMainConfig(void)
@@ -64,17 +71,14 @@ _Py_ThreadCanHandlePendingCalls(void)
/* Variable and macro for in-line access to current thread
and interpreter state */
-static inline PyThreadState*
-_PyRuntimeState_GetThreadState(_PyRuntimeState *runtime)
-{
- return (PyThreadState*)_Py_atomic_load_relaxed(&runtime->tstate_current);
-}
+#if defined(HAVE_THREAD_LOCAL) && !defined(Py_BUILD_CORE_MODULE)
+extern _Py_thread_local PyThreadState *_Py_tss_tstate;
+#endif
+PyAPI_DATA(PyThreadState *) _PyThreadState_GetCurrent(void);
/* Get the current Python thread state.
- Efficient macro reading directly the 'tstate_current' atomic
- variable. The macro is unsafe: it does not check for error and it can
- return NULL.
+ This function is unsafe: it does not check for error and it can return NULL.
The caller must hold the GIL.
@@ -82,9 +86,20 @@ _PyRuntimeState_GetThreadState(_PyRuntimeState *runtime)
static inline PyThreadState*
_PyThreadState_GET(void)
{
- return _PyRuntimeState_GetThreadState(&_PyRuntime);
+#if defined(HAVE_THREAD_LOCAL) && !defined(Py_BUILD_CORE_MODULE)
+ return _Py_tss_tstate;
+#else
+ return _PyThreadState_GetCurrent();
+#endif
}
+static inline PyThreadState*
+_PyRuntimeState_GetThreadState(_PyRuntimeState *Py_UNUSED(runtime))
+{
+ return _PyThreadState_GET();
+}
+
+
static inline void
_Py_EnsureFuncTstateNotNULL(const char *func, PyThreadState *tstate)
{
diff --git a/Include/internal/pycore_runtime.h b/Include/internal/pycore_runtime.h
index 3ebe49926edda6..d1b165d0ab9c38 100644
--- a/Include/internal/pycore_runtime.h
+++ b/Include/internal/pycore_runtime.h
@@ -21,10 +21,10 @@ extern "C" {
#include "pycore_pymem.h" // struct _pymem_allocators
#include "pycore_pyhash.h" // struct pyhash_runtime_state
#include "pycore_pythread.h" // struct _pythread_runtime_state
-#include "pycore_obmalloc.h" // struct obmalloc_state
#include "pycore_signal.h" // struct _signals_runtime_state
#include "pycore_time.h" // struct _time_runtime_state
#include "pycore_tracemalloc.h" // struct _tracemalloc_runtime_state
+#include "pycore_typeobject.h" // struct types_runtime_state
#include "pycore_unicodeobject.h" // struct _Py_unicode_runtime_ids
struct _getargs_runtime_state {
@@ -87,7 +87,7 @@ typedef struct pyruntimestate {
_Py_atomic_address _finalizing;
struct _pymem_allocators allocators;
- struct _obmalloc_state obmalloc;
+ struct _obmalloc_global_state obmalloc;
struct pyhash_runtime_state pyhash_state;
struct _time_runtime_state time;
struct _pythread_runtime_state threads;
@@ -119,9 +119,6 @@ typedef struct pyruntimestate {
unsigned long main_thread;
- /* Assuming the current thread holds the GIL, this is the
- PyThreadState for the current thread. */
- _Py_atomic_address tstate_current;
/* Used for the thread state bound to the current thread. */
Py_tss_t autoTSSkey;
@@ -153,13 +150,7 @@ typedef struct pyruntimestate {
struct _py_object_runtime_state object_state;
struct _Py_float_runtime_state float_state;
struct _Py_unicode_runtime_state unicode_state;
-
- struct {
- /* Used to set PyTypeObject.tp_version_tag */
- // bpo-42745: next_version_tag remains shared by all interpreters
- // because of static types.
- unsigned int next_version_tag;
- } types;
+ struct _types_runtime_state types;
/* All the objects that are shared by the runtime's interpreters. */
struct _Py_static_objects static_objects;
diff --git a/Include/internal/pycore_runtime_init.h b/Include/internal/pycore_runtime_init.h
index 5b09a45e41cd84..a48461c0742872 100644
--- a/Include/internal/pycore_runtime_init.h
+++ b/Include/internal/pycore_runtime_init.h
@@ -29,7 +29,7 @@ extern PyTypeObject _PyExc_MemoryError;
_pymem_allocators_debug_INIT, \
_pymem_allocators_obj_arena_INIT, \
}, \
- .obmalloc = _obmalloc_state_INIT(runtime.obmalloc), \
+ .obmalloc = _obmalloc_global_state_INIT, \
.pyhash_state = pyhash_state_INIT, \
.signals = _signals_RUNTIME_INIT, \
.interpreters = { \
@@ -76,13 +76,13 @@ extern PyTypeObject _PyExc_MemoryError;
.latin1 = _Py_str_latin1_INIT, \
}, \
.tuple_empty = { \
- .ob_base = _PyVarObject_IMMORTAL_INIT(&PyTuple_Type, 0) \
+ .ob_base = _PyVarObject_HEAD_INIT(&PyTuple_Type, 0) \
}, \
.hamt_bitmap_node_empty = { \
- .ob_base = _PyVarObject_IMMORTAL_INIT(&_PyHamt_BitmapNode_Type, 0) \
+ .ob_base = _PyVarObject_HEAD_INIT(&_PyHamt_BitmapNode_Type, 0) \
}, \
.context_token_missing = { \
- .ob_base = _PyObject_IMMORTAL_INIT(&_PyContextTokenMissing_Type), \
+ .ob_base = _PyObject_HEAD_INIT(&_PyContextTokenMissing_Type) \
}, \
}, \
}, \
@@ -93,6 +93,7 @@ extern PyTypeObject _PyExc_MemoryError;
{ \
.id_refcount = -1, \
.imports = IMPORTS_INIT, \
+ .obmalloc = _obmalloc_state_INIT(INTERP.obmalloc), \
.ceval = { \
.recursion_limit = Py_DEFAULT_RECURSION_LIMIT, \
}, \
@@ -112,15 +113,18 @@ extern PyTypeObject _PyExc_MemoryError;
.func_state = { \
.next_version = 1, \
}, \
+ .types = { \
+ .next_version_tag = _Py_TYPE_BASE_VERSION_TAG, \
+ }, \
.static_objects = { \
.singletons = { \
._not_used = 1, \
.hamt_empty = { \
- .ob_base = _PyObject_IMMORTAL_INIT(&_PyHamt_Type), \
+ .ob_base = _PyObject_HEAD_INIT(&_PyHamt_Type) \
.h_root = (PyHamtNode*)&_Py_SINGLETON(hamt_bitmap_node_empty), \
}, \
.last_resort_memory_error = { \
- _PyObject_IMMORTAL_INIT(&_PyExc_MemoryError), \
+ _PyObject_HEAD_INIT(&_PyExc_MemoryError) \
}, \
}, \
}, \
@@ -138,7 +142,7 @@ extern PyTypeObject _PyExc_MemoryError;
#define _PyBytes_SIMPLE_INIT(CH, LEN) \
{ \
- _PyVarObject_IMMORTAL_INIT(&PyBytes_Type, (LEN)), \
+ _PyVarObject_HEAD_INIT(&PyBytes_Type, (LEN)) \
.ob_shash = -1, \
.ob_sval = { (CH) }, \
}
@@ -149,7 +153,7 @@ extern PyTypeObject _PyExc_MemoryError;
#define _PyUnicode_ASCII_BASE_INIT(LITERAL, ASCII) \
{ \
- .ob_base = _PyObject_IMMORTAL_INIT(&PyUnicode_Type), \
+ .ob_base = _PyObject_HEAD_INIT(&PyUnicode_Type) \
.length = sizeof(LITERAL) - 1, \
.hash = -1, \
.state = { \
diff --git a/Include/internal/pycore_runtime_init_generated.h b/Include/internal/pycore_runtime_init_generated.h
index 0452c4c61551de..42c4874d9466bf 100644
--- a/Include/internal/pycore_runtime_init_generated.h
+++ b/Include/internal/pycore_runtime_init_generated.h
@@ -884,6 +884,7 @@ extern "C" {
INIT_ID(end_lineno), \
INIT_ID(end_offset), \
INIT_ID(endpos), \
+ INIT_ID(entrypoint), \
INIT_ID(env), \
INIT_ID(errors), \
INIT_ID(event), \
diff --git a/Include/internal/pycore_token.h b/Include/internal/pycore_token.h
index 95459ab9f7d004..b9df8766736adf 100644
--- a/Include/internal/pycore_token.h
+++ b/Include/internal/pycore_token.h
@@ -67,14 +67,18 @@ extern "C" {
#define RARROW 51
#define ELLIPSIS 52
#define COLONEQUAL 53
-#define OP 54
-#define AWAIT 55
-#define ASYNC 56
-#define TYPE_IGNORE 57
-#define TYPE_COMMENT 58
-#define SOFT_KEYWORD 59
-#define ERRORTOKEN 60
-#define N_TOKENS 64
+#define EXCLAMATION 54
+#define OP 55
+#define AWAIT 56
+#define ASYNC 57
+#define TYPE_IGNORE 58
+#define TYPE_COMMENT 59
+#define SOFT_KEYWORD 60
+#define FSTRING_START 61
+#define FSTRING_MIDDLE 62
+#define FSTRING_END 63
+#define ERRORTOKEN 64
+#define N_TOKENS 68
#define NT_OFFSET 256
/* Special definitions for cooperation with parser */
@@ -86,6 +90,8 @@ extern "C" {
(x) == NEWLINE || \
(x) == INDENT || \
(x) == DEDENT)
+#define ISSTRINGLIT(x) ((x) == STRING || \
+ (x) == FSTRING_MIDDLE)
// Symbols exported for test_peg_generator
diff --git a/Include/internal/pycore_tuple.h b/Include/internal/pycore_tuple.h
index edc70843b57531..335edad89792c3 100644
--- a/Include/internal/pycore_tuple.h
+++ b/Include/internal/pycore_tuple.h
@@ -14,7 +14,6 @@ extern "C" {
/* runtime lifecycle */
extern PyStatus _PyTuple_InitGlobalObjects(PyInterpreterState *);
-extern PyStatus _PyTuple_InitTypes(PyInterpreterState *);
extern void _PyTuple_Fini(PyInterpreterState *);
diff --git a/Include/internal/pycore_typeobject.h b/Include/internal/pycore_typeobject.h
index cc5ce2875101ea..76253fd5fd864c 100644
--- a/Include/internal/pycore_typeobject.h
+++ b/Include/internal/pycore_typeobject.h
@@ -11,22 +11,17 @@ extern "C" {
#endif
-/* runtime lifecycle */
+/* state */
-extern PyStatus _PyTypes_InitTypes(PyInterpreterState *);
-extern void _PyTypes_FiniTypes(PyInterpreterState *);
-extern void _PyTypes_Fini(PyInterpreterState *);
-
-
-/* other API */
-
-/* Length of array of slotdef pointers used to store slots with the
- same __name__. There should be at most MAX_EQUIV-1 slotdef entries with
- the same __name__, for any __name__. Since that's a static property, it is
- appropriate to declare fixed-size arrays for this. */
-#define MAX_EQUIV 10
+#define _Py_TYPE_BASE_VERSION_TAG (2<<16)
+#define _Py_MAX_GLOBAL_TYPE_VERSION_TAG (_Py_TYPE_BASE_VERSION_TAG - 1)
-typedef struct wrapperbase pytype_slotdef;
+struct _types_runtime_state {
+ /* Used to set PyTypeObject.tp_version_tag for core static types. */
+ // bpo-42745: next_version_tag remains shared by all interpreters
+ // because of static types.
+ unsigned int next_version_tag;
+};
// Type attribute lookup cache: speed up attribute and method lookups,
@@ -57,6 +52,36 @@ typedef struct {
PyObject *tp_weaklist;
} static_builtin_state;
+struct types_state {
+ /* Used to set PyTypeObject.tp_version_tag.
+ It starts at _Py_MAX_GLOBAL_TYPE_VERSION_TAG + 1,
+ where all those lower numbers are used for core static types. */
+ unsigned int next_version_tag;
+
+ struct type_cache type_cache;
+ size_t num_builtins_initialized;
+ static_builtin_state builtins[_Py_MAX_STATIC_BUILTIN_TYPES];
+};
+
+
+/* runtime lifecycle */
+
+extern PyStatus _PyTypes_InitTypes(PyInterpreterState *);
+extern void _PyTypes_FiniTypes(PyInterpreterState *);
+extern void _PyTypes_Fini(PyInterpreterState *);
+
+
+/* other API */
+
+/* Length of array of slotdef pointers used to store slots with the
+ same __name__. There should be at most MAX_EQUIV-1 slotdef entries with
+ the same __name__, for any __name__. Since that's a static property, it is
+ appropriate to declare fixed-size arrays for this. */
+#define MAX_EQUIV 10
+
+typedef struct wrapperbase pytype_slotdef;
+
+
static inline PyObject **
_PyStaticType_GET_WEAKREFS_LISTPTR(static_builtin_state *state)
{
@@ -78,12 +103,6 @@ _PyType_GetModuleState(PyTypeObject *type)
return mod->md_state;
}
-struct types_state {
- struct type_cache type_cache;
- size_t num_builtins_initialized;
- static_builtin_state builtins[_Py_MAX_STATIC_BUILTIN_TYPES];
-};
-
extern int _PyStaticType_InitBuiltin(PyTypeObject *type);
extern static_builtin_state * _PyStaticType_GetState(PyTypeObject *);
@@ -98,6 +117,11 @@ _Py_type_getattro(PyTypeObject *type, PyObject *name);
PyObject *_Py_slot_tp_getattro(PyObject *self, PyObject *name);
PyObject *_Py_slot_tp_getattr_hook(PyObject *self, PyObject *name);
+PyObject *
+_PySuper_Lookup(PyTypeObject *su_type, PyObject *su_obj, PyObject *name, int *meth_found);
+PyObject *
+_PySuper_LookupDescr(PyTypeObject *su_type, PyObject *su_obj, PyObject *name);
+
#ifdef __cplusplus
}
#endif
diff --git a/Include/internal/pycore_unicodeobject.h b/Include/internal/pycore_unicodeobject.h
index ff97b9a623d210..1bb0f366e78163 100644
--- a/Include/internal/pycore_unicodeobject.h
+++ b/Include/internal/pycore_unicodeobject.h
@@ -12,6 +12,7 @@ extern "C" {
#include "pycore_ucnhash.h" // _PyUnicode_Name_CAPI
void _PyUnicode_ExactDealloc(PyObject *op);
+Py_ssize_t _PyUnicode_InternedSize(void);
/* runtime lifecycle */
diff --git a/Include/internal/pycore_unicodeobject_generated.h b/Include/internal/pycore_unicodeobject_generated.h
index 7114a5416f2515..6d9cd24d9f3a13 100644
--- a/Include/internal/pycore_unicodeobject_generated.h
+++ b/Include/internal/pycore_unicodeobject_generated.h
@@ -987,6 +987,9 @@ _PyUnicode_InitStaticStrings(PyInterpreterState *interp) {
string = &_Py_ID(endpos);
assert(_PyUnicode_CheckConsistency(string, 1));
_PyUnicode_InternInPlace(interp, &string);
+ string = &_Py_ID(entrypoint);
+ assert(_PyUnicode_CheckConsistency(string, 1));
+ _PyUnicode_InternInPlace(interp, &string);
string = &_Py_ID(env);
assert(_PyUnicode_CheckConsistency(string, 1));
_PyUnicode_InternInPlace(interp, &string);
diff --git a/Include/object.h b/Include/object.h
index 2943a6066818cd..66c3df0d7f780a 100644
--- a/Include/object.h
+++ b/Include/object.h
@@ -78,12 +78,76 @@ whose size is determined when the object is allocated.
/* PyObject_HEAD defines the initial segment of every PyObject. */
#define PyObject_HEAD PyObject ob_base;
-#define PyObject_HEAD_INIT(type) \
- { _PyObject_EXTRA_INIT \
- 1, (type) },
+/*
+Immortalization:
+
+The following indicates the immortalization strategy depending on the amount
+of available bits in the reference count field. All strategies are backwards
+compatible but the specific reference count value or immortalization check
+might change depending on the specializations for the underlying system.
+
+Proper deallocation of immortal instances requires distinguishing between
+statically allocated immortal instances vs those promoted by the runtime to be
+immortal. The latter should be the only instances that require
+cleanup during runtime finalization.
+*/
+
+#if SIZEOF_VOID_P > 4
+/*
+In 64+ bit systems, an object will be marked as immortal by setting all of the
+lower 32 bits of the reference count field, which is equal to: 0xFFFFFFFF
+
+Using the lower 32 bits makes the value backwards compatible by allowing
+C-Extensions without the updated checks in Py_INCREF and Py_DECREF to safely
+increase and decrease the object's reference count. The object would lose its
+immortality, but the execution would still be correct.
+
+Reference count increases will use saturated arithmetic, taking advantage of
+having all the lower 32 bits set, which will prevent the reference count from
+going beyond the refcount limit. Immortality checks for reference count
+decreases will be done by checking the sign bit of the lower 32 bits.
+*/
+#define _Py_IMMORTAL_REFCNT UINT_MAX
+
+#else
+/*
+In 32 bit systems, an object will be marked as immortal by setting all of the
+lower 30 bits of the reference count field, which is equal to: 0x3FFFFFFF
-#define PyVarObject_HEAD_INIT(type, size) \
- { PyObject_HEAD_INIT(type) (size) },
+Using the lower 30 bits makes the value backwards compatible by allowing
+C-Extensions without the updated checks in Py_INCREF and Py_DECREF to safely
+increase and decrease the object's reference count. The object would lose its
+immortality, but the execution would still be correct.
+
+Reference count increases and decreases will first go through an immortality
+check by comparing the reference count field to the immortality reference count.
+*/
+#define _Py_IMMORTAL_REFCNT (UINT_MAX >> 2)
+#endif
+
+// Make all internal uses of PyObject_HEAD_INIT immortal while preserving the
+// C-API expectation that the refcnt will be set to 1.
+#ifdef Py_BUILD_CORE
+#define PyObject_HEAD_INIT(type) \
+ { \
+ _PyObject_EXTRA_INIT \
+ { _Py_IMMORTAL_REFCNT }, \
+ (type) \
+ },
+#else
+#define PyObject_HEAD_INIT(type) \
+ { \
+ _PyObject_EXTRA_INIT \
+ { 1 }, \
+ (type) \
+ },
+#endif /* Py_BUILD_CORE */
+
+#define PyVarObject_HEAD_INIT(type, size) \
+ { \
+ PyObject_HEAD_INIT(type) \
+ (size) \
+ },
/* PyObject_VAR_HEAD defines the initial segment of all variable-size
* container objects. These end with a declaration of an array with 1
@@ -101,7 +165,12 @@ whose size is determined when the object is allocated.
*/
struct _object {
_PyObject_HEAD_EXTRA
- Py_ssize_t ob_refcnt;
+ union {
+ Py_ssize_t ob_refcnt;
+#if SIZEOF_VOID_P > 4
+ PY_UINT32_T ob_refcnt_split[2];
+#endif
+ };
PyTypeObject *ob_type;
};
@@ -152,6 +221,15 @@ static inline Py_ssize_t Py_SIZE(PyObject *ob) {
# define Py_SIZE(ob) Py_SIZE(_PyObject_CAST(ob))
#endif
+static inline Py_ALWAYS_INLINE int _Py_IsImmortal(PyObject *op)
+{
+#if SIZEOF_VOID_P > 4
+ return _Py_CAST(PY_INT32_T, op->ob_refcnt) < 0;
+#else
+ return op->ob_refcnt == _Py_IMMORTAL_REFCNT;
+#endif
+}
+#define _Py_IsImmortal(op) _Py_IsImmortal(_PyObject_CAST(op))
static inline int Py_IS_TYPE(PyObject *ob, PyTypeObject *type) {
return Py_TYPE(ob) == type;
@@ -162,6 +240,13 @@ static inline int Py_IS_TYPE(PyObject *ob, PyTypeObject *type) {
static inline void Py_SET_REFCNT(PyObject *ob, Py_ssize_t refcnt) {
+ // This immortal check is for code that is unaware of immortal objects.
+ // The runtime tracks these objects and we should avoid as much
+ // as possible having extensions inadvertently change the refcnt
+ // of an immortalized object.
+ if (_Py_IsImmortal(ob)) {
+ return;
+ }
ob->ob_refcnt = refcnt;
}
#if !defined(Py_LIMITED_API) || Py_LIMITED_API+0 < 0x030b0000
@@ -524,19 +609,33 @@ PyAPI_FUNC(void) Py_DecRef(PyObject *);
PyAPI_FUNC(void) _Py_IncRef(PyObject *);
PyAPI_FUNC(void) _Py_DecRef(PyObject *);
-static inline void Py_INCREF(PyObject *op)
+static inline Py_ALWAYS_INLINE void Py_INCREF(PyObject *op)
{
#if defined(Py_REF_DEBUG) && defined(Py_LIMITED_API) && Py_LIMITED_API+0 >= 0x030A0000
// Stable ABI for Python 3.10 built in debug mode.
_Py_IncRef(op);
#else
- _Py_INCREF_STAT_INC();
// Non-limited C API and limited C API for Python 3.9 and older access
// directly PyObject.ob_refcnt.
+#if SIZEOF_VOID_P > 4
+ // Portable saturated add, branching on the carry flag and set low bits
+ PY_UINT32_T cur_refcnt = op->ob_refcnt_split[PY_BIG_ENDIAN];
+ PY_UINT32_T new_refcnt = cur_refcnt + 1;
+ if (new_refcnt == 0) {
+ return;
+ }
+ op->ob_refcnt_split[PY_BIG_ENDIAN] = new_refcnt;
+#else
+ // Explicitly check immortality against the immortal value
+ if (_Py_IsImmortal(op)) {
+ return;
+ }
+ op->ob_refcnt++;
+#endif
+ _Py_INCREF_STAT_INC();
#ifdef Py_REF_DEBUG
_Py_INC_REFTOTAL();
-#endif // Py_REF_DEBUG
- op->ob_refcnt++;
+#endif
#endif
}
#if !defined(Py_LIMITED_API) || Py_LIMITED_API+0 < 0x030b0000
@@ -553,6 +652,9 @@ static inline void Py_DECREF(PyObject *op) {
#elif defined(Py_REF_DEBUG)
static inline void Py_DECREF(const char *filename, int lineno, PyObject *op)
{
+ if (_Py_IsImmortal(op)) {
+ return;
+ }
_Py_DECREF_STAT_INC();
_Py_DEC_REFTOTAL();
if (--op->ob_refcnt != 0) {
@@ -567,11 +669,14 @@ static inline void Py_DECREF(const char *filename, int lineno, PyObject *op)
#define Py_DECREF(op) Py_DECREF(__FILE__, __LINE__, _PyObject_CAST(op))
#else
-static inline void Py_DECREF(PyObject *op)
+static inline Py_ALWAYS_INLINE void Py_DECREF(PyObject *op)
{
- _Py_DECREF_STAT_INC();
// Non-limited C API and limited C API for Python 3.9 and older access
// directly PyObject.ob_refcnt.
+ if (_Py_IsImmortal(op)) {
+ return;
+ }
+ _Py_DECREF_STAT_INC();
if (--op->ob_refcnt == 0) {
_Py_Dealloc(op);
}
@@ -721,7 +826,7 @@ PyAPI_FUNC(int) Py_IsNone(PyObject *x);
#define Py_IsNone(x) Py_Is((x), Py_None)
/* Macro for returning Py_None from a function */
-#define Py_RETURN_NONE return Py_NewRef(Py_None)
+#define Py_RETURN_NONE return Py_None
/*
Py_NotImplemented is a singleton used to signal that an operation is
@@ -731,7 +836,7 @@ PyAPI_DATA(PyObject) _Py_NotImplementedStruct; /* Don't use this directly */
#define Py_NotImplemented (&_Py_NotImplementedStruct)
/* Macro for returning Py_NotImplemented from a function */
-#define Py_RETURN_NOTIMPLEMENTED return Py_NewRef(Py_NotImplemented)
+#define Py_RETURN_NOTIMPLEMENTED return Py_NotImplemented
/* Rich comparison opcodes */
#define Py_LT 0
diff --git a/Include/opcode.h b/Include/opcode.h
index aa8716ef5b4030..37a9e9bffa4cb7 100644
--- a/Include/opcode.h
+++ b/Include/opcode.h
@@ -95,6 +95,7 @@ extern "C" {
#define STORE_DEREF 138
#define DELETE_DEREF 139
#define JUMP_BACKWARD 140
+#define LOAD_SUPER_ATTR 141
#define CALL_FUNCTION_EX 142
#define EXTENDED_ARG 144
#define LIST_APPEND 145
@@ -142,7 +143,10 @@ extern "C" {
#define JUMP 260
#define JUMP_NO_INTERRUPT 261
#define LOAD_METHOD 262
-#define MAX_PSEUDO_OPCODE 262
+#define LOAD_SUPER_METHOD 263
+#define LOAD_ZERO_SUPER_METHOD 264
+#define LOAD_ZERO_SUPER_ATTR 265
+#define MAX_PSEUDO_OPCODE 265
#define BINARY_OP_ADD_FLOAT 6
#define BINARY_OP_ADD_INT 7
#define BINARY_OP_ADD_UNICODE 8
@@ -179,37 +183,41 @@ extern "C" {
#define FOR_ITER_TUPLE 63
#define FOR_ITER_RANGE 64
#define FOR_ITER_GEN 65
-#define LOAD_ATTR_CLASS 66
-#define LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN 67
-#define LOAD_ATTR_INSTANCE_VALUE 70
-#define LOAD_ATTR_MODULE 72
-#define LOAD_ATTR_PROPERTY 73
-#define LOAD_ATTR_SLOT 76
-#define LOAD_ATTR_WITH_HINT 77
-#define LOAD_ATTR_METHOD_LAZY_DICT 78
-#define LOAD_ATTR_METHOD_NO_DICT 79
-#define LOAD_ATTR_METHOD_WITH_VALUES 80
-#define LOAD_CONST__LOAD_FAST 81
-#define LOAD_FAST__LOAD_CONST 82
-#define LOAD_FAST__LOAD_FAST 84
-#define LOAD_GLOBAL_BUILTIN 86
-#define LOAD_GLOBAL_MODULE 87
-#define STORE_ATTR_INSTANCE_VALUE 88
-#define STORE_ATTR_SLOT 111
-#define STORE_ATTR_WITH_HINT 112
-#define STORE_FAST__LOAD_FAST 113
-#define STORE_FAST__STORE_FAST 141
-#define STORE_SUBSCR_DICT 143
-#define STORE_SUBSCR_LIST_INT 153
-#define UNPACK_SEQUENCE_LIST 154
-#define UNPACK_SEQUENCE_TUPLE 158
-#define UNPACK_SEQUENCE_TWO_TUPLE 159
-#define SEND_GEN 160
+#define LOAD_SUPER_ATTR_METHOD 66
+#define LOAD_ATTR_CLASS 67
+#define LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN 70
+#define LOAD_ATTR_INSTANCE_VALUE 72
+#define LOAD_ATTR_MODULE 73
+#define LOAD_ATTR_PROPERTY 76
+#define LOAD_ATTR_SLOT 77
+#define LOAD_ATTR_WITH_HINT 78
+#define LOAD_ATTR_METHOD_LAZY_DICT 79
+#define LOAD_ATTR_METHOD_NO_DICT 80
+#define LOAD_ATTR_METHOD_WITH_VALUES 81
+#define LOAD_CONST__LOAD_FAST 82
+#define LOAD_FAST__LOAD_CONST 84
+#define LOAD_FAST__LOAD_FAST 86
+#define LOAD_GLOBAL_BUILTIN 87
+#define LOAD_GLOBAL_MODULE 88
+#define STORE_ATTR_INSTANCE_VALUE 111
+#define STORE_ATTR_SLOT 112
+#define STORE_ATTR_WITH_HINT 113
+#define STORE_FAST__LOAD_FAST 143
+#define STORE_FAST__STORE_FAST 153
+#define STORE_SUBSCR_DICT 154
+#define STORE_SUBSCR_LIST_INT 158
+#define UNPACK_SEQUENCE_LIST 159
+#define UNPACK_SEQUENCE_TUPLE 160
+#define UNPACK_SEQUENCE_TWO_TUPLE 161
+#define SEND_GEN 166
#define HAS_ARG(op) ((((op) >= HAVE_ARGUMENT) && (!IS_PSEUDO_OPCODE(op)))\
|| ((op) == JUMP) \
|| ((op) == JUMP_NO_INTERRUPT) \
|| ((op) == LOAD_METHOD) \
+ || ((op) == LOAD_SUPER_METHOD) \
+ || ((op) == LOAD_ZERO_SUPER_METHOD) \
+ || ((op) == LOAD_ZERO_SUPER_ATTR) \
)
#define HAS_CONST(op) (false\
diff --git a/Include/pyport.h b/Include/pyport.h
index eef0fe1bfd71d8..bd0ba6d0681b21 100644
--- a/Include/pyport.h
+++ b/Include/pyport.h
@@ -184,7 +184,6 @@ typedef Py_ssize_t Py_ssize_clean_t;
# define Py_LOCAL_INLINE(type) static inline type
#endif
-// bpo-28126: Py_MEMCPY is kept for backwards compatibility,
#if !defined(Py_LIMITED_API) || Py_LIMITED_API+0 < 0x030b0000
# define Py_MEMCPY memcpy
#endif
@@ -663,6 +662,27 @@ extern char * _getpty(int *, int, mode_t, int);
# define WITH_THREAD
#endif
+#ifdef WITH_THREAD
+# ifdef Py_BUILD_CORE
+# ifdef HAVE_THREAD_LOCAL
+# error "HAVE_THREAD_LOCAL is already defined"
+# endif
+# define HAVE_THREAD_LOCAL 1
+# ifdef thread_local
+# define _Py_thread_local thread_local
+# elif __STDC_VERSION__ >= 201112L && !defined(__STDC_NO_THREADS__)
+# define _Py_thread_local _Thread_local
+# elif defined(_MSC_VER) /* AKA NT_THREADS */
+# define _Py_thread_local __declspec(thread)
+# elif defined(__GNUC__) /* includes clang */
+# define _Py_thread_local __thread
+# else
+ // fall back to the PyThread_tss_*() API, or ignore.
+# undef HAVE_THREAD_LOCAL
+# endif
+# endif
+#endif
+
/* Check that ALT_SOABI is consistent with Py_TRACE_REFS:
./configure --with-trace-refs should must be used to define Py_TRACE_REFS */
#if defined(ALT_SOABI) && defined(Py_TRACE_REFS)
diff --git a/Lib/_strptime.py b/Lib/_strptime.py
index b97dfcce1e8e4d..77ccdc9e1d789b 100644
--- a/Lib/_strptime.py
+++ b/Lib/_strptime.py
@@ -290,22 +290,6 @@ def _calc_julian_from_U_or_W(year, week_of_year, day_of_week, week_starts_Mon):
return 1 + days_to_week + day_of_week
-def _calc_julian_from_V(iso_year, iso_week, iso_weekday):
- """Calculate the Julian day based on the ISO 8601 year, week, and weekday.
- ISO weeks start on Mondays, with week 01 being the week containing 4 Jan.
- ISO week days range from 1 (Monday) to 7 (Sunday).
- """
- correction = datetime_date(iso_year, 1, 4).isoweekday() + 3
- ordinal = (iso_week * 7) + iso_weekday - correction
- # ordinal may be negative or 0 now, which means the date is in the previous
- # calendar year
- if ordinal < 1:
- ordinal += datetime_date(iso_year, 1, 1).toordinal()
- iso_year -= 1
- ordinal -= datetime_date(iso_year, 1, 1).toordinal()
- return iso_year, ordinal
-
-
def _strptime(data_string, format="%a %b %d %H:%M:%S %Y"):
"""Return a 2-tuple consisting of a time struct and an int containing
the number of microseconds based on the input string and the
@@ -483,7 +467,8 @@ def _strptime(data_string, format="%a %b %d %H:%M:%S %Y"):
else:
tz = value
break
- # Deal with the cases where ambiguities arize
+
+ # Deal with the cases where ambiguities arise
# don't assume default values for ISO week/year
if year is None and iso_year is not None:
if iso_week is None or weekday is None:
@@ -511,7 +496,6 @@ def _strptime(data_string, format="%a %b %d %H:%M:%S %Y"):
elif year is None:
year = 1900
-
# If we know the week of the year and what day of that week, we can figure
# out the Julian day of the year.
if julian is None and weekday is not None:
@@ -520,7 +504,10 @@ def _strptime(data_string, format="%a %b %d %H:%M:%S %Y"):
julian = _calc_julian_from_U_or_W(year, week_of_year, weekday,
week_starts_Mon)
elif iso_year is not None and iso_week is not None:
- year, julian = _calc_julian_from_V(iso_year, iso_week, weekday + 1)
+ datetime_result = datetime_date.fromisocalendar(iso_year, iso_week, weekday + 1)
+ year = datetime_result.year
+ month = datetime_result.month
+ day = datetime_result.day
if julian is not None and julian <= 0:
year -= 1
yday = 366 if calendar.isleap(year) else 365
diff --git a/Lib/ast.py b/Lib/ast.py
index 2cbc80a9835aa5..d9733a79d3a78f 100644
--- a/Lib/ast.py
+++ b/Lib/ast.py
@@ -25,6 +25,7 @@
:license: Python License.
"""
import sys
+import re
from _ast import *
from contextlib import contextmanager, nullcontext
from enum import IntEnum, auto, _simple_enum
@@ -305,28 +306,17 @@ def get_docstring(node, clean=True):
return text
-def _splitlines_no_ff(source):
+_line_pattern = re.compile(r"(.*?(?:\r\n|\n|\r|$))")
+def _splitlines_no_ff(source, maxlines=None):
"""Split a string into lines ignoring form feed and other chars.
This mimics how the Python parser splits source code.
"""
- idx = 0
lines = []
- next_line = ''
- while idx < len(source):
- c = source[idx]
- next_line += c
- idx += 1
- # Keep \r\n together
- if c == '\r' and idx < len(source) and source[idx] == '\n':
- next_line += '\n'
- idx += 1
- if c in '\r\n':
- lines.append(next_line)
- next_line = ''
-
- if next_line:
- lines.append(next_line)
+ for lineno, match in enumerate(_line_pattern.finditer(source), 1):
+ if maxlines is not None and lineno > maxlines:
+ break
+ lines.append(match[0])
return lines
@@ -360,7 +350,7 @@ def get_source_segment(source, node, *, padded=False):
except AttributeError:
return None
- lines = _splitlines_no_ff(source)
+ lines = _splitlines_no_ff(source, maxlines=end_lineno+1)
if end_lineno == lineno:
return lines[lineno].encode()[col_offset:end_col_offset].decode()
diff --git a/Lib/asyncio/selector_events.py b/Lib/asyncio/selector_events.py
index 3a697129e4c914..fa2422b7fba4a7 100644
--- a/Lib/asyncio/selector_events.py
+++ b/Lib/asyncio/selector_events.py
@@ -794,6 +794,8 @@ def __init__(self, loop, sock, protocol, extra=None, server=None):
self._buffer = collections.deque()
self._conn_lost = 0 # Set when call to connection_lost scheduled.
self._closing = False # Set when close() called.
+ self._paused = False # Set when pause_reading() called
+
if self._server is not None:
self._server._attach()
loop._transports[self._sock_fd] = self
@@ -839,6 +841,25 @@ def get_protocol(self):
def is_closing(self):
return self._closing
+ def is_reading(self):
+ return not self.is_closing() and not self._paused
+
+ def pause_reading(self):
+ if not self.is_reading():
+ return
+ self._paused = True
+ self._loop._remove_reader(self._sock_fd)
+ if self._loop.get_debug():
+ logger.debug("%r pauses reading", self)
+
+ def resume_reading(self):
+ if self._closing or not self._paused:
+ return
+ self._paused = False
+ self._add_reader(self._sock_fd, self._read_ready)
+ if self._loop.get_debug():
+ logger.debug("%r resumes reading", self)
+
def close(self):
if self._closing:
return
@@ -898,9 +919,8 @@ def get_write_buffer_size(self):
return sum(map(len, self._buffer))
def _add_reader(self, fd, callback, *args):
- if self._closing:
+ if not self.is_reading():
return
-
self._loop._add_reader(fd, callback, *args)
@@ -915,7 +935,6 @@ def __init__(self, loop, sock, protocol, waiter=None,
self._read_ready_cb = None
super().__init__(loop, sock, protocol, extra, server)
self._eof = False
- self._paused = False
self._empty_waiter = None
if _HAS_SENDMSG:
self._write_ready = self._write_sendmsg
@@ -943,25 +962,6 @@ def set_protocol(self, protocol):
super().set_protocol(protocol)
- def is_reading(self):
- return not self._paused and not self._closing
-
- def pause_reading(self):
- if self._closing or self._paused:
- return
- self._paused = True
- self._loop._remove_reader(self._sock_fd)
- if self._loop.get_debug():
- logger.debug("%r pauses reading", self)
-
- def resume_reading(self):
- if self._closing or not self._paused:
- return
- self._paused = False
- self._add_reader(self._sock_fd, self._read_ready)
- if self._loop.get_debug():
- logger.debug("%r resumes reading", self)
-
def _read_ready(self):
self._read_ready_cb()
diff --git a/Lib/asyncio/subprocess.py b/Lib/asyncio/subprocess.py
index cd10231f710f11..50727ca300e63e 100644
--- a/Lib/asyncio/subprocess.py
+++ b/Lib/asyncio/subprocess.py
@@ -144,10 +144,11 @@ def kill(self):
async def _feed_stdin(self, input):
debug = self._loop.get_debug()
- self.stdin.write(input)
- if debug:
- logger.debug(
- '%r communicate: feed stdin (%s bytes)', self, len(input))
+ if input is not None:
+ self.stdin.write(input)
+ if debug:
+ logger.debug(
+ '%r communicate: feed stdin (%s bytes)', self, len(input))
try:
await self.stdin.drain()
except (BrokenPipeError, ConnectionResetError) as exc:
@@ -180,7 +181,7 @@ async def _read_stream(self, fd):
return output
async def communicate(self, input=None):
- if input is not None:
+ if self.stdin is not None:
stdin = self._feed_stdin(input)
else:
stdin = self._noop()
diff --git a/Lib/asyncio/unix_events.py b/Lib/asyncio/unix_events.py
index b21e0394141bf4..17fb4d5f7646ce 100644
--- a/Lib/asyncio/unix_events.py
+++ b/Lib/asyncio/unix_events.py
@@ -485,13 +485,21 @@ def __init__(self, loop, pipe, protocol, waiter=None, extra=None):
self._loop.call_soon(self._protocol.connection_made, self)
# only start reading when connection_made() has been called
- self._loop.call_soon(self._loop._add_reader,
+ self._loop.call_soon(self._add_reader,
self._fileno, self._read_ready)
if waiter is not None:
# only wake up the waiter when connection_made() has been called
self._loop.call_soon(futures._set_result_unless_cancelled,
waiter, None)
+ def _add_reader(self, fd, callback):
+ if not self.is_reading():
+ return
+ self._loop._add_reader(fd, callback)
+
+ def is_reading(self):
+ return not self._paused and not self._closing
+
def __repr__(self):
info = [self.__class__.__name__]
if self._pipe is None:
@@ -532,7 +540,7 @@ def _read_ready(self):
self._loop.call_soon(self._call_connection_lost, None)
def pause_reading(self):
- if self._closing or self._paused:
+ if not self.is_reading():
return
self._paused = True
self._loop._remove_reader(self._fileno)
diff --git a/Lib/bdb.py b/Lib/bdb.py
index 7f9b09514ffd00..0f3eec653baaad 100644
--- a/Lib/bdb.py
+++ b/Lib/bdb.py
@@ -574,6 +574,8 @@ def format_stack_entry(self, frame_lineno, lprefix=': '):
line = linecache.getline(filename, lineno, frame.f_globals)
if line:
s += lprefix + line.strip()
+ else:
+ s += f'{lprefix}Warning: lineno is None'
return s
# The following methods can be called by clients to use
diff --git a/Lib/cProfile.py b/Lib/cProfile.py
index f7000a8bfa0ddb..135a12c3965c00 100755
--- a/Lib/cProfile.py
+++ b/Lib/cProfile.py
@@ -8,6 +8,7 @@
import _lsprof
import importlib.machinery
+import io
import profile as _pyprofile
# ____________________________________________________________
@@ -168,7 +169,7 @@ def main():
else:
progname = args[0]
sys.path.insert(0, os.path.dirname(progname))
- with open(progname, 'rb') as fp:
+ with io.open_code(progname) as fp:
code = compile(fp.read(), progname, 'exec')
spec = importlib.machinery.ModuleSpec(name='__main__', loader=None,
origin=progname)
diff --git a/Lib/calendar.py b/Lib/calendar.py
index 657396439c91fc..bbd4fea3b88ca4 100644
--- a/Lib/calendar.py
+++ b/Lib/calendar.py
@@ -7,8 +7,10 @@
import sys
import datetime
+from enum import IntEnum, global_enum
import locale as _locale
from itertools import repeat
+import warnings
__all__ = ["IllegalMonthError", "IllegalWeekdayError", "setfirstweekday",
"firstweekday", "isleap", "leapdays", "weekday", "monthrange",
@@ -16,6 +18,9 @@
"timegm", "month_name", "month_abbr", "day_name", "day_abbr",
"Calendar", "TextCalendar", "HTMLCalendar", "LocaleTextCalendar",
"LocaleHTMLCalendar", "weekheader",
+ "Day", "Month", "JANUARY", "FEBRUARY", "MARCH",
+ "APRIL", "MAY", "JUNE", "JULY",
+ "AUGUST", "SEPTEMBER", "OCTOBER", "NOVEMBER", "DECEMBER",
"MONDAY", "TUESDAY", "WEDNESDAY", "THURSDAY", "FRIDAY",
"SATURDAY", "SUNDAY"]
@@ -37,9 +42,47 @@ def __str__(self):
return "bad weekday number %r; must be 0 (Monday) to 6 (Sunday)" % self.weekday
-# Constants for months referenced later
-January = 1
-February = 2
+def __getattr__(name):
+ if name in ('January', 'February'):
+ warnings.warn(f"The '{name}' attribute is deprecated, use '{name.upper()}' instead",
+ DeprecationWarning, stacklevel=2)
+ if name == 'January':
+ return 1
+ else:
+ return 2
+
+ raise AttributeError(f"module '{__name__}' has no attribute '{name}'")
+
+
+# Constants for months
+@global_enum
+class Month(IntEnum):
+ JANUARY = 1
+ FEBRUARY = 2
+ MARCH = 3
+ APRIL = 4
+ MAY = 5
+ JUNE = 6
+ JULY = 7
+ AUGUST = 8
+ SEPTEMBER = 9
+ OCTOBER = 10
+ NOVEMBER = 11
+ DECEMBER = 12
+
+
+# Constants for days
+@global_enum
+class Day(IntEnum):
+ MONDAY = 0
+ TUESDAY = 1
+ WEDNESDAY = 2
+ THURSDAY = 3
+ FRIDAY = 4
+ SATURDAY = 5
+ SUNDAY = 6
+
+
# Number of days per month (except for February in leap years)
mdays = [0, 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
@@ -95,9 +138,6 @@ def __len__(self):
month_name = _localized_month('%B')
month_abbr = _localized_month('%b')
-# Constants for weekdays
-(MONDAY, TUESDAY, WEDNESDAY, THURSDAY, FRIDAY, SATURDAY, SUNDAY) = range(7)
-
def isleap(year):
"""Return True for leap years, False for non-leap years."""
@@ -125,12 +165,12 @@ def monthrange(year, month):
if not 1 <= month <= 12:
raise IllegalMonthError(month)
day1 = weekday(year, month, 1)
- ndays = mdays[month] + (month == February and isleap(year))
+ ndays = mdays[month] + (month == FEBRUARY and isleap(year))
return day1, ndays
def _monthlen(year, month):
- return mdays[month] + (month == February and isleap(year))
+ return mdays[month] + (month == FEBRUARY and isleap(year))
def _prevmonth(year, month):
@@ -260,10 +300,7 @@ def yeardatescalendar(self, year, width=3):
Each month contains between 4 and 6 weeks and each week contains 1-7
days. Days are datetime.date objects.
"""
- months = [
- self.monthdatescalendar(year, i)
- for i in range(January, January+12)
- ]
+ months = [self.monthdatescalendar(year, m) for m in Month]
return [months[i:i+width] for i in range(0, len(months), width) ]
def yeardays2calendar(self, year, width=3):
@@ -273,10 +310,7 @@ def yeardays2calendar(self, year, width=3):
(day number, weekday number) tuples. Day numbers outside this month are
zero.
"""
- months = [
- self.monthdays2calendar(year, i)
- for i in range(January, January+12)
- ]
+ months = [self.monthdays2calendar(year, m) for m in Month]
return [months[i:i+width] for i in range(0, len(months), width) ]
def yeardayscalendar(self, year, width=3):
@@ -285,10 +319,7 @@ def yeardayscalendar(self, year, width=3):
yeardatescalendar()). Entries in the week lists are day numbers.
Day numbers outside this month are zero.
"""
- months = [
- self.monthdayscalendar(year, i)
- for i in range(January, January+12)
- ]
+ months = [self.monthdayscalendar(year, m) for m in Month]
return [months[i:i+width] for i in range(0, len(months), width) ]
@@ -509,7 +540,7 @@ def formatyear(self, theyear, width=3):
a('\n')
a('%s ' % (
width, self.cssclass_year_head, theyear))
- for i in range(January, January+12, width):
+ for i in range(JANUARY, JANUARY+12, width):
# months in this row
months = range(i, min(i+width, 13))
a('')
diff --git a/Lib/contextlib.py b/Lib/contextlib.py
index 30d9ac25b2bbec..b5acbcb9e6d77c 100644
--- a/Lib/contextlib.py
+++ b/Lib/contextlib.py
@@ -441,7 +441,16 @@ def __exit__(self, exctype, excinst, exctb):
# exactly reproduce the limitations of the CPython interpreter.
#
# See http://bugs.python.org/issue12029 for more details
- return exctype is not None and issubclass(exctype, self._exceptions)
+ if exctype is None:
+ return
+ if issubclass(exctype, self._exceptions):
+ return True
+ if issubclass(exctype, ExceptionGroup):
+ match, rest = excinst.split(self._exceptions)
+ if rest is None:
+ return True
+ raise rest
+ return False
class _BaseExitStack:
diff --git a/Lib/curses/textpad.py b/Lib/curses/textpad.py
index 2079953a06614b..aa87061b8d749e 100644
--- a/Lib/curses/textpad.py
+++ b/Lib/curses/textpad.py
@@ -102,7 +102,10 @@ def do_command(self, ch):
self._insert_printable_char(ch)
elif ch == curses.ascii.SOH: # ^a
self.win.move(y, 0)
- elif ch in (curses.ascii.STX,curses.KEY_LEFT, curses.ascii.BS,curses.KEY_BACKSPACE):
+ elif ch in (curses.ascii.STX,curses.KEY_LEFT,
+ curses.ascii.BS,
+ curses.KEY_BACKSPACE,
+ curses.ascii.DEL):
if x > 0:
self.win.move(y, x-1)
elif y == 0:
@@ -111,7 +114,7 @@ def do_command(self, ch):
self.win.move(y-1, self._end_of_line(y-1))
else:
self.win.move(y-1, self.maxx)
- if ch in (curses.ascii.BS, curses.KEY_BACKSPACE):
+ if ch in (curses.ascii.BS, curses.KEY_BACKSPACE, curses.ascii.DEL):
self.win.delch()
elif ch == curses.ascii.EOT: # ^d
self.win.delch()
diff --git a/Lib/dataclasses.py b/Lib/dataclasses.py
index 4026c8b77975b7..a73cdc22a5f4b3 100644
--- a/Lib/dataclasses.py
+++ b/Lib/dataclasses.py
@@ -1128,8 +1128,13 @@ def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen,
if not getattr(cls, '__doc__'):
# Create a class doc-string.
- cls.__doc__ = (cls.__name__ +
- str(inspect.signature(cls)).replace(' -> None', ''))
+ try:
+ # In some cases fetching a signature is not possible.
+ # But, we surely should not fail in this case.
+ text_sig = str(inspect.signature(cls)).replace(' -> None', '')
+ except (TypeError, ValueError):
+ text_sig = ''
+ cls.__doc__ = (cls.__name__ + text_sig)
if match_args:
# I could probably compute this once
diff --git a/Lib/datetime.py b/Lib/datetime.py
index 637144637485bc..b0eb1c216a689d 100644
--- a/Lib/datetime.py
+++ b/Lib/datetime.py
@@ -1801,6 +1801,13 @@ def fromtimestamp(cls, timestamp, tz=None):
@classmethod
def utcfromtimestamp(cls, t):
"""Construct a naive UTC datetime from a POSIX timestamp."""
+ import warnings
+ warnings.warn("datetime.utcfromtimestamp() is deprecated and scheduled "
+ "for removal in a future version. Use timezone-aware "
+ "objects to represent datetimes in UTC: "
+ "datetime.fromtimestamp(t, datetime.UTC).",
+ DeprecationWarning,
+ stacklevel=2)
return cls._fromtimestamp(t, True, None)
@classmethod
@@ -1812,8 +1819,15 @@ def now(cls, tz=None):
@classmethod
def utcnow(cls):
"Construct a UTC datetime from time.time()."
+ import warnings
+ warnings.warn("datetime.utcnow() is deprecated and scheduled for "
+ "removal in a future version. Instead, Use timezone-aware "
+ "objects to represent datetimes in UTC: "
+ "datetime.now(datetime.UTC).",
+ DeprecationWarning,
+ stacklevel=2)
t = _time.time()
- return cls.utcfromtimestamp(t)
+ return cls._fromtimestamp(t, True, None)
@classmethod
def combine(cls, date, time, tzinfo=True):
@@ -1965,6 +1979,11 @@ def replace(self, year=None, month=None, day=None, hour=None,
def _local_timezone(self):
if self.tzinfo is None:
ts = self._mktime()
+ # Detect gap
+ ts2 = self.replace(fold=1-self.fold)._mktime()
+ if ts2 != ts: # This happens in a gap or a fold
+ if (ts2 > ts) == self.fold:
+ ts = ts2
else:
ts = (self - _EPOCH) // timedelta(seconds=1)
localtm = _time.localtime(ts)
diff --git a/Lib/dis.py b/Lib/dis.py
index b39b2835330135..85c109584bf94f 100644
--- a/Lib/dis.py
+++ b/Lib/dis.py
@@ -41,6 +41,7 @@
FOR_ITER = opmap['FOR_ITER']
SEND = opmap['SEND']
LOAD_ATTR = opmap['LOAD_ATTR']
+LOAD_SUPER_ATTR = opmap['LOAD_SUPER_ATTR']
CACHE = opmap["CACHE"]
@@ -64,10 +65,10 @@ def _try_compile(source, name):
expect code objects
"""
try:
- c = compile(source, name, 'eval')
+ return compile(source, name, 'eval')
except SyntaxError:
- c = compile(source, name, 'exec')
- return c
+ pass
+ return compile(source, name, 'exec')
def dis(x=None, *, file=None, depth=None, show_caches=False, adaptive=False):
"""Disassemble classes, methods, functions, and other compiled objects.
@@ -368,9 +369,8 @@ def _get_const_value(op, arg, co_consts):
assert op in hasconst
argval = UNKNOWN
- if op == LOAD_CONST or op == RETURN_CONST:
- if co_consts is not None:
- argval = co_consts[arg]
+ if co_consts is not None:
+ argval = co_consts[arg]
return argval
def _get_const_info(op, arg, co_consts):
@@ -475,6 +475,10 @@ def _get_instructions_bytes(code, varname_from_oparg=None,
argval, argrepr = _get_name_info(arg//2, get_name)
if (arg & 1) and argrepr:
argrepr = "NULL|self + " + argrepr
+ elif deop == LOAD_SUPER_ATTR:
+ argval, argrepr = _get_name_info(arg//4, get_name)
+ if (arg & 1) and argrepr:
+ argrepr = "NULL|self + " + argrepr
else:
argval, argrepr = _get_name_info(arg, get_name)
elif deop in hasjabs:
diff --git a/Lib/email/_header_value_parser.py b/Lib/email/_header_value_parser.py
index e637e6df06612d..0d6bd812475eea 100644
--- a/Lib/email/_header_value_parser.py
+++ b/Lib/email/_header_value_parser.py
@@ -1987,7 +1987,7 @@ def get_address_list(value):
try:
token, value = get_address(value)
address_list.append(token)
- except errors.HeaderParseError as err:
+ except errors.HeaderParseError:
leader = None
if value[0] in CFWS_LEADER:
leader, value = get_cfws(value)
@@ -2096,7 +2096,7 @@ def get_msg_id(value):
except errors.HeaderParseError:
try:
token, value = get_no_fold_literal(value)
- except errors.HeaderParseError as e:
+ except errors.HeaderParseError:
try:
token, value = get_domain(value)
msg_id.defects.append(errors.ObsoleteHeaderDefect(
@@ -2443,7 +2443,6 @@ def get_parameter(value):
raise errors.HeaderParseError("Parameter not followed by '='")
param.append(ValueTerminal('=', 'parameter-separator'))
value = value[1:]
- leader = None
if value and value[0] in CFWS_LEADER:
token, value = get_cfws(value)
param.append(token)
@@ -2568,7 +2567,7 @@ def parse_mime_parameters(value):
try:
token, value = get_parameter(value)
mime_parameters.append(token)
- except errors.HeaderParseError as err:
+ except errors.HeaderParseError:
leader = None
if value[0] in CFWS_LEADER:
leader, value = get_cfws(value)
@@ -2626,7 +2625,6 @@ def parse_content_type_header(value):
don't do that.
"""
ctype = ContentType()
- recover = False
if not value:
ctype.defects.append(errors.HeaderMissingRequiredValue(
"Missing content type specification"))
diff --git a/Lib/email/charset.py b/Lib/email/charset.py
index 9af269442fb8af..043801107b60e5 100644
--- a/Lib/email/charset.py
+++ b/Lib/email/charset.py
@@ -341,7 +341,6 @@ def header_encode_lines(self, string, maxlengths):
if not lines and not current_line:
lines.append(None)
else:
- separator = (' ' if lines else '')
joined_line = EMPTYSTRING.join(current_line)
header_bytes = _encode(joined_line, codec)
lines.append(encoder(header_bytes))
diff --git a/Lib/email/feedparser.py b/Lib/email/feedparser.py
index 6bc4e0c4e59895..885097c7dda067 100644
--- a/Lib/email/feedparser.py
+++ b/Lib/email/feedparser.py
@@ -264,7 +264,7 @@ def _parsegen(self):
yield NeedMoreData
continue
break
- msg = self._pop_message()
+ self._pop_message()
# We need to pop the EOF matcher in order to tell if we're at
# the end of the current file, not the end of the last block
# of message headers.
diff --git a/Lib/email/message.py b/Lib/email/message.py
index b540c33984a753..411118c74dabb4 100644
--- a/Lib/email/message.py
+++ b/Lib/email/message.py
@@ -14,7 +14,7 @@
# Intrapackage imports
from email import utils
from email import errors
-from email._policybase import Policy, compat32
+from email._policybase import compat32
from email import charset as _charset
from email._encoded_words import decode_b
Charset = _charset.Charset
diff --git a/Lib/email/mime/text.py b/Lib/email/mime/text.py
index dfe53c426b2ac4..7672b789138600 100644
--- a/Lib/email/mime/text.py
+++ b/Lib/email/mime/text.py
@@ -6,7 +6,6 @@
__all__ = ['MIMEText']
-from email.charset import Charset
from email.mime.nonmultipart import MIMENonMultipart
@@ -36,6 +35,6 @@ def __init__(self, _text, _subtype='plain', _charset=None, *, policy=None):
_charset = 'utf-8'
MIMENonMultipart.__init__(self, 'text', _subtype, policy=policy,
- **{'charset': str(_charset)})
+ charset=str(_charset))
self.set_payload(_text, _charset)
diff --git a/Lib/email/utils.py b/Lib/email/utils.py
index 4d014bacd6182e..81da5394ea1695 100644
--- a/Lib/email/utils.py
+++ b/Lib/email/utils.py
@@ -143,13 +143,13 @@ def formatdate(timeval=None, localtime=False, usegmt=False):
# 2822 requires that day and month names be the English abbreviations.
if timeval is None:
timeval = time.time()
- if localtime or usegmt:
- dt = datetime.datetime.fromtimestamp(timeval, datetime.timezone.utc)
- else:
- dt = datetime.datetime.utcfromtimestamp(timeval)
+ dt = datetime.datetime.fromtimestamp(timeval, datetime.timezone.utc)
+
if localtime:
dt = dt.astimezone()
usegmt = False
+ elif not usegmt:
+ dt = dt.replace(tzinfo=None)
return format_datetime(dt, usegmt)
def format_datetime(dt, usegmt=False):
diff --git a/Lib/ensurepip/__init__.py b/Lib/ensurepip/__init__.py
index 00e77749e25e77..5f4f1d75b43e64 100644
--- a/Lib/ensurepip/__init__.py
+++ b/Lib/ensurepip/__init__.py
@@ -9,11 +9,9 @@
__all__ = ["version", "bootstrap"]
-_PACKAGE_NAMES = ('setuptools', 'pip')
-_SETUPTOOLS_VERSION = "65.5.0"
-_PIP_VERSION = "23.0.1"
+_PACKAGE_NAMES = ('pip',)
+_PIP_VERSION = "23.1.2"
_PROJECTS = [
- ("setuptools", _SETUPTOOLS_VERSION, "py3"),
("pip", _PIP_VERSION, "py3"),
]
@@ -153,17 +151,17 @@ def _bootstrap(*, root=None, upgrade=False, user=False,
_disable_pip_configuration_settings()
- # By default, installing pip and setuptools installs all of the
+ # By default, installing pip installs all of the
# following scripts (X.Y == running Python version):
#
- # pip, pipX, pipX.Y, easy_install, easy_install-X.Y
+ # pip, pipX, pipX.Y
#
# pip 1.5+ allows ensurepip to request that some of those be left out
if altinstall:
- # omit pip, pipX and easy_install
+ # omit pip, pipX
os.environ["ENSUREPIP_OPTIONS"] = "altinstall"
elif not default_pip:
- # omit pip and easy_install
+ # omit pip
os.environ["ENSUREPIP_OPTIONS"] = "install"
with tempfile.TemporaryDirectory() as tmpdir:
@@ -271,14 +269,14 @@ def _main(argv=None):
action="store_true",
default=False,
help=("Make an alternate install, installing only the X.Y versioned "
- "scripts (Default: pipX, pipX.Y, easy_install-X.Y)."),
+ "scripts (Default: pipX, pipX.Y)."),
)
parser.add_argument(
"--default-pip",
action="store_true",
default=False,
help=("Make a default pip install, installing the unqualified pip "
- "and easy_install in addition to the versioned scripts."),
+ "in addition to the versioned scripts."),
)
args = parser.parse_args(argv)
diff --git a/Lib/ensurepip/_bundled/pip-23.0.1-py3-none-any.whl b/Lib/ensurepip/_bundled/pip-23.1.2-py3-none-any.whl
similarity index 76%
rename from Lib/ensurepip/_bundled/pip-23.0.1-py3-none-any.whl
rename to Lib/ensurepip/_bundled/pip-23.1.2-py3-none-any.whl
index a855dc40e8630d..6a2515615ccda3 100644
Binary files a/Lib/ensurepip/_bundled/pip-23.0.1-py3-none-any.whl and b/Lib/ensurepip/_bundled/pip-23.1.2-py3-none-any.whl differ
diff --git a/Lib/ensurepip/_bundled/setuptools-65.5.0-py3-none-any.whl b/Lib/ensurepip/_bundled/setuptools-65.5.0-py3-none-any.whl
deleted file mode 100644
index 123a13e2c6b254..00000000000000
Binary files a/Lib/ensurepip/_bundled/setuptools-65.5.0-py3-none-any.whl and /dev/null differ
diff --git a/Lib/enum.py b/Lib/enum.py
index e9f224a303d3e5..6e497f7ef6a7de 100644
--- a/Lib/enum.py
+++ b/Lib/enum.py
@@ -190,6 +190,8 @@ class property(DynamicClassAttribute):
"""
member = None
+ _attr_type = None
+ _cls_type = None
def __get__(self, instance, ownerclass=None):
if instance is None:
@@ -199,33 +201,36 @@ def __get__(self, instance, ownerclass=None):
raise AttributeError(
'%r has no attribute %r' % (ownerclass, self.name)
)
- else:
- if self.fget is None:
- # look for a member by this name.
- try:
- return ownerclass._member_map_[self.name]
- except KeyError:
- raise AttributeError(
- '%r has no attribute %r' % (ownerclass, self.name)
- ) from None
- else:
- return self.fget(instance)
+ if self.fget is not None:
+ # use previous enum.property
+ return self.fget(instance)
+ elif self._attr_type == 'attr':
+ # look up previous attribute
+ return getattr(self._cls_type, self.name)
+ elif self._attr_type == 'desc':
+ # use previous descriptor
+ return getattr(instance._value_, self.name)
+ # look for a member by this name.
+ try:
+ return ownerclass._member_map_[self.name]
+ except KeyError:
+ raise AttributeError(
+ '%r has no attribute %r' % (ownerclass, self.name)
+ ) from None
def __set__(self, instance, value):
- if self.fset is None:
- raise AttributeError(
- " cannot set attribute %r" % (self.clsname, self.name)
- )
- else:
+ if self.fset is not None:
return self.fset(instance, value)
+ raise AttributeError(
+ " cannot set attribute %r" % (self.clsname, self.name)
+ )
def __delete__(self, instance):
- if self.fdel is None:
- raise AttributeError(
- " cannot delete attribute %r" % (self.clsname, self.name)
- )
- else:
+ if self.fdel is not None:
return self.fdel(instance)
+ raise AttributeError(
+ " cannot delete attribute %r" % (self.clsname, self.name)
+ )
def __set_name__(self, ownerclass, name):
self.name = name
@@ -313,27 +318,38 @@ def __set_name__(self, enum_class, member_name):
enum_class._member_names_.append(member_name)
# if necessary, get redirect in place and then add it to _member_map_
found_descriptor = None
+ descriptor_type = None
+ class_type = None
for base in enum_class.__mro__[1:]:
- descriptor = base.__dict__.get(member_name)
- if descriptor is not None:
- if isinstance(descriptor, (property, DynamicClassAttribute)):
- found_descriptor = descriptor
+ attr = base.__dict__.get(member_name)
+ if attr is not None:
+ if isinstance(attr, (property, DynamicClassAttribute)):
+ found_descriptor = attr
+ class_type = base
+ descriptor_type = 'enum'
break
- elif (
- hasattr(descriptor, 'fget') and
- hasattr(descriptor, 'fset') and
- hasattr(descriptor, 'fdel')
- ):
- found_descriptor = descriptor
+ elif _is_descriptor(attr):
+ found_descriptor = attr
+ descriptor_type = descriptor_type or 'desc'
+ class_type = class_type or base
continue
+ else:
+ descriptor_type = 'attr'
+ class_type = base
if found_descriptor:
redirect = property()
redirect.member = enum_member
redirect.__set_name__(enum_class, member_name)
- # earlier descriptor found; copy fget, fset, fdel to this one.
- redirect.fget = found_descriptor.fget
- redirect.fset = found_descriptor.fset
- redirect.fdel = found_descriptor.fdel
+ if descriptor_type in ('enum','desc'):
+ # earlier descriptor found; copy fget, fset, fdel to this one.
+ redirect.fget = getattr(found_descriptor, 'fget', None)
+ redirect._get = getattr(found_descriptor, '__get__', None)
+ redirect.fset = getattr(found_descriptor, 'fset', None)
+ redirect._set = getattr(found_descriptor, '__set__', None)
+ redirect.fdel = getattr(found_descriptor, 'fdel', None)
+ redirect._del = getattr(found_descriptor, '__delete__', None)
+ redirect._attr_type = descriptor_type
+ redirect._cls_type = class_type
setattr(enum_class, member_name, redirect)
else:
setattr(enum_class, member_name, enum_member)
diff --git a/Lib/http/cookiejar.py b/Lib/http/cookiejar.py
index 93b10d26c84545..bd89370e16831e 100644
--- a/Lib/http/cookiejar.py
+++ b/Lib/http/cookiejar.py
@@ -104,9 +104,9 @@ def time2isoz(t=None):
"""
if t is None:
- dt = datetime.datetime.utcnow()
+ dt = datetime.datetime.now(tz=datetime.UTC)
else:
- dt = datetime.datetime.utcfromtimestamp(t)
+ dt = datetime.datetime.fromtimestamp(t, tz=datetime.UTC)
return "%04d-%02d-%02d %02d:%02d:%02dZ" % (
dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second)
@@ -122,9 +122,9 @@ def time2netscape(t=None):
"""
if t is None:
- dt = datetime.datetime.utcnow()
+ dt = datetime.datetime.now(tz=datetime.UTC)
else:
- dt = datetime.datetime.utcfromtimestamp(t)
+ dt = datetime.datetime.fromtimestamp(t, tz=datetime.UTC)
return "%s, %02d-%s-%04d %02d:%02d:%02d GMT" % (
DAYS[dt.weekday()], dt.day, MONTHS[dt.month-1],
dt.year, dt.hour, dt.minute, dt.second)
diff --git a/Lib/idlelib/calltip_w.py b/Lib/idlelib/calltip_w.py
index 1e0404aa49f562..278546064adde2 100644
--- a/Lib/idlelib/calltip_w.py
+++ b/Lib/idlelib/calltip_w.py
@@ -25,7 +25,7 @@ def __init__(self, text_widget):
text_widget: a Text widget with code for which call-tips are desired
"""
# Note: The Text widget will be accessible as self.anchor_widget
- super(CalltipWindow, self).__init__(text_widget)
+ super().__init__(text_widget)
self.label = self.text = None
self.parenline = self.parencol = self.lastline = None
@@ -54,7 +54,7 @@ def position_window(self):
return
self.lastline = curline
self.anchor_widget.see("insert")
- super(CalltipWindow, self).position_window()
+ super().position_window()
def showtip(self, text, parenleft, parenright):
"""Show the call-tip, bind events which will close it and reposition it.
@@ -73,7 +73,7 @@ def showtip(self, text, parenleft, parenright):
self.parenline, self.parencol = map(
int, self.anchor_widget.index(parenleft).split("."))
- super(CalltipWindow, self).showtip()
+ super().showtip()
self._bind_events()
@@ -143,7 +143,7 @@ def hidetip(self):
# ValueError may be raised by MultiCall
pass
- super(CalltipWindow, self).hidetip()
+ super().hidetip()
def _bind_events(self):
"""Bind event handlers."""
diff --git a/Lib/idlelib/debugger.py b/Lib/idlelib/debugger.py
index ccd03e46e16147..452c62b42655b3 100644
--- a/Lib/idlelib/debugger.py
+++ b/Lib/idlelib/debugger.py
@@ -49,9 +49,9 @@ def __frame2message(self, frame):
filename = code.co_filename
lineno = frame.f_lineno
basename = os.path.basename(filename)
- message = "%s:%s" % (basename, lineno)
+ message = f"{basename}:{lineno}"
if code.co_name != "?":
- message = "%s: %s()" % (message, code.co_name)
+ message = f"{message}: {code.co_name}()"
return message
@@ -213,7 +213,8 @@ def interaction(self, message, frame, info=None):
m1 = "%s" % str(type)
if value is not None:
try:
- m1 = "%s: %s" % (m1, str(value))
+ # TODO redo entire section, tries not needed.
+ m1 = f"{m1}: {value}"
except:
pass
bg = "yellow"
diff --git a/Lib/idlelib/debugobj.py b/Lib/idlelib/debugobj.py
index 5a4c9978842035..71d01c7070df54 100644
--- a/Lib/idlelib/debugobj.py
+++ b/Lib/idlelib/debugobj.py
@@ -87,7 +87,7 @@ def GetSubList(self):
continue
def setfunction(value, key=key, object=self.object):
object[key] = value
- item = make_objecttreeitem("%r:" % (key,), value, setfunction)
+ item = make_objecttreeitem(f"{key!r}:", value, setfunction)
sublist.append(item)
return sublist
diff --git a/Lib/idlelib/editor.py b/Lib/idlelib/editor.py
index 08d6aa2efde22a..505815502600b1 100644
--- a/Lib/idlelib/editor.py
+++ b/Lib/idlelib/editor.py
@@ -38,12 +38,13 @@
def _sphinx_version():
"Format sys.version_info to produce the Sphinx version string used to install the chm docs"
major, minor, micro, level, serial = sys.version_info
- release = '%s%s' % (major, minor)
- release += '%s' % (micro,)
+ # TODO remove unneeded function since .chm no longer installed
+ release = f'{major}{minor}'
+ release += f'{micro}'
if level == 'candidate':
- release += 'rc%s' % (serial,)
+ release += f'rc{serial}'
elif level != 'final':
- release += '%s%s' % (level[0], serial)
+ release += f'{level[0]}{serial}'
return release
@@ -950,7 +951,7 @@ def update_recent_files_list(self, new_file=None):
rf_list = []
file_path = self.recent_files_path
if file_path and os.path.exists(file_path):
- with open(file_path, 'r',
+ with open(file_path,
encoding='utf_8', errors='replace') as rf_list_file:
rf_list = rf_list_file.readlines()
if new_file:
@@ -1458,7 +1459,7 @@ def newline_and_indent_event(self, event):
else:
self.reindent_to(y.compute_backslash_indent())
else:
- assert 0, "bogus continuation type %r" % (c,)
+ assert 0, f"bogus continuation type {c!r}"
return "break"
# This line starts a brand new statement; indent relative to
diff --git a/Lib/idlelib/filelist.py b/Lib/idlelib/filelist.py
index 254f5caf6b81b0..f87781d2570fe0 100644
--- a/Lib/idlelib/filelist.py
+++ b/Lib/idlelib/filelist.py
@@ -22,7 +22,7 @@ def open(self, filename, action=None):
# This can happen when bad filename is passed on command line:
messagebox.showerror(
"File Error",
- "%r is a directory." % (filename,),
+ f"{filename!r} is a directory.",
master=self.root)
return None
key = os.path.normcase(filename)
@@ -90,7 +90,7 @@ def filename_changed_edit(self, edit):
self.inversedict[conflict] = None
messagebox.showerror(
"Name Conflict",
- "You now have multiple edit windows open for %r" % (filename,),
+ f"You now have multiple edit windows open for {filename!r}",
master=self.root)
self.dict[newkey] = edit
self.inversedict[edit] = newkey
diff --git a/Lib/idlelib/idle_test/test_config.py b/Lib/idlelib/idle_test/test_config.py
index 697fda527968de..08ed76fe288294 100644
--- a/Lib/idlelib/idle_test/test_config.py
+++ b/Lib/idlelib/idle_test/test_config.py
@@ -191,7 +191,7 @@ def setUpClass(cls):
idle_dir = os.path.abspath(sys.path[0])
for ctype in conf.config_types:
config_path = os.path.join(idle_dir, '../config-%s.def' % ctype)
- with open(config_path, 'r') as f:
+ with open(config_path) as f:
cls.config_string[ctype] = f.read()
cls.orig_warn = config._warn
diff --git a/Lib/idlelib/idle_test/test_outwin.py b/Lib/idlelib/idle_test/test_outwin.py
index e347bfca7f191a..d6e85ad674417c 100644
--- a/Lib/idlelib/idle_test/test_outwin.py
+++ b/Lib/idlelib/idle_test/test_outwin.py
@@ -159,7 +159,7 @@ def test_file_line_helper(self, mock_open):
for line, expected_output in test_lines:
self.assertEqual(flh(line), expected_output)
if expected_output:
- mock_open.assert_called_with(expected_output[0], 'r')
+ mock_open.assert_called_with(expected_output[0])
if __name__ == '__main__':
diff --git a/Lib/idlelib/idle_test/test_sidebar.py b/Lib/idlelib/idle_test/test_sidebar.py
index 049531e66a414e..5506fd2b0e22a5 100644
--- a/Lib/idlelib/idle_test/test_sidebar.py
+++ b/Lib/idlelib/idle_test/test_sidebar.py
@@ -328,7 +328,7 @@ def test_scroll(self):
self.assertEqual(self.linenumber.sidebar_text.index('@0,0'), '11.0')
# Generate a mouse-wheel event and make sure it scrolled up or down.
- # The meaning of the "delta" is OS-dependant, so this just checks for
+ # The meaning of the "delta" is OS-dependent, so this just checks for
# any change.
self.linenumber.sidebar_text.event_generate('<MouseWheel>',
x=0, y=0,
@@ -691,7 +691,7 @@ def test_mousewheel(self):
self.assertIsNotNone(text.dlineinfo(text.index(f'{last_lineno}.0')))
# Scroll up using the event.
- # The meaning delta is platform-dependant.
+ # The meaning of delta is platform-dependent.
delta = -1 if sys.platform == 'darwin' else 120
sidebar.canvas.event_generate('<MouseWheel>', x=0, y=0, delta=delta)
yield
diff --git a/Lib/idlelib/multicall.py b/Lib/idlelib/multicall.py
index dc02001292fc14..0200f445cc9340 100644
--- a/Lib/idlelib/multicall.py
+++ b/Lib/idlelib/multicall.py
@@ -52,9 +52,9 @@
_modifier_masks = (MC_CONTROL, MC_ALT, MC_SHIFT, MC_META)
# a dictionary to map a modifier name into its number
-_modifier_names = dict([(name, number)
+_modifier_names = {name: number
for number in range(len(_modifiers))
- for name in _modifiers[number]])
+ for name in _modifiers[number]}
# In 3.4, if no shell window is ever open, the underlying Tk widget is
# destroyed before .__del__ methods here are called. The following
@@ -134,7 +134,7 @@ def nbits(n):
return nb
statelist = []
for state in states:
- substates = list(set(state & x for x in states))
+ substates = list({state & x for x in states})
substates.sort(key=nbits, reverse=True)
statelist.append(substates)
return statelist
@@ -258,9 +258,9 @@ def __del__(self):
_binder_classes = (_ComplexBinder,) * 4 + (_SimpleBinder,) * (len(_types)-4)
# A dictionary to map a type name into its number
-_type_names = dict([(name, number)
+_type_names = {name: number
for number in range(len(_types))
- for name in _types[number]])
+ for name in _types[number]}
_keysym_re = re.compile(r"^\w+$")
_button_re = re.compile(r"^[1-5]$")
diff --git a/Lib/idlelib/outwin.py b/Lib/idlelib/outwin.py
index 5ab08bbaf4bc95..ac67c904ab9797 100644
--- a/Lib/idlelib/outwin.py
+++ b/Lib/idlelib/outwin.py
@@ -42,7 +42,7 @@ def file_line_helper(line):
if match:
filename, lineno = match.group(1, 2)
try:
- f = open(filename, "r")
+ f = open(filename)
f.close()
break
except OSError:
diff --git a/Lib/idlelib/pyshell.py b/Lib/idlelib/pyshell.py
index e68233a5a4131e..bdde156166171b 100755
--- a/Lib/idlelib/pyshell.py
+++ b/Lib/idlelib/pyshell.py
@@ -249,7 +249,7 @@ def store_file_breaks(self):
breaks = self.breakpoints
filename = self.io.filename
try:
- with open(self.breakpointPath, "r") as fp:
+ with open(self.breakpointPath) as fp:
lines = fp.readlines()
except OSError:
lines = []
@@ -279,7 +279,7 @@ def restore_file_breaks(self):
if filename is None:
return
if os.path.isfile(self.breakpointPath):
- with open(self.breakpointPath, "r") as fp:
+ with open(self.breakpointPath) as fp:
lines = fp.readlines()
for line in lines:
if line.startswith(filename + '='):
@@ -441,7 +441,7 @@ def build_subprocess_arglist(self):
# run from the IDLE source directory.
del_exitf = idleConf.GetOption('main', 'General', 'delete-exitfunc',
default=False, type='bool')
- command = "__import__('idlelib.run').run.main(%r)" % (del_exitf,)
+ command = f"__import__('idlelib.run').run.main({del_exitf!r})"
return [sys.executable] + w + ["-c", command, str(self.port)]
def start_subprocess(self):
@@ -574,9 +574,9 @@ def transfer_path(self, with_cwd=False):
self.runcommand("""if 1:
import sys as _sys
- _sys.path = %r
+ _sys.path = {!r}
del _sys
- \n""" % (path,))
+ \n""".format(path))
active_seq = None
@@ -703,14 +703,14 @@ def stuffsource(self, source):
def prepend_syspath(self, filename):
"Prepend sys.path with file's directory if not already included"
self.runcommand("""if 1:
- _filename = %r
+ _filename = {!r}
import sys as _sys
from os.path import dirname as _dirname
_dir = _dirname(_filename)
if not _dir in _sys.path:
_sys.path.insert(0, _dir)
del _filename, _sys, _dirname, _dir
- \n""" % (filename,))
+ \n""".format(filename))
def showsyntaxerror(self, filename=None):
"""Override Interactive Interpreter method: Use Colorizing
@@ -1536,7 +1536,7 @@ def main():
try:
opts, args = getopt.getopt(sys.argv[1:], "c:deihnr:st:")
except getopt.error as msg:
- print("Error: %s\n%s" % (msg, usage_msg), file=sys.stderr)
+ print(f"Error: {msg}\n{usage_msg}", file=sys.stderr)
sys.exit(2)
for o, a in opts:
if o == '-c':
@@ -1668,9 +1668,9 @@ def main():
if cmd or script:
shell.interp.runcommand("""if 1:
import sys as _sys
- _sys.argv = %r
+ _sys.argv = {!r}
del _sys
- \n""" % (sys.argv,))
+ \n""".format(sys.argv))
if cmd:
shell.interp.execsource(cmd)
elif script:
diff --git a/Lib/idlelib/redirector.py b/Lib/idlelib/redirector.py
index 9ab34c5acfb22c..4928340e98df68 100644
--- a/Lib/idlelib/redirector.py
+++ b/Lib/idlelib/redirector.py
@@ -47,9 +47,8 @@ def __init__(self, widget):
tk.createcommand(w, self.dispatch)
def __repr__(self):
- return "%s(%s<%s>)" % (self.__class__.__name__,
- self.widget.__class__.__name__,
- self.widget._w)
+ w = self.widget
+ return f"{self.__class__.__name__}({w.__class__.__name__}<{w._w}>)"
def close(self):
"Unregister operations and revert redirection created by .__init__."
@@ -143,8 +142,7 @@ def __init__(self, redir, operation):
self.orig_and_operation = (redir.orig, operation)
def __repr__(self):
- return "%s(%r, %r)" % (self.__class__.__name__,
- self.redir, self.operation)
+ return f"{self.__class__.__name__}({self.redir!r}, {self.operation!r})"
def __call__(self, *args):
return self.tk_call(self.orig_and_operation + args)
diff --git a/Lib/idlelib/rpc.py b/Lib/idlelib/rpc.py
index 62eec84c9c8d09..b08b80c9004551 100644
--- a/Lib/idlelib/rpc.py
+++ b/Lib/idlelib/rpc.py
@@ -174,7 +174,7 @@ def localcall(self, seq, request):
except TypeError:
return ("ERROR", "Bad request format")
if oid not in self.objtable:
- return ("ERROR", "Unknown object id: %r" % (oid,))
+ return ("ERROR", f"Unknown object id: {oid!r}")
obj = self.objtable[oid]
if methodname == "__methods__":
methods = {}
@@ -185,7 +185,7 @@ def localcall(self, seq, request):
_getattributes(obj, attributes)
return ("OK", attributes)
if not hasattr(obj, methodname):
- return ("ERROR", "Unsupported method name: %r" % (methodname,))
+ return ("ERROR", f"Unsupported method name: {methodname!r}")
method = getattr(obj, methodname)
try:
if how == 'CALL':
diff --git a/Lib/idlelib/run.py b/Lib/idlelib/run.py
index 577c49eb67b20d..84792a82b0022c 100644
--- a/Lib/idlelib/run.py
+++ b/Lib/idlelib/run.py
@@ -52,13 +52,13 @@ def idle_formatwarning(message, category, filename, lineno, line=None):
"""Format warnings the IDLE way."""
s = "\nWarning (from warnings module):\n"
- s += ' File \"%s\", line %s\n' % (filename, lineno)
+ s += f' File \"{filename}\", line {lineno}\n'
if line is None:
line = linecache.getline(filename, lineno)
line = line.strip()
if line:
s += " %s\n" % line
- s += "%s: %s\n" % (category.__name__, message)
+ s += f"{category.__name__}: {message}\n"
return s
def idle_showwarning_subproc(
@@ -239,6 +239,7 @@ def print_exception():
efile = sys.stderr
typ, val, tb = excinfo = sys.exc_info()
sys.last_type, sys.last_value, sys.last_traceback = excinfo
+ sys.last_exc = val
seen = set()
def print_exc(typ, exc, tb):
diff --git a/Lib/idlelib/textview.py b/Lib/idlelib/textview.py
index a66c1a4309a617..23f0f4cb5027ec 100644
--- a/Lib/idlelib/textview.py
+++ b/Lib/idlelib/textview.py
@@ -169,7 +169,7 @@ def view_file(parent, title, filename, encoding, modal=True, wrap='word',
with contents of the file.
"""
try:
- with open(filename, 'r', encoding=encoding) as file:
+ with open(filename, encoding=encoding) as file:
contents = file.read()
except OSError:
showerror(title='File Load Error',
diff --git a/Lib/idlelib/tooltip.py b/Lib/idlelib/tooltip.py
index d714318dae8ef1..3983690dd41177 100644
--- a/Lib/idlelib/tooltip.py
+++ b/Lib/idlelib/tooltip.py
@@ -92,7 +92,7 @@ def __init__(self, anchor_widget, hover_delay=1000):
e.g. after hovering over the anchor widget with the mouse for enough
time.
"""
- super(OnHoverTooltipBase, self).__init__(anchor_widget)
+ super().__init__(anchor_widget)
self.hover_delay = hover_delay
self._after_id = None
@@ -107,7 +107,7 @@ def __del__(self):
self.anchor_widget.unbind("<Button>", self._id3) # pragma: no cover
except TclError:
pass
- super(OnHoverTooltipBase, self).__del__()
+ super().__del__()
def _show_event(self, event=None):
"""event handler to display the tooltip"""
@@ -139,7 +139,7 @@ def hidetip(self):
self.unschedule()
except TclError: # pragma: no cover
pass
- super(OnHoverTooltipBase, self).hidetip()
+ super().hidetip()
class Hovertip(OnHoverTooltipBase):
@@ -154,7 +154,7 @@ def __init__(self, anchor_widget, text, hover_delay=1000):
e.g. after hovering over the anchor widget with the mouse for enough
time.
"""
- super(Hovertip, self).__init__(anchor_widget, hover_delay=hover_delay)
+ super().__init__(anchor_widget, hover_delay=hover_delay)
self.text = text
def showcontents(self):
diff --git a/Lib/idlelib/tree.py b/Lib/idlelib/tree.py
index 5947268f5c35ae..5f30f0f6092bfa 100644
--- a/Lib/idlelib/tree.py
+++ b/Lib/idlelib/tree.py
@@ -32,7 +32,7 @@
if os.path.isdir(_icondir):
ICONDIR = _icondir
elif not os.path.isdir(ICONDIR):
- raise RuntimeError("can't find icon directory (%r)" % (ICONDIR,))
+ raise RuntimeError(f"can't find icon directory ({ICONDIR!r})")
def listicons(icondir=ICONDIR):
"""Utility to display the available icons."""
diff --git a/Lib/idlelib/undo.py b/Lib/idlelib/undo.py
index 85ecffecb4cbcb..5f10c0f05c1acb 100644
--- a/Lib/idlelib/undo.py
+++ b/Lib/idlelib/undo.py
@@ -309,7 +309,7 @@ def __repr__(self):
s = self.__class__.__name__
strs = []
for cmd in self.cmds:
- strs.append(" %r" % (cmd,))
+ strs.append(f" {cmd!r}")
return s + "(\n" + ",\n".join(strs) + "\n)"
def __len__(self):
diff --git a/Lib/imp.py b/Lib/imp.py
deleted file mode 100644
index fe850f6a001814..00000000000000
--- a/Lib/imp.py
+++ /dev/null
@@ -1,346 +0,0 @@
-"""This module provides the components needed to build your own __import__
-function. Undocumented functions are obsolete.
-
-In most cases it is preferred you consider using the importlib module's
-functionality over this module.
-
-"""
-# (Probably) need to stay in _imp
-from _imp import (lock_held, acquire_lock, release_lock,
- get_frozen_object, is_frozen_package,
- init_frozen, is_builtin, is_frozen,
- _fix_co_filename, _frozen_module_names)
-try:
- from _imp import create_dynamic
-except ImportError:
- # Platform doesn't support dynamic loading.
- create_dynamic = None
-
-from importlib._bootstrap import _ERR_MSG, _exec, _load, _builtin_from_name
-from importlib._bootstrap_external import SourcelessFileLoader
-
-from importlib import machinery
-from importlib import util
-import importlib
-import os
-import sys
-import tokenize
-import types
-import warnings
-
-warnings.warn("the imp module is deprecated in favour of importlib and slated "
- "for removal in Python 3.12; "
- "see the module's documentation for alternative uses",
- DeprecationWarning, stacklevel=2)
-
-# DEPRECATED
-SEARCH_ERROR = 0
-PY_SOURCE = 1
-PY_COMPILED = 2
-C_EXTENSION = 3
-PY_RESOURCE = 4
-PKG_DIRECTORY = 5
-C_BUILTIN = 6
-PY_FROZEN = 7
-PY_CODERESOURCE = 8
-IMP_HOOK = 9
-
-
-def new_module(name):
- """**DEPRECATED**
-
- Create a new module.
-
- The module is not entered into sys.modules.
-
- """
- return types.ModuleType(name)
-
-
-def get_magic():
- """**DEPRECATED**
-
- Return the magic number for .pyc files.
- """
- return util.MAGIC_NUMBER
-
-
-def get_tag():
- """Return the magic tag for .pyc files."""
- return sys.implementation.cache_tag
-
-
-def cache_from_source(path, debug_override=None):
- """**DEPRECATED**
-
- Given the path to a .py file, return the path to its .pyc file.
-
- The .py file does not need to exist; this simply returns the path to the
- .pyc file calculated as if the .py file were imported.
-
- If debug_override is not None, then it must be a boolean and is used in
- place of sys.flags.optimize.
-
- If sys.implementation.cache_tag is None then NotImplementedError is raised.
-
- """
- with warnings.catch_warnings():
- warnings.simplefilter('ignore')
- return util.cache_from_source(path, debug_override)
-
-
-def source_from_cache(path):
- """**DEPRECATED**
-
- Given the path to a .pyc. file, return the path to its .py file.
-
- The .pyc file does not need to exist; this simply returns the path to
- the .py file calculated to correspond to the .pyc file. If path does
- not conform to PEP 3147 format, ValueError will be raised. If
- sys.implementation.cache_tag is None then NotImplementedError is raised.
-
- """
- return util.source_from_cache(path)
-
-
-def get_suffixes():
- """**DEPRECATED**"""
- extensions = [(s, 'rb', C_EXTENSION) for s in machinery.EXTENSION_SUFFIXES]
- source = [(s, 'r', PY_SOURCE) for s in machinery.SOURCE_SUFFIXES]
- bytecode = [(s, 'rb', PY_COMPILED) for s in machinery.BYTECODE_SUFFIXES]
-
- return extensions + source + bytecode
-
-
-class NullImporter:
-
- """**DEPRECATED**
-
- Null import object.
-
- """
-
- def __init__(self, path):
- if path == '':
- raise ImportError('empty pathname', path='')
- elif os.path.isdir(path):
- raise ImportError('existing directory', path=path)
-
- def find_module(self, fullname):
- """Always returns None."""
- return None
-
-
-class _HackedGetData:
-
- """Compatibility support for 'file' arguments of various load_*()
- functions."""
-
- def __init__(self, fullname, path, file=None):
- super().__init__(fullname, path)
- self.file = file
-
- def get_data(self, path):
- """Gross hack to contort loader to deal w/ load_*()'s bad API."""
- if self.file and path == self.path:
- # The contract of get_data() requires us to return bytes. Reopen the
- # file in binary mode if needed.
- if not self.file.closed:
- file = self.file
- if 'b' not in file.mode:
- file.close()
- if self.file.closed:
- self.file = file = open(self.path, 'rb')
-
- with file:
- return file.read()
- else:
- return super().get_data(path)
-
-
-class _LoadSourceCompatibility(_HackedGetData, machinery.SourceFileLoader):
-
- """Compatibility support for implementing load_source()."""
-
-
-def load_source(name, pathname, file=None):
- loader = _LoadSourceCompatibility(name, pathname, file)
- spec = util.spec_from_file_location(name, pathname, loader=loader)
- if name in sys.modules:
- module = _exec(spec, sys.modules[name])
- else:
- module = _load(spec)
- # To allow reloading to potentially work, use a non-hacked loader which
- # won't rely on a now-closed file object.
- module.__loader__ = machinery.SourceFileLoader(name, pathname)
- module.__spec__.loader = module.__loader__
- return module
-
-
-class _LoadCompiledCompatibility(_HackedGetData, SourcelessFileLoader):
-
- """Compatibility support for implementing load_compiled()."""
-
-
-def load_compiled(name, pathname, file=None):
- """**DEPRECATED**"""
- loader = _LoadCompiledCompatibility(name, pathname, file)
- spec = util.spec_from_file_location(name, pathname, loader=loader)
- if name in sys.modules:
- module = _exec(spec, sys.modules[name])
- else:
- module = _load(spec)
- # To allow reloading to potentially work, use a non-hacked loader which
- # won't rely on a now-closed file object.
- module.__loader__ = SourcelessFileLoader(name, pathname)
- module.__spec__.loader = module.__loader__
- return module
-
-
-def load_package(name, path):
- """**DEPRECATED**"""
- if os.path.isdir(path):
- extensions = (machinery.SOURCE_SUFFIXES[:] +
- machinery.BYTECODE_SUFFIXES[:])
- for extension in extensions:
- init_path = os.path.join(path, '__init__' + extension)
- if os.path.exists(init_path):
- path = init_path
- break
- else:
- raise ValueError('{!r} is not a package'.format(path))
- spec = util.spec_from_file_location(name, path,
- submodule_search_locations=[])
- if name in sys.modules:
- return _exec(spec, sys.modules[name])
- else:
- return _load(spec)
-
-
-def load_module(name, file, filename, details):
- """**DEPRECATED**
-
- Load a module, given information returned by find_module().
-
- The module name must include the full package name, if any.
-
- """
- suffix, mode, type_ = details
- if mode and (not mode.startswith('r') or '+' in mode):
- raise ValueError('invalid file open mode {!r}'.format(mode))
- elif file is None and type_ in {PY_SOURCE, PY_COMPILED}:
- msg = 'file object required for import (type code {})'.format(type_)
- raise ValueError(msg)
- elif type_ == PY_SOURCE:
- return load_source(name, filename, file)
- elif type_ == PY_COMPILED:
- return load_compiled(name, filename, file)
- elif type_ == C_EXTENSION and load_dynamic is not None:
- if file is None:
- with open(filename, 'rb') as opened_file:
- return load_dynamic(name, filename, opened_file)
- else:
- return load_dynamic(name, filename, file)
- elif type_ == PKG_DIRECTORY:
- return load_package(name, filename)
- elif type_ == C_BUILTIN:
- return init_builtin(name)
- elif type_ == PY_FROZEN:
- return init_frozen(name)
- else:
- msg = "Don't know how to import {} (type code {})".format(name, type_)
- raise ImportError(msg, name=name)
-
-
-def find_module(name, path=None):
- """**DEPRECATED**
-
- Search for a module.
-
- If path is omitted or None, search for a built-in, frozen or special
- module and continue search in sys.path. The module name cannot
- contain '.'; to search for a submodule of a package, pass the
- submodule name and the package's __path__.
-
- """
- if not isinstance(name, str):
- raise TypeError("'name' must be a str, not {}".format(type(name)))
- elif not isinstance(path, (type(None), list)):
- # Backwards-compatibility
- raise RuntimeError("'path' must be None or a list, "
- "not {}".format(type(path)))
-
- if path is None:
- if is_builtin(name):
- return None, None, ('', '', C_BUILTIN)
- elif is_frozen(name):
- return None, None, ('', '', PY_FROZEN)
- else:
- path = sys.path
-
- for entry in path:
- package_directory = os.path.join(entry, name)
- for suffix in ['.py', machinery.BYTECODE_SUFFIXES[0]]:
- package_file_name = '__init__' + suffix
- file_path = os.path.join(package_directory, package_file_name)
- if os.path.isfile(file_path):
- return None, package_directory, ('', '', PKG_DIRECTORY)
- for suffix, mode, type_ in get_suffixes():
- file_name = name + suffix
- file_path = os.path.join(entry, file_name)
- if os.path.isfile(file_path):
- break
- else:
- continue
- break # Break out of outer loop when breaking out of inner loop.
- else:
- raise ImportError(_ERR_MSG.format(name), name=name)
-
- encoding = None
- if 'b' not in mode:
- with open(file_path, 'rb') as file:
- encoding = tokenize.detect_encoding(file.readline)[0]
- file = open(file_path, mode, encoding=encoding)
- return file, file_path, (suffix, mode, type_)
-
-
-def reload(module):
- """**DEPRECATED**
-
- Reload the module and return it.
-
- The module must have been successfully imported before.
-
- """
- return importlib.reload(module)
-
-
-def init_builtin(name):
- """**DEPRECATED**
-
- Load and return a built-in module by name, or None is such module doesn't
- exist
- """
- try:
- return _builtin_from_name(name)
- except ImportError:
- return None
-
-
-if create_dynamic:
- def load_dynamic(name, path, file=None):
- """**DEPRECATED**
-
- Load an extension module.
- """
- import importlib.machinery
- loader = importlib.machinery.ExtensionFileLoader(name, path)
-
- # Issue #24748: Skip the sys.modules check in _load_module_shim;
- # always load new extension
- spec = importlib.util.spec_from_file_location(
- name, path, loader=loader)
- return _load(spec)
-
-else:
- load_dynamic = None
diff --git a/Lib/importlib/_bootstrap.py b/Lib/importlib/_bootstrap.py
index 22fa2469964ab3..e4fcaa61e6de29 100644
--- a/Lib/importlib/_bootstrap.py
+++ b/Lib/importlib/_bootstrap.py
@@ -1260,7 +1260,7 @@ def _find_and_load_unlocked(name, import_):
try:
path = parent_module.__path__
except AttributeError:
- msg = f'{_ERR_MSG_PREFIX} {name!r}; {parent!r} is not a package'
+ msg = f'{_ERR_MSG_PREFIX}{name!r}; {parent!r} is not a package'
raise ModuleNotFoundError(msg, name=name) from None
parent_spec = parent_module.__spec__
child = name.rpartition('.')[2]
diff --git a/Lib/importlib/_bootstrap_external.py b/Lib/importlib/_bootstrap_external.py
index c0c757d94d8781..cb227373ca2fd4 100644
--- a/Lib/importlib/_bootstrap_external.py
+++ b/Lib/importlib/_bootstrap_external.py
@@ -439,7 +439,9 @@ def _write_atomic(path, data, mode=0o666):
# Python 3.12a7 3523 (Convert COMPARE_AND_BRANCH back to COMPARE_OP)
# Python 3.12a7 3524 (Shrink the BINARY_SUBSCR caches)
# Python 3.12b1 3525 (Shrink the CALL caches)
-# Python 3.12a7 3526 (Add instrumentation support)
+# Python 3.12b1 3526 (Add instrumentation support)
+# Python 3.12b1 3527 (Add LOAD_SUPER_ATTR)
+# Python 3.12b1 3528 (Add LOAD_SUPER_ATTR_METHOD specialization)
# Python 3.13 will start with 3550
@@ -456,7 +458,7 @@ def _write_atomic(path, data, mode=0o666):
# Whenever MAGIC_NUMBER is changed, the ranges in the magic_values array
# in PC/launcher.c must also be updated.
-MAGIC_NUMBER = (3526).to_bytes(2, 'little') + b'\r\n'
+MAGIC_NUMBER = (3528).to_bytes(2, 'little') + b'\r\n'
_RAW_MAGIC_NUMBER = int.from_bytes(MAGIC_NUMBER, 'little') # For import.c
diff --git a/Lib/importlib/metadata/__init__.py b/Lib/importlib/metadata/__init__.py
index 40ab1a1aaac328..82e0ce1b281c54 100644
--- a/Lib/importlib/metadata/__init__.py
+++ b/Lib/importlib/metadata/__init__.py
@@ -12,7 +12,9 @@
import functools
import itertools
import posixpath
+import contextlib
import collections
+import inspect
from . import _adapters, _meta
from ._collections import FreezableDefaultDict, Pair
@@ -24,7 +26,7 @@
from importlib import import_module
from importlib.abc import MetaPathFinder
from itertools import starmap
-from typing import List, Mapping, Optional
+from typing import List, Mapping, Optional, cast
__all__ = [
@@ -341,11 +343,30 @@ def __repr__(self):
return f'<FileHash mode: {self.mode} value: {self.value}>'
-class Distribution:
+class DeprecatedNonAbstract:
+ def __new__(cls, *args, **kwargs):
+ all_names = {
+ name for subclass in inspect.getmro(cls) for name in vars(subclass)
+ }
+ abstract = {
+ name
+ for name in all_names
+ if getattr(getattr(cls, name), '__isabstractmethod__', False)
+ }
+ if abstract:
+ warnings.warn(
+ f"Unimplemented abstract methods {abstract}",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ return super().__new__(cls)
+
+
+class Distribution(DeprecatedNonAbstract):
"""A Python distribution package."""
@abc.abstractmethod
- def read_text(self, filename):
+ def read_text(self, filename) -> Optional[str]:
"""Attempt to load metadata file given by the name.
:param filename: The name of the file in the distribution info.
@@ -419,7 +440,7 @@ def metadata(self) -> _meta.PackageMetadata:
The returned object will have keys that name the various bits of
metadata. See PEP 566 for details.
"""
- text = (
+ opt_text = (
self.read_text('METADATA')
or self.read_text('PKG-INFO')
# This last clause is here to support old egg-info files. Its
@@ -427,6 +448,7 @@ def metadata(self) -> _meta.PackageMetadata:
# (which points to the egg-info file) attribute unchanged.
or self.read_text('')
)
+ text = cast(str, opt_text)
return _adapters.Message(email.message_from_string(text))
@property
@@ -455,8 +477,8 @@ def files(self):
:return: List of PackagePath for this distribution or None
Result is `None` if the metadata file that enumerates files
- (i.e. RECORD for dist-info or SOURCES.txt for egg-info) is
- missing.
+ (i.e. RECORD for dist-info, or installed-files.txt or
+ SOURCES.txt for egg-info) is missing.
Result may be empty if the metadata exists but is empty.
"""
@@ -469,9 +491,19 @@ def make_file(name, hash=None, size_str=None):
@pass_none
def make_files(lines):
- return list(starmap(make_file, csv.reader(lines)))
+ return starmap(make_file, csv.reader(lines))
- return make_files(self._read_files_distinfo() or self._read_files_egginfo())
+ @pass_none
+ def skip_missing_files(package_paths):
+ return list(filter(lambda path: path.locate().exists(), package_paths))
+
+ return skip_missing_files(
+ make_files(
+ self._read_files_distinfo()
+ or self._read_files_egginfo_installed()
+ or self._read_files_egginfo_sources()
+ )
+ )
def _read_files_distinfo(self):
"""
@@ -480,10 +512,45 @@ def _read_files_distinfo(self):
text = self.read_text('RECORD')
return text and text.splitlines()
- def _read_files_egginfo(self):
+ def _read_files_egginfo_installed(self):
+ """
+ Read installed-files.txt and return lines in a similar
+ CSV-parsable format as RECORD: each file must be placed
+ relative to the site-packages directory and must also be
+ quoted (since file names can contain literal commas).
+
+ This file is written when the package is installed by pip,
+ but it might not be written for other installation methods.
+ Assume the file is accurate if it exists.
"""
- SOURCES.txt might contain literal commas, so wrap each line
- in quotes.
+ text = self.read_text('installed-files.txt')
+ # Prepend the .egg-info/ subdir to the lines in this file.
+ # But this subdir is only available from PathDistribution's
+ # self._path.
+ subdir = getattr(self, '_path', None)
+ if not text or not subdir:
+ return
+
+ paths = (
+ (subdir / name)
+ .resolve()
+ .relative_to(self.locate_file('').resolve())
+ .as_posix()
+ for name in text.splitlines()
+ )
+ return map('"{}"'.format, paths)
+
+ def _read_files_egginfo_sources(self):
+ """
+ Read SOURCES.txt and return lines in a similar CSV-parsable
+ format as RECORD: each file name must be quoted (since it
+ might contain literal commas).
+
+ Note that SOURCES.txt is not a reliable source for what
+ files are installed by a package. This file is generated
+ for a source archive, and the files that are present
+ there (e.g. setup.py) may not correctly reflect the files
+ that are present after the package has been installed.
"""
text = self.read_text('SOURCES.txt')
return text and map('"{}"'.format, text.splitlines())
@@ -886,8 +953,13 @@ def _top_level_declared(dist):
def _top_level_inferred(dist):
- return {
- f.parts[0] if len(f.parts) > 1 else f.with_suffix('').name
+ opt_names = {
+ f.parts[0] if len(f.parts) > 1 else inspect.getmodulename(f)
for f in always_iterable(dist.files)
- if f.suffix == ".py"
}
+
+ @pass_none
+ def importable_name(name):
+ return '.' not in name
+
+ return filter(importable_name, opt_names)
diff --git a/Lib/importlib/metadata/_adapters.py b/Lib/importlib/metadata/_adapters.py
index aa460d3eda50fb..6aed69a30857e4 100644
--- a/Lib/importlib/metadata/_adapters.py
+++ b/Lib/importlib/metadata/_adapters.py
@@ -1,3 +1,5 @@
+import functools
+import warnings
import re
import textwrap
import email.message
@@ -5,6 +7,15 @@
from ._text import FoldedCase
+# Do not remove prior to 2024-01-01 or Python 3.14
+_warn = functools.partial(
+ warnings.warn,
+ "Implicit None on return values is deprecated and will raise KeyErrors.",
+ DeprecationWarning,
+ stacklevel=2,
+)
+
+
class Message(email.message.Message):
multiple_use_keys = set(
map(
@@ -39,6 +50,16 @@ def __init__(self, *args, **kwargs):
def __iter__(self):
return super().__iter__()
+ def __getitem__(self, item):
+ """
+ Warn users that a ``KeyError`` can be expected when a
+ missing key is supplied. Ref python/importlib_metadata#371.
+ """
+ res = super().__getitem__(item)
+ if res is None:
+ _warn()
+ return res
+
def _repair_headers(self):
def redent(value):
"Correct for RFC822 indentation"
diff --git a/Lib/importlib/metadata/_meta.py b/Lib/importlib/metadata/_meta.py
index d5c0576194ece2..c9a7ef906a8a8c 100644
--- a/Lib/importlib/metadata/_meta.py
+++ b/Lib/importlib/metadata/_meta.py
@@ -1,4 +1,5 @@
-from typing import Any, Dict, Iterator, List, Protocol, TypeVar, Union
+from typing import Protocol
+from typing import Any, Dict, Iterator, List, Optional, TypeVar, Union, overload
_T = TypeVar("_T")
@@ -17,7 +18,21 @@ def __getitem__(self, key: str) -> str:
def __iter__(self) -> Iterator[str]:
... # pragma: no cover
- def get_all(self, name: str, failobj: _T = ...) -> Union[List[Any], _T]:
+ @overload
+ def get(self, name: str, failobj: None = None) -> Optional[str]:
+ ... # pragma: no cover
+
+ @overload
+ def get(self, name: str, failobj: _T) -> Union[str, _T]:
+ ... # pragma: no cover
+
+ # overload per python/importlib_metadata#435
+ @overload
+ def get_all(self, name: str, failobj: None = None) -> Optional[List[Any]]:
+ ... # pragma: no cover
+
+ @overload
+ def get_all(self, name: str, failobj: _T) -> Union[List[Any], _T]:
"""
Return all values associated with a possibly multi-valued key.
"""
@@ -29,18 +44,19 @@ def json(self) -> Dict[str, Union[str, List[str]]]:
"""
-class SimplePath(Protocol):
+class SimplePath(Protocol[_T]):
"""
A minimal subset of pathlib.Path required by PathDistribution.
"""
- def joinpath(self) -> 'SimplePath':
+ def joinpath(self) -> _T:
... # pragma: no cover
- def __truediv__(self) -> 'SimplePath':
+ def __truediv__(self, other: Union[str, _T]) -> _T:
... # pragma: no cover
- def parent(self) -> 'SimplePath':
+ @property
+ def parent(self) -> _T:
... # pragma: no cover
def read_text(self) -> str:
diff --git a/Lib/inspect.py b/Lib/inspect.py
index 4242b40c2a08df..6d1d7b766cb3bb 100644
--- a/Lib/inspect.py
+++ b/Lib/inspect.py
@@ -3006,7 +3006,7 @@ def __init__(self, parameters=None, *, return_annotation=_empty,
if __validate_parameters__:
params = OrderedDict()
top_kind = _POSITIONAL_ONLY
- kind_defaults = False
+ seen_default = False
for param in parameters:
kind = param.kind
@@ -3021,21 +3021,19 @@ def __init__(self, parameters=None, *, return_annotation=_empty,
kind.description)
raise ValueError(msg)
elif kind > top_kind:
- kind_defaults = False
top_kind = kind
if kind in (_POSITIONAL_ONLY, _POSITIONAL_OR_KEYWORD):
if param.default is _empty:
- if kind_defaults:
+ if seen_default:
# No default for this parameter, but the
- # previous parameter of the same kind had
- # a default
+ # previous parameter had a default
msg = 'non-default argument follows default ' \
'argument'
raise ValueError(msg)
else:
# There is a default for this parameter.
- kind_defaults = True
+ seen_default = True
if name in params:
msg = 'duplicate parameter name: {!r}'.format(name)
diff --git a/Lib/ipaddress.py b/Lib/ipaddress.py
index 1cb71d8032e173..af1d5c4800cce8 100644
--- a/Lib/ipaddress.py
+++ b/Lib/ipaddress.py
@@ -1821,9 +1821,6 @@ def _string_from_ip_int(cls, ip_int=None):
def _explode_shorthand_ip_string(self):
"""Expand a shortened IPv6 address.
- Args:
- ip_str: A string, the IPv6 address.
-
Returns:
A string, the expanded IPv6 address.
diff --git a/Lib/locale.py b/Lib/locale.py
index 4127d917465936..e94f0d1acbaa7d 100644
--- a/Lib/locale.py
+++ b/Lib/locale.py
@@ -962,7 +962,7 @@ def getpreferredencoding(do_setlocale=True):
'c.ascii': 'C',
'c.en': 'C',
'c.iso88591': 'en_US.ISO8859-1',
- 'c.utf8': 'en_US.UTF-8',
+ 'c.utf8': 'C.UTF-8',
'c_c': 'C',
'c_c.c': 'C',
'ca': 'ca_ES.ISO8859-1',
diff --git a/Lib/opcode.py b/Lib/opcode.py
index dd739e5dd3f6f8..aef8407948df15 100644
--- a/Lib/opcode.py
+++ b/Lib/opcode.py
@@ -196,7 +196,7 @@ def pseudo_op(name, op, real_ops):
def_op('DELETE_DEREF', 139)
hasfree.append(139)
jrel_op('JUMP_BACKWARD', 140) # Number of words to skip (backwards)
-
+name_op('LOAD_SUPER_ATTR', 141)
def_op('CALL_FUNCTION_EX', 142) # Flags
def_op('EXTENDED_ARG', 144)
@@ -264,6 +264,9 @@ def pseudo_op(name, op, real_ops):
pseudo_op('JUMP_NO_INTERRUPT', 261, ['JUMP_FORWARD', 'JUMP_BACKWARD_NO_INTERRUPT'])
pseudo_op('LOAD_METHOD', 262, ['LOAD_ATTR'])
+pseudo_op('LOAD_SUPER_METHOD', 263, ['LOAD_SUPER_ATTR'])
+pseudo_op('LOAD_ZERO_SUPER_METHOD', 264, ['LOAD_SUPER_ATTR'])
+pseudo_op('LOAD_ZERO_SUPER_ATTR', 265, ['LOAD_SUPER_ATTR'])
MAX_PSEUDO_OPCODE = MIN_PSEUDO_OPCODE + len(_pseudo_ops) - 1
@@ -350,6 +353,9 @@ def pseudo_op(name, op, real_ops):
"FOR_ITER_RANGE",
"FOR_ITER_GEN",
],
+ "LOAD_SUPER_ATTR": [
+ "LOAD_SUPER_ATTR_METHOD",
+ ],
"LOAD_ATTR": [
# These potentially push [NULL, bound method] onto the stack.
"LOAD_ATTR_CLASS",
@@ -423,6 +429,12 @@ def pseudo_op(name, op, real_ops):
"FOR_ITER": {
"counter": 1,
},
+ "LOAD_SUPER_ATTR": {
+ "counter": 1,
+ "class_version": 2,
+ "self_type_version": 2,
+ "method": 4,
+ },
"LOAD_ATTR": {
"counter": 1,
"version": 2,
diff --git a/Lib/pathlib.py b/Lib/pathlib.py
index 8ef834e68d2d35..8eb08949fa9b43 100644
--- a/Lib/pathlib.py
+++ b/Lib/pathlib.py
@@ -331,9 +331,14 @@ def _parse_path(cls, path):
if altsep:
path = path.replace(altsep, sep)
drv, root, rel = cls._flavour.splitroot(path)
- if drv.startswith(sep):
- # pathlib assumes that UNC paths always have a root.
- root = sep
+ if not root and drv.startswith(sep) and not drv.endswith(sep):
+ drv_parts = drv.split(sep)
+ if len(drv_parts) == 4 and drv_parts[2] not in '?.':
+ # e.g. //server/share
+ root = sep
+ elif len(drv_parts) == 6:
+ # e.g. //?/unc/server/share
+ root = sep
parsed = [sys.intern(str(x)) for x in rel.split(sep) if x and x != '.']
return drv, root, parsed
diff --git a/Lib/pdb.py b/Lib/pdb.py
index a3553b345a8dd3..645cbf518e58e3 100755
--- a/Lib/pdb.py
+++ b/Lib/pdb.py
@@ -154,7 +154,7 @@ def namespace(self):
@property
def code(self):
- with io.open(self) as fp:
+ with io.open_code(self) as fp:
return f"exec(compile({fp.read()!r}, {self!r}, 'exec'))"
diff --git a/Lib/pkgutil.py b/Lib/pkgutil.py
index bdebfd2fc8ac32..fb977eaaa05767 100644
--- a/Lib/pkgutil.py
+++ b/Lib/pkgutil.py
@@ -14,7 +14,7 @@
__all__ = [
'get_importer', 'iter_importers', 'get_loader', 'find_loader',
'walk_packages', 'iter_modules', 'get_data',
- 'ImpImporter', 'ImpLoader', 'read_code', 'extend_path',
+ 'read_code', 'extend_path',
'ModuleInfo',
]
@@ -185,187 +185,6 @@ def _iter_file_finder_modules(importer, prefix=''):
importlib.machinery.FileFinder, _iter_file_finder_modules)
-def _import_imp():
- global imp
- with warnings.catch_warnings():
- warnings.simplefilter('ignore', DeprecationWarning)
- imp = importlib.import_module('imp')
-
-class ImpImporter:
- """PEP 302 Finder that wraps Python's "classic" import algorithm
-
- ImpImporter(dirname) produces a PEP 302 finder that searches that
- directory. ImpImporter(None) produces a PEP 302 finder that searches
- the current sys.path, plus any modules that are frozen or built-in.
-
- Note that ImpImporter does not currently support being used by placement
- on sys.meta_path.
- """
-
- def __init__(self, path=None):
- global imp
- warnings.warn("This emulation is deprecated and slated for removal "
- "in Python 3.12; use 'importlib' instead",
- DeprecationWarning)
- _import_imp()
- self.path = path
-
- def find_module(self, fullname, path=None):
- # Note: we ignore 'path' argument since it is only used via meta_path
- subname = fullname.split(".")[-1]
- if subname != fullname and self.path is None:
- return None
- if self.path is None:
- path = None
- else:
- path = [os.path.realpath(self.path)]
- try:
- file, filename, etc = imp.find_module(subname, path)
- except ImportError:
- return None
- return ImpLoader(fullname, file, filename, etc)
-
- def iter_modules(self, prefix=''):
- if self.path is None or not os.path.isdir(self.path):
- return
-
- yielded = {}
- import inspect
- try:
- filenames = os.listdir(self.path)
- except OSError:
- # ignore unreadable directories like import does
- filenames = []
- filenames.sort() # handle packages before same-named modules
-
- for fn in filenames:
- modname = inspect.getmodulename(fn)
- if modname=='__init__' or modname in yielded:
- continue
-
- path = os.path.join(self.path, fn)
- ispkg = False
-
- if not modname and os.path.isdir(path) and '.' not in fn:
- modname = fn
- try:
- dircontents = os.listdir(path)
- except OSError:
- # ignore unreadable directories like import does
- dircontents = []
- for fn in dircontents:
- subname = inspect.getmodulename(fn)
- if subname=='__init__':
- ispkg = True
- break
- else:
- continue # not a package
-
- if modname and '.' not in modname:
- yielded[modname] = 1
- yield prefix + modname, ispkg
-
-
-class ImpLoader:
- """PEP 302 Loader that wraps Python's "classic" import algorithm
- """
- code = source = None
-
- def __init__(self, fullname, file, filename, etc):
- warnings.warn("This emulation is deprecated and slated for removal in "
- "Python 3.12; use 'importlib' instead",
- DeprecationWarning)
- _import_imp()
- self.file = file
- self.filename = filename
- self.fullname = fullname
- self.etc = etc
-
- def load_module(self, fullname):
- self._reopen()
- try:
- mod = imp.load_module(fullname, self.file, self.filename, self.etc)
- finally:
- if self.file:
- self.file.close()
- # Note: we don't set __loader__ because we want the module to look
- # normal; i.e. this is just a wrapper for standard import machinery
- return mod
-
- def get_data(self, pathname):
- with open(pathname, "rb") as file:
- return file.read()
-
- def _reopen(self):
- if self.file and self.file.closed:
- mod_type = self.etc[2]
- if mod_type==imp.PY_SOURCE:
- self.file = open(self.filename, 'r')
- elif mod_type in (imp.PY_COMPILED, imp.C_EXTENSION):
- self.file = open(self.filename, 'rb')
-
- def _fix_name(self, fullname):
- if fullname is None:
- fullname = self.fullname
- elif fullname != self.fullname:
- raise ImportError("Loader for module %s cannot handle "
- "module %s" % (self.fullname, fullname))
- return fullname
-
- def is_package(self, fullname):
- fullname = self._fix_name(fullname)
- return self.etc[2]==imp.PKG_DIRECTORY
-
- def get_code(self, fullname=None):
- fullname = self._fix_name(fullname)
- if self.code is None:
- mod_type = self.etc[2]
- if mod_type==imp.PY_SOURCE:
- source = self.get_source(fullname)
- self.code = compile(source, self.filename, 'exec')
- elif mod_type==imp.PY_COMPILED:
- self._reopen()
- try:
- self.code = read_code(self.file)
- finally:
- self.file.close()
- elif mod_type==imp.PKG_DIRECTORY:
- self.code = self._get_delegate().get_code()
- return self.code
-
- def get_source(self, fullname=None):
- fullname = self._fix_name(fullname)
- if self.source is None:
- mod_type = self.etc[2]
- if mod_type==imp.PY_SOURCE:
- self._reopen()
- try:
- self.source = self.file.read()
- finally:
- self.file.close()
- elif mod_type==imp.PY_COMPILED:
- if os.path.exists(self.filename[:-1]):
- with open(self.filename[:-1], 'r') as f:
- self.source = f.read()
- elif mod_type==imp.PKG_DIRECTORY:
- self.source = self._get_delegate().get_source()
- return self.source
-
- def _get_delegate(self):
- finder = ImpImporter(self.filename)
- spec = _get_spec(finder, '__init__')
- return spec.loader
-
- def get_filename(self, fullname=None):
- fullname = self._fix_name(fullname)
- mod_type = self.etc[2]
- if mod_type==imp.PKG_DIRECTORY:
- return self._get_delegate().get_filename()
- elif mod_type in (imp.PY_SOURCE, imp.PY_COMPILED, imp.C_EXTENSION):
- return self.filename
- return None
-
-
try:
import zipimport
from zipimport import zipimporter
@@ -511,10 +330,10 @@ def extend_path(path, name):
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
- This will add to the package's __path__ all subdirectories of
- directories on sys.path named after the package. This is useful
- if one wants to distribute different parts of a single logical
- package as multiple directories.
+ For each directory on sys.path that has a subdirectory that
+ matches the package name, add the subdirectory to the package's
+ __path__. This is useful if one wants to distribute different
+ parts of a single logical package as multiple directories.
It also looks for *.pkg files beginning where * matches the name
argument. This feature is similar to *.pth files (see site.py),
diff --git a/Lib/profile.py b/Lib/profile.py
index 453e56285c510c..4b82523b03d64b 100755
--- a/Lib/profile.py
+++ b/Lib/profile.py
@@ -25,6 +25,7 @@
import importlib.machinery
+import io
import sys
import time
import marshal
@@ -588,7 +589,7 @@ def main():
else:
progname = args[0]
sys.path.insert(0, os.path.dirname(progname))
- with open(progname, 'rb') as fp:
+ with io.open_code(progname) as fp:
code = compile(fp.read(), progname, 'exec')
spec = importlib.machinery.ModuleSpec(name='__main__', loader=None,
origin=progname)
diff --git a/Lib/pydoc.py b/Lib/pydoc.py
index 78d8fd5357f72a..1c3443fa8469f7 100755
--- a/Lib/pydoc.py
+++ b/Lib/pydoc.py
@@ -512,7 +512,7 @@ def getdocloc(self, object, basedir=sysconfig.get_path('stdlib')):
basedir = os.path.normcase(basedir)
if (isinstance(object, type(os)) and
- (object.__name__ in ('errno', 'exceptions', 'gc', 'imp',
+ (object.__name__ in ('errno', 'exceptions', 'gc',
'marshal', 'posix', 'signal', 'sys',
'_thread', 'zipimport') or
(file.startswith(basedir) and
diff --git a/Lib/runpy.py b/Lib/runpy.py
index 54fc136d4074f2..42f896c9cd5094 100644
--- a/Lib/runpy.py
+++ b/Lib/runpy.py
@@ -279,12 +279,7 @@ def run_path(path_name, init_globals=None, run_name=None):
pkg_name = run_name.rpartition(".")[0]
from pkgutil import get_importer
importer = get_importer(path_name)
- # Trying to avoid importing imp so as to not consume the deprecation warning.
- is_NullImporter = False
- if type(importer).__module__ == 'imp':
- if type(importer).__name__ == 'NullImporter':
- is_NullImporter = True
- if isinstance(importer, type(None)) or is_NullImporter:
+ if isinstance(importer, type(None)):
# Not a valid sys.path entry, so run the code directly
# execfile() doesn't help as we want to allow compiled files
code, fname = _get_code_from_file(run_name, path_name)
diff --git a/Lib/shutil.py b/Lib/shutil.py
index 95b6c5299cab29..7d1a3d00011f37 100644
--- a/Lib/shutil.py
+++ b/Lib/shutil.py
@@ -332,7 +332,7 @@ def _copyxattr(src, dst, *, follow_symlinks=True):
os.setxattr(dst, name, value, follow_symlinks=follow_symlinks)
except OSError as e:
if e.errno not in (errno.EPERM, errno.ENOTSUP, errno.ENODATA,
- errno.EINVAL):
+ errno.EINVAL, errno.EACCES):
raise
else:
def _copyxattr(*args, **kwargs):
@@ -1245,7 +1245,7 @@ def _unpack_zipfile(filename, extract_dir):
finally:
zip.close()
-def _unpack_tarfile(filename, extract_dir):
+def _unpack_tarfile(filename, extract_dir, *, filter=None):
"""Unpack tar/tar.gz/tar.bz2/tar.xz `filename` to `extract_dir`
"""
import tarfile # late import for breaking circular dependency
@@ -1255,7 +1255,7 @@ def _unpack_tarfile(filename, extract_dir):
raise ReadError(
"%s is not a compressed or uncompressed tar file" % filename)
try:
- tarobj.extractall(extract_dir)
+ tarobj.extractall(extract_dir, filter=filter)
finally:
tarobj.close()
@@ -1288,7 +1288,7 @@ def _find_unpack_format(filename):
return name
return None
-def unpack_archive(filename, extract_dir=None, format=None):
+def unpack_archive(filename, extract_dir=None, format=None, *, filter=None):
"""Unpack an archive.
`filename` is the name of the archive.
@@ -1302,6 +1302,9 @@ def unpack_archive(filename, extract_dir=None, format=None):
was registered for that extension.
In case none is found, a ValueError is raised.
+
+ If `filter` is given, it is passed to the underlying
+ extraction function.
"""
sys.audit("shutil.unpack_archive", filename, extract_dir, format)
@@ -1311,6 +1314,10 @@ def unpack_archive(filename, extract_dir=None, format=None):
extract_dir = os.fspath(extract_dir)
filename = os.fspath(filename)
+ if filter is None:
+ filter_kwargs = {}
+ else:
+ filter_kwargs = {'filter': filter}
if format is not None:
try:
format_info = _UNPACK_FORMATS[format]
@@ -1318,7 +1325,7 @@ def unpack_archive(filename, extract_dir=None, format=None):
raise ValueError("Unknown unpack format '{0}'".format(format)) from None
func = format_info[1]
- func(filename, extract_dir, **dict(format_info[2]))
+ func(filename, extract_dir, **dict(format_info[2]), **filter_kwargs)
else:
# we need to look at the registered unpackers supported extensions
format = _find_unpack_format(filename)
@@ -1326,7 +1333,7 @@ def unpack_archive(filename, extract_dir=None, format=None):
raise ReadError("Unknown archive format '{0}'".format(filename))
func = _UNPACK_FORMATS[format][1]
- kwargs = dict(_UNPACK_FORMATS[format][2])
+ kwargs = dict(_UNPACK_FORMATS[format][2]) | filter_kwargs
func(filename, extract_dir, **kwargs)
diff --git a/Lib/socketserver.py b/Lib/socketserver.py
index 842d526b011911..cd028ef1c63b85 100644
--- a/Lib/socketserver.py
+++ b/Lib/socketserver.py
@@ -141,6 +141,8 @@ class will essentially render the service "deaf" while one request is
__all__.extend(["UnixStreamServer","UnixDatagramServer",
"ThreadingUnixStreamServer",
"ThreadingUnixDatagramServer"])
+ if hasattr(os, "fork"):
+ __all__.extend(["ForkingUnixStreamServer", "ForkingUnixDatagramServer"])
# poll/select have the advantage of not requiring any extra file descriptor,
# contrarily to epoll/kqueue (also, they require a single syscall).
@@ -727,6 +729,11 @@ class ThreadingUnixStreamServer(ThreadingMixIn, UnixStreamServer): pass
class ThreadingUnixDatagramServer(ThreadingMixIn, UnixDatagramServer): pass
+ if hasattr(os, "fork"):
+ class ForkingUnixStreamServer(ForkingMixIn, UnixStreamServer): pass
+
+ class ForkingUnixDatagramServer(ForkingMixIn, UnixDatagramServer): pass
+
class BaseRequestHandler:
"""Base class for request handler classes.
diff --git a/Lib/sqlite3/__main__.py b/Lib/sqlite3/__main__.py
index f8a5cca24e56af..3228dbc09d502a 100644
--- a/Lib/sqlite3/__main__.py
+++ b/Lib/sqlite3/__main__.py
@@ -94,12 +94,16 @@ def main():
db_name = repr(args.filename)
# Prepare REPL banner and prompts.
+ if sys.platform == "win32" and "idlelib.run" not in sys.modules:
+ eofkey = "CTRL-Z"
+ else:
+ eofkey = "CTRL-D"
banner = dedent(f"""
sqlite3 shell, running on SQLite version {sqlite3.sqlite_version}
Connected to {db_name}
Each command will be run using execute() on the cursor.
- Type ".help" for more information; type ".quit" or CTRL-D to quit.
+ Type ".help" for more information; type ".quit" or {eofkey} to quit.
""").strip()
sys.ps1 = "sqlite> "
sys.ps2 = " ... "
diff --git a/Lib/tarfile.py b/Lib/tarfile.py
index b733195c9c5636..7781a430839ea5 100755
--- a/Lib/tarfile.py
+++ b/Lib/tarfile.py
@@ -46,6 +46,7 @@
import struct
import copy
import re
+import warnings
try:
import pwd
@@ -65,7 +66,11 @@
__all__ = ["TarFile", "TarInfo", "is_tarfile", "TarError", "ReadError",
"CompressionError", "StreamError", "ExtractError", "HeaderError",
"ENCODING", "USTAR_FORMAT", "GNU_FORMAT", "PAX_FORMAT",
- "DEFAULT_FORMAT", "open"]
+ "DEFAULT_FORMAT", "open","fully_trusted_filter", "data_filter",
+ "tar_filter", "FilterError", "AbsoluteLinkError",
+ "OutsideDestinationError", "SpecialFileError", "AbsolutePathError",
+ "LinkOutsideDestinationError"]
+
#---------------------------------------------------------
# tar constants
@@ -154,6 +159,8 @@
def stn(s, length, encoding, errors):
"""Convert a string to a null-terminated bytes object.
"""
+ if s is None:
+ raise ValueError("metadata cannot contain None")
s = s.encode(encoding, errors)
return s[:length] + (length - len(s)) * NUL
@@ -707,9 +714,127 @@ def __init__(self, tarfile, tarinfo):
super().__init__(fileobj)
#class ExFileObject
+
+#-----------------------------
+# extraction filters (PEP 706)
+#-----------------------------
+
+class FilterError(TarError):
+ pass
+
+class AbsolutePathError(FilterError):
+ def __init__(self, tarinfo):
+ self.tarinfo = tarinfo
+ super().__init__(f'member {tarinfo.name!r} has an absolute path')
+
+class OutsideDestinationError(FilterError):
+ def __init__(self, tarinfo, path):
+ self.tarinfo = tarinfo
+ self._path = path
+ super().__init__(f'{tarinfo.name!r} would be extracted to {path!r}, '
+ + 'which is outside the destination')
+
+class SpecialFileError(FilterError):
+ def __init__(self, tarinfo):
+ self.tarinfo = tarinfo
+ super().__init__(f'{tarinfo.name!r} is a special file')
+
+class AbsoluteLinkError(FilterError):
+ def __init__(self, tarinfo):
+ self.tarinfo = tarinfo
+ super().__init__(f'{tarinfo.name!r} is a symlink to an absolute path')
+
+class LinkOutsideDestinationError(FilterError):
+ def __init__(self, tarinfo, path):
+ self.tarinfo = tarinfo
+ self._path = path
+ super().__init__(f'{tarinfo.name!r} would link to {path!r}, '
+ + 'which is outside the destination')
+
+def _get_filtered_attrs(member, dest_path, for_data=True):
+ new_attrs = {}
+ name = member.name
+ dest_path = os.path.realpath(dest_path)
+ # Strip leading / (tar's directory separator) from filenames.
+ # Include os.sep (target OS directory separator) as well.
+ if name.startswith(('/', os.sep)):
+ name = new_attrs['name'] = member.path.lstrip('/' + os.sep)
+ if os.path.isabs(name):
+ # Path is absolute even after stripping.
+ # For example, 'C:/foo' on Windows.
+ raise AbsolutePathError(member)
+ # Ensure we stay in the destination
+ target_path = os.path.realpath(os.path.join(dest_path, name))
+ if os.path.commonpath([target_path, dest_path]) != dest_path:
+ raise OutsideDestinationError(member, target_path)
+ # Limit permissions (no high bits, and go-w)
+ mode = member.mode
+ if mode is not None:
+ # Strip high bits & group/other write bits
+ mode = mode & 0o755
+ if for_data:
+ # For data, handle permissions & file types
+ if member.isreg() or member.islnk():
+ if not mode & 0o100:
+ # Clear executable bits if not executable by user
+ mode &= ~0o111
+ # Ensure owner can read & write
+ mode |= 0o600
+ elif member.isdir() or member.issym():
+ # Ignore mode for directories & symlinks
+ mode = None
+ else:
+ # Reject special files
+ raise SpecialFileError(member)
+ if mode != member.mode:
+ new_attrs['mode'] = mode
+ if for_data:
+ # Ignore ownership for 'data'
+ if member.uid is not None:
+ new_attrs['uid'] = None
+ if member.gid is not None:
+ new_attrs['gid'] = None
+ if member.uname is not None:
+ new_attrs['uname'] = None
+ if member.gname is not None:
+ new_attrs['gname'] = None
+ # Check link destination for 'data'
+ if member.islnk() or member.issym():
+ if os.path.isabs(member.linkname):
+ raise AbsoluteLinkError(member)
+ target_path = os.path.realpath(os.path.join(dest_path, member.linkname))
+ if os.path.commonpath([target_path, dest_path]) != dest_path:
+ raise LinkOutsideDestinationError(member, target_path)
+ return new_attrs
+
+def fully_trusted_filter(member, dest_path):
+ return member
+
+def tar_filter(member, dest_path):
+ new_attrs = _get_filtered_attrs(member, dest_path, False)
+ if new_attrs:
+ return member.replace(**new_attrs, deep=False)
+ return member
+
+def data_filter(member, dest_path):
+ new_attrs = _get_filtered_attrs(member, dest_path, True)
+ if new_attrs:
+ return member.replace(**new_attrs, deep=False)
+ return member
+
+_NAMED_FILTERS = {
+ "fully_trusted": fully_trusted_filter,
+ "tar": tar_filter,
+ "data": data_filter,
+}
+
#------------------
# Exported Classes
#------------------
+
+# Sentinel for replace() defaults, meaning "don't change the attribute"
+_KEEP = object()
+
class TarInfo(object):
"""Informational class which holds the details about an
archive member given by a tar header block.
@@ -790,12 +915,44 @@ def linkpath(self, linkname):
def __repr__(self):
return "<%s %r at %#x>" % (self.__class__.__name__,self.name,id(self))
+ def replace(self, *,
+ name=_KEEP, mtime=_KEEP, mode=_KEEP, linkname=_KEEP,
+ uid=_KEEP, gid=_KEEP, uname=_KEEP, gname=_KEEP,
+ deep=True, _KEEP=_KEEP):
+ """Return a deep copy of self with the given attributes replaced.
+ """
+ if deep:
+ result = copy.deepcopy(self)
+ else:
+ result = copy.copy(self)
+ if name is not _KEEP:
+ result.name = name
+ if mtime is not _KEEP:
+ result.mtime = mtime
+ if mode is not _KEEP:
+ result.mode = mode
+ if linkname is not _KEEP:
+ result.linkname = linkname
+ if uid is not _KEEP:
+ result.uid = uid
+ if gid is not _KEEP:
+ result.gid = gid
+ if uname is not _KEEP:
+ result.uname = uname
+ if gname is not _KEEP:
+ result.gname = gname
+ return result
+
def get_info(self):
"""Return the TarInfo's attributes as a dictionary.
"""
+ if self.mode is None:
+ mode = None
+ else:
+ mode = self.mode & 0o7777
info = {
"name": self.name,
- "mode": self.mode & 0o7777,
+ "mode": mode,
"uid": self.uid,
"gid": self.gid,
"size": self.size,
@@ -818,6 +975,9 @@ def tobuf(self, format=DEFAULT_FORMAT, encoding=ENCODING, errors="surrogateescap
"""Return a tar header as a string of 512 byte blocks.
"""
info = self.get_info()
+ for name, value in info.items():
+ if value is None:
+ raise ValueError("%s may not be None" % name)
if format == USTAR_FORMAT:
return self.create_ustar_header(info, encoding, errors)
@@ -948,6 +1108,12 @@ def _create_header(info, format, encoding, errors):
devmajor = stn("", 8, encoding, errors)
devminor = stn("", 8, encoding, errors)
+ # None values in metadata should cause ValueError.
+ # itn()/stn() do this for all fields except type.
+ filetype = info.get("type", REGTYPE)
+ if filetype is None:
+ raise ValueError("TarInfo.type must not be None")
+
parts = [
stn(info.get("name", ""), 100, encoding, errors),
itn(info.get("mode", 0) & 0o7777, 8, format),
@@ -956,7 +1122,7 @@ def _create_header(info, format, encoding, errors):
itn(info.get("size", 0), 12, format),
itn(info.get("mtime", 0), 12, format),
b" ", # checksum field
- info.get("type", REGTYPE),
+ filetype,
stn(info.get("linkname", ""), 100, encoding, errors),
info.get("magic", POSIX_MAGIC),
stn(info.get("uname", ""), 32, encoding, errors),
@@ -1462,6 +1628,8 @@ class TarFile(object):
fileobject = ExFileObject # The file-object for extractfile().
+ extraction_filter = None # The default filter for extraction.
+
def __init__(self, name=None, mode="r", fileobj=None, format=None,
tarinfo=None, dereference=None, ignore_zeros=None, encoding=None,
errors="surrogateescape", pax_headers=None, debug=None,
@@ -1936,7 +2104,10 @@ def list(self, verbose=True, *, members=None):
members = self
for tarinfo in members:
if verbose:
- _safe_print(stat.filemode(tarinfo.mode))
+ if tarinfo.mode is None:
+ _safe_print("??????????")
+ else:
+ _safe_print(stat.filemode(tarinfo.mode))
_safe_print("%s/%s" % (tarinfo.uname or tarinfo.uid,
tarinfo.gname or tarinfo.gid))
if tarinfo.ischr() or tarinfo.isblk():
@@ -1944,8 +2115,11 @@ def list(self, verbose=True, *, members=None):
("%d,%d" % (tarinfo.devmajor, tarinfo.devminor)))
else:
_safe_print("%10d" % tarinfo.size)
- _safe_print("%d-%02d-%02d %02d:%02d:%02d" \
- % time.localtime(tarinfo.mtime)[:6])
+ if tarinfo.mtime is None:
+ _safe_print("????-??-?? ??:??:??")
+ else:
+ _safe_print("%d-%02d-%02d %02d:%02d:%02d" \
+ % time.localtime(tarinfo.mtime)[:6])
_safe_print(tarinfo.name + ("/" if tarinfo.isdir() else ""))
@@ -2032,32 +2206,63 @@ def addfile(self, tarinfo, fileobj=None):
self.members.append(tarinfo)
- def extractall(self, path=".", members=None, *, numeric_owner=False):
+ def _get_filter_function(self, filter):
+ if filter is None:
+ filter = self.extraction_filter
+ if filter is None:
+ warnings.warn(
+ 'Python 3.14 will, by default, filter extracted tar '
+ + 'archives and reject files or modify their metadata. '
+ + 'Use the filter argument to control this behavior.',
+ DeprecationWarning)
+ return fully_trusted_filter
+ if isinstance(filter, str):
+ raise TypeError(
+ 'String names are not supported for '
+ + 'TarFile.extraction_filter. Use a function such as '
+ + 'tarfile.data_filter directly.')
+ return filter
+ if callable(filter):
+ return filter
+ try:
+ return _NAMED_FILTERS[filter]
+ except KeyError:
+ raise ValueError(f"filter {filter!r} not found") from None
+
+ def extractall(self, path=".", members=None, *, numeric_owner=False,
+ filter=None):
"""Extract all members from the archive to the current working
directory and set owner, modification time and permissions on
directories afterwards. `path' specifies a different directory
to extract to. `members' is optional and must be a subset of the
list returned by getmembers(). If `numeric_owner` is True, only
the numbers for user/group names are used and not the names.
+
+ The `filter` function will be called on each member just
+ before extraction.
+ It can return a changed TarInfo or None to skip the member.
+ String names of common filters are accepted.
"""
directories = []
+ filter_function = self._get_filter_function(filter)
if members is None:
members = self
- for tarinfo in members:
+ for member in members:
+ tarinfo = self._get_extract_tarinfo(member, filter_function, path)
+ if tarinfo is None:
+ continue
if tarinfo.isdir():
- # Extract directories with a safe mode.
+ # For directories, delay setting attributes until later,
+ # since permissions can interfere with extraction and
+ # extracting contents can reset mtime.
directories.append(tarinfo)
- tarinfo = copy.copy(tarinfo)
- tarinfo.mode = 0o700
- # Do not set_attrs directories, as we will do that further down
- self.extract(tarinfo, path, set_attrs=not tarinfo.isdir(),
- numeric_owner=numeric_owner)
+ self._extract_one(tarinfo, path, set_attrs=not tarinfo.isdir(),
+ numeric_owner=numeric_owner)
# Reverse sort directories.
- directories.sort(key=lambda a: a.name)
- directories.reverse()
+ directories.sort(key=lambda a: a.name, reverse=True)
# Set correct owner, mtime and filemode on directories.
for tarinfo in directories:
@@ -2067,12 +2272,10 @@ def extractall(self, path=".", members=None, *, numeric_owner=False):
self.utime(tarinfo, dirpath)
self.chmod(tarinfo, dirpath)
except ExtractError as e:
- if self.errorlevel > 1:
- raise
- else:
- self._dbg(1, "tarfile: %s" % e)
+ self._handle_nonfatal_error(e)
- def extract(self, member, path="", set_attrs=True, *, numeric_owner=False):
+ def extract(self, member, path="", set_attrs=True, *, numeric_owner=False,
+ filter=None):
"""Extract a member from the archive to the current working directory,
using its full name. Its file information is extracted as accurately
as possible. `member' may be a filename or a TarInfo object. You can
@@ -2080,35 +2283,70 @@ def extract(self, member, path="", set_attrs=True, *, numeric_owner=False):
mtime, mode) are set unless `set_attrs' is False. If `numeric_owner`
is True, only the numbers for user/group names are used and not
the names.
+
+ The `filter` function will be called before extraction.
+ It can return a changed TarInfo or None to skip the member.
+ String names of common filters are accepted.
"""
- self._check("r")
+ filter_function = self._get_filter_function(filter)
+ tarinfo = self._get_extract_tarinfo(member, filter_function, path)
+ if tarinfo is not None:
+ self._extract_one(tarinfo, path, set_attrs, numeric_owner)
+ def _get_extract_tarinfo(self, member, filter_function, path):
+ """Get filtered TarInfo (or None) from member, which might be a str"""
if isinstance(member, str):
tarinfo = self.getmember(member)
else:
tarinfo = member
+ unfiltered = tarinfo
+ try:
+ tarinfo = filter_function(tarinfo, path)
+ except (OSError, FilterError) as e:
+ self._handle_fatal_error(e)
+ except ExtractError as e:
+ self._handle_nonfatal_error(e)
+ if tarinfo is None:
+ self._dbg(2, "tarfile: Excluded %r" % unfiltered.name)
+ return None
# Prepare the link target for makelink().
if tarinfo.islnk():
+ tarinfo = copy.copy(tarinfo)
tarinfo._link_target = os.path.join(path, tarinfo.linkname)
+ return tarinfo
+
+ def _extract_one(self, tarinfo, path, set_attrs, numeric_owner):
+ """Extract from filtered tarinfo to disk"""
+ self._check("r")
try:
self._extract_member(tarinfo, os.path.join(path, tarinfo.name),
set_attrs=set_attrs,
numeric_owner=numeric_owner)
except OSError as e:
- if self.errorlevel > 0:
- raise
- else:
- if e.filename is None:
- self._dbg(1, "tarfile: %s" % e.strerror)
- else:
- self._dbg(1, "tarfile: %s %r" % (e.strerror, e.filename))
+ self._handle_fatal_error(e)
except ExtractError as e:
- if self.errorlevel > 1:
- raise
+ self._handle_nonfatal_error(e)
+
+ def _handle_nonfatal_error(self, e):
+ """Handle non-fatal error (ExtractError) according to errorlevel"""
+ if self.errorlevel > 1:
+ raise
+ else:
+ self._dbg(1, "tarfile: %s" % e)
+
+ def _handle_fatal_error(self, e):
+ """Handle "fatal" error according to self.errorlevel"""
+ if self.errorlevel > 0:
+ raise
+ elif isinstance(e, OSError):
+ if e.filename is None:
+ self._dbg(1, "tarfile: %s" % e.strerror)
else:
- self._dbg(1, "tarfile: %s" % e)
+ self._dbg(1, "tarfile: %s %r" % (e.strerror, e.filename))
+ else:
+ self._dbg(1, "tarfile: %s %s" % (type(e).__name__, e))
def extractfile(self, member):
"""Extract a member from the archive as a file object. `member' may be
@@ -2195,9 +2433,13 @@ def makedir(self, tarinfo, targetpath):
"""Make a directory called targetpath.
"""
try:
- # Use a safe mode for the directory, the real mode is set
- # later in _extract_member().
- os.mkdir(targetpath, 0o700)
+ if tarinfo.mode is None:
+ # Use the system's default mode
+ os.mkdir(targetpath)
+ else:
+ # Use a safe mode for the directory, the real mode is set
+ # later in _extract_member().
+ os.mkdir(targetpath, 0o700)
except FileExistsError:
pass
@@ -2240,6 +2482,9 @@ def makedev(self, tarinfo, targetpath):
raise ExtractError("special devices not supported by system")
mode = tarinfo.mode
+ if mode is None:
+ # Use mknod's default
+ mode = 0o600
if tarinfo.isblk():
mode |= stat.S_IFBLK
else:
@@ -2261,7 +2506,6 @@ def makelink(self, tarinfo, targetpath):
os.unlink(targetpath)
os.symlink(tarinfo.linkname, targetpath)
else:
- # See extract().
if os.path.exists(tarinfo._link_target):
os.link(tarinfo._link_target, targetpath)
else:
@@ -2286,15 +2530,19 @@ def chown(self, tarinfo, targetpath, numeric_owner):
u = tarinfo.uid
if not numeric_owner:
try:
- if grp:
+ if grp and tarinfo.gname:
g = grp.getgrnam(tarinfo.gname)[2]
except KeyError:
pass
try:
- if pwd:
+ if pwd and tarinfo.uname:
u = pwd.getpwnam(tarinfo.uname)[2]
except KeyError:
pass
+ if g is None:
+ g = -1
+ if u is None:
+ u = -1
try:
if tarinfo.issym() and hasattr(os, "lchown"):
os.lchown(targetpath, u, g)
@@ -2306,6 +2554,8 @@ def chown(self, tarinfo, targetpath, numeric_owner):
def chmod(self, tarinfo, targetpath):
"""Set file permissions of targetpath according to tarinfo.
"""
+ if tarinfo.mode is None:
+ return
try:
os.chmod(targetpath, tarinfo.mode)
except OSError as e:
@@ -2314,10 +2564,13 @@ def chmod(self, tarinfo, targetpath):
def utime(self, tarinfo, targetpath):
"""Set modification time of targetpath according to tarinfo.
"""
+ mtime = tarinfo.mtime
+ if mtime is None:
+ return
if not hasattr(os, 'utime'):
return
try:
- os.utime(targetpath, (tarinfo.mtime, tarinfo.mtime))
+ os.utime(targetpath, (mtime, mtime))
except OSError as e:
raise ExtractError("could not change modification time") from e
@@ -2395,13 +2648,26 @@ def _getmember(self, name, tarinfo=None, normalize=False):
members = self.getmembers()
# Limit the member search list up to tarinfo.
+ skipping = False
if tarinfo is not None:
- members = members[:members.index(tarinfo)]
+ try:
+ index = members.index(tarinfo)
+ except ValueError:
+ # The given starting point might be a (modified) copy.
+ # We'll later skip members until we find an equivalent.
+ skipping = True
+ else:
+ # Happy fast path
+ members = members[:index]
if normalize:
name = os.path.normpath(name)
for member in reversed(members):
+ if skipping:
+ if tarinfo.offset == member.offset:
+ skipping = False
+ continue
if normalize:
member_name = os.path.normpath(member.name)
else:
@@ -2410,6 +2676,10 @@ def _getmember(self, name, tarinfo=None, normalize=False):
if name == member_name:
return member
+ if skipping:
+ # Starting point was not found
+ raise ValueError(tarinfo)
+
def _load(self):
"""Read through the entire archive file and look for readable
members.
@@ -2500,6 +2770,7 @@ def __exit__(self, type, value, traceback):
#--------------------
# exported functions
#--------------------
+
def is_tarfile(name):
"""Return True if name points to a tar archive that we
are able to handle, else return False.
@@ -2528,6 +2799,10 @@ def main():
parser = argparse.ArgumentParser(description=description)
parser.add_argument('-v', '--verbose', action='store_true', default=False,
help='Verbose output')
+ parser.add_argument('--filter', metavar='',
+ choices=_NAMED_FILTERS,
+ help='Filter for extraction')
+
group = parser.add_mutually_exclusive_group(required=True)
group.add_argument('-l', '--list', metavar='',
help='Show listing of a tarfile')
@@ -2539,8 +2814,12 @@ def main():
help='Create tarfile from sources')
group.add_argument('-t', '--test', metavar='',
help='Test if a tarfile is valid')
+
args = parser.parse_args()
+ if args.filter and args.extract is None:
+ parser.exit(1, '--filter is only valid for extraction\n')
+
if args.test is not None:
src = args.test
if is_tarfile(src):
@@ -2571,7 +2850,7 @@ def main():
if is_tarfile(src):
with TarFile.open(src, 'r:*') as tf:
- tf.extractall(path=curdir)
+ tf.extractall(path=curdir, filter=args.filter)
if args.verbose:
if curdir == '.':
msg = '{!r} file is extracted.'.format(src)
diff --git a/Lib/tempfile.py b/Lib/tempfile.py
index 4732eb0efe1f76..2b4f4313247128 100644
--- a/Lib/tempfile.py
+++ b/Lib/tempfile.py
@@ -376,7 +376,7 @@ def mkdtemp(suffix=None, prefix=None, dir=None):
continue
else:
raise
- return file
+ return _os.path.abspath(file)
raise FileExistsError(_errno.EEXIST,
"No usable temporary directory name found")
diff --git a/Lib/test/_test_embed_structseq.py b/Lib/test/_test_embed_structseq.py
index 868f9f83e8be77..834daa4df55fec 100644
--- a/Lib/test/_test_embed_structseq.py
+++ b/Lib/test/_test_embed_structseq.py
@@ -1,27 +1,31 @@
import sys
import types
-import unittest
+# Note: This test file can't import `unittest` since the runtime can't
+# currently guarantee that it will not leak memory. Doing so will mark
+# the test as passing but with reference leaks. This can safely import
+# the `unittest` library once there's a strict guarantee of no leaks
+# during runtime shutdown.
# bpo-46417: Test that structseq types used by the sys module are still
# valid when Py_Finalize()/Py_Initialize() are called multiple times.
-class TestStructSeq(unittest.TestCase):
+class TestStructSeq:
# test PyTypeObject members
- def check_structseq(self, obj_type):
+ def _check_structseq(self, obj_type):
# ob_refcnt
- self.assertGreaterEqual(sys.getrefcount(obj_type), 1)
+ assert sys.getrefcount(obj_type) > 1
# tp_base
- self.assertTrue(issubclass(obj_type, tuple))
+ assert issubclass(obj_type, tuple)
# tp_bases
- self.assertEqual(obj_type.__bases__, (tuple,))
+ assert obj_type.__bases__ == (tuple,)
# tp_dict
- self.assertIsInstance(obj_type.__dict__, types.MappingProxyType)
+ assert isinstance(obj_type.__dict__, types.MappingProxyType)
# tp_mro
- self.assertEqual(obj_type.__mro__, (obj_type, tuple, object))
+ assert obj_type.__mro__ == (obj_type, tuple, object)
# tp_name
- self.assertIsInstance(type.__name__, str)
+ assert isinstance(type.__name__, str)
# tp_subclasses
- self.assertEqual(obj_type.__subclasses__(), [])
+ assert obj_type.__subclasses__() == []
def test_sys_attrs(self):
for attr_name in (
@@ -32,23 +36,23 @@ def test_sys_attrs(self):
'thread_info', # ThreadInfoType
'version_info', # VersionInfoType
):
- with self.subTest(attr=attr_name):
- attr = getattr(sys, attr_name)
- self.check_structseq(type(attr))
+ attr = getattr(sys, attr_name)
+ self._check_structseq(type(attr))
def test_sys_funcs(self):
func_names = ['get_asyncgen_hooks'] # AsyncGenHooksType
if hasattr(sys, 'getwindowsversion'):
func_names.append('getwindowsversion') # WindowsVersionType
for func_name in func_names:
- with self.subTest(func=func_name):
- func = getattr(sys, func_name)
- obj = func()
- self.check_structseq(type(obj))
+ func = getattr(sys, func_name)
+ obj = func()
+ self._check_structseq(type(obj))
try:
- unittest.main()
+ tests = TestStructSeq()
+ tests.test_sys_attrs()
+ tests.test_sys_funcs()
except SystemExit as exc:
if exc.args[0] != 0:
raise
diff --git a/Lib/test/datetimetester.py b/Lib/test/datetimetester.py
index 570f803918c1ef..c5eb6e7f1643ee 100644
--- a/Lib/test/datetimetester.py
+++ b/Lib/test/datetimetester.py
@@ -2437,7 +2437,8 @@ def test_utcfromtimestamp(self):
ts = time.time()
expected = time.gmtime(ts)
- got = self.theclass.utcfromtimestamp(ts)
+ with self.assertWarns(DeprecationWarning):
+ got = self.theclass.utcfromtimestamp(ts)
self.verify_field_equality(expected, got)
# Run with US-style DST rules: DST begins 2 a.m. on second Sunday in
@@ -2483,8 +2484,12 @@ def test_timestamp_aware(self):
@support.run_with_tz('MSK-03') # Something east of Greenwich
def test_microsecond_rounding(self):
+ def utcfromtimestamp(*args, **kwargs):
+ with self.assertWarns(DeprecationWarning):
+ return self.theclass.utcfromtimestamp(*args, **kwargs)
+
for fts in [self.theclass.fromtimestamp,
- self.theclass.utcfromtimestamp]:
+ utcfromtimestamp]:
zero = fts(0)
self.assertEqual(zero.second, 0)
self.assertEqual(zero.microsecond, 0)
@@ -2581,10 +2586,11 @@ def test_fromtimestamp_limits(self):
self.theclass.fromtimestamp(ts)
def test_utcfromtimestamp_limits(self):
- try:
- self.theclass.utcfromtimestamp(-2**32 - 1)
- except (OSError, OverflowError):
- self.skipTest("Test not valid on this platform")
+ with self.assertWarns(DeprecationWarning):
+ try:
+ self.theclass.utcfromtimestamp(-2**32 - 1)
+ except (OSError, OverflowError):
+ self.skipTest("Test not valid on this platform")
min_dt = self.theclass.min.replace(tzinfo=timezone.utc)
min_ts = min_dt.timestamp()
@@ -2597,10 +2603,11 @@ def test_utcfromtimestamp_limits(self):
("maximum", max_ts, max_dt.replace(tzinfo=None)),
]:
with self.subTest(test_name, ts=ts, expected=expected):
- try:
- actual = self.theclass.utcfromtimestamp(ts)
- except (OSError, OverflowError) as exc:
- self.skipTest(str(exc))
+ with self.assertWarns(DeprecationWarning):
+ try:
+ actual = self.theclass.utcfromtimestamp(ts)
+ except (OSError, OverflowError) as exc:
+ self.skipTest(str(exc))
self.assertEqual(actual, expected)
@@ -2645,7 +2652,8 @@ def test_negative_float_fromtimestamp(self):
@unittest.skipIf(sys.platform == "win32", "Windows doesn't accept negative timestamps")
def test_negative_float_utcfromtimestamp(self):
- d = self.theclass.utcfromtimestamp(-1.05)
+ with self.assertWarns(DeprecationWarning):
+ d = self.theclass.utcfromtimestamp(-1.05)
self.assertEqual(d, self.theclass(1969, 12, 31, 23, 59, 58, 950000))
def test_utcnow(self):
@@ -2655,8 +2663,11 @@ def test_utcnow(self):
# a second of each other.
tolerance = timedelta(seconds=1)
for dummy in range(3):
- from_now = self.theclass.utcnow()
- from_timestamp = self.theclass.utcfromtimestamp(time.time())
+ with self.assertWarns(DeprecationWarning):
+ from_now = self.theclass.utcnow()
+
+ with self.assertWarns(DeprecationWarning):
+ from_timestamp = self.theclass.utcfromtimestamp(time.time())
if abs(from_timestamp - from_now) <= tolerance:
break
# Else try again a few times.
@@ -2956,7 +2967,11 @@ def __new__(cls, *args, **kwargs):
constr_name=constr_name):
constructor = getattr(base_obj, constr_name)
- dt = constructor(*constr_args)
+ if constr_name == "utcfromtimestamp":
+ with self.assertWarns(DeprecationWarning):
+ dt = constructor(*constr_args)
+ else:
+ dt = constructor(*constr_args)
# Test that it creates the right subclass
self.assertIsInstance(dt, DateTimeSubclass)
@@ -2986,7 +3001,11 @@ def __new__(cls, *args, **kwargs):
for name, meth_name, kwargs in test_cases:
with self.subTest(name):
constr = getattr(DateTimeSubclass, meth_name)
- dt = constr(**kwargs)
+ if constr == "utcnow":
+ with self.assertWarns(DeprecationWarning):
+ dt = constr(**kwargs)
+ else:
+ dt = constr(**kwargs)
self.assertIsInstance(dt, DateTimeSubclass)
self.assertEqual(dt.extra, 7)
@@ -4642,7 +4661,8 @@ def test_tzinfo_now(self):
for dummy in range(3):
now = datetime.now(weirdtz)
self.assertIs(now.tzinfo, weirdtz)
- utcnow = datetime.utcnow().replace(tzinfo=utc)
+ with self.assertWarns(DeprecationWarning):
+ utcnow = datetime.utcnow().replace(tzinfo=utc)
now2 = utcnow.astimezone(weirdtz)
if abs(now - now2) < timedelta(seconds=30):
break
@@ -4676,7 +4696,8 @@ def test_tzinfo_fromtimestamp(self):
# Try to make sure tz= actually does some conversion.
timestamp = 1000000000
- utcdatetime = datetime.utcfromtimestamp(timestamp)
+ with self.assertWarns(DeprecationWarning):
+ utcdatetime = datetime.utcfromtimestamp(timestamp)
# In POSIX (epoch 1970), that's 2001-09-09 01:46:40 UTC, give or take.
# But on some flavor of Mac, it's nowhere near that. So we can't have
# any idea here what time that actually is, we can only test that
@@ -4690,7 +4711,8 @@ def test_tzinfo_fromtimestamp(self):
def test_tzinfo_utcnow(self):
meth = self.theclass.utcnow
# Ensure it doesn't require tzinfo (i.e., that this doesn't blow up).
- base = meth()
+ with self.assertWarns(DeprecationWarning):
+ base = meth()
# Try with and without naming the keyword; for whatever reason,
# utcnow() doesn't accept a tzinfo argument.
off42 = FixedOffset(42, "42")
@@ -4702,7 +4724,8 @@ def test_tzinfo_utcfromtimestamp(self):
meth = self.theclass.utcfromtimestamp
ts = time.time()
# Ensure it doesn't require tzinfo (i.e., that this doesn't blow up).
- base = meth(ts)
+ with self.assertWarns(DeprecationWarning):
+ base = meth(ts)
# Try with and without naming the keyword; for whatever reason,
# utcfromtimestamp() doesn't accept a tzinfo argument.
off42 = FixedOffset(42, "42")
@@ -5309,7 +5332,7 @@ def dst(self, dt):
def test_fromutc(self):
self.assertRaises(TypeError, Eastern.fromutc) # not enough args
- now = datetime.utcnow().replace(tzinfo=utc_real)
+ now = datetime.now(tz=utc_real)
self.assertRaises(ValueError, Eastern.fromutc, now) # wrong tzinfo
now = now.replace(tzinfo=Eastern) # insert correct tzinfo
enow = Eastern.fromutc(now) # doesn't blow up
@@ -5411,9 +5434,11 @@ def test_bug_1028306(self):
self.assertEqual(datetime_sc, as_datetime)
def test_extra_attributes(self):
+ with self.assertWarns(DeprecationWarning):
+ utcnow = datetime.utcnow()
for x in [date.today(),
time(),
- datetime.utcnow(),
+ utcnow,
timedelta(),
tzinfo(),
timezone(timedelta())]:
@@ -6073,6 +6098,7 @@ def stats(cls, start_year=1):
def transitions(self):
for (_, prev_ti), (t, ti) in pairs(zip(self.ut, self.ti)):
shift = ti[0] - prev_ti[0]
+ # TODO: Remove this use of utcfromtimestamp
yield datetime.utcfromtimestamp(t), shift
def nondst_folds(self):
@@ -6212,6 +6238,10 @@ def test_system_transitions(self):
ts1 = dt.replace(fold=1).timestamp()
self.assertEqual(ts0, s0 + ss / 2)
self.assertEqual(ts1, s0 - ss / 2)
+ # gh-83861
+ utc0 = dt.astimezone(timezone.utc)
+ utc1 = dt.replace(fold=1).astimezone(timezone.utc)
+ self.assertEqual(utc0, utc1 + timedelta(0, ss))
finally:
if TZ is None:
del os.environ['TZ']
diff --git a/Lib/test/libregrtest/refleak.py b/Lib/test/libregrtest/refleak.py
index 4298fa806e1065..2de8c6cfbc61a1 100644
--- a/Lib/test/libregrtest/refleak.py
+++ b/Lib/test/libregrtest/refleak.py
@@ -73,9 +73,10 @@ def get_pooled_int(value):
fd_deltas = [0] * repcount
getallocatedblocks = sys.getallocatedblocks
gettotalrefcount = sys.gettotalrefcount
+ getunicodeinternedsize = sys.getunicodeinternedsize
fd_count = os_helper.fd_count
# initialize variables to make pyflakes quiet
- rc_before = alloc_before = fd_before = 0
+ rc_before = alloc_before = fd_before = interned_before = 0
if not ns.quiet:
print("beginning", repcount, "repetitions", file=sys.stderr)
@@ -91,9 +92,13 @@ def get_pooled_int(value):
dash_R_cleanup(fs, ps, pic, zdc, abcs)
support.gc_collect()
- # Read memory statistics immediately after the garbage collection
- alloc_after = getallocatedblocks()
- rc_after = gettotalrefcount()
+ # Read memory statistics immediately after the garbage collection.
+ # Also, readjust the reference counts and alloc blocks by ignoring
+ # any strings that might have been interned during test_func. These
+ # strings will be deallocated at runtime shutdown
+ interned_after = getunicodeinternedsize()
+ alloc_after = getallocatedblocks() - interned_after
+ rc_after = gettotalrefcount() - interned_after * 2
fd_after = fd_count()
if not ns.quiet:
@@ -106,6 +111,7 @@ def get_pooled_int(value):
alloc_before = alloc_after
rc_before = rc_after
fd_before = fd_after
+ interned_before = interned_after
if not ns.quiet:
print(file=sys.stderr)
diff --git a/Lib/test/shadowed_super.py b/Lib/test/shadowed_super.py
new file mode 100644
index 00000000000000..2a62f667e93818
--- /dev/null
+++ b/Lib/test/shadowed_super.py
@@ -0,0 +1,7 @@
+class super:
+ msg = "truly super"
+
+
+class C:
+ def method(self):
+ return super().msg
diff --git a/Lib/test/support/testcase.py b/Lib/test/support/testcase.py
new file mode 100644
index 00000000000000..1e4363b15783eb
--- /dev/null
+++ b/Lib/test/support/testcase.py
@@ -0,0 +1,25 @@
+class ExceptionIsLikeMixin:
+ def assertExceptionIsLike(self, exc, template):
+ """
+ Passes when the provided `exc` matches the structure of `template`.
+ Individual exceptions don't have to be the same objects or even pass
+ an equality test: they only need to be the same type and contain equal
+ `exc_obj.args`.
+ """
+ if exc is None and template is None:
+ return
+
+ if template is None:
+ self.fail(f"unexpected exception: {exc}")
+
+ if exc is None:
+ self.fail(f"expected an exception like {template!r}, got None")
+
+ if not isinstance(exc, ExceptionGroup):
+ self.assertEqual(exc.__class__, template.__class__)
+ self.assertEqual(exc.args[0], template.args[0])
+ else:
+ self.assertEqual(exc.message, template.message)
+ self.assertEqual(len(exc.exceptions), len(template.exceptions))
+ for e, t in zip(exc.exceptions, template.exceptions):
+ self.assertExceptionIsLike(e, t)
diff --git a/Lib/test/support/testresult.py b/Lib/test/support/testresult.py
index 2cd1366cd8a9e1..14474be222dc4b 100644
--- a/Lib/test/support/testresult.py
+++ b/Lib/test/support/testresult.py
@@ -18,10 +18,13 @@ def __init__(self, stream, descriptions, verbosity):
self.buffer = True
if self.USE_XML:
from xml.etree import ElementTree as ET
- from datetime import datetime
+ from datetime import datetime, UTC
self.__ET = ET
self.__suite = ET.Element('testsuite')
- self.__suite.set('start', datetime.utcnow().isoformat(' '))
+ self.__suite.set('start',
+ datetime.now(UTC)
+ .replace(tzinfo=None)
+ .isoformat(' '))
self.__e = None
self.__start_time = None
diff --git a/Lib/test/test__xxinterpchannels.py b/Lib/test/test__xxinterpchannels.py
index b65281106f667c..750cd99b85e7a6 100644
--- a/Lib/test/test__xxinterpchannels.py
+++ b/Lib/test/test__xxinterpchannels.py
@@ -1469,19 +1469,19 @@ def _assert_closed_in_interp(self, fix, interp=None):
with self.assertRaises(channels.ChannelClosedError):
channels.close(fix.cid, force=True)
else:
- run_interp(interp.id, f"""
+ run_interp(interp.id, """
with helpers.expect_channel_closed():
channels.recv(cid)
""")
- run_interp(interp.id, f"""
+ run_interp(interp.id, """
with helpers.expect_channel_closed():
channels.send(cid, b'spam')
""")
- run_interp(interp.id, f"""
+ run_interp(interp.id, """
with helpers.expect_channel_closed():
channels.close(cid)
""")
- run_interp(interp.id, f"""
+ run_interp(interp.id, """
with helpers.expect_channel_closed():
channels.close(cid, force=True)
""")
diff --git a/Lib/test/test__xxsubinterpreters.py b/Lib/test/test__xxsubinterpreters.py
index 965967e3f2734b..1ee18774d17209 100644
--- a/Lib/test/test__xxsubinterpreters.py
+++ b/Lib/test/test__xxsubinterpreters.py
@@ -798,7 +798,7 @@ def test_shared_overwrites(self):
"""))
shared = {'spam': b'ham'}
- script = dedent(f"""
+ script = dedent("""
ns2 = dict(vars())
del ns2['__builtins__']
""")
@@ -902,7 +902,7 @@ def test_execution_namespace_is_main(self):
# XXX Fix this test!
@unittest.skip('blocking forever')
def test_still_running_at_exit(self):
- script = dedent(f"""
+ script = dedent("""
from textwrap import dedent
import threading
import _xxsubinterpreters as _interpreters
diff --git a/Lib/test/test_ast.py b/Lib/test/test_ast.py
index 6c932e1305e1dd..8eef7baec70118 100644
--- a/Lib/test/test_ast.py
+++ b/Lib/test/test_ast.py
@@ -774,11 +774,6 @@ def test_parenthesized_with_feature_version(self):
ast.parse('with (CtxManager() as example): ...', feature_version=(3, 8))
ast.parse('with CtxManager() as example: ...', feature_version=(3, 8))
- def test_debug_f_string_feature_version(self):
- ast.parse('f"{x=}"', feature_version=(3, 8))
- with self.assertRaises(SyntaxError):
- ast.parse('f"{x=}"', feature_version=(3, 7))
-
def test_assignment_expression_feature_version(self):
ast.parse('(x := 0)', feature_version=(3, 8))
with self.assertRaises(SyntaxError):
@@ -2298,6 +2293,17 @@ class C:
cdef = ast.parse(s).body[0]
self.assertEqual(ast.get_source_segment(s, cdef.body[0], padded=True), s_method)
+ def test_source_segment_newlines(self):
+ s = 'def f():\n pass\ndef g():\r pass\r\ndef h():\r\n pass\r\n'
+ f, g, h = ast.parse(s).body
+ self._check_content(s, f, 'def f():\n pass')
+ self._check_content(s, g, 'def g():\r pass')
+ self._check_content(s, h, 'def h():\r\n pass')
+
+ s = 'def f():\n a = 1\r b = 2\r\n c = 3\n'
+ f = ast.parse(s).body[0]
+ self._check_content(s, f, s.rstrip())
+
def test_source_segment_missing_info(self):
s = 'v = 1\r\nw = 1\nx = 1\n\ry = 1\r\n'
v, w, x, y = ast.parse(s).body
diff --git a/Lib/test/test_asyncio/test_proactor_events.py b/Lib/test/test_asyncio/test_proactor_events.py
index 6cb7dc300c5331..c42856e578b8cc 100644
--- a/Lib/test/test_asyncio/test_proactor_events.py
+++ b/Lib/test/test_asyncio/test_proactor_events.py
@@ -447,6 +447,19 @@ def monkey():
self.assertFalse(tr.is_reading())
+ def test_pause_reading_connection_made(self):
+ tr = self.socket_transport()
+ self.protocol.connection_made.side_effect = lambda _: tr.pause_reading()
+ test_utils.run_briefly(self.loop)
+ self.assertFalse(tr.is_reading())
+ self.loop.assert_no_reader(7)
+
+ tr.resume_reading()
+ self.assertTrue(tr.is_reading())
+
+ tr.close()
+ self.assertFalse(tr.is_reading())
+
def pause_writing_transport(self, high):
tr = self.socket_transport()
diff --git a/Lib/test/test_asyncio/test_selector_events.py b/Lib/test/test_asyncio/test_selector_events.py
index e41341fd26e19e..47693ea4d3ce2e 100644
--- a/Lib/test/test_asyncio/test_selector_events.py
+++ b/Lib/test/test_asyncio/test_selector_events.py
@@ -547,6 +547,22 @@ def test_pause_resume_reading(self):
self.assertFalse(tr.is_reading())
self.loop.assert_no_reader(7)
+ def test_pause_reading_connection_made(self):
+ tr = self.socket_transport()
+ self.protocol.connection_made.side_effect = lambda _: tr.pause_reading()
+ test_utils.run_briefly(self.loop)
+ self.assertFalse(tr.is_reading())
+ self.loop.assert_no_reader(7)
+
+ tr.resume_reading()
+ self.assertTrue(tr.is_reading())
+ self.loop.assert_reader(7, tr._read_ready)
+
+ tr.close()
+ self.assertFalse(tr.is_reading())
+ self.loop.assert_no_reader(7)
+
+
def test_read_eof_received_error(self):
transport = self.socket_transport()
transport.close = mock.Mock()
diff --git a/Lib/test/test_asyncio/test_subprocess.py b/Lib/test/test_asyncio/test_subprocess.py
index eba6e2d1f28f3e..eeeca40c15cd28 100644
--- a/Lib/test/test_asyncio/test_subprocess.py
+++ b/Lib/test/test_asyncio/test_subprocess.py
@@ -151,6 +151,24 @@ async def run(data):
self.assertEqual(exitcode, 0)
self.assertEqual(stdout, b'some data')
+ def test_communicate_none_input(self):
+ args = PROGRAM_CAT
+
+ async def run():
+ proc = await asyncio.create_subprocess_exec(
+ *args,
+ stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ )
+ stdout, stderr = await proc.communicate()
+ return proc.returncode, stdout
+
+ task = run()
+ task = asyncio.wait_for(task, support.LONG_TIMEOUT)
+ exitcode, stdout = self.loop.run_until_complete(task)
+ self.assertEqual(exitcode, 0)
+ self.assertEqual(stdout, b'')
+
def test_shell(self):
proc = self.loop.run_until_complete(
asyncio.create_subprocess_shell('exit 7')
diff --git a/Lib/test/test_asyncio/test_tasks.py b/Lib/test/test_asyncio/test_tasks.py
index 31622c91470bcb..6e8a51ce2555d5 100644
--- a/Lib/test/test_asyncio/test_tasks.py
+++ b/Lib/test/test_asyncio/test_tasks.py
@@ -399,6 +399,18 @@ async def notmuch():
self.loop.run_until_complete(t1)
self.loop.run_until_complete(t2)
+ def test_task_set_name_pylong(self):
+ # test that setting the task name to a PyLong explicitly doesn't
+ # incorrectly trigger the deferred name formatting logic
+ async def notmuch():
+ return 123
+
+ t = self.new_task(self.loop, notmuch(), name=987654321)
+ self.assertEqual(t.get_name(), '987654321')
+ t.set_name(123456789)
+ self.assertEqual(t.get_name(), '123456789')
+ self.loop.run_until_complete(t)
+
def test_task_repr_name_not_str(self):
async def notmuch():
return 123
diff --git a/Lib/test/test_asyncio/test_unix_events.py b/Lib/test/test_asyncio/test_unix_events.py
index 96999470a7c69a..cdf3eaac68af15 100644
--- a/Lib/test/test_asyncio/test_unix_events.py
+++ b/Lib/test/test_asyncio/test_unix_events.py
@@ -1712,11 +1712,11 @@ class PolicyTests(unittest.TestCase):
def create_policy(self):
return asyncio.DefaultEventLoopPolicy()
- def test_get_default_child_watcher(self):
+ @mock.patch('asyncio.unix_events.can_use_pidfd')
+ def test_get_default_child_watcher(self, m_can_use_pidfd):
+ m_can_use_pidfd.return_value = False
policy = self.create_policy()
self.assertIsNone(policy._watcher)
- unix_events.can_use_pidfd = mock.Mock()
- unix_events.can_use_pidfd.return_value = False
with self.assertWarns(DeprecationWarning):
watcher = policy.get_child_watcher()
self.assertIsInstance(watcher, asyncio.ThreadedChildWatcher)
@@ -1725,10 +1725,9 @@ def test_get_default_child_watcher(self):
with self.assertWarns(DeprecationWarning):
self.assertIs(watcher, policy.get_child_watcher())
+ m_can_use_pidfd.return_value = True
policy = self.create_policy()
self.assertIsNone(policy._watcher)
- unix_events.can_use_pidfd = mock.Mock()
- unix_events.can_use_pidfd.return_value = True
with self.assertWarns(DeprecationWarning):
watcher = policy.get_child_watcher()
self.assertIsInstance(watcher, asyncio.PidfdChildWatcher)
diff --git a/Lib/test/test_bdb.py b/Lib/test/test_bdb.py
index fc4b8094316332..568c88e326c087 100644
--- a/Lib/test/test_bdb.py
+++ b/Lib/test/test_bdb.py
@@ -1207,7 +1207,8 @@ def main():
class TestRegressions(unittest.TestCase):
def test_format_stack_entry_no_lineno(self):
# See gh-101517
- Bdb().format_stack_entry((sys._getframe(), None))
+ self.assertIn('Warning: lineno is None',
+ Bdb().format_stack_entry((sys._getframe(), None)))
if __name__ == "__main__":
diff --git a/Lib/test/test_buffer.py b/Lib/test/test_buffer.py
index 8ac3b7e7eb29d1..098d2d999643cb 100644
--- a/Lib/test/test_buffer.py
+++ b/Lib/test/test_buffer.py
@@ -965,8 +965,10 @@ def check_memoryview(m, expected_readonly=readonly):
self.assertEqual(m.strides, tuple(strides))
self.assertEqual(m.suboffsets, tuple(suboffsets))
- n = 1 if ndim == 0 else len(lst)
- self.assertEqual(len(m), n)
+ if ndim == 0:
+ self.assertRaises(TypeError, len, m)
+ else:
+ self.assertEqual(len(m), len(lst))
rep = result.tolist() if fmt else result.tobytes()
self.assertEqual(rep, lst)
diff --git a/Lib/test/test_builtin.py b/Lib/test/test_builtin.py
index e7a79bc13b7f3d..04dd8ff3070c99 100644
--- a/Lib/test/test_builtin.py
+++ b/Lib/test/test_builtin.py
@@ -28,7 +28,7 @@
from types import AsyncGeneratorType, FunctionType, CellType
from operator import neg
from test import support
-from test.support import (swap_attr, maybe_get_event_loop_policy)
+from test.support import (cpython_only, swap_attr, maybe_get_event_loop_policy)
from test.support.os_helper import (EnvironmentVarGuard, TESTFN, unlink)
from test.support.script_helper import assert_python_ok
from test.support.warnings_helper import check_warnings
@@ -2370,6 +2370,28 @@ def __del__(self):
self.assertEqual(["before", "after"], out.decode().splitlines())
+@cpython_only
+class ImmortalTests(unittest.TestCase):
+ def test_immortal(self):
+ none_refcount = sys.getrefcount(None)
+ true_refcount = sys.getrefcount(True)
+ false_refcount = sys.getrefcount(False)
+ smallint_refcount = sys.getrefcount(100)
+
+ # Assert that all of these immortal instances have large ref counts.
+ self.assertGreater(none_refcount, 2 ** 15)
+ self.assertGreater(true_refcount, 2 ** 15)
+ self.assertGreater(false_refcount, 2 ** 15)
+ self.assertGreater(smallint_refcount, 2 ** 15)
+
+ # Confirm that the refcount doesn't change even with a new ref to them.
+ l = [None, True, False, 100]
+ self.assertEqual(sys.getrefcount(None), none_refcount)
+ self.assertEqual(sys.getrefcount(True), true_refcount)
+ self.assertEqual(sys.getrefcount(False), false_refcount)
+ self.assertEqual(sys.getrefcount(100), smallint_refcount)
+
+
class TestType(unittest.TestCase):
def test_new_type(self):
A = type('A', (), {})
diff --git a/Lib/test/test_calendar.py b/Lib/test/test_calendar.py
index ccfbeede0be949..03388e8c55d5a8 100644
--- a/Lib/test/test_calendar.py
+++ b/Lib/test/test_calendar.py
@@ -8,6 +8,7 @@
import sys
import datetime
import os
+import warnings
# From https://en.wikipedia.org/wiki/Leap_year_starting_on_Saturday
result_0_02_text = """\
@@ -490,6 +491,14 @@ def test_format(self):
self.assertEqual(out.getvalue().strip(), "1 2 3")
class CalendarTestCase(unittest.TestCase):
+
+ def test_deprecation_warning(self):
+ with warnings.catch_warnings(record=True) as w:
+ calendar.January
+ self.assertEqual(len(w), 1)
+ self.assertEqual(w[0].category, DeprecationWarning)
+ self.assertIn("The 'January' attribute is deprecated, use 'JANUARY' instead", str(w[0].message))
+
def test_isleap(self):
# Make sure that the return is right for a few years, and
# ensure that the return values are 1 or 0, not just true or
diff --git a/Lib/test/test_capi/test_misc.py b/Lib/test/test_capi/test_misc.py
index 637adc01a331ce..9470cf12a7d1c4 100644
--- a/Lib/test/test_capi/test_misc.py
+++ b/Lib/test/test_capi/test_misc.py
@@ -1211,20 +1211,25 @@ def test_configured_settings(self):
"""
import json
+ OBMALLOC = 1<<5
EXTENSIONS = 1<<8
THREADS = 1<<10
DAEMON_THREADS = 1<<11
FORK = 1<<15
EXEC = 1<<16
- features = ['fork', 'exec', 'threads', 'daemon_threads', 'extensions']
+ features = ['obmalloc', 'fork', 'exec', 'threads', 'daemon_threads',
+ 'extensions']
kwlist = [f'allow_{n}' for n in features]
+ kwlist[0] = 'use_main_obmalloc'
kwlist[-1] = 'check_multi_interp_extensions'
+
+ # expected to work
for config, expected in {
- (True, True, True, True, True):
- FORK | EXEC | THREADS | DAEMON_THREADS | EXTENSIONS,
- (False, False, False, False, False): 0,
- (False, False, True, False, True): THREADS | EXTENSIONS,
+ (True, True, True, True, True, True):
+ OBMALLOC | FORK | EXEC | THREADS | DAEMON_THREADS | EXTENSIONS,
+ (True, False, False, False, False, False): OBMALLOC,
+ (False, False, False, True, False, True): THREADS | EXTENSIONS,
}.items():
kwargs = dict(zip(kwlist, config))
expected = {
@@ -1246,6 +1251,20 @@ def test_configured_settings(self):
self.assertEqual(settings, expected)
+ # expected to fail
+ for config in [
+ (False, False, False, False, False, False),
+ ]:
+ kwargs = dict(zip(kwlist, config))
+ with self.subTest(config):
+ script = textwrap.dedent(f'''
+ import _testinternalcapi
+ _testinternalcapi.get_interp_settings()
+ raise NotImplementedError('unreachable')
+ ''')
+ with self.assertRaises(RuntimeError):
+ support.run_in_subinterp_with_config(script, **kwargs)
+
@unittest.skipIf(_testsinglephase is None, "test requires _testsinglephase module")
@unittest.skipUnless(hasattr(os, "pipe"), "requires os.pipe()")
def test_overridden_setting_extensions_subinterp_check(self):
@@ -1257,13 +1276,15 @@ def test_overridden_setting_extensions_subinterp_check(self):
"""
import json
+ OBMALLOC = 1<<5
EXTENSIONS = 1<<8
THREADS = 1<<10
DAEMON_THREADS = 1<<11
FORK = 1<<15
EXEC = 1<<16
- BASE_FLAGS = FORK | EXEC | THREADS | DAEMON_THREADS
+ BASE_FLAGS = OBMALLOC | FORK | EXEC | THREADS | DAEMON_THREADS
base_kwargs = {
+ 'use_main_obmalloc': True,
'allow_fork': True,
'allow_exec': True,
'allow_threads': True,
@@ -1400,7 +1421,7 @@ def callback():
@threading_helper.requires_working_threading()
def test_gilstate_ensure_no_deadlock(self):
# See https://github.com/python/cpython/issues/96071
- code = textwrap.dedent(f"""
+ code = textwrap.dedent("""
import _testcapi
def callback():
diff --git a/Lib/test/test_cmd_line_script.py b/Lib/test/test_cmd_line_script.py
index f10d72ea5547ee..d98e23855e0c19 100644
--- a/Lib/test/test_cmd_line_script.py
+++ b/Lib/test/test_cmd_line_script.py
@@ -636,9 +636,9 @@ def test_syntaxerror_multi_line_fstring(self):
self.assertEqual(
stderr.splitlines()[-3:],
[
- b' foo"""',
- b' ^',
- b'SyntaxError: f-string: empty expression not allowed',
+ b' foo = f"""{}',
+ b' ^',
+ b'SyntaxError: f-string: valid expression required before \'}\'',
],
)
diff --git a/Lib/test/test_code.py b/Lib/test/test_code.py
index ecb3525a928468..ca06a39f5df142 100644
--- a/Lib/test/test_code.py
+++ b/Lib/test/test_code.py
@@ -356,7 +356,7 @@ def foo():
foo.__code__ = foo.__code__.replace(
co_code=b'\xe5' + foo.__code__.co_code[1:])
- msg = f"unknown opcode 229"
+ msg = "unknown opcode 229"
with self.assertRaisesRegex(SystemError, msg):
foo()
diff --git a/Lib/test/test_codeop.py b/Lib/test/test_codeop.py
index 6966c2ffd811b8..e3c382266fa058 100644
--- a/Lib/test/test_codeop.py
+++ b/Lib/test/test_codeop.py
@@ -277,7 +277,7 @@ def test_filename(self):
def test_warning(self):
# Test that the warning is only returned once.
with warnings_helper.check_warnings(
- ('"is" with a literal', SyntaxWarning),
+ ('"is" with \'str\' literal', SyntaxWarning),
("invalid escape sequence", SyntaxWarning),
) as w:
compile_command(r"'\e' is 0")
diff --git a/Lib/test/test_collections.py b/Lib/test/test_collections.py
index bfe18c7fc50330..fb568a48396498 100644
--- a/Lib/test/test_collections.py
+++ b/Lib/test/test_collections.py
@@ -1626,7 +1626,7 @@ def test_Set_from_iterable(self):
class SetUsingInstanceFromIterable(MutableSet):
def __init__(self, values, created_by):
if not created_by:
- raise ValueError(f'created_by must be specified')
+ raise ValueError('created_by must be specified')
self.created_by = created_by
self._values = set(values)
diff --git a/Lib/test/test_contextlib.py b/Lib/test/test_contextlib.py
index ec06785b5667a6..0f8351ab8108a6 100644
--- a/Lib/test/test_contextlib.py
+++ b/Lib/test/test_contextlib.py
@@ -10,6 +10,7 @@
from contextlib import * # Tests __all__
from test import support
from test.support import os_helper
+from test.support.testcase import ExceptionIsLikeMixin
import weakref
@@ -1148,7 +1149,7 @@ class TestRedirectStderr(TestRedirectStream, unittest.TestCase):
orig_stream = "stderr"
-class TestSuppress(unittest.TestCase):
+class TestSuppress(ExceptionIsLikeMixin, unittest.TestCase):
@support.requires_docstrings
def test_instance_docs(self):
@@ -1202,6 +1203,30 @@ def test_cm_is_reentrant(self):
1/0
self.assertTrue(outer_continued)
+ def test_exception_groups(self):
+ eg_ve = lambda: ExceptionGroup(
+ "EG with ValueErrors only",
+ [ValueError("ve1"), ValueError("ve2"), ValueError("ve3")],
+ )
+ eg_all = lambda: ExceptionGroup(
+ "EG with many types of exceptions",
+ [ValueError("ve1"), KeyError("ke1"), ValueError("ve2"), KeyError("ke2")],
+ )
+ with suppress(ValueError):
+ raise eg_ve()
+ with suppress(ValueError, KeyError):
+ raise eg_all()
+ with self.assertRaises(ExceptionGroup) as eg1:
+ with suppress(ValueError):
+ raise eg_all()
+ self.assertExceptionIsLike(
+ eg1.exception,
+ ExceptionGroup(
+ "EG with many types of exceptions",
+ [KeyError("ke1"), KeyError("ke2")],
+ ),
+ )
+
class TestChdir(unittest.TestCase):
def make_relative_path(self, *parts):
diff --git a/Lib/test/test_coroutines.py b/Lib/test/test_coroutines.py
index 6ab19efcc588b8..47145782c0f04f 100644
--- a/Lib/test/test_coroutines.py
+++ b/Lib/test/test_coroutines.py
@@ -2365,15 +2365,15 @@ def check(depth, msg):
f"coroutine '{corofn.__qualname__}' was never awaited\n",
"Coroutine created at (most recent call last)\n",
f' File "{a1_filename}", line {a1_lineno}, in a1\n',
- f' return corofn() # comment in a1',
+ " return corofn() # comment in a1",
]))
check(2, "".join([
f"coroutine '{corofn.__qualname__}' was never awaited\n",
"Coroutine created at (most recent call last)\n",
f' File "{a2_filename}", line {a2_lineno}, in a2\n',
- f' return a1() # comment in a2\n',
+ " return a1() # comment in a2\n",
f' File "{a1_filename}", line {a1_lineno}, in a1\n',
- f' return corofn() # comment in a1',
+ " return corofn() # comment in a1",
]))
finally:
diff --git a/Lib/test/test_ctypes/test_pep3118.py b/Lib/test/test_ctypes/test_pep3118.py
index c8a70e3e335693..038161745df905 100644
--- a/Lib/test/test_ctypes/test_pep3118.py
+++ b/Lib/test/test_ctypes/test_pep3118.py
@@ -28,7 +28,7 @@ def test_native_types(self):
if shape:
self.assertEqual(len(v), shape[0])
else:
- self.assertEqual(len(v) * sizeof(itemtp), sizeof(ob))
+ self.assertRaises(TypeError, len, v)
self.assertEqual(v.itemsize, sizeof(itemtp))
self.assertEqual(v.shape, shape)
# XXX Issue #12851: PyCData_NewGetBuffer() must provide strides
@@ -39,11 +39,10 @@ def test_native_types(self):
# they are always read/write
self.assertFalse(v.readonly)
- if v.shape:
- n = 1
- for dim in v.shape:
- n = n * dim
- self.assertEqual(n * v.itemsize, len(v.tobytes()))
+ n = 1
+ for dim in v.shape:
+ n = n * dim
+ self.assertEqual(n * v.itemsize, len(v.tobytes()))
except:
# so that we can see the failing type
print(tp)
@@ -58,7 +57,7 @@ def test_endian_types(self):
if shape:
self.assertEqual(len(v), shape[0])
else:
- self.assertEqual(len(v) * sizeof(itemtp), sizeof(ob))
+ self.assertRaises(TypeError, len, v)
self.assertEqual(v.itemsize, sizeof(itemtp))
self.assertEqual(v.shape, shape)
# XXX Issue #12851
@@ -67,11 +66,10 @@ def test_endian_types(self):
# they are always read/write
self.assertFalse(v.readonly)
- if v.shape:
- n = 1
- for dim in v.shape:
- n = n * dim
- self.assertEqual(n, len(v))
+ n = 1
+ for dim in v.shape:
+ n = n * dim
+ self.assertEqual(n * v.itemsize, len(v.tobytes()))
except:
# so that we can see the failing type
print(tp)
@@ -243,7 +241,7 @@ class LEPoint(LittleEndianStructure):
#
endian_types = [
(BEPoint, "T{>l:x:>l:y:}".replace('l', s_long), (), BEPoint),
- (LEPoint, "T{l:x:>l:y:}".replace('l', s_long), (), POINTER(BEPoint)),
(POINTER(LEPoint), "&T{"
- ' for field z is not allowed'
+ "mutable default .*Subclass'>"
+ " for field z is not allowed"
):
@dataclass
class Point:
@@ -2297,6 +2297,19 @@ class C:
self.assertDocStrEqual(C.__doc__, "C(x:collections.deque=)")
+ def test_docstring_with_no_signature(self):
+ # See https://github.com/python/cpython/issues/103449
+ class Meta(type):
+ __call__ = dict
+ class Base(metaclass=Meta):
+ pass
+
+ @dataclass
+ class C(Base):
+ pass
+
+ self.assertDocStrEqual(C.__doc__, "C")
+
class TestInit(unittest.TestCase):
def test_base_has_init(self):
diff --git a/Lib/test/test_dis.py b/Lib/test/test_dis.py
index 0a60a979614d52..5262c5c257cb89 100644
--- a/Lib/test/test_dis.py
+++ b/Lib/test/test_dis.py
@@ -227,6 +227,26 @@ def bug42562():
JUMP_FORWARD -4 (to 0)
"""
+def func_w_kwargs(a, b, **c):
+ pass
+
+def wrap_func_w_kwargs():
+ func_w_kwargs(1, 2, c=5)
+
+dis_kw_names = """\
+%3d RESUME 0
+
+%3d LOAD_GLOBAL 1 (NULL + func_w_kwargs)
+ LOAD_CONST 1 (1)
+ LOAD_CONST 2 (2)
+ LOAD_CONST 3 (5)
+ KW_NAMES 4 (('c',))
+ CALL 3
+ POP_TOP
+ RETURN_CONST 0 (None)
+""" % (wrap_func_w_kwargs.__code__.co_firstlineno,
+ wrap_func_w_kwargs.__code__.co_firstlineno + 1)
+
_BIG_LINENO_FORMAT = """\
1 RESUME 0
@@ -861,6 +881,13 @@ def do_disassembly_test(self, func, expected, with_offsets=False):
self.maxDiff = None
got = self.get_disassembly(func, depth=0)
self.do_disassembly_compare(got, expected, with_offsets)
+ # Add checks for dis.disco
+ if hasattr(func, '__code__'):
+ got_disco = io.StringIO()
+ with contextlib.redirect_stdout(got_disco):
+ dis.disco(func.__code__)
+ self.do_disassembly_compare(got_disco.getvalue(), expected,
+ with_offsets)
def test_opmap(self):
self.assertEqual(dis.opmap["NOP"], 9)
@@ -911,6 +938,10 @@ def test_bug_46724(self):
# Test that negative operargs are handled properly
self.do_disassembly_test(bug46724, dis_bug46724)
+ def test_kw_names(self):
+ # Test that value is displayed for KW_NAMES
+ self.do_disassembly_test(wrap_func_w_kwargs, dis_kw_names)
+
def test_big_linenos(self):
def func(count):
namespace = {}
@@ -1067,6 +1098,13 @@ def check(expected, **kwargs):
check(dis_nested_2, depth=None)
check(dis_nested_2)
+ def test__try_compile_no_context_exc_on_error(self):
+ # see gh-102114
+ try:
+ dis._try_compile(")", "")
+ except Exception as e:
+ self.assertIsNone(e.__context__)
+
@staticmethod
def code_quicken(f, times=ADAPTIVE_WARMUP_DELAY):
for _ in range(times):
@@ -1928,6 +1966,14 @@ def test_findlabels(self):
self.assertEqual(sorted(labels), sorted(jumps))
+ def test_findlinestarts(self):
+ def func():
+ pass
+
+ code = func.__code__
+ offsets = [linestart[0] for linestart in dis.findlinestarts(code)]
+ self.assertEqual(offsets, [0, 2])
+
class TestDisTraceback(DisTestBase):
def setUp(self) -> None:
diff --git a/Lib/test/test_embed.py b/Lib/test/test_embed.py
index e56d0db8627e91..c9691bbf304915 100644
--- a/Lib/test/test_embed.py
+++ b/Lib/test/test_embed.py
@@ -110,7 +110,7 @@ def run_embedded_interpreter(self, *args, env=None,
print(f"--- {cmd} failed ---")
print(f"stdout:\n{out}")
print(f"stderr:\n{err}")
- print(f"------")
+ print("------")
self.assertEqual(p.returncode, returncode,
"bad returncode %d, stderr is %r" %
@@ -1656,6 +1656,7 @@ def test_init_use_frozen_modules(self):
api=API_PYTHON, env=env)
def test_init_main_interpreter_settings(self):
+ OBMALLOC = 1<<5
EXTENSIONS = 1<<8
THREADS = 1<<10
DAEMON_THREADS = 1<<11
@@ -1664,7 +1665,7 @@ def test_init_main_interpreter_settings(self):
expected = {
# All optional features should be enabled.
'feature_flags':
- FORK | EXEC | THREADS | DAEMON_THREADS,
+ OBMALLOC | FORK | EXEC | THREADS | DAEMON_THREADS,
}
out, err = self.run_embedded_interpreter(
'test_init_main_interpreter_settings',
diff --git a/Lib/test/test_ensurepip.py b/Lib/test/test_ensurepip.py
index bfca0cd7fbe483..69ab2a4feaa938 100644
--- a/Lib/test/test_ensurepip.py
+++ b/Lib/test/test_ensurepip.py
@@ -20,7 +20,6 @@ def test_version(self):
# Test version()
with tempfile.TemporaryDirectory() as tmpdir:
self.touch(tmpdir, "pip-1.2.3b1-py2.py3-none-any.whl")
- self.touch(tmpdir, "setuptools-49.1.3-py3-none-any.whl")
with (unittest.mock.patch.object(ensurepip, '_PACKAGES', None),
unittest.mock.patch.object(ensurepip, '_WHEEL_PKG_DIR', tmpdir)):
self.assertEqual(ensurepip.version(), '1.2.3b1')
@@ -36,15 +35,12 @@ def test_get_packages_no_dir(self):
# use bundled wheel packages
self.assertIsNotNone(packages['pip'].wheel_name)
- self.assertIsNotNone(packages['setuptools'].wheel_name)
def test_get_packages_with_dir(self):
# Test _get_packages() with a wheel package directory
- setuptools_filename = "setuptools-49.1.3-py3-none-any.whl"
pip_filename = "pip-20.2.2-py2.py3-none-any.whl"
with tempfile.TemporaryDirectory() as tmpdir:
- self.touch(tmpdir, setuptools_filename)
self.touch(tmpdir, pip_filename)
# not used, make sure that it's ignored
self.touch(tmpdir, "wheel-0.34.2-py2.py3-none-any.whl")
@@ -53,15 +49,12 @@ def test_get_packages_with_dir(self):
unittest.mock.patch.object(ensurepip, '_WHEEL_PKG_DIR', tmpdir)):
packages = ensurepip._get_packages()
- self.assertEqual(packages['setuptools'].version, '49.1.3')
- self.assertEqual(packages['setuptools'].wheel_path,
- os.path.join(tmpdir, setuptools_filename))
self.assertEqual(packages['pip'].version, '20.2.2')
self.assertEqual(packages['pip'].wheel_path,
os.path.join(tmpdir, pip_filename))
# wheel package is ignored
- self.assertEqual(sorted(packages), ['pip', 'setuptools'])
+ self.assertEqual(sorted(packages), ['pip'])
class EnsurepipMixin:
@@ -92,13 +85,13 @@ def test_basic_bootstrapping(self):
self.run_pip.assert_called_once_with(
[
"install", "--no-cache-dir", "--no-index", "--find-links",
- unittest.mock.ANY, "setuptools", "pip",
+ unittest.mock.ANY, "pip",
],
unittest.mock.ANY,
)
additional_paths = self.run_pip.call_args[0][1]
- self.assertEqual(len(additional_paths), 2)
+ self.assertEqual(len(additional_paths), 1)
def test_bootstrapping_with_root(self):
ensurepip.bootstrap(root="/foo/bar/")
@@ -107,7 +100,7 @@ def test_bootstrapping_with_root(self):
[
"install", "--no-cache-dir", "--no-index", "--find-links",
unittest.mock.ANY, "--root", "/foo/bar/",
- "setuptools", "pip",
+ "pip",
],
unittest.mock.ANY,
)
@@ -118,7 +111,7 @@ def test_bootstrapping_with_user(self):
self.run_pip.assert_called_once_with(
[
"install", "--no-cache-dir", "--no-index", "--find-links",
- unittest.mock.ANY, "--user", "setuptools", "pip",
+ unittest.mock.ANY, "--user", "pip",
],
unittest.mock.ANY,
)
@@ -129,7 +122,7 @@ def test_bootstrapping_with_upgrade(self):
self.run_pip.assert_called_once_with(
[
"install", "--no-cache-dir", "--no-index", "--find-links",
- unittest.mock.ANY, "--upgrade", "setuptools", "pip",
+ unittest.mock.ANY, "--upgrade", "pip",
],
unittest.mock.ANY,
)
@@ -140,7 +133,7 @@ def test_bootstrapping_with_verbosity_1(self):
self.run_pip.assert_called_once_with(
[
"install", "--no-cache-dir", "--no-index", "--find-links",
- unittest.mock.ANY, "-v", "setuptools", "pip",
+ unittest.mock.ANY, "-v", "pip",
],
unittest.mock.ANY,
)
@@ -151,7 +144,7 @@ def test_bootstrapping_with_verbosity_2(self):
self.run_pip.assert_called_once_with(
[
"install", "--no-cache-dir", "--no-index", "--find-links",
- unittest.mock.ANY, "-vv", "setuptools", "pip",
+ unittest.mock.ANY, "-vv", "pip",
],
unittest.mock.ANY,
)
@@ -162,7 +155,7 @@ def test_bootstrapping_with_verbosity_3(self):
self.run_pip.assert_called_once_with(
[
"install", "--no-cache-dir", "--no-index", "--find-links",
- unittest.mock.ANY, "-vvv", "setuptools", "pip",
+ unittest.mock.ANY, "-vvv", "pip",
],
unittest.mock.ANY,
)
@@ -239,7 +232,6 @@ def test_uninstall(self):
self.run_pip.assert_called_once_with(
[
"uninstall", "-y", "--disable-pip-version-check", "pip",
- "setuptools",
]
)
@@ -250,7 +242,6 @@ def test_uninstall_with_verbosity_1(self):
self.run_pip.assert_called_once_with(
[
"uninstall", "-y", "--disable-pip-version-check", "-v", "pip",
- "setuptools",
]
)
@@ -261,7 +252,6 @@ def test_uninstall_with_verbosity_2(self):
self.run_pip.assert_called_once_with(
[
"uninstall", "-y", "--disable-pip-version-check", "-vv", "pip",
- "setuptools",
]
)
@@ -272,7 +262,7 @@ def test_uninstall_with_verbosity_3(self):
self.run_pip.assert_called_once_with(
[
"uninstall", "-y", "--disable-pip-version-check", "-vvv",
- "pip", "setuptools",
+ "pip"
]
)
@@ -312,13 +302,13 @@ def test_basic_bootstrapping(self):
self.run_pip.assert_called_once_with(
[
"install", "--no-cache-dir", "--no-index", "--find-links",
- unittest.mock.ANY, "setuptools", "pip",
+ unittest.mock.ANY, "pip",
],
unittest.mock.ANY,
)
additional_paths = self.run_pip.call_args[0][1]
- self.assertEqual(len(additional_paths), 2)
+ self.assertEqual(len(additional_paths), 1)
self.assertEqual(exit_code, 0)
def test_bootstrapping_error_code(self):
@@ -344,7 +334,6 @@ def test_basic_uninstall(self):
self.run_pip.assert_called_once_with(
[
"uninstall", "-y", "--disable-pip-version-check", "pip",
- "setuptools",
]
)
diff --git a/Lib/test/test_enum.py b/Lib/test/test_enum.py
index e9dfcf8586a823..fb7a016c9007f8 100644
--- a/Lib/test/test_enum.py
+++ b/Lib/test/test_enum.py
@@ -819,10 +819,27 @@ class TestPlainFlag(_EnumTests, _PlainOutputTests, _FlagTests, unittest.TestCase
class TestIntEnum(_EnumTests, _MinimalOutputTests, unittest.TestCase):
enum_type = IntEnum
+ #
+ def test_shadowed_attr(self):
+ class Number(IntEnum):
+ divisor = 1
+ numerator = 2
+ #
+ self.assertEqual(Number.divisor.numerator, 1)
+ self.assertIs(Number.numerator.divisor, Number.divisor)
class TestStrEnum(_EnumTests, _MinimalOutputTests, unittest.TestCase):
enum_type = StrEnum
+ #
+ def test_shadowed_attr(self):
+ class Book(StrEnum):
+ author = 'author'
+ title = 'title'
+ #
+ self.assertEqual(Book.author.title(), 'Author')
+ self.assertEqual(Book.title.title(), 'Title')
+ self.assertIs(Book.title.author, Book.author)
class TestIntFlag(_EnumTests, _MinimalOutputTests, _FlagTests, unittest.TestCase):
diff --git a/Lib/test/test_eof.py b/Lib/test/test_eof.py
index abcbf046e2cc22..be4fd73bfdc36b 100644
--- a/Lib/test/test_eof.py
+++ b/Lib/test/test_eof.py
@@ -4,6 +4,7 @@
from test import support
from test.support import os_helper
from test.support import script_helper
+from test.support import warnings_helper
import unittest
class EOFTestCase(unittest.TestCase):
@@ -36,10 +37,11 @@ def test_EOFS_with_file(self):
rc, out, err = script_helper.assert_python_failure(file_name)
self.assertIn(b'unterminated triple-quoted string literal (detected at line 3)', err)
+ @warnings_helper.ignore_warnings(category=SyntaxWarning)
def test_eof_with_line_continuation(self):
expect = "unexpected EOF while parsing (, line 1)"
try:
- compile('"\\xhh" \\', '', 'exec', dont_inherit=True)
+ compile('"\\Xhh" \\', '', 'exec')
except SyntaxError as msg:
self.assertEqual(str(msg), expect)
else:
diff --git a/Lib/test/test_except_star.py b/Lib/test/test_except_star.py
index c5167c5bba38af..c49c6008e08e8c 100644
--- a/Lib/test/test_except_star.py
+++ b/Lib/test/test_except_star.py
@@ -1,6 +1,7 @@
import sys
import unittest
import textwrap
+from test.support.testcase import ExceptionIsLikeMixin
class TestInvalidExceptStar(unittest.TestCase):
def test_mixed_except_and_except_star_is_syntax_error(self):
@@ -169,26 +170,7 @@ def f(x):
self.assertIsInstance(exc, ExceptionGroup)
-class ExceptStarTest(unittest.TestCase):
- def assertExceptionIsLike(self, exc, template):
- if exc is None and template is None:
- return
-
- if template is None:
- self.fail(f"unexpected exception: {exc}")
-
- if exc is None:
- self.fail(f"expected an exception like {template!r}, got None")
-
- if not isinstance(exc, ExceptionGroup):
- self.assertEqual(exc.__class__, template.__class__)
- self.assertEqual(exc.args[0], template.args[0])
- else:
- self.assertEqual(exc.message, template.message)
- self.assertEqual(len(exc.exceptions), len(template.exceptions))
- for e, t in zip(exc.exceptions, template.exceptions):
- self.assertExceptionIsLike(e, t)
-
+class ExceptStarTest(ExceptionIsLikeMixin, unittest.TestCase):
def assertMetadataEqual(self, e1, e2):
if e1 is None or e2 is None:
self.assertTrue(e1 is None and e2 is None)
@@ -636,18 +618,17 @@ def test_raise_handle_all_raise_one_named(self):
raise orig
except* (TypeError, ValueError) as e:
raise SyntaxError(3)
- except BaseException as e:
+ except SyntaxError as e:
exc = e
- self.assertExceptionIsLike(
- exc, ExceptionGroup("", [SyntaxError(3)]))
+ self.assertExceptionIsLike(exc, SyntaxError(3))
self.assertExceptionIsLike(
- exc.exceptions[0].__context__,
+ exc.__context__,
ExceptionGroup("eg", [TypeError(1), ValueError(2)]))
self.assertMetadataNotEqual(orig, exc)
- self.assertMetadataEqual(orig, exc.exceptions[0].__context__)
+ self.assertMetadataEqual(orig, exc.__context__)
def test_raise_handle_all_raise_one_unnamed(self):
orig = ExceptionGroup("eg", [TypeError(1), ValueError(2)])
@@ -656,18 +637,17 @@ def test_raise_handle_all_raise_one_unnamed(self):
raise orig
except* (TypeError, ValueError) as e:
raise SyntaxError(3)
- except ExceptionGroup as e:
+ except SyntaxError as e:
exc = e
- self.assertExceptionIsLike(
- exc, ExceptionGroup("", [SyntaxError(3)]))
+ self.assertExceptionIsLike(exc, SyntaxError(3))
self.assertExceptionIsLike(
- exc.exceptions[0].__context__,
+ exc.__context__,
ExceptionGroup("eg", [TypeError(1), ValueError(2)]))
self.assertMetadataNotEqual(orig, exc)
- self.assertMetadataEqual(orig, exc.exceptions[0].__context__)
+ self.assertMetadataEqual(orig, exc.__context__)
def test_raise_handle_all_raise_two_named(self):
orig = ExceptionGroup("eg", [TypeError(1), ValueError(2)])
@@ -791,23 +771,22 @@ def test_raise_handle_all_raise_one_named(self):
raise orig
except* (TypeError, ValueError) as e:
raise SyntaxError(3) from e
- except BaseException as e:
+ except SyntaxError as e:
exc = e
- self.assertExceptionIsLike(
- exc, ExceptionGroup("", [SyntaxError(3)]))
+ self.assertExceptionIsLike(exc, SyntaxError(3))
self.assertExceptionIsLike(
- exc.exceptions[0].__context__,
+ exc.__context__,
ExceptionGroup("eg", [TypeError(1), ValueError(2)]))
self.assertExceptionIsLike(
- exc.exceptions[0].__cause__,
+ exc.__cause__,
ExceptionGroup("eg", [TypeError(1), ValueError(2)]))
self.assertMetadataNotEqual(orig, exc)
- self.assertMetadataEqual(orig, exc.exceptions[0].__context__)
- self.assertMetadataEqual(orig, exc.exceptions[0].__cause__)
+ self.assertMetadataEqual(orig, exc.__context__)
+ self.assertMetadataEqual(orig, exc.__cause__)
def test_raise_handle_all_raise_one_unnamed(self):
orig = ExceptionGroup("eg", [TypeError(1), ValueError(2)])
@@ -817,23 +796,22 @@ def test_raise_handle_all_raise_one_unnamed(self):
except* (TypeError, ValueError) as e:
e = sys.exception()
raise SyntaxError(3) from e
- except ExceptionGroup as e:
+ except SyntaxError as e:
exc = e
- self.assertExceptionIsLike(
- exc, ExceptionGroup("", [SyntaxError(3)]))
+ self.assertExceptionIsLike(exc, SyntaxError(3))
self.assertExceptionIsLike(
- exc.exceptions[0].__context__,
+ exc.__context__,
ExceptionGroup("eg", [TypeError(1), ValueError(2)]))
self.assertExceptionIsLike(
- exc.exceptions[0].__cause__,
+ exc.__cause__,
ExceptionGroup("eg", [TypeError(1), ValueError(2)]))
self.assertMetadataNotEqual(orig, exc)
- self.assertMetadataEqual(orig, exc.exceptions[0].__context__)
- self.assertMetadataEqual(orig, exc.exceptions[0].__cause__)
+ self.assertMetadataEqual(orig, exc.__context__)
+ self.assertMetadataEqual(orig, exc.__cause__)
def test_raise_handle_all_raise_two_named(self):
orig = ExceptionGroup("eg", [TypeError(1), ValueError(2)])
diff --git a/Lib/test/test_exceptions.py b/Lib/test/test_exceptions.py
index 684e888f08c778..4ef7decfbc263e 100644
--- a/Lib/test/test_exceptions.py
+++ b/Lib/test/test_exceptions.py
@@ -155,6 +155,7 @@ def ckmsg(src, msg):
ckmsg(s, "'continue' not properly in loop")
ckmsg("continue\n", "'continue' not properly in loop")
+ ckmsg("f'{6 0}'", "invalid syntax. Perhaps you forgot a comma?")
def testSyntaxErrorMissingParens(self):
def ckmsg(src, msg, exception=SyntaxError):
@@ -227,7 +228,7 @@ def testSyntaxErrorOffset(self):
check('Python = "\u1e54\xfd\u0163\u0125\xf2\xf1" +', 1, 20)
check(b'# -*- coding: cp1251 -*-\nPython = "\xcf\xb3\xf2\xee\xed" +',
2, 19, encoding='cp1251')
- check(b'Python = "\xcf\xb3\xf2\xee\xed" +', 1, 18)
+ check(b'Python = "\xcf\xb3\xf2\xee\xed" +', 1, 10)
check('x = "a', 1, 5)
check('lambda x: x = 2', 1, 1)
check('f{a + b + c}', 1, 2)
diff --git a/Lib/test/test_fstring.py b/Lib/test/test_fstring.py
index b3f6ef41d77b8f..5e94c99ae65af1 100644
--- a/Lib/test/test_fstring.py
+++ b/Lib/test/test_fstring.py
@@ -13,6 +13,7 @@
import types
import decimal
import unittest
+from test import support
from test.support.os_helper import temp_cwd
from test.support.script_helper import assert_python_failure
@@ -329,13 +330,13 @@ def test_ast_line_numbers_multiline_fstring(self):
self.assertEqual(t.body[1].lineno, 3)
self.assertEqual(t.body[1].value.lineno, 3)
self.assertEqual(t.body[1].value.values[0].lineno, 3)
- self.assertEqual(t.body[1].value.values[1].lineno, 3)
- self.assertEqual(t.body[1].value.values[2].lineno, 3)
+ self.assertEqual(t.body[1].value.values[1].lineno, 4)
+ self.assertEqual(t.body[1].value.values[2].lineno, 6)
self.assertEqual(t.body[1].col_offset, 0)
self.assertEqual(t.body[1].value.col_offset, 0)
- self.assertEqual(t.body[1].value.values[0].col_offset, 0)
- self.assertEqual(t.body[1].value.values[1].col_offset, 0)
- self.assertEqual(t.body[1].value.values[2].col_offset, 0)
+ self.assertEqual(t.body[1].value.values[0].col_offset, 4)
+ self.assertEqual(t.body[1].value.values[1].col_offset, 2)
+ self.assertEqual(t.body[1].value.values[2].col_offset, 11)
# NOTE: the following lineno information and col_offset is correct for
# expressions within FormattedValues.
binop = t.body[1].value.values[1].value
@@ -366,13 +367,13 @@ def test_ast_line_numbers_multiline_fstring(self):
self.assertEqual(t.body[0].lineno, 2)
self.assertEqual(t.body[0].value.lineno, 2)
self.assertEqual(t.body[0].value.values[0].lineno, 2)
- self.assertEqual(t.body[0].value.values[1].lineno, 2)
- self.assertEqual(t.body[0].value.values[2].lineno, 2)
+ self.assertEqual(t.body[0].value.values[1].lineno, 3)
+ self.assertEqual(t.body[0].value.values[2].lineno, 3)
self.assertEqual(t.body[0].col_offset, 0)
self.assertEqual(t.body[0].value.col_offset, 4)
- self.assertEqual(t.body[0].value.values[0].col_offset, 4)
- self.assertEqual(t.body[0].value.values[1].col_offset, 4)
- self.assertEqual(t.body[0].value.values[2].col_offset, 4)
+ self.assertEqual(t.body[0].value.values[0].col_offset, 8)
+ self.assertEqual(t.body[0].value.values[1].col_offset, 10)
+ self.assertEqual(t.body[0].value.values[2].col_offset, 17)
# Check {blech}
self.assertEqual(t.body[0].value.values[1].value.lineno, 3)
self.assertEqual(t.body[0].value.values[1].value.end_lineno, 3)
@@ -387,6 +388,20 @@ def test_ast_line_numbers_with_parentheses(self):
t = ast.parse(expr)
self.assertEqual(type(t), ast.Module)
self.assertEqual(len(t.body), 1)
+ # check the joinedstr location
+ joinedstr = t.body[0].value
+ self.assertEqual(type(joinedstr), ast.JoinedStr)
+ self.assertEqual(joinedstr.lineno, 3)
+ self.assertEqual(joinedstr.end_lineno, 3)
+ self.assertEqual(joinedstr.col_offset, 4)
+ self.assertEqual(joinedstr.end_col_offset, 17)
+ # check the formatted value location
+ fv = t.body[0].value.values[1]
+ self.assertEqual(type(fv), ast.FormattedValue)
+ self.assertEqual(fv.lineno, 3)
+ self.assertEqual(fv.end_lineno, 3)
+ self.assertEqual(fv.col_offset, 7)
+ self.assertEqual(fv.end_col_offset, 16)
# check the test(t) location
call = t.body[0].value.values[1].value
self.assertEqual(type(call), ast.Call)
@@ -397,6 +412,50 @@ def test_ast_line_numbers_with_parentheses(self):
expr = """
x = (
+ u'wat',
+ u"wat",
+ b'wat',
+ b"wat",
+ f'wat',
+ f"wat",
+)
+
+y = (
+ u'''wat''',
+ u\"\"\"wat\"\"\",
+ b'''wat''',
+ b\"\"\"wat\"\"\",
+ f'''wat''',
+ f\"\"\"wat\"\"\",
+)
+ """
+ t = ast.parse(expr)
+ self.assertEqual(type(t), ast.Module)
+ self.assertEqual(len(t.body), 2)
+ x, y = t.body
+
+ # Check the single quoted string offsets first.
+ offsets = [
+ (elt.col_offset, elt.end_col_offset)
+ for elt in x.value.elts
+ ]
+ self.assertTrue(all(
+ offset == (4, 10)
+ for offset in offsets
+ ))
+
+ # Check the triple quoted string offsets.
+ offsets = [
+ (elt.col_offset, elt.end_col_offset)
+ for elt in y.value.elts
+ ]
+ self.assertTrue(all(
+ offset == (4, 14)
+ for offset in offsets
+ ))
+
+ expr = """
+x = (
'PERL_MM_OPT', (
f'wat'
f'some_string={f(x)} '
@@ -415,9 +474,9 @@ def test_ast_line_numbers_with_parentheses(self):
# check the first wat
self.assertEqual(type(wat1), ast.Constant)
self.assertEqual(wat1.lineno, 4)
- self.assertEqual(wat1.end_lineno, 6)
- self.assertEqual(wat1.col_offset, 12)
- self.assertEqual(wat1.end_col_offset, 18)
+ self.assertEqual(wat1.end_lineno, 5)
+ self.assertEqual(wat1.col_offset, 14)
+ self.assertEqual(wat1.end_col_offset, 26)
# check the call
call = middle.value
self.assertEqual(type(call), ast.Call)
@@ -427,10 +486,14 @@ def test_ast_line_numbers_with_parentheses(self):
self.assertEqual(call.end_col_offset, 31)
# check the second wat
self.assertEqual(type(wat2), ast.Constant)
- self.assertEqual(wat2.lineno, 4)
+ self.assertEqual(wat2.lineno, 5)
self.assertEqual(wat2.end_lineno, 6)
- self.assertEqual(wat2.col_offset, 12)
- self.assertEqual(wat2.end_col_offset, 18)
+ self.assertEqual(wat2.col_offset, 32)
+ # wat ends at the offset 17, but the whole f-string
+ # ends at the offset 18 (since the quote is part of the
+ # f-string but not the wat string)
+ self.assertEqual(wat2.end_col_offset, 17)
+ self.assertEqual(fstring.end_col_offset, 18)
def test_docstring(self):
def f():
@@ -467,36 +530,42 @@ def test_literal(self):
self.assertEqual(f' ', ' ')
def test_unterminated_string(self):
- self.assertAllRaise(SyntaxError, 'f-string: unterminated string',
+ self.assertAllRaise(SyntaxError, 'unterminated string',
[r"""f'{"x'""",
r"""f'{"x}'""",
r"""f'{("x'""",
r"""f'{("x}'""",
])
+ @unittest.skipIf(support.is_wasi, "exhausts limited stack on WASI")
def test_mismatched_parens(self):
- self.assertAllRaise(SyntaxError, r"f-string: closing parenthesis '\}' "
+ self.assertAllRaise(SyntaxError, r"closing parenthesis '\}' "
r"does not match opening parenthesis '\('",
["f'{((}'",
])
- self.assertAllRaise(SyntaxError, r"f-string: closing parenthesis '\)' "
+ self.assertAllRaise(SyntaxError, r"closing parenthesis '\)' "
r"does not match opening parenthesis '\['",
["f'{a[4)}'",
])
- self.assertAllRaise(SyntaxError, r"f-string: closing parenthesis '\]' "
+ self.assertAllRaise(SyntaxError, r"closing parenthesis '\]' "
r"does not match opening parenthesis '\('",
["f'{a(4]}'",
])
- self.assertAllRaise(SyntaxError, r"f-string: closing parenthesis '\}' "
+ self.assertAllRaise(SyntaxError, r"closing parenthesis '\}' "
r"does not match opening parenthesis '\['",
["f'{a[4}'",
])
- self.assertAllRaise(SyntaxError, r"f-string: closing parenthesis '\}' "
+ self.assertAllRaise(SyntaxError, r"closing parenthesis '\}' "
r"does not match opening parenthesis '\('",
["f'{a(4}'",
])
self.assertRaises(SyntaxError, eval, "f'{" + "("*500 + "}'")
+ def test_fstring_nested_too_deeply(self):
+ self.assertAllRaise(SyntaxError,
+ "f-string: expressions nested too deeply",
+ ['f"{1+2:{1+2:{1+1:{1}}}}"'])
+
def test_double_braces(self):
self.assertEqual(f'{{', '{')
self.assertEqual(f'a{{', 'a{')
@@ -559,8 +628,14 @@ def test_compile_time_concat(self):
self.assertEqual(f'' '' f'', '')
self.assertEqual(f'' '' f'' '', '')
- self.assertAllRaise(SyntaxError, "f-string: expecting '}'",
- ["f'{3' f'}'", # can't concat to get a valid f-string
+ # This is not really [f'{'] + [f'}'] since we treat the inside
+ # of braces as a purely new context, so it is actually f'{ and
+ # then eval(' f') (a valid expression) and then }' which would
+ # constitute a valid f-string.
+ self.assertEqual(f'{' f'}', ' f')
+
+ self.assertAllRaise(SyntaxError, "expecting '}'",
+ ['''f'{3' f"}"''', # can't concat to get a valid f-string
])
def test_comments(self):
@@ -618,25 +693,19 @@ def test_format_specifier_expressions(self):
self.assertEqual(f'{-10:-{"#"}1{0}x}', ' -0xa')
self.assertEqual(f'{-10:{"-"}#{1}0{"x"}}', ' -0xa')
self.assertEqual(f'{10:#{3 != {4:5} and width}x}', ' 0xa')
+ self.assertEqual(f'result: {value:{width:{0}}.{precision:1}}', 'result: 12.35')
- self.assertAllRaise(SyntaxError,
- """f-string: invalid conversion character 'r{"': """
- """expected 's', 'r', or 'a'""",
+ self.assertAllRaise(SyntaxError, "f-string: expecting ':' or '}'",
["""f'{"s"!r{":10"}}'""",
-
# This looks like a nested format spec.
])
- self.assertAllRaise(SyntaxError, "f-string: invalid syntax",
+ self.assertAllRaise(SyntaxError,
+ "f-string: expecting a valid expression after '{'",
[# Invalid syntax inside a nested spec.
"f'{4:{/5}}'",
])
- self.assertAllRaise(SyntaxError, "f-string: expressions nested too deeply",
- [# Can't nest format specifiers.
- "f'result: {value:{width:{0}}.{precision:1}}'",
- ])
-
self.assertAllRaise(SyntaxError, 'f-string: invalid conversion character',
[# No expansion inside conversion or for
# the : or ! itself.
@@ -655,7 +724,8 @@ def __format__(self, spec):
self.assertEqual(f'{x} {x}', '1 2')
def test_missing_expression(self):
- self.assertAllRaise(SyntaxError, 'f-string: empty expression not allowed',
+ self.assertAllRaise(SyntaxError,
+ "f-string: valid expression required before '}'",
["f'{}'",
"f'{ }'"
"f' {} '",
@@ -667,8 +737,8 @@ def test_missing_expression(self):
"f'''{\t\f\r\n}'''",
])
- # Different error messages are raised when a specifier ('!', ':' or '=') is used after an empty expression
- self.assertAllRaise(SyntaxError, "f-string: expression required before '!'",
+ self.assertAllRaise(SyntaxError,
+ "f-string: valid expression required before '!'",
["f'{!r}'",
"f'{ !r}'",
"f'{!}'",
@@ -689,7 +759,8 @@ def test_missing_expression(self):
"f'{ !xr:a}'",
])
- self.assertAllRaise(SyntaxError, "f-string: expression required before ':'",
+ self.assertAllRaise(SyntaxError,
+ "f-string: valid expression required before ':'",
["f'{:}'",
"f'{ :!}'",
"f'{:2}'",
@@ -697,7 +768,8 @@ def test_missing_expression(self):
"f'{:'",
])
- self.assertAllRaise(SyntaxError, "f-string: expression required before '='",
+ self.assertAllRaise(SyntaxError,
+ "f-string: valid expression required before '='",
["f'{=}'",
"f'{ =}'",
"f'{ =:}'",
@@ -715,24 +787,18 @@ def test_missing_expression(self):
def test_parens_in_expressions(self):
self.assertEqual(f'{3,}', '(3,)')
- # Add these because when an expression is evaluated, parens
- # are added around it. But we shouldn't go from an invalid
- # expression to a valid one. The added parens are just
- # supposed to allow whitespace (including newlines).
- self.assertAllRaise(SyntaxError, 'f-string: invalid syntax',
+ self.assertAllRaise(SyntaxError,
+ "f-string: expecting a valid expression after '{'",
["f'{,}'",
- "f'{,}'", # this is (,), which is an error
])
self.assertAllRaise(SyntaxError, r"f-string: unmatched '\)'",
["f'{3)+(4}'",
])
- self.assertAllRaise(SyntaxError, 'unterminated string literal',
- ["f'{\n}'",
- ])
def test_newlines_before_syntax_error(self):
- self.assertAllRaise(SyntaxError, "invalid syntax",
+ self.assertAllRaise(SyntaxError,
+ "f-string: expecting a valid expression after '{'",
["f'{.}'", "\nf'{.}'", "\n\nf'{.}'"])
def test_backslashes_in_string_part(self):
@@ -776,7 +842,7 @@ def test_backslashes_in_string_part(self):
self.assertEqual(f'2\x203', '2 3')
self.assertEqual(f'\x203', ' 3')
- with self.assertWarns(SyntaxWarning): # invalid escape sequence
+ with self.assertWarns(DeprecationWarning): # invalid escape sequence
value = eval(r"f'\{6*7}'")
self.assertEqual(value, '\\42')
self.assertEqual(f'\\{6*7}', '\\42')
@@ -809,18 +875,40 @@ def test_misformed_unicode_character_name(self):
r"'\N{GREEK CAPITAL LETTER DELTA'",
])
- def test_no_backslashes_in_expression_part(self):
- self.assertAllRaise(SyntaxError, 'f-string expression part cannot include a backslash',
- [r"f'{\'a\'}'",
- r"f'{\t3}'",
- r"f'{\}'",
- r"rf'{\'a\'}'",
- r"rf'{\t3}'",
- r"rf'{\}'",
- r"""rf'{"\N{LEFT CURLY BRACKET}"}'""",
- r"f'{\n}'",
+ def test_backslashes_in_expression_part(self):
+ self.assertEqual(f"{(
+ 1 +
+ 2
+ )}", "3")
+
+ self.assertEqual("\N{LEFT CURLY BRACKET}", '{')
+ self.assertEqual(f'{"\N{LEFT CURLY BRACKET}"}', '{')
+ self.assertEqual(rf'{"\N{LEFT CURLY BRACKET}"}', '{')
+
+ self.assertAllRaise(SyntaxError,
+ "f-string: valid expression required before '}'",
+ ["f'{\n}'",
])
+ def test_invalid_backslashes_inside_fstring_context(self):
+ # All of these variations are invalid python syntax,
+ # so they are also invalid in f-strings as well.
+ cases = [
+ formatting.format(expr=expr)
+ for formatting in [
+ "{expr}",
+ "f'{{{expr}}}'",
+ "rf'{{{expr}}}'",
+ ]
+ for expr in [
+ r"\'a\'",
+ r"\t3",
+ r"\\"[0],
+ ]
+ ]
+ self.assertAllRaise(SyntaxError, 'unexpected character after line continuation',
+ cases)
+
def test_no_escapes_for_braces(self):
"""
Only literal curly braces begin an expression.
@@ -843,11 +931,67 @@ def test_lambda(self):
self.assertEqual(f'{(lambda y:x*y)("8"):10}', "88888 ")
# lambda doesn't work without parens, because the colon
- # makes the parser think it's a format_spec
- self.assertAllRaise(SyntaxError, 'f-string: invalid syntax',
+ # makes the parser think it's a format_spec
+ # emit warning if we can match a format_spec
+ self.assertAllRaise(SyntaxError,
+ "f-string: lambda expressions are not allowed "
+ "without parentheses",
["f'{lambda x:x}'",
+ "f'{lambda :x}'",
+ "f'{lambda *arg, :x}'",
+ "f'{1, lambda:x}'",
+ "f'{lambda x:}'",
+ "f'{lambda :}'",
])
+ # but don't emit the paren warning in general cases
+ with self.assertRaisesRegex(SyntaxError, "f-string: expecting a valid expression after '{'"):
+ eval("f'{+ lambda:None}'")
+
+ def test_valid_prefixes(self):
+ self.assertEqual(F'{1}', "1")
+ self.assertEqual(FR'{2}', "2")
+ self.assertEqual(fR'{3}', "3")
+
+ def test_roundtrip_raw_quotes(self):
+ self.assertEqual(fr"\'", "\\'")
+ self.assertEqual(fr'\"', '\\"')
+ self.assertEqual(fr'\"\'', '\\"\\\'')
+ self.assertEqual(fr'\'\"', '\\\'\\"')
+ self.assertEqual(fr'\"\'\"', '\\"\\\'\\"')
+ self.assertEqual(fr'\'\"\'', '\\\'\\"\\\'')
+ self.assertEqual(fr'\"\'\"\'', '\\"\\\'\\"\\\'')
+
+ def test_fstring_backslash_before_double_bracket(self):
+ self.assertEqual(f'\{{\}}', '\\{\\}')
+ self.assertEqual(f'\{{', '\\{')
+ self.assertEqual(f'\{{{1+1}', '\\{2')
+ self.assertEqual(f'\}}{1+1}', '\\}2')
+ self.assertEqual(f'{1+1}\}}', '2\\}')
+ self.assertEqual(fr'\{{\}}', '\\{\\}')
+ self.assertEqual(fr'\{{', '\\{')
+ self.assertEqual(fr'\{{{1+1}', '\\{2')
+ self.assertEqual(fr'\}}{1+1}', '\\}2')
+ self.assertEqual(fr'{1+1}\}}', '2\\}')
+
+ def test_fstring_backslash_prefix_raw(self):
+ self.assertEqual(f'\\', '\\')
+ self.assertEqual(f'\\\\', '\\\\')
+ self.assertEqual(fr'\\', r'\\')
+ self.assertEqual(fr'\\\\', r'\\\\')
+ self.assertEqual(rf'\\', r'\\')
+ self.assertEqual(rf'\\\\', r'\\\\')
+ self.assertEqual(Rf'\\', R'\\')
+ self.assertEqual(Rf'\\\\', R'\\\\')
+ self.assertEqual(fR'\\', R'\\')
+ self.assertEqual(fR'\\\\', R'\\\\')
+ self.assertEqual(FR'\\', R'\\')
+ self.assertEqual(FR'\\\\', R'\\\\')
+
+ def test_fstring_format_spec_greedy_matching(self):
+ self.assertEqual(f"{1:}}}", "1}")
+ self.assertEqual(f"{1:>3{5}}}}", " 1}")
+
def test_yield(self):
# Not terribly useful, but make sure the yield turns
# a function into a generator
@@ -1037,6 +1181,11 @@ def test_conversions(self):
self.assertEqual(f'{"a"!r}', "'a'")
self.assertEqual(f'{"a"!a}', "'a'")
+ # Conversions can have trailing whitespace after them since it
+ # does not provide any significance
+ self.assertEqual(f"{3!s }", "3")
+ self.assertEqual(f'{3.14!s :10.10}', '3.14 ')
+
# Not a conversion.
self.assertEqual(f'{"a!r"}', "a!r")
@@ -1049,16 +1198,27 @@ def test_conversions(self):
"f'{3!g'",
])
- self.assertAllRaise(SyntaxError, 'f-string: missed conversion character',
+ self.assertAllRaise(SyntaxError, 'f-string: missing conversion character',
["f'{3!}'",
"f'{3!:'",
"f'{3!:}'",
])
- for conv in 'g', 'A', '3', 'G', '!', ' s', 's ', ' s ', 'ä', 'ɐ', 'ª':
+ for conv_identifier in 'g', 'A', 'G', 'ä', 'ɐ':
self.assertAllRaise(SyntaxError,
"f-string: invalid conversion character %r: "
- "expected 's', 'r', or 'a'" % conv,
+ "expected 's', 'r', or 'a'" % conv_identifier,
+ ["f'{3!" + conv_identifier + "}'"])
+
+ for conv_non_identifier in '3', '!':
+ self.assertAllRaise(SyntaxError,
+ "f-string: invalid conversion character",
+ ["f'{3!" + conv_non_identifier + "}'"])
+
+ for conv in ' s', ' s ':
+ self.assertAllRaise(SyntaxError,
+ "f-string: conversion type must come right after the"
+ " exclamanation mark",
["f'{3!" + conv + "}'"])
self.assertAllRaise(SyntaxError,
@@ -1097,8 +1257,7 @@ def test_mismatched_braces(self):
])
self.assertAllRaise(SyntaxError, "f-string: expecting '}'",
- ["f'{3:{{>10}'",
- "f'{3'",
+ ["f'{3'",
"f'{3!'",
"f'{3:'",
"f'{3!s'",
@@ -1111,11 +1270,14 @@ def test_mismatched_braces(self):
"f'{{{'",
"f'{{}}{'",
"f'{'",
- "f'x{<'", # See bpo-46762.
- "f'x{>'",
"f'{i='", # See gh-93418.
])
+ self.assertAllRaise(SyntaxError,
+ "f-string: expecting a valid expression after '{'",
+ ["f'{3:{{>10}'",
+ ])
+
# But these are just normal strings.
self.assertEqual(f'{"{"}', '{')
self.assertEqual(f'{"}"}', '}')
@@ -1314,6 +1476,7 @@ def __repr__(self):
self.assertEqual(f'X{x =}Y', 'Xx ='+repr(x)+'Y')
self.assertEqual(f'X{x= }Y', 'Xx= '+repr(x)+'Y')
self.assertEqual(f'X{x = }Y', 'Xx = '+repr(x)+'Y')
+ self.assertEqual(f"sadsd {1 + 1 = :{1 + 1:1d}f}", "sadsd 1 + 1 = 2.000000")
# These next lines contains tabs. Backslash escapes don't
# work in f-strings.
@@ -1335,7 +1498,8 @@ def test_walrus(self):
self.assertEqual(x, 10)
def test_invalid_syntax_error_message(self):
- with self.assertRaisesRegex(SyntaxError, "f-string: invalid syntax"):
+ with self.assertRaisesRegex(SyntaxError,
+ "f-string: expecting '=', or '!', or ':', or '}'"):
compile("f'{a $ b}'", "?", "exec")
def test_with_two_commas_in_format_specifier(self):
@@ -1359,13 +1523,31 @@ def test_with_an_underscore_and_a_comma_in_format_specifier(self):
f'{1:_,}'
def test_syntax_error_for_starred_expressions(self):
- error_msg = re.escape("cannot use starred expression here")
- with self.assertRaisesRegex(SyntaxError, error_msg):
+ with self.assertRaisesRegex(SyntaxError, "can't use starred expression here"):
compile("f'{*a}'", "?", "exec")
- error_msg = re.escape("cannot use double starred expression here")
- with self.assertRaisesRegex(SyntaxError, error_msg):
+ with self.assertRaisesRegex(SyntaxError,
+ "f-string: expecting a valid expression after '{'"):
compile("f'{**a}'", "?", "exec")
+ def test_not_closing_quotes(self):
+ self.assertAllRaise(SyntaxError, "unterminated f-string literal", ['f"', "f'"])
+ self.assertAllRaise(SyntaxError, "unterminated triple-quoted f-string literal",
+ ['f"""', "f'''"])
+
+ def test_syntax_error_after_debug(self):
+ self.assertAllRaise(SyntaxError, "f-string: expecting a valid expression after '{'",
+ [
+ "f'{1=}{;'",
+ "f'{1=}{+;'",
+ "f'{1=}{2}{;'",
+ "f'{1=}{3}{;'",
+ ])
+ self.assertAllRaise(SyntaxError, "f-string: expecting '=', or '!', or ':', or '}'",
+ [
+ "f'{1=}{1;'",
+ "f'{1=}{1;}'",
+ ])
+
if __name__ == '__main__':
unittest.main()
diff --git a/Lib/test/test_gdb.py b/Lib/test/test_gdb.py
index 0f39b8f45714ad..311a864a52387d 100644
--- a/Lib/test/test_gdb.py
+++ b/Lib/test/test_gdb.py
@@ -962,7 +962,7 @@ def test_wrapper_call(self):
cmd = textwrap.dedent('''
class MyList(list):
def __init__(self):
- super().__init__() # wrapper_call()
+ super(*[]).__init__() # wrapper_call()
id("first break point")
l = MyList()
diff --git a/Lib/test/test_genericalias.py b/Lib/test/test_genericalias.py
index 9b59d1e3e0aad2..24d4216417521c 100644
--- a/Lib/test/test_genericalias.py
+++ b/Lib/test/test_genericalias.py
@@ -314,8 +314,11 @@ def test_parameter_chaining(self):
with self.assertRaises(TypeError):
list[int][int]
+ with self.assertRaises(TypeError):
dict[T, int][str, int]
+ with self.assertRaises(TypeError):
dict[str, T][str, int]
+ with self.assertRaises(TypeError):
dict[T, T][str, int]
def test_equality(self):
diff --git a/Lib/test/test_grammar.py b/Lib/test/test_grammar.py
index ced9000f75f2e5..ee105a3de17f8a 100644
--- a/Lib/test/test_grammar.py
+++ b/Lib/test/test_grammar.py
@@ -236,12 +236,9 @@ def check(test, error=False):
check(f"[{num}for x in ()]")
check(f"{num}spam", error=True)
+ with self.assertWarnsRegex(SyntaxWarning, r'invalid \w+ literal'):
+ compile(f"{num}is x", "", "eval")
with warnings.catch_warnings():
- warnings.filterwarnings('ignore', '"is" with a literal',
- SyntaxWarning)
- with self.assertWarnsRegex(SyntaxWarning,
- r'invalid \w+ literal'):
- compile(f"{num}is x", "", "eval")
warnings.simplefilter('error', SyntaxWarning)
with self.assertRaisesRegex(SyntaxError,
r'invalid \w+ literal'):
@@ -1467,14 +1464,22 @@ def test_comparison(self):
if 1 < 1 > 1 == 1 >= 1 <= 1 != 1 in 1 not in x is x is not x: pass
def test_comparison_is_literal(self):
- def check(test, msg='"is" with a literal'):
+ def check(test, msg):
self.check_syntax_warning(test, msg)
- check('x is 1')
- check('x is "thing"')
- check('1 is x')
- check('x is y is 1')
- check('x is not 1', '"is not" with a literal')
+ check('x is 1', '"is" with \'int\' literal')
+ check('x is "thing"', '"is" with \'str\' literal')
+ check('1 is x', '"is" with \'int\' literal')
+ check('x is y is 1', '"is" with \'int\' literal')
+ check('x is not 1', '"is not" with \'int\' literal')
+ check('x is not (1, 2)', '"is not" with \'tuple\' literal')
+ check('(1, 2) is not x', '"is not" with \'tuple\' literal')
+
+ check('None is 1', '"is" with \'int\' literal')
+ check('1 is None', '"is" with \'int\' literal')
+
+ check('x == 3 is y', '"is" with \'int\' literal')
+ check('x == "thing" is y', '"is" with \'str\' literal')
with warnings.catch_warnings():
warnings.simplefilter('error', SyntaxWarning)
@@ -1482,6 +1487,10 @@ def check(test, msg='"is" with a literal'):
compile('x is False', '', 'exec')
compile('x is True', '', 'exec')
compile('x is ...', '', 'exec')
+ compile('None is x', '', 'exec')
+ compile('False is x', '', 'exec')
+ compile('True is x', '', 'exec')
+ compile('... is x', '', 'exec')
def test_warn_missed_comma(self):
def check(test):
diff --git a/Lib/test/test_imp.py b/Lib/test/test_imp.py
deleted file mode 100644
index 03e3adba221e57..00000000000000
--- a/Lib/test/test_imp.py
+++ /dev/null
@@ -1,1364 +0,0 @@
-import gc
-import json
-import importlib
-import importlib.util
-import os
-import os.path
-import py_compile
-import sys
-from test import support
-from test.support import import_helper
-from test.support import os_helper
-from test.support import script_helper
-from test.support import warnings_helper
-import textwrap
-import types
-import unittest
-import warnings
-imp = warnings_helper.import_deprecated('imp')
-import _imp
-import _testinternalcapi
-try:
- import _xxsubinterpreters as _interpreters
-except ModuleNotFoundError:
- _interpreters = None
-
-
-OS_PATH_NAME = os.path.__name__
-
-
-def requires_subinterpreters(meth):
- """Decorator to skip a test if subinterpreters are not supported."""
- return unittest.skipIf(_interpreters is None,
- 'subinterpreters required')(meth)
-
-
-def requires_load_dynamic(meth):
- """Decorator to skip a test if not running under CPython or lacking
- imp.load_dynamic()."""
- meth = support.cpython_only(meth)
- return unittest.skipIf(getattr(imp, 'load_dynamic', None) is None,
- 'imp.load_dynamic() required')(meth)
-
-
-class ModuleSnapshot(types.SimpleNamespace):
- """A representation of a module for testing.
-
- Fields:
-
- * id - the module's object ID
- * module - the actual module or an adequate substitute
- * __file__
- * __spec__
- * name
- * origin
- * ns - a copy (dict) of the module's __dict__ (or None)
- * ns_id - the object ID of the module's __dict__
- * cached - the sys.modules[mod.__spec__.name] entry (or None)
- * cached_id - the object ID of the sys.modules entry (or None)
-
- In cases where the value is not available (e.g. due to serialization),
- the value will be None.
- """
- _fields = tuple('id module ns ns_id cached cached_id'.split())
-
- @classmethod
- def from_module(cls, mod):
- name = mod.__spec__.name
- cached = sys.modules.get(name)
- return cls(
- id=id(mod),
- module=mod,
- ns=types.SimpleNamespace(**mod.__dict__),
- ns_id=id(mod.__dict__),
- cached=cached,
- cached_id=id(cached),
- )
-
- SCRIPT = textwrap.dedent('''
- {imports}
-
- name = {name!r}
-
- {prescript}
-
- mod = {name}
-
- {body}
-
- {postscript}
- ''')
- IMPORTS = textwrap.dedent('''
- import sys
- ''').strip()
- SCRIPT_BODY = textwrap.dedent('''
- # Capture the snapshot data.
- cached = sys.modules.get(name)
- snapshot = dict(
- id=id(mod),
- module=dict(
- __file__=mod.__file__,
- __spec__=dict(
- name=mod.__spec__.name,
- origin=mod.__spec__.origin,
- ),
- ),
- ns=None,
- ns_id=id(mod.__dict__),
- cached=None,
- cached_id=id(cached) if cached else None,
- )
- ''').strip()
- CLEANUP_SCRIPT = textwrap.dedent('''
- # Clean up the module.
- sys.modules.pop(name, None)
- ''').strip()
-
- @classmethod
- def build_script(cls, name, *,
- prescript=None,
- import_first=False,
- postscript=None,
- postcleanup=False,
- ):
- if postcleanup is True:
- postcleanup = cls.CLEANUP_SCRIPT
- elif isinstance(postcleanup, str):
- postcleanup = textwrap.dedent(postcleanup).strip()
- postcleanup = cls.CLEANUP_SCRIPT + os.linesep + postcleanup
- else:
- postcleanup = ''
- prescript = textwrap.dedent(prescript).strip() if prescript else ''
- postscript = textwrap.dedent(postscript).strip() if postscript else ''
-
- if postcleanup:
- if postscript:
- postscript = postscript + os.linesep * 2 + postcleanup
- else:
- postscript = postcleanup
-
- if import_first:
- prescript += textwrap.dedent(f'''
-
- # Now import the module.
- assert name not in sys.modules
- import {name}''')
-
- return cls.SCRIPT.format(
- imports=cls.IMPORTS.strip(),
- name=name,
- prescript=prescript.strip(),
- body=cls.SCRIPT_BODY.strip(),
- postscript=postscript,
- )
-
- @classmethod
- def parse(cls, text):
- raw = json.loads(text)
- mod = raw['module']
- mod['__spec__'] = types.SimpleNamespace(**mod['__spec__'])
- raw['module'] = types.SimpleNamespace(**mod)
- return cls(**raw)
-
- @classmethod
- def from_subinterp(cls, name, interpid=None, *, pipe=None, **script_kwds):
- if pipe is not None:
- return cls._from_subinterp(name, interpid, pipe, script_kwds)
- pipe = os.pipe()
- try:
- return cls._from_subinterp(name, interpid, pipe, script_kwds)
- finally:
- r, w = pipe
- os.close(r)
- os.close(w)
-
- @classmethod
- def _from_subinterp(cls, name, interpid, pipe, script_kwargs):
- r, w = pipe
-
- # Build the script.
- postscript = textwrap.dedent(f'''
- # Send the result over the pipe.
- import json
- import os
- os.write({w}, json.dumps(snapshot).encode())
-
- ''')
- _postscript = script_kwargs.get('postscript')
- if _postscript:
- _postscript = textwrap.dedent(_postscript).lstrip()
- postscript += _postscript
- script_kwargs['postscript'] = postscript.strip()
- script = cls.build_script(name, **script_kwargs)
-
- # Run the script.
- if interpid is None:
- ret = support.run_in_subinterp(script)
- if ret != 0:
- raise AssertionError(f'{ret} != 0')
- else:
- _interpreters.run_string(interpid, script)
-
- # Parse the results.
- text = os.read(r, 1000)
- return cls.parse(text.decode())
-
-
-class LockTests(unittest.TestCase):
-
- """Very basic test of import lock functions."""
-
- def verify_lock_state(self, expected):
- self.assertEqual(imp.lock_held(), expected,
- "expected imp.lock_held() to be %r" % expected)
- def testLock(self):
- LOOPS = 50
-
- # The import lock may already be held, e.g. if the test suite is run
- # via "import test.autotest".
- lock_held_at_start = imp.lock_held()
- self.verify_lock_state(lock_held_at_start)
-
- for i in range(LOOPS):
- imp.acquire_lock()
- self.verify_lock_state(True)
-
- for i in range(LOOPS):
- imp.release_lock()
-
- # The original state should be restored now.
- self.verify_lock_state(lock_held_at_start)
-
- if not lock_held_at_start:
- try:
- imp.release_lock()
- except RuntimeError:
- pass
- else:
- self.fail("release_lock() without lock should raise "
- "RuntimeError")
-
-class ImportTests(unittest.TestCase):
- def setUp(self):
- mod = importlib.import_module('test.encoded_modules')
- self.test_strings = mod.test_strings
- self.test_path = mod.__path__
-
- # test_import_encoded_module moved to test_source_encoding.py
-
- def test_find_module_encoding(self):
- for mod, encoding, _ in self.test_strings:
- with imp.find_module('module_' + mod, self.test_path)[0] as fd:
- self.assertEqual(fd.encoding, encoding)
-
- path = [os.path.dirname(__file__)]
- with self.assertRaises(SyntaxError):
- imp.find_module('badsyntax_pep3120', path)
-
- def test_issue1267(self):
- for mod, encoding, _ in self.test_strings:
- fp, filename, info = imp.find_module('module_' + mod,
- self.test_path)
- with fp:
- self.assertNotEqual(fp, None)
- self.assertEqual(fp.encoding, encoding)
- self.assertEqual(fp.tell(), 0)
- self.assertEqual(fp.readline(), '# test %s encoding\n'
- % encoding)
-
- fp, filename, info = imp.find_module("tokenize")
- with fp:
- self.assertNotEqual(fp, None)
- self.assertEqual(fp.encoding, "utf-8")
- self.assertEqual(fp.tell(), 0)
- self.assertEqual(fp.readline(),
- '"""Tokenization help for Python programs.\n')
-
- def test_issue3594(self):
- temp_mod_name = 'test_imp_helper'
- sys.path.insert(0, '.')
- try:
- with open(temp_mod_name + '.py', 'w', encoding="latin-1") as file:
- file.write("# coding: cp1252\nu = 'test.test_imp'\n")
- file, filename, info = imp.find_module(temp_mod_name)
- file.close()
- self.assertEqual(file.encoding, 'cp1252')
- finally:
- del sys.path[0]
- os_helper.unlink(temp_mod_name + '.py')
- os_helper.unlink(temp_mod_name + '.pyc')
-
- def test_issue5604(self):
- # Test cannot cover imp.load_compiled function.
- # Martin von Loewis note what shared library cannot have non-ascii
- # character because init_xxx function cannot be compiled
- # and issue never happens for dynamic modules.
- # But sources modified to follow generic way for processing paths.
-
- # the return encoding could be uppercase or None
- fs_encoding = sys.getfilesystemencoding()
-
- # covers utf-8 and Windows ANSI code pages
- # one non-space symbol from every page
- # (http://en.wikipedia.org/wiki/Code_page)
- known_locales = {
- 'utf-8' : b'\xc3\xa4',
- 'cp1250' : b'\x8C',
- 'cp1251' : b'\xc0',
- 'cp1252' : b'\xc0',
- 'cp1253' : b'\xc1',
- 'cp1254' : b'\xc0',
- 'cp1255' : b'\xe0',
- 'cp1256' : b'\xe0',
- 'cp1257' : b'\xc0',
- 'cp1258' : b'\xc0',
- }
-
- if sys.platform == 'darwin':
- self.assertEqual(fs_encoding, 'utf-8')
- # Mac OS X uses the Normal Form D decomposition
- # http://developer.apple.com/mac/library/qa/qa2001/qa1173.html
- special_char = b'a\xcc\x88'
- else:
- special_char = known_locales.get(fs_encoding)
-
- if not special_char:
- self.skipTest("can't run this test with %s as filesystem encoding"
- % fs_encoding)
- decoded_char = special_char.decode(fs_encoding)
- temp_mod_name = 'test_imp_helper_' + decoded_char
- test_package_name = 'test_imp_helper_package_' + decoded_char
- init_file_name = os.path.join(test_package_name, '__init__.py')
- try:
- # if the curdir is not in sys.path the test fails when run with
- # ./python ./Lib/test/regrtest.py test_imp
- sys.path.insert(0, os.curdir)
- with open(temp_mod_name + '.py', 'w', encoding="utf-8") as file:
- file.write('a = 1\n')
- file, filename, info = imp.find_module(temp_mod_name)
- with file:
- self.assertIsNotNone(file)
- self.assertTrue(filename[:-3].endswith(temp_mod_name))
- self.assertEqual(info[0], '.py')
- self.assertEqual(info[1], 'r')
- self.assertEqual(info[2], imp.PY_SOURCE)
-
- mod = imp.load_module(temp_mod_name, file, filename, info)
- self.assertEqual(mod.a, 1)
-
- with warnings.catch_warnings():
- warnings.simplefilter('ignore')
- mod = imp.load_source(temp_mod_name, temp_mod_name + '.py')
- self.assertEqual(mod.a, 1)
-
- with warnings.catch_warnings():
- warnings.simplefilter('ignore')
- if not sys.dont_write_bytecode:
- mod = imp.load_compiled(
- temp_mod_name,
- imp.cache_from_source(temp_mod_name + '.py'))
- self.assertEqual(mod.a, 1)
-
- if not os.path.exists(test_package_name):
- os.mkdir(test_package_name)
- with open(init_file_name, 'w', encoding="utf-8") as file:
- file.write('b = 2\n')
- with warnings.catch_warnings():
- warnings.simplefilter('ignore')
- package = imp.load_package(test_package_name, test_package_name)
- self.assertEqual(package.b, 2)
- finally:
- del sys.path[0]
- for ext in ('.py', '.pyc'):
- os_helper.unlink(temp_mod_name + ext)
- os_helper.unlink(init_file_name + ext)
- os_helper.rmtree(test_package_name)
- os_helper.rmtree('__pycache__')
-
- def test_issue9319(self):
- path = os.path.dirname(__file__)
- self.assertRaises(SyntaxError,
- imp.find_module, "badsyntax_pep3120", [path])
-
- def test_load_from_source(self):
- # Verify that the imp module can correctly load and find .py files
- # XXX (ncoghlan): It would be nice to use import_helper.CleanImport
- # here, but that breaks because the os module registers some
- # handlers in copy_reg on import. Since CleanImport doesn't
- # revert that registration, the module is left in a broken
- # state after reversion. Reinitialising the module contents
- # and just reverting os.environ to its previous state is an OK
- # workaround
- with import_helper.CleanImport('os', 'os.path', OS_PATH_NAME):
- import os
- orig_path = os.path
- orig_getenv = os.getenv
- with os_helper.EnvironmentVarGuard():
- x = imp.find_module("os")
- self.addCleanup(x[0].close)
- new_os = imp.load_module("os", *x)
- self.assertIs(os, new_os)
- self.assertIs(orig_path, new_os.path)
- self.assertIsNot(orig_getenv, new_os.getenv)
-
- @requires_load_dynamic
- def test_issue15828_load_extensions(self):
- # Issue 15828 picked up that the adapter between the old imp API
- # and importlib couldn't handle C extensions
- example = "_heapq"
- x = imp.find_module(example)
- file_ = x[0]
- if file_ is not None:
- self.addCleanup(file_.close)
- mod = imp.load_module(example, *x)
- self.assertEqual(mod.__name__, example)
-
- @requires_load_dynamic
- def test_issue16421_multiple_modules_in_one_dll(self):
- # Issue 16421: loading several modules from the same compiled file fails
- m = '_testimportmultiple'
- fileobj, pathname, description = imp.find_module(m)
- fileobj.close()
- mod0 = imp.load_dynamic(m, pathname)
- mod1 = imp.load_dynamic('_testimportmultiple_foo', pathname)
- mod2 = imp.load_dynamic('_testimportmultiple_bar', pathname)
- self.assertEqual(mod0.__name__, m)
- self.assertEqual(mod1.__name__, '_testimportmultiple_foo')
- self.assertEqual(mod2.__name__, '_testimportmultiple_bar')
- with self.assertRaises(ImportError):
- imp.load_dynamic('nonexistent', pathname)
-
- @requires_load_dynamic
- def test_load_dynamic_ImportError_path(self):
- # Issue #1559549 added `name` and `path` attributes to ImportError
- # in order to provide better detail. Issue #10854 implemented those
- # attributes on import failures of extensions on Windows.
- path = 'bogus file path'
- name = 'extension'
- with self.assertRaises(ImportError) as err:
- imp.load_dynamic(name, path)
- self.assertIn(path, err.exception.path)
- self.assertEqual(name, err.exception.name)
-
- @requires_load_dynamic
- def test_load_module_extension_file_is_None(self):
- # When loading an extension module and the file is None, open one
- # on the behalf of imp.load_dynamic().
- # Issue #15902
- name = '_testimportmultiple'
- found = imp.find_module(name)
- if found[0] is not None:
- found[0].close()
- if found[2][2] != imp.C_EXTENSION:
- self.skipTest("found module doesn't appear to be a C extension")
- imp.load_module(name, None, *found[1:])
-
- @requires_load_dynamic
- def test_issue24748_load_module_skips_sys_modules_check(self):
- name = 'test.imp_dummy'
- try:
- del sys.modules[name]
- except KeyError:
- pass
- try:
- module = importlib.import_module(name)
- spec = importlib.util.find_spec('_testmultiphase')
- module = imp.load_dynamic(name, spec.origin)
- self.assertEqual(module.__name__, name)
- self.assertEqual(module.__spec__.name, name)
- self.assertEqual(module.__spec__.origin, spec.origin)
- self.assertRaises(AttributeError, getattr, module, 'dummy_name')
- self.assertEqual(module.int_const, 1969)
- self.assertIs(sys.modules[name], module)
- finally:
- try:
- del sys.modules[name]
- except KeyError:
- pass
-
- @unittest.skipIf(sys.dont_write_bytecode,
- "test meaningful only when writing bytecode")
- def test_bug7732(self):
- with os_helper.temp_cwd():
- source = os_helper.TESTFN + '.py'
- os.mkdir(source)
- self.assertRaisesRegex(ImportError, '^No module',
- imp.find_module, os_helper.TESTFN, ["."])
-
- def test_multiple_calls_to_get_data(self):
- # Issue #18755: make sure multiple calls to get_data() can succeed.
- loader = imp._LoadSourceCompatibility('imp', imp.__file__,
- open(imp.__file__, encoding="utf-8"))
- loader.get_data(imp.__file__) # File should be closed
- loader.get_data(imp.__file__) # Will need to create a newly opened file
-
- def test_load_source(self):
- # Create a temporary module since load_source(name) modifies
- # sys.modules[name] attributes like __loader___
- modname = f"tmp{__name__}"
- mod = type(sys.modules[__name__])(modname)
- with support.swap_item(sys.modules, modname, mod):
- with self.assertRaisesRegex(ValueError, 'embedded null'):
- imp.load_source(modname, __file__ + "\0")
-
- @support.cpython_only
- def test_issue31315(self):
- # There shouldn't be an assertion failure in imp.create_dynamic(),
- # when spec.name is not a string.
- create_dynamic = support.get_attribute(imp, 'create_dynamic')
- class BadSpec:
- name = None
- origin = 'foo'
- with self.assertRaises(TypeError):
- create_dynamic(BadSpec())
-
- def test_issue_35321(self):
- # Both _frozen_importlib and _frozen_importlib_external
- # should have a spec origin of "frozen" and
- # no need to clean up imports in this case.
-
- import _frozen_importlib_external
- self.assertEqual(_frozen_importlib_external.__spec__.origin, "frozen")
-
- import _frozen_importlib
- self.assertEqual(_frozen_importlib.__spec__.origin, "frozen")
-
- def test_source_hash(self):
- self.assertEqual(_imp.source_hash(42, b'hi'), b'\xfb\xd9G\x05\xaf$\x9b~')
- self.assertEqual(_imp.source_hash(43, b'hi'), b'\xd0/\x87C\xccC\xff\xe2')
-
- def test_pyc_invalidation_mode_from_cmdline(self):
- cases = [
- ([], "default"),
- (["--check-hash-based-pycs", "default"], "default"),
- (["--check-hash-based-pycs", "always"], "always"),
- (["--check-hash-based-pycs", "never"], "never"),
- ]
- for interp_args, expected in cases:
- args = interp_args + [
- "-c",
- "import _imp; print(_imp.check_hash_based_pycs)",
- ]
- res = script_helper.assert_python_ok(*args)
- self.assertEqual(res.out.strip().decode('utf-8'), expected)
-
- def test_find_and_load_checked_pyc(self):
- # issue 34056
- with os_helper.temp_cwd():
- with open('mymod.py', 'wb') as fp:
- fp.write(b'x = 42\n')
- py_compile.compile(
- 'mymod.py',
- doraise=True,
- invalidation_mode=py_compile.PycInvalidationMode.CHECKED_HASH,
- )
- file, path, description = imp.find_module('mymod', path=['.'])
- mod = imp.load_module('mymod', file, path, description)
- self.assertEqual(mod.x, 42)
-
- def test_issue98354(self):
- # _imp.create_builtin should raise TypeError
- # if 'name' attribute of 'spec' argument is not a 'str' instance
-
- create_builtin = support.get_attribute(_imp, "create_builtin")
-
- class FakeSpec:
- def __init__(self, name):
- self.name = self
- spec = FakeSpec("time")
- with self.assertRaises(TypeError):
- create_builtin(spec)
-
- class FakeSpec2:
- name = [1, 2, 3, 4]
- spec = FakeSpec2()
- with self.assertRaises(TypeError):
- create_builtin(spec)
-
- import builtins
- class UnicodeSubclass(str):
- pass
- class GoodSpec:
- name = UnicodeSubclass("builtins")
- spec = GoodSpec()
- bltin = create_builtin(spec)
- self.assertEqual(bltin, builtins)
-
- class UnicodeSubclassFakeSpec(str):
- def __init__(self, name):
- self.name = self
- spec = UnicodeSubclassFakeSpec("builtins")
- bltin = create_builtin(spec)
- self.assertEqual(bltin, builtins)
-
- @support.cpython_only
- def test_create_builtin_subinterp(self):
- # gh-99578: create_builtin() behavior changes after the creation of the
- # first sub-interpreter. Test both code paths, before and after the
- # creation of a sub-interpreter. Previously, create_builtin() had
- # a reference leak after the creation of the first sub-interpreter.
-
- import builtins
- create_builtin = support.get_attribute(_imp, "create_builtin")
- class Spec:
- name = "builtins"
- spec = Spec()
-
- def check_get_builtins():
- refcnt = sys.getrefcount(builtins)
- mod = _imp.create_builtin(spec)
- self.assertIs(mod, builtins)
- self.assertEqual(sys.getrefcount(builtins), refcnt + 1)
- # Check that a GC collection doesn't crash
- gc.collect()
-
- check_get_builtins()
-
- ret = support.run_in_subinterp("import builtins")
- self.assertEqual(ret, 0)
-
- check_get_builtins()
-
-
-class TestSinglePhaseSnapshot(ModuleSnapshot):
-
- @classmethod
- def from_module(cls, mod):
- self = super().from_module(mod)
- self.summed = mod.sum(1, 2)
- self.lookedup = mod.look_up_self()
- self.lookedup_id = id(self.lookedup)
- self.state_initialized = mod.state_initialized()
- if hasattr(mod, 'initialized_count'):
- self.init_count = mod.initialized_count()
- return self
-
- SCRIPT_BODY = ModuleSnapshot.SCRIPT_BODY + textwrap.dedent(f'''
- snapshot['module'].update(dict(
- int_const=mod.int_const,
- str_const=mod.str_const,
- _module_initialized=mod._module_initialized,
- ))
- snapshot.update(dict(
- summed=mod.sum(1, 2),
- lookedup_id=id(mod.look_up_self()),
- state_initialized=mod.state_initialized(),
- init_count=mod.initialized_count(),
- has_spam=hasattr(mod, 'spam'),
- spam=getattr(mod, 'spam', None),
- ))
- ''').rstrip()
-
- @classmethod
- def parse(cls, text):
- self = super().parse(text)
- if not self.has_spam:
- del self.spam
- del self.has_spam
- return self
-
-
-@requires_load_dynamic
-class SinglephaseInitTests(unittest.TestCase):
-
- NAME = '_testsinglephase'
-
- @classmethod
- def setUpClass(cls):
- if '-R' in sys.argv or '--huntrleaks' in sys.argv:
- # https://github.com/python/cpython/issues/102251
- raise unittest.SkipTest('unresolved refleaks (see gh-102251)')
- fileobj, filename, _ = imp.find_module(cls.NAME)
- fileobj.close()
- cls.FILE = filename
-
- # Start fresh.
- cls.clean_up()
-
- def tearDown(self):
- # Clean up the module.
- self.clean_up()
-
- @classmethod
- def clean_up(cls):
- name = cls.NAME
- filename = cls.FILE
- if name in sys.modules:
- if hasattr(sys.modules[name], '_clear_globals'):
- assert sys.modules[name].__file__ == filename
- sys.modules[name]._clear_globals()
- del sys.modules[name]
- # Clear all internally cached data for the extension.
- _testinternalcapi.clear_extension(name, filename)
-
- #########################
- # helpers
-
- def add_module_cleanup(self, name):
- def clean_up():
- # Clear all internally cached data for the extension.
- _testinternalcapi.clear_extension(name, self.FILE)
- self.addCleanup(clean_up)
-
- def load(self, name):
- try:
- already_loaded = self.already_loaded
- except AttributeError:
- already_loaded = self.already_loaded = {}
- assert name not in already_loaded
- mod = imp.load_dynamic(name, self.FILE)
- self.assertNotIn(mod, already_loaded.values())
- already_loaded[name] = mod
- return types.SimpleNamespace(
- name=name,
- module=mod,
- snapshot=TestSinglePhaseSnapshot.from_module(mod),
- )
-
- def re_load(self, name, mod):
- assert sys.modules[name] is mod
- assert mod.__dict__ == mod.__dict__
- reloaded = imp.load_dynamic(name, self.FILE)
- return types.SimpleNamespace(
- name=name,
- module=reloaded,
- snapshot=TestSinglePhaseSnapshot.from_module(reloaded),
- )
-
- # subinterpreters
-
- def add_subinterpreter(self):
- interpid = _interpreters.create(isolated=False)
- _interpreters.run_string(interpid, textwrap.dedent('''
- import sys
- import _testinternalcapi
- '''))
- def clean_up():
- _interpreters.run_string(interpid, textwrap.dedent(f'''
- name = {self.NAME!r}
- if name in sys.modules:
- sys.modules[name]._clear_globals()
- _testinternalcapi.clear_extension(name, {self.FILE!r})
- '''))
- _interpreters.destroy(interpid)
- self.addCleanup(clean_up)
- return interpid
-
- def import_in_subinterp(self, interpid=None, *,
- postscript=None,
- postcleanup=False,
- ):
- name = self.NAME
-
- if postcleanup:
- import_ = 'import _testinternalcapi' if interpid is None else ''
- postcleanup = f'''
- {import_}
- mod._clear_globals()
- _testinternalcapi.clear_extension(name, {self.FILE!r})
- '''
-
- try:
- pipe = self._pipe
- except AttributeError:
- r, w = pipe = self._pipe = os.pipe()
- self.addCleanup(os.close, r)
- self.addCleanup(os.close, w)
-
- snapshot = TestSinglePhaseSnapshot.from_subinterp(
- name,
- interpid,
- pipe=pipe,
- import_first=True,
- postscript=postscript,
- postcleanup=postcleanup,
- )
-
- return types.SimpleNamespace(
- name=name,
- module=None,
- snapshot=snapshot,
- )
-
- # checks
-
- def check_common(self, loaded):
- isolated = False
-
- mod = loaded.module
- if not mod:
- # It came from a subinterpreter.
- isolated = True
- mod = loaded.snapshot.module
- # mod.__name__ might not match, but the spec will.
- self.assertEqual(mod.__spec__.name, loaded.name)
- self.assertEqual(mod.__file__, self.FILE)
- self.assertEqual(mod.__spec__.origin, self.FILE)
- if not isolated:
- self.assertTrue(issubclass(mod.error, Exception))
- self.assertEqual(mod.int_const, 1969)
- self.assertEqual(mod.str_const, 'something different')
- self.assertIsInstance(mod._module_initialized, float)
- self.assertGreater(mod._module_initialized, 0)
-
- snap = loaded.snapshot
- self.assertEqual(snap.summed, 3)
- if snap.state_initialized is not None:
- self.assertIsInstance(snap.state_initialized, float)
- self.assertGreater(snap.state_initialized, 0)
- if isolated:
- # The "looked up" module is interpreter-specific
- # (interp->imports.modules_by_index was set for the module).
- self.assertEqual(snap.lookedup_id, snap.id)
- self.assertEqual(snap.cached_id, snap.id)
- with self.assertRaises(AttributeError):
- snap.spam
- else:
- self.assertIs(snap.lookedup, mod)
- self.assertIs(snap.cached, mod)
-
- def check_direct(self, loaded):
- # The module has its own PyModuleDef, with a matching name.
- self.assertEqual(loaded.module.__name__, loaded.name)
- self.assertIs(loaded.snapshot.lookedup, loaded.module)
-
- def check_indirect(self, loaded, orig):
- # The module re-uses another's PyModuleDef, with a different name.
- assert orig is not loaded.module
- assert orig.__name__ != loaded.name
- self.assertNotEqual(loaded.module.__name__, loaded.name)
- self.assertIs(loaded.snapshot.lookedup, loaded.module)
-
- def check_basic(self, loaded, expected_init_count):
- # m_size == -1
- # The module loads fresh the first time and copies m_copy after.
- snap = loaded.snapshot
- self.assertIsNot(snap.state_initialized, None)
- self.assertIsInstance(snap.init_count, int)
- self.assertGreater(snap.init_count, 0)
- self.assertEqual(snap.init_count, expected_init_count)
-
- def check_with_reinit(self, loaded):
- # m_size >= 0
- # The module loads fresh every time.
- pass
-
- def check_fresh(self, loaded):
- """
- The module had not been loaded before (at least since fully reset).
- """
- snap = loaded.snapshot
- # The module's init func was run.
- # A copy of the module's __dict__ was stored in def->m_base.m_copy.
- # The previous m_copy was deleted first.
- # _PyRuntime.imports.extensions was set.
- self.assertEqual(snap.init_count, 1)
- # The global state was initialized.
- # The module attrs were initialized from that state.
- self.assertEqual(snap.module._module_initialized,
- snap.state_initialized)
-
- def check_semi_fresh(self, loaded, base, prev):
- """
- The module had been loaded before and then reset
- (but the module global state wasn't).
- """
- snap = loaded.snapshot
- # The module's init func was run again.
- # A copy of the module's __dict__ was stored in def->m_base.m_copy.
- # The previous m_copy was deleted first.
- # The module globals did not get reset.
- self.assertNotEqual(snap.id, base.snapshot.id)
- self.assertNotEqual(snap.id, prev.snapshot.id)
- self.assertEqual(snap.init_count, prev.snapshot.init_count + 1)
- # The global state was updated.
- # The module attrs were initialized from that state.
- self.assertEqual(snap.module._module_initialized,
- snap.state_initialized)
- self.assertNotEqual(snap.state_initialized,
- base.snapshot.state_initialized)
- self.assertNotEqual(snap.state_initialized,
- prev.snapshot.state_initialized)
-
- def check_copied(self, loaded, base):
- """
- The module had been loaded before and never reset.
- """
- snap = loaded.snapshot
- # The module's init func was not run again.
- # The interpreter copied m_copy, as set by the other interpreter,
- # with objects owned by the other interpreter.
- # The module globals did not get reset.
- self.assertNotEqual(snap.id, base.snapshot.id)
- self.assertEqual(snap.init_count, base.snapshot.init_count)
- # The global state was not updated since the init func did not run.
- # The module attrs were not directly initialized from that state.
- # The state and module attrs still match the previous loading.
- self.assertEqual(snap.module._module_initialized,
- snap.state_initialized)
- self.assertEqual(snap.state_initialized,
- base.snapshot.state_initialized)
-
- #########################
- # the tests
-
- def test_cleared_globals(self):
- loaded = self.load(self.NAME)
- _testsinglephase = loaded.module
- init_before = _testsinglephase.state_initialized()
-
- _testsinglephase._clear_globals()
- init_after = _testsinglephase.state_initialized()
- init_count = _testsinglephase.initialized_count()
-
- self.assertGreater(init_before, 0)
- self.assertEqual(init_after, 0)
- self.assertEqual(init_count, -1)
-
- def test_variants(self):
- # Exercise the most meaningful variants described in Python/import.c.
- self.maxDiff = None
-
- # Check the "basic" module.
-
- name = self.NAME
- expected_init_count = 1
- with self.subTest(name):
- loaded = self.load(name)
-
- self.check_common(loaded)
- self.check_direct(loaded)
- self.check_basic(loaded, expected_init_count)
- basic = loaded.module
-
- # Check its indirect variants.
-
- name = f'{self.NAME}_basic_wrapper'
- self.add_module_cleanup(name)
- expected_init_count += 1
- with self.subTest(name):
- loaded = self.load(name)
-
- self.check_common(loaded)
- self.check_indirect(loaded, basic)
- self.check_basic(loaded, expected_init_count)
-
- # Currently PyState_AddModule() always replaces the cached module.
- self.assertIs(basic.look_up_self(), loaded.module)
- self.assertEqual(basic.initialized_count(), expected_init_count)
-
- # The cached module shouldn't change after this point.
- basic_lookedup = loaded.module
-
- # Check its direct variant.
-
- name = f'{self.NAME}_basic_copy'
- self.add_module_cleanup(name)
- expected_init_count += 1
- with self.subTest(name):
- loaded = self.load(name)
-
- self.check_common(loaded)
- self.check_direct(loaded)
- self.check_basic(loaded, expected_init_count)
-
- # This should change the cached module for _testsinglephase.
- self.assertIs(basic.look_up_self(), basic_lookedup)
- self.assertEqual(basic.initialized_count(), expected_init_count)
-
- # Check the non-basic variant that has no state.
-
- name = f'{self.NAME}_with_reinit'
- self.add_module_cleanup(name)
- with self.subTest(name):
- loaded = self.load(name)
-
- self.check_common(loaded)
- self.assertIs(loaded.snapshot.state_initialized, None)
- self.check_direct(loaded)
- self.check_with_reinit(loaded)
-
- # This should change the cached module for _testsinglephase.
- self.assertIs(basic.look_up_self(), basic_lookedup)
- self.assertEqual(basic.initialized_count(), expected_init_count)
-
- # Check the basic variant that has state.
-
- name = f'{self.NAME}_with_state'
- self.add_module_cleanup(name)
- with self.subTest(name):
- loaded = self.load(name)
-
- self.check_common(loaded)
- self.assertIsNot(loaded.snapshot.state_initialized, None)
- self.check_direct(loaded)
- self.check_with_reinit(loaded)
-
- # This should change the cached module for _testsinglephase.
- self.assertIs(basic.look_up_self(), basic_lookedup)
- self.assertEqual(basic.initialized_count(), expected_init_count)
-
- def test_basic_reloaded(self):
- # m_copy is copied into the existing module object.
- # Global state is not changed.
- self.maxDiff = None
-
- for name in [
- self.NAME, # the "basic" module
- f'{self.NAME}_basic_wrapper', # the indirect variant
- f'{self.NAME}_basic_copy', # the direct variant
- ]:
- self.add_module_cleanup(name)
- with self.subTest(name):
- loaded = self.load(name)
- reloaded = self.re_load(name, loaded.module)
-
- self.check_common(loaded)
- self.check_common(reloaded)
-
- # Make sure the original __dict__ did not get replaced.
- self.assertEqual(id(loaded.module.__dict__),
- loaded.snapshot.ns_id)
- self.assertEqual(loaded.snapshot.ns.__dict__,
- loaded.module.__dict__)
-
- self.assertEqual(reloaded.module.__spec__.name, reloaded.name)
- self.assertEqual(reloaded.module.__name__,
- reloaded.snapshot.ns.__name__)
-
- self.assertIs(reloaded.module, loaded.module)
- self.assertIs(reloaded.module.__dict__, loaded.module.__dict__)
- # It only happens to be the same but that's good enough here.
- # We really just want to verify that the re-loaded attrs
- # didn't change.
- self.assertIs(reloaded.snapshot.lookedup,
- loaded.snapshot.lookedup)
- self.assertEqual(reloaded.snapshot.state_initialized,
- loaded.snapshot.state_initialized)
- self.assertEqual(reloaded.snapshot.init_count,
- loaded.snapshot.init_count)
-
- self.assertIs(reloaded.snapshot.cached, reloaded.module)
-
- def test_with_reinit_reloaded(self):
- # The module's m_init func is run again.
- self.maxDiff = None
-
- # Keep a reference around.
- basic = self.load(self.NAME)
-
- for name in [
- f'{self.NAME}_with_reinit', # m_size == 0
- f'{self.NAME}_with_state', # m_size > 0
- ]:
- self.add_module_cleanup(name)
- with self.subTest(name):
- loaded = self.load(name)
- reloaded = self.re_load(name, loaded.module)
-
- self.check_common(loaded)
- self.check_common(reloaded)
-
- # Make sure the original __dict__ did not get replaced.
- self.assertEqual(id(loaded.module.__dict__),
- loaded.snapshot.ns_id)
- self.assertEqual(loaded.snapshot.ns.__dict__,
- loaded.module.__dict__)
-
- self.assertEqual(reloaded.module.__spec__.name, reloaded.name)
- self.assertEqual(reloaded.module.__name__,
- reloaded.snapshot.ns.__name__)
-
- self.assertIsNot(reloaded.module, loaded.module)
- self.assertNotEqual(reloaded.module.__dict__,
- loaded.module.__dict__)
- self.assertIs(reloaded.snapshot.lookedup, reloaded.module)
- if loaded.snapshot.state_initialized is None:
- self.assertIs(reloaded.snapshot.state_initialized, None)
- else:
- self.assertGreater(reloaded.snapshot.state_initialized,
- loaded.snapshot.state_initialized)
-
- self.assertIs(reloaded.snapshot.cached, reloaded.module)
-
- # Currently, for every single-phrase init module loaded
- # in multiple interpreters, those interpreters share a
- # PyModuleDef for that object, which can be a problem.
- # Also, we test with a single-phase module that has global state,
- # which is shared by all interpreters.
-
- @requires_subinterpreters
- def test_basic_multiple_interpreters_main_no_reset(self):
- # without resetting; already loaded in main interpreter
-
- # At this point:
- # * alive in 0 interpreters
- # * module def may or may not be loaded already
- # * module def not in _PyRuntime.imports.extensions
- # * mod init func has not run yet (since reset, at least)
- # * m_copy not set (hasn't been loaded yet or already cleared)
- # * module's global state has not been initialized yet
- # (or already cleared)
-
- main_loaded = self.load(self.NAME)
- _testsinglephase = main_loaded.module
- # Attrs set after loading are not in m_copy.
- _testsinglephase.spam = 'spam, spam, spam, spam, eggs, and spam'
-
- self.check_common(main_loaded)
- self.check_fresh(main_loaded)
-
- interpid1 = self.add_subinterpreter()
- interpid2 = self.add_subinterpreter()
-
- # At this point:
- # * alive in 1 interpreter (main)
- # * module def in _PyRuntime.imports.extensions
- # * mod init func ran for the first time (since reset, at least)
- # * m_copy was copied from the main interpreter (was NULL)
- # * module's global state was initialized
-
- # Use an interpreter that gets destroyed right away.
- loaded = self.import_in_subinterp()
- self.check_common(loaded)
- self.check_copied(loaded, main_loaded)
-
- # At this point:
- # * alive in 1 interpreter (main)
- # * module def still in _PyRuntime.imports.extensions
- # * mod init func ran again
- # * m_copy is NULL (claered when the interpreter was destroyed)
- # (was from main interpreter)
- # * module's global state was updated, not reset
-
- # Use a subinterpreter that sticks around.
- loaded = self.import_in_subinterp(interpid1)
- self.check_common(loaded)
- self.check_copied(loaded, main_loaded)
-
- # At this point:
- # * alive in 2 interpreters (main, interp1)
- # * module def still in _PyRuntime.imports.extensions
- # * mod init func ran again
- # * m_copy was copied from interp1
- # * module's global state was updated, not reset
-
- # Use a subinterpreter while the previous one is still alive.
- loaded = self.import_in_subinterp(interpid2)
- self.check_common(loaded)
- self.check_copied(loaded, main_loaded)
-
- # At this point:
- # * alive in 3 interpreters (main, interp1, interp2)
- # * module def still in _PyRuntime.imports.extensions
- # * mod init func ran again
- # * m_copy was copied from interp2 (was from interp1)
- # * module's global state was updated, not reset
-
- @requires_subinterpreters
- def test_basic_multiple_interpreters_deleted_no_reset(self):
- # without resetting; already loaded in a deleted interpreter
-
- # At this point:
- # * alive in 0 interpreters
- # * module def may or may not be loaded already
- # * module def not in _PyRuntime.imports.extensions
- # * mod init func has not run yet (since reset, at least)
- # * m_copy not set (hasn't been loaded yet or already cleared)
- # * module's global state has not been initialized yet
- # (or already cleared)
-
- interpid1 = self.add_subinterpreter()
- interpid2 = self.add_subinterpreter()
-
- # First, load in the main interpreter but then completely clear it.
- loaded_main = self.load(self.NAME)
- loaded_main.module._clear_globals()
- _testinternalcapi.clear_extension(self.NAME, self.FILE)
-
- # At this point:
- # * alive in 0 interpreters
- # * module def loaded already
- # * module def was in _PyRuntime.imports.extensions, but cleared
- # * mod init func ran for the first time (since reset, at least)
- # * m_copy was set, but cleared (was NULL)
- # * module's global state was initialized but cleared
-
- # Start with an interpreter that gets destroyed right away.
- base = self.import_in_subinterp(postscript='''
- # Attrs set after loading are not in m_copy.
- mod.spam = 'spam, spam, mash, spam, eggs, and spam'
- ''')
- self.check_common(base)
- self.check_fresh(base)
-
- # At this point:
- # * alive in 0 interpreters
- # * module def in _PyRuntime.imports.extensions
- # * mod init func ran again
- # * m_copy is NULL (claered when the interpreter was destroyed)
- # * module's global state was initialized, not reset
-
- # Use a subinterpreter that sticks around.
- loaded_interp1 = self.import_in_subinterp(interpid1)
- self.check_common(loaded_interp1)
- self.check_semi_fresh(loaded_interp1, loaded_main, base)
-
- # At this point:
- # * alive in 1 interpreter (interp1)
- # * module def still in _PyRuntime.imports.extensions
- # * mod init func ran again
- # * m_copy was copied from interp1 (was NULL)
- # * module's global state was updated, not reset
-
- # Use a subinterpreter while the previous one is still alive.
- loaded_interp2 = self.import_in_subinterp(interpid2)
- self.check_common(loaded_interp2)
- self.check_copied(loaded_interp2, loaded_interp1)
-
- # At this point:
- # * alive in 2 interpreters (interp1, interp2)
- # * module def still in _PyRuntime.imports.extensions
- # * mod init func ran again
- # * m_copy was copied from interp2 (was from interp1)
- # * module's global state was updated, not reset
-
- @requires_subinterpreters
- @requires_load_dynamic
- def test_basic_multiple_interpreters_reset_each(self):
- # resetting between each interpreter
-
- # At this point:
- # * alive in 0 interpreters
- # * module def may or may not be loaded already
- # * module def not in _PyRuntime.imports.extensions
- # * mod init func has not run yet (since reset, at least)
- # * m_copy not set (hasn't been loaded yet or already cleared)
- # * module's global state has not been initialized yet
- # (or already cleared)
-
- interpid1 = self.add_subinterpreter()
- interpid2 = self.add_subinterpreter()
-
- # Use an interpreter that gets destroyed right away.
- loaded = self.import_in_subinterp(
- postscript='''
- # Attrs set after loading are not in m_copy.
- mod.spam = 'spam, spam, mash, spam, eggs, and spam'
- ''',
- postcleanup=True,
- )
- self.check_common(loaded)
- self.check_fresh(loaded)
-
- # At this point:
- # * alive in 0 interpreters
- # * module def in _PyRuntime.imports.extensions
- # * mod init func ran for the first time (since reset, at least)
- # * m_copy is NULL (claered when the interpreter was destroyed)
- # * module's global state was initialized, not reset
-
- # Use a subinterpreter that sticks around.
- loaded = self.import_in_subinterp(interpid1, postcleanup=True)
- self.check_common(loaded)
- self.check_fresh(loaded)
-
- # At this point:
- # * alive in 1 interpreter (interp1)
- # * module def still in _PyRuntime.imports.extensions
- # * mod init func ran again
- # * m_copy was copied from interp1 (was NULL)
- # * module's global state was initialized, not reset
-
- # Use a subinterpreter while the previous one is still alive.
- loaded = self.import_in_subinterp(interpid2, postcleanup=True)
- self.check_common(loaded)
- self.check_fresh(loaded)
-
- # At this point:
- # * alive in 2 interpreters (interp2, interp2)
- # * module def still in _PyRuntime.imports.extensions
- # * mod init func ran again
- # * m_copy was copied from interp2 (was from interp1)
- # * module's global state was initialized, not reset
-
-
-class ReloadTests(unittest.TestCase):
-
- """Very basic tests to make sure that imp.reload() operates just like
- reload()."""
-
- def test_source(self):
- # XXX (ncoghlan): It would be nice to use test.import_helper.CleanImport
- # here, but that breaks because the os module registers some
- # handlers in copy_reg on import. Since CleanImport doesn't
- # revert that registration, the module is left in a broken
- # state after reversion. Reinitialising the module contents
- # and just reverting os.environ to its previous state is an OK
- # workaround
- with os_helper.EnvironmentVarGuard():
- import os
- imp.reload(os)
-
- def test_extension(self):
- with import_helper.CleanImport('time'):
- import time
- imp.reload(time)
-
- def test_builtin(self):
- with import_helper.CleanImport('marshal'):
- import marshal
- imp.reload(marshal)
-
- def test_with_deleted_parent(self):
- # see #18681
- from html import parser
- html = sys.modules.pop('html')
- def cleanup():
- sys.modules['html'] = html
- self.addCleanup(cleanup)
- with self.assertRaisesRegex(ImportError, 'html'):
- imp.reload(parser)
-
-
-class PEP3147Tests(unittest.TestCase):
- """Tests of PEP 3147."""
-
- tag = imp.get_tag()
-
- @unittest.skipUnless(sys.implementation.cache_tag is not None,
- 'requires sys.implementation.cache_tag not be None')
- def test_cache_from_source(self):
- # Given the path to a .py file, return the path to its PEP 3147
- # defined .pyc file (i.e. under __pycache__).
- path = os.path.join('foo', 'bar', 'baz', 'qux.py')
- expect = os.path.join('foo', 'bar', 'baz', '__pycache__',
- 'qux.{}.pyc'.format(self.tag))
- self.assertEqual(imp.cache_from_source(path, True), expect)
-
- @unittest.skipUnless(sys.implementation.cache_tag is not None,
- 'requires sys.implementation.cache_tag to not be '
- 'None')
- def test_source_from_cache(self):
- # Given the path to a PEP 3147 defined .pyc file, return the path to
- # its source. This tests the good path.
- path = os.path.join('foo', 'bar', 'baz', '__pycache__',
- 'qux.{}.pyc'.format(self.tag))
- expect = os.path.join('foo', 'bar', 'baz', 'qux.py')
- self.assertEqual(imp.source_from_cache(path), expect)
-
-
-class NullImporterTests(unittest.TestCase):
- @unittest.skipIf(os_helper.TESTFN_UNENCODABLE is None,
- "Need an undecodeable filename")
- def test_unencodeable(self):
- name = os_helper.TESTFN_UNENCODABLE
- os.mkdir(name)
- try:
- self.assertRaises(ImportError, imp.NullImporter, name)
- finally:
- os.rmdir(name)
-
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/Lib/test/test_import/__init__.py b/Lib/test/test_import/__init__.py
index 3ef07203c46c7e..41dfdaabe24664 100644
--- a/Lib/test/test_import/__init__.py
+++ b/Lib/test/test_import/__init__.py
@@ -2,6 +2,7 @@
import contextlib
import errno
import glob
+import json
import importlib.util
from importlib._bootstrap_external import _get_sourcefile
from importlib.machinery import (
@@ -18,13 +19,15 @@
import textwrap
import threading
import time
+import types
import unittest
from unittest import mock
+import _testinternalcapi
from test.support import os_helper
from test.support import (
STDLIB_DIR, swap_attr, swap_item, cpython_only, is_emscripten,
- is_wasi, run_in_subinterp_with_config)
+ is_wasi, run_in_subinterp, run_in_subinterp_with_config)
from test.support.import_helper import (
forget, make_legacy_pyc, unlink, unload, DirsOnSysPath, CleanImport)
from test.support.os_helper import (
@@ -41,6 +44,10 @@
import _testmultiphase
except ImportError:
_testmultiphase = None
+try:
+ import _xxsubinterpreters as _interpreters
+except ModuleNotFoundError:
+ _interpreters = None
skip_if_dont_write_bytecode = unittest.skipIf(
@@ -120,6 +127,182 @@ def _ready_to_import(name=None, source=""):
del sys.modules[name]
+def requires_subinterpreters(meth):
+ """Decorator to skip a test if subinterpreters are not supported."""
+ return unittest.skipIf(_interpreters is None,
+ 'subinterpreters required')(meth)
+
+
+def requires_singlephase_init(meth):
+ """Decorator to skip if single-phase init modules are not supported."""
+ meth = cpython_only(meth)
+ return unittest.skipIf(_testsinglephase is None,
+ 'test requires _testsinglephase module')(meth)
+
+
+class ModuleSnapshot(types.SimpleNamespace):
+ """A representation of a module for testing.
+
+ Fields:
+
+ * id - the module's object ID
+ * module - the actual module or an adequate substitute
+ * __file__
+ * __spec__
+ * name
+ * origin
+    * ns - a copy (a SimpleNamespace) of the module's __dict__ (or None)
+ * ns_id - the object ID of the module's __dict__
+ * cached - the sys.modules[mod.__spec__.name] entry (or None)
+ * cached_id - the object ID of the sys.modules entry (or None)
+
+ In cases where the value is not available (e.g. due to serialization),
+ the value will be None.
+ """
+ _fields = tuple('id module ns ns_id cached cached_id'.split())
+
+ @classmethod
+ def from_module(cls, mod):
+ name = mod.__spec__.name
+ cached = sys.modules.get(name)
+ return cls(
+ id=id(mod),
+ module=mod,
+ ns=types.SimpleNamespace(**mod.__dict__),
+ ns_id=id(mod.__dict__),
+ cached=cached,
+ cached_id=id(cached),
+ )
+
+ SCRIPT = textwrap.dedent('''
+ {imports}
+
+ name = {name!r}
+
+ {prescript}
+
+ mod = {name}
+
+ {body}
+
+ {postscript}
+ ''')
+ IMPORTS = textwrap.dedent('''
+ import sys
+ ''').strip()
+ SCRIPT_BODY = textwrap.dedent('''
+ # Capture the snapshot data.
+ cached = sys.modules.get(name)
+ snapshot = dict(
+ id=id(mod),
+ module=dict(
+ __file__=mod.__file__,
+ __spec__=dict(
+ name=mod.__spec__.name,
+ origin=mod.__spec__.origin,
+ ),
+ ),
+ ns=None,
+ ns_id=id(mod.__dict__),
+ cached=None,
+ cached_id=id(cached) if cached else None,
+ )
+ ''').strip()
+ CLEANUP_SCRIPT = textwrap.dedent('''
+ # Clean up the module.
+ sys.modules.pop(name, None)
+ ''').strip()
+
+ @classmethod
+ def build_script(cls, name, *,
+ prescript=None,
+ import_first=False,
+ postscript=None,
+ postcleanup=False,
+ ):
+ if postcleanup is True:
+ postcleanup = cls.CLEANUP_SCRIPT
+ elif isinstance(postcleanup, str):
+ postcleanup = textwrap.dedent(postcleanup).strip()
+ postcleanup = cls.CLEANUP_SCRIPT + os.linesep + postcleanup
+ else:
+ postcleanup = ''
+ prescript = textwrap.dedent(prescript).strip() if prescript else ''
+ postscript = textwrap.dedent(postscript).strip() if postscript else ''
+
+ if postcleanup:
+ if postscript:
+ postscript = postscript + os.linesep * 2 + postcleanup
+ else:
+ postscript = postcleanup
+
+ if import_first:
+ prescript += textwrap.dedent(f'''
+
+ # Now import the module.
+ assert name not in sys.modules
+ import {name}''')
+
+ return cls.SCRIPT.format(
+ imports=cls.IMPORTS.strip(),
+ name=name,
+ prescript=prescript.strip(),
+ body=cls.SCRIPT_BODY.strip(),
+ postscript=postscript,
+ )
+
+ @classmethod
+ def parse(cls, text):
+ raw = json.loads(text)
+ mod = raw['module']
+ mod['__spec__'] = types.SimpleNamespace(**mod['__spec__'])
+ raw['module'] = types.SimpleNamespace(**mod)
+ return cls(**raw)
+
+ @classmethod
+ def from_subinterp(cls, name, interpid=None, *, pipe=None, **script_kwds):
+ if pipe is not None:
+ return cls._from_subinterp(name, interpid, pipe, script_kwds)
+ pipe = os.pipe()
+ try:
+ return cls._from_subinterp(name, interpid, pipe, script_kwds)
+ finally:
+ r, w = pipe
+ os.close(r)
+ os.close(w)
+
+ @classmethod
+ def _from_subinterp(cls, name, interpid, pipe, script_kwargs):
+ r, w = pipe
+
+ # Build the script.
+ postscript = textwrap.dedent(f'''
+ # Send the result over the pipe.
+ import json
+ import os
+ os.write({w}, json.dumps(snapshot).encode())
+
+ ''')
+ _postscript = script_kwargs.get('postscript')
+ if _postscript:
+ _postscript = textwrap.dedent(_postscript).lstrip()
+ postscript += _postscript
+ script_kwargs['postscript'] = postscript.strip()
+ script = cls.build_script(name, **script_kwargs)
+
+ # Run the script.
+ if interpid is None:
+ ret = run_in_subinterp(script)
+ if ret != 0:
+ raise AssertionError(f'{ret} != 0')
+ else:
+ _interpreters.run_string(interpid, script)
+
+ # Parse the results.
+ text = os.read(r, 1000)
+ return cls.parse(text.decode())
+
+
class ImportTests(unittest.TestCase):
def setUp(self):
@@ -1453,7 +1636,12 @@ class SubinterpImportTests(unittest.TestCase):
allow_exec=False,
allow_threads=True,
allow_daemon_threads=False,
+ # Isolation-related config values aren't included here.
+ )
+ ISOLATED = dict(
+ use_main_obmalloc=False,
)
+ NOT_ISOLATED = {k: not v for k, v in ISOLATED.items()}
@unittest.skipUnless(hasattr(os, "pipe"), "requires os.pipe()")
def pipe(self):
@@ -1486,6 +1674,7 @@ def import_script(self, name, fd, check_override=None):
def run_here(self, name, *,
check_singlephase_setting=False,
check_singlephase_override=None,
+ isolated=False,
):
"""
Try importing the named module in a subinterpreter.
@@ -1506,6 +1695,7 @@ def run_here(self, name, *,
kwargs = dict(
**self.RUN_KWARGS,
+ **(self.ISOLATED if isolated else self.NOT_ISOLATED),
check_multi_interp_extensions=check_singlephase_setting,
)
@@ -1516,33 +1706,36 @@ def run_here(self, name, *,
self.assertEqual(ret, 0)
return os.read(r, 100)
- def check_compatible_here(self, name, *, strict=False):
+ def check_compatible_here(self, name, *, strict=False, isolated=False):
# Verify that the named module may be imported in a subinterpreter.
# (See run_here() for more info.)
out = self.run_here(name,
check_singlephase_setting=strict,
+ isolated=isolated,
)
self.assertEqual(out, b'okay')
- def check_incompatible_here(self, name):
+ def check_incompatible_here(self, name, *, isolated=False):
# Differences from check_compatible_here():
# * verify that import fails
# * "strict" is always True
out = self.run_here(name,
check_singlephase_setting=True,
+ isolated=isolated,
)
self.assertEqual(
out.decode('utf-8'),
f'ImportError: module {name} does not support loading in subinterpreters',
)
- def check_compatible_fresh(self, name, *, strict=False):
+ def check_compatible_fresh(self, name, *, strict=False, isolated=False):
# Differences from check_compatible_here():
# * subinterpreter in a new process
# * module has never been imported before in that process
# * this tests importing the module for the first time
kwargs = dict(
**self.RUN_KWARGS,
+ **(self.ISOLATED if isolated else self.NOT_ISOLATED),
check_multi_interp_extensions=strict,
)
_, out, err = script_helper.assert_python_ok('-c', textwrap.dedent(f'''
@@ -1560,12 +1753,13 @@ def check_compatible_fresh(self, name, *, strict=False):
self.assertEqual(err, b'')
self.assertEqual(out, b'okay')
- def check_incompatible_fresh(self, name):
+ def check_incompatible_fresh(self, name, *, isolated=False):
# Differences from check_compatible_fresh():
# * verify that import fails
# * "strict" is always True
kwargs = dict(
**self.RUN_KWARGS,
+ **(self.ISOLATED if isolated else self.NOT_ISOLATED),
check_multi_interp_extensions=True,
)
_, out, err = script_helper.assert_python_ok('-c', textwrap.dedent(f'''
@@ -1604,7 +1798,7 @@ def test_frozen_compat(self):
with self.subTest(f'{module}: strict, not fresh'):
self.check_compatible_here(module, strict=True)
- @unittest.skipIf(_testsinglephase is None, "test requires _testsinglephase module")
+ @requires_singlephase_init
def test_single_init_extension_compat(self):
module = '_testsinglephase'
require_extension(module)
@@ -1636,7 +1830,7 @@ def test_python_compat(self):
with self.subTest(f'{module}: strict, fresh'):
self.check_compatible_fresh(module, strict=True)
- @unittest.skipIf(_testsinglephase is None, "test requires _testsinglephase module")
+ @requires_singlephase_init
def test_singlephase_check_with_setting_and_override(self):
module = '_testsinglephase'
require_extension(module)
@@ -1671,6 +1865,693 @@ def check_incompatible(setting, override):
with self.subTest('config: check disabled; override: disabled'):
check_compatible(False, -1)
+ def test_isolated_config(self):
+ module = 'threading'
+ require_pure_python(module)
+ with self.subTest(f'{module}: strict, not fresh'):
+ self.check_compatible_here(module, strict=True, isolated=True)
+ with self.subTest(f'{module}: strict, fresh'):
+ self.check_compatible_fresh(module, strict=True, isolated=True)
+
+
+class TestSinglePhaseSnapshot(ModuleSnapshot):
+
+ @classmethod
+ def from_module(cls, mod):
+ self = super().from_module(mod)
+ self.summed = mod.sum(1, 2)
+ self.lookedup = mod.look_up_self()
+ self.lookedup_id = id(self.lookedup)
+ self.state_initialized = mod.state_initialized()
+ if hasattr(mod, 'initialized_count'):
+ self.init_count = mod.initialized_count()
+ return self
+
+ SCRIPT_BODY = ModuleSnapshot.SCRIPT_BODY + textwrap.dedent('''
+ snapshot['module'].update(dict(
+ int_const=mod.int_const,
+ str_const=mod.str_const,
+ _module_initialized=mod._module_initialized,
+ ))
+ snapshot.update(dict(
+ summed=mod.sum(1, 2),
+ lookedup_id=id(mod.look_up_self()),
+ state_initialized=mod.state_initialized(),
+ init_count=mod.initialized_count(),
+ has_spam=hasattr(mod, 'spam'),
+ spam=getattr(mod, 'spam', None),
+ ))
+ ''').rstrip()
+
+ @classmethod
+ def parse(cls, text):
+ self = super().parse(text)
+ if not self.has_spam:
+ del self.spam
+ del self.has_spam
+ return self
+
+
+@requires_singlephase_init
+class SinglephaseInitTests(unittest.TestCase):
+
+ NAME = '_testsinglephase'
+
+ @classmethod
+ def setUpClass(cls):
+ if '-R' in sys.argv or '--huntrleaks' in sys.argv:
+ # https://github.com/python/cpython/issues/102251
+ raise unittest.SkipTest('unresolved refleaks (see gh-102251)')
+
+ spec = importlib.util.find_spec(cls.NAME)
+ from importlib.machinery import ExtensionFileLoader
+ cls.FILE = spec.origin
+ cls.LOADER = type(spec.loader)
+ assert cls.LOADER is ExtensionFileLoader
+
+ # Start fresh.
+ cls.clean_up()
+
+ def tearDown(self):
+ # Clean up the module.
+ self.clean_up()
+
+ @classmethod
+ def clean_up(cls):
+ name = cls.NAME
+ filename = cls.FILE
+ if name in sys.modules:
+ if hasattr(sys.modules[name], '_clear_globals'):
+ assert sys.modules[name].__file__ == filename
+ sys.modules[name]._clear_globals()
+ del sys.modules[name]
+ # Clear all internally cached data for the extension.
+ _testinternalcapi.clear_extension(name, filename)
+
+ #########################
+ # helpers
+
+ def add_module_cleanup(self, name):
+ def clean_up():
+ # Clear all internally cached data for the extension.
+ _testinternalcapi.clear_extension(name, self.FILE)
+ self.addCleanup(clean_up)
+
+ def _load_dynamic(self, name, path):
+ """
+ Load an extension module.
+ """
+ # This is essentially copied from the old imp module.
+ from importlib._bootstrap import _load
+ loader = self.LOADER(name, path)
+
+ # Issue bpo-24748: Skip the sys.modules check in _load_module_shim;
+ # always load new extension.
+ spec = importlib.util.spec_from_file_location(name, path,
+ loader=loader)
+ return _load(spec)
+
+ def load(self, name):
+ try:
+ already_loaded = self.already_loaded
+ except AttributeError:
+ already_loaded = self.already_loaded = {}
+ assert name not in already_loaded
+ mod = self._load_dynamic(name, self.FILE)
+ self.assertNotIn(mod, already_loaded.values())
+ already_loaded[name] = mod
+ return types.SimpleNamespace(
+ name=name,
+ module=mod,
+ snapshot=TestSinglePhaseSnapshot.from_module(mod),
+ )
+
+ def re_load(self, name, mod):
+ assert sys.modules[name] is mod
+ assert mod.__dict__ == mod.__dict__
+ reloaded = self._load_dynamic(name, self.FILE)
+ return types.SimpleNamespace(
+ name=name,
+ module=reloaded,
+ snapshot=TestSinglePhaseSnapshot.from_module(reloaded),
+ )
+
+ # subinterpreters
+
+ def add_subinterpreter(self):
+ interpid = _interpreters.create(isolated=False)
+ _interpreters.run_string(interpid, textwrap.dedent('''
+ import sys
+ import _testinternalcapi
+ '''))
+ def clean_up():
+ _interpreters.run_string(interpid, textwrap.dedent(f'''
+ name = {self.NAME!r}
+ if name in sys.modules:
+ sys.modules[name]._clear_globals()
+ _testinternalcapi.clear_extension(name, {self.FILE!r})
+ '''))
+ _interpreters.destroy(interpid)
+ self.addCleanup(clean_up)
+ return interpid
+
+ def import_in_subinterp(self, interpid=None, *,
+ postscript=None,
+ postcleanup=False,
+ ):
+ name = self.NAME
+
+ if postcleanup:
+ import_ = 'import _testinternalcapi' if interpid is None else ''
+ postcleanup = f'''
+ {import_}
+ mod._clear_globals()
+ _testinternalcapi.clear_extension(name, {self.FILE!r})
+ '''
+
+ try:
+ pipe = self._pipe
+ except AttributeError:
+ r, w = pipe = self._pipe = os.pipe()
+ self.addCleanup(os.close, r)
+ self.addCleanup(os.close, w)
+
+ snapshot = TestSinglePhaseSnapshot.from_subinterp(
+ name,
+ interpid,
+ pipe=pipe,
+ import_first=True,
+ postscript=postscript,
+ postcleanup=postcleanup,
+ )
+
+ return types.SimpleNamespace(
+ name=name,
+ module=None,
+ snapshot=snapshot,
+ )
+
+ # checks
+
+ def check_common(self, loaded):
+ isolated = False
+
+ mod = loaded.module
+ if not mod:
+ # It came from a subinterpreter.
+ isolated = True
+ mod = loaded.snapshot.module
+ # mod.__name__ might not match, but the spec will.
+ self.assertEqual(mod.__spec__.name, loaded.name)
+ self.assertEqual(mod.__file__, self.FILE)
+ self.assertEqual(mod.__spec__.origin, self.FILE)
+ if not isolated:
+ self.assertTrue(issubclass(mod.error, Exception))
+ self.assertEqual(mod.int_const, 1969)
+ self.assertEqual(mod.str_const, 'something different')
+ self.assertIsInstance(mod._module_initialized, float)
+ self.assertGreater(mod._module_initialized, 0)
+
+ snap = loaded.snapshot
+ self.assertEqual(snap.summed, 3)
+ if snap.state_initialized is not None:
+ self.assertIsInstance(snap.state_initialized, float)
+ self.assertGreater(snap.state_initialized, 0)
+ if isolated:
+ # The "looked up" module is interpreter-specific
+ # (interp->imports.modules_by_index was set for the module).
+ self.assertEqual(snap.lookedup_id, snap.id)
+ self.assertEqual(snap.cached_id, snap.id)
+ with self.assertRaises(AttributeError):
+ snap.spam
+ else:
+ self.assertIs(snap.lookedup, mod)
+ self.assertIs(snap.cached, mod)
+
+ def check_direct(self, loaded):
+ # The module has its own PyModuleDef, with a matching name.
+ self.assertEqual(loaded.module.__name__, loaded.name)
+ self.assertIs(loaded.snapshot.lookedup, loaded.module)
+
+ def check_indirect(self, loaded, orig):
+ # The module re-uses another's PyModuleDef, with a different name.
+ assert orig is not loaded.module
+ assert orig.__name__ != loaded.name
+ self.assertNotEqual(loaded.module.__name__, loaded.name)
+ self.assertIs(loaded.snapshot.lookedup, loaded.module)
+
+ def check_basic(self, loaded, expected_init_count):
+ # m_size == -1
+ # The module loads fresh the first time and copies m_copy after.
+ snap = loaded.snapshot
+ self.assertIsNot(snap.state_initialized, None)
+ self.assertIsInstance(snap.init_count, int)
+ self.assertGreater(snap.init_count, 0)
+ self.assertEqual(snap.init_count, expected_init_count)
+
+ def check_with_reinit(self, loaded):
+ # m_size >= 0
+ # The module loads fresh every time.
+ pass
+
+ def check_fresh(self, loaded):
+ """
+ The module had not been loaded before (at least since fully reset).
+ """
+ snap = loaded.snapshot
+ # The module's init func was run.
+ # A copy of the module's __dict__ was stored in def->m_base.m_copy.
+ # The previous m_copy was deleted first.
+ # _PyRuntime.imports.extensions was set.
+ self.assertEqual(snap.init_count, 1)
+ # The global state was initialized.
+ # The module attrs were initialized from that state.
+ self.assertEqual(snap.module._module_initialized,
+ snap.state_initialized)
+
+ def check_semi_fresh(self, loaded, base, prev):
+ """
+ The module had been loaded before and then reset
+ (but the module global state wasn't).
+ """
+ snap = loaded.snapshot
+ # The module's init func was run again.
+ # A copy of the module's __dict__ was stored in def->m_base.m_copy.
+ # The previous m_copy was deleted first.
+ # The module globals did not get reset.
+ self.assertNotEqual(snap.id, base.snapshot.id)
+ self.assertNotEqual(snap.id, prev.snapshot.id)
+ self.assertEqual(snap.init_count, prev.snapshot.init_count + 1)
+ # The global state was updated.
+ # The module attrs were initialized from that state.
+ self.assertEqual(snap.module._module_initialized,
+ snap.state_initialized)
+ self.assertNotEqual(snap.state_initialized,
+ base.snapshot.state_initialized)
+ self.assertNotEqual(snap.state_initialized,
+ prev.snapshot.state_initialized)
+
+ def check_copied(self, loaded, base):
+ """
+ The module had been loaded before and never reset.
+ """
+ snap = loaded.snapshot
+ # The module's init func was not run again.
+ # The interpreter copied m_copy, as set by the other interpreter,
+ # with objects owned by the other interpreter.
+ # The module globals did not get reset.
+ self.assertNotEqual(snap.id, base.snapshot.id)
+ self.assertEqual(snap.init_count, base.snapshot.init_count)
+ # The global state was not updated since the init func did not run.
+ # The module attrs were not directly initialized from that state.
+ # The state and module attrs still match the previous loading.
+ self.assertEqual(snap.module._module_initialized,
+ snap.state_initialized)
+ self.assertEqual(snap.state_initialized,
+ base.snapshot.state_initialized)
+
+ #########################
+ # the tests
+
+ def test_cleared_globals(self):
+ loaded = self.load(self.NAME)
+ _testsinglephase = loaded.module
+ init_before = _testsinglephase.state_initialized()
+
+ _testsinglephase._clear_globals()
+ init_after = _testsinglephase.state_initialized()
+ init_count = _testsinglephase.initialized_count()
+
+ self.assertGreater(init_before, 0)
+ self.assertEqual(init_after, 0)
+ self.assertEqual(init_count, -1)
+
+ def test_variants(self):
+ # Exercise the most meaningful variants described in Python/import.c.
+ self.maxDiff = None
+
+ # Check the "basic" module.
+
+ name = self.NAME
+ expected_init_count = 1
+ with self.subTest(name):
+ loaded = self.load(name)
+
+ self.check_common(loaded)
+ self.check_direct(loaded)
+ self.check_basic(loaded, expected_init_count)
+ basic = loaded.module
+
+ # Check its indirect variants.
+
+ name = f'{self.NAME}_basic_wrapper'
+ self.add_module_cleanup(name)
+ expected_init_count += 1
+ with self.subTest(name):
+ loaded = self.load(name)
+
+ self.check_common(loaded)
+ self.check_indirect(loaded, basic)
+ self.check_basic(loaded, expected_init_count)
+
+ # Currently PyState_AddModule() always replaces the cached module.
+ self.assertIs(basic.look_up_self(), loaded.module)
+ self.assertEqual(basic.initialized_count(), expected_init_count)
+
+ # The cached module shouldn't change after this point.
+ basic_lookedup = loaded.module
+
+ # Check its direct variant.
+
+ name = f'{self.NAME}_basic_copy'
+ self.add_module_cleanup(name)
+ expected_init_count += 1
+ with self.subTest(name):
+ loaded = self.load(name)
+
+ self.check_common(loaded)
+ self.check_direct(loaded)
+ self.check_basic(loaded, expected_init_count)
+
+ # This should change the cached module for _testsinglephase.
+ self.assertIs(basic.look_up_self(), basic_lookedup)
+ self.assertEqual(basic.initialized_count(), expected_init_count)
+
+ # Check the non-basic variant that has no state.
+
+ name = f'{self.NAME}_with_reinit'
+ self.add_module_cleanup(name)
+ with self.subTest(name):
+ loaded = self.load(name)
+
+ self.check_common(loaded)
+ self.assertIs(loaded.snapshot.state_initialized, None)
+ self.check_direct(loaded)
+ self.check_with_reinit(loaded)
+
+ # This should change the cached module for _testsinglephase.
+ self.assertIs(basic.look_up_self(), basic_lookedup)
+ self.assertEqual(basic.initialized_count(), expected_init_count)
+
+ # Check the basic variant that has state.
+
+ name = f'{self.NAME}_with_state'
+ self.add_module_cleanup(name)
+ with self.subTest(name):
+ loaded = self.load(name)
+
+ self.check_common(loaded)
+ self.assertIsNot(loaded.snapshot.state_initialized, None)
+ self.check_direct(loaded)
+ self.check_with_reinit(loaded)
+
+ # This should change the cached module for _testsinglephase.
+ self.assertIs(basic.look_up_self(), basic_lookedup)
+ self.assertEqual(basic.initialized_count(), expected_init_count)
+
+ def test_basic_reloaded(self):
+ # m_copy is copied into the existing module object.
+ # Global state is not changed.
+ self.maxDiff = None
+
+ for name in [
+ self.NAME, # the "basic" module
+ f'{self.NAME}_basic_wrapper', # the indirect variant
+ f'{self.NAME}_basic_copy', # the direct variant
+ ]:
+ self.add_module_cleanup(name)
+ with self.subTest(name):
+ loaded = self.load(name)
+ reloaded = self.re_load(name, loaded.module)
+
+ self.check_common(loaded)
+ self.check_common(reloaded)
+
+ # Make sure the original __dict__ did not get replaced.
+ self.assertEqual(id(loaded.module.__dict__),
+ loaded.snapshot.ns_id)
+ self.assertEqual(loaded.snapshot.ns.__dict__,
+ loaded.module.__dict__)
+
+ self.assertEqual(reloaded.module.__spec__.name, reloaded.name)
+ self.assertEqual(reloaded.module.__name__,
+ reloaded.snapshot.ns.__name__)
+
+ self.assertIs(reloaded.module, loaded.module)
+ self.assertIs(reloaded.module.__dict__, loaded.module.__dict__)
+ # It only happens to be the same but that's good enough here.
+ # We really just want to verify that the re-loaded attrs
+ # didn't change.
+ self.assertIs(reloaded.snapshot.lookedup,
+ loaded.snapshot.lookedup)
+ self.assertEqual(reloaded.snapshot.state_initialized,
+ loaded.snapshot.state_initialized)
+ self.assertEqual(reloaded.snapshot.init_count,
+ loaded.snapshot.init_count)
+
+ self.assertIs(reloaded.snapshot.cached, reloaded.module)
+
+ def test_with_reinit_reloaded(self):
+ # The module's m_init func is run again.
+ self.maxDiff = None
+
+ # Keep a reference around.
+ basic = self.load(self.NAME)
+
+ for name in [
+ f'{self.NAME}_with_reinit', # m_size == 0
+ f'{self.NAME}_with_state', # m_size > 0
+ ]:
+ self.add_module_cleanup(name)
+ with self.subTest(name):
+ loaded = self.load(name)
+ reloaded = self.re_load(name, loaded.module)
+
+ self.check_common(loaded)
+ self.check_common(reloaded)
+
+ # Make sure the original __dict__ did not get replaced.
+ self.assertEqual(id(loaded.module.__dict__),
+ loaded.snapshot.ns_id)
+ self.assertEqual(loaded.snapshot.ns.__dict__,
+ loaded.module.__dict__)
+
+ self.assertEqual(reloaded.module.__spec__.name, reloaded.name)
+ self.assertEqual(reloaded.module.__name__,
+ reloaded.snapshot.ns.__name__)
+
+ self.assertIsNot(reloaded.module, loaded.module)
+ self.assertNotEqual(reloaded.module.__dict__,
+ loaded.module.__dict__)
+ self.assertIs(reloaded.snapshot.lookedup, reloaded.module)
+ if loaded.snapshot.state_initialized is None:
+ self.assertIs(reloaded.snapshot.state_initialized, None)
+ else:
+ self.assertGreater(reloaded.snapshot.state_initialized,
+ loaded.snapshot.state_initialized)
+
+ self.assertIs(reloaded.snapshot.cached, reloaded.module)
+
+    # Currently, for every single-phase init module loaded
+ # in multiple interpreters, those interpreters share a
+ # PyModuleDef for that object, which can be a problem.
+ # Also, we test with a single-phase module that has global state,
+ # which is shared by all interpreters.
+
+ @requires_subinterpreters
+ def test_basic_multiple_interpreters_main_no_reset(self):
+ # without resetting; already loaded in main interpreter
+
+ # At this point:
+ # * alive in 0 interpreters
+ # * module def may or may not be loaded already
+ # * module def not in _PyRuntime.imports.extensions
+ # * mod init func has not run yet (since reset, at least)
+ # * m_copy not set (hasn't been loaded yet or already cleared)
+ # * module's global state has not been initialized yet
+ # (or already cleared)
+
+ main_loaded = self.load(self.NAME)
+ _testsinglephase = main_loaded.module
+ # Attrs set after loading are not in m_copy.
+ _testsinglephase.spam = 'spam, spam, spam, spam, eggs, and spam'
+
+ self.check_common(main_loaded)
+ self.check_fresh(main_loaded)
+
+ interpid1 = self.add_subinterpreter()
+ interpid2 = self.add_subinterpreter()
+
+ # At this point:
+ # * alive in 1 interpreter (main)
+ # * module def in _PyRuntime.imports.extensions
+ # * mod init func ran for the first time (since reset, at least)
+ # * m_copy was copied from the main interpreter (was NULL)
+ # * module's global state was initialized
+
+ # Use an interpreter that gets destroyed right away.
+ loaded = self.import_in_subinterp()
+ self.check_common(loaded)
+ self.check_copied(loaded, main_loaded)
+
+ # At this point:
+ # * alive in 1 interpreter (main)
+ # * module def still in _PyRuntime.imports.extensions
+ # * mod init func ran again
+        # * m_copy is NULL (cleared when the interpreter was destroyed)
+ # (was from main interpreter)
+ # * module's global state was updated, not reset
+
+ # Use a subinterpreter that sticks around.
+ loaded = self.import_in_subinterp(interpid1)
+ self.check_common(loaded)
+ self.check_copied(loaded, main_loaded)
+
+ # At this point:
+ # * alive in 2 interpreters (main, interp1)
+ # * module def still in _PyRuntime.imports.extensions
+ # * mod init func ran again
+ # * m_copy was copied from interp1
+ # * module's global state was updated, not reset
+
+ # Use a subinterpreter while the previous one is still alive.
+ loaded = self.import_in_subinterp(interpid2)
+ self.check_common(loaded)
+ self.check_copied(loaded, main_loaded)
+
+ # At this point:
+ # * alive in 3 interpreters (main, interp1, interp2)
+ # * module def still in _PyRuntime.imports.extensions
+ # * mod init func ran again
+ # * m_copy was copied from interp2 (was from interp1)
+ # * module's global state was updated, not reset
+
+ @requires_subinterpreters
+ def test_basic_multiple_interpreters_deleted_no_reset(self):
+ # without resetting; already loaded in a deleted interpreter
+
+ # At this point:
+ # * alive in 0 interpreters
+ # * module def may or may not be loaded already
+ # * module def not in _PyRuntime.imports.extensions
+ # * mod init func has not run yet (since reset, at least)
+ # * m_copy not set (hasn't been loaded yet or already cleared)
+ # * module's global state has not been initialized yet
+ # (or already cleared)
+
+ interpid1 = self.add_subinterpreter()
+ interpid2 = self.add_subinterpreter()
+
+ # First, load in the main interpreter but then completely clear it.
+ loaded_main = self.load(self.NAME)
+ loaded_main.module._clear_globals()
+ _testinternalcapi.clear_extension(self.NAME, self.FILE)
+
+ # At this point:
+ # * alive in 0 interpreters
+ # * module def loaded already
+ # * module def was in _PyRuntime.imports.extensions, but cleared
+ # * mod init func ran for the first time (since reset, at least)
+ # * m_copy was set, but cleared (was NULL)
+ # * module's global state was initialized but cleared
+
+ # Start with an interpreter that gets destroyed right away.
+ base = self.import_in_subinterp(postscript='''
+ # Attrs set after loading are not in m_copy.
+ mod.spam = 'spam, spam, mash, spam, eggs, and spam'
+ ''')
+ self.check_common(base)
+ self.check_fresh(base)
+
+ # At this point:
+ # * alive in 0 interpreters
+ # * module def in _PyRuntime.imports.extensions
+ # * mod init func ran again
+        # * m_copy is NULL (cleared when the interpreter was destroyed)
+ # * module's global state was initialized, not reset
+
+ # Use a subinterpreter that sticks around.
+ loaded_interp1 = self.import_in_subinterp(interpid1)
+ self.check_common(loaded_interp1)
+ self.check_semi_fresh(loaded_interp1, loaded_main, base)
+
+ # At this point:
+ # * alive in 1 interpreter (interp1)
+ # * module def still in _PyRuntime.imports.extensions
+ # * mod init func ran again
+ # * m_copy was copied from interp1 (was NULL)
+ # * module's global state was updated, not reset
+
+ # Use a subinterpreter while the previous one is still alive.
+ loaded_interp2 = self.import_in_subinterp(interpid2)
+ self.check_common(loaded_interp2)
+ self.check_copied(loaded_interp2, loaded_interp1)
+
+ # At this point:
+ # * alive in 2 interpreters (interp1, interp2)
+ # * module def still in _PyRuntime.imports.extensions
+ # * mod init func ran again
+ # * m_copy was copied from interp2 (was from interp1)
+ # * module's global state was updated, not reset
+
+ @requires_subinterpreters
+ def test_basic_multiple_interpreters_reset_each(self):
+ # resetting between each interpreter
+
+ # At this point:
+ # * alive in 0 interpreters
+ # * module def may or may not be loaded already
+ # * module def not in _PyRuntime.imports.extensions
+ # * mod init func has not run yet (since reset, at least)
+ # * m_copy not set (hasn't been loaded yet or already cleared)
+ # * module's global state has not been initialized yet
+ # (or already cleared)
+
+ interpid1 = self.add_subinterpreter()
+ interpid2 = self.add_subinterpreter()
+
+ # Use an interpreter that gets destroyed right away.
+ loaded = self.import_in_subinterp(
+ postscript='''
+ # Attrs set after loading are not in m_copy.
+ mod.spam = 'spam, spam, mash, spam, eggs, and spam'
+ ''',
+ postcleanup=True,
+ )
+ self.check_common(loaded)
+ self.check_fresh(loaded)
+
+ # At this point:
+ # * alive in 0 interpreters
+ # * module def in _PyRuntime.imports.extensions
+ # * mod init func ran for the first time (since reset, at least)
+        # * m_copy is NULL (cleared when the interpreter was destroyed)
+ # * module's global state was initialized, not reset
+
+ # Use a subinterpreter that sticks around.
+ loaded = self.import_in_subinterp(interpid1, postcleanup=True)
+ self.check_common(loaded)
+ self.check_fresh(loaded)
+
+ # At this point:
+ # * alive in 1 interpreter (interp1)
+ # * module def still in _PyRuntime.imports.extensions
+ # * mod init func ran again
+ # * m_copy was copied from interp1 (was NULL)
+ # * module's global state was initialized, not reset
+
+ # Use a subinterpreter while the previous one is still alive.
+ loaded = self.import_in_subinterp(interpid2, postcleanup=True)
+ self.check_common(loaded)
+ self.check_fresh(loaded)
+
+ # At this point:
+        # * alive in 2 interpreters (interp1, interp2)
+ # * module def still in _PyRuntime.imports.extensions
+ # * mod init func ran again
+ # * m_copy was copied from interp2 (was from interp1)
+ # * module's global state was initialized, not reset
+
if __name__ == '__main__':
# Test needs to be a package, so we can do relative imports.
diff --git a/Lib/test/test_importlib/_context.py b/Lib/test/test_importlib/_context.py
new file mode 100644
index 00000000000000..8a53eb55d1503b
--- /dev/null
+++ b/Lib/test/test_importlib/_context.py
@@ -0,0 +1,13 @@
+import contextlib
+
+
+# from jaraco.context 4.3
+class suppress(contextlib.suppress, contextlib.ContextDecorator):
+ """
+ A version of contextlib.suppress with decorator support.
+
+ >>> @suppress(KeyError)
+ ... def key_error():
+ ... {}['']
+ >>> key_error()
+ """
diff --git a/Lib/test/test_importlib/_path.py b/Lib/test/test_importlib/_path.py
new file mode 100644
index 00000000000000..71a704389b986e
--- /dev/null
+++ b/Lib/test/test_importlib/_path.py
@@ -0,0 +1,109 @@
+# from jaraco.path 3.5
+
+import functools
+import pathlib
+from typing import Dict, Union
+
+try:
+ from typing import Protocol, runtime_checkable
+except ImportError: # pragma: no cover
+ # Python 3.7
+ from typing_extensions import Protocol, runtime_checkable # type: ignore
+
+
+FilesSpec = Dict[str, Union[str, bytes, 'FilesSpec']] # type: ignore
+
+
+@runtime_checkable
+class TreeMaker(Protocol):
+ def __truediv__(self, *args, **kwargs):
+ ... # pragma: no cover
+
+ def mkdir(self, **kwargs):
+ ... # pragma: no cover
+
+ def write_text(self, content, **kwargs):
+ ... # pragma: no cover
+
+ def write_bytes(self, content):
+ ... # pragma: no cover
+
+
+def _ensure_tree_maker(obj: Union[str, TreeMaker]) -> TreeMaker:
+ return obj if isinstance(obj, TreeMaker) else pathlib.Path(obj) # type: ignore
+
+
+def build(
+ spec: FilesSpec,
+ prefix: Union[str, TreeMaker] = pathlib.Path(), # type: ignore
+):
+ """
+ Build a set of files/directories, as described by the spec.
+
+ Each key represents a pathname, and the value represents
+ the content. Content may be a nested directory.
+
+ >>> spec = {
+ ... 'README.txt': "A README file",
+ ... "foo": {
+ ... "__init__.py": "",
+ ... "bar": {
+ ... "__init__.py": "",
+ ... },
+ ... "baz.py": "# Some code",
+ ... }
+ ... }
+ >>> target = getfixture('tmp_path')
+ >>> build(spec, target)
+ >>> target.joinpath('foo/baz.py').read_text(encoding='utf-8')
+ '# Some code'
+ """
+ for name, contents in spec.items():
+ create(contents, _ensure_tree_maker(prefix) / name)
+
+
+@functools.singledispatch
+def create(content: Union[str, bytes, FilesSpec], path):
+ path.mkdir(exist_ok=True)
+ build(content, prefix=path) # type: ignore
+
+
+@create.register
+def _(content: bytes, path):
+ path.write_bytes(content)
+
+
+@create.register
+def _(content: str, path):
+ path.write_text(content, encoding='utf-8')
+
+
+@create.register
+def _(content: str, path):
+ path.write_text(content, encoding='utf-8')
+
+
+class Recording:
+ """
+ A TreeMaker object that records everything that would be written.
+
+ >>> r = Recording()
+ >>> build({'foo': {'foo1.txt': 'yes'}, 'bar.txt': 'abc'}, r)
+ >>> r.record
+ ['foo/foo1.txt', 'bar.txt']
+ """
+
+ def __init__(self, loc=pathlib.PurePosixPath(), record=None):
+ self.loc = loc
+ self.record = record if record is not None else []
+
+ def __truediv__(self, other):
+ return Recording(self.loc / other, self.record)
+
+ def write_text(self, content, **kwargs):
+ self.record.append(str(self.loc))
+
+ write_bytes = write_text
+
+ def mkdir(self, **kwargs):
+ return
diff --git a/Lib/test/test_importlib/fixtures.py b/Lib/test/test_importlib/fixtures.py
index e7be77b3957c67..a364a977bce781 100644
--- a/Lib/test/test_importlib/fixtures.py
+++ b/Lib/test/test_importlib/fixtures.py
@@ -10,7 +10,10 @@
from test.support.os_helper import FS_NONASCII
from test.support import requires_zlib
-from typing import Dict, Union
+
+from . import _path
+from ._path import FilesSpec
+
try:
from importlib import resources # type: ignore
@@ -83,13 +86,8 @@ def setUp(self):
self.fixtures.enter_context(self.add_sys_path(self.site_dir))
-# Except for python/mypy#731, prefer to define
-# FilesDef = Dict[str, Union['FilesDef', str]]
-FilesDef = Dict[str, Union[Dict[str, Union[Dict[str, str], str]], str]]
-
-
class DistInfoPkg(OnSysPath, SiteDir):
- files: FilesDef = {
+ files: FilesSpec = {
"distinfo_pkg-1.0.0.dist-info": {
"METADATA": """
Name: distinfo-pkg
@@ -131,7 +129,7 @@ def make_uppercase(self):
class DistInfoPkgWithDot(OnSysPath, SiteDir):
- files: FilesDef = {
+ files: FilesSpec = {
"pkg_dot-1.0.0.dist-info": {
"METADATA": """
Name: pkg.dot
@@ -146,7 +144,7 @@ def setUp(self):
class DistInfoPkgWithDotLegacy(OnSysPath, SiteDir):
- files: FilesDef = {
+ files: FilesSpec = {
"pkg.dot-1.0.0.dist-info": {
"METADATA": """
Name: pkg.dot
@@ -173,7 +171,7 @@ def setUp(self):
class EggInfoPkg(OnSysPath, SiteDir):
- files: FilesDef = {
+ files: FilesSpec = {
"egginfo_pkg.egg-info": {
"PKG-INFO": """
Name: egginfo-pkg
@@ -212,8 +210,99 @@ def setUp(self):
build_files(EggInfoPkg.files, prefix=self.site_dir)
+class EggInfoPkgPipInstalledNoToplevel(OnSysPath, SiteDir):
+ files: FilesSpec = {
+ "egg_with_module_pkg.egg-info": {
+ "PKG-INFO": "Name: egg_with_module-pkg",
+ # SOURCES.txt is made from the source archive, and contains files
+ # (setup.py) that are not present after installation.
+ "SOURCES.txt": """
+ egg_with_module.py
+ setup.py
+ egg_with_module_pkg.egg-info/PKG-INFO
+ egg_with_module_pkg.egg-info/SOURCES.txt
+ egg_with_module_pkg.egg-info/top_level.txt
+ """,
+ # installed-files.txt is written by pip, and is a strictly more
+ # accurate source than SOURCES.txt as to the installed contents of
+ # the package.
+ "installed-files.txt": """
+ ../egg_with_module.py
+ PKG-INFO
+ SOURCES.txt
+ top_level.txt
+ """,
+ # missing top_level.txt (to trigger fallback to installed-files.txt)
+ },
+ "egg_with_module.py": """
+ def main():
+ print("hello world")
+ """,
+ }
+
+ def setUp(self):
+ super().setUp()
+ build_files(EggInfoPkgPipInstalledNoToplevel.files, prefix=self.site_dir)
+
+
+class EggInfoPkgPipInstalledNoModules(OnSysPath, SiteDir):
+ files: FilesSpec = {
+ "egg_with_no_modules_pkg.egg-info": {
+ "PKG-INFO": "Name: egg_with_no_modules-pkg",
+ # SOURCES.txt is made from the source archive, and contains files
+ # (setup.py) that are not present after installation.
+ "SOURCES.txt": """
+ setup.py
+ egg_with_no_modules_pkg.egg-info/PKG-INFO
+ egg_with_no_modules_pkg.egg-info/SOURCES.txt
+ egg_with_no_modules_pkg.egg-info/top_level.txt
+ """,
+ # installed-files.txt is written by pip, and is a strictly more
+ # accurate source than SOURCES.txt as to the installed contents of
+ # the package.
+ "installed-files.txt": """
+ PKG-INFO
+ SOURCES.txt
+ top_level.txt
+ """,
+ # top_level.txt correctly reflects that no modules are installed
+ "top_level.txt": b"\n",
+ },
+ }
+
+ def setUp(self):
+ super().setUp()
+ build_files(EggInfoPkgPipInstalledNoModules.files, prefix=self.site_dir)
+
+
+class EggInfoPkgSourcesFallback(OnSysPath, SiteDir):
+ files: FilesSpec = {
+ "sources_fallback_pkg.egg-info": {
+ "PKG-INFO": "Name: sources_fallback-pkg",
+ # SOURCES.txt is made from the source archive, and contains files
+ # (setup.py) that are not present after installation.
+ "SOURCES.txt": """
+ sources_fallback.py
+ setup.py
+ sources_fallback_pkg.egg-info/PKG-INFO
+ sources_fallback_pkg.egg-info/SOURCES.txt
+ """,
+ # missing installed-files.txt (i.e. not installed by pip) and
+ # missing top_level.txt (to trigger fallback to SOURCES.txt)
+ },
+ "sources_fallback.py": """
+ def main():
+ print("hello world")
+ """,
+ }
+
+ def setUp(self):
+ super().setUp()
+ build_files(EggInfoPkgSourcesFallback.files, prefix=self.site_dir)
+
+
class EggInfoFile(OnSysPath, SiteDir):
- files: FilesDef = {
+ files: FilesSpec = {
"egginfo_file.egg-info": """
Metadata-Version: 1.0
Name: egginfo_file
@@ -233,38 +322,22 @@ def setUp(self):
build_files(EggInfoFile.files, prefix=self.site_dir)
-def build_files(file_defs, prefix=pathlib.Path()):
- """Build a set of files/directories, as described by the
+# dedent all text strings before writing
+orig = _path.create.registry[str]
+_path.create.register(str, lambda content, path: orig(DALS(content), path))
- file_defs dictionary. Each key/value pair in the dictionary is
- interpreted as a filename/contents pair. If the contents value is a
- dictionary, a directory is created, and the dictionary interpreted
- as the files within it, recursively.
- For example:
+build_files = _path.build
- {"README.txt": "A README file",
- "foo": {
- "__init__.py": "",
- "bar": {
- "__init__.py": "",
- },
- "baz.py": "# Some code",
- }
- }
- """
- for name, contents in file_defs.items():
- full_name = prefix / name
- if isinstance(contents, dict):
- full_name.mkdir()
- build_files(contents, prefix=full_name)
- else:
- if isinstance(contents, bytes):
- with full_name.open('wb') as f:
- f.write(contents)
- else:
- with full_name.open('w', encoding='utf-8') as f:
- f.write(DALS(contents))
+
+def build_record(file_defs):
+ return ''.join(f'{name},,\n' for name in record_names(file_defs))
+
+
+def record_names(file_defs):
+ recording = _path.Recording()
+ _path.build(file_defs, recording)
+ return recording.record
class FileBuilder:
diff --git a/Lib/test/test_importlib/test_main.py b/Lib/test/test_importlib/test_main.py
index 30b68b6ae7d86e..46cd2b696d4cc8 100644
--- a/Lib/test/test_importlib/test_main.py
+++ b/Lib/test/test_importlib/test_main.py
@@ -1,7 +1,10 @@
import re
import pickle
import unittest
+import warnings
import importlib.metadata
+import contextlib
+import itertools
try:
import pyfakefs.fake_filesystem_unittest as ffs
@@ -9,6 +12,7 @@
from .stubs import fake_filesystem_unittest as ffs
from . import fixtures
+from ._context import suppress
from importlib.metadata import (
Distribution,
EntryPoint,
@@ -22,6 +26,13 @@
)
+@contextlib.contextmanager
+def suppress_known_deprecation():
+ with warnings.catch_warnings(record=True) as ctx:
+ warnings.simplefilter('default', category=DeprecationWarning)
+ yield ctx
+
+
class BasicTests(fixtures.DistInfoPkg, unittest.TestCase):
version_pattern = r'\d+\.\d+(\.\d)?'
@@ -37,7 +48,7 @@ def test_for_name_does_not_exist(self):
def test_package_not_found_mentions_metadata(self):
"""
When a package is not found, that could indicate that the
- packgae is not installed or that it is installed without
+ package is not installed or that it is installed without
metadata. Ensure the exception mentions metadata to help
guide users toward the cause. See #124.
"""
@@ -46,8 +57,12 @@ def test_package_not_found_mentions_metadata(self):
assert "metadata" in str(ctx.exception)
- def test_new_style_classes(self):
- self.assertIsInstance(Distribution, type)
+ # expected to fail until ABC is enforced
+ @suppress(AssertionError)
+ @suppress_known_deprecation()
+ def test_abc_enforced(self):
+ with self.assertRaises(TypeError):
+ type('DistributionSubclass', (Distribution,), {})()
@fixtures.parameterize(
dict(name=None),
@@ -172,11 +187,21 @@ def test_metadata_loads_egg_info(self):
assert meta['Description'] == 'pôrˈtend'
-class DiscoveryTests(fixtures.EggInfoPkg, fixtures.DistInfoPkg, unittest.TestCase):
+class DiscoveryTests(
+ fixtures.EggInfoPkg,
+ fixtures.EggInfoPkgPipInstalledNoToplevel,
+ fixtures.EggInfoPkgPipInstalledNoModules,
+ fixtures.EggInfoPkgSourcesFallback,
+ fixtures.DistInfoPkg,
+ unittest.TestCase,
+):
def test_package_discovery(self):
dists = list(distributions())
assert all(isinstance(dist, Distribution) for dist in dists)
assert any(dist.metadata['Name'] == 'egginfo-pkg' for dist in dists)
+ assert any(dist.metadata['Name'] == 'egg_with_module-pkg' for dist in dists)
+ assert any(dist.metadata['Name'] == 'egg_with_no_modules-pkg' for dist in dists)
+ assert any(dist.metadata['Name'] == 'sources_fallback-pkg' for dist in dists)
assert any(dist.metadata['Name'] == 'distinfo-pkg' for dist in dists)
def test_invalid_usage(self):
@@ -324,3 +349,79 @@ def test_packages_distributions_neither_toplevel_nor_files(self):
prefix=self.site_dir,
)
packages_distributions()
+
+ def test_packages_distributions_all_module_types(self):
+ """
+        Test top-level modules detected on a package without 'top_level.txt'.
+ """
+ suffixes = importlib.machinery.all_suffixes()
+ metadata = dict(
+ METADATA="""
+ Name: all_distributions
+ Version: 1.0.0
+ """,
+ )
+ files = {
+ 'all_distributions-1.0.0.dist-info': metadata,
+ }
+ for i, suffix in enumerate(suffixes):
+ files.update(
+ {
+ f'importable-name {i}{suffix}': '',
+ f'in_namespace_{i}': {
+ f'mod{suffix}': '',
+ },
+ f'in_package_{i}': {
+ '__init__.py': '',
+ f'mod{suffix}': '',
+ },
+ }
+ )
+ metadata.update(RECORD=fixtures.build_record(files))
+ fixtures.build_files(files, prefix=self.site_dir)
+
+ distributions = packages_distributions()
+
+ for i in range(len(suffixes)):
+ assert distributions[f'importable-name {i}'] == ['all_distributions']
+ assert distributions[f'in_namespace_{i}'] == ['all_distributions']
+ assert distributions[f'in_package_{i}'] == ['all_distributions']
+
+ assert not any(name.endswith('.dist-info') for name in distributions)
+
+
+class PackagesDistributionsEggTest(
+ fixtures.EggInfoPkg,
+ fixtures.EggInfoPkgPipInstalledNoToplevel,
+ fixtures.EggInfoPkgPipInstalledNoModules,
+ fixtures.EggInfoPkgSourcesFallback,
+ unittest.TestCase,
+):
+ def test_packages_distributions_on_eggs(self):
+ """
+        Test old-style egg packages with various combinations of
+        'top_level.txt', 'SOURCES.txt', and 'installed-files.txt' available.
+ """
+ distributions = packages_distributions()
+
+ def import_names_from_package(package_name):
+ return {
+ import_name
+ for import_name, package_names in distributions.items()
+ if package_name in package_names
+ }
+
+ # egginfo-pkg declares one import ('mod') via top_level.txt
+ assert import_names_from_package('egginfo-pkg') == {'mod'}
+
+ # egg_with_module-pkg has one import ('egg_with_module') inferred from
+ # installed-files.txt (top_level.txt is missing)
+ assert import_names_from_package('egg_with_module-pkg') == {'egg_with_module'}
+
+ # egg_with_no_modules-pkg should not be associated with any import names
+ # (top_level.txt is empty, and installed-files.txt has no .py files)
+ assert import_names_from_package('egg_with_no_modules-pkg') == set()
+
+ # sources_fallback-pkg has one import ('sources_fallback') inferred from
+        # SOURCES.txt (top_level.txt and installed-files.txt are missing)
+ assert import_names_from_package('sources_fallback-pkg') == {'sources_fallback'}
diff --git a/Lib/test/test_importlib/test_metadata_api.py b/Lib/test/test_importlib/test_metadata_api.py
index 71c47e62d27124..33c6e85ee94753 100644
--- a/Lib/test/test_importlib/test_metadata_api.py
+++ b/Lib/test/test_importlib/test_metadata_api.py
@@ -27,12 +27,14 @@ def suppress_known_deprecation():
class APITests(
fixtures.EggInfoPkg,
+ fixtures.EggInfoPkgPipInstalledNoToplevel,
+ fixtures.EggInfoPkgPipInstalledNoModules,
+ fixtures.EggInfoPkgSourcesFallback,
fixtures.DistInfoPkg,
fixtures.DistInfoPkgWithDot,
fixtures.EggInfoFile,
unittest.TestCase,
):
-
version_pattern = r'\d+\.\d+(\.\d)?'
def test_retrieves_version_of_self(self):
@@ -63,15 +65,28 @@ def test_prefix_not_matched(self):
distribution(prefix)
def test_for_top_level(self):
- self.assertEqual(
- distribution('egginfo-pkg').read_text('top_level.txt').strip(), 'mod'
- )
+ tests = [
+ ('egginfo-pkg', 'mod'),
+ ('egg_with_no_modules-pkg', ''),
+ ]
+ for pkg_name, expect_content in tests:
+ with self.subTest(pkg_name):
+ self.assertEqual(
+ distribution(pkg_name).read_text('top_level.txt').strip(),
+ expect_content,
+ )
def test_read_text(self):
- top_level = [
- path for path in files('egginfo-pkg') if path.name == 'top_level.txt'
- ][0]
- self.assertEqual(top_level.read_text(), 'mod\n')
+ tests = [
+ ('egginfo-pkg', 'mod\n'),
+ ('egg_with_no_modules-pkg', '\n'),
+ ]
+ for pkg_name, expect_content in tests:
+ with self.subTest(pkg_name):
+ top_level = [
+ path for path in files(pkg_name) if path.name == 'top_level.txt'
+ ][0]
+ self.assertEqual(top_level.read_text(), expect_content)
def test_entry_points(self):
eps = entry_points()
@@ -137,6 +152,28 @@ def test_metadata_for_this_package(self):
classifiers = md.get_all('Classifier')
assert 'Topic :: Software Development :: Libraries' in classifiers
+ def test_missing_key_legacy(self):
+ """
+ Requesting a missing key will still return None, but warn.
+ """
+ md = metadata('distinfo-pkg')
+ with suppress_known_deprecation():
+ assert md['does-not-exist'] is None
+
+ def test_get_key(self):
+ """
+ Getting a key gets the key.
+ """
+ md = metadata('egginfo-pkg')
+ assert md.get('Name') == 'egginfo-pkg'
+
+ def test_get_missing_key(self):
+ """
+ Requesting a missing key will return None.
+ """
+ md = metadata('distinfo-pkg')
+ assert md.get('does-not-exist') is None
+
@staticmethod
def _test_files(files):
root = files[0].root
@@ -159,6 +196,9 @@ def test_files_dist_info(self):
def test_files_egg_info(self):
self._test_files(files('egginfo-pkg'))
+ self._test_files(files('egg_with_module-pkg'))
+ self._test_files(files('egg_with_no_modules-pkg'))
+ self._test_files(files('sources_fallback-pkg'))
def test_version_egg_info_file(self):
self.assertEqual(version('egginfo-file'), '0.1')
diff --git a/Lib/test/test_importlib/util.py b/Lib/test/test_importlib/util.py
index 9032fd18d3f95b..e348733f6ce3c3 100644
--- a/Lib/test/test_importlib/util.py
+++ b/Lib/test/test_importlib/util.py
@@ -131,9 +131,8 @@ def uncache(*names):
"""
for name in names:
- if name in ('sys', 'marshal', 'imp'):
- raise ValueError(
- "cannot uncache {0}".format(name))
+ if name in ('sys', 'marshal'):
+ raise ValueError("cannot uncache {}".format(name))
try:
del sys.modules[name]
except KeyError:
diff --git a/Lib/test/test_inspect.py b/Lib/test/test_inspect.py
index 3a3646f1861e80..42e3d709bd683f 100644
--- a/Lib/test/test_inspect.py
+++ b/Lib/test/test_inspect.py
@@ -1820,8 +1820,7 @@ def test_errors(self):
self.assertEqualException(f, '2, 3, 4')
self.assertEqualException(f, '1, 2, 3, a=1')
self.assertEqualException(f, '2, 3, 4, c=5')
- # XXX: success of this one depends on dict order
- ## self.assertEqualException(f, '2, 3, 4, a=1, c=5')
+ self.assertEqualException(f, '2, 3, 4, a=1, c=5')
# f got an unexpected keyword argument
self.assertEqualException(f, 'c=2')
self.assertEqualException(f, '2, c=3')
@@ -1832,17 +1831,19 @@ def test_errors(self):
self.assertEqualException(f, '1, a=2')
self.assertEqualException(f, '1, **{"a":2}')
self.assertEqualException(f, '1, 2, b=3')
- # XXX: Python inconsistency
- # - for functions and bound methods: unexpected keyword 'c'
- # - for unbound methods: multiple values for keyword 'a'
- #self.assertEqualException(f, '1, c=3, a=2')
+ self.assertEqualException(f, '1, c=3, a=2')
# issue11256:
f3 = self.makeCallable('**c')
self.assertEqualException(f3, '1, 2')
self.assertEqualException(f3, '1, 2, a=1, b=2')
f4 = self.makeCallable('*, a, b=0')
- self.assertEqualException(f3, '1, 2')
- self.assertEqualException(f3, '1, 2, a=1, b=2')
+ self.assertEqualException(f4, '1, 2')
+ self.assertEqualException(f4, '1, 2, a=1, b=2')
+ self.assertEqualException(f4, 'a=1, a=3')
+ self.assertEqualException(f4, 'a=1, c=3')
+ self.assertEqualException(f4, 'a=1, a=3, b=4')
+ self.assertEqualException(f4, 'a=1, b=2, a=3, b=4')
+ self.assertEqualException(f4, 'a=1, a=2, a=3, b=4')
# issue #20816: getcallargs() fails to iterate over non-existent
# kwonlydefaults and raises a wrong TypeError
@@ -2462,18 +2463,43 @@ def test_signature_object(self):
self.assertEqual(str(S()), '()')
self.assertEqual(repr(S().parameters), 'mappingproxy(OrderedDict())')
- def test(po, pk, pod=42, pkd=100, *args, ko, **kwargs):
+ def test(po, /, pk, pkd=100, *args, ko, kod=10, **kwargs):
pass
+
sig = inspect.signature(test)
- po = sig.parameters['po'].replace(kind=P.POSITIONAL_ONLY)
- pod = sig.parameters['pod'].replace(kind=P.POSITIONAL_ONLY)
+ self.assertTrue(repr(sig).startswith(' {42:'ham'}: pass
foo_partial = functools.partial(foo, a=1)
@@ -2872,8 +2901,6 @@ def foo(cls, *, arg):
def test_signature_on_partial(self):
from functools import partial
- Parameter = inspect.Parameter
-
def test():
pass
@@ -2988,8 +3015,6 @@ def test(a, b, c:int) -> 42:
((('c', ..., int, "positional_or_keyword"),),
42))
- psig = inspect.signature(partial(partial(test, 1), 2))
-
def foo(a):
return a
_foo = partial(partial(foo, a=10), a=20)
@@ -3044,14 +3069,9 @@ def foo(a=1, b=2, c=3):
self.assertEqual(_foo(*ba.args, **ba.kwargs), (12, 10, 20))
- def foo(a, b, c, d, **kwargs):
+ def foo(a, b, /, c, d, **kwargs):
pass
sig = inspect.signature(foo)
- params = sig.parameters.copy()
- params['a'] = params['a'].replace(kind=Parameter.POSITIONAL_ONLY)
- params['b'] = params['b'].replace(kind=Parameter.POSITIONAL_ONLY)
- foo.__signature__ = inspect.Signature(params.values())
- sig = inspect.signature(foo)
self.assertEqual(str(sig), '(a, b, /, c, d, **kwargs)')
self.assertEqual(self.signature(partial(foo, 1)),
@@ -3556,14 +3576,9 @@ def test_signature_str_positional_only(self):
P = inspect.Parameter
S = inspect.Signature
- def test(a_po, *, b, **kwargs):
+ def test(a_po, /, *, b, **kwargs):
return a_po, kwargs
- sig = inspect.signature(test)
- new_params = list(sig.parameters.values())
- new_params[0] = new_params[0].replace(kind=P.POSITIONAL_ONLY)
- test.__signature__ = sig.replace(parameters=new_params)
-
self.assertEqual(str(inspect.signature(test)),
'(a_po, /, *, b, **kwargs)')
@@ -3593,6 +3608,14 @@ def test() -> 42:
self.assertEqual(sig.return_annotation, 42)
self.assertEqual(sig, inspect.signature(test))
+ def test_signature_replaced(self):
+ def test():
+ pass
+
+ spam_param = inspect.Parameter('spam', inspect.Parameter.POSITIONAL_ONLY)
+ sig = test.__signature__ = inspect.Signature(parameters=(spam_param,))
+ self.assertEqual(sig, inspect.signature(test))
+
def test_signature_on_mangled_parameters(self):
class Spam:
def foo(self, __p1:1=2, *, __p2:2=3):
@@ -4155,18 +4178,9 @@ def test(a, *args, b, z=100, **kwargs):
self.assertEqual(ba.args, (10, 20))
def test_signature_bind_positional_only(self):
- P = inspect.Parameter
-
- def test(a_po, b_po, c_po=3, foo=42, *, bar=50, **kwargs):
+ def test(a_po, b_po, c_po=3, /, foo=42, *, bar=50, **kwargs):
return a_po, b_po, c_po, foo, bar, kwargs
- sig = inspect.signature(test)
- new_params = collections.OrderedDict(tuple(sig.parameters.items()))
- for name in ('a_po', 'b_po', 'c_po'):
- new_params[name] = new_params[name].replace(kind=P.POSITIONAL_ONLY)
- new_sig = sig.replace(parameters=new_params.values())
- test.__signature__ = new_sig
-
self.assertEqual(self.call(test, 1, 2, 4, 5, bar=6),
(1, 2, 4, 5, 6, {}))
@@ -4587,7 +4601,6 @@ def test_qualname_source(self):
self.assertEqual(err, b'')
def test_builtins(self):
- module = importlib.import_module('unittest')
_, out, err = assert_python_failure('-m', 'inspect',
'sys')
lines = err.decode().splitlines()
diff --git a/Lib/test/test_launcher.py b/Lib/test/test_launcher.py
index 2f35eaf08a2dc9..362b507d158288 100644
--- a/Lib/test/test_launcher.py
+++ b/Lib/test/test_launcher.py
@@ -394,17 +394,17 @@ def test_filter_to_company_with_default(self):
def test_filter_to_tag(self):
company = "PythonTestSuite"
- data = self.run_py([f"-V:3.100"])
+ data = self.run_py(["-V:3.100"])
self.assertEqual("X.Y.exe", data["LaunchCommand"])
self.assertEqual(company, data["env.company"])
self.assertEqual("3.100", data["env.tag"])
- data = self.run_py([f"-V:3.100-32"])
+ data = self.run_py(["-V:3.100-32"])
self.assertEqual("X.Y-32.exe", data["LaunchCommand"])
self.assertEqual(company, data["env.company"])
self.assertEqual("3.100-32", data["env.tag"])
- data = self.run_py([f"-V:3.100-arm64"])
+ data = self.run_py(["-V:3.100-arm64"])
self.assertEqual("X.Y-arm64.exe -X fake_arg_for_test", data["LaunchCommand"])
self.assertEqual(company, data["env.company"])
self.assertEqual("3.100-arm64", data["env.tag"])
@@ -421,7 +421,7 @@ def test_filter_to_company_and_tag(self):
def test_filter_with_single_install(self):
company = "PythonTestSuite1"
data = self.run_py(
- [f"-V:Nonexistent"],
+ ["-V:Nonexistent"],
env={"PYLAUNCHER_LIMIT_TO_COMPANY": company},
expect_returncode=103,
)
@@ -500,7 +500,7 @@ def test_py_default_short_argv0(self):
data = self.run_py(["--version"], argv=f'{argv0} --version')
self.assertEqual("PythonTestSuite", data["SearchInfo.company"])
self.assertEqual("3.100", data["SearchInfo.tag"])
- self.assertEqual(f'X.Y.exe --version', data["stdout"].strip())
+ self.assertEqual("X.Y.exe --version", data["stdout"].strip())
def test_py_default_in_list(self):
data = self.run_py(["-0"], env=TEST_PY_ENV)
@@ -662,7 +662,7 @@ def test_install(self):
self.assertIn("9PJPW5LDXLZ5", cmd)
def test_literal_shebang_absolute(self):
- with self.script(f"#! C:/some_random_app -witharg") as script:
+ with self.script("#! C:/some_random_app -witharg") as script:
data = self.run_py([script])
self.assertEqual(
f"C:\\some_random_app -witharg {script}",
@@ -670,7 +670,7 @@ def test_literal_shebang_absolute(self):
)
def test_literal_shebang_relative(self):
- with self.script(f"#! ..\\some_random_app -witharg") as script:
+ with self.script("#! ..\\some_random_app -witharg") as script:
data = self.run_py([script])
self.assertEqual(
f"{script.parent.parent}\\some_random_app -witharg {script}",
@@ -678,14 +678,14 @@ def test_literal_shebang_relative(self):
)
def test_literal_shebang_quoted(self):
- with self.script(f'#! "some random app" -witharg') as script:
+ with self.script('#! "some random app" -witharg') as script:
data = self.run_py([script])
self.assertEqual(
f'"{script.parent}\\some random app" -witharg {script}',
data["stdout"].strip(),
)
- with self.script(f'#! some" random "app -witharg') as script:
+ with self.script('#! some" random "app -witharg') as script:
data = self.run_py([script])
self.assertEqual(
f'"{script.parent}\\some random app" -witharg {script}',
@@ -693,7 +693,7 @@ def test_literal_shebang_quoted(self):
)
def test_literal_shebang_quoted_escape(self):
- with self.script(f'#! some\\" random "app -witharg') as script:
+ with self.script('#! some\\" random "app -witharg') as script:
data = self.run_py([script])
self.assertEqual(
f'"{script.parent}\\some\\ random app" -witharg {script}',
diff --git a/Lib/test/test_ntpath.py b/Lib/test/test_ntpath.py
index 42b9587ca18107..0e57c165ca98ea 100644
--- a/Lib/test/test_ntpath.py
+++ b/Lib/test/test_ntpath.py
@@ -169,6 +169,7 @@ def test_splitroot(self):
# gh-81790: support device namespace, including UNC drives.
tester('ntpath.splitroot("//?/c:")', ("//?/c:", "", ""))
+ tester('ntpath.splitroot("//./c:")', ("//./c:", "", ""))
tester('ntpath.splitroot("//?/c:/")', ("//?/c:", "/", ""))
tester('ntpath.splitroot("//?/c:/dir")', ("//?/c:", "/", "dir"))
tester('ntpath.splitroot("//?/UNC")', ("//?/UNC", "", ""))
@@ -179,8 +180,12 @@ def test_splitroot(self):
tester('ntpath.splitroot("//?/VOLUME{00000000-0000-0000-0000-000000000000}/spam")',
('//?/VOLUME{00000000-0000-0000-0000-000000000000}', '/', 'spam'))
tester('ntpath.splitroot("//?/BootPartition/")', ("//?/BootPartition", "/", ""))
+ tester('ntpath.splitroot("//./BootPartition/")', ("//./BootPartition", "/", ""))
+ tester('ntpath.splitroot("//./PhysicalDrive0")', ("//./PhysicalDrive0", "", ""))
+ tester('ntpath.splitroot("//./nul")', ("//./nul", "", ""))
tester('ntpath.splitroot("\\\\?\\c:")', ("\\\\?\\c:", "", ""))
+ tester('ntpath.splitroot("\\\\.\\c:")', ("\\\\.\\c:", "", ""))
tester('ntpath.splitroot("\\\\?\\c:\\")', ("\\\\?\\c:", "\\", ""))
tester('ntpath.splitroot("\\\\?\\c:\\dir")', ("\\\\?\\c:", "\\", "dir"))
tester('ntpath.splitroot("\\\\?\\UNC")', ("\\\\?\\UNC", "", ""))
@@ -193,6 +198,9 @@ def test_splitroot(self):
tester('ntpath.splitroot("\\\\?\\VOLUME{00000000-0000-0000-0000-000000000000}\\spam")',
('\\\\?\\VOLUME{00000000-0000-0000-0000-000000000000}', '\\', 'spam'))
tester('ntpath.splitroot("\\\\?\\BootPartition\\")', ("\\\\?\\BootPartition", "\\", ""))
+ tester('ntpath.splitroot("\\\\.\\BootPartition\\")', ("\\\\.\\BootPartition", "\\", ""))
+ tester('ntpath.splitroot("\\\\.\\PhysicalDrive0")', ("\\\\.\\PhysicalDrive0", "", ""))
+ tester('ntpath.splitroot("\\\\.\\nul")', ("\\\\.\\nul", "", ""))
# gh-96290: support partial/invalid UNC drives
tester('ntpath.splitroot("//")', ("//", "", "")) # empty server & missing share
diff --git a/Lib/test/test_opcache.py b/Lib/test/test_opcache.py
index e39b7260624899..57fed5d09fd7b8 100644
--- a/Lib/test/test_opcache.py
+++ b/Lib/test/test_opcache.py
@@ -1,6 +1,29 @@
import unittest
+class TestLoadSuperAttrCache(unittest.TestCase):
+ def test_descriptor_not_double_executed_on_spec_fail(self):
+ calls = []
+ class Descriptor:
+ def __get__(self, instance, owner):
+ calls.append((instance, owner))
+ return lambda: 1
+
+ class C:
+ d = Descriptor()
+
+ class D(C):
+ def f(self):
+ return super().d()
+
+ d = D()
+
+ self.assertEqual(d.f(), 1) # warmup
+ calls.clear()
+ self.assertEqual(d.f(), 1) # try to specialize
+ self.assertEqual(calls, [(d, D)])
+
+
class TestLoadAttrCache(unittest.TestCase):
def test_descriptor_added_after_optimization(self):
class Descriptor:
diff --git a/Lib/test/test_pathlib.py b/Lib/test/test_pathlib.py
index 85a7438cb27629..8b5b61a818bbbc 100644
--- a/Lib/test/test_pathlib.py
+++ b/Lib/test/test_pathlib.py
@@ -810,6 +810,9 @@ def test_drive_root_parts(self):
check(('c:/a',), 'c:', '\\', ('c:\\', 'a'))
check(('/a',), '', '\\', ('\\', 'a'))
# UNC paths.
+ check(('//',), '\\\\', '', ('\\\\',))
+ check(('//a',), '\\\\a', '', ('\\\\a',))
+ check(('//a/',), '\\\\a\\', '', ('\\\\a\\',))
check(('//a/b',), '\\\\a\\b', '\\', ('\\\\a\\b\\',))
check(('//a/b/',), '\\\\a\\b', '\\', ('\\\\a\\b\\',))
check(('//a/b/c',), '\\\\a\\b', '\\', ('\\\\a\\b\\', 'c'))
@@ -823,12 +826,26 @@ def test_drive_root_parts(self):
# UNC paths.
check(('a', '//b/c//', 'd'), '\\\\b\\c', '\\', ('\\\\b\\c\\', 'd'))
# Extended paths.
+ check(('//./c:',), '\\\\.\\c:', '', ('\\\\.\\c:',))
check(('//?/c:/',), '\\\\?\\c:', '\\', ('\\\\?\\c:\\',))
check(('//?/c:/a',), '\\\\?\\c:', '\\', ('\\\\?\\c:\\', 'a'))
check(('//?/c:/a', '/b'), '\\\\?\\c:', '\\', ('\\\\?\\c:\\', 'b'))
# Extended UNC paths (format is "\\?\UNC\server\share").
+ check(('//?',), '\\\\?', '', ('\\\\?',))
+ check(('//?/',), '\\\\?\\', '', ('\\\\?\\',))
+ check(('//?/UNC',), '\\\\?\\UNC', '', ('\\\\?\\UNC',))
+ check(('//?/UNC/',), '\\\\?\\UNC\\', '', ('\\\\?\\UNC\\',))
+ check(('//?/UNC/b',), '\\\\?\\UNC\\b', '', ('\\\\?\\UNC\\b',))
+ check(('//?/UNC/b/',), '\\\\?\\UNC\\b\\', '', ('\\\\?\\UNC\\b\\',))
check(('//?/UNC/b/c',), '\\\\?\\UNC\\b\\c', '\\', ('\\\\?\\UNC\\b\\c\\',))
+ check(('//?/UNC/b/c/',), '\\\\?\\UNC\\b\\c', '\\', ('\\\\?\\UNC\\b\\c\\',))
check(('//?/UNC/b/c/d',), '\\\\?\\UNC\\b\\c', '\\', ('\\\\?\\UNC\\b\\c\\', 'd'))
+ # UNC device paths
+ check(('//./BootPartition/',), '\\\\.\\BootPartition', '\\', ('\\\\.\\BootPartition\\',))
+ check(('//?/BootPartition/',), '\\\\?\\BootPartition', '\\', ('\\\\?\\BootPartition\\',))
+ check(('//./PhysicalDrive0',), '\\\\.\\PhysicalDrive0', '', ('\\\\.\\PhysicalDrive0',))
+ check(('//?/Volume{}/',), '\\\\?\\Volume{}', '\\', ('\\\\?\\Volume{}\\',))
+ check(('//./nul',), '\\\\.\\nul', '', ('\\\\.\\nul',))
# Second part has a root but not drive.
check(('a', '/b', 'c'), '', '\\', ('\\', 'b', 'c'))
check(('Z:/a', '/b', 'c'), 'Z:', '\\', ('Z:\\', 'b', 'c'))
@@ -1371,6 +1388,13 @@ def test_join(self):
self.assertEqual(pp, P('C:/a/b/dd:s'))
pp = p.joinpath(P('E:d:s'))
self.assertEqual(pp, P('E:d:s'))
+ # Joining onto a UNC path with no root
+ pp = P('//').joinpath('server')
+ self.assertEqual(pp, P('//server'))
+ pp = P('//server').joinpath('share')
+ self.assertEqual(pp, P('//server/share'))
+ pp = P('//./BootPartition').joinpath('Windows')
+ self.assertEqual(pp, P('//./BootPartition/Windows'))
def test_div(self):
# Basically the same as joinpath().
@@ -2678,20 +2702,20 @@ def setUp(self):
del self.sub2_tree[1][:1]
def test_walk_topdown(self):
- all = list(self.walk_path.walk())
-
- self.assertEqual(len(all), 4)
- # We can't know which order SUB1 and SUB2 will appear in.
- # Not flipped: TESTFN, SUB1, SUB11, SUB2
- # flipped: TESTFN, SUB2, SUB1, SUB11
- flipped = all[0][1][0] != "SUB1"
- all[0][1].sort()
- all[3 - 2 * flipped][-1].sort()
- all[3 - 2 * flipped][1].sort()
- self.assertEqual(all[0], (self.walk_path, ["SUB1", "SUB2"], ["tmp1"]))
- self.assertEqual(all[1 + flipped], (self.sub1_path, ["SUB11"], ["tmp2"]))
- self.assertEqual(all[2 + flipped], (self.sub11_path, [], []))
- self.assertEqual(all[3 - 2 * flipped], self.sub2_tree)
+ walker = self.walk_path.walk()
+ entry = next(walker)
+ entry[1].sort() # Ensure we visit SUB1 before SUB2
+ self.assertEqual(entry, (self.walk_path, ["SUB1", "SUB2"], ["tmp1"]))
+ entry = next(walker)
+ self.assertEqual(entry, (self.sub1_path, ["SUB11"], ["tmp2"]))
+ entry = next(walker)
+ self.assertEqual(entry, (self.sub11_path, [], []))
+ entry = next(walker)
+ entry[1].sort()
+ entry[2].sort()
+ self.assertEqual(entry, self.sub2_tree)
+ with self.assertRaises(StopIteration):
+ next(walker)
def test_walk_prune(self, walk_path=None):
if walk_path is None:
@@ -2715,24 +2739,37 @@ def test_file_like_path(self):
self.test_walk_prune(FakePath(self.walk_path).__fspath__())
def test_walk_bottom_up(self):
- all = list(self.walk_path.walk( top_down=False))
-
- self.assertEqual(len(all), 4, all)
- # We can't know which order SUB1 and SUB2 will appear in.
- # Not flipped: SUB11, SUB1, SUB2, TESTFN
- # flipped: SUB2, SUB11, SUB1, TESTFN
- flipped = all[3][1][0] != "SUB1"
- all[3][1].sort()
- all[2 - 2 * flipped][-1].sort()
- all[2 - 2 * flipped][1].sort()
- self.assertEqual(all[3],
- (self.walk_path, ["SUB1", "SUB2"], ["tmp1"]))
- self.assertEqual(all[flipped],
- (self.sub11_path, [], []))
- self.assertEqual(all[flipped + 1],
- (self.sub1_path, ["SUB11"], ["tmp2"]))
- self.assertEqual(all[2 - 2 * flipped],
- self.sub2_tree)
+ seen_testfn = seen_sub1 = seen_sub11 = seen_sub2 = False
+ for path, dirnames, filenames in self.walk_path.walk(top_down=False):
+ if path == self.walk_path:
+ self.assertFalse(seen_testfn)
+ self.assertTrue(seen_sub1)
+ self.assertTrue(seen_sub2)
+ self.assertEqual(sorted(dirnames), ["SUB1", "SUB2"])
+ self.assertEqual(filenames, ["tmp1"])
+ seen_testfn = True
+ elif path == self.sub1_path:
+ self.assertFalse(seen_testfn)
+ self.assertFalse(seen_sub1)
+ self.assertTrue(seen_sub11)
+ self.assertEqual(dirnames, ["SUB11"])
+ self.assertEqual(filenames, ["tmp2"])
+ seen_sub1 = True
+ elif path == self.sub11_path:
+ self.assertFalse(seen_sub1)
+ self.assertFalse(seen_sub11)
+ self.assertEqual(dirnames, [])
+ self.assertEqual(filenames, [])
+ seen_sub11 = True
+ elif path == self.sub2_path:
+ self.assertFalse(seen_testfn)
+ self.assertFalse(seen_sub2)
+ self.assertEqual(sorted(dirnames), sorted(self.sub2_tree[1]))
+ self.assertEqual(sorted(filenames), sorted(self.sub2_tree[2]))
+ seen_sub2 = True
+ else:
+ raise AssertionError(f"Unexpected path: {path}")
+ self.assertTrue(seen_testfn)
@os_helper.skip_unless_symlink
def test_walk_follow_symlinks(self):
diff --git a/Lib/test/test_patma.py b/Lib/test/test_patma.py
index 0ed54079c99b30..3dbd19dfffd318 100644
--- a/Lib/test/test_patma.py
+++ b/Lib/test/test_patma.py
@@ -3165,6 +3165,19 @@ def f(command): # 0
self.assertListEqual(self._trace(f, "go x"), [1, 2, 3])
self.assertListEqual(self._trace(f, "spam"), [1, 2, 3])
+ def test_unreachable_code(self):
+ def f(command): # 0
+ match command: # 1
+ case 1: # 2
+ if False: # 3
+ return 1 # 4
+ case _: # 5
+ if False: # 6
+ return 0 # 7
+
+ self.assertListEqual(self._trace(f, 1), [1, 2, 3])
+ self.assertListEqual(self._trace(f, 0), [1, 2, 5, 6])
+
def test_parser_deeply_nested_patterns(self):
# Deeply nested patterns can cause exponential backtracking when parsing.
# See gh-93671 for more information.
diff --git a/Lib/test/test_pdb.py b/Lib/test/test_pdb.py
index 9ad9a1c52ac102..2f712a10257984 100644
--- a/Lib/test/test_pdb.py
+++ b/Lib/test/test_pdb.py
@@ -1700,6 +1700,26 @@ def test_pdb_issue_gh_103225():
(Pdb) continue
"""
+def test_pdb_issue_gh_101517():
+ """See GH-101517
+
+ Make sure pdb doesn't crash when the exception is caught in a try/except* block
+
+ >>> def test_function():
+ ... try:
+ ... raise KeyError
+ ... except* Exception as e:
+ ... import pdb; pdb.Pdb(nosigint=True, readrc=False).set_trace()
+
+ >>> with PdbTestInput([ # doctest: +NORMALIZE_WHITESPACE
+ ... 'continue'
+ ... ]):
+ ... test_function()
+ > (5)test_function()
+ -> import pdb; pdb.Pdb(nosigint=True, readrc=False).set_trace()
+ (Pdb) continue
+ """
+
@support.requires_subprocess()
class PdbTestCase(unittest.TestCase):
@@ -2376,6 +2396,12 @@ def _create_fake_frozen_module():
# verify that pdb found the source of the "frozen" function
self.assertIn('x = "Sentinel string for gh-93696"', stdout, "Sentinel statement not found")
+ def test_non_utf8_encoding(self):
+ script_dir = os.path.join(os.path.dirname(__file__), 'encoded_modules')
+ for filename in os.listdir(script_dir):
+ if filename.endswith(".py"):
+ self._run_pdb([os.path.join(script_dir, filename)], 'q')
+
class ChecklineTests(unittest.TestCase):
def setUp(self):
linecache.clearcache() # Pdb.checkline() uses linecache.getline()
diff --git a/Lib/test/test_peepholer.py b/Lib/test/test_peepholer.py
index 01eb04b53060e9..bf7fc421a9df0a 100644
--- a/Lib/test/test_peepholer.py
+++ b/Lib/test/test_peepholer.py
@@ -810,7 +810,7 @@ def f():
self.assertInBytecode(f, 'LOAD_FAST', "a73")
def test_setting_lineno_no_undefined(self):
- code = textwrap.dedent(f"""\
+ code = textwrap.dedent("""\
def f():
x = y = 2
if not x:
@@ -842,7 +842,7 @@ def trace(frame, event, arg):
self.assertEqual(f.__code__.co_code, co_code)
def test_setting_lineno_one_undefined(self):
- code = textwrap.dedent(f"""\
+ code = textwrap.dedent("""\
def f():
x = y = 2
if not x:
@@ -876,7 +876,7 @@ def trace(frame, event, arg):
self.assertEqual(f.__code__.co_code, co_code)
def test_setting_lineno_two_undefined(self):
- code = textwrap.dedent(f"""\
+ code = textwrap.dedent("""\
def f():
x = y = 2
if not x:
diff --git a/Lib/test/test_pkgutil.py b/Lib/test/test_pkgutil.py
index 0cc99e0cc22763..4d9f5db3c6b3cf 100644
--- a/Lib/test/test_pkgutil.py
+++ b/Lib/test/test_pkgutil.py
@@ -541,14 +541,6 @@ def check_deprecated(self):
"Python 3.12; use 'importlib' instead",
DeprecationWarning))
- def test_importer_deprecated(self):
- with self.check_deprecated():
- pkgutil.ImpImporter("")
-
- def test_loader_deprecated(self):
- with self.check_deprecated():
- pkgutil.ImpLoader("", "", "", "")
-
def test_get_loader_avoids_emulation(self):
with check_warnings() as w:
self.assertIsNotNone(pkgutil.get_loader("sys"))
diff --git a/Lib/test/test_plistlib.py b/Lib/test/test_plistlib.py
index 6b457440be5430..b08ababa341cfe 100644
--- a/Lib/test/test_plistlib.py
+++ b/Lib/test/test_plistlib.py
@@ -925,7 +925,7 @@ def test_large_timestamp(self):
# Issue #26709: 32-bit timestamp out of range
for ts in -2**31-1, 2**31:
with self.subTest(ts=ts):
- d = (datetime.datetime.utcfromtimestamp(0) +
+ d = (datetime.datetime(1970, 1, 1, 0, 0) +
datetime.timedelta(seconds=ts))
data = plistlib.dumps(d, fmt=plistlib.FMT_BINARY)
self.assertEqual(plistlib.loads(data), d)
diff --git a/Lib/test/test_posix.py b/Lib/test/test_posix.py
index 77f42f7f9c937b..444f8abe4607b7 100644
--- a/Lib/test/test_posix.py
+++ b/Lib/test/test_posix.py
@@ -231,6 +231,9 @@ def test_register_at_fork(self):
with self.assertRaises(TypeError, msg="Invalid arg was allowed"):
# Ensure a combination of valid and invalid is an error.
os.register_at_fork(before=None, after_in_parent=lambda: 3)
+ with self.assertRaises(TypeError, msg="At least one argument is required"):
+ # when no arg is passed
+ os.register_at_fork()
with self.assertRaises(TypeError, msg="Invalid arg was allowed"):
# Ensure a combination of valid and invalid is an error.
os.register_at_fork(before=lambda: None, after_in_child='')
diff --git a/Lib/test/test_shutil.py b/Lib/test/test_shutil.py
index 9eaf167a9fa3c9..36f0b8a31a3715 100644
--- a/Lib/test/test_shutil.py
+++ b/Lib/test/test_shutil.py
@@ -33,6 +33,7 @@
from test import support
from test.support import os_helper
from test.support.os_helper import TESTFN, FakePath
+from test.support import warnings_helper
TESTFN2 = TESTFN + "2"
TESTFN_SRC = TESTFN + "_SRC"
@@ -1841,12 +1842,14 @@ def test_register_archive_format(self):
### shutil.unpack_archive
- def check_unpack_archive(self, format):
- self.check_unpack_archive_with_converter(format, lambda path: path)
- self.check_unpack_archive_with_converter(format, pathlib.Path)
- self.check_unpack_archive_with_converter(format, FakePath)
+ def check_unpack_archive(self, format, **kwargs):
+ self.check_unpack_archive_with_converter(
+ format, lambda path: path, **kwargs)
+ self.check_unpack_archive_with_converter(
+ format, pathlib.Path, **kwargs)
+ self.check_unpack_archive_with_converter(format, FakePath, **kwargs)
- def check_unpack_archive_with_converter(self, format, converter):
+ def check_unpack_archive_with_converter(self, format, converter, **kwargs):
root_dir, base_dir = self._create_files()
expected = rlistdir(root_dir)
expected.remove('outer')
@@ -1856,36 +1859,48 @@ def check_unpack_archive_with_converter(self, format, converter):
# let's try to unpack it now
tmpdir2 = self.mkdtemp()
- unpack_archive(converter(filename), converter(tmpdir2))
+ unpack_archive(converter(filename), converter(tmpdir2), **kwargs)
self.assertEqual(rlistdir(tmpdir2), expected)
# and again, this time with the format specified
tmpdir3 = self.mkdtemp()
- unpack_archive(converter(filename), converter(tmpdir3), format=format)
+ unpack_archive(converter(filename), converter(tmpdir3), format=format,
+ **kwargs)
self.assertEqual(rlistdir(tmpdir3), expected)
- self.assertRaises(shutil.ReadError, unpack_archive, converter(TESTFN))
- self.assertRaises(ValueError, unpack_archive, converter(TESTFN), format='xxx')
+ with self.assertRaises(shutil.ReadError):
+ unpack_archive(converter(TESTFN), **kwargs)
+ with self.assertRaises(ValueError):
+ unpack_archive(converter(TESTFN), format='xxx', **kwargs)
+
+ def check_unpack_tarball(self, format):
+ self.check_unpack_archive(format, filter='fully_trusted')
+ self.check_unpack_archive(format, filter='data')
+ with warnings_helper.check_warnings(
+ ('Python 3.14', DeprecationWarning)):
+ self.check_unpack_archive(format)
def test_unpack_archive_tar(self):
- self.check_unpack_archive('tar')
+ self.check_unpack_tarball('tar')
@support.requires_zlib()
def test_unpack_archive_gztar(self):
- self.check_unpack_archive('gztar')
+ self.check_unpack_tarball('gztar')
@support.requires_bz2()
def test_unpack_archive_bztar(self):
- self.check_unpack_archive('bztar')
+ self.check_unpack_tarball('bztar')
@support.requires_lzma()
@unittest.skipIf(AIX and not _maxdataOK(), "AIX MAXDATA must be 0x20000000 or larger")
def test_unpack_archive_xztar(self):
- self.check_unpack_archive('xztar')
+ self.check_unpack_tarball('xztar')
@support.requires_zlib()
def test_unpack_archive_zip(self):
self.check_unpack_archive('zip')
+ with self.assertRaises(TypeError):
+ self.check_unpack_archive('zip', filter='data')
def test_unpack_registry(self):
diff --git a/Lib/test/test_socketserver.py b/Lib/test/test_socketserver.py
index 2fa5069423327a..c81d559cde315d 100644
--- a/Lib/test/test_socketserver.py
+++ b/Lib/test/test_socketserver.py
@@ -47,16 +47,8 @@ def receive(sock, n, timeout=test.support.SHORT_TIMEOUT):
else:
raise RuntimeError("timed out on %r" % (sock,))
-if HAVE_UNIX_SOCKETS and HAVE_FORKING:
- class ForkingUnixStreamServer(socketserver.ForkingMixIn,
- socketserver.UnixStreamServer):
- pass
-
- class ForkingUnixDatagramServer(socketserver.ForkingMixIn,
- socketserver.UnixDatagramServer):
- pass
-
+@test.support.requires_fork()
@contextlib.contextmanager
def simple_subprocess(testcase):
"""Tests that a custom child process is not waited on (Issue 1540386)"""
@@ -211,7 +203,7 @@ def test_ThreadingUnixStreamServer(self):
@requires_forking
def test_ForkingUnixStreamServer(self):
with simple_subprocess(self):
- self.run_server(ForkingUnixStreamServer,
+ self.run_server(socketserver.ForkingUnixStreamServer,
socketserver.StreamRequestHandler,
self.stream_examine)
@@ -247,7 +239,7 @@ def test_ThreadingUnixDatagramServer(self):
@requires_unix_sockets
@requires_forking
def test_ForkingUnixDatagramServer(self):
- self.run_server(ForkingUnixDatagramServer,
+ self.run_server(socketserver.ForkingUnixDatagramServer,
socketserver.DatagramRequestHandler,
self.dgram_examine)
diff --git a/Lib/test/test_sqlite3/test_dbapi.py b/Lib/test/test_sqlite3/test_dbapi.py
index 3013abfa730ed5..1bb0e13e356e78 100644
--- a/Lib/test/test_sqlite3/test_dbapi.py
+++ b/Lib/test/test_sqlite3/test_dbapi.py
@@ -577,6 +577,30 @@ def test_connection_bad_reinit(self):
cx.executemany, "insert into t values(?)",
((v,) for v in range(3)))
+ def test_connection_config(self):
+ op = sqlite.SQLITE_DBCONFIG_ENABLE_FKEY
+ with memory_database() as cx:
+ with self.assertRaisesRegex(ValueError, "unknown"):
+ cx.getconfig(-1)
+
+ # Toggle and verify.
+ old = cx.getconfig(op)
+ new = not old
+ cx.setconfig(op, new)
+ self.assertEqual(cx.getconfig(op), new)
+
+ cx.setconfig(op) # defaults to True
+ self.assertTrue(cx.getconfig(op))
+
+ # Check that foreign key support was actually enabled.
+ with cx:
+ cx.executescript("""
+ create table t(t integer primary key);
+ create table u(u, foreign key(u) references t(t));
+ """)
+ with self.assertRaisesRegex(sqlite.IntegrityError, "constraint"):
+ cx.execute("insert into u values(0)")
+
class UninitialisedConnectionTests(unittest.TestCase):
def setUp(self):
diff --git a/Lib/test/test_sqlite3/test_regression.py b/Lib/test/test_sqlite3/test_regression.py
index ad83a97c8c40d6..7e8221e7227e6e 100644
--- a/Lib/test/test_sqlite3/test_regression.py
+++ b/Lib/test/test_sqlite3/test_regression.py
@@ -491,21 +491,21 @@ def tearDown(self):
def test_recursive_cursor_init(self):
conv = lambda x: self.cur.__init__(self.con)
with patch.dict(sqlite.converters, {"INIT": conv}):
- self.cur.execute(f'select x as "x [INIT]", x from test')
+ self.cur.execute('select x as "x [INIT]", x from test')
self.assertRaisesRegex(sqlite.ProgrammingError, self.msg,
self.cur.fetchall)
def test_recursive_cursor_close(self):
conv = lambda x: self.cur.close()
with patch.dict(sqlite.converters, {"CLOSE": conv}):
- self.cur.execute(f'select x as "x [CLOSE]", x from test')
+ self.cur.execute('select x as "x [CLOSE]", x from test')
self.assertRaisesRegex(sqlite.ProgrammingError, self.msg,
self.cur.fetchall)
def test_recursive_cursor_iter(self):
conv = lambda x, l=[]: self.cur.fetchone() if l else l.append(None)
with patch.dict(sqlite.converters, {"ITER": conv}):
- self.cur.execute(f'select x as "x [ITER]", x from test')
+ self.cur.execute('select x as "x [ITER]", x from test')
self.assertRaisesRegex(sqlite.ProgrammingError, self.msg,
self.cur.fetchall)
diff --git a/Lib/test/test_sqlite3/test_types.py b/Lib/test/test_sqlite3/test_types.py
index 5e0ff353cbbd6b..fde5f888e64009 100644
--- a/Lib/test/test_sqlite3/test_types.py
+++ b/Lib/test/test_sqlite3/test_types.py
@@ -517,7 +517,7 @@ def test_sqlite_timestamp(self):
self.assertEqual(ts, ts2)
def test_sql_timestamp(self):
- now = datetime.datetime.utcnow()
+ now = datetime.datetime.now(tz=datetime.UTC)
self.cur.execute("insert into test(ts) values (current_timestamp)")
self.cur.execute("select ts from test")
with self.assertWarnsRegex(DeprecationWarning, "converter"):
diff --git a/Lib/test/test_sqlite3/test_userfunctions.py b/Lib/test/test_sqlite3/test_userfunctions.py
index 0970b0378ad615..632d657d416fd4 100644
--- a/Lib/test/test_sqlite3/test_userfunctions.py
+++ b/Lib/test/test_sqlite3/test_userfunctions.py
@@ -562,7 +562,7 @@ def test_win_exception_in_finalize(self):
# callback errors to sqlite3_step(); this implies that OperationalError
# is _not_ raised.
with patch.object(WindowSumInt, "finalize", side_effect=BadWindow):
- name = f"exception_in_finalize"
+ name = "exception_in_finalize"
self.con.create_window_function(name, 1, WindowSumInt)
self.cur.execute(self.query % name)
self.cur.fetchall()
diff --git a/Lib/test/test_strptime.py b/Lib/test/test_strptime.py
index e3fcabef946116..810c5a36e02f41 100644
--- a/Lib/test/test_strptime.py
+++ b/Lib/test/test_strptime.py
@@ -242,6 +242,16 @@ def test_ValueError(self):
# 5. Julian/ordinal day (%j) is specified with %G, but not %Y
with self.assertRaises(ValueError):
_strptime._strptime("1999 256", "%G %j")
+ # 6. Invalid ISO weeks
+ invalid_iso_weeks = [
+ "2019-00-1",
+ "2019-54-1",
+ "2021-53-1",
+ ]
+ for invalid_iso_dtstr in invalid_iso_weeks:
+ with self.subTest(invalid_iso_dtstr):
+ with self.assertRaises(ValueError):
+ _strptime._strptime(invalid_iso_dtstr, "%G-%V-%u")
def test_strptime_exception_context(self):
diff --git a/Lib/test/test_super.py b/Lib/test/test_super.py
index a68b38cf79d537..ed773a3cff2a6d 100644
--- a/Lib/test/test_super.py
+++ b/Lib/test/test_super.py
@@ -1,6 +1,8 @@
"""Unit tests for zero-argument super() & related machinery."""
import unittest
+from unittest.mock import patch
+from test import shadowed_super
class A:
@@ -283,17 +285,28 @@ def f(self):
def test_obscure_super_errors(self):
def f():
super()
- self.assertRaises(RuntimeError, f)
+ with self.assertRaisesRegex(RuntimeError, r"no arguments"):
+ f()
+
+ class C:
+ def f():
+ super()
+ with self.assertRaisesRegex(RuntimeError, r"no arguments"):
+ C.f()
+
def f(x):
del x
super()
- self.assertRaises(RuntimeError, f, None)
+ with self.assertRaisesRegex(RuntimeError, r"arg\[0\] deleted"):
+ f(None)
+
class X:
def f(x):
nonlocal __class__
del __class__
super()
- self.assertRaises(RuntimeError, X().f)
+ with self.assertRaisesRegex(RuntimeError, r"empty __class__ cell"):
+ X().f()
def test_cell_as_self(self):
class X:
@@ -325,6 +338,78 @@ def test_super_argtype(self):
with self.assertRaisesRegex(TypeError, "argument 1 must be a type"):
super(1, int)
+ def test_shadowed_global(self):
+ self.assertEqual(shadowed_super.C().method(), "truly super")
+
+ def test_shadowed_local(self):
+ class super:
+ msg = "quite super"
+
+ class C:
+ def method(self):
+ return super().msg
+
+ self.assertEqual(C().method(), "quite super")
+
+ def test_shadowed_dynamic(self):
+ class MySuper:
+ msg = "super super"
+
+ class C:
+ def method(self):
+ return super().msg
+
+ with patch("test.test_super.super", MySuper) as m:
+ self.assertEqual(C().method(), "super super")
+
+ def test_shadowed_dynamic_two_arg(self):
+ call_args = []
+ class MySuper:
+ def __init__(self, *args):
+ call_args.append(args)
+ msg = "super super"
+
+ class C:
+ def method(self):
+ return super(1, 2).msg
+
+ with patch("test.test_super.super", MySuper) as m:
+ self.assertEqual(C().method(), "super super")
+ self.assertEqual(call_args, [(1, 2)])
+
+ def test_attribute_error(self):
+ class C:
+ def method(self):
+ return super().msg
+
+ with self.assertRaisesRegex(AttributeError, "'super' object has no attribute 'msg'"):
+ C().method()
+
+ def test_bad_first_arg(self):
+ class C:
+ def method(self):
+ return super(1, self).method()
+
+ with self.assertRaisesRegex(TypeError, "argument 1 must be a type"):
+ C().method()
+
+ def test_super___class__(self):
+ class C:
+ def method(self):
+ return super().__class__
+
+ self.assertEqual(C().method(), super)
+
+ def test_super_subclass___class__(self):
+ class mysuper(super):
+ pass
+
+ class C:
+ def method(self):
+ return mysuper(C, self).__class__
+
+ self.assertEqual(C().method(), mysuper)
+
if __name__ == "__main__":
unittest.main()
diff --git a/Lib/test/test_syntax.py b/Lib/test/test_syntax.py
index f23653558a9119..f959bbb4400702 100644
--- a/Lib/test/test_syntax.py
+++ b/Lib/test/test_syntax.py
@@ -1853,6 +1853,30 @@ def f(x: *b)
Traceback (most recent call last):
...
SyntaxError: invalid syntax
+
+Invalid bytes literals:
+
+ >>> b"Ā"
+ Traceback (most recent call last):
+ ...
+ b"Ā"
+ ^^^
+ SyntaxError: bytes can only contain ASCII literal characters
+
+ >>> b"абвгде"
+ Traceback (most recent call last):
+ ...
+ b"абвгде"
+ ^^^^^^^^
+ SyntaxError: bytes can only contain ASCII literal characters
+
+ >>> b"abc ъющый" # first 3 letters are ascii
+ Traceback (most recent call last):
+ ...
+ b"abc ъющый"
+ ^^^^^^^^^^^
+ SyntaxError: bytes can only contain ASCII literal characters
+
"""
import re
diff --git a/Lib/test/test_sys.py b/Lib/test/test_sys.py
index 1aebe1b111f2e9..611cd27ecf1240 100644
--- a/Lib/test/test_sys.py
+++ b/Lib/test/test_sys.py
@@ -385,7 +385,8 @@ def test_refcount(self):
self.assertRaises(TypeError, sys.getrefcount)
c = sys.getrefcount(None)
n = None
- self.assertEqual(sys.getrefcount(None), c+1)
+ # Singleton refcnts don't change
+ self.assertEqual(sys.getrefcount(None), c)
del n
self.assertEqual(sys.getrefcount(None), c)
if hasattr(sys, "gettotalrefcount"):
diff --git a/Lib/test/test_tarfile.py b/Lib/test/test_tarfile.py
index 39f6f499c818ef..e8d322d20a5a8e 100644
--- a/Lib/test/test_tarfile.py
+++ b/Lib/test/test_tarfile.py
@@ -2,9 +2,13 @@
import os
import io
from hashlib import sha256
-from contextlib import contextmanager
+from contextlib import contextmanager, ExitStack
from random import Random
import pathlib
+import shutil
+import re
+import warnings
+import stat
import unittest
import unittest.mock
@@ -13,6 +17,7 @@
from test import support
from test.support import os_helper
from test.support import script_helper
+from test.support import warnings_helper
# Check for our compression modules.
try:
@@ -108,7 +113,7 @@ def test_fileobj_regular_file(self):
"regular file extraction failed")
def test_fileobj_readlines(self):
- self.tar.extract("ustar/regtype", TEMPDIR)
+ self.tar.extract("ustar/regtype", TEMPDIR, filter='data')
tarinfo = self.tar.getmember("ustar/regtype")
with open(os.path.join(TEMPDIR, "ustar/regtype"), "r") as fobj1:
lines1 = fobj1.readlines()
@@ -126,7 +131,7 @@ def test_fileobj_readlines(self):
"fileobj.readlines() failed")
def test_fileobj_iter(self):
- self.tar.extract("ustar/regtype", TEMPDIR)
+ self.tar.extract("ustar/regtype", TEMPDIR, filter='data')
tarinfo = self.tar.getmember("ustar/regtype")
with open(os.path.join(TEMPDIR, "ustar/regtype"), "r") as fobj1:
lines1 = fobj1.readlines()
@@ -136,7 +141,8 @@ def test_fileobj_iter(self):
"fileobj.__iter__() failed")
def test_fileobj_seek(self):
- self.tar.extract("ustar/regtype", TEMPDIR)
+ self.tar.extract("ustar/regtype", TEMPDIR,
+ filter='data')
with open(os.path.join(TEMPDIR, "ustar/regtype"), "rb") as fobj:
data = fobj.read()
@@ -467,7 +473,7 @@ def test_premature_end_of_archive(self):
t = tar.next()
with self.assertRaisesRegex(tarfile.ReadError, "unexpected end of data"):
- tar.extract(t, TEMPDIR)
+ tar.extract(t, TEMPDIR, filter='data')
with self.assertRaisesRegex(tarfile.ReadError, "unexpected end of data"):
tar.extractfile(t).read()
@@ -629,16 +635,16 @@ def test_find_members(self):
def test_extract_hardlink(self):
# Test hardlink extraction (e.g. bug #857297).
with tarfile.open(tarname, errorlevel=1, encoding="iso8859-1") as tar:
- tar.extract("ustar/regtype", TEMPDIR)
+ tar.extract("ustar/regtype", TEMPDIR, filter='data')
self.addCleanup(os_helper.unlink, os.path.join(TEMPDIR, "ustar/regtype"))
- tar.extract("ustar/lnktype", TEMPDIR)
+ tar.extract("ustar/lnktype", TEMPDIR, filter='data')
self.addCleanup(os_helper.unlink, os.path.join(TEMPDIR, "ustar/lnktype"))
with open(os.path.join(TEMPDIR, "ustar/lnktype"), "rb") as f:
data = f.read()
self.assertEqual(sha256sum(data), sha256_regtype)
- tar.extract("ustar/symtype", TEMPDIR)
+ tar.extract("ustar/symtype", TEMPDIR, filter='data')
self.addCleanup(os_helper.unlink, os.path.join(TEMPDIR, "ustar/symtype"))
with open(os.path.join(TEMPDIR, "ustar/symtype"), "rb") as f:
data = f.read()
@@ -653,13 +659,14 @@ def test_extractall(self):
os.mkdir(DIR)
try:
directories = [t for t in tar if t.isdir()]
- tar.extractall(DIR, directories)
+ tar.extractall(DIR, directories, filter='fully_trusted')
for tarinfo in directories:
path = os.path.join(DIR, tarinfo.name)
if sys.platform != "win32":
# Win32 has no support for fine grained permissions.
self.assertEqual(tarinfo.mode & 0o777,
- os.stat(path).st_mode & 0o777)
+ os.stat(path).st_mode & 0o777,
+ tarinfo.name)
def format_mtime(mtime):
if isinstance(mtime, float):
return "{} ({})".format(mtime, mtime.hex())
@@ -683,7 +690,7 @@ def test_extract_directory(self):
try:
with tarfile.open(tarname, encoding="iso8859-1") as tar:
tarinfo = tar.getmember(dirtype)
- tar.extract(tarinfo, path=DIR)
+ tar.extract(tarinfo, path=DIR, filter='fully_trusted')
extracted = os.path.join(DIR, dirtype)
self.assertEqual(os.path.getmtime(extracted), tarinfo.mtime)
if sys.platform != "win32":
@@ -696,7 +703,7 @@ def test_extractall_pathlike_name(self):
with os_helper.temp_dir(DIR), \
tarfile.open(tarname, encoding="iso8859-1") as tar:
directories = [t for t in tar if t.isdir()]
- tar.extractall(DIR, directories)
+ tar.extractall(DIR, directories, filter='fully_trusted')
for tarinfo in directories:
path = DIR / tarinfo.name
self.assertEqual(os.path.getmtime(path), tarinfo.mtime)
@@ -707,7 +714,7 @@ def test_extract_pathlike_name(self):
with os_helper.temp_dir(DIR), \
tarfile.open(tarname, encoding="iso8859-1") as tar:
tarinfo = tar.getmember(dirtype)
- tar.extract(tarinfo, path=DIR)
+ tar.extract(tarinfo, path=DIR, filter='fully_trusted')
extracted = DIR / dirtype
self.assertEqual(os.path.getmtime(extracted), tarinfo.mtime)
@@ -1075,7 +1082,7 @@ class GNUReadTest(LongnameTest, ReadTest, unittest.TestCase):
# an all platforms, and after that a test that will work only on
# platforms/filesystems that prove to support sparse files.
def _test_sparse_file(self, name):
- self.tar.extract(name, TEMPDIR)
+ self.tar.extract(name, TEMPDIR, filter='data')
filename = os.path.join(TEMPDIR, name)
with open(filename, "rb") as fobj:
data = fobj.read()
@@ -1442,7 +1449,8 @@ def test_extractall_symlinks(self):
with tarfile.open(temparchive, errorlevel=2) as tar:
# this should not raise OSError: [Errno 17] File exists
try:
- tar.extractall(path=tempdir)
+ tar.extractall(path=tempdir,
+ filter='fully_trusted')
except OSError:
self.fail("extractall failed with symlinked files")
finally:
@@ -2547,6 +2555,15 @@ def make_simple_tarfile(self, tar_name):
for tardata in files:
tf.add(tardata, arcname=os.path.basename(tardata))
+ def make_evil_tarfile(self, tar_name):
+ files = [support.findfile('tokenize_tests.txt')]
+ self.addCleanup(os_helper.unlink, tar_name)
+ with tarfile.open(tar_name, 'w') as tf:
+ benign = tarfile.TarInfo('benign')
+ tf.addfile(benign, fileobj=io.BytesIO(b''))
+ evil = tarfile.TarInfo('../evil')
+ tf.addfile(evil, fileobj=io.BytesIO(b''))
+
def test_bad_use(self):
rc, out, err = self.tarfilecmd_failure()
self.assertEqual(out, b'')
@@ -2703,6 +2720,25 @@ def test_extract_command_verbose(self):
finally:
os_helper.rmtree(tarextdir)
+ def test_extract_command_filter(self):
+ self.make_evil_tarfile(tmpname)
+ # Make an inner directory, so the member named '../evil'
+ # is still extracted into `tarextdir`
+ destdir = os.path.join(tarextdir, 'dest')
+ os.mkdir(tarextdir)
+ try:
+ with os_helper.temp_cwd(destdir):
+ self.tarfilecmd_failure('-e', tmpname,
+ '-v',
+ '--filter', 'data')
+ out = self.tarfilecmd('-e', tmpname,
+ '-v',
+ '--filter', 'fully_trusted',
+ PYTHONIOENCODING='utf-8')
+ self.assertIn(b' file is extracted.', out)
+ finally:
+ os_helper.rmtree(tarextdir)
+
def test_extract_command_different_directory(self):
self.make_simple_tarfile(tmpname)
try:
@@ -2786,7 +2822,7 @@ class LinkEmulationTest(ReadTest, unittest.TestCase):
# symbolic or hard links tarfile tries to extract these types of members
# as the regular files they point to.
def _test_link_extraction(self, name):
- self.tar.extract(name, TEMPDIR)
+ self.tar.extract(name, TEMPDIR, filter='fully_trusted')
with open(os.path.join(TEMPDIR, name), "rb") as f:
data = f.read()
self.assertEqual(sha256sum(data), sha256_regtype)
@@ -2918,8 +2954,10 @@ def test_extract_with_numeric_owner(self, mock_geteuid, mock_chmod,
mock_chown):
with self._setup_test(mock_geteuid) as (tarfl, filename_1, _,
filename_2):
- tarfl.extract(filename_1, TEMPDIR, numeric_owner=True)
- tarfl.extract(filename_2 , TEMPDIR, numeric_owner=True)
+ tarfl.extract(filename_1, TEMPDIR, numeric_owner=True,
+ filter='fully_trusted')
+ tarfl.extract(filename_2 , TEMPDIR, numeric_owner=True,
+ filter='fully_trusted')
# convert to filesystem paths
f_filename_1 = os.path.join(TEMPDIR, filename_1)
@@ -2937,7 +2975,8 @@ def test_extractall_with_numeric_owner(self, mock_geteuid, mock_chmod,
mock_chown):
with self._setup_test(mock_geteuid) as (tarfl, filename_1, dirname_1,
filename_2):
- tarfl.extractall(TEMPDIR, numeric_owner=True)
+ tarfl.extractall(TEMPDIR, numeric_owner=True,
+ filter='fully_trusted')
# convert to filesystem paths
f_filename_1 = os.path.join(TEMPDIR, filename_1)
@@ -2962,7 +3001,8 @@ def test_extractall_with_numeric_owner(self, mock_geteuid, mock_chmod,
def test_extract_without_numeric_owner(self, mock_geteuid, mock_chmod,
mock_chown):
with self._setup_test(mock_geteuid) as (tarfl, filename_1, _, _):
- tarfl.extract(filename_1, TEMPDIR, numeric_owner=False)
+ tarfl.extract(filename_1, TEMPDIR, numeric_owner=False,
+ filter='fully_trusted')
# convert to filesystem paths
f_filename_1 = os.path.join(TEMPDIR, filename_1)
@@ -2976,6 +3016,910 @@ def test_keyword_only(self, mock_geteuid):
tarfl.extract, filename_1, TEMPDIR, False, True)
+class ReplaceTests(ReadTest, unittest.TestCase):
+ def test_replace_name(self):
+ member = self.tar.getmember('ustar/regtype')
+ replaced = member.replace(name='misc/other')
+ self.assertEqual(replaced.name, 'misc/other')
+ self.assertEqual(member.name, 'ustar/regtype')
+ self.assertEqual(self.tar.getmember('ustar/regtype').name,
+ 'ustar/regtype')
+
+ def test_replace_deep(self):
+ member = self.tar.getmember('pax/regtype1')
+ replaced = member.replace()
+ replaced.pax_headers['gname'] = 'not-bar'
+ self.assertEqual(member.pax_headers['gname'], 'bar')
+ self.assertEqual(
+ self.tar.getmember('pax/regtype1').pax_headers['gname'], 'bar')
+
+ def test_replace_shallow(self):
+ member = self.tar.getmember('pax/regtype1')
+ replaced = member.replace(deep=False)
+ replaced.pax_headers['gname'] = 'not-bar'
+ self.assertEqual(member.pax_headers['gname'], 'not-bar')
+ self.assertEqual(
+ self.tar.getmember('pax/regtype1').pax_headers['gname'], 'not-bar')
+
+ def test_replace_all(self):
+ member = self.tar.getmember('ustar/regtype')
+ for attr_name in ('name', 'mtime', 'mode', 'linkname',
+ 'uid', 'gid', 'uname', 'gname'):
+ with self.subTest(attr_name=attr_name):
+ replaced = member.replace(**{attr_name: None})
+ self.assertEqual(getattr(replaced, attr_name), None)
+ self.assertNotEqual(getattr(member, attr_name), None)
+
+ def test_replace_internal(self):
+ member = self.tar.getmember('ustar/regtype')
+ with self.assertRaises(TypeError):
+ member.replace(offset=123456789)
+
+
+class NoneInfoExtractTests(ReadTest):
+ # These mainly check that all kinds of members are extracted successfully
+ # if some metadata is None.
+ # Some of the methods do additional spot checks.
+
+ # We also test that the default filters can deal with None.
+
+ extraction_filter = None
+
+ @classmethod
+ def setUpClass(cls):
+ tar = tarfile.open(tarname, mode='r', encoding="iso8859-1")
+ cls.control_dir = pathlib.Path(TEMPDIR) / "extractall_ctrl"
+ tar.errorlevel = 0
+ with ExitStack() as cm:
+ if cls.extraction_filter is None:
+ cm.enter_context(warnings.catch_warnings(
+ action="ignore", category=DeprecationWarning))
+ tar.extractall(cls.control_dir, filter=cls.extraction_filter)
+ tar.close()
+ cls.control_paths = set(
+ p.relative_to(cls.control_dir)
+ for p in pathlib.Path(cls.control_dir).glob('**/*'))
+
+ @classmethod
+ def tearDownClass(cls):
+ shutil.rmtree(cls.control_dir)
+
+ def check_files_present(self, directory):
+ got_paths = set(
+ p.relative_to(directory)
+ for p in pathlib.Path(directory).glob('**/*'))
+ self.assertEqual(self.control_paths, got_paths)
+
+ @contextmanager
+ def extract_with_none(self, *attr_names):
+ DIR = pathlib.Path(TEMPDIR) / "extractall_none"
+ self.tar.errorlevel = 0
+ for member in self.tar.getmembers():
+ for attr_name in attr_names:
+ setattr(member, attr_name, None)
+ with os_helper.temp_dir(DIR):
+ self.tar.extractall(DIR, filter='fully_trusted')
+ self.check_files_present(DIR)
+ yield DIR
+
+ def test_extractall_none_mtime(self):
+ # mtimes of extracted files should be later than 'now' -- the mtime
+ # of a previously created directory.
+ now = pathlib.Path(TEMPDIR).stat().st_mtime
+ with self.extract_with_none('mtime') as DIR:
+ for path in pathlib.Path(DIR).glob('**/*'):
+ with self.subTest(path=path):
+ try:
+ mtime = path.stat().st_mtime
+ except OSError:
+ # Some systems can't stat symlinks, ignore those
+ if not path.is_symlink():
+ raise
+ else:
+ self.assertGreaterEqual(path.stat().st_mtime, now)
+
+ def test_extractall_none_mode(self):
+ # modes of directories and regular files should match the mode
+ # of a "normally" created directory or regular file
+ dir_mode = pathlib.Path(TEMPDIR).stat().st_mode
+ regular_file = pathlib.Path(TEMPDIR) / 'regular_file'
+ regular_file.write_text('')
+ regular_file_mode = regular_file.stat().st_mode
+ with self.extract_with_none('mode') as DIR:
+ for path in pathlib.Path(DIR).glob('**/*'):
+ with self.subTest(path=path):
+ if path.is_dir():
+ self.assertEqual(path.stat().st_mode, dir_mode)
+ elif path.is_file():
+ self.assertEqual(path.stat().st_mode,
+ regular_file_mode)
+
+ def test_extractall_none_uid(self):
+ with self.extract_with_none('uid'):
+ pass
+
+ def test_extractall_none_gid(self):
+ with self.extract_with_none('gid'):
+ pass
+
+ def test_extractall_none_uname(self):
+ with self.extract_with_none('uname'):
+ pass
+
+ def test_extractall_none_gname(self):
+ with self.extract_with_none('gname'):
+ pass
+
+ def test_extractall_none_ownership(self):
+ with self.extract_with_none('uid', 'gid', 'uname', 'gname'):
+ pass
+
+class NoneInfoExtractTests_Data(NoneInfoExtractTests, unittest.TestCase):
+ extraction_filter = 'data'
+
+class NoneInfoExtractTests_FullyTrusted(NoneInfoExtractTests,
+ unittest.TestCase):
+ extraction_filter = 'fully_trusted'
+
+class NoneInfoExtractTests_Tar(NoneInfoExtractTests, unittest.TestCase):
+ extraction_filter = 'tar'
+
+class NoneInfoExtractTests_Default(NoneInfoExtractTests,
+ unittest.TestCase):
+ extraction_filter = None
+
+class NoneInfoTests_Misc(unittest.TestCase):
+ def test_add(self):
+ # When addfile() encounters None metadata, it raises a ValueError
+ bio = io.BytesIO()
+ for tarformat in (tarfile.USTAR_FORMAT, tarfile.GNU_FORMAT,
+ tarfile.PAX_FORMAT):
+ with self.subTest(tarformat=tarformat):
+ tar = tarfile.open(fileobj=bio, mode='w', format=tarformat)
+ tarinfo = tar.gettarinfo(tarname)
+ try:
+ tar.addfile(tarinfo)
+ except Exception:
+ if tarformat == tarfile.USTAR_FORMAT:
+ # In the old, limited format, adding might fail for
+ # reasons like the UID being too large
+ pass
+ else:
+ raise
+ else:
+ for attr_name in ('mtime', 'mode', 'uid', 'gid',
+ 'uname', 'gname'):
+ with self.subTest(attr_name=attr_name):
+ replaced = tarinfo.replace(**{attr_name: None})
+ with self.assertRaisesRegex(ValueError,
+ f"{attr_name}"):
+ tar.addfile(replaced)
+
+ def test_list(self):
+ # Change some metadata to None, then compare list() output
+ # word-for-word. We want list() to not raise, and to only change
+ # printout for the affected piece of metadata.
+ # (n.b.: some contents of the test archive are hardcoded.)
+ for attr_names in ({'mtime'}, {'mode'}, {'uid'}, {'gid'},
+ {'uname'}, {'gname'},
+ {'uid', 'uname'}, {'gid', 'gname'}):
+ with (self.subTest(attr_names=attr_names),
+ tarfile.open(tarname, encoding="iso8859-1") as tar):
+ tio_prev = io.TextIOWrapper(io.BytesIO(), 'ascii', newline='\n')
+ with support.swap_attr(sys, 'stdout', tio_prev):
+ tar.list()
+ for member in tar.getmembers():
+ for attr_name in attr_names:
+ setattr(member, attr_name, None)
+ tio_new = io.TextIOWrapper(io.BytesIO(), 'ascii', newline='\n')
+ with support.swap_attr(sys, 'stdout', tio_new):
+ tar.list()
+ for expected, got in zip(tio_prev.detach().getvalue().split(),
+ tio_new.detach().getvalue().split()):
+ if attr_names == {'mtime'} and re.match(rb'2003-01-\d\d', expected):
+ self.assertEqual(got, b'????-??-??')
+ elif attr_names == {'mtime'} and re.match(rb'\d\d:\d\d:\d\d', expected):
+ self.assertEqual(got, b'??:??:??')
+ elif attr_names == {'mode'} and re.match(
+ rb'.([r-][w-][x-]){3}', expected):
+ self.assertEqual(got, b'??????????')
+ elif attr_names == {'uname'} and expected.startswith(
+ (b'tarfile/', b'lars/', b'foo/')):
+ exp_user, exp_group = expected.split(b'/')
+ got_user, got_group = got.split(b'/')
+ self.assertEqual(got_group, exp_group)
+ self.assertRegex(got_user, b'[0-9]+')
+ elif attr_names == {'gname'} and expected.endswith(
+ (b'/tarfile', b'/users', b'/bar')):
+ exp_user, exp_group = expected.split(b'/')
+ got_user, got_group = got.split(b'/')
+ self.assertEqual(got_user, exp_user)
+ self.assertRegex(got_group, b'[0-9]+')
+ elif attr_names == {'uid'} and expected.startswith(
+ (b'1000/')):
+ exp_user, exp_group = expected.split(b'/')
+ got_user, got_group = got.split(b'/')
+ self.assertEqual(got_group, exp_group)
+ self.assertEqual(got_user, b'None')
+ elif attr_names == {'gid'} and expected.endswith((b'/100')):
+ exp_user, exp_group = expected.split(b'/')
+ got_user, got_group = got.split(b'/')
+ self.assertEqual(got_user, exp_user)
+ self.assertEqual(got_group, b'None')
+ elif attr_names == {'uid', 'uname'} and expected.startswith(
+ (b'tarfile/', b'lars/', b'foo/', b'1000/')):
+ exp_user, exp_group = expected.split(b'/')
+ got_user, got_group = got.split(b'/')
+ self.assertEqual(got_group, exp_group)
+ self.assertEqual(got_user, b'None')
+ elif attr_names == {'gname', 'gid'} and expected.endswith(
+ (b'/tarfile', b'/users', b'/bar', b'/100')):
+ exp_user, exp_group = expected.split(b'/')
+ got_user, got_group = got.split(b'/')
+ self.assertEqual(got_user, exp_user)
+ self.assertEqual(got_group, b'None')
+ else:
+ # In other cases the output should be the same
+ self.assertEqual(expected, got)
+
+def _filemode_to_int(mode):
+ """Inverse of `stat.filemode` (for permission bits)
+
+ Using mode strings rather than numbers makes the later tests more readable.
+ """
+ str_mode = mode[1:]
+ result = (
+ {'r': stat.S_IRUSR, '-': 0}[str_mode[0]]
+ | {'w': stat.S_IWUSR, '-': 0}[str_mode[1]]
+ | {'x': stat.S_IXUSR, '-': 0,
+ 's': stat.S_IXUSR | stat.S_ISUID,
+ 'S': stat.S_ISUID}[str_mode[2]]
+ | {'r': stat.S_IRGRP, '-': 0}[str_mode[3]]
+ | {'w': stat.S_IWGRP, '-': 0}[str_mode[4]]
+ | {'x': stat.S_IXGRP, '-': 0,
+ 's': stat.S_IXGRP | stat.S_ISGID,
+ 'S': stat.S_ISGID}[str_mode[5]]
+ | {'r': stat.S_IROTH, '-': 0}[str_mode[6]]
+ | {'w': stat.S_IWOTH, '-': 0}[str_mode[7]]
+ | {'x': stat.S_IXOTH, '-': 0,
+ 't': stat.S_IXOTH | stat.S_ISVTX,
+ 'T': stat.S_ISVTX}[str_mode[8]]
+ )
+ # check we did this right
+ assert stat.filemode(result)[1:] == mode[1:]
+
+ return result
+
+class ArchiveMaker:
+ """Helper to create a tar file with specific contents
+
+ Usage:
+
+ with ArchiveMaker() as t:
+ t.add('filename', ...)
+
+ with t.open() as tar:
+ ... # `tar` is now a TarFile with 'filename' in it!
+ """
+ def __init__(self):
+ self.bio = io.BytesIO()
+
+ def __enter__(self):
+ self.tar_w = tarfile.TarFile(mode='w', fileobj=self.bio)
+ return self
+
+ def __exit__(self, *exc):
+ self.tar_w.close()
+ self.contents = self.bio.getvalue()
+ self.bio = None
+
+ def add(self, name, *, type=None, symlink_to=None, hardlink_to=None,
+ mode=None, **kwargs):
+ """Add a member to the test archive. Call within `with`."""
+ name = str(name)
+ tarinfo = tarfile.TarInfo(name).replace(**kwargs)
+ if mode:
+ tarinfo.mode = _filemode_to_int(mode)
+ if symlink_to is not None:
+ type = tarfile.SYMTYPE
+ tarinfo.linkname = str(symlink_to)
+ if hardlink_to is not None:
+ type = tarfile.LNKTYPE
+ tarinfo.linkname = str(hardlink_to)
+ if name.endswith('/') and type is None:
+ type = tarfile.DIRTYPE
+ if type is not None:
+ tarinfo.type = type
+ if tarinfo.isreg():
+ fileobj = io.BytesIO(bytes(tarinfo.size))
+ else:
+ fileobj = None
+ self.tar_w.addfile(tarinfo, fileobj)
+
+ def open(self, **kwargs):
+ """Open the resulting archive as TarFile. Call after `with`."""
+ bio = io.BytesIO(self.contents)
+ return tarfile.open(fileobj=bio, **kwargs)
+
+# Under WASI, `os_helper.can_symlink` is False to make
+# `skip_unless_symlink` skip symlink tests.
+# But in the following tests we use can_symlink to *determine* which
+# behavior is expected.
+# Like other symlink tests, skip these on WASI for now.
+if support.is_wasi:
+ def symlink_test(f):
+ return unittest.skip("WASI: Skip symlink test for now")(f)
+else:
+ def symlink_test(f):
+ return f
+
+
+class TestExtractionFilters(unittest.TestCase):
+
+ # A temporary directory for the extraction results.
+ # All files that "escape" the destination path should still end
+ # up in this directory.
+ outerdir = pathlib.Path(TEMPDIR) / 'outerdir'
+
+ # The destination for the extraction, within `outerdir`
+ destdir = outerdir / 'dest'
+
+ @contextmanager
+ def check_context(self, tar, filter):
+ """Extracts `tar` to `self.destdir` and allows checking the result
+
+ If an error occurs, it must be checked using `expect_exception`
+
+ Otherwise, all resulting files must be checked using `expect_file`,
+ except the destination directory itself and parent directories of
+ other files.
+ When checking directories, do so before their contents.
+ """
+ with os_helper.temp_dir(self.outerdir):
+ try:
+ tar.extractall(self.destdir, filter=filter)
+ except Exception as exc:
+ self.raised_exception = exc
+ self.expected_paths = set()
+ else:
+ self.raised_exception = None
+ self.expected_paths = set(self.outerdir.glob('**/*'))
+ self.expected_paths.discard(self.destdir)
+ try:
+ yield
+ finally:
+ tar.close()
+ if self.raised_exception:
+ raise self.raised_exception
+ self.assertEqual(self.expected_paths, set())
+
+ def expect_file(self, name, type=None, symlink_to=None, mode=None):
+ """Check a single file. See check_context."""
+ if self.raised_exception:
+ raise self.raised_exception
+ # use normpath() rather than resolve() so we don't follow symlinks
+ path = pathlib.Path(os.path.normpath(self.destdir / name))
+ self.assertIn(path, self.expected_paths)
+ self.expected_paths.remove(path)
+ if mode is not None and os_helper.can_chmod():
+ got = stat.filemode(stat.S_IMODE(path.stat().st_mode))
+ self.assertEqual(got, mode)
+ if type is None and isinstance(name, str) and name.endswith('/'):
+ type = tarfile.DIRTYPE
+ if symlink_to is not None:
+ got = (self.destdir / name).readlink()
+ expected = pathlib.Path(symlink_to)
+ # The symlink might be the same (textually) as what we expect,
+ # but some systems change the link to an equivalent path, so
+ # we fall back to samefile().
+ if expected != got:
+ self.assertTrue(got.samefile(expected))
+ elif type == tarfile.REGTYPE or type is None:
+ self.assertTrue(path.is_file())
+ elif type == tarfile.DIRTYPE:
+ self.assertTrue(path.is_dir())
+ elif type == tarfile.FIFOTYPE:
+ self.assertTrue(path.is_fifo())
+ else:
+ raise NotImplementedError(type)
+ for parent in path.parents:
+ self.expected_paths.discard(parent)
+
+ def expect_exception(self, exc_type, message_re='.'):
+ with self.assertRaisesRegex(exc_type, message_re):
+ if self.raised_exception is not None:
+ raise self.raised_exception
+ self.raised_exception = None
+
+ def test_benign_file(self):
+ with ArchiveMaker() as arc:
+ arc.add('benign.txt')
+ for filter in 'fully_trusted', 'tar', 'data':
+ with self.check_context(arc.open(), filter):
+ self.expect_file('benign.txt')
+
+ def test_absolute(self):
+ # Test handling a member with an absolute path
+ # Inspired by 'absolute1' in https://github.com/jwilk/traversal-archives
+ with ArchiveMaker() as arc:
+ arc.add(self.outerdir / 'escaped.evil')
+
+ with self.check_context(arc.open(), 'fully_trusted'):
+ self.expect_file('../escaped.evil')
+
+ for filter in 'tar', 'data':
+ with self.check_context(arc.open(), filter):
+ if str(self.outerdir).startswith('/'):
+ # We strip leading slashes, as e.g. GNU tar does
+                    # (without --absolute-names).
+ outerdir_stripped = str(self.outerdir).lstrip('/')
+ self.expect_file(f'{outerdir_stripped}/escaped.evil')
+ else:
+ # On this system, absolute paths don't have leading
+ # slashes.
+ # So, there's nothing to strip. We refuse to unpack
+ # to an absolute path, nonetheless.
+ self.expect_exception(
+ tarfile.AbsolutePathError,
+ """['"].*escaped.evil['"] has an absolute path""")
+
+ @symlink_test
+ def test_parent_symlink(self):
+ # Test interplaying symlinks
+ # Inspired by 'dirsymlink2a' in jwilk/traversal-archives
+ with ArchiveMaker() as arc:
+ arc.add('current', symlink_to='.')
+ arc.add('parent', symlink_to='current/..')
+ arc.add('parent/evil')
+
+ if os_helper.can_symlink():
+ with self.check_context(arc.open(), 'fully_trusted'):
+ if self.raised_exception is not None:
+ # Windows will refuse to create a file that's a symlink to itself
+ # (and tarfile doesn't swallow that exception)
+ self.expect_exception(FileExistsError)
+ # The other cases will fail with this error too.
+ # Skip the rest of this test.
+ return
+ else:
+ self.expect_file('current', symlink_to='.')
+ self.expect_file('parent', symlink_to='current/..')
+ self.expect_file('../evil')
+
+ with self.check_context(arc.open(), 'tar'):
+ self.expect_exception(
+ tarfile.OutsideDestinationError,
+ """'parent/evil' would be extracted to ['"].*evil['"], """
+ + "which is outside the destination")
+
+ with self.check_context(arc.open(), 'data'):
+ self.expect_exception(
+ tarfile.LinkOutsideDestinationError,
+ """'parent' would link to ['"].*outerdir['"], """
+ + "which is outside the destination")
+
+ else:
+ # No symlink support. The symlinks are ignored.
+ with self.check_context(arc.open(), 'fully_trusted'):
+ self.expect_file('parent/evil')
+ with self.check_context(arc.open(), 'tar'):
+ self.expect_file('parent/evil')
+ with self.check_context(arc.open(), 'data'):
+ self.expect_file('parent/evil')
+
+ @symlink_test
+ def test_parent_symlink2(self):
+ # Test interplaying symlinks
+ # Inspired by 'dirsymlink2b' in jwilk/traversal-archives
+ with ArchiveMaker() as arc:
+ arc.add('current', symlink_to='.')
+ arc.add('current/parent', symlink_to='..')
+ arc.add('parent/evil')
+
+ with self.check_context(arc.open(), 'fully_trusted'):
+ if os_helper.can_symlink():
+ self.expect_file('current', symlink_to='.')
+ self.expect_file('parent', symlink_to='..')
+ self.expect_file('../evil')
+ else:
+ self.expect_file('current/')
+ self.expect_file('parent/evil')
+
+ with self.check_context(arc.open(), 'tar'):
+ if os_helper.can_symlink():
+ self.expect_exception(
+ tarfile.OutsideDestinationError,
+ "'parent/evil' would be extracted to "
+ + """['"].*evil['"], which is outside """
+ + "the destination")
+ else:
+ self.expect_file('current/')
+ self.expect_file('parent/evil')
+
+ with self.check_context(arc.open(), 'data'):
+ self.expect_exception(
+ tarfile.LinkOutsideDestinationError,
+ """'current/parent' would link to ['"].*['"], """
+ + "which is outside the destination")
+
+ @symlink_test
+ def test_absolute_symlink(self):
+ # Test symlink to an absolute path
+ # Inspired by 'dirsymlink' in jwilk/traversal-archives
+ with ArchiveMaker() as arc:
+ arc.add('parent', symlink_to=self.outerdir)
+ arc.add('parent/evil')
+
+ with self.check_context(arc.open(), 'fully_trusted'):
+ if os_helper.can_symlink():
+ self.expect_file('parent', symlink_to=self.outerdir)
+ self.expect_file('../evil')
+ else:
+ self.expect_file('parent/evil')
+
+ with self.check_context(arc.open(), 'tar'):
+ if os_helper.can_symlink():
+ self.expect_exception(
+ tarfile.OutsideDestinationError,
+ "'parent/evil' would be extracted to "
+ + """['"].*evil['"], which is outside """
+ + "the destination")
+ else:
+ self.expect_file('parent/evil')
+
+ with self.check_context(arc.open(), 'data'):
+ self.expect_exception(
+ tarfile.AbsoluteLinkError,
+ "'parent' is a symlink to an absolute path")
+
+ @symlink_test
+ def test_sly_relative0(self):
+ # Inspired by 'relative0' in jwilk/traversal-archives
+ with ArchiveMaker() as arc:
+ arc.add('../moo', symlink_to='..//tmp/moo')
+
+ try:
+ with self.check_context(arc.open(), filter='fully_trusted'):
+ if os_helper.can_symlink():
+ if isinstance(self.raised_exception, FileExistsError):
+ # XXX TarFile happens to fail creating a parent
+ # directory.
+ # This might be a bug, but fixing it would hurt
+ # security.
+ # Note that e.g. GNU `tar` rejects '..' components,
+ # so you could argue this is an invalid archive and we
+                        # just raise a bad type of exception.
+ self.expect_exception(FileExistsError)
+ else:
+ self.expect_file('../moo', symlink_to='..//tmp/moo')
+ else:
+ # The symlink can't be extracted and is ignored
+ pass
+ except FileExistsError:
+ pass
+
+ for filter in 'tar', 'data':
+ with self.check_context(arc.open(), filter):
+ self.expect_exception(
+ tarfile.OutsideDestinationError,
+ "'../moo' would be extracted to "
+ + "'.*moo', which is outside "
+ + "the destination")
+
+ @symlink_test
+ def test_sly_relative2(self):
+ # Inspired by 'relative2' in jwilk/traversal-archives
+ with ArchiveMaker() as arc:
+ arc.add('tmp/')
+ arc.add('tmp/../../moo', symlink_to='tmp/../..//tmp/moo')
+
+ with self.check_context(arc.open(), 'fully_trusted'):
+ self.expect_file('tmp', type=tarfile.DIRTYPE)
+ if os_helper.can_symlink():
+ self.expect_file('../moo', symlink_to='tmp/../../tmp/moo')
+
+ for filter in 'tar', 'data':
+ with self.check_context(arc.open(), filter):
+ self.expect_exception(
+ tarfile.OutsideDestinationError,
+ "'tmp/../../moo' would be extracted to "
+ + """['"].*moo['"], which is outside the """
+ + "destination")
+
+ def test_modes(self):
+ # Test how file modes are extracted
+ # (Note that the modes are ignored on platforms without working chmod)
+ with ArchiveMaker() as arc:
+ arc.add('all_bits', mode='?rwsrwsrwt')
+ arc.add('perm_bits', mode='?rwxrwxrwx')
+ arc.add('exec_group_other', mode='?rw-rwxrwx')
+ arc.add('read_group_only', mode='?---r-----')
+ arc.add('no_bits', mode='?---------')
+ arc.add('dir/', mode='?---rwsrwt')
+
+ # On some systems, setting the sticky bit is a no-op.
+ # Check if that's the case.
+ tmp_filename = os.path.join(TEMPDIR, "tmp.file")
+ with open(tmp_filename, 'w'):
+ pass
+ os.chmod(tmp_filename, os.stat(tmp_filename).st_mode | stat.S_ISVTX)
+ have_sticky_files = (os.stat(tmp_filename).st_mode & stat.S_ISVTX)
+ os.unlink(tmp_filename)
+
+ os.mkdir(tmp_filename)
+ os.chmod(tmp_filename, os.stat(tmp_filename).st_mode | stat.S_ISVTX)
+ have_sticky_dirs = (os.stat(tmp_filename).st_mode & stat.S_ISVTX)
+ os.rmdir(tmp_filename)
+
+ with self.check_context(arc.open(), 'fully_trusted'):
+ if have_sticky_files:
+ self.expect_file('all_bits', mode='?rwsrwsrwt')
+ else:
+ self.expect_file('all_bits', mode='?rwsrwsrwx')
+ self.expect_file('perm_bits', mode='?rwxrwxrwx')
+ self.expect_file('exec_group_other', mode='?rw-rwxrwx')
+ self.expect_file('read_group_only', mode='?---r-----')
+ self.expect_file('no_bits', mode='?---------')
+ if have_sticky_dirs:
+ self.expect_file('dir/', mode='?---rwsrwt')
+ else:
+ self.expect_file('dir/', mode='?---rwsrwx')
+
+ with self.check_context(arc.open(), 'tar'):
+ self.expect_file('all_bits', mode='?rwxr-xr-x')
+ self.expect_file('perm_bits', mode='?rwxr-xr-x')
+ self.expect_file('exec_group_other', mode='?rw-r-xr-x')
+ self.expect_file('read_group_only', mode='?---r-----')
+ self.expect_file('no_bits', mode='?---------')
+ self.expect_file('dir/', mode='?---r-xr-x')
+
+ with self.check_context(arc.open(), 'data'):
+ normal_dir_mode = stat.filemode(stat.S_IMODE(
+ self.outerdir.stat().st_mode))
+ self.expect_file('all_bits', mode='?rwxr-xr-x')
+ self.expect_file('perm_bits', mode='?rwxr-xr-x')
+ self.expect_file('exec_group_other', mode='?rw-r--r--')
+ self.expect_file('read_group_only', mode='?rw-r-----')
+ self.expect_file('no_bits', mode='?rw-------')
+ self.expect_file('dir/', mode=normal_dir_mode)
+
+ def test_pipe(self):
+ # Test handling of a special file
+ with ArchiveMaker() as arc:
+ arc.add('foo', type=tarfile.FIFOTYPE)
+
+ for filter in 'fully_trusted', 'tar':
+ with self.check_context(arc.open(), filter):
+ if hasattr(os, 'mkfifo'):
+ self.expect_file('foo', type=tarfile.FIFOTYPE)
+ else:
+ # The pipe can't be extracted and is skipped.
+ pass
+
+ with self.check_context(arc.open(), 'data'):
+ self.expect_exception(
+ tarfile.SpecialFileError,
+ "'foo' is a special file")
+
+ def test_special_files(self):
+ # Creating device files is tricky. Instead of attempting that let's
+ # only check the filter result.
+ for special_type in tarfile.FIFOTYPE, tarfile.CHRTYPE, tarfile.BLKTYPE:
+ tarinfo = tarfile.TarInfo('foo')
+ tarinfo.type = special_type
+ trusted = tarfile.fully_trusted_filter(tarinfo, '')
+ self.assertIs(trusted, tarinfo)
+ tar = tarfile.tar_filter(tarinfo, '')
+ self.assertEqual(tar.type, special_type)
+ with self.assertRaises(tarfile.SpecialFileError) as cm:
+ tarfile.data_filter(tarinfo, '')
+ self.assertIsInstance(cm.exception.tarinfo, tarfile.TarInfo)
+ self.assertEqual(cm.exception.tarinfo.name, 'foo')
+
+ def test_fully_trusted_filter(self):
+ # The 'fully_trusted' filter returns the original TarInfo objects.
+ with tarfile.TarFile.open(tarname) as tar:
+ for tarinfo in tar.getmembers():
+ filtered = tarfile.fully_trusted_filter(tarinfo, '')
+ self.assertIs(filtered, tarinfo)
+
+ def test_tar_filter(self):
+ # The 'tar' filter returns TarInfo objects with the same name/type.
+ # (It can also fail for particularly "evil" input, but we don't have
+ # that in the test archive.)
+ with tarfile.TarFile.open(tarname) as tar:
+ for tarinfo in tar.getmembers():
+ filtered = tarfile.tar_filter(tarinfo, '')
+ self.assertIs(filtered.name, tarinfo.name)
+ self.assertIs(filtered.type, tarinfo.type)
+
+ def test_data_filter(self):
+ # The 'data' filter either raises, or returns TarInfo with the same
+ # name/type.
+ with tarfile.TarFile.open(tarname) as tar:
+ for tarinfo in tar.getmembers():
+ try:
+ filtered = tarfile.data_filter(tarinfo, '')
+ except tarfile.FilterError:
+ continue
+ self.assertIs(filtered.name, tarinfo.name)
+ self.assertIs(filtered.type, tarinfo.type)
+
+ def test_default_filter_warns(self):
+ """Ensure the default filter warns"""
+ with ArchiveMaker() as arc:
+ arc.add('foo')
+ with warnings_helper.check_warnings(
+ ('Python 3.14', DeprecationWarning)):
+ with self.check_context(arc.open(), None):
+ self.expect_file('foo')
+
+ def test_change_default_filter_on_instance(self):
+ tar = tarfile.TarFile(tarname, 'r')
+ def strict_filter(tarinfo, path):
+ if tarinfo.name == 'ustar/regtype':
+ return tarinfo
+ else:
+ return None
+ tar.extraction_filter = strict_filter
+ with self.check_context(tar, None):
+ self.expect_file('ustar/regtype')
+
+ def test_change_default_filter_on_class(self):
+ def strict_filter(tarinfo, path):
+ if tarinfo.name == 'ustar/regtype':
+ return tarinfo
+ else:
+ return None
+ tar = tarfile.TarFile(tarname, 'r')
+ with support.swap_attr(tarfile.TarFile, 'extraction_filter',
+ staticmethod(strict_filter)):
+ with self.check_context(tar, None):
+ self.expect_file('ustar/regtype')
+
+ def test_change_default_filter_on_subclass(self):
+ class TarSubclass(tarfile.TarFile):
+ def extraction_filter(self, tarinfo, path):
+ if tarinfo.name == 'ustar/regtype':
+ return tarinfo
+ else:
+ return None
+
+ tar = TarSubclass(tarname, 'r')
+ with self.check_context(tar, None):
+ self.expect_file('ustar/regtype')
+
+ def test_change_default_filter_to_string(self):
+ tar = tarfile.TarFile(tarname, 'r')
+ tar.extraction_filter = 'data'
+ with self.check_context(tar, None):
+ self.expect_exception(TypeError)
+
+ def test_custom_filter(self):
+ def custom_filter(tarinfo, path):
+ self.assertIs(path, self.destdir)
+ if tarinfo.name == 'move_this':
+ return tarinfo.replace(name='moved')
+ if tarinfo.name == 'ignore_this':
+ return None
+ return tarinfo
+
+ with ArchiveMaker() as arc:
+ arc.add('move_this')
+ arc.add('ignore_this')
+ arc.add('keep')
+ with self.check_context(arc.open(), custom_filter):
+ self.expect_file('moved')
+ self.expect_file('keep')
+
+ def test_bad_filter_name(self):
+ with ArchiveMaker() as arc:
+ arc.add('foo')
+ with self.check_context(arc.open(), 'bad filter name'):
+ self.expect_exception(ValueError)
+
+ def test_stateful_filter(self):
+ # Stateful filters should be possible.
+ # (This doesn't really test tarfile. Rather, it demonstrates
+ # that third parties can implement a stateful filter.)
+ class StatefulFilter:
+ def __enter__(self):
+ self.num_files_processed = 0
+ return self
+
+ def __call__(self, tarinfo, path):
+ try:
+ tarinfo = tarfile.data_filter(tarinfo, path)
+ except tarfile.FilterError:
+ return None
+ self.num_files_processed += 1
+ return tarinfo
+
+ def __exit__(self, *exc_info):
+ self.done = True
+
+ with ArchiveMaker() as arc:
+ arc.add('good')
+ arc.add('bad', symlink_to='/')
+ arc.add('good')
+ with StatefulFilter() as custom_filter:
+ with self.check_context(arc.open(), custom_filter):
+ self.expect_file('good')
+ self.assertEqual(custom_filter.num_files_processed, 2)
+ self.assertEqual(custom_filter.done, True)
+
+ def test_errorlevel(self):
+ def extracterror_filter(tarinfo, path):
+ raise tarfile.ExtractError('failed with ExtractError')
+ def filtererror_filter(tarinfo, path):
+ raise tarfile.FilterError('failed with FilterError')
+ def oserror_filter(tarinfo, path):
+ raise OSError('failed with OSError')
+ def tarerror_filter(tarinfo, path):
+ raise tarfile.TarError('failed with base TarError')
+ def valueerror_filter(tarinfo, path):
+ raise ValueError('failed with ValueError')
+
+ with ArchiveMaker() as arc:
+ arc.add('file')
+
+ # If errorlevel is 0, errors affected by errorlevel are ignored
+
+ with self.check_context(arc.open(errorlevel=0), extracterror_filter):
+ self.expect_file('file')
+
+ with self.check_context(arc.open(errorlevel=0), filtererror_filter):
+ self.expect_file('file')
+
+ with self.check_context(arc.open(errorlevel=0), oserror_filter):
+ self.expect_file('file')
+
+ with self.check_context(arc.open(errorlevel=0), tarerror_filter):
+ self.expect_exception(tarfile.TarError)
+
+ with self.check_context(arc.open(errorlevel=0), valueerror_filter):
+ self.expect_exception(ValueError)
+
+ # If 1, all fatal errors are raised
+
+ with self.check_context(arc.open(errorlevel=1), extracterror_filter):
+ self.expect_file('file')
+
+ with self.check_context(arc.open(errorlevel=1), filtererror_filter):
+ self.expect_exception(tarfile.FilterError)
+
+ with self.check_context(arc.open(errorlevel=1), oserror_filter):
+ self.expect_exception(OSError)
+
+ with self.check_context(arc.open(errorlevel=1), tarerror_filter):
+ self.expect_exception(tarfile.TarError)
+
+ with self.check_context(arc.open(errorlevel=1), valueerror_filter):
+ self.expect_exception(ValueError)
+
+ # If 2, all non-fatal errors are raised as well.
+
+ with self.check_context(arc.open(errorlevel=2), extracterror_filter):
+ self.expect_exception(tarfile.ExtractError)
+
+ with self.check_context(arc.open(errorlevel=2), filtererror_filter):
+ self.expect_exception(tarfile.FilterError)
+
+ with self.check_context(arc.open(errorlevel=2), oserror_filter):
+ self.expect_exception(OSError)
+
+ with self.check_context(arc.open(errorlevel=2), tarerror_filter):
+ self.expect_exception(tarfile.TarError)
+
+ with self.check_context(arc.open(errorlevel=2), valueerror_filter):
+ self.expect_exception(ValueError)
+
+        # We only handle ExtractError, FilterError & OSError specially.
+
+ with self.check_context(arc.open(errorlevel='boo!'), filtererror_filter):
+ self.expect_exception(TypeError) # errorlevel is not int
+
+
def setUpModule():
os_helper.unlink(TEMPDIR)
os.makedirs(TEMPDIR)
diff --git a/Lib/test/test_tempfile.py b/Lib/test/test_tempfile.py
index 11a43aca17e88a..db08fb1c7f2a42 100644
--- a/Lib/test/test_tempfile.py
+++ b/Lib/test/test_tempfile.py
@@ -850,6 +850,15 @@ def test_for_tempdir_is_bytes_issue40701_api_warts(self):
finally:
tempfile.tempdir = orig_tempdir
+ def test_path_is_absolute(self):
+ # Test that the path returned by mkdtemp with a relative `dir`
+ # argument is absolute
+ try:
+ path = tempfile.mkdtemp(dir=".")
+ self.assertTrue(os.path.isabs(path))
+ finally:
+ os.rmdir(path)
+
class TestMktemp(BaseTestCase):
"""Test mktemp()."""
diff --git a/Lib/test/test_threading.py b/Lib/test/test_threading.py
index a39a267b403d83..fdd74c37e26235 100644
--- a/Lib/test/test_threading.py
+++ b/Lib/test/test_threading.py
@@ -1343,6 +1343,7 @@ def func():
import test.support
test.support.run_in_subinterp_with_config(
{subinterp_code!r},
+ use_main_obmalloc=True,
allow_fork=True,
allow_exec=True,
allow_threads={allowed},
diff --git a/Lib/test/test_tkinter/test_widgets.py b/Lib/test/test_tkinter/test_widgets.py
index 64c9472706549b..ba4ef49078c5a7 100644
--- a/Lib/test/test_tkinter/test_widgets.py
+++ b/Lib/test/test_tkinter/test_widgets.py
@@ -1377,6 +1377,11 @@ class MenuTest(AbstractWidgetTest, unittest.TestCase):
def create(self, **kwargs):
return tkinter.Menu(self.root, **kwargs)
+ def test_indexcommand_none(self):
+ widget = self.create()
+ i = widget.index('none')
+ self.assertIsNone(i)
+
def test_configure_postcommand(self):
widget = self.create()
self.checkCommandParam(widget, 'postcommand')
diff --git a/Lib/test/test_tokenize.py b/Lib/test/test_tokenize.py
index 63c2501cfe2338..283a7c23609e67 100644
--- a/Lib/test/test_tokenize.py
+++ b/Lib/test/test_tokenize.py
@@ -1625,6 +1625,10 @@ def test_random_files(self):
# 7 more testfiles fail. Remove them also until the failure is diagnosed.
testfiles.remove(os.path.join(tempdir, "test_unicode_identifiers.py"))
+
+ # TODO: Remove this once we can unparse PEP 701 syntax
+ testfiles.remove(os.path.join(tempdir, "test_fstring.py"))
+
for f in ('buffer', 'builtin', 'fileio', 'inspect', 'os', 'platform', 'sys'):
testfiles.remove(os.path.join(tempdir, "test_%s.py") % f)
@@ -1937,25 +1941,39 @@ def test_string(self):
""")
self.check_tokenize('f"abc"', """\
- STRING 'f"abc"' (1, 0) (1, 6)
+ FSTRING_START 'f"' (1, 0) (1, 2)
+ FSTRING_MIDDLE 'abc' (1, 2) (1, 5)
+ FSTRING_END '"' (1, 5) (1, 6)
""")
self.check_tokenize('fR"a{b}c"', """\
- STRING 'fR"a{b}c"' (1, 0) (1, 9)
+ FSTRING_START 'fR"' (1, 0) (1, 3)
+ FSTRING_MIDDLE 'a' (1, 3) (1, 4)
+ LBRACE '{' (1, 4) (1, 5)
+ NAME 'b' (1, 5) (1, 6)
+ RBRACE '}' (1, 6) (1, 7)
+ FSTRING_MIDDLE 'c' (1, 7) (1, 8)
+ FSTRING_END '"' (1, 8) (1, 9)
""")
self.check_tokenize('f"""abc"""', """\
- STRING 'f\"\"\"abc\"\"\"' (1, 0) (1, 10)
+ FSTRING_START 'f\"""' (1, 0) (1, 4)
+ FSTRING_MIDDLE 'abc' (1, 4) (1, 7)
+ FSTRING_END '\"""' (1, 7) (1, 10)
""")
self.check_tokenize(r'f"abc\
def"', """\
- STRING 'f"abc\\\\\\ndef"' (1, 0) (2, 4)
+ FSTRING_START \'f"\' (1, 0) (1, 2)
+ FSTRING_MIDDLE 'abc\\\\\\ndef' (1, 2) (2, 3)
+ FSTRING_END '"' (2, 3) (2, 4)
""")
self.check_tokenize(r'Rf"abc\
def"', """\
- STRING 'Rf"abc\\\\\\ndef"' (1, 0) (2, 4)
+ FSTRING_START 'Rf"' (1, 0) (1, 3)
+ FSTRING_MIDDLE 'abc\\\\\\ndef' (1, 3) (2, 3)
+ FSTRING_END '"' (2, 3) (2, 4)
""")
def test_function(self):
diff --git a/Lib/test/test_tools/test_sundry.py b/Lib/test/test_tools/test_sundry.py
index 6a3dc12781b2b6..3177fafb84a65b 100644
--- a/Lib/test/test_tools/test_sundry.py
+++ b/Lib/test/test_tools/test_sundry.py
@@ -1,4 +1,4 @@
-"""Tests for scripts in the Tools directory.
+"""Tests for scripts in the Tools/scripts directory.
This file contains extremely basic regression tests for the scripts found in
the Tools directory of a Python checkout or tarball which don't have separate
@@ -17,14 +17,7 @@ class TestSundryScripts(unittest.TestCase):
# At least make sure the rest don't have syntax errors. When tests are
# added for a script it should be added to the allowlist below.
- # scripts that have independent tests.
- allowlist = ['reindent']
- # scripts that can't be imported without running
- denylist = ['make_ctype']
- # denylisted for other reasons
- other = ['2to3']
-
- skiplist = denylist + allowlist + other
+ skiplist = ['2to3']
# import logging registers "atfork" functions which keep indirectly the
# logging module dictionary alive. Mock the function to be able to unload
diff --git a/Lib/test/test_traceback.py b/Lib/test/test_traceback.py
index a6172ff05eed47..5e2b353782994e 100644
--- a/Lib/test/test_traceback.py
+++ b/Lib/test/test_traceback.py
@@ -802,12 +802,12 @@ def f():
)()
actual = self.get_exception(f)
expected = [
- f"Traceback (most recent call last):",
+ "Traceback (most recent call last):",
f" File \"{__file__}\", line {self.callable_line}, in get_exception",
- f" callable()",
+ " callable()",
f" File \"{__file__}\", line {f.__code__.co_firstlineno + 2}, in f",
- f" .method",
- f" ^^^^^^",
+ " .method",
+ " ^^^^^^",
]
self.assertEqual(actual, expected)
@@ -818,11 +818,11 @@ def f():
)()
actual = self.get_exception(f)
expected = [
- f"Traceback (most recent call last):",
+ "Traceback (most recent call last):",
f" File \"{__file__}\", line {self.callable_line}, in get_exception",
- f" callable()",
+ " callable()",
f" File \"{__file__}\", line {f.__code__.co_firstlineno + 2}, in f",
- f" method",
+ " method",
]
self.assertEqual(actual, expected)
@@ -833,12 +833,12 @@ def f():
)()
actual = self.get_exception(f)
expected = [
- f"Traceback (most recent call last):",
+ "Traceback (most recent call last):",
f" File \"{__file__}\", line {self.callable_line}, in get_exception",
- f" callable()",
+ " callable()",
f" File \"{__file__}\", line {f.__code__.co_firstlineno + 2}, in f",
- f" . method",
- f" ^^^^^^",
+ " . method",
+ " ^^^^^^",
]
self.assertEqual(actual, expected)
@@ -848,11 +848,11 @@ def f():
actual = self.get_exception(f)
expected = [
- f"Traceback (most recent call last):",
+ "Traceback (most recent call last):",
f" File \"{__file__}\", line {self.callable_line}, in get_exception",
- f" callable()",
+ " callable()",
f" File \"{__file__}\", line {f.__code__.co_firstlineno + 1}, in f",
- f" width",
+ " width",
]
self.assertEqual(actual, expected)
@@ -864,11 +864,11 @@ def f():
actual = self.get_exception(f)
expected = [
- f"Traceback (most recent call last):",
+ "Traceback (most recent call last):",
f" File \"{__file__}\", line {self.callable_line}, in get_exception",
- f" callable()",
+ " callable()",
f" File \"{__file__}\", line {f.__code__.co_firstlineno + 2}, in f",
- f" raise ValueError(width)",
+ " raise ValueError(width)",
]
self.assertEqual(actual, expected)
@@ -882,12 +882,12 @@ def f():
actual = self.get_exception(f)
expected = [
- f"Traceback (most recent call last):",
+ "Traceback (most recent call last):",
f" File \"{__file__}\", line {self.callable_line}, in get_exception",
- f" callable()",
+ " callable()",
f" File \"{__file__}\", line {f.__code__.co_firstlineno + 4}, in f",
- f" print(1, www(",
- f" ^^^^",
+ " print(1, www(",
+ " ^^^^",
]
self.assertEqual(actual, expected)
@@ -2844,26 +2844,26 @@ def test_max_group_width(self):
formatted = ''.join(teg.format()).split('\n')
expected = [
- f' | ExceptionGroup: eg (2 sub-exceptions)',
- f' +-+---------------- 1 ----------------',
- f' | ExceptionGroup: eg1 (3 sub-exceptions)',
- f' +-+---------------- 1 ----------------',
- f' | ValueError: 0',
- f' +---------------- 2 ----------------',
- f' | ValueError: 1',
- f' +---------------- ... ----------------',
- f' | and 1 more exception',
- f' +------------------------------------',
- f' +---------------- 2 ----------------',
- f' | ExceptionGroup: eg2 (10 sub-exceptions)',
- f' +-+---------------- 1 ----------------',
- f' | TypeError: 0',
- f' +---------------- 2 ----------------',
- f' | TypeError: 1',
- f' +---------------- ... ----------------',
- f' | and 8 more exceptions',
- f' +------------------------------------',
- f'']
+ ' | ExceptionGroup: eg (2 sub-exceptions)',
+ ' +-+---------------- 1 ----------------',
+ ' | ExceptionGroup: eg1 (3 sub-exceptions)',
+ ' +-+---------------- 1 ----------------',
+ ' | ValueError: 0',
+ ' +---------------- 2 ----------------',
+ ' | ValueError: 1',
+ ' +---------------- ... ----------------',
+ ' | and 1 more exception',
+ ' +------------------------------------',
+ ' +---------------- 2 ----------------',
+ ' | ExceptionGroup: eg2 (10 sub-exceptions)',
+ ' +-+---------------- 1 ----------------',
+ ' | TypeError: 0',
+ ' +---------------- 2 ----------------',
+ ' | TypeError: 1',
+ ' +---------------- ... ----------------',
+ ' | and 8 more exceptions',
+ ' +------------------------------------',
+ '']
self.assertEqual(formatted, expected)
@@ -2876,22 +2876,22 @@ def test_max_group_depth(self):
formatted = ''.join(teg.format()).split('\n')
expected = [
- f' | ExceptionGroup: exc (3 sub-exceptions)',
- f' +-+---------------- 1 ----------------',
- f' | ValueError: -2',
- f' +---------------- 2 ----------------',
- f' | ExceptionGroup: exc (3 sub-exceptions)',
- f' +-+---------------- 1 ----------------',
- f' | ValueError: -1',
- f' +---------------- 2 ----------------',
- f' | ... (max_group_depth is 2)',
- f' +---------------- 3 ----------------',
- f' | ValueError: 1',
- f' +------------------------------------',
- f' +---------------- 3 ----------------',
- f' | ValueError: 2',
- f' +------------------------------------',
- f'']
+ ' | ExceptionGroup: exc (3 sub-exceptions)',
+ ' +-+---------------- 1 ----------------',
+ ' | ValueError: -2',
+ ' +---------------- 2 ----------------',
+ ' | ExceptionGroup: exc (3 sub-exceptions)',
+ ' +-+---------------- 1 ----------------',
+ ' | ValueError: -1',
+ ' +---------------- 2 ----------------',
+ ' | ... (max_group_depth is 2)',
+ ' +---------------- 3 ----------------',
+ ' | ValueError: 1',
+ ' +------------------------------------',
+ ' +---------------- 3 ----------------',
+ ' | ValueError: 2',
+ ' +------------------------------------',
+ '']
self.assertEqual(formatted, expected)
diff --git a/Lib/test/test_type_cache.py b/Lib/test/test_type_cache.py
index 8502f6b0584b00..24f83cd3e172c7 100644
--- a/Lib/test/test_type_cache.py
+++ b/Lib/test/test_type_cache.py
@@ -9,6 +9,7 @@
# Skip this test if the _testcapi module isn't available.
type_get_version = import_helper.import_module('_testcapi').type_get_version
+type_assign_version = import_helper.import_module('_testcapi').type_assign_version
@support.cpython_only
@@ -42,6 +43,19 @@ def test_tp_version_tag_unique(self):
self.assertEqual(len(set(all_version_tags)), 30,
msg=f"{all_version_tags} contains non-unique versions")
+ def test_type_assign_version(self):
+ class C:
+ x = 5
+
+ self.assertEqual(type_assign_version(C), 1)
+ c_ver = type_get_version(C)
+
+ C.x = 6
+ self.assertEqual(type_get_version(C), 0)
+ self.assertEqual(type_assign_version(C), 1)
+ self.assertNotEqual(type_get_version(C), 0)
+ self.assertNotEqual(type_get_version(C), c_ver)
+
if __name__ == "__main__":
support.run_unittest(TypeCacheTests)
diff --git a/Lib/test/test_type_comments.py b/Lib/test/test_type_comments.py
index 8db7394d1512aa..aba4a44be9da96 100644
--- a/Lib/test/test_type_comments.py
+++ b/Lib/test/test_type_comments.py
@@ -272,7 +272,7 @@ def test_matmul(self):
pass
def test_fstring(self):
- for tree in self.parse_all(fstring, minver=6):
+ for tree in self.parse_all(fstring):
pass
def test_underscorednumber(self):
diff --git a/Lib/test/test_types.py b/Lib/test/test_types.py
index af095632a36fcb..89548100da62d7 100644
--- a/Lib/test/test_types.py
+++ b/Lib/test/test_types.py
@@ -925,6 +925,35 @@ def test_or_type_operator_with_SpecialForm(self):
assert typing.Optional[int] | str == typing.Union[int, str, None]
assert typing.Union[int, bool] | str == typing.Union[int, bool, str]
+ def test_or_type_operator_with_Literal(self):
+ Literal = typing.Literal
+ self.assertEqual((Literal[1] | Literal[2]).__args__,
+ (Literal[1], Literal[2]))
+
+ self.assertEqual((Literal[0] | Literal[False]).__args__,
+ (Literal[0], Literal[False]))
+ self.assertEqual((Literal[1] | Literal[True]).__args__,
+ (Literal[1], Literal[True]))
+
+ self.assertEqual(Literal[1] | Literal[1], Literal[1])
+ self.assertEqual(Literal['a'] | Literal['a'], Literal['a'])
+
+ import enum
+ class Ints(enum.IntEnum):
+ A = 0
+ B = 1
+
+ self.assertEqual(Literal[Ints.A] | Literal[Ints.A], Literal[Ints.A])
+ self.assertEqual(Literal[Ints.B] | Literal[Ints.B], Literal[Ints.B])
+
+ self.assertEqual((Literal[Ints.B] | Literal[Ints.A]).__args__,
+ (Literal[Ints.B], Literal[Ints.A]))
+
+ self.assertEqual((Literal[0] | Literal[Ints.A]).__args__,
+ (Literal[0], Literal[Ints.A]))
+ self.assertEqual((Literal[1] | Literal[Ints.B]).__args__,
+ (Literal[1], Literal[Ints.B]))
+
def test_or_type_repr(self):
assert repr(int | str) == "int | str"
assert repr((int | str) | list) == "int | str | list"
@@ -1360,6 +1389,67 @@ class C: pass
D = types.new_class('D', (A(), C, B()), {})
self.assertEqual(D.__bases__, (A1, A2, A3, C, B1, B2))
+ def test_get_original_bases(self):
+ T = typing.TypeVar('T')
+ class A: pass
+ class B(typing.Generic[T]): pass
+ class C(B[int]): pass
+ class D(B[str], float): pass
+ self.assertEqual(types.get_original_bases(A), (object,))
+ self.assertEqual(types.get_original_bases(B), (typing.Generic[T],))
+ self.assertEqual(types.get_original_bases(C), (B[int],))
+ self.assertEqual(types.get_original_bases(int), (object,))
+ self.assertEqual(types.get_original_bases(D), (B[str], float))
+
+ class E(list[T]): pass
+ class F(list[int]): pass
+
+ self.assertEqual(types.get_original_bases(E), (list[T],))
+ self.assertEqual(types.get_original_bases(F), (list[int],))
+
+ class ClassBasedNamedTuple(typing.NamedTuple):
+ x: int
+
+ class GenericNamedTuple(typing.NamedTuple, typing.Generic[T]):
+ x: T
+
+ CallBasedNamedTuple = typing.NamedTuple("CallBasedNamedTuple", [("x", int)])
+
+ self.assertIs(
+ types.get_original_bases(ClassBasedNamedTuple)[0], typing.NamedTuple
+ )
+ self.assertEqual(
+ types.get_original_bases(GenericNamedTuple),
+ (typing.NamedTuple, typing.Generic[T])
+ )
+ self.assertIs(
+ types.get_original_bases(CallBasedNamedTuple)[0], typing.NamedTuple
+ )
+
+ class ClassBasedTypedDict(typing.TypedDict):
+ x: int
+
+ class GenericTypedDict(typing.TypedDict, typing.Generic[T]):
+ x: T
+
+ CallBasedTypedDict = typing.TypedDict("CallBasedTypedDict", {"x": int})
+
+ self.assertIs(
+ types.get_original_bases(ClassBasedTypedDict)[0],
+ typing.TypedDict
+ )
+ self.assertEqual(
+ types.get_original_bases(GenericTypedDict),
+ (typing.TypedDict, typing.Generic[T])
+ )
+ self.assertIs(
+ types.get_original_bases(CallBasedTypedDict)[0],
+ typing.TypedDict
+ )
+
+ with self.assertRaisesRegex(TypeError, "Expected an instance of type"):
+ types.get_original_bases(object())
+
# Many of the following tests are derived from test_descr.py
def test_prepare_class(self):
# Basic test of metaclass derivation
diff --git a/Lib/test/test_typing.py b/Lib/test/test_typing.py
index f983efe956f902..f36bb958c88ef9 100644
--- a/Lib/test/test_typing.py
+++ b/Lib/test/test_typing.py
@@ -1789,6 +1789,35 @@ def Elem(*args):
Union[Elem, str] # Nor should this
+ def test_union_of_literals(self):
+ self.assertEqual(Union[Literal[1], Literal[2]].__args__,
+ (Literal[1], Literal[2]))
+ self.assertEqual(Union[Literal[1], Literal[1]],
+ Literal[1])
+
+ self.assertEqual(Union[Literal[False], Literal[0]].__args__,
+ (Literal[False], Literal[0]))
+ self.assertEqual(Union[Literal[True], Literal[1]].__args__,
+ (Literal[True], Literal[1]))
+
+ import enum
+ class Ints(enum.IntEnum):
+ A = 0
+ B = 1
+
+ self.assertEqual(Union[Literal[Ints.A], Literal[Ints.A]],
+ Literal[Ints.A])
+ self.assertEqual(Union[Literal[Ints.B], Literal[Ints.B]],
+ Literal[Ints.B])
+
+ self.assertEqual(Union[Literal[Ints.A], Literal[Ints.B]].__args__,
+ (Literal[Ints.A], Literal[Ints.B]))
+
+ self.assertEqual(Union[Literal[0], Literal[Ints.A], Literal[False]].__args__,
+ (Literal[0], Literal[Ints.A], Literal[False]))
+ self.assertEqual(Union[Literal[1], Literal[Ints.B], Literal[True]].__args__,
+ (Literal[1], Literal[Ints.B], Literal[True]))
+
class TupleTests(BaseTestCase):
@@ -2156,6 +2185,13 @@ def test_basics(self):
Literal[Literal[1, 2], Literal[4, 5]]
Literal[b"foo", u"bar"]
+ def test_enum(self):
+ import enum
+ class My(enum.Enum):
+ A = 'A'
+
+ self.assertEqual(Literal[My.A].__args__, (My.A,))
+
def test_illegal_parameters_do_not_raise_runtime_errors(self):
# Type checkers should reject these types, but we do not
# raise errors at runtime to maintain maximum flexibility.
@@ -2245,6 +2281,20 @@ def test_flatten(self):
self.assertEqual(l, Literal[1, 2, 3])
self.assertEqual(l.__args__, (1, 2, 3))
+ def test_does_not_flatten_enum(self):
+ import enum
+ class Ints(enum.IntEnum):
+ A = 1
+ B = 2
+
+ l = Literal[
+ Literal[Ints.A],
+ Literal[Ints.B],
+ Literal[1],
+ Literal[2],
+ ]
+ self.assertEqual(l.__args__, (Ints.A, Ints.B, 1, 2))
+
XK = TypeVar('XK', str, bytes)
XV = TypeVar('XV')
@@ -6695,6 +6745,22 @@ def test_copy_and_pickle(self):
self.assertEqual(jane2, jane)
self.assertIsInstance(jane2, cls)
+ def test_orig_bases(self):
+ T = TypeVar('T')
+
+ class SimpleNamedTuple(NamedTuple):
+ pass
+
+ class GenericNamedTuple(NamedTuple, Generic[T]):
+ pass
+
+ self.assertEqual(SimpleNamedTuple.__orig_bases__, (NamedTuple,))
+ self.assertEqual(GenericNamedTuple.__orig_bases__, (NamedTuple, Generic[T]))
+
+ CallNamedTuple = NamedTuple('CallNamedTuple', [])
+
+ self.assertEqual(CallNamedTuple.__orig_bases__, (NamedTuple,))
+
class TypedDictTests(BaseTestCase):
def test_basics_functional_syntax(self):
@@ -7126,6 +7192,49 @@ class TD(TypedDict):
self.assertIs(type(a), dict)
self.assertEqual(a, {'a': 1})
+ def test_orig_bases(self):
+ T = TypeVar('T')
+
+ class Parent(TypedDict):
+ pass
+
+ class Child(Parent):
+ pass
+
+ class OtherChild(Parent):
+ pass
+
+ class MixedChild(Child, OtherChild, Parent):
+ pass
+
+ class GenericParent(TypedDict, Generic[T]):
+ pass
+
+ class GenericChild(GenericParent[int]):
+ pass
+
+ class OtherGenericChild(GenericParent[str]):
+ pass
+
+ class MixedGenericChild(GenericChild, OtherGenericChild, GenericParent[float]):
+ pass
+
+ class MultipleGenericBases(GenericParent[int], GenericParent[float]):
+ pass
+
+ CallTypedDict = TypedDict('CallTypedDict', {})
+
+ self.assertEqual(Parent.__orig_bases__, (TypedDict,))
+ self.assertEqual(Child.__orig_bases__, (Parent,))
+ self.assertEqual(OtherChild.__orig_bases__, (Parent,))
+ self.assertEqual(MixedChild.__orig_bases__, (Child, OtherChild, Parent,))
+ self.assertEqual(GenericParent.__orig_bases__, (TypedDict, Generic[T]))
+ self.assertEqual(GenericChild.__orig_bases__, (GenericParent[int],))
+ self.assertEqual(OtherGenericChild.__orig_bases__, (GenericParent[str],))
+ self.assertEqual(MixedGenericChild.__orig_bases__, (GenericChild, OtherGenericChild, GenericParent[float]))
+ self.assertEqual(MultipleGenericBases.__orig_bases__, (GenericParent[int], GenericParent[float]))
+ self.assertEqual(CallTypedDict.__orig_bases__, (TypedDict,))
+
class RequiredTests(BaseTestCase):
diff --git a/Lib/test/test_unittest/test_program.py b/Lib/test/test_unittest/test_program.py
index f138f6836514e0..f6d52f93e4a25f 100644
--- a/Lib/test/test_unittest/test_program.py
+++ b/Lib/test/test_unittest/test_program.py
@@ -71,15 +71,22 @@ def testExpectedFailure(self):
def testUnexpectedSuccess(self):
pass
- class FooBarLoader(unittest.TestLoader):
- """Test loader that returns a suite containing FooBar."""
+ class Empty(unittest.TestCase):
+ pass
+
+ class TestLoader(unittest.TestLoader):
+ """Test loader that returns a suite containing the supplied testcase."""
+
+ def __init__(self, testcase):
+ self.testcase = testcase
+
def loadTestsFromModule(self, module):
return self.suiteClass(
- [self.loadTestsFromTestCase(Test_TestProgram.FooBar)])
+ [self.loadTestsFromTestCase(self.testcase)])
def loadTestsFromNames(self, names, module):
return self.suiteClass(
- [self.loadTestsFromTestCase(Test_TestProgram.FooBar)])
+ [self.loadTestsFromTestCase(self.testcase)])
def test_defaultTest_with_string(self):
class FakeRunner(object):
@@ -92,7 +99,7 @@ def run(self, test):
runner = FakeRunner()
program = unittest.TestProgram(testRunner=runner, exit=False,
defaultTest='test.test_unittest',
- testLoader=self.FooBarLoader())
+ testLoader=self.TestLoader(self.FooBar))
sys.argv = old_argv
self.assertEqual(('test.test_unittest',), program.testNames)
@@ -108,7 +115,7 @@ def run(self, test):
program = unittest.TestProgram(
testRunner=runner, exit=False,
defaultTest=['test.test_unittest', 'test.test_unittest2'],
- testLoader=self.FooBarLoader())
+ testLoader=self.TestLoader(self.FooBar))
sys.argv = old_argv
self.assertEqual(['test.test_unittest', 'test.test_unittest2'],
program.testNames)
@@ -118,7 +125,7 @@ def test_NonExit(self):
program = unittest.main(exit=False,
argv=["foobar"],
testRunner=unittest.TextTestRunner(stream=stream),
- testLoader=self.FooBarLoader())
+ testLoader=self.TestLoader(self.FooBar))
self.assertTrue(hasattr(program, 'result'))
out = stream.getvalue()
self.assertIn('\nFAIL: testFail ', out)
@@ -130,13 +137,13 @@ def test_NonExit(self):
def test_Exit(self):
stream = BufferedWriter()
- self.assertRaises(
- SystemExit,
- unittest.main,
- argv=["foobar"],
- testRunner=unittest.TextTestRunner(stream=stream),
- exit=True,
- testLoader=self.FooBarLoader())
+ with self.assertRaises(SystemExit) as cm:
+ unittest.main(
+ argv=["foobar"],
+ testRunner=unittest.TextTestRunner(stream=stream),
+ exit=True,
+ testLoader=self.TestLoader(self.FooBar))
+ self.assertEqual(cm.exception.code, 1)
out = stream.getvalue()
self.assertIn('\nFAIL: testFail ', out)
self.assertIn('\nERROR: testError ', out)
@@ -147,12 +154,11 @@ def test_Exit(self):
def test_ExitAsDefault(self):
stream = BufferedWriter()
- self.assertRaises(
- SystemExit,
- unittest.main,
- argv=["foobar"],
- testRunner=unittest.TextTestRunner(stream=stream),
- testLoader=self.FooBarLoader())
+ with self.assertRaises(SystemExit):
+ unittest.main(
+ argv=["foobar"],
+ testRunner=unittest.TextTestRunner(stream=stream),
+ testLoader=self.TestLoader(self.FooBar))
out = stream.getvalue()
self.assertIn('\nFAIL: testFail ', out)
self.assertIn('\nERROR: testError ', out)
@@ -161,6 +167,17 @@ def test_ExitAsDefault(self):
'expected failures=1, unexpected successes=1)\n')
self.assertTrue(out.endswith(expected))
+ def test_ExitEmptySuite(self):
+ stream = BufferedWriter()
+ with self.assertRaises(SystemExit) as cm:
+ unittest.main(
+ argv=["empty"],
+ testRunner=unittest.TextTestRunner(stream=stream),
+ testLoader=self.TestLoader(self.Empty))
+ self.assertEqual(cm.exception.code, 5)
+ out = stream.getvalue()
+ self.assertIn('\nNO TESTS RAN\n', out)
+
class InitialisableProgram(unittest.TestProgram):
exit = False
diff --git a/Lib/test/test_unittest/test_result.py b/Lib/test/test_unittest/test_result.py
index 37d0fe12409ea4..db551b7890ca3e 100644
--- a/Lib/test/test_unittest/test_result.py
+++ b/Lib/test/test_unittest/test_result.py
@@ -451,6 +451,7 @@ def testFailFastSetByRunner(self):
stream = BufferedWriter()
runner = unittest.TextTestRunner(stream=stream, failfast=True)
def test(result):
+ result.testsRun += 1
self.assertTrue(result.failfast)
result = runner.run(test)
stream.flush()
diff --git a/Lib/test/test_unittest/test_runner.py b/Lib/test/test_unittest/test_runner.py
index 1ce42a106c5883..f3b2c0cffd4513 100644
--- a/Lib/test/test_unittest/test_runner.py
+++ b/Lib/test/test_unittest/test_runner.py
@@ -577,6 +577,16 @@ def test(self):
'inner setup', 'inner test', 'inner cleanup',
'end outer test', 'outer cleanup'])
+ def test_run_empty_suite_error_message(self):
+ class EmptyTest(unittest.TestCase):
+ pass
+
+ suite = unittest.defaultTestLoader.loadTestsFromTestCase(EmptyTest)
+ runner = getRunner()
+ runner.run(suite)
+
+ self.assertIn("\nNO TESTS RAN\n", runner.stream.getvalue())
+
class TestModuleCleanUp(unittest.TestCase):
def test_add_and_do_ModuleCleanup(self):
@@ -1367,7 +1377,7 @@ def testSpecifiedStreamUsed(self):
self.assertTrue(runner.stream.stream is f)
def test_durations(self):
- def run(test, expect_durations):
+ def run(test, *, expect_durations=True):
stream = BufferedWriter()
runner = unittest.TextTestRunner(stream=stream, durations=5, verbosity=2)
result = runner.run(test)
@@ -1389,21 +1399,21 @@ class Foo(unittest.TestCase):
def test_1(self):
pass
- run(Foo('test_1'), True)
+ run(Foo('test_1'), expect_durations=True)
# failure
class Foo(unittest.TestCase):
def test_1(self):
self.assertEqual(0, 1)
- run(Foo('test_1'), True)
+ run(Foo('test_1'), expect_durations=True)
# error
class Foo(unittest.TestCase):
def test_1(self):
1 / 0
- run(Foo('test_1'), True)
+ run(Foo('test_1'), expect_durations=True)
# error in setUp and tearDown
@@ -1414,7 +1424,7 @@ def setUp(self):
def test_1(self):
pass
- run(Foo('test_1'), True)
+ run(Foo('test_1'), expect_durations=True)
# skip (expect no durations)
class Foo(unittest.TestCase):
@@ -1422,7 +1432,7 @@ class Foo(unittest.TestCase):
def test_1(self):
pass
- run(Foo('test_1'), False)
+ run(Foo('test_1'), expect_durations=False)
diff --git a/Lib/test/test_urllib2.py b/Lib/test/test_urllib2.py
index 633d596ac3de3f..99c9e24994732f 100644
--- a/Lib/test/test_urllib2.py
+++ b/Lib/test/test_urllib2.py
@@ -3,6 +3,7 @@
from test.support import os_helper
from test.support import warnings_helper
from test import test_urllib
+from unittest import mock
import os
import io
@@ -484,7 +485,18 @@ def build_test_opener(*handler_instances):
return opener
-class MockHTTPHandler(urllib.request.BaseHandler):
+class MockHTTPHandler(urllib.request.HTTPHandler):
+ # Very simple mock HTTP handler with no special behavior other than using a mock HTTP connection
+
+ def __init__(self, debuglevel=None):
+ super(MockHTTPHandler, self).__init__(debuglevel=debuglevel)
+ self.httpconn = MockHTTPClass()
+
+ def http_open(self, req):
+ return self.do_open(self.httpconn, req)
+
+
+class MockHTTPHandlerRedirect(urllib.request.BaseHandler):
# useful for testing redirections and auth
# sends supplied headers and code as first response
# sends 200 OK as second response
@@ -512,16 +524,17 @@ def http_open(self, req):
return MockResponse(200, "OK", msg, "", req.get_full_url())
-class MockHTTPSHandler(urllib.request.AbstractHTTPHandler):
- # Useful for testing the Proxy-Authorization request by verifying the
- # properties of httpcon
+if hasattr(http.client, 'HTTPSConnection'):
+ class MockHTTPSHandler(urllib.request.HTTPSHandler):
+ # Useful for testing the Proxy-Authorization request by verifying the
+ # properties of httpcon
- def __init__(self, debuglevel=0):
- urllib.request.AbstractHTTPHandler.__init__(self, debuglevel=debuglevel)
- self.httpconn = MockHTTPClass()
+ def __init__(self, debuglevel=None, context=None, check_hostname=None):
+ super(MockHTTPSHandler, self).__init__(debuglevel, context, check_hostname)
+ self.httpconn = MockHTTPClass()
- def https_open(self, req):
- return self.do_open(self.httpconn, req)
+ def https_open(self, req):
+ return self.do_open(self.httpconn, req)
class MockHTTPHandlerCheckAuth(urllib.request.BaseHandler):
@@ -1048,12 +1061,37 @@ def test_http_body_array(self):
newreq = h.do_request_(req)
self.assertEqual(int(newreq.get_header('Content-length')),16)
- def test_http_handler_debuglevel(self):
+ def test_http_handler_global_debuglevel(self):
+ with mock.patch.object(http.client.HTTPConnection, 'debuglevel', 6):
+ o = OpenerDirector()
+ h = MockHTTPHandler()
+ o.add_handler(h)
+ o.open("http://www.example.com")
+ self.assertEqual(h._debuglevel, 6)
+
+ def test_http_handler_local_debuglevel(self):
+ o = OpenerDirector()
+ h = MockHTTPHandler(debuglevel=5)
+ o.add_handler(h)
+ o.open("http://www.example.com")
+ self.assertEqual(h._debuglevel, 5)
+
+ @unittest.skipUnless(hasattr(http.client, 'HTTPSConnection'), 'HTTPSConnection required for HTTPS tests.')
+ def test_https_handler_global_debuglevel(self):
+ with mock.patch.object(http.client.HTTPSConnection, 'debuglevel', 7):
+ o = OpenerDirector()
+ h = MockHTTPSHandler()
+ o.add_handler(h)
+ o.open("https://www.example.com")
+ self.assertEqual(h._debuglevel, 7)
+
+ @unittest.skipUnless(hasattr(http.client, 'HTTPSConnection'), 'HTTPSConnection required for HTTPS tests.')
+ def test_https_handler_local_debuglevel(self):
o = OpenerDirector()
- h = MockHTTPSHandler(debuglevel=1)
+ h = MockHTTPSHandler(debuglevel=4)
o.add_handler(h)
o.open("https://www.example.com")
- self.assertEqual(h._debuglevel, 1)
+ self.assertEqual(h._debuglevel, 4)
def test_http_doubleslash(self):
# Checks the presence of any unnecessary double slash in url does not
@@ -1289,7 +1327,7 @@ def test_cookie_redirect(self):
cj = CookieJar()
interact_netscape(cj, "http://www.example.com/", "spam=eggs")
- hh = MockHTTPHandler(302, "Location: http://www.cracker.com/\r\n\r\n")
+ hh = MockHTTPHandlerRedirect(302, "Location: http://www.cracker.com/\r\n\r\n")
hdeh = urllib.request.HTTPDefaultErrorHandler()
hrh = urllib.request.HTTPRedirectHandler()
cp = urllib.request.HTTPCookieProcessor(cj)
@@ -1299,7 +1337,7 @@ def test_cookie_redirect(self):
def test_redirect_fragment(self):
redirected_url = 'http://www.example.com/index.html#OK\r\n\r\n'
- hh = MockHTTPHandler(302, 'Location: ' + redirected_url)
+ hh = MockHTTPHandlerRedirect(302, 'Location: ' + redirected_url)
hdeh = urllib.request.HTTPDefaultErrorHandler()
hrh = urllib.request.HTTPRedirectHandler()
o = build_test_opener(hh, hdeh, hrh)
@@ -1421,6 +1459,7 @@ def test_proxy_https(self):
self.assertEqual([(handlers[0], "https_open")],
[tup[0:2] for tup in o.calls])
+ @unittest.skipUnless(hasattr(http.client, 'HTTPSConnection'), 'HTTPSConnection required for HTTPS tests.')
def test_proxy_https_proxy_authorization(self):
o = OpenerDirector()
ph = urllib.request.ProxyHandler(dict(https='proxy.example.com:3128'))
@@ -1484,7 +1523,7 @@ def check_basic_auth(self, headers, realm):
password_manager = MockPasswordManager()
auth_handler = urllib.request.HTTPBasicAuthHandler(password_manager)
body = '\r\n'.join(headers) + '\r\n\r\n'
- http_handler = MockHTTPHandler(401, body)
+ http_handler = MockHTTPHandlerRedirect(401, body)
opener.add_handler(auth_handler)
opener.add_handler(http_handler)
self._test_basic_auth(opener, auth_handler, "Authorization",
@@ -1544,7 +1583,7 @@ def test_proxy_basic_auth(self):
password_manager = MockPasswordManager()
auth_handler = urllib.request.ProxyBasicAuthHandler(password_manager)
realm = "ACME Networks"
- http_handler = MockHTTPHandler(
+ http_handler = MockHTTPHandlerRedirect(
407, 'Proxy-Authenticate: Basic realm="%s"\r\n\r\n' % realm)
opener.add_handler(auth_handler)
opener.add_handler(http_handler)
@@ -1588,7 +1627,7 @@ def http_error_401(self, *args, **kwds):
digest_handler = TestDigestAuthHandler(password_manager)
basic_handler = TestBasicAuthHandler(password_manager)
realm = "ACME Networks"
- http_handler = MockHTTPHandler(
+ http_handler = MockHTTPHandlerRedirect(
401, 'WWW-Authenticate: Basic realm="%s"\r\n\r\n' % realm)
opener.add_handler(basic_handler)
opener.add_handler(digest_handler)
@@ -1608,7 +1647,7 @@ def test_unsupported_auth_digest_handler(self):
opener = OpenerDirector()
# While using DigestAuthHandler
digest_auth_handler = urllib.request.HTTPDigestAuthHandler(None)
- http_handler = MockHTTPHandler(
+ http_handler = MockHTTPHandlerRedirect(
401, 'WWW-Authenticate: Kerberos\r\n\r\n')
opener.add_handler(digest_auth_handler)
opener.add_handler(http_handler)
@@ -1618,7 +1657,7 @@ def test_unsupported_auth_basic_handler(self):
# While using BasicAuthHandler
opener = OpenerDirector()
basic_auth_handler = urllib.request.HTTPBasicAuthHandler(None)
- http_handler = MockHTTPHandler(
+ http_handler = MockHTTPHandlerRedirect(
401, 'WWW-Authenticate: NTLM\r\n\r\n')
opener.add_handler(basic_auth_handler)
opener.add_handler(http_handler)
@@ -1705,7 +1744,7 @@ def test_basic_prior_auth_send_after_first_success(self):
opener = OpenerDirector()
opener.add_handler(auth_prior_handler)
- http_handler = MockHTTPHandler(
+ http_handler = MockHTTPHandlerRedirect(
401, 'WWW-Authenticate: Basic realm="%s"\r\n\r\n' % None)
opener.add_handler(http_handler)
diff --git a/Lib/test/test_urllib2net.py b/Lib/test/test_urllib2net.py
index 5da41c37bbfb8e..d8d882b2d33589 100644
--- a/Lib/test/test_urllib2net.py
+++ b/Lib/test/test_urllib2net.py
@@ -134,7 +134,9 @@ def setUp(self):
# They do sometimes catch some major disasters, though.
def test_ftp(self):
+ # Testing the same URL twice exercises the caching in CacheFTPHandler
urls = [
+ 'ftp://www.pythontest.net/README',
'ftp://www.pythontest.net/README',
('ftp://www.pythontest.net/non-existent-file',
None, urllib.error.URLError),
diff --git a/Lib/test/test_venv.py b/Lib/test/test_venv.py
index 7cccbe84f4ebfa..95944c7c711620 100644
--- a/Lib/test/test_venv.py
+++ b/Lib/test/test_venv.py
@@ -227,7 +227,6 @@ def pip_cmd_checker(cmd, **kwargs):
'install',
'--upgrade',
'pip',
- 'setuptools'
]
)
@@ -601,9 +600,15 @@ def test_zippath_from_non_installed_posix(self):
ld_library_path_env = "DYLD_LIBRARY_PATH"
else:
ld_library_path_env = "LD_LIBRARY_PATH"
- subprocess.check_call(cmd,
- env={"PYTHONPATH": pythonpath,
- ld_library_path_env: ld_library_path})
+ # Note that in address sanitizer mode, the current runtime
+ # implementation leaks memory due to not being able to correctly
+ # clean all unicode objects during runtime shutdown. Therefore,
+ # this uses subprocess.run instead of subprocess.check_call to
+ # maintain the core of the test while not failing due to the refleaks.
+ # This should be able to use check_call once all refleaks are fixed.
+ subprocess.run(cmd,
+ env={"PYTHONPATH": pythonpath,
+ ld_library_path_env: ld_library_path})
envpy = os.path.join(self.env_dir, self.bindir, self.exe)
# Now check the venv created from the non-installed python has
# correct zip path in pythonpath.
@@ -745,7 +750,6 @@ def do_test_with_pip(self, system_site_packages):
# future pip versions, this test can likely be relaxed further.
out = out.decode("latin-1") # Force to text, prevent decoding errors
self.assertIn("Successfully uninstalled pip", out)
- self.assertIn("Successfully uninstalled setuptools", out)
# Check pip is now gone from the virtual environment. This only
# applies in the system_site_packages=False case, because in the
# other case, pip may still be available in the system site-packages
diff --git a/Lib/test/test_weakref.py b/Lib/test/test_weakref.py
index 7c5920797d2538..1bc1d05f7daba9 100644
--- a/Lib/test/test_weakref.py
+++ b/Lib/test/test_weakref.py
@@ -116,6 +116,17 @@ def test_basic_ref(self):
del o
repr(wr)
+ def test_repr_failure_gh99184(self):
+ class MyConfig(dict):
+ def __getattr__(self, x):
+ return self[x]
+
+ obj = MyConfig(offset=5)
+ obj_weakref = weakref.ref(obj)
+
+ self.assertIn('MyConfig', repr(obj_weakref))
+ self.assertIn('MyConfig', str(obj_weakref))
+
def test_basic_callback(self):
self.check_basic_callback(C)
self.check_basic_callback(create_function)
diff --git a/Lib/test/test_winreg.py b/Lib/test/test_winreg.py
index 769ab67b0f5611..924a962781a75b 100644
--- a/Lib/test/test_winreg.py
+++ b/Lib/test/test_winreg.py
@@ -1,11 +1,12 @@
# Test the windows specific win32reg module.
# Only win32reg functions not hit here: FlushKey, LoadKey and SaveKey
+import gc
import os, sys, errno
-import unittest
-from test.support import import_helper
import threading
+import unittest
from platform import machine, win32_edition
+from test.support import cpython_only, import_helper
# Do this first so test will be skipped if module doesn't exist
import_helper.import_module('winreg', required_on=['win'])
@@ -49,6 +50,17 @@
("Japanese 日本", "日本語", REG_SZ),
]
+
+@cpython_only
+class HeapTypeTests(unittest.TestCase):
+ def test_have_gc(self):
+ self.assertTrue(gc.is_tracked(HKEYType))
+
+ def test_immutable(self):
+ with self.assertRaisesRegex(TypeError, "immutable"):
+ HKEYType.foo = "bar"
+
+
class BaseWinregTests(unittest.TestCase):
def setUp(self):
diff --git a/Lib/tkinter/__init__.py b/Lib/tkinter/__init__.py
index 479daf0e5abfc3..bf0b3b92155938 100644
--- a/Lib/tkinter/__init__.py
+++ b/Lib/tkinter/__init__.py
@@ -3430,8 +3430,7 @@ def entryconfigure(self, index, cnf=None, **kw):
def index(self, index):
"""Return the index of a menu item identified by INDEX."""
i = self.tk.call(self._w, 'index', index)
- if i == 'none': return None
- return self.tk.getint(i)
+ return None if i in ('', 'none') else self.tk.getint(i) # GH-103685.
def invoke(self, index):
"""Invoke a menu item identified by INDEX and execute
diff --git a/Lib/token.py b/Lib/token.py
index 95b107c6643b3f..1459d12b376f82 100644
--- a/Lib/token.py
+++ b/Lib/token.py
@@ -57,18 +57,22 @@
RARROW = 51
ELLIPSIS = 52
COLONEQUAL = 53
-OP = 54
-AWAIT = 55
-ASYNC = 56
-TYPE_IGNORE = 57
-TYPE_COMMENT = 58
-SOFT_KEYWORD = 59
+EXCLAMATION = 54
+OP = 55
+AWAIT = 56
+ASYNC = 57
+TYPE_IGNORE = 58
+TYPE_COMMENT = 59
+SOFT_KEYWORD = 60
+FSTRING_START = 61
+FSTRING_MIDDLE = 62
+FSTRING_END = 63
# These aren't used by the C tokenizer but are needed for tokenize.py
-ERRORTOKEN = 60
-COMMENT = 61
-NL = 62
-ENCODING = 63
-N_TOKENS = 64
+ERRORTOKEN = 64
+COMMENT = 65
+NL = 66
+ENCODING = 67
+N_TOKENS = 68
# Special definitions for cooperation with parser
NT_OFFSET = 256
@@ -78,6 +82,7 @@
__all__.extend(tok_name.values())
EXACT_TOKEN_TYPES = {
+ '!': EXCLAMATION,
'!=': NOTEQUAL,
'%': PERCENT,
'%=': PERCENTEQUAL,
diff --git a/Lib/trace.py b/Lib/trace.py
index 213e46517d683d..fb9a423ea09fce 100755
--- a/Lib/trace.py
+++ b/Lib/trace.py
@@ -49,6 +49,7 @@
"""
__all__ = ['Trace', 'CoverageResults']
+import io
import linecache
import os
import sys
@@ -716,7 +717,7 @@ def parse_ignore_dir(s):
sys.argv = [opts.progname, *opts.arguments]
sys.path[0] = os.path.dirname(opts.progname)
- with open(opts.progname, 'rb') as fp:
+ with io.open_code(opts.progname) as fp:
code = compile(fp.read(), opts.progname, 'exec')
# try to emulate __main__ namespace as much as possible
globs = {
diff --git a/Lib/types.py b/Lib/types.py
index aa8a1c84722399..6110e6e1de7249 100644
--- a/Lib/types.py
+++ b/Lib/types.py
@@ -143,6 +143,38 @@ def _calculate_meta(meta, bases):
"of the metaclasses of all its bases")
return winner
+
+def get_original_bases(cls, /):
+ """Return the class's "original" bases prior to modification by `__mro_entries__`.
+
+ Examples::
+
+ from typing import TypeVar, Generic, NamedTuple, TypedDict
+
+ T = TypeVar("T")
+ class Foo(Generic[T]): ...
+ class Bar(Foo[int], float): ...
+ class Baz(list[str]): ...
+ Eggs = NamedTuple("Eggs", [("a", int), ("b", str)])
+ Spam = TypedDict("Spam", {"a": int, "b": str})
+
+ assert get_original_bases(Bar) == (Foo[int], float)
+ assert get_original_bases(Baz) == (list[str],)
+ assert get_original_bases(Eggs) == (NamedTuple,)
+ assert get_original_bases(Spam) == (TypedDict,)
+ assert get_original_bases(int) == (object,)
+ """
+ try:
+ return cls.__orig_bases__
+ except AttributeError:
+ try:
+ return cls.__bases__
+ except AttributeError:
+ raise TypeError(
+ f'Expected an instance of type, not {type(cls).__name__!r}'
+ ) from None
+
+
class DynamicClassAttribute:
"""Route attribute access on a class to __getattr__.
diff --git a/Lib/typing.py b/Lib/typing.py
index 7c165562c2b53d..354bc80eb3abfa 100644
--- a/Lib/typing.py
+++ b/Lib/typing.py
@@ -2962,7 +2962,9 @@ class Employee(NamedTuple):
elif kwargs:
raise TypeError("Either list of fields or keywords"
" can be provided to NamedTuple, not both")
- return _make_nmtuple(typename, fields, module=_caller())
+ nt = _make_nmtuple(typename, fields, module=_caller())
+ nt.__orig_bases__ = (NamedTuple,)
+ return nt
_NamedTuple = type.__new__(NamedTupleMeta, 'NamedTuple', (), {})
@@ -2994,6 +2996,9 @@ def __new__(cls, name, bases, ns, total=True):
tp_dict = type.__new__(_TypedDictMeta, name, (*generic_base, dict), ns)
+ if not hasattr(tp_dict, '__orig_bases__'):
+ tp_dict.__orig_bases__ = bases
+
annotations = {}
own_annotations = ns.get('__annotations__', {})
msg = "TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type"
@@ -3104,7 +3109,9 @@ class body be required.
# Setting correct module is necessary to make typed dict classes pickleable.
ns['__module__'] = module
- return _TypedDictMeta(typename, (), ns, total=total)
+ td = _TypedDictMeta(typename, (), ns, total=total)
+ td.__orig_bases__ = (TypedDict,)
+ return td
_TypedDict = type.__new__(_TypedDictMeta, 'TypedDict', (), {})
TypedDict.__mro_entries__ = lambda bases: (_TypedDict,)
diff --git a/Lib/unittest/main.py b/Lib/unittest/main.py
index 0792750ffd9e0d..51b81a6c3728bb 100644
--- a/Lib/unittest/main.py
+++ b/Lib/unittest/main.py
@@ -9,6 +9,7 @@
from .signals import installHandler
__unittest = True
+_NO_TESTS_EXITCODE = 5
MAIN_EXAMPLES = """\
Examples:
@@ -279,6 +280,12 @@ def runTests(self):
testRunner = self.testRunner
self.result = testRunner.run(self.test)
if self.exit:
- sys.exit(not self.result.wasSuccessful())
+ if self.result.testsRun == 0:
+ sys.exit(_NO_TESTS_EXITCODE)
+ elif self.result.wasSuccessful():
+ sys.exit(0)
+ else:
+ sys.exit(1)
+
main = TestProgram
diff --git a/Lib/unittest/result.py b/Lib/unittest/result.py
index fa9bea47c88829..7757dba9670b43 100644
--- a/Lib/unittest/result.py
+++ b/Lib/unittest/result.py
@@ -159,7 +159,11 @@ def addUnexpectedSuccess(self, test):
self.unexpectedSuccesses.append(test)
def addDuration(self, test, elapsed):
- """Called when a test finished to run, regardless of its outcome."""
+ """Called when a test finishes running, regardless of its outcome.
+ *test* is the test case corresponding to the test method.
+ *elapsed* is the time represented in seconds, and it includes the
+ execution of cleanup functions.
+ """
# support for a TextTestRunner using an old TestResult class
if hasattr(self, "collectedDurations"):
self.collectedDurations.append((test, elapsed))
diff --git a/Lib/unittest/runner.py b/Lib/unittest/runner.py
index a51c5c562df09d..e3c020e0ace96d 100644
--- a/Lib/unittest/runner.py
+++ b/Lib/unittest/runner.py
@@ -274,6 +274,8 @@ def run(self, test):
infos.append("failures=%d" % failed)
if errored:
infos.append("errors=%d" % errored)
+ elif run == 0:
+ self.stream.write("NO TESTS RAN")
else:
self.stream.write("OK")
if skipped:
diff --git a/Lib/urllib/request.py b/Lib/urllib/request.py
index 151034e6a81bf9..5314b3f26021eb 100644
--- a/Lib/urllib/request.py
+++ b/Lib/urllib/request.py
@@ -1251,8 +1251,8 @@ def http_error_407(self, req, fp, code, msg, headers):
class AbstractHTTPHandler(BaseHandler):
- def __init__(self, debuglevel=0):
- self._debuglevel = debuglevel
+ def __init__(self, debuglevel=None):
+ self._debuglevel = debuglevel if debuglevel is not None else http.client.HTTPConnection.debuglevel
def set_http_debuglevel(self, level):
self._debuglevel = level
@@ -1378,7 +1378,8 @@ def http_open(self, req):
class HTTPSHandler(AbstractHTTPHandler):
- def __init__(self, debuglevel=0, context=None, check_hostname=None):
+ def __init__(self, debuglevel=None, context=None, check_hostname=None):
+ debuglevel = debuglevel if debuglevel is not None else http.client.HTTPSConnection.debuglevel
AbstractHTTPHandler.__init__(self, debuglevel)
if context is None:
http_version = http.client.HTTPSConnection._http_vsn
@@ -2474,7 +2475,13 @@ def retrfile(self, file, type):
return (ftpobj, retrlen)
def endtransfer(self):
+ if not self.busy:
+ return
self.busy = 0
+ try:
+ self.ftp.voidresp()
+ except ftperrors():
+ pass
def close(self):
self.keepalive = False
diff --git a/Lib/uuid.py b/Lib/uuid.py
index 698be34873b9dc..697f3b45597023 100644
--- a/Lib/uuid.py
+++ b/Lib/uuid.py
@@ -401,7 +401,7 @@ def _get_command_stdout(command, *args):
# over locally administered ones since the former are globally unique, but
# we'll return the first of the latter found if that's all the machine has.
#
-# See https://en.wikipedia.org/wiki/MAC_address#Universal_vs._local
+# See https://en.wikipedia.org/wiki/MAC_address#Universal_vs._local_(U/L_bit)
def _is_universal(mac):
return not (mac & (1 << 41))
@@ -615,7 +615,7 @@ def _random_getnode():
# significant bit of the first octet". This works out to be the 41st bit
# counting from 1 being the least significant bit, or 1<<40.
#
- # See https://en.wikipedia.org/wiki/MAC_address#Unicast_vs._multicast
+ # See https://en.wikipedia.org/w/index.php?title=MAC_address&oldid=1128764812#Universal_vs._local_(U/L_bit)
import random
return random.getrandbits(48) | (1 << 40)
diff --git a/Lib/venv/__init__.py b/Lib/venv/__init__.py
index 2f87c62ccba866..2173c9b13e5cf7 100644
--- a/Lib/venv/__init__.py
+++ b/Lib/venv/__init__.py
@@ -13,7 +13,7 @@
import types
-CORE_VENV_DEPS = ('pip', 'setuptools')
+CORE_VENV_DEPS = ('pip',)
logger = logging.getLogger(__name__)
@@ -523,7 +523,7 @@ def main(args=None):
'this environment.')
parser.add_argument('--upgrade-deps', default=False, action='store_true',
dest='upgrade_deps',
- help=f'Upgrade core dependencies: {", ".join(CORE_VENV_DEPS)} '
+ help=f'Upgrade core dependencies ({", ".join(CORE_VENV_DEPS)}) '
'to the latest version in PyPI')
options = parser.parse_args(args)
if options.upgrade and options.clear:
diff --git a/Lib/venv/scripts/common/activate b/Lib/venv/scripts/common/activate
index cb898b39670c47..408df5cb93b9e9 100644
--- a/Lib/venv/scripts/common/activate
+++ b/Lib/venv/scripts/common/activate
@@ -1,5 +1,5 @@
# This file must be used with "source bin/activate" *from bash*
-# you cannot run it directly
+# You cannot run it directly
deactivate () {
# reset old environment variables
diff --git a/Lib/venv/scripts/posix/activate.csh b/Lib/venv/scripts/posix/activate.csh
index d6f697c55ed81c..5e8d66fa9e5061 100644
--- a/Lib/venv/scripts/posix/activate.csh
+++ b/Lib/venv/scripts/posix/activate.csh
@@ -1,5 +1,6 @@
# This file must be used with "source bin/activate.csh" *from csh*.
# You cannot run it directly.
+
# Created by Davide Di Blasi .
# Ported to Python 3.3 venv by Andrew Svetlov
diff --git a/Lib/venv/scripts/posix/activate.fish b/Lib/venv/scripts/posix/activate.fish
index 9aa4446005f4d8..91ad6442e05692 100644
--- a/Lib/venv/scripts/posix/activate.fish
+++ b/Lib/venv/scripts/posix/activate.fish
@@ -1,5 +1,5 @@
# This file must be used with "source /bin/activate.fish" *from fish*
-# (https://fishshell.com/); you cannot run it directly.
+# (https://fishshell.com/). You cannot run it directly.
function deactivate -d "Exit virtual environment and return to normal shell environment"
# reset old environment variables
diff --git a/Mac/BuildScript/scripts/postflight.ensurepip b/Mac/BuildScript/scripts/postflight.ensurepip
index 36d05945b6fd90..ce3c6c1c2bf9e6 100755
--- a/Mac/BuildScript/scripts/postflight.ensurepip
+++ b/Mac/BuildScript/scripts/postflight.ensurepip
@@ -56,19 +56,19 @@ if [ -d /usr/local/bin ] ; then
cd /usr/local/bin
- # Create pipx.y and easy_install-x.y links if /usr/local/bin/pythonx.y
+ # Create pipx.y links if /usr/local/bin/pythonx.y
# is linked to this framework version
install_links_if_our_fw "python${PYVER}" \
- "pip${PYVER}" "easy_install-${PYVER}"
+ "pip${PYVER}"
# Create pipx link if /usr/local/bin/pythonx is linked to this version
install_links_if_our_fw "python${PYMAJOR}" \
"pip${PYMAJOR}"
- # Create pip and easy_install link if /usr/local/bin/python
+ # Create pip link if /usr/local/bin/python
# is linked to this version
install_links_if_our_fw "python" \
- "pip" "easy_install"
+ "pip"
)
fi
exit 0
diff --git a/Mac/Makefile.in b/Mac/Makefile.in
index f9691288414538..69ab4198988570 100644
--- a/Mac/Makefile.in
+++ b/Mac/Makefile.in
@@ -166,7 +166,6 @@ altinstallunixtools:
-if test "x$(ENSUREPIP)" != "xno" ; then \
cd "$(DESTDIR)$(FRAMEWORKUNIXTOOLSPREFIX)/bin" && \
for fn in \
- easy_install-$(VERSION) \
pip$(VERSION) \
; \
do \
diff --git a/Makefile.pre.in b/Makefile.pre.in
index afd503ef126339..b285ef9e832db5 100644
--- a/Makefile.pre.in
+++ b/Makefile.pre.in
@@ -38,6 +38,7 @@ CC= @CC@
CXX= @CXX@
LINKCC= @LINKCC@
AR= @AR@
+READELF= @READELF@
SOABI= @SOABI@
LDVERSION= @LDVERSION@
LIBPYTHON= @LIBPYTHON@
@@ -670,13 +671,18 @@ profile-opt: profile-run-stamp
bolt-opt: @PREBOLT_RULE@
rm -f *.fdata
- @LLVM_BOLT@ ./$(BUILDPYTHON) -instrument -instrumentation-file-append-pid -instrumentation-file=$(abspath $(BUILDPYTHON).bolt) -o $(BUILDPYTHON).bolt_inst
- ./$(BUILDPYTHON).bolt_inst $(PROFILE_TASK) || true
- @MERGE_FDATA@ $(BUILDPYTHON).*.fdata > $(BUILDPYTHON).fdata
- @LLVM_BOLT@ ./$(BUILDPYTHON) -o $(BUILDPYTHON).bolt -data=$(BUILDPYTHON).fdata -update-debug-sections -reorder-blocks=ext-tsp -reorder-functions=hfsort+ -split-functions -icf=1 -inline-all -split-eh -reorder-functions-use-hot-size -peepholes=none -jump-tables=aggressive -inline-ap -indirect-call-promotion=all -dyno-stats -use-gnu-stack -frame-opt=hot
- rm -f *.fdata
- rm -f $(BUILDPYTHON).bolt_inst
- mv $(BUILDPYTHON).bolt $(BUILDPYTHON)
+ @if $(READELF) -p .note.bolt_info $(BUILDPYTHON) | grep BOLT > /dev/null; then\
+ echo "skip: $(BUILDPYTHON) is already BOLTed."; \
+ else \
+ @LLVM_BOLT@ ./$(BUILDPYTHON) -instrument -instrumentation-file-append-pid -instrumentation-file=$(abspath $(BUILDPYTHON).bolt) -o $(BUILDPYTHON).bolt_inst; \
+ ./$(BUILDPYTHON).bolt_inst $(PROFILE_TASK) || true; \
+ @MERGE_FDATA@ $(BUILDPYTHON).*.fdata > $(BUILDPYTHON).fdata; \
+ @LLVM_BOLT@ ./$(BUILDPYTHON) -o $(BUILDPYTHON).bolt -data=$(BUILDPYTHON).fdata -update-debug-sections -reorder-blocks=ext-tsp -reorder-functions=hfsort+ -split-functions -icf=1 -inline-all -split-eh -reorder-functions-use-hot-size -peepholes=none -jump-tables=aggressive -inline-ap -indirect-call-promotion=all -dyno-stats -use-gnu-stack -frame-opt=hot; \
+ rm -f *.fdata; \
+ rm -f $(BUILDPYTHON).bolt_inst; \
+ mv $(BUILDPYTHON).bolt $(BUILDPYTHON); \
+ fi
+
# Compile and run with gcov
.PHONY=coverage coverage-lcov coverage-report
@@ -976,7 +982,7 @@ Makefile Modules/config.c: Makefile.pre \
Modules/Setup.local \
Modules/Setup.bootstrap \
Modules/Setup.stdlib
- $(SHELL) $(MAKESETUP) -c $(srcdir)/Modules/config.c.in \
+ $(MAKESETUP) -c $(srcdir)/Modules/config.c.in \
-s Modules \
Modules/Setup.local \
Modules/Setup.stdlib \
@@ -1188,7 +1194,7 @@ Tools/build/freeze_modules.py: $(FREEZE_MODULE)
.PHONY: regen-frozen
regen-frozen: Tools/build/freeze_modules.py $(FROZEN_FILES_IN)
- $(PYTHON_FOR_REGEN) $(srcdir)/Tools/build/freeze_modules.py
+ $(PYTHON_FOR_REGEN) $(srcdir)/Tools/build/freeze_modules.py --frozen-modules
@echo "The Makefile was updated, you may need to re-run make."
############################################################################
@@ -2037,16 +2043,26 @@ LIBSUBDIRS= asyncio \
zoneinfo \
__phello__
TESTSUBDIRS= idlelib/idle_test \
- test test/audiodata \
- test/capath test/cjkencodings \
- test/data test/decimaltestdata \
- test/dtracedata test/eintrdata \
- test/encoded_modules test/imghdrdata \
- test/libregrtest test/sndhdrdata \
- test/subprocessdata test/support \
+ test \
+ test/audiodata \
+ test/capath \
+ test/cjkencodings \
+ test/crashers \
+ test/data \
+ test/decimaltestdata \
+ test/dtracedata \
+ test/encoded_modules \
+ test/imghdrdata \
+ test/leakers \
+ test/libregrtest \
+ test/sndhdrdata \
+ test/subprocessdata \
+ test/support \
test/test_asyncio \
+ test/test_capi \
test/test_ctypes \
- test/test_email test/test_email/data \
+ test/test_email \
+ test/test_email/data \
test/test_import \
test/test_import/data \
test/test_import/data/circular_imports \
@@ -2105,16 +2121,39 @@ TESTSUBDIRS= idlelib/idle_test \
test/test_lib2to3/data/fixers \
test/test_lib2to3/data/fixers/myfixes \
test/test_peg_generator \
+ test/test_sqlite3 \
test/test_tkinter \
+ test/test_tomllib \
+ test/test_tomllib/data \
+ test/test_tomllib/data/invalid \
+ test/test_tomllib/data/invalid/array \
+ test/test_tomllib/data/invalid/array-of-tables \
+ test/test_tomllib/data/invalid/boolean \
+ test/test_tomllib/data/invalid/dates-and-times \
+ test/test_tomllib/data/invalid/dotted-keys \
+ test/test_tomllib/data/invalid/inline-table \
+ test/test_tomllib/data/invalid/keys-and-vals \
+ test/test_tomllib/data/invalid/literal-str \
+ test/test_tomllib/data/invalid/multiline-basic-str \
+ test/test_tomllib/data/invalid/multiline-literal-str \
+ test/test_tomllib/data/invalid/table \
+ test/test_tomllib/data/valid \
+ test/test_tomllib/data/valid/array \
+ test/test_tomllib/data/valid/dates-and-times \
+ test/test_tomllib/data/valid/multiline-basic-str \
test/test_tools \
test/test_ttk \
- test/test_warnings test/test_warnings/data \
+ test/test_unittest \
+ test/test_unittest/testmock \
+ test/test_warnings \
+ test/test_warnings/data \
test/test_zipfile \
- test/test_zoneinfo test/test_zoneinfo/data \
- test/test_unittest test/test_unittest/testmock \
+ test/test_zoneinfo \
+ test/test_zoneinfo/data \
test/tracedmodules \
test/typinganndata \
- test/xmltestdata test/xmltestdata/c14n-20 \
+ test/xmltestdata \
+ test/xmltestdata/c14n-20 \
test/ziptestdata
COMPILEALL_OPTS=-j0
@@ -2417,12 +2456,12 @@ frameworkinstallextras:
# Build the toplevel Makefile
Makefile.pre: $(srcdir)/Makefile.pre.in config.status
- CONFIG_FILES=Makefile.pre CONFIG_HEADERS= $(SHELL) config.status
+ CONFIG_FILES=Makefile.pre CONFIG_HEADERS= ./config.status
$(MAKE) -f Makefile.pre Makefile
# Run the configure script.
config.status: $(srcdir)/configure
- $(SHELL) $(srcdir)/configure $(CONFIG_ARGS)
+ $(srcdir)/configure $(CONFIG_ARGS)
.PRECIOUS: config.status $(BUILDPYTHON) Makefile Makefile.pre
@@ -2447,8 +2486,8 @@ reindent:
# Rerun configure with the same options as it was run last time,
# provided the config.status script exists
recheck:
- $(SHELL) config.status --recheck
- $(SHELL) config.status
+ ./config.status --recheck
+ ./config.status
# Regenerate configure and pyconfig.h.in
.PHONY: autoconf
@@ -2663,6 +2702,15 @@ MODULE__SSL_DEPS=$(srcdir)/Modules/_ssl.h $(srcdir)/Modules/_ssl/cert.c $(srcdir
MODULE__TESTCAPI_DEPS=$(srcdir)/Modules/_testcapi/testcapi_long.h $(srcdir)/Modules/_testcapi/parts.h
MODULE__SQLITE3_DEPS=$(srcdir)/Modules/_sqlite/connection.h $(srcdir)/Modules/_sqlite/cursor.h $(srcdir)/Modules/_sqlite/microprotocols.h $(srcdir)/Modules/_sqlite/module.h $(srcdir)/Modules/_sqlite/prepare_protocol.h $(srcdir)/Modules/_sqlite/row.h $(srcdir)/Modules/_sqlite/util.h
+CODECS_COMMON_HEADERS=$(srcdir)/Modules/cjkcodecs/multibytecodec.h $(srcdir)/Modules/cjkcodecs/cjkcodecs.h
+MODULE__CODECS_CN_DEPS=$(srcdir)/Modules/cjkcodecs/mappings_cn.h $(CODECS_COMMON_HEADERS)
+MODULE__CODECS_HK_DEPS=$(srcdir)/Modules/cjkcodecs/mappings_hk.h $(CODECS_COMMON_HEADERS)
+MODULE__CODECS_ISO2022_DEPS=$(srcdir)/Modules/cjkcodecs/mappings_jisx0213_pair.h $(srcdir)/Modules/cjkcodecs/alg_jisx0201.h $(srcdir)/Modules/cjkcodecs/emu_jisx0213_2000.h $(CODECS_COMMON_HEADERS)
+MODULE__CODECS_JP_DEPS=$(srcdir)/Modules/cjkcodecs/mappings_jisx0213_pair.h $(srcdir)/Modules/cjkcodecs/alg_jisx0201.h $(srcdir)/Modules/cjkcodecs/emu_jisx0213_2000.h $(srcdir)/Modules/cjkcodecs/mappings_jp.h $(CODECS_COMMON_HEADERS)
+MODULE__CODECS_KR_DEPS=$(srcdir)/Modules/cjkcodecs/mappings_kr.h $(CODECS_COMMON_HEADERS)
+MODULE__CODECS_TW_DEPS=$(srcdir)/Modules/cjkcodecs/mappings_tw.h $(CODECS_COMMON_HEADERS)
+MODULE__MULTIBYTECODEC_DEPS=$(srcdir)/Modules/cjkcodecs/multibytecodec.h
+
# IF YOU PUT ANYTHING HERE IT WILL GO AWAY
# Local Variables:
# mode: makefile
diff --git a/Misc/ACKS b/Misc/ACKS
index d0ff4e8aeb5c90..65be5cfc3c7945 100644
--- a/Misc/ACKS
+++ b/Misc/ACKS
@@ -160,6 +160,7 @@ Brice Berna
Olivier Bernard
Vivien Bernet-Rollande
Maxwell Bernstein
+Jay Berry
Eric Beser
Steven Bethard
Stephen Bevan
@@ -298,6 +299,7 @@ Dave Chambers
Pascal Chambon
Nicholas Chammas
Ofey Chan
+Juhi Chandalia
John Chandler
Hye-Shik Chang
Jeffrey Chang
@@ -1511,6 +1513,7 @@ Vlad Riscutia
Wes Rishel
Daniel Riti
Juan M. Bello Rivas
+Stefano Rivera
Llandy Riveron Del Risco
Mohd Sanad Zaki Rizvi
Davide Rizzo
diff --git a/Misc/NEWS.d/3.7.0b2.rst b/Misc/NEWS.d/3.7.0b2.rst
index b2ade206bd5f97..9590914599bb86 100644
--- a/Misc/NEWS.d/3.7.0b2.rst
+++ b/Misc/NEWS.d/3.7.0b2.rst
@@ -357,7 +357,7 @@ Wirtel
Add TLSVersion constants and SSLContext.maximum_version / minimum_version
attributes. The new API wraps OpenSSL 1.1
-https://www.openssl.org/docs/man1.1.0/ssl/SSL_CTX_set_min_proto_version.html
+https://web.archive.org/web/20180309043602/https://www.openssl.org/docs/man1.1.0/ssl/SSL_CTX_set_min_proto_version.html
feature.
..
diff --git a/Misc/NEWS.d/3.8.0a1.rst b/Misc/NEWS.d/3.8.0a1.rst
index 991bbc128670b2..db2eba32e6ea34 100644
--- a/Misc/NEWS.d/3.8.0a1.rst
+++ b/Misc/NEWS.d/3.8.0a1.rst
@@ -5951,7 +5951,7 @@ Wirtel
Add TLSVersion constants and SSLContext.maximum_version / minimum_version
attributes. The new API wraps OpenSSL 1.1
-https://www.openssl.org/docs/man1.1.0/ssl/SSL_CTX_set_min_proto_version.html
+https://web.archive.org/web/20180309043602/https://www.openssl.org/docs/man1.1.0/ssl/SSL_CTX_set_min_proto_version.html
feature.
..
diff --git a/Misc/NEWS.d/3.9.0a1.rst b/Misc/NEWS.d/3.9.0a1.rst
index 633620583838df..0888a5c43087b5 100644
--- a/Misc/NEWS.d/3.9.0a1.rst
+++ b/Misc/NEWS.d/3.9.0a1.rst
@@ -4887,7 +4887,7 @@ Fix use of registry values to launch Python from Microsoft Store app.
.. section: Windows
Fix memory leak on Windows in creating an SSLContext object or running
-urllib.request.urlopen('https://...').
+``urllib.request.urlopen('https://...')``.
..
diff --git a/Misc/NEWS.d/3.9.0a2.rst b/Misc/NEWS.d/3.9.0a2.rst
index 226ea0d3df2243..a03eb10f1d523a 100644
--- a/Misc/NEWS.d/3.9.0a2.rst
+++ b/Misc/NEWS.d/3.9.0a2.rst
@@ -686,7 +686,7 @@ added.
Update documentation to state that to activate virtual environments under
fish one should use `source`, not `.` as documented at
-https://fishshell.com/docs/current/commands.html#source.
+https://fishshell.com/docs/current/cmds/source.html.
..
diff --git a/Misc/NEWS.d/3.9.0a4.rst b/Misc/NEWS.d/3.9.0a4.rst
index 2aef8b26b01696..019b34c4082d10 100644
--- a/Misc/NEWS.d/3.9.0a4.rst
+++ b/Misc/NEWS.d/3.9.0a4.rst
@@ -392,7 +392,7 @@ The distutils ``bdist_msi`` command is deprecated in Python 3.9, use
Improved performance of zipfile.Path for files with a large number of
entries. Also improved performance and fixed minor issue as published with
`importlib_metadata 1.5
-`_.
+`_.
..
diff --git a/Misc/NEWS.d/next/Build/2023-04-14-10-24-37.gh-issue-103532.H1djkd.rst b/Misc/NEWS.d/next/Build/2023-04-14-10-24-37.gh-issue-103532.H1djkd.rst
new file mode 100644
index 00000000000000..255c9833282c2f
--- /dev/null
+++ b/Misc/NEWS.d/next/Build/2023-04-14-10-24-37.gh-issue-103532.H1djkd.rst
@@ -0,0 +1,4 @@
+The ``TKINTER_PROTECT_LOADTK`` macro is no longer defined or used in the
+``_tkinter`` module. It was previously only defined when building against
+Tk 8.4.13 and older, but Tk older than 8.5.12 has been unsupported since
+gh-issue-91152.
diff --git a/Misc/NEWS.d/next/C API/2023-02-09-23-09-29.gh-issue-101408._paFIF.rst b/Misc/NEWS.d/next/C API/2023-02-09-23-09-29.gh-issue-101408._paFIF.rst
new file mode 100644
index 00000000000000..172d66163d42e6
--- /dev/null
+++ b/Misc/NEWS.d/next/C API/2023-02-09-23-09-29.gh-issue-101408._paFIF.rst
@@ -0,0 +1,2 @@
+:c:func:`PyObject_GC_Resize` should calculate preheader size if needed.
+Patch by Dong-hee Na.
diff --git a/Misc/NEWS.d/next/C API/2023-03-28-12-31-51.gh-issue-103091.CzZyaZ.rst b/Misc/NEWS.d/next/C API/2023-03-28-12-31-51.gh-issue-103091.CzZyaZ.rst
new file mode 100644
index 00000000000000..28c77b6816af87
--- /dev/null
+++ b/Misc/NEWS.d/next/C API/2023-03-28-12-31-51.gh-issue-103091.CzZyaZ.rst
@@ -0,0 +1 @@
+Add a new C-API function to eagerly assign a version tag to a PyTypeObject: ``PyUnstable_Type_AssignVersionTag()``.
diff --git a/Misc/NEWS.d/next/Core and Builtins/2019-12-01-12-58-31.bpo-31821.1FNmwk.rst b/Misc/NEWS.d/next/Core and Builtins/2019-12-01-12-58-31.bpo-31821.1FNmwk.rst
new file mode 100644
index 00000000000000..13c054fdd68276
--- /dev/null
+++ b/Misc/NEWS.d/next/Core and Builtins/2019-12-01-12-58-31.bpo-31821.1FNmwk.rst
@@ -0,0 +1 @@
+Fix :func:`!pause_reading` to work when called from :func:`!connection_made` in :mod:`asyncio`.
diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-02-11-15-54-40.bpo-39610.fvgsCl.rst b/Misc/NEWS.d/next/Core and Builtins/2020-02-11-15-54-40.bpo-39610.fvgsCl.rst
new file mode 100644
index 00000000000000..d65e0f3db9d6f5
--- /dev/null
+++ b/Misc/NEWS.d/next/Core and Builtins/2020-02-11-15-54-40.bpo-39610.fvgsCl.rst
@@ -0,0 +1,2 @@
+``len()`` for 0-dimensional :class:`memoryview` objects (such as ``memoryview(ctypes.c_uint8(42))``) now raises a :exc:`TypeError`.
+Previously this returned ``1``, which was not consistent with ``mem_0d[0]`` raising an :exc:`IndexError`.
diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-11-08-12-36-25.gh-issue-99184.KIaqzz.rst b/Misc/NEWS.d/next/Core and Builtins/2022-11-08-12-36-25.gh-issue-99184.KIaqzz.rst
new file mode 100644
index 00000000000000..80076831badfea
--- /dev/null
+++ b/Misc/NEWS.d/next/Core and Builtins/2022-11-08-12-36-25.gh-issue-99184.KIaqzz.rst
@@ -0,0 +1,2 @@
+Bypass instance attribute access of ``__name__`` in ``repr`` of
+:class:`weakref.ref`.
diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-04-02-22-14-57.gh-issue-84436.hvMgwF.rst b/Misc/NEWS.d/next/Core and Builtins/2023-04-02-22-14-57.gh-issue-84436.hvMgwF.rst
new file mode 100644
index 00000000000000..c4d8ce75b35a30
--- /dev/null
+++ b/Misc/NEWS.d/next/Core and Builtins/2023-04-02-22-14-57.gh-issue-84436.hvMgwF.rst
@@ -0,0 +1,3 @@
+The implementation of PEP-683 which adds Immortal Objects by using a fixed
+reference count that skips reference counting to make objects truly
+immutable.
diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-04-07-12-18-41.gh-issue-103323.9802br.rst b/Misc/NEWS.d/next/Core and Builtins/2023-04-07-12-18-41.gh-issue-103323.9802br.rst
new file mode 100644
index 00000000000000..347c91d973e5ce
--- /dev/null
+++ b/Misc/NEWS.d/next/Core and Builtins/2023-04-07-12-18-41.gh-issue-103323.9802br.rst
@@ -0,0 +1,3 @@
+We've replaced our use of ``_PyRuntime.tstate_current`` with a thread-local
+variable. This is a fairly low-level implementation detail, and there
+should be no change in behavior.
diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-04-12-20-22-03.gh-issue-87729.99A7UO.rst b/Misc/NEWS.d/next/Core and Builtins/2023-04-12-20-22-03.gh-issue-87729.99A7UO.rst
new file mode 100644
index 00000000000000..9d75de1565a170
--- /dev/null
+++ b/Misc/NEWS.d/next/Core and Builtins/2023-04-12-20-22-03.gh-issue-87729.99A7UO.rst
@@ -0,0 +1,4 @@
+Add :opcode:`LOAD_SUPER_ATTR` (and a specialization for ``super().method()``) to
+speed up ``super().method()`` and ``super().attr``. This makes
+``super().method()`` roughly 2.3x faster and brings it within 20% of the
+performance of a simple method call. Patch by Vladimir Matveev and Carl Meyer.
diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-04-13-00-58-55.gh-issue-103492.P4k0Ay.rst b/Misc/NEWS.d/next/Core and Builtins/2023-04-13-00-58-55.gh-issue-103492.P4k0Ay.rst
new file mode 100644
index 00000000000000..929650968173e7
--- /dev/null
+++ b/Misc/NEWS.d/next/Core and Builtins/2023-04-13-00-58-55.gh-issue-103492.P4k0Ay.rst
@@ -0,0 +1 @@
+Clarify :exc:`SyntaxWarning` with literal ``is`` comparison by specifying which literal is problematic, since comparisons using ``is`` with e.g. None and bool literals are idiomatic.
diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-04-14-22-35-23.gh-issue-101517.5EqM-S.rst b/Misc/NEWS.d/next/Core and Builtins/2023-04-14-22-35-23.gh-issue-101517.5EqM-S.rst
new file mode 100644
index 00000000000000..730c6cd40d7235
--- /dev/null
+++ b/Misc/NEWS.d/next/Core and Builtins/2023-04-14-22-35-23.gh-issue-101517.5EqM-S.rst
@@ -0,0 +1 @@
+Fix bug in line numbers of instructions emitted for :keyword:`except* `.
diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-04-16-14-38-39.gh-issue-100530.OR6-sn.rst b/Misc/NEWS.d/next/Core and Builtins/2023-04-16-14-38-39.gh-issue-100530.OR6-sn.rst
new file mode 100644
index 00000000000000..5b1bcc4a680fc3
--- /dev/null
+++ b/Misc/NEWS.d/next/Core and Builtins/2023-04-16-14-38-39.gh-issue-100530.OR6-sn.rst
@@ -0,0 +1 @@
+Clarify the error message raised when the called part of a class pattern isn't actually a class.
diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-04-17-16-00-32.gh-issue-102856.UunJ7y.rst b/Misc/NEWS.d/next/Core and Builtins/2023-04-17-16-00-32.gh-issue-102856.UunJ7y.rst
new file mode 100644
index 00000000000000..35eceb83816bcb
--- /dev/null
+++ b/Misc/NEWS.d/next/Core and Builtins/2023-04-17-16-00-32.gh-issue-102856.UunJ7y.rst
@@ -0,0 +1 @@
+Implement the required C tokenizer changes for PEP 701. Patch by Pablo Galindo Salgado, Lysandros Nikolaou, Batuhan Taskaya, Marta Gómez Macías and sunmy2019.
diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-04-21-16-12-41.gh-issue-103590.7DHDOE.rst b/Misc/NEWS.d/next/Core and Builtins/2023-04-21-16-12-41.gh-issue-103590.7DHDOE.rst
new file mode 100644
index 00000000000000..af733a8207a2c1
--- /dev/null
+++ b/Misc/NEWS.d/next/Core and Builtins/2023-04-21-16-12-41.gh-issue-103590.7DHDOE.rst
@@ -0,0 +1 @@
+Do not wrap a single exception raised from a ``try-except*`` construct in an :exc:`ExceptionGroup`.
diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-04-21-17-03-14.gh-issue-102310.anLjDx.rst b/Misc/NEWS.d/next/Core and Builtins/2023-04-21-17-03-14.gh-issue-102310.anLjDx.rst
new file mode 100644
index 00000000000000..15cb6c64adbab1
--- /dev/null
+++ b/Misc/NEWS.d/next/Core and Builtins/2023-04-21-17-03-14.gh-issue-102310.anLjDx.rst
@@ -0,0 +1 @@
+Change the error range for invalid bytes literals.
diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-04-24-14-38-16.gh-issue-103793.kqoH6Q.rst b/Misc/NEWS.d/next/Core and Builtins/2023-04-24-14-38-16.gh-issue-103793.kqoH6Q.rst
new file mode 100644
index 00000000000000..c48348798e7142
--- /dev/null
+++ b/Misc/NEWS.d/next/Core and Builtins/2023-04-24-14-38-16.gh-issue-103793.kqoH6Q.rst
@@ -0,0 +1,3 @@
+Optimized asyncio Task creation by deferring expensive string formatting
+(task name generation) from Task creation to the first time ``get_name`` is
+called. This makes asyncio benchmarks up to 5% faster.
diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-04-24-21-47-38.gh-issue-103801.WaBanq.rst b/Misc/NEWS.d/next/Core and Builtins/2023-04-24-21-47-38.gh-issue-103801.WaBanq.rst
new file mode 100644
index 00000000000000..6f07d72fafdfc3
--- /dev/null
+++ b/Misc/NEWS.d/next/Core and Builtins/2023-04-24-21-47-38.gh-issue-103801.WaBanq.rst
@@ -0,0 +1 @@
+Adds three minor linting fixes to the wasm module that were caught by ruff.
diff --git a/Misc/NEWS.d/next/Documentation/2023-04-25-22-58-08.gh-issue-48241.l1Gxxh.rst b/Misc/NEWS.d/next/Documentation/2023-04-25-22-58-08.gh-issue-48241.l1Gxxh.rst
new file mode 100644
index 00000000000000..619505cf6ee5b8
--- /dev/null
+++ b/Misc/NEWS.d/next/Documentation/2023-04-25-22-58-08.gh-issue-48241.l1Gxxh.rst
@@ -0,0 +1 @@
+Clarifying documentation about the url parameter to urllib.request.urlopen and urllib.request.Request needing to be encoded properly.
diff --git a/Misc/NEWS.d/next/Documentation/2023-04-26-23-55-31.gh-issue-103629.-0reqn.rst b/Misc/NEWS.d/next/Documentation/2023-04-26-23-55-31.gh-issue-103629.-0reqn.rst
new file mode 100644
index 00000000000000..6dc0a1cb5a3e4f
--- /dev/null
+++ b/Misc/NEWS.d/next/Documentation/2023-04-26-23-55-31.gh-issue-103629.-0reqn.rst
@@ -0,0 +1,2 @@
+Mention the new way of typing ``**kwargs`` with ``Unpack`` and ``TypedDict``
+introduced in :pep:`692`.
diff --git a/Misc/NEWS.d/next/Library/2020-02-25-00-43-22.bpo-39744.hgK689.rst b/Misc/NEWS.d/next/Library/2020-02-25-00-43-22.bpo-39744.hgK689.rst
new file mode 100644
index 00000000000000..6e690f996569a4
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2020-02-25-00-43-22.bpo-39744.hgK689.rst
@@ -0,0 +1 @@
+Make :func:`asyncio.subprocess.Process.communicate` close the subprocess's stdin even when called with ``input=None``.
diff --git a/Misc/NEWS.d/next/Library/2022-07-03-23-13-28.gh-issue-94518.511Tbh.rst b/Misc/NEWS.d/next/Library/2022-07-03-23-13-28.gh-issue-94518.511Tbh.rst
new file mode 100644
index 00000000000000..7719b74b8e5ef1
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2022-07-03-23-13-28.gh-issue-94518.511Tbh.rst
@@ -0,0 +1 @@
+Convert private :meth:`_posixsubprocess.fork_exec` to use Argument Clinic.
diff --git a/Misc/NEWS.d/next/Library/2022-07-06-11-10-37.gh-issue-51574.sveUeD.rst b/Misc/NEWS.d/next/Library/2022-07-06-11-10-37.gh-issue-51574.sveUeD.rst
new file mode 100644
index 00000000000000..50a3d6a4629182
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2022-07-06-11-10-37.gh-issue-51574.sveUeD.rst
@@ -0,0 +1,2 @@
+Make :func:`tempfile.mkdtemp` return absolute paths when its *dir*
+parameter is relative.
diff --git a/Misc/NEWS.d/next/Library/2022-10-21-17-20-57.gh-issue-98040.3btbmA.rst b/Misc/NEWS.d/next/Library/2022-10-21-17-20-57.gh-issue-98040.3btbmA.rst
new file mode 100644
index 00000000000000..f67bffcb0ddc6c
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2022-10-21-17-20-57.gh-issue-98040.3btbmA.rst
@@ -0,0 +1 @@
+Remove the long-deprecated ``imp`` module.
diff --git a/Misc/NEWS.d/next/Library/2022-11-10-16-26-47.gh-issue-99353.DQFjnt.rst b/Misc/NEWS.d/next/Library/2022-11-10-16-26-47.gh-issue-99353.DQFjnt.rst
new file mode 100644
index 00000000000000..1ad42d5c9aa53d
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2022-11-10-16-26-47.gh-issue-99353.DQFjnt.rst
@@ -0,0 +1,3 @@
+Respect the :class:`http.client.HTTPConnection` ``.debuglevel`` flag
+in :class:`urllib.request.AbstractHTTPHandler` when its constructor
+parameter ``debuglevel`` is not set. And do the same for ``*HTTPS*``.
diff --git a/Misc/NEWS.d/next/Library/2023-01-14-17-54-56.gh-issue-95299.vUhpKz.rst b/Misc/NEWS.d/next/Library/2023-01-14-17-54-56.gh-issue-95299.vUhpKz.rst
new file mode 100644
index 00000000000000..29c30848e09a83
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2023-01-14-17-54-56.gh-issue-95299.vUhpKz.rst
@@ -0,0 +1 @@
+Remove the bundled setuptools wheel from ``ensurepip``, and stop installing setuptools in environments created by ``venv``.
diff --git a/Misc/NEWS.d/next/Library/2023-02-06-16-45-18.gh-issue-83861.mMbIU3.rst b/Misc/NEWS.d/next/Library/2023-02-06-16-45-18.gh-issue-83861.mMbIU3.rst
new file mode 100644
index 00000000000000..e85e7a4ff2e73a
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2023-02-06-16-45-18.gh-issue-83861.mMbIU3.rst
@@ -0,0 +1,4 @@
+Fix datetime.astimezone method return value when invoked on a naive datetime
+instance that represents local time falling in a timezone transition gap.
+PEP 495 requires that instances with fold=1 produce earlier times than those
+with fold=0 in this case.
diff --git a/Misc/NEWS.d/next/Library/2023-02-11-15-01-32.gh-issue-101688.kwXmfM.rst b/Misc/NEWS.d/next/Library/2023-02-11-15-01-32.gh-issue-101688.kwXmfM.rst
new file mode 100644
index 00000000000000..6df69463931494
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2023-02-11-15-01-32.gh-issue-101688.kwXmfM.rst
@@ -0,0 +1,2 @@
+Implement :func:`types.get_original_bases` to provide further introspection
+for types.
diff --git a/Misc/NEWS.d/next/Library/2023-02-17-21-14-40.gh-issue-78079.z3Szr6.rst b/Misc/NEWS.d/next/Library/2023-02-17-21-14-40.gh-issue-78079.z3Szr6.rst
new file mode 100644
index 00000000000000..bbb9ac3e3f8faa
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2023-02-17-21-14-40.gh-issue-78079.z3Szr6.rst
@@ -0,0 +1,3 @@
+Fix incorrect normalization of UNC device path roots, and partial UNC share
+path roots, in :class:`pathlib.PurePath`. Pathlib no longer appends a
+trailing slash to such paths.
diff --git a/Misc/NEWS.d/next/Library/2023-02-19-12-37-08.gh-issue-62432.GnBFIB.rst b/Misc/NEWS.d/next/Library/2023-02-19-12-37-08.gh-issue-62432.GnBFIB.rst
new file mode 100644
index 00000000000000..a8d66ea48c3278
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2023-02-19-12-37-08.gh-issue-62432.GnBFIB.rst
@@ -0,0 +1,3 @@
+The :mod:`unittest` runner will now exit with status code 5 if no tests
+were run. It is common for test runner misconfiguration to fail to find
+any tests, this should be an error.
diff --git a/Misc/NEWS.d/next/Library/2023-02-21-14-57-34.gh-issue-102114.uUDQzb.rst b/Misc/NEWS.d/next/Library/2023-02-21-14-57-34.gh-issue-102114.uUDQzb.rst
new file mode 100644
index 00000000000000..4140c9a96cd272
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2023-02-21-14-57-34.gh-issue-102114.uUDQzb.rst
@@ -0,0 +1 @@
+Functions in the :mod:`dis` module that accept a source code string as argument now print a more concise traceback when the string contains a syntax or indentation error.
diff --git a/Misc/NEWS.d/next/Library/2023-03-23-15-24-38.gh-issue-102953.YR4KaK.rst b/Misc/NEWS.d/next/Library/2023-03-23-15-24-38.gh-issue-102953.YR4KaK.rst
new file mode 100644
index 00000000000000..48a105a4a17b29
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2023-03-23-15-24-38.gh-issue-102953.YR4KaK.rst
@@ -0,0 +1,4 @@
+The extraction methods in :mod:`tarfile`, and :func:`shutil.unpack_archive`,
+have a new *filter* argument that allows limiting tar features that may be
+surprising or dangerous, such as creating files outside the destination
+directory. See :ref:`tarfile-extraction-filter` for details.
diff --git a/Misc/NEWS.d/next/Library/2023-03-28-09-13-31.gh-issue-103015.ETTfNf.rst b/Misc/NEWS.d/next/Library/2023-03-28-09-13-31.gh-issue-103015.ETTfNf.rst
new file mode 100644
index 00000000000000..dcac1a28ca5847
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2023-03-28-09-13-31.gh-issue-103015.ETTfNf.rst
@@ -0,0 +1,3 @@
+Add *entrypoint* keyword-only parameter to
+:meth:`sqlite3.Connection.load_extension`, for overriding the SQLite
+extension entry point. Patch by Erlend E. Aasland.
diff --git a/Misc/NEWS.d/next/Library/2023-04-06-04-35-59.gh-issue-103285.rCZ9-G.rst b/Misc/NEWS.d/next/Library/2023-04-06-04-35-59.gh-issue-103285.rCZ9-G.rst
new file mode 100644
index 00000000000000..62b4364c2b1665
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2023-04-06-04-35-59.gh-issue-103285.rCZ9-G.rst
@@ -0,0 +1 @@
+Improve performance of :func:`ast.get_source_segment`.
diff --git a/Misc/NEWS.d/next/Library/2023-04-06-16-55-51.gh-issue-102778.BWeAmE.rst b/Misc/NEWS.d/next/Library/2023-04-06-16-55-51.gh-issue-102778.BWeAmE.rst
new file mode 100644
index 00000000000000..64ae5b5b6d564b
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2023-04-06-16-55-51.gh-issue-102778.BWeAmE.rst
@@ -0,0 +1 @@
+Support ``sys.last_exc`` in :mod:`idlelib`.
diff --git a/Misc/NEWS.d/next/Library/2023-04-08-00-48-40.gh-issue-103092.5EFts0.rst b/Misc/NEWS.d/next/Library/2023-04-08-00-48-40.gh-issue-103092.5EFts0.rst
new file mode 100644
index 00000000000000..0f2108fee763d0
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2023-04-08-00-48-40.gh-issue-103092.5EFts0.rst
@@ -0,0 +1 @@
+Adapt the :mod:`winreg` extension module to :pep:`687`.
diff --git a/Misc/NEWS.d/next/Library/2023-04-11-21-38-39.gh-issue-103449.-nxmhb.rst b/Misc/NEWS.d/next/Library/2023-04-11-21-38-39.gh-issue-103449.-nxmhb.rst
new file mode 100644
index 00000000000000..0b2b47af1cbaab
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2023-04-11-21-38-39.gh-issue-103449.-nxmhb.rst
@@ -0,0 +1 @@
+Fix a bug in doc string generation in :func:`dataclasses.dataclass`.
diff --git a/Misc/NEWS.d/next/Library/2023-04-13-13-17-47.gh-issue-103489.ZSZgmu.rst b/Misc/NEWS.d/next/Library/2023-04-13-13-17-47.gh-issue-103489.ZSZgmu.rst
new file mode 100644
index 00000000000000..264564d018ceb4
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2023-04-13-13-17-47.gh-issue-103489.ZSZgmu.rst
@@ -0,0 +1,4 @@
+Add :meth:`~sqlite3.Connection.getconfig` and
+:meth:`~sqlite3.Connection.setconfig` to :class:`~sqlite3.Connection` to
+make configuration changes to a database connection. Patch by Erlend E.
+Aasland.
diff --git a/Misc/NEWS.d/next/Library/2023-04-15-11-21-38.gh-issue-103559.a9rYHG.rst b/Misc/NEWS.d/next/Library/2023-04-15-11-21-38.gh-issue-103559.a9rYHG.rst
new file mode 100644
index 00000000000000..2c9d67e2c4bf71
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2023-04-15-11-21-38.gh-issue-103559.a9rYHG.rst
@@ -0,0 +1 @@
+Update the bundled copy of pip to version 23.1.1.
diff --git a/Misc/NEWS.d/next/Library/2023-04-15-12-19-14.gh-issue-103556.TEf-2m.rst b/Misc/NEWS.d/next/Library/2023-04-15-12-19-14.gh-issue-103556.TEf-2m.rst
new file mode 100644
index 00000000000000..fe2267b7b79019
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2023-04-15-12-19-14.gh-issue-103556.TEf-2m.rst
@@ -0,0 +1,3 @@
+Now creating :class:`inspect.Signature` objects with positional-only
+parameter with a default followed by a positional-or-keyword parameter
+without one is impossible.
diff --git a/Misc/NEWS.d/next/Library/2023-04-16-18-29-04.gh-issue-103578.fly1wc.rst b/Misc/NEWS.d/next/Library/2023-04-16-18-29-04.gh-issue-103578.fly1wc.rst
new file mode 100644
index 00000000000000..69986c2a15b39e
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2023-04-16-18-29-04.gh-issue-103578.fly1wc.rst
@@ -0,0 +1 @@
+Fixed a bug where :mod:`pdb` crashes when reading source file with different encoding by replacing :func:`io.open` with :func:`io.open_code`. The new method would also call into the hook set by :func:`PyFile_SetOpenCodeHook`.
diff --git a/Misc/NEWS.d/next/Library/2023-04-16-19-48-21.gh-issue-103584.3mBTuM.rst b/Misc/NEWS.d/next/Library/2023-04-16-19-48-21.gh-issue-103584.3mBTuM.rst
new file mode 100644
index 00000000000000..6d7c93ade9cd94
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2023-04-16-19-48-21.gh-issue-103584.3mBTuM.rst
@@ -0,0 +1,12 @@
+Updated ``importlib.metadata`` with changes from ``importlib_metadata`` 5.2
+through 6.5.0, including: Support ``installed-files.txt`` for
+``Distribution.files`` when present. ``PackageMetadata`` now stipulates an
+additional ``get`` method allowing for easy querying of metadata keys that
+may not be present. ``packages_distributions`` now honors packages and
+modules with Python modules that not ``.py`` sources (e.g. ``.pyc``,
+``.so``). Expand protocol for ``PackageMetadata.get_all`` to match the
+upstream implementation of ``email.message.Message.get_all`` in
+python/typeshed#9620. Deprecated use of ``Distribution`` without defining
+abstract methods. Deprecated expectation that
+``PackageMetadata.__getitem__`` will return ``None`` for missing keys. In
+the future, it will raise a ``KeyError``.
diff --git a/Misc/NEWS.d/next/Library/2023-04-17-14-47-28.gh-issue-103596.ME1y3_.rst b/Misc/NEWS.d/next/Library/2023-04-17-14-47-28.gh-issue-103596.ME1y3_.rst
new file mode 100644
index 00000000000000..2fa27e60b58efe
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2023-04-17-14-47-28.gh-issue-103596.ME1y3_.rst
@@ -0,0 +1,2 @@
+Attributes/methods are no longer shadowed by same-named enum members,
+although they may be shadowed by enum.property's.
diff --git a/Misc/NEWS.d/next/Library/2023-04-21-10-25-39.gh-issue-103636.YK6NEa.rst b/Misc/NEWS.d/next/Library/2023-04-21-10-25-39.gh-issue-103636.YK6NEa.rst
new file mode 100644
index 00000000000000..b3b5085250f078
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2023-04-21-10-25-39.gh-issue-103636.YK6NEa.rst
@@ -0,0 +1 @@
+Added Enum for months and days in the calendar module.
diff --git a/Misc/NEWS.d/next/Library/2023-04-22-02-41-06.gh-issue-103673.oE7S_k.rst b/Misc/NEWS.d/next/Library/2023-04-22-02-41-06.gh-issue-103673.oE7S_k.rst
new file mode 100644
index 00000000000000..bd5317744ff140
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2023-04-22-02-41-06.gh-issue-103673.oE7S_k.rst
@@ -0,0 +1,2 @@
+:mod:`socketserver` gains ``ForkingUnixStreamServer`` and
+``ForkingUnixDatagramServer`` classes. Patch by Jay Berry.
diff --git a/Misc/NEWS.d/next/Library/2023-04-22-11-20-27.gh-issue-89415.YHk760.rst b/Misc/NEWS.d/next/Library/2023-04-22-11-20-27.gh-issue-89415.YHk760.rst
new file mode 100644
index 00000000000000..a5b99a2f1360f0
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2023-04-22-11-20-27.gh-issue-89415.YHk760.rst
@@ -0,0 +1,2 @@
+Add :mod:`socket` constants for source-specific multicast.
+Patch by Reese Hyde.
diff --git a/Misc/NEWS.d/next/Library/2023-04-22-22-37-39.gh-issue-103699.NizCjc.rst b/Misc/NEWS.d/next/Library/2023-04-22-22-37-39.gh-issue-103699.NizCjc.rst
new file mode 100644
index 00000000000000..60547a25a109bc
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2023-04-22-22-37-39.gh-issue-103699.NizCjc.rst
@@ -0,0 +1,2 @@
+Add ``__orig_bases__`` to non-generic TypedDicts, call-based TypedDicts, and
+call-based NamedTuples. Other TypedDicts and NamedTuples already had the attribute.
diff --git a/Misc/NEWS.d/next/Library/2023-04-23-15-39-17.gh-issue-81403.zVz9Td.rst b/Misc/NEWS.d/next/Library/2023-04-23-15-39-17.gh-issue-81403.zVz9Td.rst
new file mode 100644
index 00000000000000..6adb71f7677229
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2023-04-23-15-39-17.gh-issue-81403.zVz9Td.rst
@@ -0,0 +1,3 @@
+:class:`urllib.request.CacheFTPHandler` no longer raises :class:`URLError`
+if a cached FTP instance is reused. ftplib's endtransfer method calls
+voidresp to drain the connection to handle FTP instance reuse properly.
diff --git a/Misc/NEWS.d/next/Library/2023-04-24-00-34-23.gh-issue-103685.U14jBM.rst b/Misc/NEWS.d/next/Library/2023-04-24-00-34-23.gh-issue-103685.U14jBM.rst
new file mode 100644
index 00000000000000..31df04790721a8
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2023-04-24-00-34-23.gh-issue-103685.U14jBM.rst
@@ -0,0 +1 @@
+Prepare :meth:`tkinter.Menu.index` for Tk 8.7 so that it does not raise ``TclError: expected integer but got ""`` when it should return ``None``.
diff --git a/Misc/NEWS.d/next/Library/2023-04-24-16-00-28.gh-issue-90750.da0Xi8.rst b/Misc/NEWS.d/next/Library/2023-04-24-16-00-28.gh-issue-90750.da0Xi8.rst
new file mode 100644
index 00000000000000..99e10f140f5049
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2023-04-24-16-00-28.gh-issue-90750.da0Xi8.rst
@@ -0,0 +1,3 @@
+Use :meth:`datetime.datetime.fromisocalendar` in the implementation of
+:meth:`datetime.datetime.strptime`, which should now accept only valid ISO
+dates. (Patch by Paul Ganssle)
diff --git a/Misc/NEWS.d/next/Library/2023-04-24-23-07-56.gh-issue-103791.bBPWdS.rst b/Misc/NEWS.d/next/Library/2023-04-24-23-07-56.gh-issue-103791.bBPWdS.rst
new file mode 100644
index 00000000000000..f00384cde9706e
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2023-04-24-23-07-56.gh-issue-103791.bBPWdS.rst
@@ -0,0 +1,3 @@
+:class:`contextlib.suppress` now supports suppressing exceptions raised as
+part of an :exc:`ExceptionGroup`. If other exceptions exist on the group, they
+are re-raised in a group that does not contain the suppressed exceptions.
diff --git a/Misc/NEWS.d/next/Library/2023-04-25-17-03-18.gh-issue-103857.Mr2Cak.rst b/Misc/NEWS.d/next/Library/2023-04-25-17-03-18.gh-issue-103857.Mr2Cak.rst
new file mode 100644
index 00000000000000..3bd370dabf4ed5
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2023-04-25-17-03-18.gh-issue-103857.Mr2Cak.rst
@@ -0,0 +1,2 @@
+Deprecated :meth:`datetime.datetime.utcnow` and
+:meth:`datetime.datetime.utcfromtimestamp`. (Patch by Paul Ganssle)
diff --git a/Misc/NEWS.d/next/Library/2023-04-25-22-06-00.gh-issue-74940.TOacQ9.rst b/Misc/NEWS.d/next/Library/2023-04-25-22-06-00.gh-issue-74940.TOacQ9.rst
new file mode 100644
index 00000000000000..c37d795f3eb33d
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2023-04-25-22-06-00.gh-issue-74940.TOacQ9.rst
@@ -0,0 +1,2 @@
+The C.UTF-8 locale is no longer converted to en_US.UTF-8, enabling the use
+of UTF-8 encoding on systems which have no locales installed.
diff --git a/Misc/NEWS.d/next/Library/2023-04-25-22-59-06.gh-issue-99944.pst8iT.rst b/Misc/NEWS.d/next/Library/2023-04-25-22-59-06.gh-issue-99944.pst8iT.rst
new file mode 100644
index 00000000000000..80238a65e32a41
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2023-04-25-22-59-06.gh-issue-99944.pst8iT.rst
@@ -0,0 +1 @@
+Make :mod:`dis` display the value of oparg of :opcode:`KW_NAMES`.
diff --git a/Misc/NEWS.d/next/Library/2023-04-26-09-38-47.gh-issue-103872.8LBsDz.rst b/Misc/NEWS.d/next/Library/2023-04-26-09-38-47.gh-issue-103872.8LBsDz.rst
new file mode 100644
index 00000000000000..b840f9f5769f08
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2023-04-26-09-38-47.gh-issue-103872.8LBsDz.rst
@@ -0,0 +1 @@
+Update the bundled copy of pip to version 23.1.2.
diff --git a/Misc/NEWS.d/next/Library/2023-04-26-15-14-36.gh-issue-103583.iCMDFt.rst b/Misc/NEWS.d/next/Library/2023-04-26-15-14-36.gh-issue-103583.iCMDFt.rst
new file mode 100644
index 00000000000000..8c92ee40831619
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2023-04-26-15-14-36.gh-issue-103583.iCMDFt.rst
@@ -0,0 +1,2 @@
+Isolate :mod:`!_multibytecodec` and codecs extension modules. Patches by
+Erlend E. Aasland.
diff --git a/Misc/NEWS.d/next/Library/2023-04-26-18-12-13.gh-issue-103636.-KvCgO.rst b/Misc/NEWS.d/next/Library/2023-04-26-18-12-13.gh-issue-103636.-KvCgO.rst
new file mode 100644
index 00000000000000..a05a6f5cbcdb99
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2023-04-26-18-12-13.gh-issue-103636.-KvCgO.rst
@@ -0,0 +1 @@
+Module-level attributes ``January`` and ``February`` are deprecated from :mod:`calendar`.
diff --git a/Misc/NEWS.d/next/Library/2023-04-27-00-05-32.gh-issue-102628.X230E-.rst b/Misc/NEWS.d/next/Library/2023-04-27-00-05-32.gh-issue-102628.X230E-.rst
new file mode 100644
index 00000000000000..eaaca5b41ba5e2
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2023-04-27-00-05-32.gh-issue-102628.X230E-.rst
@@ -0,0 +1,2 @@
+Substitute CTRL-D with CTRL-Z in :mod:`sqlite3` CLI banner when running on
+Windows.
diff --git a/Misc/NEWS.d/next/Library/2023-04-27-20-03-08.gh-issue-103935.Uaf2M0.rst b/Misc/NEWS.d/next/Library/2023-04-27-20-03-08.gh-issue-103935.Uaf2M0.rst
new file mode 100644
index 00000000000000..71b2d87249c47b
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2023-04-27-20-03-08.gh-issue-103935.Uaf2M0.rst
@@ -0,0 +1 @@
+Use :func:`io.open_code` for files to be executed instead of raw :func:`open`
diff --git a/Misc/NEWS.d/next/Windows/2023-04-24-15-51-11.gh-issue-82814.GI3UkZ.rst b/Misc/NEWS.d/next/Windows/2023-04-24-15-51-11.gh-issue-82814.GI3UkZ.rst
new file mode 100644
index 00000000000000..5bd005ffacb800
--- /dev/null
+++ b/Misc/NEWS.d/next/Windows/2023-04-24-15-51-11.gh-issue-82814.GI3UkZ.rst
@@ -0,0 +1,3 @@
+Fix a potential ``[Errno 13] Permission denied`` when using :func:`shutil.copystat`
+within Windows Subsystem for Linux (WSL) on a mounted filesystem by adding
+``errno.EACCES`` to the list of ignored errors within the internal implementation.
diff --git a/Misc/NEWS.d/next/macOS/2023-04-24-18-37-48.gh-issue-60436.in-IyF.rst b/Misc/NEWS.d/next/macOS/2023-04-24-18-37-48.gh-issue-60436.in-IyF.rst
new file mode 100644
index 00000000000000..f274d3b898f15d
--- /dev/null
+++ b/Misc/NEWS.d/next/macOS/2023-04-24-18-37-48.gh-issue-60436.in-IyF.rst
@@ -0,0 +1 @@
+update curses textbox to additionally handle backspace using the ``curses.ascii.DEL`` key press.
diff --git a/Modules/_asynciomodule.c b/Modules/_asynciomodule.c
index 2476dca6f58ebf..82dbc087322aa9 100644
--- a/Modules/_asynciomodule.c
+++ b/Modules/_asynciomodule.c
@@ -2069,8 +2069,10 @@ _asyncio_Task___init___impl(TaskObj *self, PyObject *coro, PyObject *loop,
Py_XSETREF(self->task_coro, coro);
if (name == Py_None) {
- name = PyUnicode_FromFormat("Task-%" PRIu64,
- ++state->task_name_counter);
+ // optimization: defer task name formatting
+ // store the task counter as PyLong in the name
+ // for deferred formatting in get_name
+ name = PyLong_FromUnsignedLongLong(++state->task_name_counter);
} else if (!PyUnicode_CheckExact(name)) {
name = PyObject_Str(name);
} else {
@@ -2449,6 +2451,13 @@ _asyncio_Task_get_name_impl(TaskObj *self)
/*[clinic end generated code: output=0ecf1570c3b37a8f input=a4a6595d12f4f0f8]*/
{
if (self->task_name) {
+ if (PyLong_CheckExact(self->task_name)) {
+ PyObject *name = PyUnicode_FromFormat("Task-%S", self->task_name);
+ if (name == NULL) {
+ return NULL;
+ }
+ Py_SETREF(self->task_name, name);
+ }
return Py_NewRef(self->task_name);
}
diff --git a/Modules/_ctypes/_ctypes.c b/Modules/_ctypes/_ctypes.c
index 6f92ca08dd537b..c7ed6bd2229c79 100644
--- a/Modules/_ctypes/_ctypes.c
+++ b/Modules/_ctypes/_ctypes.c
@@ -126,6 +126,8 @@ bytes(cdata)
#include "pycore_long.h" // _PyLong_GetZero()
+ctypes_state global_state;
+
PyObject *PyExc_ArgError = NULL;
/* This dict maps ctypes types to POINTER types */
@@ -150,13 +152,32 @@ typedef struct {
PyObject *dict;
} DictRemoverObject;
+static int
+_DictRemover_traverse(DictRemoverObject *self, visitproc visit, void *arg)
+{
+ Py_VISIT(Py_TYPE(self));
+ Py_VISIT(self->key);
+ Py_VISIT(self->dict);
+ return 0;
+}
+
+static int
+_DictRemover_clear(DictRemoverObject *self)
+{
+ Py_CLEAR(self->key);
+ Py_CLEAR(self->dict);
+ return 0;
+}
+
static void
_DictRemover_dealloc(PyObject *myself)
{
+ PyTypeObject *tp = Py_TYPE(myself);
DictRemoverObject *self = (DictRemoverObject *)myself;
- Py_XDECREF(self->key);
- Py_XDECREF(self->dict);
- Py_TYPE(self)->tp_free(myself);
+ PyObject_GC_UnTrack(myself);
+ (void)_DictRemover_clear(self);
+ tp->tp_free(myself);
+ Py_DECREF(tp);
}
static PyObject *
@@ -173,47 +194,23 @@ _DictRemover_call(PyObject *myself, PyObject *args, PyObject *kw)
Py_RETURN_NONE;
}
-static PyTypeObject DictRemover_Type = {
- PyVarObject_HEAD_INIT(NULL, 0)
- "_ctypes.DictRemover", /* tp_name */
- sizeof(DictRemoverObject), /* tp_basicsize */
- 0, /* tp_itemsize */
- _DictRemover_dealloc, /* tp_dealloc */
- 0, /* tp_vectorcall_offset */
- 0, /* tp_getattr */
- 0, /* tp_setattr */
- 0, /* tp_as_async */
- 0, /* tp_repr */
- 0, /* tp_as_number */
- 0, /* tp_as_sequence */
- 0, /* tp_as_mapping */
- 0, /* tp_hash */
- _DictRemover_call, /* tp_call */
- 0, /* tp_str */
- 0, /* tp_getattro */
- 0, /* tp_setattro */
- 0, /* tp_as_buffer */
-/* XXX should participate in GC? */
- Py_TPFLAGS_DEFAULT, /* tp_flags */
- PyDoc_STR("deletes a key from a dictionary"), /* tp_doc */
- 0, /* tp_traverse */
- 0, /* tp_clear */
- 0, /* tp_richcompare */
- 0, /* tp_weaklistoffset */
- 0, /* tp_iter */
- 0, /* tp_iternext */
- 0, /* tp_methods */
- 0, /* tp_members */
- 0, /* tp_getset */
- 0, /* tp_base */
- 0, /* tp_dict */
- 0, /* tp_descr_get */
- 0, /* tp_descr_set */
- 0, /* tp_dictoffset */
- 0, /* tp_init */
- 0, /* tp_alloc */
- 0, /* tp_new */
- 0, /* tp_free */
+PyDoc_STRVAR(dictremover_doc, "deletes a key from a dictionary");
+
+static PyType_Slot dictremover_slots[] = {
+ {Py_tp_dealloc, _DictRemover_dealloc},
+ {Py_tp_traverse, _DictRemover_traverse},
+ {Py_tp_clear, _DictRemover_clear},
+ {Py_tp_call, _DictRemover_call},
+ {Py_tp_doc, (void *)dictremover_doc},
+ {0, NULL},
+};
+
+static PyType_Spec dictremover_spec = {
+ .name = "_ctypes.DictRemover",
+ .basicsize = sizeof(DictRemoverObject),
+ .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC |
+ Py_TPFLAGS_IMMUTABLETYPE),
+ .slots = dictremover_slots,
};
int
@@ -224,7 +221,8 @@ PyDict_SetItemProxy(PyObject *dict, PyObject *key, PyObject *item)
PyObject *proxy;
int result;
- obj = _PyObject_CallNoArgs((PyObject *)&DictRemover_Type);
+ ctypes_state *st = GLOBAL_STATE();
+ obj = _PyObject_CallNoArgs((PyObject *)st->DictRemover_Type);
if (obj == NULL)
return -1;
@@ -415,23 +413,45 @@ typedef struct {
PyObject *keep; // If set, a reference to the original CDataObject.
} StructParamObject;
+static int
+StructParam_traverse(StructParamObject *self, visitproc visit, void *arg)
+{
+ Py_VISIT(Py_TYPE(self));
+ return 0;
+}
+
+static int
+StructParam_clear(StructParamObject *self)
+{
+ Py_CLEAR(self->keep);
+ return 0;
+}
static void
StructParam_dealloc(PyObject *myself)
{
StructParamObject *self = (StructParamObject *)myself;
- Py_XDECREF(self->keep);
+ PyTypeObject *tp = Py_TYPE(self);
+ PyObject_GC_UnTrack(myself);
+ (void)StructParam_clear(self);
PyMem_Free(self->ptr);
- Py_TYPE(self)->tp_free(myself);
+ tp->tp_free(myself);
+ Py_DECREF(tp);
}
+static PyType_Slot structparam_slots[] = {
+ {Py_tp_traverse, StructParam_traverse},
+ {Py_tp_clear, StructParam_clear},
+ {Py_tp_dealloc, StructParam_dealloc},
+ {0, NULL},
+};
-static PyTypeObject StructParam_Type = {
- PyVarObject_HEAD_INIT(NULL, 0)
- .tp_name = "_ctypes.StructParam_Type",
- .tp_basicsize = sizeof(StructParamObject),
- .tp_dealloc = StructParam_dealloc,
- .tp_flags = Py_TPFLAGS_DEFAULT,
+static PyType_Spec structparam_spec = {
+ .name = "_ctypes.StructParam_Type",
+ .basicsize = sizeof(StructParamObject),
+ .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_IMMUTABLETYPE |
+ Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_DISALLOW_INSTANTIATION),
+ .slots = structparam_slots,
};
@@ -460,7 +480,9 @@ StructUnionType_paramfunc(CDataObject *self)
/* Create a Python object which calls PyMem_Free(ptr) in
its deallocator. The object will be destroyed
at _ctypes_callproc() cleanup. */
- obj = (&StructParam_Type)->tp_alloc(&StructParam_Type, 0);
+ ctypes_state *st = GLOBAL_STATE();
+ PyTypeObject *tp = st->StructParam_Type;
+ obj = tp->tp_alloc(tp, 0);
if (obj == NULL) {
PyMem_Free(ptr);
return NULL;
@@ -800,7 +822,8 @@ CDataType_from_param(PyObject *type, PyObject *value)
if (res) {
return Py_NewRef(value);
}
- if (PyCArg_CheckExact(value)) {
+ ctypes_state *st = GLOBAL_STATE();
+ if (PyCArg_CheckExact(st, value)) {
PyCArgObject *p = (PyCArgObject *)value;
PyObject *ob = p->obj;
const char *ob_name;
@@ -1683,7 +1706,8 @@ c_wchar_p_from_param(PyObject *type, PyObject *value)
return Py_NewRef(value);
}
}
- if (PyCArg_CheckExact(value)) {
+ ctypes_state *st = GLOBAL_STATE();
+ if (PyCArg_CheckExact(st, value)) {
/* byref(c_char(...)) */
PyCArgObject *a = (PyCArgObject *)value;
StgDictObject *dict = PyObject_stgdict(a->obj);
@@ -1746,7 +1770,8 @@ c_char_p_from_param(PyObject *type, PyObject *value)
return Py_NewRef(value);
}
}
- if (PyCArg_CheckExact(value)) {
+ ctypes_state *st = GLOBAL_STATE();
+ if (PyCArg_CheckExact(st, value)) {
/* byref(c_char(...)) */
PyCArgObject *a = (PyCArgObject *)value;
StgDictObject *dict = PyObject_stgdict(a->obj);
@@ -1847,7 +1872,8 @@ c_void_p_from_param(PyObject *type, PyObject *value)
return Py_NewRef(value);
}
/* byref(...) */
- if (PyCArg_CheckExact(value)) {
+ ctypes_state *st = GLOBAL_STATE();
+ if (PyCArg_CheckExact(st, value)) {
/* byref(c_xxx()) */
PyCArgObject *a = (PyCArgObject *)value;
if (a->tag == 'P') {
@@ -5635,12 +5661,22 @@ _ctypes_add_types(PyObject *mod)
} \
} while (0)
+#define CREATE_TYPE(MOD, TP, SPEC) do { \
+ PyObject *type = PyType_FromMetaclass(NULL, MOD, SPEC, NULL); \
+ if (type == NULL) { \
+ return -1; \
+ } \
+ TP = (PyTypeObject *)type; \
+} while (0)
+
+ ctypes_state *st = GLOBAL_STATE();
+
/* Note:
ob_type is the metatype (the 'type'), defaults to PyType_Type,
tp_base is the base type, defaults to 'object' aka PyBaseObject_Type.
*/
- TYPE_READY(&PyCArg_Type);
- TYPE_READY(&PyCThunk_Type);
+ CREATE_TYPE(mod, st->PyCArg_Type, &carg_spec);
+ CREATE_TYPE(mod, st->PyCThunk_Type, &cthunk_spec);
TYPE_READY(&PyCData_Type);
/* StgDict is derived from PyDict_Type */
TYPE_READY_BASE(&PyCStgDict_Type, &PyDict_Type);
@@ -5673,17 +5709,15 @@ _ctypes_add_types(PyObject *mod)
* Simple classes
*/
- /* PyCField_Type is derived from PyBaseObject_Type */
- TYPE_READY(&PyCField_Type);
+ CREATE_TYPE(mod, st->PyCField_Type, &cfield_spec);
/*************************************************
*
* Other stuff
*/
- DictRemover_Type.tp_new = PyType_GenericNew;
- TYPE_READY(&DictRemover_Type);
- TYPE_READY(&StructParam_Type);
+ CREATE_TYPE(mod, st->DictRemover_Type, &dictremover_spec);
+ CREATE_TYPE(mod, st->StructParam_Type, &structparam_spec);
#ifdef MS_WIN32
TYPE_READY_BASE(&PyComError_Type, (PyTypeObject*)PyExc_Exception);
@@ -5692,6 +5726,7 @@ _ctypes_add_types(PyObject *mod)
#undef TYPE_READY
#undef TYPE_READY_BASE
#undef MOD_ADD_TYPE
+#undef CREATE_TYPE
return 0;
}
diff --git a/Modules/_ctypes/callbacks.c b/Modules/_ctypes/callbacks.c
index bc8750091f65f3..8e694ba852c1d4 100644
--- a/Modules/_ctypes/callbacks.c
+++ b/Modules/_ctypes/callbacks.c
@@ -28,23 +28,11 @@
/**************************************************************/
-static void
-CThunkObject_dealloc(PyObject *myself)
-{
- CThunkObject *self = (CThunkObject *)myself;
- PyObject_GC_UnTrack(self);
- Py_XDECREF(self->converters);
- Py_XDECREF(self->callable);
- Py_XDECREF(self->restype);
- if (self->pcl_write)
- Py_ffi_closure_free(self->pcl_write);
- PyObject_GC_Del(self);
-}
-
static int
CThunkObject_traverse(PyObject *myself, visitproc visit, void *arg)
{
CThunkObject *self = (CThunkObject *)myself;
+ Py_VISIT(Py_TYPE(self));
Py_VISIT(self->converters);
Py_VISIT(self->callable);
Py_VISIT(self->restype);
@@ -61,36 +49,35 @@ CThunkObject_clear(PyObject *myself)
return 0;
}
-PyTypeObject PyCThunk_Type = {
- PyVarObject_HEAD_INIT(NULL, 0)
- "_ctypes.CThunkObject",
- sizeof(CThunkObject), /* tp_basicsize */
- sizeof(ffi_type), /* tp_itemsize */
- CThunkObject_dealloc, /* tp_dealloc */
- 0, /* tp_vectorcall_offset */
- 0, /* tp_getattr */
- 0, /* tp_setattr */
- 0, /* tp_as_async */
- 0, /* tp_repr */
- 0, /* tp_as_number */
- 0, /* tp_as_sequence */
- 0, /* tp_as_mapping */
- 0, /* tp_hash */
- 0, /* tp_call */
- 0, /* tp_str */
- 0, /* tp_getattro */
- 0, /* tp_setattro */
- 0, /* tp_as_buffer */
- Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, /* tp_flags */
- PyDoc_STR("CThunkObject"), /* tp_doc */
- CThunkObject_traverse, /* tp_traverse */
- CThunkObject_clear, /* tp_clear */
- 0, /* tp_richcompare */
- 0, /* tp_weaklistoffset */
- 0, /* tp_iter */
- 0, /* tp_iternext */
- 0, /* tp_methods */
- 0, /* tp_members */
+static void
+CThunkObject_dealloc(PyObject *myself)
+{
+ CThunkObject *self = (CThunkObject *)myself;
+ PyTypeObject *tp = Py_TYPE(myself);
+ PyObject_GC_UnTrack(self);
+ (void)CThunkObject_clear(myself);
+ if (self->pcl_write) {
+ Py_ffi_closure_free(self->pcl_write);
+ }
+ PyObject_GC_Del(self);
+ Py_DECREF(tp);
+}
+
+static PyType_Slot cthunk_slots[] = {
+ {Py_tp_doc, (void *)PyDoc_STR("CThunkObject")},
+ {Py_tp_dealloc, CThunkObject_dealloc},
+ {Py_tp_traverse, CThunkObject_traverse},
+ {Py_tp_clear, CThunkObject_clear},
+ {0, NULL},
+};
+
+PyType_Spec cthunk_spec = {
+ .name = "_ctypes.CThunkObject",
+ .basicsize = sizeof(CThunkObject),
+ .itemsize = sizeof(ffi_type),
+ .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC |
+ Py_TPFLAGS_IMMUTABLETYPE | Py_TPFLAGS_DISALLOW_INSTANTIATION),
+ .slots = cthunk_slots,
};
/**************************************************************/
@@ -320,7 +307,8 @@ static CThunkObject* CThunkObject_new(Py_ssize_t nargs)
CThunkObject *p;
Py_ssize_t i;
- p = PyObject_GC_NewVar(CThunkObject, &PyCThunk_Type, nargs);
+ ctypes_state *st = GLOBAL_STATE();
+ p = PyObject_GC_NewVar(CThunkObject, st->PyCThunk_Type, nargs);
if (p == NULL) {
return NULL;
}
@@ -357,7 +345,10 @@ CThunkObject *_ctypes_alloc_callback(PyObject *callable,
if (p == NULL)
return NULL;
- assert(CThunk_CheckExact((PyObject *)p));
+#ifdef Py_DEBUG
+ ctypes_state *st = GLOBAL_STATE();
+ assert(CThunk_CheckExact(st, (PyObject *)p));
+#endif
p->pcl_write = Py_ffi_closure_alloc(sizeof(ffi_closure), &p->pcl_exec);
if (p->pcl_write == NULL) {
diff --git a/Modules/_ctypes/callproc.c b/Modules/_ctypes/callproc.c
index 4438727332bc11..93bc784df5386f 100644
--- a/Modules/_ctypes/callproc.c
+++ b/Modules/_ctypes/callproc.c
@@ -469,21 +469,41 @@ PyCArgObject *
PyCArgObject_new(void)
{
PyCArgObject *p;
- p = PyObject_New(PyCArgObject, &PyCArg_Type);
+ ctypes_state *st = GLOBAL_STATE();
+ p = PyObject_GC_New(PyCArgObject, st->PyCArg_Type);
if (p == NULL)
return NULL;
p->pffi_type = NULL;
p->tag = '\0';
p->obj = NULL;
memset(&p->value, 0, sizeof(p->value));
+ PyObject_GC_Track(p);
return p;
}
+static int
+PyCArg_traverse(PyCArgObject *self, visitproc visit, void *arg)
+{
+ Py_VISIT(Py_TYPE(self));
+ Py_VISIT(self->obj);
+ return 0;
+}
+
+static int
+PyCArg_clear(PyCArgObject *self)
+{
+ Py_CLEAR(self->obj);
+ return 0;
+}
+
static void
PyCArg_dealloc(PyCArgObject *self)
{
- Py_XDECREF(self->obj);
- PyObject_Free(self);
+ PyTypeObject *tp = Py_TYPE(self);
+ PyObject_GC_UnTrack(self);
+ (void)PyCArg_clear(self);
+ tp->tp_free((PyObject *)self);
+ Py_DECREF(tp);
}
static int
@@ -567,36 +587,21 @@ static PyMemberDef PyCArgType_members[] = {
{ NULL },
};
-PyTypeObject PyCArg_Type = {
- PyVarObject_HEAD_INIT(NULL, 0)
- "CArgObject",
- sizeof(PyCArgObject),
- 0,
- (destructor)PyCArg_dealloc, /* tp_dealloc */
- 0, /* tp_vectorcall_offset */
- 0, /* tp_getattr */
- 0, /* tp_setattr */
- 0, /* tp_as_async */
- (reprfunc)PyCArg_repr, /* tp_repr */
- 0, /* tp_as_number */
- 0, /* tp_as_sequence */
- 0, /* tp_as_mapping */
- 0, /* tp_hash */
- 0, /* tp_call */
- 0, /* tp_str */
- 0, /* tp_getattro */
- 0, /* tp_setattro */
- 0, /* tp_as_buffer */
- Py_TPFLAGS_DEFAULT, /* tp_flags */
- 0, /* tp_doc */
- 0, /* tp_traverse */
- 0, /* tp_clear */
- 0, /* tp_richcompare */
- 0, /* tp_weaklistoffset */
- 0, /* tp_iter */
- 0, /* tp_iternext */
- 0, /* tp_methods */
- PyCArgType_members, /* tp_members */
+static PyType_Slot carg_slots[] = {
+ {Py_tp_dealloc, PyCArg_dealloc},
+ {Py_tp_traverse, PyCArg_traverse},
+ {Py_tp_clear, PyCArg_clear},
+ {Py_tp_repr, PyCArg_repr},
+ {Py_tp_members, PyCArgType_members},
+ {0, NULL},
+};
+
+PyType_Spec carg_spec = {
+ .name = "_ctypes.CArgObject",
+ .basicsize = sizeof(PyCArgObject),
+ .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC |
+ Py_TPFLAGS_IMMUTABLETYPE | Py_TPFLAGS_DISALLOW_INSTANTIATION),
+ .slots = carg_slots,
};
/****************************************************************/
@@ -669,7 +674,8 @@ static int ConvParam(PyObject *obj, Py_ssize_t index, struct argument *pa)
return 0;
}
- if (PyCArg_CheckExact(obj)) {
+ ctypes_state *st = GLOBAL_STATE();
+ if (PyCArg_CheckExact(st, obj)) {
PyCArgObject *carg = (PyCArgObject *)obj;
pa->ffi_type = carg->pffi_type;
pa->keep = Py_NewRef(obj);
diff --git a/Modules/_ctypes/cfield.c b/Modules/_ctypes/cfield.c
index 796a1bec966de1..128506a9eed920 100644
--- a/Modules/_ctypes/cfield.c
+++ b/Modules/_ctypes/cfield.c
@@ -61,7 +61,9 @@ PyCField_FromDesc(PyObject *desc, Py_ssize_t index,
#define CONT_BITFIELD 2
#define EXPAND_BITFIELD 3
- self = (CFieldObject *)PyCField_Type.tp_alloc((PyTypeObject *)&PyCField_Type, 0);
+ ctypes_state *st = GLOBAL_STATE();
+ PyTypeObject *tp = st->PyCField_Type;
+ self = (CFieldObject *)tp->tp_alloc(tp, 0);
if (self == NULL)
return NULL;
dict = PyType_stgdict(desc);
@@ -256,6 +258,7 @@ static PyGetSetDef PyCField_getset[] = {
static int
PyCField_traverse(CFieldObject *self, visitproc visit, void *arg)
{
+ Py_VISIT(Py_TYPE(self));
Py_VISIT(self->proto);
return 0;
}
@@ -270,9 +273,11 @@ PyCField_clear(CFieldObject *self)
static void
PyCField_dealloc(PyObject *self)
{
+ PyTypeObject *tp = Py_TYPE(self);
PyObject_GC_UnTrack(self);
- PyCField_clear((CFieldObject *)self);
+ (void)PyCField_clear((CFieldObject *)self);
Py_TYPE(self)->tp_free((PyObject *)self);
+ Py_DECREF(tp);
}
static PyObject *
@@ -296,46 +301,24 @@ PyCField_repr(CFieldObject *self)
return result;
}
-PyTypeObject PyCField_Type = {
- PyVarObject_HEAD_INIT(NULL, 0)
- "_ctypes.CField", /* tp_name */
- sizeof(CFieldObject), /* tp_basicsize */
- 0, /* tp_itemsize */
- PyCField_dealloc, /* tp_dealloc */
- 0, /* tp_vectorcall_offset */
- 0, /* tp_getattr */
- 0, /* tp_setattr */
- 0, /* tp_as_async */
- (reprfunc)PyCField_repr, /* tp_repr */
- 0, /* tp_as_number */
- 0, /* tp_as_sequence */
- 0, /* tp_as_mapping */
- 0, /* tp_hash */
- 0, /* tp_call */
- 0, /* tp_str */
- 0, /* tp_getattro */
- 0, /* tp_setattro */
- 0, /* tp_as_buffer */
- Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, /* tp_flags */
- PyDoc_STR("Structure/Union member"), /* tp_doc */
- (traverseproc)PyCField_traverse, /* tp_traverse */
- (inquiry)PyCField_clear, /* tp_clear */
- 0, /* tp_richcompare */
- 0, /* tp_weaklistoffset */
- 0, /* tp_iter */
- 0, /* tp_iternext */
- 0, /* tp_methods */
- 0, /* tp_members */
- PyCField_getset, /* tp_getset */
- 0, /* tp_base */
- 0, /* tp_dict */
- (descrgetfunc)PyCField_get, /* tp_descr_get */
- (descrsetfunc)PyCField_set, /* tp_descr_set */
- 0, /* tp_dictoffset */
- 0, /* tp_init */
- 0, /* tp_alloc */
- 0, /* tp_new */
- 0, /* tp_free */
+static PyType_Slot cfield_slots[] = {
+ {Py_tp_dealloc, PyCField_dealloc},
+ {Py_tp_repr, PyCField_repr},
+ {Py_tp_doc, (void *)PyDoc_STR("Structure/Union member")},
+ {Py_tp_traverse, PyCField_traverse},
+ {Py_tp_clear, PyCField_clear},
+ {Py_tp_getset, PyCField_getset},
+ {Py_tp_descr_get, PyCField_get},
+ {Py_tp_descr_set, PyCField_set},
+ {0, NULL},
+};
+
+PyType_Spec cfield_spec = {
+ .name = "_ctypes.CField",
+ .basicsize = sizeof(CFieldObject),
+ .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC |
+ Py_TPFLAGS_IMMUTABLETYPE | Py_TPFLAGS_DISALLOW_INSTANTIATION),
+ .slots = cfield_slots,
};
diff --git a/Modules/_ctypes/ctypes.h b/Modules/_ctypes/ctypes.h
index a7029b6e6da2b8..252d9da7dbb56d 100644
--- a/Modules/_ctypes/ctypes.h
+++ b/Modules/_ctypes/ctypes.h
@@ -32,6 +32,22 @@
#endif
#endif
+typedef struct {
+ PyTypeObject *DictRemover_Type;
+ PyTypeObject *PyCArg_Type;
+ PyTypeObject *PyCField_Type;
+ PyTypeObject *PyCThunk_Type;
+ PyTypeObject *StructParam_Type;
+} ctypes_state;
+
+extern ctypes_state global_state;
+
+#define GLOBAL_STATE() (&global_state)
+
+extern PyType_Spec carg_spec;
+extern PyType_Spec cfield_spec;
+extern PyType_Spec cthunk_spec;
+
typedef struct tagPyCArgObject PyCArgObject;
typedef struct tagCDataObject CDataObject;
typedef PyObject *(* GETFUNC)(void *, Py_ssize_t size);
@@ -88,8 +104,7 @@ typedef struct {
ffi_type *ffi_restype;
ffi_type *atypes[1];
} CThunkObject;
-extern PyTypeObject PyCThunk_Type;
-#define CThunk_CheckExact(v) Py_IS_TYPE(v, &PyCThunk_Type)
+#define CThunk_CheckExact(st, v) Py_IS_TYPE(v, st->PyCThunk_Type)
typedef struct {
/* First part identical to tagCDataObject */
@@ -141,7 +156,6 @@ extern PyTypeObject PyCSimpleType_Type;
#define PyCSimpleTypeObject_CheckExact(v) Py_IS_TYPE(v, &PyCSimpleType_Type)
#define PyCSimpleTypeObject_Check(v) PyObject_TypeCheck(v, &PyCSimpleType_Type)
-extern PyTypeObject PyCField_Type;
extern struct fielddesc *_ctypes_get_fielddesc(const char *fmt);
@@ -334,8 +348,7 @@ struct tagPyCArgObject {
Py_ssize_t size; /* for the 'V' tag */
};
-extern PyTypeObject PyCArg_Type;
-#define PyCArg_CheckExact(v) Py_IS_TYPE(v, &PyCArg_Type)
+#define PyCArg_CheckExact(st, v) Py_IS_TYPE(v, st->PyCArg_Type)
extern PyCArgObject *PyCArgObject_new(void);
extern PyObject *
diff --git a/Modules/_ctypes/stgdict.c b/Modules/_ctypes/stgdict.c
index 83a52757d60979..b1b2bac1455e67 100644
--- a/Modules/_ctypes/stgdict.c
+++ b/Modules/_ctypes/stgdict.c
@@ -225,6 +225,8 @@ MakeFields(PyObject *type, CFieldObject *descr,
if (fieldlist == NULL)
return -1;
+ ctypes_state *st = GLOBAL_STATE();
+ PyTypeObject *cfield_tp = st->PyCField_Type;
for (i = 0; i < PySequence_Fast_GET_SIZE(fieldlist); ++i) {
PyObject *pair = PySequence_Fast_GET_ITEM(fieldlist, i); /* borrowed */
PyObject *fname, *ftype, *bits;
@@ -240,7 +242,7 @@ MakeFields(PyObject *type, CFieldObject *descr,
Py_DECREF(fieldlist);
return -1;
}
- if (!Py_IS_TYPE(fdescr, &PyCField_Type)) {
+ if (!Py_IS_TYPE(fdescr, cfield_tp)) {
PyErr_SetString(PyExc_TypeError, "unexpected type");
Py_DECREF(fdescr);
Py_DECREF(fieldlist);
@@ -257,13 +259,13 @@ MakeFields(PyObject *type, CFieldObject *descr,
}
continue;
}
- new_descr = (CFieldObject *)PyCField_Type.tp_alloc((PyTypeObject *)&PyCField_Type, 0);
+ new_descr = (CFieldObject *)cfield_tp->tp_alloc(cfield_tp, 0);
if (new_descr == NULL) {
Py_DECREF(fdescr);
Py_DECREF(fieldlist);
return -1;
}
- assert(Py_IS_TYPE(new_descr, &PyCField_Type));
+ assert(Py_IS_TYPE(new_descr, cfield_tp));
new_descr->size = fdescr->size;
new_descr->offset = fdescr->offset + offset;
new_descr->index = fdescr->index + index;
@@ -304,6 +306,8 @@ MakeAnonFields(PyObject *type)
if (anon_names == NULL)
return -1;
+ ctypes_state *st = GLOBAL_STATE();
+ PyTypeObject *cfield_tp = st->PyCField_Type;
for (i = 0; i < PySequence_Fast_GET_SIZE(anon_names); ++i) {
PyObject *fname = PySequence_Fast_GET_ITEM(anon_names, i); /* borrowed */
CFieldObject *descr = (CFieldObject *)PyObject_GetAttr(type, fname);
@@ -311,7 +315,7 @@ MakeAnonFields(PyObject *type)
Py_DECREF(anon_names);
return -1;
}
- if (!Py_IS_TYPE(descr, &PyCField_Type)) {
+ if (!Py_IS_TYPE(descr, cfield_tp)) {
PyErr_Format(PyExc_AttributeError,
"'%U' is specified in _anonymous_ but not in "
"_fields_",
diff --git a/Modules/_datetimemodule.c b/Modules/_datetimemodule.c
index eda8c5610ba659..8f86fc91966205 100644
--- a/Modules/_datetimemodule.c
+++ b/Modules/_datetimemodule.c
@@ -5144,6 +5144,13 @@ datetime_datetime_now_impl(PyTypeObject *type, PyObject *tz)
static PyObject *
datetime_utcnow(PyObject *cls, PyObject *dummy)
{
+ if (PyErr_WarnEx(PyExc_DeprecationWarning,
+ "datetime.utcnow() is deprecated and scheduled for removal in a "
+ "future version. Use timezone-aware objects to represent datetimes "
+ "in UTC: datetime.now(datetime.UTC).", 2))
+ {
+ return NULL;
+ }
return datetime_best_possible(cls, _PyTime_gmtime, Py_None);
}
@@ -5180,6 +5187,13 @@ datetime_fromtimestamp(PyObject *cls, PyObject *args, PyObject *kw)
static PyObject *
datetime_utcfromtimestamp(PyObject *cls, PyObject *args)
{
+ if (PyErr_WarnEx(PyExc_DeprecationWarning,
+ "datetime.utcfromtimestamp() is deprecated and scheduled for removal "
+ "in a future version. Use timezone-aware objects to represent "
+ "datetimes in UTC: datetime.now(datetime.UTC).", 2))
+ {
+ return NULL;
+ }
PyObject *timestamp;
PyObject *result = NULL;
@@ -6153,17 +6167,31 @@ local_to_seconds(int year, int month, int day,
static PyObject *
local_timezone_from_local(PyDateTime_DateTime *local_dt)
{
- long long seconds;
+ long long seconds, seconds2;
time_t timestamp;
+ int fold = DATE_GET_FOLD(local_dt);
seconds = local_to_seconds(GET_YEAR(local_dt),
GET_MONTH(local_dt),
GET_DAY(local_dt),
DATE_GET_HOUR(local_dt),
DATE_GET_MINUTE(local_dt),
DATE_GET_SECOND(local_dt),
- DATE_GET_FOLD(local_dt));
+ fold);
if (seconds == -1)
return NULL;
+ seconds2 = local_to_seconds(GET_YEAR(local_dt),
+ GET_MONTH(local_dt),
+ GET_DAY(local_dt),
+ DATE_GET_HOUR(local_dt),
+ DATE_GET_MINUTE(local_dt),
+ DATE_GET_SECOND(local_dt),
+ !fold);
+ if (seconds2 == -1)
+ return NULL;
+ /* Detect gap */
+ if (seconds2 != seconds && (seconds2 > seconds) == fold)
+ seconds = seconds2;
+
/* XXX: add bounds check */
timestamp = seconds - epoch;
return local_timezone_from_timestamp(timestamp);
diff --git a/Modules/_io/_iomodule.c b/Modules/_io/_iomodule.c
index 5644cc05c45800..a3bfbc9ac5a1b1 100644
--- a/Modules/_io/_iomodule.c
+++ b/Modules/_io/_iomodule.c
@@ -616,8 +616,9 @@ iomodule_clear(PyObject *mod) {
}
static void
-iomodule_free(PyObject *mod) {
- iomodule_clear(mod);
+iomodule_free(void *mod)
+{
+ (void)iomodule_clear((PyObject *)mod);
}
@@ -670,13 +671,11 @@ static PyTypeObject* static_types[] = {
PyStatus
_PyIO_InitTypes(PyInterpreterState *interp)
{
- if (!_Py_IsMainInterpreter(interp)) {
- return _PyStatus_OK();
- }
-
- // Set type base classes
#ifdef HAVE_WINDOWS_CONSOLE_IO
- PyWindowsConsoleIO_Type.tp_base = &PyRawIOBase_Type;
+ if (_Py_IsMainInterpreter(interp)) {
+ // Set type base classes
+ PyWindowsConsoleIO_Type.tp_base = &PyRawIOBase_Type;
+ }
#endif
for (size_t i=0; i < Py_ARRAY_LENGTH(static_types); i++) {
diff --git a/Modules/_posixsubprocess.c b/Modules/_posixsubprocess.c
index f3ff39215eab76..f5bce8cd7628ad 100644
--- a/Modules/_posixsubprocess.c
+++ b/Modules/_posixsubprocess.c
@@ -75,6 +75,28 @@
static struct PyModuleDef _posixsubprocessmodule;
+/*[clinic input]
+module _posixsubprocess
+[clinic start generated code]*/
+/*[clinic end generated code: output=da39a3ee5e6b4b0d input=c62211df27cf7334]*/
+
+/*[python input]
+class pid_t_converter(CConverter):
+ type = 'pid_t'
+ format_unit = '" _Py_PARSE_PID "'
+
+ def parse_arg(self, argname, displayname):
+ return """
+ {paramname} = PyLong_AsPid({argname});
+ if ({paramname} == -1 && PyErr_Occurred()) {{{{
+ goto exit;
+ }}}}
+ """.format(argname=argname, paramname=self.parser_name)
+[python start generated code]*/
+/*[python end generated code: output=da39a3ee5e6b4b0d input=5af1c116d56cbb5a]*/
+
+#include "clinic/_posixsubprocess.c.h"
+
/* Convert ASCII to a positive int, no libc call. no overflow. -1 on error. */
static int
_pos_int_from_ascii(const char *name)
@@ -744,7 +766,7 @@ do_fork_exec(char *const exec_array[],
assert(preexec_fn == Py_None);
pid = vfork();
- if (pid == -1) {
+ if (pid == (pid_t)-1) {
/* If vfork() fails, fall back to using fork(). When it isn't
* allowed in a process by the kernel, vfork can return -1
* with errno EINVAL. https://bugs.python.org/issue47151. */
@@ -784,44 +806,81 @@ do_fork_exec(char *const exec_array[],
return 0; /* Dead code to avoid a potential compiler warning. */
}
+/*[clinic input]
+_posixsubprocess.fork_exec as subprocess_fork_exec
+ args as process_args: object
+ executable_list: object
+ close_fds: bool
+ pass_fds as py_fds_to_keep: object(subclass_of='&PyTuple_Type')
+ cwd as cwd_obj: object
+ env as env_list: object
+ p2cread: int
+ p2cwrite: int
+ c2pread: int
+ c2pwrite: int
+ errread: int
+ errwrite: int
+ errpipe_read: int
+ errpipe_write: int
+ restore_signals: bool
+ call_setsid: bool
+ pgid_to_set: pid_t
+ gid as gid_object: object
+ extra_groups as extra_groups_packed: object
+ uid as uid_object: object
+ child_umask: int
+ preexec_fn: object
+ allow_vfork: bool
+ /
+
+Spawn a fresh new child process.
+
+Fork a child process, close parent file descriptors as appropriate in the
+child and duplicate the few that are needed before calling exec() in the
+child process.
+
+If close_fds is True, close file descriptors 3 and higher, except those listed
+in the sorted tuple pass_fds.
+
+The preexec_fn, if supplied, will be called immediately before closing file
+descriptors and exec.
+
+WARNING: preexec_fn is NOT SAFE if your application uses threads.
+ It may trigger infrequent, difficult to debug deadlocks.
+
+If an error occurs in the child process before the exec, it is
+serialized and written to the errpipe_write fd per subprocess.py.
+
+Returns: the child process's PID.
+
+Raises: Only on an error in the parent process.
+[clinic start generated code]*/
static PyObject *
-subprocess_fork_exec(PyObject *module, PyObject *args)
+subprocess_fork_exec_impl(PyObject *module, PyObject *process_args,
+ PyObject *executable_list, int close_fds,
+ PyObject *py_fds_to_keep, PyObject *cwd_obj,
+ PyObject *env_list, int p2cread, int p2cwrite,
+ int c2pread, int c2pwrite, int errread,
+ int errwrite, int errpipe_read, int errpipe_write,
+ int restore_signals, int call_setsid,
+ pid_t pgid_to_set, PyObject *gid_object,
+ PyObject *extra_groups_packed,
+ PyObject *uid_object, int child_umask,
+ PyObject *preexec_fn, int allow_vfork)
+/*[clinic end generated code: output=7ee4f6ee5cf22b5b input=51757287ef266ffa]*/
{
- PyObject *gc_module = NULL;
- PyObject *executable_list, *py_fds_to_keep;
- PyObject *env_list, *preexec_fn;
- PyObject *process_args, *converted_args = NULL, *fast_args = NULL;
+ PyObject *converted_args = NULL, *fast_args = NULL;
PyObject *preexec_fn_args_tuple = NULL;
- PyObject *extra_groups_packed;
- PyObject *uid_object, *gid_object;
- int p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite;
- int errpipe_read, errpipe_write, close_fds, restore_signals;
- int call_setsid;
- pid_t pgid_to_set = -1;
gid_t *extra_groups = NULL;
- int child_umask;
- PyObject *cwd_obj, *cwd_obj2 = NULL;
- const char *cwd;
+ PyObject *cwd_obj2 = NULL;
+ const char *cwd = NULL;
pid_t pid = -1;
int need_to_reenable_gc = 0;
- char *const *exec_array, *const *argv = NULL, *const *envp = NULL;
- Py_ssize_t arg_num, extra_group_size = 0;
+ char *const *argv = NULL, *const *envp = NULL;
+ Py_ssize_t extra_group_size = 0;
int need_after_fork = 0;
int saved_errno = 0;
- int allow_vfork;
-
- if (!PyArg_ParseTuple(
- args, "OOpO!OOiiiiiiiipp" _Py_PARSE_PID "OOOiOp:fork_exec",
- &process_args, &executable_list,
- &close_fds, &PyTuple_Type, &py_fds_to_keep,
- &cwd_obj, &env_list,
- &p2cread, &p2cwrite, &c2pread, &c2pwrite,
- &errread, &errwrite, &errpipe_read, &errpipe_write,
- &restore_signals, &call_setsid, &pgid_to_set,
- &gid_object, &extra_groups_packed, &uid_object, &child_umask,
- &preexec_fn, &allow_vfork))
- return NULL;
PyInterpreterState *interp = PyInterpreterState_Get();
if ((preexec_fn != Py_None) && (interp != PyInterpreterState_Main())) {
@@ -844,7 +903,7 @@ subprocess_fork_exec(PyObject *module, PyObject *args)
need_to_reenable_gc = PyGC_Disable();
}
- exec_array = _PySequence_BytesToCharpArray(executable_list);
+ char *const *exec_array = _PySequence_BytesToCharpArray(executable_list);
if (!exec_array)
goto cleanup;
@@ -862,7 +921,7 @@ subprocess_fork_exec(PyObject *module, PyObject *args)
converted_args = PyTuple_New(num_args);
if (converted_args == NULL)
goto cleanup;
- for (arg_num = 0; arg_num < num_args; ++arg_num) {
+ for (Py_ssize_t arg_num = 0; arg_num < num_args; ++arg_num) {
PyObject *borrowed_arg, *converted_arg;
if (PySequence_Fast_GET_SIZE(fast_args) != num_args) {
PyErr_SetString(PyExc_RuntimeError, "args changed during iteration");
@@ -891,8 +950,6 @@ subprocess_fork_exec(PyObject *module, PyObject *args)
if (PyUnicode_FSConverter(cwd_obj, &cwd_obj2) == 0)
goto cleanup;
cwd = PyBytes_AsString(cwd_obj2);
- } else {
- cwd = NULL;
}
if (extra_groups_packed != Py_None) {
@@ -1019,7 +1076,7 @@ subprocess_fork_exec(PyObject *module, PyObject *args)
py_fds_to_keep, preexec_fn, preexec_fn_args_tuple);
/* Parent (original) process */
- if (pid == -1) {
+ if (pid == (pid_t)-1) {
/* Capture errno for the exception. */
saved_errno = errno;
}
@@ -1068,47 +1125,17 @@ subprocess_fork_exec(PyObject *module, PyObject *args)
if (need_to_reenable_gc) {
PyGC_Enable();
}
- Py_XDECREF(gc_module);
return pid == -1 ? NULL : PyLong_FromPid(pid);
}
-
-PyDoc_STRVAR(subprocess_fork_exec_doc,
-"fork_exec(args, executable_list, close_fds, pass_fds, cwd, env,\n\
- p2cread, p2cwrite, c2pread, c2pwrite,\n\
- errread, errwrite, errpipe_read, errpipe_write,\n\
- restore_signals, call_setsid, pgid_to_set,\n\
- gid, extra_groups, uid,\n\
- preexec_fn)\n\
-\n\
-Forks a child process, closes parent file descriptors as appropriate in the\n\
-child and dups the few that are needed before calling exec() in the child\n\
-process.\n\
-\n\
-If close_fds is true, close file descriptors 3 and higher, except those listed\n\
-in the sorted tuple pass_fds.\n\
-\n\
-The preexec_fn, if supplied, will be called immediately before closing file\n\
-descriptors and exec.\n\
-WARNING: preexec_fn is NOT SAFE if your application uses threads.\n\
- It may trigger infrequent, difficult to debug deadlocks.\n\
-\n\
-If an error occurs in the child process before the exec, it is\n\
-serialized and written to the errpipe_write fd per subprocess.py.\n\
-\n\
-Returns: the child process's PID.\n\
-\n\
-Raises: Only on an error in the parent process.\n\
-");
-
/* module level code ********************************************************/
PyDoc_STRVAR(module_doc,
"A POSIX helper for the subprocess module.");
static PyMethodDef module_methods[] = {
- {"fork_exec", subprocess_fork_exec, METH_VARARGS, subprocess_fork_exec_doc},
+ SUBPROCESS_FORK_EXEC_METHODDEF
{NULL, NULL} /* sentinel */
};
diff --git a/Modules/_sqlite/clinic/connection.c.h b/Modules/_sqlite/clinic/connection.c.h
index 4c3fd1bd27411b..182754cca36d61 100644
--- a/Modules/_sqlite/clinic/connection.c.h
+++ b/Modules/_sqlite/clinic/connection.c.h
@@ -846,30 +846,63 @@ pysqlite_connection_enable_load_extension(pysqlite_Connection *self, PyObject *a
#if defined(PY_SQLITE_ENABLE_LOAD_EXTENSION)
PyDoc_STRVAR(pysqlite_connection_load_extension__doc__,
-"load_extension($self, name, /)\n"
+"load_extension($self, name, /, *, entrypoint=None)\n"
"--\n"
"\n"
"Load SQLite extension module.");
#define PYSQLITE_CONNECTION_LOAD_EXTENSION_METHODDEF \
- {"load_extension", (PyCFunction)pysqlite_connection_load_extension, METH_O, pysqlite_connection_load_extension__doc__},
+ {"load_extension", _PyCFunction_CAST(pysqlite_connection_load_extension), METH_FASTCALL|METH_KEYWORDS, pysqlite_connection_load_extension__doc__},
static PyObject *
pysqlite_connection_load_extension_impl(pysqlite_Connection *self,
- const char *extension_name);
+ const char *extension_name,
+ const char *entrypoint);
static PyObject *
-pysqlite_connection_load_extension(pysqlite_Connection *self, PyObject *arg)
+pysqlite_connection_load_extension(pysqlite_Connection *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
{
PyObject *return_value = NULL;
+ #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
+
+ #define NUM_KEYWORDS 1
+ static struct {
+ PyGC_Head _this_is_not_used;
+ PyObject_VAR_HEAD
+ PyObject *ob_item[NUM_KEYWORDS];
+ } _kwtuple = {
+ .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS)
+ .ob_item = { &_Py_ID(entrypoint), },
+ };
+ #undef NUM_KEYWORDS
+ #define KWTUPLE (&_kwtuple.ob_base.ob_base)
+
+ #else // !Py_BUILD_CORE
+ # define KWTUPLE NULL
+ #endif // !Py_BUILD_CORE
+
+ static const char * const _keywords[] = {"", "entrypoint", NULL};
+ static _PyArg_Parser _parser = {
+ .keywords = _keywords,
+ .fname = "load_extension",
+ .kwtuple = KWTUPLE,
+ };
+ #undef KWTUPLE
+ PyObject *argsbuf[2];
+ Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 1;
const char *extension_name;
+ const char *entrypoint = NULL;
- if (!PyUnicode_Check(arg)) {
- _PyArg_BadArgument("load_extension", "argument", "str", arg);
+ args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, argsbuf);
+ if (!args) {
+ goto exit;
+ }
+ if (!PyUnicode_Check(args[0])) {
+ _PyArg_BadArgument("load_extension", "argument 1", "str", args[0]);
goto exit;
}
Py_ssize_t extension_name_length;
- extension_name = PyUnicode_AsUTF8AndSize(arg, &extension_name_length);
+ extension_name = PyUnicode_AsUTF8AndSize(args[0], &extension_name_length);
if (extension_name == NULL) {
goto exit;
}
@@ -877,7 +910,29 @@ pysqlite_connection_load_extension(pysqlite_Connection *self, PyObject *arg)
PyErr_SetString(PyExc_ValueError, "embedded null character");
goto exit;
}
- return_value = pysqlite_connection_load_extension_impl(self, extension_name);
+ if (!noptargs) {
+ goto skip_optional_kwonly;
+ }
+ if (args[1] == Py_None) {
+ entrypoint = NULL;
+ }
+ else if (PyUnicode_Check(args[1])) {
+ Py_ssize_t entrypoint_length;
+ entrypoint = PyUnicode_AsUTF8AndSize(args[1], &entrypoint_length);
+ if (entrypoint == NULL) {
+ goto exit;
+ }
+ if (strlen(entrypoint) != (size_t)entrypoint_length) {
+ PyErr_SetString(PyExc_ValueError, "embedded null character");
+ goto exit;
+ }
+ }
+ else {
+ _PyArg_BadArgument("load_extension", "argument 'entrypoint'", "str or None", args[1]);
+ goto exit;
+ }
+skip_optional_kwonly:
+ return_value = pysqlite_connection_load_extension_impl(self, extension_name, entrypoint);
exit:
return return_value;
@@ -1513,6 +1568,85 @@ getlimit(pysqlite_Connection *self, PyObject *arg)
return return_value;
}
+PyDoc_STRVAR(setconfig__doc__,
+"setconfig($self, op, enable=True, /)\n"
+"--\n"
+"\n"
+"Set a boolean connection configuration option.\n"
+"\n"
+" op\n"
+" The configuration verb; one of the sqlite3.SQLITE_DBCONFIG codes.");
+
+#define SETCONFIG_METHODDEF \
+ {"setconfig", _PyCFunction_CAST(setconfig), METH_FASTCALL, setconfig__doc__},
+
+static PyObject *
+setconfig_impl(pysqlite_Connection *self, int op, int enable);
+
+static PyObject *
+setconfig(pysqlite_Connection *self, PyObject *const *args, Py_ssize_t nargs)
+{
+ PyObject *return_value = NULL;
+ int op;
+ int enable = 1;
+
+ if (!_PyArg_CheckPositional("setconfig", nargs, 1, 2)) {
+ goto exit;
+ }
+ op = _PyLong_AsInt(args[0]);
+ if (op == -1 && PyErr_Occurred()) {
+ goto exit;
+ }
+ if (nargs < 2) {
+ goto skip_optional;
+ }
+ enable = PyObject_IsTrue(args[1]);
+ if (enable < 0) {
+ goto exit;
+ }
+skip_optional:
+ return_value = setconfig_impl(self, op, enable);
+
+exit:
+ return return_value;
+}
+
+PyDoc_STRVAR(getconfig__doc__,
+"getconfig($self, op, /)\n"
+"--\n"
+"\n"
+"Query a boolean connection configuration option.\n"
+"\n"
+" op\n"
+" The configuration verb; one of the sqlite3.SQLITE_DBCONFIG codes.");
+
+#define GETCONFIG_METHODDEF \
+ {"getconfig", (PyCFunction)getconfig, METH_O, getconfig__doc__},
+
+static int
+getconfig_impl(pysqlite_Connection *self, int op);
+
+static PyObject *
+getconfig(pysqlite_Connection *self, PyObject *arg)
+{
+ PyObject *return_value = NULL;
+ int op;
+ int _return_value;
+
+ op = _PyLong_AsInt(arg);
+ if (op == -1 && PyErr_Occurred()) {
+ goto exit;
+ }
+ _return_value = getconfig_impl(self, op);
+ if ((_return_value == -1) && PyErr_Occurred()) {
+ goto exit;
+ }
+ return_value = PyBool_FromLong((long)_return_value);
+
+exit:
+ return return_value;
+}
+
#ifndef CREATE_WINDOW_FUNCTION_METHODDEF
#define CREATE_WINDOW_FUNCTION_METHODDEF
#endif /* !defined(CREATE_WINDOW_FUNCTION_METHODDEF) */
@@ -1532,4 +1666,4 @@ getlimit(pysqlite_Connection *self, PyObject *arg)
#ifndef DESERIALIZE_METHODDEF
#define DESERIALIZE_METHODDEF
#endif /* !defined(DESERIALIZE_METHODDEF) */
-/*[clinic end generated code: output=f10306e10427488b input=a9049054013a1b77]*/
+/*[clinic end generated code: output=8b03149c115ee6da input=a9049054013a1b77]*/
diff --git a/Modules/_sqlite/connection.c b/Modules/_sqlite/connection.c
index fb61ef82ef869b..aec3aa8bbf4ed8 100644
--- a/Modules/_sqlite/connection.c
+++ b/Modules/_sqlite/connection.c
@@ -30,6 +30,8 @@
#include "prepare_protocol.h"
#include "util.h"
+#include <stdbool.h>
+
#if SQLITE_VERSION_NUMBER >= 3014000
#define HAVE_TRACE_V2
#endif
@@ -1601,14 +1603,17 @@ _sqlite3.Connection.load_extension as pysqlite_connection_load_extension
name as extension_name: str
/
+ *
+ entrypoint: str(accept={str, NoneType}) = None
Load SQLite extension module.
[clinic start generated code]*/
static PyObject *
pysqlite_connection_load_extension_impl(pysqlite_Connection *self,
- const char *extension_name)
-/*[clinic end generated code: output=47eb1d7312bc97a7 input=edd507389d89d621]*/
+ const char *extension_name,
+ const char *entrypoint)
+/*[clinic end generated code: output=7e61a7add9de0286 input=c36b14ea702e04f5]*/
{
int rc;
char* errmsg;
@@ -1621,7 +1626,7 @@ pysqlite_connection_load_extension_impl(pysqlite_Connection *self,
return NULL;
}
- rc = sqlite3_load_extension(self->db, extension_name, 0, &errmsg);
+ rc = sqlite3_load_extension(self->db, extension_name, entrypoint, &errmsg);
if (rc != 0) {
PyErr_SetString(self->OperationalError, errmsg);
return NULL;
@@ -2340,6 +2345,119 @@ getlimit_impl(pysqlite_Connection *self, int category)
return setlimit_impl(self, category, -1);
}
+static inline bool
+is_int_config(const int op)
+{
+ switch (op) {
+ case SQLITE_DBCONFIG_ENABLE_FKEY:
+ case SQLITE_DBCONFIG_ENABLE_TRIGGER:
+#if SQLITE_VERSION_NUMBER >= 3012002
+ case SQLITE_DBCONFIG_ENABLE_FTS3_TOKENIZER:
+#endif
+#if SQLITE_VERSION_NUMBER >= 3013000
+ case SQLITE_DBCONFIG_ENABLE_LOAD_EXTENSION:
+#endif
+#if SQLITE_VERSION_NUMBER >= 3016000
+ case SQLITE_DBCONFIG_NO_CKPT_ON_CLOSE:
+#endif
+#if SQLITE_VERSION_NUMBER >= 3020000
+ case SQLITE_DBCONFIG_ENABLE_QPSG:
+#endif
+#if SQLITE_VERSION_NUMBER >= 3022000
+ case SQLITE_DBCONFIG_TRIGGER_EQP:
+#endif
+#if SQLITE_VERSION_NUMBER >= 3024000
+ case SQLITE_DBCONFIG_RESET_DATABASE:
+#endif
+#if SQLITE_VERSION_NUMBER >= 3026000
+ case SQLITE_DBCONFIG_DEFENSIVE:
+#endif
+#if SQLITE_VERSION_NUMBER >= 3028000
+ case SQLITE_DBCONFIG_WRITABLE_SCHEMA:
+#endif
+#if SQLITE_VERSION_NUMBER >= 3029000
+ case SQLITE_DBCONFIG_DQS_DDL:
+ case SQLITE_DBCONFIG_DQS_DML:
+ case SQLITE_DBCONFIG_LEGACY_ALTER_TABLE:
+#endif
+#if SQLITE_VERSION_NUMBER >= 3030000
+ case SQLITE_DBCONFIG_ENABLE_VIEW:
+#endif
+#if SQLITE_VERSION_NUMBER >= 3031000
+ case SQLITE_DBCONFIG_LEGACY_FILE_FORMAT:
+ case SQLITE_DBCONFIG_TRUSTED_SCHEMA:
+#endif
+ return true;
+ default:
+ return false;
+ }
+}
+
+/*[clinic input]
+_sqlite3.Connection.setconfig as setconfig
+
+ op: int
+ The configuration verb; one of the sqlite3.SQLITE_DBCONFIG codes.
+ enable: bool = True
+ /
+
+Set a boolean connection configuration option.
+[clinic start generated code]*/
+
+static PyObject *
+setconfig_impl(pysqlite_Connection *self, int op, int enable)
+/*[clinic end generated code: output=c60b13e618aff873 input=a10f1539c2d7da6b]*/
+{
+ if (!pysqlite_check_thread(self) || !pysqlite_check_connection(self)) {
+ return NULL;
+ }
+ if (!is_int_config(op)) {
+ return PyErr_Format(PyExc_ValueError, "unknown config 'op': %d", op);
+ }
+
+ int actual;
+ int rc = sqlite3_db_config(self->db, op, enable, &actual);
+ if (rc != SQLITE_OK) {
+ (void)_pysqlite_seterror(self->state, self->db);
+ return NULL;
+ }
+ if (enable != actual) {
+ PyErr_SetString(self->state->OperationalError, "Unable to set config");
+ return NULL;
+ }
+ Py_RETURN_NONE;
+}
+
+/*[clinic input]
+_sqlite3.Connection.getconfig as getconfig -> bool
+
+ op: int
+ The configuration verb; one of the sqlite3.SQLITE_DBCONFIG codes.
+ /
+
+Query a boolean connection configuration option.
+[clinic start generated code]*/
+
+static int
+getconfig_impl(pysqlite_Connection *self, int op)
+/*[clinic end generated code: output=25ac05044c7b78a3 input=b0526d7e432e3f2f]*/
+{
+ if (!pysqlite_check_thread(self) || !pysqlite_check_connection(self)) {
+ return -1;
+ }
+ if (!is_int_config(op)) {
+ PyErr_Format(PyExc_ValueError, "unknown config 'op': %d", op);
+ return -1;
+ }
+
+ int current;
+    int rc = sqlite3_db_config(self->db, op, -1, &current);
+ if (rc != SQLITE_OK) {
+ (void)_pysqlite_seterror(self->state, self->db);
+ return -1;
+ }
+ return current;
+}
static PyObject *
get_autocommit(pysqlite_Connection *self, void *Py_UNUSED(ctx))
@@ -2421,6 +2539,8 @@ static PyMethodDef connection_methods[] = {
DESERIALIZE_METHODDEF
CREATE_WINDOW_FUNCTION_METHODDEF
BLOBOPEN_METHODDEF
+ SETCONFIG_METHODDEF
+ GETCONFIG_METHODDEF
{NULL, NULL}
};
diff --git a/Modules/_sqlite/module.c b/Modules/_sqlite/module.c
index 6db3d51fd20220..9c42faa232c70d 100644
--- a/Modules/_sqlite/module.c
+++ b/Modules/_sqlite/module.c
@@ -499,6 +499,49 @@ add_integer_constants(PyObject *module) {
#if SQLITE_VERSION_NUMBER >= 3008007
ADD_INT(SQLITE_LIMIT_WORKER_THREADS);
#endif
+
+ /*
+ * Database connection configuration options.
+ * See https://www.sqlite.org/c3ref/c_dbconfig_defensive.html
+ */
+ ADD_INT(SQLITE_DBCONFIG_ENABLE_FKEY);
+ ADD_INT(SQLITE_DBCONFIG_ENABLE_TRIGGER);
+#if SQLITE_VERSION_NUMBER >= 3012002
+ ADD_INT(SQLITE_DBCONFIG_ENABLE_FTS3_TOKENIZER);
+#endif
+#if SQLITE_VERSION_NUMBER >= 3013000
+ ADD_INT(SQLITE_DBCONFIG_ENABLE_LOAD_EXTENSION);
+#endif
+#if SQLITE_VERSION_NUMBER >= 3016000
+ ADD_INT(SQLITE_DBCONFIG_NO_CKPT_ON_CLOSE);
+#endif
+#if SQLITE_VERSION_NUMBER >= 3020000
+ ADD_INT(SQLITE_DBCONFIG_ENABLE_QPSG);
+#endif
+#if SQLITE_VERSION_NUMBER >= 3022000
+ ADD_INT(SQLITE_DBCONFIG_TRIGGER_EQP);
+#endif
+#if SQLITE_VERSION_NUMBER >= 3024000
+ ADD_INT(SQLITE_DBCONFIG_RESET_DATABASE);
+#endif
+#if SQLITE_VERSION_NUMBER >= 3026000
+ ADD_INT(SQLITE_DBCONFIG_DEFENSIVE);
+#endif
+#if SQLITE_VERSION_NUMBER >= 3028000
+ ADD_INT(SQLITE_DBCONFIG_WRITABLE_SCHEMA);
+#endif
+#if SQLITE_VERSION_NUMBER >= 3029000
+ ADD_INT(SQLITE_DBCONFIG_DQS_DDL);
+ ADD_INT(SQLITE_DBCONFIG_DQS_DML);
+ ADD_INT(SQLITE_DBCONFIG_LEGACY_ALTER_TABLE);
+#endif
+#if SQLITE_VERSION_NUMBER >= 3030000
+ ADD_INT(SQLITE_DBCONFIG_ENABLE_VIEW);
+#endif
+#if SQLITE_VERSION_NUMBER >= 3031000
+ ADD_INT(SQLITE_DBCONFIG_LEGACY_FILE_FORMAT);
+ ADD_INT(SQLITE_DBCONFIG_TRUSTED_SCHEMA);
+#endif
#undef ADD_INT
return 0;
}
diff --git a/Modules/_testcapimodule.c b/Modules/_testcapimodule.c
index 557a6d46ed4632..c1892f6fa0a4b8 100644
--- a/Modules/_testcapimodule.c
+++ b/Modules/_testcapimodule.c
@@ -1482,6 +1482,7 @@ static PyObject *
run_in_subinterp_with_config(PyObject *self, PyObject *args, PyObject *kwargs)
{
const char *code;
+ int use_main_obmalloc = -1;
int allow_fork = -1;
int allow_exec = -1;
int allow_threads = -1;
@@ -1493,6 +1494,7 @@ run_in_subinterp_with_config(PyObject *self, PyObject *args, PyObject *kwargs)
PyCompilerFlags cflags = {0};
static char *kwlist[] = {"code",
+ "use_main_obmalloc",
"allow_fork",
"allow_exec",
"allow_threads",
@@ -1500,12 +1502,17 @@ run_in_subinterp_with_config(PyObject *self, PyObject *args, PyObject *kwargs)
"check_multi_interp_extensions",
NULL};
if (!PyArg_ParseTupleAndKeywords(args, kwargs,
- "s$ppppp:run_in_subinterp_with_config", kwlist,
- &code, &allow_fork, &allow_exec,
+ "s$pppppp:run_in_subinterp_with_config", kwlist,
+ &code, &use_main_obmalloc,
+ &allow_fork, &allow_exec,
&allow_threads, &allow_daemon_threads,
&check_multi_interp_extensions)) {
return NULL;
}
+ if (use_main_obmalloc < 0) {
+ PyErr_SetString(PyExc_ValueError, "missing use_main_obmalloc");
+ return NULL;
+ }
if (allow_fork < 0) {
PyErr_SetString(PyExc_ValueError, "missing allow_fork");
return NULL;
@@ -1532,6 +1539,7 @@ run_in_subinterp_with_config(PyObject *self, PyObject *args, PyObject *kwargs)
PyThreadState_Swap(NULL);
const _PyInterpreterConfig config = {
+ .use_main_obmalloc = use_main_obmalloc,
.allow_fork = allow_fork,
.allow_exec = allow_exec,
.allow_threads = allow_threads,
@@ -2733,6 +2741,18 @@ type_get_version(PyObject *self, PyObject *type)
}
+static PyObject *
+type_assign_version(PyObject *self, PyObject *type)
+{
+ if (!PyType_Check(type)) {
+ PyErr_SetString(PyExc_TypeError, "argument must be a type");
+ return NULL;
+ }
+ int res = PyUnstable_Type_AssignVersionTag((PyTypeObject *)type);
+ return PyLong_FromLong(res);
+}
+
+
// Test PyThreadState C API
static PyObject *
test_tstate_capi(PyObject *self, PyObject *Py_UNUSED(args))
@@ -3530,6 +3550,7 @@ static PyMethodDef TestMethods[] = {
{"test_py_is_macros", test_py_is_macros, METH_NOARGS},
{"test_py_is_funcs", test_py_is_funcs, METH_NOARGS},
{"type_get_version", type_get_version, METH_O, PyDoc_STR("type->tp_version_tag")},
+ {"type_assign_version", type_assign_version, METH_O, PyDoc_STR("PyUnstable_Type_AssignVersionTag")},
{"test_tstate_capi", test_tstate_capi, METH_NOARGS, NULL},
{"frame_getlocals", frame_getlocals, METH_O, NULL},
{"frame_getglobals", frame_getglobals, METH_O, NULL},
diff --git a/Modules/_testmultiphase.c b/Modules/_testmultiphase.c
index e34854f7025798..cf8990a2df0a9b 100644
--- a/Modules/_testmultiphase.c
+++ b/Modules/_testmultiphase.c
@@ -884,15 +884,3 @@ PyInit__test_module_state_shared(void)
}
return module;
}
-
-
-/*** Helper for imp test ***/
-
-static PyModuleDef imp_dummy_def = TEST_MODULE_DEF("imp_dummy", main_slots, testexport_methods);
-
-PyMODINIT_FUNC
-PyInit_imp_dummy(void)
-{
- return PyModuleDef_Init(&imp_dummy_def);
-}
-
diff --git a/Modules/_threadmodule.c b/Modules/_threadmodule.c
index 9c12c696757439..fd2fd9ab25f113 100644
--- a/Modules/_threadmodule.c
+++ b/Modules/_threadmodule.c
@@ -946,7 +946,7 @@ local_setattro(localobject *self, PyObject *name, PyObject *v)
}
if (r == 1) {
PyErr_Format(PyExc_AttributeError,
- "'%.50s' object attribute '%U' is read-only",
+ "'%.100s' object attribute '%U' is read-only",
Py_TYPE(self)->tp_name, name);
return -1;
}
diff --git a/Modules/_tkinter.c b/Modules/_tkinter.c
index 20e01c79668549..385a05932a77ed 100644
--- a/Modules/_tkinter.c
+++ b/Modules/_tkinter.c
@@ -324,10 +324,6 @@ static int quitMainLoop = 0;
static int errorInCmd = 0;
static PyObject *excInCmd;
-#ifdef TKINTER_PROTECT_LOADTK
-static int tk_load_failed = 0;
-#endif
-
static PyObject *Tkapp_UnicodeResult(TkappObject *);
@@ -532,17 +528,7 @@ Tcl_AppInit(Tcl_Interp *interp)
return TCL_OK;
}
-#ifdef TKINTER_PROTECT_LOADTK
- if (tk_load_failed) {
- PySys_WriteStderr("Tk_Init error: %s\n", TKINTER_LOADTK_ERRMSG);
- return TCL_ERROR;
- }
-#endif
-
if (Tk_Init(interp) == TCL_ERROR) {
-#ifdef TKINTER_PROTECT_LOADTK
- tk_load_failed = 1;
-#endif
PySys_WriteStderr("Tk_Init error: %s\n", Tcl_GetStringResult(interp));
return TCL_ERROR;
}
@@ -635,12 +621,6 @@ Tkapp_New(const char *screenName, const char *className,
Tcl_SetVar(v->interp,
"_tkinter_skip_tk_init", "1", TCL_GLOBAL_ONLY);
}
-#ifdef TKINTER_PROTECT_LOADTK
- else if (tk_load_failed) {
- Tcl_SetVar(v->interp,
- "_tkinter_tk_failed", "1", TCL_GLOBAL_ONLY);
- }
-#endif
/* some initial arguments need to be in argv */
if (sync || use) {
@@ -702,18 +682,6 @@ Tkapp_New(const char *screenName, const char *className,
if (Tcl_AppInit(v->interp) != TCL_OK) {
PyObject *result = Tkinter_Error(v);
-#ifdef TKINTER_PROTECT_LOADTK
- if (wantTk) {
- const char *_tkinter_tk_failed;
- _tkinter_tk_failed = Tcl_GetVar(v->interp,
- "_tkinter_tk_failed", TCL_GLOBAL_ONLY);
-
- if ( _tkinter_tk_failed != NULL &&
- strcmp(_tkinter_tk_failed, "1") == 0) {
- tk_load_failed = 1;
- }
- }
-#endif
Py_DECREF((PyObject *)v);
return (TkappObject *)result;
}
@@ -2780,18 +2748,6 @@ _tkinter_tkapp_loadtk_impl(TkappObject *self)
const char * _tk_exists = NULL;
int err;
-#ifdef TKINTER_PROTECT_LOADTK
- /* Up to Tk 8.4.13, Tk_Init deadlocks on the second call when the
- * first call failed.
- * To avoid the deadlock, we just refuse the second call through
- * a static variable.
- */
- if (tk_load_failed) {
- PyErr_SetString(Tkinter_TclError, TKINTER_LOADTK_ERRMSG);
- return NULL;
- }
-#endif
-
/* We want to guard against calling Tk_Init() multiple times */
CHECK_TCL_APPARTMENT;
ENTER_TCL
@@ -2811,9 +2767,6 @@ _tkinter_tkapp_loadtk_impl(TkappObject *self)
if (_tk_exists == NULL || strcmp(_tk_exists, "1") != 0) {
if (Tk_Init(interp) == TCL_ERROR) {
Tkinter_Error(self);
-#ifdef TKINTER_PROTECT_LOADTK
- tk_load_failed = 1;
-#endif
return NULL;
}
}
diff --git a/Modules/cjkcodecs/_codecs_cn.c b/Modules/cjkcodecs/_codecs_cn.c
index 8a62f7e257c6b1..e2c7908c9bb275 100644
--- a/Modules/cjkcodecs/_codecs_cn.c
+++ b/Modules/cjkcodecs/_codecs_cn.c
@@ -453,14 +453,14 @@ DECODER(hz)
}
-BEGIN_MAPPINGS_LIST
+BEGIN_MAPPINGS_LIST(4)
MAPPING_DECONLY(gb2312)
MAPPING_DECONLY(gbkext)
MAPPING_ENCONLY(gbcommon)
MAPPING_ENCDEC(gb18030ext)
END_MAPPINGS_LIST
-BEGIN_CODECS_LIST
+BEGIN_CODECS_LIST(4)
CODEC_STATELESS(gb2312)
CODEC_STATELESS(gbk)
CODEC_STATELESS(gb18030)
diff --git a/Modules/cjkcodecs/_codecs_hk.c b/Modules/cjkcodecs/_codecs_hk.c
index 4f21569a0ce73f..e7273bf18e3494 100644
--- a/Modules/cjkcodecs/_codecs_hk.c
+++ b/Modules/cjkcodecs/_codecs_hk.c
@@ -6,6 +6,10 @@
#define USING_IMPORTED_MAPS
+#define CJK_MOD_SPECIFIC_STATE \
+ const encode_map *big5_encmap; \
+ const decode_map *big5_decmap;
+
#include "cjkcodecs.h"
#include "mappings_hk.h"
@@ -13,16 +17,12 @@
* BIG5HKSCS codec
*/
-static const encode_map *big5_encmap = NULL;
-static const decode_map *big5_decmap = NULL;
-
CODEC_INIT(big5hkscs)
{
- static int initialized = 0;
-
- if (!initialized && IMPORT_MAP(tw, big5, &big5_encmap, &big5_decmap))
+ cjkcodecs_module_state *st = codec->modstate;
+ if (IMPORT_MAP(tw, big5, &st->big5_encmap, &st->big5_decmap)) {
return -1;
- initialized = 1;
+ }
return 0;
}
@@ -81,7 +81,7 @@ ENCODER(big5hkscs)
}
}
}
- else if (TRYMAP_ENC(big5, code, c))
+ else if (TRYMAP_ENC_ST(big5, code, c))
;
else
return 1;
@@ -122,7 +122,7 @@ DECODER(big5hkscs)
REQUIRE_INBUF(2);
if (0xc6 > c || c > 0xc8 || (c < 0xc7 && INBYTE2 < 0xa1)) {
- if (TRYMAP_DEC(big5, decoded, c, INBYTE2)) {
+ if (TRYMAP_DEC_ST(big5, decoded, c, INBYTE2)) {
OUTCHAR(decoded);
NEXT_IN(2);
continue;
@@ -177,14 +177,13 @@ DECODER(big5hkscs)
return 0;
}
-
-BEGIN_MAPPINGS_LIST
+BEGIN_MAPPINGS_LIST(3)
MAPPING_DECONLY(big5hkscs)
MAPPING_ENCONLY(big5hkscs_bmp)
MAPPING_ENCONLY(big5hkscs_nonbmp)
END_MAPPINGS_LIST
-BEGIN_CODECS_LIST
+BEGIN_CODECS_LIST(1)
CODEC_STATELESS_WINIT(big5hkscs)
END_CODECS_LIST
diff --git a/Modules/cjkcodecs/_codecs_iso2022.c b/Modules/cjkcodecs/_codecs_iso2022.c
index 7394cf67e0e7dd..86bb73b982a551 100644
--- a/Modules/cjkcodecs/_codecs_iso2022.c
+++ b/Modules/cjkcodecs/_codecs_iso2022.c
@@ -10,6 +10,27 @@
#define EMULATE_JISX0213_2000_ENCODE_INVALID MAP_UNMAPPABLE
#define EMULATE_JISX0213_2000_DECODE_INVALID MAP_UNMAPPABLE
+#define CJK_MOD_SPECIFIC_STATE \
+ /* kr */ \
+ const encode_map *cp949_encmap; \
+ const decode_map *ksx1001_decmap; \
+ \
+ /* jp */ \
+ const encode_map *jisxcommon_encmap; \
+ const decode_map *jisx0208_decmap; \
+ const decode_map *jisx0212_decmap; \
+ const encode_map *jisx0213_bmp_encmap; \
+ const decode_map *jisx0213_1_bmp_decmap; \
+ const decode_map *jisx0213_2_bmp_decmap; \
+ const encode_map *jisx0213_emp_encmap; \
+ const decode_map *jisx0213_1_emp_decmap; \
+ const decode_map *jisx0213_2_emp_decmap; \
+ \
+ /* cn */ \
+ const encode_map *gbcommon_encmap; \
+ const decode_map *gb2312_decmap;
+
+
#include "cjkcodecs.h"
#include "alg_jisx0201.h"
#include "emu_jisx0213_2000.h"
@@ -90,7 +111,7 @@
#define STATE_CLEARFLAG(f) do { ((state)->c[4]) &= ~(f); } while (0)
#define STATE_CLEARFLAGS() do { ((state)->c[4]) = 0; } while (0)
-#define ISO2022_CONFIG ((const struct iso2022_config *)config)
+#define ISO2022_CONFIG ((const struct iso2022_config *)(codec->config))
#define CONFIG_ISSET(flag) (ISO2022_CONFIG->flags & (flag))
#define CONFIG_DESIGNATIONS (ISO2022_CONFIG->designations)
@@ -101,9 +122,12 @@
/*-*- internal data structures -*-*/
-typedef int (*iso2022_init_func)(void);
-typedef Py_UCS4 (*iso2022_decode_func)(const unsigned char *data);
-typedef DBCHAR (*iso2022_encode_func)(const Py_UCS4 *data, Py_ssize_t *length);
+typedef int (*iso2022_init_func)(const MultibyteCodec *codec);
+typedef Py_UCS4 (*iso2022_decode_func)(const MultibyteCodec *codec,
+ const unsigned char *data);
+typedef DBCHAR (*iso2022_encode_func)(const MultibyteCodec *codec,
+ const Py_UCS4 *data,
+ Py_ssize_t *length);
struct iso2022_designation {
unsigned char mark;
@@ -124,9 +148,11 @@ struct iso2022_config {
CODEC_INIT(iso2022)
{
const struct iso2022_designation *desig;
- for (desig = CONFIG_DESIGNATIONS; desig->mark; desig++)
- if (desig->initializer != NULL && desig->initializer() != 0)
+ for (desig = CONFIG_DESIGNATIONS; desig->mark; desig++) {
+ if (desig->initializer != NULL && desig->initializer(codec) != 0) {
return -1;
+ }
+ }
return 0;
}
@@ -182,7 +208,7 @@ ENCODER(iso2022)
encoded = MAP_UNMAPPABLE;
for (dsg = CONFIG_DESIGNATIONS; dsg->mark; dsg++) {
Py_ssize_t length = 1;
- encoded = dsg->encoder(&c, &length);
+ encoded = dsg->encoder(codec, &c, &length);
if (encoded == MAP_MULTIPLE_AVAIL) {
/* this implementation won't work for pair
* of non-bmp characters. */
@@ -193,7 +219,7 @@ ENCODER(iso2022)
}
else
length = 2;
- encoded = dsg->encoder(&c, &length);
+ encoded = dsg->encoder(codec, &c, &length);
if (encoded != MAP_UNMAPPABLE) {
insize = length;
break;
@@ -288,7 +314,7 @@ DECODER_RESET(iso2022)
}
static Py_ssize_t
-iso2022processesc(const void *config, MultibyteCodec_State *state,
+iso2022processesc(const MultibyteCodec *codec, MultibyteCodec_State *state,
const unsigned char **inbuf, Py_ssize_t *inleft)
{
unsigned char charset, designation;
@@ -388,7 +414,7 @@ iso2022processesc(const void *config, MultibyteCodec_State *state,
}
static Py_ssize_t
-iso2022processg2(const void *config, MultibyteCodec_State *state,
+iso2022processg2(const MultibyteCodec *codec, MultibyteCodec_State *state,
const unsigned char **inbuf, Py_ssize_t *inleft,
_PyUnicodeWriter *writer)
{
@@ -442,14 +468,14 @@ DECODER(iso2022)
case ESC:
REQUIRE_INBUF(2);
if (IS_ISO2022ESC(INBYTE2)) {
- err = iso2022processesc(config, state,
+ err = iso2022processesc(codec, state,
inbuf, &inleft);
if (err != 0)
return err;
}
else if (CONFIG_ISSET(USE_G2) && INBYTE2 == 'N') {/* SS2 */
REQUIRE_INBUF(3);
- err = iso2022processg2(config, state,
+ err = iso2022processg2(codec, state,
inbuf, &inleft, writer);
if (err != 0)
return err;
@@ -517,7 +543,7 @@ DECODER(iso2022)
}
REQUIRE_INBUF(dsg->width);
- decoded = dsg->decoder(*inbuf);
+ decoded = dsg->decoder(codec, *inbuf);
if (decoded == MAP_UNMAPPABLE)
return dsg->width;
@@ -538,64 +564,38 @@ DECODER(iso2022)
return 0;
}
-/*-*- mapping table holders -*-*/
-
-#define ENCMAP(enc) static const encode_map *enc##_encmap = NULL;
-#define DECMAP(enc) static const decode_map *enc##_decmap = NULL;
-
-/* kr */
-ENCMAP(cp949)
-DECMAP(ksx1001)
-
-/* jp */
-ENCMAP(jisxcommon)
-DECMAP(jisx0208)
-DECMAP(jisx0212)
-ENCMAP(jisx0213_bmp)
-DECMAP(jisx0213_1_bmp)
-DECMAP(jisx0213_2_bmp)
-ENCMAP(jisx0213_emp)
-DECMAP(jisx0213_1_emp)
-DECMAP(jisx0213_2_emp)
-
-/* cn */
-ENCMAP(gbcommon)
-DECMAP(gb2312)
-
-/* tw */
-
/*-*- mapping access functions -*-*/
static int
-ksx1001_init(void)
+ksx1001_init(const MultibyteCodec *codec)
{
- static int initialized = 0;
-
- if (!initialized && (
- IMPORT_MAP(kr, cp949, &cp949_encmap, NULL) ||
- IMPORT_MAP(kr, ksx1001, NULL, &ksx1001_decmap)))
+ cjkcodecs_module_state *st = codec->modstate;
+ if (IMPORT_MAP(kr, cp949, &st->cp949_encmap, NULL) ||
+ IMPORT_MAP(kr, ksx1001, NULL, &st->ksx1001_decmap))
+ {
return -1;
- initialized = 1;
+ }
return 0;
}
static Py_UCS4
-ksx1001_decoder(const unsigned char *data)
+ksx1001_decoder(const MultibyteCodec *codec, const unsigned char *data)
{
Py_UCS4 u;
- if (TRYMAP_DEC(ksx1001, u, data[0], data[1]))
+ if (TRYMAP_DEC_ST(ksx1001, u, data[0], data[1]))
return u;
else
return MAP_UNMAPPABLE;
}
static DBCHAR
-ksx1001_encoder(const Py_UCS4 *data, Py_ssize_t *length)
+ksx1001_encoder(const MultibyteCodec *codec, const Py_UCS4 *data,
+ Py_ssize_t *length)
{
DBCHAR coded;
assert(*length == 1);
if (*data < 0x10000) {
- if (TRYMAP_ENC(cp949, coded, *data)) {
+ if (TRYMAP_ENC_ST(cp949, coded, *data)) {
if (!(coded & 0x8000))
return coded;
}
@@ -604,39 +604,39 @@ ksx1001_encoder(const Py_UCS4 *data, Py_ssize_t *length)
}
static int
-jisx0208_init(void)
+jisx0208_init(const MultibyteCodec *codec)
{
- static int initialized = 0;
-
- if (!initialized && (
- IMPORT_MAP(jp, jisxcommon, &jisxcommon_encmap, NULL) ||
- IMPORT_MAP(jp, jisx0208, NULL, &jisx0208_decmap)))
+ cjkcodecs_module_state *st = codec->modstate;
+ if (IMPORT_MAP(jp, jisxcommon, &st->jisxcommon_encmap, NULL) ||
+ IMPORT_MAP(jp, jisx0208, NULL, &st->jisx0208_decmap))
+ {
return -1;
- initialized = 1;
+ }
return 0;
}
static Py_UCS4
-jisx0208_decoder(const unsigned char *data)
+jisx0208_decoder(const MultibyteCodec *codec, const unsigned char *data)
{
Py_UCS4 u;
if (data[0] == 0x21 && data[1] == 0x40) /* F/W REVERSE SOLIDUS */
return 0xff3c;
- else if (TRYMAP_DEC(jisx0208, u, data[0], data[1]))
+ else if (TRYMAP_DEC_ST(jisx0208, u, data[0], data[1]))
return u;
else
return MAP_UNMAPPABLE;
}
static DBCHAR
-jisx0208_encoder(const Py_UCS4 *data, Py_ssize_t *length)
+jisx0208_encoder(const MultibyteCodec *codec, const Py_UCS4 *data,
+ Py_ssize_t *length)
{
DBCHAR coded;
assert(*length == 1);
if (*data < 0x10000) {
if (*data == 0xff3c) /* F/W REVERSE SOLIDUS */
return 0x2140;
- else if (TRYMAP_ENC(jisxcommon, coded, *data)) {
+ else if (TRYMAP_ENC_ST(jisxcommon, coded, *data)) {
if (!(coded & 0x8000))
return coded;
}
@@ -645,35 +645,35 @@ jisx0208_encoder(const Py_UCS4 *data, Py_ssize_t *length)
}
static int
-jisx0212_init(void)
+jisx0212_init(const MultibyteCodec *codec)
{
- static int initialized = 0;
-
- if (!initialized && (
- IMPORT_MAP(jp, jisxcommon, &jisxcommon_encmap, NULL) ||
- IMPORT_MAP(jp, jisx0212, NULL, &jisx0212_decmap)))
+ cjkcodecs_module_state *st = codec->modstate;
+ if (IMPORT_MAP(jp, jisxcommon, &st->jisxcommon_encmap, NULL) ||
+ IMPORT_MAP(jp, jisx0212, NULL, &st->jisx0212_decmap))
+ {
return -1;
- initialized = 1;
+ }
return 0;
}
static Py_UCS4
-jisx0212_decoder(const unsigned char *data)
+jisx0212_decoder(const MultibyteCodec *codec, const unsigned char *data)
{
Py_UCS4 u;
- if (TRYMAP_DEC(jisx0212, u, data[0], data[1]))
+ if (TRYMAP_DEC_ST(jisx0212, u, data[0], data[1]))
return u;
else
return MAP_UNMAPPABLE;
}
static DBCHAR
-jisx0212_encoder(const Py_UCS4 *data, Py_ssize_t *length)
+jisx0212_encoder(const MultibyteCodec *codec, const Py_UCS4 *data,
+ Py_ssize_t *length)
{
DBCHAR coded;
assert(*length == 1);
if (*data < 0x10000) {
- if (TRYMAP_ENC(jisxcommon, coded, *data)) {
+ if (TRYMAP_ENC_ST(jisxcommon, coded, *data)) {
if (coded & 0x8000)
return coded & 0x7fff;
}
@@ -682,44 +682,37 @@ jisx0212_encoder(const Py_UCS4 *data, Py_ssize_t *length)
}
static int
-jisx0213_init(void)
+jisx0213_init(const MultibyteCodec *codec)
{
- static int initialized = 0;
-
- if (!initialized && (
- jisx0208_init() ||
- IMPORT_MAP(jp, jisx0213_bmp,
- &jisx0213_bmp_encmap, NULL) ||
- IMPORT_MAP(jp, jisx0213_1_bmp,
- NULL, &jisx0213_1_bmp_decmap) ||
- IMPORT_MAP(jp, jisx0213_2_bmp,
- NULL, &jisx0213_2_bmp_decmap) ||
- IMPORT_MAP(jp, jisx0213_emp,
- &jisx0213_emp_encmap, NULL) ||
- IMPORT_MAP(jp, jisx0213_1_emp,
- NULL, &jisx0213_1_emp_decmap) ||
- IMPORT_MAP(jp, jisx0213_2_emp,
- NULL, &jisx0213_2_emp_decmap) ||
- IMPORT_MAP(jp, jisx0213_pair, &jisx0213_pair_encmap,
- &jisx0213_pair_decmap)))
+ cjkcodecs_module_state *st = codec->modstate;
+ if (jisx0208_init(codec) ||
+ IMPORT_MAP(jp, jisx0213_bmp, &st->jisx0213_bmp_encmap, NULL) ||
+ IMPORT_MAP(jp, jisx0213_1_bmp, NULL, &st->jisx0213_1_bmp_decmap) ||
+ IMPORT_MAP(jp, jisx0213_2_bmp, NULL, &st->jisx0213_2_bmp_decmap) ||
+ IMPORT_MAP(jp, jisx0213_emp, &st->jisx0213_emp_encmap, NULL) ||
+ IMPORT_MAP(jp, jisx0213_1_emp, NULL, &st->jisx0213_1_emp_decmap) ||
+ IMPORT_MAP(jp, jisx0213_2_emp, NULL, &st->jisx0213_2_emp_decmap) ||
+ IMPORT_MAP(jp, jisx0213_pair,
+ &jisx0213_pair_encmap, &jisx0213_pair_decmap))
+ {
return -1;
- initialized = 1;
+ }
return 0;
}
#define config ((void *)2000)
static Py_UCS4
-jisx0213_2000_1_decoder(const unsigned char *data)
+jisx0213_2000_1_decoder(const MultibyteCodec *codec, const unsigned char *data)
{
Py_UCS4 u;
- EMULATE_JISX0213_2000_DECODE_PLANE1(u, data[0], data[1])
+ EMULATE_JISX0213_2000_DECODE_PLANE1(config, u, data[0], data[1])
else if (data[0] == 0x21 && data[1] == 0x40) /* F/W REVERSE SOLIDUS */
return 0xff3c;
- else if (TRYMAP_DEC(jisx0208, u, data[0], data[1]))
+ else if (TRYMAP_DEC_ST(jisx0208, u, data[0], data[1]))
;
- else if (TRYMAP_DEC(jisx0213_1_bmp, u, data[0], data[1]))
+ else if (TRYMAP_DEC_ST(jisx0213_1_bmp, u, data[0], data[1]))
;
- else if (TRYMAP_DEC(jisx0213_1_emp, u, data[0], data[1]))
+ else if (TRYMAP_DEC_ST(jisx0213_1_emp, u, data[0], data[1]))
u |= 0x20000;
else if (TRYMAP_DEC(jisx0213_pair, u, data[0], data[1]))
;
@@ -729,13 +722,13 @@ jisx0213_2000_1_decoder(const unsigned char *data)
}
static Py_UCS4
-jisx0213_2000_2_decoder(const unsigned char *data)
+jisx0213_2000_2_decoder(const MultibyteCodec *codec, const unsigned char *data)
{
Py_UCS4 u;
- EMULATE_JISX0213_2000_DECODE_PLANE2_CHAR(u, data[0], data[1])
- if (TRYMAP_DEC(jisx0213_2_bmp, u, data[0], data[1]))
+ EMULATE_JISX0213_2000_DECODE_PLANE2_CHAR(config, u, data[0], data[1])
+ if (TRYMAP_DEC_ST(jisx0213_2_bmp, u, data[0], data[1]))
;
- else if (TRYMAP_DEC(jisx0213_2_emp, u, data[0], data[1]))
+ else if (TRYMAP_DEC_ST(jisx0213_2_emp, u, data[0], data[1]))
u |= 0x20000;
else
return MAP_UNMAPPABLE;
@@ -744,16 +737,16 @@ jisx0213_2000_2_decoder(const unsigned char *data)
#undef config
static Py_UCS4
-jisx0213_2004_1_decoder(const unsigned char *data)
+jisx0213_2004_1_decoder(const MultibyteCodec *codec, const unsigned char *data)
{
Py_UCS4 u;
if (data[0] == 0x21 && data[1] == 0x40) /* F/W REVERSE SOLIDUS */
return 0xff3c;
- else if (TRYMAP_DEC(jisx0208, u, data[0], data[1]))
+ else if (TRYMAP_DEC_ST(jisx0208, u, data[0], data[1]))
;
- else if (TRYMAP_DEC(jisx0213_1_bmp, u, data[0], data[1]))
+ else if (TRYMAP_DEC_ST(jisx0213_1_bmp, u, data[0], data[1]))
;
- else if (TRYMAP_DEC(jisx0213_1_emp, u, data[0], data[1]))
+ else if (TRYMAP_DEC_ST(jisx0213_1_emp, u, data[0], data[1]))
u |= 0x20000;
else if (TRYMAP_DEC(jisx0213_pair, u, data[0], data[1]))
;
@@ -763,12 +756,12 @@ jisx0213_2004_1_decoder(const unsigned char *data)
}
static Py_UCS4
-jisx0213_2004_2_decoder(const unsigned char *data)
+jisx0213_2004_2_decoder(const MultibyteCodec *codec, const unsigned char *data)
{
Py_UCS4 u;
- if (TRYMAP_DEC(jisx0213_2_bmp, u, data[0], data[1]))
+ if (TRYMAP_DEC_ST(jisx0213_2_bmp, u, data[0], data[1]))
;
- else if (TRYMAP_DEC(jisx0213_2_emp, u, data[0], data[1]))
+ else if (TRYMAP_DEC_ST(jisx0213_2_emp, u, data[0], data[1]))
u |= 0x20000;
else
return MAP_UNMAPPABLE;
@@ -776,7 +769,8 @@ jisx0213_2004_2_decoder(const unsigned char *data)
}
static DBCHAR
-jisx0213_encoder(const Py_UCS4 *data, Py_ssize_t *length, void *config)
+jisx0213_encoder(const MultibyteCodec *codec, const Py_UCS4 *data,
+ Py_ssize_t *length, const void *config)
{
DBCHAR coded;
@@ -784,19 +778,19 @@ jisx0213_encoder(const Py_UCS4 *data, Py_ssize_t *length, void *config)
case 1: /* first character */
if (*data >= 0x10000) {
if ((*data) >> 16 == 0x20000 >> 16) {
- EMULATE_JISX0213_2000_ENCODE_EMP(coded, *data)
- else if (TRYMAP_ENC(jisx0213_emp, coded, (*data) & 0xffff))
+ EMULATE_JISX0213_2000_ENCODE_EMP(config, coded, *data)
+ else if (TRYMAP_ENC_ST(jisx0213_emp, coded, (*data) & 0xffff))
return coded;
}
return MAP_UNMAPPABLE;
}
- EMULATE_JISX0213_2000_ENCODE_BMP(coded, *data)
- else if (TRYMAP_ENC(jisx0213_bmp, coded, *data)) {
+ EMULATE_JISX0213_2000_ENCODE_BMP(config, coded, *data)
+ else if (TRYMAP_ENC_ST(jisx0213_bmp, coded, *data)) {
if (coded == MULTIC)
return MAP_MULTIPLE_AVAIL;
}
- else if (TRYMAP_ENC(jisxcommon, coded, *data)) {
+ else if (TRYMAP_ENC_ST(jisxcommon, coded, *data)) {
if (coded & 0x8000)
return MAP_UNMAPPABLE;
}
@@ -827,9 +821,10 @@ jisx0213_encoder(const Py_UCS4 *data, Py_ssize_t *length, void *config)
}
static DBCHAR
-jisx0213_2000_1_encoder(const Py_UCS4 *data, Py_ssize_t *length)
+jisx0213_2000_1_encoder(const MultibyteCodec *codec, const Py_UCS4 *data,
+ Py_ssize_t *length)
{
- DBCHAR coded = jisx0213_encoder(data, length, (void *)2000);
+ DBCHAR coded = jisx0213_encoder(codec, data, length, (void *)2000);
if (coded == MAP_UNMAPPABLE || coded == MAP_MULTIPLE_AVAIL)
return coded;
else if (coded & 0x8000)
@@ -839,12 +834,13 @@ jisx0213_2000_1_encoder(const Py_UCS4 *data, Py_ssize_t *length)
}
static DBCHAR
-jisx0213_2000_1_encoder_paironly(const Py_UCS4 *data, Py_ssize_t *length)
+jisx0213_2000_1_encoder_paironly(const MultibyteCodec *codec,
+ const Py_UCS4 *data, Py_ssize_t *length)
{
DBCHAR coded;
Py_ssize_t ilength = *length;
- coded = jisx0213_encoder(data, length, (void *)2000);
+ coded = jisx0213_encoder(codec, data, length, (void *)2000);
switch (ilength) {
case 1:
if (coded == MAP_MULTIPLE_AVAIL)
@@ -862,9 +858,10 @@ jisx0213_2000_1_encoder_paironly(const Py_UCS4 *data, Py_ssize_t *length)
}
static DBCHAR
-jisx0213_2000_2_encoder(const Py_UCS4 *data, Py_ssize_t *length)
+jisx0213_2000_2_encoder(const MultibyteCodec *codec, const Py_UCS4 *data,
+ Py_ssize_t *length)
{
- DBCHAR coded = jisx0213_encoder(data, length, (void *)2000);
+ DBCHAR coded = jisx0213_encoder(codec, data, length, (void *)2000);
if (coded == MAP_UNMAPPABLE || coded == MAP_MULTIPLE_AVAIL)
return coded;
else if (coded & 0x8000)
@@ -874,9 +871,10 @@ jisx0213_2000_2_encoder(const Py_UCS4 *data, Py_ssize_t *length)
}
static DBCHAR
-jisx0213_2004_1_encoder(const Py_UCS4 *data, Py_ssize_t *length)
+jisx0213_2004_1_encoder(const MultibyteCodec *codec, const Py_UCS4 *data,
+ Py_ssize_t *length)
{
- DBCHAR coded = jisx0213_encoder(data, length, NULL);
+ DBCHAR coded = jisx0213_encoder(codec, data, length, NULL);
if (coded == MAP_UNMAPPABLE || coded == MAP_MULTIPLE_AVAIL)
return coded;
else if (coded & 0x8000)
@@ -886,12 +884,13 @@ jisx0213_2004_1_encoder(const Py_UCS4 *data, Py_ssize_t *length)
}
static DBCHAR
-jisx0213_2004_1_encoder_paironly(const Py_UCS4 *data, Py_ssize_t *length)
+jisx0213_2004_1_encoder_paironly(const MultibyteCodec *codec,
+ const Py_UCS4 *data, Py_ssize_t *length)
{
DBCHAR coded;
Py_ssize_t ilength = *length;
- coded = jisx0213_encoder(data, length, NULL);
+ coded = jisx0213_encoder(codec, data, length, NULL);
switch (ilength) {
case 1:
if (coded == MAP_MULTIPLE_AVAIL)
@@ -909,9 +908,10 @@ jisx0213_2004_1_encoder_paironly(const Py_UCS4 *data, Py_ssize_t *length)
}
static DBCHAR
-jisx0213_2004_2_encoder(const Py_UCS4 *data, Py_ssize_t *length)
+jisx0213_2004_2_encoder(const MultibyteCodec *codec, const Py_UCS4 *data,
+ Py_ssize_t *length)
{
- DBCHAR coded = jisx0213_encoder(data, length, NULL);
+ DBCHAR coded = jisx0213_encoder(codec, data, length, NULL);
if (coded == MAP_UNMAPPABLE || coded == MAP_MULTIPLE_AVAIL)
return coded;
else if (coded & 0x8000)
@@ -921,7 +921,7 @@ jisx0213_2004_2_encoder(const Py_UCS4 *data, Py_ssize_t *length)
}
static Py_UCS4
-jisx0201_r_decoder(const unsigned char *data)
+jisx0201_r_decoder(const MultibyteCodec *codec, const unsigned char *data)
{
Py_UCS4 u;
JISX0201_R_DECODE_CHAR(*data, u)
@@ -931,7 +931,8 @@ jisx0201_r_decoder(const unsigned char *data)
}
static DBCHAR
-jisx0201_r_encoder(const Py_UCS4 *data, Py_ssize_t *length)
+jisx0201_r_encoder(const MultibyteCodec *codec, const Py_UCS4 *data,
+ Py_ssize_t *length)
{
DBCHAR coded;
JISX0201_R_ENCODE(*data, coded)
@@ -941,7 +942,7 @@ jisx0201_r_encoder(const Py_UCS4 *data, Py_ssize_t *length)
}
static Py_UCS4
-jisx0201_k_decoder(const unsigned char *data)
+jisx0201_k_decoder(const MultibyteCodec *codec, const unsigned char *data)
{
Py_UCS4 u;
JISX0201_K_DECODE_CHAR(*data ^ 0x80, u)
@@ -951,7 +952,8 @@ jisx0201_k_decoder(const unsigned char *data)
}
static DBCHAR
-jisx0201_k_encoder(const Py_UCS4 *data, Py_ssize_t *length)
+jisx0201_k_encoder(const MultibyteCodec *codec, const Py_UCS4 *data,
+ Py_ssize_t *length)
{
DBCHAR coded;
JISX0201_K_ENCODE(*data, coded)
@@ -961,35 +963,35 @@ jisx0201_k_encoder(const Py_UCS4 *data, Py_ssize_t *length)
}
static int
-gb2312_init(void)
+gb2312_init(const MultibyteCodec *codec)
{
- static int initialized = 0;
-
- if (!initialized && (
- IMPORT_MAP(cn, gbcommon, &gbcommon_encmap, NULL) ||
- IMPORT_MAP(cn, gb2312, NULL, &gb2312_decmap)))
+ cjkcodecs_module_state *st = codec->modstate;
+ if (IMPORT_MAP(cn, gbcommon, &st->gbcommon_encmap, NULL) ||
+ IMPORT_MAP(cn, gb2312, NULL, &st->gb2312_decmap))
+ {
return -1;
- initialized = 1;
+ }
return 0;
}
static Py_UCS4
-gb2312_decoder(const unsigned char *data)
+gb2312_decoder(const MultibyteCodec *codec, const unsigned char *data)
{
Py_UCS4 u;
- if (TRYMAP_DEC(gb2312, u, data[0], data[1]))
+ if (TRYMAP_DEC_ST(gb2312, u, data[0], data[1]))
return u;
else
return MAP_UNMAPPABLE;
}
static DBCHAR
-gb2312_encoder(const Py_UCS4 *data, Py_ssize_t *length)
+gb2312_encoder(const MultibyteCodec *codec, const Py_UCS4 *data,
+ Py_ssize_t *length)
{
DBCHAR coded;
assert(*length == 1);
if (*data < 0x10000) {
- if (TRYMAP_ENC(gbcommon, coded, *data)) {
+ if (TRYMAP_ENC_ST(gbcommon, coded, *data)) {
if (!(coded & 0x8000))
return coded;
}
@@ -999,13 +1001,14 @@ gb2312_encoder(const Py_UCS4 *data, Py_ssize_t *length)
static Py_UCS4
-dummy_decoder(const unsigned char *data)
+dummy_decoder(const MultibyteCodec *codec, const unsigned char *data)
{
return MAP_UNMAPPABLE;
}
static DBCHAR
-dummy_encoder(const Py_UCS4 *data, Py_ssize_t *length)
+dummy_encoder(const MultibyteCodec *codec, const Py_UCS4 *data,
+ Py_ssize_t *length)
{
return MAP_UNMAPPABLE;
}
@@ -1119,18 +1122,19 @@ static const struct iso2022_designation iso2022_jp_ext_designations[] = {
CONFIGDEF(jp_ext, NO_SHIFT | USE_JISX0208_EXT)
-BEGIN_MAPPINGS_LIST
+BEGIN_MAPPINGS_LIST(0)
/* no mapping table here */
END_MAPPINGS_LIST
-#define ISO2022_CODEC(variation) { \
+#define ISO2022_CODEC(variation) \
+NEXT_CODEC = (MultibyteCodec){ \
"iso2022_" #variation, \
&iso2022_##variation##_config, \
iso2022_codec_init, \
_STATEFUL_METHODS(iso2022) \
-},
+};
-BEGIN_CODECS_LIST
+BEGIN_CODECS_LIST(7)
ISO2022_CODEC(kr)
ISO2022_CODEC(jp)
ISO2022_CODEC(jp_1)
diff --git a/Modules/cjkcodecs/_codecs_jp.c b/Modules/cjkcodecs/_codecs_jp.c
index 3a332953b957cb..f7127487aa5f59 100644
--- a/Modules/cjkcodecs/_codecs_jp.c
+++ b/Modules/cjkcodecs/_codecs_jp.c
@@ -164,7 +164,7 @@ ENCODER(euc_jis_2004)
insize = 1;
if (c <= 0xFFFF) {
- EMULATE_JISX0213_2000_ENCODE_BMP(code, c)
+ EMULATE_JISX0213_2000_ENCODE_BMP(codec->config, code, c)
else if (TRYMAP_ENC(jisx0213_bmp, code, c)) {
if (code == MULTIC) {
if (inlen - *inpos < 2) {
@@ -215,7 +215,7 @@ ENCODER(euc_jis_2004)
return 1;
}
else if (c >> 16 == EMPBASE >> 16) {
- EMULATE_JISX0213_2000_ENCODE_EMP(code, c)
+ EMULATE_JISX0213_2000_ENCODE_EMP(codec->config, code, c)
else if (TRYMAP_ENC(jisx0213_emp, code, c & 0xffff))
;
else
@@ -271,7 +271,7 @@ DECODER(euc_jis_2004)
c3 = INBYTE3 ^ 0x80;
/* JIS X 0213 Plane 2 or JIS X 0212 (see NOTES) */
- EMULATE_JISX0213_2000_DECODE_PLANE2(writer, c2, c3)
+ EMULATE_JISX0213_2000_DECODE_PLANE2(codec->config, writer, c2, c3)
else if (TRYMAP_DEC(jisx0213_2_bmp, decoded, c2, c3))
OUTCHAR(decoded);
else if (TRYMAP_DEC(jisx0213_2_emp, code, c2, c3)) {
@@ -293,7 +293,7 @@ DECODER(euc_jis_2004)
c2 = INBYTE2 ^ 0x80;
/* JIS X 0213 Plane 1 */
- EMULATE_JISX0213_2000_DECODE_PLANE1(writer, c, c2)
+ EMULATE_JISX0213_2000_DECODE_PLANE1(codec->config, writer, c, c2)
else if (c == 0x21 && c2 == 0x40)
OUTCHAR(0xff3c);
else if (c == 0x22 && c2 == 0x32)
@@ -582,7 +582,7 @@ ENCODER(shift_jis_2004)
if (code == NOCHAR) {
if (c <= 0xffff) {
- EMULATE_JISX0213_2000_ENCODE_BMP(code, c)
+ EMULATE_JISX0213_2000_ENCODE_BMP(codec->config, code, c)
else if (TRYMAP_ENC(jisx0213_bmp, code, c)) {
if (code == MULTIC) {
if (inlen - *inpos < 2) {
@@ -625,7 +625,7 @@ ENCODER(shift_jis_2004)
return 1;
}
else if (c >> 16 == EMPBASE >> 16) {
- EMULATE_JISX0213_2000_ENCODE_EMP(code, c)
+ EMULATE_JISX0213_2000_ENCODE_EMP(codec->config, code, c)
else if (TRYMAP_ENC(jisx0213_emp, code, c&0xffff))
;
else
@@ -686,7 +686,7 @@ DECODER(shift_jis_2004)
if (c1 < 0x5e) { /* Plane 1 */
c1 += 0x21;
- EMULATE_JISX0213_2000_DECODE_PLANE1(writer,
+ EMULATE_JISX0213_2000_DECODE_PLANE1(codec->config, writer,
c1, c2)
else if (TRYMAP_DEC(jisx0208, decoded, c1, c2))
OUTCHAR(decoded);
@@ -708,7 +708,7 @@ DECODER(shift_jis_2004)
else
c1 -= 0x3d;
- EMULATE_JISX0213_2000_DECODE_PLANE2(writer,
+ EMULATE_JISX0213_2000_DECODE_PLANE2(codec->config, writer,
c1, c2)
else if (TRYMAP_DEC(jisx0213_2_bmp, decoded, c1, c2))
OUTCHAR(decoded);
@@ -733,7 +733,7 @@ DECODER(shift_jis_2004)
}
-BEGIN_MAPPINGS_LIST
+BEGIN_MAPPINGS_LIST(11)
MAPPING_DECONLY(jisx0208)
MAPPING_DECONLY(jisx0212)
MAPPING_ENCONLY(jisxcommon)
@@ -747,14 +747,19 @@ BEGIN_MAPPINGS_LIST
MAPPING_ENCDEC(cp932ext)
END_MAPPINGS_LIST
-BEGIN_CODECS_LIST
+#define CODEC_CUSTOM(NAME, N, METH) \
+ NEXT_CODEC = (MultibyteCodec){NAME, (void *)N, NULL, _STATELESS_METHODS(METH)};
+
+BEGIN_CODECS_LIST(7)
CODEC_STATELESS(shift_jis)
CODEC_STATELESS(cp932)
CODEC_STATELESS(euc_jp)
CODEC_STATELESS(shift_jis_2004)
CODEC_STATELESS(euc_jis_2004)
- { "euc_jisx0213", (void *)2000, NULL, _STATELESS_METHODS(euc_jis_2004) },
- { "shift_jisx0213", (void *)2000, NULL, _STATELESS_METHODS(shift_jis_2004) },
+ CODEC_CUSTOM("euc_jisx0213", 2000, euc_jis_2004)
+ CODEC_CUSTOM("shift_jisx0213", 2000, shift_jis_2004)
END_CODECS_LIST
+#undef CODEC_CUSTOM
+
I_AM_A_MODULE_FOR(jp)
diff --git a/Modules/cjkcodecs/_codecs_kr.c b/Modules/cjkcodecs/_codecs_kr.c
index 72641e495af0b0..fd9a9fd92db1fd 100644
--- a/Modules/cjkcodecs/_codecs_kr.c
+++ b/Modules/cjkcodecs/_codecs_kr.c
@@ -453,13 +453,13 @@ DECODER(johab)
#undef FILL
-BEGIN_MAPPINGS_LIST
+BEGIN_MAPPINGS_LIST(3)
MAPPING_DECONLY(ksx1001)
MAPPING_ENCONLY(cp949)
MAPPING_DECONLY(cp949ext)
END_MAPPINGS_LIST
-BEGIN_CODECS_LIST
+BEGIN_CODECS_LIST(3)
CODEC_STATELESS(euc_kr)
CODEC_STATELESS(cp949)
CODEC_STATELESS(johab)
diff --git a/Modules/cjkcodecs/_codecs_tw.c b/Modules/cjkcodecs/_codecs_tw.c
index 722b26b128a708..3e440991414434 100644
--- a/Modules/cjkcodecs/_codecs_tw.c
+++ b/Modules/cjkcodecs/_codecs_tw.c
@@ -130,12 +130,12 @@ DECODER(cp950)
-BEGIN_MAPPINGS_LIST
+BEGIN_MAPPINGS_LIST(2)
MAPPING_ENCDEC(big5)
MAPPING_ENCDEC(cp950ext)
END_MAPPINGS_LIST
-BEGIN_CODECS_LIST
+BEGIN_CODECS_LIST(2)
CODEC_STATELESS(big5)
CODEC_STATELESS(cp950)
END_CODECS_LIST
diff --git a/Modules/cjkcodecs/cjkcodecs.h b/Modules/cjkcodecs/cjkcodecs.h
index d9aeec2ff40b08..e553ff3e17b898 100644
--- a/Modules/cjkcodecs/cjkcodecs.h
+++ b/Modules/cjkcodecs/cjkcodecs.h
@@ -60,37 +60,55 @@ struct pair_encodemap {
DBCHAR code;
};
-static const MultibyteCodec *codec_list;
-static const struct dbcs_map *mapping_list;
+#ifndef CJK_MOD_SPECIFIC_STATE
+#define CJK_MOD_SPECIFIC_STATE
+#endif
+
+typedef struct _cjk_mod_state {
+ int num_mappings;
+ int num_codecs;
+ struct dbcs_map *mapping_list;
+ MultibyteCodec *codec_list;
+
+ CJK_MOD_SPECIFIC_STATE
+} cjkcodecs_module_state;
+
+static inline cjkcodecs_module_state *
+get_module_state(PyObject *mod)
+{
+ void *state = PyModule_GetState(mod);
+ assert(state != NULL);
+ return (cjkcodecs_module_state *)state;
+}
#define CODEC_INIT(encoding) \
- static int encoding##_codec_init(const void *config)
+ static int encoding##_codec_init(const MultibyteCodec *codec)
#define ENCODER_INIT(encoding) \
static int encoding##_encode_init( \
- MultibyteCodec_State *state, const void *config)
+ MultibyteCodec_State *state, const MultibyteCodec *codec)
#define ENCODER(encoding) \
static Py_ssize_t encoding##_encode( \
- MultibyteCodec_State *state, const void *config, \
+ MultibyteCodec_State *state, const MultibyteCodec *codec, \
int kind, const void *data, \
Py_ssize_t *inpos, Py_ssize_t inlen, \
unsigned char **outbuf, Py_ssize_t outleft, int flags)
#define ENCODER_RESET(encoding) \
static Py_ssize_t encoding##_encode_reset( \
- MultibyteCodec_State *state, const void *config, \
+ MultibyteCodec_State *state, const MultibyteCodec *codec, \
unsigned char **outbuf, Py_ssize_t outleft)
#define DECODER_INIT(encoding) \
static int encoding##_decode_init( \
- MultibyteCodec_State *state, const void *config)
+ MultibyteCodec_State *state, const MultibyteCodec *codec)
#define DECODER(encoding) \
static Py_ssize_t encoding##_decode( \
- MultibyteCodec_State *state, const void *config, \
+ MultibyteCodec_State *state, const MultibyteCodec *codec, \
const unsigned char **inbuf, Py_ssize_t inleft, \
_PyUnicodeWriter *writer)
#define DECODER_RESET(encoding) \
static Py_ssize_t encoding##_decode_reset( \
- MultibyteCodec_State *state, const void *config)
+ MultibyteCodec_State *state, const MultibyteCodec *codec)
#define NEXT_IN(i) \
do { \
@@ -193,6 +211,9 @@ static const struct dbcs_map *mapping_list;
(m)->bottom]) != NOCHAR)
#define TRYMAP_ENC(charset, assi, uni) \
_TRYMAP_ENC(&charset##_encmap[(uni) >> 8], assi, (uni) & 0xff)
+#define TRYMAP_ENC_ST(charset, assi, uni) \
+ _TRYMAP_ENC(&(codec->modstate->charset##_encmap)[(uni) >> 8], \
+ assi, (uni) & 0xff)
#define _TRYMAP_DEC(m, assi, val) \
((m)->map != NULL && \
@@ -201,17 +222,45 @@ static const struct dbcs_map *mapping_list;
((assi) = (m)->map[(val) - (m)->bottom]) != UNIINV)
#define TRYMAP_DEC(charset, assi, c1, c2) \
_TRYMAP_DEC(&charset##_decmap[c1], assi, c2)
+#define TRYMAP_DEC_ST(charset, assi, c1, c2) \
+ _TRYMAP_DEC(&(codec->modstate->charset##_decmap)[c1], assi, c2)
+
+#define BEGIN_MAPPINGS_LIST(NUM) \
+static int \
+add_mappings(cjkcodecs_module_state *st) \
+{ \
+ int idx = 0; \
+ (void)idx; \
+ st->num_mappings = NUM; \
+ st->mapping_list = PyMem_Calloc(NUM, sizeof(struct dbcs_map)); \
+ if (st->mapping_list == NULL) { \
+ return -1; \
+ }
-#define BEGIN_MAPPINGS_LIST static const struct dbcs_map _mapping_list[] = {
-#define MAPPING_ENCONLY(enc) {#enc, (void*)enc##_encmap, NULL},
-#define MAPPING_DECONLY(enc) {#enc, NULL, (void*)enc##_decmap},
-#define MAPPING_ENCDEC(enc) {#enc, (void*)enc##_encmap, (void*)enc##_decmap},
-#define END_MAPPINGS_LIST \
- {"", NULL, NULL} }; \
- static const struct dbcs_map *mapping_list = \
- (const struct dbcs_map *)_mapping_list;
+#define MAPPING_ENCONLY(enc) \
+ st->mapping_list[idx++] = (struct dbcs_map){#enc, (void*)enc##_encmap, NULL};
+#define MAPPING_DECONLY(enc) \
+ st->mapping_list[idx++] = (struct dbcs_map){#enc, NULL, (void*)enc##_decmap};
+#define MAPPING_ENCDEC(enc) \
+ st->mapping_list[idx++] = (struct dbcs_map){#enc, (void*)enc##_encmap, (void*)enc##_decmap};
+
+#define END_MAPPINGS_LIST \
+ assert(st->num_mappings == idx); \
+ return 0; \
+}
+
+#define BEGIN_CODECS_LIST(NUM) \
+static int \
+add_codecs(cjkcodecs_module_state *st) \
+{ \
+ int idx = 0; \
+ (void)idx; \
+ st->num_codecs = NUM; \
+ st->codec_list = PyMem_Calloc(NUM, sizeof(MultibyteCodec)); \
+ if (st->codec_list == NULL) { \
+ return -1; \
+ }
-#define BEGIN_CODECS_LIST static const MultibyteCodec _codec_list[] = {
#define _STATEFUL_METHODS(enc) \
enc##_encode, \
enc##_encode_init, \
@@ -222,23 +271,24 @@ static const struct dbcs_map *mapping_list;
#define _STATELESS_METHODS(enc) \
enc##_encode, NULL, NULL, \
enc##_decode, NULL, NULL,
-#define CODEC_STATEFUL(enc) { \
- #enc, NULL, NULL, \
- _STATEFUL_METHODS(enc) \
-},
-#define CODEC_STATELESS(enc) { \
- #enc, NULL, NULL, \
- _STATELESS_METHODS(enc) \
-},
-#define CODEC_STATELESS_WINIT(enc) { \
- #enc, NULL, \
- enc##_codec_init, \
- _STATELESS_METHODS(enc) \
-},
-#define END_CODECS_LIST \
- {"", NULL,} }; \
- static const MultibyteCodec *codec_list = \
- (const MultibyteCodec *)_codec_list;
+
+#define NEXT_CODEC \
+ st->codec_list[idx++]
+
+#define CODEC_STATEFUL(enc) \
+ NEXT_CODEC = (MultibyteCodec){#enc, NULL, NULL, _STATEFUL_METHODS(enc)};
+#define CODEC_STATELESS(enc) \
+ NEXT_CODEC = (MultibyteCodec){#enc, NULL, NULL, _STATELESS_METHODS(enc)};
+#define CODEC_STATELESS_WINIT(enc) \
+ NEXT_CODEC = (MultibyteCodec){#enc, NULL, enc##_codec_init, _STATELESS_METHODS(enc)};
+
+#define END_CODECS_LIST \
+ assert(st->num_codecs == idx); \
+ for (int i = 0; i < st->num_codecs; i++) { \
+ st->codec_list[i].modstate = st; \
+ } \
+ return 0; \
+}
@@ -248,59 +298,102 @@ getmultibytecodec(void)
return _PyImport_GetModuleAttrString("_multibytecodec", "__create_codec");
}
-static PyObject *
-getcodec(PyObject *self, PyObject *encoding)
+static void
+destroy_codec_capsule(PyObject *capsule)
{
- PyObject *codecobj, *r, *cofunc;
- const MultibyteCodec *codec;
- const char *enc;
+ void *ptr = PyCapsule_GetPointer(capsule, CODEC_CAPSULE);
+ codec_capsule *data = (codec_capsule *)ptr;
+ Py_DECREF(data->cjk_module);
+ PyMem_Free(ptr);
+}
- if (!PyUnicode_Check(encoding)) {
- PyErr_SetString(PyExc_TypeError,
- "encoding name must be a string.");
+static codec_capsule *
+capsulate_codec(PyObject *mod, const MultibyteCodec *codec)
+{
+ codec_capsule *data = PyMem_Malloc(sizeof(codec_capsule));
+ if (data == NULL) {
+ PyErr_NoMemory();
return NULL;
}
- enc = PyUnicode_AsUTF8(encoding);
- if (enc == NULL)
- return NULL;
+ data->codec = codec;
+ data->cjk_module = Py_NewRef(mod);
+ return data;
+}
- cofunc = getmultibytecodec();
- if (cofunc == NULL)
+static PyObject *
+_getcodec(PyObject *self, const MultibyteCodec *codec)
+{
+ PyObject *cofunc = getmultibytecodec();
+ if (cofunc == NULL) {
return NULL;
+ }
- for (codec = codec_list; codec->encoding[0]; codec++)
- if (strcmp(codec->encoding, enc) == 0)
- break;
-
- if (codec->encoding[0] == '\0') {
- PyErr_SetString(PyExc_LookupError,
- "no such codec is supported.");
+ codec_capsule *data = capsulate_codec(self, codec);
+ if (data == NULL) {
+ Py_DECREF(cofunc);
return NULL;
}
-
- codecobj = PyCapsule_New((void *)codec, PyMultibyteCodec_CAPSULE_NAME, NULL);
- if (codecobj == NULL)
+ PyObject *codecobj = PyCapsule_New(data, CODEC_CAPSULE,
+ destroy_codec_capsule);
+ if (codecobj == NULL) {
+ PyMem_Free(data);
+ Py_DECREF(cofunc);
return NULL;
+ }
- r = PyObject_CallOneArg(cofunc, codecobj);
+ PyObject *res = PyObject_CallOneArg(cofunc, codecobj);
Py_DECREF(codecobj);
Py_DECREF(cofunc);
+ return res;
+}
+
+static PyObject *
+getcodec(PyObject *self, PyObject *encoding)
+{
+ if (!PyUnicode_Check(encoding)) {
+ PyErr_SetString(PyExc_TypeError,
+ "encoding name must be a string.");
+ return NULL;
+ }
+ const char *enc = PyUnicode_AsUTF8(encoding);
+ if (enc == NULL) {
+ return NULL;
+ }
+
+ cjkcodecs_module_state *st = get_module_state(self);
+ for (int i = 0; i < st->num_codecs; i++) {
+ const MultibyteCodec *codec = &st->codec_list[i];
+ if (strcmp(codec->encoding, enc) == 0) {
+ return _getcodec(self, codec);
+ }
+ }
- return r;
+ PyErr_SetString(PyExc_LookupError,
+ "no such codec is supported.");
+ return NULL;
}
+static int add_mappings(cjkcodecs_module_state *);
+static int add_codecs(cjkcodecs_module_state *);
static int
register_maps(PyObject *module)
{
- const struct dbcs_map *h;
+ // Init module state.
+ cjkcodecs_module_state *st = get_module_state(module);
+ if (add_mappings(st) < 0) {
+ return -1;
+ }
+ if (add_codecs(st) < 0) {
+ return -1;
+ }
- for (h = mapping_list; h->charset[0] != '\0'; h++) {
+ for (int i = 0; i < st->num_mappings; i++) {
+ const struct dbcs_map *h = &st->mapping_list[i];
char mhname[256] = "__map_";
strcpy(mhname + sizeof("__map_") - 1, h->charset);
- PyObject *capsule = PyCapsule_New((void *)h,
- PyMultibyteCodec_CAPSULE_NAME, NULL);
+ PyObject *capsule = PyCapsule_New((void *)h, MAP_CAPSULE, NULL);
if (capsule == NULL) {
return -1;
}
@@ -364,14 +457,14 @@ importmap(const char *modname, const char *symbol,
o = PyObject_GetAttrString(mod, symbol);
if (o == NULL)
goto errorexit;
- else if (!PyCapsule_IsValid(o, PyMultibyteCodec_CAPSULE_NAME)) {
+ else if (!PyCapsule_IsValid(o, MAP_CAPSULE)) {
PyErr_SetString(PyExc_ValueError,
"map data must be a Capsule.");
goto errorexit;
}
else {
struct dbcs_map *map;
- map = PyCapsule_GetPointer(o, PyMultibyteCodec_CAPSULE_NAME);
+ map = PyCapsule_GetPointer(o, MAP_CAPSULE);
if (encmap != NULL)
*encmap = map->encmap;
if (decmap != NULL)
@@ -394,6 +487,13 @@ _cjk_exec(PyObject *module)
return register_maps(module);
}
+static void
+_cjk_free(void *mod)
+{
+ cjkcodecs_module_state *st = get_module_state((PyObject *)mod);
+ PyMem_Free(st->mapping_list);
+ PyMem_Free(st->codec_list);
+}
static struct PyMethodDef _cjk_methods[] = {
{"getcodec", (PyCFunction)getcodec, METH_O, ""},
@@ -409,9 +509,10 @@ static PyModuleDef_Slot _cjk_slots[] = {
static struct PyModuleDef _cjk_module = { \
PyModuleDef_HEAD_INIT, \
.m_name = "_codecs_"#loc, \
- .m_size = 0, \
+ .m_size = sizeof(cjkcodecs_module_state), \
.m_methods = _cjk_methods, \
.m_slots = _cjk_slots, \
+ .m_free = _cjk_free, \
}; \
\
PyMODINIT_FUNC \
diff --git a/Modules/cjkcodecs/emu_jisx0213_2000.h b/Modules/cjkcodecs/emu_jisx0213_2000.h
index a5d5a7063d37e6..c30c948a2b1279 100644
--- a/Modules/cjkcodecs/emu_jisx0213_2000.h
+++ b/Modules/cjkcodecs/emu_jisx0213_2000.h
@@ -5,8 +5,8 @@
# define EMULATE_JISX0213_2000_ENCODE_INVALID 1
#endif
-#define EMULATE_JISX0213_2000_ENCODE_BMP(assi, c) \
- if (config == (void *)2000 && ( \
+#define EMULATE_JISX0213_2000_ENCODE_BMP(config, assi, c) \
+ if ((config) == (void *)2000 && ( \
(c) == 0x9B1C || (c) == 0x4FF1 || \
(c) == 0x525D || (c) == 0x541E || \
(c) == 0x5653 || (c) == 0x59F8 || \
@@ -14,12 +14,12 @@
(c) == 0x7626 || (c) == 0x7E6B)) { \
return EMULATE_JISX0213_2000_ENCODE_INVALID; \
} \
- else if (config == (void *)2000 && (c) == 0x9B1D) { \
+ else if ((config) == (void *)2000 && (c) == 0x9B1D) { \
(assi) = 0x8000 | 0x7d3b; \
}
-#define EMULATE_JISX0213_2000_ENCODE_EMP(assi, c) \
- if (config == (void *)2000 && (c) == 0x20B9F) { \
+#define EMULATE_JISX0213_2000_ENCODE_EMP(config, assi, c) \
+ if ((config) == (void *)2000 && (c) == 0x20B9F) { \
return EMULATE_JISX0213_2000_ENCODE_INVALID; \
}
@@ -27,8 +27,8 @@
# define EMULATE_JISX0213_2000_DECODE_INVALID 2
#endif
-#define EMULATE_JISX0213_2000_DECODE_PLANE1(assi, c1, c2) \
- if (config == (void *)2000 && \
+#define EMULATE_JISX0213_2000_DECODE_PLANE1(config, assi, c1, c2) \
+ if ((config) == (void *)2000 && \
(((c1) == 0x2E && (c2) == 0x21) || \
((c1) == 0x2F && (c2) == 0x7E) || \
((c1) == 0x4F && (c2) == 0x54) || \
@@ -42,13 +42,13 @@
return EMULATE_JISX0213_2000_DECODE_INVALID; \
}
-#define EMULATE_JISX0213_2000_DECODE_PLANE2(writer, c1, c2) \
- if (config == (void *)2000 && (c1) == 0x7D && (c2) == 0x3B) { \
+#define EMULATE_JISX0213_2000_DECODE_PLANE2(config, writer, c1, c2) \
+ if ((config) == (void *)2000 && (c1) == 0x7D && (c2) == 0x3B) { \
OUTCHAR(0x9B1D); \
}
-#define EMULATE_JISX0213_2000_DECODE_PLANE2_CHAR(assi, c1, c2) \
- if (config == (void *)2000 && (c1) == 0x7D && (c2) == 0x3B) { \
+#define EMULATE_JISX0213_2000_DECODE_PLANE2_CHAR(config, assi, c1, c2) \
+ if ((config) == (void *)2000 && (c1) == 0x7D && (c2) == 0x3B) { \
(assi) = 0x9B1D; \
}
diff --git a/Modules/cjkcodecs/multibytecodec.c b/Modules/cjkcodecs/multibytecodec.c
index 8564494f6262fb..233fc3020fd6a8 100644
--- a/Modules/cjkcodecs/multibytecodec.c
+++ b/Modules/cjkcodecs/multibytecodec.c
@@ -19,26 +19,27 @@ typedef struct {
PyTypeObject *writer_type;
PyTypeObject *multibytecodec_type;
PyObject *str_write;
-} _multibytecodec_state;
+} module_state;
-static _multibytecodec_state *
-_multibytecodec_get_state(PyObject *module)
+static module_state *
+get_module_state(PyObject *module)
{
- _multibytecodec_state *state = PyModule_GetState(module);
+ module_state *state = PyModule_GetState(module);
assert(state != NULL);
return state;
}
static struct PyModuleDef _multibytecodecmodule;
-static _multibytecodec_state *
-_multibyte_codec_find_state_by_type(PyTypeObject *type)
+
+static module_state *
+find_state_by_def(PyTypeObject *type)
{
PyObject *module = PyType_GetModuleByDef(type, &_multibytecodecmodule);
assert(module != NULL);
- return _multibytecodec_get_state(module);
+ return get_module_state(module);
}
-#define clinic_get_state() _multibyte_codec_find_state_by_type(type)
+#define clinic_get_state() find_state_by_def(type)
/*[clinic input]
module _multibytecodec
class _multibytecodec.MultibyteCodec "MultibyteCodecObject *" "clinic_get_state()->multibytecodec_type"
@@ -66,7 +67,7 @@ typedef struct {
static char *incnewkwarglist[] = {"errors", NULL};
static char *streamkwarglist[] = {"stream", "errors", NULL};
-static PyObject *multibytecodec_encode(MultibyteCodec *,
+static PyObject *multibytecodec_encode(const MultibyteCodec *,
MultibyteCodec_State *, PyObject *, Py_ssize_t *,
PyObject *, int);
@@ -220,7 +221,7 @@ expand_encodebuffer(MultibyteEncodeBuffer *buf, Py_ssize_t esize)
*/
static int
-multibytecodec_encerror(MultibyteCodec *codec,
+multibytecodec_encerror(const MultibyteCodec *codec,
MultibyteCodec_State *state,
MultibyteEncodeBuffer *buf,
PyObject *errors, Py_ssize_t e)
@@ -271,7 +272,7 @@ multibytecodec_encerror(MultibyteCodec *codec,
for (;;) {
Py_ssize_t outleft = (Py_ssize_t)(buf->outbuf_end - buf->outbuf);
- r = codec->encode(state, codec->config,
+ r = codec->encode(state, codec,
kind, data, &inpos, 1,
&buf->outbuf, outleft, 0);
if (r == MBERR_TOOSMALL) {
@@ -374,7 +375,7 @@ multibytecodec_encerror(MultibyteCodec *codec,
}
static int
-multibytecodec_decerror(MultibyteCodec *codec,
+multibytecodec_decerror(const MultibyteCodec *codec,
MultibyteCodec_State *state,
MultibyteDecodeBuffer *buf,
PyObject *errors, Py_ssize_t e)
@@ -478,7 +479,7 @@ multibytecodec_decerror(MultibyteCodec *codec,
}
static PyObject *
-multibytecodec_encode(MultibyteCodec *codec,
+multibytecodec_encode(const MultibyteCodec *codec,
MultibyteCodec_State *state,
PyObject *text, Py_ssize_t *inpos_t,
PyObject *errors, int flags)
@@ -520,7 +521,7 @@ multibytecodec_encode(MultibyteCodec *codec,
* error callbacks can relocate the cursor anywhere on buffer*/
Py_ssize_t outleft = (Py_ssize_t)(buf.outbuf_end - buf.outbuf);
- r = codec->encode(state, codec->config,
+ r = codec->encode(state, codec,
kind, data,
&buf.inpos, buf.inlen,
&buf.outbuf, outleft, flags);
@@ -537,7 +538,7 @@ multibytecodec_encode(MultibyteCodec *codec,
Py_ssize_t outleft;
outleft = (Py_ssize_t)(buf.outbuf_end - buf.outbuf);
- r = codec->encreset(state, codec->config, &buf.outbuf,
+ r = codec->encreset(state, codec, &buf.outbuf,
outleft);
if (r == 0)
break;
@@ -615,7 +616,7 @@ _multibytecodec_MultibyteCodec_encode_impl(MultibyteCodecObject *self,
}
if (self->codec->encinit != NULL &&
- self->codec->encinit(&state, self->codec->config) != 0)
+ self->codec->encinit(&state, self->codec) != 0)
goto errorexit;
r = multibytecodec_encode(self->codec, &state,
input, NULL, errorcb,
@@ -679,7 +680,7 @@ _multibytecodec_MultibyteCodec_decode_impl(MultibyteCodecObject *self,
buf.inbuf_end = buf.inbuf_top + datalen;
if (self->codec->decinit != NULL &&
- self->codec->decinit(&state, self->codec->config) != 0)
+ self->codec->decinit(&state, self->codec) != 0)
goto errorexit;
while (buf.inbuf < buf.inbuf_end) {
@@ -687,7 +688,7 @@ _multibytecodec_MultibyteCodec_decode_impl(MultibyteCodecObject *self,
inleft = (Py_ssize_t)(buf.inbuf_end - buf.inbuf);
- r = self->codec->decode(&state, self->codec->config,
+ r = self->codec->decode(&state, self->codec,
&buf.inbuf, inleft, &buf.writer);
if (r == 0)
break;
@@ -719,9 +720,17 @@ static struct PyMethodDef multibytecodec_methods[] = {
};
static int
-multibytecodec_traverse(PyObject *self, visitproc visit, void *arg)
+multibytecodec_clear(MultibyteCodecObject *self)
+{
+ Py_CLEAR(self->cjk_module);
+ return 0;
+}
+
+static int
+multibytecodec_traverse(MultibyteCodecObject *self, visitproc visit, void *arg)
{
Py_VISIT(Py_TYPE(self));
+ Py_VISIT(self->cjk_module);
return 0;
}
@@ -730,6 +739,7 @@ multibytecodec_dealloc(MultibyteCodecObject *self)
{
PyObject_GC_UnTrack(self);
PyTypeObject *tp = Py_TYPE(self);
+ (void)multibytecodec_clear(self);
tp->tp_free(self);
Py_DECREF(tp);
}
@@ -739,6 +749,7 @@ static PyType_Slot multibytecodec_slots[] = {
{Py_tp_getattro, PyObject_GenericGetAttr},
{Py_tp_methods, multibytecodec_methods},
{Py_tp_traverse, multibytecodec_traverse},
+ {Py_tp_clear, multibytecodec_clear},
{0, NULL},
};
@@ -877,7 +888,7 @@ decoder_feed_buffer(MultibyteStatefulDecoderContext *ctx,
inleft = (Py_ssize_t)(buf->inbuf_end - buf->inbuf);
- r = ctx->codec->decode(&ctx->state, ctx->codec->config,
+ r = ctx->codec->decode(&ctx->state, ctx->codec,
&buf->inbuf, inleft, &buf->writer);
if (r == 0 || r == MBERR_TOOFEW)
break;
@@ -1004,7 +1015,7 @@ _multibytecodec_MultibyteIncrementalEncoder_reset_impl(MultibyteIncrementalEncod
Py_ssize_t r;
if (self->codec->encreset != NULL) {
outbuf = buffer;
- r = self->codec->encreset(&self->state, self->codec->config,
+ r = self->codec->encreset(&self->state, self->codec,
&outbuf, sizeof(buffer));
if (r != 0)
return NULL;
@@ -1040,7 +1051,7 @@ mbiencoder_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
if (codec == NULL)
goto errorexit;
- _multibytecodec_state *state = _multibyte_codec_find_state_by_type(type);
+ module_state *state = find_state_by_def(type);
if (!MultibyteCodec_Check(state, codec)) {
PyErr_SetString(PyExc_TypeError, "codec is unexpected type");
goto errorexit;
@@ -1052,7 +1063,7 @@ mbiencoder_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
if (self->errors == NULL)
goto errorexit;
if (self->codec->encinit != NULL &&
- self->codec->encinit(&self->state, self->codec->config) != 0)
+ self->codec->encinit(&self->state, self->codec) != 0)
goto errorexit;
Py_DECREF(codec);
@@ -1281,7 +1292,7 @@ _multibytecodec_MultibyteIncrementalDecoder_reset_impl(MultibyteIncrementalDecod
/*[clinic end generated code: output=da423b1782c23ed1 input=3b63b3be85b2fb45]*/
{
if (self->codec->decreset != NULL &&
- self->codec->decreset(&self->state, self->codec->config) != 0)
+ self->codec->decreset(&self->state, self->codec) != 0)
return NULL;
self->pendingsize = 0;
@@ -1315,7 +1326,7 @@ mbidecoder_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
if (codec == NULL)
goto errorexit;
- _multibytecodec_state *state = _multibyte_codec_find_state_by_type(type);
+ module_state *state = find_state_by_def(type);
if (!MultibyteCodec_Check(state, codec)) {
PyErr_SetString(PyExc_TypeError, "codec is unexpected type");
goto errorexit;
@@ -1327,7 +1338,7 @@ mbidecoder_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
if (self->errors == NULL)
goto errorexit;
if (self->codec->decinit != NULL &&
- self->codec->decinit(&self->state, self->codec->config) != 0)
+ self->codec->decinit(&self->state, self->codec) != 0)
goto errorexit;
Py_DECREF(codec);
@@ -1589,7 +1600,7 @@ _multibytecodec_MultibyteStreamReader_reset_impl(MultibyteStreamReaderObject *se
/*[clinic end generated code: output=138490370a680abc input=5d4140db84b5e1e2]*/
{
if (self->codec->decreset != NULL &&
- self->codec->decreset(&self->state, self->codec->config) != 0)
+ self->codec->decreset(&self->state, self->codec) != 0)
return NULL;
self->pendingsize = 0;
@@ -1630,7 +1641,7 @@ mbstreamreader_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
if (codec == NULL)
goto errorexit;
- _multibytecodec_state *state = _multibyte_codec_find_state_by_type(type);
+ module_state *state = find_state_by_def(type);
if (!MultibyteCodec_Check(state, codec)) {
PyErr_SetString(PyExc_TypeError, "codec is unexpected type");
goto errorexit;
@@ -1643,7 +1654,7 @@ mbstreamreader_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
if (self->errors == NULL)
goto errorexit;
if (self->codec->decinit != NULL &&
- self->codec->decinit(&self->state, self->codec->config) != 0)
+ self->codec->decinit(&self->state, self->codec) != 0)
goto errorexit;
Py_DECREF(codec);
@@ -1735,7 +1746,7 @@ _multibytecodec_MultibyteStreamWriter_write_impl(MultibyteStreamWriterObject *se
PyObject *strobj)
/*[clinic end generated code: output=68ade3aea26410ac input=199f26f68bd8425a]*/
{
- _multibytecodec_state *state = PyType_GetModuleState(cls);
+ module_state *state = PyType_GetModuleState(cls);
assert(state != NULL);
if (mbstreamwriter_iwrite(self, strobj, state->str_write)) {
return NULL;
@@ -1766,7 +1777,7 @@ _multibytecodec_MultibyteStreamWriter_writelines_impl(MultibyteStreamWriterObjec
return NULL;
}
- _multibytecodec_state *state = PyType_GetModuleState(cls);
+ module_state *state = PyType_GetModuleState(cls);
assert(state != NULL);
for (i = 0; i < PySequence_Length(lines); i++) {
/* length can be changed even within this loop */
@@ -1817,7 +1828,7 @@ _multibytecodec_MultibyteStreamWriter_reset_impl(MultibyteStreamWriterObject *se
assert(PyBytes_Check(pwrt));
- _multibytecodec_state *state = PyType_GetModuleState(cls);
+ module_state *state = PyType_GetModuleState(cls);
assert(state != NULL);
if (PyBytes_Size(pwrt) > 0) {
@@ -1853,7 +1864,7 @@ mbstreamwriter_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
if (codec == NULL)
goto errorexit;
- _multibytecodec_state *state = _multibyte_codec_find_state_by_type(type);
+ module_state *state = find_state_by_def(type);
if (!MultibyteCodec_Check(state, codec)) {
PyErr_SetString(PyExc_TypeError, "codec is unexpected type");
goto errorexit;
@@ -1866,7 +1877,7 @@ mbstreamwriter_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
if (self->errors == NULL)
goto errorexit;
if (self->codec->encinit != NULL &&
- self->codec->encinit(&self->state, self->codec->config) != 0)
+ self->codec->encinit(&self->state, self->codec) != 0)
goto errorexit;
Py_DECREF(codec);
@@ -1952,22 +1963,23 @@ _multibytecodec___create_codec(PyObject *module, PyObject *arg)
/*[clinic end generated code: output=cfa3dce8260e809d input=6840b2a6b183fcfa]*/
{
MultibyteCodecObject *self;
- MultibyteCodec *codec;
- if (!PyCapsule_IsValid(arg, PyMultibyteCodec_CAPSULE_NAME)) {
+ if (!PyCapsule_IsValid(arg, CODEC_CAPSULE)) {
PyErr_SetString(PyExc_ValueError, "argument type invalid");
return NULL;
}
- codec = PyCapsule_GetPointer(arg, PyMultibyteCodec_CAPSULE_NAME);
- if (codec->codecinit != NULL && codec->codecinit(codec->config) != 0)
+ codec_capsule *data = PyCapsule_GetPointer(arg, CODEC_CAPSULE);
+ const MultibyteCodec *codec = data->codec;
+ if (codec->codecinit != NULL && codec->codecinit(codec) != 0)
return NULL;
- _multibytecodec_state *state = _multibytecodec_get_state(module);
+ module_state *state = get_module_state(module);
self = PyObject_GC_New(MultibyteCodecObject, state->multibytecodec_type);
if (self == NULL)
return NULL;
self->codec = codec;
+ self->cjk_module = Py_NewRef(data->cjk_module);
PyObject_GC_Track(self);
return (PyObject *)self;
@@ -1976,7 +1988,7 @@ _multibytecodec___create_codec(PyObject *module, PyObject *arg)
static int
_multibytecodec_traverse(PyObject *mod, visitproc visit, void *arg)
{
- _multibytecodec_state *state = _multibytecodec_get_state(mod);
+ module_state *state = get_module_state(mod);
Py_VISIT(state->multibytecodec_type);
Py_VISIT(state->encoder_type);
Py_VISIT(state->decoder_type);
@@ -1988,7 +2000,7 @@ _multibytecodec_traverse(PyObject *mod, visitproc visit, void *arg)
static int
_multibytecodec_clear(PyObject *mod)
{
- _multibytecodec_state *state = _multibytecodec_get_state(mod);
+ module_state *state = get_module_state(mod);
Py_CLEAR(state->multibytecodec_type);
Py_CLEAR(state->encoder_type);
Py_CLEAR(state->decoder_type);
@@ -2022,7 +2034,7 @@ _multibytecodec_free(void *mod)
static int
_multibytecodec_exec(PyObject *mod)
{
- _multibytecodec_state *state = _multibytecodec_get_state(mod);
+ module_state *state = get_module_state(mod);
state->str_write = PyUnicode_InternFromString("write");
if (state->str_write == NULL) {
return -1;
@@ -2056,7 +2068,7 @@ static PyModuleDef_Slot _multibytecodec_slots[] = {
static struct PyModuleDef _multibytecodecmodule = {
.m_base = PyModuleDef_HEAD_INIT,
.m_name = "_multibytecodec",
- .m_size = sizeof(_multibytecodec_state),
+ .m_size = sizeof(module_state),
.m_methods = _multibytecodec_methods,
.m_slots = _multibytecodec_slots,
.m_traverse = _multibytecodec_traverse,
diff --git a/Modules/cjkcodecs/multibytecodec.h b/Modules/cjkcodecs/multibytecodec.h
index 69404ba96aa1f0..f59362205d26fc 100644
--- a/Modules/cjkcodecs/multibytecodec.h
+++ b/Modules/cjkcodecs/multibytecodec.h
@@ -27,28 +27,31 @@ typedef struct {
unsigned char c[8];
} MultibyteCodec_State;
-typedef int (*mbcodec_init)(const void *config);
+struct _cjk_mod_state;
+struct _multibyte_codec;
+
+typedef int (*mbcodec_init)(const struct _multibyte_codec *codec);
typedef Py_ssize_t (*mbencode_func)(MultibyteCodec_State *state,
- const void *config,
+ const struct _multibyte_codec *codec,
int kind, const void *data,
Py_ssize_t *inpos, Py_ssize_t inlen,
unsigned char **outbuf, Py_ssize_t outleft,
int flags);
typedef int (*mbencodeinit_func)(MultibyteCodec_State *state,
- const void *config);
+ const struct _multibyte_codec *codec);
typedef Py_ssize_t (*mbencodereset_func)(MultibyteCodec_State *state,
- const void *config,
+ const struct _multibyte_codec *codec,
unsigned char **outbuf, Py_ssize_t outleft);
typedef Py_ssize_t (*mbdecode_func)(MultibyteCodec_State *state,
- const void *config,
+ const struct _multibyte_codec *codec,
const unsigned char **inbuf, Py_ssize_t inleft,
_PyUnicodeWriter *writer);
typedef int (*mbdecodeinit_func)(MultibyteCodec_State *state,
- const void *config);
+ const struct _multibyte_codec *codec);
typedef Py_ssize_t (*mbdecodereset_func)(MultibyteCodec_State *state,
- const void *config);
+ const struct _multibyte_codec *codec);
-typedef struct {
+typedef struct _multibyte_codec {
const char *encoding;
const void *config;
mbcodec_init codecinit;
@@ -58,18 +61,20 @@ typedef struct {
mbdecode_func decode;
mbdecodeinit_func decinit;
mbdecodereset_func decreset;
+ struct _cjk_mod_state *modstate;
} MultibyteCodec;
typedef struct {
PyObject_HEAD
- MultibyteCodec *codec;
+ const MultibyteCodec *codec;
+ PyObject *cjk_module;
} MultibyteCodecObject;
#define MultibyteCodec_Check(state, op) Py_IS_TYPE((op), state->multibytecodec_type)
#define _MultibyteStatefulCodec_HEAD \
PyObject_HEAD \
- MultibyteCodec *codec; \
+ const MultibyteCodec *codec; \
MultibyteCodec_State state; \
PyObject *errors;
typedef struct {
@@ -130,7 +135,13 @@ typedef struct {
#define MBENC_FLUSH 0x0001 /* encode all characters encodable */
#define MBENC_MAX MBENC_FLUSH
-#define PyMultibyteCodec_CAPSULE_NAME "multibytecodec.__map_*"
+typedef struct {
+ const MultibyteCodec *codec;
+ PyObject *cjk_module;
+} codec_capsule;
+
+#define MAP_CAPSULE "multibytecodec.map"
+#define CODEC_CAPSULE "multibytecodec.codec"
#ifdef __cplusplus
diff --git a/Modules/clinic/_posixsubprocess.c.h b/Modules/clinic/_posixsubprocess.c.h
new file mode 100644
index 00000000000000..f08878cf668908
--- /dev/null
+++ b/Modules/clinic/_posixsubprocess.c.h
@@ -0,0 +1,162 @@
+/*[clinic input]
+preserve
+[clinic start generated code]*/
+
+#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
+# include "pycore_gc.h" // PyGC_Head
+# include "pycore_runtime.h" // _Py_ID()
+#endif
+
+
+PyDoc_STRVAR(subprocess_fork_exec__doc__,
+"fork_exec($module, args, executable_list, close_fds, pass_fds, cwd,\n"
+" env, p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite,\n"
+" errpipe_read, errpipe_write, restore_signals, call_setsid,\n"
+" pgid_to_set, gid, extra_groups, uid, child_umask, preexec_fn,\n"
+" allow_vfork, /)\n"
+"--\n"
+"\n"
+"Spawn a fresh new child process.\n"
+"\n"
+"Fork a child process, close parent file descriptors as appropriate in the\n"
+"child and duplicate the few that are needed before calling exec() in the\n"
+"child process.\n"
+"\n"
+"If close_fds is True, close file descriptors 3 and higher, except those listed\n"
+"in the sorted tuple pass_fds.\n"
+"\n"
+"The preexec_fn, if supplied, will be called immediately before closing file\n"
+"descriptors and exec.\n"
+"\n"
+"WARNING: preexec_fn is NOT SAFE if your application uses threads.\n"
+" It may trigger infrequent, difficult to debug deadlocks.\n"
+"\n"
+"If an error occurs in the child process before the exec, it is\n"
+"serialized and written to the errpipe_write fd per subprocess.py.\n"
+"\n"
+"Returns: the child process\'s PID.\n"
+"\n"
+"Raises: Only on an error in the parent process.");
+
+#define SUBPROCESS_FORK_EXEC_METHODDEF \
+ {"fork_exec", _PyCFunction_CAST(subprocess_fork_exec), METH_FASTCALL, subprocess_fork_exec__doc__},
+
+static PyObject *
+subprocess_fork_exec_impl(PyObject *module, PyObject *process_args,
+ PyObject *executable_list, int close_fds,
+ PyObject *py_fds_to_keep, PyObject *cwd_obj,
+ PyObject *env_list, int p2cread, int p2cwrite,
+ int c2pread, int c2pwrite, int errread,
+ int errwrite, int errpipe_read, int errpipe_write,
+ int restore_signals, int call_setsid,
+ pid_t pgid_to_set, PyObject *gid_object,
+ PyObject *extra_groups_packed,
+ PyObject *uid_object, int child_umask,
+ PyObject *preexec_fn, int allow_vfork);
+
+static PyObject *
+subprocess_fork_exec(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
+{
+ PyObject *return_value = NULL;
+ PyObject *process_args;
+ PyObject *executable_list;
+ int close_fds;
+ PyObject *py_fds_to_keep;
+ PyObject *cwd_obj;
+ PyObject *env_list;
+ int p2cread;
+ int p2cwrite;
+ int c2pread;
+ int c2pwrite;
+ int errread;
+ int errwrite;
+ int errpipe_read;
+ int errpipe_write;
+ int restore_signals;
+ int call_setsid;
+ pid_t pgid_to_set;
+ PyObject *gid_object;
+ PyObject *extra_groups_packed;
+ PyObject *uid_object;
+ int child_umask;
+ PyObject *preexec_fn;
+ int allow_vfork;
+
+ if (!_PyArg_CheckPositional("fork_exec", nargs, 23, 23)) {
+ goto exit;
+ }
+ process_args = args[0];
+ executable_list = args[1];
+ close_fds = PyObject_IsTrue(args[2]);
+ if (close_fds < 0) {
+ goto exit;
+ }
+ if (!PyTuple_Check(args[3])) {
+ _PyArg_BadArgument("fork_exec", "argument 4", "tuple", args[3]);
+ goto exit;
+ }
+ py_fds_to_keep = args[3];
+ cwd_obj = args[4];
+ env_list = args[5];
+ p2cread = _PyLong_AsInt(args[6]);
+ if (p2cread == -1 && PyErr_Occurred()) {
+ goto exit;
+ }
+ p2cwrite = _PyLong_AsInt(args[7]);
+ if (p2cwrite == -1 && PyErr_Occurred()) {
+ goto exit;
+ }
+ c2pread = _PyLong_AsInt(args[8]);
+ if (c2pread == -1 && PyErr_Occurred()) {
+ goto exit;
+ }
+ c2pwrite = _PyLong_AsInt(args[9]);
+ if (c2pwrite == -1 && PyErr_Occurred()) {
+ goto exit;
+ }
+ errread = _PyLong_AsInt(args[10]);
+ if (errread == -1 && PyErr_Occurred()) {
+ goto exit;
+ }
+ errwrite = _PyLong_AsInt(args[11]);
+ if (errwrite == -1 && PyErr_Occurred()) {
+ goto exit;
+ }
+ errpipe_read = _PyLong_AsInt(args[12]);
+ if (errpipe_read == -1 && PyErr_Occurred()) {
+ goto exit;
+ }
+ errpipe_write = _PyLong_AsInt(args[13]);
+ if (errpipe_write == -1 && PyErr_Occurred()) {
+ goto exit;
+ }
+ restore_signals = PyObject_IsTrue(args[14]);
+ if (restore_signals < 0) {
+ goto exit;
+ }
+ call_setsid = PyObject_IsTrue(args[15]);
+ if (call_setsid < 0) {
+ goto exit;
+ }
+ pgid_to_set = PyLong_AsPid(args[16]);
+ if (pgid_to_set == -1 && PyErr_Occurred()) {
+ goto exit;
+ }
+ gid_object = args[17];
+ extra_groups_packed = args[18];
+ uid_object = args[19];
+ child_umask = _PyLong_AsInt(args[20]);
+ if (child_umask == -1 && PyErr_Occurred()) {
+ goto exit;
+ }
+ preexec_fn = args[21];
+ allow_vfork = PyObject_IsTrue(args[22]);
+ if (allow_vfork < 0) {
+ goto exit;
+ }
+ return_value = subprocess_fork_exec_impl(module, process_args, executable_list, close_fds, py_fds_to_keep, cwd_obj, env_list, p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite, errpipe_read, errpipe_write, restore_signals, call_setsid, pgid_to_set, gid_object, extra_groups_packed, uid_object, child_umask, preexec_fn, allow_vfork);
+
+exit:
+ return return_value;
+}
+/*[clinic end generated code: output=46d71e86845c93d7 input=a9049054013a1b77]*/
diff --git a/Modules/gcmodule.c b/Modules/gcmodule.c
index 4eaa5490b6134c..966c1e615502ef 100644
--- a/Modules/gcmodule.c
+++ b/Modules/gcmodule.c
@@ -418,8 +418,20 @@ validate_list(PyGC_Head *head, enum flagstates flags)
static void
update_refs(PyGC_Head *containers)
{
+ PyGC_Head *next;
PyGC_Head *gc = GC_NEXT(containers);
- for (; gc != containers; gc = GC_NEXT(gc)) {
+
+ while (gc != containers) {
+ next = GC_NEXT(gc);
+ /* Move any object that might have become immortal to the
+ * permanent generation as the reference count is not accurately
+ * reflecting the actual number of live references to this object
+ */
+ if (_Py_IsImmortal(FROM_GC(gc))) {
+ gc_list_move(gc, &get_gc_state()->permanent_generation.head);
+ gc = next;
+ continue;
+ }
gc_reset_refs(gc, Py_REFCNT(FROM_GC(gc)));
/* Python's cyclic gc should never see an incoming refcount
* of 0: if something decref'ed to 0, it should have been
@@ -440,6 +452,7 @@ update_refs(PyGC_Head *containers)
* check instead of an assert?
*/
_PyObject_ASSERT(FROM_GC(gc), gc_get_refs(gc) != 0);
+ gc = next;
}
}
@@ -2348,16 +2361,17 @@ PyVarObject *
_PyObject_GC_Resize(PyVarObject *op, Py_ssize_t nitems)
{
const size_t basicsize = _PyObject_VAR_SIZE(Py_TYPE(op), nitems);
+ const size_t presize = _PyType_PreHeaderSize(((PyObject *)op)->ob_type);
_PyObject_ASSERT((PyObject *)op, !_PyObject_GC_IS_TRACKED(op));
- if (basicsize > (size_t)PY_SSIZE_T_MAX - sizeof(PyGC_Head)) {
+ if (basicsize > (size_t)PY_SSIZE_T_MAX - presize) {
return (PyVarObject *)PyErr_NoMemory();
}
-
- PyGC_Head *g = AS_GC(op);
- g = (PyGC_Head *)PyObject_Realloc(g, sizeof(PyGC_Head) + basicsize);
- if (g == NULL)
+ char *mem = (char *)op - presize;
+ mem = (char *)PyObject_Realloc(mem, presize + basicsize);
+ if (mem == NULL) {
return (PyVarObject *)PyErr_NoMemory();
- op = (PyVarObject *) FROM_GC(g);
+ }
+ op = (PyVarObject *) (mem + presize);
Py_SET_SIZE(op, nitems);
return op;
}
diff --git a/Modules/mathmodule.c b/Modules/mathmodule.c
index eddc1a33a953e6..4a2381d9611776 100644
--- a/Modules/mathmodule.c
+++ b/Modules/mathmodule.c
@@ -2096,7 +2096,7 @@ math_trunc(PyObject *module, PyObject *x)
return PyFloat_Type.tp_as_number->nb_int(x);
}
- if (Py_TYPE(x)->tp_dict == NULL) {
+ if (_PyType_IsReady(Py_TYPE(x))) {
if (PyType_Ready(Py_TYPE(x)) < 0)
return NULL;
}
@@ -2314,7 +2314,7 @@ math_log(PyObject *module, PyObject * const *args, Py_ssize_t nargs)
PyDoc_STRVAR(math_log_doc,
"log(x, [base=math.e])\n\
Return the logarithm of x to the given base.\n\n\
-If the base not specified, returns the natural logarithm (base e) of x.");
+If the base is not specified, returns the natural logarithm (base e) of x.");
/*[clinic input]
math.log2
diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c
index dd150107e4a9de..dcb5e7a0e0408c 100644
--- a/Modules/posixmodule.c
+++ b/Modules/posixmodule.c
@@ -4789,6 +4789,8 @@ os__path_isdir_impl(PyObject *module, PyObject *path)
FILE_BASIC_INFO info;
path_t _path = PATH_T_INITIALIZE("isdir", "path", 0, 1);
int result;
+ BOOL slow_path = TRUE;
+ FILE_STAT_BASIC_INFORMATION statInfo;
if (!path_converter(path, &_path)) {
path_cleanup(&_path);
@@ -4800,43 +4802,60 @@ os__path_isdir_impl(PyObject *module, PyObject *path)
}
Py_BEGIN_ALLOW_THREADS
- if (_path.fd != -1) {
- hfile = _Py_get_osfhandle_noraise(_path.fd);
- close_file = FALSE;
- }
- else {
- hfile = CreateFileW(_path.wide, FILE_READ_ATTRIBUTES, 0, NULL,
- OPEN_EXISTING, FILE_FLAG_BACKUP_SEMANTICS, NULL);
+ if (_path.wide) {
+ if (_Py_GetFileInformationByName(_path.wide, FileStatBasicByNameInfo,
+ &statInfo, sizeof(statInfo))) {
+ if (!(statInfo.FileAttributes & FILE_ATTRIBUTE_REPARSE_POINT)) {
+ slow_path = FALSE;
+ result = statInfo.FileAttributes & FILE_ATTRIBUTE_DIRECTORY;
+ } else if (!(statInfo.FileAttributes & FILE_ATTRIBUTE_DIRECTORY)) {
+ slow_path = FALSE;
+ result = 0;
+ }
+ } else if (_Py_GetFileInformationByName_ErrorIsTrustworthy(GetLastError())) {
+ slow_path = FALSE;
+ result = 0;
+ }
}
- if (hfile != INVALID_HANDLE_VALUE) {
- if (GetFileInformationByHandleEx(hfile, FileBasicInfo, &info,
- sizeof(info)))
- {
- result = info.FileAttributes & FILE_ATTRIBUTE_DIRECTORY;
+ if (slow_path) {
+ if (_path.fd != -1) {
+ hfile = _Py_get_osfhandle_noraise(_path.fd);
+ close_file = FALSE;
}
else {
- result = 0;
+ hfile = CreateFileW(_path.wide, FILE_READ_ATTRIBUTES, 0, NULL,
+ OPEN_EXISTING, FILE_FLAG_BACKUP_SEMANTICS, NULL);
}
- if (close_file) {
- CloseHandle(hfile);
- }
- }
- else {
- STRUCT_STAT st;
- switch (GetLastError()) {
- case ERROR_ACCESS_DENIED:
- case ERROR_SHARING_VIOLATION:
- case ERROR_CANT_ACCESS_FILE:
- case ERROR_INVALID_PARAMETER:
- if (STAT(_path.wide, &st)) {
- result = 0;
+ if (hfile != INVALID_HANDLE_VALUE) {
+ if (GetFileInformationByHandleEx(hfile, FileBasicInfo, &info,
+ sizeof(info)))
+ {
+ result = info.FileAttributes & FILE_ATTRIBUTE_DIRECTORY;
}
else {
- result = S_ISDIR(st.st_mode);
+ result = 0;
+ }
+ if (close_file) {
+ CloseHandle(hfile);
+ }
+ }
+ else {
+ STRUCT_STAT st;
+ switch (GetLastError()) {
+ case ERROR_ACCESS_DENIED:
+ case ERROR_SHARING_VIOLATION:
+ case ERROR_CANT_ACCESS_FILE:
+ case ERROR_INVALID_PARAMETER:
+ if (STAT(_path.wide, &st)) {
+ result = 0;
+ }
+ else {
+ result = S_ISDIR(st.st_mode);
+ }
+ break;
+ default:
+ result = 0;
}
- break;
- default:
- result = 0;
}
}
Py_END_ALLOW_THREADS
@@ -4867,6 +4886,8 @@ os__path_isfile_impl(PyObject *module, PyObject *path)
FILE_BASIC_INFO info;
path_t _path = PATH_T_INITIALIZE("isfile", "path", 0, 1);
int result;
+ BOOL slow_path = TRUE;
+ FILE_STAT_BASIC_INFORMATION statInfo;
if (!path_converter(path, &_path)) {
path_cleanup(&_path);
@@ -4878,43 +4899,60 @@ os__path_isfile_impl(PyObject *module, PyObject *path)
}
Py_BEGIN_ALLOW_THREADS
- if (_path.fd != -1) {
- hfile = _Py_get_osfhandle_noraise(_path.fd);
- close_file = FALSE;
- }
- else {
- hfile = CreateFileW(_path.wide, FILE_READ_ATTRIBUTES, 0, NULL,
- OPEN_EXISTING, FILE_FLAG_BACKUP_SEMANTICS, NULL);
+ if (_path.wide) {
+ if (_Py_GetFileInformationByName(_path.wide, FileStatBasicByNameInfo,
+ &statInfo, sizeof(statInfo))) {
+ if (!(statInfo.FileAttributes & FILE_ATTRIBUTE_REPARSE_POINT)) {
+ slow_path = FALSE;
+ result = !(statInfo.FileAttributes & FILE_ATTRIBUTE_DIRECTORY);
+ } else if (statInfo.FileAttributes & FILE_ATTRIBUTE_DIRECTORY) {
+ slow_path = FALSE;
+ result = 0;
+ }
+ } else if (_Py_GetFileInformationByName_ErrorIsTrustworthy(GetLastError())) {
+ slow_path = FALSE;
+ result = 0;
+ }
}
- if (hfile != INVALID_HANDLE_VALUE) {
- if (GetFileInformationByHandleEx(hfile, FileBasicInfo, &info,
- sizeof(info)))
- {
- result = !(info.FileAttributes & FILE_ATTRIBUTE_DIRECTORY);
+ if (slow_path) {
+ if (_path.fd != -1) {
+ hfile = _Py_get_osfhandle_noraise(_path.fd);
+ close_file = FALSE;
}
else {
- result = 0;
- }
- if (close_file) {
- CloseHandle(hfile);
+ hfile = CreateFileW(_path.wide, FILE_READ_ATTRIBUTES, 0, NULL,
+ OPEN_EXISTING, FILE_FLAG_BACKUP_SEMANTICS, NULL);
}
- }
- else {
- STRUCT_STAT st;
- switch (GetLastError()) {
- case ERROR_ACCESS_DENIED:
- case ERROR_SHARING_VIOLATION:
- case ERROR_CANT_ACCESS_FILE:
- case ERROR_INVALID_PARAMETER:
- if (STAT(_path.wide, &st)) {
- result = 0;
+ if (hfile != INVALID_HANDLE_VALUE) {
+ if (GetFileInformationByHandleEx(hfile, FileBasicInfo, &info,
+ sizeof(info)))
+ {
+ result = !(info.FileAttributes & FILE_ATTRIBUTE_DIRECTORY);
}
else {
- result = S_ISREG(st.st_mode);
+ result = 0;
+ }
+ if (close_file) {
+ CloseHandle(hfile);
+ }
+ }
+ else {
+ STRUCT_STAT st;
+ switch (GetLastError()) {
+ case ERROR_ACCESS_DENIED:
+ case ERROR_SHARING_VIOLATION:
+ case ERROR_CANT_ACCESS_FILE:
+ case ERROR_INVALID_PARAMETER:
+ if (STAT(_path.wide, &st)) {
+ result = 0;
+ }
+ else {
+ result = S_ISREG(st.st_mode);
+ }
+ break;
+ default:
+ result = 0;
}
- break;
- default:
- result = 0;
}
}
Py_END_ALLOW_THREADS
@@ -4944,6 +4982,8 @@ os__path_exists_impl(PyObject *module, PyObject *path)
BOOL close_file = TRUE;
path_t _path = PATH_T_INITIALIZE("exists", "path", 0, 1);
int result;
+ BOOL slow_path = TRUE;
+ FILE_STAT_BASIC_INFORMATION statInfo;
if (!path_converter(path, &_path)) {
path_cleanup(&_path);
@@ -4955,36 +4995,50 @@ os__path_exists_impl(PyObject *module, PyObject *path)
}
Py_BEGIN_ALLOW_THREADS
- if (_path.fd != -1) {
- hfile = _Py_get_osfhandle_noraise(_path.fd);
- close_file = FALSE;
- }
- else {
- hfile = CreateFileW(_path.wide, FILE_READ_ATTRIBUTES, 0, NULL,
- OPEN_EXISTING, FILE_FLAG_BACKUP_SEMANTICS, NULL);
- }
- if (hfile != INVALID_HANDLE_VALUE) {
- result = 1;
- if (close_file) {
- CloseHandle(hfile);
+ if (_path.wide) {
+ if (_Py_GetFileInformationByName(_path.wide, FileStatBasicByNameInfo,
+ &statInfo, sizeof(statInfo))) {
+ if (!(statInfo.FileAttributes & FILE_ATTRIBUTE_REPARSE_POINT)) {
+ slow_path = FALSE;
+ result = 1;
+ }
+ } else if (_Py_GetFileInformationByName_ErrorIsTrustworthy(GetLastError())) {
+ slow_path = FALSE;
+ result = 0;
}
}
- else {
- STRUCT_STAT st;
- switch (GetLastError()) {
- case ERROR_ACCESS_DENIED:
- case ERROR_SHARING_VIOLATION:
- case ERROR_CANT_ACCESS_FILE:
- case ERROR_INVALID_PARAMETER:
- if (STAT(_path.wide, &st)) {
- result = 0;
+ if (slow_path) {
+ if (_path.fd != -1) {
+ hfile = _Py_get_osfhandle_noraise(_path.fd);
+ close_file = FALSE;
+ }
+ else {
+ hfile = CreateFileW(_path.wide, FILE_READ_ATTRIBUTES, 0, NULL,
+ OPEN_EXISTING, FILE_FLAG_BACKUP_SEMANTICS, NULL);
+ }
+ if (hfile != INVALID_HANDLE_VALUE) {
+ result = 1;
+ if (close_file) {
+ CloseHandle(hfile);
}
- else {
- result = 1;
+ }
+ else {
+ STRUCT_STAT st;
+ switch (GetLastError()) {
+ case ERROR_ACCESS_DENIED:
+ case ERROR_SHARING_VIOLATION:
+ case ERROR_CANT_ACCESS_FILE:
+ case ERROR_INVALID_PARAMETER:
+ if (STAT(_path.wide, &st)) {
+ result = 0;
+ }
+ else {
+ result = 1;
+ }
+ break;
+ default:
+ result = 0;
}
- break;
- default:
- result = 0;
}
}
Py_END_ALLOW_THREADS
@@ -5015,6 +5069,8 @@ os__path_islink_impl(PyObject *module, PyObject *path)
FILE_ATTRIBUTE_TAG_INFO info;
path_t _path = PATH_T_INITIALIZE("islink", "path", 0, 1);
int result;
+ BOOL slow_path = TRUE;
+ FILE_STAT_BASIC_INFORMATION statInfo;
if (!path_converter(path, &_path)) {
path_cleanup(&_path);
@@ -5026,45 +5082,62 @@ os__path_islink_impl(PyObject *module, PyObject *path)
}
Py_BEGIN_ALLOW_THREADS
- if (_path.fd != -1) {
- hfile = _Py_get_osfhandle_noraise(_path.fd);
- close_file = FALSE;
+ if (_path.wide) {
+ if (_Py_GetFileInformationByName(_path.wide, FileStatBasicByNameInfo,
+ &statInfo, sizeof(statInfo))) {
+ slow_path = FALSE;
+ if (statInfo.FileAttributes & FILE_ATTRIBUTE_REPARSE_POINT) {
+ result = (statInfo.ReparseTag == IO_REPARSE_TAG_SYMLINK);
+ }
+ else {
+ result = 0;
+ }
+ } else if (_Py_GetFileInformationByName_ErrorIsTrustworthy(GetLastError())) {
+ slow_path = FALSE;
+ result = 0;
+ }
}
- else {
- hfile = CreateFileW(_path.wide, FILE_READ_ATTRIBUTES, 0, NULL,
- OPEN_EXISTING,
- FILE_FLAG_OPEN_REPARSE_POINT | FILE_FLAG_BACKUP_SEMANTICS,
- NULL);
- }
- if (hfile != INVALID_HANDLE_VALUE) {
- if (GetFileInformationByHandleEx(hfile, FileAttributeTagInfo, &info,
- sizeof(info)))
- {
- result = (info.ReparseTag == IO_REPARSE_TAG_SYMLINK);
+ if (slow_path) {
+ if (_path.fd != -1) {
+ hfile = _Py_get_osfhandle_noraise(_path.fd);
+ close_file = FALSE;
}
else {
- result = 0;
+ hfile = CreateFileW(_path.wide, FILE_READ_ATTRIBUTES, 0, NULL,
+ OPEN_EXISTING,
+ FILE_FLAG_OPEN_REPARSE_POINT | FILE_FLAG_BACKUP_SEMANTICS,
+ NULL);
}
- if (close_file) {
- CloseHandle(hfile);
- }
- }
- else {
- STRUCT_STAT st;
- switch (GetLastError()) {
- case ERROR_ACCESS_DENIED:
- case ERROR_SHARING_VIOLATION:
- case ERROR_CANT_ACCESS_FILE:
- case ERROR_INVALID_PARAMETER:
- if (LSTAT(_path.wide, &st)) {
- result = 0;
+ if (hfile != INVALID_HANDLE_VALUE) {
+ if (GetFileInformationByHandleEx(hfile, FileAttributeTagInfo, &info,
+ sizeof(info)))
+ {
+ result = (info.ReparseTag == IO_REPARSE_TAG_SYMLINK);
}
else {
- result = S_ISLNK(st.st_mode);
+ result = 0;
+ }
+ if (close_file) {
+ CloseHandle(hfile);
+ }
+ }
+ else {
+ STRUCT_STAT st;
+ switch (GetLastError()) {
+ case ERROR_ACCESS_DENIED:
+ case ERROR_SHARING_VIOLATION:
+ case ERROR_CANT_ACCESS_FILE:
+ case ERROR_INVALID_PARAMETER:
+ if (LSTAT(_path.wide, &st)) {
+ result = 0;
+ }
+ else {
+ result = S_ISLNK(st.st_mode);
+ }
+ break;
+ default:
+ result = 0;
}
- break;
- default:
- result = 0;
}
}
Py_END_ALLOW_THREADS
diff --git a/Modules/socketmodule.c b/Modules/socketmodule.c
index 656cd546d46d31..f11d4b1a6e0591 100644
--- a/Modules/socketmodule.c
+++ b/Modules/socketmodule.c
@@ -8418,6 +8418,18 @@ socket_exec(PyObject *m)
#ifdef IP_BIND_ADDRESS_NO_PORT
ADD_INT_MACRO(m, IP_BIND_ADDRESS_NO_PORT);
#endif
+#ifdef IP_UNBLOCK_SOURCE
+ ADD_INT_MACRO(m, IP_UNBLOCK_SOURCE);
+#endif
+#ifdef IP_BLOCK_SOURCE
+ ADD_INT_MACRO(m, IP_BLOCK_SOURCE);
+#endif
+#ifdef IP_ADD_SOURCE_MEMBERSHIP
+ ADD_INT_MACRO(m, IP_ADD_SOURCE_MEMBERSHIP);
+#endif
+#ifdef IP_DROP_SOURCE_MEMBERSHIP
+ ADD_INT_MACRO(m, IP_DROP_SOURCE_MEMBERSHIP);
+#endif
/* IPv6 [gs]etsockopt options, defined in RFC2553 */
#ifdef IPV6_JOIN_GROUP
diff --git a/Modules/symtablemodule.c b/Modules/symtablemodule.c
index 4ef1d8cde07db6..91538b4fb15cbd 100644
--- a/Modules/symtablemodule.c
+++ b/Modules/symtablemodule.c
@@ -66,12 +66,6 @@ static PyMethodDef symtable_methods[] = {
{NULL, NULL} /* sentinel */
};
-static int
-symtable_init_stentry_type(PyObject *m)
-{
- return PyType_Ready(&PySTEntry_Type);
-}
-
static int
symtable_init_constants(PyObject *m)
{
@@ -105,7 +99,6 @@ symtable_init_constants(PyObject *m)
}
static PyModuleDef_Slot symtable_slots[] = {
- {Py_mod_exec, symtable_init_stentry_type},
{Py_mod_exec, symtable_init_constants},
{0, NULL}
};
diff --git a/Modules/tkappinit.c b/Modules/tkappinit.c
index 7616d9d319d228..67d6250318c616 100644
--- a/Modules/tkappinit.c
+++ b/Modules/tkappinit.c
@@ -18,18 +18,10 @@
#include "tkinter.h"
-#ifdef TKINTER_PROTECT_LOADTK
-/* See Tkapp_TkInit in _tkinter.c for the usage of tk_load_faile */
-static int tk_load_failed;
-#endif
-
int
Tcl_AppInit(Tcl_Interp *interp)
{
const char *_tkinter_skip_tk_init;
-#ifdef TKINTER_PROTECT_LOADTK
- const char *_tkinter_tk_failed;
-#endif
#ifdef TK_AQUA
#ifndef MAX_PATH_LEN
@@ -90,23 +82,7 @@ Tcl_AppInit(Tcl_Interp *interp)
return TCL_OK;
}
-#ifdef TKINTER_PROTECT_LOADTK
- _tkinter_tk_failed = Tcl_GetVar(interp,
- "_tkinter_tk_failed", TCL_GLOBAL_ONLY);
-
- if (tk_load_failed || (
- _tkinter_tk_failed != NULL &&
- strcmp(_tkinter_tk_failed, "1") == 0)) {
- Tcl_SetResult(interp, TKINTER_LOADTK_ERRMSG, TCL_STATIC);
- return TCL_ERROR;
- }
-#endif
-
if (Tk_Init(interp) == TCL_ERROR) {
-#ifdef TKINTER_PROTECT_LOADTK
- tk_load_failed = 1;
- Tcl_SetVar(interp, "_tkinter_tk_failed", "1", TCL_GLOBAL_ONLY);
-#endif
return TCL_ERROR;
}
diff --git a/Modules/tkinter.h b/Modules/tkinter.h
index cb5a806b0c4326..40281c21760331 100644
--- a/Modules/tkinter.h
+++ b/Modules/tkinter.h
@@ -16,12 +16,4 @@
(TK_RELEASE_LEVEL << 8) | \
(TK_RELEASE_SERIAL << 0))
-/* Protect Tk 8.4.13 and older from a deadlock that happens when trying
- * to load tk after a failed attempt. */
-#if TK_HEX_VERSION < 0x0804020e
-#define TKINTER_PROTECT_LOADTK
-#define TKINTER_LOADTK_ERRMSG \
- "Calling Tk_Init again after a previous call failed might deadlock"
-#endif
-
#endif /* !TKINTER_H */
diff --git a/Objects/boolobject.c b/Objects/boolobject.c
index 9d8e956e06f712..597a76fa5cb162 100644
--- a/Objects/boolobject.c
+++ b/Objects/boolobject.c
@@ -145,10 +145,14 @@ static PyNumberMethods bool_as_number = {
0, /* nb_index */
};
-static void _Py_NO_RETURN
-bool_dealloc(PyObject* Py_UNUSED(ignore))
+static void
+bool_dealloc(PyObject *boolean)
{
- _Py_FatalRefcountError("deallocating True or False");
+ /* This should never get called, but we also don't want to SEGV if
+ * we accidentally decref Booleans out of existence. Instead,
+ * since bools are immortal, re-set the reference count.
+ */
+ _Py_SetImmortal(boolean);
}
/* The type object for bool. Note that this cannot be subclassed! */
diff --git a/Objects/bytes_methods.c b/Objects/bytes_methods.c
index ef9e65e566ece9..33aa9c3db6e805 100644
--- a/Objects/bytes_methods.c
+++ b/Objects/bytes_methods.c
@@ -258,9 +258,12 @@ _Py_bytes_istitle(const char *cptr, Py_ssize_t len)
const unsigned char *e;
int cased, previous_is_cased;
- /* Shortcut for single character strings */
- if (len == 1)
- return PyBool_FromLong(Py_ISUPPER(*p));
+ if (len == 1) {
+ if (Py_ISUPPER(*p)) {
+ Py_RETURN_TRUE;
+ }
+ Py_RETURN_FALSE;
+ }
/* Special case for empty strings */
if (len == 0)
diff --git a/Objects/bytesobject.c b/Objects/bytesobject.c
index 2d8dab6f378006..27b2ad4f2cb38f 100644
--- a/Objects/bytesobject.c
+++ b/Objects/bytesobject.c
@@ -3090,25 +3090,6 @@ _Py_COMP_DIAG_POP
}
-PyStatus
-_PyBytes_InitTypes(PyInterpreterState *interp)
-{
- if (!_Py_IsMainInterpreter(interp)) {
- return _PyStatus_OK();
- }
-
- if (PyType_Ready(&PyBytes_Type) < 0) {
- return _PyStatus_ERR("Can't initialize bytes type");
- }
-
- if (PyType_Ready(&PyBytesIter_Type) < 0) {
- return _PyStatus_ERR("Can't initialize bytes iterator type");
- }
-
- return _PyStatus_OK();
-}
-
-
/*********************** Bytes Iterator ****************************/
typedef struct {
diff --git a/Objects/classobject.c b/Objects/classobject.c
index 2cb192e725d40d..71c4a4e5d0f8ab 100644
--- a/Objects/classobject.c
+++ b/Objects/classobject.c
@@ -181,7 +181,7 @@ method_getattro(PyObject *obj, PyObject *name)
PyObject *descr = NULL;
{
- if (tp->tp_dict == NULL) {
+ if (!_PyType_IsReady(tp)) {
if (PyType_Ready(tp) < 0)
return NULL;
}
@@ -395,7 +395,7 @@ instancemethod_getattro(PyObject *self, PyObject *name)
PyTypeObject *tp = Py_TYPE(self);
PyObject *descr = NULL;
- if (tp->tp_dict == NULL) {
+ if (!_PyType_IsReady(tp)) {
if (PyType_Ready(tp) < 0)
return NULL;
}
diff --git a/Objects/exceptions.c b/Objects/exceptions.c
index a355244cf997e6..6c9dfbd9b415cf 100644
--- a/Objects/exceptions.c
+++ b/Objects/exceptions.c
@@ -1421,7 +1421,12 @@ _PyExc_PrepReraiseStar(PyObject *orig, PyObject *excs)
if (res < 0) {
goto done;
}
- result = _PyExc_CreateExceptionGroup("", raised_list);
+ if (PyList_GET_SIZE(raised_list) > 1) {
+ result = _PyExc_CreateExceptionGroup("", raised_list);
+ }
+ else {
+ result = Py_NewRef(PyList_GetItem(raised_list, 0));
+ }
if (result == NULL) {
goto done;
}
@@ -3591,10 +3596,6 @@ static struct static_exception static_exceptions[] = {
int
_PyExc_InitTypes(PyInterpreterState *interp)
{
- if (!_Py_IsMainInterpreter(interp)) {
- return 0;
- }
-
for (size_t i=0; i < Py_ARRAY_LENGTH(static_exceptions); i++) {
PyTypeObject *exc = static_exceptions[i].exc;
if (_PyStaticType_InitBuiltin(exc) < 0) {
diff --git a/Objects/floatobject.c b/Objects/floatobject.c
index d641311f1126cd..9c2315781bed36 100644
--- a/Objects/floatobject.c
+++ b/Objects/floatobject.c
@@ -1990,20 +1990,10 @@ _PyFloat_InitState(PyInterpreterState *interp)
PyStatus
_PyFloat_InitTypes(PyInterpreterState *interp)
{
- if (!_Py_IsMainInterpreter(interp)) {
- return _PyStatus_OK();
- }
-
- if (PyType_Ready(&PyFloat_Type) < 0) {
- return _PyStatus_ERR("Can't initialize float type");
- }
-
/* Init float info */
- if (FloatInfoType.tp_name == NULL) {
- if (_PyStructSequence_InitBuiltin(&FloatInfoType,
- &floatinfo_desc) < 0) {
- return _PyStatus_ERR("can't init float info type");
- }
+ if (_PyStructSequence_InitBuiltin(&FloatInfoType,
+ &floatinfo_desc) < 0) {
+ return _PyStatus_ERR("can't init float info type");
}
return _PyStatus_OK();
diff --git a/Objects/longobject.c b/Objects/longobject.c
index bb4eac0d932bb8..f84809b8a8986a 100644
--- a/Objects/longobject.c
+++ b/Objects/longobject.c
@@ -52,8 +52,7 @@ static PyObject *
get_small_int(sdigit ival)
{
assert(IS_SMALL_INT(ival));
- PyObject *v = (PyObject *)&_PyLong_SMALL_INTS[_PY_NSMALLNEGINTS + ival];
- return Py_NewRef(v);
+ return (PyObject *)&_PyLong_SMALL_INTS[_PY_NSMALLNEGINTS + ival];
}
static PyLongObject *
@@ -3271,6 +3270,27 @@ long_richcompare(PyObject *self, PyObject *other, int op)
Py_RETURN_RICHCOMPARE(result, 0, op);
}
+static void
+long_dealloc(PyObject *self)
+{
+ /* This should never get called, but we also don't want to SEGV if
+ * we accidentally decref small Ints out of existence. Instead,
+ * since small Ints are immortal, re-set the reference count.
+ */
+ PyLongObject *pylong = (PyLongObject*)self;
+ if (pylong && _PyLong_IsCompact(pylong)) {
+ stwodigits ival = medium_value(pylong);
+ if (IS_SMALL_INT(ival)) {
+ PyLongObject *small_pylong = (PyLongObject *)get_small_int((sdigit)ival);
+ if (pylong == small_pylong) {
+ _Py_SetImmortal(self);
+ return;
+ }
+ }
+ }
+ Py_TYPE(self)->tp_free(self);
+}
+
static Py_hash_t
long_hash(PyLongObject *v)
{
@@ -6233,7 +6253,7 @@ PyTypeObject PyLong_Type = {
"int", /* tp_name */
offsetof(PyLongObject, long_value.ob_digit), /* tp_basicsize */
sizeof(digit), /* tp_itemsize */
- 0, /* tp_dealloc */
+ long_dealloc, /* tp_dealloc */
0, /* tp_vectorcall_offset */
0, /* tp_getattr */
0, /* tp_setattr */
@@ -6331,19 +6351,9 @@ PyLong_GetInfo(void)
PyStatus
_PyLong_InitTypes(PyInterpreterState *interp)
{
- if (!_Py_IsMainInterpreter(interp)) {
- return _PyStatus_OK();
- }
-
- if (PyType_Ready(&PyLong_Type) < 0) {
- return _PyStatus_ERR("Can't initialize int type");
- }
-
/* initialize int_info */
- if (Int_InfoType.tp_name == NULL) {
- if (_PyStructSequence_InitBuiltin(&Int_InfoType, &int_info_desc) < 0) {
- return _PyStatus_ERR("can't init int info type");
- }
+ if (_PyStructSequence_InitBuiltin(&Int_InfoType, &int_info_desc) < 0) {
+ return _PyStatus_ERR("can't init int info type");
}
return _PyStatus_OK();
diff --git a/Objects/memoryobject.c b/Objects/memoryobject.c
index 1d6cc3b508448d..34cc797b404cda 100644
--- a/Objects/memoryobject.c
+++ b/Objects/memoryobject.c
@@ -2642,7 +2642,11 @@ static Py_ssize_t
memory_length(PyMemoryViewObject *self)
{
CHECK_RELEASED_INT(self);
- return self->view.ndim == 0 ? 1 : self->view.shape[0];
+ if (self->view.ndim == 0) {
+ PyErr_SetString(PyExc_TypeError, "0-dim memory has no length");
+ return -1;
+ }
+ return self->view.shape[0];
}
/* As mapping */
diff --git a/Objects/object.c b/Objects/object.c
index 56747fa193e178..4ce10cf1192d3f 100644
--- a/Objects/object.c
+++ b/Objects/object.c
@@ -145,7 +145,7 @@ _PyDebug_PrintTotalRefs(void) {
_PyRuntimeState *runtime = &_PyRuntime;
fprintf(stderr,
"[%zd refs, %zd blocks]\n",
- get_global_reftotal(runtime), _Py_GetAllocatedBlocks());
+ get_global_reftotal(runtime), _Py_GetGlobalAllocatedBlocks());
/* It may be helpful to also print the "legacy" reftotal separately.
Likewise for the total for each interpreter. */
}
@@ -890,7 +890,7 @@ PyObject_Hash(PyObject *v)
* an explicit call to PyType_Ready, we implicitly call
* PyType_Ready here and then check the tp_hash slot again
*/
- if (tp->tp_dict == NULL) {
+ if (!_PyType_IsReady(tp)) {
if (PyType_Ready(tp) < 0)
return -1;
if (tp->tp_hash != NULL)
@@ -1033,7 +1033,7 @@ PyObject_GetAttr(PyObject *v, PyObject *name)
}
else {
PyErr_Format(PyExc_AttributeError,
- "'%.50s' object has no attribute '%U'",
+ "'%.100s' object has no attribute '%U'",
tp->tp_name, name);
}
@@ -1353,7 +1353,7 @@ _PyObject_GetMethod(PyObject *obj, PyObject *name, PyObject **method)
}
PyErr_Format(PyExc_AttributeError,
- "'%.50s' object has no attribute '%U'",
+ "'%.100s' object has no attribute '%U'",
tp->tp_name, name);
set_attribute_error_context(obj, name);
@@ -1385,7 +1385,7 @@ _PyObject_GenericGetAttrWithDict(PyObject *obj, PyObject *name,
}
Py_INCREF(name);
- if (tp->tp_dict == NULL) {
+ if (!_PyType_IsReady(tp)) {
if (PyType_Ready(tp) < 0)
goto done;
}
@@ -1474,7 +1474,7 @@ _PyObject_GenericGetAttrWithDict(PyObject *obj, PyObject *name,
if (!suppress) {
PyErr_Format(PyExc_AttributeError,
- "'%.50s' object has no attribute '%U'",
+ "'%.100s' object has no attribute '%U'",
tp->tp_name, name);
set_attribute_error_context(obj, name);
@@ -1507,8 +1507,9 @@ _PyObject_GenericSetAttrWithDict(PyObject *obj, PyObject *name,
return -1;
}
- if (tp->tp_dict == NULL && PyType_Ready(tp) < 0)
+ if (!_PyType_IsReady(tp) && PyType_Ready(tp) < 0) {
return -1;
+ }
Py_INCREF(name);
Py_INCREF(tp);
@@ -1545,7 +1546,7 @@ _PyObject_GenericSetAttrWithDict(PyObject *obj, PyObject *name,
}
else {
PyErr_Format(PyExc_AttributeError,
- "'%.50s' object attribute '%U' is read-only",
+ "'%.100s' object attribute '%U' is read-only",
tp->tp_name, name);
}
goto done;
@@ -1754,10 +1755,14 @@ none_repr(PyObject *op)
return PyUnicode_FromString("None");
}
-static void _Py_NO_RETURN
-none_dealloc(PyObject* Py_UNUSED(ignore))
+static void
+none_dealloc(PyObject* none)
{
- _Py_FatalRefcountError("deallocating None");
+ /* This should never get called, but we also don't want to SEGV if
+ * we accidentally decref None out of existence. Instead,
+ * since None is an immortal object, re-set the reference count.
+ */
+ _Py_SetImmortal(none);
}
static PyObject *
@@ -1823,7 +1828,7 @@ PyTypeObject _PyNone_Type = {
"NoneType",
0,
0,
- none_dealloc, /*tp_dealloc*/ /*never called*/
+ none_dealloc, /*tp_dealloc*/
0, /*tp_vectorcall_offset*/
0, /*tp_getattr*/
0, /*tp_setattr*/
@@ -1860,8 +1865,9 @@ PyTypeObject _PyNone_Type = {
};
PyObject _Py_NoneStruct = {
- _PyObject_EXTRA_INIT
- 1, &_PyNone_Type
+ _PyObject_EXTRA_INIT
+ { _Py_IMMORTAL_REFCNT },
+ &_PyNone_Type
};
/* NotImplemented is an object that can be used to signal that an
@@ -1894,13 +1900,14 @@ notimplemented_new(PyTypeObject *type, PyObject *args, PyObject *kwargs)
Py_RETURN_NOTIMPLEMENTED;
}
-static void _Py_NO_RETURN
-notimplemented_dealloc(PyObject* ignore)
+static void
+notimplemented_dealloc(PyObject *notimplemented)
{
/* This should never get called, but we also don't want to SEGV if
- * we accidentally decref NotImplemented out of existence.
+ * we accidentally decref NotImplemented out of existence. Instead,
+ * since NotImplemented is an immortal object, re-set the reference count.
*/
- Py_FatalError("deallocating NotImplemented");
+ _Py_SetImmortal(notimplemented);
}
static int
@@ -1962,12 +1969,10 @@ PyTypeObject _PyNotImplemented_Type = {
PyObject _Py_NotImplementedStruct = {
_PyObject_EXTRA_INIT
- 1, &_PyNotImplemented_Type
+ { _Py_IMMORTAL_REFCNT },
+ &_PyNotImplemented_Type
};
-#ifdef MS_WINDOWS
-extern PyTypeObject PyHKEY_Type;
-#endif
extern PyTypeObject _Py_GenericAliasIterType;
extern PyTypeObject _PyMemoryIter_Type;
extern PyTypeObject _PyLineIterator;
@@ -2018,9 +2023,6 @@ static PyTypeObject* static_types[] = {
&PyFunction_Type,
&PyGen_Type,
&PyGetSetDescr_Type,
-#ifdef MS_WINDOWS
- &PyHKEY_Type,
-#endif
&PyInstanceMethod_Type,
&PyListIter_Type,
&PyListRevIter_Type,
@@ -2100,10 +2102,6 @@ static PyTypeObject* static_types[] = {
PyStatus
_PyTypes_InitTypes(PyInterpreterState *interp)
{
- if (!_Py_IsMainInterpreter(interp)) {
- return _PyStatus_OK();
- }
-
// All other static types (unless initialized elsewhere)
for (size_t i=0; i < Py_ARRAY_LENGTH(static_types); i++) {
PyTypeObject *type = static_types[i];
@@ -2149,7 +2147,8 @@ new_reference(PyObject *op)
if (_PyRuntime.tracemalloc.config.tracing) {
_PyTraceMalloc_NewReference(op);
}
- Py_SET_REFCNT(op, 1);
+ // Skip the immortal object check in Py_SET_REFCNT; always set refcnt to 1
+ op->ob_refcnt = 1;
#ifdef Py_TRACE_REFS
_Py_AddToAllObjects(op, 1);
#endif
diff --git a/Objects/obmalloc.c b/Objects/obmalloc.c
index 5e1bcda1d976bb..de62aeb04461fa 100644
--- a/Objects/obmalloc.c
+++ b/Objects/obmalloc.c
@@ -725,20 +725,51 @@ PyObject_Free(void *ptr)
static int running_on_valgrind = -1;
#endif
+typedef struct _obmalloc_state OMState;
-#define allarenas (_PyRuntime.obmalloc.mgmt.arenas)
-#define maxarenas (_PyRuntime.obmalloc.mgmt.maxarenas)
-#define unused_arena_objects (_PyRuntime.obmalloc.mgmt.unused_arena_objects)
-#define usable_arenas (_PyRuntime.obmalloc.mgmt.usable_arenas)
-#define nfp2lasta (_PyRuntime.obmalloc.mgmt.nfp2lasta)
-#define narenas_currently_allocated (_PyRuntime.obmalloc.mgmt.narenas_currently_allocated)
-#define ntimes_arena_allocated (_PyRuntime.obmalloc.mgmt.ntimes_arena_allocated)
-#define narenas_highwater (_PyRuntime.obmalloc.mgmt.narenas_highwater)
-#define raw_allocated_blocks (_PyRuntime.obmalloc.mgmt.raw_allocated_blocks)
+static inline int
+has_own_state(PyInterpreterState *interp)
+{
+ return (_Py_IsMainInterpreter(interp) ||
+ !(interp->feature_flags & Py_RTFLAGS_USE_MAIN_OBMALLOC) ||
+ _Py_IsMainInterpreterFinalizing(interp));
+}
+
+static inline OMState *
+get_state(void)
+{
+ PyInterpreterState *interp = _PyInterpreterState_GET();
+ if (!has_own_state(interp)) {
+ interp = _PyInterpreterState_Main();
+ }
+ return &interp->obmalloc;
+}
+
+// These macros all rely on a local "state" variable.
+#define usedpools (state->pools.used)
+#define allarenas (state->mgmt.arenas)
+#define maxarenas (state->mgmt.maxarenas)
+#define unused_arena_objects (state->mgmt.unused_arena_objects)
+#define usable_arenas (state->mgmt.usable_arenas)
+#define nfp2lasta (state->mgmt.nfp2lasta)
+#define narenas_currently_allocated (state->mgmt.narenas_currently_allocated)
+#define ntimes_arena_allocated (state->mgmt.ntimes_arena_allocated)
+#define narenas_highwater (state->mgmt.narenas_highwater)
+#define raw_allocated_blocks (state->mgmt.raw_allocated_blocks)
Py_ssize_t
-_Py_GetAllocatedBlocks(void)
+_PyInterpreterState_GetAllocatedBlocks(PyInterpreterState *interp)
{
+#ifdef Py_DEBUG
+ assert(has_own_state(interp));
+#else
+ if (!has_own_state(interp)) {
+ _Py_FatalErrorFunc(__func__,
+ "the interpreter doesn't have its own allocator");
+ }
+#endif
+ OMState *state = &interp->obmalloc;
+
Py_ssize_t n = raw_allocated_blocks;
/* add up allocated blocks for used pools */
for (uint i = 0; i < maxarenas; ++i) {
@@ -759,20 +790,100 @@ _Py_GetAllocatedBlocks(void)
return n;
}
+void
+_PyInterpreterState_FinalizeAllocatedBlocks(PyInterpreterState *interp)
+{
+ if (has_own_state(interp)) {
+ Py_ssize_t leaked = _PyInterpreterState_GetAllocatedBlocks(interp);
+ assert(has_own_state(interp) || leaked == 0);
+ interp->runtime->obmalloc.interpreter_leaks += leaked;
+ }
+}
+
+static Py_ssize_t get_num_global_allocated_blocks(_PyRuntimeState *);
+
+/* We preserve the number of blocks leaked during runtime finalization,
+ so they can be reported if the runtime is initialized again. */
+// XXX We don't lose any information by dropping this,
+// so we should consider doing so.
+static Py_ssize_t last_final_leaks = 0;
+
+void
+_Py_FinalizeAllocatedBlocks(_PyRuntimeState *runtime)
+{
+ last_final_leaks = get_num_global_allocated_blocks(runtime);
+ runtime->obmalloc.interpreter_leaks = 0;
+}
+
+static Py_ssize_t
+get_num_global_allocated_blocks(_PyRuntimeState *runtime)
+{
+ Py_ssize_t total = 0;
+ if (_PyRuntimeState_GetFinalizing(runtime) != NULL) {
+ PyInterpreterState *interp = _PyInterpreterState_Main();
+ if (interp == NULL) {
+ /* We are at the very end of runtime finalization.
+ We can't rely on finalizing->interp since that thread
+ state is probably already freed, so we don't worry
+ about it. */
+ assert(PyInterpreterState_Head() == NULL);
+ }
+ else {
+ assert(interp != NULL);
+ /* It is probably the last interpreter but not necessarily. */
+ assert(PyInterpreterState_Next(interp) == NULL);
+ total += _PyInterpreterState_GetAllocatedBlocks(interp);
+ }
+ }
+ else {
+ HEAD_LOCK(runtime);
+ PyInterpreterState *interp = PyInterpreterState_Head();
+ assert(interp != NULL);
+#ifdef Py_DEBUG
+ int got_main = 0;
+#endif
+ for (; interp != NULL; interp = PyInterpreterState_Next(interp)) {
+#ifdef Py_DEBUG
+ if (_Py_IsMainInterpreter(interp)) {
+ assert(!got_main);
+ got_main = 1;
+ assert(has_own_state(interp));
+ }
+#endif
+ if (has_own_state(interp)) {
+ total += _PyInterpreterState_GetAllocatedBlocks(interp);
+ }
+ }
+ HEAD_UNLOCK(runtime);
+#ifdef Py_DEBUG
+ assert(got_main);
+#endif
+ }
+ total += runtime->obmalloc.interpreter_leaks;
+ total += last_final_leaks;
+ return total;
+}
+
+Py_ssize_t
+_Py_GetGlobalAllocatedBlocks(void)
+{
+ return get_num_global_allocated_blocks(&_PyRuntime);
+}
+
#if WITH_PYMALLOC_RADIX_TREE
/*==========================================================================*/
/* radix tree for tracking arena usage. */
-#define arena_map_root (_PyRuntime.obmalloc.usage.arena_map_root)
+#define arena_map_root (state->usage.arena_map_root)
#ifdef USE_INTERIOR_NODES
-#define arena_map_mid_count (_PyRuntime.obmalloc.usage.arena_map_mid_count)
-#define arena_map_bot_count (_PyRuntime.obmalloc.usage.arena_map_bot_count)
+#define arena_map_mid_count (state->usage.arena_map_mid_count)
+#define arena_map_bot_count (state->usage.arena_map_bot_count)
#endif
/* Return a pointer to a bottom tree node, return NULL if it doesn't exist or
* it cannot be created */
static Py_ALWAYS_INLINE arena_map_bot_t *
-arena_map_get(pymem_block *p, int create)
+arena_map_get(OMState *state, pymem_block *p, int create)
{
#ifdef USE_INTERIOR_NODES
/* sanity check that IGNORE_BITS is correct */
@@ -833,11 +944,12 @@ arena_map_get(pymem_block *p, int create)
/* mark or unmark addresses covered by arena */
static int
-arena_map_mark_used(uintptr_t arena_base, int is_used)
+arena_map_mark_used(OMState *state, uintptr_t arena_base, int is_used)
{
/* sanity check that IGNORE_BITS is correct */
assert(HIGH_BITS(arena_base) == HIGH_BITS(&arena_map_root));
- arena_map_bot_t *n_hi = arena_map_get((pymem_block *)arena_base, is_used);
+ arena_map_bot_t *n_hi = arena_map_get(
+ state, (pymem_block *)arena_base, is_used);
if (n_hi == NULL) {
assert(is_used); /* otherwise node should already exist */
return 0; /* failed to allocate space for node */
@@ -862,7 +974,8 @@ arena_map_mark_used(uintptr_t arena_base, int is_used)
* must overflow to 0. However, that would mean arena_base was
* "ideal" and we should not be in this case. */
assert(arena_base < arena_base_next);
- arena_map_bot_t *n_lo = arena_map_get((pymem_block *)arena_base_next, is_used);
+ arena_map_bot_t *n_lo = arena_map_get(
+ state, (pymem_block *)arena_base_next, is_used);
if (n_lo == NULL) {
assert(is_used); /* otherwise should already exist */
n_hi->arenas[i3].tail_hi = 0;
@@ -877,9 +990,9 @@ arena_map_mark_used(uintptr_t arena_base, int is_used)
/* Return true if 'p' is a pointer inside an obmalloc arena.
* _PyObject_Free() calls this so it needs to be very fast. */
static int
-arena_map_is_used(pymem_block *p)
+arena_map_is_used(OMState *state, pymem_block *p)
{
- arena_map_bot_t *n = arena_map_get(p, 0);
+ arena_map_bot_t *n = arena_map_get(state, p, 0);
if (n == NULL) {
return 0;
}
@@ -902,7 +1015,7 @@ arena_map_is_used(pymem_block *p)
* `usable_arenas` to the return value.
*/
static struct arena_object*
-new_arena(void)
+new_arena(OMState *state)
{
struct arena_object* arenaobj;
uint excess; /* number of bytes above pool alignment */
@@ -968,7 +1081,7 @@ new_arena(void)
address = _PyObject_Arena.alloc(_PyObject_Arena.ctx, ARENA_SIZE);
#if WITH_PYMALLOC_RADIX_TREE
if (address != NULL) {
- if (!arena_map_mark_used((uintptr_t)address, 1)) {
+ if (!arena_map_mark_used(state, (uintptr_t)address, 1)) {
/* marking arena in radix tree failed, abort */
_PyObject_Arena.free(_PyObject_Arena.ctx, address, ARENA_SIZE);
address = NULL;
@@ -1011,9 +1124,9 @@ new_arena(void)
pymalloc. When the radix tree is used, 'poolp' is unused.
*/
static bool
-address_in_range(void *p, poolp Py_UNUSED(pool))
+address_in_range(OMState *state, void *p, poolp Py_UNUSED(pool))
{
- return arena_map_is_used(p);
+ return arena_map_is_used(state, p);
}
#else
/*
@@ -1094,7 +1207,7 @@ extremely desirable that it be this fast.
static bool _Py_NO_SANITIZE_ADDRESS
_Py_NO_SANITIZE_THREAD
_Py_NO_SANITIZE_MEMORY
-address_in_range(void *p, poolp pool)
+address_in_range(OMState *state, void *p, poolp pool)
{
// Since address_in_range may be reading from memory which was not allocated
// by Python, it is important that pool->arenaindex is read only once, as
@@ -1111,8 +1224,6 @@ address_in_range(void *p, poolp pool)
/*==========================================================================*/
-#define usedpools (_PyRuntime.obmalloc.pools.used)
-
// Called when freelist is exhausted. Extend the freelist if there is
// space for a block. Otherwise, remove this pool from usedpools.
static void
@@ -1138,7 +1249,7 @@ pymalloc_pool_extend(poolp pool, uint size)
* This function takes new pool and allocate a block from it.
*/
static void*
-allocate_from_new_pool(uint size)
+allocate_from_new_pool(OMState *state, uint size)
{
/* There isn't a pool of the right size class immediately
* available: use a free pool.
@@ -1150,7 +1261,7 @@ allocate_from_new_pool(uint size)
return NULL;
}
#endif
- usable_arenas = new_arena();
+ usable_arenas = new_arena(state);
if (usable_arenas == NULL) {
return NULL;
}
@@ -1274,7 +1385,7 @@ allocate_from_new_pool(uint size)
or when the max memory limit has been reached.
*/
static inline void*
-pymalloc_alloc(void *Py_UNUSED(ctx), size_t nbytes)
+pymalloc_alloc(OMState *state, void *Py_UNUSED(ctx), size_t nbytes)
{
#ifdef WITH_VALGRIND
if (UNLIKELY(running_on_valgrind == -1)) {
@@ -1314,7 +1425,7 @@ pymalloc_alloc(void *Py_UNUSED(ctx), size_t nbytes)
/* There isn't a pool of the right size class immediately
* available: use a free pool.
*/
- bp = allocate_from_new_pool(size);
+ bp = allocate_from_new_pool(state, size);
}
return (void *)bp;
@@ -1324,7 +1435,8 @@ pymalloc_alloc(void *Py_UNUSED(ctx), size_t nbytes)
void *
_PyObject_Malloc(void *ctx, size_t nbytes)
{
- void* ptr = pymalloc_alloc(ctx, nbytes);
+ OMState *state = get_state();
+ void* ptr = pymalloc_alloc(state, ctx, nbytes);
if (LIKELY(ptr != NULL)) {
return ptr;
}
@@ -1343,7 +1455,8 @@ _PyObject_Calloc(void *ctx, size_t nelem, size_t elsize)
assert(elsize == 0 || nelem <= (size_t)PY_SSIZE_T_MAX / elsize);
size_t nbytes = nelem * elsize;
- void* ptr = pymalloc_alloc(ctx, nbytes);
+ OMState *state = get_state();
+ void* ptr = pymalloc_alloc(state, ctx, nbytes);
if (LIKELY(ptr != NULL)) {
memset(ptr, 0, nbytes);
return ptr;
@@ -1358,7 +1471,7 @@ _PyObject_Calloc(void *ctx, size_t nelem, size_t elsize)
static void
-insert_to_usedpool(poolp pool)
+insert_to_usedpool(OMState *state, poolp pool)
{
assert(pool->ref.count > 0); /* else the pool is empty */
@@ -1374,7 +1487,7 @@ insert_to_usedpool(poolp pool)
}
static void
-insert_to_freepool(poolp pool)
+insert_to_freepool(OMState *state, poolp pool)
{
poolp next = pool->nextpool;
poolp prev = pool->prevpool;
@@ -1457,7 +1570,7 @@ insert_to_freepool(poolp pool)
#if WITH_PYMALLOC_RADIX_TREE
/* mark arena region as not under control of obmalloc */
- arena_map_mark_used(ao->address, 0);
+ arena_map_mark_used(state, ao->address, 0);
#endif
/* Free the entire arena. */
@@ -1544,7 +1657,7 @@ insert_to_freepool(poolp pool)
Return 1 if it was freed.
Return 0 if the block was not allocated by pymalloc_alloc(). */
static inline int
-pymalloc_free(void *Py_UNUSED(ctx), void *p)
+pymalloc_free(OMState *state, void *Py_UNUSED(ctx), void *p)
{
assert(p != NULL);
@@ -1555,7 +1668,7 @@ pymalloc_free(void *Py_UNUSED(ctx), void *p)
#endif
poolp pool = POOL_ADDR(p);
- if (UNLIKELY(!address_in_range(p, pool))) {
+ if (UNLIKELY(!address_in_range(state, p, pool))) {
return 0;
}
/* We allocated this address. */
@@ -1579,7 +1692,7 @@ pymalloc_free(void *Py_UNUSED(ctx), void *p)
* targets optimal filling when several pools contain
* blocks of the same size class.
*/
- insert_to_usedpool(pool);
+ insert_to_usedpool(state, pool);
return 1;
}
@@ -1596,7 +1709,7 @@ pymalloc_free(void *Py_UNUSED(ctx), void *p)
* previously freed pools will be allocated later
* (being not referenced, they are perhaps paged out).
*/
- insert_to_freepool(pool);
+ insert_to_freepool(state, pool);
return 1;
}
@@ -1609,7 +1722,8 @@ _PyObject_Free(void *ctx, void *p)
return;
}
- if (UNLIKELY(!pymalloc_free(ctx, p))) {
+ OMState *state = get_state();
+ if (UNLIKELY(!pymalloc_free(state, ctx, p))) {
/* pymalloc didn't allocate this address */
PyMem_RawFree(p);
raw_allocated_blocks--;
@@ -1627,7 +1741,8 @@ _PyObject_Free(void *ctx, void *p)
Return 0 if pymalloc didn't allocated p. */
static int
-pymalloc_realloc(void *ctx, void **newptr_p, void *p, size_t nbytes)
+pymalloc_realloc(OMState *state, void *ctx,
+ void **newptr_p, void *p, size_t nbytes)
{
void *bp;
poolp pool;
@@ -1643,7 +1758,7 @@ pymalloc_realloc(void *ctx, void **newptr_p, void *p, size_t nbytes)
#endif
pool = POOL_ADDR(p);
- if (!address_in_range(p, pool)) {
+ if (!address_in_range(state, p, pool)) {
/* pymalloc is not managing this block.
If nbytes <= SMALL_REQUEST_THRESHOLD, it's tempting to try to take
@@ -1696,7 +1811,8 @@ _PyObject_Realloc(void *ctx, void *ptr, size_t nbytes)
return _PyObject_Malloc(ctx, nbytes);
}
- if (pymalloc_realloc(ctx, &ptr2, ptr, nbytes)) {
+ OMState *state = get_state();
+ if (pymalloc_realloc(state, ctx, &ptr2, ptr, nbytes)) {
return ptr2;
}
@@ -1710,11 +1826,29 @@ _PyObject_Realloc(void *ctx, void *ptr, size_t nbytes)
* only be used by extensions that are compiled with pymalloc enabled. */
Py_ssize_t
-_Py_GetAllocatedBlocks(void)
+_PyInterpreterState_GetAllocatedBlocks(PyInterpreterState *Py_UNUSED(interp))
+{
+ return 0;
+}
+
+Py_ssize_t
+_Py_GetGlobalAllocatedBlocks(void)
{
return 0;
}
+void
+_PyInterpreterState_FinalizeAllocatedBlocks(PyInterpreterState *Py_UNUSED(interp))
+{
+ return;
+}
+
+void
+_Py_FinalizeAllocatedBlocks(_PyRuntimeState *Py_UNUSED(runtime))
+{
+ return;
+}
+
#endif /* WITH_PYMALLOC */
@@ -2289,6 +2423,7 @@ _PyObject_DebugMallocStats(FILE *out)
if (!_PyMem_PymallocEnabled()) {
return 0;
}
+ OMState *state = get_state();
uint i;
const uint numclasses = SMALL_REQUEST_THRESHOLD >> ALIGNMENT_SHIFT;
diff --git a/Objects/setobject.c b/Objects/setobject.c
index fcdda2a0bca2b6..58f0ae73c0c403 100644
--- a/Objects/setobject.c
+++ b/Objects/setobject.c
@@ -2543,6 +2543,7 @@ static PyTypeObject _PySetDummy_Type = {
};
static PyObject _dummy_struct = {
- _PyObject_EXTRA_INIT
- 2, &_PySetDummy_Type
+ _PyObject_EXTRA_INIT
+ { _Py_IMMORTAL_REFCNT },
+ &_PySetDummy_Type
};
diff --git a/Objects/sliceobject.c b/Objects/sliceobject.c
index 584ebce721faed..e6776ac92b669c 100644
--- a/Objects/sliceobject.c
+++ b/Objects/sliceobject.c
@@ -29,6 +29,16 @@ ellipsis_new(PyTypeObject *type, PyObject *args, PyObject *kwargs)
return Py_NewRef(Py_Ellipsis);
}
+static void
+ellipsis_dealloc(PyObject *ellipsis)
+{
+ /* This should never get called, but we also don't want to SEGV if
+ * we accidentally decref Ellipsis out of existence. Instead,
+ * since Ellipsis is an immortal object, re-set the reference count.
+ */
+ _Py_SetImmortal(ellipsis);
+}
+
static PyObject *
ellipsis_repr(PyObject *op)
{
@@ -51,7 +61,7 @@ PyTypeObject PyEllipsis_Type = {
"ellipsis", /* tp_name */
0, /* tp_basicsize */
0, /* tp_itemsize */
- 0, /*never called*/ /* tp_dealloc */
+ ellipsis_dealloc, /* tp_dealloc */
0, /* tp_vectorcall_offset */
0, /* tp_getattr */
0, /* tp_setattr */
@@ -89,7 +99,8 @@ PyTypeObject PyEllipsis_Type = {
PyObject _Py_EllipsisObject = {
_PyObject_EXTRA_INIT
- 1, &PyEllipsis_Type
+ { _Py_IMMORTAL_REFCNT },
+ &PyEllipsis_Type
};
diff --git a/Objects/structseq.c b/Objects/structseq.c
index 2a5343815866d3..88a71bc52958f5 100644
--- a/Objects/structseq.c
+++ b/Objects/structseq.c
@@ -31,6 +31,7 @@ get_type_attr_as_size(PyTypeObject *tp, PyObject *name)
PyErr_Format(PyExc_TypeError,
"Missed attribute '%U' of type %s",
name, tp->tp_name);
+ return -1;
}
return PyLong_AsSsize_t(v);
}
@@ -509,6 +510,13 @@ _PyStructSequence_InitBuiltinWithFlags(PyTypeObject *type,
PyStructSequence_Desc *desc,
unsigned long tp_flags)
{
+ if (type->tp_flags & Py_TPFLAGS_READY) {
+ if (_PyStaticType_InitBuiltin(type) < 0) {
+ goto failed_init_builtin;
+ }
+ return 0;
+ }
+
PyMemberDef *members;
Py_ssize_t n_members, n_unnamed_members;
@@ -517,18 +525,25 @@ _PyStructSequence_InitBuiltinWithFlags(PyTypeObject *type,
return -1;
}
initialize_static_fields(type, desc, members, tp_flags);
+
+ Py_INCREF(type); // XXX It should be immortal.
if (_PyStaticType_InitBuiltin(type) < 0) {
PyMem_Free(members);
- PyErr_Format(PyExc_RuntimeError,
- "Can't initialize builtin type %s",
- desc->name);
- return -1;
+ goto failed_init_builtin;
}
- if (initialize_static_type(type, desc, n_members, n_unnamed_members) < 0) {
+
+ if (initialize_structseq_dict(
+ desc, type->tp_dict, n_members, n_unnamed_members) < 0) {
PyMem_Free(members);
return -1;
}
return 0;
+
+failed_init_builtin:
+ PyErr_Format(PyExc_RuntimeError,
+ "Can't initialize builtin type %s",
+ desc->name);
+ return -1;
}
int
diff --git a/Objects/tupleobject.c b/Objects/tupleobject.c
index 61fab4078d66ba..991edcc86677de 100644
--- a/Objects/tupleobject.c
+++ b/Objects/tupleobject.c
@@ -960,24 +960,6 @@ _PyTuple_Resize(PyObject **pv, Py_ssize_t newsize)
}
-PyStatus
-_PyTuple_InitTypes(PyInterpreterState *interp)
-{
- if (!_Py_IsMainInterpreter(interp)) {
- return _PyStatus_OK();
- }
-
- if (PyType_Ready(&PyTuple_Type) < 0) {
- return _PyStatus_ERR("Can't initialize tuple type");
- }
-
- if (PyType_Ready(&PyTupleIter_Type) < 0) {
- return _PyStatus_ERR("Can't initialize tuple iterator type");
- }
-
- return _PyStatus_OK();
-}
-
static void maybe_freelist_clear(PyInterpreterState *, int);
void
diff --git a/Objects/typeobject.c b/Objects/typeobject.c
index 9ea458f30394e3..38b99315457a58 100644
--- a/Objects/typeobject.c
+++ b/Objects/typeobject.c
@@ -45,7 +45,9 @@ class object "PyObject *" "&PyBaseObject_Type"
PyUnicode_IS_READY(name) && \
(PyUnicode_GET_LENGTH(name) <= MCACHE_MAX_ATTR_SIZE)
-#define next_version_tag (_PyRuntime.types.next_version_tag)
+#define NEXT_GLOBAL_VERSION_TAG _PyRuntime.types.next_version_tag
+#define NEXT_VERSION_TAG(interp) \
+ (interp)->types.next_version_tag
typedef struct PySlot_Offset {
short subslot_offset;
@@ -318,27 +320,11 @@ _PyType_InitCache(PyInterpreterState *interp)
entry->version = 0;
// Set to None so _PyType_Lookup() can use Py_SETREF(),
// rather than using slower Py_XSETREF().
- // (See _PyType_FixCacheRefcounts() about the refcount.)
entry->name = Py_None;
entry->value = NULL;
}
}
-// This is the temporary fix used by pycore_create_interpreter(),
-// in pylifecycle.c. _PyType_InitCache() is called before the GIL
-// has been created (for the main interpreter) and without the
-// "current" thread state set. This causes crashes when the
-// reftotal is updated, so we don't modify the refcount in
-// _PyType_InitCache(), and instead do it later by calling
-// _PyType_FixCacheRefcounts().
-// XXX This workaround should be removed once we have immortal
-// objects (PEP 683).
-void
-_PyType_FixCacheRefcounts(void)
-{
- _Py_RefcntAdd(Py_None, (1 << MCACHE_SIZE_EXP));
-}
-
static unsigned int
_PyType_ClearCache(PyInterpreterState *interp)
@@ -348,7 +334,7 @@ _PyType_ClearCache(PyInterpreterState *interp)
// use Py_SETREF() rather than using slower Py_XSETREF().
type_cache_clear(cache, Py_None);
- return next_version_tag - 1;
+ return NEXT_VERSION_TAG(interp) - 1;
}
@@ -417,7 +403,7 @@ PyType_ClearWatcher(int watcher_id)
return 0;
}
-static int assign_version_tag(PyTypeObject *type);
+static int assign_version_tag(PyInterpreterState *interp, PyTypeObject *type);
int
PyType_Watch(int watcher_id, PyObject* obj)
@@ -432,7 +418,7 @@ PyType_Watch(int watcher_id, PyObject* obj)
return -1;
}
// ensure we will get a callback on the next modification
- assign_version_tag(type);
+ assign_version_tag(interp, type);
type->tp_watched |= (1 << watcher_id);
return 0;
}
@@ -565,7 +551,9 @@ type_mro_modified(PyTypeObject *type, PyObject *bases) {
}
}
return;
+
clear:
+ assert(!(type->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN));
type->tp_flags &= ~Py_TPFLAGS_VALID_VERSION_TAG;
type->tp_version_tag = 0; /* 0 is not a valid version tag */
if (PyType_HasFeature(type, Py_TPFLAGS_HEAPTYPE)) {
@@ -576,7 +564,7 @@ type_mro_modified(PyTypeObject *type, PyObject *bases) {
}
static int
-assign_version_tag(PyTypeObject *type)
+assign_version_tag(PyInterpreterState *interp, PyTypeObject *type)
{
/* Ensure that the tp_version_tag is valid and set
Py_TPFLAGS_VALID_VERSION_TAG. To respect the invariant, this
@@ -590,24 +578,42 @@ assign_version_tag(PyTypeObject *type)
return 0;
}
- if (next_version_tag == 0) {
- /* We have run out of version numbers */
- return 0;
+ if (type->tp_flags & Py_TPFLAGS_IMMUTABLETYPE) {
+ /* static types */
+ if (NEXT_GLOBAL_VERSION_TAG > _Py_MAX_GLOBAL_TYPE_VERSION_TAG) {
+ /* We have run out of version numbers */
+ return 0;
+ }
+ type->tp_version_tag = NEXT_GLOBAL_VERSION_TAG++;
+ assert (type->tp_version_tag <= _Py_MAX_GLOBAL_TYPE_VERSION_TAG);
+ }
+ else {
+ /* heap types */
+ if (NEXT_VERSION_TAG(interp) == 0) {
+ /* We have run out of version numbers */
+ return 0;
+ }
+ type->tp_version_tag = NEXT_VERSION_TAG(interp)++;
+ assert (type->tp_version_tag != 0);
}
- type->tp_version_tag = next_version_tag++;
- assert (type->tp_version_tag != 0);
PyObject *bases = type->tp_bases;
Py_ssize_t n = PyTuple_GET_SIZE(bases);
for (Py_ssize_t i = 0; i < n; i++) {
PyObject *b = PyTuple_GET_ITEM(bases, i);
- if (!assign_version_tag(_PyType_CAST(b)))
+ if (!assign_version_tag(interp, _PyType_CAST(b)))
return 0;
}
type->tp_flags |= Py_TPFLAGS_VALID_VERSION_TAG;
return 1;
}
+int PyUnstable_Type_AssignVersionTag(PyTypeObject *type)
+{
+ PyInterpreterState *interp = _PyInterpreterState_GET();
+ return assign_version_tag(interp, type);
+}
+
static PyMemberDef type_members[] = {
{"__basicsize__", T_PYSSIZET, offsetof(PyTypeObject,tp_basicsize),READONLY},
@@ -2357,7 +2363,15 @@ mro_internal(PyTypeObject *type, PyObject **p_old_mro)
from the custom MRO */
type_mro_modified(type, type->tp_bases);
- PyType_Modified(type);
+ // XXX Expand this to Py_TPFLAGS_IMMUTABLETYPE?
+ if (!(type->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN)) {
+ PyType_Modified(type);
+ }
+ else {
+ /* For static builtin types, this is only called during init
+ before the method cache has been populated. */
+ assert(_PyType_HasFeature(type, Py_TPFLAGS_VALID_VERSION_TAG));
+ }
if (p_old_mro != NULL)
*p_old_mro = old_mro; /* transfer the ownership */
@@ -4192,6 +4206,7 @@ _PyType_Lookup(PyTypeObject *type, PyObject *name)
{
PyObject *res;
int error;
+ PyInterpreterState *interp = _PyInterpreterState_GET();
unsigned int h = MCACHE_HASH_METHOD(type, name);
struct type_cache *cache = get_type_cache();
@@ -4226,7 +4241,7 @@ _PyType_Lookup(PyTypeObject *type, PyObject *name)
return NULL;
}
- if (MCACHE_CACHEABLE_NAME(name) && assign_version_tag(type)) {
+ if (MCACHE_CACHEABLE_NAME(name) && assign_version_tag(interp, type)) {
h = MCACHE_HASH_METHOD(type, name);
struct type_cache_entry *entry = &cache->hashtable[h];
entry->version = type->tp_version_tag;
@@ -4344,7 +4359,7 @@ _Py_type_getattro_impl(PyTypeObject *type, PyObject *name, int * suppress_missin
/* Give up */
if (suppress_missing_attribute == NULL) {
PyErr_Format(PyExc_AttributeError,
- "type object '%.50s' has no attribute '%U'",
+ "type object '%.100s' has no attribute '%U'",
type->tp_name, name);
} else {
// signal the caller we have not set an PyExc_AttributeError and gave up
@@ -6687,8 +6702,10 @@ type_ready_mro(PyTypeObject *type)
assert(type->tp_mro != NULL);
assert(PyTuple_Check(type->tp_mro));
- /* All bases of statically allocated type should be statically allocated */
+ /* All bases of statically allocated type should be statically allocated,
+ and static builtin types must have static builtin bases. */
if (!(type->tp_flags & Py_TPFLAGS_HEAPTYPE)) {
+ assert(type->tp_flags & Py_TPFLAGS_IMMUTABLETYPE);
PyObject *mro = type->tp_mro;
Py_ssize_t n = PyTuple_GET_SIZE(mro);
for (Py_ssize_t i = 0; i < n; i++) {
@@ -6700,6 +6717,8 @@ type_ready_mro(PyTypeObject *type)
type->tp_name, base->tp_name);
return -1;
}
+ assert(!(type->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN) ||
+ (base->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN));
}
}
return 0;
@@ -6929,8 +6948,12 @@ type_ready_post_checks(PyTypeObject *type)
static int
type_ready(PyTypeObject *type)
{
+ _PyObject_ASSERT((PyObject *)type,
+ (type->tp_flags & Py_TPFLAGS_READYING) == 0);
+ type->tp_flags |= Py_TPFLAGS_READYING;
+
if (type_ready_pre_checks(type) < 0) {
- return -1;
+ goto error;
}
#ifdef Py_TRACE_REFS
@@ -6944,41 +6967,49 @@ type_ready(PyTypeObject *type)
/* Initialize tp_dict: _PyType_IsReady() tests if tp_dict != NULL */
if (type_ready_set_dict(type) < 0) {
- return -1;
+ goto error;
}
if (type_ready_set_bases(type) < 0) {
- return -1;
+ goto error;
}
if (type_ready_mro(type) < 0) {
- return -1;
+ goto error;
}
if (type_ready_set_new(type) < 0) {
- return -1;
+ goto error;
}
if (type_ready_fill_dict(type) < 0) {
- return -1;
+ goto error;
}
if (type_ready_inherit(type) < 0) {
- return -1;
+ goto error;
}
if (type_ready_preheader(type) < 0) {
- return -1;
+ goto error;
}
if (type_ready_set_hash(type) < 0) {
- return -1;
+ goto error;
}
if (type_ready_add_subclasses(type) < 0) {
- return -1;
+ goto error;
}
if (type_ready_managed_dict(type) < 0) {
- return -1;
+ goto error;
}
if (type_ready_post_checks(type) < 0) {
- return -1;
+ goto error;
}
+
+ /* All done -- set the ready flag */
+ type->tp_flags = (type->tp_flags & ~Py_TPFLAGS_READYING) | Py_TPFLAGS_READY;
+
+ assert(_PyType_CheckConsistency(type));
return 0;
-}
+error:
+ type->tp_flags &= ~Py_TPFLAGS_READYING;
+ return -1;
+}
int
PyType_Ready(PyTypeObject *type)
@@ -6987,35 +7018,37 @@ PyType_Ready(PyTypeObject *type)
assert(_PyType_CheckConsistency(type));
return 0;
}
- _PyObject_ASSERT((PyObject *)type,
- (type->tp_flags & Py_TPFLAGS_READYING) == 0);
-
- type->tp_flags |= Py_TPFLAGS_READYING;
+ assert(!(type->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN));
/* Historically, all static types were immutable. See bpo-43908 */
if (!(type->tp_flags & Py_TPFLAGS_HEAPTYPE)) {
type->tp_flags |= Py_TPFLAGS_IMMUTABLETYPE;
}
- if (type_ready(type) < 0) {
- type->tp_flags &= ~Py_TPFLAGS_READYING;
- return -1;
- }
-
- /* All done -- set the ready flag */
- type->tp_flags = (type->tp_flags & ~Py_TPFLAGS_READYING) | Py_TPFLAGS_READY;
- assert(_PyType_CheckConsistency(type));
- return 0;
+ return type_ready(type);
}
int
_PyStaticType_InitBuiltin(PyTypeObject *self)
{
- self->tp_flags = self->tp_flags | _Py_TPFLAGS_STATIC_BUILTIN;
+ assert(!(self->tp_flags & Py_TPFLAGS_HEAPTYPE));
+
+ if (self->tp_flags & Py_TPFLAGS_READY) {
+ assert(self->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN);
+ assert(_PyType_CheckConsistency(self));
+ return 0;
+ }
+
+ self->tp_flags |= _Py_TPFLAGS_STATIC_BUILTIN;
+ self->tp_flags |= Py_TPFLAGS_IMMUTABLETYPE;
+
+ assert(NEXT_GLOBAL_VERSION_TAG <= _Py_MAX_GLOBAL_TYPE_VERSION_TAG);
+ self->tp_version_tag = NEXT_GLOBAL_VERSION_TAG++;
+ self->tp_flags |= Py_TPFLAGS_VALID_VERSION_TAG;
static_builtin_state_init(self);
- int res = PyType_Ready(self);
+ int res = type_ready(self);
if (res < 0) {
static_builtin_state_clear(self);
}
@@ -9357,42 +9390,33 @@ super_repr(PyObject *self)
su->type ? su->type->tp_name : "NULL");
}
+/* Do a super lookup without executing descriptors or falling back to getattr
+on the super object itself.
+
+May return NULL with or without an exception set, like PyDict_GetItemWithError. */
static PyObject *
-super_getattro(PyObject *self, PyObject *name)
+_super_lookup_descr(PyTypeObject *su_type, PyTypeObject *su_obj_type, PyObject *name)
{
- superobject *su = (superobject *)self;
- PyTypeObject *starttype;
- PyObject *mro;
+ PyObject *mro, *res;
Py_ssize_t i, n;
- starttype = su->obj_type;
- if (starttype == NULL)
- goto skip;
-
- /* We want __class__ to return the class of the super object
- (i.e. super, or a subclass), not the class of su->obj. */
- if (PyUnicode_Check(name) &&
- PyUnicode_GET_LENGTH(name) == 9 &&
- _PyUnicode_Equal(name, &_Py_ID(__class__)))
- goto skip;
-
- mro = starttype->tp_mro;
+ mro = su_obj_type->tp_mro;
if (mro == NULL)
- goto skip;
+ return NULL;
assert(PyTuple_Check(mro));
n = PyTuple_GET_SIZE(mro);
/* No need to check the last one: it's gonna be skipped anyway. */
for (i = 0; i+1 < n; i++) {
- if ((PyObject *)(su->type) == PyTuple_GET_ITEM(mro, i))
+ if ((PyObject *)(su_type) == PyTuple_GET_ITEM(mro, i))
break;
}
i++; /* skip su->type (if any) */
if (i >= n)
- goto skip;
+ return NULL;
- /* keep a strong reference to mro because starttype->tp_mro can be
+ /* keep a strong reference to mro because su_obj_type->tp_mro can be
replaced during PyDict_GetItemWithError(dict, name) */
Py_INCREF(mro);
do {
@@ -9400,21 +9424,9 @@ super_getattro(PyObject *self, PyObject *name)
PyObject *dict = _PyType_CAST(obj)->tp_dict;
assert(dict != NULL && PyDict_Check(dict));
- PyObject *res = PyDict_GetItemWithError(dict, name);
+ res = PyDict_GetItemWithError(dict, name);
if (res != NULL) {
Py_INCREF(res);
-
- descrgetfunc f = Py_TYPE(res)->tp_descr_get;
- if (f != NULL) {
- PyObject *res2;
- res2 = f(res,
- /* Only pass 'obj' param if this is instance-mode super
- (See SF ID #743627) */
- (su->obj == (PyObject *)starttype) ? NULL : su->obj,
- (PyObject *)starttype);
- Py_SETREF(res, res2);
- }
-
Py_DECREF(mro);
return res;
}
@@ -9426,9 +9438,75 @@ super_getattro(PyObject *self, PyObject *name)
i++;
} while (i < n);
Py_DECREF(mro);
+ return NULL;
+}
+
+// if `method` is non-NULL, we are looking for a method descriptor,
+// and setting `*method = 1` means we found one.
+static PyObject *
+do_super_lookup(superobject *su, PyTypeObject *su_type, PyObject *su_obj,
+ PyTypeObject *su_obj_type, PyObject *name, int *method)
+{
+ PyObject *res;
+ int temp_su = 0;
+
+ if (su_obj_type == NULL) {
+ goto skip;
+ }
+
+ res = _super_lookup_descr(su_type, su_obj_type, name);
+ if (res != NULL) {
+ if (method && _PyType_HasFeature(Py_TYPE(res), Py_TPFLAGS_METHOD_DESCRIPTOR)) {
+ *method = 1;
+ }
+ else {
+ descrgetfunc f = Py_TYPE(res)->tp_descr_get;
+ if (f != NULL) {
+ PyObject *res2;
+ res2 = f(res,
+ /* Only pass 'obj' param if this is instance-mode super
+ (See SF ID #743627) */
+ (su_obj == (PyObject *)su_obj_type) ? NULL : su_obj,
+ (PyObject *)su_obj_type);
+ Py_SETREF(res, res2);
+ }
+ }
+
+ return res;
+ }
+ else if (PyErr_Occurred()) {
+ return NULL;
+ }
skip:
- return PyObject_GenericGetAttr(self, name);
+ if (su == NULL) {
+ PyObject *args[] = {(PyObject *)su_type, su_obj};
+ su = (superobject *)PyObject_Vectorcall((PyObject *)&PySuper_Type, args, 2, NULL);
+ if (su == NULL) {
+ return NULL;
+ }
+ temp_su = 1;
+ }
+ res = PyObject_GenericGetAttr((PyObject *)su, name);
+ if (temp_su) {
+ Py_DECREF(su);
+ }
+ return res;
+}
+
+static PyObject *
+super_getattro(PyObject *self, PyObject *name)
+{
+ superobject *su = (superobject *)self;
+
+ /* We want __class__ to return the class of the super object
+ (i.e. super, or a subclass), not the class of su->obj. */
+ if (PyUnicode_Check(name) &&
+ PyUnicode_GET_LENGTH(name) == 9 &&
+ _PyUnicode_Equal(name, &_Py_ID(__class__)))
+ return PyObject_GenericGetAttr(self, name);
+
+ return do_super_lookup(su, su->type, su->obj, su->obj_type, name, NULL);
}
static PyTypeObject *
@@ -9484,6 +9562,30 @@ supercheck(PyTypeObject *type, PyObject *obj)
return NULL;
}
+PyObject *
+_PySuper_Lookup(PyTypeObject *su_type, PyObject *su_obj, PyObject *name, int *method)
+{
+ PyTypeObject *su_obj_type = supercheck(su_type, su_obj);
+ if (su_obj_type == NULL) {
+ return NULL;
+ }
+ PyObject *res = do_super_lookup(NULL, su_type, su_obj, su_obj_type, name, method);
+ Py_DECREF(su_obj_type);
+ return res;
+}
+
+PyObject *
+_PySuper_LookupDescr(PyTypeObject *su_type, PyObject *su_obj, PyObject *name)
+{
+ PyTypeObject *su_obj_type = supercheck(su_type, su_obj);
+ if (su_obj_type == NULL) {
+ return NULL;
+ }
+ PyObject *res = _super_lookup_descr(su_type, su_obj_type, name);
+ Py_DECREF(su_obj_type);
+ return res;
+}
+
static PyObject *
super_descr_get(PyObject *self, PyObject *obj, PyObject *type)
{
diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c
index 85e5ae735709fd..7537c12e92680c 100644
--- a/Objects/unicodeobject.c
+++ b/Objects/unicodeobject.c
@@ -228,14 +228,18 @@ static inline PyObject* unicode_new_empty(void)
to strings in this dictionary are *not* counted in the string's ob_refcnt.
When the interned string reaches a refcnt of 0 the string deallocation
function will delete the reference from this dictionary.
- Another way to look at this is that to say that the actual reference
- count of a string is: s->ob_refcnt + (s->state ? 2 : 0)
*/
static inline PyObject *get_interned_dict(PyInterpreterState *interp)
{
return _Py_INTERP_CACHED_OBJECT(interp, interned_strings);
}
+Py_ssize_t
+_PyUnicode_InternedSize()
+{
+ return PyObject_Length(get_interned_dict(_PyInterpreterState_GET()));
+}
+
static int
init_interned_dict(PyInterpreterState *interp)
{
@@ -1538,30 +1542,19 @@ find_maxchar_surrogates(const wchar_t *begin, const wchar_t *end,
static void
unicode_dealloc(PyObject *unicode)
{
- PyInterpreterState *interp = _PyInterpreterState_GET();
#ifdef Py_DEBUG
if (!unicode_is_finalizing() && unicode_is_singleton(unicode)) {
_Py_FatalRefcountError("deallocating an Unicode singleton");
}
#endif
+ /* This should never get called, but we also don't want to SEGV if
+ * we accidentally decref an immortal string out of existence. Since
+ * the string is an immortal object, just re-set the reference count.
+ */
if (PyUnicode_CHECK_INTERNED(unicode)) {
- /* Revive the dead object temporarily. PyDict_DelItem() removes two
- references (key and value) which were ignored by
- PyUnicode_InternInPlace(). Use refcnt=3 rather than refcnt=2
- to prevent calling unicode_dealloc() again. Adjust refcnt after
- PyDict_DelItem(). */
- assert(Py_REFCNT(unicode) == 0);
- Py_SET_REFCNT(unicode, 3);
- PyObject *interned = get_interned_dict(interp);
- assert(interned != NULL);
- if (PyDict_DelItem(interned, unicode) != 0) {
- _PyErr_WriteUnraisableMsg("deletion of interned string failed",
- NULL);
- }
- assert(Py_REFCNT(unicode) == 1);
- Py_SET_REFCNT(unicode, 0);
+ _Py_SetImmortal(unicode);
+ return;
}
-
if (_PyUnicode_HAS_UTF8_MEMORY(unicode)) {
PyObject_Free(_PyUnicode_UTF8(unicode));
}
@@ -14580,10 +14573,6 @@ _PyUnicode_InitGlobalObjects(PyInterpreterState *interp)
PyStatus
_PyUnicode_InitTypes(PyInterpreterState *interp)
{
- if (!_Py_IsMainInterpreter(interp)) {
- return _PyStatus_OK();
- }
-
if (_PyStaticType_InitBuiltin(&EncodingMapType) < 0) {
goto error;
}
@@ -14637,11 +14626,21 @@ _PyUnicode_InternInPlace(PyInterpreterState *interp, PyObject **p)
return;
}
- /* The two references in interned dict (key and value) are not counted by
- refcnt. unicode_dealloc() and _PyUnicode_ClearInterned() take care of
- this. */
- Py_SET_REFCNT(s, Py_REFCNT(s) - 2);
- _PyUnicode_STATE(s).interned = 1;
+ if (_Py_IsImmortal(s)) {
+ _PyUnicode_STATE(*p).interned = SSTATE_INTERNED_IMMORTAL_STATIC;
+ return;
+ }
+#ifdef Py_REF_DEBUG
+ /* The reference count value excluding the 2 references from the
+ interned dictionary should be excluded from the RefTotal. The
+ decrements to these objects will not be registered so they
+ need to be accounted for in here. */
+ for (Py_ssize_t i = 0; i < Py_REFCNT(s) - 2; i++) {
+ _Py_DecRefTotal(_PyInterpreterState_GET());
+ }
+#endif
+ _Py_SetImmortal(s);
+ _PyUnicode_STATE(*p).interned = SSTATE_INTERNED_IMMORTAL;
}
void
@@ -14681,10 +14680,20 @@ _PyUnicode_ClearInterned(PyInterpreterState *interp)
}
assert(PyDict_CheckExact(interned));
- /* Interned unicode strings are not forcibly deallocated; rather, we give
- them their stolen references back, and then clear and DECREF the
- interned dict. */
-
+ /* TODO:
+ * Currently, the runtime is not able to guarantee that it can exit without
+ * allocations that carry over to a future initialization of Python within
+ * the same process. i.e:
+ * ./python -X showrefcount -c 'import itertools'
+ * [237 refs, 237 blocks]
+ *
+ * Therefore, this should remain disabled until there is a strict guarantee
+ * that no memory will be left after `Py_Finalize`.
+ */
+#ifdef Py_DEBUG
+ /* For all non-singleton interned strings, restore the two valid references
+ to that instance from within the intern string dictionary and let the
+ normal reference counting process clean up these instances. */
#ifdef INTERNED_STATS
fprintf(stderr, "releasing %zd interned strings\n",
PyDict_GET_SIZE(interned));
@@ -14694,15 +14703,27 @@ _PyUnicode_ClearInterned(PyInterpreterState *interp)
Py_ssize_t pos = 0;
PyObject *s, *ignored_value;
while (PyDict_Next(interned, &pos, &s, &ignored_value)) {
- assert(PyUnicode_CHECK_INTERNED(s));
- // Restore the two references (key and value) ignored
- // by PyUnicode_InternInPlace().
- Py_SET_REFCNT(s, Py_REFCNT(s) + 2);
+ assert(PyUnicode_IS_READY(s));
+ switch (PyUnicode_CHECK_INTERNED(s)) {
+ case SSTATE_INTERNED_IMMORTAL:
+ // Skip the Immortal Instance check and restore
+ // the two references (key and value) ignored
+ // by PyUnicode_InternInPlace().
+ s->ob_refcnt = 2;
#ifdef INTERNED_STATS
- total_length += PyUnicode_GET_LENGTH(s);
+ total_length += PyUnicode_GET_LENGTH(s);
#endif
-
- _PyUnicode_STATE(s).interned = 0;
+ break;
+ case SSTATE_INTERNED_IMMORTAL_STATIC:
+ break;
+ case SSTATE_INTERNED_MORTAL:
+ /* fall through */
+ case SSTATE_NOT_INTERNED:
+ /* fall through */
+ default:
+ Py_UNREACHABLE();
+ }
+ _PyUnicode_STATE(s).interned = SSTATE_NOT_INTERNED;
}
#ifdef INTERNED_STATS
fprintf(stderr,
@@ -14710,6 +14731,12 @@ _PyUnicode_ClearInterned(PyInterpreterState *interp)
total_length);
#endif
+ struct _Py_unicode_state *state = &interp->unicode;
+ struct _Py_unicode_ids *ids = &state->ids;
+ for (Py_ssize_t i=0; i < ids->size; i++) {
+ Py_XINCREF(ids->array[i]);
+ }
+#endif /* Py_DEBUG */
clear_interned_dict(interp);
}
diff --git a/Objects/weakrefobject.c b/Objects/weakrefobject.c
index 5a3e49a6fe45e3..c1afe63ecf66f6 100644
--- a/Objects/weakrefobject.c
+++ b/Objects/weakrefobject.c
@@ -170,10 +170,7 @@ weakref_repr(PyWeakReference *self)
}
Py_INCREF(obj);
- if (_PyObject_LookupAttr(obj, &_Py_ID(__name__), &name) < 0) {
- Py_DECREF(obj);
- return NULL;
- }
+ name = _PyObject_LookupSpecial(obj, &_Py_ID(__name__));
if (name == NULL || !PyUnicode_Check(name)) {
repr = PyUnicode_FromFormat(
"",
diff --git a/PC/clinic/winreg.c.h b/PC/clinic/winreg.c.h
index 7a9474301da8a1..4109c85276f0a4 100644
--- a/PC/clinic/winreg.c.h
+++ b/PC/clinic/winreg.c.h
@@ -219,14 +219,14 @@ winreg_ConnectRegistry(PyObject *module, PyObject *const *args, Py_ssize_t nargs
_PyArg_BadArgument("ConnectRegistry", "argument 1", "str or None", args[0]);
goto exit;
}
- if (!clinic_HKEY_converter(args[1], &key)) {
+ if (!clinic_HKEY_converter(_PyModule_GetState(module), args[1], &key)) {
goto exit;
}
_return_value = winreg_ConnectRegistry_impl(module, computer_name, key);
if (_return_value == NULL) {
goto exit;
}
- return_value = PyHKEY_FromHKEY(_return_value);
+ return_value = PyHKEY_FromHKEY(_PyModule_GetState(module), _return_value);
exit:
/* Cleanup for computer_name */
@@ -275,7 +275,7 @@ winreg_CreateKey(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
if (!_PyArg_CheckPositional("CreateKey", nargs, 2, 2)) {
goto exit;
}
- if (!clinic_HKEY_converter(args[0], &key)) {
+ if (!clinic_HKEY_converter(_PyModule_GetState(module), args[0], &key)) {
goto exit;
}
if (args[1] == Py_None) {
@@ -295,7 +295,7 @@ winreg_CreateKey(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
if (_return_value == NULL) {
goto exit;
}
- return_value = PyHKEY_FromHKEY(_return_value);
+ return_value = PyHKEY_FromHKEY(_PyModule_GetState(module), _return_value);
exit:
/* Cleanup for sub_key */
@@ -382,7 +382,7 @@ winreg_CreateKeyEx(PyObject *module, PyObject *const *args, Py_ssize_t nargs, Py
if (!args) {
goto exit;
}
- if (!clinic_HKEY_converter(args[0], &key)) {
+ if (!clinic_HKEY_converter(_PyModule_GetState(module), args[0], &key)) {
goto exit;
}
if (args[1] == Py_None) {
@@ -419,7 +419,7 @@ winreg_CreateKeyEx(PyObject *module, PyObject *const *args, Py_ssize_t nargs, Py
if (_return_value == NULL) {
goto exit;
}
- return_value = PyHKEY_FromHKEY(_return_value);
+ return_value = PyHKEY_FromHKEY(_PyModule_GetState(module), _return_value);
exit:
/* Cleanup for sub_key */
@@ -466,7 +466,7 @@ winreg_DeleteKey(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
if (!_PyArg_CheckPositional("DeleteKey", nargs, 2, 2)) {
goto exit;
}
- if (!clinic_HKEY_converter(args[0], &key)) {
+ if (!clinic_HKEY_converter(_PyModule_GetState(module), args[0], &key)) {
goto exit;
}
if (!PyUnicode_Check(args[1])) {
@@ -566,7 +566,7 @@ winreg_DeleteKeyEx(PyObject *module, PyObject *const *args, Py_ssize_t nargs, Py
if (!args) {
goto exit;
}
- if (!clinic_HKEY_converter(args[0], &key)) {
+ if (!clinic_HKEY_converter(_PyModule_GetState(module), args[0], &key)) {
goto exit;
}
if (!PyUnicode_Check(args[1])) {
@@ -634,7 +634,7 @@ winreg_DeleteValue(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
if (!_PyArg_CheckPositional("DeleteValue", nargs, 2, 2)) {
goto exit;
}
- if (!clinic_HKEY_converter(args[0], &key)) {
+ if (!clinic_HKEY_converter(_PyModule_GetState(module), args[0], &key)) {
goto exit;
}
if (args[1] == Py_None) {
@@ -694,7 +694,7 @@ winreg_EnumKey(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
if (!_PyArg_CheckPositional("EnumKey", nargs, 2, 2)) {
goto exit;
}
- if (!clinic_HKEY_converter(args[0], &key)) {
+ if (!clinic_HKEY_converter(_PyModule_GetState(module), args[0], &key)) {
goto exit;
}
index = _PyLong_AsInt(args[1]);
@@ -751,7 +751,7 @@ winreg_EnumValue(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
if (!_PyArg_CheckPositional("EnumValue", nargs, 2, 2)) {
goto exit;
}
- if (!clinic_HKEY_converter(args[0], &key)) {
+ if (!clinic_HKEY_converter(_PyModule_GetState(module), args[0], &key)) {
goto exit;
}
index = _PyLong_AsInt(args[1]);
@@ -839,7 +839,7 @@ winreg_FlushKey(PyObject *module, PyObject *arg)
PyObject *return_value = NULL;
HKEY key;
- if (!clinic_HKEY_converter(arg, &key)) {
+ if (!clinic_HKEY_converter(_PyModule_GetState(module), arg, &key)) {
goto exit;
}
return_value = winreg_FlushKey_impl(module, key);
@@ -898,7 +898,7 @@ winreg_LoadKey(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
if (!_PyArg_CheckPositional("LoadKey", nargs, 3, 3)) {
goto exit;
}
- if (!clinic_HKEY_converter(args[0], &key)) {
+ if (!clinic_HKEY_converter(_PyModule_GetState(module), args[0], &key)) {
goto exit;
}
if (!PyUnicode_Check(args[1])) {
@@ -999,7 +999,7 @@ winreg_OpenKey(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObje
if (!args) {
goto exit;
}
- if (!clinic_HKEY_converter(args[0], &key)) {
+ if (!clinic_HKEY_converter(_PyModule_GetState(module), args[0], &key)) {
goto exit;
}
if (args[1] == Py_None) {
@@ -1036,7 +1036,7 @@ winreg_OpenKey(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObje
if (_return_value == NULL) {
goto exit;
}
- return_value = PyHKEY_FromHKEY(_return_value);
+ return_value = PyHKEY_FromHKEY(_PyModule_GetState(module), _return_value);
exit:
/* Cleanup for sub_key */
@@ -1116,7 +1116,7 @@ winreg_OpenKeyEx(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyOb
if (!args) {
goto exit;
}
- if (!clinic_HKEY_converter(args[0], &key)) {
+ if (!clinic_HKEY_converter(_PyModule_GetState(module), args[0], &key)) {
goto exit;
}
if (args[1] == Py_None) {
@@ -1153,7 +1153,7 @@ winreg_OpenKeyEx(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyOb
if (_return_value == NULL) {
goto exit;
}
- return_value = PyHKEY_FromHKEY(_return_value);
+ return_value = PyHKEY_FromHKEY(_PyModule_GetState(module), _return_value);
exit:
/* Cleanup for sub_key */
@@ -1193,7 +1193,7 @@ winreg_QueryInfoKey(PyObject *module, PyObject *arg)
PyObject *return_value = NULL;
HKEY key;
- if (!clinic_HKEY_converter(arg, &key)) {
+ if (!clinic_HKEY_converter(_PyModule_GetState(module), arg, &key)) {
goto exit;
}
return_value = winreg_QueryInfoKey_impl(module, key);
@@ -1242,7 +1242,7 @@ winreg_QueryValue(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
if (!_PyArg_CheckPositional("QueryValue", nargs, 2, 2)) {
goto exit;
}
- if (!clinic_HKEY_converter(args[0], &key)) {
+ if (!clinic_HKEY_converter(_PyModule_GetState(module), args[0], &key)) {
goto exit;
}
if (args[1] == Py_None) {
@@ -1303,7 +1303,7 @@ winreg_QueryValueEx(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
if (!_PyArg_CheckPositional("QueryValueEx", nargs, 2, 2)) {
goto exit;
}
- if (!clinic_HKEY_converter(args[0], &key)) {
+ if (!clinic_HKEY_converter(_PyModule_GetState(module), args[0], &key)) {
goto exit;
}
if (args[1] == Py_None) {
@@ -1369,7 +1369,7 @@ winreg_SaveKey(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
if (!_PyArg_CheckPositional("SaveKey", nargs, 2, 2)) {
goto exit;
}
- if (!clinic_HKEY_converter(args[0], &key)) {
+ if (!clinic_HKEY_converter(_PyModule_GetState(module), args[0], &key)) {
goto exit;
}
if (!PyUnicode_Check(args[1])) {
@@ -1438,7 +1438,7 @@ winreg_SetValue(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
if (!_PyArg_CheckPositional("SetValue", nargs, 4, 4)) {
goto exit;
}
- if (!clinic_HKEY_converter(args[0], &key)) {
+ if (!clinic_HKEY_converter(_PyModule_GetState(module), args[0], &key)) {
goto exit;
}
if (args[1] == Py_None) {
@@ -1542,7 +1542,7 @@ winreg_SetValueEx(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
if (!_PyArg_CheckPositional("SetValueEx", nargs, 5, 5)) {
goto exit;
}
- if (!clinic_HKEY_converter(args[0], &key)) {
+ if (!clinic_HKEY_converter(_PyModule_GetState(module), args[0], &key)) {
goto exit;
}
if (args[1] == Py_None) {
@@ -1603,7 +1603,7 @@ winreg_DisableReflectionKey(PyObject *module, PyObject *arg)
PyObject *return_value = NULL;
HKEY key;
- if (!clinic_HKEY_converter(arg, &key)) {
+ if (!clinic_HKEY_converter(_PyModule_GetState(module), arg, &key)) {
goto exit;
}
return_value = winreg_DisableReflectionKey_impl(module, key);
@@ -1641,7 +1641,7 @@ winreg_EnableReflectionKey(PyObject *module, PyObject *arg)
PyObject *return_value = NULL;
HKEY key;
- if (!clinic_HKEY_converter(arg, &key)) {
+ if (!clinic_HKEY_converter(_PyModule_GetState(module), arg, &key)) {
goto exit;
}
return_value = winreg_EnableReflectionKey_impl(module, key);
@@ -1677,7 +1677,7 @@ winreg_QueryReflectionKey(PyObject *module, PyObject *arg)
PyObject *return_value = NULL;
HKEY key;
- if (!clinic_HKEY_converter(arg, &key)) {
+ if (!clinic_HKEY_converter(_PyModule_GetState(module), arg, &key)) {
goto exit;
}
return_value = winreg_QueryReflectionKey_impl(module, key);
@@ -1795,4 +1795,4 @@ winreg_QueryReflectionKey(PyObject *module, PyObject *arg)
#ifndef WINREG_QUERYREFLECTIONKEY_METHODDEF
#define WINREG_QUERYREFLECTIONKEY_METHODDEF
#endif /* !defined(WINREG_QUERYREFLECTIONKEY_METHODDEF) */
-/*[clinic end generated code: output=715db416dc1321ee input=a9049054013a1b77]*/
+/*[clinic end generated code: output=15dc2e6c4d4e2ad5 input=a9049054013a1b77]*/
diff --git a/PC/layout/support/pip.py b/PC/layout/support/pip.py
index c54acb250a252e..0a6582acf348a3 100644
--- a/PC/layout/support/pip.py
+++ b/PC/layout/support/pip.py
@@ -67,7 +67,6 @@ def extract_pip_files(ns):
"--no-color",
"install",
"pip",
- "setuptools",
"--upgrade",
"--target",
str(dest),
diff --git a/PC/msvcrtmodule.c b/PC/msvcrtmodule.c
index de9a88946aff3e..090254befc934d 100644
--- a/PC/msvcrtmodule.c
+++ b/PC/msvcrtmodule.c
@@ -564,88 +564,81 @@ static struct PyMethodDef msvcrt_functions[] = {
{NULL, NULL}
};
-static void
-insertint(PyObject *d, char *name, int value)
-{
- PyObject *v = PyLong_FromLong((long) value);
- if (v == NULL) {
- /* Don't bother reporting this error */
- PyErr_Clear();
- }
- else {
- PyDict_SetItemString(d, name, v);
- Py_DECREF(v);
- }
-}
-
-static void
-insertptr(PyObject *d, char *name, void *value)
+static int
+insertptr(PyObject *mod, char *name, void *value)
{
PyObject *v = PyLong_FromVoidPtr(value);
if (v == NULL) {
- /* Don't bother reporting this error */
- PyErr_Clear();
- }
- else {
- PyDict_SetItemString(d, name, v);
- Py_DECREF(v);
+ return -1;
}
+ int rc = PyModule_AddObjectRef(mod, name, v);
+ Py_DECREF(v);
+ return rc;
}
+#define INSERTINT(MOD, NAME, VAL) do { \
+ if (PyModule_AddIntConstant(MOD, NAME, VAL) < 0) { \
+ return -1; \
+ } \
+} while (0)
+
+#define INSERTPTR(MOD, NAME, PTR) do { \
+ if (insertptr(MOD, NAME, PTR) < 0) { \
+ return -1; \
+ } \
+} while (0)
+
+#define INSERTSTR(MOD, NAME, CONST) do { \
+ if (PyModule_AddStringConstant(MOD, NAME, CONST) < 0) { \
+ return -1; \
+ } \
+} while (0)
+
static int
exec_module(PyObject* m)
{
- int st;
- PyObject *d = PyModule_GetDict(m); // Borrowed ref.
-
/* constants for the locking() function's mode argument */
- insertint(d, "LK_LOCK", _LK_LOCK);
- insertint(d, "LK_NBLCK", _LK_NBLCK);
- insertint(d, "LK_NBRLCK", _LK_NBRLCK);
- insertint(d, "LK_RLCK", _LK_RLCK);
- insertint(d, "LK_UNLCK", _LK_UNLCK);
+ INSERTINT(m, "LK_LOCK", _LK_LOCK);
+ INSERTINT(m, "LK_NBLCK", _LK_NBLCK);
+ INSERTINT(m, "LK_NBRLCK", _LK_NBRLCK);
+ INSERTINT(m, "LK_RLCK", _LK_RLCK);
+ INSERTINT(m, "LK_UNLCK", _LK_UNLCK);
#ifdef MS_WINDOWS_DESKTOP
- insertint(d, "SEM_FAILCRITICALERRORS", SEM_FAILCRITICALERRORS);
- insertint(d, "SEM_NOALIGNMENTFAULTEXCEPT", SEM_NOALIGNMENTFAULTEXCEPT);
- insertint(d, "SEM_NOGPFAULTERRORBOX", SEM_NOGPFAULTERRORBOX);
- insertint(d, "SEM_NOOPENFILEERRORBOX", SEM_NOOPENFILEERRORBOX);
+ INSERTINT(m, "SEM_FAILCRITICALERRORS", SEM_FAILCRITICALERRORS);
+ INSERTINT(m, "SEM_NOALIGNMENTFAULTEXCEPT", SEM_NOALIGNMENTFAULTEXCEPT);
+ INSERTINT(m, "SEM_NOGPFAULTERRORBOX", SEM_NOGPFAULTERRORBOX);
+ INSERTINT(m, "SEM_NOOPENFILEERRORBOX", SEM_NOOPENFILEERRORBOX);
#endif
#ifdef _DEBUG
- insertint(d, "CRT_WARN", _CRT_WARN);
- insertint(d, "CRT_ERROR", _CRT_ERROR);
- insertint(d, "CRT_ASSERT", _CRT_ASSERT);
- insertint(d, "CRTDBG_MODE_DEBUG", _CRTDBG_MODE_DEBUG);
- insertint(d, "CRTDBG_MODE_FILE", _CRTDBG_MODE_FILE);
- insertint(d, "CRTDBG_MODE_WNDW", _CRTDBG_MODE_WNDW);
- insertint(d, "CRTDBG_REPORT_MODE", _CRTDBG_REPORT_MODE);
- insertptr(d, "CRTDBG_FILE_STDERR", _CRTDBG_FILE_STDERR);
- insertptr(d, "CRTDBG_FILE_STDOUT", _CRTDBG_FILE_STDOUT);
- insertptr(d, "CRTDBG_REPORT_FILE", _CRTDBG_REPORT_FILE);
+ INSERTINT(m, "CRT_WARN", _CRT_WARN);
+ INSERTINT(m, "CRT_ERROR", _CRT_ERROR);
+ INSERTINT(m, "CRT_ASSERT", _CRT_ASSERT);
+ INSERTINT(m, "CRTDBG_MODE_DEBUG", _CRTDBG_MODE_DEBUG);
+ INSERTINT(m, "CRTDBG_MODE_FILE", _CRTDBG_MODE_FILE);
+ INSERTINT(m, "CRTDBG_MODE_WNDW", _CRTDBG_MODE_WNDW);
+ INSERTINT(m, "CRTDBG_REPORT_MODE", _CRTDBG_REPORT_MODE);
+ INSERTPTR(m, "CRTDBG_FILE_STDERR", _CRTDBG_FILE_STDERR);
+ INSERTPTR(m, "CRTDBG_FILE_STDOUT", _CRTDBG_FILE_STDOUT);
+ INSERTPTR(m, "CRTDBG_REPORT_FILE", _CRTDBG_REPORT_FILE);
#endif
+#undef INSERTINT
+#undef INSERTPTR
+
/* constants for the crt versions */
#ifdef _VC_ASSEMBLY_PUBLICKEYTOKEN
- st = PyModule_AddStringConstant(m, "VC_ASSEMBLY_PUBLICKEYTOKEN",
- _VC_ASSEMBLY_PUBLICKEYTOKEN);
- if (st < 0) {
- return -1;
- }
+ INSERTSTR(m, "VC_ASSEMBLY_PUBLICKEYTOKEN", _VC_ASSEMBLY_PUBLICKEYTOKEN);
#endif
#ifdef _CRT_ASSEMBLY_VERSION
- st = PyModule_AddStringConstant(m, "CRT_ASSEMBLY_VERSION",
- _CRT_ASSEMBLY_VERSION);
- if (st < 0) {
- return -1;
- }
+ INSERTSTR(m, "CRT_ASSEMBLY_VERSION", _CRT_ASSEMBLY_VERSION);
#endif
#ifdef __LIBRARIES_ASSEMBLY_NAME_PREFIX
- st = PyModule_AddStringConstant(m, "LIBRARIES_ASSEMBLY_NAME_PREFIX",
- __LIBRARIES_ASSEMBLY_NAME_PREFIX);
- if (st < 0) {
- return -1;
- }
+ INSERTSTR(m, "LIBRARIES_ASSEMBLY_NAME_PREFIX",
+ __LIBRARIES_ASSEMBLY_NAME_PREFIX);
#endif
+#undef INSERTSTR
+
/* constants for the 2010 crt versions */
#if defined(_VC_CRT_MAJOR_VERSION) && defined (_VC_CRT_MINOR_VERSION) && defined(_VC_CRT_BUILD_VERSION) && defined(_VC_CRT_RBUILD_VERSION)
PyObject *version = PyUnicode_FromFormat("%d.%d.%d.%d",
@@ -656,14 +649,12 @@ exec_module(PyObject* m)
if (version == NULL) {
return -1;
}
- st = PyModule_AddObjectRef(m, "CRT_ASSEMBLY_VERSION", version);
+ int st = PyModule_AddObjectRef(m, "CRT_ASSEMBLY_VERSION", version);
Py_DECREF(version);
if (st < 0) {
return -1;
}
#endif
- /* make compiler warning quiet if st is unused */
- (void)st;
return 0;
}
diff --git a/PC/winreg.c b/PC/winreg.c
index 15d32e7fcb99c9..4884125c3609ad 100644
--- a/PC/winreg.c
+++ b/PC/winreg.c
@@ -15,15 +15,22 @@
#define PY_SSIZE_T_CLEAN
#include "Python.h"
#include "pycore_object.h" // _PyObject_Init()
+#include "pycore_moduleobject.h"
#include "structmember.h" // PyMemberDef
#include
#if defined(MS_WINDOWS_DESKTOP) || defined(MS_WINDOWS_SYSTEM) || defined(MS_WINDOWS_GAMES)
-static BOOL PyHKEY_AsHKEY(PyObject *ob, HKEY *pRes, BOOL bNoneOK);
-static BOOL clinic_HKEY_converter(PyObject *ob, void *p);
-static PyObject *PyHKEY_FromHKEY(HKEY h);
-static BOOL PyHKEY_Close(PyObject *obHandle);
+typedef struct {
+ PyTypeObject *PyHKEY_Type;
+} winreg_state;
+
+/* Forward declares */
+
+static BOOL PyHKEY_AsHKEY(winreg_state *st, PyObject *ob, HKEY *pRes, BOOL bNoneOK);
+static BOOL clinic_HKEY_converter(winreg_state *st, PyObject *ob, void *p);
+static PyObject *PyHKEY_FromHKEY(winreg_state *st, HKEY h);
+static BOOL PyHKEY_Close(winreg_state *st, PyObject *obHandle);
static char errNotAHandle[] = "Object is not a handle";
@@ -35,8 +42,6 @@ static char errNotAHandle[] = "Object is not a handle";
#define PyErr_SetFromWindowsErrWithFunction(rc, fnname) \
PyErr_SetFromWindowsErr(rc)
-/* Forward declares */
-
/* Doc strings */
PyDoc_STRVAR(module_doc,
"This module provides access to the Windows registry API.\n"
@@ -114,7 +119,7 @@ typedef struct {
HKEY hkey;
} PyHKEYObject;
-#define PyHKEY_Check(op) Py_IS_TYPE(op, &PyHKEY_Type)
+#define PyHKEY_Check(st, op) Py_IS_TYPE(op, st->PyHKEY_Type)
static char *failMsg = "bad operand type";
@@ -147,7 +152,18 @@ PyHKEY_deallocFunc(PyObject *ob)
PyHKEYObject *obkey = (PyHKEYObject *)ob;
if (obkey->hkey)
RegCloseKey((HKEY)obkey->hkey);
- PyObject_Free(ob);
+
+ PyTypeObject *tp = Py_TYPE(ob);
+ PyObject_GC_UnTrack(ob);
+ PyObject_GC_Del(ob);
+ Py_DECREF(tp);
+}
+
+static int
+PyHKEY_traverseFunc(PyHKEYObject *self, visitproc visit, void *arg)
+{
+ Py_VISIT(Py_TYPE(self));
+ return 0;
}
static int
@@ -189,29 +205,6 @@ PyHKEY_hashFunc(PyObject *ob)
}
-static PyNumberMethods PyHKEY_NumberMethods =
-{
- PyHKEY_binaryFailureFunc, /* nb_add */
- PyHKEY_binaryFailureFunc, /* nb_subtract */
- PyHKEY_binaryFailureFunc, /* nb_multiply */
- PyHKEY_binaryFailureFunc, /* nb_remainder */
- PyHKEY_binaryFailureFunc, /* nb_divmod */
- PyHKEY_ternaryFailureFunc, /* nb_power */
- PyHKEY_unaryFailureFunc, /* nb_negative */
- PyHKEY_unaryFailureFunc, /* nb_positive */
- PyHKEY_unaryFailureFunc, /* nb_absolute */
- PyHKEY_boolFunc, /* nb_bool */
- PyHKEY_unaryFailureFunc, /* nb_invert */
- PyHKEY_binaryFailureFunc, /* nb_lshift */
- PyHKEY_binaryFailureFunc, /* nb_rshift */
- PyHKEY_binaryFailureFunc, /* nb_and */
- PyHKEY_binaryFailureFunc, /* nb_xor */
- PyHKEY_binaryFailureFunc, /* nb_or */
- PyHKEY_intFunc, /* nb_int */
- 0, /* nb_reserved */
- PyHKEY_unaryFailureFunc, /* nb_float */
-};
-
/*[clinic input]
module winreg
class winreg.HKEYType "PyHKEYObject *" "&PyHKEY_Type"
@@ -229,6 +222,14 @@ class HKEY_converter(CConverter):
type = 'HKEY'
converter = 'clinic_HKEY_converter'
+ def parse_arg(self, argname, displayname):
+ return """
+ if (!{converter}(_PyModule_GetState(module), {argname}, &{paramname})) {{{{
+ goto exit;
+ }}}}
+ """.format(argname=argname, paramname=self.parser_name,
+ converter=self.converter)
+
class HKEY_return_converter(CReturnConverter):
type = 'HKEY'
@@ -236,7 +237,7 @@ class HKEY_return_converter(CReturnConverter):
self.declare(data)
self.err_occurred_if_null_pointer("_return_value", data)
data.return_conversion.append(
- 'return_value = PyHKEY_FromHKEY(_return_value);\n')
+ 'return_value = PyHKEY_FromHKEY(_PyModule_GetState(module), _return_value);\n')
# HACK: this only works for PyHKEYObjects, nothing else.
# Should this be generalized and enshrined in clinic.py,
@@ -249,7 +250,7 @@ class self_return_converter(CReturnConverter):
data.return_conversion.append(
'return_value = (PyObject *)_return_value;\n')
[python start generated code]*/
-/*[python end generated code: output=da39a3ee5e6b4b0d input=2ebb7a4922d408d6]*/
+/*[python end generated code: output=da39a3ee5e6b4b0d input=17e645060c7b8ae1]*/
#include "clinic/winreg.c.h"
@@ -270,8 +271,11 @@ static PyObject *
winreg_HKEYType_Close_impl(PyHKEYObject *self)
/*[clinic end generated code: output=fced3a624fb0c344 input=6786ac75f6b89de6]*/
{
- if (!PyHKEY_Close((PyObject *)self))
+ winreg_state *st = _PyType_GetModuleState(Py_TYPE(self));
+ assert(st != NULL);
+ if (!PyHKEY_Close(st, (PyObject *)self)) {
return NULL;
+ }
Py_RETURN_NONE;
}
@@ -327,8 +331,11 @@ winreg_HKEYType___exit___impl(PyHKEYObject *self, PyObject *exc_type,
PyObject *exc_value, PyObject *traceback)
/*[clinic end generated code: output=923ebe7389e6a263 input=fb32489ee92403c7]*/
{
- if (!PyHKEY_Close((PyObject *)self))
+ winreg_state *st = _PyType_GetModuleState(Py_TYPE(self));
+ assert(st != NULL);
+ if (!PyHKEY_Close(st, (PyObject *)self)) {
return NULL;
+ }
Py_RETURN_NONE;
}
@@ -350,62 +357,71 @@ static PyMemberDef PyHKEY_memberlist[] = {
{NULL} /* Sentinel */
};
-/* The type itself */
-PyTypeObject PyHKEY_Type =
-{
- PyVarObject_HEAD_INIT(0, 0) /* fill in type at module init */
- "PyHKEY",
- sizeof(PyHKEYObject),
- 0,
- PyHKEY_deallocFunc, /* tp_dealloc */
- 0, /* tp_vectorcall_offset */
- 0, /* tp_getattr */
- 0, /* tp_setattr */
- 0, /* tp_as_async */
- 0, /* tp_repr */
- &PyHKEY_NumberMethods, /* tp_as_number */
- 0, /* tp_as_sequence */
- 0, /* tp_as_mapping */
- PyHKEY_hashFunc, /* tp_hash */
- 0, /* tp_call */
- PyHKEY_strFunc, /* tp_str */
- 0, /* tp_getattro */
- 0, /* tp_setattro */
- 0, /* tp_as_buffer */
- 0, /* tp_flags */
- PyHKEY_doc, /* tp_doc */
- 0, /*tp_traverse*/
- 0, /*tp_clear*/
- 0, /*tp_richcompare*/
- 0, /*tp_weaklistoffset*/
- 0, /*tp_iter*/
- 0, /*tp_iternext*/
- PyHKEY_methods, /*tp_methods*/
- PyHKEY_memberlist, /*tp_members*/
+static PyType_Slot pyhkey_type_slots[] = {
+ {Py_tp_dealloc, PyHKEY_deallocFunc},
+ {Py_tp_members, PyHKEY_memberlist},
+ {Py_tp_methods, PyHKEY_methods},
+ {Py_tp_doc, (char *)PyHKEY_doc},
+ {Py_tp_traverse, PyHKEY_traverseFunc},
+ {Py_tp_hash, PyHKEY_hashFunc},
+ {Py_tp_str, PyHKEY_strFunc},
+
+ // Number protocol
+ {Py_nb_add, PyHKEY_binaryFailureFunc},
+ {Py_nb_subtract, PyHKEY_binaryFailureFunc},
+ {Py_nb_multiply, PyHKEY_binaryFailureFunc},
+ {Py_nb_remainder, PyHKEY_binaryFailureFunc},
+ {Py_nb_divmod, PyHKEY_binaryFailureFunc},
+ {Py_nb_power, PyHKEY_ternaryFailureFunc},
+ {Py_nb_negative, PyHKEY_unaryFailureFunc},
+ {Py_nb_positive, PyHKEY_unaryFailureFunc},
+ {Py_nb_absolute, PyHKEY_unaryFailureFunc},
+ {Py_nb_bool, PyHKEY_boolFunc},
+ {Py_nb_invert, PyHKEY_unaryFailureFunc},
+ {Py_nb_lshift, PyHKEY_binaryFailureFunc},
+ {Py_nb_rshift, PyHKEY_binaryFailureFunc},
+ {Py_nb_and, PyHKEY_binaryFailureFunc},
+ {Py_nb_xor, PyHKEY_binaryFailureFunc},
+ {Py_nb_or, PyHKEY_binaryFailureFunc},
+ {Py_nb_int, PyHKEY_intFunc},
+ {Py_nb_float, PyHKEY_unaryFailureFunc},
+ {0, NULL},
+};
+
+static PyType_Spec pyhkey_type_spec = {
+ .name = "winreg.PyHKEY",
+ .basicsize = sizeof(PyHKEYObject),
+ .flags = (Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_IMMUTABLETYPE |
+ Py_TPFLAGS_DISALLOW_INSTANTIATION),
+ .slots = pyhkey_type_slots,
};
/************************************************************************
The public PyHKEY API (well, not public yet :-)
************************************************************************/
PyObject *
-PyHKEY_New(HKEY hInit)
+PyHKEY_New(PyObject *m, HKEY hInit)
{
- PyHKEYObject *key = PyObject_New(PyHKEYObject, &PyHKEY_Type);
- if (key)
- key->hkey = hInit;
+ winreg_state *st = _PyModule_GetState(m);
+ PyHKEYObject *key = PyObject_GC_New(PyHKEYObject, st->PyHKEY_Type);
+ if (key == NULL) {
+ return NULL;
+ }
+ key->hkey = hInit;
+ PyObject_GC_Track(key);
return (PyObject *)key;
}
BOOL
-PyHKEY_Close(PyObject *ob_handle)
+PyHKEY_Close(winreg_state *st, PyObject *ob_handle)
{
LONG rc;
HKEY key;
- if (!PyHKEY_AsHKEY(ob_handle, &key, TRUE)) {
+ if (!PyHKEY_AsHKEY(st, ob_handle, &key, TRUE)) {
return FALSE;
}
- if (PyHKEY_Check(ob_handle)) {
+ if (PyHKEY_Check(st, ob_handle)) {
((PyHKEYObject*)ob_handle)->hkey = 0;
}
rc = key ? RegCloseKey(key) : ERROR_SUCCESS;
@@ -415,7 +431,7 @@ PyHKEY_Close(PyObject *ob_handle)
}
BOOL
-PyHKEY_AsHKEY(PyObject *ob, HKEY *pHANDLE, BOOL bNoneOK)
+PyHKEY_AsHKEY(winreg_state *st, PyObject *ob, HKEY *pHANDLE, BOOL bNoneOK)
{
if (ob == Py_None) {
if (!bNoneOK) {
@@ -426,7 +442,7 @@ PyHKEY_AsHKEY(PyObject *ob, HKEY *pHANDLE, BOOL bNoneOK)
}
*pHANDLE = (HKEY)0;
}
- else if (PyHKEY_Check(ob)) {
+ else if (PyHKEY_Check(st ,ob)) {
PyHKEYObject *pH = (PyHKEYObject *)ob;
*pHANDLE = pH->hkey;
}
@@ -447,23 +463,24 @@ PyHKEY_AsHKEY(PyObject *ob, HKEY *pHANDLE, BOOL bNoneOK)
}
BOOL
-clinic_HKEY_converter(PyObject *ob, void *p)
+clinic_HKEY_converter(winreg_state *st, PyObject *ob, void *p)
{
- if (!PyHKEY_AsHKEY(ob, (HKEY *)p, FALSE))
+ if (!PyHKEY_AsHKEY(st, ob, (HKEY *)p, FALSE)) {
return FALSE;
+ }
return TRUE;
}
PyObject *
-PyHKEY_FromHKEY(HKEY h)
+PyHKEY_FromHKEY(winreg_state *st, HKEY h)
{
- /* Inline PyObject_New */
- PyHKEYObject *op = (PyHKEYObject *) PyObject_Malloc(sizeof(PyHKEYObject));
+ PyHKEYObject *op = (PyHKEYObject *)PyObject_GC_New(PyHKEYObject,
+ st->PyHKEY_Type);
if (op == NULL) {
- return PyErr_NoMemory();
+ return NULL;
}
- _PyObject_Init((PyObject*)op, &PyHKEY_Type);
op->hkey = h;
+ PyObject_GC_Track(op);
return (PyObject *)op;
}
@@ -472,11 +489,11 @@ PyHKEY_FromHKEY(HKEY h)
The module methods
************************************************************************/
BOOL
-PyWinObject_CloseHKEY(PyObject *obHandle)
+PyWinObject_CloseHKEY(winreg_state *st, PyObject *obHandle)
{
BOOL ok;
- if (PyHKEY_Check(obHandle)) {
- ok = PyHKEY_Close(obHandle);
+ if (PyHKEY_Check(st, obHandle)) {
+ ok = PyHKEY_Close(st, obHandle);
}
#if SIZEOF_LONG >= SIZEOF_HKEY
else if (PyLong_Check(obHandle)) {
@@ -826,8 +843,9 @@ static PyObject *
winreg_CloseKey(PyObject *module, PyObject *hkey)
/*[clinic end generated code: output=a4fa537019a80d15 input=5b1aac65ba5127ad]*/
{
- if (!PyHKEY_Close(hkey))
+ if (!PyHKEY_Close(_PyModule_GetState(module), hkey)) {
return NULL;
+ }
Py_RETURN_NONE;
}
@@ -2061,7 +2079,7 @@ static struct PyMethodDef winreg_methods[] = {
#define ADD_INT(VAL) do { \
if (PyModule_AddIntConstant(m, #VAL, VAL) < 0) { \
- goto error; \
+ return -1; \
} \
} while (0)
@@ -2079,38 +2097,25 @@ inskey(PyObject *mod, char *name, HKEY key)
#define ADD_KEY(VAL) do { \
if (inskey(m, #VAL, VAL) < 0) { \
- goto error; \
+ return -1; \
} \
} while (0)
-
-static struct PyModuleDef winregmodule = {
- PyModuleDef_HEAD_INIT,
- "winreg",
- module_doc,
- -1,
- winreg_methods,
- NULL,
- NULL,
- NULL,
- NULL
-};
-
-PyMODINIT_FUNC PyInit_winreg(void)
+static int
+exec_module(PyObject *m)
{
- PyObject *m = PyModule_Create(&winregmodule);
- if (m == NULL) {
- return NULL;
- }
- PyHKEY_Type.tp_doc = PyHKEY_doc;
- if (PyType_Ready(&PyHKEY_Type) < 0) {
- goto error;
+ winreg_state *st = (winreg_state *)_PyModule_GetState(m);
+
+ st->PyHKEY_Type = (PyTypeObject *)
+ PyType_FromModuleAndSpec(m, &pyhkey_type_spec, NULL);
+ if (st->PyHKEY_Type == NULL) {
+ return -1;
}
- if (PyModule_AddObjectRef(m, "HKEYType", (PyObject *)&PyHKEY_Type) < 0) {
- goto error;
+ if (PyModule_AddObjectRef(m, "HKEYType", (PyObject *)st->PyHKEY_Type) < 0) {
+ return -1;
}
if (PyModule_AddObjectRef(m, "error", PyExc_OSError) < 0) {
- goto error;
+ return -1;
}
/* Add the relevant constants */
@@ -2174,12 +2179,44 @@ PyMODINIT_FUNC PyInit_winreg(void)
ADD_INT(REG_RESOURCE_REQUIREMENTS_LIST);
#undef ADD_INT
+ return 0;
+}
- return m;
+static PyModuleDef_Slot winreg_slots[] = {
+ {Py_mod_exec, exec_module},
+ {0, NULL}
+};
-error:
- Py_DECREF(m);
- return NULL;
+static int
+winreg_traverse(PyObject *module, visitproc visit, void *arg)
+{
+ winreg_state *state = _PyModule_GetState(module);
+ Py_VISIT(state->PyHKEY_Type);
+ return 0;
+}
+
+static int
+winreg_clear(PyObject *module)
+{
+ winreg_state *state = _PyModule_GetState(module);
+ Py_CLEAR(state->PyHKEY_Type);
+ return 0;
+}
+
+static struct PyModuleDef winregmodule = {
+ .m_base = PyModuleDef_HEAD_INIT,
+ .m_name = "winreg",
+ .m_doc = module_doc,
+ .m_size = sizeof(winreg_state),
+ .m_methods = winreg_methods,
+ .m_slots = winreg_slots,
+ .m_traverse = winreg_traverse,
+ .m_clear = winreg_clear,
+};
+
+PyMODINIT_FUNC PyInit_winreg(void)
+{
+ return PyModuleDef_Init(&winregmodule);
}
#endif /* MS_WINDOWS_DESKTOP || MS_WINDOWS_SYSTEM || MS_WINDOWS_GAMES */
diff --git a/Parser/action_helpers.c b/Parser/action_helpers.c
index 46390966892d16..0aaaed64c4037c 100644
--- a/Parser/action_helpers.c
+++ b/Parser/action_helpers.c
@@ -1,6 +1,7 @@
#include
#include "pegen.h"
+#include "tokenizer.h"
#include "string_parser.h"
#include "pycore_runtime.h" // _PyRuntime
@@ -853,96 +854,6 @@ _PyPegen_seq_delete_starred_exprs(Parser *p, asdl_seq *kwargs)
return new_seq;
}
-expr_ty
-_PyPegen_concatenate_strings(Parser *p, asdl_seq *strings)
-{
- Py_ssize_t len = asdl_seq_LEN(strings);
- assert(len > 0);
-
- Token *first = asdl_seq_GET_UNTYPED(strings, 0);
- Token *last = asdl_seq_GET_UNTYPED(strings, len - 1);
-
- int bytesmode = 0;
- PyObject *bytes_str = NULL;
-
- FstringParser state;
- _PyPegen_FstringParser_Init(&state);
-
- for (Py_ssize_t i = 0; i < len; i++) {
- Token *t = asdl_seq_GET_UNTYPED(strings, i);
-
- int this_bytesmode;
- int this_rawmode;
- PyObject *s;
- const char *fstr;
- Py_ssize_t fstrlen = -1;
-
- if (_PyPegen_parsestr(p, &this_bytesmode, &this_rawmode, &s, &fstr, &fstrlen, t) != 0) {
- goto error;
- }
-
- /* Check that we are not mixing bytes with unicode. */
- if (i != 0 && bytesmode != this_bytesmode) {
- RAISE_SYNTAX_ERROR("cannot mix bytes and nonbytes literals");
- Py_XDECREF(s);
- goto error;
- }
- bytesmode = this_bytesmode;
-
- if (fstr != NULL) {
- assert(s == NULL && !bytesmode);
-
- int result = _PyPegen_FstringParser_ConcatFstring(p, &state, &fstr, fstr + fstrlen,
- this_rawmode, 0, first, t, last);
- if (result < 0) {
- goto error;
- }
- }
- else {
- /* String or byte string. */
- assert(s != NULL && fstr == NULL);
- assert(bytesmode ? PyBytes_CheckExact(s) : PyUnicode_CheckExact(s));
-
- if (bytesmode) {
- if (i == 0) {
- bytes_str = s;
- }
- else {
- PyBytes_ConcatAndDel(&bytes_str, s);
- if (!bytes_str) {
- goto error;
- }
- }
- }
- else {
- /* This is a regular string. Concatenate it. */
- if (_PyPegen_FstringParser_ConcatAndDel(&state, s) < 0) {
- goto error;
- }
- }
- }
- }
-
- if (bytesmode) {
- if (_PyArena_AddPyObject(p->arena, bytes_str) < 0) {
- goto error;
- }
- return _PyAST_Constant(bytes_str, NULL, first->lineno,
- first->col_offset, last->end_lineno,
- last->end_col_offset, p->arena);
- }
-
- return _PyPegen_FstringParser_Finish(p, &state, first, last);
-
-error:
- Py_XDECREF(bytes_str);
- _PyPegen_FstringParser_Dealloc(&state);
- if (PyErr_Occurred()) {
- _Pypegen_raise_decode_error(p);
- }
- return NULL;
-}
-
expr_ty
_PyPegen_ensure_imaginary(Parser *p, expr_ty exp)
{
@@ -1054,6 +965,44 @@ _PyPegen_check_legacy_stmt(Parser *p, expr_ty name) {
return 0;
}
+static ResultTokenWithMetadata *
+result_token_with_metadata(Parser *p, void *result, PyObject *metadata)
+{
+ ResultTokenWithMetadata *res = _PyArena_Malloc(p->arena, sizeof(ResultTokenWithMetadata));
+ if (res == NULL) {
+ return NULL;
+ }
+ res->metadata = metadata;
+ res->result = result;
+ return res;
+}
+
+ResultTokenWithMetadata *
+_PyPegen_check_fstring_conversion(Parser *p, Token* conv_token, expr_ty conv)
+{
+ if (conv_token->lineno != conv->lineno || conv_token->end_col_offset != conv->col_offset) {
+ return RAISE_SYNTAX_ERROR_KNOWN_RANGE(
+ conv_token, conv,
+ "f-string: conversion type must come right after the exclamanation mark"
+ );
+ }
+ return result_token_with_metadata(p, conv, conv_token->metadata);
+}
+
+ResultTokenWithMetadata *
+_PyPegen_setup_full_format_spec(Parser *p, Token *colon, asdl_expr_seq *spec, int lineno, int col_offset,
+ int end_lineno, int end_col_offset, PyArena *arena)
+{
+ if (!spec) {
+ return NULL;
+ }
+ expr_ty res = _PyAST_JoinedStr(spec, lineno, col_offset, end_lineno, end_col_offset, p->arena);
+ if (!res) {
+ return NULL;
+ }
+ return result_token_with_metadata(p, res, colon->metadata);
+}
+
const char *
_PyPegen_get_expr_name(expr_ty e)
{
@@ -1271,3 +1220,423 @@ _PyPegen_nonparen_genexp_in_call(Parser *p, expr_ty args, asdl_comprehension_seq
"Generator expression must be parenthesized"
);
}
+
+// Fstring stuff
+
+static expr_ty
+_PyPegen_decode_fstring_part(Parser* p, int is_raw, expr_ty constant) {
+ assert(PyUnicode_CheckExact(constant->v.Constant.value));
+
+ const char* bstr = PyUnicode_AsUTF8(constant->v.Constant.value);
+ if (bstr == NULL) {
+ return NULL;
+ }
+
+ size_t len;
+ if (strcmp(bstr, "{{") == 0 || strcmp(bstr, "}}") == 0) {
+ len = 1;
+ } else {
+ len = strlen(bstr);
+ }
+
+ is_raw = is_raw || strchr(bstr, '\\') == NULL;
+ PyObject *str = _PyPegen_decode_string(p, is_raw, bstr, len, NULL);
+ if (str == NULL) {
+ _Pypegen_raise_decode_error(p);
+ return NULL;
+ }
+ if (_PyArena_AddPyObject(p->arena, str) < 0) {
+ Py_DECREF(str);
+ return NULL;
+ }
+ return _PyAST_Constant(str, NULL, constant->lineno, constant->col_offset,
+ constant->end_lineno, constant->end_col_offset,
+ p->arena);
+}
+
+static asdl_expr_seq *
+unpack_top_level_joined_strs(Parser *p, asdl_expr_seq *raw_expressions)
+{
+ /* The parser might put multiple f-string values into an individual
+ * JoinedStr node at the top level due to stuff like f-string debugging
+ * expressions. This function flattens those and promotes them to the
+ * upper level. Only simplifies AST, but the compiler already takes care
+ * of the regular output, so this is not necessary if you are not going
+ * to expose the output AST to Python level. */
+
+ Py_ssize_t i, req_size, raw_size;
+
+ req_size = raw_size = asdl_seq_LEN(raw_expressions);
+ expr_ty expr;
+ for (i = 0; i < raw_size; i++) {
+ expr = asdl_seq_GET(raw_expressions, i);
+ if (expr->kind == JoinedStr_kind) {
+ req_size += asdl_seq_LEN(expr->v.JoinedStr.values) - 1;
+ }
+ }
+
+ asdl_expr_seq *expressions = _Py_asdl_expr_seq_new(req_size, p->arena);
+
+ Py_ssize_t raw_index, req_index = 0;
+ for (raw_index = 0; raw_index < raw_size; raw_index++) {
+ expr = asdl_seq_GET(raw_expressions, raw_index);
+ if (expr->kind == JoinedStr_kind) {
+ asdl_expr_seq *values = expr->v.JoinedStr.values;
+ for (Py_ssize_t n = 0; n < asdl_seq_LEN(values); n++) {
+ asdl_seq_SET(expressions, req_index, asdl_seq_GET(values, n));
+ req_index++;
+ }
+ } else {
+ asdl_seq_SET(expressions, req_index, expr);
+ req_index++;
+ }
+ }
+ return expressions;
+}
+
+expr_ty
+_PyPegen_joined_str(Parser *p, Token* a, asdl_expr_seq* raw_expressions, Token*b) {
+ asdl_expr_seq *expr = unpack_top_level_joined_strs(p, raw_expressions);
+ Py_ssize_t n_items = asdl_seq_LEN(expr);
+
+ const char* quote_str = PyBytes_AsString(a->bytes);
+ if (quote_str == NULL) {
+ return NULL;
+ }
+ int is_raw = strpbrk(quote_str, "rR") != NULL;
+
+ asdl_expr_seq *seq = _Py_asdl_expr_seq_new(n_items, p->arena);
+ if (seq == NULL) {
+ return NULL;
+ }
+
+ Py_ssize_t index = 0;
+ for (Py_ssize_t i = 0; i < n_items; i++) {
+ expr_ty item = asdl_seq_GET(expr, i);
+ if (item->kind == Constant_kind) {
+ item = _PyPegen_decode_fstring_part(p, is_raw, item);
+ if (item == NULL) {
+ return NULL;
+ }
+
+ /* Tokenizer emits string parts even when the underlying string
+ might become an empty value (e.g. FSTRING_MIDDLE with the value \\n)
+ so we need to check for them and simplify it here. */
+ if (PyUnicode_CheckExact(item->v.Constant.value)
+ && PyUnicode_GET_LENGTH(item->v.Constant.value) == 0) {
+ continue;
+ }
+ }
+ asdl_seq_SET(seq, index++, item);
+ }
+
+ asdl_expr_seq *resized_exprs;
+ if (index != n_items) {
+ resized_exprs = _Py_asdl_expr_seq_new(index, p->arena);
+ if (resized_exprs == NULL) {
+ return NULL;
+ }
+ for (Py_ssize_t i = 0; i < index; i++) {
+ asdl_seq_SET(resized_exprs, i, asdl_seq_GET(seq, i));
+ }
+ }
+ else {
+ resized_exprs = seq;
+ }
+
+ return _PyAST_JoinedStr(resized_exprs, a->lineno, a->col_offset,
+ b->end_lineno, b->end_col_offset,
+ p->arena);
+}
+
+expr_ty _PyPegen_constant_from_token(Parser* p, Token* tok) {
+ char* bstr = PyBytes_AsString(tok->bytes);
+ if (bstr == NULL) {
+ return NULL;
+ }
+ PyObject* str = PyUnicode_FromString(bstr);
+ if (str == NULL) {
+ return NULL;
+ }
+ if (_PyArena_AddPyObject(p->arena, str) < 0) {
+ Py_DECREF(str);
+ return NULL;
+ }
+ return _PyAST_Constant(str, NULL, tok->lineno, tok->col_offset,
+ tok->end_lineno, tok->end_col_offset,
+ p->arena);
+}
+
+expr_ty _PyPegen_constant_from_string(Parser* p, Token* tok) {
+ char* the_str = PyBytes_AsString(tok->bytes);
+ if (the_str == NULL) {
+ return NULL;
+ }
+ PyObject *s = _PyPegen_parse_string(p, tok);
+ if (s == NULL) {
+ _Pypegen_raise_decode_error(p);
+ return NULL;
+ }
+ if (_PyArena_AddPyObject(p->arena, s) < 0) {
+ Py_DECREF(s);
+ return NULL;
+ }
+ PyObject *kind = NULL;
+ if (the_str && the_str[0] == 'u') {
+ kind = _PyPegen_new_identifier(p, "u");
+ if (kind == NULL) {
+ return NULL;
+ }
+ }
+ return _PyAST_Constant(s, kind, tok->lineno, tok->col_offset, tok->end_lineno, tok->end_col_offset, p->arena);
+}
+
+expr_ty _PyPegen_formatted_value(Parser *p, expr_ty expression, Token *debug, ResultTokenWithMetadata *conversion,
+ ResultTokenWithMetadata *format, Token *closing_brace, int lineno, int col_offset,
+ int end_lineno, int end_col_offset, PyArena *arena) {
+ int conversion_val = -1;
+ if (conversion != NULL) {
+ expr_ty conversion_expr = (expr_ty) conversion->result;
+ assert(conversion_expr->kind == Name_kind);
+ Py_UCS4 first = PyUnicode_READ_CHAR(conversion_expr->v.Name.id, 0);
+
+ if (PyUnicode_GET_LENGTH(conversion_expr->v.Name.id) > 1 ||
+ !(first == 's' || first == 'r' || first == 'a')) {
+ RAISE_SYNTAX_ERROR_KNOWN_LOCATION(conversion_expr,
+ "f-string: invalid conversion character %R: expected 's', 'r', or 'a'",
+ conversion_expr->v.Name.id);
+ return NULL;
+ }
+
+ conversion_val = Py_SAFE_DOWNCAST(first, Py_UCS4, int);
+ }
+ else if (debug && !format) {
+ /* If no conversion is specified, use !r for debug expressions */
+ conversion_val = (int)'r';
+ }
+
+ expr_ty formatted_value = _PyAST_FormattedValue(
+ expression, conversion_val, format ? (expr_ty) format->result : NULL,
+ lineno, col_offset, end_lineno,
+ end_col_offset, arena
+ );
+
+ if (debug) {
+ /* Find the non whitespace token after the "=" */
+ int debug_end_line, debug_end_offset;
+ PyObject *debug_metadata;
+
+ if (conversion) {
+ debug_end_line = ((expr_ty) conversion->result)->lineno;
+ debug_end_offset = ((expr_ty) conversion->result)->col_offset;
+ debug_metadata = conversion->metadata;
+ }
+ else if (format) {
+ debug_end_line = ((expr_ty) format->result)->lineno;
+ debug_end_offset = ((expr_ty) format->result)->col_offset + 1;
+ debug_metadata = format->metadata;
+ }
+ else {
+ debug_end_line = end_lineno;
+ debug_end_offset = end_col_offset;
+ debug_metadata = closing_brace->metadata;
+ }
+
+ expr_ty debug_text = _PyAST_Constant(debug_metadata, NULL, lineno, col_offset + 1, debug_end_line,
+ debug_end_offset - 1, p->arena);
+ if (!debug_text) {
+ return NULL;
+ }
+
+ asdl_expr_seq *values = _Py_asdl_expr_seq_new(2, arena);
+ asdl_seq_SET(values, 0, debug_text);
+ asdl_seq_SET(values, 1, formatted_value);
+ return _PyAST_JoinedStr(values, lineno, col_offset, debug_end_line, debug_end_offset, p->arena);
+ }
+ else {
+ return formatted_value;
+ }
+}
+
+expr_ty
+_PyPegen_concatenate_strings(Parser *p, asdl_expr_seq *strings,
+ int lineno, int col_offset, int end_lineno,
+ int end_col_offset, PyArena *arena)
+{
+ Py_ssize_t len = asdl_seq_LEN(strings);
+ assert(len > 0);
+
+ int f_string_found = 0;
+ int unicode_string_found = 0;
+ int bytes_found = 0;
+
+ Py_ssize_t i = 0;
+ Py_ssize_t n_flattened_elements = 0;
+ for (i = 0; i < len; i++) {
+ expr_ty elem = asdl_seq_GET(strings, i);
+ if (elem->kind == Constant_kind) {
+ if (PyBytes_CheckExact(elem->v.Constant.value)) {
+ bytes_found = 1;
+ } else {
+ unicode_string_found = 1;
+ }
+ n_flattened_elements++;
+ } else {
+ n_flattened_elements += asdl_seq_LEN(elem->v.JoinedStr.values);
+ f_string_found = 1;
+ }
+ }
+
+ if ((unicode_string_found || f_string_found) && bytes_found) {
+ RAISE_SYNTAX_ERROR("cannot mix bytes and nonbytes literals");
+ return NULL;
+ }
+
+ if (bytes_found) {
+ PyObject* res = PyBytes_FromString("");
+
+ /* Bytes literals never get a kind, but just for consistency
+ since they are represented as Constant nodes, we'll mirror
+ the same behavior as unicode strings for determining the
+ kind. */
+ PyObject* kind = asdl_seq_GET(strings, 0)->v.Constant.kind;
+ for (i = 0; i < len; i++) {
+ expr_ty elem = asdl_seq_GET(strings, i);
+ PyBytes_Concat(&res, elem->v.Constant.value);
+ }
+ if (!res || _PyArena_AddPyObject(arena, res) < 0) {
+ Py_XDECREF(res);
+ return NULL;
+ }
+ return _PyAST_Constant(res, kind, lineno, col_offset, end_lineno, end_col_offset, p->arena);
+ }
+
+ if (!f_string_found && len == 1) {
+ return asdl_seq_GET(strings, 0);
+ }
+
+ asdl_expr_seq* flattened = _Py_asdl_expr_seq_new(n_flattened_elements, p->arena);
+ if (flattened == NULL) {
+ return NULL;
+ }
+
+ /* build flattened list */
+ Py_ssize_t current_pos = 0;
+ Py_ssize_t j = 0;
+ for (i = 0; i < len; i++) {
+ expr_ty elem = asdl_seq_GET(strings, i);
+ if (elem->kind == Constant_kind) {
+ asdl_seq_SET(flattened, current_pos++, elem);
+ } else {
+ for (j = 0; j < asdl_seq_LEN(elem->v.JoinedStr.values); j++) {
+ expr_ty subvalue = asdl_seq_GET(elem->v.JoinedStr.values, j);
+ if (subvalue == NULL) {
+ return NULL;
+ }
+ asdl_seq_SET(flattened, current_pos++, subvalue);
+ }
+ }
+ }
+
+ /* calculate folded element count */
+ Py_ssize_t n_elements = 0;
+ int prev_is_constant = 0;
+ for (i = 0; i < n_flattened_elements; i++) {
+ expr_ty elem = asdl_seq_GET(flattened, i);
+
+        /* The concatenation of a FormattedValue and an empty Constant should
+ lead to the FormattedValue itself. Thus, we will not take any empty
+ constants into account, just as in `_PyPegen_joined_str` */
+ if (f_string_found && elem->kind == Constant_kind &&
+ PyUnicode_CheckExact(elem->v.Constant.value) &&
+ PyUnicode_GET_LENGTH(elem->v.Constant.value) == 0)
+ continue;
+
+ if (!prev_is_constant || elem->kind != Constant_kind) {
+ n_elements++;
+ }
+ prev_is_constant = elem->kind == Constant_kind;
+ }
+
+ asdl_expr_seq* values = _Py_asdl_expr_seq_new(n_elements, p->arena);
+ if (values == NULL) {
+ return NULL;
+ }
+
+ /* build folded list */
+ _PyUnicodeWriter writer;
+ current_pos = 0;
+ for (i = 0; i < n_flattened_elements; i++) {
+ expr_ty elem = asdl_seq_GET(flattened, i);
+
+ /* if the current elem and the following are constants,
+ fold them and all consequent constants */
+ if (elem->kind == Constant_kind) {
+ if (i + 1 < n_flattened_elements &&
+ asdl_seq_GET(flattened, i + 1)->kind == Constant_kind) {
+ expr_ty first_elem = elem;
+
+ /* When a string is getting concatenated, the kind of the string
+ is determined by the first string in the concatenation
+ sequence.
+
+ u"abc" "def" -> u"abcdef"
+ "abc" u"abc" -> "abcabc" */
+ PyObject *kind = elem->v.Constant.kind;
+
+ _PyUnicodeWriter_Init(&writer);
+ expr_ty last_elem = elem;
+ for (j = i; j < n_flattened_elements; j++) {
+ expr_ty current_elem = asdl_seq_GET(flattened, j);
+ if (current_elem->kind == Constant_kind) {
+ if (_PyUnicodeWriter_WriteStr(
+ &writer, current_elem->v.Constant.value)) {
+ _PyUnicodeWriter_Dealloc(&writer);
+ return NULL;
+ }
+ last_elem = current_elem;
+ } else {
+ break;
+ }
+ }
+ i = j - 1;
+
+ PyObject *concat_str = _PyUnicodeWriter_Finish(&writer);
+ if (concat_str == NULL) {
+ _PyUnicodeWriter_Dealloc(&writer);
+ return NULL;
+ }
+ if (_PyArena_AddPyObject(p->arena, concat_str) < 0) {
+ Py_DECREF(concat_str);
+ return NULL;
+ }
+ elem = _PyAST_Constant(concat_str, kind, first_elem->lineno,
+ first_elem->col_offset,
+ last_elem->end_lineno,
+ last_elem->end_col_offset, p->arena);
+ if (elem == NULL) {
+ return NULL;
+ }
+ }
+
+            /* Drop all empty constant strings */
+ if (f_string_found &&
+ PyUnicode_CheckExact(elem->v.Constant.value) &&
+ PyUnicode_GET_LENGTH(elem->v.Constant.value) == 0) {
+ continue;
+ }
+ }
+
+ asdl_seq_SET(values, current_pos++, elem);
+ }
+
+ if (!f_string_found) {
+ assert(n_elements == 1);
+ expr_ty elem = asdl_seq_GET(values, 0);
+ assert(elem->kind == Constant_kind);
+ return elem;
+ }
+
+ assert(current_pos == n_elements);
+ return _PyAST_JoinedStr(values, lineno, col_offset, end_lineno, end_col_offset, p->arena);
+}
diff --git a/Parser/parser.c b/Parser/parser.c
index e0a88a9cc72c8b..6eb985a7d3e123 100644
--- a/Parser/parser.c
+++ b/Parser/parser.c
@@ -17,52 +17,52 @@ static KeywordToken *reserved_keywords[] = {
(KeywordToken[]) {{NULL, -1}},
(KeywordToken[]) {{NULL, -1}},
(KeywordToken[]) {
- {"if", 641},
- {"as", 639},
- {"in", 650},
+ {"if", 642},
+ {"as", 640},
+ {"in", 651},
{"or", 574},
{"is", 582},
{NULL, -1},
},
(KeywordToken[]) {
- {"del", 603},
- {"def", 651},
- {"for", 649},
- {"try", 623},
+ {"del", 604},
+ {"def", 652},
+ {"for", 650},
+ {"try", 624},
{"and", 575},
{"not", 581},
{NULL, -1},
},
(KeywordToken[]) {
- {"from", 607},
+ {"from", 608},
{"pass", 504},
- {"with", 614},
- {"elif", 643},
- {"else", 644},
- {"None", 601},
- {"True", 600},
+ {"with", 615},
+ {"elif", 644},
+ {"else", 645},
+ {"None", 602},
+ {"True", 601},
{NULL, -1},
},
(KeywordToken[]) {
{"raise", 522},
{"yield", 573},
{"break", 508},
- {"class", 653},
- {"while", 646},
- {"False", 602},
+ {"class", 654},
+ {"while", 647},
+ {"False", 603},
{NULL, -1},
},
(KeywordToken[]) {
{"return", 519},
- {"import", 606},
+ {"import", 607},
{"assert", 526},
{"global", 523},
- {"except", 636},
- {"lambda", 586},
+ {"except", 637},
+ {"lambda", 600},
{NULL, -1},
},
(KeywordToken[]) {
- {"finally", 632},
+ {"finally", 633},
{NULL, -1},
},
(KeywordToken[]) {
@@ -224,341 +224,370 @@ static char *soft_keywords[] = {
#define lambda_param_with_default_type 1144
#define lambda_param_maybe_default_type 1145
#define lambda_param_type 1146
-#define strings_type 1147
-#define list_type 1148
-#define tuple_type 1149
-#define set_type 1150
-#define dict_type 1151
-#define double_starred_kvpairs_type 1152
-#define double_starred_kvpair_type 1153
-#define kvpair_type 1154
-#define for_if_clauses_type 1155
-#define for_if_clause_type 1156
-#define listcomp_type 1157
-#define setcomp_type 1158
-#define genexp_type 1159
-#define dictcomp_type 1160
-#define arguments_type 1161
-#define args_type 1162
-#define kwargs_type 1163
-#define starred_expression_type 1164
-#define kwarg_or_starred_type 1165
-#define kwarg_or_double_starred_type 1166
-#define star_targets_type 1167
-#define star_targets_list_seq_type 1168
-#define star_targets_tuple_seq_type 1169
-#define star_target_type 1170
-#define target_with_star_atom_type 1171
-#define star_atom_type 1172
-#define single_target_type 1173
-#define single_subscript_attribute_target_type 1174
-#define t_primary_type 1175 // Left-recursive
-#define t_lookahead_type 1176
-#define del_targets_type 1177
-#define del_target_type 1178
-#define del_t_atom_type 1179
-#define type_expressions_type 1180
-#define func_type_comment_type 1181
-#define invalid_arguments_type 1182
-#define invalid_kwarg_type 1183
-#define expression_without_invalid_type 1184
-#define invalid_legacy_expression_type 1185
-#define invalid_expression_type 1186
-#define invalid_named_expression_type 1187
-#define invalid_assignment_type 1188
-#define invalid_ann_assign_target_type 1189
-#define invalid_del_stmt_type 1190
-#define invalid_block_type 1191
-#define invalid_comprehension_type 1192
-#define invalid_dict_comprehension_type 1193
-#define invalid_parameters_type 1194
-#define invalid_default_type 1195
-#define invalid_star_etc_type 1196
-#define invalid_kwds_type 1197
-#define invalid_parameters_helper_type 1198
-#define invalid_lambda_parameters_type 1199
-#define invalid_lambda_parameters_helper_type 1200
-#define invalid_lambda_star_etc_type 1201
-#define invalid_lambda_kwds_type 1202
-#define invalid_double_type_comments_type 1203
-#define invalid_with_item_type 1204
-#define invalid_for_target_type 1205
-#define invalid_group_type 1206
-#define invalid_import_type 1207
-#define invalid_import_from_targets_type 1208
-#define invalid_with_stmt_type 1209
-#define invalid_with_stmt_indent_type 1210
-#define invalid_try_stmt_type 1211
-#define invalid_except_stmt_type 1212
-#define invalid_finally_stmt_type 1213
-#define invalid_except_stmt_indent_type 1214
-#define invalid_except_star_stmt_indent_type 1215
-#define invalid_match_stmt_type 1216
-#define invalid_case_block_type 1217
-#define invalid_as_pattern_type 1218
-#define invalid_class_pattern_type 1219
-#define invalid_class_argument_pattern_type 1220
-#define invalid_if_stmt_type 1221
-#define invalid_elif_stmt_type 1222
-#define invalid_else_stmt_type 1223
-#define invalid_while_stmt_type 1224
-#define invalid_for_stmt_type 1225
-#define invalid_def_raw_type 1226
-#define invalid_class_def_raw_type 1227
-#define invalid_double_starred_kvpairs_type 1228
-#define invalid_kvpair_type 1229
-#define invalid_starred_expression_type 1230
-#define _loop0_1_type 1231
-#define _loop0_2_type 1232
-#define _loop1_3_type 1233
-#define _loop0_5_type 1234
-#define _gather_4_type 1235
-#define _tmp_6_type 1236
-#define _tmp_7_type 1237
-#define _tmp_8_type 1238
-#define _tmp_9_type 1239
-#define _tmp_10_type 1240
-#define _tmp_11_type 1241
-#define _tmp_12_type 1242
-#define _tmp_13_type 1243
-#define _loop1_14_type 1244
-#define _tmp_15_type 1245
-#define _tmp_16_type 1246
-#define _tmp_17_type 1247
-#define _loop0_19_type 1248
-#define _gather_18_type 1249
-#define _loop0_21_type 1250
-#define _gather_20_type 1251
-#define _tmp_22_type 1252
-#define _tmp_23_type 1253
-#define _loop0_24_type 1254
-#define _loop1_25_type 1255
-#define _loop0_27_type 1256
-#define _gather_26_type 1257
-#define _tmp_28_type 1258
-#define _loop0_30_type 1259
-#define _gather_29_type 1260
-#define _tmp_31_type 1261
-#define _loop1_32_type 1262
-#define _tmp_33_type 1263
-#define _tmp_34_type 1264
-#define _tmp_35_type 1265
-#define _loop0_36_type 1266
-#define _loop0_37_type 1267
-#define _loop0_38_type 1268
-#define _loop1_39_type 1269
-#define _loop0_40_type 1270
-#define _loop1_41_type 1271
-#define _loop1_42_type 1272
-#define _loop1_43_type 1273
-#define _loop0_44_type 1274
-#define _loop1_45_type 1275
-#define _loop0_46_type 1276
-#define _loop1_47_type 1277
-#define _loop0_48_type 1278
-#define _loop0_49_type 1279
-#define _loop1_50_type 1280
-#define _loop0_52_type 1281
-#define _gather_51_type 1282
-#define _loop0_54_type 1283
-#define _gather_53_type 1284
-#define _loop0_56_type 1285
-#define _gather_55_type 1286
-#define _loop0_58_type 1287
-#define _gather_57_type 1288
-#define _tmp_59_type 1289
-#define _loop1_60_type 1290
-#define _loop1_61_type 1291
-#define _tmp_62_type 1292
-#define _tmp_63_type 1293
-#define _loop1_64_type 1294
-#define _loop0_66_type 1295
-#define _gather_65_type 1296
-#define _tmp_67_type 1297
-#define _tmp_68_type 1298
-#define _tmp_69_type 1299
-#define _tmp_70_type 1300
-#define _loop0_72_type 1301
-#define _gather_71_type 1302
-#define _loop0_74_type 1303
-#define _gather_73_type 1304
-#define _tmp_75_type 1305
-#define _loop0_77_type 1306
-#define _gather_76_type 1307
-#define _loop0_79_type 1308
-#define _gather_78_type 1309
-#define _loop1_80_type 1310
-#define _loop1_81_type 1311
-#define _loop0_83_type 1312
-#define _gather_82_type 1313
-#define _loop1_84_type 1314
-#define _loop1_85_type 1315
-#define _loop1_86_type 1316
-#define _tmp_87_type 1317
-#define _loop0_89_type 1318
-#define _gather_88_type 1319
-#define _tmp_90_type 1320
-#define _tmp_91_type 1321
-#define _tmp_92_type 1322
-#define _tmp_93_type 1323
-#define _tmp_94_type 1324
-#define _loop0_95_type 1325
-#define _loop0_96_type 1326
-#define _loop0_97_type 1327
-#define _loop1_98_type 1328
-#define _loop0_99_type 1329
-#define _loop1_100_type 1330
-#define _loop1_101_type 1331
-#define _loop1_102_type 1332
-#define _loop0_103_type 1333
-#define _loop1_104_type 1334
-#define _loop0_105_type 1335
-#define _loop1_106_type 1336
-#define _loop0_107_type 1337
-#define _loop1_108_type 1338
-#define _loop1_109_type 1339
-#define _tmp_110_type 1340
-#define _loop0_112_type 1341
-#define _gather_111_type 1342
-#define _loop1_113_type 1343
-#define _loop0_114_type 1344
-#define _loop0_115_type 1345
-#define _tmp_116_type 1346
-#define _loop0_118_type 1347
-#define _gather_117_type 1348
-#define _tmp_119_type 1349
-#define _loop0_121_type 1350
-#define _gather_120_type 1351
-#define _loop0_123_type 1352
-#define _gather_122_type 1353
-#define _loop0_125_type 1354
-#define _gather_124_type 1355
-#define _loop0_127_type 1356
-#define _gather_126_type 1357
-#define _loop0_128_type 1358
-#define _loop0_130_type 1359
-#define _gather_129_type 1360
-#define _loop1_131_type 1361
-#define _tmp_132_type 1362
-#define _loop0_134_type 1363
-#define _gather_133_type 1364
-#define _loop0_136_type 1365
-#define _gather_135_type 1366
-#define _loop0_138_type 1367
-#define _gather_137_type 1368
-#define _loop0_140_type 1369
-#define _gather_139_type 1370
-#define _loop0_142_type 1371
-#define _gather_141_type 1372
-#define _tmp_143_type 1373
-#define _tmp_144_type 1374
-#define _tmp_145_type 1375
-#define _tmp_146_type 1376
-#define _tmp_147_type 1377
-#define _tmp_148_type 1378
-#define _tmp_149_type 1379
-#define _tmp_150_type 1380
-#define _tmp_151_type 1381
-#define _tmp_152_type 1382
-#define _tmp_153_type 1383
-#define _loop0_154_type 1384
-#define _loop0_155_type 1385
-#define _loop0_156_type 1386
-#define _tmp_157_type 1387
-#define _tmp_158_type 1388
-#define _tmp_159_type 1389
-#define _tmp_160_type 1390
-#define _tmp_161_type 1391
-#define _loop0_162_type 1392
-#define _loop0_163_type 1393
-#define _loop0_164_type 1394
-#define _loop1_165_type 1395
-#define _tmp_166_type 1396
-#define _loop0_167_type 1397
-#define _tmp_168_type 1398
-#define _loop0_169_type 1399
-#define _loop1_170_type 1400
-#define _tmp_171_type 1401
-#define _tmp_172_type 1402
-#define _tmp_173_type 1403
-#define _loop0_174_type 1404
-#define _tmp_175_type 1405
-#define _tmp_176_type 1406
-#define _loop1_177_type 1407
-#define _tmp_178_type 1408
-#define _loop0_179_type 1409
-#define _loop0_180_type 1410
-#define _loop0_181_type 1411
-#define _loop0_183_type 1412
-#define _gather_182_type 1413
-#define _tmp_184_type 1414
-#define _loop0_185_type 1415
-#define _tmp_186_type 1416
-#define _loop0_187_type 1417
-#define _loop1_188_type 1418
-#define _loop1_189_type 1419
-#define _tmp_190_type 1420
-#define _tmp_191_type 1421
-#define _loop0_192_type 1422
-#define _tmp_193_type 1423
-#define _tmp_194_type 1424
-#define _tmp_195_type 1425
-#define _loop0_197_type 1426
-#define _gather_196_type 1427
-#define _loop0_199_type 1428
-#define _gather_198_type 1429
-#define _loop0_201_type 1430
-#define _gather_200_type 1431
-#define _loop0_203_type 1432
-#define _gather_202_type 1433
-#define _tmp_204_type 1434
-#define _loop0_205_type 1435
-#define _loop1_206_type 1436
-#define _tmp_207_type 1437
-#define _loop0_208_type 1438
-#define _loop1_209_type 1439
-#define _tmp_210_type 1440
-#define _tmp_211_type 1441
-#define _tmp_212_type 1442
-#define _tmp_213_type 1443
-#define _tmp_214_type 1444
-#define _tmp_215_type 1445
-#define _tmp_216_type 1446
-#define _tmp_217_type 1447
-#define _tmp_218_type 1448
-#define _tmp_219_type 1449
-#define _loop0_221_type 1450
-#define _gather_220_type 1451
-#define _tmp_222_type 1452
-#define _tmp_223_type 1453
-#define _tmp_224_type 1454
-#define _tmp_225_type 1455
-#define _tmp_226_type 1456
-#define _tmp_227_type 1457
-#define _tmp_228_type 1458
-#define _tmp_229_type 1459
-#define _tmp_230_type 1460
-#define _tmp_231_type 1461
-#define _tmp_232_type 1462
-#define _tmp_233_type 1463
-#define _tmp_234_type 1464
-#define _tmp_235_type 1465
-#define _tmp_236_type 1466
-#define _tmp_237_type 1467
-#define _tmp_238_type 1468
-#define _tmp_239_type 1469
-#define _tmp_240_type 1470
-#define _tmp_241_type 1471
-#define _tmp_242_type 1472
-#define _tmp_243_type 1473
-#define _tmp_244_type 1474
-#define _tmp_245_type 1475
-#define _tmp_246_type 1476
-#define _tmp_247_type 1477
-#define _tmp_248_type 1478
-#define _tmp_249_type 1479
-#define _tmp_250_type 1480
-#define _tmp_251_type 1481
+#define fstring_middle_type 1147
+#define fstring_replacement_field_type 1148
+#define fstring_conversion_type 1149
+#define fstring_full_format_spec_type 1150
+#define fstring_format_spec_type 1151
+#define string_type 1152
+#define strings_type 1153
+#define list_type 1154
+#define tuple_type 1155
+#define set_type 1156
+#define dict_type 1157
+#define double_starred_kvpairs_type 1158
+#define double_starred_kvpair_type 1159
+#define kvpair_type 1160
+#define for_if_clauses_type 1161
+#define for_if_clause_type 1162
+#define listcomp_type 1163
+#define setcomp_type 1164
+#define genexp_type 1165
+#define dictcomp_type 1166
+#define arguments_type 1167
+#define args_type 1168
+#define kwargs_type 1169
+#define starred_expression_type 1170
+#define kwarg_or_starred_type 1171
+#define kwarg_or_double_starred_type 1172
+#define star_targets_type 1173
+#define star_targets_list_seq_type 1174
+#define star_targets_tuple_seq_type 1175
+#define star_target_type 1176
+#define target_with_star_atom_type 1177
+#define star_atom_type 1178
+#define single_target_type 1179
+#define single_subscript_attribute_target_type 1180
+#define t_primary_type 1181 // Left-recursive
+#define t_lookahead_type 1182
+#define del_targets_type 1183
+#define del_target_type 1184
+#define del_t_atom_type 1185
+#define type_expressions_type 1186
+#define func_type_comment_type 1187
+#define invalid_arguments_type 1188
+#define invalid_kwarg_type 1189
+#define expression_without_invalid_type 1190
+#define invalid_legacy_expression_type 1191
+#define invalid_expression_type 1192
+#define invalid_named_expression_type 1193
+#define invalid_assignment_type 1194
+#define invalid_ann_assign_target_type 1195
+#define invalid_del_stmt_type 1196
+#define invalid_block_type 1197
+#define invalid_comprehension_type 1198
+#define invalid_dict_comprehension_type 1199
+#define invalid_parameters_type 1200
+#define invalid_default_type 1201
+#define invalid_star_etc_type 1202
+#define invalid_kwds_type 1203
+#define invalid_parameters_helper_type 1204
+#define invalid_lambda_parameters_type 1205
+#define invalid_lambda_parameters_helper_type 1206
+#define invalid_lambda_star_etc_type 1207
+#define invalid_lambda_kwds_type 1208
+#define invalid_double_type_comments_type 1209
+#define invalid_with_item_type 1210
+#define invalid_for_target_type 1211
+#define invalid_group_type 1212
+#define invalid_import_type 1213
+#define invalid_import_from_targets_type 1214
+#define invalid_with_stmt_type 1215
+#define invalid_with_stmt_indent_type 1216
+#define invalid_try_stmt_type 1217
+#define invalid_except_stmt_type 1218
+#define invalid_finally_stmt_type 1219
+#define invalid_except_stmt_indent_type 1220
+#define invalid_except_star_stmt_indent_type 1221
+#define invalid_match_stmt_type 1222
+#define invalid_case_block_type 1223
+#define invalid_as_pattern_type 1224
+#define invalid_class_pattern_type 1225
+#define invalid_class_argument_pattern_type 1226
+#define invalid_if_stmt_type 1227
+#define invalid_elif_stmt_type 1228
+#define invalid_else_stmt_type 1229
+#define invalid_while_stmt_type 1230
+#define invalid_for_stmt_type 1231
+#define invalid_def_raw_type 1232
+#define invalid_class_def_raw_type 1233
+#define invalid_double_starred_kvpairs_type 1234
+#define invalid_kvpair_type 1235
+#define invalid_starred_expression_type 1236
+#define invalid_replacement_field_type 1237
+#define invalid_conversion_character_type 1238
+#define _loop0_1_type 1239
+#define _loop0_2_type 1240
+#define _loop0_3_type 1241
+#define _loop1_4_type 1242
+#define _loop0_6_type 1243
+#define _gather_5_type 1244
+#define _tmp_7_type 1245
+#define _tmp_8_type 1246
+#define _tmp_9_type 1247
+#define _tmp_10_type 1248
+#define _tmp_11_type 1249
+#define _tmp_12_type 1250
+#define _tmp_13_type 1251
+#define _tmp_14_type 1252
+#define _loop1_15_type 1253
+#define _tmp_16_type 1254
+#define _tmp_17_type 1255
+#define _tmp_18_type 1256
+#define _loop0_20_type 1257
+#define _gather_19_type 1258
+#define _loop0_22_type 1259
+#define _gather_21_type 1260
+#define _tmp_23_type 1261
+#define _tmp_24_type 1262
+#define _loop0_25_type 1263
+#define _loop1_26_type 1264
+#define _loop0_28_type 1265
+#define _gather_27_type 1266
+#define _tmp_29_type 1267
+#define _loop0_31_type 1268
+#define _gather_30_type 1269
+#define _tmp_32_type 1270
+#define _loop1_33_type 1271
+#define _tmp_34_type 1272
+#define _tmp_35_type 1273
+#define _tmp_36_type 1274
+#define _loop0_37_type 1275
+#define _loop0_38_type 1276
+#define _loop0_39_type 1277
+#define _loop1_40_type 1278
+#define _loop0_41_type 1279
+#define _loop1_42_type 1280
+#define _loop1_43_type 1281
+#define _loop1_44_type 1282
+#define _loop0_45_type 1283
+#define _loop1_46_type 1284
+#define _loop0_47_type 1285
+#define _loop1_48_type 1286
+#define _loop0_49_type 1287
+#define _loop0_50_type 1288
+#define _loop1_51_type 1289
+#define _loop0_53_type 1290
+#define _gather_52_type 1291
+#define _loop0_55_type 1292
+#define _gather_54_type 1293
+#define _loop0_57_type 1294
+#define _gather_56_type 1295
+#define _loop0_59_type 1296
+#define _gather_58_type 1297
+#define _tmp_60_type 1298
+#define _loop1_61_type 1299
+#define _loop1_62_type 1300
+#define _tmp_63_type 1301
+#define _tmp_64_type 1302
+#define _loop1_65_type 1303
+#define _loop0_67_type 1304
+#define _gather_66_type 1305
+#define _tmp_68_type 1306
+#define _tmp_69_type 1307
+#define _tmp_70_type 1308
+#define _tmp_71_type 1309
+#define _loop0_73_type 1310
+#define _gather_72_type 1311
+#define _loop0_75_type 1312
+#define _gather_74_type 1313
+#define _tmp_76_type 1314
+#define _loop0_78_type 1315
+#define _gather_77_type 1316
+#define _loop0_80_type 1317
+#define _gather_79_type 1318
+#define _loop1_81_type 1319
+#define _loop1_82_type 1320
+#define _loop0_84_type 1321
+#define _gather_83_type 1322
+#define _loop1_85_type 1323
+#define _loop1_86_type 1324
+#define _loop1_87_type 1325
+#define _tmp_88_type 1326
+#define _loop0_90_type 1327
+#define _gather_89_type 1328
+#define _tmp_91_type 1329
+#define _tmp_92_type 1330
+#define _tmp_93_type 1331
+#define _tmp_94_type 1332
+#define _tmp_95_type 1333
+#define _tmp_96_type 1334
+#define _loop0_97_type 1335
+#define _loop0_98_type 1336
+#define _loop0_99_type 1337
+#define _loop1_100_type 1338
+#define _loop0_101_type 1339
+#define _loop1_102_type 1340
+#define _loop1_103_type 1341
+#define _loop1_104_type 1342
+#define _loop0_105_type 1343
+#define _loop1_106_type 1344
+#define _loop0_107_type 1345
+#define _loop1_108_type 1346
+#define _loop0_109_type 1347
+#define _loop1_110_type 1348
+#define _tmp_111_type 1349
+#define _loop0_112_type 1350
+#define _loop1_113_type 1351
+#define _tmp_114_type 1352
+#define _loop0_116_type 1353
+#define _gather_115_type 1354
+#define _loop1_117_type 1355
+#define _loop0_118_type 1356
+#define _loop0_119_type 1357
+#define _tmp_120_type 1358
+#define _loop0_122_type 1359
+#define _gather_121_type 1360
+#define _tmp_123_type 1361
+#define _loop0_125_type 1362
+#define _gather_124_type 1363
+#define _loop0_127_type 1364
+#define _gather_126_type 1365
+#define _loop0_129_type 1366
+#define _gather_128_type 1367
+#define _loop0_131_type 1368
+#define _gather_130_type 1369
+#define _loop0_132_type 1370
+#define _loop0_134_type 1371
+#define _gather_133_type 1372
+#define _loop1_135_type 1373
+#define _tmp_136_type 1374
+#define _loop0_138_type 1375
+#define _gather_137_type 1376
+#define _loop0_140_type 1377
+#define _gather_139_type 1378
+#define _loop0_142_type 1379
+#define _gather_141_type 1380
+#define _loop0_144_type 1381
+#define _gather_143_type 1382
+#define _loop0_146_type 1383
+#define _gather_145_type 1384
+#define _tmp_147_type 1385
+#define _tmp_148_type 1386
+#define _tmp_149_type 1387
+#define _tmp_150_type 1388
+#define _tmp_151_type 1389
+#define _tmp_152_type 1390
+#define _tmp_153_type 1391
+#define _tmp_154_type 1392
+#define _tmp_155_type 1393
+#define _tmp_156_type 1394
+#define _tmp_157_type 1395
+#define _tmp_158_type 1396
+#define _loop0_159_type 1397
+#define _loop0_160_type 1398
+#define _loop0_161_type 1399
+#define _tmp_162_type 1400
+#define _tmp_163_type 1401
+#define _tmp_164_type 1402
+#define _tmp_165_type 1403
+#define _tmp_166_type 1404
+#define _loop0_167_type 1405
+#define _loop0_168_type 1406
+#define _loop0_169_type 1407
+#define _loop1_170_type 1408
+#define _tmp_171_type 1409
+#define _loop0_172_type 1410
+#define _tmp_173_type 1411
+#define _loop0_174_type 1412
+#define _loop1_175_type 1413
+#define _tmp_176_type 1414
+#define _tmp_177_type 1415
+#define _tmp_178_type 1416
+#define _loop0_179_type 1417
+#define _tmp_180_type 1418
+#define _tmp_181_type 1419
+#define _loop1_182_type 1420
+#define _tmp_183_type 1421
+#define _loop0_184_type 1422
+#define _loop0_185_type 1423
+#define _loop0_186_type 1424
+#define _loop0_188_type 1425
+#define _gather_187_type 1426
+#define _tmp_189_type 1427
+#define _loop0_190_type 1428
+#define _tmp_191_type 1429
+#define _loop0_192_type 1430
+#define _loop1_193_type 1431
+#define _loop1_194_type 1432
+#define _tmp_195_type 1433
+#define _tmp_196_type 1434
+#define _loop0_197_type 1435
+#define _tmp_198_type 1436
+#define _tmp_199_type 1437
+#define _tmp_200_type 1438
+#define _loop0_202_type 1439
+#define _gather_201_type 1440
+#define _loop0_204_type 1441
+#define _gather_203_type 1442
+#define _loop0_206_type 1443
+#define _gather_205_type 1444
+#define _loop0_208_type 1445
+#define _gather_207_type 1446
+#define _tmp_209_type 1447
+#define _loop0_210_type 1448
+#define _loop1_211_type 1449
+#define _tmp_212_type 1450
+#define _loop0_213_type 1451
+#define _loop1_214_type 1452
+#define _tmp_215_type 1453
+#define _tmp_216_type 1454
+#define _tmp_217_type 1455
+#define _tmp_218_type 1456
+#define _tmp_219_type 1457
+#define _tmp_220_type 1458
+#define _tmp_221_type 1459
+#define _tmp_222_type 1460
+#define _tmp_223_type 1461
+#define _tmp_224_type 1462
+#define _loop0_226_type 1463
+#define _gather_225_type 1464
+#define _tmp_227_type 1465
+#define _tmp_228_type 1466
+#define _tmp_229_type 1467
+#define _tmp_230_type 1468
+#define _tmp_231_type 1469
+#define _tmp_232_type 1470
+#define _tmp_233_type 1471
+#define _tmp_234_type 1472
+#define _tmp_235_type 1473
+#define _tmp_236_type 1474
+#define _tmp_237_type 1475
+#define _tmp_238_type 1476
+#define _tmp_239_type 1477
+#define _loop0_240_type 1478
+#define _tmp_241_type 1479
+#define _tmp_242_type 1480
+#define _tmp_243_type 1481
+#define _tmp_244_type 1482
+#define _tmp_245_type 1483
+#define _tmp_246_type 1484
+#define _tmp_247_type 1485
+#define _tmp_248_type 1486
+#define _tmp_249_type 1487
+#define _tmp_250_type 1488
+#define _tmp_251_type 1489
+#define _tmp_252_type 1490
+#define _tmp_253_type 1491
+#define _tmp_254_type 1492
+#define _tmp_255_type 1493
+#define _tmp_256_type 1494
+#define _tmp_257_type 1495
+#define _tmp_258_type 1496
+#define _tmp_259_type 1497
+#define _tmp_260_type 1498
+#define _tmp_261_type 1499
+#define _tmp_262_type 1500
+#define _tmp_263_type 1501
+#define _tmp_264_type 1502
+#define _tmp_265_type 1503
+#define _tmp_266_type 1504
+#define _tmp_267_type 1505
+#define _tmp_268_type 1506
+#define _tmp_269_type 1507
+#define _tmp_270_type 1508
+#define _tmp_271_type 1509
+#define _tmp_272_type 1510
static mod_ty file_rule(Parser *p);
static mod_ty interactive_rule(Parser *p);
@@ -707,6 +736,12 @@ static arg_ty lambda_param_no_default_rule(Parser *p);
static NameDefaultPair* lambda_param_with_default_rule(Parser *p);
static NameDefaultPair* lambda_param_maybe_default_rule(Parser *p);
static arg_ty lambda_param_rule(Parser *p);
+static expr_ty fstring_middle_rule(Parser *p);
+static expr_ty fstring_replacement_field_rule(Parser *p);
+static ResultTokenWithMetadata* fstring_conversion_rule(Parser *p);
+static ResultTokenWithMetadata* fstring_full_format_spec_rule(Parser *p);
+static expr_ty fstring_format_spec_rule(Parser *p);
+static expr_ty string_rule(Parser *p);
static expr_ty strings_rule(Parser *p);
static expr_ty list_rule(Parser *p);
static expr_ty tuple_rule(Parser *p);
@@ -791,12 +826,14 @@ static void *invalid_class_def_raw_rule(Parser *p);
static void *invalid_double_starred_kvpairs_rule(Parser *p);
static void *invalid_kvpair_rule(Parser *p);
static void *invalid_starred_expression_rule(Parser *p);
+static void *invalid_replacement_field_rule(Parser *p);
+static void *invalid_conversion_character_rule(Parser *p);
static asdl_seq *_loop0_1_rule(Parser *p);
static asdl_seq *_loop0_2_rule(Parser *p);
-static asdl_seq *_loop1_3_rule(Parser *p);
-static asdl_seq *_loop0_5_rule(Parser *p);
-static asdl_seq *_gather_4_rule(Parser *p);
-static void *_tmp_6_rule(Parser *p);
+static asdl_seq *_loop0_3_rule(Parser *p);
+static asdl_seq *_loop1_4_rule(Parser *p);
+static asdl_seq *_loop0_6_rule(Parser *p);
+static asdl_seq *_gather_5_rule(Parser *p);
static void *_tmp_7_rule(Parser *p);
static void *_tmp_8_rule(Parser *p);
static void *_tmp_9_rule(Parser *p);
@@ -804,139 +841,139 @@ static void *_tmp_10_rule(Parser *p);
static void *_tmp_11_rule(Parser *p);
static void *_tmp_12_rule(Parser *p);
static void *_tmp_13_rule(Parser *p);
-static asdl_seq *_loop1_14_rule(Parser *p);
-static void *_tmp_15_rule(Parser *p);
+static void *_tmp_14_rule(Parser *p);
+static asdl_seq *_loop1_15_rule(Parser *p);
static void *_tmp_16_rule(Parser *p);
static void *_tmp_17_rule(Parser *p);
-static asdl_seq *_loop0_19_rule(Parser *p);
-static asdl_seq *_gather_18_rule(Parser *p);
-static asdl_seq *_loop0_21_rule(Parser *p);
-static asdl_seq *_gather_20_rule(Parser *p);
-static void *_tmp_22_rule(Parser *p);
+static void *_tmp_18_rule(Parser *p);
+static asdl_seq *_loop0_20_rule(Parser *p);
+static asdl_seq *_gather_19_rule(Parser *p);
+static asdl_seq *_loop0_22_rule(Parser *p);
+static asdl_seq *_gather_21_rule(Parser *p);
static void *_tmp_23_rule(Parser *p);
-static asdl_seq *_loop0_24_rule(Parser *p);
-static asdl_seq *_loop1_25_rule(Parser *p);
-static asdl_seq *_loop0_27_rule(Parser *p);
-static asdl_seq *_gather_26_rule(Parser *p);
-static void *_tmp_28_rule(Parser *p);
-static asdl_seq *_loop0_30_rule(Parser *p);
-static asdl_seq *_gather_29_rule(Parser *p);
-static void *_tmp_31_rule(Parser *p);
-static asdl_seq *_loop1_32_rule(Parser *p);
-static void *_tmp_33_rule(Parser *p);
+static void *_tmp_24_rule(Parser *p);
+static asdl_seq *_loop0_25_rule(Parser *p);
+static asdl_seq *_loop1_26_rule(Parser *p);
+static asdl_seq *_loop0_28_rule(Parser *p);
+static asdl_seq *_gather_27_rule(Parser *p);
+static void *_tmp_29_rule(Parser *p);
+static asdl_seq *_loop0_31_rule(Parser *p);
+static asdl_seq *_gather_30_rule(Parser *p);
+static void *_tmp_32_rule(Parser *p);
+static asdl_seq *_loop1_33_rule(Parser *p);
static void *_tmp_34_rule(Parser *p);
static void *_tmp_35_rule(Parser *p);
-static asdl_seq *_loop0_36_rule(Parser *p);
+static void *_tmp_36_rule(Parser *p);
static asdl_seq *_loop0_37_rule(Parser *p);
static asdl_seq *_loop0_38_rule(Parser *p);
-static asdl_seq *_loop1_39_rule(Parser *p);
-static asdl_seq *_loop0_40_rule(Parser *p);
-static asdl_seq *_loop1_41_rule(Parser *p);
+static asdl_seq *_loop0_39_rule(Parser *p);
+static asdl_seq *_loop1_40_rule(Parser *p);
+static asdl_seq *_loop0_41_rule(Parser *p);
static asdl_seq *_loop1_42_rule(Parser *p);
static asdl_seq *_loop1_43_rule(Parser *p);
-static asdl_seq *_loop0_44_rule(Parser *p);
-static asdl_seq *_loop1_45_rule(Parser *p);
-static asdl_seq *_loop0_46_rule(Parser *p);
-static asdl_seq *_loop1_47_rule(Parser *p);
-static asdl_seq *_loop0_48_rule(Parser *p);
+static asdl_seq *_loop1_44_rule(Parser *p);
+static asdl_seq *_loop0_45_rule(Parser *p);
+static asdl_seq *_loop1_46_rule(Parser *p);
+static asdl_seq *_loop0_47_rule(Parser *p);
+static asdl_seq *_loop1_48_rule(Parser *p);
static asdl_seq *_loop0_49_rule(Parser *p);
-static asdl_seq *_loop1_50_rule(Parser *p);
-static asdl_seq *_loop0_52_rule(Parser *p);
-static asdl_seq *_gather_51_rule(Parser *p);
-static asdl_seq *_loop0_54_rule(Parser *p);
-static asdl_seq *_gather_53_rule(Parser *p);
-static asdl_seq *_loop0_56_rule(Parser *p);
-static asdl_seq *_gather_55_rule(Parser *p);
-static asdl_seq *_loop0_58_rule(Parser *p);
-static asdl_seq *_gather_57_rule(Parser *p);
-static void *_tmp_59_rule(Parser *p);
-static asdl_seq *_loop1_60_rule(Parser *p);
+static asdl_seq *_loop0_50_rule(Parser *p);
+static asdl_seq *_loop1_51_rule(Parser *p);
+static asdl_seq *_loop0_53_rule(Parser *p);
+static asdl_seq *_gather_52_rule(Parser *p);
+static asdl_seq *_loop0_55_rule(Parser *p);
+static asdl_seq *_gather_54_rule(Parser *p);
+static asdl_seq *_loop0_57_rule(Parser *p);
+static asdl_seq *_gather_56_rule(Parser *p);
+static asdl_seq *_loop0_59_rule(Parser *p);
+static asdl_seq *_gather_58_rule(Parser *p);
+static void *_tmp_60_rule(Parser *p);
static asdl_seq *_loop1_61_rule(Parser *p);
-static void *_tmp_62_rule(Parser *p);
+static asdl_seq *_loop1_62_rule(Parser *p);
static void *_tmp_63_rule(Parser *p);
-static asdl_seq *_loop1_64_rule(Parser *p);
-static asdl_seq *_loop0_66_rule(Parser *p);
-static asdl_seq *_gather_65_rule(Parser *p);
-static void *_tmp_67_rule(Parser *p);
+static void *_tmp_64_rule(Parser *p);
+static asdl_seq *_loop1_65_rule(Parser *p);
+static asdl_seq *_loop0_67_rule(Parser *p);
+static asdl_seq *_gather_66_rule(Parser *p);
static void *_tmp_68_rule(Parser *p);
static void *_tmp_69_rule(Parser *p);
static void *_tmp_70_rule(Parser *p);
-static asdl_seq *_loop0_72_rule(Parser *p);
-static asdl_seq *_gather_71_rule(Parser *p);
-static asdl_seq *_loop0_74_rule(Parser *p);
-static asdl_seq *_gather_73_rule(Parser *p);
-static void *_tmp_75_rule(Parser *p);
-static asdl_seq *_loop0_77_rule(Parser *p);
-static asdl_seq *_gather_76_rule(Parser *p);
-static asdl_seq *_loop0_79_rule(Parser *p);
-static asdl_seq *_gather_78_rule(Parser *p);
-static asdl_seq *_loop1_80_rule(Parser *p);
+static void *_tmp_71_rule(Parser *p);
+static asdl_seq *_loop0_73_rule(Parser *p);
+static asdl_seq *_gather_72_rule(Parser *p);
+static asdl_seq *_loop0_75_rule(Parser *p);
+static asdl_seq *_gather_74_rule(Parser *p);
+static void *_tmp_76_rule(Parser *p);
+static asdl_seq *_loop0_78_rule(Parser *p);
+static asdl_seq *_gather_77_rule(Parser *p);
+static asdl_seq *_loop0_80_rule(Parser *p);
+static asdl_seq *_gather_79_rule(Parser *p);
static asdl_seq *_loop1_81_rule(Parser *p);
-static asdl_seq *_loop0_83_rule(Parser *p);
-static asdl_seq *_gather_82_rule(Parser *p);
-static asdl_seq *_loop1_84_rule(Parser *p);
+static asdl_seq *_loop1_82_rule(Parser *p);
+static asdl_seq *_loop0_84_rule(Parser *p);
+static asdl_seq *_gather_83_rule(Parser *p);
static asdl_seq *_loop1_85_rule(Parser *p);
static asdl_seq *_loop1_86_rule(Parser *p);
-static void *_tmp_87_rule(Parser *p);
-static asdl_seq *_loop0_89_rule(Parser *p);
-static asdl_seq *_gather_88_rule(Parser *p);
-static void *_tmp_90_rule(Parser *p);
+static asdl_seq *_loop1_87_rule(Parser *p);
+static void *_tmp_88_rule(Parser *p);
+static asdl_seq *_loop0_90_rule(Parser *p);
+static asdl_seq *_gather_89_rule(Parser *p);
static void *_tmp_91_rule(Parser *p);
static void *_tmp_92_rule(Parser *p);
static void *_tmp_93_rule(Parser *p);
static void *_tmp_94_rule(Parser *p);
-static asdl_seq *_loop0_95_rule(Parser *p);
-static asdl_seq *_loop0_96_rule(Parser *p);
+static void *_tmp_95_rule(Parser *p);
+static void *_tmp_96_rule(Parser *p);
static asdl_seq *_loop0_97_rule(Parser *p);
-static asdl_seq *_loop1_98_rule(Parser *p);
+static asdl_seq *_loop0_98_rule(Parser *p);
static asdl_seq *_loop0_99_rule(Parser *p);
static asdl_seq *_loop1_100_rule(Parser *p);
-static asdl_seq *_loop1_101_rule(Parser *p);
+static asdl_seq *_loop0_101_rule(Parser *p);
static asdl_seq *_loop1_102_rule(Parser *p);
-static asdl_seq *_loop0_103_rule(Parser *p);
+static asdl_seq *_loop1_103_rule(Parser *p);
static asdl_seq *_loop1_104_rule(Parser *p);
static asdl_seq *_loop0_105_rule(Parser *p);
static asdl_seq *_loop1_106_rule(Parser *p);
static asdl_seq *_loop0_107_rule(Parser *p);
static asdl_seq *_loop1_108_rule(Parser *p);
-static asdl_seq *_loop1_109_rule(Parser *p);
-static void *_tmp_110_rule(Parser *p);
+static asdl_seq *_loop0_109_rule(Parser *p);
+static asdl_seq *_loop1_110_rule(Parser *p);
+static void *_tmp_111_rule(Parser *p);
static asdl_seq *_loop0_112_rule(Parser *p);
-static asdl_seq *_gather_111_rule(Parser *p);
static asdl_seq *_loop1_113_rule(Parser *p);
-static asdl_seq *_loop0_114_rule(Parser *p);
-static asdl_seq *_loop0_115_rule(Parser *p);
-static void *_tmp_116_rule(Parser *p);
+static void *_tmp_114_rule(Parser *p);
+static asdl_seq *_loop0_116_rule(Parser *p);
+static asdl_seq *_gather_115_rule(Parser *p);
+static asdl_seq *_loop1_117_rule(Parser *p);
static asdl_seq *_loop0_118_rule(Parser *p);
-static asdl_seq *_gather_117_rule(Parser *p);
-static void *_tmp_119_rule(Parser *p);
-static asdl_seq *_loop0_121_rule(Parser *p);
-static asdl_seq *_gather_120_rule(Parser *p);
-static asdl_seq *_loop0_123_rule(Parser *p);
-static asdl_seq *_gather_122_rule(Parser *p);
+static asdl_seq *_loop0_119_rule(Parser *p);
+static void *_tmp_120_rule(Parser *p);
+static asdl_seq *_loop0_122_rule(Parser *p);
+static asdl_seq *_gather_121_rule(Parser *p);
+static void *_tmp_123_rule(Parser *p);
static asdl_seq *_loop0_125_rule(Parser *p);
static asdl_seq *_gather_124_rule(Parser *p);
static asdl_seq *_loop0_127_rule(Parser *p);
static asdl_seq *_gather_126_rule(Parser *p);
-static asdl_seq *_loop0_128_rule(Parser *p);
-static asdl_seq *_loop0_130_rule(Parser *p);
-static asdl_seq *_gather_129_rule(Parser *p);
-static asdl_seq *_loop1_131_rule(Parser *p);
-static void *_tmp_132_rule(Parser *p);
+static asdl_seq *_loop0_129_rule(Parser *p);
+static asdl_seq *_gather_128_rule(Parser *p);
+static asdl_seq *_loop0_131_rule(Parser *p);
+static asdl_seq *_gather_130_rule(Parser *p);
+static asdl_seq *_loop0_132_rule(Parser *p);
static asdl_seq *_loop0_134_rule(Parser *p);
static asdl_seq *_gather_133_rule(Parser *p);
-static asdl_seq *_loop0_136_rule(Parser *p);
-static asdl_seq *_gather_135_rule(Parser *p);
+static asdl_seq *_loop1_135_rule(Parser *p);
+static void *_tmp_136_rule(Parser *p);
static asdl_seq *_loop0_138_rule(Parser *p);
static asdl_seq *_gather_137_rule(Parser *p);
static asdl_seq *_loop0_140_rule(Parser *p);
static asdl_seq *_gather_139_rule(Parser *p);
static asdl_seq *_loop0_142_rule(Parser *p);
static asdl_seq *_gather_141_rule(Parser *p);
-static void *_tmp_143_rule(Parser *p);
-static void *_tmp_144_rule(Parser *p);
-static void *_tmp_145_rule(Parser *p);
-static void *_tmp_146_rule(Parser *p);
+static asdl_seq *_loop0_144_rule(Parser *p);
+static asdl_seq *_gather_143_rule(Parser *p);
+static asdl_seq *_loop0_146_rule(Parser *p);
+static asdl_seq *_gather_145_rule(Parser *p);
static void *_tmp_147_rule(Parser *p);
static void *_tmp_148_rule(Parser *p);
static void *_tmp_149_rule(Parser *p);
@@ -944,79 +981,79 @@ static void *_tmp_150_rule(Parser *p);
static void *_tmp_151_rule(Parser *p);
static void *_tmp_152_rule(Parser *p);
static void *_tmp_153_rule(Parser *p);
-static asdl_seq *_loop0_154_rule(Parser *p);
-static asdl_seq *_loop0_155_rule(Parser *p);
-static asdl_seq *_loop0_156_rule(Parser *p);
+static void *_tmp_154_rule(Parser *p);
+static void *_tmp_155_rule(Parser *p);
+static void *_tmp_156_rule(Parser *p);
static void *_tmp_157_rule(Parser *p);
static void *_tmp_158_rule(Parser *p);
-static void *_tmp_159_rule(Parser *p);
-static void *_tmp_160_rule(Parser *p);
-static void *_tmp_161_rule(Parser *p);
-static asdl_seq *_loop0_162_rule(Parser *p);
-static asdl_seq *_loop0_163_rule(Parser *p);
-static asdl_seq *_loop0_164_rule(Parser *p);
-static asdl_seq *_loop1_165_rule(Parser *p);
+static asdl_seq *_loop0_159_rule(Parser *p);
+static asdl_seq *_loop0_160_rule(Parser *p);
+static asdl_seq *_loop0_161_rule(Parser *p);
+static void *_tmp_162_rule(Parser *p);
+static void *_tmp_163_rule(Parser *p);
+static void *_tmp_164_rule(Parser *p);
+static void *_tmp_165_rule(Parser *p);
static void *_tmp_166_rule(Parser *p);
static asdl_seq *_loop0_167_rule(Parser *p);
-static void *_tmp_168_rule(Parser *p);
+static asdl_seq *_loop0_168_rule(Parser *p);
static asdl_seq *_loop0_169_rule(Parser *p);
static asdl_seq *_loop1_170_rule(Parser *p);
static void *_tmp_171_rule(Parser *p);
-static void *_tmp_172_rule(Parser *p);
+static asdl_seq *_loop0_172_rule(Parser *p);
static void *_tmp_173_rule(Parser *p);
static asdl_seq *_loop0_174_rule(Parser *p);
-static void *_tmp_175_rule(Parser *p);
+static asdl_seq *_loop1_175_rule(Parser *p);
static void *_tmp_176_rule(Parser *p);
-static asdl_seq *_loop1_177_rule(Parser *p);
+static void *_tmp_177_rule(Parser *p);
static void *_tmp_178_rule(Parser *p);
static asdl_seq *_loop0_179_rule(Parser *p);
-static asdl_seq *_loop0_180_rule(Parser *p);
-static asdl_seq *_loop0_181_rule(Parser *p);
-static asdl_seq *_loop0_183_rule(Parser *p);
-static asdl_seq *_gather_182_rule(Parser *p);
-static void *_tmp_184_rule(Parser *p);
+static void *_tmp_180_rule(Parser *p);
+static void *_tmp_181_rule(Parser *p);
+static asdl_seq *_loop1_182_rule(Parser *p);
+static void *_tmp_183_rule(Parser *p);
+static asdl_seq *_loop0_184_rule(Parser *p);
static asdl_seq *_loop0_185_rule(Parser *p);
-static void *_tmp_186_rule(Parser *p);
-static asdl_seq *_loop0_187_rule(Parser *p);
-static asdl_seq *_loop1_188_rule(Parser *p);
-static asdl_seq *_loop1_189_rule(Parser *p);
-static void *_tmp_190_rule(Parser *p);
+static asdl_seq *_loop0_186_rule(Parser *p);
+static asdl_seq *_loop0_188_rule(Parser *p);
+static asdl_seq *_gather_187_rule(Parser *p);
+static void *_tmp_189_rule(Parser *p);
+static asdl_seq *_loop0_190_rule(Parser *p);
static void *_tmp_191_rule(Parser *p);
static asdl_seq *_loop0_192_rule(Parser *p);
-static void *_tmp_193_rule(Parser *p);
-static void *_tmp_194_rule(Parser *p);
+static asdl_seq *_loop1_193_rule(Parser *p);
+static asdl_seq *_loop1_194_rule(Parser *p);
static void *_tmp_195_rule(Parser *p);
+static void *_tmp_196_rule(Parser *p);
static asdl_seq *_loop0_197_rule(Parser *p);
-static asdl_seq *_gather_196_rule(Parser *p);
-static asdl_seq *_loop0_199_rule(Parser *p);
-static asdl_seq *_gather_198_rule(Parser *p);
-static asdl_seq *_loop0_201_rule(Parser *p);
-static asdl_seq *_gather_200_rule(Parser *p);
-static asdl_seq *_loop0_203_rule(Parser *p);
-static asdl_seq *_gather_202_rule(Parser *p);
-static void *_tmp_204_rule(Parser *p);
-static asdl_seq *_loop0_205_rule(Parser *p);
-static asdl_seq *_loop1_206_rule(Parser *p);
-static void *_tmp_207_rule(Parser *p);
+static void *_tmp_198_rule(Parser *p);
+static void *_tmp_199_rule(Parser *p);
+static void *_tmp_200_rule(Parser *p);
+static asdl_seq *_loop0_202_rule(Parser *p);
+static asdl_seq *_gather_201_rule(Parser *p);
+static asdl_seq *_loop0_204_rule(Parser *p);
+static asdl_seq *_gather_203_rule(Parser *p);
+static asdl_seq *_loop0_206_rule(Parser *p);
+static asdl_seq *_gather_205_rule(Parser *p);
static asdl_seq *_loop0_208_rule(Parser *p);
-static asdl_seq *_loop1_209_rule(Parser *p);
-static void *_tmp_210_rule(Parser *p);
-static void *_tmp_211_rule(Parser *p);
+static asdl_seq *_gather_207_rule(Parser *p);
+static void *_tmp_209_rule(Parser *p);
+static asdl_seq *_loop0_210_rule(Parser *p);
+static asdl_seq *_loop1_211_rule(Parser *p);
static void *_tmp_212_rule(Parser *p);
-static void *_tmp_213_rule(Parser *p);
-static void *_tmp_214_rule(Parser *p);
+static asdl_seq *_loop0_213_rule(Parser *p);
+static asdl_seq *_loop1_214_rule(Parser *p);
static void *_tmp_215_rule(Parser *p);
static void *_tmp_216_rule(Parser *p);
static void *_tmp_217_rule(Parser *p);
static void *_tmp_218_rule(Parser *p);
static void *_tmp_219_rule(Parser *p);
-static asdl_seq *_loop0_221_rule(Parser *p);
-static asdl_seq *_gather_220_rule(Parser *p);
+static void *_tmp_220_rule(Parser *p);
+static void *_tmp_221_rule(Parser *p);
static void *_tmp_222_rule(Parser *p);
static void *_tmp_223_rule(Parser *p);
static void *_tmp_224_rule(Parser *p);
-static void *_tmp_225_rule(Parser *p);
-static void *_tmp_226_rule(Parser *p);
+static asdl_seq *_loop0_226_rule(Parser *p);
+static asdl_seq *_gather_225_rule(Parser *p);
static void *_tmp_227_rule(Parser *p);
static void *_tmp_228_rule(Parser *p);
static void *_tmp_229_rule(Parser *p);
@@ -1030,7 +1067,7 @@ static void *_tmp_236_rule(Parser *p);
static void *_tmp_237_rule(Parser *p);
static void *_tmp_238_rule(Parser *p);
static void *_tmp_239_rule(Parser *p);
-static void *_tmp_240_rule(Parser *p);
+static asdl_seq *_loop0_240_rule(Parser *p);
static void *_tmp_241_rule(Parser *p);
static void *_tmp_242_rule(Parser *p);
static void *_tmp_243_rule(Parser *p);
@@ -1042,6 +1079,27 @@ static void *_tmp_248_rule(Parser *p);
static void *_tmp_249_rule(Parser *p);
static void *_tmp_250_rule(Parser *p);
static void *_tmp_251_rule(Parser *p);
+static void *_tmp_252_rule(Parser *p);
+static void *_tmp_253_rule(Parser *p);
+static void *_tmp_254_rule(Parser *p);
+static void *_tmp_255_rule(Parser *p);
+static void *_tmp_256_rule(Parser *p);
+static void *_tmp_257_rule(Parser *p);
+static void *_tmp_258_rule(Parser *p);
+static void *_tmp_259_rule(Parser *p);
+static void *_tmp_260_rule(Parser *p);
+static void *_tmp_261_rule(Parser *p);
+static void *_tmp_262_rule(Parser *p);
+static void *_tmp_263_rule(Parser *p);
+static void *_tmp_264_rule(Parser *p);
+static void *_tmp_265_rule(Parser *p);
+static void *_tmp_266_rule(Parser *p);
+static void *_tmp_267_rule(Parser *p);
+static void *_tmp_268_rule(Parser *p);
+static void *_tmp_269_rule(Parser *p);
+static void *_tmp_270_rule(Parser *p);
+static void *_tmp_271_rule(Parser *p);
+static void *_tmp_272_rule(Parser *p);
// file: statements? $
@@ -1247,7 +1305,7 @@ func_type_rule(Parser *p)
return _res;
}
-// fstring: star_expressions
+// fstring: FSTRING_START fstring_middle* FSTRING_END
static expr_ty
fstring_rule(Parser *p)
{
@@ -1261,24 +1319,35 @@ fstring_rule(Parser *p)
}
expr_ty _res = NULL;
int _mark = p->mark;
- { // star_expressions
+ { // FSTRING_START fstring_middle* FSTRING_END
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> fstring[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions"));
- expr_ty star_expressions_var;
+ D(fprintf(stderr, "%*c> fstring[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "FSTRING_START fstring_middle* FSTRING_END"));
+ Token * a;
+ asdl_seq * b;
+ Token * c;
if (
- (star_expressions_var = star_expressions_rule(p)) // star_expressions
+ (a = _PyPegen_expect_token(p, FSTRING_START)) // token='FSTRING_START'
+ &&
+ (b = _loop0_3_rule(p)) // fstring_middle*
+ &&
+ (c = _PyPegen_expect_token(p, FSTRING_END)) // token='FSTRING_END'
)
{
- D(fprintf(stderr, "%*c+ fstring[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions"));
- _res = star_expressions_var;
+ D(fprintf(stderr, "%*c+ fstring[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "FSTRING_START fstring_middle* FSTRING_END"));
+ _res = _PyPegen_joined_str ( p , a , ( asdl_expr_seq* ) b , c );
+ if (_res == NULL && PyErr_Occurred()) {
+ p->error_indicator = 1;
+ p->level--;
+ return NULL;
+ }
goto done;
}
p->mark = _mark;
D(fprintf(stderr, "%*c%s fstring[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_expressions"));
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "FSTRING_START fstring_middle* FSTRING_END"));
}
_res = NULL;
done:
@@ -1308,7 +1377,7 @@ statements_rule(Parser *p)
D(fprintf(stderr, "%*c> statements[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "statement+"));
asdl_seq * a;
if (
- (a = _loop1_3_rule(p)) // statement+
+ (a = _loop1_4_rule(p)) // statement+
)
{
D(fprintf(stderr, "%*c+ statements[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "statement+"));
@@ -1584,7 +1653,7 @@ simple_stmts_rule(Parser *p)
asdl_stmt_seq* a;
Token * newline_var;
if (
- (a = (asdl_stmt_seq*)_gather_4_rule(p)) // ';'.simple_stmt+
+ (a = (asdl_stmt_seq*)_gather_5_rule(p)) // ';'.simple_stmt+
&&
(_opt_var = _PyPegen_expect_token(p, 13), !p->error_indicator) // ';'?
&&
@@ -1731,7 +1800,7 @@ simple_stmt_rule(Parser *p)
D(fprintf(stderr, "%*c> simple_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&('import' | 'from') import_stmt"));
stmt_ty import_stmt_var;
if (
- _PyPegen_lookahead(1, _tmp_6_rule, p)
+ _PyPegen_lookahead(1, _tmp_7_rule, p)
&&
(import_stmt_var = import_stmt_rule(p)) // import_stmt
)
@@ -1806,7 +1875,7 @@ simple_stmt_rule(Parser *p)
D(fprintf(stderr, "%*c> simple_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'del' del_stmt"));
stmt_ty del_stmt_var;
if (
- _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 603) // token='del'
+ _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 604) // token='del'
&&
(del_stmt_var = del_stmt_rule(p)) // del_stmt
)
@@ -2006,7 +2075,7 @@ compound_stmt_rule(Parser *p)
D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&('def' | '@' | ASYNC) function_def"));
stmt_ty function_def_var;
if (
- _PyPegen_lookahead(1, _tmp_7_rule, p)
+ _PyPegen_lookahead(1, _tmp_8_rule, p)
&&
(function_def_var = function_def_rule(p)) // function_def
)
@@ -2027,7 +2096,7 @@ compound_stmt_rule(Parser *p)
D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'if' if_stmt"));
stmt_ty if_stmt_var;
if (
- _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 641) // token='if'
+ _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 642) // token='if'
&&
(if_stmt_var = if_stmt_rule(p)) // if_stmt
)
@@ -2048,7 +2117,7 @@ compound_stmt_rule(Parser *p)
D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&('class' | '@') class_def"));
stmt_ty class_def_var;
if (
- _PyPegen_lookahead(1, _tmp_8_rule, p)
+ _PyPegen_lookahead(1, _tmp_9_rule, p)
&&
(class_def_var = class_def_rule(p)) // class_def
)
@@ -2069,7 +2138,7 @@ compound_stmt_rule(Parser *p)
D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&('with' | ASYNC) with_stmt"));
stmt_ty with_stmt_var;
if (
- _PyPegen_lookahead(1, _tmp_9_rule, p)
+ _PyPegen_lookahead(1, _tmp_10_rule, p)
&&
(with_stmt_var = with_stmt_rule(p)) // with_stmt
)
@@ -2090,7 +2159,7 @@ compound_stmt_rule(Parser *p)
D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&('for' | ASYNC) for_stmt"));
stmt_ty for_stmt_var;
if (
- _PyPegen_lookahead(1, _tmp_10_rule, p)
+ _PyPegen_lookahead(1, _tmp_11_rule, p)
&&
(for_stmt_var = for_stmt_rule(p)) // for_stmt
)
@@ -2111,7 +2180,7 @@ compound_stmt_rule(Parser *p)
D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'try' try_stmt"));
stmt_ty try_stmt_var;
if (
- _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 623) // token='try'
+ _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 624) // token='try'
&&
(try_stmt_var = try_stmt_rule(p)) // try_stmt
)
@@ -2132,7 +2201,7 @@ compound_stmt_rule(Parser *p)
D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'while' while_stmt"));
stmt_ty while_stmt_var;
if (
- _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 646) // token='while'
+ _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 647) // token='while'
&&
(while_stmt_var = while_stmt_rule(p)) // while_stmt
)
@@ -2215,7 +2284,7 @@ assignment_rule(Parser *p)
&&
(b = expression_rule(p)) // expression
&&
- (c = _tmp_11_rule(p), !p->error_indicator) // ['=' annotated_rhs]
+ (c = _tmp_12_rule(p), !p->error_indicator) // ['=' annotated_rhs]
)
{
D(fprintf(stderr, "%*c+ assignment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME ':' expression ['=' annotated_rhs]"));
@@ -2251,13 +2320,13 @@ assignment_rule(Parser *p)
expr_ty b;
void *c;
if (
- (a = _tmp_12_rule(p)) // '(' single_target ')' | single_subscript_attribute_target
+ (a = _tmp_13_rule(p)) // '(' single_target ')' | single_subscript_attribute_target
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
(b = expression_rule(p)) // expression
&&
- (c = _tmp_13_rule(p), !p->error_indicator) // ['=' annotated_rhs]
+ (c = _tmp_14_rule(p), !p->error_indicator) // ['=' annotated_rhs]
)
{
D(fprintf(stderr, "%*c+ assignment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "('(' single_target ')' | single_subscript_attribute_target) ':' expression ['=' annotated_rhs]"));
@@ -2292,9 +2361,9 @@ assignment_rule(Parser *p)
void *b;
void *tc;
if (
- (a = (asdl_expr_seq*)_loop1_14_rule(p)) // ((star_targets '='))+
+ (a = (asdl_expr_seq*)_loop1_15_rule(p)) // ((star_targets '='))+
&&
- (b = _tmp_15_rule(p)) // yield_expr | star_expressions
+ (b = _tmp_16_rule(p)) // yield_expr | star_expressions
&&
_PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 22) // token='='
&&
@@ -2340,7 +2409,7 @@ assignment_rule(Parser *p)
&&
(_cut_var = 1)
&&
- (c = _tmp_16_rule(p)) // yield_expr | star_expressions
+ (c = _tmp_17_rule(p)) // yield_expr | star_expressions
)
{
D(fprintf(stderr, "%*c+ assignment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "single_target augassign ~ (yield_expr | star_expressions)"));
@@ -2899,7 +2968,7 @@ raise_stmt_rule(Parser *p)
&&
(a = expression_rule(p)) // expression
&&
- (b = _tmp_17_rule(p), !p->error_indicator) // ['from' expression]
+ (b = _tmp_18_rule(p), !p->error_indicator) // ['from' expression]
)
{
D(fprintf(stderr, "%*c+ raise_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'raise' expression ['from' expression]"));
@@ -2997,7 +3066,7 @@ global_stmt_rule(Parser *p)
if (
(_keyword = _PyPegen_expect_token(p, 523)) // token='global'
&&
- (a = (asdl_expr_seq*)_gather_18_rule(p)) // ','.NAME+
+ (a = (asdl_expr_seq*)_gather_19_rule(p)) // ','.NAME+
)
{
D(fprintf(stderr, "%*c+ global_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'global' ','.NAME+"));
@@ -3062,7 +3131,7 @@ nonlocal_stmt_rule(Parser *p)
if (
(_keyword = _PyPegen_expect_token(p, 524)) // token='nonlocal'
&&
- (a = (asdl_expr_seq*)_gather_20_rule(p)) // ','.NAME+
+ (a = (asdl_expr_seq*)_gather_21_rule(p)) // ','.NAME+
)
{
D(fprintf(stderr, "%*c+ nonlocal_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'nonlocal' ','.NAME+"));
@@ -3125,11 +3194,11 @@ del_stmt_rule(Parser *p)
Token * _keyword;
asdl_expr_seq* a;
if (
- (_keyword = _PyPegen_expect_token(p, 603)) // token='del'
+ (_keyword = _PyPegen_expect_token(p, 604)) // token='del'
&&
(a = del_targets_rule(p)) // del_targets
&&
- _PyPegen_lookahead(1, _tmp_22_rule, p)
+ _PyPegen_lookahead(1, _tmp_23_rule, p)
)
{
D(fprintf(stderr, "%*c+ del_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'del' del_targets &(';' | NEWLINE)"));
@@ -3278,7 +3347,7 @@ assert_stmt_rule(Parser *p)
&&
(a = expression_rule(p)) // expression
&&
- (b = _tmp_23_rule(p), !p->error_indicator) // [',' expression]
+ (b = _tmp_24_rule(p), !p->error_indicator) // [',' expression]
)
{
D(fprintf(stderr, "%*c+ assert_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'assert' expression [',' expression]"));
@@ -3418,7 +3487,7 @@ import_name_rule(Parser *p)
Token * _keyword;
asdl_alias_seq* a;
if (
- (_keyword = _PyPegen_expect_token(p, 606)) // token='import'
+ (_keyword = _PyPegen_expect_token(p, 607)) // token='import'
&&
(a = dotted_as_names_rule(p)) // dotted_as_names
)
@@ -3488,13 +3557,13 @@ import_from_rule(Parser *p)
expr_ty b;
asdl_alias_seq* c;
if (
- (_keyword = _PyPegen_expect_token(p, 607)) // token='from'
+ (_keyword = _PyPegen_expect_token(p, 608)) // token='from'
&&
- (a = _loop0_24_rule(p)) // (('.' | '...'))*
+ (a = _loop0_25_rule(p)) // (('.' | '...'))*
&&
(b = dotted_name_rule(p)) // dotted_name
&&
- (_keyword_1 = _PyPegen_expect_token(p, 606)) // token='import'
+ (_keyword_1 = _PyPegen_expect_token(p, 607)) // token='import'
&&
(c = import_from_targets_rule(p)) // import_from_targets
)
@@ -3532,11 +3601,11 @@ import_from_rule(Parser *p)
asdl_seq * a;
asdl_alias_seq* b;
if (
- (_keyword = _PyPegen_expect_token(p, 607)) // token='from'
+ (_keyword = _PyPegen_expect_token(p, 608)) // token='from'
&&
- (a = _loop1_25_rule(p)) // (('.' | '...'))+
+ (a = _loop1_26_rule(p)) // (('.' | '...'))+
&&
- (_keyword_1 = _PyPegen_expect_token(p, 606)) // token='import'
+ (_keyword_1 = _PyPegen_expect_token(p, 607)) // token='import'
&&
(b = import_from_targets_rule(p)) // import_from_targets
)
@@ -3731,7 +3800,7 @@ import_from_as_names_rule(Parser *p)
D(fprintf(stderr, "%*c> import_from_as_names[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.import_from_as_name+"));
asdl_alias_seq* a;
if (
- (a = (asdl_alias_seq*)_gather_26_rule(p)) // ','.import_from_as_name+
+ (a = (asdl_alias_seq*)_gather_27_rule(p)) // ','.import_from_as_name+
)
{
D(fprintf(stderr, "%*c+ import_from_as_names[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.import_from_as_name+"));
@@ -3787,7 +3856,7 @@ import_from_as_name_rule(Parser *p)
if (
(a = _PyPegen_name_token(p)) // NAME
&&
- (b = _tmp_28_rule(p), !p->error_indicator) // ['as' NAME]
+ (b = _tmp_29_rule(p), !p->error_indicator) // ['as' NAME]
)
{
D(fprintf(stderr, "%*c+ import_from_as_name[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME ['as' NAME]"));
@@ -3840,7 +3909,7 @@ dotted_as_names_rule(Parser *p)
D(fprintf(stderr, "%*c> dotted_as_names[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.dotted_as_name+"));
asdl_alias_seq* a;
if (
- (a = (asdl_alias_seq*)_gather_29_rule(p)) // ','.dotted_as_name+
+ (a = (asdl_alias_seq*)_gather_30_rule(p)) // ','.dotted_as_name+
)
{
D(fprintf(stderr, "%*c+ dotted_as_names[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.dotted_as_name+"));
@@ -3896,7 +3965,7 @@ dotted_as_name_rule(Parser *p)
if (
(a = dotted_name_rule(p)) // dotted_name
&&
- (b = _tmp_31_rule(p), !p->error_indicator) // ['as' NAME]
+ (b = _tmp_32_rule(p), !p->error_indicator) // ['as' NAME]
)
{
D(fprintf(stderr, "%*c+ dotted_as_name[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "dotted_name ['as' NAME]"));
@@ -4151,7 +4220,7 @@ decorators_rule(Parser *p)
D(fprintf(stderr, "%*c> decorators[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(('@' named_expression NEWLINE))+"));
asdl_expr_seq* a;
if (
- (a = (asdl_expr_seq*)_loop1_32_rule(p)) // (('@' named_expression NEWLINE))+
+ (a = (asdl_expr_seq*)_loop1_33_rule(p)) // (('@' named_expression NEWLINE))+
)
{
D(fprintf(stderr, "%*c+ decorators[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(('@' named_expression NEWLINE))+"));
@@ -4293,11 +4362,11 @@ class_def_raw_rule(Parser *p)
void *b;
asdl_stmt_seq* c;
if (
- (_keyword = _PyPegen_expect_token(p, 653)) // token='class'
+ (_keyword = _PyPegen_expect_token(p, 654)) // token='class'
&&
(a = _PyPegen_name_token(p)) // NAME
&&
- (b = _tmp_33_rule(p), !p->error_indicator) // ['(' arguments? ')']
+ (b = _tmp_34_rule(p), !p->error_indicator) // ['(' arguments? ')']
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
@@ -4459,7 +4528,7 @@ function_def_raw_rule(Parser *p)
void *params;
void *tc;
if (
- (_keyword = _PyPegen_expect_token(p, 651)) // token='def'
+ (_keyword = _PyPegen_expect_token(p, 652)) // token='def'
&&
(n = _PyPegen_name_token(p)) // NAME
&&
@@ -4469,7 +4538,7 @@ function_def_raw_rule(Parser *p)
&&
(_literal_1 = _PyPegen_expect_token(p, 8)) // token=')'
&&
- (a = _tmp_34_rule(p), !p->error_indicator) // ['->' expression]
+ (a = _tmp_35_rule(p), !p->error_indicator) // ['->' expression]
&&
(_literal_2 = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':'
&&
@@ -4519,7 +4588,7 @@ function_def_raw_rule(Parser *p)
if (
(async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC'
&&
- (_keyword = _PyPegen_expect_token(p, 651)) // token='def'
+ (_keyword = _PyPegen_expect_token(p, 652)) // token='def'
&&
(n = _PyPegen_name_token(p)) // NAME
&&
@@ -4529,7 +4598,7 @@ function_def_raw_rule(Parser *p)
&&
(_literal_1 = _PyPegen_expect_token(p, 8)) // token=')'
&&
- (a = _tmp_35_rule(p), !p->error_indicator) // ['->' expression]
+ (a = _tmp_36_rule(p), !p->error_indicator) // ['->' expression]
&&
(_literal_2 = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':'
&&
@@ -4656,9 +4725,9 @@ parameters_rule(Parser *p)
if (
(a = slash_no_default_rule(p)) // slash_no_default
&&
- (b = (asdl_arg_seq*)_loop0_36_rule(p)) // param_no_default*
+ (b = (asdl_arg_seq*)_loop0_37_rule(p)) // param_no_default*
&&
- (c = _loop0_37_rule(p)) // param_with_default*
+ (c = _loop0_38_rule(p)) // param_with_default*
&&
(d = star_etc_rule(p), !p->error_indicator) // star_etc?
)
@@ -4688,7 +4757,7 @@ parameters_rule(Parser *p)
if (
(a = slash_with_default_rule(p)) // slash_with_default
&&
- (b = _loop0_38_rule(p)) // param_with_default*
+ (b = _loop0_39_rule(p)) // param_with_default*
&&
(c = star_etc_rule(p), !p->error_indicator) // star_etc?
)
@@ -4716,9 +4785,9 @@ parameters_rule(Parser *p)
asdl_seq * b;
void *c;
if (
- (a = (asdl_arg_seq*)_loop1_39_rule(p)) // param_no_default+
+ (a = (asdl_arg_seq*)_loop1_40_rule(p)) // param_no_default+
&&
- (b = _loop0_40_rule(p)) // param_with_default*
+ (b = _loop0_41_rule(p)) // param_with_default*
&&
(c = star_etc_rule(p), !p->error_indicator) // star_etc?
)
@@ -4745,7 +4814,7 @@ parameters_rule(Parser *p)
asdl_seq * a;
void *b;
if (
- (a = _loop1_41_rule(p)) // param_with_default+
+ (a = _loop1_42_rule(p)) // param_with_default+
&&
(b = star_etc_rule(p), !p->error_indicator) // star_etc?
)
@@ -4817,7 +4886,7 @@ slash_no_default_rule(Parser *p)
Token * _literal_1;
asdl_arg_seq* a;
if (
- (a = (asdl_arg_seq*)_loop1_42_rule(p)) // param_no_default+
+ (a = (asdl_arg_seq*)_loop1_43_rule(p)) // param_no_default+
&&
(_literal = _PyPegen_expect_token(p, 17)) // token='/'
&&
@@ -4846,7 +4915,7 @@ slash_no_default_rule(Parser *p)
Token * _literal;
asdl_arg_seq* a;
if (
- (a = (asdl_arg_seq*)_loop1_43_rule(p)) // param_no_default+
+ (a = (asdl_arg_seq*)_loop1_44_rule(p)) // param_no_default+
&&
(_literal = _PyPegen_expect_token(p, 17)) // token='/'
&&
@@ -4899,9 +4968,9 @@ slash_with_default_rule(Parser *p)
asdl_seq * a;
asdl_seq * b;
if (
- (a = _loop0_44_rule(p)) // param_no_default*
+ (a = _loop0_45_rule(p)) // param_no_default*
&&
- (b = _loop1_45_rule(p)) // param_with_default+
+ (b = _loop1_46_rule(p)) // param_with_default+
&&
(_literal = _PyPegen_expect_token(p, 17)) // token='/'
&&
@@ -4931,9 +5000,9 @@ slash_with_default_rule(Parser *p)
asdl_seq * a;
asdl_seq * b;
if (
- (a = _loop0_46_rule(p)) // param_no_default*
+ (a = _loop0_47_rule(p)) // param_no_default*
&&
- (b = _loop1_47_rule(p)) // param_with_default+
+ (b = _loop1_48_rule(p)) // param_with_default+
&&
(_literal = _PyPegen_expect_token(p, 17)) // token='/'
&&
@@ -5012,7 +5081,7 @@ star_etc_rule(Parser *p)
&&
(a = param_no_default_rule(p)) // param_no_default
&&
- (b = _loop0_48_rule(p)) // param_maybe_default*
+ (b = _loop0_49_rule(p)) // param_maybe_default*
&&
(c = kwds_rule(p), !p->error_indicator) // kwds?
)
@@ -5045,7 +5114,7 @@ star_etc_rule(Parser *p)
&&
(a = param_no_default_star_annotation_rule(p)) // param_no_default_star_annotation
&&
- (b = _loop0_49_rule(p)) // param_maybe_default*
+ (b = _loop0_50_rule(p)) // param_maybe_default*
&&
(c = kwds_rule(p), !p->error_indicator) // kwds?
)
@@ -5078,7 +5147,7 @@ star_etc_rule(Parser *p)
&&
(_literal_1 = _PyPegen_expect_token(p, 12)) // token=','
&&
- (b = _loop1_50_rule(p)) // param_maybe_default+
+ (b = _loop1_51_rule(p)) // param_maybe_default+
&&
(c = kwds_rule(p), !p->error_indicator) // kwds?
)
@@ -5871,7 +5940,7 @@ if_stmt_rule(Parser *p)
asdl_stmt_seq* b;
stmt_ty c;
if (
- (_keyword = _PyPegen_expect_token(p, 641)) // token='if'
+ (_keyword = _PyPegen_expect_token(p, 642)) // token='if'
&&
(a = named_expression_rule(p)) // named_expression
&&
@@ -5916,7 +5985,7 @@ if_stmt_rule(Parser *p)
asdl_stmt_seq* b;
void *c;
if (
- (_keyword = _PyPegen_expect_token(p, 641)) // token='if'
+ (_keyword = _PyPegen_expect_token(p, 642)) // token='if'
&&
(a = named_expression_rule(p)) // named_expression
&&
@@ -6012,7 +6081,7 @@ elif_stmt_rule(Parser *p)
asdl_stmt_seq* b;
stmt_ty c;
if (
- (_keyword = _PyPegen_expect_token(p, 643)) // token='elif'
+ (_keyword = _PyPegen_expect_token(p, 644)) // token='elif'
&&
(a = named_expression_rule(p)) // named_expression
&&
@@ -6057,7 +6126,7 @@ elif_stmt_rule(Parser *p)
asdl_stmt_seq* b;
void *c;
if (
- (_keyword = _PyPegen_expect_token(p, 643)) // token='elif'
+ (_keyword = _PyPegen_expect_token(p, 644)) // token='elif'
&&
(a = named_expression_rule(p)) // named_expression
&&
@@ -6139,7 +6208,7 @@ else_block_rule(Parser *p)
Token * _literal;
asdl_stmt_seq* b;
if (
- (_keyword = _PyPegen_expect_token(p, 644)) // token='else'
+ (_keyword = _PyPegen_expect_token(p, 645)) // token='else'
&&
(_literal = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':'
&&
@@ -6219,7 +6288,7 @@ while_stmt_rule(Parser *p)
asdl_stmt_seq* b;
void *c;
if (
- (_keyword = _PyPegen_expect_token(p, 646)) // token='while'
+ (_keyword = _PyPegen_expect_token(p, 647)) // token='while'
&&
(a = named_expression_rule(p)) // named_expression
&&
@@ -6320,11 +6389,11 @@ for_stmt_rule(Parser *p)
expr_ty t;
void *tc;
if (
- (_keyword = _PyPegen_expect_token(p, 649)) // token='for'
+ (_keyword = _PyPegen_expect_token(p, 650)) // token='for'
&&
(t = star_targets_rule(p)) // star_targets
&&
- (_keyword_1 = _PyPegen_expect_token(p, 650)) // token='in'
+ (_keyword_1 = _PyPegen_expect_token(p, 651)) // token='in'
&&
(_cut_var = 1)
&&
@@ -6384,11 +6453,11 @@ for_stmt_rule(Parser *p)
if (
(async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC'
&&
- (_keyword = _PyPegen_expect_token(p, 649)) // token='for'
+ (_keyword = _PyPegen_expect_token(p, 650)) // token='for'
&&
(t = star_targets_rule(p)) // star_targets
&&
- (_keyword_1 = _PyPegen_expect_token(p, 650)) // token='in'
+ (_keyword_1 = _PyPegen_expect_token(p, 651)) // token='in'
&&
(_cut_var = 1)
&&
@@ -6517,11 +6586,11 @@ with_stmt_rule(Parser *p)
asdl_withitem_seq* a;
asdl_stmt_seq* b;
if (
- (_keyword = _PyPegen_expect_token(p, 614)) // token='with'
+ (_keyword = _PyPegen_expect_token(p, 615)) // token='with'
&&
(_literal = _PyPegen_expect_token(p, 7)) // token='('
&&
- (a = (asdl_withitem_seq*)_gather_51_rule(p)) // ','.with_item+
+ (a = (asdl_withitem_seq*)_gather_52_rule(p)) // ','.with_item+
&&
(_opt_var = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','?
&&
@@ -6566,9 +6635,9 @@ with_stmt_rule(Parser *p)
asdl_stmt_seq* b;
void *tc;
if (
- (_keyword = _PyPegen_expect_token(p, 614)) // token='with'
+ (_keyword = _PyPegen_expect_token(p, 615)) // token='with'
&&
- (a = (asdl_withitem_seq*)_gather_53_rule(p)) // ','.with_item+
+ (a = (asdl_withitem_seq*)_gather_54_rule(p)) // ','.with_item+
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
@@ -6617,11 +6686,11 @@ with_stmt_rule(Parser *p)
if (
(async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC'
&&
- (_keyword = _PyPegen_expect_token(p, 614)) // token='with'
+ (_keyword = _PyPegen_expect_token(p, 615)) // token='with'
&&
(_literal = _PyPegen_expect_token(p, 7)) // token='('
&&
- (a = (asdl_withitem_seq*)_gather_55_rule(p)) // ','.with_item+
+ (a = (asdl_withitem_seq*)_gather_56_rule(p)) // ','.with_item+
&&
(_opt_var = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','?
&&
@@ -6669,9 +6738,9 @@ with_stmt_rule(Parser *p)
if (
(async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC'
&&
- (_keyword = _PyPegen_expect_token(p, 614)) // token='with'
+ (_keyword = _PyPegen_expect_token(p, 615)) // token='with'
&&
- (a = (asdl_withitem_seq*)_gather_57_rule(p)) // ','.with_item+
+ (a = (asdl_withitem_seq*)_gather_58_rule(p)) // ','.with_item+
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
@@ -6756,11 +6825,11 @@ with_item_rule(Parser *p)
if (
(e = expression_rule(p)) // expression
&&
- (_keyword = _PyPegen_expect_token(p, 639)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 640)) // token='as'
&&
(t = star_target_rule(p)) // star_target
&&
- _PyPegen_lookahead(1, _tmp_59_rule, p)
+ _PyPegen_lookahead(1, _tmp_60_rule, p)
)
{
D(fprintf(stderr, "%*c+ with_item[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression 'as' star_target &(',' | ')' | ':')"));
@@ -6882,7 +6951,7 @@ try_stmt_rule(Parser *p)
asdl_stmt_seq* b;
asdl_stmt_seq* f;
if (
- (_keyword = _PyPegen_expect_token(p, 623)) // token='try'
+ (_keyword = _PyPegen_expect_token(p, 624)) // token='try'
&&
(_literal = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':'
&&
@@ -6926,13 +6995,13 @@ try_stmt_rule(Parser *p)
asdl_excepthandler_seq* ex;
void *f;
if (
- (_keyword = _PyPegen_expect_token(p, 623)) // token='try'
+ (_keyword = _PyPegen_expect_token(p, 624)) // token='try'
&&
(_literal = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':'
&&
(b = block_rule(p)) // block
&&
- (ex = (asdl_excepthandler_seq*)_loop1_60_rule(p)) // except_block+
+ (ex = (asdl_excepthandler_seq*)_loop1_61_rule(p)) // except_block+
&&
(el = else_block_rule(p), !p->error_indicator) // else_block?
&&
@@ -6974,13 +7043,13 @@ try_stmt_rule(Parser *p)
asdl_excepthandler_seq* ex;
void *f;
if (
- (_keyword = _PyPegen_expect_token(p, 623)) // token='try'
+ (_keyword = _PyPegen_expect_token(p, 624)) // token='try'
&&
(_literal = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':'
&&
(b = block_rule(p)) // block
&&
- (ex = (asdl_excepthandler_seq*)_loop1_61_rule(p)) // except_star_block+
+ (ex = (asdl_excepthandler_seq*)_loop1_62_rule(p)) // except_star_block+
&&
(el = else_block_rule(p), !p->error_indicator) // else_block?
&&
@@ -7073,11 +7142,11 @@ except_block_rule(Parser *p)
expr_ty e;
void *t;
if (
- (_keyword = _PyPegen_expect_token(p, 636)) // token='except'
+ (_keyword = _PyPegen_expect_token(p, 637)) // token='except'
&&
(e = expression_rule(p)) // expression
&&
- (t = _tmp_62_rule(p), !p->error_indicator) // ['as' NAME]
+ (t = _tmp_63_rule(p), !p->error_indicator) // ['as' NAME]
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
@@ -7116,7 +7185,7 @@ except_block_rule(Parser *p)
Token * _literal;
asdl_stmt_seq* b;
if (
- (_keyword = _PyPegen_expect_token(p, 636)) // token='except'
+ (_keyword = _PyPegen_expect_token(p, 637)) // token='except'
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
@@ -7228,13 +7297,13 @@ except_star_block_rule(Parser *p)
expr_ty e;
void *t;
if (
- (_keyword = _PyPegen_expect_token(p, 636)) // token='except'
+ (_keyword = _PyPegen_expect_token(p, 637)) // token='except'
&&
(_literal = _PyPegen_expect_token(p, 16)) // token='*'
&&
(e = expression_rule(p)) // expression
&&
- (t = _tmp_63_rule(p), !p->error_indicator) // ['as' NAME]
+ (t = _tmp_64_rule(p), !p->error_indicator) // ['as' NAME]
&&
(_literal_1 = _PyPegen_expect_token(p, 11)) // token=':'
&&
@@ -7331,7 +7400,7 @@ finally_block_rule(Parser *p)
Token * _literal;
asdl_stmt_seq* a;
if (
- (_keyword = _PyPegen_expect_token(p, 632)) // token='finally'
+ (_keyword = _PyPegen_expect_token(p, 633)) // token='finally'
&&
(_literal = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':'
&&
@@ -7406,7 +7475,7 @@ match_stmt_rule(Parser *p)
&&
(indent_var = _PyPegen_expect_token(p, INDENT)) // token='INDENT'
&&
- (cases = (asdl_match_case_seq*)_loop1_64_rule(p)) // case_block+
+ (cases = (asdl_match_case_seq*)_loop1_65_rule(p)) // case_block+
&&
(dedent_var = _PyPegen_expect_token(p, DEDENT)) // token='DEDENT'
)
@@ -7643,7 +7712,7 @@ guard_rule(Parser *p)
Token * _keyword;
expr_ty guard;
if (
- (_keyword = _PyPegen_expect_token(p, 641)) // token='if'
+ (_keyword = _PyPegen_expect_token(p, 642)) // token='if'
&&
(guard = named_expression_rule(p)) // named_expression
)
@@ -7841,7 +7910,7 @@ as_pattern_rule(Parser *p)
if (
(pattern = or_pattern_rule(p)) // or_pattern
&&
- (_keyword = _PyPegen_expect_token(p, 639)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 640)) // token='as'
&&
(target = pattern_capture_target_rule(p)) // pattern_capture_target
)
@@ -7924,7 +7993,7 @@ or_pattern_rule(Parser *p)
D(fprintf(stderr, "%*c> or_pattern[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'|'.closed_pattern+"));
asdl_pattern_seq* patterns;
if (
- (patterns = (asdl_pattern_seq*)_gather_65_rule(p)) // '|'.closed_pattern+
+ (patterns = (asdl_pattern_seq*)_gather_66_rule(p)) // '|'.closed_pattern+
)
{
D(fprintf(stderr, "%*c+ or_pattern[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'|'.closed_pattern+"));
@@ -8179,7 +8248,7 @@ literal_pattern_rule(Parser *p)
if (
(value = signed_number_rule(p)) // signed_number
&&
- _PyPegen_lookahead(0, _tmp_67_rule, p)
+ _PyPegen_lookahead(0, _tmp_68_rule, p)
)
{
D(fprintf(stderr, "%*c+ literal_pattern[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "signed_number !('+' | '-')"));
@@ -8278,7 +8347,7 @@ literal_pattern_rule(Parser *p)
D(fprintf(stderr, "%*c> literal_pattern[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'None'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 601)) // token='None'
+ (_keyword = _PyPegen_expect_token(p, 602)) // token='None'
)
{
D(fprintf(stderr, "%*c+ literal_pattern[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'None'"));
@@ -8311,7 +8380,7 @@ literal_pattern_rule(Parser *p)
D(fprintf(stderr, "%*c> literal_pattern[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'True'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 600)) // token='True'
+ (_keyword = _PyPegen_expect_token(p, 601)) // token='True'
)
{
D(fprintf(stderr, "%*c+ literal_pattern[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'True'"));
@@ -8344,7 +8413,7 @@ literal_pattern_rule(Parser *p)
D(fprintf(stderr, "%*c> literal_pattern[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'False'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 602)) // token='False'
+ (_keyword = _PyPegen_expect_token(p, 603)) // token='False'
)
{
D(fprintf(stderr, "%*c+ literal_pattern[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'False'"));
@@ -8414,7 +8483,7 @@ literal_expr_rule(Parser *p)
if (
(signed_number_var = signed_number_rule(p)) // signed_number
&&
- _PyPegen_lookahead(0, _tmp_68_rule, p)
+ _PyPegen_lookahead(0, _tmp_69_rule, p)
)
{
D(fprintf(stderr, "%*c+ literal_expr[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "signed_number !('+' | '-')"));
@@ -8471,7 +8540,7 @@ literal_expr_rule(Parser *p)
D(fprintf(stderr, "%*c> literal_expr[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'None'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 601)) // token='None'
+ (_keyword = _PyPegen_expect_token(p, 602)) // token='None'
)
{
D(fprintf(stderr, "%*c+ literal_expr[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'None'"));
@@ -8504,7 +8573,7 @@ literal_expr_rule(Parser *p)
D(fprintf(stderr, "%*c> literal_expr[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'True'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 600)) // token='True'
+ (_keyword = _PyPegen_expect_token(p, 601)) // token='True'
)
{
D(fprintf(stderr, "%*c+ literal_expr[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'True'"));
@@ -8537,7 +8606,7 @@ literal_expr_rule(Parser *p)
D(fprintf(stderr, "%*c> literal_expr[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'False'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 602)) // token='False'
+ (_keyword = _PyPegen_expect_token(p, 603)) // token='False'
)
{
D(fprintf(stderr, "%*c+ literal_expr[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'False'"));
@@ -9021,7 +9090,7 @@ pattern_capture_target_rule(Parser *p)
&&
(name = _PyPegen_name_token(p)) // NAME
&&
- _PyPegen_lookahead(0, _tmp_69_rule, p)
+ _PyPegen_lookahead(0, _tmp_70_rule, p)
)
{
D(fprintf(stderr, "%*c+ pattern_capture_target[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "!\"_\" NAME !('.' | '(' | '=')"));
@@ -9138,7 +9207,7 @@ value_pattern_rule(Parser *p)
if (
(attr = attr_rule(p)) // attr
&&
- _PyPegen_lookahead(0, _tmp_70_rule, p)
+ _PyPegen_lookahead(0, _tmp_71_rule, p)
)
{
D(fprintf(stderr, "%*c+ value_pattern[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "attr !('.' | '(' | '=')"));
@@ -9564,7 +9633,7 @@ maybe_sequence_pattern_rule(Parser *p)
UNUSED(_opt_var); // Silence compiler warnings
asdl_seq * patterns;
if (
- (patterns = _gather_71_rule(p)) // ','.maybe_star_pattern+
+ (patterns = _gather_72_rule(p)) // ','.maybe_star_pattern+
&&
(_opt_var = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','?
)
@@ -9976,13 +10045,13 @@ items_pattern_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> items_pattern[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.key_value_pattern+"));
- asdl_seq * _gather_73_var;
+ asdl_seq * _gather_74_var;
if (
- (_gather_73_var = _gather_73_rule(p)) // ','.key_value_pattern+
+ (_gather_74_var = _gather_74_rule(p)) // ','.key_value_pattern+
)
{
D(fprintf(stderr, "%*c+ items_pattern[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.key_value_pattern+"));
- _res = _gather_73_var;
+ _res = _gather_74_var;
goto done;
}
p->mark = _mark;
@@ -10019,7 +10088,7 @@ key_value_pattern_rule(Parser *p)
void *key;
pattern_ty pattern;
if (
- (key = _tmp_75_rule(p)) // literal_expr | attr
+ (key = _tmp_76_rule(p)) // literal_expr | attr
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
@@ -10350,7 +10419,7 @@ positional_patterns_rule(Parser *p)
D(fprintf(stderr, "%*c> positional_patterns[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.pattern+"));
asdl_pattern_seq* args;
if (
- (args = (asdl_pattern_seq*)_gather_76_rule(p)) // ','.pattern+
+ (args = (asdl_pattern_seq*)_gather_77_rule(p)) // ','.pattern+
)
{
D(fprintf(stderr, "%*c+ positional_patterns[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.pattern+"));
@@ -10392,13 +10461,13 @@ keyword_patterns_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> keyword_patterns[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.keyword_pattern+"));
- asdl_seq * _gather_78_var;
+ asdl_seq * _gather_79_var;
if (
- (_gather_78_var = _gather_78_rule(p)) // ','.keyword_pattern+
+ (_gather_79_var = _gather_79_rule(p)) // ','.keyword_pattern+
)
{
D(fprintf(stderr, "%*c+ keyword_patterns[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.keyword_pattern+"));
- _res = _gather_78_var;
+ _res = _gather_79_var;
goto done;
}
p->mark = _mark;
@@ -10497,7 +10566,7 @@ expressions_rule(Parser *p)
if (
(a = expression_rule(p)) // expression
&&
- (b = _loop1_80_rule(p)) // ((',' expression))+
+ (b = _loop1_81_rule(p)) // ((',' expression))+
&&
(_opt_var = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','?
)
@@ -10669,11 +10738,11 @@ expression_rule(Parser *p)
if (
(a = disjunction_rule(p)) // disjunction
&&
- (_keyword = _PyPegen_expect_token(p, 641)) // token='if'
+ (_keyword = _PyPegen_expect_token(p, 642)) // token='if'
&&
(b = disjunction_rule(p)) // disjunction
&&
- (_keyword_1 = _PyPegen_expect_token(p, 644)) // token='else'
+ (_keyword_1 = _PyPegen_expect_token(p, 645)) // token='else'
&&
(c = expression_rule(p)) // expression
)
@@ -10780,7 +10849,7 @@ yield_expr_rule(Parser *p)
if (
(_keyword = _PyPegen_expect_token(p, 573)) // token='yield'
&&
- (_keyword_1 = _PyPegen_expect_token(p, 607)) // token='from'
+ (_keyword_1 = _PyPegen_expect_token(p, 608)) // token='from'
&&
(a = expression_rule(p)) // expression
)
@@ -10888,7 +10957,7 @@ star_expressions_rule(Parser *p)
if (
(a = star_expression_rule(p)) // star_expression
&&
- (b = _loop1_81_rule(p)) // ((',' star_expression))+
+ (b = _loop1_82_rule(p)) // ((',' star_expression))+
&&
(_opt_var = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','?
)
@@ -11089,7 +11158,7 @@ star_named_expressions_rule(Parser *p)
UNUSED(_opt_var); // Silence compiler warnings
asdl_expr_seq* a;
if (
- (a = (asdl_expr_seq*)_gather_82_rule(p)) // ','.star_named_expression+
+ (a = (asdl_expr_seq*)_gather_83_rule(p)) // ','.star_named_expression+
&&
(_opt_var = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','?
)
@@ -11389,7 +11458,7 @@ disjunction_rule(Parser *p)
if (
(a = conjunction_rule(p)) // conjunction
&&
- (b = _loop1_84_rule(p)) // (('or' conjunction))+
+ (b = _loop1_85_rule(p)) // (('or' conjunction))+
)
{
D(fprintf(stderr, "%*c+ disjunction[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "conjunction (('or' conjunction))+"));
@@ -11478,7 +11547,7 @@ conjunction_rule(Parser *p)
if (
(a = inversion_rule(p)) // inversion
&&
- (b = _loop1_85_rule(p)) // (('and' inversion))+
+ (b = _loop1_86_rule(p)) // (('and' inversion))+
)
{
D(fprintf(stderr, "%*c+ conjunction[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "inversion (('and' inversion))+"));
@@ -11652,7 +11721,7 @@ comparison_rule(Parser *p)
if (
(a = bitwise_or_rule(p)) // bitwise_or
&&
- (b = _loop1_86_rule(p)) // compare_op_bitwise_or_pair+
+ (b = _loop1_87_rule(p)) // compare_op_bitwise_or_pair+
)
{
D(fprintf(stderr, "%*c+ comparison[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "bitwise_or compare_op_bitwise_or_pair+"));
@@ -11989,10 +12058,10 @@ noteq_bitwise_or_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> noteq_bitwise_or[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('!=') bitwise_or"));
- void *_tmp_87_var;
+ void *_tmp_88_var;
expr_ty a;
if (
- (_tmp_87_var = _tmp_87_rule(p)) // '!='
+ (_tmp_88_var = _tmp_88_rule(p)) // '!='
&&
(a = bitwise_or_rule(p)) // bitwise_or
)
@@ -12230,7 +12299,7 @@ notin_bitwise_or_rule(Parser *p)
if (
(_keyword = _PyPegen_expect_token(p, 581)) // token='not'
&&
- (_keyword_1 = _PyPegen_expect_token(p, 650)) // token='in'
+ (_keyword_1 = _PyPegen_expect_token(p, 651)) // token='in'
&&
(a = bitwise_or_rule(p)) // bitwise_or
)
@@ -12277,7 +12346,7 @@ in_bitwise_or_rule(Parser *p)
Token * _keyword;
expr_ty a;
if (
- (_keyword = _PyPegen_expect_token(p, 650)) // token='in'
+ (_keyword = _PyPegen_expect_token(p, 651)) // token='in'
&&
(a = bitwise_or_rule(p)) // bitwise_or
)
@@ -14027,7 +14096,7 @@ slices_rule(Parser *p)
UNUSED(_opt_var); // Silence compiler warnings
asdl_expr_seq* a;
if (
- (a = (asdl_expr_seq*)_gather_88_rule(p)) // ','.(slice | starred_expression)+
+ (a = (asdl_expr_seq*)_gather_89_rule(p)) // ','.(slice | starred_expression)+
&&
(_opt_var = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','?
)
@@ -14100,7 +14169,7 @@ slice_rule(Parser *p)
&&
(b = expression_rule(p), !p->error_indicator) // expression?
&&
- (c = _tmp_90_rule(p), !p->error_indicator) // [':' expression?]
+ (c = _tmp_91_rule(p), !p->error_indicator) // [':' expression?]
)
{
D(fprintf(stderr, "%*c+ slice[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression? ':' expression? [':' expression?]"));
@@ -14160,7 +14229,7 @@ slice_rule(Parser *p)
// | 'True'
// | 'False'
// | 'None'
-// | &STRING strings
+// | &(STRING | FSTRING_START) strings
// | NUMBER
// | &'(' (tuple | group | genexp)
// | &'[' (list | listcomp)
@@ -14215,7 +14284,7 @@ atom_rule(Parser *p)
D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'True'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 600)) // token='True'
+ (_keyword = _PyPegen_expect_token(p, 601)) // token='True'
)
{
D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'True'"));
@@ -14248,7 +14317,7 @@ atom_rule(Parser *p)
D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'False'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 602)) // token='False'
+ (_keyword = _PyPegen_expect_token(p, 603)) // token='False'
)
{
D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'False'"));
@@ -14281,7 +14350,7 @@ atom_rule(Parser *p)
D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'None'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 601)) // token='None'
+ (_keyword = _PyPegen_expect_token(p, 602)) // token='None'
)
{
D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'None'"));
@@ -14306,26 +14375,26 @@ atom_rule(Parser *p)
D(fprintf(stderr, "%*c%s atom[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'None'"));
}
- { // &STRING strings
+ { // &(STRING | FSTRING_START) strings
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&STRING strings"));
+ D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&(STRING | FSTRING_START) strings"));
expr_ty strings_var;
if (
- _PyPegen_lookahead(1, _PyPegen_string_token, p)
+ _PyPegen_lookahead(1, _tmp_92_rule, p)
&&
(strings_var = strings_rule(p)) // strings
)
{
- D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&STRING strings"));
+ D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&(STRING | FSTRING_START) strings"));
_res = strings_var;
goto done;
}
p->mark = _mark;
D(fprintf(stderr, "%*c%s atom[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "&STRING strings"));
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "&(STRING | FSTRING_START) strings"));
}
{ // NUMBER
if (p->error_indicator) {
@@ -14352,15 +14421,15 @@ atom_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'(' (tuple | group | genexp)"));
- void *_tmp_91_var;
+ void *_tmp_93_var;
if (
_PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 7) // token='('
&&
- (_tmp_91_var = _tmp_91_rule(p)) // tuple | group | genexp
+ (_tmp_93_var = _tmp_93_rule(p)) // tuple | group | genexp
)
{
D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&'(' (tuple | group | genexp)"));
- _res = _tmp_91_var;
+ _res = _tmp_93_var;
goto done;
}
p->mark = _mark;
@@ -14373,15 +14442,15 @@ atom_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'[' (list | listcomp)"));
- void *_tmp_92_var;
+ void *_tmp_94_var;
if (
_PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 9) // token='['
&&
- (_tmp_92_var = _tmp_92_rule(p)) // list | listcomp
+ (_tmp_94_var = _tmp_94_rule(p)) // list | listcomp
)
{
D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&'[' (list | listcomp)"));
- _res = _tmp_92_var;
+ _res = _tmp_94_var;
goto done;
}
p->mark = _mark;
@@ -14394,15 +14463,15 @@ atom_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'{' (dict | set | dictcomp | setcomp)"));
- void *_tmp_93_var;
+ void *_tmp_95_var;
if (
_PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 25) // token='{'
&&
- (_tmp_93_var = _tmp_93_rule(p)) // dict | set | dictcomp | setcomp
+ (_tmp_95_var = _tmp_95_rule(p)) // dict | set | dictcomp | setcomp
)
{
D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&'{' (dict | set | dictcomp | setcomp)"));
- _res = _tmp_93_var;
+ _res = _tmp_95_var;
goto done;
}
p->mark = _mark;
@@ -14474,7 +14543,7 @@ group_rule(Parser *p)
if (
(_literal = _PyPegen_expect_token(p, 7)) // token='('
&&
- (a = _tmp_94_rule(p)) // yield_expr | named_expression
+ (a = _tmp_96_rule(p)) // yield_expr | named_expression
&&
(_literal_1 = _PyPegen_expect_token(p, 8)) // token=')'
)
@@ -14551,7 +14620,7 @@ lambdef_rule(Parser *p)
void *a;
expr_ty b;
if (
- (_keyword = _PyPegen_expect_token(p, 586)) // token='lambda'
+ (_keyword = _PyPegen_expect_token(p, 600)) // token='lambda'
&&
(a = lambda_params_rule(p), !p->error_indicator) // lambda_params?
&&
@@ -14678,9 +14747,9 @@ lambda_parameters_rule(Parser *p)
if (
(a = lambda_slash_no_default_rule(p)) // lambda_slash_no_default
&&
- (b = (asdl_arg_seq*)_loop0_95_rule(p)) // lambda_param_no_default*
+ (b = (asdl_arg_seq*)_loop0_97_rule(p)) // lambda_param_no_default*
&&
- (c = _loop0_96_rule(p)) // lambda_param_with_default*
+ (c = _loop0_98_rule(p)) // lambda_param_with_default*
&&
(d = lambda_star_etc_rule(p), !p->error_indicator) // lambda_star_etc?
)
@@ -14710,7 +14779,7 @@ lambda_parameters_rule(Parser *p)
if (
(a = lambda_slash_with_default_rule(p)) // lambda_slash_with_default
&&
- (b = _loop0_97_rule(p)) // lambda_param_with_default*
+ (b = _loop0_99_rule(p)) // lambda_param_with_default*
&&
(c = lambda_star_etc_rule(p), !p->error_indicator) // lambda_star_etc?
)
@@ -14738,9 +14807,9 @@ lambda_parameters_rule(Parser *p)
asdl_seq * b;
void *c;
if (
- (a = (asdl_arg_seq*)_loop1_98_rule(p)) // lambda_param_no_default+
+ (a = (asdl_arg_seq*)_loop1_100_rule(p)) // lambda_param_no_default+
&&
- (b = _loop0_99_rule(p)) // lambda_param_with_default*
+ (b = _loop0_101_rule(p)) // lambda_param_with_default*
&&
(c = lambda_star_etc_rule(p), !p->error_indicator) // lambda_star_etc?
)
@@ -14767,7 +14836,7 @@ lambda_parameters_rule(Parser *p)
asdl_seq * a;
void *b;
if (
- (a = _loop1_100_rule(p)) // lambda_param_with_default+
+ (a = _loop1_102_rule(p)) // lambda_param_with_default+
&&
(b = lambda_star_etc_rule(p), !p->error_indicator) // lambda_star_etc?
)
@@ -14841,7 +14910,7 @@ lambda_slash_no_default_rule(Parser *p)
Token * _literal_1;
asdl_arg_seq* a;
if (
- (a = (asdl_arg_seq*)_loop1_101_rule(p)) // lambda_param_no_default+
+ (a = (asdl_arg_seq*)_loop1_103_rule(p)) // lambda_param_no_default+
&&
(_literal = _PyPegen_expect_token(p, 17)) // token='/'
&&
@@ -14870,7 +14939,7 @@ lambda_slash_no_default_rule(Parser *p)
Token * _literal;
asdl_arg_seq* a;
if (
- (a = (asdl_arg_seq*)_loop1_102_rule(p)) // lambda_param_no_default+
+ (a = (asdl_arg_seq*)_loop1_104_rule(p)) // lambda_param_no_default+
&&
(_literal = _PyPegen_expect_token(p, 17)) // token='/'
&&
@@ -14923,9 +14992,9 @@ lambda_slash_with_default_rule(Parser *p)
asdl_seq * a;
asdl_seq * b;
if (
- (a = _loop0_103_rule(p)) // lambda_param_no_default*
+ (a = _loop0_105_rule(p)) // lambda_param_no_default*
&&
- (b = _loop1_104_rule(p)) // lambda_param_with_default+
+ (b = _loop1_106_rule(p)) // lambda_param_with_default+
&&
(_literal = _PyPegen_expect_token(p, 17)) // token='/'
&&
@@ -14955,9 +15024,9 @@ lambda_slash_with_default_rule(Parser *p)
asdl_seq * a;
asdl_seq * b;
if (
- (a = _loop0_105_rule(p)) // lambda_param_no_default*
+ (a = _loop0_107_rule(p)) // lambda_param_no_default*
&&
- (b = _loop1_106_rule(p)) // lambda_param_with_default+
+ (b = _loop1_108_rule(p)) // lambda_param_with_default+
&&
(_literal = _PyPegen_expect_token(p, 17)) // token='/'
&&
@@ -15035,7 +15104,7 @@ lambda_star_etc_rule(Parser *p)
&&
(a = lambda_param_no_default_rule(p)) // lambda_param_no_default
&&
- (b = _loop0_107_rule(p)) // lambda_param_maybe_default*
+ (b = _loop0_109_rule(p)) // lambda_param_maybe_default*
&&
(c = lambda_kwds_rule(p), !p->error_indicator) // lambda_kwds?
)
@@ -15068,7 +15137,7 @@ lambda_star_etc_rule(Parser *p)
&&
(_literal_1 = _PyPegen_expect_token(p, 12)) // token=','
&&
- (b = _loop1_108_rule(p)) // lambda_param_maybe_default+
+ (b = _loop1_110_rule(p)) // lambda_param_maybe_default+
&&
(c = lambda_kwds_rule(p), !p->error_indicator) // lambda_kwds?
)
@@ -15475,7 +15544,387 @@ lambda_param_rule(Parser *p)
return _res;
}
-// strings: STRING+
+// fstring_middle: fstring_replacement_field | FSTRING_MIDDLE
+static expr_ty
+fstring_middle_rule(Parser *p)
+{
+ if (p->level++ == MAXSTACK) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ }
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ expr_ty _res = NULL;
+ int _mark = p->mark;
+ { // fstring_replacement_field
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> fstring_middle[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "fstring_replacement_field"));
+ expr_ty fstring_replacement_field_var;
+ if (
+ (fstring_replacement_field_var = fstring_replacement_field_rule(p)) // fstring_replacement_field
+ )
+ {
+ D(fprintf(stderr, "%*c+ fstring_middle[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "fstring_replacement_field"));
+ _res = fstring_replacement_field_var;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s fstring_middle[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "fstring_replacement_field"));
+ }
+ { // FSTRING_MIDDLE
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> fstring_middle[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "FSTRING_MIDDLE"));
+ Token * t;
+ if (
+ (t = _PyPegen_expect_token(p, FSTRING_MIDDLE)) // token='FSTRING_MIDDLE'
+ )
+ {
+ D(fprintf(stderr, "%*c+ fstring_middle[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "FSTRING_MIDDLE"));
+ _res = _PyPegen_constant_from_token ( p , t );
+ if (_res == NULL && PyErr_Occurred()) {
+ p->error_indicator = 1;
+ p->level--;
+ return NULL;
+ }
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s fstring_middle[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "FSTRING_MIDDLE"));
+ }
+ _res = NULL;
+ done:
+ p->level--;
+ return _res;
+}
+
+// fstring_replacement_field:
+// | '{' (yield_expr | star_expressions) "="? fstring_conversion? fstring_full_format_spec? '}'
+// | invalid_replacement_field
+static expr_ty
+fstring_replacement_field_rule(Parser *p)
+{
+ if (p->level++ == MAXSTACK) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ }
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ expr_ty _res = NULL;
+ int _mark = p->mark;
+ if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) {
+ p->error_indicator = 1;
+ p->level--;
+ return NULL;
+ }
+ int _start_lineno = p->tokens[_mark]->lineno;
+ UNUSED(_start_lineno); // Only used by EXTRA macro
+ int _start_col_offset = p->tokens[_mark]->col_offset;
+ UNUSED(_start_col_offset); // Only used by EXTRA macro
+ { // '{' (yield_expr | star_expressions) "="? fstring_conversion? fstring_full_format_spec? '}'
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> fstring_replacement_field[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{' (yield_expr | star_expressions) \"=\"? fstring_conversion? fstring_full_format_spec? '}'"));
+ Token * _literal;
+ void *a;
+ void *conversion;
+ void *debug_expr;
+ void *format;
+ Token * rbrace;
+ if (
+ (_literal = _PyPegen_expect_token(p, 25)) // token='{'
+ &&
+ (a = _tmp_111_rule(p)) // yield_expr | star_expressions
+ &&
+ (debug_expr = _PyPegen_expect_token(p, 22), !p->error_indicator) // "="?
+ &&
+ (conversion = fstring_conversion_rule(p), !p->error_indicator) // fstring_conversion?
+ &&
+ (format = fstring_full_format_spec_rule(p), !p->error_indicator) // fstring_full_format_spec?
+ &&
+ (rbrace = _PyPegen_expect_token(p, 26)) // token='}'
+ )
+ {
+ D(fprintf(stderr, "%*c+ fstring_replacement_field[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{' (yield_expr | star_expressions) \"=\"? fstring_conversion? fstring_full_format_spec? '}'"));
+ Token *_token = _PyPegen_get_last_nonnwhitespace_token(p);
+ if (_token == NULL) {
+ p->level--;
+ return NULL;
+ }
+ int _end_lineno = _token->end_lineno;
+ UNUSED(_end_lineno); // Only used by EXTRA macro
+ int _end_col_offset = _token->end_col_offset;
+ UNUSED(_end_col_offset); // Only used by EXTRA macro
+ _res = _PyPegen_formatted_value ( p , a , debug_expr , conversion , format , rbrace , EXTRA );
+ if (_res == NULL && PyErr_Occurred()) {
+ p->error_indicator = 1;
+ p->level--;
+ return NULL;
+ }
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s fstring_replacement_field[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'{' (yield_expr | star_expressions) \"=\"? fstring_conversion? fstring_full_format_spec? '}'"));
+ }
+ if (p->call_invalid_rules) { // invalid_replacement_field
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> fstring_replacement_field[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "invalid_replacement_field"));
+ void *invalid_replacement_field_var;
+ if (
+ (invalid_replacement_field_var = invalid_replacement_field_rule(p)) // invalid_replacement_field
+ )
+ {
+ D(fprintf(stderr, "%*c+ fstring_replacement_field[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "invalid_replacement_field"));
+ _res = invalid_replacement_field_var;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s fstring_replacement_field[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "invalid_replacement_field"));
+ }
+ _res = NULL;
+ done:
+ p->level--;
+ return _res;
+}
+
+// fstring_conversion: "!" NAME
+static ResultTokenWithMetadata*
+fstring_conversion_rule(Parser *p)
+{
+ if (p->level++ == MAXSTACK) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ }
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ ResultTokenWithMetadata* _res = NULL;
+ int _mark = p->mark;
+ { // "!" NAME
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> fstring_conversion[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "\"!\" NAME"));
+ expr_ty conv;
+ Token * conv_token;
+ if (
+ (conv_token = _PyPegen_expect_token(p, 54)) // token='!'
+ &&
+ (conv = _PyPegen_name_token(p)) // NAME
+ )
+ {
+ D(fprintf(stderr, "%*c+ fstring_conversion[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "\"!\" NAME"));
+ _res = _PyPegen_check_fstring_conversion ( p , conv_token , conv );
+ if (_res == NULL && PyErr_Occurred()) {
+ p->error_indicator = 1;
+ p->level--;
+ return NULL;
+ }
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s fstring_conversion[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "\"!\" NAME"));
+ }
+ _res = NULL;
+ done:
+ p->level--;
+ return _res;
+}
+
+// fstring_full_format_spec: ':' fstring_format_spec*
+static ResultTokenWithMetadata*
+fstring_full_format_spec_rule(Parser *p)
+{
+ if (p->level++ == MAXSTACK) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ }
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ ResultTokenWithMetadata* _res = NULL;
+ int _mark = p->mark;
+ if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) {
+ p->error_indicator = 1;
+ p->level--;
+ return NULL;
+ }
+ int _start_lineno = p->tokens[_mark]->lineno;
+ UNUSED(_start_lineno); // Only used by EXTRA macro
+ int _start_col_offset = p->tokens[_mark]->col_offset;
+ UNUSED(_start_col_offset); // Only used by EXTRA macro
+ { // ':' fstring_format_spec*
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> fstring_full_format_spec[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':' fstring_format_spec*"));
+ Token * colon;
+ asdl_seq * spec;
+ if (
+ (colon = _PyPegen_expect_token(p, 11)) // token=':'
+ &&
+ (spec = _loop0_112_rule(p)) // fstring_format_spec*
+ )
+ {
+ D(fprintf(stderr, "%*c+ fstring_full_format_spec[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':' fstring_format_spec*"));
+ Token *_token = _PyPegen_get_last_nonnwhitespace_token(p);
+ if (_token == NULL) {
+ p->level--;
+ return NULL;
+ }
+ int _end_lineno = _token->end_lineno;
+ UNUSED(_end_lineno); // Only used by EXTRA macro
+ int _end_col_offset = _token->end_col_offset;
+ UNUSED(_end_col_offset); // Only used by EXTRA macro
+ _res = _PyPegen_setup_full_format_spec ( p , colon , ( asdl_expr_seq* ) spec , EXTRA );
+ if (_res == NULL && PyErr_Occurred()) {
+ p->error_indicator = 1;
+ p->level--;
+ return NULL;
+ }
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s fstring_full_format_spec[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':' fstring_format_spec*"));
+ }
+ _res = NULL;
+ done:
+ p->level--;
+ return _res;
+}
+
+// fstring_format_spec: FSTRING_MIDDLE | fstring_replacement_field
+static expr_ty
+fstring_format_spec_rule(Parser *p)
+{
+ if (p->level++ == MAXSTACK) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ }
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ expr_ty _res = NULL;
+ int _mark = p->mark;
+ { // FSTRING_MIDDLE
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> fstring_format_spec[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "FSTRING_MIDDLE"));
+ Token * t;
+ if (
+ (t = _PyPegen_expect_token(p, FSTRING_MIDDLE)) // token='FSTRING_MIDDLE'
+ )
+ {
+ D(fprintf(stderr, "%*c+ fstring_format_spec[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "FSTRING_MIDDLE"));
+ _res = _PyPegen_constant_from_token ( p , t );
+ if (_res == NULL && PyErr_Occurred()) {
+ p->error_indicator = 1;
+ p->level--;
+ return NULL;
+ }
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s fstring_format_spec[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "FSTRING_MIDDLE"));
+ }
+ { // fstring_replacement_field
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> fstring_format_spec[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "fstring_replacement_field"));
+ expr_ty fstring_replacement_field_var;
+ if (
+ (fstring_replacement_field_var = fstring_replacement_field_rule(p)) // fstring_replacement_field
+ )
+ {
+ D(fprintf(stderr, "%*c+ fstring_format_spec[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "fstring_replacement_field"));
+ _res = fstring_replacement_field_var;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s fstring_format_spec[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "fstring_replacement_field"));
+ }
+ _res = NULL;
+ done:
+ p->level--;
+ return _res;
+}
+
+// string: STRING
+static expr_ty
+string_rule(Parser *p)
+{
+ if (p->level++ == MAXSTACK) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ }
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ expr_ty _res = NULL;
+ int _mark = p->mark;
+ { // STRING
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> string[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "STRING"));
+ Token* s;
+ if (
+ (s = (Token*)_PyPegen_string_token(p)) // STRING
+ )
+ {
+ D(fprintf(stderr, "%*c+ string[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "STRING"));
+ _res = _PyPegen_constant_from_string ( p , s );
+ if (_res == NULL && PyErr_Occurred()) {
+ p->error_indicator = 1;
+ p->level--;
+ return NULL;
+ }
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s string[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "STRING"));
+ }
+ _res = NULL;
+ done:
+ p->level--;
+ return _res;
+}
+
+// strings: ((fstring | string))+
static expr_ty
strings_rule(Parser *p)
{
@@ -15493,19 +15942,37 @@ strings_rule(Parser *p)
return _res;
}
int _mark = p->mark;
- { // STRING+
+ if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) {
+ p->error_indicator = 1;
+ p->level--;
+ return NULL;
+ }
+ int _start_lineno = p->tokens[_mark]->lineno;
+ UNUSED(_start_lineno); // Only used by EXTRA macro
+ int _start_col_offset = p->tokens[_mark]->col_offset;
+ UNUSED(_start_col_offset); // Only used by EXTRA macro
+ { // ((fstring | string))+
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> strings[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "STRING+"));
- asdl_seq * a;
+ D(fprintf(stderr, "%*c> strings[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "((fstring | string))+"));
+ asdl_expr_seq* a;
if (
- (a = _loop1_109_rule(p)) // STRING+
+ (a = (asdl_expr_seq*)_loop1_113_rule(p)) // ((fstring | string))+
)
{
- D(fprintf(stderr, "%*c+ strings[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "STRING+"));
- _res = _PyPegen_concatenate_strings ( p , a );
+ D(fprintf(stderr, "%*c+ strings[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "((fstring | string))+"));
+ Token *_token = _PyPegen_get_last_nonnwhitespace_token(p);
+ if (_token == NULL) {
+ p->level--;
+ return NULL;
+ }
+ int _end_lineno = _token->end_lineno;
+ UNUSED(_end_lineno); // Only used by EXTRA macro
+ int _end_col_offset = _token->end_col_offset;
+ UNUSED(_end_col_offset); // Only used by EXTRA macro
+ _res = _PyPegen_concatenate_strings ( p , a , EXTRA );
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
p->level--;
@@ -15515,7 +15982,7 @@ strings_rule(Parser *p)
}
p->mark = _mark;
D(fprintf(stderr, "%*c%s strings[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "STRING+"));
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "((fstring | string))+"));
}
_res = NULL;
done:
@@ -15627,7 +16094,7 @@ tuple_rule(Parser *p)
if (
(_literal = _PyPegen_expect_token(p, 7)) // token='('
&&
- (a = _tmp_110_rule(p), !p->error_indicator) // [star_named_expression ',' star_named_expressions?]
+ (a = _tmp_114_rule(p), !p->error_indicator) // [star_named_expression ',' star_named_expressions?]
&&
(_literal_1 = _PyPegen_expect_token(p, 8)) // token=')'
)
@@ -15845,7 +16312,7 @@ double_starred_kvpairs_rule(Parser *p)
UNUSED(_opt_var); // Silence compiler warnings
asdl_seq * a;
if (
- (a = _gather_111_rule(p)) // ','.double_starred_kvpair+
+ (a = _gather_115_rule(p)) // ','.double_starred_kvpair+
&&
(_opt_var = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','?
)
@@ -16007,7 +16474,7 @@ for_if_clauses_rule(Parser *p)
D(fprintf(stderr, "%*c> for_if_clauses[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "for_if_clause+"));
asdl_comprehension_seq* a;
if (
- (a = (asdl_comprehension_seq*)_loop1_113_rule(p)) // for_if_clause+
+ (a = (asdl_comprehension_seq*)_loop1_117_rule(p)) // for_if_clause+
)
{
D(fprintf(stderr, "%*c+ for_if_clauses[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "for_if_clause+"));
@@ -16062,17 +16529,17 @@ for_if_clause_rule(Parser *p)
if (
(async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC'
&&
- (_keyword = _PyPegen_expect_token(p, 649)) // token='for'
+ (_keyword = _PyPegen_expect_token(p, 650)) // token='for'
&&
(a = star_targets_rule(p)) // star_targets
&&
- (_keyword_1 = _PyPegen_expect_token(p, 650)) // token='in'
+ (_keyword_1 = _PyPegen_expect_token(p, 651)) // token='in'
&&
(_cut_var = 1)
&&
(b = disjunction_rule(p)) // disjunction
&&
- (c = (asdl_expr_seq*)_loop0_114_rule(p)) // (('if' disjunction))*
+ (c = (asdl_expr_seq*)_loop0_118_rule(p)) // (('if' disjunction))*
)
{
D(fprintf(stderr, "%*c+ for_if_clause[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC 'for' star_targets 'in' ~ disjunction (('if' disjunction))*"));
@@ -16105,17 +16572,17 @@ for_if_clause_rule(Parser *p)
expr_ty b;
asdl_expr_seq* c;
if (
- (_keyword = _PyPegen_expect_token(p, 649)) // token='for'
+ (_keyword = _PyPegen_expect_token(p, 650)) // token='for'
&&
(a = star_targets_rule(p)) // star_targets
&&
- (_keyword_1 = _PyPegen_expect_token(p, 650)) // token='in'
+ (_keyword_1 = _PyPegen_expect_token(p, 651)) // token='in'
&&
(_cut_var = 1)
&&
(b = disjunction_rule(p)) // disjunction
&&
- (c = (asdl_expr_seq*)_loop0_115_rule(p)) // (('if' disjunction))*
+ (c = (asdl_expr_seq*)_loop0_119_rule(p)) // (('if' disjunction))*
)
{
D(fprintf(stderr, "%*c+ for_if_clause[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'for' star_targets 'in' ~ disjunction (('if' disjunction))*"));
@@ -16378,7 +16845,7 @@ genexp_rule(Parser *p)
if (
(_literal = _PyPegen_expect_token(p, 7)) // token='('
&&
- (a = _tmp_116_rule(p)) // assignment_expression | expression !':='
+ (a = _tmp_120_rule(p)) // assignment_expression | expression !':='
&&
(b = for_if_clauses_rule(p)) // for_if_clauses
&&
@@ -16630,9 +17097,9 @@ args_rule(Parser *p)
asdl_expr_seq* a;
void *b;
if (
- (a = (asdl_expr_seq*)_gather_117_rule(p)) // ','.(starred_expression | (assignment_expression | expression !':=') !'=')+
+ (a = (asdl_expr_seq*)_gather_121_rule(p)) // ','.(starred_expression | (assignment_expression | expression !':=') !'=')+
&&
- (b = _tmp_119_rule(p), !p->error_indicator) // [',' kwargs]
+ (b = _tmp_123_rule(p), !p->error_indicator) // [',' kwargs]
)
{
D(fprintf(stderr, "%*c+ args[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.(starred_expression | (assignment_expression | expression !':=') !'=')+ [',' kwargs]"));
@@ -16723,11 +17190,11 @@ kwargs_rule(Parser *p)
asdl_seq * a;
asdl_seq * b;
if (
- (a = _gather_120_rule(p)) // ','.kwarg_or_starred+
+ (a = _gather_124_rule(p)) // ','.kwarg_or_starred+
&&
(_literal = _PyPegen_expect_token(p, 12)) // token=','
&&
- (b = _gather_122_rule(p)) // ','.kwarg_or_double_starred+
+ (b = _gather_126_rule(p)) // ','.kwarg_or_double_starred+
)
{
D(fprintf(stderr, "%*c+ kwargs[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.kwarg_or_starred+ ',' ','.kwarg_or_double_starred+"));
@@ -16749,13 +17216,13 @@ kwargs_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> kwargs[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.kwarg_or_starred+"));
- asdl_seq * _gather_124_var;
+ asdl_seq * _gather_128_var;
if (
- (_gather_124_var = _gather_124_rule(p)) // ','.kwarg_or_starred+
+ (_gather_128_var = _gather_128_rule(p)) // ','.kwarg_or_starred+
)
{
D(fprintf(stderr, "%*c+ kwargs[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.kwarg_or_starred+"));
- _res = _gather_124_var;
+ _res = _gather_128_var;
goto done;
}
p->mark = _mark;
@@ -16768,13 +17235,13 @@ kwargs_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> kwargs[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.kwarg_or_double_starred+"));
- asdl_seq * _gather_126_var;
+ asdl_seq * _gather_130_var;
if (
- (_gather_126_var = _gather_126_rule(p)) // ','.kwarg_or_double_starred+
+ (_gather_130_var = _gather_130_rule(p)) // ','.kwarg_or_double_starred+
)
{
D(fprintf(stderr, "%*c+ kwargs[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.kwarg_or_double_starred+"));
- _res = _gather_126_var;
+ _res = _gather_130_var;
goto done;
}
p->mark = _mark;
@@ -17167,7 +17634,7 @@ star_targets_rule(Parser *p)
if (
(a = star_target_rule(p)) // star_target
&&
- (b = _loop0_128_rule(p)) // ((',' star_target))*
+ (b = _loop0_132_rule(p)) // ((',' star_target))*
&&
(_opt_var = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','?
)
@@ -17224,7 +17691,7 @@ star_targets_list_seq_rule(Parser *p)
UNUSED(_opt_var); // Silence compiler warnings
asdl_expr_seq* a;
if (
- (a = (asdl_expr_seq*)_gather_129_rule(p)) // ','.star_target+
+ (a = (asdl_expr_seq*)_gather_133_rule(p)) // ','.star_target+
&&
(_opt_var = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','?
)
@@ -17275,7 +17742,7 @@ star_targets_tuple_seq_rule(Parser *p)
if (
(a = star_target_rule(p)) // star_target
&&
- (b = _loop1_131_rule(p)) // ((',' star_target))+
+ (b = _loop1_135_rule(p)) // ((',' star_target))+
&&
(_opt_var = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','?
)
@@ -17364,7 +17831,7 @@ star_target_rule(Parser *p)
if (
(_literal = _PyPegen_expect_token(p, 16)) // token='*'
&&
- (a = _tmp_132_rule(p)) // !'*' star_target
+ (a = _tmp_136_rule(p)) // !'*' star_target
)
{
D(fprintf(stderr, "%*c+ star_target[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' (!'*' star_target)"));
@@ -18295,7 +18762,7 @@ del_targets_rule(Parser *p)
UNUSED(_opt_var); // Silence compiler warnings
asdl_expr_seq* a;
if (
- (a = (asdl_expr_seq*)_gather_133_rule(p)) // ','.del_target+
+ (a = (asdl_expr_seq*)_gather_137_rule(p)) // ','.del_target+
&&
(_opt_var = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','?
)
@@ -18656,7 +19123,7 @@ type_expressions_rule(Parser *p)
expr_ty b;
expr_ty c;
if (
- (a = _gather_135_rule(p)) // ','.expression+
+ (a = _gather_139_rule(p)) // ','.expression+
&&
(_literal = _PyPegen_expect_token(p, 12)) // token=','
&&
@@ -18695,7 +19162,7 @@ type_expressions_rule(Parser *p)
asdl_seq * a;
expr_ty b;
if (
- (a = _gather_137_rule(p)) // ','.expression+
+ (a = _gather_141_rule(p)) // ','.expression+
&&
(_literal = _PyPegen_expect_token(p, 12)) // token=','
&&
@@ -18728,7 +19195,7 @@ type_expressions_rule(Parser *p)
asdl_seq * a;
expr_ty b;
if (
- (a = _gather_139_rule(p)) // ','.expression+
+ (a = _gather_143_rule(p)) // ','.expression+
&&
(_literal = _PyPegen_expect_token(p, 12)) // token=','
&&
@@ -18848,7 +19315,7 @@ type_expressions_rule(Parser *p)
D(fprintf(stderr, "%*c> type_expressions[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.expression+"));
asdl_expr_seq* a;
if (
- (a = (asdl_expr_seq*)_gather_141_rule(p)) // ','.expression+
+ (a = (asdl_expr_seq*)_gather_145_rule(p)) // ','.expression+
)
{
D(fprintf(stderr, "%*c+ type_expressions[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.expression+"));
@@ -18900,7 +19367,7 @@ func_type_comment_rule(Parser *p)
&&
(t = _PyPegen_expect_token(p, TYPE_COMMENT)) // token='TYPE_COMMENT'
&&
- _PyPegen_lookahead(1, _tmp_143_rule, p)
+ _PyPegen_lookahead(1, _tmp_147_rule, p)
)
{
D(fprintf(stderr, "%*c+ func_type_comment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NEWLINE TYPE_COMMENT &(NEWLINE INDENT)"));
@@ -19029,7 +19496,7 @@ invalid_arguments_rule(Parser *p)
&&
(_literal = _PyPegen_expect_token(p, 12)) // token=','
&&
- (_opt_var = _tmp_144_rule(p), !p->error_indicator) // [args | expression for_if_clauses]
+ (_opt_var = _tmp_148_rule(p), !p->error_indicator) // [args | expression for_if_clauses]
)
{
D(fprintf(stderr, "%*c+ invalid_arguments[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression for_if_clauses ',' [args | expression for_if_clauses]"));
@@ -19089,13 +19556,13 @@ invalid_arguments_rule(Parser *p)
expr_ty a;
Token * b;
if (
- (_opt_var = _tmp_145_rule(p), !p->error_indicator) // [(args ',')]
+ (_opt_var = _tmp_149_rule(p), !p->error_indicator) // [(args ',')]
&&
(a = _PyPegen_name_token(p)) // NAME
&&
(b = _PyPegen_expect_token(p, 22)) // token='='
&&
- _PyPegen_lookahead(1, _tmp_146_rule, p)
+ _PyPegen_lookahead(1, _tmp_150_rule, p)
)
{
D(fprintf(stderr, "%*c+ invalid_arguments[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "[(args ',')] NAME '=' &(',' | ')')"));
@@ -19234,7 +19701,7 @@ invalid_kwarg_rule(Parser *p)
Token* a;
Token * b;
if (
- (a = (Token*)_tmp_147_rule(p)) // 'True' | 'False' | 'None'
+ (a = (Token*)_tmp_151_rule(p)) // 'True' | 'False' | 'None'
&&
(b = _PyPegen_expect_token(p, 22)) // token='='
)
@@ -19294,7 +19761,7 @@ invalid_kwarg_rule(Parser *p)
expr_ty a;
Token * b;
if (
- _PyPegen_lookahead(0, _tmp_148_rule, p)
+ _PyPegen_lookahead(0, _tmp_152_rule, p)
&&
(a = expression_rule(p)) // expression
&&
@@ -19398,11 +19865,11 @@ expression_without_invalid_rule(Parser *p)
if (
(a = disjunction_rule(p)) // disjunction
&&
- (_keyword = _PyPegen_expect_token(p, 641)) // token='if'
+ (_keyword = _PyPegen_expect_token(p, 642)) // token='if'
&&
(b = disjunction_rule(p)) // disjunction
&&
- (_keyword_1 = _PyPegen_expect_token(p, 644)) // token='else'
+ (_keyword_1 = _PyPegen_expect_token(p, 645)) // token='else'
&&
(c = expression_rule(p)) // expression
)
@@ -19530,6 +19997,7 @@ invalid_legacy_expression_rule(Parser *p)
// invalid_expression:
// | !(NAME STRING | SOFT_KEYWORD) disjunction expression_without_invalid
// | disjunction 'if' disjunction !('else' | ':')
+// | 'lambda' lambda_params? ':' &(FSTRING_MIDDLE | fstring_replacement_field)
static void *
invalid_expression_rule(Parser *p)
{
@@ -19552,7 +20020,7 @@ invalid_expression_rule(Parser *p)
expr_ty a;
expr_ty b;
if (
- _PyPegen_lookahead(0, _tmp_149_rule, p)
+ _PyPegen_lookahead(0, _tmp_153_rule, p)
&&
(a = disjunction_rule(p)) // disjunction
&&
@@ -19584,11 +20052,11 @@ invalid_expression_rule(Parser *p)
if (
(a = disjunction_rule(p)) // disjunction
&&
- (_keyword = _PyPegen_expect_token(p, 641)) // token='if'
+ (_keyword = _PyPegen_expect_token(p, 642)) // token='if'
&&
(b = disjunction_rule(p)) // disjunction
&&
- _PyPegen_lookahead(0, _tmp_150_rule, p)
+ _PyPegen_lookahead(0, _tmp_154_rule, p)
)
{
D(fprintf(stderr, "%*c+ invalid_expression[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "disjunction 'if' disjunction !('else' | ':')"));
@@ -19604,6 +20072,39 @@ invalid_expression_rule(Parser *p)
D(fprintf(stderr, "%*c%s invalid_expression[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "disjunction 'if' disjunction !('else' | ':')"));
}
+ { // 'lambda' lambda_params? ':' &(FSTRING_MIDDLE | fstring_replacement_field)
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> invalid_expression[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'lambda' lambda_params? ':' &(FSTRING_MIDDLE | fstring_replacement_field)"));
+ void *_opt_var;
+ UNUSED(_opt_var); // Silence compiler warnings
+ Token * a;
+ Token * b;
+ if (
+ (a = _PyPegen_expect_token(p, 600)) // token='lambda'
+ &&
+ (_opt_var = lambda_params_rule(p), !p->error_indicator) // lambda_params?
+ &&
+ (b = _PyPegen_expect_token(p, 11)) // token=':'
+ &&
+ _PyPegen_lookahead(1, _tmp_155_rule, p)
+ )
+ {
+ D(fprintf(stderr, "%*c+ invalid_expression[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'lambda' lambda_params? ':' &(FSTRING_MIDDLE | fstring_replacement_field)"));
+ _res = RAISE_SYNTAX_ERROR_KNOWN_RANGE ( a , b , "f-string: lambda expressions are not allowed without parentheses" );
+ if (_res == NULL && PyErr_Occurred()) {
+ p->error_indicator = 1;
+ p->level--;
+ return NULL;
+ }
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s invalid_expression[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'lambda' lambda_params? ':' &(FSTRING_MIDDLE | fstring_replacement_field)"));
+ }
_res = NULL;
done:
p->level--;
@@ -19677,7 +20178,7 @@ invalid_named_expression_rule(Parser *p)
&&
(b = bitwise_or_rule(p)) // bitwise_or
&&
- _PyPegen_lookahead(0, _tmp_151_rule, p)
+ _PyPegen_lookahead(0, _tmp_156_rule, p)
)
{
D(fprintf(stderr, "%*c+ invalid_named_expression[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME '=' bitwise_or !('=' | ':=')"));
@@ -19703,7 +20204,7 @@ invalid_named_expression_rule(Parser *p)
Token * b;
expr_ty bitwise_or_var;
if (
- _PyPegen_lookahead(0, _tmp_152_rule, p)
+ _PyPegen_lookahead(0, _tmp_157_rule, p)
&&
(a = bitwise_or_rule(p)) // bitwise_or
&&
@@ -19711,7 +20212,7 @@ invalid_named_expression_rule(Parser *p)
&&
(bitwise_or_var = bitwise_or_rule(p)) // bitwise_or
&&
- _PyPegen_lookahead(0, _tmp_153_rule, p)
+ _PyPegen_lookahead(0, _tmp_158_rule, p)
)
{
D(fprintf(stderr, "%*c+ invalid_named_expression[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "!(list | tuple | genexp | 'True' | 'None' | 'False') bitwise_or '=' bitwise_or !('=' | ':=')"));
@@ -19792,7 +20293,7 @@ invalid_assignment_rule(Parser *p)
D(fprintf(stderr, "%*c> invalid_assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_named_expression ',' star_named_expressions* ':' expression"));
Token * _literal;
Token * _literal_1;
- asdl_seq * _loop0_154_var;
+ asdl_seq * _loop0_159_var;
expr_ty a;
expr_ty expression_var;
if (
@@ -19800,7 +20301,7 @@ invalid_assignment_rule(Parser *p)
&&
(_literal = _PyPegen_expect_token(p, 12)) // token=','
&&
- (_loop0_154_var = _loop0_154_rule(p)) // star_named_expressions*
+ (_loop0_159_var = _loop0_159_rule(p)) // star_named_expressions*
&&
(_literal_1 = _PyPegen_expect_token(p, 11)) // token=':'
&&
@@ -19857,10 +20358,10 @@ invalid_assignment_rule(Parser *p)
}
D(fprintf(stderr, "%*c> invalid_assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "((star_targets '='))* star_expressions '='"));
Token * _literal;
- asdl_seq * _loop0_155_var;
+ asdl_seq * _loop0_160_var;
expr_ty a;
if (
- (_loop0_155_var = _loop0_155_rule(p)) // ((star_targets '='))*
+ (_loop0_160_var = _loop0_160_rule(p)) // ((star_targets '='))*
&&
(a = star_expressions_rule(p)) // star_expressions
&&
@@ -19887,10 +20388,10 @@ invalid_assignment_rule(Parser *p)
}
D(fprintf(stderr, "%*c> invalid_assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "((star_targets '='))* yield_expr '='"));
Token * _literal;
- asdl_seq * _loop0_156_var;
+ asdl_seq * _loop0_161_var;
expr_ty a;
if (
- (_loop0_156_var = _loop0_156_rule(p)) // ((star_targets '='))*
+ (_loop0_161_var = _loop0_161_rule(p)) // ((star_targets '='))*
&&
(a = yield_expr_rule(p)) // yield_expr
&&
@@ -19916,7 +20417,7 @@ invalid_assignment_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> invalid_assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions augassign (yield_expr | star_expressions)"));
- void *_tmp_157_var;
+ void *_tmp_162_var;
expr_ty a;
AugOperator* augassign_var;
if (
@@ -19924,7 +20425,7 @@ invalid_assignment_rule(Parser *p)
&&
(augassign_var = augassign_rule(p)) // augassign
&&
- (_tmp_157_var = _tmp_157_rule(p)) // yield_expr | star_expressions
+ (_tmp_162_var = _tmp_162_rule(p)) // yield_expr | star_expressions
)
{
D(fprintf(stderr, "%*c+ invalid_assignment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions augassign (yield_expr | star_expressions)"));
@@ -20057,7 +20558,7 @@ invalid_del_stmt_rule(Parser *p)
Token * _keyword;
expr_ty a;
if (
- (_keyword = _PyPegen_expect_token(p, 603)) // token='del'
+ (_keyword = _PyPegen_expect_token(p, 604)) // token='del'
&&
(a = star_expressions_rule(p)) // star_expressions
)
@@ -20150,11 +20651,11 @@ invalid_comprehension_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> invalid_comprehension[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('[' | '(' | '{') starred_expression for_if_clauses"));
- void *_tmp_158_var;
+ void *_tmp_163_var;
expr_ty a;
asdl_comprehension_seq* for_if_clauses_var;
if (
- (_tmp_158_var = _tmp_158_rule(p)) // '[' | '(' | '{'
+ (_tmp_163_var = _tmp_163_rule(p)) // '[' | '(' | '{'
&&
(a = starred_expression_rule(p)) // starred_expression
&&
@@ -20181,12 +20682,12 @@ invalid_comprehension_rule(Parser *p)
}
D(fprintf(stderr, "%*c> invalid_comprehension[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('[' | '{') star_named_expression ',' star_named_expressions for_if_clauses"));
Token * _literal;
- void *_tmp_159_var;
+ void *_tmp_164_var;
expr_ty a;
asdl_expr_seq* b;
asdl_comprehension_seq* for_if_clauses_var;
if (
- (_tmp_159_var = _tmp_159_rule(p)) // '[' | '{'
+ (_tmp_164_var = _tmp_164_rule(p)) // '[' | '{'
&&
(a = star_named_expression_rule(p)) // star_named_expression
&&
@@ -20216,12 +20717,12 @@ invalid_comprehension_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> invalid_comprehension[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('[' | '{') star_named_expression ',' for_if_clauses"));
- void *_tmp_160_var;
+ void *_tmp_165_var;
expr_ty a;
Token * b;
asdl_comprehension_seq* for_if_clauses_var;
if (
- (_tmp_160_var = _tmp_160_rule(p)) // '[' | '{'
+ (_tmp_165_var = _tmp_165_rule(p)) // '[' | '{'
&&
(a = star_named_expression_rule(p)) // star_named_expression
&&
@@ -20358,13 +20859,13 @@ invalid_parameters_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> invalid_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(slash_no_default | slash_with_default) param_maybe_default* '/'"));
- asdl_seq * _loop0_162_var;
- void *_tmp_161_var;
+ asdl_seq * _loop0_167_var;
+ void *_tmp_166_var;
Token * a;
if (
- (_tmp_161_var = _tmp_161_rule(p)) // slash_no_default | slash_with_default
+ (_tmp_166_var = _tmp_166_rule(p)) // slash_no_default | slash_with_default
&&
- (_loop0_162_var = _loop0_162_rule(p)) // param_maybe_default*
+ (_loop0_167_var = _loop0_167_rule(p)) // param_maybe_default*
&&
(a = _PyPegen_expect_token(p, 17)) // token='/'
)
@@ -20388,7 +20889,7 @@ invalid_parameters_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> invalid_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_no_default? param_no_default* invalid_parameters_helper param_no_default"));
- asdl_seq * _loop0_163_var;
+ asdl_seq * _loop0_168_var;
void *_opt_var;
UNUSED(_opt_var); // Silence compiler warnings
arg_ty a;
@@ -20396,7 +20897,7 @@ invalid_parameters_rule(Parser *p)
if (
(_opt_var = slash_no_default_rule(p), !p->error_indicator) // slash_no_default?
&&
- (_loop0_163_var = _loop0_163_rule(p)) // param_no_default*
+ (_loop0_168_var = _loop0_168_rule(p)) // param_no_default*
&&
(invalid_parameters_helper_var = invalid_parameters_helper_rule(p)) // invalid_parameters_helper
&&
@@ -20422,18 +20923,18 @@ invalid_parameters_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> invalid_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default* '(' param_no_default+ ','? ')'"));
- asdl_seq * _loop0_164_var;
- asdl_seq * _loop1_165_var;
+ asdl_seq * _loop0_169_var;
+ asdl_seq * _loop1_170_var;
void *_opt_var;
UNUSED(_opt_var); // Silence compiler warnings
Token * a;
Token * b;
if (
- (_loop0_164_var = _loop0_164_rule(p)) // param_no_default*
+ (_loop0_169_var = _loop0_169_rule(p)) // param_no_default*
&&
(a = _PyPegen_expect_token(p, 7)) // token='('
&&
- (_loop1_165_var = _loop1_165_rule(p)) // param_no_default+
+ (_loop1_170_var = _loop1_170_rule(p)) // param_no_default+
&&
(_opt_var = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','?
&&
@@ -20460,22 +20961,22 @@ invalid_parameters_rule(Parser *p)
}
D(fprintf(stderr, "%*c> invalid_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "[(slash_no_default | slash_with_default)] param_maybe_default* '*' (',' | param_no_default) param_maybe_default* '/'"));
Token * _literal;
- asdl_seq * _loop0_167_var;
- asdl_seq * _loop0_169_var;
+ asdl_seq * _loop0_172_var;
+ asdl_seq * _loop0_174_var;
void *_opt_var;
UNUSED(_opt_var); // Silence compiler warnings
- void *_tmp_168_var;
+ void *_tmp_173_var;
Token * a;
if (
- (_opt_var = _tmp_166_rule(p), !p->error_indicator) // [(slash_no_default | slash_with_default)]
+ (_opt_var = _tmp_171_rule(p), !p->error_indicator) // [(slash_no_default | slash_with_default)]
&&
- (_loop0_167_var = _loop0_167_rule(p)) // param_maybe_default*
+ (_loop0_172_var = _loop0_172_rule(p)) // param_maybe_default*
&&
(_literal = _PyPegen_expect_token(p, 16)) // token='*'
&&
- (_tmp_168_var = _tmp_168_rule(p)) // ',' | param_no_default
+ (_tmp_173_var = _tmp_173_rule(p)) // ',' | param_no_default
&&
- (_loop0_169_var = _loop0_169_rule(p)) // param_maybe_default*
+ (_loop0_174_var = _loop0_174_rule(p)) // param_maybe_default*
&&
(a = _PyPegen_expect_token(p, 17)) // token='/'
)
@@ -20500,10 +21001,10 @@ invalid_parameters_rule(Parser *p)
}
D(fprintf(stderr, "%*c> invalid_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default+ '/' '*'"));
Token * _literal;
- asdl_seq * _loop1_170_var;
+ asdl_seq * _loop1_175_var;
Token * a;
if (
- (_loop1_170_var = _loop1_170_rule(p)) // param_maybe_default+
+ (_loop1_175_var = _loop1_175_rule(p)) // param_maybe_default+
&&
(_literal = _PyPegen_expect_token(p, 17)) // token='/'
&&
@@ -20553,7 +21054,7 @@ invalid_default_rule(Parser *p)
if (
(a = _PyPegen_expect_token(p, 22)) // token='='
&&
- _PyPegen_lookahead(1, _tmp_171_rule, p)
+ _PyPegen_lookahead(1, _tmp_176_rule, p)
)
{
D(fprintf(stderr, "%*c+ invalid_default[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'=' &(')' | ',')"));
@@ -20599,12 +21100,12 @@ invalid_star_etc_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> invalid_star_etc[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' (')' | ',' (')' | '**'))"));
- void *_tmp_172_var;
+ void *_tmp_177_var;
Token * a;
if (
(a = _PyPegen_expect_token(p, 16)) // token='*'
&&
- (_tmp_172_var = _tmp_172_rule(p)) // ')' | ',' (')' | '**')
+ (_tmp_177_var = _tmp_177_rule(p)) // ')' | ',' (')' | '**')
)
{
D(fprintf(stderr, "%*c+ invalid_star_etc[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' (')' | ',' (')' | '**'))"));
@@ -20687,20 +21188,20 @@ invalid_star_etc_rule(Parser *p)
}
D(fprintf(stderr, "%*c> invalid_star_etc[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' (param_no_default | ',') param_maybe_default* '*' (param_no_default | ',')"));
Token * _literal;
- asdl_seq * _loop0_174_var;
- void *_tmp_173_var;
- void *_tmp_175_var;
+ asdl_seq * _loop0_179_var;
+ void *_tmp_178_var;
+ void *_tmp_180_var;
Token * a;
if (
(_literal = _PyPegen_expect_token(p, 16)) // token='*'
&&
- (_tmp_173_var = _tmp_173_rule(p)) // param_no_default | ','
+ (_tmp_178_var = _tmp_178_rule(p)) // param_no_default | ','
&&
- (_loop0_174_var = _loop0_174_rule(p)) // param_maybe_default*
+ (_loop0_179_var = _loop0_179_rule(p)) // param_maybe_default*
&&
(a = _PyPegen_expect_token(p, 16)) // token='*'
&&
- (_tmp_175_var = _tmp_175_rule(p)) // param_no_default | ','
+ (_tmp_180_var = _tmp_180_rule(p)) // param_no_default | ','
)
{
D(fprintf(stderr, "%*c+ invalid_star_etc[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' (param_no_default | ',') param_maybe_default* '*' (param_no_default | ',')"));
@@ -20816,7 +21317,7 @@ invalid_kwds_rule(Parser *p)
&&
(_literal_1 = _PyPegen_expect_token(p, 12)) // token=','
&&
- (a = (Token*)_tmp_176_rule(p)) // '*' | '**' | '/'
+ (a = (Token*)_tmp_181_rule(p)) // '*' | '**' | '/'
)
{
D(fprintf(stderr, "%*c+ invalid_kwds[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**' param ',' ('*' | '**' | '/')"));
@@ -20882,13 +21383,13 @@ invalid_parameters_helper_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> invalid_parameters_helper[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default+"));
- asdl_seq * _loop1_177_var;
+ asdl_seq * _loop1_182_var;
if (
- (_loop1_177_var = _loop1_177_rule(p)) // param_with_default+
+ (_loop1_182_var = _loop1_182_rule(p)) // param_with_default+
)
{
D(fprintf(stderr, "%*c+ invalid_parameters_helper[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_with_default+"));
- _res = _loop1_177_var;
+ _res = _loop1_182_var;
goto done;
}
p->mark = _mark;
@@ -20954,13 +21455,13 @@ invalid_lambda_parameters_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> invalid_lambda_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(lambda_slash_no_default | lambda_slash_with_default) lambda_param_maybe_default* '/'"));
- asdl_seq * _loop0_179_var;
- void *_tmp_178_var;
+ asdl_seq * _loop0_184_var;
+ void *_tmp_183_var;
Token * a;
if (
- (_tmp_178_var = _tmp_178_rule(p)) // lambda_slash_no_default | lambda_slash_with_default
+ (_tmp_183_var = _tmp_183_rule(p)) // lambda_slash_no_default | lambda_slash_with_default
&&
- (_loop0_179_var = _loop0_179_rule(p)) // lambda_param_maybe_default*
+ (_loop0_184_var = _loop0_184_rule(p)) // lambda_param_maybe_default*
&&
(a = _PyPegen_expect_token(p, 17)) // token='/'
)
@@ -20984,7 +21485,7 @@ invalid_lambda_parameters_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> invalid_lambda_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_slash_no_default? lambda_param_no_default* invalid_lambda_parameters_helper lambda_param_no_default"));
- asdl_seq * _loop0_180_var;
+ asdl_seq * _loop0_185_var;
void *_opt_var;
UNUSED(_opt_var); // Silence compiler warnings
arg_ty a;
@@ -20992,7 +21493,7 @@ invalid_lambda_parameters_rule(Parser *p)
if (
(_opt_var = lambda_slash_no_default_rule(p), !p->error_indicator) // lambda_slash_no_default?
&&
- (_loop0_180_var = _loop0_180_rule(p)) // lambda_param_no_default*
+ (_loop0_185_var = _loop0_185_rule(p)) // lambda_param_no_default*
&&
(invalid_lambda_parameters_helper_var = invalid_lambda_parameters_helper_rule(p)) // invalid_lambda_parameters_helper
&&
@@ -21018,18 +21519,18 @@ invalid_lambda_parameters_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> invalid_lambda_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default* '(' ','.lambda_param+ ','? ')'"));
- asdl_seq * _gather_182_var;
- asdl_seq * _loop0_181_var;
+ asdl_seq * _gather_187_var;
+ asdl_seq * _loop0_186_var;
void *_opt_var;
UNUSED(_opt_var); // Silence compiler warnings
Token * a;
Token * b;
if (
- (_loop0_181_var = _loop0_181_rule(p)) // lambda_param_no_default*
+ (_loop0_186_var = _loop0_186_rule(p)) // lambda_param_no_default*
&&
(a = _PyPegen_expect_token(p, 7)) // token='('
&&
- (_gather_182_var = _gather_182_rule(p)) // ','.lambda_param+
+ (_gather_187_var = _gather_187_rule(p)) // ','.lambda_param+
&&
(_opt_var = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','?
&&
@@ -21056,22 +21557,22 @@ invalid_lambda_parameters_rule(Parser *p)
}
D(fprintf(stderr, "%*c> invalid_lambda_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "[(lambda_slash_no_default | lambda_slash_with_default)] lambda_param_maybe_default* '*' (',' | lambda_param_no_default) lambda_param_maybe_default* '/'"));
Token * _literal;
- asdl_seq * _loop0_185_var;
- asdl_seq * _loop0_187_var;
+ asdl_seq * _loop0_190_var;
+ asdl_seq * _loop0_192_var;
void *_opt_var;
UNUSED(_opt_var); // Silence compiler warnings
- void *_tmp_186_var;
+ void *_tmp_191_var;
Token * a;
if (
- (_opt_var = _tmp_184_rule(p), !p->error_indicator) // [(lambda_slash_no_default | lambda_slash_with_default)]
+ (_opt_var = _tmp_189_rule(p), !p->error_indicator) // [(lambda_slash_no_default | lambda_slash_with_default)]
&&
- (_loop0_185_var = _loop0_185_rule(p)) // lambda_param_maybe_default*
+ (_loop0_190_var = _loop0_190_rule(p)) // lambda_param_maybe_default*
&&
(_literal = _PyPegen_expect_token(p, 16)) // token='*'
&&
- (_tmp_186_var = _tmp_186_rule(p)) // ',' | lambda_param_no_default
+ (_tmp_191_var = _tmp_191_rule(p)) // ',' | lambda_param_no_default
&&
- (_loop0_187_var = _loop0_187_rule(p)) // lambda_param_maybe_default*
+ (_loop0_192_var = _loop0_192_rule(p)) // lambda_param_maybe_default*
&&
(a = _PyPegen_expect_token(p, 17)) // token='/'
)
@@ -21096,10 +21597,10 @@ invalid_lambda_parameters_rule(Parser *p)
}
D(fprintf(stderr, "%*c> invalid_lambda_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default+ '/' '*'"));
Token * _literal;
- asdl_seq * _loop1_188_var;
+ asdl_seq * _loop1_193_var;
Token * a;
if (
- (_loop1_188_var = _loop1_188_rule(p)) // lambda_param_maybe_default+
+ (_loop1_193_var = _loop1_193_rule(p)) // lambda_param_maybe_default+
&&
(_literal = _PyPegen_expect_token(p, 17)) // token='/'
&&
@@ -21171,13 +21672,13 @@ invalid_lambda_parameters_helper_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> invalid_lambda_parameters_helper[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default+"));
- asdl_seq * _loop1_189_var;
+ asdl_seq * _loop1_194_var;
if (
- (_loop1_189_var = _loop1_189_rule(p)) // lambda_param_with_default+
+ (_loop1_194_var = _loop1_194_rule(p)) // lambda_param_with_default+
)
{
D(fprintf(stderr, "%*c+ invalid_lambda_parameters_helper[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default+"));
- _res = _loop1_189_var;
+ _res = _loop1_194_var;
goto done;
}
p->mark = _mark;
@@ -21214,11 +21715,11 @@ invalid_lambda_star_etc_rule(Parser *p)
}
D(fprintf(stderr, "%*c> invalid_lambda_star_etc[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' (':' | ',' (':' | '**'))"));
Token * _literal;
- void *_tmp_190_var;
+ void *_tmp_195_var;
if (
(_literal = _PyPegen_expect_token(p, 16)) // token='*'
&&
- (_tmp_190_var = _tmp_190_rule(p)) // ':' | ',' (':' | '**')
+ (_tmp_195_var = _tmp_195_rule(p)) // ':' | ',' (':' | '**')
)
{
D(fprintf(stderr, "%*c+ invalid_lambda_star_etc[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' (':' | ',' (':' | '**'))"));
@@ -21271,20 +21772,20 @@ invalid_lambda_star_etc_rule(Parser *p)
}
D(fprintf(stderr, "%*c> invalid_lambda_star_etc[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' (lambda_param_no_default | ',') lambda_param_maybe_default* '*' (lambda_param_no_default | ',')"));
Token * _literal;
- asdl_seq * _loop0_192_var;
- void *_tmp_191_var;
- void *_tmp_193_var;
+ asdl_seq * _loop0_197_var;
+ void *_tmp_196_var;
+ void *_tmp_198_var;
Token * a;
if (
(_literal = _PyPegen_expect_token(p, 16)) // token='*'
&&
- (_tmp_191_var = _tmp_191_rule(p)) // lambda_param_no_default | ','
+ (_tmp_196_var = _tmp_196_rule(p)) // lambda_param_no_default | ','
&&
- (_loop0_192_var = _loop0_192_rule(p)) // lambda_param_maybe_default*
+ (_loop0_197_var = _loop0_197_rule(p)) // lambda_param_maybe_default*
&&
(a = _PyPegen_expect_token(p, 16)) // token='*'
&&
- (_tmp_193_var = _tmp_193_rule(p)) // lambda_param_no_default | ','
+ (_tmp_198_var = _tmp_198_rule(p)) // lambda_param_no_default | ','
)
{
D(fprintf(stderr, "%*c+ invalid_lambda_star_etc[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' (lambda_param_no_default | ',') lambda_param_maybe_default* '*' (lambda_param_no_default | ',')"));
@@ -21403,7 +21904,7 @@ invalid_lambda_kwds_rule(Parser *p)
&&
(_literal_1 = _PyPegen_expect_token(p, 12)) // token=','
&&
- (a = (Token*)_tmp_194_rule(p)) // '*' | '**' | '/'
+ (a = (Token*)_tmp_199_rule(p)) // '*' | '**' | '/'
)
{
D(fprintf(stderr, "%*c+ invalid_lambda_kwds[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**' lambda_param ',' ('*' | '**' | '/')"));
@@ -21507,11 +22008,11 @@ invalid_with_item_rule(Parser *p)
if (
(expression_var = expression_rule(p)) // expression
&&
- (_keyword = _PyPegen_expect_token(p, 639)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 640)) // token='as'
&&
(a = expression_rule(p)) // expression
&&
- _PyPegen_lookahead(1, _tmp_195_rule, p)
+ _PyPegen_lookahead(1, _tmp_200_rule, p)
)
{
D(fprintf(stderr, "%*c+ invalid_with_item[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression 'as' expression &(',' | ')' | ':')"));
@@ -21560,7 +22061,7 @@ invalid_for_target_rule(Parser *p)
if (
(_opt_var = _PyPegen_expect_token(p, ASYNC), !p->error_indicator) // ASYNC?
&&
- (_keyword = _PyPegen_expect_token(p, 649)) // token='for'
+ (_keyword = _PyPegen_expect_token(p, 650)) // token='for'
&&
(a = star_expressions_rule(p)) // star_expressions
)
@@ -21692,11 +22193,11 @@ invalid_import_rule(Parser *p)
expr_ty dotted_name_var;
expr_ty dotted_name_var_1;
if (
- (a = _PyPegen_expect_token(p, 606)) // token='import'
+ (a = _PyPegen_expect_token(p, 607)) // token='import'
&&
(dotted_name_var = dotted_name_rule(p)) // dotted_name
&&
- (_keyword = _PyPegen_expect_token(p, 607)) // token='from'
+ (_keyword = _PyPegen_expect_token(p, 608)) // token='from'
&&
(dotted_name_var_1 = dotted_name_rule(p)) // dotted_name
)
@@ -21792,7 +22293,7 @@ invalid_with_stmt_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> invalid_with_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC? 'with' ','.(expression ['as' star_target])+ NEWLINE"));
- asdl_seq * _gather_196_var;
+ asdl_seq * _gather_201_var;
Token * _keyword;
void *_opt_var;
UNUSED(_opt_var); // Silence compiler warnings
@@ -21800,9 +22301,9 @@ invalid_with_stmt_rule(Parser *p)
if (
(_opt_var = _PyPegen_expect_token(p, ASYNC), !p->error_indicator) // ASYNC?
&&
- (_keyword = _PyPegen_expect_token(p, 614)) // token='with'
+ (_keyword = _PyPegen_expect_token(p, 615)) // token='with'
&&
- (_gather_196_var = _gather_196_rule(p)) // ','.(expression ['as' star_target])+
+ (_gather_201_var = _gather_201_rule(p)) // ','.(expression ['as' star_target])+
&&
(newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE'
)
@@ -21826,7 +22327,7 @@ invalid_with_stmt_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> invalid_with_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC? 'with' '(' ','.(expressions ['as' star_target])+ ','? ')' NEWLINE"));
- asdl_seq * _gather_198_var;
+ asdl_seq * _gather_203_var;
Token * _keyword;
Token * _literal;
Token * _literal_1;
@@ -21838,11 +22339,11 @@ invalid_with_stmt_rule(Parser *p)
if (
(_opt_var = _PyPegen_expect_token(p, ASYNC), !p->error_indicator) // ASYNC?
&&
- (_keyword = _PyPegen_expect_token(p, 614)) // token='with'
+ (_keyword = _PyPegen_expect_token(p, 615)) // token='with'
&&
(_literal = _PyPegen_expect_token(p, 7)) // token='('
&&
- (_gather_198_var = _gather_198_rule(p)) // ','.(expressions ['as' star_target])+
+ (_gather_203_var = _gather_203_rule(p)) // ','.(expressions ['as' star_target])+
&&
(_opt_var_1 = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','?
&&
@@ -21892,7 +22393,7 @@ invalid_with_stmt_indent_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> invalid_with_stmt_indent[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC? 'with' ','.(expression ['as' star_target])+ ':' NEWLINE !INDENT"));
- asdl_seq * _gather_200_var;
+ asdl_seq * _gather_205_var;
Token * _literal;
void *_opt_var;
UNUSED(_opt_var); // Silence compiler warnings
@@ -21901,9 +22402,9 @@ invalid_with_stmt_indent_rule(Parser *p)
if (
(_opt_var = _PyPegen_expect_token(p, ASYNC), !p->error_indicator) // ASYNC?
&&
- (a = _PyPegen_expect_token(p, 614)) // token='with'
+ (a = _PyPegen_expect_token(p, 615)) // token='with'
&&
- (_gather_200_var = _gather_200_rule(p)) // ','.(expression ['as' star_target])+
+ (_gather_205_var = _gather_205_rule(p)) // ','.(expression ['as' star_target])+
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
@@ -21931,7 +22432,7 @@ invalid_with_stmt_indent_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> invalid_with_stmt_indent[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC? 'with' '(' ','.(expressions ['as' star_target])+ ','? ')' ':' NEWLINE !INDENT"));
- asdl_seq * _gather_202_var;
+ asdl_seq * _gather_207_var;
Token * _literal;
Token * _literal_1;
Token * _literal_2;
@@ -21944,11 +22445,11 @@ invalid_with_stmt_indent_rule(Parser *p)
if (
(_opt_var = _PyPegen_expect_token(p, ASYNC), !p->error_indicator) // ASYNC?
&&
- (a = _PyPegen_expect_token(p, 614)) // token='with'
+ (a = _PyPegen_expect_token(p, 615)) // token='with'
&&
(_literal = _PyPegen_expect_token(p, 7)) // token='('
&&
- (_gather_202_var = _gather_202_rule(p)) // ','.(expressions ['as' star_target])+
+ (_gather_207_var = _gather_207_rule(p)) // ','.(expressions ['as' star_target])+
&&
(_opt_var_1 = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','?
&&
@@ -22008,7 +22509,7 @@ invalid_try_stmt_rule(Parser *p)
Token * a;
Token * newline_var;
if (
- (a = _PyPegen_expect_token(p, 623)) // token='try'
+ (a = _PyPegen_expect_token(p, 624)) // token='try'
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
@@ -22040,13 +22541,13 @@ invalid_try_stmt_rule(Parser *p)
Token * _literal;
asdl_stmt_seq* block_var;
if (
- (_keyword = _PyPegen_expect_token(p, 623)) // token='try'
+ (_keyword = _PyPegen_expect_token(p, 624)) // token='try'
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
(block_var = block_rule(p)) // block
&&
- _PyPegen_lookahead(0, _tmp_204_rule, p)
+ _PyPegen_lookahead(0, _tmp_209_rule, p)
)
{
D(fprintf(stderr, "%*c+ invalid_try_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'try' ':' block !('except' | 'finally')"));
@@ -22071,29 +22572,29 @@ invalid_try_stmt_rule(Parser *p)
Token * _keyword;
Token * _literal;
Token * _literal_1;
- asdl_seq * _loop0_205_var;
- asdl_seq * _loop1_206_var;
+ asdl_seq * _loop0_210_var;
+ asdl_seq * _loop1_211_var;
void *_opt_var;
UNUSED(_opt_var); // Silence compiler warnings
Token * a;
Token * b;
expr_ty expression_var;
if (
- (_keyword = _PyPegen_expect_token(p, 623)) // token='try'
+ (_keyword = _PyPegen_expect_token(p, 624)) // token='try'
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
- (_loop0_205_var = _loop0_205_rule(p)) // block*
+ (_loop0_210_var = _loop0_210_rule(p)) // block*
&&
- (_loop1_206_var = _loop1_206_rule(p)) // except_block+
+ (_loop1_211_var = _loop1_211_rule(p)) // except_block+
&&
- (a = _PyPegen_expect_token(p, 636)) // token='except'
+ (a = _PyPegen_expect_token(p, 637)) // token='except'
&&
(b = _PyPegen_expect_token(p, 16)) // token='*'
&&
(expression_var = expression_rule(p)) // expression
&&
- (_opt_var = _tmp_207_rule(p), !p->error_indicator) // ['as' NAME]
+ (_opt_var = _tmp_212_rule(p), !p->error_indicator) // ['as' NAME]
&&
(_literal_1 = _PyPegen_expect_token(p, 11)) // token=':'
)
@@ -22120,23 +22621,23 @@ invalid_try_stmt_rule(Parser *p)
Token * _keyword;
Token * _literal;
Token * _literal_1;
- asdl_seq * _loop0_208_var;
- asdl_seq * _loop1_209_var;
+ asdl_seq * _loop0_213_var;
+ asdl_seq * _loop1_214_var;
void *_opt_var;
UNUSED(_opt_var); // Silence compiler warnings
Token * a;
if (
- (_keyword = _PyPegen_expect_token(p, 623)) // token='try'
+ (_keyword = _PyPegen_expect_token(p, 624)) // token='try'
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
- (_loop0_208_var = _loop0_208_rule(p)) // block*
+ (_loop0_213_var = _loop0_213_rule(p)) // block*
&&
- (_loop1_209_var = _loop1_209_rule(p)) // except_star_block+
+ (_loop1_214_var = _loop1_214_rule(p)) // except_star_block+
&&
- (a = _PyPegen_expect_token(p, 636)) // token='except'
+ (a = _PyPegen_expect_token(p, 637)) // token='except'
&&
- (_opt_var = _tmp_210_rule(p), !p->error_indicator) // [expression ['as' NAME]]
+ (_opt_var = _tmp_215_rule(p), !p->error_indicator) // [expression ['as' NAME]]
&&
(_literal_1 = _PyPegen_expect_token(p, 11)) // token=':'
)
@@ -22194,7 +22695,7 @@ invalid_except_stmt_rule(Parser *p)
expr_ty a;
expr_ty expressions_var;
if (
- (_keyword = _PyPegen_expect_token(p, 636)) // token='except'
+ (_keyword = _PyPegen_expect_token(p, 637)) // token='except'
&&
(_opt_var = _PyPegen_expect_token(p, 16), !p->error_indicator) // '*'?
&&
@@ -22204,7 +22705,7 @@ invalid_except_stmt_rule(Parser *p)
&&
(expressions_var = expressions_rule(p)) // expressions
&&
- (_opt_var_1 = _tmp_211_rule(p), !p->error_indicator) // ['as' NAME]
+ (_opt_var_1 = _tmp_216_rule(p), !p->error_indicator) // ['as' NAME]
&&
(_literal_1 = _PyPegen_expect_token(p, 11)) // token=':'
)
@@ -22236,13 +22737,13 @@ invalid_except_stmt_rule(Parser *p)
expr_ty expression_var;
Token * newline_var;
if (
- (a = _PyPegen_expect_token(p, 636)) // token='except'
+ (a = _PyPegen_expect_token(p, 637)) // token='except'
&&
(_opt_var = _PyPegen_expect_token(p, 16), !p->error_indicator) // '*'?
&&
(expression_var = expression_rule(p)) // expression
&&
- (_opt_var_1 = _tmp_212_rule(p), !p->error_indicator) // ['as' NAME]
+ (_opt_var_1 = _tmp_217_rule(p), !p->error_indicator) // ['as' NAME]
&&
(newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE'
)
@@ -22269,7 +22770,7 @@ invalid_except_stmt_rule(Parser *p)
Token * a;
Token * newline_var;
if (
- (a = _PyPegen_expect_token(p, 636)) // token='except'
+ (a = _PyPegen_expect_token(p, 637)) // token='except'
&&
(newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE'
)
@@ -22294,14 +22795,14 @@ invalid_except_stmt_rule(Parser *p)
}
D(fprintf(stderr, "%*c> invalid_except_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'except' '*' (NEWLINE | ':')"));
Token * _literal;
- void *_tmp_213_var;
+ void *_tmp_218_var;
Token * a;
if (
- (a = _PyPegen_expect_token(p, 636)) // token='except'
+ (a = _PyPegen_expect_token(p, 637)) // token='except'
&&
(_literal = _PyPegen_expect_token(p, 16)) // token='*'
&&
- (_tmp_213_var = _tmp_213_rule(p)) // NEWLINE | ':'
+ (_tmp_218_var = _tmp_218_rule(p)) // NEWLINE | ':'
)
{
D(fprintf(stderr, "%*c+ invalid_except_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'except' '*' (NEWLINE | ':')"));
@@ -22347,7 +22848,7 @@ invalid_finally_stmt_rule(Parser *p)
Token * a;
Token * newline_var;
if (
- (a = _PyPegen_expect_token(p, 632)) // token='finally'
+ (a = _PyPegen_expect_token(p, 633)) // token='finally'
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
@@ -22404,11 +22905,11 @@ invalid_except_stmt_indent_rule(Parser *p)
expr_ty expression_var;
Token * newline_var;
if (
- (a = _PyPegen_expect_token(p, 636)) // token='except'
+ (a = _PyPegen_expect_token(p, 637)) // token='except'
&&
(expression_var = expression_rule(p)) // expression
&&
- (_opt_var = _tmp_214_rule(p), !p->error_indicator) // ['as' NAME]
+ (_opt_var = _tmp_219_rule(p), !p->error_indicator) // ['as' NAME]
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
@@ -22440,7 +22941,7 @@ invalid_except_stmt_indent_rule(Parser *p)
Token * a;
Token * newline_var;
if (
- (a = _PyPegen_expect_token(p, 636)) // token='except'
+ (a = _PyPegen_expect_token(p, 637)) // token='except'
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
@@ -22497,13 +22998,13 @@ invalid_except_star_stmt_indent_rule(Parser *p)
expr_ty expression_var;
Token * newline_var;
if (
- (a = _PyPegen_expect_token(p, 636)) // token='except'
+ (a = _PyPegen_expect_token(p, 637)) // token='except'
&&
(_literal = _PyPegen_expect_token(p, 16)) // token='*'
&&
(expression_var = expression_rule(p)) // expression
&&
- (_opt_var = _tmp_215_rule(p), !p->error_indicator) // ['as' NAME]
+ (_opt_var = _tmp_220_rule(p), !p->error_indicator) // ['as' NAME]
&&
(_literal_1 = _PyPegen_expect_token(p, 11)) // token=':'
&&
@@ -22739,7 +23240,7 @@ invalid_as_pattern_rule(Parser *p)
if (
(or_pattern_var = or_pattern_rule(p)) // or_pattern
&&
- (_keyword = _PyPegen_expect_token(p, 639)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 640)) // token='as'
&&
(a = _PyPegen_expect_soft_keyword(p, "_")) // soft_keyword='"_"'
)
@@ -22769,7 +23270,7 @@ invalid_as_pattern_rule(Parser *p)
if (
(or_pattern_var = or_pattern_rule(p)) // or_pattern
&&
- (_keyword = _PyPegen_expect_token(p, 639)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 640)) // token='as'
&&
_PyPegen_lookahead_with_name(0, _PyPegen_name_token, p)
&&
@@ -22872,7 +23373,7 @@ invalid_class_argument_pattern_rule(Parser *p)
asdl_pattern_seq* a;
asdl_seq* keyword_patterns_var;
if (
- (_opt_var = _tmp_216_rule(p), !p->error_indicator) // [positional_patterns ',']
+ (_opt_var = _tmp_221_rule(p), !p->error_indicator) // [positional_patterns ',']
&&
(keyword_patterns_var = keyword_patterns_rule(p)) // keyword_patterns
&&
@@ -22926,7 +23427,7 @@ invalid_if_stmt_rule(Parser *p)
expr_ty named_expression_var;
Token * newline_var;
if (
- (_keyword = _PyPegen_expect_token(p, 641)) // token='if'
+ (_keyword = _PyPegen_expect_token(p, 642)) // token='if'
&&
(named_expression_var = named_expression_rule(p)) // named_expression
&&
@@ -22957,7 +23458,7 @@ invalid_if_stmt_rule(Parser *p)
expr_ty a_1;
Token * newline_var;
if (
- (a = _PyPegen_expect_token(p, 641)) // token='if'
+ (a = _PyPegen_expect_token(p, 642)) // token='if'
&&
(a_1 = named_expression_rule(p)) // named_expression
&&
@@ -23013,7 +23514,7 @@ invalid_elif_stmt_rule(Parser *p)
expr_ty named_expression_var;
Token * newline_var;
if (
- (_keyword = _PyPegen_expect_token(p, 643)) // token='elif'
+ (_keyword = _PyPegen_expect_token(p, 644)) // token='elif'
&&
(named_expression_var = named_expression_rule(p)) // named_expression
&&
@@ -23044,7 +23545,7 @@ invalid_elif_stmt_rule(Parser *p)
expr_ty named_expression_var;
Token * newline_var;
if (
- (a = _PyPegen_expect_token(p, 643)) // token='elif'
+ (a = _PyPegen_expect_token(p, 644)) // token='elif'
&&
(named_expression_var = named_expression_rule(p)) // named_expression
&&
@@ -23098,7 +23599,7 @@ invalid_else_stmt_rule(Parser *p)
Token * a;
Token * newline_var;
if (
- (a = _PyPegen_expect_token(p, 644)) // token='else'
+ (a = _PyPegen_expect_token(p, 645)) // token='else'
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
@@ -23152,7 +23653,7 @@ invalid_while_stmt_rule(Parser *p)
expr_ty named_expression_var;
Token * newline_var;
if (
- (_keyword = _PyPegen_expect_token(p, 646)) // token='while'
+ (_keyword = _PyPegen_expect_token(p, 647)) // token='while'
&&
(named_expression_var = named_expression_rule(p)) // named_expression
&&
@@ -23183,7 +23684,7 @@ invalid_while_stmt_rule(Parser *p)
expr_ty named_expression_var;
Token * newline_var;
if (
- (a = _PyPegen_expect_token(p, 646)) // token='while'
+ (a = _PyPegen_expect_token(p, 647)) // token='while'
&&
(named_expression_var = named_expression_rule(p)) // named_expression
&&
@@ -23245,11 +23746,11 @@ invalid_for_stmt_rule(Parser *p)
if (
(_opt_var = _PyPegen_expect_token(p, ASYNC), !p->error_indicator) // ASYNC?
&&
- (_keyword = _PyPegen_expect_token(p, 649)) // token='for'
+ (_keyword = _PyPegen_expect_token(p, 650)) // token='for'
&&
(star_targets_var = star_targets_rule(p)) // star_targets
&&
- (_keyword_1 = _PyPegen_expect_token(p, 650)) // token='in'
+ (_keyword_1 = _PyPegen_expect_token(p, 651)) // token='in'
&&
(star_expressions_var = star_expressions_rule(p)) // star_expressions
&&
@@ -23286,11 +23787,11 @@ invalid_for_stmt_rule(Parser *p)
if (
(_opt_var = _PyPegen_expect_token(p, ASYNC), !p->error_indicator) // ASYNC?
&&
- (a = _PyPegen_expect_token(p, 649)) // token='for'
+ (a = _PyPegen_expect_token(p, 650)) // token='for'
&&
(star_targets_var = star_targets_rule(p)) // star_targets
&&
- (_keyword = _PyPegen_expect_token(p, 650)) // token='in'
+ (_keyword = _PyPegen_expect_token(p, 651)) // token='in'
&&
(star_expressions_var = star_expressions_rule(p)) // star_expressions
&&
@@ -23356,7 +23857,7 @@ invalid_def_raw_rule(Parser *p)
if (
(_opt_var = _PyPegen_expect_token(p, ASYNC), !p->error_indicator) // ASYNC?
&&
- (a = _PyPegen_expect_token(p, 651)) // token='def'
+ (a = _PyPegen_expect_token(p, 652)) // token='def'
&&
(name_var = _PyPegen_name_token(p)) // NAME
&&
@@ -23366,7 +23867,7 @@ invalid_def_raw_rule(Parser *p)
&&
(_literal_1 = _PyPegen_expect_token(p, 8)) // token=')'
&&
- (_opt_var_2 = _tmp_217_rule(p), !p->error_indicator) // ['->' expression]
+ (_opt_var_2 = _tmp_222_rule(p), !p->error_indicator) // ['->' expression]
&&
(_literal_2 = _PyPegen_expect_token(p, 11)) // token=':'
&&
@@ -23422,11 +23923,11 @@ invalid_class_def_raw_rule(Parser *p)
expr_ty name_var;
Token * newline_var;
if (
- (_keyword = _PyPegen_expect_token(p, 653)) // token='class'
+ (_keyword = _PyPegen_expect_token(p, 654)) // token='class'
&&
(name_var = _PyPegen_name_token(p)) // NAME
&&
- (_opt_var = _tmp_218_rule(p), !p->error_indicator) // ['(' arguments? ')']
+ (_opt_var = _tmp_223_rule(p), !p->error_indicator) // ['(' arguments? ')']
&&
(newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE'
)
@@ -23457,11 +23958,11 @@ invalid_class_def_raw_rule(Parser *p)
expr_ty name_var;
Token * newline_var;
if (
- (a = _PyPegen_expect_token(p, 653)) // token='class'
+ (a = _PyPegen_expect_token(p, 654)) // token='class'
&&
(name_var = _PyPegen_name_token(p)) // NAME
&&
- (_opt_var = _tmp_219_rule(p), !p->error_indicator) // ['(' arguments? ')']
+ (_opt_var = _tmp_224_rule(p), !p->error_indicator) // ['(' arguments? ')']
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
@@ -23512,11 +24013,11 @@ invalid_double_starred_kvpairs_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> invalid_double_starred_kvpairs[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.double_starred_kvpair+ ',' invalid_kvpair"));
- asdl_seq * _gather_220_var;
+ asdl_seq * _gather_225_var;
Token * _literal;
void *invalid_kvpair_var;
if (
- (_gather_220_var = _gather_220_rule(p)) // ','.double_starred_kvpair+
+ (_gather_225_var = _gather_225_rule(p)) // ','.double_starred_kvpair+
&&
(_literal = _PyPegen_expect_token(p, 12)) // token=','
&&
@@ -23524,7 +24025,7 @@ invalid_double_starred_kvpairs_rule(Parser *p)
)
{
D(fprintf(stderr, "%*c+ invalid_double_starred_kvpairs[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.double_starred_kvpair+ ',' invalid_kvpair"));
- _res = _PyPegen_dummy_name(p, _gather_220_var, _literal, invalid_kvpair_var);
+ _res = _PyPegen_dummy_name(p, _gather_225_var, _literal, invalid_kvpair_var);
goto done;
}
p->mark = _mark;
@@ -23577,7 +24078,7 @@ invalid_double_starred_kvpairs_rule(Parser *p)
&&
(a = _PyPegen_expect_token(p, 11)) // token=':'
&&
- _PyPegen_lookahead(1, _tmp_222_rule, p)
+ _PyPegen_lookahead(1, _tmp_227_rule, p)
)
{
D(fprintf(stderr, "%*c+ invalid_double_starred_kvpairs[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ':' &('}' | ',')"));
@@ -23688,7 +24189,7 @@ invalid_kvpair_rule(Parser *p)
&&
(a = _PyPegen_expect_token(p, 11)) // token=':'
&&
- _PyPegen_lookahead(1, _tmp_223_rule, p)
+ _PyPegen_lookahead(1, _tmp_228_rule, p)
)
{
D(fprintf(stderr, "%*c+ invalid_kvpair[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ':' &('}' | ',')"));
@@ -23763,6 +24264,450 @@ invalid_starred_expression_rule(Parser *p)
return _res;
}
+// invalid_replacement_field:
+// | '{' '='
+// | '{' '!'
+// | '{' ':'
+// | '{' '}'
+// | '{' !(yield_expr | star_expressions)
+// | '{' (yield_expr | star_expressions) !('=' | '!' | ':' | '}')
+// | '{' (yield_expr | star_expressions) '=' !('!' | ':' | '}')
+// | '{' (yield_expr | star_expressions) '='? invalid_conversion_character
+// | '{' (yield_expr | star_expressions) '='? ['!' NAME] !(':' | '}')
+// | '{' (yield_expr | star_expressions) '='? ['!' NAME] ':' fstring_format_spec* !'}'
+// | '{' (yield_expr | star_expressions) '='? ['!' NAME] !'}'
+static void *
+invalid_replacement_field_rule(Parser *p)
+{
+ if (p->level++ == MAXSTACK) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ }
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ void * _res = NULL;
+ int _mark = p->mark;
+ { // '{' '='
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> invalid_replacement_field[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{' '='"));
+ Token * _literal;
+ Token * a;
+ if (
+ (_literal = _PyPegen_expect_token(p, 25)) // token='{'
+ &&
+ (a = _PyPegen_expect_token(p, 22)) // token='='
+ )
+ {
+ D(fprintf(stderr, "%*c+ invalid_replacement_field[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{' '='"));
+ _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "f-string: valid expression required before '='" );
+ if (_res == NULL && PyErr_Occurred()) {
+ p->error_indicator = 1;
+ p->level--;
+ return NULL;
+ }
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s invalid_replacement_field[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'{' '='"));
+ }
+ { // '{' '!'
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> invalid_replacement_field[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{' '!'"));
+ Token * _literal;
+ Token * a;
+ if (
+ (_literal = _PyPegen_expect_token(p, 25)) // token='{'
+ &&
+ (a = _PyPegen_expect_token(p, 54)) // token='!'
+ )
+ {
+ D(fprintf(stderr, "%*c+ invalid_replacement_field[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{' '!'"));
+ _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "f-string: valid expression required before '!'" );
+ if (_res == NULL && PyErr_Occurred()) {
+ p->error_indicator = 1;
+ p->level--;
+ return NULL;
+ }
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s invalid_replacement_field[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'{' '!'"));
+ }
+ { // '{' ':'
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> invalid_replacement_field[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{' ':'"));
+ Token * _literal;
+ Token * a;
+ if (
+ (_literal = _PyPegen_expect_token(p, 25)) // token='{'
+ &&
+ (a = _PyPegen_expect_token(p, 11)) // token=':'
+ )
+ {
+ D(fprintf(stderr, "%*c+ invalid_replacement_field[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{' ':'"));
+ _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "f-string: valid expression required before ':'" );
+ if (_res == NULL && PyErr_Occurred()) {
+ p->error_indicator = 1;
+ p->level--;
+ return NULL;
+ }
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s invalid_replacement_field[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'{' ':'"));
+ }
+ { // '{' '}'
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> invalid_replacement_field[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{' '}'"));
+ Token * _literal;
+ Token * a;
+ if (
+ (_literal = _PyPegen_expect_token(p, 25)) // token='{'
+ &&
+ (a = _PyPegen_expect_token(p, 26)) // token='}'
+ )
+ {
+ D(fprintf(stderr, "%*c+ invalid_replacement_field[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{' '}'"));
+ _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "f-string: valid expression required before '}'" );
+ if (_res == NULL && PyErr_Occurred()) {
+ p->error_indicator = 1;
+ p->level--;
+ return NULL;
+ }
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s invalid_replacement_field[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'{' '}'"));
+ }
+ { // '{' !(yield_expr | star_expressions)
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> invalid_replacement_field[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{' !(yield_expr | star_expressions)"));
+ Token * _literal;
+ if (
+ (_literal = _PyPegen_expect_token(p, 25)) // token='{'
+ &&
+ _PyPegen_lookahead(0, _tmp_229_rule, p)
+ )
+ {
+ D(fprintf(stderr, "%*c+ invalid_replacement_field[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{' !(yield_expr | star_expressions)"));
+ _res = RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN ( "f-string: expecting a valid expression after '{'" );
+ if (_res == NULL && PyErr_Occurred()) {
+ p->error_indicator = 1;
+ p->level--;
+ return NULL;
+ }
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s invalid_replacement_field[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'{' !(yield_expr | star_expressions)"));
+ }
+ { // '{' (yield_expr | star_expressions) !('=' | '!' | ':' | '}')
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> invalid_replacement_field[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{' (yield_expr | star_expressions) !('=' | '!' | ':' | '}')"));
+ Token * _literal;
+ void *_tmp_230_var;
+ if (
+ (_literal = _PyPegen_expect_token(p, 25)) // token='{'
+ &&
+ (_tmp_230_var = _tmp_230_rule(p)) // yield_expr | star_expressions
+ &&
+ _PyPegen_lookahead(0, _tmp_231_rule, p)
+ )
+ {
+ D(fprintf(stderr, "%*c+ invalid_replacement_field[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{' (yield_expr | star_expressions) !('=' | '!' | ':' | '}')"));
+ _res = PyErr_Occurred ( ) ? NULL : RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN ( "f-string: expecting '=', or '!', or ':', or '}'" );
+ if (_res == NULL && PyErr_Occurred()) {
+ p->error_indicator = 1;
+ p->level--;
+ return NULL;
+ }
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s invalid_replacement_field[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'{' (yield_expr | star_expressions) !('=' | '!' | ':' | '}')"));
+ }
+ { // '{' (yield_expr | star_expressions) '=' !('!' | ':' | '}')
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> invalid_replacement_field[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{' (yield_expr | star_expressions) '=' !('!' | ':' | '}')"));
+ Token * _literal;
+ Token * _literal_1;
+ void *_tmp_232_var;
+ if (
+ (_literal = _PyPegen_expect_token(p, 25)) // token='{'
+ &&
+ (_tmp_232_var = _tmp_232_rule(p)) // yield_expr | star_expressions
+ &&
+ (_literal_1 = _PyPegen_expect_token(p, 22)) // token='='
+ &&
+ _PyPegen_lookahead(0, _tmp_233_rule, p)
+ )
+ {
+ D(fprintf(stderr, "%*c+ invalid_replacement_field[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{' (yield_expr | star_expressions) '=' !('!' | ':' | '}')"));
+ _res = PyErr_Occurred ( ) ? NULL : RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN ( "f-string: expecting '!', or ':', or '}'" );
+ if (_res == NULL && PyErr_Occurred()) {
+ p->error_indicator = 1;
+ p->level--;
+ return NULL;
+ }
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s invalid_replacement_field[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'{' (yield_expr | star_expressions) '=' !('!' | ':' | '}')"));
+ }
+ { // '{' (yield_expr | star_expressions) '='? invalid_conversion_character
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> invalid_replacement_field[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{' (yield_expr | star_expressions) '='? invalid_conversion_character"));
+ Token * _literal;
+ void *_opt_var;
+ UNUSED(_opt_var); // Silence compiler warnings
+ void *_tmp_234_var;
+ void *invalid_conversion_character_var;
+ if (
+ (_literal = _PyPegen_expect_token(p, 25)) // token='{'
+ &&
+ (_tmp_234_var = _tmp_234_rule(p)) // yield_expr | star_expressions
+ &&
+ (_opt_var = _PyPegen_expect_token(p, 22), !p->error_indicator) // '='?
+ &&
+ (invalid_conversion_character_var = invalid_conversion_character_rule(p)) // invalid_conversion_character
+ )
+ {
+ D(fprintf(stderr, "%*c+ invalid_replacement_field[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{' (yield_expr | star_expressions) '='? invalid_conversion_character"));
+ _res = _PyPegen_dummy_name(p, _literal, _tmp_234_var, _opt_var, invalid_conversion_character_var);
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s invalid_replacement_field[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'{' (yield_expr | star_expressions) '='? invalid_conversion_character"));
+ }
+ { // '{' (yield_expr | star_expressions) '='? ['!' NAME] !(':' | '}')
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> invalid_replacement_field[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{' (yield_expr | star_expressions) '='? ['!' NAME] !(':' | '}')"));
+ Token * _literal;
+ void *_opt_var;
+ UNUSED(_opt_var); // Silence compiler warnings
+ void *_opt_var_1;
+ UNUSED(_opt_var_1); // Silence compiler warnings
+ void *_tmp_235_var;
+ if (
+ (_literal = _PyPegen_expect_token(p, 25)) // token='{'
+ &&
+ (_tmp_235_var = _tmp_235_rule(p)) // yield_expr | star_expressions
+ &&
+ (_opt_var = _PyPegen_expect_token(p, 22), !p->error_indicator) // '='?
+ &&
+ (_opt_var_1 = _tmp_236_rule(p), !p->error_indicator) // ['!' NAME]
+ &&
+ _PyPegen_lookahead(0, _tmp_237_rule, p)
+ )
+ {
+ D(fprintf(stderr, "%*c+ invalid_replacement_field[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{' (yield_expr | star_expressions) '='? ['!' NAME] !(':' | '}')"));
+ _res = PyErr_Occurred ( ) ? NULL : RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN ( "f-string: expecting ':' or '}'" );
+ if (_res == NULL && PyErr_Occurred()) {
+ p->error_indicator = 1;
+ p->level--;
+ return NULL;
+ }
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s invalid_replacement_field[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'{' (yield_expr | star_expressions) '='? ['!' NAME] !(':' | '}')"));
+ }
+ { // '{' (yield_expr | star_expressions) '='? ['!' NAME] ':' fstring_format_spec* !'}'
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> invalid_replacement_field[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{' (yield_expr | star_expressions) '='? ['!' NAME] ':' fstring_format_spec* !'}'"));
+ Token * _literal;
+ Token * _literal_1;
+ asdl_seq * _loop0_240_var;
+ void *_opt_var;
+ UNUSED(_opt_var); // Silence compiler warnings
+ void *_opt_var_1;
+ UNUSED(_opt_var_1); // Silence compiler warnings
+ void *_tmp_238_var;
+ if (
+ (_literal = _PyPegen_expect_token(p, 25)) // token='{'
+ &&
+ (_tmp_238_var = _tmp_238_rule(p)) // yield_expr | star_expressions
+ &&
+ (_opt_var = _PyPegen_expect_token(p, 22), !p->error_indicator) // '='?
+ &&
+ (_opt_var_1 = _tmp_239_rule(p), !p->error_indicator) // ['!' NAME]
+ &&
+ (_literal_1 = _PyPegen_expect_token(p, 11)) // token=':'
+ &&
+ (_loop0_240_var = _loop0_240_rule(p)) // fstring_format_spec*
+ &&
+ _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 26) // token='}'
+ )
+ {
+ D(fprintf(stderr, "%*c+ invalid_replacement_field[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{' (yield_expr | star_expressions) '='? ['!' NAME] ':' fstring_format_spec* !'}'"));
+ _res = PyErr_Occurred ( ) ? NULL : RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN ( "f-string: expecting '}', or format specs" );
+ if (_res == NULL && PyErr_Occurred()) {
+ p->error_indicator = 1;
+ p->level--;
+ return NULL;
+ }
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s invalid_replacement_field[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'{' (yield_expr | star_expressions) '='? ['!' NAME] ':' fstring_format_spec* !'}'"));
+ }
+ { // '{' (yield_expr | star_expressions) '='? ['!' NAME] !'}'
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> invalid_replacement_field[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{' (yield_expr | star_expressions) '='? ['!' NAME] !'}'"));
+ Token * _literal;
+ void *_opt_var;
+ UNUSED(_opt_var); // Silence compiler warnings
+ void *_opt_var_1;
+ UNUSED(_opt_var_1); // Silence compiler warnings
+ void *_tmp_241_var;
+ if (
+ (_literal = _PyPegen_expect_token(p, 25)) // token='{'
+ &&
+ (_tmp_241_var = _tmp_241_rule(p)) // yield_expr | star_expressions
+ &&
+ (_opt_var = _PyPegen_expect_token(p, 22), !p->error_indicator) // '='?
+ &&
+ (_opt_var_1 = _tmp_242_rule(p), !p->error_indicator) // ['!' NAME]
+ &&
+ _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 26) // token='}'
+ )
+ {
+ D(fprintf(stderr, "%*c+ invalid_replacement_field[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{' (yield_expr | star_expressions) '='? ['!' NAME] !'}'"));
+ _res = PyErr_Occurred ( ) ? NULL : RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN ( "f-string: expecting '}'" );
+ if (_res == NULL && PyErr_Occurred()) {
+ p->error_indicator = 1;
+ p->level--;
+ return NULL;
+ }
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s invalid_replacement_field[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'{' (yield_expr | star_expressions) '='? ['!' NAME] !'}'"));
+ }
+ _res = NULL;
+ done:
+ p->level--;
+ return _res;
+}
+
+// invalid_conversion_character: '!' &(':' | '}') | '!' !NAME
+static void *
+invalid_conversion_character_rule(Parser *p)
+{
+ if (p->level++ == MAXSTACK) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ }
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ void * _res = NULL;
+ int _mark = p->mark;
+ { // '!' &(':' | '}')
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> invalid_conversion_character[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'!' &(':' | '}')"));
+ Token * _literal;
+ if (
+ (_literal = _PyPegen_expect_token(p, 54)) // token='!'
+ &&
+ _PyPegen_lookahead(1, _tmp_243_rule, p)
+ )
+ {
+ D(fprintf(stderr, "%*c+ invalid_conversion_character[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'!' &(':' | '}')"));
+ _res = RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN ( "f-string: missing conversion character" );
+ if (_res == NULL && PyErr_Occurred()) {
+ p->error_indicator = 1;
+ p->level--;
+ return NULL;
+ }
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s invalid_conversion_character[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'!' &(':' | '}')"));
+ }
+ { // '!' !NAME
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> invalid_conversion_character[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'!' !NAME"));
+ Token * _literal;
+ if (
+ (_literal = _PyPegen_expect_token(p, 54)) // token='!'
+ &&
+ _PyPegen_lookahead_with_name(0, _PyPegen_name_token, p)
+ )
+ {
+ D(fprintf(stderr, "%*c+ invalid_conversion_character[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'!' !NAME"));
+ _res = RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN ( "f-string: invalid conversion character" );
+ if (_res == NULL && PyErr_Occurred()) {
+ p->error_indicator = 1;
+ p->level--;
+ return NULL;
+ }
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s invalid_conversion_character[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'!' !NAME"));
+ }
+ _res = NULL;
+ done:
+ p->level--;
+ return _res;
+}
+
// _loop0_1: NEWLINE
static asdl_seq *
_loop0_1_rule(Parser *p)
@@ -23899,9 +24844,77 @@ _loop0_2_rule(Parser *p)
return _seq;
}
-// _loop1_3: statement
+// _loop0_3: fstring_middle
static asdl_seq *
-_loop1_3_rule(Parser *p)
+_loop0_3_rule(Parser *p)
+{
+ if (p->level++ == MAXSTACK) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ }
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ void *_res = NULL;
+ int _mark = p->mark;
+ void **_children = PyMem_Malloc(sizeof(void *));
+ if (!_children) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ p->level--;
+ return NULL;
+ }
+ Py_ssize_t _children_capacity = 1;
+ Py_ssize_t _n = 0;
+ { // fstring_middle
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _loop0_3[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "fstring_middle"));
+ expr_ty fstring_middle_var;
+ while (
+ (fstring_middle_var = fstring_middle_rule(p)) // fstring_middle
+ )
+ {
+ _res = fstring_middle_var;
+ if (_n == _children_capacity) {
+ _children_capacity *= 2;
+ void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
+ if (!_new_children) {
+ PyMem_Free(_children);
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ p->level--;
+ return NULL;
+ }
+ _children = _new_children;
+ }
+ _children[_n++] = _res;
+ _mark = p->mark;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _loop0_3[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "fstring_middle"));
+ }
+ asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
+ if (!_seq) {
+ PyMem_Free(_children);
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ p->level--;
+ return NULL;
+ }
+ for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]);
+ PyMem_Free(_children);
+ p->level--;
+ return _seq;
+}
+
+// _loop1_4: statement
+static asdl_seq *
+_loop1_4_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -23927,7 +24940,7 @@ _loop1_3_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_3[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "statement"));
+ D(fprintf(stderr, "%*c> _loop1_4[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "statement"));
asdl_stmt_seq* statement_var;
while (
(statement_var = statement_rule(p)) // statement
@@ -23950,7 +24963,7 @@ _loop1_3_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_3[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_4[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "statement"));
}
if (_n == 0 || p->error_indicator) {
@@ -23972,9 +24985,9 @@ _loop1_3_rule(Parser *p)
return _seq;
}
-// _loop0_5: ';' simple_stmt
+// _loop0_6: ';' simple_stmt
static asdl_seq *
-_loop0_5_rule(Parser *p)
+_loop0_6_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -24000,7 +25013,7 @@ _loop0_5_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_5[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "';' simple_stmt"));
+ D(fprintf(stderr, "%*c> _loop0_6[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "';' simple_stmt"));
Token * _literal;
stmt_ty elem;
while (
@@ -24032,7 +25045,7 @@ _loop0_5_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_5[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_6[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "';' simple_stmt"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -24049,9 +25062,9 @@ _loop0_5_rule(Parser *p)
return _seq;
}
-// _gather_4: simple_stmt _loop0_5
+// _gather_5: simple_stmt _loop0_6
static asdl_seq *
-_gather_4_rule(Parser *p)
+_gather_5_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -24063,27 +25076,27 @@ _gather_4_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // simple_stmt _loop0_5
+ { // simple_stmt _loop0_6
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_4[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "simple_stmt _loop0_5"));
+ D(fprintf(stderr, "%*c> _gather_5[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "simple_stmt _loop0_6"));
stmt_ty elem;
asdl_seq * seq;
if (
(elem = simple_stmt_rule(p)) // simple_stmt
&&
- (seq = _loop0_5_rule(p)) // _loop0_5
+ (seq = _loop0_6_rule(p)) // _loop0_6
)
{
- D(fprintf(stderr, "%*c+ _gather_4[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "simple_stmt _loop0_5"));
+ D(fprintf(stderr, "%*c+ _gather_5[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "simple_stmt _loop0_6"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_4[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "simple_stmt _loop0_5"));
+ D(fprintf(stderr, "%*c%s _gather_5[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "simple_stmt _loop0_6"));
}
_res = NULL;
done:
@@ -24091,9 +25104,9 @@ _gather_4_rule(Parser *p)
return _res;
}
-// _tmp_6: 'import' | 'from'
+// _tmp_7: 'import' | 'from'
static void *
-_tmp_6_rule(Parser *p)
+_tmp_7_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -24110,18 +25123,18 @@ _tmp_6_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_6[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'import'"));
+ D(fprintf(stderr, "%*c> _tmp_7[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'import'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 606)) // token='import'
+ (_keyword = _PyPegen_expect_token(p, 607)) // token='import'
)
{
- D(fprintf(stderr, "%*c+ _tmp_6[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'import'"));
+ D(fprintf(stderr, "%*c+ _tmp_7[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'import'"));
_res = _keyword;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_6[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_7[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'import'"));
}
{ // 'from'
@@ -24129,18 +25142,18 @@ _tmp_6_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_6[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'from'"));
+ D(fprintf(stderr, "%*c> _tmp_7[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'from'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 607)) // token='from'
+ (_keyword = _PyPegen_expect_token(p, 608)) // token='from'
)
{
- D(fprintf(stderr, "%*c+ _tmp_6[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'from'"));
+ D(fprintf(stderr, "%*c+ _tmp_7[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'from'"));
_res = _keyword;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_6[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_7[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'from'"));
}
_res = NULL;
@@ -24149,9 +25162,9 @@ _tmp_6_rule(Parser *p)
return _res;
}
-// _tmp_7: 'def' | '@' | ASYNC
+// _tmp_8: 'def' | '@' | ASYNC
static void *
-_tmp_7_rule(Parser *p)
+_tmp_8_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -24168,18 +25181,18 @@ _tmp_7_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_7[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'def'"));
+ D(fprintf(stderr, "%*c> _tmp_8[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'def'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 651)) // token='def'
+ (_keyword = _PyPegen_expect_token(p, 652)) // token='def'
)
{
- D(fprintf(stderr, "%*c+ _tmp_7[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'def'"));
+ D(fprintf(stderr, "%*c+ _tmp_8[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'def'"));
_res = _keyword;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_7[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_8[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'def'"));
}
{ // '@'
@@ -24187,18 +25200,18 @@ _tmp_7_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_7[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'@'"));
+ D(fprintf(stderr, "%*c> _tmp_8[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'@'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 49)) // token='@'
)
{
- D(fprintf(stderr, "%*c+ _tmp_7[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'@'"));
+ D(fprintf(stderr, "%*c+ _tmp_8[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'@'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_7[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_8[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'@'"));
}
{ // ASYNC
@@ -24206,18 +25219,18 @@ _tmp_7_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_7[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC"));
+ D(fprintf(stderr, "%*c> _tmp_8[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC"));
Token * async_var;
if (
(async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC'
)
{
- D(fprintf(stderr, "%*c+ _tmp_7[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC"));
+ D(fprintf(stderr, "%*c+ _tmp_8[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC"));
_res = async_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_7[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_8[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "ASYNC"));
}
_res = NULL;
@@ -24226,9 +25239,9 @@ _tmp_7_rule(Parser *p)
return _res;
}
-// _tmp_8: 'class' | '@'
+// _tmp_9: 'class' | '@'
static void *
-_tmp_8_rule(Parser *p)
+_tmp_9_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -24245,18 +25258,18 @@ _tmp_8_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_8[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'class'"));
+ D(fprintf(stderr, "%*c> _tmp_9[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'class'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 653)) // token='class'
+ (_keyword = _PyPegen_expect_token(p, 654)) // token='class'
)
{
- D(fprintf(stderr, "%*c+ _tmp_8[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'class'"));
+ D(fprintf(stderr, "%*c+ _tmp_9[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'class'"));
_res = _keyword;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_8[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_9[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'class'"));
}
{ // '@'
@@ -24264,18 +25277,18 @@ _tmp_8_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_8[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'@'"));
+ D(fprintf(stderr, "%*c> _tmp_9[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'@'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 49)) // token='@'
)
{
- D(fprintf(stderr, "%*c+ _tmp_8[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'@'"));
+ D(fprintf(stderr, "%*c+ _tmp_9[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'@'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_8[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_9[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'@'"));
}
_res = NULL;
@@ -24284,9 +25297,9 @@ _tmp_8_rule(Parser *p)
return _res;
}
-// _tmp_9: 'with' | ASYNC
+// _tmp_10: 'with' | ASYNC
static void *
-_tmp_9_rule(Parser *p)
+_tmp_10_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -24303,18 +25316,18 @@ _tmp_9_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_9[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'with'"));
+ D(fprintf(stderr, "%*c> _tmp_10[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'with'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 614)) // token='with'
+ (_keyword = _PyPegen_expect_token(p, 615)) // token='with'
)
{
- D(fprintf(stderr, "%*c+ _tmp_9[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'with'"));
+ D(fprintf(stderr, "%*c+ _tmp_10[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'with'"));
_res = _keyword;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_9[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_10[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'with'"));
}
{ // ASYNC
@@ -24322,18 +25335,18 @@ _tmp_9_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_9[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC"));
+ D(fprintf(stderr, "%*c> _tmp_10[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC"));
Token * async_var;
if (
(async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC'
)
{
- D(fprintf(stderr, "%*c+ _tmp_9[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC"));
+ D(fprintf(stderr, "%*c+ _tmp_10[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC"));
_res = async_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_9[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_10[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "ASYNC"));
}
_res = NULL;
@@ -24342,9 +25355,9 @@ _tmp_9_rule(Parser *p)
return _res;
}
-// _tmp_10: 'for' | ASYNC
+// _tmp_11: 'for' | ASYNC
static void *
-_tmp_10_rule(Parser *p)
+_tmp_11_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -24361,18 +25374,18 @@ _tmp_10_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_10[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'for'"));
+ D(fprintf(stderr, "%*c> _tmp_11[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'for'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 649)) // token='for'
+ (_keyword = _PyPegen_expect_token(p, 650)) // token='for'
)
{
- D(fprintf(stderr, "%*c+ _tmp_10[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'for'"));
+ D(fprintf(stderr, "%*c+ _tmp_11[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'for'"));
_res = _keyword;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_10[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_11[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'for'"));
}
{ // ASYNC
@@ -24380,18 +25393,18 @@ _tmp_10_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_10[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC"));
+ D(fprintf(stderr, "%*c> _tmp_11[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC"));
Token * async_var;
if (
(async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC'
)
{
- D(fprintf(stderr, "%*c+ _tmp_10[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC"));
+ D(fprintf(stderr, "%*c+ _tmp_11[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC"));
_res = async_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_10[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_11[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "ASYNC"));
}
_res = NULL;
@@ -24400,9 +25413,9 @@ _tmp_10_rule(Parser *p)
return _res;
}
-// _tmp_11: '=' annotated_rhs
+// _tmp_12: '=' annotated_rhs
static void *
-_tmp_11_rule(Parser *p)
+_tmp_12_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -24419,7 +25432,7 @@ _tmp_11_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_11[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'=' annotated_rhs"));
+ D(fprintf(stderr, "%*c> _tmp_12[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'=' annotated_rhs"));
Token * _literal;
expr_ty d;
if (
@@ -24428,7 +25441,7 @@ _tmp_11_rule(Parser *p)
(d = annotated_rhs_rule(p)) // annotated_rhs
)
{
- D(fprintf(stderr, "%*c+ _tmp_11[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'=' annotated_rhs"));
+ D(fprintf(stderr, "%*c+ _tmp_12[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'=' annotated_rhs"));
_res = d;
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
@@ -24438,7 +25451,7 @@ _tmp_11_rule(Parser *p)
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_11[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_12[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'=' annotated_rhs"));
}
_res = NULL;
@@ -24447,9 +25460,9 @@ _tmp_11_rule(Parser *p)
return _res;
}
-// _tmp_12: '(' single_target ')' | single_subscript_attribute_target
+// _tmp_13: '(' single_target ')' | single_subscript_attribute_target
static void *
-_tmp_12_rule(Parser *p)
+_tmp_13_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -24466,7 +25479,7 @@ _tmp_12_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_12[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' single_target ')'"));
+ D(fprintf(stderr, "%*c> _tmp_13[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' single_target ')'"));
Token * _literal;
Token * _literal_1;
expr_ty b;
@@ -24478,7 +25491,7 @@ _tmp_12_rule(Parser *p)
(_literal_1 = _PyPegen_expect_token(p, 8)) // token=')'
)
{
- D(fprintf(stderr, "%*c+ _tmp_12[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' single_target ')'"));
+ D(fprintf(stderr, "%*c+ _tmp_13[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' single_target ')'"));
_res = b;
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
@@ -24488,7 +25501,7 @@ _tmp_12_rule(Parser *p)
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_12[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_13[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'(' single_target ')'"));
}
{ // single_subscript_attribute_target
@@ -24496,18 +25509,18 @@ _tmp_12_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_12[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "single_subscript_attribute_target"));
+ D(fprintf(stderr, "%*c> _tmp_13[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "single_subscript_attribute_target"));
expr_ty single_subscript_attribute_target_var;
if (
(single_subscript_attribute_target_var = single_subscript_attribute_target_rule(p)) // single_subscript_attribute_target
)
{
- D(fprintf(stderr, "%*c+ _tmp_12[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "single_subscript_attribute_target"));
+ D(fprintf(stderr, "%*c+ _tmp_13[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "single_subscript_attribute_target"));
_res = single_subscript_attribute_target_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_12[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_13[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "single_subscript_attribute_target"));
}
_res = NULL;
@@ -24516,9 +25529,9 @@ _tmp_12_rule(Parser *p)
return _res;
}
-// _tmp_13: '=' annotated_rhs
+// _tmp_14: '=' annotated_rhs
static void *
-_tmp_13_rule(Parser *p)
+_tmp_14_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -24535,7 +25548,7 @@ _tmp_13_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_13[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'=' annotated_rhs"));
+ D(fprintf(stderr, "%*c> _tmp_14[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'=' annotated_rhs"));
Token * _literal;
expr_ty d;
if (
@@ -24544,7 +25557,7 @@ _tmp_13_rule(Parser *p)
(d = annotated_rhs_rule(p)) // annotated_rhs
)
{
- D(fprintf(stderr, "%*c+ _tmp_13[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'=' annotated_rhs"));
+ D(fprintf(stderr, "%*c+ _tmp_14[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'=' annotated_rhs"));
_res = d;
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
@@ -24554,7 +25567,7 @@ _tmp_13_rule(Parser *p)
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_13[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_14[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'=' annotated_rhs"));
}
_res = NULL;
@@ -24563,9 +25576,9 @@ _tmp_13_rule(Parser *p)
return _res;
}
-// _loop1_14: (star_targets '=')
+// _loop1_15: (star_targets '=')
static asdl_seq *
-_loop1_14_rule(Parser *p)
+_loop1_15_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -24591,13 +25604,13 @@ _loop1_14_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_14[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(star_targets '=')"));
- void *_tmp_224_var;
+ D(fprintf(stderr, "%*c> _loop1_15[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(star_targets '=')"));
+ void *_tmp_244_var;
while (
- (_tmp_224_var = _tmp_224_rule(p)) // star_targets '='
+ (_tmp_244_var = _tmp_244_rule(p)) // star_targets '='
)
{
- _res = _tmp_224_var;
+ _res = _tmp_244_var;
if (_n == _children_capacity) {
_children_capacity *= 2;
void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
@@ -24614,7 +25627,7 @@ _loop1_14_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_14[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_15[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(star_targets '=')"));
}
if (_n == 0 || p->error_indicator) {
@@ -24636,9 +25649,9 @@ _loop1_14_rule(Parser *p)
return _seq;
}
-// _tmp_15: yield_expr | star_expressions
+// _tmp_16: yield_expr | star_expressions
static void *
-_tmp_15_rule(Parser *p)
+_tmp_16_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -24655,18 +25668,18 @@ _tmp_15_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_15[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr"));
+ D(fprintf(stderr, "%*c> _tmp_16[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr"));
expr_ty yield_expr_var;
if (
(yield_expr_var = yield_expr_rule(p)) // yield_expr
)
{
- D(fprintf(stderr, "%*c+ _tmp_15[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr"));
+ D(fprintf(stderr, "%*c+ _tmp_16[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr"));
_res = yield_expr_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_15[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_16[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "yield_expr"));
}
{ // star_expressions
@@ -24674,18 +25687,18 @@ _tmp_15_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_15[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions"));
+ D(fprintf(stderr, "%*c> _tmp_16[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions"));
expr_ty star_expressions_var;
if (
(star_expressions_var = star_expressions_rule(p)) // star_expressions
)
{
- D(fprintf(stderr, "%*c+ _tmp_15[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions"));
+ D(fprintf(stderr, "%*c+ _tmp_16[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions"));
_res = star_expressions_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_15[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_16[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_expressions"));
}
_res = NULL;
@@ -24694,9 +25707,9 @@ _tmp_15_rule(Parser *p)
return _res;
}
-// _tmp_16: yield_expr | star_expressions
+// _tmp_17: yield_expr | star_expressions
static void *
-_tmp_16_rule(Parser *p)
+_tmp_17_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -24713,18 +25726,18 @@ _tmp_16_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_16[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr"));
+ D(fprintf(stderr, "%*c> _tmp_17[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr"));
expr_ty yield_expr_var;
if (
(yield_expr_var = yield_expr_rule(p)) // yield_expr
)
{
- D(fprintf(stderr, "%*c+ _tmp_16[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr"));
+ D(fprintf(stderr, "%*c+ _tmp_17[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr"));
_res = yield_expr_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_16[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_17[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "yield_expr"));
}
{ // star_expressions
@@ -24732,18 +25745,18 @@ _tmp_16_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_16[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions"));
+ D(fprintf(stderr, "%*c> _tmp_17[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions"));
expr_ty star_expressions_var;
if (
(star_expressions_var = star_expressions_rule(p)) // star_expressions
)
{
- D(fprintf(stderr, "%*c+ _tmp_16[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions"));
+ D(fprintf(stderr, "%*c+ _tmp_17[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions"));
_res = star_expressions_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_16[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_17[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_expressions"));
}
_res = NULL;
@@ -24752,9 +25765,9 @@ _tmp_16_rule(Parser *p)
return _res;
}
-// _tmp_17: 'from' expression
+// _tmp_18: 'from' expression
static void *
-_tmp_17_rule(Parser *p)
+_tmp_18_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -24771,16 +25784,16 @@ _tmp_17_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_17[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'from' expression"));
+ D(fprintf(stderr, "%*c> _tmp_18[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'from' expression"));
Token * _keyword;
expr_ty z;
if (
- (_keyword = _PyPegen_expect_token(p, 607)) // token='from'
+ (_keyword = _PyPegen_expect_token(p, 608)) // token='from'
&&
(z = expression_rule(p)) // expression
)
{
- D(fprintf(stderr, "%*c+ _tmp_17[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'from' expression"));
+ D(fprintf(stderr, "%*c+ _tmp_18[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'from' expression"));
_res = z;
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
@@ -24790,7 +25803,7 @@ _tmp_17_rule(Parser *p)
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_17[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_18[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'from' expression"));
}
_res = NULL;
@@ -24799,9 +25812,9 @@ _tmp_17_rule(Parser *p)
return _res;
}
-// _loop0_19: ',' NAME
+// _loop0_20: ',' NAME
static asdl_seq *
-_loop0_19_rule(Parser *p)
+_loop0_20_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -24827,7 +25840,7 @@ _loop0_19_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_19[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' NAME"));
+ D(fprintf(stderr, "%*c> _loop0_20[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' NAME"));
Token * _literal;
expr_ty elem;
while (
@@ -24859,7 +25872,7 @@ _loop0_19_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_19[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_20[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' NAME"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -24876,9 +25889,9 @@ _loop0_19_rule(Parser *p)
return _seq;
}
-// _gather_18: NAME _loop0_19
+// _gather_19: NAME _loop0_20
static asdl_seq *
-_gather_18_rule(Parser *p)
+_gather_19_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -24890,27 +25903,27 @@ _gather_18_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // NAME _loop0_19
+ { // NAME _loop0_20
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_18[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME _loop0_19"));
+ D(fprintf(stderr, "%*c> _gather_19[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME _loop0_20"));
expr_ty elem;
asdl_seq * seq;
if (
(elem = _PyPegen_name_token(p)) // NAME
&&
- (seq = _loop0_19_rule(p)) // _loop0_19
+ (seq = _loop0_20_rule(p)) // _loop0_20
)
{
- D(fprintf(stderr, "%*c+ _gather_18[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME _loop0_19"));
+ D(fprintf(stderr, "%*c+ _gather_19[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME _loop0_20"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_18[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NAME _loop0_19"));
+ D(fprintf(stderr, "%*c%s _gather_19[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NAME _loop0_20"));
}
_res = NULL;
done:
@@ -24918,9 +25931,9 @@ _gather_18_rule(Parser *p)
return _res;
}
-// _loop0_21: ',' NAME
+// _loop0_22: ',' NAME
static asdl_seq *
-_loop0_21_rule(Parser *p)
+_loop0_22_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -24946,7 +25959,7 @@ _loop0_21_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_21[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' NAME"));
+ D(fprintf(stderr, "%*c> _loop0_22[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' NAME"));
Token * _literal;
expr_ty elem;
while (
@@ -24978,7 +25991,7 @@ _loop0_21_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_21[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_22[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' NAME"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -24995,9 +26008,9 @@ _loop0_21_rule(Parser *p)
return _seq;
}
-// _gather_20: NAME _loop0_21
+// _gather_21: NAME _loop0_22
static asdl_seq *
-_gather_20_rule(Parser *p)
+_gather_21_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -25009,27 +26022,27 @@ _gather_20_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // NAME _loop0_21
+ { // NAME _loop0_22
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_20[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME _loop0_21"));
+ D(fprintf(stderr, "%*c> _gather_21[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME _loop0_22"));
expr_ty elem;
asdl_seq * seq;
if (
(elem = _PyPegen_name_token(p)) // NAME
&&
- (seq = _loop0_21_rule(p)) // _loop0_21
+ (seq = _loop0_22_rule(p)) // _loop0_22
)
{
- D(fprintf(stderr, "%*c+ _gather_20[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME _loop0_21"));
+ D(fprintf(stderr, "%*c+ _gather_21[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME _loop0_22"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_20[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NAME _loop0_21"));
+ D(fprintf(stderr, "%*c%s _gather_21[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NAME _loop0_22"));
}
_res = NULL;
done:
@@ -25037,9 +26050,9 @@ _gather_20_rule(Parser *p)
return _res;
}
-// _tmp_22: ';' | NEWLINE
+// _tmp_23: ';' | NEWLINE
static void *
-_tmp_22_rule(Parser *p)
+_tmp_23_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -25056,18 +26069,18 @@ _tmp_22_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_22[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "';'"));
+ D(fprintf(stderr, "%*c> _tmp_23[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "';'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 13)) // token=';'
)
{
- D(fprintf(stderr, "%*c+ _tmp_22[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "';'"));
+ D(fprintf(stderr, "%*c+ _tmp_23[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "';'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_22[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_23[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "';'"));
}
{ // NEWLINE
@@ -25075,18 +26088,18 @@ _tmp_22_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_22[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NEWLINE"));
+ D(fprintf(stderr, "%*c> _tmp_23[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NEWLINE"));
Token * newline_var;
if (
(newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE'
)
{
- D(fprintf(stderr, "%*c+ _tmp_22[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NEWLINE"));
+ D(fprintf(stderr, "%*c+ _tmp_23[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NEWLINE"));
_res = newline_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_22[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_23[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NEWLINE"));
}
_res = NULL;
@@ -25095,9 +26108,9 @@ _tmp_22_rule(Parser *p)
return _res;
}
-// _tmp_23: ',' expression
+// _tmp_24: ',' expression
static void *
-_tmp_23_rule(Parser *p)
+_tmp_24_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -25114,7 +26127,7 @@ _tmp_23_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_23[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression"));
+ D(fprintf(stderr, "%*c> _tmp_24[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression"));
Token * _literal;
expr_ty z;
if (
@@ -25123,7 +26136,7 @@ _tmp_23_rule(Parser *p)
(z = expression_rule(p)) // expression
)
{
- D(fprintf(stderr, "%*c+ _tmp_23[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' expression"));
+ D(fprintf(stderr, "%*c+ _tmp_24[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' expression"));
_res = z;
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
@@ -25133,7 +26146,7 @@ _tmp_23_rule(Parser *p)
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_23[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_24[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' expression"));
}
_res = NULL;
@@ -25142,9 +26155,9 @@ _tmp_23_rule(Parser *p)
return _res;
}
-// _loop0_24: ('.' | '...')
+// _loop0_25: ('.' | '...')
static asdl_seq *
-_loop0_24_rule(Parser *p)
+_loop0_25_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -25170,13 +26183,13 @@ _loop0_24_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_24[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('.' | '...')"));
- void *_tmp_225_var;
+ D(fprintf(stderr, "%*c> _loop0_25[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('.' | '...')"));
+ void *_tmp_245_var;
while (
- (_tmp_225_var = _tmp_225_rule(p)) // '.' | '...'
+ (_tmp_245_var = _tmp_245_rule(p)) // '.' | '...'
)
{
- _res = _tmp_225_var;
+ _res = _tmp_245_var;
if (_n == _children_capacity) {
_children_capacity *= 2;
void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
@@ -25193,7 +26206,7 @@ _loop0_24_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_24[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_25[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "('.' | '...')"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -25210,9 +26223,9 @@ _loop0_24_rule(Parser *p)
return _seq;
}
-// _loop1_25: ('.' | '...')
+// _loop1_26: ('.' | '...')
static asdl_seq *
-_loop1_25_rule(Parser *p)
+_loop1_26_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -25238,13 +26251,13 @@ _loop1_25_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_25[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('.' | '...')"));
- void *_tmp_226_var;
+ D(fprintf(stderr, "%*c> _loop1_26[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('.' | '...')"));
+ void *_tmp_246_var;
while (
- (_tmp_226_var = _tmp_226_rule(p)) // '.' | '...'
+ (_tmp_246_var = _tmp_246_rule(p)) // '.' | '...'
)
{
- _res = _tmp_226_var;
+ _res = _tmp_246_var;
if (_n == _children_capacity) {
_children_capacity *= 2;
void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
@@ -25261,7 +26274,7 @@ _loop1_25_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_25[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_26[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "('.' | '...')"));
}
if (_n == 0 || p->error_indicator) {
@@ -25283,9 +26296,9 @@ _loop1_25_rule(Parser *p)
return _seq;
}
-// _loop0_27: ',' import_from_as_name
+// _loop0_28: ',' import_from_as_name
static asdl_seq *
-_loop0_27_rule(Parser *p)
+_loop0_28_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -25311,7 +26324,7 @@ _loop0_27_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_27[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' import_from_as_name"));
+ D(fprintf(stderr, "%*c> _loop0_28[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' import_from_as_name"));
Token * _literal;
alias_ty elem;
while (
@@ -25343,7 +26356,7 @@ _loop0_27_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_27[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_28[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' import_from_as_name"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -25360,9 +26373,9 @@ _loop0_27_rule(Parser *p)
return _seq;
}
-// _gather_26: import_from_as_name _loop0_27
+// _gather_27: import_from_as_name _loop0_28
static asdl_seq *
-_gather_26_rule(Parser *p)
+_gather_27_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -25374,27 +26387,27 @@ _gather_26_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // import_from_as_name _loop0_27
+ { // import_from_as_name _loop0_28
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_26[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "import_from_as_name _loop0_27"));
+ D(fprintf(stderr, "%*c> _gather_27[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "import_from_as_name _loop0_28"));
alias_ty elem;
asdl_seq * seq;
if (
(elem = import_from_as_name_rule(p)) // import_from_as_name
&&
- (seq = _loop0_27_rule(p)) // _loop0_27
+ (seq = _loop0_28_rule(p)) // _loop0_28
)
{
- D(fprintf(stderr, "%*c+ _gather_26[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "import_from_as_name _loop0_27"));
+ D(fprintf(stderr, "%*c+ _gather_27[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "import_from_as_name _loop0_28"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_26[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "import_from_as_name _loop0_27"));
+ D(fprintf(stderr, "%*c%s _gather_27[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "import_from_as_name _loop0_28"));
}
_res = NULL;
done:
@@ -25402,9 +26415,9 @@ _gather_26_rule(Parser *p)
return _res;
}
-// _tmp_28: 'as' NAME
+// _tmp_29: 'as' NAME
static void *
-_tmp_28_rule(Parser *p)
+_tmp_29_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -25421,16 +26434,16 @@ _tmp_28_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_28[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
+ D(fprintf(stderr, "%*c> _tmp_29[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
Token * _keyword;
expr_ty z;
if (
- (_keyword = _PyPegen_expect_token(p, 639)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 640)) // token='as'
&&
(z = _PyPegen_name_token(p)) // NAME
)
{
- D(fprintf(stderr, "%*c+ _tmp_28[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
+ D(fprintf(stderr, "%*c+ _tmp_29[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
_res = z;
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
@@ -25440,7 +26453,7 @@ _tmp_28_rule(Parser *p)
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_28[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_29[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'as' NAME"));
}
_res = NULL;
@@ -25449,9 +26462,9 @@ _tmp_28_rule(Parser *p)
return _res;
}
-// _loop0_30: ',' dotted_as_name
+// _loop0_31: ',' dotted_as_name
static asdl_seq *
-_loop0_30_rule(Parser *p)
+_loop0_31_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -25477,7 +26490,7 @@ _loop0_30_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_30[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' dotted_as_name"));
+ D(fprintf(stderr, "%*c> _loop0_31[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' dotted_as_name"));
Token * _literal;
alias_ty elem;
while (
@@ -25509,7 +26522,7 @@ _loop0_30_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_30[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_31[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' dotted_as_name"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -25526,9 +26539,9 @@ _loop0_30_rule(Parser *p)
return _seq;
}
-// _gather_29: dotted_as_name _loop0_30
+// _gather_30: dotted_as_name _loop0_31
static asdl_seq *
-_gather_29_rule(Parser *p)
+_gather_30_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -25540,27 +26553,27 @@ _gather_29_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // dotted_as_name _loop0_30
+ { // dotted_as_name _loop0_31
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_29[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "dotted_as_name _loop0_30"));
+ D(fprintf(stderr, "%*c> _gather_30[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "dotted_as_name _loop0_31"));
alias_ty elem;
asdl_seq * seq;
if (
(elem = dotted_as_name_rule(p)) // dotted_as_name
&&
- (seq = _loop0_30_rule(p)) // _loop0_30
+ (seq = _loop0_31_rule(p)) // _loop0_31
)
{
- D(fprintf(stderr, "%*c+ _gather_29[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "dotted_as_name _loop0_30"));
+ D(fprintf(stderr, "%*c+ _gather_30[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "dotted_as_name _loop0_31"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_29[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "dotted_as_name _loop0_30"));
+ D(fprintf(stderr, "%*c%s _gather_30[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "dotted_as_name _loop0_31"));
}
_res = NULL;
done:
@@ -25568,9 +26581,9 @@ _gather_29_rule(Parser *p)
return _res;
}
-// _tmp_31: 'as' NAME
+// _tmp_32: 'as' NAME
static void *
-_tmp_31_rule(Parser *p)
+_tmp_32_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -25587,16 +26600,16 @@ _tmp_31_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_31[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
+ D(fprintf(stderr, "%*c> _tmp_32[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
Token * _keyword;
expr_ty z;
if (
- (_keyword = _PyPegen_expect_token(p, 639)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 640)) // token='as'
&&
(z = _PyPegen_name_token(p)) // NAME
)
{
- D(fprintf(stderr, "%*c+ _tmp_31[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
+ D(fprintf(stderr, "%*c+ _tmp_32[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
_res = z;
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
@@ -25606,7 +26619,7 @@ _tmp_31_rule(Parser *p)
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_31[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_32[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'as' NAME"));
}
_res = NULL;
@@ -25615,9 +26628,9 @@ _tmp_31_rule(Parser *p)
return _res;
}
-// _loop1_32: ('@' named_expression NEWLINE)
+// _loop1_33: ('@' named_expression NEWLINE)
static asdl_seq *
-_loop1_32_rule(Parser *p)
+_loop1_33_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -25643,13 +26656,13 @@ _loop1_32_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_32[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('@' named_expression NEWLINE)"));
- void *_tmp_227_var;
+ D(fprintf(stderr, "%*c> _loop1_33[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('@' named_expression NEWLINE)"));
+ void *_tmp_247_var;
while (
- (_tmp_227_var = _tmp_227_rule(p)) // '@' named_expression NEWLINE
+ (_tmp_247_var = _tmp_247_rule(p)) // '@' named_expression NEWLINE
)
{
- _res = _tmp_227_var;
+ _res = _tmp_247_var;
if (_n == _children_capacity) {
_children_capacity *= 2;
void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
@@ -25666,7 +26679,7 @@ _loop1_32_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_32[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_33[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "('@' named_expression NEWLINE)"));
}
if (_n == 0 || p->error_indicator) {
@@ -25688,9 +26701,9 @@ _loop1_32_rule(Parser *p)
return _seq;
}
-// _tmp_33: '(' arguments? ')'
+// _tmp_34: '(' arguments? ')'
static void *
-_tmp_33_rule(Parser *p)
+_tmp_34_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -25707,7 +26720,7 @@ _tmp_33_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_33[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' arguments? ')'"));
+ D(fprintf(stderr, "%*c> _tmp_34[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' arguments? ')'"));
Token * _literal;
Token * _literal_1;
void *z;
@@ -25719,7 +26732,7 @@ _tmp_33_rule(Parser *p)
(_literal_1 = _PyPegen_expect_token(p, 8)) // token=')'
)
{
- D(fprintf(stderr, "%*c+ _tmp_33[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' arguments? ')'"));
+ D(fprintf(stderr, "%*c+ _tmp_34[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' arguments? ')'"));
_res = z;
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
@@ -25729,7 +26742,7 @@ _tmp_33_rule(Parser *p)
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_33[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_34[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'(' arguments? ')'"));
}
_res = NULL;
@@ -25738,9 +26751,9 @@ _tmp_33_rule(Parser *p)
return _res;
}
-// _tmp_34: '->' expression
+// _tmp_35: '->' expression
static void *
-_tmp_34_rule(Parser *p)
+_tmp_35_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -25757,7 +26770,7 @@ _tmp_34_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_34[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'->' expression"));
+ D(fprintf(stderr, "%*c> _tmp_35[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'->' expression"));
Token * _literal;
expr_ty z;
if (
@@ -25766,7 +26779,7 @@ _tmp_34_rule(Parser *p)
(z = expression_rule(p)) // expression
)
{
- D(fprintf(stderr, "%*c+ _tmp_34[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'->' expression"));
+ D(fprintf(stderr, "%*c+ _tmp_35[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'->' expression"));
_res = z;
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
@@ -25776,7 +26789,7 @@ _tmp_34_rule(Parser *p)
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_34[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_35[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'->' expression"));
}
_res = NULL;
@@ -25785,9 +26798,9 @@ _tmp_34_rule(Parser *p)
return _res;
}
-// _tmp_35: '->' expression
+// _tmp_36: '->' expression
static void *
-_tmp_35_rule(Parser *p)
+_tmp_36_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -25804,7 +26817,7 @@ _tmp_35_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_35[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'->' expression"));
+ D(fprintf(stderr, "%*c> _tmp_36[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'->' expression"));
Token * _literal;
expr_ty z;
if (
@@ -25813,7 +26826,7 @@ _tmp_35_rule(Parser *p)
(z = expression_rule(p)) // expression
)
{
- D(fprintf(stderr, "%*c+ _tmp_35[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'->' expression"));
+ D(fprintf(stderr, "%*c+ _tmp_36[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'->' expression"));
_res = z;
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
@@ -25823,7 +26836,7 @@ _tmp_35_rule(Parser *p)
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_35[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_36[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'->' expression"));
}
_res = NULL;
@@ -25832,9 +26845,9 @@ _tmp_35_rule(Parser *p)
return _res;
}
-// _loop0_36: param_no_default
+// _loop0_37: param_no_default
static asdl_seq *
-_loop0_36_rule(Parser *p)
+_loop0_37_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -25860,7 +26873,7 @@ _loop0_36_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_36[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default"));
+ D(fprintf(stderr, "%*c> _loop0_37[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default"));
arg_ty param_no_default_var;
while (
(param_no_default_var = param_no_default_rule(p)) // param_no_default
@@ -25883,7 +26896,7 @@ _loop0_36_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_36[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_37[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_no_default"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -25900,9 +26913,9 @@ _loop0_36_rule(Parser *p)
return _seq;
}
-// _loop0_37: param_with_default
+// _loop0_38: param_with_default
static asdl_seq *
-_loop0_37_rule(Parser *p)
+_loop0_38_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -25928,7 +26941,7 @@ _loop0_37_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_37[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default"));
+ D(fprintf(stderr, "%*c> _loop0_38[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default"));
NameDefaultPair* param_with_default_var;
while (
(param_with_default_var = param_with_default_rule(p)) // param_with_default
@@ -25951,7 +26964,7 @@ _loop0_37_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_37[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_38[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_with_default"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -25968,9 +26981,9 @@ _loop0_37_rule(Parser *p)
return _seq;
}
-// _loop0_38: param_with_default
+// _loop0_39: param_with_default
static asdl_seq *
-_loop0_38_rule(Parser *p)
+_loop0_39_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -25996,7 +27009,7 @@ _loop0_38_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_38[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default"));
+ D(fprintf(stderr, "%*c> _loop0_39[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default"));
NameDefaultPair* param_with_default_var;
while (
(param_with_default_var = param_with_default_rule(p)) // param_with_default
@@ -26019,7 +27032,7 @@ _loop0_38_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_38[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_39[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_with_default"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -26036,9 +27049,9 @@ _loop0_38_rule(Parser *p)
return _seq;
}
-// _loop1_39: param_no_default
+// _loop1_40: param_no_default
static asdl_seq *
-_loop1_39_rule(Parser *p)
+_loop1_40_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -26064,7 +27077,7 @@ _loop1_39_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_39[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default"));
+ D(fprintf(stderr, "%*c> _loop1_40[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default"));
arg_ty param_no_default_var;
while (
(param_no_default_var = param_no_default_rule(p)) // param_no_default
@@ -26087,7 +27100,7 @@ _loop1_39_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_39[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_40[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_no_default"));
}
if (_n == 0 || p->error_indicator) {
@@ -26109,9 +27122,9 @@ _loop1_39_rule(Parser *p)
return _seq;
}
-// _loop0_40: param_with_default
+// _loop0_41: param_with_default
static asdl_seq *
-_loop0_40_rule(Parser *p)
+_loop0_41_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -26137,7 +27150,7 @@ _loop0_40_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_40[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default"));
+ D(fprintf(stderr, "%*c> _loop0_41[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default"));
NameDefaultPair* param_with_default_var;
while (
(param_with_default_var = param_with_default_rule(p)) // param_with_default
@@ -26160,7 +27173,7 @@ _loop0_40_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_40[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_41[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_with_default"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -26177,9 +27190,9 @@ _loop0_40_rule(Parser *p)
return _seq;
}
-// _loop1_41: param_with_default
+// _loop1_42: param_with_default
static asdl_seq *
-_loop1_41_rule(Parser *p)
+_loop1_42_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -26205,7 +27218,7 @@ _loop1_41_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_41[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default"));
+ D(fprintf(stderr, "%*c> _loop1_42[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default"));
NameDefaultPair* param_with_default_var;
while (
(param_with_default_var = param_with_default_rule(p)) // param_with_default
@@ -26228,7 +27241,7 @@ _loop1_41_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_41[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_42[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_with_default"));
}
if (_n == 0 || p->error_indicator) {
@@ -26250,9 +27263,9 @@ _loop1_41_rule(Parser *p)
return _seq;
}
-// _loop1_42: param_no_default
+// _loop1_43: param_no_default
static asdl_seq *
-_loop1_42_rule(Parser *p)
+_loop1_43_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -26278,7 +27291,7 @@ _loop1_42_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_42[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default"));
+ D(fprintf(stderr, "%*c> _loop1_43[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default"));
arg_ty param_no_default_var;
while (
(param_no_default_var = param_no_default_rule(p)) // param_no_default
@@ -26301,7 +27314,7 @@ _loop1_42_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_42[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_43[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_no_default"));
}
if (_n == 0 || p->error_indicator) {
@@ -26323,9 +27336,9 @@ _loop1_42_rule(Parser *p)
return _seq;
}
-// _loop1_43: param_no_default
+// _loop1_44: param_no_default
static asdl_seq *
-_loop1_43_rule(Parser *p)
+_loop1_44_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -26351,7 +27364,7 @@ _loop1_43_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_43[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default"));
+ D(fprintf(stderr, "%*c> _loop1_44[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default"));
arg_ty param_no_default_var;
while (
(param_no_default_var = param_no_default_rule(p)) // param_no_default
@@ -26374,7 +27387,7 @@ _loop1_43_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_43[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_44[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_no_default"));
}
if (_n == 0 || p->error_indicator) {
@@ -26396,9 +27409,9 @@ _loop1_43_rule(Parser *p)
return _seq;
}
-// _loop0_44: param_no_default
+// _loop0_45: param_no_default
static asdl_seq *
-_loop0_44_rule(Parser *p)
+_loop0_45_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -26424,7 +27437,7 @@ _loop0_44_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_44[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default"));
+ D(fprintf(stderr, "%*c> _loop0_45[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default"));
arg_ty param_no_default_var;
while (
(param_no_default_var = param_no_default_rule(p)) // param_no_default
@@ -26447,7 +27460,7 @@ _loop0_44_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_44[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_45[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_no_default"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -26464,9 +27477,9 @@ _loop0_44_rule(Parser *p)
return _seq;
}
-// _loop1_45: param_with_default
+// _loop1_46: param_with_default
static asdl_seq *
-_loop1_45_rule(Parser *p)
+_loop1_46_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -26492,7 +27505,7 @@ _loop1_45_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_45[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default"));
+ D(fprintf(stderr, "%*c> _loop1_46[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default"));
NameDefaultPair* param_with_default_var;
while (
(param_with_default_var = param_with_default_rule(p)) // param_with_default
@@ -26515,7 +27528,7 @@ _loop1_45_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_45[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_46[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_with_default"));
}
if (_n == 0 || p->error_indicator) {
@@ -26537,9 +27550,9 @@ _loop1_45_rule(Parser *p)
return _seq;
}
-// _loop0_46: param_no_default
+// _loop0_47: param_no_default
static asdl_seq *
-_loop0_46_rule(Parser *p)
+_loop0_47_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -26565,7 +27578,7 @@ _loop0_46_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_46[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default"));
+ D(fprintf(stderr, "%*c> _loop0_47[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default"));
arg_ty param_no_default_var;
while (
(param_no_default_var = param_no_default_rule(p)) // param_no_default
@@ -26588,7 +27601,7 @@ _loop0_46_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_46[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_47[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_no_default"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -26605,9 +27618,9 @@ _loop0_46_rule(Parser *p)
return _seq;
}
-// _loop1_47: param_with_default
+// _loop1_48: param_with_default
static asdl_seq *
-_loop1_47_rule(Parser *p)
+_loop1_48_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -26633,7 +27646,7 @@ _loop1_47_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_47[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default"));
+ D(fprintf(stderr, "%*c> _loop1_48[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default"));
NameDefaultPair* param_with_default_var;
while (
(param_with_default_var = param_with_default_rule(p)) // param_with_default
@@ -26656,7 +27669,7 @@ _loop1_47_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_47[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_48[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_with_default"));
}
if (_n == 0 || p->error_indicator) {
@@ -26678,9 +27691,9 @@ _loop1_47_rule(Parser *p)
return _seq;
}
-// _loop0_48: param_maybe_default
+// _loop0_49: param_maybe_default
static asdl_seq *
-_loop0_48_rule(Parser *p)
+_loop0_49_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -26706,7 +27719,7 @@ _loop0_48_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_48[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default"));
+ D(fprintf(stderr, "%*c> _loop0_49[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default"));
NameDefaultPair* param_maybe_default_var;
while (
(param_maybe_default_var = param_maybe_default_rule(p)) // param_maybe_default
@@ -26729,7 +27742,7 @@ _loop0_48_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_48[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_49[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_maybe_default"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -26746,9 +27759,9 @@ _loop0_48_rule(Parser *p)
return _seq;
}
-// _loop0_49: param_maybe_default
+// _loop0_50: param_maybe_default
static asdl_seq *
-_loop0_49_rule(Parser *p)
+_loop0_50_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -26774,7 +27787,7 @@ _loop0_49_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_49[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default"));
+ D(fprintf(stderr, "%*c> _loop0_50[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default"));
NameDefaultPair* param_maybe_default_var;
while (
(param_maybe_default_var = param_maybe_default_rule(p)) // param_maybe_default
@@ -26797,7 +27810,7 @@ _loop0_49_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_49[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_50[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_maybe_default"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -26814,9 +27827,9 @@ _loop0_49_rule(Parser *p)
return _seq;
}
-// _loop1_50: param_maybe_default
+// _loop1_51: param_maybe_default
static asdl_seq *
-_loop1_50_rule(Parser *p)
+_loop1_51_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -26842,7 +27855,7 @@ _loop1_50_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_50[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default"));
+ D(fprintf(stderr, "%*c> _loop1_51[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default"));
NameDefaultPair* param_maybe_default_var;
while (
(param_maybe_default_var = param_maybe_default_rule(p)) // param_maybe_default
@@ -26865,7 +27878,7 @@ _loop1_50_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_50[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_51[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_maybe_default"));
}
if (_n == 0 || p->error_indicator) {
@@ -26887,9 +27900,9 @@ _loop1_50_rule(Parser *p)
return _seq;
}
-// _loop0_52: ',' with_item
+// _loop0_53: ',' with_item
static asdl_seq *
-_loop0_52_rule(Parser *p)
+_loop0_53_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -26915,7 +27928,7 @@ _loop0_52_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_52[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' with_item"));
+ D(fprintf(stderr, "%*c> _loop0_53[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' with_item"));
Token * _literal;
withitem_ty elem;
while (
@@ -26947,7 +27960,7 @@ _loop0_52_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_52[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_53[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' with_item"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -26964,9 +27977,9 @@ _loop0_52_rule(Parser *p)
return _seq;
}
-// _gather_51: with_item _loop0_52
+// _gather_52: with_item _loop0_53
static asdl_seq *
-_gather_51_rule(Parser *p)
+_gather_52_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -26978,27 +27991,27 @@ _gather_51_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // with_item _loop0_52
+ { // with_item _loop0_53
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_51[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "with_item _loop0_52"));
+ D(fprintf(stderr, "%*c> _gather_52[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "with_item _loop0_53"));
withitem_ty elem;
asdl_seq * seq;
if (
(elem = with_item_rule(p)) // with_item
&&
- (seq = _loop0_52_rule(p)) // _loop0_52
+ (seq = _loop0_53_rule(p)) // _loop0_53
)
{
- D(fprintf(stderr, "%*c+ _gather_51[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "with_item _loop0_52"));
+ D(fprintf(stderr, "%*c+ _gather_52[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "with_item _loop0_53"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_51[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "with_item _loop0_52"));
+ D(fprintf(stderr, "%*c%s _gather_52[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "with_item _loop0_53"));
}
_res = NULL;
done:
@@ -27006,9 +28019,9 @@ _gather_51_rule(Parser *p)
return _res;
}
-// _loop0_54: ',' with_item
+// _loop0_55: ',' with_item
static asdl_seq *
-_loop0_54_rule(Parser *p)
+_loop0_55_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -27034,7 +28047,7 @@ _loop0_54_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_54[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' with_item"));
+ D(fprintf(stderr, "%*c> _loop0_55[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' with_item"));
Token * _literal;
withitem_ty elem;
while (
@@ -27066,7 +28079,7 @@ _loop0_54_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_54[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_55[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' with_item"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -27083,9 +28096,9 @@ _loop0_54_rule(Parser *p)
return _seq;
}
-// _gather_53: with_item _loop0_54
+// _gather_54: with_item _loop0_55
static asdl_seq *
-_gather_53_rule(Parser *p)
+_gather_54_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -27097,27 +28110,27 @@ _gather_53_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // with_item _loop0_54
+ { // with_item _loop0_55
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_53[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "with_item _loop0_54"));
+ D(fprintf(stderr, "%*c> _gather_54[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "with_item _loop0_55"));
withitem_ty elem;
asdl_seq * seq;
if (
(elem = with_item_rule(p)) // with_item
&&
- (seq = _loop0_54_rule(p)) // _loop0_54
+ (seq = _loop0_55_rule(p)) // _loop0_55
)
{
- D(fprintf(stderr, "%*c+ _gather_53[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "with_item _loop0_54"));
+ D(fprintf(stderr, "%*c+ _gather_54[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "with_item _loop0_55"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_53[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "with_item _loop0_54"));
+ D(fprintf(stderr, "%*c%s _gather_54[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "with_item _loop0_55"));
}
_res = NULL;
done:
@@ -27125,9 +28138,9 @@ _gather_53_rule(Parser *p)
return _res;
}
-// _loop0_56: ',' with_item
+// _loop0_57: ',' with_item
static asdl_seq *
-_loop0_56_rule(Parser *p)
+_loop0_57_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -27153,7 +28166,7 @@ _loop0_56_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_56[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' with_item"));
+ D(fprintf(stderr, "%*c> _loop0_57[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' with_item"));
Token * _literal;
withitem_ty elem;
while (
@@ -27185,7 +28198,7 @@ _loop0_56_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_56[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_57[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' with_item"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -27202,9 +28215,9 @@ _loop0_56_rule(Parser *p)
return _seq;
}
-// _gather_55: with_item _loop0_56
+// _gather_56: with_item _loop0_57
static asdl_seq *
-_gather_55_rule(Parser *p)
+_gather_56_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -27216,27 +28229,27 @@ _gather_55_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // with_item _loop0_56
+ { // with_item _loop0_57
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_55[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "with_item _loop0_56"));
+ D(fprintf(stderr, "%*c> _gather_56[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "with_item _loop0_57"));
withitem_ty elem;
asdl_seq * seq;
if (
(elem = with_item_rule(p)) // with_item
&&
- (seq = _loop0_56_rule(p)) // _loop0_56
+ (seq = _loop0_57_rule(p)) // _loop0_57
)
{
- D(fprintf(stderr, "%*c+ _gather_55[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "with_item _loop0_56"));
+ D(fprintf(stderr, "%*c+ _gather_56[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "with_item _loop0_57"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_55[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "with_item _loop0_56"));
+ D(fprintf(stderr, "%*c%s _gather_56[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "with_item _loop0_57"));
}
_res = NULL;
done:
@@ -27244,9 +28257,9 @@ _gather_55_rule(Parser *p)
return _res;
}
-// _loop0_58: ',' with_item
+// _loop0_59: ',' with_item
static asdl_seq *
-_loop0_58_rule(Parser *p)
+_loop0_59_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -27272,7 +28285,7 @@ _loop0_58_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_58[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' with_item"));
+ D(fprintf(stderr, "%*c> _loop0_59[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' with_item"));
Token * _literal;
withitem_ty elem;
while (
@@ -27304,7 +28317,7 @@ _loop0_58_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_58[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_59[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' with_item"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -27321,9 +28334,9 @@ _loop0_58_rule(Parser *p)
return _seq;
}
-// _gather_57: with_item _loop0_58
+// _gather_58: with_item _loop0_59
static asdl_seq *
-_gather_57_rule(Parser *p)
+_gather_58_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -27335,27 +28348,27 @@ _gather_57_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // with_item _loop0_58
+ { // with_item _loop0_59
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_57[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "with_item _loop0_58"));
+ D(fprintf(stderr, "%*c> _gather_58[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "with_item _loop0_59"));
withitem_ty elem;
asdl_seq * seq;
if (
(elem = with_item_rule(p)) // with_item
&&
- (seq = _loop0_58_rule(p)) // _loop0_58
+ (seq = _loop0_59_rule(p)) // _loop0_59
)
{
- D(fprintf(stderr, "%*c+ _gather_57[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "with_item _loop0_58"));
+ D(fprintf(stderr, "%*c+ _gather_58[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "with_item _loop0_59"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_57[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "with_item _loop0_58"));
+ D(fprintf(stderr, "%*c%s _gather_58[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "with_item _loop0_59"));
}
_res = NULL;
done:
@@ -27363,9 +28376,9 @@ _gather_57_rule(Parser *p)
return _res;
}
-// _tmp_59: ',' | ')' | ':'
+// _tmp_60: ',' | ')' | ':'
static void *
-_tmp_59_rule(Parser *p)
+_tmp_60_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -27382,18 +28395,18 @@ _tmp_59_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_59[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','"));
+ D(fprintf(stderr, "%*c> _tmp_60[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 12)) // token=','
)
{
- D(fprintf(stderr, "%*c+ _tmp_59[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','"));
+ D(fprintf(stderr, "%*c+ _tmp_60[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_59[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_60[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','"));
}
{ // ')'
@@ -27401,18 +28414,18 @@ _tmp_59_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_59[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'"));
+ D(fprintf(stderr, "%*c> _tmp_60[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 8)) // token=')'
)
{
- D(fprintf(stderr, "%*c+ _tmp_59[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'"));
+ D(fprintf(stderr, "%*c+ _tmp_60[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_59[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_60[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "')'"));
}
{ // ':'
@@ -27420,18 +28433,18 @@ _tmp_59_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_59[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'"));
+ D(fprintf(stderr, "%*c> _tmp_60[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
)
{
- D(fprintf(stderr, "%*c+ _tmp_59[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'"));
+ D(fprintf(stderr, "%*c+ _tmp_60[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_59[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_60[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':'"));
}
_res = NULL;
@@ -27440,9 +28453,9 @@ _tmp_59_rule(Parser *p)
return _res;
}
-// _loop1_60: except_block
+// _loop1_61: except_block
static asdl_seq *
-_loop1_60_rule(Parser *p)
+_loop1_61_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -27468,7 +28481,7 @@ _loop1_60_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_60[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "except_block"));
+ D(fprintf(stderr, "%*c> _loop1_61[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "except_block"));
excepthandler_ty except_block_var;
while (
(except_block_var = except_block_rule(p)) // except_block
@@ -27491,7 +28504,7 @@ _loop1_60_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_60[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_61[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "except_block"));
}
if (_n == 0 || p->error_indicator) {
@@ -27513,9 +28526,9 @@ _loop1_60_rule(Parser *p)
return _seq;
}
-// _loop1_61: except_star_block
+// _loop1_62: except_star_block
static asdl_seq *
-_loop1_61_rule(Parser *p)
+_loop1_62_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -27541,7 +28554,7 @@ _loop1_61_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_61[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "except_star_block"));
+ D(fprintf(stderr, "%*c> _loop1_62[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "except_star_block"));
excepthandler_ty except_star_block_var;
while (
(except_star_block_var = except_star_block_rule(p)) // except_star_block
@@ -27564,7 +28577,7 @@ _loop1_61_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_61[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_62[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "except_star_block"));
}
if (_n == 0 || p->error_indicator) {
@@ -27586,9 +28599,9 @@ _loop1_61_rule(Parser *p)
return _seq;
}
-// _tmp_62: 'as' NAME
+// _tmp_63: 'as' NAME
static void *
-_tmp_62_rule(Parser *p)
+_tmp_63_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -27605,16 +28618,16 @@ _tmp_62_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_62[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
+ D(fprintf(stderr, "%*c> _tmp_63[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
Token * _keyword;
expr_ty z;
if (
- (_keyword = _PyPegen_expect_token(p, 639)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 640)) // token='as'
&&
(z = _PyPegen_name_token(p)) // NAME
)
{
- D(fprintf(stderr, "%*c+ _tmp_62[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
+ D(fprintf(stderr, "%*c+ _tmp_63[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
_res = z;
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
@@ -27624,7 +28637,7 @@ _tmp_62_rule(Parser *p)
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_62[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_63[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'as' NAME"));
}
_res = NULL;
@@ -27633,9 +28646,9 @@ _tmp_62_rule(Parser *p)
return _res;
}
-// _tmp_63: 'as' NAME
+// _tmp_64: 'as' NAME
static void *
-_tmp_63_rule(Parser *p)
+_tmp_64_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -27652,16 +28665,16 @@ _tmp_63_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_63[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
+ D(fprintf(stderr, "%*c> _tmp_64[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
Token * _keyword;
expr_ty z;
if (
- (_keyword = _PyPegen_expect_token(p, 639)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 640)) // token='as'
&&
(z = _PyPegen_name_token(p)) // NAME
)
{
- D(fprintf(stderr, "%*c+ _tmp_63[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
+ D(fprintf(stderr, "%*c+ _tmp_64[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
_res = z;
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
@@ -27671,7 +28684,7 @@ _tmp_63_rule(Parser *p)
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_63[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_64[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'as' NAME"));
}
_res = NULL;
@@ -27680,9 +28693,9 @@ _tmp_63_rule(Parser *p)
return _res;
}
-// _loop1_64: case_block
+// _loop1_65: case_block
static asdl_seq *
-_loop1_64_rule(Parser *p)
+_loop1_65_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -27708,7 +28721,7 @@ _loop1_64_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_64[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "case_block"));
+ D(fprintf(stderr, "%*c> _loop1_65[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "case_block"));
match_case_ty case_block_var;
while (
(case_block_var = case_block_rule(p)) // case_block
@@ -27731,7 +28744,7 @@ _loop1_64_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_64[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_65[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "case_block"));
}
if (_n == 0 || p->error_indicator) {
@@ -27753,9 +28766,9 @@ _loop1_64_rule(Parser *p)
return _seq;
}
-// _loop0_66: '|' closed_pattern
+// _loop0_67: '|' closed_pattern
static asdl_seq *
-_loop0_66_rule(Parser *p)
+_loop0_67_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -27781,7 +28794,7 @@ _loop0_66_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_66[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'|' closed_pattern"));
+ D(fprintf(stderr, "%*c> _loop0_67[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'|' closed_pattern"));
Token * _literal;
pattern_ty elem;
while (
@@ -27813,7 +28826,7 @@ _loop0_66_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_66[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_67[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'|' closed_pattern"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -27830,9 +28843,9 @@ _loop0_66_rule(Parser *p)
return _seq;
}
-// _gather_65: closed_pattern _loop0_66
+// _gather_66: closed_pattern _loop0_67
static asdl_seq *
-_gather_65_rule(Parser *p)
+_gather_66_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -27844,85 +28857,27 @@ _gather_65_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // closed_pattern _loop0_66
+ { // closed_pattern _loop0_67
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_65[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "closed_pattern _loop0_66"));
+ D(fprintf(stderr, "%*c> _gather_66[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "closed_pattern _loop0_67"));
pattern_ty elem;
asdl_seq * seq;
if (
(elem = closed_pattern_rule(p)) // closed_pattern
&&
- (seq = _loop0_66_rule(p)) // _loop0_66
+ (seq = _loop0_67_rule(p)) // _loop0_67
)
{
- D(fprintf(stderr, "%*c+ _gather_65[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "closed_pattern _loop0_66"));
+ D(fprintf(stderr, "%*c+ _gather_66[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "closed_pattern _loop0_67"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_65[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "closed_pattern _loop0_66"));
- }
- _res = NULL;
- done:
- p->level--;
- return _res;
-}
-
-// _tmp_67: '+' | '-'
-static void *
-_tmp_67_rule(Parser *p)
-{
- if (p->level++ == MAXSTACK) {
- p->error_indicator = 1;
- PyErr_NoMemory();
- }
- if (p->error_indicator) {
- p->level--;
- return NULL;
- }
- void * _res = NULL;
- int _mark = p->mark;
- { // '+'
- if (p->error_indicator) {
- p->level--;
- return NULL;
- }
- D(fprintf(stderr, "%*c> _tmp_67[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'+'"));
- Token * _literal;
- if (
- (_literal = _PyPegen_expect_token(p, 14)) // token='+'
- )
- {
- D(fprintf(stderr, "%*c+ _tmp_67[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'+'"));
- _res = _literal;
- goto done;
- }
- p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_67[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'+'"));
- }
- { // '-'
- if (p->error_indicator) {
- p->level--;
- return NULL;
- }
- D(fprintf(stderr, "%*c> _tmp_67[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'-'"));
- Token * _literal;
- if (
- (_literal = _PyPegen_expect_token(p, 15)) // token='-'
- )
- {
- D(fprintf(stderr, "%*c+ _tmp_67[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'-'"));
- _res = _literal;
- goto done;
- }
- p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_67[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'-'"));
+ D(fprintf(stderr, "%*c%s _gather_66[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "closed_pattern _loop0_67"));
}
_res = NULL;
done:
@@ -27988,7 +28943,7 @@ _tmp_68_rule(Parser *p)
return _res;
}
-// _tmp_69: '.' | '(' | '='
+// _tmp_69: '+' | '-'
static void *
_tmp_69_rule(Parser *p)
{
@@ -28002,62 +28957,43 @@ _tmp_69_rule(Parser *p)
}
void * _res = NULL;
int _mark = p->mark;
- { // '.'
- if (p->error_indicator) {
- p->level--;
- return NULL;
- }
- D(fprintf(stderr, "%*c> _tmp_69[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'.'"));
- Token * _literal;
- if (
- (_literal = _PyPegen_expect_token(p, 23)) // token='.'
- )
- {
- D(fprintf(stderr, "%*c+ _tmp_69[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'.'"));
- _res = _literal;
- goto done;
- }
- p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_69[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'.'"));
- }
- { // '('
+ { // '+'
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_69[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'('"));
+ D(fprintf(stderr, "%*c> _tmp_69[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'+'"));
Token * _literal;
if (
- (_literal = _PyPegen_expect_token(p, 7)) // token='('
+ (_literal = _PyPegen_expect_token(p, 14)) // token='+'
)
{
- D(fprintf(stderr, "%*c+ _tmp_69[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'('"));
+ D(fprintf(stderr, "%*c+ _tmp_69[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'+'"));
_res = _literal;
goto done;
}
p->mark = _mark;
D(fprintf(stderr, "%*c%s _tmp_69[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'('"));
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'+'"));
}
- { // '='
+ { // '-'
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_69[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'='"));
+ D(fprintf(stderr, "%*c> _tmp_69[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'-'"));
Token * _literal;
if (
- (_literal = _PyPegen_expect_token(p, 22)) // token='='
+ (_literal = _PyPegen_expect_token(p, 15)) // token='-'
)
{
- D(fprintf(stderr, "%*c+ _tmp_69[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'='"));
+ D(fprintf(stderr, "%*c+ _tmp_69[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'-'"));
_res = _literal;
goto done;
}
p->mark = _mark;
D(fprintf(stderr, "%*c%s _tmp_69[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'='"));
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'-'"));
}
_res = NULL;
done:
@@ -28142,9 +29078,86 @@ _tmp_70_rule(Parser *p)
return _res;
}
-// _loop0_72: ',' maybe_star_pattern
+// _tmp_71: '.' | '(' | '='
+static void *
+_tmp_71_rule(Parser *p)
+{
+ if (p->level++ == MAXSTACK) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ }
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ void * _res = NULL;
+ int _mark = p->mark;
+ { // '.'
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_71[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'.'"));
+ Token * _literal;
+ if (
+ (_literal = _PyPegen_expect_token(p, 23)) // token='.'
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_71[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'.'"));
+ _res = _literal;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_71[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'.'"));
+ }
+ { // '('
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_71[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'('"));
+ Token * _literal;
+ if (
+ (_literal = _PyPegen_expect_token(p, 7)) // token='('
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_71[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'('"));
+ _res = _literal;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_71[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'('"));
+ }
+ { // '='
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_71[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'='"));
+ Token * _literal;
+ if (
+ (_literal = _PyPegen_expect_token(p, 22)) // token='='
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_71[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'='"));
+ _res = _literal;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_71[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'='"));
+ }
+ _res = NULL;
+ done:
+ p->level--;
+ return _res;
+}
+
+// _loop0_73: ',' maybe_star_pattern
static asdl_seq *
-_loop0_72_rule(Parser *p)
+_loop0_73_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -28170,7 +29183,7 @@ _loop0_72_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_72[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' maybe_star_pattern"));
+ D(fprintf(stderr, "%*c> _loop0_73[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' maybe_star_pattern"));
Token * _literal;
pattern_ty elem;
while (
@@ -28202,7 +29215,7 @@ _loop0_72_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_72[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_73[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' maybe_star_pattern"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -28219,9 +29232,9 @@ _loop0_72_rule(Parser *p)
return _seq;
}
-// _gather_71: maybe_star_pattern _loop0_72
+// _gather_72: maybe_star_pattern _loop0_73
static asdl_seq *
-_gather_71_rule(Parser *p)
+_gather_72_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -28233,27 +29246,27 @@ _gather_71_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // maybe_star_pattern _loop0_72
+ { // maybe_star_pattern _loop0_73
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_71[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "maybe_star_pattern _loop0_72"));
+ D(fprintf(stderr, "%*c> _gather_72[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "maybe_star_pattern _loop0_73"));
pattern_ty elem;
asdl_seq * seq;
if (
(elem = maybe_star_pattern_rule(p)) // maybe_star_pattern
&&
- (seq = _loop0_72_rule(p)) // _loop0_72
+ (seq = _loop0_73_rule(p)) // _loop0_73
)
{
- D(fprintf(stderr, "%*c+ _gather_71[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "maybe_star_pattern _loop0_72"));
+ D(fprintf(stderr, "%*c+ _gather_72[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "maybe_star_pattern _loop0_73"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_71[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "maybe_star_pattern _loop0_72"));
+ D(fprintf(stderr, "%*c%s _gather_72[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "maybe_star_pattern _loop0_73"));
}
_res = NULL;
done:
@@ -28261,9 +29274,9 @@ _gather_71_rule(Parser *p)
return _res;
}
-// _loop0_74: ',' key_value_pattern
+// _loop0_75: ',' key_value_pattern
static asdl_seq *
-_loop0_74_rule(Parser *p)
+_loop0_75_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -28289,7 +29302,7 @@ _loop0_74_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_74[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' key_value_pattern"));
+ D(fprintf(stderr, "%*c> _loop0_75[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' key_value_pattern"));
Token * _literal;
KeyPatternPair* elem;
while (
@@ -28321,7 +29334,7 @@ _loop0_74_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_74[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_75[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' key_value_pattern"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -28338,9 +29351,9 @@ _loop0_74_rule(Parser *p)
return _seq;
}
-// _gather_73: key_value_pattern _loop0_74
+// _gather_74: key_value_pattern _loop0_75
static asdl_seq *
-_gather_73_rule(Parser *p)
+_gather_74_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -28352,27 +29365,27 @@ _gather_73_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // key_value_pattern _loop0_74
+ { // key_value_pattern _loop0_75
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_73[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "key_value_pattern _loop0_74"));
+ D(fprintf(stderr, "%*c> _gather_74[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "key_value_pattern _loop0_75"));
KeyPatternPair* elem;
asdl_seq * seq;
if (
(elem = key_value_pattern_rule(p)) // key_value_pattern
&&
- (seq = _loop0_74_rule(p)) // _loop0_74
+ (seq = _loop0_75_rule(p)) // _loop0_75
)
{
- D(fprintf(stderr, "%*c+ _gather_73[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "key_value_pattern _loop0_74"));
+ D(fprintf(stderr, "%*c+ _gather_74[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "key_value_pattern _loop0_75"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_73[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "key_value_pattern _loop0_74"));
+ D(fprintf(stderr, "%*c%s _gather_74[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "key_value_pattern _loop0_75"));
}
_res = NULL;
done:
@@ -28380,9 +29393,9 @@ _gather_73_rule(Parser *p)
return _res;
}
-// _tmp_75: literal_expr | attr
+// _tmp_76: literal_expr | attr
static void *
-_tmp_75_rule(Parser *p)
+_tmp_76_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -28399,18 +29412,18 @@ _tmp_75_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_75[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "literal_expr"));
+ D(fprintf(stderr, "%*c> _tmp_76[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "literal_expr"));
expr_ty literal_expr_var;
if (
(literal_expr_var = literal_expr_rule(p)) // literal_expr
)
{
- D(fprintf(stderr, "%*c+ _tmp_75[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "literal_expr"));
+ D(fprintf(stderr, "%*c+ _tmp_76[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "literal_expr"));
_res = literal_expr_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_75[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_76[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "literal_expr"));
}
{ // attr
@@ -28418,18 +29431,18 @@ _tmp_75_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_75[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "attr"));
+ D(fprintf(stderr, "%*c> _tmp_76[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "attr"));
expr_ty attr_var;
if (
(attr_var = attr_rule(p)) // attr
)
{
- D(fprintf(stderr, "%*c+ _tmp_75[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "attr"));
+ D(fprintf(stderr, "%*c+ _tmp_76[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "attr"));
_res = attr_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_75[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_76[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "attr"));
}
_res = NULL;
@@ -28438,9 +29451,9 @@ _tmp_75_rule(Parser *p)
return _res;
}
-// _loop0_77: ',' pattern
+// _loop0_78: ',' pattern
static asdl_seq *
-_loop0_77_rule(Parser *p)
+_loop0_78_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -28466,7 +29479,7 @@ _loop0_77_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_77[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' pattern"));
+ D(fprintf(stderr, "%*c> _loop0_78[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' pattern"));
Token * _literal;
pattern_ty elem;
while (
@@ -28498,7 +29511,7 @@ _loop0_77_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_77[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_78[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' pattern"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -28515,9 +29528,9 @@ _loop0_77_rule(Parser *p)
return _seq;
}
-// _gather_76: pattern _loop0_77
+// _gather_77: pattern _loop0_78
static asdl_seq *
-_gather_76_rule(Parser *p)
+_gather_77_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -28529,27 +29542,27 @@ _gather_76_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // pattern _loop0_77
+ { // pattern _loop0_78
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_76[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "pattern _loop0_77"));
+ D(fprintf(stderr, "%*c> _gather_77[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "pattern _loop0_78"));
pattern_ty elem;
asdl_seq * seq;
if (
(elem = pattern_rule(p)) // pattern
&&
- (seq = _loop0_77_rule(p)) // _loop0_77
+ (seq = _loop0_78_rule(p)) // _loop0_78
)
{
- D(fprintf(stderr, "%*c+ _gather_76[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "pattern _loop0_77"));
+ D(fprintf(stderr, "%*c+ _gather_77[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "pattern _loop0_78"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_76[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "pattern _loop0_77"));
+ D(fprintf(stderr, "%*c%s _gather_77[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "pattern _loop0_78"));
}
_res = NULL;
done:
@@ -28557,9 +29570,9 @@ _gather_76_rule(Parser *p)
return _res;
}
-// _loop0_79: ',' keyword_pattern
+// _loop0_80: ',' keyword_pattern
static asdl_seq *
-_loop0_79_rule(Parser *p)
+_loop0_80_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -28585,7 +29598,7 @@ _loop0_79_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_79[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' keyword_pattern"));
+ D(fprintf(stderr, "%*c> _loop0_80[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' keyword_pattern"));
Token * _literal;
KeyPatternPair* elem;
while (
@@ -28617,7 +29630,7 @@ _loop0_79_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_79[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_80[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' keyword_pattern"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -28634,9 +29647,9 @@ _loop0_79_rule(Parser *p)
return _seq;
}
-// _gather_78: keyword_pattern _loop0_79
+// _gather_79: keyword_pattern _loop0_80
static asdl_seq *
-_gather_78_rule(Parser *p)
+_gather_79_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -28648,27 +29661,27 @@ _gather_78_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // keyword_pattern _loop0_79
+ { // keyword_pattern _loop0_80
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_78[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "keyword_pattern _loop0_79"));
+ D(fprintf(stderr, "%*c> _gather_79[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "keyword_pattern _loop0_80"));
KeyPatternPair* elem;
asdl_seq * seq;
if (
(elem = keyword_pattern_rule(p)) // keyword_pattern
&&
- (seq = _loop0_79_rule(p)) // _loop0_79
+ (seq = _loop0_80_rule(p)) // _loop0_80
)
{
- D(fprintf(stderr, "%*c+ _gather_78[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "keyword_pattern _loop0_79"));
+ D(fprintf(stderr, "%*c+ _gather_79[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "keyword_pattern _loop0_80"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_78[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "keyword_pattern _loop0_79"));
+ D(fprintf(stderr, "%*c%s _gather_79[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "keyword_pattern _loop0_80"));
}
_res = NULL;
done:
@@ -28676,9 +29689,9 @@ _gather_78_rule(Parser *p)
return _res;
}
-// _loop1_80: (',' expression)
+// _loop1_81: (',' expression)
static asdl_seq *
-_loop1_80_rule(Parser *p)
+_loop1_81_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -28704,13 +29717,13 @@ _loop1_80_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_80[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' expression)"));
- void *_tmp_228_var;
+ D(fprintf(stderr, "%*c> _loop1_81[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' expression)"));
+ void *_tmp_248_var;
while (
- (_tmp_228_var = _tmp_228_rule(p)) // ',' expression
+ (_tmp_248_var = _tmp_248_rule(p)) // ',' expression
)
{
- _res = _tmp_228_var;
+ _res = _tmp_248_var;
if (_n == _children_capacity) {
_children_capacity *= 2;
void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
@@ -28727,7 +29740,7 @@ _loop1_80_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_80[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_81[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(',' expression)"));
}
if (_n == 0 || p->error_indicator) {
@@ -28749,9 +29762,9 @@ _loop1_80_rule(Parser *p)
return _seq;
}
-// _loop1_81: (',' star_expression)
+// _loop1_82: (',' star_expression)
static asdl_seq *
-_loop1_81_rule(Parser *p)
+_loop1_82_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -28777,13 +29790,13 @@ _loop1_81_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_81[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' star_expression)"));
- void *_tmp_229_var;
+ D(fprintf(stderr, "%*c> _loop1_82[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' star_expression)"));
+ void *_tmp_249_var;
while (
- (_tmp_229_var = _tmp_229_rule(p)) // ',' star_expression
+ (_tmp_249_var = _tmp_249_rule(p)) // ',' star_expression
)
{
- _res = _tmp_229_var;
+ _res = _tmp_249_var;
if (_n == _children_capacity) {
_children_capacity *= 2;
void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
@@ -28800,7 +29813,7 @@ _loop1_81_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_81[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_82[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(',' star_expression)"));
}
if (_n == 0 || p->error_indicator) {
@@ -28822,9 +29835,9 @@ _loop1_81_rule(Parser *p)
return _seq;
}
-// _loop0_83: ',' star_named_expression
+// _loop0_84: ',' star_named_expression
static asdl_seq *
-_loop0_83_rule(Parser *p)
+_loop0_84_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -28850,7 +29863,7 @@ _loop0_83_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_83[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_named_expression"));
+ D(fprintf(stderr, "%*c> _loop0_84[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_named_expression"));
Token * _literal;
expr_ty elem;
while (
@@ -28882,7 +29895,7 @@ _loop0_83_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_83[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_84[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' star_named_expression"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -28899,9 +29912,9 @@ _loop0_83_rule(Parser *p)
return _seq;
}
-// _gather_82: star_named_expression _loop0_83
+// _gather_83: star_named_expression _loop0_84
static asdl_seq *
-_gather_82_rule(Parser *p)
+_gather_83_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -28913,27 +29926,27 @@ _gather_82_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // star_named_expression _loop0_83
+ { // star_named_expression _loop0_84
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_82[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_named_expression _loop0_83"));
+ D(fprintf(stderr, "%*c> _gather_83[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_named_expression _loop0_84"));
expr_ty elem;
asdl_seq * seq;
if (
(elem = star_named_expression_rule(p)) // star_named_expression
&&
- (seq = _loop0_83_rule(p)) // _loop0_83
+ (seq = _loop0_84_rule(p)) // _loop0_84
)
{
- D(fprintf(stderr, "%*c+ _gather_82[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_named_expression _loop0_83"));
+ D(fprintf(stderr, "%*c+ _gather_83[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_named_expression _loop0_84"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_82[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_named_expression _loop0_83"));
+ D(fprintf(stderr, "%*c%s _gather_83[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_named_expression _loop0_84"));
}
_res = NULL;
done:
@@ -28941,9 +29954,9 @@ _gather_82_rule(Parser *p)
return _res;
}
-// _loop1_84: ('or' conjunction)
+// _loop1_85: ('or' conjunction)
static asdl_seq *
-_loop1_84_rule(Parser *p)
+_loop1_85_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -28969,13 +29982,13 @@ _loop1_84_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_84[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('or' conjunction)"));
- void *_tmp_230_var;
+ D(fprintf(stderr, "%*c> _loop1_85[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('or' conjunction)"));
+ void *_tmp_250_var;
while (
- (_tmp_230_var = _tmp_230_rule(p)) // 'or' conjunction
+ (_tmp_250_var = _tmp_250_rule(p)) // 'or' conjunction
)
{
- _res = _tmp_230_var;
+ _res = _tmp_250_var;
if (_n == _children_capacity) {
_children_capacity *= 2;
void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
@@ -28992,7 +30005,7 @@ _loop1_84_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_84[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_85[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "('or' conjunction)"));
}
if (_n == 0 || p->error_indicator) {
@@ -29014,9 +30027,9 @@ _loop1_84_rule(Parser *p)
return _seq;
}
-// _loop1_85: ('and' inversion)
+// _loop1_86: ('and' inversion)
static asdl_seq *
-_loop1_85_rule(Parser *p)
+_loop1_86_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -29042,13 +30055,13 @@ _loop1_85_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_85[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('and' inversion)"));
- void *_tmp_231_var;
+ D(fprintf(stderr, "%*c> _loop1_86[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('and' inversion)"));
+ void *_tmp_251_var;
while (
- (_tmp_231_var = _tmp_231_rule(p)) // 'and' inversion
+ (_tmp_251_var = _tmp_251_rule(p)) // 'and' inversion
)
{
- _res = _tmp_231_var;
+ _res = _tmp_251_var;
if (_n == _children_capacity) {
_children_capacity *= 2;
void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
@@ -29065,7 +30078,7 @@ _loop1_85_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_85[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_86[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "('and' inversion)"));
}
if (_n == 0 || p->error_indicator) {
@@ -29087,9 +30100,9 @@ _loop1_85_rule(Parser *p)
return _seq;
}
-// _loop1_86: compare_op_bitwise_or_pair
+// _loop1_87: compare_op_bitwise_or_pair
static asdl_seq *
-_loop1_86_rule(Parser *p)
+_loop1_87_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -29115,7 +30128,7 @@ _loop1_86_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_86[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "compare_op_bitwise_or_pair"));
+ D(fprintf(stderr, "%*c> _loop1_87[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "compare_op_bitwise_or_pair"));
CmpopExprPair* compare_op_bitwise_or_pair_var;
while (
(compare_op_bitwise_or_pair_var = compare_op_bitwise_or_pair_rule(p)) // compare_op_bitwise_or_pair
@@ -29138,7 +30151,7 @@ _loop1_86_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_86[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_87[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "compare_op_bitwise_or_pair"));
}
if (_n == 0 || p->error_indicator) {
@@ -29160,9 +30173,9 @@ _loop1_86_rule(Parser *p)
return _seq;
}
-// _tmp_87: '!='
+// _tmp_88: '!='
static void *
-_tmp_87_rule(Parser *p)
+_tmp_88_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -29179,13 +30192,13 @@ _tmp_87_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_87[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'!='"));
+ D(fprintf(stderr, "%*c> _tmp_88[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'!='"));
Token * tok;
if (
(tok = _PyPegen_expect_token(p, 28)) // token='!='
)
{
- D(fprintf(stderr, "%*c+ _tmp_87[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'!='"));
+ D(fprintf(stderr, "%*c+ _tmp_88[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'!='"));
_res = _PyPegen_check_barry_as_flufl ( p , tok ) ? NULL : tok;
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
@@ -29195,7 +30208,7 @@ _tmp_87_rule(Parser *p)
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_87[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_88[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'!='"));
}
_res = NULL;
@@ -29204,9 +30217,9 @@ _tmp_87_rule(Parser *p)
return _res;
}
-// _loop0_89: ',' (slice | starred_expression)
+// _loop0_90: ',' (slice | starred_expression)
static asdl_seq *
-_loop0_89_rule(Parser *p)
+_loop0_90_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -29232,13 +30245,13 @@ _loop0_89_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_89[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (slice | starred_expression)"));
+ D(fprintf(stderr, "%*c> _loop0_90[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (slice | starred_expression)"));
Token * _literal;
void *elem;
while (
(_literal = _PyPegen_expect_token(p, 12)) // token=','
&&
- (elem = _tmp_232_rule(p)) // slice | starred_expression
+ (elem = _tmp_252_rule(p)) // slice | starred_expression
)
{
_res = elem;
@@ -29264,7 +30277,7 @@ _loop0_89_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_89[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_90[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' (slice | starred_expression)"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -29281,9 +30294,9 @@ _loop0_89_rule(Parser *p)
return _seq;
}
-// _gather_88: (slice | starred_expression) _loop0_89
+// _gather_89: (slice | starred_expression) _loop0_90
static asdl_seq *
-_gather_88_rule(Parser *p)
+_gather_89_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -29295,27 +30308,27 @@ _gather_88_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // (slice | starred_expression) _loop0_89
+ { // (slice | starred_expression) _loop0_90
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_88[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(slice | starred_expression) _loop0_89"));
+ D(fprintf(stderr, "%*c> _gather_89[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(slice | starred_expression) _loop0_90"));
void *elem;
asdl_seq * seq;
if (
- (elem = _tmp_232_rule(p)) // slice | starred_expression
+ (elem = _tmp_252_rule(p)) // slice | starred_expression
&&
- (seq = _loop0_89_rule(p)) // _loop0_89
+ (seq = _loop0_90_rule(p)) // _loop0_90
)
{
- D(fprintf(stderr, "%*c+ _gather_88[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(slice | starred_expression) _loop0_89"));
+ D(fprintf(stderr, "%*c+ _gather_89[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(slice | starred_expression) _loop0_90"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_88[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(slice | starred_expression) _loop0_89"));
+ D(fprintf(stderr, "%*c%s _gather_89[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(slice | starred_expression) _loop0_90"));
}
_res = NULL;
done:
@@ -29323,9 +30336,9 @@ _gather_88_rule(Parser *p)
return _res;
}
-// _tmp_90: ':' expression?
+// _tmp_91: ':' expression?
static void *
-_tmp_90_rule(Parser *p)
+_tmp_91_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -29342,7 +30355,7 @@ _tmp_90_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_90[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':' expression?"));
+ D(fprintf(stderr, "%*c> _tmp_91[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':' expression?"));
Token * _literal;
void *d;
if (
@@ -29351,7 +30364,7 @@ _tmp_90_rule(Parser *p)
(d = expression_rule(p), !p->error_indicator) // expression?
)
{
- D(fprintf(stderr, "%*c+ _tmp_90[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':' expression?"));
+ D(fprintf(stderr, "%*c+ _tmp_91[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':' expression?"));
_res = d;
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
@@ -29361,7 +30374,7 @@ _tmp_90_rule(Parser *p)
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_90[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_91[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':' expression?"));
}
_res = NULL;
@@ -29370,9 +30383,67 @@ _tmp_90_rule(Parser *p)
return _res;
}
-// _tmp_91: tuple | group | genexp
+// _tmp_92: STRING | FSTRING_START
static void *
-_tmp_91_rule(Parser *p)
+_tmp_92_rule(Parser *p)
+{
+ if (p->level++ == MAXSTACK) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ }
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ void * _res = NULL;
+ int _mark = p->mark;
+ { // STRING
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_92[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "STRING"));
+ expr_ty string_var;
+ if (
+ (string_var = _PyPegen_string_token(p)) // STRING
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_92[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "STRING"));
+ _res = string_var;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_92[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "STRING"));
+ }
+ { // FSTRING_START
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_92[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "FSTRING_START"));
+ Token * fstring_start_var;
+ if (
+ (fstring_start_var = _PyPegen_expect_token(p, FSTRING_START)) // token='FSTRING_START'
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_92[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "FSTRING_START"));
+ _res = fstring_start_var;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_92[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "FSTRING_START"));
+ }
+ _res = NULL;
+ done:
+ p->level--;
+ return _res;
+}
+
+// _tmp_93: tuple | group | genexp
+static void *
+_tmp_93_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -29389,18 +30460,18 @@ _tmp_91_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_91[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "tuple"));
+ D(fprintf(stderr, "%*c> _tmp_93[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "tuple"));
expr_ty tuple_var;
if (
(tuple_var = tuple_rule(p)) // tuple
)
{
- D(fprintf(stderr, "%*c+ _tmp_91[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "tuple"));
+ D(fprintf(stderr, "%*c+ _tmp_93[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "tuple"));
_res = tuple_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_91[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_93[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "tuple"));
}
{ // group
@@ -29408,18 +30479,18 @@ _tmp_91_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_91[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "group"));
+ D(fprintf(stderr, "%*c> _tmp_93[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "group"));
expr_ty group_var;
if (
(group_var = group_rule(p)) // group
)
{
- D(fprintf(stderr, "%*c+ _tmp_91[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "group"));
+ D(fprintf(stderr, "%*c+ _tmp_93[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "group"));
_res = group_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_91[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_93[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "group"));
}
{ // genexp
@@ -29427,18 +30498,18 @@ _tmp_91_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_91[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "genexp"));
+ D(fprintf(stderr, "%*c> _tmp_93[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "genexp"));
expr_ty genexp_var;
if (
(genexp_var = genexp_rule(p)) // genexp
)
{
- D(fprintf(stderr, "%*c+ _tmp_91[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "genexp"));
+ D(fprintf(stderr, "%*c+ _tmp_93[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "genexp"));
_res = genexp_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_91[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_93[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "genexp"));
}
_res = NULL;
@@ -29447,9 +30518,9 @@ _tmp_91_rule(Parser *p)
return _res;
}
-// _tmp_92: list | listcomp
+// _tmp_94: list | listcomp
static void *
-_tmp_92_rule(Parser *p)
+_tmp_94_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -29466,18 +30537,18 @@ _tmp_92_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_92[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "list"));
+ D(fprintf(stderr, "%*c> _tmp_94[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "list"));
expr_ty list_var;
if (
(list_var = list_rule(p)) // list
)
{
- D(fprintf(stderr, "%*c+ _tmp_92[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "list"));
+ D(fprintf(stderr, "%*c+ _tmp_94[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "list"));
_res = list_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_92[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_94[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "list"));
}
{ // listcomp
@@ -29485,18 +30556,18 @@ _tmp_92_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_92[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "listcomp"));
+ D(fprintf(stderr, "%*c> _tmp_94[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "listcomp"));
expr_ty listcomp_var;
if (
(listcomp_var = listcomp_rule(p)) // listcomp
)
{
- D(fprintf(stderr, "%*c+ _tmp_92[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "listcomp"));
+ D(fprintf(stderr, "%*c+ _tmp_94[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "listcomp"));
_res = listcomp_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_92[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_94[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "listcomp"));
}
_res = NULL;
@@ -29505,9 +30576,9 @@ _tmp_92_rule(Parser *p)
return _res;
}
-// _tmp_93: dict | set | dictcomp | setcomp
+// _tmp_95: dict | set | dictcomp | setcomp
static void *
-_tmp_93_rule(Parser *p)
+_tmp_95_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -29524,18 +30595,18 @@ _tmp_93_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_93[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "dict"));
+ D(fprintf(stderr, "%*c> _tmp_95[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "dict"));
expr_ty dict_var;
if (
(dict_var = dict_rule(p)) // dict
)
{
- D(fprintf(stderr, "%*c+ _tmp_93[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "dict"));
+ D(fprintf(stderr, "%*c+ _tmp_95[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "dict"));
_res = dict_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_93[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_95[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "dict"));
}
{ // set
@@ -29543,18 +30614,18 @@ _tmp_93_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_93[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "set"));
+ D(fprintf(stderr, "%*c> _tmp_95[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "set"));
expr_ty set_var;
if (
(set_var = set_rule(p)) // set
)
{
- D(fprintf(stderr, "%*c+ _tmp_93[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "set"));
+ D(fprintf(stderr, "%*c+ _tmp_95[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "set"));
_res = set_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_93[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_95[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "set"));
}
{ // dictcomp
@@ -29562,18 +30633,18 @@ _tmp_93_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_93[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "dictcomp"));
+ D(fprintf(stderr, "%*c> _tmp_95[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "dictcomp"));
expr_ty dictcomp_var;
if (
(dictcomp_var = dictcomp_rule(p)) // dictcomp
)
{
- D(fprintf(stderr, "%*c+ _tmp_93[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "dictcomp"));
+ D(fprintf(stderr, "%*c+ _tmp_95[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "dictcomp"));
_res = dictcomp_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_93[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_95[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "dictcomp"));
}
{ // setcomp
@@ -29581,18 +30652,18 @@ _tmp_93_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_93[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "setcomp"));
+ D(fprintf(stderr, "%*c> _tmp_95[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "setcomp"));
expr_ty setcomp_var;
if (
(setcomp_var = setcomp_rule(p)) // setcomp
)
{
- D(fprintf(stderr, "%*c+ _tmp_93[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "setcomp"));
+ D(fprintf(stderr, "%*c+ _tmp_95[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "setcomp"));
_res = setcomp_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_93[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_95[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "setcomp"));
}
_res = NULL;
@@ -29601,9 +30672,9 @@ _tmp_93_rule(Parser *p)
return _res;
}
-// _tmp_94: yield_expr | named_expression
+// _tmp_96: yield_expr | named_expression
static void *
-_tmp_94_rule(Parser *p)
+_tmp_96_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -29620,18 +30691,18 @@ _tmp_94_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_94[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr"));
+ D(fprintf(stderr, "%*c> _tmp_96[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr"));
expr_ty yield_expr_var;
if (
(yield_expr_var = yield_expr_rule(p)) // yield_expr
)
{
- D(fprintf(stderr, "%*c+ _tmp_94[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr"));
+ D(fprintf(stderr, "%*c+ _tmp_96[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr"));
_res = yield_expr_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_94[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_96[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "yield_expr"));
}
{ // named_expression
@@ -29639,18 +30710,18 @@ _tmp_94_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_94[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "named_expression"));
+ D(fprintf(stderr, "%*c> _tmp_96[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "named_expression"));
expr_ty named_expression_var;
if (
(named_expression_var = named_expression_rule(p)) // named_expression
)
{
- D(fprintf(stderr, "%*c+ _tmp_94[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "named_expression"));
+ D(fprintf(stderr, "%*c+ _tmp_96[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "named_expression"));
_res = named_expression_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_94[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_96[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "named_expression"));
}
_res = NULL;
@@ -29659,143 +30730,7 @@ _tmp_94_rule(Parser *p)
return _res;
}
-// _loop0_95: lambda_param_no_default
-static asdl_seq *
-_loop0_95_rule(Parser *p)
-{
- if (p->level++ == MAXSTACK) {
- p->error_indicator = 1;
- PyErr_NoMemory();
- }
- if (p->error_indicator) {
- p->level--;
- return NULL;
- }
- void *_res = NULL;
- int _mark = p->mark;
- void **_children = PyMem_Malloc(sizeof(void *));
- if (!_children) {
- p->error_indicator = 1;
- PyErr_NoMemory();
- p->level--;
- return NULL;
- }
- Py_ssize_t _children_capacity = 1;
- Py_ssize_t _n = 0;
- { // lambda_param_no_default
- if (p->error_indicator) {
- p->level--;
- return NULL;
- }
- D(fprintf(stderr, "%*c> _loop0_95[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default"));
- arg_ty lambda_param_no_default_var;
- while (
- (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default
- )
- {
- _res = lambda_param_no_default_var;
- if (_n == _children_capacity) {
- _children_capacity *= 2;
- void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
- if (!_new_children) {
- PyMem_Free(_children);
- p->error_indicator = 1;
- PyErr_NoMemory();
- p->level--;
- return NULL;
- }
- _children = _new_children;
- }
- _children[_n++] = _res;
- _mark = p->mark;
- }
- p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_95[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default"));
- }
- asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
- if (!_seq) {
- PyMem_Free(_children);
- p->error_indicator = 1;
- PyErr_NoMemory();
- p->level--;
- return NULL;
- }
- for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]);
- PyMem_Free(_children);
- p->level--;
- return _seq;
-}
-
-// _loop0_96: lambda_param_with_default
-static asdl_seq *
-_loop0_96_rule(Parser *p)
-{
- if (p->level++ == MAXSTACK) {
- p->error_indicator = 1;
- PyErr_NoMemory();
- }
- if (p->error_indicator) {
- p->level--;
- return NULL;
- }
- void *_res = NULL;
- int _mark = p->mark;
- void **_children = PyMem_Malloc(sizeof(void *));
- if (!_children) {
- p->error_indicator = 1;
- PyErr_NoMemory();
- p->level--;
- return NULL;
- }
- Py_ssize_t _children_capacity = 1;
- Py_ssize_t _n = 0;
- { // lambda_param_with_default
- if (p->error_indicator) {
- p->level--;
- return NULL;
- }
- D(fprintf(stderr, "%*c> _loop0_96[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default"));
- NameDefaultPair* lambda_param_with_default_var;
- while (
- (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default
- )
- {
- _res = lambda_param_with_default_var;
- if (_n == _children_capacity) {
- _children_capacity *= 2;
- void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
- if (!_new_children) {
- PyMem_Free(_children);
- p->error_indicator = 1;
- PyErr_NoMemory();
- p->level--;
- return NULL;
- }
- _children = _new_children;
- }
- _children[_n++] = _res;
- _mark = p->mark;
- }
- p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_96[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_with_default"));
- }
- asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
- if (!_seq) {
- PyMem_Free(_children);
- p->error_indicator = 1;
- PyErr_NoMemory();
- p->level--;
- return NULL;
- }
- for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]);
- PyMem_Free(_children);
- p->level--;
- return _seq;
-}
-
-// _loop0_97: lambda_param_with_default
+// _loop0_97: lambda_param_no_default
static asdl_seq *
_loop0_97_rule(Parser *p)
{
@@ -29818,18 +30753,18 @@ _loop0_97_rule(Parser *p)
}
Py_ssize_t _children_capacity = 1;
Py_ssize_t _n = 0;
- { // lambda_param_with_default
+ { // lambda_param_no_default
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_97[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default"));
- NameDefaultPair* lambda_param_with_default_var;
+ D(fprintf(stderr, "%*c> _loop0_97[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default"));
+ arg_ty lambda_param_no_default_var;
while (
- (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default
+ (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default
)
{
- _res = lambda_param_with_default_var;
+ _res = lambda_param_no_default_var;
if (_n == _children_capacity) {
_children_capacity *= 2;
void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
@@ -29847,7 +30782,7 @@ _loop0_97_rule(Parser *p)
}
p->mark = _mark;
D(fprintf(stderr, "%*c%s _loop0_97[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_with_default"));
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
if (!_seq) {
@@ -29863,9 +30798,9 @@ _loop0_97_rule(Parser *p)
return _seq;
}
-// _loop1_98: lambda_param_no_default
+// _loop0_98: lambda_param_with_default
static asdl_seq *
-_loop1_98_rule(Parser *p)
+_loop0_98_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -29886,18 +30821,18 @@ _loop1_98_rule(Parser *p)
}
Py_ssize_t _children_capacity = 1;
Py_ssize_t _n = 0;
- { // lambda_param_no_default
+ { // lambda_param_with_default
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_98[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default"));
- arg_ty lambda_param_no_default_var;
+ D(fprintf(stderr, "%*c> _loop0_98[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default"));
+ NameDefaultPair* lambda_param_with_default_var;
while (
- (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default
+ (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default
)
{
- _res = lambda_param_no_default_var;
+ _res = lambda_param_with_default_var;
if (_n == _children_capacity) {
_children_capacity *= 2;
void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
@@ -29914,13 +30849,8 @@ _loop1_98_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_98[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default"));
- }
- if (_n == 0 || p->error_indicator) {
- PyMem_Free(_children);
- p->level--;
- return NULL;
+ D(fprintf(stderr, "%*c%s _loop0_98[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_with_default"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
if (!_seq) {
@@ -30004,7 +30934,7 @@ _loop0_99_rule(Parser *p)
return _seq;
}
-// _loop1_100: lambda_param_with_default
+// _loop1_100: lambda_param_no_default
static asdl_seq *
_loop1_100_rule(Parser *p)
{
@@ -30027,18 +30957,18 @@ _loop1_100_rule(Parser *p)
}
Py_ssize_t _children_capacity = 1;
Py_ssize_t _n = 0;
- { // lambda_param_with_default
+ { // lambda_param_no_default
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_100[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default"));
- NameDefaultPair* lambda_param_with_default_var;
+ D(fprintf(stderr, "%*c> _loop1_100[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default"));
+ arg_ty lambda_param_no_default_var;
while (
- (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default
+ (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default
)
{
- _res = lambda_param_with_default_var;
+ _res = lambda_param_no_default_var;
if (_n == _children_capacity) {
_children_capacity *= 2;
void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
@@ -30056,7 +30986,7 @@ _loop1_100_rule(Parser *p)
}
p->mark = _mark;
D(fprintf(stderr, "%*c%s _loop1_100[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_with_default"));
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default"));
}
if (_n == 0 || p->error_indicator) {
PyMem_Free(_children);
@@ -30077,9 +31007,9 @@ _loop1_100_rule(Parser *p)
return _seq;
}
-// _loop1_101: lambda_param_no_default
+// _loop0_101: lambda_param_with_default
static asdl_seq *
-_loop1_101_rule(Parser *p)
+_loop0_101_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -30100,18 +31030,18 @@ _loop1_101_rule(Parser *p)
}
Py_ssize_t _children_capacity = 1;
Py_ssize_t _n = 0;
- { // lambda_param_no_default
+ { // lambda_param_with_default
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_101[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default"));
- arg_ty lambda_param_no_default_var;
+ D(fprintf(stderr, "%*c> _loop0_101[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default"));
+ NameDefaultPair* lambda_param_with_default_var;
while (
- (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default
+ (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default
)
{
- _res = lambda_param_no_default_var;
+ _res = lambda_param_with_default_var;
if (_n == _children_capacity) {
_children_capacity *= 2;
void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
@@ -30128,13 +31058,8 @@ _loop1_101_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_101[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default"));
- }
- if (_n == 0 || p->error_indicator) {
- PyMem_Free(_children);
- p->level--;
- return NULL;
+ D(fprintf(stderr, "%*c%s _loop0_101[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_with_default"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
if (!_seq) {
@@ -30150,7 +31075,7 @@ _loop1_101_rule(Parser *p)
return _seq;
}
-// _loop1_102: lambda_param_no_default
+// _loop1_102: lambda_param_with_default
static asdl_seq *
_loop1_102_rule(Parser *p)
{
@@ -30173,18 +31098,18 @@ _loop1_102_rule(Parser *p)
}
Py_ssize_t _children_capacity = 1;
Py_ssize_t _n = 0;
- { // lambda_param_no_default
+ { // lambda_param_with_default
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_102[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default"));
- arg_ty lambda_param_no_default_var;
+ D(fprintf(stderr, "%*c> _loop1_102[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default"));
+ NameDefaultPair* lambda_param_with_default_var;
while (
- (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default
+ (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default
)
{
- _res = lambda_param_no_default_var;
+ _res = lambda_param_with_default_var;
if (_n == _children_capacity) {
_children_capacity *= 2;
void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
@@ -30202,7 +31127,7 @@ _loop1_102_rule(Parser *p)
}
p->mark = _mark;
D(fprintf(stderr, "%*c%s _loop1_102[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default"));
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_with_default"));
}
if (_n == 0 || p->error_indicator) {
PyMem_Free(_children);
@@ -30223,9 +31148,9 @@ _loop1_102_rule(Parser *p)
return _seq;
}
-// _loop0_103: lambda_param_no_default
+// _loop1_103: lambda_param_no_default
static asdl_seq *
-_loop0_103_rule(Parser *p)
+_loop1_103_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -30251,7 +31176,7 @@ _loop0_103_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_103[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default"));
+ D(fprintf(stderr, "%*c> _loop1_103[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default"));
arg_ty lambda_param_no_default_var;
while (
(lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default
@@ -30274,9 +31199,14 @@ _loop0_103_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_103[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_103[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default"));
}
+ if (_n == 0 || p->error_indicator) {
+ PyMem_Free(_children);
+ p->level--;
+ return NULL;
+ }
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
if (!_seq) {
PyMem_Free(_children);
@@ -30291,7 +31221,7 @@ _loop0_103_rule(Parser *p)
return _seq;
}
-// _loop1_104: lambda_param_with_default
+// _loop1_104: lambda_param_no_default
static asdl_seq *
_loop1_104_rule(Parser *p)
{
@@ -30314,18 +31244,18 @@ _loop1_104_rule(Parser *p)
}
Py_ssize_t _children_capacity = 1;
Py_ssize_t _n = 0;
- { // lambda_param_with_default
+ { // lambda_param_no_default
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_104[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default"));
- NameDefaultPair* lambda_param_with_default_var;
+ D(fprintf(stderr, "%*c> _loop1_104[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default"));
+ arg_ty lambda_param_no_default_var;
while (
- (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default
+ (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default
)
{
- _res = lambda_param_with_default_var;
+ _res = lambda_param_no_default_var;
if (_n == _children_capacity) {
_children_capacity *= 2;
void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
@@ -30343,7 +31273,7 @@ _loop1_104_rule(Parser *p)
}
p->mark = _mark;
D(fprintf(stderr, "%*c%s _loop1_104[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_with_default"));
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default"));
}
if (_n == 0 || p->error_indicator) {
PyMem_Free(_children);
@@ -30505,7 +31435,7 @@ _loop1_106_rule(Parser *p)
return _seq;
}
-// _loop0_107: lambda_param_maybe_default
+// _loop0_107: lambda_param_no_default
static asdl_seq *
_loop0_107_rule(Parser *p)
{
@@ -30528,18 +31458,18 @@ _loop0_107_rule(Parser *p)
}
Py_ssize_t _children_capacity = 1;
Py_ssize_t _n = 0;
- { // lambda_param_maybe_default
+ { // lambda_param_no_default
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_107[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default"));
- NameDefaultPair* lambda_param_maybe_default_var;
+ D(fprintf(stderr, "%*c> _loop0_107[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default"));
+ arg_ty lambda_param_no_default_var;
while (
- (lambda_param_maybe_default_var = lambda_param_maybe_default_rule(p)) // lambda_param_maybe_default
+ (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default
)
{
- _res = lambda_param_maybe_default_var;
+ _res = lambda_param_no_default_var;
if (_n == _children_capacity) {
_children_capacity *= 2;
void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
@@ -30557,7 +31487,7 @@ _loop0_107_rule(Parser *p)
}
p->mark = _mark;
D(fprintf(stderr, "%*c%s _loop0_107[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_maybe_default"));
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
if (!_seq) {
@@ -30573,7 +31503,7 @@ _loop0_107_rule(Parser *p)
return _seq;
}
-// _loop1_108: lambda_param_maybe_default
+// _loop1_108: lambda_param_with_default
static asdl_seq *
_loop1_108_rule(Parser *p)
{
@@ -30596,18 +31526,18 @@ _loop1_108_rule(Parser *p)
}
Py_ssize_t _children_capacity = 1;
Py_ssize_t _n = 0;
- { // lambda_param_maybe_default
+ { // lambda_param_with_default
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_108[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default"));
- NameDefaultPair* lambda_param_maybe_default_var;
+ D(fprintf(stderr, "%*c> _loop1_108[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default"));
+ NameDefaultPair* lambda_param_with_default_var;
while (
- (lambda_param_maybe_default_var = lambda_param_maybe_default_rule(p)) // lambda_param_maybe_default
+ (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default
)
{
- _res = lambda_param_maybe_default_var;
+ _res = lambda_param_with_default_var;
if (_n == _children_capacity) {
_children_capacity *= 2;
void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
@@ -30625,7 +31555,7 @@ _loop1_108_rule(Parser *p)
}
p->mark = _mark;
D(fprintf(stderr, "%*c%s _loop1_108[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_maybe_default"));
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_with_default"));
}
if (_n == 0 || p->error_indicator) {
PyMem_Free(_children);
@@ -30646,9 +31576,9 @@ _loop1_108_rule(Parser *p)
return _seq;
}
-// _loop1_109: STRING
+// _loop0_109: lambda_param_maybe_default
static asdl_seq *
-_loop1_109_rule(Parser *p)
+_loop0_109_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -30669,18 +31599,18 @@ _loop1_109_rule(Parser *p)
}
Py_ssize_t _children_capacity = 1;
Py_ssize_t _n = 0;
- { // STRING
+ { // lambda_param_maybe_default
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_109[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "STRING"));
- expr_ty string_var;
+ D(fprintf(stderr, "%*c> _loop0_109[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default"));
+ NameDefaultPair* lambda_param_maybe_default_var;
while (
- (string_var = _PyPegen_string_token(p)) // STRING
+ (lambda_param_maybe_default_var = lambda_param_maybe_default_rule(p)) // lambda_param_maybe_default
)
{
- _res = string_var;
+ _res = lambda_param_maybe_default_var;
if (_n == _children_capacity) {
_children_capacity *= 2;
void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
@@ -30697,13 +31627,8 @@ _loop1_109_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_109[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "STRING"));
- }
- if (_n == 0 || p->error_indicator) {
- PyMem_Free(_children);
- p->level--;
- return NULL;
+ D(fprintf(stderr, "%*c%s _loop0_109[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_maybe_default"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
if (!_seq) {
@@ -30719,59 +31644,9 @@ _loop1_109_rule(Parser *p)
return _seq;
}
-// _tmp_110: star_named_expression ',' star_named_expressions?
-static void *
-_tmp_110_rule(Parser *p)
-{
- if (p->level++ == MAXSTACK) {
- p->error_indicator = 1;
- PyErr_NoMemory();
- }
- if (p->error_indicator) {
- p->level--;
- return NULL;
- }
- void * _res = NULL;
- int _mark = p->mark;
- { // star_named_expression ',' star_named_expressions?
- if (p->error_indicator) {
- p->level--;
- return NULL;
- }
- D(fprintf(stderr, "%*c> _tmp_110[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_named_expression ',' star_named_expressions?"));
- Token * _literal;
- expr_ty y;
- void *z;
- if (
- (y = star_named_expression_rule(p)) // star_named_expression
- &&
- (_literal = _PyPegen_expect_token(p, 12)) // token=','
- &&
- (z = star_named_expressions_rule(p), !p->error_indicator) // star_named_expressions?
- )
- {
- D(fprintf(stderr, "%*c+ _tmp_110[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_named_expression ',' star_named_expressions?"));
- _res = _PyPegen_seq_insert_in_front ( p , y , z );
- if (_res == NULL && PyErr_Occurred()) {
- p->error_indicator = 1;
- p->level--;
- return NULL;
- }
- goto done;
- }
- p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_110[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_named_expression ',' star_named_expressions?"));
- }
- _res = NULL;
- done:
- p->level--;
- return _res;
-}
-
-// _loop0_112: ',' double_starred_kvpair
+// _loop1_110: lambda_param_maybe_default
static asdl_seq *
-_loop0_112_rule(Parser *p)
+_loop1_110_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -30792,27 +31667,18 @@ _loop0_112_rule(Parser *p)
}
Py_ssize_t _children_capacity = 1;
Py_ssize_t _n = 0;
- { // ',' double_starred_kvpair
+ { // lambda_param_maybe_default
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_112[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' double_starred_kvpair"));
- Token * _literal;
- KeyValuePair* elem;
+ D(fprintf(stderr, "%*c> _loop1_110[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default"));
+ NameDefaultPair* lambda_param_maybe_default_var;
while (
- (_literal = _PyPegen_expect_token(p, 12)) // token=','
- &&
- (elem = double_starred_kvpair_rule(p)) // double_starred_kvpair
+ (lambda_param_maybe_default_var = lambda_param_maybe_default_rule(p)) // lambda_param_maybe_default
)
{
- _res = elem;
- if (_res == NULL && PyErr_Occurred()) {
- p->error_indicator = 1;
- PyMem_Free(_children);
- p->level--;
- return NULL;
- }
+ _res = lambda_param_maybe_default_var;
if (_n == _children_capacity) {
_children_capacity *= 2;
void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
@@ -30829,8 +31695,13 @@ _loop0_112_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_112[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' double_starred_kvpair"));
+ D(fprintf(stderr, "%*c%s _loop1_110[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_maybe_default"));
+ }
+ if (_n == 0 || p->error_indicator) {
+ PyMem_Free(_children);
+ p->level--;
+ return NULL;
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
if (!_seq) {
@@ -30846,9 +31717,9 @@ _loop0_112_rule(Parser *p)
return _seq;
}
-// _gather_111: double_starred_kvpair _loop0_112
-static asdl_seq *
-_gather_111_rule(Parser *p)
+// _tmp_111: yield_expr | star_expressions
+static void *
+_tmp_111_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -30858,112 +31729,55 @@ _gather_111_rule(Parser *p)
p->level--;
return NULL;
}
- asdl_seq * _res = NULL;
+ void * _res = NULL;
int _mark = p->mark;
- { // double_starred_kvpair _loop0_112
+ { // yield_expr
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_111[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "double_starred_kvpair _loop0_112"));
- KeyValuePair* elem;
- asdl_seq * seq;
+ D(fprintf(stderr, "%*c> _tmp_111[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr"));
+ expr_ty yield_expr_var;
if (
- (elem = double_starred_kvpair_rule(p)) // double_starred_kvpair
- &&
- (seq = _loop0_112_rule(p)) // _loop0_112
+ (yield_expr_var = yield_expr_rule(p)) // yield_expr
)
{
- D(fprintf(stderr, "%*c+ _gather_111[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "double_starred_kvpair _loop0_112"));
- _res = _PyPegen_seq_insert_in_front(p, elem, seq);
+ D(fprintf(stderr, "%*c+ _tmp_111[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr"));
+ _res = yield_expr_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_111[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "double_starred_kvpair _loop0_112"));
- }
- _res = NULL;
- done:
- p->level--;
- return _res;
-}
-
-// _loop1_113: for_if_clause
-static asdl_seq *
-_loop1_113_rule(Parser *p)
-{
- if (p->level++ == MAXSTACK) {
- p->error_indicator = 1;
- PyErr_NoMemory();
- }
- if (p->error_indicator) {
- p->level--;
- return NULL;
- }
- void *_res = NULL;
- int _mark = p->mark;
- void **_children = PyMem_Malloc(sizeof(void *));
- if (!_children) {
- p->error_indicator = 1;
- PyErr_NoMemory();
- p->level--;
- return NULL;
+ D(fprintf(stderr, "%*c%s _tmp_111[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "yield_expr"));
}
- Py_ssize_t _children_capacity = 1;
- Py_ssize_t _n = 0;
- { // for_if_clause
+ { // star_expressions
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_113[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "for_if_clause"));
- comprehension_ty for_if_clause_var;
- while (
- (for_if_clause_var = for_if_clause_rule(p)) // for_if_clause
+ D(fprintf(stderr, "%*c> _tmp_111[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions"));
+ expr_ty star_expressions_var;
+ if (
+ (star_expressions_var = star_expressions_rule(p)) // star_expressions
)
{
- _res = for_if_clause_var;
- if (_n == _children_capacity) {
- _children_capacity *= 2;
- void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
- if (!_new_children) {
- PyMem_Free(_children);
- p->error_indicator = 1;
- PyErr_NoMemory();
- p->level--;
- return NULL;
- }
- _children = _new_children;
- }
- _children[_n++] = _res;
- _mark = p->mark;
+ D(fprintf(stderr, "%*c+ _tmp_111[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions"));
+ _res = star_expressions_var;
+ goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_113[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "for_if_clause"));
- }
- if (_n == 0 || p->error_indicator) {
- PyMem_Free(_children);
- p->level--;
- return NULL;
- }
- asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
- if (!_seq) {
- PyMem_Free(_children);
- p->error_indicator = 1;
- PyErr_NoMemory();
- p->level--;
- return NULL;
+ D(fprintf(stderr, "%*c%s _tmp_111[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_expressions"));
}
- for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]);
- PyMem_Free(_children);
+ _res = NULL;
+ done:
p->level--;
- return _seq;
+ return _res;
}
-// _loop0_114: ('if' disjunction)
+// _loop0_112: fstring_format_spec
static asdl_seq *
-_loop0_114_rule(Parser *p)
+_loop0_112_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -30984,18 +31798,18 @@ _loop0_114_rule(Parser *p)
}
Py_ssize_t _children_capacity = 1;
Py_ssize_t _n = 0;
- { // ('if' disjunction)
+ { // fstring_format_spec
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_114[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('if' disjunction)"));
- void *_tmp_233_var;
+ D(fprintf(stderr, "%*c> _loop0_112[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "fstring_format_spec"));
+ expr_ty fstring_format_spec_var;
while (
- (_tmp_233_var = _tmp_233_rule(p)) // 'if' disjunction
+ (fstring_format_spec_var = fstring_format_spec_rule(p)) // fstring_format_spec
)
{
- _res = _tmp_233_var;
+ _res = fstring_format_spec_var;
if (_n == _children_capacity) {
_children_capacity *= 2;
void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
@@ -31012,8 +31826,8 @@ _loop0_114_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_114[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "('if' disjunction)"));
+ D(fprintf(stderr, "%*c%s _loop0_112[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "fstring_format_spec"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
if (!_seq) {
@@ -31029,9 +31843,9 @@ _loop0_114_rule(Parser *p)
return _seq;
}
-// _loop0_115: ('if' disjunction)
+// _loop1_113: (fstring | string)
static asdl_seq *
-_loop0_115_rule(Parser *p)
+_loop1_113_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -31052,18 +31866,18 @@ _loop0_115_rule(Parser *p)
}
Py_ssize_t _children_capacity = 1;
Py_ssize_t _n = 0;
- { // ('if' disjunction)
+ { // (fstring | string)
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_115[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('if' disjunction)"));
- void *_tmp_234_var;
+ D(fprintf(stderr, "%*c> _loop1_113[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(fstring | string)"));
+ void *_tmp_253_var;
while (
- (_tmp_234_var = _tmp_234_rule(p)) // 'if' disjunction
+ (_tmp_253_var = _tmp_253_rule(p)) // fstring | string
)
{
- _res = _tmp_234_var;
+ _res = _tmp_253_var;
if (_n == _children_capacity) {
_children_capacity *= 2;
void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
@@ -31080,8 +31894,13 @@ _loop0_115_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_115[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "('if' disjunction)"));
+ D(fprintf(stderr, "%*c%s _loop1_113[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(fstring | string)"));
+ }
+ if (_n == 0 || p->error_indicator) {
+ PyMem_Free(_children);
+ p->level--;
+ return NULL;
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
if (!_seq) {
@@ -31097,9 +31916,9 @@ _loop0_115_rule(Parser *p)
return _seq;
}
-// _tmp_116: assignment_expression | expression !':='
+// _tmp_114: star_named_expression ',' star_named_expressions?
static void *
-_tmp_116_rule(Parser *p)
+_tmp_114_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -31111,45 +31930,35 @@ _tmp_116_rule(Parser *p)
}
void * _res = NULL;
int _mark = p->mark;
- { // assignment_expression
- if (p->error_indicator) {
- p->level--;
- return NULL;
- }
- D(fprintf(stderr, "%*c> _tmp_116[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "assignment_expression"));
- expr_ty assignment_expression_var;
- if (
- (assignment_expression_var = assignment_expression_rule(p)) // assignment_expression
- )
- {
- D(fprintf(stderr, "%*c+ _tmp_116[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "assignment_expression"));
- _res = assignment_expression_var;
- goto done;
- }
- p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_116[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "assignment_expression"));
- }
- { // expression !':='
+ { // star_named_expression ',' star_named_expressions?
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_116[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression !':='"));
- expr_ty expression_var;
+ D(fprintf(stderr, "%*c> _tmp_114[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_named_expression ',' star_named_expressions?"));
+ Token * _literal;
+ expr_ty y;
+ void *z;
if (
- (expression_var = expression_rule(p)) // expression
+ (y = star_named_expression_rule(p)) // star_named_expression
&&
- _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 53) // token=':='
+ (_literal = _PyPegen_expect_token(p, 12)) // token=','
+ &&
+ (z = star_named_expressions_rule(p), !p->error_indicator) // star_named_expressions?
)
{
- D(fprintf(stderr, "%*c+ _tmp_116[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression !':='"));
- _res = expression_var;
+ D(fprintf(stderr, "%*c+ _tmp_114[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_named_expression ',' star_named_expressions?"));
+ _res = _PyPegen_seq_insert_in_front ( p , y , z );
+ if (_res == NULL && PyErr_Occurred()) {
+ p->error_indicator = 1;
+ p->level--;
+ return NULL;
+ }
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_116[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression !':='"));
+ D(fprintf(stderr, "%*c%s _tmp_114[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_named_expression ',' star_named_expressions?"));
}
_res = NULL;
done:
@@ -31157,9 +31966,9 @@ _tmp_116_rule(Parser *p)
return _res;
}
-// _loop0_118: ',' (starred_expression | (assignment_expression | expression !':=') !'=')
+// _loop0_116: ',' double_starred_kvpair
static asdl_seq *
-_loop0_118_rule(Parser *p)
+_loop0_116_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -31180,18 +31989,18 @@ _loop0_118_rule(Parser *p)
}
Py_ssize_t _children_capacity = 1;
Py_ssize_t _n = 0;
- { // ',' (starred_expression | (assignment_expression | expression !':=') !'=')
+ { // ',' double_starred_kvpair
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_118[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (starred_expression | (assignment_expression | expression !':=') !'=')"));
+ D(fprintf(stderr, "%*c> _loop0_116[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' double_starred_kvpair"));
Token * _literal;
- void *elem;
+ KeyValuePair* elem;
while (
(_literal = _PyPegen_expect_token(p, 12)) // token=','
&&
- (elem = _tmp_235_rule(p)) // starred_expression | (assignment_expression | expression !':=') !'='
+ (elem = double_starred_kvpair_rule(p)) // double_starred_kvpair
)
{
_res = elem;
@@ -31217,8 +32026,8 @@ _loop0_118_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_118[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' (starred_expression | (assignment_expression | expression !':=') !'=')"));
+ D(fprintf(stderr, "%*c%s _loop0_116[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' double_starred_kvpair"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
if (!_seq) {
@@ -31234,10 +32043,9 @@ _loop0_118_rule(Parser *p)
return _seq;
}
-// _gather_117:
-// | (starred_expression | (assignment_expression | expression !':=') !'=') _loop0_118
+// _gather_115: double_starred_kvpair _loop0_116
static asdl_seq *
-_gather_117_rule(Parser *p)
+_gather_115_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -31249,27 +32057,27 @@ _gather_117_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // (starred_expression | (assignment_expression | expression !':=') !'=') _loop0_118
+ { // double_starred_kvpair _loop0_116
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_117[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(starred_expression | (assignment_expression | expression !':=') !'=') _loop0_118"));
- void *elem;
+ D(fprintf(stderr, "%*c> _gather_115[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "double_starred_kvpair _loop0_116"));
+ KeyValuePair* elem;
asdl_seq * seq;
if (
- (elem = _tmp_235_rule(p)) // starred_expression | (assignment_expression | expression !':=') !'='
+ (elem = double_starred_kvpair_rule(p)) // double_starred_kvpair
&&
- (seq = _loop0_118_rule(p)) // _loop0_118
+ (seq = _loop0_116_rule(p)) // _loop0_116
)
{
- D(fprintf(stderr, "%*c+ _gather_117[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(starred_expression | (assignment_expression | expression !':=') !'=') _loop0_118"));
+ D(fprintf(stderr, "%*c+ _gather_115[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "double_starred_kvpair _loop0_116"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_117[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(starred_expression | (assignment_expression | expression !':=') !'=') _loop0_118"));
+ D(fprintf(stderr, "%*c%s _gather_115[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "double_starred_kvpair _loop0_116"));
}
_res = NULL;
done:
@@ -31277,9 +32085,9 @@ _gather_117_rule(Parser *p)
return _res;
}
-// _tmp_119: ',' kwargs
-static void *
-_tmp_119_rule(Parser *p)
+// _loop1_117: for_if_clause
+static asdl_seq *
+_loop1_117_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -31289,44 +32097,70 @@ _tmp_119_rule(Parser *p)
p->level--;
return NULL;
}
- void * _res = NULL;
+ void *_res = NULL;
int _mark = p->mark;
- { // ',' kwargs
+ void **_children = PyMem_Malloc(sizeof(void *));
+ if (!_children) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ p->level--;
+ return NULL;
+ }
+ Py_ssize_t _children_capacity = 1;
+ Py_ssize_t _n = 0;
+ { // for_if_clause
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_119[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' kwargs"));
- Token * _literal;
- asdl_seq* k;
- if (
- (_literal = _PyPegen_expect_token(p, 12)) // token=','
- &&
- (k = kwargs_rule(p)) // kwargs
+ D(fprintf(stderr, "%*c> _loop1_117[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "for_if_clause"));
+ comprehension_ty for_if_clause_var;
+ while (
+ (for_if_clause_var = for_if_clause_rule(p)) // for_if_clause
)
{
- D(fprintf(stderr, "%*c+ _tmp_119[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' kwargs"));
- _res = k;
- if (_res == NULL && PyErr_Occurred()) {
- p->error_indicator = 1;
- p->level--;
- return NULL;
+ _res = for_if_clause_var;
+ if (_n == _children_capacity) {
+ _children_capacity *= 2;
+ void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
+ if (!_new_children) {
+ PyMem_Free(_children);
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ p->level--;
+ return NULL;
+ }
+ _children = _new_children;
}
- goto done;
+ _children[_n++] = _res;
+ _mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_119[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' kwargs"));
+ D(fprintf(stderr, "%*c%s _loop1_117[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "for_if_clause"));
}
- _res = NULL;
- done:
+ if (_n == 0 || p->error_indicator) {
+ PyMem_Free(_children);
+ p->level--;
+ return NULL;
+ }
+ asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
+ if (!_seq) {
+ PyMem_Free(_children);
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ p->level--;
+ return NULL;
+ }
+ for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]);
+ PyMem_Free(_children);
p->level--;
- return _res;
+ return _seq;
}
-// _loop0_121: ',' kwarg_or_starred
+// _loop0_118: ('if' disjunction)
static asdl_seq *
-_loop0_121_rule(Parser *p)
+_loop0_118_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -31347,27 +32181,18 @@ _loop0_121_rule(Parser *p)
}
Py_ssize_t _children_capacity = 1;
Py_ssize_t _n = 0;
- { // ',' kwarg_or_starred
+ { // ('if' disjunction)
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_121[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' kwarg_or_starred"));
- Token * _literal;
- KeywordOrStarred* elem;
+ D(fprintf(stderr, "%*c> _loop0_118[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('if' disjunction)"));
+ void *_tmp_254_var;
while (
- (_literal = _PyPegen_expect_token(p, 12)) // token=','
- &&
- (elem = kwarg_or_starred_rule(p)) // kwarg_or_starred
+ (_tmp_254_var = _tmp_254_rule(p)) // 'if' disjunction
)
{
- _res = elem;
- if (_res == NULL && PyErr_Occurred()) {
- p->error_indicator = 1;
- PyMem_Free(_children);
- p->level--;
- return NULL;
- }
+ _res = _tmp_254_var;
if (_n == _children_capacity) {
_children_capacity *= 2;
void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
@@ -31384,8 +32209,8 @@ _loop0_121_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_121[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' kwarg_or_starred"));
+ D(fprintf(stderr, "%*c%s _loop0_118[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "('if' disjunction)"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
if (!_seq) {
@@ -31401,9 +32226,9 @@ _loop0_121_rule(Parser *p)
return _seq;
}
-// _gather_120: kwarg_or_starred _loop0_121
+// _loop0_119: ('if' disjunction)
static asdl_seq *
-_gather_120_rule(Parser *p)
+_loop0_119_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -31413,29 +32238,115 @@ _gather_120_rule(Parser *p)
p->level--;
return NULL;
}
- asdl_seq * _res = NULL;
+ void *_res = NULL;
int _mark = p->mark;
- { // kwarg_or_starred _loop0_121
+ void **_children = PyMem_Malloc(sizeof(void *));
+ if (!_children) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ p->level--;
+ return NULL;
+ }
+ Py_ssize_t _children_capacity = 1;
+ Py_ssize_t _n = 0;
+ { // ('if' disjunction)
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_120[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "kwarg_or_starred _loop0_121"));
- KeywordOrStarred* elem;
- asdl_seq * seq;
+ D(fprintf(stderr, "%*c> _loop0_119[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('if' disjunction)"));
+ void *_tmp_255_var;
+ while (
+ (_tmp_255_var = _tmp_255_rule(p)) // 'if' disjunction
+ )
+ {
+ _res = _tmp_255_var;
+ if (_n == _children_capacity) {
+ _children_capacity *= 2;
+ void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
+ if (!_new_children) {
+ PyMem_Free(_children);
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ p->level--;
+ return NULL;
+ }
+ _children = _new_children;
+ }
+ _children[_n++] = _res;
+ _mark = p->mark;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _loop0_119[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "('if' disjunction)"));
+ }
+ asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
+ if (!_seq) {
+ PyMem_Free(_children);
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ p->level--;
+ return NULL;
+ }
+ for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]);
+ PyMem_Free(_children);
+ p->level--;
+ return _seq;
+}
+
+// _tmp_120: assignment_expression | expression !':='
+static void *
+_tmp_120_rule(Parser *p)
+{
+ if (p->level++ == MAXSTACK) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ }
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ void * _res = NULL;
+ int _mark = p->mark;
+ { // assignment_expression
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_120[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "assignment_expression"));
+ expr_ty assignment_expression_var;
if (
- (elem = kwarg_or_starred_rule(p)) // kwarg_or_starred
+ (assignment_expression_var = assignment_expression_rule(p)) // assignment_expression
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_120[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "assignment_expression"));
+ _res = assignment_expression_var;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_120[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "assignment_expression"));
+ }
+ { // expression !':='
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_120[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression !':='"));
+ expr_ty expression_var;
+ if (
+ (expression_var = expression_rule(p)) // expression
&&
- (seq = _loop0_121_rule(p)) // _loop0_121
+ _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 53) // token=':='
)
{
- D(fprintf(stderr, "%*c+ _gather_120[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "kwarg_or_starred _loop0_121"));
- _res = _PyPegen_seq_insert_in_front(p, elem, seq);
+ D(fprintf(stderr, "%*c+ _tmp_120[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression !':='"));
+ _res = expression_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_120[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "kwarg_or_starred _loop0_121"));
+ D(fprintf(stderr, "%*c%s _tmp_120[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression !':='"));
}
_res = NULL;
done:
@@ -31443,9 +32354,9 @@ _gather_120_rule(Parser *p)
return _res;
}
-// _loop0_123: ',' kwarg_or_double_starred
+// _loop0_122: ',' (starred_expression | (assignment_expression | expression !':=') !'=')
static asdl_seq *
-_loop0_123_rule(Parser *p)
+_loop0_122_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -31466,18 +32377,18 @@ _loop0_123_rule(Parser *p)
}
Py_ssize_t _children_capacity = 1;
Py_ssize_t _n = 0;
- { // ',' kwarg_or_double_starred
+ { // ',' (starred_expression | (assignment_expression | expression !':=') !'=')
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_123[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' kwarg_or_double_starred"));
+ D(fprintf(stderr, "%*c> _loop0_122[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (starred_expression | (assignment_expression | expression !':=') !'=')"));
Token * _literal;
- KeywordOrStarred* elem;
+ void *elem;
while (
(_literal = _PyPegen_expect_token(p, 12)) // token=','
&&
- (elem = kwarg_or_double_starred_rule(p)) // kwarg_or_double_starred
+ (elem = _tmp_256_rule(p)) // starred_expression | (assignment_expression | expression !':=') !'='
)
{
_res = elem;
@@ -31503,8 +32414,8 @@ _loop0_123_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_123[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' kwarg_or_double_starred"));
+ D(fprintf(stderr, "%*c%s _loop0_122[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' (starred_expression | (assignment_expression | expression !':=') !'=')"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
if (!_seq) {
@@ -31520,9 +32431,10 @@ _loop0_123_rule(Parser *p)
return _seq;
}
-// _gather_122: kwarg_or_double_starred _loop0_123
+// _gather_121:
+// | (starred_expression | (assignment_expression | expression !':=') !'=') _loop0_122
static asdl_seq *
-_gather_122_rule(Parser *p)
+_gather_121_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -31534,27 +32446,74 @@ _gather_122_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // kwarg_or_double_starred _loop0_123
+ { // (starred_expression | (assignment_expression | expression !':=') !'=') _loop0_122
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_122[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "kwarg_or_double_starred _loop0_123"));
- KeywordOrStarred* elem;
+ D(fprintf(stderr, "%*c> _gather_121[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(starred_expression | (assignment_expression | expression !':=') !'=') _loop0_122"));
+ void *elem;
asdl_seq * seq;
if (
- (elem = kwarg_or_double_starred_rule(p)) // kwarg_or_double_starred
+ (elem = _tmp_256_rule(p)) // starred_expression | (assignment_expression | expression !':=') !'='
&&
- (seq = _loop0_123_rule(p)) // _loop0_123
+ (seq = _loop0_122_rule(p)) // _loop0_122
)
{
- D(fprintf(stderr, "%*c+ _gather_122[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "kwarg_or_double_starred _loop0_123"));
+ D(fprintf(stderr, "%*c+ _gather_121[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(starred_expression | (assignment_expression | expression !':=') !'=') _loop0_122"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_122[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "kwarg_or_double_starred _loop0_123"));
+ D(fprintf(stderr, "%*c%s _gather_121[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(starred_expression | (assignment_expression | expression !':=') !'=') _loop0_122"));
+ }
+ _res = NULL;
+ done:
+ p->level--;
+ return _res;
+}
+
+// _tmp_123: ',' kwargs
+static void *
+_tmp_123_rule(Parser *p)
+{
+ if (p->level++ == MAXSTACK) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ }
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ void * _res = NULL;
+ int _mark = p->mark;
+ { // ',' kwargs
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_123[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' kwargs"));
+ Token * _literal;
+ asdl_seq* k;
+ if (
+ (_literal = _PyPegen_expect_token(p, 12)) // token=','
+ &&
+ (k = kwargs_rule(p)) // kwargs
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_123[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' kwargs"));
+ _res = k;
+ if (_res == NULL && PyErr_Occurred()) {
+ p->error_indicator = 1;
+ p->level--;
+ return NULL;
+ }
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_123[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' kwargs"));
}
_res = NULL;
done:
@@ -31800,9 +32759,247 @@ _gather_126_rule(Parser *p)
return _res;
}
-// _loop0_128: (',' star_target)
+// _loop0_129: ',' kwarg_or_starred
static asdl_seq *
-_loop0_128_rule(Parser *p)
+_loop0_129_rule(Parser *p)
+{
+ if (p->level++ == MAXSTACK) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ }
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ void *_res = NULL;
+ int _mark = p->mark;
+ void **_children = PyMem_Malloc(sizeof(void *));
+ if (!_children) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ p->level--;
+ return NULL;
+ }
+ Py_ssize_t _children_capacity = 1;
+ Py_ssize_t _n = 0;
+ { // ',' kwarg_or_starred
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _loop0_129[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' kwarg_or_starred"));
+ Token * _literal;
+ KeywordOrStarred* elem;
+ while (
+ (_literal = _PyPegen_expect_token(p, 12)) // token=','
+ &&
+ (elem = kwarg_or_starred_rule(p)) // kwarg_or_starred
+ )
+ {
+ _res = elem;
+ if (_res == NULL && PyErr_Occurred()) {
+ p->error_indicator = 1;
+ PyMem_Free(_children);
+ p->level--;
+ return NULL;
+ }
+ if (_n == _children_capacity) {
+ _children_capacity *= 2;
+ void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
+ if (!_new_children) {
+ PyMem_Free(_children);
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ p->level--;
+ return NULL;
+ }
+ _children = _new_children;
+ }
+ _children[_n++] = _res;
+ _mark = p->mark;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _loop0_129[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' kwarg_or_starred"));
+ }
+ asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
+ if (!_seq) {
+ PyMem_Free(_children);
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ p->level--;
+ return NULL;
+ }
+ for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]);
+ PyMem_Free(_children);
+ p->level--;
+ return _seq;
+}
+
+// _gather_128: kwarg_or_starred _loop0_129
+static asdl_seq *
+_gather_128_rule(Parser *p)
+{
+ if (p->level++ == MAXSTACK) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ }
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ asdl_seq * _res = NULL;
+ int _mark = p->mark;
+ { // kwarg_or_starred _loop0_129
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _gather_128[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "kwarg_or_starred _loop0_129"));
+ KeywordOrStarred* elem;
+ asdl_seq * seq;
+ if (
+ (elem = kwarg_or_starred_rule(p)) // kwarg_or_starred
+ &&
+ (seq = _loop0_129_rule(p)) // _loop0_129
+ )
+ {
+ D(fprintf(stderr, "%*c+ _gather_128[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "kwarg_or_starred _loop0_129"));
+ _res = _PyPegen_seq_insert_in_front(p, elem, seq);
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _gather_128[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "kwarg_or_starred _loop0_129"));
+ }
+ _res = NULL;
+ done:
+ p->level--;
+ return _res;
+}
+
+// _loop0_131: ',' kwarg_or_double_starred
+static asdl_seq *
+_loop0_131_rule(Parser *p)
+{
+ if (p->level++ == MAXSTACK) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ }
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ void *_res = NULL;
+ int _mark = p->mark;
+ void **_children = PyMem_Malloc(sizeof(void *));
+ if (!_children) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ p->level--;
+ return NULL;
+ }
+ Py_ssize_t _children_capacity = 1;
+ Py_ssize_t _n = 0;
+ { // ',' kwarg_or_double_starred
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _loop0_131[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' kwarg_or_double_starred"));
+ Token * _literal;
+ KeywordOrStarred* elem;
+ while (
+ (_literal = _PyPegen_expect_token(p, 12)) // token=','
+ &&
+ (elem = kwarg_or_double_starred_rule(p)) // kwarg_or_double_starred
+ )
+ {
+ _res = elem;
+ if (_res == NULL && PyErr_Occurred()) {
+ p->error_indicator = 1;
+ PyMem_Free(_children);
+ p->level--;
+ return NULL;
+ }
+ if (_n == _children_capacity) {
+ _children_capacity *= 2;
+ void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
+ if (!_new_children) {
+ PyMem_Free(_children);
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ p->level--;
+ return NULL;
+ }
+ _children = _new_children;
+ }
+ _children[_n++] = _res;
+ _mark = p->mark;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _loop0_131[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' kwarg_or_double_starred"));
+ }
+ asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
+ if (!_seq) {
+ PyMem_Free(_children);
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ p->level--;
+ return NULL;
+ }
+ for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]);
+ PyMem_Free(_children);
+ p->level--;
+ return _seq;
+}
+
+// _gather_130: kwarg_or_double_starred _loop0_131
+static asdl_seq *
+_gather_130_rule(Parser *p)
+{
+ if (p->level++ == MAXSTACK) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ }
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ asdl_seq * _res = NULL;
+ int _mark = p->mark;
+ { // kwarg_or_double_starred _loop0_131
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _gather_130[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "kwarg_or_double_starred _loop0_131"));
+ KeywordOrStarred* elem;
+ asdl_seq * seq;
+ if (
+ (elem = kwarg_or_double_starred_rule(p)) // kwarg_or_double_starred
+ &&
+ (seq = _loop0_131_rule(p)) // _loop0_131
+ )
+ {
+ D(fprintf(stderr, "%*c+ _gather_130[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "kwarg_or_double_starred _loop0_131"));
+ _res = _PyPegen_seq_insert_in_front(p, elem, seq);
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _gather_130[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "kwarg_or_double_starred _loop0_131"));
+ }
+ _res = NULL;
+ done:
+ p->level--;
+ return _res;
+}
+
+// _loop0_132: (',' star_target)
+static asdl_seq *
+_loop0_132_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -31828,13 +33025,13 @@ _loop0_128_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_128[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' star_target)"));
- void *_tmp_236_var;
+ D(fprintf(stderr, "%*c> _loop0_132[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' star_target)"));
+ void *_tmp_257_var;
while (
- (_tmp_236_var = _tmp_236_rule(p)) // ',' star_target
+ (_tmp_257_var = _tmp_257_rule(p)) // ',' star_target
)
{
- _res = _tmp_236_var;
+ _res = _tmp_257_var;
if (_n == _children_capacity) {
_children_capacity *= 2;
void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
@@ -31851,7 +33048,7 @@ _loop0_128_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_128[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_132[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(',' star_target)"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -31868,9 +33065,9 @@ _loop0_128_rule(Parser *p)
return _seq;
}
-// _loop0_130: ',' star_target
+// _loop0_134: ',' star_target
static asdl_seq *
-_loop0_130_rule(Parser *p)
+_loop0_134_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -31896,7 +33093,7 @@ _loop0_130_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_130[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_target"));
+ D(fprintf(stderr, "%*c> _loop0_134[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_target"));
Token * _literal;
expr_ty elem;
while (
@@ -31928,7 +33125,7 @@ _loop0_130_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_130[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_134[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' star_target"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -31945,9 +33142,9 @@ _loop0_130_rule(Parser *p)
return _seq;
}
-// _gather_129: star_target _loop0_130
+// _gather_133: star_target _loop0_134
static asdl_seq *
-_gather_129_rule(Parser *p)
+_gather_133_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -31959,27 +33156,27 @@ _gather_129_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // star_target _loop0_130
+ { // star_target _loop0_134
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_129[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_target _loop0_130"));
+ D(fprintf(stderr, "%*c> _gather_133[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_target _loop0_134"));
expr_ty elem;
asdl_seq * seq;
if (
(elem = star_target_rule(p)) // star_target
&&
- (seq = _loop0_130_rule(p)) // _loop0_130
+ (seq = _loop0_134_rule(p)) // _loop0_134
)
{
- D(fprintf(stderr, "%*c+ _gather_129[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_target _loop0_130"));
+ D(fprintf(stderr, "%*c+ _gather_133[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_target _loop0_134"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_129[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_target _loop0_130"));
+ D(fprintf(stderr, "%*c%s _gather_133[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_target _loop0_134"));
}
_res = NULL;
done:
@@ -31987,9 +33184,9 @@ _gather_129_rule(Parser *p)
return _res;
}
-// _loop1_131: (',' star_target)
+// _loop1_135: (',' star_target)
static asdl_seq *
-_loop1_131_rule(Parser *p)
+_loop1_135_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -32015,13 +33212,13 @@ _loop1_131_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_131[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' star_target)"));
- void *_tmp_237_var;
+ D(fprintf(stderr, "%*c> _loop1_135[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' star_target)"));
+ void *_tmp_258_var;
while (
- (_tmp_237_var = _tmp_237_rule(p)) // ',' star_target
+ (_tmp_258_var = _tmp_258_rule(p)) // ',' star_target
)
{
- _res = _tmp_237_var;
+ _res = _tmp_258_var;
if (_n == _children_capacity) {
_children_capacity *= 2;
void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
@@ -32038,7 +33235,7 @@ _loop1_131_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_131[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_135[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(',' star_target)"));
}
if (_n == 0 || p->error_indicator) {
@@ -32060,9 +33257,9 @@ _loop1_131_rule(Parser *p)
return _seq;
}
-// _tmp_132: !'*' star_target
+// _tmp_136: !'*' star_target
static void *
-_tmp_132_rule(Parser *p)
+_tmp_136_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -32079,7 +33276,7 @@ _tmp_132_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_132[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "!'*' star_target"));
+ D(fprintf(stderr, "%*c> _tmp_136[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "!'*' star_target"));
expr_ty star_target_var;
if (
_PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 16) // token='*'
@@ -32087,12 +33284,12 @@ _tmp_132_rule(Parser *p)
(star_target_var = star_target_rule(p)) // star_target
)
{
- D(fprintf(stderr, "%*c+ _tmp_132[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "!'*' star_target"));
+ D(fprintf(stderr, "%*c+ _tmp_136[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "!'*' star_target"));
_res = star_target_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_132[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_136[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "!'*' star_target"));
}
_res = NULL;
@@ -32101,9 +33298,9 @@ _tmp_132_rule(Parser *p)
return _res;
}
-// _loop0_134: ',' del_target
+// _loop0_138: ',' del_target
static asdl_seq *
-_loop0_134_rule(Parser *p)
+_loop0_138_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -32129,7 +33326,7 @@ _loop0_134_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_134[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' del_target"));
+ D(fprintf(stderr, "%*c> _loop0_138[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' del_target"));
Token * _literal;
expr_ty elem;
while (
@@ -32161,7 +33358,7 @@ _loop0_134_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_134[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_138[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' del_target"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -32178,9 +33375,9 @@ _loop0_134_rule(Parser *p)
return _seq;
}
-// _gather_133: del_target _loop0_134
+// _gather_137: del_target _loop0_138
static asdl_seq *
-_gather_133_rule(Parser *p)
+_gather_137_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -32192,27 +33389,27 @@ _gather_133_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // del_target _loop0_134
+ { // del_target _loop0_138
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_133[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "del_target _loop0_134"));
+ D(fprintf(stderr, "%*c> _gather_137[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "del_target _loop0_138"));
expr_ty elem;
asdl_seq * seq;
if (
(elem = del_target_rule(p)) // del_target
&&
- (seq = _loop0_134_rule(p)) // _loop0_134
+ (seq = _loop0_138_rule(p)) // _loop0_138
)
{
- D(fprintf(stderr, "%*c+ _gather_133[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "del_target _loop0_134"));
+ D(fprintf(stderr, "%*c+ _gather_137[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "del_target _loop0_138"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_133[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "del_target _loop0_134"));
+ D(fprintf(stderr, "%*c%s _gather_137[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "del_target _loop0_138"));
}
_res = NULL;
done:
@@ -32220,9 +33417,9 @@ _gather_133_rule(Parser *p)
return _res;
}
-// _loop0_136: ',' expression
+// _loop0_140: ',' expression
static asdl_seq *
-_loop0_136_rule(Parser *p)
+_loop0_140_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -32248,7 +33445,7 @@ _loop0_136_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_136[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression"));
+ D(fprintf(stderr, "%*c> _loop0_140[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression"));
Token * _literal;
expr_ty elem;
while (
@@ -32280,7 +33477,7 @@ _loop0_136_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_136[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_140[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' expression"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -32297,9 +33494,9 @@ _loop0_136_rule(Parser *p)
return _seq;
}
-// _gather_135: expression _loop0_136
+// _gather_139: expression _loop0_140
static asdl_seq *
-_gather_135_rule(Parser *p)
+_gather_139_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -32311,27 +33508,27 @@ _gather_135_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // expression _loop0_136
+ { // expression _loop0_140
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_135[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression _loop0_136"));
+ D(fprintf(stderr, "%*c> _gather_139[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression _loop0_140"));
expr_ty elem;
asdl_seq * seq;
if (
(elem = expression_rule(p)) // expression
&&
- (seq = _loop0_136_rule(p)) // _loop0_136
+ (seq = _loop0_140_rule(p)) // _loop0_140
)
{
- D(fprintf(stderr, "%*c+ _gather_135[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression _loop0_136"));
+ D(fprintf(stderr, "%*c+ _gather_139[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression _loop0_140"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_135[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression _loop0_136"));
+ D(fprintf(stderr, "%*c%s _gather_139[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression _loop0_140"));
}
_res = NULL;
done:
@@ -32339,9 +33536,9 @@ _gather_135_rule(Parser *p)
return _res;
}
-// _loop0_138: ',' expression
+// _loop0_142: ',' expression
static asdl_seq *
-_loop0_138_rule(Parser *p)
+_loop0_142_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -32367,7 +33564,7 @@ _loop0_138_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_138[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression"));
+ D(fprintf(stderr, "%*c> _loop0_142[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression"));
Token * _literal;
expr_ty elem;
while (
@@ -32399,7 +33596,7 @@ _loop0_138_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_138[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_142[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' expression"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -32416,9 +33613,9 @@ _loop0_138_rule(Parser *p)
return _seq;
}
-// _gather_137: expression _loop0_138
+// _gather_141: expression _loop0_142
static asdl_seq *
-_gather_137_rule(Parser *p)
+_gather_141_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -32430,27 +33627,27 @@ _gather_137_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // expression _loop0_138
+ { // expression _loop0_142
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_137[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression _loop0_138"));
+ D(fprintf(stderr, "%*c> _gather_141[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression _loop0_142"));
expr_ty elem;
asdl_seq * seq;
if (
(elem = expression_rule(p)) // expression
&&
- (seq = _loop0_138_rule(p)) // _loop0_138
+ (seq = _loop0_142_rule(p)) // _loop0_142
)
{
- D(fprintf(stderr, "%*c+ _gather_137[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression _loop0_138"));
+ D(fprintf(stderr, "%*c+ _gather_141[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression _loop0_142"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_137[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression _loop0_138"));
+ D(fprintf(stderr, "%*c%s _gather_141[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression _loop0_142"));
}
_res = NULL;
done:
@@ -32458,9 +33655,9 @@ _gather_137_rule(Parser *p)
return _res;
}
-// _loop0_140: ',' expression
+// _loop0_144: ',' expression
static asdl_seq *
-_loop0_140_rule(Parser *p)
+_loop0_144_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -32486,7 +33683,7 @@ _loop0_140_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_140[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression"));
+ D(fprintf(stderr, "%*c> _loop0_144[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression"));
Token * _literal;
expr_ty elem;
while (
@@ -32518,7 +33715,7 @@ _loop0_140_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_140[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_144[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' expression"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -32535,9 +33732,9 @@ _loop0_140_rule(Parser *p)
return _seq;
}
-// _gather_139: expression _loop0_140
+// _gather_143: expression _loop0_144
static asdl_seq *
-_gather_139_rule(Parser *p)
+_gather_143_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -32549,27 +33746,27 @@ _gather_139_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // expression _loop0_140
+ { // expression _loop0_144
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_139[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression _loop0_140"));
+ D(fprintf(stderr, "%*c> _gather_143[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression _loop0_144"));
expr_ty elem;
asdl_seq * seq;
if (
(elem = expression_rule(p)) // expression
&&
- (seq = _loop0_140_rule(p)) // _loop0_140
+ (seq = _loop0_144_rule(p)) // _loop0_144
)
{
- D(fprintf(stderr, "%*c+ _gather_139[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression _loop0_140"));
+ D(fprintf(stderr, "%*c+ _gather_143[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression _loop0_144"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_139[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression _loop0_140"));
+ D(fprintf(stderr, "%*c%s _gather_143[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression _loop0_144"));
}
_res = NULL;
done:
@@ -32577,9 +33774,9 @@ _gather_139_rule(Parser *p)
return _res;
}
-// _loop0_142: ',' expression
+// _loop0_146: ',' expression
static asdl_seq *
-_loop0_142_rule(Parser *p)
+_loop0_146_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -32605,7 +33802,7 @@ _loop0_142_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_142[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression"));
+ D(fprintf(stderr, "%*c> _loop0_146[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression"));
Token * _literal;
expr_ty elem;
while (
@@ -32637,7 +33834,7 @@ _loop0_142_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_142[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_146[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' expression"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -32654,9 +33851,9 @@ _loop0_142_rule(Parser *p)
return _seq;
}
-// _gather_141: expression _loop0_142
+// _gather_145: expression _loop0_146
static asdl_seq *
-_gather_141_rule(Parser *p)
+_gather_145_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -32668,27 +33865,27 @@ _gather_141_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // expression _loop0_142
+ { // expression _loop0_146
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_141[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression _loop0_142"));
+ D(fprintf(stderr, "%*c> _gather_145[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression _loop0_146"));
expr_ty elem;
asdl_seq * seq;
if (
(elem = expression_rule(p)) // expression
&&
- (seq = _loop0_142_rule(p)) // _loop0_142
+ (seq = _loop0_146_rule(p)) // _loop0_146
)
{
- D(fprintf(stderr, "%*c+ _gather_141[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression _loop0_142"));
+ D(fprintf(stderr, "%*c+ _gather_145[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression _loop0_146"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_141[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression _loop0_142"));
+ D(fprintf(stderr, "%*c%s _gather_145[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression _loop0_146"));
}
_res = NULL;
done:
@@ -32696,9 +33893,9 @@ _gather_141_rule(Parser *p)
return _res;
}
-// _tmp_143: NEWLINE INDENT
+// _tmp_147: NEWLINE INDENT
static void *
-_tmp_143_rule(Parser *p)
+_tmp_147_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -32715,7 +33912,7 @@ _tmp_143_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_143[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NEWLINE INDENT"));
+ D(fprintf(stderr, "%*c> _tmp_147[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NEWLINE INDENT"));
Token * indent_var;
Token * newline_var;
if (
@@ -32724,12 +33921,12 @@ _tmp_143_rule(Parser *p)
(indent_var = _PyPegen_expect_token(p, INDENT)) // token='INDENT'
)
{
- D(fprintf(stderr, "%*c+ _tmp_143[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NEWLINE INDENT"));
+ D(fprintf(stderr, "%*c+ _tmp_147[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NEWLINE INDENT"));
_res = _PyPegen_dummy_name(p, newline_var, indent_var);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_143[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_147[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NEWLINE INDENT"));
}
_res = NULL;
@@ -32738,9 +33935,9 @@ _tmp_143_rule(Parser *p)
return _res;
}
-// _tmp_144: args | expression for_if_clauses
+// _tmp_148: args | expression for_if_clauses
static void *
-_tmp_144_rule(Parser *p)
+_tmp_148_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -32757,18 +33954,18 @@ _tmp_144_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_144[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "args"));
+ D(fprintf(stderr, "%*c> _tmp_148[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "args"));
expr_ty args_var;
if (
(args_var = args_rule(p)) // args
)
{
- D(fprintf(stderr, "%*c+ _tmp_144[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "args"));
+ D(fprintf(stderr, "%*c+ _tmp_148[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "args"));
_res = args_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_144[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_148[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "args"));
}
{ // expression for_if_clauses
@@ -32776,7 +33973,7 @@ _tmp_144_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_144[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression for_if_clauses"));
+ D(fprintf(stderr, "%*c> _tmp_148[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression for_if_clauses"));
expr_ty expression_var;
asdl_comprehension_seq* for_if_clauses_var;
if (
@@ -32785,12 +33982,12 @@ _tmp_144_rule(Parser *p)
(for_if_clauses_var = for_if_clauses_rule(p)) // for_if_clauses
)
{
- D(fprintf(stderr, "%*c+ _tmp_144[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression for_if_clauses"));
+ D(fprintf(stderr, "%*c+ _tmp_148[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression for_if_clauses"));
_res = _PyPegen_dummy_name(p, expression_var, for_if_clauses_var);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_144[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_148[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression for_if_clauses"));
}
_res = NULL;
@@ -32799,9 +33996,9 @@ _tmp_144_rule(Parser *p)
return _res;
}
-// _tmp_145: args ','
+// _tmp_149: args ','
static void *
-_tmp_145_rule(Parser *p)
+_tmp_149_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -32818,7 +34015,7 @@ _tmp_145_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_145[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "args ','"));
+ D(fprintf(stderr, "%*c> _tmp_149[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "args ','"));
Token * _literal;
expr_ty args_var;
if (
@@ -32827,12 +34024,12 @@ _tmp_145_rule(Parser *p)
(_literal = _PyPegen_expect_token(p, 12)) // token=','
)
{
- D(fprintf(stderr, "%*c+ _tmp_145[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "args ','"));
+ D(fprintf(stderr, "%*c+ _tmp_149[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "args ','"));
_res = _PyPegen_dummy_name(p, args_var, _literal);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_145[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_149[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "args ','"));
}
_res = NULL;
@@ -32841,9 +34038,9 @@ _tmp_145_rule(Parser *p)
return _res;
}
-// _tmp_146: ',' | ')'
+// _tmp_150: ',' | ')'
static void *
-_tmp_146_rule(Parser *p)
+_tmp_150_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -32860,18 +34057,18 @@ _tmp_146_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_146[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','"));
+ D(fprintf(stderr, "%*c> _tmp_150[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 12)) // token=','
)
{
- D(fprintf(stderr, "%*c+ _tmp_146[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','"));
+ D(fprintf(stderr, "%*c+ _tmp_150[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_146[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_150[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','"));
}
{ // ')'
@@ -32879,18 +34076,18 @@ _tmp_146_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_146[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'"));
+ D(fprintf(stderr, "%*c> _tmp_150[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 8)) // token=')'
)
{
- D(fprintf(stderr, "%*c+ _tmp_146[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'"));
+ D(fprintf(stderr, "%*c+ _tmp_150[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_146[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_150[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "')'"));
}
_res = NULL;
@@ -32899,9 +34096,9 @@ _tmp_146_rule(Parser *p)
return _res;
}
-// _tmp_147: 'True' | 'False' | 'None'
+// _tmp_151: 'True' | 'False' | 'None'
static void *
-_tmp_147_rule(Parser *p)
+_tmp_151_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -32918,18 +34115,18 @@ _tmp_147_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_147[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'True'"));
+ D(fprintf(stderr, "%*c> _tmp_151[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'True'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 600)) // token='True'
+ (_keyword = _PyPegen_expect_token(p, 601)) // token='True'
)
{
- D(fprintf(stderr, "%*c+ _tmp_147[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'True'"));
+ D(fprintf(stderr, "%*c+ _tmp_151[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'True'"));
_res = _keyword;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_147[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_151[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'True'"));
}
{ // 'False'
@@ -32937,18 +34134,18 @@ _tmp_147_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_147[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'False'"));
+ D(fprintf(stderr, "%*c> _tmp_151[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'False'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 602)) // token='False'
+ (_keyword = _PyPegen_expect_token(p, 603)) // token='False'
)
{
- D(fprintf(stderr, "%*c+ _tmp_147[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'False'"));
+ D(fprintf(stderr, "%*c+ _tmp_151[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'False'"));
_res = _keyword;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_147[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_151[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'False'"));
}
{ // 'None'
@@ -32956,18 +34153,18 @@ _tmp_147_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_147[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'None'"));
+ D(fprintf(stderr, "%*c> _tmp_151[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'None'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 601)) // token='None'
+ (_keyword = _PyPegen_expect_token(p, 602)) // token='None'
)
{
- D(fprintf(stderr, "%*c+ _tmp_147[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'None'"));
+ D(fprintf(stderr, "%*c+ _tmp_151[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'None'"));
_res = _keyword;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_147[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_151[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'None'"));
}
_res = NULL;
@@ -32976,9 +34173,9 @@ _tmp_147_rule(Parser *p)
return _res;
}
-// _tmp_148: NAME '='
+// _tmp_152: NAME '='
static void *
-_tmp_148_rule(Parser *p)
+_tmp_152_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -32995,7 +34192,7 @@ _tmp_148_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_148[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME '='"));
+ D(fprintf(stderr, "%*c> _tmp_152[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME '='"));
Token * _literal;
expr_ty name_var;
if (
@@ -33004,12 +34201,12 @@ _tmp_148_rule(Parser *p)
(_literal = _PyPegen_expect_token(p, 22)) // token='='
)
{
- D(fprintf(stderr, "%*c+ _tmp_148[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME '='"));
+ D(fprintf(stderr, "%*c+ _tmp_152[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME '='"));
_res = _PyPegen_dummy_name(p, name_var, _literal);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_148[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_152[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NAME '='"));
}
_res = NULL;
@@ -33018,9 +34215,9 @@ _tmp_148_rule(Parser *p)
return _res;
}
-// _tmp_149: NAME STRING | SOFT_KEYWORD
+// _tmp_153: NAME STRING | SOFT_KEYWORD
static void *
-_tmp_149_rule(Parser *p)
+_tmp_153_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -33037,7 +34234,7 @@ _tmp_149_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_149[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME STRING"));
+ D(fprintf(stderr, "%*c> _tmp_153[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME STRING"));
expr_ty name_var;
expr_ty string_var;
if (
@@ -33046,12 +34243,12 @@ _tmp_149_rule(Parser *p)
(string_var = _PyPegen_string_token(p)) // STRING
)
{
- D(fprintf(stderr, "%*c+ _tmp_149[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME STRING"));
+ D(fprintf(stderr, "%*c+ _tmp_153[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME STRING"));
_res = _PyPegen_dummy_name(p, name_var, string_var);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_149[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_153[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NAME STRING"));
}
{ // SOFT_KEYWORD
@@ -33059,18 +34256,18 @@ _tmp_149_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_149[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "SOFT_KEYWORD"));
+ D(fprintf(stderr, "%*c> _tmp_153[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "SOFT_KEYWORD"));
expr_ty soft_keyword_var;
if (
(soft_keyword_var = _PyPegen_soft_keyword_token(p)) // SOFT_KEYWORD
)
{
- D(fprintf(stderr, "%*c+ _tmp_149[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "SOFT_KEYWORD"));
+ D(fprintf(stderr, "%*c+ _tmp_153[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "SOFT_KEYWORD"));
_res = soft_keyword_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_149[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_153[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "SOFT_KEYWORD"));
}
_res = NULL;
@@ -33079,9 +34276,9 @@ _tmp_149_rule(Parser *p)
return _res;
}
-// _tmp_150: 'else' | ':'
+// _tmp_154: 'else' | ':'
static void *
-_tmp_150_rule(Parser *p)
+_tmp_154_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -33098,18 +34295,18 @@ _tmp_150_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_150[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'else'"));
+ D(fprintf(stderr, "%*c> _tmp_154[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'else'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 644)) // token='else'
+ (_keyword = _PyPegen_expect_token(p, 645)) // token='else'
)
{
- D(fprintf(stderr, "%*c+ _tmp_150[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'else'"));
+ D(fprintf(stderr, "%*c+ _tmp_154[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'else'"));
_res = _keyword;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_150[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_154[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'else'"));
}
{ // ':'
@@ -33117,18 +34314,18 @@ _tmp_150_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_150[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'"));
+ D(fprintf(stderr, "%*c> _tmp_154[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
)
{
- D(fprintf(stderr, "%*c+ _tmp_150[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'"));
+ D(fprintf(stderr, "%*c+ _tmp_154[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_150[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_154[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':'"));
}
_res = NULL;
@@ -33137,9 +34334,67 @@ _tmp_150_rule(Parser *p)
return _res;
}
-// _tmp_151: '=' | ':='
+// _tmp_155: FSTRING_MIDDLE | fstring_replacement_field
static void *
-_tmp_151_rule(Parser *p)
+_tmp_155_rule(Parser *p)
+{
+ if (p->level++ == MAXSTACK) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ }
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ void * _res = NULL;
+ int _mark = p->mark;
+ { // FSTRING_MIDDLE
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_155[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "FSTRING_MIDDLE"));
+ Token * fstring_middle_var;
+ if (
+ (fstring_middle_var = _PyPegen_expect_token(p, FSTRING_MIDDLE)) // token='FSTRING_MIDDLE'
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_155[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "FSTRING_MIDDLE"));
+ _res = fstring_middle_var;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_155[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "FSTRING_MIDDLE"));
+ }
+ { // fstring_replacement_field
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_155[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "fstring_replacement_field"));
+ expr_ty fstring_replacement_field_var;
+ if (
+ (fstring_replacement_field_var = fstring_replacement_field_rule(p)) // fstring_replacement_field
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_155[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "fstring_replacement_field"));
+ _res = fstring_replacement_field_var;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_155[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "fstring_replacement_field"));
+ }
+ _res = NULL;
+ done:
+ p->level--;
+ return _res;
+}
+
+// _tmp_156: '=' | ':='
+static void *
+_tmp_156_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -33156,18 +34411,18 @@ _tmp_151_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_151[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'='"));
+ D(fprintf(stderr, "%*c> _tmp_156[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'='"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 22)) // token='='
)
{
- D(fprintf(stderr, "%*c+ _tmp_151[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'='"));
+ D(fprintf(stderr, "%*c+ _tmp_156[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'='"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_151[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_156[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'='"));
}
{ // ':='
@@ -33175,18 +34430,18 @@ _tmp_151_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_151[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':='"));
+ D(fprintf(stderr, "%*c> _tmp_156[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':='"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 53)) // token=':='
)
{
- D(fprintf(stderr, "%*c+ _tmp_151[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':='"));
+ D(fprintf(stderr, "%*c+ _tmp_156[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':='"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_151[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_156[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':='"));
}
_res = NULL;
@@ -33195,9 +34450,9 @@ _tmp_151_rule(Parser *p)
return _res;
}
-// _tmp_152: list | tuple | genexp | 'True' | 'None' | 'False'
+// _tmp_157: list | tuple | genexp | 'True' | 'None' | 'False'
static void *
-_tmp_152_rule(Parser *p)
+_tmp_157_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -33214,18 +34469,18 @@ _tmp_152_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_152[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "list"));
+ D(fprintf(stderr, "%*c> _tmp_157[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "list"));
expr_ty list_var;
if (
(list_var = list_rule(p)) // list
)
{
- D(fprintf(stderr, "%*c+ _tmp_152[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "list"));
+ D(fprintf(stderr, "%*c+ _tmp_157[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "list"));
_res = list_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_152[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_157[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "list"));
}
{ // tuple
@@ -33233,18 +34488,18 @@ _tmp_152_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_152[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "tuple"));
+ D(fprintf(stderr, "%*c> _tmp_157[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "tuple"));
expr_ty tuple_var;
if (
(tuple_var = tuple_rule(p)) // tuple
)
{
- D(fprintf(stderr, "%*c+ _tmp_152[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "tuple"));
+ D(fprintf(stderr, "%*c+ _tmp_157[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "tuple"));
_res = tuple_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_152[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_157[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "tuple"));
}
{ // genexp
@@ -33252,18 +34507,18 @@ _tmp_152_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_152[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "genexp"));
+ D(fprintf(stderr, "%*c> _tmp_157[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "genexp"));
expr_ty genexp_var;
if (
(genexp_var = genexp_rule(p)) // genexp
)
{
- D(fprintf(stderr, "%*c+ _tmp_152[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "genexp"));
+ D(fprintf(stderr, "%*c+ _tmp_157[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "genexp"));
_res = genexp_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_152[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_157[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "genexp"));
}
{ // 'True'
@@ -33271,18 +34526,18 @@ _tmp_152_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_152[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'True'"));
+ D(fprintf(stderr, "%*c> _tmp_157[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'True'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 600)) // token='True'
+ (_keyword = _PyPegen_expect_token(p, 601)) // token='True'
)
{
- D(fprintf(stderr, "%*c+ _tmp_152[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'True'"));
+ D(fprintf(stderr, "%*c+ _tmp_157[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'True'"));
_res = _keyword;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_152[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_157[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'True'"));
}
{ // 'None'
@@ -33290,18 +34545,18 @@ _tmp_152_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_152[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'None'"));
+ D(fprintf(stderr, "%*c> _tmp_157[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'None'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 601)) // token='None'
+ (_keyword = _PyPegen_expect_token(p, 602)) // token='None'
)
{
- D(fprintf(stderr, "%*c+ _tmp_152[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'None'"));
+ D(fprintf(stderr, "%*c+ _tmp_157[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'None'"));
_res = _keyword;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_152[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_157[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'None'"));
}
{ // 'False'
@@ -33309,18 +34564,18 @@ _tmp_152_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_152[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'False'"));
+ D(fprintf(stderr, "%*c> _tmp_157[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'False'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 602)) // token='False'
+ (_keyword = _PyPegen_expect_token(p, 603)) // token='False'
)
{
- D(fprintf(stderr, "%*c+ _tmp_152[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'False'"));
+ D(fprintf(stderr, "%*c+ _tmp_157[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'False'"));
_res = _keyword;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_152[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_157[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'False'"));
}
_res = NULL;
@@ -33329,9 +34584,9 @@ _tmp_152_rule(Parser *p)
return _res;
}
-// _tmp_153: '=' | ':='
+// _tmp_158: '=' | ':='
static void *
-_tmp_153_rule(Parser *p)
+_tmp_158_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -33348,18 +34603,18 @@ _tmp_153_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_153[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'='"));
+ D(fprintf(stderr, "%*c> _tmp_158[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'='"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 22)) // token='='
)
{
- D(fprintf(stderr, "%*c+ _tmp_153[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'='"));
+ D(fprintf(stderr, "%*c+ _tmp_158[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'='"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_153[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_158[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'='"));
}
{ // ':='
@@ -33367,18 +34622,18 @@ _tmp_153_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_153[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':='"));
+ D(fprintf(stderr, "%*c> _tmp_158[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':='"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 53)) // token=':='
)
{
- D(fprintf(stderr, "%*c+ _tmp_153[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':='"));
+ D(fprintf(stderr, "%*c+ _tmp_158[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':='"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_153[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_158[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':='"));
}
_res = NULL;
@@ -33387,9 +34642,9 @@ _tmp_153_rule(Parser *p)
return _res;
}
-// _loop0_154: star_named_expressions
+// _loop0_159: star_named_expressions
static asdl_seq *
-_loop0_154_rule(Parser *p)
+_loop0_159_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -33415,7 +34670,7 @@ _loop0_154_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_154[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_named_expressions"));
+ D(fprintf(stderr, "%*c> _loop0_159[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_named_expressions"));
asdl_expr_seq* star_named_expressions_var;
while (
(star_named_expressions_var = star_named_expressions_rule(p)) // star_named_expressions
@@ -33438,7 +34693,7 @@ _loop0_154_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_154[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_159[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_named_expressions"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -33455,9 +34710,9 @@ _loop0_154_rule(Parser *p)
return _seq;
}
-// _loop0_155: (star_targets '=')
+// _loop0_160: (star_targets '=')
static asdl_seq *
-_loop0_155_rule(Parser *p)
+_loop0_160_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -33483,13 +34738,13 @@ _loop0_155_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_155[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(star_targets '=')"));
- void *_tmp_238_var;
+ D(fprintf(stderr, "%*c> _loop0_160[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(star_targets '=')"));
+ void *_tmp_259_var;
while (
- (_tmp_238_var = _tmp_238_rule(p)) // star_targets '='
+ (_tmp_259_var = _tmp_259_rule(p)) // star_targets '='
)
{
- _res = _tmp_238_var;
+ _res = _tmp_259_var;
if (_n == _children_capacity) {
_children_capacity *= 2;
void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
@@ -33506,7 +34761,7 @@ _loop0_155_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_155[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_160[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(star_targets '=')"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -33523,9 +34778,9 @@ _loop0_155_rule(Parser *p)
return _seq;
}
-// _loop0_156: (star_targets '=')
+// _loop0_161: (star_targets '=')
static asdl_seq *
-_loop0_156_rule(Parser *p)
+_loop0_161_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -33551,13 +34806,13 @@ _loop0_156_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_156[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(star_targets '=')"));
- void *_tmp_239_var;
+ D(fprintf(stderr, "%*c> _loop0_161[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(star_targets '=')"));
+ void *_tmp_260_var;
while (
- (_tmp_239_var = _tmp_239_rule(p)) // star_targets '='
+ (_tmp_260_var = _tmp_260_rule(p)) // star_targets '='
)
{
- _res = _tmp_239_var;
+ _res = _tmp_260_var;
if (_n == _children_capacity) {
_children_capacity *= 2;
void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
@@ -33574,7 +34829,7 @@ _loop0_156_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_156[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_161[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(star_targets '=')"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -33591,9 +34846,9 @@ _loop0_156_rule(Parser *p)
return _seq;
}
-// _tmp_157: yield_expr | star_expressions
+// _tmp_162: yield_expr | star_expressions
static void *
-_tmp_157_rule(Parser *p)
+_tmp_162_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -33610,18 +34865,18 @@ _tmp_157_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_157[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr"));
+ D(fprintf(stderr, "%*c> _tmp_162[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr"));
expr_ty yield_expr_var;
if (
(yield_expr_var = yield_expr_rule(p)) // yield_expr
)
{
- D(fprintf(stderr, "%*c+ _tmp_157[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr"));
+ D(fprintf(stderr, "%*c+ _tmp_162[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr"));
_res = yield_expr_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_157[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_162[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "yield_expr"));
}
{ // star_expressions
@@ -33629,18 +34884,18 @@ _tmp_157_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_157[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions"));
+ D(fprintf(stderr, "%*c> _tmp_162[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions"));
expr_ty star_expressions_var;
if (
(star_expressions_var = star_expressions_rule(p)) // star_expressions
)
{
- D(fprintf(stderr, "%*c+ _tmp_157[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions"));
+ D(fprintf(stderr, "%*c+ _tmp_162[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions"));
_res = star_expressions_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_157[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_162[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_expressions"));
}
_res = NULL;
@@ -33649,9 +34904,9 @@ _tmp_157_rule(Parser *p)
return _res;
}
-// _tmp_158: '[' | '(' | '{'
+// _tmp_163: '[' | '(' | '{'
static void *
-_tmp_158_rule(Parser *p)
+_tmp_163_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -33668,18 +34923,18 @@ _tmp_158_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_158[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'['"));
+ D(fprintf(stderr, "%*c> _tmp_163[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'['"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 9)) // token='['
)
{
- D(fprintf(stderr, "%*c+ _tmp_158[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'['"));
+ D(fprintf(stderr, "%*c+ _tmp_163[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'['"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_158[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_163[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'['"));
}
{ // '('
@@ -33687,18 +34942,18 @@ _tmp_158_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_158[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'('"));
+ D(fprintf(stderr, "%*c> _tmp_163[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'('"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 7)) // token='('
)
{
- D(fprintf(stderr, "%*c+ _tmp_158[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'('"));
+ D(fprintf(stderr, "%*c+ _tmp_163[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'('"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_158[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_163[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'('"));
}
{ // '{'
@@ -33706,18 +34961,18 @@ _tmp_158_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_158[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{'"));
+ D(fprintf(stderr, "%*c> _tmp_163[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 25)) // token='{'
)
{
- D(fprintf(stderr, "%*c+ _tmp_158[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{'"));
+ D(fprintf(stderr, "%*c+ _tmp_163[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_158[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_163[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'{'"));
}
_res = NULL;
@@ -33726,9 +34981,9 @@ _tmp_158_rule(Parser *p)
return _res;
}
-// _tmp_159: '[' | '{'
+// _tmp_164: '[' | '{'
static void *
-_tmp_159_rule(Parser *p)
+_tmp_164_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -33745,18 +35000,18 @@ _tmp_159_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_159[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'['"));
+ D(fprintf(stderr, "%*c> _tmp_164[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'['"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 9)) // token='['
)
{
- D(fprintf(stderr, "%*c+ _tmp_159[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'['"));
+ D(fprintf(stderr, "%*c+ _tmp_164[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'['"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_159[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_164[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'['"));
}
{ // '{'
@@ -33764,18 +35019,18 @@ _tmp_159_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_159[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{'"));
+ D(fprintf(stderr, "%*c> _tmp_164[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 25)) // token='{'
)
{
- D(fprintf(stderr, "%*c+ _tmp_159[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{'"));
+ D(fprintf(stderr, "%*c+ _tmp_164[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_159[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_164[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'{'"));
}
_res = NULL;
@@ -33784,9 +35039,9 @@ _tmp_159_rule(Parser *p)
return _res;
}
-// _tmp_160: '[' | '{'
+// _tmp_165: '[' | '{'
static void *
-_tmp_160_rule(Parser *p)
+_tmp_165_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -33803,18 +35058,18 @@ _tmp_160_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_160[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'['"));
+ D(fprintf(stderr, "%*c> _tmp_165[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'['"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 9)) // token='['
)
{
- D(fprintf(stderr, "%*c+ _tmp_160[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'['"));
+ D(fprintf(stderr, "%*c+ _tmp_165[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'['"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_160[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_165[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'['"));
}
{ // '{'
@@ -33822,18 +35077,18 @@ _tmp_160_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_160[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{'"));
+ D(fprintf(stderr, "%*c> _tmp_165[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 25)) // token='{'
)
{
- D(fprintf(stderr, "%*c+ _tmp_160[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{'"));
+ D(fprintf(stderr, "%*c+ _tmp_165[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_160[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_165[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'{'"));
}
_res = NULL;
@@ -33842,9 +35097,9 @@ _tmp_160_rule(Parser *p)
return _res;
}
-// _tmp_161: slash_no_default | slash_with_default
+// _tmp_166: slash_no_default | slash_with_default
static void *
-_tmp_161_rule(Parser *p)
+_tmp_166_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -33861,18 +35116,18 @@ _tmp_161_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_161[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_no_default"));
+ D(fprintf(stderr, "%*c> _tmp_166[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_no_default"));
asdl_arg_seq* slash_no_default_var;
if (
(slash_no_default_var = slash_no_default_rule(p)) // slash_no_default
)
{
- D(fprintf(stderr, "%*c+ _tmp_161[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slash_no_default"));
+ D(fprintf(stderr, "%*c+ _tmp_166[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slash_no_default"));
_res = slash_no_default_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_161[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_166[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "slash_no_default"));
}
{ // slash_with_default
@@ -33880,18 +35135,18 @@ _tmp_161_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_161[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_with_default"));
+ D(fprintf(stderr, "%*c> _tmp_166[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_with_default"));
SlashWithDefault* slash_with_default_var;
if (
(slash_with_default_var = slash_with_default_rule(p)) // slash_with_default
)
{
- D(fprintf(stderr, "%*c+ _tmp_161[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slash_with_default"));
+ D(fprintf(stderr, "%*c+ _tmp_166[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slash_with_default"));
_res = slash_with_default_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_161[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_166[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "slash_with_default"));
}
_res = NULL;
@@ -33900,9 +35155,9 @@ _tmp_161_rule(Parser *p)
return _res;
}
-// _loop0_162: param_maybe_default
+// _loop0_167: param_maybe_default
static asdl_seq *
-_loop0_162_rule(Parser *p)
+_loop0_167_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -33928,7 +35183,7 @@ _loop0_162_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_162[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default"));
+ D(fprintf(stderr, "%*c> _loop0_167[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default"));
NameDefaultPair* param_maybe_default_var;
while (
(param_maybe_default_var = param_maybe_default_rule(p)) // param_maybe_default
@@ -33951,7 +35206,7 @@ _loop0_162_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_162[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_167[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_maybe_default"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -33968,9 +35223,9 @@ _loop0_162_rule(Parser *p)
return _seq;
}
-// _loop0_163: param_no_default
+// _loop0_168: param_no_default
static asdl_seq *
-_loop0_163_rule(Parser *p)
+_loop0_168_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -33996,7 +35251,7 @@ _loop0_163_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_163[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default"));
+ D(fprintf(stderr, "%*c> _loop0_168[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default"));
arg_ty param_no_default_var;
while (
(param_no_default_var = param_no_default_rule(p)) // param_no_default
@@ -34019,7 +35274,7 @@ _loop0_163_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_163[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_168[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_no_default"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -34036,9 +35291,9 @@ _loop0_163_rule(Parser *p)
return _seq;
}
-// _loop0_164: param_no_default
+// _loop0_169: param_no_default
static asdl_seq *
-_loop0_164_rule(Parser *p)
+_loop0_169_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -34064,7 +35319,7 @@ _loop0_164_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_164[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default"));
+ D(fprintf(stderr, "%*c> _loop0_169[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default"));
arg_ty param_no_default_var;
while (
(param_no_default_var = param_no_default_rule(p)) // param_no_default
@@ -34087,7 +35342,7 @@ _loop0_164_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_164[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_169[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_no_default"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -34104,9 +35359,9 @@ _loop0_164_rule(Parser *p)
return _seq;
}
-// _loop1_165: param_no_default
+// _loop1_170: param_no_default
static asdl_seq *
-_loop1_165_rule(Parser *p)
+_loop1_170_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -34132,7 +35387,7 @@ _loop1_165_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_165[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default"));
+ D(fprintf(stderr, "%*c> _loop1_170[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default"));
arg_ty param_no_default_var;
while (
(param_no_default_var = param_no_default_rule(p)) // param_no_default
@@ -34155,7 +35410,7 @@ _loop1_165_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_165[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_170[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_no_default"));
}
if (_n == 0 || p->error_indicator) {
@@ -34177,9 +35432,9 @@ _loop1_165_rule(Parser *p)
return _seq;
}
-// _tmp_166: slash_no_default | slash_with_default
+// _tmp_171: slash_no_default | slash_with_default
static void *
-_tmp_166_rule(Parser *p)
+_tmp_171_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -34196,18 +35451,18 @@ _tmp_166_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_166[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_no_default"));
+ D(fprintf(stderr, "%*c> _tmp_171[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_no_default"));
asdl_arg_seq* slash_no_default_var;
if (
(slash_no_default_var = slash_no_default_rule(p)) // slash_no_default
)
{
- D(fprintf(stderr, "%*c+ _tmp_166[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slash_no_default"));
+ D(fprintf(stderr, "%*c+ _tmp_171[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slash_no_default"));
_res = slash_no_default_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_166[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_171[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "slash_no_default"));
}
{ // slash_with_default
@@ -34215,18 +35470,18 @@ _tmp_166_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_166[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_with_default"));
+ D(fprintf(stderr, "%*c> _tmp_171[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_with_default"));
SlashWithDefault* slash_with_default_var;
if (
(slash_with_default_var = slash_with_default_rule(p)) // slash_with_default
)
{
- D(fprintf(stderr, "%*c+ _tmp_166[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slash_with_default"));
+ D(fprintf(stderr, "%*c+ _tmp_171[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slash_with_default"));
_res = slash_with_default_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_166[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_171[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "slash_with_default"));
}
_res = NULL;
@@ -34235,9 +35490,9 @@ _tmp_166_rule(Parser *p)
return _res;
}
-// _loop0_167: param_maybe_default
+// _loop0_172: param_maybe_default
static asdl_seq *
-_loop0_167_rule(Parser *p)
+_loop0_172_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -34263,7 +35518,7 @@ _loop0_167_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_167[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default"));
+ D(fprintf(stderr, "%*c> _loop0_172[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default"));
NameDefaultPair* param_maybe_default_var;
while (
(param_maybe_default_var = param_maybe_default_rule(p)) // param_maybe_default
@@ -34286,7 +35541,7 @@ _loop0_167_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_167[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_172[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_maybe_default"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -34303,9 +35558,9 @@ _loop0_167_rule(Parser *p)
return _seq;
}
-// _tmp_168: ',' | param_no_default
+// _tmp_173: ',' | param_no_default
static void *
-_tmp_168_rule(Parser *p)
+_tmp_173_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -34322,18 +35577,18 @@ _tmp_168_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_168[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','"));
+ D(fprintf(stderr, "%*c> _tmp_173[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 12)) // token=','
)
{
- D(fprintf(stderr, "%*c+ _tmp_168[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','"));
+ D(fprintf(stderr, "%*c+ _tmp_173[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_168[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_173[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','"));
}
{ // param_no_default
@@ -34341,18 +35596,18 @@ _tmp_168_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_168[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default"));
+ D(fprintf(stderr, "%*c> _tmp_173[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default"));
arg_ty param_no_default_var;
if (
(param_no_default_var = param_no_default_rule(p)) // param_no_default
)
{
- D(fprintf(stderr, "%*c+ _tmp_168[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_no_default"));
+ D(fprintf(stderr, "%*c+ _tmp_173[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_no_default"));
_res = param_no_default_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_168[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_173[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_no_default"));
}
_res = NULL;
@@ -34361,9 +35616,9 @@ _tmp_168_rule(Parser *p)
return _res;
}
-// _loop0_169: param_maybe_default
+// _loop0_174: param_maybe_default
static asdl_seq *
-_loop0_169_rule(Parser *p)
+_loop0_174_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -34389,7 +35644,7 @@ _loop0_169_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_169[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default"));
+ D(fprintf(stderr, "%*c> _loop0_174[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default"));
NameDefaultPair* param_maybe_default_var;
while (
(param_maybe_default_var = param_maybe_default_rule(p)) // param_maybe_default
@@ -34412,7 +35667,7 @@ _loop0_169_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_169[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_174[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_maybe_default"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -34429,9 +35684,9 @@ _loop0_169_rule(Parser *p)
return _seq;
}
-// _loop1_170: param_maybe_default
+// _loop1_175: param_maybe_default
static asdl_seq *
-_loop1_170_rule(Parser *p)
+_loop1_175_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -34457,7 +35712,7 @@ _loop1_170_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_170[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default"));
+ D(fprintf(stderr, "%*c> _loop1_175[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default"));
NameDefaultPair* param_maybe_default_var;
while (
(param_maybe_default_var = param_maybe_default_rule(p)) // param_maybe_default
@@ -34480,7 +35735,7 @@ _loop1_170_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_170[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_175[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_maybe_default"));
}
if (_n == 0 || p->error_indicator) {
@@ -34502,9 +35757,9 @@ _loop1_170_rule(Parser *p)
return _seq;
}
-// _tmp_171: ')' | ','
+// _tmp_176: ')' | ','
static void *
-_tmp_171_rule(Parser *p)
+_tmp_176_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -34521,18 +35776,18 @@ _tmp_171_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_171[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'"));
+ D(fprintf(stderr, "%*c> _tmp_176[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 8)) // token=')'
)
{
- D(fprintf(stderr, "%*c+ _tmp_171[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'"));
+ D(fprintf(stderr, "%*c+ _tmp_176[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_171[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_176[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "')'"));
}
{ // ','
@@ -34540,18 +35795,18 @@ _tmp_171_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_171[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','"));
+ D(fprintf(stderr, "%*c> _tmp_176[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 12)) // token=','
)
{
- D(fprintf(stderr, "%*c+ _tmp_171[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','"));
+ D(fprintf(stderr, "%*c+ _tmp_176[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_171[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_176[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','"));
}
_res = NULL;
@@ -34560,9 +35815,9 @@ _tmp_171_rule(Parser *p)
return _res;
}
-// _tmp_172: ')' | ',' (')' | '**')
+// _tmp_177: ')' | ',' (')' | '**')
static void *
-_tmp_172_rule(Parser *p)
+_tmp_177_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -34579,18 +35834,18 @@ _tmp_172_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_172[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'"));
+ D(fprintf(stderr, "%*c> _tmp_177[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 8)) // token=')'
)
{
- D(fprintf(stderr, "%*c+ _tmp_172[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'"));
+ D(fprintf(stderr, "%*c+ _tmp_177[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_172[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_177[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "')'"));
}
{ // ',' (')' | '**')
@@ -34598,21 +35853,21 @@ _tmp_172_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_172[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (')' | '**')"));
+ D(fprintf(stderr, "%*c> _tmp_177[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (')' | '**')"));
Token * _literal;
- void *_tmp_240_var;
+ void *_tmp_261_var;
if (
(_literal = _PyPegen_expect_token(p, 12)) // token=','
&&
- (_tmp_240_var = _tmp_240_rule(p)) // ')' | '**'
+ (_tmp_261_var = _tmp_261_rule(p)) // ')' | '**'
)
{
- D(fprintf(stderr, "%*c+ _tmp_172[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' (')' | '**')"));
- _res = _PyPegen_dummy_name(p, _literal, _tmp_240_var);
+ D(fprintf(stderr, "%*c+ _tmp_177[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' (')' | '**')"));
+ _res = _PyPegen_dummy_name(p, _literal, _tmp_261_var);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_172[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_177[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' (')' | '**')"));
}
_res = NULL;
@@ -34621,9 +35876,9 @@ _tmp_172_rule(Parser *p)
return _res;
}
-// _tmp_173: param_no_default | ','
+// _tmp_178: param_no_default | ','
static void *
-_tmp_173_rule(Parser *p)
+_tmp_178_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -34640,18 +35895,18 @@ _tmp_173_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_173[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default"));
+ D(fprintf(stderr, "%*c> _tmp_178[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default"));
arg_ty param_no_default_var;
if (
(param_no_default_var = param_no_default_rule(p)) // param_no_default
)
{
- D(fprintf(stderr, "%*c+ _tmp_173[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_no_default"));
+ D(fprintf(stderr, "%*c+ _tmp_178[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_no_default"));
_res = param_no_default_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_173[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_178[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_no_default"));
}
{ // ','
@@ -34659,18 +35914,18 @@ _tmp_173_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_173[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','"));
+ D(fprintf(stderr, "%*c> _tmp_178[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 12)) // token=','
)
{
- D(fprintf(stderr, "%*c+ _tmp_173[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','"));
+ D(fprintf(stderr, "%*c+ _tmp_178[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_173[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_178[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','"));
}
_res = NULL;
@@ -34679,9 +35934,9 @@ _tmp_173_rule(Parser *p)
return _res;
}
-// _loop0_174: param_maybe_default
+// _loop0_179: param_maybe_default
static asdl_seq *
-_loop0_174_rule(Parser *p)
+_loop0_179_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -34707,7 +35962,7 @@ _loop0_174_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_174[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default"));
+ D(fprintf(stderr, "%*c> _loop0_179[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default"));
NameDefaultPair* param_maybe_default_var;
while (
(param_maybe_default_var = param_maybe_default_rule(p)) // param_maybe_default
@@ -34730,7 +35985,7 @@ _loop0_174_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_174[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_179[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_maybe_default"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -34747,9 +36002,9 @@ _loop0_174_rule(Parser *p)
return _seq;
}
-// _tmp_175: param_no_default | ','
+// _tmp_180: param_no_default | ','
static void *
-_tmp_175_rule(Parser *p)
+_tmp_180_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -34766,18 +36021,18 @@ _tmp_175_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_175[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default"));
+ D(fprintf(stderr, "%*c> _tmp_180[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default"));
arg_ty param_no_default_var;
if (
(param_no_default_var = param_no_default_rule(p)) // param_no_default
)
{
- D(fprintf(stderr, "%*c+ _tmp_175[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_no_default"));
+ D(fprintf(stderr, "%*c+ _tmp_180[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_no_default"));
_res = param_no_default_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_175[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_180[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_no_default"));
}
{ // ','
@@ -34785,18 +36040,18 @@ _tmp_175_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_175[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','"));
+ D(fprintf(stderr, "%*c> _tmp_180[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 12)) // token=','
)
{
- D(fprintf(stderr, "%*c+ _tmp_175[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','"));
+ D(fprintf(stderr, "%*c+ _tmp_180[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_175[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_180[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','"));
}
_res = NULL;
@@ -34805,9 +36060,9 @@ _tmp_175_rule(Parser *p)
return _res;
}
-// _tmp_176: '*' | '**' | '/'
+// _tmp_181: '*' | '**' | '/'
static void *
-_tmp_176_rule(Parser *p)
+_tmp_181_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -34824,18 +36079,18 @@ _tmp_176_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_176[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*'"));
+ D(fprintf(stderr, "%*c> _tmp_181[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 16)) // token='*'
)
{
- D(fprintf(stderr, "%*c+ _tmp_176[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*'"));
+ D(fprintf(stderr, "%*c+ _tmp_181[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_176[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_181[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'*'"));
}
{ // '**'
@@ -34843,18 +36098,18 @@ _tmp_176_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_176[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'"));
+ D(fprintf(stderr, "%*c> _tmp_181[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 35)) // token='**'
)
{
- D(fprintf(stderr, "%*c+ _tmp_176[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'"));
+ D(fprintf(stderr, "%*c+ _tmp_181[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_176[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_181[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'**'"));
}
{ // '/'
@@ -34862,18 +36117,18 @@ _tmp_176_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_176[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'/'"));
+ D(fprintf(stderr, "%*c> _tmp_181[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'/'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 17)) // token='/'
)
{
- D(fprintf(stderr, "%*c+ _tmp_176[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'/'"));
+ D(fprintf(stderr, "%*c+ _tmp_181[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'/'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_176[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_181[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'/'"));
}
_res = NULL;
@@ -34882,9 +36137,9 @@ _tmp_176_rule(Parser *p)
return _res;
}
-// _loop1_177: param_with_default
+// _loop1_182: param_with_default
static asdl_seq *
-_loop1_177_rule(Parser *p)
+_loop1_182_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -34910,7 +36165,7 @@ _loop1_177_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_177[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default"));
+ D(fprintf(stderr, "%*c> _loop1_182[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default"));
NameDefaultPair* param_with_default_var;
while (
(param_with_default_var = param_with_default_rule(p)) // param_with_default
@@ -34933,7 +36188,7 @@ _loop1_177_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_177[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_182[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_with_default"));
}
if (_n == 0 || p->error_indicator) {
@@ -34955,9 +36210,9 @@ _loop1_177_rule(Parser *p)
return _seq;
}
-// _tmp_178: lambda_slash_no_default | lambda_slash_with_default
+// _tmp_183: lambda_slash_no_default | lambda_slash_with_default
static void *
-_tmp_178_rule(Parser *p)
+_tmp_183_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -34974,18 +36229,18 @@ _tmp_178_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_178[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_slash_no_default"));
+ D(fprintf(stderr, "%*c> _tmp_183[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_slash_no_default"));
asdl_arg_seq* lambda_slash_no_default_var;
if (
(lambda_slash_no_default_var = lambda_slash_no_default_rule(p)) // lambda_slash_no_default
)
{
- D(fprintf(stderr, "%*c+ _tmp_178[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_slash_no_default"));
+ D(fprintf(stderr, "%*c+ _tmp_183[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_slash_no_default"));
_res = lambda_slash_no_default_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_178[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_183[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_slash_no_default"));
}
{ // lambda_slash_with_default
@@ -34993,18 +36248,18 @@ _tmp_178_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_178[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_slash_with_default"));
+ D(fprintf(stderr, "%*c> _tmp_183[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_slash_with_default"));
SlashWithDefault* lambda_slash_with_default_var;
if (
(lambda_slash_with_default_var = lambda_slash_with_default_rule(p)) // lambda_slash_with_default
)
{
- D(fprintf(stderr, "%*c+ _tmp_178[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_slash_with_default"));
+ D(fprintf(stderr, "%*c+ _tmp_183[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_slash_with_default"));
_res = lambda_slash_with_default_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_178[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_183[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_slash_with_default"));
}
_res = NULL;
@@ -35013,9 +36268,9 @@ _tmp_178_rule(Parser *p)
return _res;
}
-// _loop0_179: lambda_param_maybe_default
+// _loop0_184: lambda_param_maybe_default
static asdl_seq *
-_loop0_179_rule(Parser *p)
+_loop0_184_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -35041,7 +36296,7 @@ _loop0_179_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_179[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default"));
+ D(fprintf(stderr, "%*c> _loop0_184[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default"));
NameDefaultPair* lambda_param_maybe_default_var;
while (
(lambda_param_maybe_default_var = lambda_param_maybe_default_rule(p)) // lambda_param_maybe_default
@@ -35064,7 +36319,7 @@ _loop0_179_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_179[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_184[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_maybe_default"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -35081,9 +36336,9 @@ _loop0_179_rule(Parser *p)
return _seq;
}
-// _loop0_180: lambda_param_no_default
+// _loop0_185: lambda_param_no_default
static asdl_seq *
-_loop0_180_rule(Parser *p)
+_loop0_185_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -35109,7 +36364,7 @@ _loop0_180_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_180[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default"));
+ D(fprintf(stderr, "%*c> _loop0_185[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default"));
arg_ty lambda_param_no_default_var;
while (
(lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default
@@ -35132,7 +36387,7 @@ _loop0_180_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_180[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_185[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -35149,9 +36404,9 @@ _loop0_180_rule(Parser *p)
return _seq;
}
-// _loop0_181: lambda_param_no_default
+// _loop0_186: lambda_param_no_default
static asdl_seq *
-_loop0_181_rule(Parser *p)
+_loop0_186_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -35177,7 +36432,7 @@ _loop0_181_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_181[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default"));
+ D(fprintf(stderr, "%*c> _loop0_186[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default"));
arg_ty lambda_param_no_default_var;
while (
(lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default
@@ -35200,7 +36455,7 @@ _loop0_181_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_181[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_186[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -35217,9 +36472,9 @@ _loop0_181_rule(Parser *p)
return _seq;
}
-// _loop0_183: ',' lambda_param
+// _loop0_188: ',' lambda_param
static asdl_seq *
-_loop0_183_rule(Parser *p)
+_loop0_188_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -35245,7 +36500,7 @@ _loop0_183_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_183[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' lambda_param"));
+ D(fprintf(stderr, "%*c> _loop0_188[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' lambda_param"));
Token * _literal;
arg_ty elem;
while (
@@ -35277,7 +36532,7 @@ _loop0_183_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_183[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_188[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' lambda_param"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -35294,9 +36549,9 @@ _loop0_183_rule(Parser *p)
return _seq;
}
-// _gather_182: lambda_param _loop0_183
+// _gather_187: lambda_param _loop0_188
static asdl_seq *
-_gather_182_rule(Parser *p)
+_gather_187_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -35308,27 +36563,27 @@ _gather_182_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // lambda_param _loop0_183
+ { // lambda_param _loop0_188
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_182[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param _loop0_183"));
+ D(fprintf(stderr, "%*c> _gather_187[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param _loop0_188"));
arg_ty elem;
asdl_seq * seq;
if (
(elem = lambda_param_rule(p)) // lambda_param
&&
- (seq = _loop0_183_rule(p)) // _loop0_183
+ (seq = _loop0_188_rule(p)) // _loop0_188
)
{
- D(fprintf(stderr, "%*c+ _gather_182[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param _loop0_183"));
+ D(fprintf(stderr, "%*c+ _gather_187[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param _loop0_188"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_182[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param _loop0_183"));
+ D(fprintf(stderr, "%*c%s _gather_187[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param _loop0_188"));
}
_res = NULL;
done:
@@ -35336,9 +36591,9 @@ _gather_182_rule(Parser *p)
return _res;
}
-// _tmp_184: lambda_slash_no_default | lambda_slash_with_default
+// _tmp_189: lambda_slash_no_default | lambda_slash_with_default
static void *
-_tmp_184_rule(Parser *p)
+_tmp_189_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -35355,18 +36610,18 @@ _tmp_184_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_184[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_slash_no_default"));
+ D(fprintf(stderr, "%*c> _tmp_189[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_slash_no_default"));
asdl_arg_seq* lambda_slash_no_default_var;
if (
(lambda_slash_no_default_var = lambda_slash_no_default_rule(p)) // lambda_slash_no_default
)
{
- D(fprintf(stderr, "%*c+ _tmp_184[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_slash_no_default"));
+ D(fprintf(stderr, "%*c+ _tmp_189[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_slash_no_default"));
_res = lambda_slash_no_default_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_184[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_189[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_slash_no_default"));
}
{ // lambda_slash_with_default
@@ -35374,18 +36629,18 @@ _tmp_184_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_184[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_slash_with_default"));
+ D(fprintf(stderr, "%*c> _tmp_189[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_slash_with_default"));
SlashWithDefault* lambda_slash_with_default_var;
if (
(lambda_slash_with_default_var = lambda_slash_with_default_rule(p)) // lambda_slash_with_default
)
{
- D(fprintf(stderr, "%*c+ _tmp_184[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_slash_with_default"));
+ D(fprintf(stderr, "%*c+ _tmp_189[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_slash_with_default"));
_res = lambda_slash_with_default_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_184[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_189[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_slash_with_default"));
}
_res = NULL;
@@ -35394,9 +36649,9 @@ _tmp_184_rule(Parser *p)
return _res;
}
-// _loop0_185: lambda_param_maybe_default
+// _loop0_190: lambda_param_maybe_default
static asdl_seq *
-_loop0_185_rule(Parser *p)
+_loop0_190_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -35422,7 +36677,7 @@ _loop0_185_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_185[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default"));
+ D(fprintf(stderr, "%*c> _loop0_190[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default"));
NameDefaultPair* lambda_param_maybe_default_var;
while (
(lambda_param_maybe_default_var = lambda_param_maybe_default_rule(p)) // lambda_param_maybe_default
@@ -35445,7 +36700,7 @@ _loop0_185_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_185[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_190[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_maybe_default"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -35462,9 +36717,9 @@ _loop0_185_rule(Parser *p)
return _seq;
}
-// _tmp_186: ',' | lambda_param_no_default
+// _tmp_191: ',' | lambda_param_no_default
static void *
-_tmp_186_rule(Parser *p)
+_tmp_191_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -35481,18 +36736,18 @@ _tmp_186_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_186[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','"));
+ D(fprintf(stderr, "%*c> _tmp_191[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 12)) // token=','
)
{
- D(fprintf(stderr, "%*c+ _tmp_186[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','"));
+ D(fprintf(stderr, "%*c+ _tmp_191[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_186[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_191[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','"));
}
{ // lambda_param_no_default
@@ -35500,18 +36755,18 @@ _tmp_186_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_186[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default"));
+ D(fprintf(stderr, "%*c> _tmp_191[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default"));
arg_ty lambda_param_no_default_var;
if (
(lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default
)
{
- D(fprintf(stderr, "%*c+ _tmp_186[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default"));
+ D(fprintf(stderr, "%*c+ _tmp_191[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default"));
_res = lambda_param_no_default_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_186[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_191[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default"));
}
_res = NULL;
@@ -35520,9 +36775,9 @@ _tmp_186_rule(Parser *p)
return _res;
}
-// _loop0_187: lambda_param_maybe_default
+// _loop0_192: lambda_param_maybe_default
static asdl_seq *
-_loop0_187_rule(Parser *p)
+_loop0_192_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -35548,7 +36803,7 @@ _loop0_187_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_187[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default"));
+ D(fprintf(stderr, "%*c> _loop0_192[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default"));
NameDefaultPair* lambda_param_maybe_default_var;
while (
(lambda_param_maybe_default_var = lambda_param_maybe_default_rule(p)) // lambda_param_maybe_default
@@ -35571,7 +36826,7 @@ _loop0_187_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_187[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_192[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_maybe_default"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -35588,9 +36843,9 @@ _loop0_187_rule(Parser *p)
return _seq;
}
-// _loop1_188: lambda_param_maybe_default
+// _loop1_193: lambda_param_maybe_default
static asdl_seq *
-_loop1_188_rule(Parser *p)
+_loop1_193_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -35616,7 +36871,7 @@ _loop1_188_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_188[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default"));
+ D(fprintf(stderr, "%*c> _loop1_193[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default"));
NameDefaultPair* lambda_param_maybe_default_var;
while (
(lambda_param_maybe_default_var = lambda_param_maybe_default_rule(p)) // lambda_param_maybe_default
@@ -35639,7 +36894,7 @@ _loop1_188_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_188[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_193[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_maybe_default"));
}
if (_n == 0 || p->error_indicator) {
@@ -35661,9 +36916,9 @@ _loop1_188_rule(Parser *p)
return _seq;
}
-// _loop1_189: lambda_param_with_default
+// _loop1_194: lambda_param_with_default
static asdl_seq *
-_loop1_189_rule(Parser *p)
+_loop1_194_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -35689,7 +36944,7 @@ _loop1_189_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_189[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default"));
+ D(fprintf(stderr, "%*c> _loop1_194[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default"));
NameDefaultPair* lambda_param_with_default_var;
while (
(lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default
@@ -35712,7 +36967,7 @@ _loop1_189_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_189[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_194[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_with_default"));
}
if (_n == 0 || p->error_indicator) {
@@ -35734,9 +36989,9 @@ _loop1_189_rule(Parser *p)
return _seq;
}
-// _tmp_190: ':' | ',' (':' | '**')
+// _tmp_195: ':' | ',' (':' | '**')
static void *
-_tmp_190_rule(Parser *p)
+_tmp_195_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -35753,18 +37008,18 @@ _tmp_190_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_190[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'"));
+ D(fprintf(stderr, "%*c> _tmp_195[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
)
{
- D(fprintf(stderr, "%*c+ _tmp_190[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'"));
+ D(fprintf(stderr, "%*c+ _tmp_195[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_190[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_195[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':'"));
}
{ // ',' (':' | '**')
@@ -35772,21 +37027,21 @@ _tmp_190_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_190[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (':' | '**')"));
+ D(fprintf(stderr, "%*c> _tmp_195[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (':' | '**')"));
Token * _literal;
- void *_tmp_241_var;
+ void *_tmp_262_var;
if (
(_literal = _PyPegen_expect_token(p, 12)) // token=','
&&
- (_tmp_241_var = _tmp_241_rule(p)) // ':' | '**'
+ (_tmp_262_var = _tmp_262_rule(p)) // ':' | '**'
)
{
- D(fprintf(stderr, "%*c+ _tmp_190[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' (':' | '**')"));
- _res = _PyPegen_dummy_name(p, _literal, _tmp_241_var);
+ D(fprintf(stderr, "%*c+ _tmp_195[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' (':' | '**')"));
+ _res = _PyPegen_dummy_name(p, _literal, _tmp_262_var);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_190[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_195[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' (':' | '**')"));
}
_res = NULL;
@@ -35795,9 +37050,9 @@ _tmp_190_rule(Parser *p)
return _res;
}
-// _tmp_191: lambda_param_no_default | ','
+// _tmp_196: lambda_param_no_default | ','
static void *
-_tmp_191_rule(Parser *p)
+_tmp_196_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -35814,18 +37069,18 @@ _tmp_191_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_191[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default"));
+ D(fprintf(stderr, "%*c> _tmp_196[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default"));
arg_ty lambda_param_no_default_var;
if (
(lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default
)
{
- D(fprintf(stderr, "%*c+ _tmp_191[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default"));
+ D(fprintf(stderr, "%*c+ _tmp_196[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default"));
_res = lambda_param_no_default_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_191[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_196[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default"));
}
{ // ','
@@ -35833,18 +37088,18 @@ _tmp_191_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_191[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','"));
+ D(fprintf(stderr, "%*c> _tmp_196[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 12)) // token=','
)
{
- D(fprintf(stderr, "%*c+ _tmp_191[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','"));
+ D(fprintf(stderr, "%*c+ _tmp_196[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_191[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_196[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','"));
}
_res = NULL;
@@ -35853,9 +37108,9 @@ _tmp_191_rule(Parser *p)
return _res;
}
-// _loop0_192: lambda_param_maybe_default
+// _loop0_197: lambda_param_maybe_default
static asdl_seq *
-_loop0_192_rule(Parser *p)
+_loop0_197_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -35881,7 +37136,7 @@ _loop0_192_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_192[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default"));
+ D(fprintf(stderr, "%*c> _loop0_197[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default"));
NameDefaultPair* lambda_param_maybe_default_var;
while (
(lambda_param_maybe_default_var = lambda_param_maybe_default_rule(p)) // lambda_param_maybe_default
@@ -35904,7 +37159,7 @@ _loop0_192_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_192[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_197[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_maybe_default"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -35921,9 +37176,9 @@ _loop0_192_rule(Parser *p)
return _seq;
}
-// _tmp_193: lambda_param_no_default | ','
+// _tmp_198: lambda_param_no_default | ','
static void *
-_tmp_193_rule(Parser *p)
+_tmp_198_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -35940,18 +37195,18 @@ _tmp_193_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_193[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default"));
+ D(fprintf(stderr, "%*c> _tmp_198[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default"));
arg_ty lambda_param_no_default_var;
if (
(lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default
)
{
- D(fprintf(stderr, "%*c+ _tmp_193[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default"));
+ D(fprintf(stderr, "%*c+ _tmp_198[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default"));
_res = lambda_param_no_default_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_193[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_198[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default"));
}
{ // ','
@@ -35959,18 +37214,18 @@ _tmp_193_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_193[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','"));
+ D(fprintf(stderr, "%*c> _tmp_198[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 12)) // token=','
)
{
- D(fprintf(stderr, "%*c+ _tmp_193[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','"));
+ D(fprintf(stderr, "%*c+ _tmp_198[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_193[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_198[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','"));
}
_res = NULL;
@@ -35979,9 +37234,9 @@ _tmp_193_rule(Parser *p)
return _res;
}
-// _tmp_194: '*' | '**' | '/'
+// _tmp_199: '*' | '**' | '/'
static void *
-_tmp_194_rule(Parser *p)
+_tmp_199_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -35998,18 +37253,18 @@ _tmp_194_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_194[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*'"));
+ D(fprintf(stderr, "%*c> _tmp_199[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 16)) // token='*'
)
{
- D(fprintf(stderr, "%*c+ _tmp_194[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*'"));
+ D(fprintf(stderr, "%*c+ _tmp_199[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_194[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_199[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'*'"));
}
{ // '**'
@@ -36017,18 +37272,18 @@ _tmp_194_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_194[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'"));
+ D(fprintf(stderr, "%*c> _tmp_199[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 35)) // token='**'
)
{
- D(fprintf(stderr, "%*c+ _tmp_194[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'"));
+ D(fprintf(stderr, "%*c+ _tmp_199[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_194[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_199[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'**'"));
}
{ // '/'
@@ -36036,18 +37291,18 @@ _tmp_194_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_194[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'/'"));
+ D(fprintf(stderr, "%*c> _tmp_199[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'/'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 17)) // token='/'
)
{
- D(fprintf(stderr, "%*c+ _tmp_194[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'/'"));
+ D(fprintf(stderr, "%*c+ _tmp_199[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'/'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_194[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_199[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'/'"));
}
_res = NULL;
@@ -36056,9 +37311,9 @@ _tmp_194_rule(Parser *p)
return _res;
}
-// _tmp_195: ',' | ')' | ':'
+// _tmp_200: ',' | ')' | ':'
static void *
-_tmp_195_rule(Parser *p)
+_tmp_200_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -36075,18 +37330,18 @@ _tmp_195_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_195[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','"));
+ D(fprintf(stderr, "%*c> _tmp_200[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 12)) // token=','
)
{
- D(fprintf(stderr, "%*c+ _tmp_195[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','"));
+ D(fprintf(stderr, "%*c+ _tmp_200[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_195[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_200[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','"));
}
{ // ')'
@@ -36094,18 +37349,18 @@ _tmp_195_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_195[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'"));
+ D(fprintf(stderr, "%*c> _tmp_200[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 8)) // token=')'
)
{
- D(fprintf(stderr, "%*c+ _tmp_195[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'"));
+ D(fprintf(stderr, "%*c+ _tmp_200[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_195[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_200[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "')'"));
}
{ // ':'
@@ -36113,18 +37368,18 @@ _tmp_195_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_195[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'"));
+ D(fprintf(stderr, "%*c> _tmp_200[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
)
{
- D(fprintf(stderr, "%*c+ _tmp_195[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'"));
+ D(fprintf(stderr, "%*c+ _tmp_200[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_195[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_200[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':'"));
}
_res = NULL;
@@ -36133,9 +37388,9 @@ _tmp_195_rule(Parser *p)
return _res;
}
-// _loop0_197: ',' (expression ['as' star_target])
+// _loop0_202: ',' (expression ['as' star_target])
static asdl_seq *
-_loop0_197_rule(Parser *p)
+_loop0_202_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -36161,13 +37416,13 @@ _loop0_197_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_197[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (expression ['as' star_target])"));
+ D(fprintf(stderr, "%*c> _loop0_202[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (expression ['as' star_target])"));
Token * _literal;
void *elem;
while (
(_literal = _PyPegen_expect_token(p, 12)) // token=','
&&
- (elem = _tmp_242_rule(p)) // expression ['as' star_target]
+ (elem = _tmp_263_rule(p)) // expression ['as' star_target]
)
{
_res = elem;
@@ -36193,7 +37448,7 @@ _loop0_197_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_197[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_202[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' (expression ['as' star_target])"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -36210,9 +37465,9 @@ _loop0_197_rule(Parser *p)
return _seq;
}
-// _gather_196: (expression ['as' star_target]) _loop0_197
+// _gather_201: (expression ['as' star_target]) _loop0_202
static asdl_seq *
-_gather_196_rule(Parser *p)
+_gather_201_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -36224,27 +37479,27 @@ _gather_196_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // (expression ['as' star_target]) _loop0_197
+ { // (expression ['as' star_target]) _loop0_202
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_196[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(expression ['as' star_target]) _loop0_197"));
+ D(fprintf(stderr, "%*c> _gather_201[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(expression ['as' star_target]) _loop0_202"));
void *elem;
asdl_seq * seq;
if (
- (elem = _tmp_242_rule(p)) // expression ['as' star_target]
+ (elem = _tmp_263_rule(p)) // expression ['as' star_target]
&&
- (seq = _loop0_197_rule(p)) // _loop0_197
+ (seq = _loop0_202_rule(p)) // _loop0_202
)
{
- D(fprintf(stderr, "%*c+ _gather_196[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(expression ['as' star_target]) _loop0_197"));
+ D(fprintf(stderr, "%*c+ _gather_201[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(expression ['as' star_target]) _loop0_202"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_196[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(expression ['as' star_target]) _loop0_197"));
+ D(fprintf(stderr, "%*c%s _gather_201[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(expression ['as' star_target]) _loop0_202"));
}
_res = NULL;
done:
@@ -36252,9 +37507,9 @@ _gather_196_rule(Parser *p)
return _res;
}
-// _loop0_199: ',' (expressions ['as' star_target])
+// _loop0_204: ',' (expressions ['as' star_target])
static asdl_seq *
-_loop0_199_rule(Parser *p)
+_loop0_204_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -36280,13 +37535,13 @@ _loop0_199_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_199[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (expressions ['as' star_target])"));
+ D(fprintf(stderr, "%*c> _loop0_204[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (expressions ['as' star_target])"));
Token * _literal;
void *elem;
while (
(_literal = _PyPegen_expect_token(p, 12)) // token=','
&&
- (elem = _tmp_243_rule(p)) // expressions ['as' star_target]
+ (elem = _tmp_264_rule(p)) // expressions ['as' star_target]
)
{
_res = elem;
@@ -36312,7 +37567,7 @@ _loop0_199_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_199[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_204[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' (expressions ['as' star_target])"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -36329,9 +37584,9 @@ _loop0_199_rule(Parser *p)
return _seq;
}
-// _gather_198: (expressions ['as' star_target]) _loop0_199
+// _gather_203: (expressions ['as' star_target]) _loop0_204
static asdl_seq *
-_gather_198_rule(Parser *p)
+_gather_203_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -36343,27 +37598,27 @@ _gather_198_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // (expressions ['as' star_target]) _loop0_199
+ { // (expressions ['as' star_target]) _loop0_204
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_198[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(expressions ['as' star_target]) _loop0_199"));
+ D(fprintf(stderr, "%*c> _gather_203[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(expressions ['as' star_target]) _loop0_204"));
void *elem;
asdl_seq * seq;
if (
- (elem = _tmp_243_rule(p)) // expressions ['as' star_target]
+ (elem = _tmp_264_rule(p)) // expressions ['as' star_target]
&&
- (seq = _loop0_199_rule(p)) // _loop0_199
+ (seq = _loop0_204_rule(p)) // _loop0_204
)
{
- D(fprintf(stderr, "%*c+ _gather_198[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(expressions ['as' star_target]) _loop0_199"));
+ D(fprintf(stderr, "%*c+ _gather_203[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(expressions ['as' star_target]) _loop0_204"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_198[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(expressions ['as' star_target]) _loop0_199"));
+ D(fprintf(stderr, "%*c%s _gather_203[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(expressions ['as' star_target]) _loop0_204"));
}
_res = NULL;
done:
@@ -36371,9 +37626,9 @@ _gather_198_rule(Parser *p)
return _res;
}
-// _loop0_201: ',' (expression ['as' star_target])
+// _loop0_206: ',' (expression ['as' star_target])
static asdl_seq *
-_loop0_201_rule(Parser *p)
+_loop0_206_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -36399,13 +37654,13 @@ _loop0_201_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_201[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (expression ['as' star_target])"));
+ D(fprintf(stderr, "%*c> _loop0_206[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (expression ['as' star_target])"));
Token * _literal;
void *elem;
while (
(_literal = _PyPegen_expect_token(p, 12)) // token=','
&&
- (elem = _tmp_244_rule(p)) // expression ['as' star_target]
+ (elem = _tmp_265_rule(p)) // expression ['as' star_target]
)
{
_res = elem;
@@ -36431,7 +37686,7 @@ _loop0_201_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_201[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_206[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' (expression ['as' star_target])"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -36448,9 +37703,9 @@ _loop0_201_rule(Parser *p)
return _seq;
}
-// _gather_200: (expression ['as' star_target]) _loop0_201
+// _gather_205: (expression ['as' star_target]) _loop0_206
static asdl_seq *
-_gather_200_rule(Parser *p)
+_gather_205_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -36462,27 +37717,27 @@ _gather_200_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // (expression ['as' star_target]) _loop0_201
+ { // (expression ['as' star_target]) _loop0_206
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_200[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(expression ['as' star_target]) _loop0_201"));
+ D(fprintf(stderr, "%*c> _gather_205[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(expression ['as' star_target]) _loop0_206"));
void *elem;
asdl_seq * seq;
if (
- (elem = _tmp_244_rule(p)) // expression ['as' star_target]
+ (elem = _tmp_265_rule(p)) // expression ['as' star_target]
&&
- (seq = _loop0_201_rule(p)) // _loop0_201
+ (seq = _loop0_206_rule(p)) // _loop0_206
)
{
- D(fprintf(stderr, "%*c+ _gather_200[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(expression ['as' star_target]) _loop0_201"));
+ D(fprintf(stderr, "%*c+ _gather_205[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(expression ['as' star_target]) _loop0_206"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_200[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(expression ['as' star_target]) _loop0_201"));
+ D(fprintf(stderr, "%*c%s _gather_205[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(expression ['as' star_target]) _loop0_206"));
}
_res = NULL;
done:
@@ -36490,9 +37745,9 @@ _gather_200_rule(Parser *p)
return _res;
}
-// _loop0_203: ',' (expressions ['as' star_target])
+// _loop0_208: ',' (expressions ['as' star_target])
static asdl_seq *
-_loop0_203_rule(Parser *p)
+_loop0_208_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -36518,13 +37773,13 @@ _loop0_203_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_203[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (expressions ['as' star_target])"));
+ D(fprintf(stderr, "%*c> _loop0_208[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (expressions ['as' star_target])"));
Token * _literal;
void *elem;
while (
(_literal = _PyPegen_expect_token(p, 12)) // token=','
&&
- (elem = _tmp_245_rule(p)) // expressions ['as' star_target]
+ (elem = _tmp_266_rule(p)) // expressions ['as' star_target]
)
{
_res = elem;
@@ -36550,7 +37805,7 @@ _loop0_203_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_203[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_208[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' (expressions ['as' star_target])"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -36567,9 +37822,9 @@ _loop0_203_rule(Parser *p)
return _seq;
}
-// _gather_202: (expressions ['as' star_target]) _loop0_203
+// _gather_207: (expressions ['as' star_target]) _loop0_208
static asdl_seq *
-_gather_202_rule(Parser *p)
+_gather_207_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -36581,27 +37836,27 @@ _gather_202_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // (expressions ['as' star_target]) _loop0_203
+ { // (expressions ['as' star_target]) _loop0_208
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_202[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(expressions ['as' star_target]) _loop0_203"));
+ D(fprintf(stderr, "%*c> _gather_207[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(expressions ['as' star_target]) _loop0_208"));
void *elem;
asdl_seq * seq;
if (
- (elem = _tmp_245_rule(p)) // expressions ['as' star_target]
+ (elem = _tmp_266_rule(p)) // expressions ['as' star_target]
&&
- (seq = _loop0_203_rule(p)) // _loop0_203
+ (seq = _loop0_208_rule(p)) // _loop0_208
)
{
- D(fprintf(stderr, "%*c+ _gather_202[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(expressions ['as' star_target]) _loop0_203"));
+ D(fprintf(stderr, "%*c+ _gather_207[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(expressions ['as' star_target]) _loop0_208"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_202[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(expressions ['as' star_target]) _loop0_203"));
+ D(fprintf(stderr, "%*c%s _gather_207[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(expressions ['as' star_target]) _loop0_208"));
}
_res = NULL;
done:
@@ -36609,9 +37864,9 @@ _gather_202_rule(Parser *p)
return _res;
}
-// _tmp_204: 'except' | 'finally'
+// _tmp_209: 'except' | 'finally'
static void *
-_tmp_204_rule(Parser *p)
+_tmp_209_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -36628,18 +37883,18 @@ _tmp_204_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_204[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'except'"));
+ D(fprintf(stderr, "%*c> _tmp_209[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'except'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 636)) // token='except'
+ (_keyword = _PyPegen_expect_token(p, 637)) // token='except'
)
{
- D(fprintf(stderr, "%*c+ _tmp_204[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'except'"));
+ D(fprintf(stderr, "%*c+ _tmp_209[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'except'"));
_res = _keyword;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_204[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_209[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'except'"));
}
{ // 'finally'
@@ -36647,18 +37902,18 @@ _tmp_204_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_204[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'finally'"));
+ D(fprintf(stderr, "%*c> _tmp_209[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'finally'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 632)) // token='finally'
+ (_keyword = _PyPegen_expect_token(p, 633)) // token='finally'
)
{
- D(fprintf(stderr, "%*c+ _tmp_204[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'finally'"));
+ D(fprintf(stderr, "%*c+ _tmp_209[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'finally'"));
_res = _keyword;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_204[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_209[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'finally'"));
}
_res = NULL;
@@ -36667,9 +37922,9 @@ _tmp_204_rule(Parser *p)
return _res;
}
-// _loop0_205: block
+// _loop0_210: block
static asdl_seq *
-_loop0_205_rule(Parser *p)
+_loop0_210_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -36695,7 +37950,7 @@ _loop0_205_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_205[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "block"));
+ D(fprintf(stderr, "%*c> _loop0_210[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "block"));
asdl_stmt_seq* block_var;
while (
(block_var = block_rule(p)) // block
@@ -36718,7 +37973,7 @@ _loop0_205_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_205[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_210[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "block"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -36735,9 +37990,9 @@ _loop0_205_rule(Parser *p)
return _seq;
}
-// _loop1_206: except_block
+// _loop1_211: except_block
static asdl_seq *
-_loop1_206_rule(Parser *p)
+_loop1_211_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -36763,7 +38018,7 @@ _loop1_206_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_206[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "except_block"));
+ D(fprintf(stderr, "%*c> _loop1_211[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "except_block"));
excepthandler_ty except_block_var;
while (
(except_block_var = except_block_rule(p)) // except_block
@@ -36786,7 +38041,7 @@ _loop1_206_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_206[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_211[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "except_block"));
}
if (_n == 0 || p->error_indicator) {
@@ -36808,9 +38063,9 @@ _loop1_206_rule(Parser *p)
return _seq;
}
-// _tmp_207: 'as' NAME
+// _tmp_212: 'as' NAME
static void *
-_tmp_207_rule(Parser *p)
+_tmp_212_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -36827,21 +38082,21 @@ _tmp_207_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_207[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
+ D(fprintf(stderr, "%*c> _tmp_212[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
Token * _keyword;
expr_ty name_var;
if (
- (_keyword = _PyPegen_expect_token(p, 639)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 640)) // token='as'
&&
(name_var = _PyPegen_name_token(p)) // NAME
)
{
- D(fprintf(stderr, "%*c+ _tmp_207[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
+ D(fprintf(stderr, "%*c+ _tmp_212[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
_res = _PyPegen_dummy_name(p, _keyword, name_var);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_207[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_212[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'as' NAME"));
}
_res = NULL;
@@ -36850,9 +38105,9 @@ _tmp_207_rule(Parser *p)
return _res;
}
-// _loop0_208: block
+// _loop0_213: block
static asdl_seq *
-_loop0_208_rule(Parser *p)
+_loop0_213_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -36878,7 +38133,7 @@ _loop0_208_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_208[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "block"));
+ D(fprintf(stderr, "%*c> _loop0_213[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "block"));
asdl_stmt_seq* block_var;
while (
(block_var = block_rule(p)) // block
@@ -36901,7 +38156,7 @@ _loop0_208_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_208[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_213[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "block"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -36918,9 +38173,9 @@ _loop0_208_rule(Parser *p)
return _seq;
}
-// _loop1_209: except_star_block
+// _loop1_214: except_star_block
static asdl_seq *
-_loop1_209_rule(Parser *p)
+_loop1_214_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -36946,7 +38201,7 @@ _loop1_209_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_209[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "except_star_block"));
+ D(fprintf(stderr, "%*c> _loop1_214[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "except_star_block"));
excepthandler_ty except_star_block_var;
while (
(except_star_block_var = except_star_block_rule(p)) // except_star_block
@@ -36969,7 +38224,7 @@ _loop1_209_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_209[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_214[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "except_star_block"));
}
if (_n == 0 || p->error_indicator) {
@@ -36991,9 +38246,9 @@ _loop1_209_rule(Parser *p)
return _seq;
}
-// _tmp_210: expression ['as' NAME]
+// _tmp_215: expression ['as' NAME]
static void *
-_tmp_210_rule(Parser *p)
+_tmp_215_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -37010,22 +38265,22 @@ _tmp_210_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_210[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression ['as' NAME]"));
+ D(fprintf(stderr, "%*c> _tmp_215[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression ['as' NAME]"));
void *_opt_var;
UNUSED(_opt_var); // Silence compiler warnings
expr_ty expression_var;
if (
(expression_var = expression_rule(p)) // expression
&&
- (_opt_var = _tmp_246_rule(p), !p->error_indicator) // ['as' NAME]
+ (_opt_var = _tmp_267_rule(p), !p->error_indicator) // ['as' NAME]
)
{
- D(fprintf(stderr, "%*c+ _tmp_210[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ['as' NAME]"));
+ D(fprintf(stderr, "%*c+ _tmp_215[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ['as' NAME]"));
_res = _PyPegen_dummy_name(p, expression_var, _opt_var);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_210[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_215[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression ['as' NAME]"));
}
_res = NULL;
@@ -37034,9 +38289,9 @@ _tmp_210_rule(Parser *p)
return _res;
}
-// _tmp_211: 'as' NAME
+// _tmp_216: 'as' NAME
static void *
-_tmp_211_rule(Parser *p)
+_tmp_216_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -37053,21 +38308,21 @@ _tmp_211_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_211[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
+ D(fprintf(stderr, "%*c> _tmp_216[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
Token * _keyword;
expr_ty name_var;
if (
- (_keyword = _PyPegen_expect_token(p, 639)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 640)) // token='as'
&&
(name_var = _PyPegen_name_token(p)) // NAME
)
{
- D(fprintf(stderr, "%*c+ _tmp_211[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
+ D(fprintf(stderr, "%*c+ _tmp_216[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
_res = _PyPegen_dummy_name(p, _keyword, name_var);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_211[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_216[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'as' NAME"));
}
_res = NULL;
@@ -37076,9 +38331,9 @@ _tmp_211_rule(Parser *p)
return _res;
}
-// _tmp_212: 'as' NAME
+// _tmp_217: 'as' NAME
static void *
-_tmp_212_rule(Parser *p)
+_tmp_217_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -37095,21 +38350,21 @@ _tmp_212_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_212[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
+ D(fprintf(stderr, "%*c> _tmp_217[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
Token * _keyword;
expr_ty name_var;
if (
- (_keyword = _PyPegen_expect_token(p, 639)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 640)) // token='as'
&&
(name_var = _PyPegen_name_token(p)) // NAME
)
{
- D(fprintf(stderr, "%*c+ _tmp_212[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
+ D(fprintf(stderr, "%*c+ _tmp_217[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
_res = _PyPegen_dummy_name(p, _keyword, name_var);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_212[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_217[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'as' NAME"));
}
_res = NULL;
@@ -37118,9 +38373,9 @@ _tmp_212_rule(Parser *p)
return _res;
}
-// _tmp_213: NEWLINE | ':'
+// _tmp_218: NEWLINE | ':'
static void *
-_tmp_213_rule(Parser *p)
+_tmp_218_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -37137,18 +38392,18 @@ _tmp_213_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_213[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NEWLINE"));
+ D(fprintf(stderr, "%*c> _tmp_218[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NEWLINE"));
Token * newline_var;
if (
(newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE'
)
{
- D(fprintf(stderr, "%*c+ _tmp_213[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NEWLINE"));
+ D(fprintf(stderr, "%*c+ _tmp_218[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NEWLINE"));
_res = newline_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_213[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_218[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NEWLINE"));
}
{ // ':'
@@ -37156,18 +38411,18 @@ _tmp_213_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_213[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'"));
+ D(fprintf(stderr, "%*c> _tmp_218[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
)
{
- D(fprintf(stderr, "%*c+ _tmp_213[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'"));
+ D(fprintf(stderr, "%*c+ _tmp_218[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_213[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_218[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':'"));
}
_res = NULL;
@@ -37176,9 +38431,9 @@ _tmp_213_rule(Parser *p)
return _res;
}
-// _tmp_214: 'as' NAME
+// _tmp_219: 'as' NAME
static void *
-_tmp_214_rule(Parser *p)
+_tmp_219_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -37195,21 +38450,21 @@ _tmp_214_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_214[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
+ D(fprintf(stderr, "%*c> _tmp_219[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
Token * _keyword;
expr_ty name_var;
if (
- (_keyword = _PyPegen_expect_token(p, 639)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 640)) // token='as'
&&
(name_var = _PyPegen_name_token(p)) // NAME
)
{
- D(fprintf(stderr, "%*c+ _tmp_214[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
+ D(fprintf(stderr, "%*c+ _tmp_219[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
_res = _PyPegen_dummy_name(p, _keyword, name_var);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_214[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_219[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'as' NAME"));
}
_res = NULL;
@@ -37218,9 +38473,9 @@ _tmp_214_rule(Parser *p)
return _res;
}
-// _tmp_215: 'as' NAME
+// _tmp_220: 'as' NAME
static void *
-_tmp_215_rule(Parser *p)
+_tmp_220_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -37237,21 +38492,21 @@ _tmp_215_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_215[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
+ D(fprintf(stderr, "%*c> _tmp_220[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
Token * _keyword;
expr_ty name_var;
if (
- (_keyword = _PyPegen_expect_token(p, 639)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 640)) // token='as'
&&
(name_var = _PyPegen_name_token(p)) // NAME
)
{
- D(fprintf(stderr, "%*c+ _tmp_215[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
+ D(fprintf(stderr, "%*c+ _tmp_220[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
_res = _PyPegen_dummy_name(p, _keyword, name_var);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_215[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_220[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'as' NAME"));
}
_res = NULL;
@@ -37260,9 +38515,9 @@ _tmp_215_rule(Parser *p)
return _res;
}
-// _tmp_216: positional_patterns ','
+// _tmp_221: positional_patterns ','
static void *
-_tmp_216_rule(Parser *p)
+_tmp_221_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -37279,7 +38534,7 @@ _tmp_216_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_216[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "positional_patterns ','"));
+ D(fprintf(stderr, "%*c> _tmp_221[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "positional_patterns ','"));
Token * _literal;
asdl_pattern_seq* positional_patterns_var;
if (
@@ -37288,12 +38543,12 @@ _tmp_216_rule(Parser *p)
(_literal = _PyPegen_expect_token(p, 12)) // token=','
)
{
- D(fprintf(stderr, "%*c+ _tmp_216[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "positional_patterns ','"));
+ D(fprintf(stderr, "%*c+ _tmp_221[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "positional_patterns ','"));
_res = _PyPegen_dummy_name(p, positional_patterns_var, _literal);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_216[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_221[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "positional_patterns ','"));
}
_res = NULL;
@@ -37302,9 +38557,9 @@ _tmp_216_rule(Parser *p)
return _res;
}
-// _tmp_217: '->' expression
+// _tmp_222: '->' expression
static void *
-_tmp_217_rule(Parser *p)
+_tmp_222_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -37321,7 +38576,7 @@ _tmp_217_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_217[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'->' expression"));
+ D(fprintf(stderr, "%*c> _tmp_222[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'->' expression"));
Token * _literal;
expr_ty expression_var;
if (
@@ -37330,12 +38585,12 @@ _tmp_217_rule(Parser *p)
(expression_var = expression_rule(p)) // expression
)
{
- D(fprintf(stderr, "%*c+ _tmp_217[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'->' expression"));
+ D(fprintf(stderr, "%*c+ _tmp_222[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'->' expression"));
_res = _PyPegen_dummy_name(p, _literal, expression_var);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_217[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_222[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'->' expression"));
}
_res = NULL;
@@ -37344,9 +38599,9 @@ _tmp_217_rule(Parser *p)
return _res;
}
-// _tmp_218: '(' arguments? ')'
+// _tmp_223: '(' arguments? ')'
static void *
-_tmp_218_rule(Parser *p)
+_tmp_223_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -37363,7 +38618,7 @@ _tmp_218_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_218[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' arguments? ')'"));
+ D(fprintf(stderr, "%*c> _tmp_223[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' arguments? ')'"));
Token * _literal;
Token * _literal_1;
void *_opt_var;
@@ -37376,12 +38631,12 @@ _tmp_218_rule(Parser *p)
(_literal_1 = _PyPegen_expect_token(p, 8)) // token=')'
)
{
- D(fprintf(stderr, "%*c+ _tmp_218[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' arguments? ')'"));
+ D(fprintf(stderr, "%*c+ _tmp_223[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' arguments? ')'"));
_res = _PyPegen_dummy_name(p, _literal, _opt_var, _literal_1);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_218[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_223[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'(' arguments? ')'"));
}
_res = NULL;
@@ -37390,9 +38645,9 @@ _tmp_218_rule(Parser *p)
return _res;
}
-// _tmp_219: '(' arguments? ')'
+// _tmp_224: '(' arguments? ')'
static void *
-_tmp_219_rule(Parser *p)
+_tmp_224_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -37409,7 +38664,7 @@ _tmp_219_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_219[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' arguments? ')'"));
+ D(fprintf(stderr, "%*c> _tmp_224[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' arguments? ')'"));
Token * _literal;
Token * _literal_1;
void *_opt_var;
@@ -37422,12 +38677,12 @@ _tmp_219_rule(Parser *p)
(_literal_1 = _PyPegen_expect_token(p, 8)) // token=')'
)
{
- D(fprintf(stderr, "%*c+ _tmp_219[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' arguments? ')'"));
+ D(fprintf(stderr, "%*c+ _tmp_224[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' arguments? ')'"));
_res = _PyPegen_dummy_name(p, _literal, _opt_var, _literal_1);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_219[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_224[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'(' arguments? ')'"));
}
_res = NULL;
@@ -37436,9 +38691,9 @@ _tmp_219_rule(Parser *p)
return _res;
}
-// _loop0_221: ',' double_starred_kvpair
+// _loop0_226: ',' double_starred_kvpair
static asdl_seq *
-_loop0_221_rule(Parser *p)
+_loop0_226_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -37464,7 +38719,7 @@ _loop0_221_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_221[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' double_starred_kvpair"));
+ D(fprintf(stderr, "%*c> _loop0_226[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' double_starred_kvpair"));
Token * _literal;
KeyValuePair* elem;
while (
@@ -37496,7 +38751,7 @@ _loop0_221_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_221[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_226[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' double_starred_kvpair"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -37513,9 +38768,9 @@ _loop0_221_rule(Parser *p)
return _seq;
}
-// _gather_220: double_starred_kvpair _loop0_221
+// _gather_225: double_starred_kvpair _loop0_226
static asdl_seq *
-_gather_220_rule(Parser *p)
+_gather_225_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -37527,27 +38782,27 @@ _gather_220_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // double_starred_kvpair _loop0_221
+ { // double_starred_kvpair _loop0_226
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_220[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "double_starred_kvpair _loop0_221"));
+ D(fprintf(stderr, "%*c> _gather_225[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "double_starred_kvpair _loop0_226"));
KeyValuePair* elem;
asdl_seq * seq;
if (
(elem = double_starred_kvpair_rule(p)) // double_starred_kvpair
&&
- (seq = _loop0_221_rule(p)) // _loop0_221
+ (seq = _loop0_226_rule(p)) // _loop0_226
)
{
- D(fprintf(stderr, "%*c+ _gather_220[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "double_starred_kvpair _loop0_221"));
+ D(fprintf(stderr, "%*c+ _gather_225[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "double_starred_kvpair _loop0_226"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_220[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "double_starred_kvpair _loop0_221"));
+ D(fprintf(stderr, "%*c%s _gather_225[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "double_starred_kvpair _loop0_226"));
}
_res = NULL;
done:
@@ -37555,9 +38810,9 @@ _gather_220_rule(Parser *p)
return _res;
}
-// _tmp_222: '}' | ','
+// _tmp_227: '}' | ','
static void *
-_tmp_222_rule(Parser *p)
+_tmp_227_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -37574,18 +38829,18 @@ _tmp_222_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_222[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'}'"));
+ D(fprintf(stderr, "%*c> _tmp_227[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'}'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 26)) // token='}'
)
{
- D(fprintf(stderr, "%*c+ _tmp_222[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'}'"));
+ D(fprintf(stderr, "%*c+ _tmp_227[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'}'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_222[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_227[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'}'"));
}
{ // ','
@@ -37593,18 +38848,18 @@ _tmp_222_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_222[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','"));
+ D(fprintf(stderr, "%*c> _tmp_227[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 12)) // token=','
)
{
- D(fprintf(stderr, "%*c+ _tmp_222[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','"));
+ D(fprintf(stderr, "%*c+ _tmp_227[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_222[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_227[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','"));
}
_res = NULL;
@@ -37613,9 +38868,9 @@ _tmp_222_rule(Parser *p)
return _res;
}
-// _tmp_223: '}' | ','
+// _tmp_228: '}' | ','
static void *
-_tmp_223_rule(Parser *p)
+_tmp_228_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -37632,18 +38887,18 @@ _tmp_223_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_223[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'}'"));
+ D(fprintf(stderr, "%*c> _tmp_228[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'}'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 26)) // token='}'
)
{
- D(fprintf(stderr, "%*c+ _tmp_223[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'}'"));
+ D(fprintf(stderr, "%*c+ _tmp_228[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'}'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_223[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_228[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'}'"));
}
{ // ','
@@ -37651,18 +38906,18 @@ _tmp_223_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_223[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','"));
+ D(fprintf(stderr, "%*c> _tmp_228[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 12)) // token=','
)
{
- D(fprintf(stderr, "%*c+ _tmp_223[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','"));
+ D(fprintf(stderr, "%*c+ _tmp_228[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_223[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_228[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','"));
}
_res = NULL;
@@ -37671,9 +38926,898 @@ _tmp_223_rule(Parser *p)
return _res;
}
-// _tmp_224: star_targets '='
+// _tmp_229: yield_expr | star_expressions
static void *
-_tmp_224_rule(Parser *p)
+_tmp_229_rule(Parser *p)
+{
+ if (p->level++ == MAXSTACK) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ }
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ void * _res = NULL;
+ int _mark = p->mark;
+ { // yield_expr
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_229[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr"));
+ expr_ty yield_expr_var;
+ if (
+ (yield_expr_var = yield_expr_rule(p)) // yield_expr
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_229[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr"));
+ _res = yield_expr_var;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_229[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "yield_expr"));
+ }
+ { // star_expressions
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_229[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions"));
+ expr_ty star_expressions_var;
+ if (
+ (star_expressions_var = star_expressions_rule(p)) // star_expressions
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_229[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions"));
+ _res = star_expressions_var;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_229[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_expressions"));
+ }
+ _res = NULL;
+ done:
+ p->level--;
+ return _res;
+}
+
+// _tmp_230: yield_expr | star_expressions
+static void *
+_tmp_230_rule(Parser *p)
+{
+ if (p->level++ == MAXSTACK) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ }
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ void * _res = NULL;
+ int _mark = p->mark;
+ { // yield_expr
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_230[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr"));
+ expr_ty yield_expr_var;
+ if (
+ (yield_expr_var = yield_expr_rule(p)) // yield_expr
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_230[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr"));
+ _res = yield_expr_var;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_230[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "yield_expr"));
+ }
+ { // star_expressions
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_230[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions"));
+ expr_ty star_expressions_var;
+ if (
+ (star_expressions_var = star_expressions_rule(p)) // star_expressions
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_230[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions"));
+ _res = star_expressions_var;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_230[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_expressions"));
+ }
+ _res = NULL;
+ done:
+ p->level--;
+ return _res;
+}
+
+// _tmp_231: '=' | '!' | ':' | '}'
+static void *
+_tmp_231_rule(Parser *p)
+{
+ if (p->level++ == MAXSTACK) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ }
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ void * _res = NULL;
+ int _mark = p->mark;
+ { // '='
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_231[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'='"));
+ Token * _literal;
+ if (
+ (_literal = _PyPegen_expect_token(p, 22)) // token='='
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_231[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'='"));
+ _res = _literal;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_231[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'='"));
+ }
+ { // '!'
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_231[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'!'"));
+ Token * _literal;
+ if (
+ (_literal = _PyPegen_expect_token(p, 54)) // token='!'
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_231[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'!'"));
+ _res = _literal;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_231[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'!'"));
+ }
+ { // ':'
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_231[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'"));
+ Token * _literal;
+ if (
+ (_literal = _PyPegen_expect_token(p, 11)) // token=':'
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_231[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'"));
+ _res = _literal;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_231[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':'"));
+ }
+ { // '}'
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_231[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'}'"));
+ Token * _literal;
+ if (
+ (_literal = _PyPegen_expect_token(p, 26)) // token='}'
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_231[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'}'"));
+ _res = _literal;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_231[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'}'"));
+ }
+ _res = NULL;
+ done:
+ p->level--;
+ return _res;
+}
+
+// _tmp_232: yield_expr | star_expressions
+static void *
+_tmp_232_rule(Parser *p)
+{
+ if (p->level++ == MAXSTACK) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ }
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ void * _res = NULL;
+ int _mark = p->mark;
+ { // yield_expr
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_232[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr"));
+ expr_ty yield_expr_var;
+ if (
+ (yield_expr_var = yield_expr_rule(p)) // yield_expr
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_232[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr"));
+ _res = yield_expr_var;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_232[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "yield_expr"));
+ }
+ { // star_expressions
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_232[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions"));
+ expr_ty star_expressions_var;
+ if (
+ (star_expressions_var = star_expressions_rule(p)) // star_expressions
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_232[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions"));
+ _res = star_expressions_var;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_232[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_expressions"));
+ }
+ _res = NULL;
+ done:
+ p->level--;
+ return _res;
+}
+
+// _tmp_233: '!' | ':' | '}'
+static void *
+_tmp_233_rule(Parser *p)
+{
+ if (p->level++ == MAXSTACK) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ }
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ void * _res = NULL;
+ int _mark = p->mark;
+ { // '!'
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_233[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'!'"));
+ Token * _literal;
+ if (
+ (_literal = _PyPegen_expect_token(p, 54)) // token='!'
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_233[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'!'"));
+ _res = _literal;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_233[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'!'"));
+ }
+ { // ':'
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_233[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'"));
+ Token * _literal;
+ if (
+ (_literal = _PyPegen_expect_token(p, 11)) // token=':'
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_233[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'"));
+ _res = _literal;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_233[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':'"));
+ }
+ { // '}'
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_233[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'}'"));
+ Token * _literal;
+ if (
+ (_literal = _PyPegen_expect_token(p, 26)) // token='}'
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_233[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'}'"));
+ _res = _literal;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_233[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'}'"));
+ }
+ _res = NULL;
+ done:
+ p->level--;
+ return _res;
+}
+
+// _tmp_234: yield_expr | star_expressions
+static void *
+_tmp_234_rule(Parser *p)
+{
+ if (p->level++ == MAXSTACK) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ }
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ void * _res = NULL;
+ int _mark = p->mark;
+ { // yield_expr
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_234[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr"));
+ expr_ty yield_expr_var;
+ if (
+ (yield_expr_var = yield_expr_rule(p)) // yield_expr
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_234[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr"));
+ _res = yield_expr_var;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_234[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "yield_expr"));
+ }
+ { // star_expressions
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_234[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions"));
+ expr_ty star_expressions_var;
+ if (
+ (star_expressions_var = star_expressions_rule(p)) // star_expressions
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_234[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions"));
+ _res = star_expressions_var;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_234[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_expressions"));
+ }
+ _res = NULL;
+ done:
+ p->level--;
+ return _res;
+}
+
+// _tmp_235: yield_expr | star_expressions
+static void *
+_tmp_235_rule(Parser *p)
+{
+ if (p->level++ == MAXSTACK) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ }
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ void * _res = NULL;
+ int _mark = p->mark;
+ { // yield_expr
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_235[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr"));
+ expr_ty yield_expr_var;
+ if (
+ (yield_expr_var = yield_expr_rule(p)) // yield_expr
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_235[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr"));
+ _res = yield_expr_var;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_235[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "yield_expr"));
+ }
+ { // star_expressions
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_235[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions"));
+ expr_ty star_expressions_var;
+ if (
+ (star_expressions_var = star_expressions_rule(p)) // star_expressions
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_235[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions"));
+ _res = star_expressions_var;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_235[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_expressions"));
+ }
+ _res = NULL;
+ done:
+ p->level--;
+ return _res;
+}
+
+// _tmp_236: '!' NAME
+static void *
+_tmp_236_rule(Parser *p)
+{
+ if (p->level++ == MAXSTACK) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ }
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ void * _res = NULL;
+ int _mark = p->mark;
+ { // '!' NAME
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_236[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'!' NAME"));
+ Token * _literal;
+ expr_ty name_var;
+ if (
+ (_literal = _PyPegen_expect_token(p, 54)) // token='!'
+ &&
+ (name_var = _PyPegen_name_token(p)) // NAME
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_236[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'!' NAME"));
+ _res = _PyPegen_dummy_name(p, _literal, name_var);
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_236[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'!' NAME"));
+ }
+ _res = NULL;
+ done:
+ p->level--;
+ return _res;
+}
+
+// _tmp_237: ':' | '}'
+static void *
+_tmp_237_rule(Parser *p)
+{
+ if (p->level++ == MAXSTACK) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ }
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ void * _res = NULL;
+ int _mark = p->mark;
+ { // ':'
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_237[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'"));
+ Token * _literal;
+ if (
+ (_literal = _PyPegen_expect_token(p, 11)) // token=':'
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_237[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'"));
+ _res = _literal;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_237[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':'"));
+ }
+ { // '}'
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_237[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'}'"));
+ Token * _literal;
+ if (
+ (_literal = _PyPegen_expect_token(p, 26)) // token='}'
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_237[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'}'"));
+ _res = _literal;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_237[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'}'"));
+ }
+ _res = NULL;
+ done:
+ p->level--;
+ return _res;
+}
+
+// _tmp_238: yield_expr | star_expressions
+static void *
+_tmp_238_rule(Parser *p)
+{
+ if (p->level++ == MAXSTACK) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ }
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ void * _res = NULL;
+ int _mark = p->mark;
+ { // yield_expr
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_238[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr"));
+ expr_ty yield_expr_var;
+ if (
+ (yield_expr_var = yield_expr_rule(p)) // yield_expr
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_238[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr"));
+ _res = yield_expr_var;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_238[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "yield_expr"));
+ }
+ { // star_expressions
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_238[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions"));
+ expr_ty star_expressions_var;
+ if (
+ (star_expressions_var = star_expressions_rule(p)) // star_expressions
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_238[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions"));
+ _res = star_expressions_var;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_238[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_expressions"));
+ }
+ _res = NULL;
+ done:
+ p->level--;
+ return _res;
+}
+
+// _tmp_239: '!' NAME
+static void *
+_tmp_239_rule(Parser *p)
+{
+ if (p->level++ == MAXSTACK) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ }
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ void * _res = NULL;
+ int _mark = p->mark;
+ { // '!' NAME
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_239[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'!' NAME"));
+ Token * _literal;
+ expr_ty name_var;
+ if (
+ (_literal = _PyPegen_expect_token(p, 54)) // token='!'
+ &&
+ (name_var = _PyPegen_name_token(p)) // NAME
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_239[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'!' NAME"));
+ _res = _PyPegen_dummy_name(p, _literal, name_var);
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_239[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'!' NAME"));
+ }
+ _res = NULL;
+ done:
+ p->level--;
+ return _res;
+}
+
+// _loop0_240: fstring_format_spec
+static asdl_seq *
+_loop0_240_rule(Parser *p)
+{
+ if (p->level++ == MAXSTACK) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ }
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ void *_res = NULL;
+ int _mark = p->mark;
+ void **_children = PyMem_Malloc(sizeof(void *));
+ if (!_children) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ p->level--;
+ return NULL;
+ }
+ Py_ssize_t _children_capacity = 1;
+ Py_ssize_t _n = 0;
+ { // fstring_format_spec
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _loop0_240[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "fstring_format_spec"));
+ expr_ty fstring_format_spec_var;
+ while (
+ (fstring_format_spec_var = fstring_format_spec_rule(p)) // fstring_format_spec
+ )
+ {
+ _res = fstring_format_spec_var;
+ if (_n == _children_capacity) {
+ _children_capacity *= 2;
+ void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
+ if (!_new_children) {
+ PyMem_Free(_children);
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ p->level--;
+ return NULL;
+ }
+ _children = _new_children;
+ }
+ _children[_n++] = _res;
+ _mark = p->mark;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _loop0_240[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "fstring_format_spec"));
+ }
+ asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
+ if (!_seq) {
+ PyMem_Free(_children);
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ p->level--;
+ return NULL;
+ }
+ for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]);
+ PyMem_Free(_children);
+ p->level--;
+ return _seq;
+}
+
+// _tmp_241: yield_expr | star_expressions
+static void *
+_tmp_241_rule(Parser *p)
+{
+ if (p->level++ == MAXSTACK) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ }
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ void * _res = NULL;
+ int _mark = p->mark;
+ { // yield_expr
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_241[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr"));
+ expr_ty yield_expr_var;
+ if (
+ (yield_expr_var = yield_expr_rule(p)) // yield_expr
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_241[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr"));
+ _res = yield_expr_var;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_241[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "yield_expr"));
+ }
+ { // star_expressions
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_241[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions"));
+ expr_ty star_expressions_var;
+ if (
+ (star_expressions_var = star_expressions_rule(p)) // star_expressions
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_241[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions"));
+ _res = star_expressions_var;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_241[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_expressions"));
+ }
+ _res = NULL;
+ done:
+ p->level--;
+ return _res;
+}
+
+// _tmp_242: '!' NAME
+static void *
+_tmp_242_rule(Parser *p)
+{
+ if (p->level++ == MAXSTACK) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ }
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ void * _res = NULL;
+ int _mark = p->mark;
+ { // '!' NAME
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_242[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'!' NAME"));
+ Token * _literal;
+ expr_ty name_var;
+ if (
+ (_literal = _PyPegen_expect_token(p, 54)) // token='!'
+ &&
+ (name_var = _PyPegen_name_token(p)) // NAME
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_242[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'!' NAME"));
+ _res = _PyPegen_dummy_name(p, _literal, name_var);
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_242[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'!' NAME"));
+ }
+ _res = NULL;
+ done:
+ p->level--;
+ return _res;
+}
+
+// _tmp_243: ':' | '}'
+static void *
+_tmp_243_rule(Parser *p)
+{
+ if (p->level++ == MAXSTACK) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ }
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ void * _res = NULL;
+ int _mark = p->mark;
+ { // ':'
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_243[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'"));
+ Token * _literal;
+ if (
+ (_literal = _PyPegen_expect_token(p, 11)) // token=':'
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_243[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'"));
+ _res = _literal;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_243[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':'"));
+ }
+ { // '}'
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_243[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'}'"));
+ Token * _literal;
+ if (
+ (_literal = _PyPegen_expect_token(p, 26)) // token='}'
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_243[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'}'"));
+ _res = _literal;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_243[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'}'"));
+ }
+ _res = NULL;
+ done:
+ p->level--;
+ return _res;
+}
+
+// _tmp_244: star_targets '='
+static void *
+_tmp_244_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -37690,7 +39834,7 @@ _tmp_224_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_224[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_targets '='"));
+ D(fprintf(stderr, "%*c> _tmp_244[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_targets '='"));
Token * _literal;
expr_ty z;
if (
@@ -37699,7 +39843,7 @@ _tmp_224_rule(Parser *p)
(_literal = _PyPegen_expect_token(p, 22)) // token='='
)
{
- D(fprintf(stderr, "%*c+ _tmp_224[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_targets '='"));
+ D(fprintf(stderr, "%*c+ _tmp_244[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_targets '='"));
_res = z;
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
@@ -37709,7 +39853,7 @@ _tmp_224_rule(Parser *p)
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_224[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_244[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_targets '='"));
}
_res = NULL;
@@ -37718,9 +39862,9 @@ _tmp_224_rule(Parser *p)
return _res;
}
-// _tmp_225: '.' | '...'
+// _tmp_245: '.' | '...'
static void *
-_tmp_225_rule(Parser *p)
+_tmp_245_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -37737,18 +39881,18 @@ _tmp_225_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_225[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'.'"));
+ D(fprintf(stderr, "%*c> _tmp_245[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'.'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 23)) // token='.'
)
{
- D(fprintf(stderr, "%*c+ _tmp_225[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'.'"));
+ D(fprintf(stderr, "%*c+ _tmp_245[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'.'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_225[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_245[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'.'"));
}
{ // '...'
@@ -37756,18 +39900,18 @@ _tmp_225_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_225[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'...'"));
+ D(fprintf(stderr, "%*c> _tmp_245[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'...'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 52)) // token='...'
)
{
- D(fprintf(stderr, "%*c+ _tmp_225[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'...'"));
+ D(fprintf(stderr, "%*c+ _tmp_245[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'...'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_225[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_245[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'...'"));
}
_res = NULL;
@@ -37776,9 +39920,9 @@ _tmp_225_rule(Parser *p)
return _res;
}
-// _tmp_226: '.' | '...'
+// _tmp_246: '.' | '...'
static void *
-_tmp_226_rule(Parser *p)
+_tmp_246_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -37795,18 +39939,18 @@ _tmp_226_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_226[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'.'"));
+ D(fprintf(stderr, "%*c> _tmp_246[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'.'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 23)) // token='.'
)
{
- D(fprintf(stderr, "%*c+ _tmp_226[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'.'"));
+ D(fprintf(stderr, "%*c+ _tmp_246[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'.'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_226[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_246[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'.'"));
}
{ // '...'
@@ -37814,18 +39958,18 @@ _tmp_226_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_226[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'...'"));
+ D(fprintf(stderr, "%*c> _tmp_246[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'...'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 52)) // token='...'
)
{
- D(fprintf(stderr, "%*c+ _tmp_226[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'...'"));
+ D(fprintf(stderr, "%*c+ _tmp_246[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'...'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_226[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_246[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'...'"));
}
_res = NULL;
@@ -37834,9 +39978,9 @@ _tmp_226_rule(Parser *p)
return _res;
}
-// _tmp_227: '@' named_expression NEWLINE
+// _tmp_247: '@' named_expression NEWLINE
static void *
-_tmp_227_rule(Parser *p)
+_tmp_247_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -37853,7 +39997,7 @@ _tmp_227_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_227[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'@' named_expression NEWLINE"));
+ D(fprintf(stderr, "%*c> _tmp_247[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'@' named_expression NEWLINE"));
Token * _literal;
expr_ty f;
Token * newline_var;
@@ -37865,7 +40009,7 @@ _tmp_227_rule(Parser *p)
(newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE'
)
{
- D(fprintf(stderr, "%*c+ _tmp_227[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'@' named_expression NEWLINE"));
+ D(fprintf(stderr, "%*c+ _tmp_247[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'@' named_expression NEWLINE"));
_res = f;
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
@@ -37875,7 +40019,7 @@ _tmp_227_rule(Parser *p)
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_227[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_247[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'@' named_expression NEWLINE"));
}
_res = NULL;
@@ -37884,9 +40028,9 @@ _tmp_227_rule(Parser *p)
return _res;
}
-// _tmp_228: ',' expression
+// _tmp_248: ',' expression
static void *
-_tmp_228_rule(Parser *p)
+_tmp_248_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -37903,7 +40047,7 @@ _tmp_228_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_228[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression"));
+ D(fprintf(stderr, "%*c> _tmp_248[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression"));
Token * _literal;
expr_ty c;
if (
@@ -37912,7 +40056,7 @@ _tmp_228_rule(Parser *p)
(c = expression_rule(p)) // expression
)
{
- D(fprintf(stderr, "%*c+ _tmp_228[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' expression"));
+ D(fprintf(stderr, "%*c+ _tmp_248[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' expression"));
_res = c;
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
@@ -37922,7 +40066,7 @@ _tmp_228_rule(Parser *p)
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_228[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_248[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' expression"));
}
_res = NULL;
@@ -37931,9 +40075,9 @@ _tmp_228_rule(Parser *p)
return _res;
}
-// _tmp_229: ',' star_expression
+// _tmp_249: ',' star_expression
static void *
-_tmp_229_rule(Parser *p)
+_tmp_249_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -37950,7 +40094,7 @@ _tmp_229_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_229[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_expression"));
+ D(fprintf(stderr, "%*c> _tmp_249[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_expression"));
Token * _literal;
expr_ty c;
if (
@@ -37959,7 +40103,7 @@ _tmp_229_rule(Parser *p)
(c = star_expression_rule(p)) // star_expression
)
{
- D(fprintf(stderr, "%*c+ _tmp_229[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' star_expression"));
+ D(fprintf(stderr, "%*c+ _tmp_249[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' star_expression"));
_res = c;
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
@@ -37969,7 +40113,7 @@ _tmp_229_rule(Parser *p)
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_229[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_249[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' star_expression"));
}
_res = NULL;
@@ -37978,9 +40122,9 @@ _tmp_229_rule(Parser *p)
return _res;
}
-// _tmp_230: 'or' conjunction
+// _tmp_250: 'or' conjunction
static void *
-_tmp_230_rule(Parser *p)
+_tmp_250_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -37997,7 +40141,7 @@ _tmp_230_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_230[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'or' conjunction"));
+ D(fprintf(stderr, "%*c> _tmp_250[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'or' conjunction"));
Token * _keyword;
expr_ty c;
if (
@@ -38006,7 +40150,7 @@ _tmp_230_rule(Parser *p)
(c = conjunction_rule(p)) // conjunction
)
{
- D(fprintf(stderr, "%*c+ _tmp_230[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'or' conjunction"));
+ D(fprintf(stderr, "%*c+ _tmp_250[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'or' conjunction"));
_res = c;
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
@@ -38016,7 +40160,7 @@ _tmp_230_rule(Parser *p)
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_230[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_250[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'or' conjunction"));
}
_res = NULL;
@@ -38025,9 +40169,9 @@ _tmp_230_rule(Parser *p)
return _res;
}
-// _tmp_231: 'and' inversion
+// _tmp_251: 'and' inversion
static void *
-_tmp_231_rule(Parser *p)
+_tmp_251_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -38044,7 +40188,7 @@ _tmp_231_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_231[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'and' inversion"));
+ D(fprintf(stderr, "%*c> _tmp_251[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'and' inversion"));
Token * _keyword;
expr_ty c;
if (
@@ -38053,7 +40197,7 @@ _tmp_231_rule(Parser *p)
(c = inversion_rule(p)) // inversion
)
{
- D(fprintf(stderr, "%*c+ _tmp_231[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'and' inversion"));
+ D(fprintf(stderr, "%*c+ _tmp_251[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'and' inversion"));
_res = c;
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
@@ -38063,7 +40207,7 @@ _tmp_231_rule(Parser *p)
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_231[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_251[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'and' inversion"));
}
_res = NULL;
@@ -38072,9 +40216,9 @@ _tmp_231_rule(Parser *p)
return _res;
}
-// _tmp_232: slice | starred_expression
+// _tmp_252: slice | starred_expression
static void *
-_tmp_232_rule(Parser *p)
+_tmp_252_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -38091,18 +40235,18 @@ _tmp_232_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_232[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slice"));
+ D(fprintf(stderr, "%*c> _tmp_252[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slice"));
expr_ty slice_var;
if (
(slice_var = slice_rule(p)) // slice
)
{
- D(fprintf(stderr, "%*c+ _tmp_232[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slice"));
+ D(fprintf(stderr, "%*c+ _tmp_252[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slice"));
_res = slice_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_232[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_252[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "slice"));
}
{ // starred_expression
@@ -38110,18 +40254,18 @@ _tmp_232_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_232[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "starred_expression"));
+ D(fprintf(stderr, "%*c> _tmp_252[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "starred_expression"));
expr_ty starred_expression_var;
if (
(starred_expression_var = starred_expression_rule(p)) // starred_expression
)
{
- D(fprintf(stderr, "%*c+ _tmp_232[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "starred_expression"));
+ D(fprintf(stderr, "%*c+ _tmp_252[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "starred_expression"));
_res = starred_expression_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_232[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_252[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "starred_expression"));
}
_res = NULL;
@@ -38130,9 +40274,67 @@ _tmp_232_rule(Parser *p)
return _res;
}
-// _tmp_233: 'if' disjunction
+// _tmp_253: fstring | string
static void *
-_tmp_233_rule(Parser *p)
+_tmp_253_rule(Parser *p)
+{
+ if (p->level++ == MAXSTACK) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ }
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ void * _res = NULL;
+ int _mark = p->mark;
+ { // fstring
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_253[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "fstring"));
+ expr_ty fstring_var;
+ if (
+ (fstring_var = fstring_rule(p)) // fstring
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_253[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "fstring"));
+ _res = fstring_var;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_253[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "fstring"));
+ }
+ { // string
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_253[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "string"));
+ expr_ty string_var;
+ if (
+ (string_var = string_rule(p)) // string
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_253[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "string"));
+ _res = string_var;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_253[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "string"));
+ }
+ _res = NULL;
+ done:
+ p->level--;
+ return _res;
+}
+
+// _tmp_254: 'if' disjunction
+static void *
+_tmp_254_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -38149,16 +40351,16 @@ _tmp_233_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_233[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'if' disjunction"));
+ D(fprintf(stderr, "%*c> _tmp_254[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'if' disjunction"));
Token * _keyword;
expr_ty z;
if (
- (_keyword = _PyPegen_expect_token(p, 641)) // token='if'
+ (_keyword = _PyPegen_expect_token(p, 642)) // token='if'
&&
(z = disjunction_rule(p)) // disjunction
)
{
- D(fprintf(stderr, "%*c+ _tmp_233[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'if' disjunction"));
+ D(fprintf(stderr, "%*c+ _tmp_254[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'if' disjunction"));
_res = z;
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
@@ -38168,7 +40370,7 @@ _tmp_233_rule(Parser *p)
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_233[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_254[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'if' disjunction"));
}
_res = NULL;
@@ -38177,9 +40379,9 @@ _tmp_233_rule(Parser *p)
return _res;
}
-// _tmp_234: 'if' disjunction
+// _tmp_255: 'if' disjunction
static void *
-_tmp_234_rule(Parser *p)
+_tmp_255_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -38196,16 +40398,16 @@ _tmp_234_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_234[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'if' disjunction"));
+ D(fprintf(stderr, "%*c> _tmp_255[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'if' disjunction"));
Token * _keyword;
expr_ty z;
if (
- (_keyword = _PyPegen_expect_token(p, 641)) // token='if'
+ (_keyword = _PyPegen_expect_token(p, 642)) // token='if'
&&
(z = disjunction_rule(p)) // disjunction
)
{
- D(fprintf(stderr, "%*c+ _tmp_234[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'if' disjunction"));
+ D(fprintf(stderr, "%*c+ _tmp_255[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'if' disjunction"));
_res = z;
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
@@ -38215,7 +40417,7 @@ _tmp_234_rule(Parser *p)
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_234[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_255[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'if' disjunction"));
}
_res = NULL;
@@ -38224,9 +40426,9 @@ _tmp_234_rule(Parser *p)
return _res;
}
-// _tmp_235: starred_expression | (assignment_expression | expression !':=') !'='
+// _tmp_256: starred_expression | (assignment_expression | expression !':=') !'='
static void *
-_tmp_235_rule(Parser *p)
+_tmp_256_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -38243,18 +40445,18 @@ _tmp_235_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_235[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "starred_expression"));
+ D(fprintf(stderr, "%*c> _tmp_256[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "starred_expression"));
expr_ty starred_expression_var;
if (
(starred_expression_var = starred_expression_rule(p)) // starred_expression
)
{
- D(fprintf(stderr, "%*c+ _tmp_235[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "starred_expression"));
+ D(fprintf(stderr, "%*c+ _tmp_256[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "starred_expression"));
_res = starred_expression_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_235[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_256[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "starred_expression"));
}
{ // (assignment_expression | expression !':=') !'='
@@ -38262,20 +40464,20 @@ _tmp_235_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_235[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(assignment_expression | expression !':=') !'='"));
- void *_tmp_247_var;
+ D(fprintf(stderr, "%*c> _tmp_256[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(assignment_expression | expression !':=') !'='"));
+ void *_tmp_268_var;
if (
- (_tmp_247_var = _tmp_247_rule(p)) // assignment_expression | expression !':='
+ (_tmp_268_var = _tmp_268_rule(p)) // assignment_expression | expression !':='
&&
_PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 22) // token='='
)
{
- D(fprintf(stderr, "%*c+ _tmp_235[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(assignment_expression | expression !':=') !'='"));
- _res = _tmp_247_var;
+ D(fprintf(stderr, "%*c+ _tmp_256[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(assignment_expression | expression !':=') !'='"));
+ _res = _tmp_268_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_235[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_256[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(assignment_expression | expression !':=') !'='"));
}
_res = NULL;
@@ -38284,9 +40486,9 @@ _tmp_235_rule(Parser *p)
return _res;
}
-// _tmp_236: ',' star_target
+// _tmp_257: ',' star_target
static void *
-_tmp_236_rule(Parser *p)
+_tmp_257_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -38303,7 +40505,7 @@ _tmp_236_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_236[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_target"));
+ D(fprintf(stderr, "%*c> _tmp_257[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_target"));
Token * _literal;
expr_ty c;
if (
@@ -38312,7 +40514,7 @@ _tmp_236_rule(Parser *p)
(c = star_target_rule(p)) // star_target
)
{
- D(fprintf(stderr, "%*c+ _tmp_236[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' star_target"));
+ D(fprintf(stderr, "%*c+ _tmp_257[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' star_target"));
_res = c;
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
@@ -38322,7 +40524,7 @@ _tmp_236_rule(Parser *p)
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_236[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_257[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' star_target"));
}
_res = NULL;
@@ -38331,9 +40533,9 @@ _tmp_236_rule(Parser *p)
return _res;
}
-// _tmp_237: ',' star_target
+// _tmp_258: ',' star_target
static void *
-_tmp_237_rule(Parser *p)
+_tmp_258_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -38350,7 +40552,7 @@ _tmp_237_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_237[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_target"));
+ D(fprintf(stderr, "%*c> _tmp_258[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_target"));
Token * _literal;
expr_ty c;
if (
@@ -38359,7 +40561,7 @@ _tmp_237_rule(Parser *p)
(c = star_target_rule(p)) // star_target
)
{
- D(fprintf(stderr, "%*c+ _tmp_237[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' star_target"));
+ D(fprintf(stderr, "%*c+ _tmp_258[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' star_target"));
_res = c;
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
@@ -38369,7 +40571,7 @@ _tmp_237_rule(Parser *p)
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_237[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_258[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' star_target"));
}
_res = NULL;
@@ -38378,9 +40580,9 @@ _tmp_237_rule(Parser *p)
return _res;
}
-// _tmp_238: star_targets '='
+// _tmp_259: star_targets '='
static void *
-_tmp_238_rule(Parser *p)
+_tmp_259_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -38397,7 +40599,7 @@ _tmp_238_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_238[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_targets '='"));
+ D(fprintf(stderr, "%*c> _tmp_259[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_targets '='"));
Token * _literal;
expr_ty star_targets_var;
if (
@@ -38406,12 +40608,12 @@ _tmp_238_rule(Parser *p)
(_literal = _PyPegen_expect_token(p, 22)) // token='='
)
{
- D(fprintf(stderr, "%*c+ _tmp_238[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_targets '='"));
+ D(fprintf(stderr, "%*c+ _tmp_259[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_targets '='"));
_res = _PyPegen_dummy_name(p, star_targets_var, _literal);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_238[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_259[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_targets '='"));
}
_res = NULL;
@@ -38420,9 +40622,9 @@ _tmp_238_rule(Parser *p)
return _res;
}
-// _tmp_239: star_targets '='
+// _tmp_260: star_targets '='
static void *
-_tmp_239_rule(Parser *p)
+_tmp_260_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -38439,7 +40641,7 @@ _tmp_239_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_239[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_targets '='"));
+ D(fprintf(stderr, "%*c> _tmp_260[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_targets '='"));
Token * _literal;
expr_ty star_targets_var;
if (
@@ -38448,12 +40650,12 @@ _tmp_239_rule(Parser *p)
(_literal = _PyPegen_expect_token(p, 22)) // token='='
)
{
- D(fprintf(stderr, "%*c+ _tmp_239[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_targets '='"));
+ D(fprintf(stderr, "%*c+ _tmp_260[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_targets '='"));
_res = _PyPegen_dummy_name(p, star_targets_var, _literal);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_239[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_260[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_targets '='"));
}
_res = NULL;
@@ -38462,9 +40664,9 @@ _tmp_239_rule(Parser *p)
return _res;
}
-// _tmp_240: ')' | '**'
+// _tmp_261: ')' | '**'
static void *
-_tmp_240_rule(Parser *p)
+_tmp_261_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -38481,18 +40683,18 @@ _tmp_240_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_240[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'"));
+ D(fprintf(stderr, "%*c> _tmp_261[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 8)) // token=')'
)
{
- D(fprintf(stderr, "%*c+ _tmp_240[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'"));
+ D(fprintf(stderr, "%*c+ _tmp_261[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_240[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_261[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "')'"));
}
{ // '**'
@@ -38500,18 +40702,18 @@ _tmp_240_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_240[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'"));
+ D(fprintf(stderr, "%*c> _tmp_261[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 35)) // token='**'
)
{
- D(fprintf(stderr, "%*c+ _tmp_240[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'"));
+ D(fprintf(stderr, "%*c+ _tmp_261[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_240[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_261[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'**'"));
}
_res = NULL;
@@ -38520,9 +40722,9 @@ _tmp_240_rule(Parser *p)
return _res;
}
-// _tmp_241: ':' | '**'
+// _tmp_262: ':' | '**'
static void *
-_tmp_241_rule(Parser *p)
+_tmp_262_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -38539,18 +40741,18 @@ _tmp_241_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_241[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'"));
+ D(fprintf(stderr, "%*c> _tmp_262[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
)
{
- D(fprintf(stderr, "%*c+ _tmp_241[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'"));
+ D(fprintf(stderr, "%*c+ _tmp_262[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_241[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_262[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':'"));
}
{ // '**'
@@ -38558,18 +40760,18 @@ _tmp_241_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_241[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'"));
+ D(fprintf(stderr, "%*c> _tmp_262[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 35)) // token='**'
)
{
- D(fprintf(stderr, "%*c+ _tmp_241[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'"));
+ D(fprintf(stderr, "%*c+ _tmp_262[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_241[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_262[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'**'"));
}
_res = NULL;
@@ -38578,9 +40780,9 @@ _tmp_241_rule(Parser *p)
return _res;
}
-// _tmp_242: expression ['as' star_target]
+// _tmp_263: expression ['as' star_target]
static void *
-_tmp_242_rule(Parser *p)
+_tmp_263_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -38597,22 +40799,22 @@ _tmp_242_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_242[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression ['as' star_target]"));
+ D(fprintf(stderr, "%*c> _tmp_263[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression ['as' star_target]"));
void *_opt_var;
UNUSED(_opt_var); // Silence compiler warnings
expr_ty expression_var;
if (
(expression_var = expression_rule(p)) // expression
&&
- (_opt_var = _tmp_248_rule(p), !p->error_indicator) // ['as' star_target]
+ (_opt_var = _tmp_269_rule(p), !p->error_indicator) // ['as' star_target]
)
{
- D(fprintf(stderr, "%*c+ _tmp_242[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ['as' star_target]"));
+ D(fprintf(stderr, "%*c+ _tmp_263[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ['as' star_target]"));
_res = _PyPegen_dummy_name(p, expression_var, _opt_var);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_242[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_263[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression ['as' star_target]"));
}
_res = NULL;
@@ -38621,9 +40823,9 @@ _tmp_242_rule(Parser *p)
return _res;
}
-// _tmp_243: expressions ['as' star_target]
+// _tmp_264: expressions ['as' star_target]
static void *
-_tmp_243_rule(Parser *p)
+_tmp_264_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -38640,22 +40842,22 @@ _tmp_243_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_243[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expressions ['as' star_target]"));
+ D(fprintf(stderr, "%*c> _tmp_264[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expressions ['as' star_target]"));
void *_opt_var;
UNUSED(_opt_var); // Silence compiler warnings
expr_ty expressions_var;
if (
(expressions_var = expressions_rule(p)) // expressions
&&
- (_opt_var = _tmp_249_rule(p), !p->error_indicator) // ['as' star_target]
+ (_opt_var = _tmp_270_rule(p), !p->error_indicator) // ['as' star_target]
)
{
- D(fprintf(stderr, "%*c+ _tmp_243[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expressions ['as' star_target]"));
+ D(fprintf(stderr, "%*c+ _tmp_264[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expressions ['as' star_target]"));
_res = _PyPegen_dummy_name(p, expressions_var, _opt_var);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_243[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_264[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expressions ['as' star_target]"));
}
_res = NULL;
@@ -38664,9 +40866,9 @@ _tmp_243_rule(Parser *p)
return _res;
}
-// _tmp_244: expression ['as' star_target]
+// _tmp_265: expression ['as' star_target]
static void *
-_tmp_244_rule(Parser *p)
+_tmp_265_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -38683,22 +40885,22 @@ _tmp_244_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_244[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression ['as' star_target]"));
+ D(fprintf(stderr, "%*c> _tmp_265[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression ['as' star_target]"));
void *_opt_var;
UNUSED(_opt_var); // Silence compiler warnings
expr_ty expression_var;
if (
(expression_var = expression_rule(p)) // expression
&&
- (_opt_var = _tmp_250_rule(p), !p->error_indicator) // ['as' star_target]
+ (_opt_var = _tmp_271_rule(p), !p->error_indicator) // ['as' star_target]
)
{
- D(fprintf(stderr, "%*c+ _tmp_244[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ['as' star_target]"));
+ D(fprintf(stderr, "%*c+ _tmp_265[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ['as' star_target]"));
_res = _PyPegen_dummy_name(p, expression_var, _opt_var);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_244[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_265[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression ['as' star_target]"));
}
_res = NULL;
@@ -38707,9 +40909,9 @@ _tmp_244_rule(Parser *p)
return _res;
}
-// _tmp_245: expressions ['as' star_target]
+// _tmp_266: expressions ['as' star_target]
static void *
-_tmp_245_rule(Parser *p)
+_tmp_266_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -38726,22 +40928,22 @@ _tmp_245_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_245[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expressions ['as' star_target]"));
+ D(fprintf(stderr, "%*c> _tmp_266[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expressions ['as' star_target]"));
void *_opt_var;
UNUSED(_opt_var); // Silence compiler warnings
expr_ty expressions_var;
if (
(expressions_var = expressions_rule(p)) // expressions
&&
- (_opt_var = _tmp_251_rule(p), !p->error_indicator) // ['as' star_target]
+ (_opt_var = _tmp_272_rule(p), !p->error_indicator) // ['as' star_target]
)
{
- D(fprintf(stderr, "%*c+ _tmp_245[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expressions ['as' star_target]"));
+ D(fprintf(stderr, "%*c+ _tmp_266[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expressions ['as' star_target]"));
_res = _PyPegen_dummy_name(p, expressions_var, _opt_var);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_245[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_266[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expressions ['as' star_target]"));
}
_res = NULL;
@@ -38750,9 +40952,9 @@ _tmp_245_rule(Parser *p)
return _res;
}
-// _tmp_246: 'as' NAME
+// _tmp_267: 'as' NAME
static void *
-_tmp_246_rule(Parser *p)
+_tmp_267_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -38769,21 +40971,21 @@ _tmp_246_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_246[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
+ D(fprintf(stderr, "%*c> _tmp_267[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
Token * _keyword;
expr_ty name_var;
if (
- (_keyword = _PyPegen_expect_token(p, 639)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 640)) // token='as'
&&
(name_var = _PyPegen_name_token(p)) // NAME
)
{
- D(fprintf(stderr, "%*c+ _tmp_246[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
+ D(fprintf(stderr, "%*c+ _tmp_267[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
_res = _PyPegen_dummy_name(p, _keyword, name_var);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_246[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_267[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'as' NAME"));
}
_res = NULL;
@@ -38792,9 +40994,9 @@ _tmp_246_rule(Parser *p)
return _res;
}
-// _tmp_247: assignment_expression | expression !':='
+// _tmp_268: assignment_expression | expression !':='
static void *
-_tmp_247_rule(Parser *p)
+_tmp_268_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -38811,18 +41013,18 @@ _tmp_247_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_247[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "assignment_expression"));
+ D(fprintf(stderr, "%*c> _tmp_268[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "assignment_expression"));
expr_ty assignment_expression_var;
if (
(assignment_expression_var = assignment_expression_rule(p)) // assignment_expression
)
{
- D(fprintf(stderr, "%*c+ _tmp_247[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "assignment_expression"));
+ D(fprintf(stderr, "%*c+ _tmp_268[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "assignment_expression"));
_res = assignment_expression_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_247[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_268[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "assignment_expression"));
}
{ // expression !':='
@@ -38830,7 +41032,7 @@ _tmp_247_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_247[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression !':='"));
+ D(fprintf(stderr, "%*c> _tmp_268[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression !':='"));
expr_ty expression_var;
if (
(expression_var = expression_rule(p)) // expression
@@ -38838,12 +41040,12 @@ _tmp_247_rule(Parser *p)
_PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 53) // token=':='
)
{
- D(fprintf(stderr, "%*c+ _tmp_247[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression !':='"));
+ D(fprintf(stderr, "%*c+ _tmp_268[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression !':='"));
_res = expression_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_247[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_268[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression !':='"));
}
_res = NULL;
@@ -38852,9 +41054,9 @@ _tmp_247_rule(Parser *p)
return _res;
}
-// _tmp_248: 'as' star_target
+// _tmp_269: 'as' star_target
static void *
-_tmp_248_rule(Parser *p)
+_tmp_269_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -38871,21 +41073,21 @@ _tmp_248_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_248[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' star_target"));
+ D(fprintf(stderr, "%*c> _tmp_269[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' star_target"));
Token * _keyword;
expr_ty star_target_var;
if (
- (_keyword = _PyPegen_expect_token(p, 639)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 640)) // token='as'
&&
(star_target_var = star_target_rule(p)) // star_target
)
{
- D(fprintf(stderr, "%*c+ _tmp_248[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' star_target"));
+ D(fprintf(stderr, "%*c+ _tmp_269[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' star_target"));
_res = _PyPegen_dummy_name(p, _keyword, star_target_var);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_248[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_269[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'as' star_target"));
}
_res = NULL;
@@ -38894,9 +41096,9 @@ _tmp_248_rule(Parser *p)
return _res;
}
-// _tmp_249: 'as' star_target
+// _tmp_270: 'as' star_target
static void *
-_tmp_249_rule(Parser *p)
+_tmp_270_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -38913,21 +41115,21 @@ _tmp_249_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_249[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' star_target"));
+ D(fprintf(stderr, "%*c> _tmp_270[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' star_target"));
Token * _keyword;
expr_ty star_target_var;
if (
- (_keyword = _PyPegen_expect_token(p, 639)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 640)) // token='as'
&&
(star_target_var = star_target_rule(p)) // star_target
)
{
- D(fprintf(stderr, "%*c+ _tmp_249[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' star_target"));
+ D(fprintf(stderr, "%*c+ _tmp_270[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' star_target"));
_res = _PyPegen_dummy_name(p, _keyword, star_target_var);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_249[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_270[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'as' star_target"));
}
_res = NULL;
@@ -38936,9 +41138,9 @@ _tmp_249_rule(Parser *p)
return _res;
}
-// _tmp_250: 'as' star_target
+// _tmp_271: 'as' star_target
static void *
-_tmp_250_rule(Parser *p)
+_tmp_271_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -38955,21 +41157,21 @@ _tmp_250_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_250[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' star_target"));
+ D(fprintf(stderr, "%*c> _tmp_271[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' star_target"));
Token * _keyword;
expr_ty star_target_var;
if (
- (_keyword = _PyPegen_expect_token(p, 639)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 640)) // token='as'
&&
(star_target_var = star_target_rule(p)) // star_target
)
{
- D(fprintf(stderr, "%*c+ _tmp_250[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' star_target"));
+ D(fprintf(stderr, "%*c+ _tmp_271[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' star_target"));
_res = _PyPegen_dummy_name(p, _keyword, star_target_var);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_250[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_271[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'as' star_target"));
}
_res = NULL;
@@ -38978,9 +41180,9 @@ _tmp_250_rule(Parser *p)
return _res;
}
-// _tmp_251: 'as' star_target
+// _tmp_272: 'as' star_target
static void *
-_tmp_251_rule(Parser *p)
+_tmp_272_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -38997,21 +41199,21 @@ _tmp_251_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_251[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' star_target"));
+ D(fprintf(stderr, "%*c> _tmp_272[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' star_target"));
Token * _keyword;
expr_ty star_target_var;
if (
- (_keyword = _PyPegen_expect_token(p, 639)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 640)) // token='as'
&&
(star_target_var = star_target_rule(p)) // star_target
)
{
- D(fprintf(stderr, "%*c+ _tmp_251[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' star_target"));
+ D(fprintf(stderr, "%*c+ _tmp_272[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' star_target"));
_res = _PyPegen_dummy_name(p, _keyword, star_target_var);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_251[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_272[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'as' star_target"));
}
_res = NULL;
diff --git a/Parser/pegen.c b/Parser/pegen.c
index b79ae4cb1fb370..da410ea84ecb8e 100644
--- a/Parser/pegen.c
+++ b/Parser/pegen.c
@@ -155,6 +155,16 @@ initialize_token(Parser *p, Token *parser_token, struct token *new_token, int to
return -1;
}
+ parser_token->metadata = NULL;
+ if (new_token->metadata != NULL) {
+ if (_PyArena_AddPyObject(p->arena, new_token->metadata) < 0) {
+ Py_DECREF(parser_token->metadata);
+ return -1;
+ }
+ parser_token->metadata = new_token->metadata;
+ new_token->metadata = NULL;
+ }
+
parser_token->level = new_token->level;
parser_token->lineno = new_token->lineno;
parser_token->col_offset = p->tok->lineno == p->starting_lineno ? p->starting_col_offset + new_token->col_offset
@@ -198,6 +208,7 @@ int
_PyPegen_fill_token(Parser *p)
{
struct token new_token;
+ new_token.metadata = NULL;
int type = _PyTokenizer_Get(p->tok, &new_token);
// Record and skip '# type: ignore' comments
@@ -206,14 +217,14 @@ _PyPegen_fill_token(Parser *p)
char *tag = PyMem_Malloc(len + 1);
if (tag == NULL) {
PyErr_NoMemory();
- return -1;
+ goto error;
}
strncpy(tag, new_token.start, len);
tag[len] = '\0';
// Ownership of tag passes to the growable array
if (!growable_comment_array_add(&p->type_ignore_comments, p->tok->lineno, tag)) {
PyErr_NoMemory();
- return -1;
+ goto error;
}
type = _PyTokenizer_Get(p->tok, &new_token);
}
@@ -234,11 +245,14 @@ _PyPegen_fill_token(Parser *p)
// Check if we are at the limit of the token array capacity and resize if needed
if ((p->fill == p->size) && (_resize_tokens_array(p) != 0)) {
- return -1;
+ goto error;
}
Token *t = p->tokens[p->fill];
return initialize_token(p, t, &new_token, type);
+error:
+ Py_XDECREF(new_token.metadata);
+ return -1;
}
#if defined(Py_DEBUG)
@@ -359,7 +373,7 @@ _PyPegen_expect_token(Parser *p, int type)
}
Token *t = p->tokens[p->mark];
if (t->type != type) {
- return NULL;
+ return NULL;
}
p->mark += 1;
return t;
diff --git a/Parser/pegen.h b/Parser/pegen.h
index ad5c97f5f7e5d1..8800e9f97f5e04 100644
--- a/Parser/pegen.h
+++ b/Parser/pegen.h
@@ -39,6 +39,7 @@ typedef struct {
int level;
int lineno, col_offset, end_lineno, end_col_offset;
Memo *memo;
+ PyObject *metadata;
} Token;
typedef struct {
@@ -118,6 +119,11 @@ typedef struct {
int is_keyword;
} KeywordOrStarred;
+typedef struct {
+ void *result;
+ PyObject *metadata;
+} ResultTokenWithMetadata;
+
// Internal parser functions
#if defined(Py_DEBUG)
void _PyPegen_clear_memo_statistics(void);
@@ -138,6 +144,7 @@ void* _PyPegen_expect_forced_result(Parser *p, void* result, const char* expecte
Token *_PyPegen_expect_forced_token(Parser *p, int type, const char* expected);
expr_ty _PyPegen_expect_soft_keyword(Parser *p, const char *keyword);
expr_ty _PyPegen_soft_keyword_token(Parser *p);
+expr_ty _PyPegen_fstring_middle_token(Parser* p);
Token *_PyPegen_get_last_nonnwhitespace_token(Parser *);
int _PyPegen_fill_token(Parser *p);
expr_ty _PyPegen_name_token(Parser *p);
@@ -155,7 +162,7 @@ typedef enum {
int _Pypegen_raise_decode_error(Parser *p);
void _PyPegen_raise_tokenizer_init_error(PyObject *filename);
int _Pypegen_tokenizer_error(Parser *p);
-void *_PyPegen_raise_error(Parser *p, PyObject *errtype, const char *errmsg, ...);
+void *_PyPegen_raise_error(Parser *p, PyObject *errtype, int use_mark, const char *errmsg, ...);
void *_PyPegen_raise_error_known_location(Parser *p, PyObject *errtype,
Py_ssize_t lineno, Py_ssize_t col_offset,
Py_ssize_t end_lineno, Py_ssize_t end_col_offset,
@@ -175,8 +182,9 @@ RAISE_ERROR_KNOWN_LOCATION(Parser *p, PyObject *errtype,
va_end(va);
return NULL;
}
-#define RAISE_SYNTAX_ERROR(msg, ...) _PyPegen_raise_error(p, PyExc_SyntaxError, msg, ##__VA_ARGS__)
-#define RAISE_INDENTATION_ERROR(msg, ...) _PyPegen_raise_error(p, PyExc_IndentationError, msg, ##__VA_ARGS__)
+#define RAISE_SYNTAX_ERROR(msg, ...) _PyPegen_raise_error(p, PyExc_SyntaxError, 0, msg, ##__VA_ARGS__)
+#define RAISE_INDENTATION_ERROR(msg, ...) _PyPegen_raise_error(p, PyExc_IndentationError, 0, msg, ##__VA_ARGS__)
+#define RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN(msg, ...) _PyPegen_raise_error(p, PyExc_SyntaxError, 1, msg, ##__VA_ARGS__)
#define RAISE_SYNTAX_ERROR_KNOWN_RANGE(a, b, msg, ...) \
RAISE_ERROR_KNOWN_LOCATION(p, PyExc_SyntaxError, (a)->lineno, (a)->col_offset, (b)->end_lineno, (b)->end_col_offset, msg, ##__VA_ARGS__)
#define RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, msg, ...) \
@@ -308,6 +316,8 @@ StarEtc *_PyPegen_star_etc(Parser *, arg_ty, asdl_seq *, arg_ty);
arguments_ty _PyPegen_make_arguments(Parser *, asdl_arg_seq *, SlashWithDefault *,
asdl_arg_seq *, asdl_seq *, StarEtc *);
arguments_ty _PyPegen_empty_arguments(Parser *);
+expr_ty _PyPegen_formatted_value(Parser *, expr_ty, Token *, ResultTokenWithMetadata *, ResultTokenWithMetadata *, Token *,
+ int, int, int, int, PyArena *);
AugOperator *_PyPegen_augoperator(Parser*, operator_ty type);
stmt_ty _PyPegen_function_def_decorators(Parser *, asdl_expr_seq *, stmt_ty);
stmt_ty _PyPegen_class_def_decorators(Parser *, asdl_expr_seq *, stmt_ty);
@@ -317,12 +327,18 @@ asdl_keyword_seq *_PyPegen_seq_delete_starred_exprs(Parser *, asdl_seq *);
expr_ty _PyPegen_collect_call_seqs(Parser *, asdl_expr_seq *, asdl_seq *,
int lineno, int col_offset, int end_lineno,
int end_col_offset, PyArena *arena);
-expr_ty _PyPegen_concatenate_strings(Parser *p, asdl_seq *);
+expr_ty _PyPegen_constant_from_token(Parser* p, Token* tok);
+expr_ty _PyPegen_constant_from_string(Parser* p, Token* tok);
+expr_ty _PyPegen_concatenate_strings(Parser *p, asdl_expr_seq *, int, int, int, int, PyArena *);
+expr_ty _PyPegen_FetchRawForm(Parser *p, int, int, int, int);
expr_ty _PyPegen_ensure_imaginary(Parser *p, expr_ty);
expr_ty _PyPegen_ensure_real(Parser *p, expr_ty);
asdl_seq *_PyPegen_join_sequences(Parser *, asdl_seq *, asdl_seq *);
int _PyPegen_check_barry_as_flufl(Parser *, Token *);
int _PyPegen_check_legacy_stmt(Parser *p, expr_ty t);
+ResultTokenWithMetadata *_PyPegen_check_fstring_conversion(Parser *p, Token *, expr_ty t);
+ResultTokenWithMetadata *_PyPegen_setup_full_format_spec(Parser *, Token *, asdl_expr_seq *, int, int,
+ int, int, PyArena *);
mod_ty _PyPegen_make_module(Parser *, asdl_stmt_seq *);
void *_PyPegen_arguments_parsing_error(Parser *, expr_ty);
expr_ty _PyPegen_get_last_comprehension_item(comprehension_ty comprehension);
@@ -338,6 +354,9 @@ void *_PyPegen_run_parser(Parser *);
mod_ty _PyPegen_run_parser_from_string(const char *, int, PyObject *, PyCompilerFlags *, PyArena *);
asdl_stmt_seq *_PyPegen_interactive_exit(Parser *);
+// TODO: move to the correct place in this file
+expr_ty _PyPegen_joined_str(Parser *p, Token* a, asdl_expr_seq* expr, Token*b);
+
// Generated function in parse.c - function definition in python.gram
void *_PyPegen_parse(Parser *);
diff --git a/Parser/pegen_errors.c b/Parser/pegen_errors.c
index 6ea7600119b643..1f227da0194e3c 100644
--- a/Parser/pegen_errors.c
+++ b/Parser/pegen_errors.c
@@ -165,6 +165,7 @@ _PyPegen_tokenize_full_source_to_check_for_errors(Parser *p) {
int ret = 0;
struct token new_token;
+ new_token.metadata = NULL;
for (;;) {
switch (_PyTokenizer_Get(p->tok, &new_token)) {
@@ -192,7 +193,11 @@ _PyPegen_tokenize_full_source_to_check_for_errors(Parser *p) {
exit:
- if (PyErr_Occurred()) {
+ Py_XDECREF(new_token.metadata);
+ // If we're in an f-string, we want the syntax error in the expression part
+ // to propagate, so that tokenizer errors (like expecting '}') that happen afterwards
+ // do not swallow it.
+ if (PyErr_Occurred() && p->tok->tok_mode_stack_index <= 0) {
Py_XDECREF(value);
Py_XDECREF(type);
Py_XDECREF(traceback);
@@ -205,7 +210,7 @@ _PyPegen_tokenize_full_source_to_check_for_errors(Parser *p) {
// PARSER ERRORS
void *
-_PyPegen_raise_error(Parser *p, PyObject *errtype, const char *errmsg, ...)
+_PyPegen_raise_error(Parser *p, PyObject *errtype, int use_mark, const char *errmsg, ...)
{
if (p->fill == 0) {
va_list va;
@@ -214,8 +219,13 @@ _PyPegen_raise_error(Parser *p, PyObject *errtype, const char *errmsg, ...)
va_end(va);
return NULL;
}
-
- Token *t = p->known_err_token != NULL ? p->known_err_token : p->tokens[p->fill - 1];
+ if (use_mark && p->mark == p->fill && _PyPegen_fill_token(p) < 0) {
+ p->error_indicator = 1;
+ return NULL;
+ }
+ Token *t = p->known_err_token != NULL
+ ? p->known_err_token
+ : p->tokens[use_mark ? p->mark : p->fill - 1];
Py_ssize_t col_offset;
Py_ssize_t end_col_offset = -1;
if (t->col_offset == -1) {
diff --git a/Parser/string_parser.c b/Parser/string_parser.c
index c096bea7426e5c..d4ce33850f7c58 100644
--- a/Parser/string_parser.c
+++ b/Parser/string_parser.c
@@ -135,7 +135,9 @@ decode_unicode_with_escapes(Parser *parser, const char *s, size_t len, Token *t)
const char *first_invalid_escape;
v = _PyUnicode_DecodeUnicodeEscapeInternal(s, len, NULL, NULL, &first_invalid_escape);
- if (v != NULL && first_invalid_escape != NULL) {
+ // HACK: later we can simply pass the line no, since we don't preserve the tokens
+ // when we are decoding the string but we preserve the line numbers.
+ if (v != NULL && first_invalid_escape != NULL && t != NULL) {
if (warn_invalid_escape_sequence(parser, first_invalid_escape, t) < 0) {
/* We have not decref u before because first_invalid_escape points
inside u. */
@@ -166,43 +168,43 @@ decode_bytes_with_escapes(Parser *p, const char *s, Py_ssize_t len, Token *t)
return result;
}
-/* s must include the bracketing quote characters, and r, b, u,
- &/or f prefixes (if any), and embedded escape sequences (if any).
- _PyPegen_parsestr parses it, and sets *result to decoded Python string object.
- If the string is an f-string, set *fstr and *fstrlen to the unparsed
- string object. Return 0 if no errors occurred. */
-int
-_PyPegen_parsestr(Parser *p, int *bytesmode, int *rawmode, PyObject **result,
- const char **fstr, Py_ssize_t *fstrlen, Token *t)
+PyObject *
+_PyPegen_decode_string(Parser *p, int raw, const char *s, size_t len, Token *t)
+{
+ if (raw) {
+ return PyUnicode_DecodeUTF8Stateful(s, len, NULL, NULL);
+ }
+ return decode_unicode_with_escapes(p, s, len, t);
+}
+
+/* s must include the bracketing quote characters, and r, b &/or f prefixes
+ (if any), and embedded escape sequences (if any). (f-strings are handled by the parser)
+ _PyPegen_parse_string parses it, and returns the decoded Python string object. */
+PyObject *
+_PyPegen_parse_string(Parser *p, Token *t)
{
const char *s = PyBytes_AsString(t->bytes);
if (s == NULL) {
- return -1;
+ return NULL;
}
size_t len;
int quote = Py_CHARMASK(*s);
- int fmode = 0;
- *bytesmode = 0;
- *rawmode = 0;
- *result = NULL;
- *fstr = NULL;
+ int bytesmode = 0;
+ int rawmode = 0;
+
if (Py_ISALPHA(quote)) {
- while (!*bytesmode || !*rawmode) {
+ while (!bytesmode || !rawmode) {
if (quote == 'b' || quote == 'B') {
quote =(unsigned char)*++s;
- *bytesmode = 1;
+ bytesmode = 1;
}
else if (quote == 'u' || quote == 'U') {
quote = (unsigned char)*++s;
}
else if (quote == 'r' || quote == 'R') {
quote = (unsigned char)*++s;
- *rawmode = 1;
- }
- else if (quote == 'f' || quote == 'F') {
- quote = (unsigned char)*++s;
- fmode = 1;
+ rawmode = 1;
}
else {
break;
@@ -210,32 +212,21 @@ _PyPegen_parsestr(Parser *p, int *bytesmode, int *rawmode, PyObject **result,
}
}
- /* fstrings are only allowed in Python 3.6 and greater */
- if (fmode && p->feature_version < 6) {
- p->error_indicator = 1;
- RAISE_SYNTAX_ERROR("Format strings are only supported in Python 3.6 and greater");
- return -1;
- }
-
- if (fmode && *bytesmode) {
- PyErr_BadInternalCall();
- return -1;
- }
if (quote != '\'' && quote != '\"') {
PyErr_BadInternalCall();
- return -1;
+ return NULL;
}
/* Skip the leading quote char. */
s++;
len = strlen(s);
if (len > INT_MAX) {
PyErr_SetString(PyExc_OverflowError, "string to parse is too long");
- return -1;
+ return NULL;
}
if (s[--len] != quote) {
/* Last quote char must match the first. */
PyErr_BadInternalCall();
- return -1;
+ return NULL;
}
if (len >= 4 && s[0] == quote && s[1] == quote) {
/* A triple quoted string. We've already skipped one quote at
@@ -246,1037 +237,28 @@ _PyPegen_parsestr(Parser *p, int *bytesmode, int *rawmode, PyObject **result,
/* And check that the last two match. */
if (s[--len] != quote || s[--len] != quote) {
PyErr_BadInternalCall();
- return -1;
+ return NULL;
}
}
- if (fmode) {
- /* Just return the bytes. The caller will parse the resulting
- string. */
- *fstr = s;
- *fstrlen = len;
- return 0;
- }
-
- /* Not an f-string. */
/* Avoid invoking escape decoding routines if possible. */
- *rawmode = *rawmode || strchr(s, '\\') == NULL;
- if (*bytesmode) {
+ rawmode = rawmode || strchr(s, '\\') == NULL;
+ if (bytesmode) {
/* Disallow non-ASCII characters. */
const char *ch;
for (ch = s; *ch; ch++) {
if (Py_CHARMASK(*ch) >= 0x80) {
- RAISE_SYNTAX_ERROR(
+ RAISE_SYNTAX_ERROR_KNOWN_LOCATION(
+ t,
"bytes can only contain ASCII "
"literal characters");
- return -1;
- }
- }
- if (*rawmode) {
- *result = PyBytes_FromStringAndSize(s, len);
- }
- else {
- *result = decode_bytes_with_escapes(p, s, len, t);
- }
- }
- else {
- if (*rawmode) {
- *result = PyUnicode_DecodeUTF8Stateful(s, len, NULL, NULL);
- }
- else {
- *result = decode_unicode_with_escapes(p, s, len, t);
- }
- }
- return *result == NULL ? -1 : 0;
-}
-
-
-
-// FSTRING STUFF
-
-/* Fix locations for the given node and its children.
-
- `parent` is the enclosing node.
- `expr_start` is the starting position of the expression (pointing to the open brace).
- `n` is the node which locations are going to be fixed relative to parent.
- `expr_str` is the child node's string representation, including braces.
-*/
-static bool
-fstring_find_expr_location(Token *parent, const char* expr_start, char *expr_str, int *p_lines, int *p_cols)
-{
- *p_lines = 0;
- *p_cols = 0;
- assert(expr_start != NULL && *expr_start == '{');
- if (parent && parent->bytes) {
- const char *parent_str = PyBytes_AsString(parent->bytes);
- if (!parent_str) {
- return false;
- }
- // The following is needed, in order to correctly shift the column
- // offset, in the case that (disregarding any whitespace) a newline
- // immediately follows the opening curly brace of the fstring expression.
- bool newline_after_brace = 1;
- const char *start = expr_start + 1;
- while (start && *start != '}' && *start != '\n') {
- if (*start != ' ' && *start != '\t' && *start != '\f') {
- newline_after_brace = 0;
- break;
- }
- start++;
- }
-
- // Account for the characters from the last newline character to our
- // left until the beginning of expr_start.
- if (!newline_after_brace) {
- start = expr_start;
- while (start > parent_str && *start != '\n') {
- start--;
- }
- *p_cols += (int)(expr_start - start);
- if (*start == '\n') {
- *p_cols -= 1;
- }
- }
- /* adjust the start based on the number of newlines encountered
- before the f-string expression */
- for (const char *p = parent_str; p < expr_start; p++) {
- if (*p == '\n') {
- (*p_lines)++;
- }
- }
- }
- return true;
-}
-
-
-/* Compile this expression in to an expr_ty. Add parens around the
- expression, in order to allow leading spaces in the expression. */
-static expr_ty
-fstring_compile_expr(Parser *p, const char *expr_start, const char *expr_end,
- Token *t)
-{
- expr_ty expr = NULL;
- char *str;
- Py_ssize_t len;
- const char *s;
- expr_ty result = NULL;
-
- assert(expr_end >= expr_start);
- assert(*(expr_start-1) == '{');
- assert(*expr_end == '}' || *expr_end == '!' || *expr_end == ':' ||
- *expr_end == '=');
-
- /* If the substring is all whitespace, it's an error. We need to catch this
- here, and not when we call PyParser_SimpleParseStringFlagsFilename,
- because turning the expression '' in to '()' would go from being invalid
- to valid. */
- for (s = expr_start; s != expr_end; s++) {
- char c = *s;
- /* The Python parser ignores only the following whitespace
- characters (\r already is converted to \n). */
- if (!(c == ' ' || c == '\t' || c == '\n' || c == '\f')) {
- break;
- }
- }
-
- if (s == expr_end) {
- if (*expr_end == '!' || *expr_end == ':' || *expr_end == '=') {
- RAISE_SYNTAX_ERROR("f-string: expression required before '%c'", *expr_end);
- return NULL;
- }
- RAISE_SYNTAX_ERROR("f-string: empty expression not allowed");
- return NULL;
- }
-
- len = expr_end - expr_start;
- /* Allocate 3 extra bytes: open paren, close paren, null byte. */
- str = PyMem_Calloc(len + 3, sizeof(char));
- if (str == NULL) {
- PyErr_NoMemory();
- return NULL;
- }
-
- // The call to fstring_find_expr_location is responsible for finding the column offset
- // the generated AST nodes need to be shifted to the right, which is equal to the number
- // of the f-string characters before the expression starts.
- memcpy(str+1, expr_start, len);
- int lines, cols;
- if (!fstring_find_expr_location(t, expr_start-1, str+1, &lines, &cols)) {
- PyMem_Free(str);
- return NULL;
- }
-
- // The parentheses are needed in order to allow for leading whitespace within
- // the f-string expression. This consequently gets parsed as a group (see the
- // group rule in python.gram).
- str[0] = '(';
- str[len+1] = ')';
-
- struct tok_state* tok = _PyTokenizer_FromString(str, 1);
- if (tok == NULL) {
- PyMem_Free(str);
- return NULL;
- }
- tok->filename = Py_NewRef(p->tok->filename);
- tok->lineno = t->lineno + lines - 1;
-
- Parser *p2 = _PyPegen_Parser_New(tok, Py_fstring_input, p->flags, p->feature_version,
- NULL, p->arena);
-
- p2->starting_lineno = t->lineno + lines;
- p2->starting_col_offset = lines != 0 ? cols : t->col_offset + cols;
-
- expr = _PyPegen_run_parser(p2);
-
- if (expr == NULL) {
- goto exit;
- }
- result = expr;
-
-exit:
- PyMem_Free(str);
- _PyPegen_Parser_Free(p2);
- _PyTokenizer_Free(tok);
- return result;
-}
-
-/* Return -1 on error.
-
- Return 0 if we reached the end of the literal.
-
- Return 1 if we haven't reached the end of the literal, but we want
- the caller to process the literal up to this point. Used for
- doubled braces.
-*/
-static int
-fstring_find_literal(Parser *p, const char **str, const char *end, int raw,
- PyObject **literal, int recurse_lvl, Token *t)
-{
- /* Get any literal string. It ends when we hit an un-doubled left
- brace (which isn't part of a unicode name escape such as
- "\N{EULER CONSTANT}"), or the end of the string. */
-
- const char *s = *str;
- const char *literal_start = s;
- int result = 0;
-
- assert(*literal == NULL);
- while (s < end) {
- char ch = *s++;
- if (!raw && ch == '\\' && s < end) {
- ch = *s++;
- if (ch == 'N') {
- /* We need to look at and skip matching braces for "\N{name}"
- sequences because otherwise we'll think the opening '{'
- starts an expression, which is not the case with "\N".
- Keep looking for either a matched '{' '}' pair, or the end
- of the string. */
-
- if (s < end && *s++ == '{') {
- while (s < end && *s++ != '}') {
- }
- continue;
- }
-
- /* This is an invalid "\N" sequence, since it's a "\N" not
- followed by a "{". Just keep parsing this literal. This
- error will be caught later by
- decode_unicode_with_escapes(). */
- continue;
- }
- if (ch == '{' && warn_invalid_escape_sequence(p, s-1, t) < 0) {
- return -1;
- }
- }
- if (ch == '{' || ch == '}') {
- /* Check for doubled braces, but only at the top level. If
- we checked at every level, then f'{0:{3}}' would fail
- with the two closing braces. */
- if (recurse_lvl == 0) {
- if (s < end && *s == ch) {
- /* We're going to tell the caller that the literal ends
- here, but that they should continue scanning. But also
- skip over the second brace when we resume scanning. */
- *str = s + 1;
- result = 1;
- goto done;
- }
-
- /* Where a single '{' is the start of a new expression, a
- single '}' is not allowed. */
- if (ch == '}') {
- *str = s - 1;
- RAISE_SYNTAX_ERROR("f-string: single '}' is not allowed");
- return -1;
- }
- }
- /* We're either at a '{', which means we're starting another
- expression; or a '}', which means we're at the end of this
- f-string (for a nested format_spec). */
- s--;
- break;
- }
- }
- *str = s;
- assert(s <= end);
- assert(s == end || *s == '{' || *s == '}');
-done:
- if (literal_start != s) {
- if (raw) {
- *literal = PyUnicode_DecodeUTF8Stateful(literal_start,
- s - literal_start,
- NULL, NULL);
- }
- else {
- *literal = decode_unicode_with_escapes(p, literal_start,
- s - literal_start, t);
- }
- if (!*literal) {
- return -1;
- }
- }
- return result;
-}
-
-/* Forward declaration because parsing is recursive. */
-static expr_ty
-fstring_parse(Parser *p, const char **str, const char *end, int raw, int recurse_lvl,
- Token *first_token, Token* t, Token *last_token);
-
-/* Parse the f-string at *str, ending at end. We know *str starts an
- expression (so it must be a '{'). Returns the FormattedValue node, which
- includes the expression, conversion character, format_spec expression, and
- optionally the text of the expression (if = is used).
-
- Note that I don't do a perfect job here: I don't make sure that a
- closing brace doesn't match an opening paren, for example. It
- doesn't need to error on all invalid expressions, just correctly
- find the end of all valid ones. Any errors inside the expression
- will be caught when we parse it later.
-
- *expression is set to the expression. For an '=' "debug" expression,
- *expr_text is set to the debug text (the original text of the expression,
- including the '=' and any whitespace around it, as a string object). If
- not a debug expression, *expr_text set to NULL. */
-static int
-fstring_find_expr(Parser *p, const char **str, const char *end, int raw, int recurse_lvl,
- PyObject **expr_text, expr_ty *expression, Token *first_token,
- Token *t, Token *last_token)
-{
- /* Return -1 on error, else 0. */
-
- const char *expr_start;
- const char *expr_end;
- expr_ty simple_expression;
- expr_ty format_spec = NULL; /* Optional format specifier. */
- int conversion = -1; /* The conversion char. Use default if not
- specified, or !r if using = and no format
- spec. */
-
- /* 0 if we're not in a string, else the quote char we're trying to
- match (single or double quote). */
- char quote_char = 0;
-
- /* If we're inside a string, 1=normal, 3=triple-quoted. */
- int string_type = 0;
-
- /* Keep track of nesting level for braces/parens/brackets in
- expressions. */
- Py_ssize_t nested_depth = 0;
- char parenstack[MAXLEVEL];
-
- *expr_text = NULL;
-
- /* Can only nest one level deep. */
- if (recurse_lvl >= 2) {
- RAISE_SYNTAX_ERROR("f-string: expressions nested too deeply");
- goto error;
- }
-
- /* The first char must be a left brace, or we wouldn't have gotten
- here. Skip over it. */
- assert(**str == '{');
- *str += 1;
-
- expr_start = *str;
- for (; *str < end; (*str)++) {
- char ch;
-
- /* Loop invariants. */
- assert(nested_depth >= 0);
- assert(*str >= expr_start && *str < end);
- if (quote_char) {
- assert(string_type == 1 || string_type == 3);
- } else {
- assert(string_type == 0);
- }
-
- ch = **str;
- /* Nowhere inside an expression is a backslash allowed. */
- if (ch == '\\') {
- /* Error: can't include a backslash character, inside
- parens or strings or not. */
- RAISE_SYNTAX_ERROR(
- "f-string expression part "
- "cannot include a backslash");
- goto error;
- }
- if (quote_char) {
- /* We're inside a string. See if we're at the end. */
- /* This code needs to implement the same non-error logic
- as tok_get from tokenizer.c, at the letter_quote
- label. To actually share that code would be a
- nightmare. But, it's unlikely to change and is small,
- so duplicate it here. Note we don't need to catch all
- of the errors, since they'll be caught when parsing the
- expression. We just need to match the non-error
- cases. Thus we can ignore \n in single-quoted strings,
- for example. Or non-terminated strings. */
- if (ch == quote_char) {
- /* Does this match the string_type (single or triple
- quoted)? */
- if (string_type == 3) {
- if (*str+2 < end && *(*str+1) == ch && *(*str+2) == ch) {
- /* We're at the end of a triple quoted string. */
- *str += 2;
- string_type = 0;
- quote_char = 0;
- continue;
- }
- } else {
- /* We're at the end of a normal string. */
- quote_char = 0;
- string_type = 0;
- continue;
- }
- }
- } else if (ch == '\'' || ch == '"') {
- /* Is this a triple quoted string? */
- if (*str+2 < end && *(*str+1) == ch && *(*str+2) == ch) {
- string_type = 3;
- *str += 2;
- } else {
- /* Start of a normal string. */
- string_type = 1;
- }
- /* Start looking for the end of the string. */
- quote_char = ch;
- } else if (ch == '[' || ch == '{' || ch == '(') {
- if (nested_depth >= MAXLEVEL) {
- RAISE_SYNTAX_ERROR("f-string: too many nested parenthesis");
- goto error;
- }
- parenstack[nested_depth] = ch;
- nested_depth++;
- } else if (ch == '#') {
- /* Error: can't include a comment character, inside parens
- or not. */
- RAISE_SYNTAX_ERROR("f-string expression part cannot include '#'");
- goto error;
- } else if (nested_depth == 0 &&
- (ch == '!' || ch == ':' || ch == '}' ||
- ch == '=' || ch == '>' || ch == '<')) {
- /* See if there's a next character. */
- if (*str+1 < end) {
- char next = *(*str+1);
-
- /* For "!=". since '=' is not an allowed conversion character,
- nothing is lost in this test. */
- if ((ch == '!' && next == '=') || /* != */
- (ch == '=' && next == '=') || /* == */
- (ch == '<' && next == '=') || /* <= */
- (ch == '>' && next == '=') /* >= */
- ) {
- *str += 1;
- continue;
- }
- }
- /* Don't get out of the loop for these, if they're single
- chars (not part of 2-char tokens). If by themselves, they
- don't end an expression (unlike say '!'). */
- if (ch == '>' || ch == '<') {
- continue;
- }
-
- /* Normal way out of this loop. */
- break;
- } else if (ch == ']' || ch == '}' || ch == ')') {
- if (!nested_depth) {
- RAISE_SYNTAX_ERROR("f-string: unmatched '%c'", ch);
- goto error;
- }
- nested_depth--;
- int opening = (unsigned char)parenstack[nested_depth];
- if (!((opening == '(' && ch == ')') ||
- (opening == '[' && ch == ']') ||
- (opening == '{' && ch == '}')))
- {
- RAISE_SYNTAX_ERROR(
- "f-string: closing parenthesis '%c' "
- "does not match opening parenthesis '%c'",
- ch, opening);
- goto error;
- }
- } else {
- /* Just consume this char and loop around. */
- }
- }
- expr_end = *str;
- /* If we leave the above loop in a string or with mismatched parens, we
- don't really care. We'll get a syntax error when compiling the
- expression. But, we can produce a better error message, so let's just
- do that.*/
- if (quote_char) {
- RAISE_SYNTAX_ERROR("f-string: unterminated string");
- goto error;
- }
- if (nested_depth) {
- int opening = (unsigned char)parenstack[nested_depth - 1];
- RAISE_SYNTAX_ERROR("f-string: unmatched '%c'", opening);
- goto error;
- }
-
- if (*str >= end) {
- goto unexpected_end_of_string;
- }
-
- /* Compile the expression as soon as possible, so we show errors
- related to the expression before errors related to the
- conversion or format_spec. */
- simple_expression = fstring_compile_expr(p, expr_start, expr_end, t);
- if (!simple_expression) {
- goto error;
- }
-
- /* Check for =, which puts the text value of the expression in
- expr_text. */
- if (**str == '=') {
- if (p->feature_version < 8) {
- RAISE_SYNTAX_ERROR("f-string: self documenting expressions are "
- "only supported in Python 3.8 and greater");
- goto error;
- }
- *str += 1;
-
- /* Skip over ASCII whitespace. No need to test for end of string
- here, since we know there's at least a trailing quote somewhere
- ahead. */
- while (Py_ISSPACE(**str)) {
- *str += 1;
- }
- if (*str >= end) {
- goto unexpected_end_of_string;
- }
- /* Set *expr_text to the text of the expression. */
- *expr_text = PyUnicode_FromStringAndSize(expr_start, *str-expr_start);
- if (!*expr_text) {
- goto error;
- }
- }
-
- /* Check for a conversion char, if present. */
- if (**str == '!') {
- *str += 1;
- const char *conv_start = *str;
- while (1) {
- if (*str >= end) {
- goto unexpected_end_of_string;
- }
- if (**str == '}' || **str == ':') {
- break;
- }
- *str += 1;
- }
- if (*str == conv_start) {
- RAISE_SYNTAX_ERROR(
- "f-string: missed conversion character");
- goto error;
- }
-
- conversion = (unsigned char)*conv_start;
- /* Validate the conversion. */
- if ((*str != conv_start + 1) ||
- !(conversion == 's' || conversion == 'r' || conversion == 'a'))
- {
- PyObject *conv_obj = PyUnicode_FromStringAndSize(conv_start,
- *str-conv_start);
- if (conv_obj) {
- RAISE_SYNTAX_ERROR(
- "f-string: invalid conversion character %R: "
- "expected 's', 'r', or 'a'",
- conv_obj);
- Py_DECREF(conv_obj);
- }
- goto error;
- }
-
- }
-
- /* Check for the format spec, if present. */
- assert(*str < end);
- if (**str == ':') {
- *str += 1;
- if (*str >= end) {
- goto unexpected_end_of_string;
- }
-
- /* Parse the format spec. */
- format_spec = fstring_parse(p, str, end, raw, recurse_lvl+1,
- first_token, t, last_token);
- if (!format_spec) {
- goto error;
- }
- }
-
- if (*str >= end || **str != '}') {
- goto unexpected_end_of_string;
- }
-
- /* We're at a right brace. Consume it. */
- assert(*str < end);
- assert(**str == '}');
- *str += 1;
-
- /* If we're in = mode (detected by non-NULL expr_text), and have no format
- spec and no explicit conversion, set the conversion to 'r'. */
- if (*expr_text && format_spec == NULL && conversion == -1) {
- conversion = 'r';
- }
-
- /* And now create the FormattedValue node that represents this
- entire expression with the conversion and format spec. */
- //TODO: Fix this
- *expression = _PyAST_FormattedValue(simple_expression, conversion,
- format_spec, first_token->lineno,
- first_token->col_offset,
- last_token->end_lineno,
- last_token->end_col_offset, p->arena);
- if (!*expression) {
- goto error;
- }
-
- return 0;
-
-unexpected_end_of_string:
- RAISE_SYNTAX_ERROR("f-string: expecting '}'");
- /* Falls through to error. */
-
-error:
- Py_XDECREF(*expr_text);
- return -1;
-
-}
-
-/* Return -1 on error.
-
- Return 0 if we have a literal (possible zero length) and an
- expression (zero length if at the end of the string.
-
- Return 1 if we have a literal, but no expression, and we want the
- caller to call us again. This is used to deal with doubled
- braces.
-
- When called multiple times on the string 'a{{b{0}c', this function
- will return:
-
- 1. the literal 'a{' with no expression, and a return value
- of 1. Despite the fact that there's no expression, the return
- value of 1 means we're not finished yet.
-
- 2. the literal 'b' and the expression '0', with a return value of
- 0. The fact that there's an expression means we're not finished.
-
- 3. literal 'c' with no expression and a return value of 0. The
- combination of the return value of 0 with no expression means
- we're finished.
-*/
-static int
-fstring_find_literal_and_expr(Parser *p, const char **str, const char *end, int raw,
- int recurse_lvl, PyObject **literal,
- PyObject **expr_text, expr_ty *expression,
- Token *first_token, Token *t, Token *last_token)
-{
- int result;
-
- assert(*literal == NULL && *expression == NULL);
-
- /* Get any literal string. */
- result = fstring_find_literal(p, str, end, raw, literal, recurse_lvl, t);
- if (result < 0) {
- goto error;
- }
-
- assert(result == 0 || result == 1);
-
- if (result == 1) {
- /* We have a literal, but don't look at the expression. */
- return 1;
- }
-
- if (*str >= end || **str == '}') {
- /* We're at the end of the string or the end of a nested
- f-string: no expression. The top-level error case where we
- expect to be at the end of the string but we're at a '}' is
- handled later. */
- return 0;
- }
-
- /* We must now be the start of an expression, on a '{'. */
- assert(**str == '{');
-
- if (fstring_find_expr(p, str, end, raw, recurse_lvl, expr_text,
- expression, first_token, t, last_token) < 0) {
- goto error;
- }
-
- return 0;
-
-error:
- Py_CLEAR(*literal);
- return -1;
-}
-
-#ifdef NDEBUG
-#define ExprList_check_invariants(l)
-#else
-static void
-ExprList_check_invariants(ExprList *l)
-{
- /* Check our invariants. Make sure this object is "live", and
- hasn't been deallocated. */
- assert(l->size >= 0);
- assert(l->p != NULL);
- if (l->size <= EXPRLIST_N_CACHED) {
- assert(l->data == l->p);
- }
-}
-#endif
-
-static void
-ExprList_Init(ExprList *l)
-{
- l->allocated = EXPRLIST_N_CACHED;
- l->size = 0;
-
- /* Until we start allocating dynamically, p points to data. */
- l->p = l->data;
-
- ExprList_check_invariants(l);
-}
-
-static int
-ExprList_Append(ExprList *l, expr_ty exp)
-{
- ExprList_check_invariants(l);
- if (l->size >= l->allocated) {
- /* We need to alloc (or realloc) the memory. */
- Py_ssize_t new_size = l->allocated * 2;
-
- /* See if we've ever allocated anything dynamically. */
- if (l->p == l->data) {
- Py_ssize_t i;
- /* We're still using the cached data. Switch to
- alloc-ing. */
- l->p = PyMem_Malloc(sizeof(expr_ty) * new_size);
- if (!l->p) {
- return -1;
- }
- /* Copy the cached data into the new buffer. */
- for (i = 0; i < l->size; i++) {
- l->p[i] = l->data[i];
- }
- } else {
- /* Just realloc. */
- expr_ty *tmp = PyMem_Realloc(l->p, sizeof(expr_ty) * new_size);
- if (!tmp) {
- PyMem_Free(l->p);
- l->p = NULL;
- return -1;
- }
- l->p = tmp;
- }
-
- l->allocated = new_size;
- assert(l->allocated == 2 * l->size);
- }
-
- l->p[l->size++] = exp;
-
- ExprList_check_invariants(l);
- return 0;
-}
-
-static void
-ExprList_Dealloc(ExprList *l)
-{
- ExprList_check_invariants(l);
-
- /* If there's been an error, or we've never dynamically allocated,
- do nothing. */
- if (!l->p || l->p == l->data) {
- /* Do nothing. */
- } else {
- /* We have dynamically allocated. Free the memory. */
- PyMem_Free(l->p);
- }
- l->p = NULL;
- l->size = -1;
-}
-
-static asdl_expr_seq *
-ExprList_Finish(ExprList *l, PyArena *arena)
-{
- asdl_expr_seq *seq;
-
- ExprList_check_invariants(l);
-
- /* Allocate the asdl_seq and copy the expressions in to it. */
- seq = _Py_asdl_expr_seq_new(l->size, arena);
- if (seq) {
- Py_ssize_t i;
- for (i = 0; i < l->size; i++) {
- asdl_seq_SET(seq, i, l->p[i]);
- }
- }
- ExprList_Dealloc(l);
- return seq;
-}
-
-#ifdef NDEBUG
-#define FstringParser_check_invariants(state)
-#else
-static void
-FstringParser_check_invariants(FstringParser *state)
-{
- if (state->last_str) {
- assert(PyUnicode_CheckExact(state->last_str));
- }
- ExprList_check_invariants(&state->expr_list);
-}
-#endif
-
-void
-_PyPegen_FstringParser_Init(FstringParser *state)
-{
- state->last_str = NULL;
- state->fmode = 0;
- ExprList_Init(&state->expr_list);
- FstringParser_check_invariants(state);
-}
-
-void
-_PyPegen_FstringParser_Dealloc(FstringParser *state)
-{
- FstringParser_check_invariants(state);
-
- Py_XDECREF(state->last_str);
- ExprList_Dealloc(&state->expr_list);
-}
-
-/* Make a Constant node, but decref the PyUnicode object being added. */
-static expr_ty
-make_str_node_and_del(Parser *p, PyObject **str, Token* first_token, Token *last_token)
-{
- PyObject *s = *str;
- PyObject *kind = NULL;
- *str = NULL;
- assert(PyUnicode_CheckExact(s));
- if (_PyArena_AddPyObject(p->arena, s) < 0) {
- Py_DECREF(s);
- return NULL;
- }
- const char* the_str = PyBytes_AsString(first_token->bytes);
- if (the_str && the_str[0] == 'u') {
- kind = _PyPegen_new_identifier(p, "u");
- }
-
- if (kind == NULL && PyErr_Occurred()) {
- return NULL;
- }
-
- return _PyAST_Constant(s, kind, first_token->lineno, first_token->col_offset,
- last_token->end_lineno, last_token->end_col_offset,
- p->arena);
-
-}
-
-
-/* Add a non-f-string (that is, a regular literal string). str is
- decref'd. */
-int
-_PyPegen_FstringParser_ConcatAndDel(FstringParser *state, PyObject *str)
-{
- FstringParser_check_invariants(state);
-
- assert(PyUnicode_CheckExact(str));
-
- if (PyUnicode_GET_LENGTH(str) == 0) {
- Py_DECREF(str);
- return 0;
- }
-
- if (!state->last_str) {
- /* We didn't have a string before, so just remember this one. */
- state->last_str = str;
- } else {
- /* Concatenate this with the previous string. */
- PyUnicode_AppendAndDel(&state->last_str, str);
- if (!state->last_str) {
- return -1;
- }
- }
- FstringParser_check_invariants(state);
- return 0;
-}
-
-/* Parse an f-string. The f-string is in *str to end, with no
- 'f' or quotes. */
-int
-_PyPegen_FstringParser_ConcatFstring(Parser *p, FstringParser *state, const char **str,
- const char *end, int raw, int recurse_lvl,
- Token *first_token, Token* t, Token *last_token)
-{
- FstringParser_check_invariants(state);
- state->fmode = 1;
-
- /* Parse the f-string. */
- while (1) {
- PyObject *literal = NULL;
- PyObject *expr_text = NULL;
- expr_ty expression = NULL;
-
- /* If there's a zero length literal in front of the
- expression, literal will be NULL. If we're at the end of
- the f-string, expression will be NULL (unless result == 1,
- see below). */
- int result = fstring_find_literal_and_expr(p, str, end, raw, recurse_lvl,
- &literal, &expr_text,
- &expression, first_token, t, last_token);
- if (result < 0) {
- return -1;
- }
-
- /* Add the literal, if any. */
- if (literal && _PyPegen_FstringParser_ConcatAndDel(state, literal) < 0) {
- Py_XDECREF(expr_text);
- return -1;
- }
- /* Add the expr_text, if any. */
- if (expr_text && _PyPegen_FstringParser_ConcatAndDel(state, expr_text) < 0) {
- return -1;
- }
-
- /* We've dealt with the literal and expr_text, their ownership has
- been transferred to the state object. Don't look at them again. */
-
- /* See if we should just loop around to get the next literal
- and expression, while ignoring the expression this
- time. This is used for un-doubling braces, as an
- optimization. */
- if (result == 1) {
- continue;
- }
-
- if (!expression) {
- /* We're done with this f-string. */
- break;
- }
-
- /* We know we have an expression. Convert any existing string
- to a Constant node. */
- if (state->last_str) {
- /* Convert the existing last_str literal to a Constant node. */
- expr_ty last_str = make_str_node_and_del(p, &state->last_str, first_token, last_token);
- if (!last_str || ExprList_Append(&state->expr_list, last_str) < 0) {
- return -1;
- }
- }
-
- if (ExprList_Append(&state->expr_list, expression) < 0) {
- return -1;
- }
- }
-
- /* If recurse_lvl is zero, then we must be at the end of the
- string. Otherwise, we must be at a right brace. */
-
- if (recurse_lvl == 0 && *str < end-1) {
- RAISE_SYNTAX_ERROR("f-string: unexpected end of string");
- return -1;
- }
- if (recurse_lvl != 0 && **str != '}') {
- RAISE_SYNTAX_ERROR("f-string: expecting '}'");
- return -1;
- }
-
- FstringParser_check_invariants(state);
- return 0;
-}
-
-/* Convert the partial state reflected in last_str and expr_list to an
- expr_ty. The expr_ty can be a Constant, or a JoinedStr. */
-expr_ty
-_PyPegen_FstringParser_Finish(Parser *p, FstringParser *state, Token* first_token,
- Token *last_token)
-{
- asdl_expr_seq *seq;
-
- FstringParser_check_invariants(state);
-
- /* If we're just a constant string with no expressions, return
- that. */
- if (!state->fmode) {
- assert(!state->expr_list.size);
- if (!state->last_str) {
- /* Create a zero length string. */
- state->last_str = PyUnicode_FromStringAndSize(NULL, 0);
- if (!state->last_str) {
- goto error;
+ return NULL;
}
}
- return make_str_node_and_del(p, &state->last_str, first_token, last_token);
- }
-
- /* Create a Constant node out of last_str, if needed. It will be the
- last node in our expression list. */
- if (state->last_str) {
- expr_ty str = make_str_node_and_del(p, &state->last_str, first_token, last_token);
- if (!str || ExprList_Append(&state->expr_list, str) < 0) {
- goto error;
+ if (rawmode) {
+ return PyBytes_FromStringAndSize(s, len);
}
+ return decode_bytes_with_escapes(p, s, len, t);
}
- /* This has already been freed. */
- assert(state->last_str == NULL);
-
- seq = ExprList_Finish(&state->expr_list, p->arena);
- if (!seq) {
- goto error;
- }
-
- return _PyAST_JoinedStr(seq, first_token->lineno, first_token->col_offset,
- last_token->end_lineno, last_token->end_col_offset,
- p->arena);
-
-error:
- _PyPegen_FstringParser_Dealloc(state);
- return NULL;
-}
-
-/* Given an f-string (with no 'f' or quotes) that's in *str and ends
- at end, parse it into an expr_ty. Return NULL on error. Adjust
- str to point past the parsed portion. */
-static expr_ty
-fstring_parse(Parser *p, const char **str, const char *end, int raw,
- int recurse_lvl, Token *first_token, Token* t, Token *last_token)
-{
- FstringParser state;
-
- _PyPegen_FstringParser_Init(&state);
- if (_PyPegen_FstringParser_ConcatFstring(p, &state, str, end, raw, recurse_lvl,
- first_token, t, last_token) < 0) {
- _PyPegen_FstringParser_Dealloc(&state);
- return NULL;
- }
-
- return _PyPegen_FstringParser_Finish(p, &state, t, t);
+ return _PyPegen_decode_string(p, rawmode, s, len, t);
}
diff --git a/Parser/string_parser.h b/Parser/string_parser.h
index 4a22f3d3086f47..0b34de1b4e41e9 100644
--- a/Parser/string_parser.h
+++ b/Parser/string_parser.h
@@ -5,42 +5,7 @@
#include
#include "pegen.h"
-#define EXPRLIST_N_CACHED 64
-
-typedef struct {
- /* Incrementally build an array of expr_ty, so be used in an
- asdl_seq. Cache some small but reasonably sized number of
- expr_ty's, and then after that start dynamically allocating,
- doubling the number allocated each time. Note that the f-string
- f'{0}a{1}' contains 3 expr_ty's: 2 FormattedValue's, and one
- Constant for the literal 'a'. So you add expr_ty's about twice as
- fast as you add expressions in an f-string. */
-
- Py_ssize_t allocated; /* Number we've allocated. */
- Py_ssize_t size; /* Number we've used. */
- expr_ty *p; /* Pointer to the memory we're actually
- using. Will point to 'data' until we
- start dynamically allocating. */
- expr_ty data[EXPRLIST_N_CACHED];
-} ExprList;
-
-/* The FstringParser is designed to add a mix of strings and
- f-strings, and concat them together as needed. Ultimately, it
- generates an expr_ty. */
-typedef struct {
- PyObject *last_str;
- ExprList expr_list;
- int fmode;
-} FstringParser;
-
-void _PyPegen_FstringParser_Init(FstringParser *);
-int _PyPegen_parsestr(Parser *, int *, int *, PyObject **,
- const char **, Py_ssize_t *, Token *);
-int _PyPegen_FstringParser_ConcatFstring(Parser *, FstringParser *, const char **,
- const char *, int, int, Token *, Token *,
- Token *);
-int _PyPegen_FstringParser_ConcatAndDel(FstringParser *, PyObject *);
-expr_ty _PyPegen_FstringParser_Finish(Parser *, FstringParser *, Token *, Token *);
-void _PyPegen_FstringParser_Dealloc(FstringParser *);
+PyObject *_PyPegen_parse_string(Parser *, Token *);
+PyObject *_PyPegen_decode_string(Parser *, int, const char *, size_t, Token *);
#endif
diff --git a/Parser/token.c b/Parser/token.c
index 6299ad2f563144..82267fbfcd0c54 100644
--- a/Parser/token.c
+++ b/Parser/token.c
@@ -60,12 +60,16 @@ const char * const _PyParser_TokenNames[] = {
"RARROW",
"ELLIPSIS",
"COLONEQUAL",
+ "EXCLAMATION",
"OP",
"AWAIT",
"ASYNC",
"TYPE_IGNORE",
"TYPE_COMMENT",
"SOFT_KEYWORD",
+ "FSTRING_START",
+ "FSTRING_MIDDLE",
+ "FSTRING_END",
"",
"",
"",
@@ -79,6 +83,7 @@ int
_PyToken_OneChar(int c1)
{
switch (c1) {
+ case '!': return EXCLAMATION;
case '%': return PERCENT;
case '&': return AMPER;
case '(': return LPAR;
diff --git a/Parser/tokenizer.c b/Parser/tokenizer.c
index 463c0e00ca1411..8de0572a1fc459 100644
--- a/Parser/tokenizer.c
+++ b/Parser/tokenizer.c
@@ -11,11 +11,6 @@
#include "tokenizer.h"
#include "errcode.h"
-#include "unicodeobject.h"
-#include "bytesobject.h"
-#include "fileobject.h"
-#include "abstract.h"
-
/* Alternate tab spacing */
#define ALTTABSIZE 1
@@ -43,6 +38,24 @@
tok->lineno++; \
tok->col_offset = 0;
+#define INSIDE_FSTRING(tok) (tok->tok_mode_stack_index > 0)
+#define INSIDE_FSTRING_EXPR(tok) (tok->curly_bracket_expr_start_depth >= 0)
+#ifdef Py_DEBUG
+static inline tokenizer_mode* TOK_GET_MODE(struct tok_state* tok) {
+ assert(tok->tok_mode_stack_index >= 0);
+ assert(tok->tok_mode_stack_index < MAXLEVEL);
+ return &(tok->tok_mode_stack[tok->tok_mode_stack_index]);
+}
+static inline tokenizer_mode* TOK_NEXT_MODE(struct tok_state* tok) {
+ assert(tok->tok_mode_stack_index >= 0);
+ assert(tok->tok_mode_stack_index < MAXLEVEL);
+ return &(tok->tok_mode_stack[++tok->tok_mode_stack_index]);
+}
+#else
+#define TOK_GET_MODE(tok) (&(tok->tok_mode_stack[tok->tok_mode_stack_index]))
+#define TOK_NEXT_MODE(tok) (&(tok->tok_mode_stack[++tok->tok_mode_stack_index]))
+#endif
+
/* Forward */
static struct tok_state *tok_new(void);
static int tok_nextc(struct tok_state *tok);
@@ -98,6 +111,9 @@ tok_new(void)
tok->interactive_underflow = IUNDERFLOW_NORMAL;
tok->str = NULL;
tok->report_warnings = 1;
+ tok->tok_mode_stack[0] = (tokenizer_mode){.kind =TOK_REGULAR_MODE, .f_string_quote='\0', .f_string_quote_size = 0, .f_string_debug=0};
+ tok->tok_mode_stack_index = 0;
+ tok->tok_report_warnings = 1;
#ifdef Py_DEBUG
tok->debug = _Py_GetConfig()->parser_debug;
#endif
@@ -345,6 +361,126 @@ tok_concatenate_interactive_new_line(struct tok_state *tok, const char *line) {
return 0;
}
+/* Traverse and remember all f-string buffers, in order to be able to restore
+ them after reallocating tok->buf */
+static void
+remember_fstring_buffers(struct tok_state *tok)
+{
+ int index;
+ tokenizer_mode *mode;
+
+ for (index = tok->tok_mode_stack_index; index >= 0; --index) {
+ mode = &(tok->tok_mode_stack[index]);
+ mode->f_string_start_offset = mode->f_string_start - tok->buf;
+ mode->f_string_multi_line_start_offset = mode->f_string_multi_line_start - tok->buf;
+ }
+}
+
+/* Traverse and restore all f-string buffers after reallocating tok->buf */
+static void
+restore_fstring_buffers(struct tok_state *tok)
+{
+ int index;
+ tokenizer_mode *mode;
+
+ for (index = tok->tok_mode_stack_index; index >= 0; --index) {
+ mode = &(tok->tok_mode_stack[index]);
+ mode->f_string_start = tok->buf + mode->f_string_start_offset;
+ mode->f_string_multi_line_start = tok->buf + mode->f_string_multi_line_start_offset;
+ }
+}
+
+static int
+set_fstring_expr(struct tok_state* tok, struct token *token, char c) {
+ assert(token != NULL);
+ assert(c == '}' || c == ':' || c == '!');
+ tokenizer_mode *tok_mode = TOK_GET_MODE(tok);
+
+ if (!tok_mode->f_string_debug || token->metadata) {
+ return 0;
+ }
+
+ PyObject *res = PyUnicode_DecodeUTF8(
+ tok_mode->last_expr_buffer,
+ tok_mode->last_expr_size - tok_mode->last_expr_end,
+ NULL
+ );
+ if (!res) {
+ return -1;
+ }
+ token->metadata = res;
+ return 0;
+}
+
+static int
+update_fstring_expr(struct tok_state *tok, char cur)
+{
+ assert(tok->cur != NULL);
+
+ Py_ssize_t size = strlen(tok->cur);
+ tokenizer_mode *tok_mode = TOK_GET_MODE(tok);
+
+ switch (cur) {
+ case 0:
+ if (!tok_mode->last_expr_buffer || tok_mode->last_expr_end >= 0) {
+ return 1;
+ }
+ char *new_buffer = PyMem_Realloc(
+ tok_mode->last_expr_buffer,
+ tok_mode->last_expr_size + size
+ );
+ if (new_buffer == NULL) {
+ PyMem_Free(tok_mode->last_expr_buffer);
+ goto error;
+ }
+ tok_mode->last_expr_buffer = new_buffer;
+ strncpy(tok_mode->last_expr_buffer + tok_mode->last_expr_size, tok->cur, size);
+ tok_mode->last_expr_size += size;
+ break;
+ case '{':
+ if (tok_mode->last_expr_buffer != NULL) {
+ PyMem_Free(tok_mode->last_expr_buffer);
+ }
+ tok_mode->last_expr_buffer = PyMem_Malloc(size);
+ if (tok_mode->last_expr_buffer == NULL) {
+ goto error;
+ }
+ tok_mode->last_expr_size = size;
+ tok_mode->last_expr_end = -1;
+ strncpy(tok_mode->last_expr_buffer, tok->cur, size);
+ break;
+ case '}':
+ case '!':
+ case ':':
+ if (tok_mode->last_expr_end == -1) {
+ tok_mode->last_expr_end = strlen(tok->start);
+ }
+ break;
+ default:
+ Py_UNREACHABLE();
+ }
+ return 1;
+error:
+ tok->done = E_NOMEM;
+ return 0;
+}
+
+static void
+free_fstring_expressions(struct tok_state *tok)
+{
+ int index;
+ tokenizer_mode *mode;
+
+ for (index = tok->tok_mode_stack_index; index >= 0; --index) {
+ mode = &(tok->tok_mode_stack[index]);
+ if (mode->last_expr_buffer != NULL) {
+ PyMem_Free(mode->last_expr_buffer);
+ mode->last_expr_buffer = NULL;
+ mode->last_expr_size = 0;
+ mode->last_expr_end = -1;
+ }
+ }
+}
/* Read a line of text from TOK into S, using the stream in TOK.
Return NULL on failure, else S.
@@ -372,6 +508,7 @@ tok_reserve_buf(struct tok_state *tok, Py_ssize_t size)
Py_ssize_t start = tok->start == NULL ? -1 : tok->start - tok->buf;
Py_ssize_t line_start = tok->start == NULL ? -1 : tok->line_start - tok->buf;
Py_ssize_t multi_line_start = tok->multi_line_start - tok->buf;
+ remember_fstring_buffers(tok);
newbuf = (char *)PyMem_Realloc(newbuf, newsize);
if (newbuf == NULL) {
tok->done = E_NOMEM;
@@ -384,6 +521,7 @@ tok_reserve_buf(struct tok_state *tok, Py_ssize_t size)
tok->start = start < 0 ? NULL : tok->buf + start;
tok->line_start = line_start < 0 ? NULL : tok->buf + line_start;
tok->multi_line_start = multi_line_start < 0 ? NULL : tok->buf + multi_line_start;
+ restore_fstring_buffers(tok);
}
return 1;
}
@@ -838,6 +976,7 @@ _PyTokenizer_Free(struct tok_state *tok)
if (tok->interactive_src_start != NULL) {
PyMem_Free(tok->interactive_src_start);
}
+ free_fstring_expressions(tok);
PyMem_Free(tok);
}
@@ -854,6 +993,9 @@ tok_readline_raw(struct tok_state *tok)
if (line == NULL) {
return 1;
}
+ if (tok->tok_mode_stack_index && !update_fstring_expr(tok, 0)) {
+ return 0;
+ }
if (tok->fp_interactive &&
tok_concatenate_interactive_new_line(tok, line) == -1) {
return 0;
@@ -941,6 +1083,7 @@ tok_underflow_interactive(struct tok_state *tok) {
}
else if (tok->start != NULL) {
Py_ssize_t cur_multi_line_start = tok->multi_line_start - tok->buf;
+ remember_fstring_buffers(tok);
size_t size = strlen(newtok);
ADVANCE_LINENO();
if (!tok_reserve_buf(tok, size + 1)) {
@@ -953,8 +1096,10 @@ tok_underflow_interactive(struct tok_state *tok) {
PyMem_Free(newtok);
tok->inp += size;
tok->multi_line_start = tok->buf + cur_multi_line_start;
+ restore_fstring_buffers(tok);
}
else {
+ remember_fstring_buffers(tok);
ADVANCE_LINENO();
PyMem_Free(tok->buf);
tok->buf = newtok;
@@ -962,6 +1107,7 @@ tok_underflow_interactive(struct tok_state *tok) {
tok->line_start = tok->buf;
tok->inp = strchr(tok->buf, '\0');
tok->end = tok->inp + 1;
+ restore_fstring_buffers(tok);
}
if (tok->done != E_OK) {
if (tok->prompt != NULL) {
@@ -969,6 +1115,10 @@ tok_underflow_interactive(struct tok_state *tok) {
}
return 0;
}
+
+ if (tok->tok_mode_stack_index && !update_fstring_expr(tok, 0)) {
+ return 0;
+ }
return 1;
}
@@ -1073,7 +1223,7 @@ tok_nextc(struct tok_state *tok)
return Py_CHARMASK(*tok->cur++); /* Fast path */
}
if (tok->done != E_OK) {
- return EOF;
+ return EOF;
}
if (tok->fp == NULL) {
rc = tok_underflow_string(tok);
@@ -1115,7 +1265,7 @@ tok_backup(struct tok_state *tok, int c)
if (--tok->cur < tok->buf) {
Py_FatalError("tokenizer beginning of buffer");
}
- if ((int)(unsigned char)*tok->cur != c) {
+ if ((int)(unsigned char)*tok->cur != Py_CHARMASK(c)) {
Py_FatalError("tok_backup: wrong character");
}
tok->col_offset--;
@@ -1172,6 +1322,7 @@ _syntaxerror_range(struct tok_state *tok, const char *format,
static int
syntaxerror(struct tok_state *tok, const char *format, ...)
{
+ // These errors are cleared on startup. TODO: Fix it.
va_list vargs;
va_start(vargs, format);
int ret = _syntaxerror_range(tok, format, -1, -1, vargs);
@@ -1234,6 +1385,41 @@ parser_warn(struct tok_state *tok, PyObject *category, const char *format, ...)
return -1;
}
+static int
+warn_invalid_escape_sequence(struct tok_state *tok, int first_invalid_escape_char)
+{
+
+ if (!tok->tok_report_warnings) {
+ return 0;
+ }
+
+ PyObject *msg = PyUnicode_FromFormat(
+ "invalid escape sequence '\\%c'",
+ (char) first_invalid_escape_char
+ );
+
+ if (msg == NULL) {
+ return -1;
+ }
+
+ if (PyErr_WarnExplicitObject(PyExc_DeprecationWarning, msg, tok->filename,
+ tok->lineno, NULL, NULL) < 0) {
+ Py_DECREF(msg);
+
+ if (PyErr_ExceptionMatches(PyExc_DeprecationWarning)) {
+ /* Replace the DeprecationWarning exception with a SyntaxError
+ to get a more accurate error report */
+ PyErr_Clear();
+ return syntaxerror(tok, "invalid escape sequence '\\%c'", (char) first_invalid_escape_char);
+ }
+
+ return -1;
+ }
+
+ Py_DECREF(msg);
+ return 0;
+}
+
static int
lookahead(struct tok_state *tok, const char *test)
{
@@ -1389,7 +1575,6 @@ tok_decimal_tail(struct tok_state *tok)
return c;
}
-/* Get next token, after space stripping etc. */
static inline int
tok_continuation_line(struct tok_state *tok) {
@@ -1427,7 +1612,12 @@ token_setup(struct tok_state *tok, struct token *token, int type, const char *st
{
assert((start == NULL && end == NULL) || (start != NULL && end != NULL));
token->level = tok->level;
- token->lineno = type == STRING ? tok->first_lineno : tok->lineno;
+ if (ISSTRINGLIT(type)) {
+ token->lineno = tok->first_lineno;
+ }
+ else {
+ token->lineno = tok->lineno;
+ }
token->end_lineno = tok->lineno;
token->col_offset = token->end_col_offset = -1;
token->start = start;
@@ -1441,7 +1631,7 @@ token_setup(struct tok_state *tok, struct token *token, int type, const char *st
}
static int
-tok_get(struct tok_state *tok, struct token *token)
+tok_get_normal_mode(struct tok_state *tok, tokenizer_mode* current_tok, struct token *token)
{
int c;
int blankline, nonascii;
@@ -1602,6 +1792,11 @@ tok_get(struct tok_state *tok, struct token *token)
/* Skip comment, unless it's a type comment */
if (c == '#') {
+
+ if (INSIDE_FSTRING(tok)) {
+ return MAKE_TOKEN(syntaxerror(tok, "f-string expression part cannot include '#'"));
+ }
+
const char *prefix, *p, *type_start;
int current_starting_col_offset;
@@ -1703,6 +1898,9 @@ tok_get(struct tok_state *tok, struct token *token)
}
c = tok_nextc(tok);
if (c == '"' || c == '\'') {
+ if (saw_f) {
+ goto f_string_quote;
+ }
goto letter_quote;
}
}
@@ -1748,7 +1946,9 @@ tok_get(struct tok_state *tok, struct token *token)
int ahead_tok_kind;
memcpy(&ahead_tok, tok, sizeof(ahead_tok));
- ahead_tok_kind = tok_get(&ahead_tok, &ahead_token);
+ ahead_tok_kind = tok_get_normal_mode(&ahead_tok,
+ current_tok,
+ &ahead_token);
if (ahead_tok_kind == NAME
&& ahead_tok.cur - ahead_tok.start == 3
@@ -2003,6 +2203,69 @@ tok_get(struct tok_state *tok, struct token *token)
return MAKE_TOKEN(NUMBER);
}
+ f_string_quote:
+ if (((tolower(*tok->start) == 'f' || tolower(*tok->start) == 'r') && (c == '\'' || c == '"'))) {
+ int quote = c;
+ int quote_size = 1; /* 1 or 3 */
+
+ /* Nodes of type STRING, especially multi line strings
+ must be handled differently in order to get both
+ the starting line number and the column offset right.
+ (cf. issue 16806) */
+ tok->first_lineno = tok->lineno;
+ tok->multi_line_start = tok->line_start;
+
+ /* Find the quote size and start of string */
+ int after_quote = tok_nextc(tok);
+ if (after_quote == quote) {
+ int after_after_quote = tok_nextc(tok);
+ if (after_after_quote == quote) {
+ quote_size = 3;
+ }
+ else {
+ // TODO: Check this
+ tok_backup(tok, after_after_quote);
+ tok_backup(tok, after_quote);
+ }
+ }
+ if (after_quote != quote) {
+ tok_backup(tok, after_quote);
+ }
+
+
+ p_start = tok->start;
+ p_end = tok->cur;
+ tokenizer_mode *the_current_tok = TOK_NEXT_MODE(tok);
+ the_current_tok->kind = TOK_FSTRING_MODE;
+ the_current_tok->f_string_quote = quote;
+ the_current_tok->f_string_quote_size = quote_size;
+ the_current_tok->f_string_start = tok->start;
+ the_current_tok->f_string_multi_line_start = tok->line_start;
+ the_current_tok->f_string_start_offset = -1;
+ the_current_tok->f_string_multi_line_start_offset = -1;
+ the_current_tok->last_expr_buffer = NULL;
+ the_current_tok->last_expr_size = 0;
+ the_current_tok->last_expr_end = -1;
+ the_current_tok->f_string_debug = 0;
+
+ switch (*tok->start) {
+ case 'F':
+ case 'f':
+ the_current_tok->f_string_raw = tolower(*(tok->start + 1)) == 'r';
+ break;
+ case 'R':
+ case 'r':
+ the_current_tok->f_string_raw = 1;
+ break;
+ default:
+ Py_UNREACHABLE();
+ }
+
+ the_current_tok->curly_bracket_depth = 0;
+ the_current_tok->curly_bracket_expr_start_depth = -1;
+ return MAKE_TOKEN(FSTRING_START);
+ }
+
letter_quote:
/* String */
if (c == '\'' || c == '"') {
@@ -2047,6 +2310,20 @@ tok_get(struct tok_state *tok, struct token *token)
tok->line_start = tok->multi_line_start;
int start = tok->lineno;
tok->lineno = tok->first_lineno;
+
+ if (INSIDE_FSTRING(tok)) {
+ /* When we are in an f-string, before raising the
+ * unterminated string literal error, check whether
+ * the initial quote matches the f-string's quotes;
+ * if so, this must be a missing '}' token, so
+ * raise the proper error */
+ tokenizer_mode *the_current_tok = TOK_GET_MODE(tok);
+ if (the_current_tok->f_string_quote == quote &&
+ the_current_tok->f_string_quote_size == quote_size) {
+ return MAKE_TOKEN(syntaxerror(tok, "f-string: expecting '}'", start));
+ }
+ }
+
if (quote_size == 3) {
syntaxerror(tok, "unterminated triple-quoted string literal"
" (detected at line %d)", start);
@@ -2089,6 +2366,28 @@ tok_get(struct tok_state *tok, struct token *token)
goto again; /* Read next line */
}
+ /* Punctuation character */
+ int is_punctuation = (c == ':' || c == '}' || c == '!' || c == '{');
+ if (is_punctuation && INSIDE_FSTRING(tok) && INSIDE_FSTRING_EXPR(current_tok)) {
+ /* This code block gets executed before the curly_bracket_depth is incremented
+ * by the `{` case, so for ensuring that we are on the 0th level, we need
+ * to adjust it manually */
+ int cursor = current_tok->curly_bracket_depth - (c != '{');
+ if (cursor == 0 && !update_fstring_expr(tok, c)) {
+ return MAKE_TOKEN(ENDMARKER);
+ }
+ if (cursor == 0 && c != '{' && set_fstring_expr(tok, token, c)) {
+ return MAKE_TOKEN(ERRORTOKEN);
+ }
+
+ if (c == ':' && cursor == current_tok->curly_bracket_expr_start_depth) {
+ current_tok->kind = TOK_FSTRING_MODE;
+ p_start = tok->start;
+ p_end = tok->cur;
+ return MAKE_TOKEN(_PyToken_OneChar(c));
+ }
+ }
+
/* Check for two-character token */
{
int c2 = tok_nextc(tok);
@@ -2121,11 +2420,17 @@ tok_get(struct tok_state *tok, struct token *token)
tok->parenlinenostack[tok->level] = tok->lineno;
tok->parencolstack[tok->level] = (int)(tok->start - tok->line_start);
tok->level++;
+ if (INSIDE_FSTRING(tok)) {
+ current_tok->curly_bracket_depth++;
+ }
break;
case ')':
case ']':
case '}':
if (!tok->level) {
+ if (INSIDE_FSTRING(tok) && !current_tok->curly_bracket_depth && c == '}') {
+ return MAKE_TOKEN(syntaxerror(tok, "f-string: single '}' is not allowed"));
+ }
return MAKE_TOKEN(syntaxerror(tok, "unmatched '%c'", c));
}
tok->level--;
@@ -2134,6 +2439,18 @@ tok_get(struct tok_state *tok, struct token *token)
(opening == '[' && c == ']') ||
(opening == '{' && c == '}')))
{
+ /* If the opening bracket belongs to an f-string's expression
+ part (e.g. f"{)}") and the closing bracket is an arbitrary
+ nested expression, then instead of matching a different
+ syntactical construct with it; we'll throw an unmatched
+ parentheses error. */
+ if (INSIDE_FSTRING(tok) && opening == '{') {
+ assert(current_tok->curly_bracket_depth >= 0);
+ int previous_bracket = current_tok->curly_bracket_depth - 1;
+ if (previous_bracket == current_tok->curly_bracket_expr_start_depth) {
+ return MAKE_TOKEN(syntaxerror(tok, "f-string: unmatched '%c'", c));
+ }
+ }
if (tok->parenlinenostack[tok->level] != tok->lineno) {
return MAKE_TOKEN(syntaxerror(tok,
"closing parenthesis '%c' does not match "
@@ -2147,6 +2464,17 @@ tok_get(struct tok_state *tok, struct token *token)
c, opening));
}
}
+
+ if (INSIDE_FSTRING(tok)) {
+ current_tok->curly_bracket_depth--;
+ if (c == '}' && current_tok->curly_bracket_depth == current_tok->curly_bracket_expr_start_depth) {
+ current_tok->curly_bracket_expr_start_depth--;
+ current_tok->kind = TOK_FSTRING_MODE;
+ current_tok->f_string_debug = 0;
+ }
+ }
+ break;
+ default:
break;
}
@@ -2156,12 +2484,201 @@ tok_get(struct tok_state *tok, struct token *token)
return MAKE_TOKEN(syntaxerror(tok, "invalid non-printable character U+%s", hex));
}
+ if( c == '=' && INSIDE_FSTRING_EXPR(current_tok)) {
+ current_tok->f_string_debug = 1;
+ }
+
/* Punctuation character */
p_start = tok->start;
p_end = tok->cur;
return MAKE_TOKEN(_PyToken_OneChar(c));
}
+static int
+tok_get_fstring_mode(struct tok_state *tok, tokenizer_mode* current_tok, struct token *token)
+{
+ const char *p_start = NULL;
+ const char *p_end = NULL;
+ int end_quote_size = 0;
+ int unicode_escape = 0;
+
+ tok->start = tok->cur;
+ tok->first_lineno = tok->lineno;
+ tok->starting_col_offset = tok->col_offset;
+
+ // If we start with a bracket, we defer to the normal mode as there is nothing for us to tokenize
+ // before it.
+ int start_char = tok_nextc(tok);
+ if (start_char == '{') {
+ int peek1 = tok_nextc(tok);
+ tok_backup(tok, peek1);
+ tok_backup(tok, start_char);
+ if (peek1 != '{') {
+ current_tok->curly_bracket_expr_start_depth++;
+ if (current_tok->curly_bracket_expr_start_depth >= MAX_EXPR_NESTING) {
+ return MAKE_TOKEN(syntaxerror(tok, "f-string: expressions nested too deeply"));
+ }
+ TOK_GET_MODE(tok)->kind = TOK_REGULAR_MODE;
+ return tok_get_normal_mode(tok, current_tok, token);
+ }
+ }
+ else {
+ tok_backup(tok, start_char);
+ }
+
+ // Check if we are at the end of the string
+ for (int i = 0; i < current_tok->f_string_quote_size; i++) {
+ int quote = tok_nextc(tok);
+ if (quote != current_tok->f_string_quote) {
+ tok_backup(tok, quote);
+ goto f_string_middle;
+ }
+ }
+
+ if (current_tok->last_expr_buffer != NULL) {
+ PyMem_Free(current_tok->last_expr_buffer);
+ current_tok->last_expr_buffer = NULL;
+ current_tok->last_expr_size = 0;
+ current_tok->last_expr_end = -1;
+ }
+
+ p_start = tok->start;
+ p_end = tok->cur;
+ tok->tok_mode_stack_index--;
+ return MAKE_TOKEN(FSTRING_END);
+
+f_string_middle:
+
+ while (end_quote_size != current_tok->f_string_quote_size) {
+ int c = tok_nextc(tok);
+ if (c == EOF || (current_tok->f_string_quote_size == 1 && c == '\n')) {
+ assert(tok->multi_line_start != NULL);
+ // shift the tok_state's location into
+ // the start of string, and report the error
+ // from the initial quote character
+ tok->cur = (char *)current_tok->f_string_start;
+ tok->cur++;
+ tok->line_start = current_tok->f_string_multi_line_start;
+ int start = tok->lineno;
+ tok->lineno = tok->first_lineno;
+
+ if (current_tok->f_string_quote_size == 3) {
+ return MAKE_TOKEN(syntaxerror(tok,
+ "unterminated triple-quoted f-string literal"
+ " (detected at line %d)", start));
+ }
+ else {
+ return MAKE_TOKEN(syntaxerror(tok,
+ "unterminated f-string literal (detected at"
+ " line %d)", start));
+ }
+ }
+
+ if (c == current_tok->f_string_quote) {
+ end_quote_size += 1;
+ continue;
+ } else {
+ end_quote_size = 0;
+ }
+
+ int in_format_spec = (
+ current_tok->last_expr_end != -1
+ &&
+ INSIDE_FSTRING_EXPR(current_tok)
+ );
+ if (c == '{') {
+ int peek = tok_nextc(tok);
+ if (peek != '{' || in_format_spec) {
+ tok_backup(tok, peek);
+ tok_backup(tok, c);
+ current_tok->curly_bracket_expr_start_depth++;
+ if (current_tok->curly_bracket_expr_start_depth >= MAX_EXPR_NESTING) {
+ return MAKE_TOKEN(syntaxerror(tok, "f-string: expressions nested too deeply"));
+ }
+ TOK_GET_MODE(tok)->kind = TOK_REGULAR_MODE;
+ p_start = tok->start;
+ p_end = tok->cur;
+ } else {
+ p_start = tok->start;
+ p_end = tok->cur - 1;
+ }
+ return MAKE_TOKEN(FSTRING_MIDDLE);
+ } else if (c == '}') {
+ if (unicode_escape) {
+ p_start = tok->start;
+ p_end = tok->cur;
+ return MAKE_TOKEN(FSTRING_MIDDLE);
+ }
+ int peek = tok_nextc(tok);
+
+ // The tokenizer can only be in the format spec if we have already completed the expression
+ // scanning (indicated by the end of the expression being set) and we are not at the top level
+ // of the bracket stack (-1 is the top level). Since format specifiers can't legally use double
+ // brackets, we can bypass it here.
+ if (peek == '}' && !in_format_spec) {
+ p_start = tok->start;
+ p_end = tok->cur - 1;
+ } else {
+ tok_backup(tok, peek);
+ tok_backup(tok, c);
+ TOK_GET_MODE(tok)->kind = TOK_REGULAR_MODE;
+ p_start = tok->start;
+ p_end = tok->cur;
+ }
+ return MAKE_TOKEN(FSTRING_MIDDLE);
+ } else if (c == '\\') {
+ int peek = tok_nextc(tok);
+ // Special case when the backslash is right before a curly
+ // brace. We have to restore and return the control back
+ // to the loop for the next iteration.
+ if (peek == '{' || peek == '}') {
+ if (!current_tok->f_string_raw) {
+ if (warn_invalid_escape_sequence(tok, peek)) {
+ return MAKE_TOKEN(ERRORTOKEN);
+ }
+ }
+ tok_backup(tok, peek);
+ continue;
+ }
+
+ if (!current_tok->f_string_raw) {
+ if (peek == 'N') {
+ /* Handle named unicode escapes (\N{BULLET}) */
+ peek = tok_nextc(tok);
+ if (peek == '{') {
+ unicode_escape = 1;
+ } else {
+ tok_backup(tok, peek);
+ }
+ }
+ } /* else {
+ skip the escaped character
+ }*/
+ }
+ }
+
+ // Backup the f-string quotes to emit a final FSTRING_MIDDLE and
+ // add the quotes to the FSTRING_END in the next tokenizer iteration.
+ for (int i = 0; i < current_tok->f_string_quote_size; i++) {
+ tok_backup(tok, current_tok->f_string_quote);
+ }
+ p_start = tok->start;
+ p_end = tok->cur;
+ return MAKE_TOKEN(FSTRING_MIDDLE);
+}
+
+
+static int
+tok_get(struct tok_state *tok, struct token *token)
+{
+ tokenizer_mode *current_tok = TOK_GET_MODE(tok);
+ if (current_tok->kind == TOK_REGULAR_MODE) {
+ return tok_get_normal_mode(tok, current_tok, token);
+ } else {
+ return tok_get_fstring_mode(tok, current_tok, token);
+ }
+}
+
int
_PyTokenizer_Get(struct tok_state *tok, struct token *token)
{
diff --git a/Parser/tokenizer.h b/Parser/tokenizer.h
index 16a94d5f51d664..8b4213c4ce3b5a 100644
--- a/Parser/tokenizer.h
+++ b/Parser/tokenizer.h
@@ -31,8 +31,37 @@ struct token {
int level;
int lineno, col_offset, end_lineno, end_col_offset;
const char *start, *end;
+ PyObject *metadata;
};
+enum tokenizer_mode_kind_t {
+ TOK_REGULAR_MODE,
+ TOK_FSTRING_MODE,
+};
+
+#define MAX_EXPR_NESTING 3
+
+typedef struct _tokenizer_mode {
+ enum tokenizer_mode_kind_t kind;
+
+ int curly_bracket_depth;
+ int curly_bracket_expr_start_depth;
+
+ char f_string_quote;
+ int f_string_quote_size;
+ int f_string_raw;
+ const char* f_string_start;
+ const char* f_string_multi_line_start;
+
+ Py_ssize_t f_string_start_offset;
+ Py_ssize_t f_string_multi_line_start_offset;
+
+ Py_ssize_t last_expr_size;
+ Py_ssize_t last_expr_end;
+ char* last_expr_buffer;
+ int f_string_debug;
+} tokenizer_mode;
+
/* Tokenizer state */
struct tok_state {
/* Input state; buf <= cur <= inp <= end */
@@ -93,6 +122,10 @@ struct tok_state {
/* How to proceed when asked for a new token in interactive mode */
enum interactive_underflow_t interactive_underflow;
int report_warnings;
+ // TODO: Factor this into its own thing
+ tokenizer_mode tok_mode_stack[MAXLEVEL];
+ int tok_mode_stack_index;
+ int tok_report_warnings;
#ifdef Py_DEBUG
int debug;
#endif
diff --git a/Programs/_testembed.c b/Programs/_testembed.c
index 00717114b40286..f78ba41fe7b4eb 100644
--- a/Programs/_testembed.c
+++ b/Programs/_testembed.c
@@ -1911,14 +1911,13 @@ static int test_unicode_id_init(void)
str1 = _PyUnicode_FromId(&PyId_test_unicode_id_init);
assert(str1 != NULL);
- assert(Py_REFCNT(str1) == 1);
+ assert(_Py_IsImmortal(str1));
str2 = PyUnicode_FromString("test_unicode_id_init");
assert(str2 != NULL);
assert(PyUnicode_Compare(str1, str2) == 0);
- // str1 is a borrowed reference
Py_DECREF(str2);
Py_Finalize();
diff --git a/Programs/test_frozenmain.h b/Programs/test_frozenmain.h
index 4ac472a88261e1..cd9d1032629f49 100644
--- a/Programs/test_frozenmain.h
+++ b/Programs/test_frozenmain.h
@@ -27,12 +27,12 @@ unsigned char M_test_frozenmain[] = {
218,3,107,101,121,169,0,243,0,0,0,0,250,18,116,101,
115,116,95,102,114,111,122,101,110,109,97,105,110,46,112,121,
250,8,60,109,111,100,117,108,101,62,114,18,0,0,0,1,
- 0,0,0,115,100,0,0,0,240,3,1,1,1,243,8,0,
+ 0,0,0,115,102,0,0,0,240,3,1,1,1,243,8,0,
1,11,219,0,24,225,0,5,208,6,26,212,0,27,217,0,
5,128,106,144,35,151,40,145,40,212,0,27,216,9,38,208,
9,26,215,9,38,209,9,38,211,9,40,168,24,209,9,50,
128,6,240,2,6,12,2,242,0,7,1,42,128,67,241,14,
- 0,5,10,208,10,40,144,67,209,10,40,152,54,160,35,153,
- 59,209,10,40,213,4,41,241,15,7,1,42,114,16,0,0,
- 0,
+ 0,5,10,136,71,144,67,144,53,152,2,152,54,160,35,153,
+ 59,152,45,208,10,40,213,4,41,241,15,7,1,42,114,16,
+ 0,0,0,
};
diff --git a/Python/Python-tokenize.c b/Python/Python-tokenize.c
index 8daa9877254e2e..416dc5971bca3d 100644
--- a/Python/Python-tokenize.c
+++ b/Python/Python-tokenize.c
@@ -86,8 +86,8 @@ tokenizeriter_next(tokenizeriterobject *it)
Py_DECREF(str);
return NULL;
}
- const char *line_start = type == STRING ? it->tok->multi_line_start : it->tok->line_start;
- int lineno = type == STRING ? it->tok->first_lineno : it->tok->lineno;
+ const char *line_start = ISSTRINGLIT(type) ? it->tok->multi_line_start : it->tok->line_start;
+ int lineno = ISSTRINGLIT(type) ? it->tok->first_lineno : it->tok->lineno;
int end_lineno = it->tok->lineno;
int col_offset = -1;
int end_col_offset = -1;
diff --git a/Python/assemble.c b/Python/assemble.c
index e5a361b230cf1c..369dd8dcde9b9b 100644
--- a/Python/assemble.c
+++ b/Python/assemble.c
@@ -1,10 +1,10 @@
 #include <stdbool.h>
#include "Python.h"
-#include "pycore_flowgraph.h"
+#include "pycore_code.h" // write_location_entry_start()
#include "pycore_compile.h"
+#include "pycore_opcode.h" // _PyOpcode_Caches[] and opcode category macros
#include "pycore_pymem.h" // _PyMem_IsPtrFreed()
-#include "pycore_code.h" // write_location_entry_start()
#define DEFAULT_CODE_SIZE 128
@@ -22,8 +22,8 @@
}
typedef _PyCompilerSrcLocation location;
-typedef _PyCfgInstruction cfg_instr;
-typedef _PyCfgBasicblock basicblock;
+typedef _PyCompile_Instruction instruction;
+typedef _PyCompile_InstructionSequence instr_sequence;
static inline bool
same_location(location a, location b)
@@ -117,7 +117,8 @@ assemble_emit_exception_table_item(struct assembler *a, int value, int msb)
#define MAX_SIZE_OF_ENTRY 20
static int
-assemble_emit_exception_table_entry(struct assembler *a, int start, int end, basicblock *handler)
+assemble_emit_exception_table_entry(struct assembler *a, int start, int end,
+ _PyCompile_ExceptHandlerInfo *handler)
{
Py_ssize_t len = PyBytes_GET_SIZE(a->a_except_table);
if (a->a_except_table_off + MAX_SIZE_OF_ENTRY >= len) {
@@ -125,13 +126,13 @@ assemble_emit_exception_table_entry(struct assembler *a, int start, int end, bas
}
int size = end-start;
assert(end > start);
- int target = handler->b_offset;
- int depth = handler->b_startdepth - 1;
- if (handler->b_preserve_lasti) {
+ int target = handler->h_offset;
+ int depth = handler->h_startdepth - 1;
+ if (handler->h_preserve_lasti) {
depth -= 1;
}
assert(depth >= 0);
- int depth_lasti = (depth<<1) | handler->b_preserve_lasti;
+ int depth_lasti = (depth<<1) | handler->h_preserve_lasti;
assemble_emit_exception_table_item(a, start, (1<<7));
assemble_emit_exception_table_item(a, size, 0);
assemble_emit_exception_table_item(a, target, 0);
@@ -140,29 +141,26 @@ assemble_emit_exception_table_entry(struct assembler *a, int start, int end, bas
}
static int
-assemble_exception_table(struct assembler *a, basicblock *entryblock)
+assemble_exception_table(struct assembler *a, instr_sequence *instrs)
{
- basicblock *b;
int ioffset = 0;
- basicblock *handler = NULL;
+ _PyCompile_ExceptHandlerInfo handler;
+ handler.h_offset = -1;
int start = -1;
- for (b = entryblock; b != NULL; b = b->b_next) {
- ioffset = b->b_offset;
- for (int i = 0; i < b->b_iused; i++) {
- cfg_instr *instr = &b->b_instr[i];
- if (instr->i_except != handler) {
- if (handler != NULL) {
- RETURN_IF_ERROR(
- assemble_emit_exception_table_entry(a, start, ioffset, handler));
- }
- start = ioffset;
- handler = instr->i_except;
+ for (int i = 0; i < instrs->s_used; i++) {
+ instruction *instr = &instrs->s_instrs[i];
+ if (instr->i_except_handler_info.h_offset != handler.h_offset) {
+ if (handler.h_offset >= 0) {
+ RETURN_IF_ERROR(
+ assemble_emit_exception_table_entry(a, start, ioffset, &handler));
}
- ioffset += _PyCfg_InstrSize(instr);
+ start = ioffset;
+ handler = instr->i_except_handler_info;
}
+ ioffset += _PyCompile_InstrSize(instr->i_opcode, instr->i_oparg);
}
- if (handler != NULL) {
- RETURN_IF_ERROR(assemble_emit_exception_table_entry(a, start, ioffset, handler));
+ if (handler.h_offset >= 0) {
+ RETURN_IF_ERROR(assemble_emit_exception_table_entry(a, start, ioffset, &handler));
}
return SUCCESS;
}
@@ -316,31 +314,31 @@ assemble_emit_location(struct assembler* a, location loc, int isize)
}
static int
-assemble_location_info(struct assembler *a, basicblock *entryblock, int firstlineno)
+assemble_location_info(struct assembler *a, instr_sequence *instrs,
+ int firstlineno)
{
a->a_lineno = firstlineno;
location loc = NO_LOCATION;
int size = 0;
- for (basicblock *b = entryblock; b != NULL; b = b->b_next) {
- for (int j = 0; j < b->b_iused; j++) {
- if (!same_location(loc, b->b_instr[j].i_loc)) {
+ for (int i = 0; i < instrs->s_used; i++) {
+ instruction *instr = &instrs->s_instrs[i];
+ if (!same_location(loc, instr->i_loc)) {
RETURN_IF_ERROR(assemble_emit_location(a, loc, size));
- loc = b->b_instr[j].i_loc;
+ loc = instr->i_loc;
size = 0;
- }
- size += _PyCfg_InstrSize(&b->b_instr[j]);
}
+ size += _PyCompile_InstrSize(instr->i_opcode, instr->i_oparg);
}
RETURN_IF_ERROR(assemble_emit_location(a, loc, size));
return SUCCESS;
}
static void
-write_instr(_Py_CODEUNIT *codestr, cfg_instr *instruction, int ilen)
+write_instr(_Py_CODEUNIT *codestr, instruction *instr, int ilen)
{
- int opcode = instruction->i_opcode;
+ int opcode = instr->i_opcode;
assert(!IS_PSEUDO_OPCODE(opcode));
- int oparg = instruction->i_oparg;
+ int oparg = instr->i_oparg;
assert(HAS_ARG(opcode) || oparg == 0);
int caches = _PyOpcode_Caches[opcode];
switch (ilen - caches) {
@@ -380,12 +378,12 @@ write_instr(_Py_CODEUNIT *codestr, cfg_instr *instruction, int ilen)
*/
static int
-assemble_emit_instr(struct assembler *a, cfg_instr *i)
+assemble_emit_instr(struct assembler *a, instruction *instr)
{
Py_ssize_t len = PyBytes_GET_SIZE(a->a_bytecode);
_Py_CODEUNIT *code;
- int size = _PyCfg_InstrSize(i);
+ int size = _PyCompile_InstrSize(instr->i_opcode, instr->i_oparg);
if (a->a_offset + size >= len / (int)sizeof(_Py_CODEUNIT)) {
if (len > PY_SSIZE_T_MAX / 2) {
return ERROR;
@@ -394,25 +392,24 @@ assemble_emit_instr(struct assembler *a, cfg_instr *i)
}
code = (_Py_CODEUNIT *)PyBytes_AS_STRING(a->a_bytecode) + a->a_offset;
a->a_offset += size;
- write_instr(code, i, size);
+ write_instr(code, instr, size);
return SUCCESS;
}
static int
-assemble_emit(struct assembler *a, basicblock *entryblock, int first_lineno,
- PyObject *const_cache)
+assemble_emit(struct assembler *a, instr_sequence *instrs,
+ int first_lineno, PyObject *const_cache)
{
RETURN_IF_ERROR(assemble_init(a, first_lineno));
- for (basicblock *b = entryblock; b != NULL; b = b->b_next) {
- for (int j = 0; j < b->b_iused; j++) {
- RETURN_IF_ERROR(assemble_emit_instr(a, &b->b_instr[j]));
- }
+ for (int i = 0; i < instrs->s_used; i++) {
+ instruction *instr = &instrs->s_instrs[i];
+ RETURN_IF_ERROR(assemble_emit_instr(a, instr));
}
- RETURN_IF_ERROR(assemble_location_info(a, entryblock, a->a_lineno));
+ RETURN_IF_ERROR(assemble_location_info(a, instrs, a->a_lineno));
- RETURN_IF_ERROR(assemble_exception_table(a, entryblock));
+ RETURN_IF_ERROR(assemble_exception_table(a, instrs));
RETURN_IF_ERROR(_PyBytes_Resize(&a->a_except_table, a->a_except_table_off));
RETURN_IF_ERROR(_PyCompile_ConstCacheMergeOne(const_cache, &a->a_except_table));
@@ -586,13 +583,13 @@ makecode(_PyCompile_CodeUnitMetadata *umd, struct assembler *a, PyObject *const_
PyCodeObject *
_PyAssemble_MakeCodeObject(_PyCompile_CodeUnitMetadata *umd, PyObject *const_cache,
- PyObject *consts, int maxdepth, basicblock *entryblock,
+ PyObject *consts, int maxdepth, instr_sequence *instrs,
int nlocalsplus, int code_flags, PyObject *filename)
{
PyCodeObject *co = NULL;
struct assembler a;
- int res = assemble_emit(&a, entryblock, umd->u_firstlineno, const_cache);
+ int res = assemble_emit(&a, instrs, umd->u_firstlineno, const_cache);
if (res == SUCCESS) {
co = makecode(umd, &a, const_cache, consts, maxdepth, nlocalsplus,
code_flags, filename);
diff --git a/Python/bltinmodule.c b/Python/bltinmodule.c
index fcb4d7a9a975c6..8840bbabe4b584 100644
--- a/Python/bltinmodule.c
+++ b/Python/bltinmodule.c
@@ -2316,7 +2316,7 @@ builtin_round_impl(PyObject *module, PyObject *number, PyObject *ndigits)
{
PyObject *round, *result;
- if (Py_TYPE(number)->tp_dict == NULL) {
+ if (!_PyType_IsReady(Py_TYPE(number))) {
if (PyType_Ready(Py_TYPE(number)) < 0)
return NULL;
}
diff --git a/Python/bytecodes.c b/Python/bytecodes.c
index 7af96b4a5e324e..9de0d92e382d3d 100644
--- a/Python/bytecodes.c
+++ b/Python/bytecodes.c
@@ -25,6 +25,7 @@
#include "pycore_sliceobject.h" // _PyBuildSlice_ConsumeRefs
#include "pycore_sysmodule.h" // _PySys_Audit()
#include "pycore_tuple.h" // _PyTuple_ITEMS()
+#include "pycore_typeobject.h" // _PySuper_Lookup()
#include "pycore_emscripten_signal.h" // _Py_CHECK_EMSCRIPTEN_SIGNALS
#include "pycore_dict.h"
@@ -1553,6 +1554,49 @@ dummy_func(
PREDICT(JUMP_BACKWARD);
}
+ family(load_super_attr, INLINE_CACHE_ENTRIES_LOAD_SUPER_ATTR) = {
+ LOAD_SUPER_ATTR,
+ LOAD_SUPER_ATTR_METHOD,
+ };
+
+ inst(LOAD_SUPER_ATTR, (unused/9, global_super, class, self -- res2 if (oparg & 1), res)) {
+ PyObject *name = GETITEM(frame->f_code->co_names, oparg >> 2);
+ int load_method = oparg & 1;
+ #if ENABLE_SPECIALIZATION
+ _PySuperAttrCache *cache = (_PySuperAttrCache *)next_instr;
+ if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
+ next_instr--;
+ _Py_Specialize_LoadSuperAttr(global_super, class, self, next_instr, name, load_method);
+ DISPATCH_SAME_OPARG();
+ }
+ STAT_INC(LOAD_SUPER_ATTR, deferred);
+ DECREMENT_ADAPTIVE_COUNTER(cache->counter);
+ #endif /* ENABLE_SPECIALIZATION */
+
+ // we make no attempt to optimize here; specializations should
+ // handle any case whose performance we care about
+ PyObject *stack[] = {class, self};
+ PyObject *super = PyObject_Vectorcall(global_super, stack, oparg & 2, NULL);
+ DECREF_INPUTS();
+ ERROR_IF(super == NULL, error);
+ res = PyObject_GetAttr(super, name);
+ Py_DECREF(super);
+ ERROR_IF(res == NULL, error);
+ }
+
+ inst(LOAD_SUPER_ATTR_METHOD, (unused/1, class_version/2, self_type_version/2, method/4, global_super, class, self -- res2, res)) {
+ DEOPT_IF(global_super != (PyObject *)&PySuper_Type, LOAD_SUPER_ATTR);
+ DEOPT_IF(!PyType_Check(class), LOAD_SUPER_ATTR);
+ DEOPT_IF(((PyTypeObject *)class)->tp_version_tag != class_version, LOAD_SUPER_ATTR);
+ PyTypeObject *self_type = Py_TYPE(self);
+ DEOPT_IF(self_type->tp_version_tag != self_type_version, LOAD_SUPER_ATTR);
+ res2 = method;
+ res = self; // transfer ownership
+ Py_INCREF(res2);
+ Py_DECREF(global_super);
+ Py_DECREF(class);
+ }
+
family(load_attr, INLINE_CACHE_ENTRIES_LOAD_ATTR) = {
LOAD_ATTR,
LOAD_ATTR_INSTANCE_VALUE,
diff --git a/Python/ceval.c b/Python/ceval.c
index 8c43e3d89c2c2a..5d5221b2e40990 100644
--- a/Python/ceval.c
+++ b/Python/ceval.c
@@ -21,6 +21,7 @@
#include "pycore_sliceobject.h" // _PyBuildSlice_ConsumeRefs
#include "pycore_sysmodule.h" // _PySys_Audit()
#include "pycore_tuple.h" // _PyTuple_ITEMS()
+#include "pycore_typeobject.h" // _PySuper_Lookup()
#include "pycore_emscripten_signal.h" // _Py_CHECK_EMSCRIPTEN_SIGNALS
#include "pycore_dict.h"
@@ -53,8 +54,11 @@
#undef Py_DECREF
#define Py_DECREF(arg) \
do { \
- _Py_DECREF_STAT_INC(); \
PyObject *op = _PyObject_CAST(arg); \
+ if (_Py_IsImmortal(op)) { \
+ break; \
+ } \
+ _Py_DECREF_STAT_INC(); \
if (--op->ob_refcnt == 0) { \
destructor dealloc = Py_TYPE(op)->tp_dealloc; \
(*dealloc)(op); \
@@ -77,8 +81,11 @@
#undef _Py_DECREF_SPECIALIZED
#define _Py_DECREF_SPECIALIZED(arg, dealloc) \
do { \
- _Py_DECREF_STAT_INC(); \
PyObject *op = _PyObject_CAST(arg); \
+ if (_Py_IsImmortal(op)) { \
+ break; \
+ } \
+ _Py_DECREF_STAT_INC(); \
if (--op->ob_refcnt == 0) { \
destructor d = (destructor)(dealloc); \
d(op); \
@@ -416,7 +423,7 @@ match_class(PyThreadState *tstate, PyObject *subject, PyObject *type,
Py_ssize_t nargs, PyObject *kwargs)
{
if (!PyType_Check(type)) {
- const char *e = "called match pattern must be a type";
+ const char *e = "called match pattern must be a class";
_PyErr_Format(tstate, PyExc_TypeError, e);
return NULL;
}
diff --git a/Python/clinic/sysmodule.c.h b/Python/clinic/sysmodule.c.h
index 46252dd404325b..7a7c188bcccc37 100644
--- a/Python/clinic/sysmodule.c.h
+++ b/Python/clinic/sysmodule.c.h
@@ -912,6 +912,34 @@ sys_getallocatedblocks(PyObject *module, PyObject *Py_UNUSED(ignored))
return return_value;
}
+PyDoc_STRVAR(sys_getunicodeinternedsize__doc__,
+"getunicodeinternedsize($module, /)\n"
+"--\n"
+"\n"
+"Return the number of elements of the unicode interned dictionary");
+
+#define SYS_GETUNICODEINTERNEDSIZE_METHODDEF \
+ {"getunicodeinternedsize", (PyCFunction)sys_getunicodeinternedsize, METH_NOARGS, sys_getunicodeinternedsize__doc__},
+
+static Py_ssize_t
+sys_getunicodeinternedsize_impl(PyObject *module);
+
+static PyObject *
+sys_getunicodeinternedsize(PyObject *module, PyObject *Py_UNUSED(ignored))
+{
+ PyObject *return_value = NULL;
+ Py_ssize_t _return_value;
+
+ _return_value = sys_getunicodeinternedsize_impl(module);
+ if ((_return_value == -1) && PyErr_Occurred()) {
+ goto exit;
+ }
+ return_value = PyLong_FromSsize_t(_return_value);
+
+exit:
+ return return_value;
+}
+
PyDoc_STRVAR(sys__getframe__doc__,
"_getframe($module, depth=0, /)\n"
"--\n"
@@ -1387,4 +1415,4 @@ sys__getframemodulename(PyObject *module, PyObject *const *args, Py_ssize_t narg
#ifndef SYS_GETANDROIDAPILEVEL_METHODDEF
#define SYS_GETANDROIDAPILEVEL_METHODDEF
#endif /* !defined(SYS_GETANDROIDAPILEVEL_METHODDEF) */
-/*[clinic end generated code: output=5c761f14326ced54 input=a9049054013a1b77]*/
+/*[clinic end generated code: output=6d598acc26237fbe input=a9049054013a1b77]*/
diff --git a/Python/compile.c b/Python/compile.c
index d6882c31d6437e..e8789def867308 100644
--- a/Python/compile.c
+++ b/Python/compile.c
@@ -149,7 +149,18 @@ enum {
COMPILER_SCOPE_COMPREHENSION,
};
-typedef _PyCompilerInstruction instruction;
+
+int
+_PyCompile_InstrSize(int opcode, int oparg)
+{
+ assert(!IS_PSEUDO_OPCODE(opcode));
+ assert(HAS_ARG(opcode) || oparg == 0);
+ int extended_args = (0xFFFFFF < oparg) + (0xFFFF < oparg) + (0xFF < oparg);
+ int caches = _PyOpcode_Caches[opcode];
+ return extended_args + 1 + caches;
+}
+
+typedef _PyCompile_Instruction instruction;
typedef _PyCompile_InstructionSequence instr_sequence;
#define INITIAL_INSTR_SEQUENCE_SIZE 100
@@ -829,6 +840,10 @@ stack_effect(int opcode, int oparg, int jump)
case LOAD_METHOD:
return 1;
+ case LOAD_SUPER_METHOD:
+ case LOAD_ZERO_SUPER_METHOD:
+ case LOAD_ZERO_SUPER_ATTR:
+ return -1;
default:
return PY_INVALID_STACK_EFFECT;
}
@@ -1047,6 +1062,24 @@ compiler_addop_name(struct compiler_unit *u, location loc,
arg <<= 1;
arg |= 1;
}
+ if (opcode == LOAD_SUPER_ATTR) {
+ arg <<= 2;
+ arg |= 2;
+ }
+ if (opcode == LOAD_SUPER_METHOD) {
+ opcode = LOAD_SUPER_ATTR;
+ arg <<= 2;
+ arg |= 3;
+ }
+ if (opcode == LOAD_ZERO_SUPER_ATTR) {
+ opcode = LOAD_SUPER_ATTR;
+ arg <<= 2;
+ }
+ if (opcode == LOAD_ZERO_SUPER_METHOD) {
+ opcode = LOAD_SUPER_ATTR;
+ arg <<= 2;
+ arg |= 1;
+ }
return codegen_addop_i(&u->u_instr_sequence, opcode, arg, loc);
}
@@ -2269,6 +2302,8 @@ check_is_arg(expr_ty e)
|| value == Py_Ellipsis);
}
+static PyTypeObject * infer_type(expr_ty e);
+
/* Check operands of identity checks ("is" and "is not").
Emit a warning if any operand is a constant except named singletons.
*/
@@ -2277,19 +2312,25 @@ check_compare(struct compiler *c, expr_ty e)
{
Py_ssize_t i, n;
bool left = check_is_arg(e->v.Compare.left);
+ expr_ty left_expr = e->v.Compare.left;
n = asdl_seq_LEN(e->v.Compare.ops);
for (i = 0; i < n; i++) {
cmpop_ty op = (cmpop_ty)asdl_seq_GET(e->v.Compare.ops, i);
- bool right = check_is_arg((expr_ty)asdl_seq_GET(e->v.Compare.comparators, i));
+ expr_ty right_expr = (expr_ty)asdl_seq_GET(e->v.Compare.comparators, i);
+ bool right = check_is_arg(right_expr);
if (op == Is || op == IsNot) {
if (!right || !left) {
const char *msg = (op == Is)
- ? "\"is\" with a literal. Did you mean \"==\"?"
- : "\"is not\" with a literal. Did you mean \"!=\"?";
- return compiler_warn(c, LOC(e), msg);
+ ? "\"is\" with '%.200s' literal. Did you mean \"==\"?"
+ : "\"is not\" with '%.200s' literal. Did you mean \"!=\"?";
+ expr_ty literal = !left ? left_expr : right_expr;
+ return compiler_warn(
+ c, LOC(e), msg, infer_type(literal)->tp_name
+ );
}
}
left = right;
+ left_expr = right_expr;
}
return SUCCESS;
}
@@ -3045,11 +3086,9 @@ compiler_try_except(struct compiler *c, stmt_ty s)
[orig, res, exc]
[orig, res, exc, E1] CHECK_EG_MATCH
[orig, res, rest/exc, match?] COPY 1
- [orig, res, rest/exc, match?, match?] POP_JUMP_IF_NOT_NONE H1
- [orig, res, exc, None] POP_TOP
- [orig, res, exc] JUMP L2
+ [orig, res, rest/exc, match?, match?] POP_JUMP_IF_NONE C1
- [orig, res, rest, match] H1: (or POP if no V1)
+ [orig, res, rest, match] (or POP if no V1)
[orig, res, rest] SETUP_FINALLY R1
[orig, res, rest]
@@ -3057,8 +3096,14 @@ compiler_try_except(struct compiler *c, stmt_ty s)
[orig, res, rest, i, v] R1: LIST_APPEND 3 ) exc raised in except* body - add to res
[orig, res, rest, i] POP
+ [orig, res, rest] JUMP LE2
+
+ [orig, res, rest] L2: NOP ) for lineno
+ [orig, res, rest] JUMP LE2
+
+ [orig, res, rest/exc, None] C1: POP
- [orig, res, rest] L2:
+ [orig, res, rest] LE2:
.............................etc.......................
[orig, res, rest] Ln+1: LIST_APPEND 1 ) add unhandled exc to res (could be None)
@@ -3114,7 +3159,8 @@ compiler_try_star_except(struct compiler *c, stmt_ty s)
location loc = LOC(handler);
NEW_JUMP_TARGET_LABEL(c, next_except);
except = next_except;
- NEW_JUMP_TARGET_LABEL(c, handle_match);
+ NEW_JUMP_TARGET_LABEL(c, except_with_error);
+ NEW_JUMP_TARGET_LABEL(c, no_match);
if (i == 0) {
/* create empty list for exceptions raised/reraise in the except* blocks */
/*
@@ -3132,13 +3178,9 @@ compiler_try_star_except(struct compiler *c, stmt_ty s)
VISIT(c, expr, handler->v.ExceptHandler.type);
ADDOP(c, loc, CHECK_EG_MATCH);
ADDOP_I(c, loc, COPY, 1);
- ADDOP_JUMP(c, loc, POP_JUMP_IF_NOT_NONE, handle_match);
- ADDOP(c, loc, POP_TOP); // match
- ADDOP_JUMP(c, loc, JUMP, except);
+ ADDOP_JUMP(c, loc, POP_JUMP_IF_NONE, no_match);
}
- USE_LABEL(c, handle_match);
-
NEW_JUMP_TARGET_LABEL(c, cleanup_end);
NEW_JUMP_TARGET_LABEL(c, cleanup_body);
@@ -3197,9 +3239,16 @@ compiler_try_star_except(struct compiler *c, stmt_ty s)
/* add exception raised to the res list */
ADDOP_I(c, NO_LOCATION, LIST_APPEND, 3); // exc
ADDOP(c, NO_LOCATION, POP_TOP); // lasti
- ADDOP_JUMP(c, NO_LOCATION, JUMP, except);
+ ADDOP_JUMP(c, NO_LOCATION, JUMP, except_with_error);
USE_LABEL(c, except);
+ ADDOP(c, NO_LOCATION, NOP); // to hold a propagated location info
+ ADDOP_JUMP(c, NO_LOCATION, JUMP, except_with_error);
+
+ USE_LABEL(c, no_match);
+ ADDOP(c, loc, POP_TOP); // match (None)
+
+ USE_LABEL(c, except_with_error);
if (i == n - 1) {
/* Add exc to the list (if not None it's the unhandled part of the EG) */
@@ -4214,6 +4263,89 @@ is_import_originated(struct compiler *c, expr_ty e)
return flags & DEF_IMPORT;
}
+static int
+can_optimize_super_call(struct compiler *c, expr_ty attr)
+{
+ expr_ty e = attr->v.Attribute.value;
+ if (e->kind != Call_kind ||
+ e->v.Call.func->kind != Name_kind ||
+ !_PyUnicode_EqualToASCIIString(e->v.Call.func->v.Name.id, "super") ||
+ _PyUnicode_EqualToASCIIString(attr->v.Attribute.attr, "__class__") ||
+ asdl_seq_LEN(e->v.Call.keywords) != 0) {
+ return 0;
+ }
+ Py_ssize_t num_args = asdl_seq_LEN(e->v.Call.args);
+
+ PyObject *super_name = e->v.Call.func->v.Name.id;
+ // detect statically-visible shadowing of 'super' name
+ int scope = _PyST_GetScope(c->u->u_ste, super_name);
+ if (scope != GLOBAL_IMPLICIT) {
+ return 0;
+ }
+ scope = _PyST_GetScope(c->c_st->st_top, super_name);
+ if (scope != 0) {
+ return 0;
+ }
+
+ if (num_args == 2) {
+ for (Py_ssize_t i = 0; i < num_args; i++) {
+ expr_ty elt = asdl_seq_GET(e->v.Call.args, i);
+ if (elt->kind == Starred_kind) {
+ return 0;
+ }
+ }
+ // exactly two non-starred args; we can just load
+ // the provided args
+ return 1;
+ }
+
+ if (num_args != 0) {
+ return 0;
+ }
+ // we need the following for zero-arg super():
+
+ // enclosing function should have at least one argument
+ if (c->u->u_metadata.u_argcount == 0 &&
+ c->u->u_metadata.u_posonlyargcount == 0) {
+ return 0;
+ }
+ // __class__ cell should be available
+ if (get_ref_type(c, &_Py_ID(__class__)) == FREE) {
+ return 1;
+ }
+ return 0;
+}
+
+static int
+load_args_for_super(struct compiler *c, expr_ty e) {
+ location loc = LOC(e);
+
+ // load super() global
+ PyObject *super_name = e->v.Call.func->v.Name.id;
+ RETURN_IF_ERROR(compiler_nameop(c, loc, super_name, Load));
+
+ if (asdl_seq_LEN(e->v.Call.args) == 2) {
+ VISIT(c, expr, asdl_seq_GET(e->v.Call.args, 0));
+ VISIT(c, expr, asdl_seq_GET(e->v.Call.args, 1));
+ return SUCCESS;
+ }
+
+ // load __class__ cell
+ PyObject *name = &_Py_ID(__class__);
+ assert(get_ref_type(c, name) == FREE);
+ RETURN_IF_ERROR(compiler_nameop(c, loc, name, Load));
+
+ // load self (first argument)
+ Py_ssize_t i = 0;
+ PyObject *key, *value;
+ if (!PyDict_Next(c->u->u_metadata.u_varnames, &i, &key, &value)) {
+ return ERROR;
+ }
+ RETURN_IF_ERROR(compiler_nameop(c, loc, key, Load));
+
+ return SUCCESS;
+}
+
// If an attribute access spans multiple lines, update the current start
// location to point to the attribute name.
static location
@@ -4281,11 +4413,21 @@ maybe_optimize_method_call(struct compiler *c, expr_ty e)
return 0;
}
}
+
/* Alright, we can optimize the code. */
- VISIT(c, expr, meth->v.Attribute.value);
location loc = LOC(meth);
- loc = update_start_location_to_match_attr(c, loc, meth);
- ADDOP_NAME(c, loc, LOAD_METHOD, meth->v.Attribute.attr, names);
+
+ if (can_optimize_super_call(c, meth)) {
+ RETURN_IF_ERROR(load_args_for_super(c, meth->v.Attribute.value));
+ int opcode = asdl_seq_LEN(meth->v.Attribute.value->v.Call.args) ?
+ LOAD_SUPER_METHOD : LOAD_ZERO_SUPER_METHOD;
+ ADDOP_NAME(c, loc, opcode, meth->v.Attribute.attr, names);
+ } else {
+ VISIT(c, expr, meth->v.Attribute.value);
+ loc = update_start_location_to_match_attr(c, loc, meth);
+ ADDOP_NAME(c, loc, LOAD_METHOD, meth->v.Attribute.attr, names);
+ }
+
VISIT_SEQ(c, expr, e->v.Call.args);
if (kwdsl) {
@@ -5293,6 +5435,13 @@ compiler_visit_expr1(struct compiler *c, expr_ty e)
return compiler_formatted_value(c, e);
/* The following exprs can be assignment targets. */
case Attribute_kind:
+ if (e->v.Attribute.ctx == Load && can_optimize_super_call(c, e)) {
+ RETURN_IF_ERROR(load_args_for_super(c, e->v.Attribute.value));
+ int opcode = asdl_seq_LEN(e->v.Attribute.value->v.Call.args) ?
+ LOAD_SUPER_ATTR : LOAD_ZERO_SUPER_ATTR;
+ ADDOP_NAME(c, loc, opcode, e->v.Attribute.attr, names);
+ return SUCCESS;
+ }
VISIT(c, expr, e->v.Attribute.value);
loc = LOC(e);
loc = update_start_location_to_match_attr(c, loc, e);
@@ -6688,7 +6837,10 @@ insert_prefix_instructions(struct compiler_unit *u, basicblock *entryblock,
.i_loc = NO_LOCATION,
.i_target = NULL,
};
- RETURN_IF_ERROR(_PyBasicblock_InsertInstruction(entryblock, ncellsused, &make_cell));
+ if (_PyBasicblock_InsertInstruction(entryblock, ncellsused, &make_cell) < 0) {
+ PyMem_RawFree(sorted);
+ return ERROR;
+ }
ncellsused += 1;
}
PyMem_RawFree(sorted);
@@ -6827,10 +6979,6 @@ optimize_and_assemble_code_unit(struct compiler_unit *u, PyObject *const_cache,
goto error;
}
- if (cfg_to_instr_sequence(&g, &optimized_instrs) < 0) {
- goto error;
- }
-
/** Assembly **/
int nlocalsplus = prepare_localsplus(u, &g, code_flags);
if (nlocalsplus < 0) {
@@ -6849,18 +6997,18 @@ optimize_and_assemble_code_unit(struct compiler_unit *u, PyObject *const_cache,
if (_PyCfg_ResolveJumps(&g) < 0) {
goto error;
}
+
+ /* Can't modify the bytecode after computing jump offsets. */
+
if (cfg_to_instr_sequence(&g, &optimized_instrs) < 0) {
goto error;
}
-
- /* Can't modify the bytecode after computing jump offsets. */
-
co = _PyAssemble_MakeCodeObject(&u->u_metadata, const_cache, consts,
- maxdepth, g.g_entryblock, nlocalsplus,
+ maxdepth, &optimized_instrs, nlocalsplus,
code_flags, filename);
- error:
+error:
Py_XDECREF(consts);
instr_sequence_fini(&optimized_instrs);
_PyCfgBuilder_Fini(&g);
@@ -6898,11 +7046,18 @@ cfg_to_instr_sequence(cfg_builder *g, instr_sequence *seq)
RETURN_IF_ERROR(instr_sequence_use_label(seq, b->b_label.id));
for (int i = 0; i < b->b_iused; i++) {
cfg_instr *instr = &b->b_instr[i];
- int arg = HAS_TARGET(instr->i_opcode) ?
- instr->i_target->b_label.id :
- instr->i_oparg;
RETURN_IF_ERROR(
- instr_sequence_addop(seq, instr->i_opcode, arg, instr->i_loc));
+ instr_sequence_addop(seq, instr->i_opcode, instr->i_oparg, instr->i_loc));
+
+ _PyCompile_ExceptHandlerInfo *hi = &seq->s_instrs[seq->s_used-1].i_except_handler_info;
+ if (instr->i_except != NULL) {
+ hi->h_offset = instr->i_except->b_offset;
+ hi->h_startdepth = instr->i_except->b_startdepth;
+ hi->h_preserve_lasti = instr->i_except->b_preserve_lasti;
+ }
+ else {
+ hi->h_offset = -1;
+ }
}
}
return SUCCESS;
@@ -6958,7 +7113,9 @@ instructions_to_instr_sequence(PyObject *instructions, instr_sequence *seq)
for (int i = 0; i < num_insts; i++) {
if (is_target[i]) {
- RETURN_IF_ERROR(instr_sequence_use_label(seq, i));
+ if (instr_sequence_use_label(seq, i) < 0) {
+ goto error;
+ }
}
PyObject *item = PyList_GET_ITEM(instructions, i);
if (!PyTuple_Check(item) || PyTuple_GET_SIZE(item) != 6) {
@@ -6996,10 +7153,14 @@ instructions_to_instr_sequence(PyObject *instructions, instr_sequence *seq)
if (PyErr_Occurred()) {
goto error;
}
- RETURN_IF_ERROR(instr_sequence_addop(seq, opcode, oparg, loc));
+ if (instr_sequence_addop(seq, opcode, oparg, loc) < 0) {
+ goto error;
+ }
}
if (seq->s_used && !IS_TERMINATOR_OPCODE(seq->s_instrs[seq->s_used-1].i_opcode)) {
- RETURN_IF_ERROR(instr_sequence_addop(seq, RETURN_VALUE, 0, NO_LOCATION));
+ if (instr_sequence_addop(seq, RETURN_VALUE, 0, NO_LOCATION) < 0) {
+ goto error;
+ }
}
PyMem_Free(is_target);
return SUCCESS;
@@ -7014,12 +7175,17 @@ instructions_to_cfg(PyObject *instructions, cfg_builder *g)
instr_sequence seq;
memset(&seq, 0, sizeof(instr_sequence));
- RETURN_IF_ERROR(
- instructions_to_instr_sequence(instructions, &seq));
-
- RETURN_IF_ERROR(instr_sequence_to_cfg(&seq, g));
+ if (instructions_to_instr_sequence(instructions, &seq) < 0) {
+ goto error;
+ }
+ if (instr_sequence_to_cfg(&seq, g) < 0) {
+ goto error;
+ }
instr_sequence_fini(&seq);
return SUCCESS;
+error:
+ instr_sequence_fini(&seq);
+ return ERROR;
}
static PyObject *
diff --git a/Python/errors.c b/Python/errors.c
index 0ff6a0d5985f0f..7fc267385c569b 100644
--- a/Python/errors.c
+++ b/Python/errors.c
@@ -1342,15 +1342,9 @@ static PyStructSequence_Desc UnraisableHookArgs_desc = {
PyStatus
_PyErr_InitTypes(PyInterpreterState *interp)
{
- if (!_Py_IsMainInterpreter(interp)) {
- return _PyStatus_OK();
- }
-
- if (UnraisableHookArgsType.tp_name == NULL) {
- if (_PyStructSequence_InitBuiltin(&UnraisableHookArgsType,
- &UnraisableHookArgs_desc) < 0) {
- return _PyStatus_ERR("failed to initialize UnraisableHookArgs type");
- }
+ if (_PyStructSequence_InitBuiltin(&UnraisableHookArgsType,
+ &UnraisableHookArgs_desc) < 0) {
+ return _PyStatus_ERR("failed to initialize UnraisableHookArgs type");
}
return _PyStatus_OK();
}
diff --git a/Python/flowgraph.c b/Python/flowgraph.c
index 67cc5c5e88be10..6f83a910cab392 100644
--- a/Python/flowgraph.c
+++ b/Python/flowgraph.c
@@ -166,16 +166,10 @@ _PyBasicblock_InsertInstruction(basicblock *block, int pos, cfg_instr *instr) {
return SUCCESS;
}
-int
-_PyCfg_InstrSize(cfg_instr *instruction)
+static int
+instr_size(cfg_instr *instruction)
{
- int opcode = instruction->i_opcode;
- assert(!IS_PSEUDO_OPCODE(opcode));
- int oparg = instruction->i_oparg;
- assert(HAS_ARG(opcode) || oparg == 0);
- int extended_args = (0xFFFFFF < oparg) + (0xFFFF < oparg) + (0xFF < oparg);
- int caches = _PyOpcode_Caches[opcode];
- return extended_args + 1 + caches;
+ return _PyCompile_InstrSize(instruction->i_opcode, instruction->i_oparg);
}
static int
@@ -183,7 +177,7 @@ blocksize(basicblock *b)
{
int size = 0;
for (int i = 0; i < b->b_iused; i++) {
- size += _PyCfg_InstrSize(&b->b_instr[i]);
+ size += instr_size(&b->b_instr[i]);
}
return size;
}
@@ -492,7 +486,7 @@ resolve_jump_offsets(basicblock *entryblock)
bsize = b->b_offset;
for (int i = 0; i < b->b_iused; i++) {
cfg_instr *instr = &b->b_instr[i];
- int isize = _PyCfg_InstrSize(instr);
+ int isize = instr_size(instr);
/* jump offsets are computed relative to
* the instruction pointer after fetching
* the jump instruction.
@@ -508,7 +502,7 @@ resolve_jump_offsets(basicblock *entryblock)
assert(!IS_BACKWARDS_JUMP_OPCODE(instr->i_opcode));
instr->i_oparg -= bsize;
}
- if (_PyCfg_InstrSize(instr) != isize) {
+ if (instr_size(instr) != isize) {
extended_arg_recompile = 1;
}
}
@@ -520,7 +514,7 @@ resolve_jump_offsets(basicblock *entryblock)
with a better solution.
The issue is that in the first loop blocksize() is called
- which calls _PyCfg_InstrSize() which requires i_oparg be set
+ which calls instr_size() which requires i_oparg be set
appropriately. There is a bootstrap problem because
i_oparg is calculated in the second loop above.
diff --git a/Python/frozen.c b/Python/frozen.c
index 48b429519b6606..6b977710e6e342 100644
--- a/Python/frozen.c
+++ b/Python/frozen.c
@@ -41,6 +41,29 @@
#include
/* Includes for frozen modules: */
+#include "frozen_modules/importlib._bootstrap.h"
+#include "frozen_modules/importlib._bootstrap_external.h"
+#include "frozen_modules/zipimport.h"
+#include "frozen_modules/abc.h"
+#include "frozen_modules/codecs.h"
+#include "frozen_modules/io.h"
+#include "frozen_modules/_collections_abc.h"
+#include "frozen_modules/_sitebuiltins.h"
+#include "frozen_modules/genericpath.h"
+#include "frozen_modules/ntpath.h"
+#include "frozen_modules/posixpath.h"
+#include "frozen_modules/os.h"
+#include "frozen_modules/site.h"
+#include "frozen_modules/stat.h"
+#include "frozen_modules/importlib.util.h"
+#include "frozen_modules/importlib.machinery.h"
+#include "frozen_modules/runpy.h"
+#include "frozen_modules/__hello__.h"
+#include "frozen_modules/__phello__.h"
+#include "frozen_modules/__phello__.ham.h"
+#include "frozen_modules/__phello__.ham.eggs.h"
+#include "frozen_modules/__phello__.spam.h"
+#include "frozen_modules/frozen_only.h"
/* End includes */
#define GET_CODE(name) _Py_get_##name##_toplevel
@@ -78,46 +101,46 @@ extern PyObject *_Py_get_frozen_only_toplevel(void);
/* End extern declarations */
static const struct _frozen bootstrap_modules[] = {
- {"_frozen_importlib", NULL, 0, false, GET_CODE(importlib__bootstrap)},
- {"_frozen_importlib_external", NULL, 0, false, GET_CODE(importlib__bootstrap_external)},
- {"zipimport", NULL, 0, false, GET_CODE(zipimport)},
+ {"_frozen_importlib", _Py_M__importlib__bootstrap, (int)sizeof(_Py_M__importlib__bootstrap), false, GET_CODE(importlib__bootstrap)},
+ {"_frozen_importlib_external", _Py_M__importlib__bootstrap_external, (int)sizeof(_Py_M__importlib__bootstrap_external), false, GET_CODE(importlib__bootstrap_external)},
+ {"zipimport", _Py_M__zipimport, (int)sizeof(_Py_M__zipimport), false, GET_CODE(zipimport)},
{0, 0, 0} /* bootstrap sentinel */
};
static const struct _frozen stdlib_modules[] = {
/* stdlib - startup, without site (python -S) */
- {"abc", NULL, 0, false, GET_CODE(abc)},
- {"codecs", NULL, 0, false, GET_CODE(codecs)},
- {"io", NULL, 0, false, GET_CODE(io)},
+ {"abc", _Py_M__abc, (int)sizeof(_Py_M__abc), false, GET_CODE(abc)},
+ {"codecs", _Py_M__codecs, (int)sizeof(_Py_M__codecs), false, GET_CODE(codecs)},
+ {"io", _Py_M__io, (int)sizeof(_Py_M__io), false, GET_CODE(io)},
/* stdlib - startup, with site */
- {"_collections_abc", NULL, 0, false, GET_CODE(_collections_abc)},
- {"_sitebuiltins", NULL, 0, false, GET_CODE(_sitebuiltins)},
- {"genericpath", NULL, 0, false, GET_CODE(genericpath)},
- {"ntpath", NULL, 0, false, GET_CODE(ntpath)},
- {"posixpath", NULL, 0, false, GET_CODE(posixpath)},
- {"os.path", NULL, 0, false, GET_CODE(posixpath)},
- {"os", NULL, 0, false, GET_CODE(os)},
- {"site", NULL, 0, false, GET_CODE(site)},
- {"stat", NULL, 0, false, GET_CODE(stat)},
+ {"_collections_abc", _Py_M___collections_abc, (int)sizeof(_Py_M___collections_abc), false, GET_CODE(_collections_abc)},
+ {"_sitebuiltins", _Py_M___sitebuiltins, (int)sizeof(_Py_M___sitebuiltins), false, GET_CODE(_sitebuiltins)},
+ {"genericpath", _Py_M__genericpath, (int)sizeof(_Py_M__genericpath), false, GET_CODE(genericpath)},
+ {"ntpath", _Py_M__ntpath, (int)sizeof(_Py_M__ntpath), false, GET_CODE(ntpath)},
+ {"posixpath", _Py_M__posixpath, (int)sizeof(_Py_M__posixpath), false, GET_CODE(posixpath)},
+ {"os.path", _Py_M__posixpath, (int)sizeof(_Py_M__posixpath), false, GET_CODE(posixpath)},
+ {"os", _Py_M__os, (int)sizeof(_Py_M__os), false, GET_CODE(os)},
+ {"site", _Py_M__site, (int)sizeof(_Py_M__site), false, GET_CODE(site)},
+ {"stat", _Py_M__stat, (int)sizeof(_Py_M__stat), false, GET_CODE(stat)},
/* runpy - run module with -m */
- {"importlib.util", NULL, 0, false, GET_CODE(importlib_util)},
- {"importlib.machinery", NULL, 0, false, GET_CODE(importlib_machinery)},
- {"runpy", NULL, 0, false, GET_CODE(runpy)},
+ {"importlib.util", _Py_M__importlib_util, (int)sizeof(_Py_M__importlib_util), false, GET_CODE(importlib_util)},
+ {"importlib.machinery", _Py_M__importlib_machinery, (int)sizeof(_Py_M__importlib_machinery), false, GET_CODE(importlib_machinery)},
+ {"runpy", _Py_M__runpy, (int)sizeof(_Py_M__runpy), false, GET_CODE(runpy)},
{0, 0, 0} /* stdlib sentinel */
};
static const struct _frozen test_modules[] = {
- {"__hello__", NULL, 0, false, GET_CODE(__hello__)},
- {"__hello_alias__", NULL, 0, false, GET_CODE(__hello__)},
- {"__phello_alias__", NULL, 0, true, GET_CODE(__hello__)},
- {"__phello_alias__.spam", NULL, 0, false, GET_CODE(__hello__)},
- {"__phello__", NULL, 0, true, GET_CODE(__phello__)},
- {"__phello__.__init__", NULL, 0, false, GET_CODE(__phello__)},
- {"__phello__.ham", NULL, 0, true, GET_CODE(__phello___ham)},
- {"__phello__.ham.__init__", NULL, 0, false, GET_CODE(__phello___ham)},
- {"__phello__.ham.eggs", NULL, 0, false, GET_CODE(__phello___ham_eggs)},
- {"__phello__.spam", NULL, 0, false, GET_CODE(__phello___spam)},
- {"__hello_only__", NULL, 0, false, GET_CODE(frozen_only)},
+ {"__hello__", _Py_M____hello__, (int)sizeof(_Py_M____hello__), false, GET_CODE(__hello__)},
+ {"__hello_alias__", _Py_M____hello__, (int)sizeof(_Py_M____hello__), false, GET_CODE(__hello__)},
+ {"__phello_alias__", _Py_M____hello__, (int)sizeof(_Py_M____hello__), true, GET_CODE(__hello__)},
+ {"__phello_alias__.spam", _Py_M____hello__, (int)sizeof(_Py_M____hello__), false, GET_CODE(__hello__)},
+ {"__phello__", _Py_M____phello__, (int)sizeof(_Py_M____phello__), true, GET_CODE(__phello__)},
+ {"__phello__.__init__", _Py_M____phello__, (int)sizeof(_Py_M____phello__), false, GET_CODE(__phello__)},
+ {"__phello__.ham", _Py_M____phello___ham, (int)sizeof(_Py_M____phello___ham), true, GET_CODE(__phello___ham)},
+ {"__phello__.ham.__init__", _Py_M____phello___ham, (int)sizeof(_Py_M____phello___ham), false, GET_CODE(__phello___ham)},
+ {"__phello__.ham.eggs", _Py_M____phello___ham_eggs, (int)sizeof(_Py_M____phello___ham_eggs), false, GET_CODE(__phello___ham_eggs)},
+ {"__phello__.spam", _Py_M____phello___spam, (int)sizeof(_Py_M____phello___spam), false, GET_CODE(__phello___spam)},
+ {"__hello_only__", _Py_M__frozen_only, (int)sizeof(_Py_M__frozen_only), false, GET_CODE(frozen_only)},
{0, 0, 0} /* test sentinel */
};
const struct _frozen *_PyImport_FrozenBootstrap = bootstrap_modules;
diff --git a/Python/generated_cases.c.h b/Python/generated_cases.c.h
index 0928f8200ae751..864a4f7bcaff0f 100644
--- a/Python/generated_cases.c.h
+++ b/Python/generated_cases.c.h
@@ -8,7 +8,7 @@
}
TARGET(RESUME) {
- #line 136 "Python/bytecodes.c"
+ #line 137 "Python/bytecodes.c"
assert(tstate->cframe == &cframe);
assert(frame == cframe.current_frame);
/* Possibly combine this with eval breaker */
@@ -25,7 +25,7 @@
}
TARGET(INSTRUMENTED_RESUME) {
- #line 150 "Python/bytecodes.c"
+ #line 151 "Python/bytecodes.c"
/* Possible performance enhancement:
* We need to check the eval breaker anyway, can we
* combine the instrument verison check and the eval breaker test?
@@ -57,7 +57,7 @@
TARGET(LOAD_CLOSURE) {
PyObject *value;
- #line 178 "Python/bytecodes.c"
+ #line 179 "Python/bytecodes.c"
/* We keep LOAD_CLOSURE so that the bytecode stays more readable. */
value = GETLOCAL(oparg);
if (value == NULL) goto unbound_local_error;
@@ -70,7 +70,7 @@
TARGET(LOAD_FAST_CHECK) {
PyObject *value;
- #line 185 "Python/bytecodes.c"
+ #line 186 "Python/bytecodes.c"
value = GETLOCAL(oparg);
if (value == NULL) goto unbound_local_error;
Py_INCREF(value);
@@ -82,7 +82,7 @@
TARGET(LOAD_FAST) {
PyObject *value;
- #line 191 "Python/bytecodes.c"
+ #line 192 "Python/bytecodes.c"
value = GETLOCAL(oparg);
assert(value != NULL);
Py_INCREF(value);
@@ -95,7 +95,7 @@
TARGET(LOAD_CONST) {
PREDICTED(LOAD_CONST);
PyObject *value;
- #line 197 "Python/bytecodes.c"
+ #line 198 "Python/bytecodes.c"
value = GETITEM(frame->f_code->co_consts, oparg);
Py_INCREF(value);
#line 102 "Python/generated_cases.c.h"
@@ -106,7 +106,7 @@
TARGET(STORE_FAST) {
PyObject *value = stack_pointer[-1];
- #line 202 "Python/bytecodes.c"
+ #line 203 "Python/bytecodes.c"
SETLOCAL(oparg, value);
#line 112 "Python/generated_cases.c.h"
STACK_SHRINK(1);
@@ -118,7 +118,7 @@
PyObject *_tmp_2;
{
PyObject *value;
- #line 191 "Python/bytecodes.c"
+ #line 192 "Python/bytecodes.c"
value = GETLOCAL(oparg);
assert(value != NULL);
Py_INCREF(value);
@@ -128,7 +128,7 @@
oparg = (next_instr++)->op.arg;
{
PyObject *value;
- #line 191 "Python/bytecodes.c"
+ #line 192 "Python/bytecodes.c"
value = GETLOCAL(oparg);
assert(value != NULL);
Py_INCREF(value);
@@ -146,7 +146,7 @@
PyObject *_tmp_2;
{
PyObject *value;
- #line 191 "Python/bytecodes.c"
+ #line 192 "Python/bytecodes.c"
value = GETLOCAL(oparg);
assert(value != NULL);
Py_INCREF(value);
@@ -156,7 +156,7 @@
oparg = (next_instr++)->op.arg;
{
PyObject *value;
- #line 197 "Python/bytecodes.c"
+ #line 198 "Python/bytecodes.c"
value = GETITEM(frame->f_code->co_consts, oparg);
Py_INCREF(value);
#line 163 "Python/generated_cases.c.h"
@@ -172,14 +172,14 @@
PyObject *_tmp_1 = stack_pointer[-1];
{
PyObject *value = _tmp_1;
- #line 202 "Python/bytecodes.c"
+ #line 203 "Python/bytecodes.c"
SETLOCAL(oparg, value);
#line 178 "Python/generated_cases.c.h"
}
oparg = (next_instr++)->op.arg;
{
PyObject *value;
- #line 191 "Python/bytecodes.c"
+ #line 192 "Python/bytecodes.c"
value = GETLOCAL(oparg);
assert(value != NULL);
Py_INCREF(value);
@@ -195,14 +195,14 @@
PyObject *_tmp_2 = stack_pointer[-2];
{
PyObject *value = _tmp_1;
- #line 202 "Python/bytecodes.c"
+ #line 203 "Python/bytecodes.c"
SETLOCAL(oparg, value);
#line 201 "Python/generated_cases.c.h"
}
oparg = (next_instr++)->op.arg;
{
PyObject *value = _tmp_2;
- #line 202 "Python/bytecodes.c"
+ #line 203 "Python/bytecodes.c"
SETLOCAL(oparg, value);
#line 208 "Python/generated_cases.c.h"
}
@@ -215,7 +215,7 @@
PyObject *_tmp_2;
{
PyObject *value;
- #line 197 "Python/bytecodes.c"
+ #line 198 "Python/bytecodes.c"
value = GETITEM(frame->f_code->co_consts, oparg);
Py_INCREF(value);
#line 222 "Python/generated_cases.c.h"
@@ -224,7 +224,7 @@
oparg = (next_instr++)->op.arg;
{
PyObject *value;
- #line 191 "Python/bytecodes.c"
+ #line 192 "Python/bytecodes.c"
value = GETLOCAL(oparg);
assert(value != NULL);
Py_INCREF(value);
@@ -239,7 +239,7 @@
TARGET(POP_TOP) {
PyObject *value = stack_pointer[-1];
- #line 212 "Python/bytecodes.c"
+ #line 213 "Python/bytecodes.c"
#line 244 "Python/generated_cases.c.h"
Py_DECREF(value);
STACK_SHRINK(1);
@@ -248,7 +248,7 @@
TARGET(PUSH_NULL) {
PyObject *res;
- #line 216 "Python/bytecodes.c"
+ #line 217 "Python/bytecodes.c"
res = NULL;
#line 254 "Python/generated_cases.c.h"
STACK_GROW(1);
@@ -261,13 +261,13 @@
PyObject *_tmp_2 = stack_pointer[-2];
{
PyObject *value = _tmp_1;
- #line 212 "Python/bytecodes.c"
+ #line 213 "Python/bytecodes.c"
#line 266 "Python/generated_cases.c.h"
Py_DECREF(value);
}
{
PyObject *value = _tmp_2;
- #line 212 "Python/bytecodes.c"
+ #line 213 "Python/bytecodes.c"
#line 272 "Python/generated_cases.c.h"
Py_DECREF(value);
}
@@ -278,7 +278,7 @@
TARGET(INSTRUMENTED_END_FOR) {
PyObject *value = stack_pointer[-1];
PyObject *receiver = stack_pointer[-2];
- #line 222 "Python/bytecodes.c"
+ #line 223 "Python/bytecodes.c"
/* Need to create a fake StopIteration error here,
* to conform to PEP 380 */
if (PyGen_Check(receiver)) {
@@ -298,7 +298,7 @@
TARGET(END_SEND) {
PyObject *value = stack_pointer[-1];
PyObject *receiver = stack_pointer[-2];
- #line 235 "Python/bytecodes.c"
+ #line 236 "Python/bytecodes.c"
Py_DECREF(receiver);
#line 304 "Python/generated_cases.c.h"
STACK_SHRINK(1);
@@ -309,7 +309,7 @@
TARGET(INSTRUMENTED_END_SEND) {
PyObject *value = stack_pointer[-1];
PyObject *receiver = stack_pointer[-2];
- #line 239 "Python/bytecodes.c"
+ #line 240 "Python/bytecodes.c"
if (PyGen_Check(receiver) || PyCoro_CheckExact(receiver)) {
PyErr_SetObject(PyExc_StopIteration, value);
if (monitor_stop_iteration(tstate, frame, next_instr-1)) {
@@ -327,11 +327,11 @@
TARGET(UNARY_NEGATIVE) {
PyObject *value = stack_pointer[-1];
PyObject *res;
- #line 250 "Python/bytecodes.c"
+ #line 251 "Python/bytecodes.c"
res = PyNumber_Negative(value);
#line 333 "Python/generated_cases.c.h"
Py_DECREF(value);
- #line 252 "Python/bytecodes.c"
+ #line 253 "Python/bytecodes.c"
if (res == NULL) goto pop_1_error;
#line 337 "Python/generated_cases.c.h"
stack_pointer[-1] = res;
@@ -341,11 +341,11 @@
TARGET(UNARY_NOT) {
PyObject *value = stack_pointer[-1];
PyObject *res;
- #line 256 "Python/bytecodes.c"
+ #line 257 "Python/bytecodes.c"
int err = PyObject_IsTrue(value);
#line 347 "Python/generated_cases.c.h"
Py_DECREF(value);
- #line 258 "Python/bytecodes.c"
+ #line 259 "Python/bytecodes.c"
if (err < 0) goto pop_1_error;
if (err == 0) {
res = Py_True;
@@ -362,11 +362,11 @@
TARGET(UNARY_INVERT) {
PyObject *value = stack_pointer[-1];
PyObject *res;
- #line 269 "Python/bytecodes.c"
+ #line 270 "Python/bytecodes.c"
res = PyNumber_Invert(value);
#line 368 "Python/generated_cases.c.h"
Py_DECREF(value);
- #line 271 "Python/bytecodes.c"
+ #line 272 "Python/bytecodes.c"
if (res == NULL) goto pop_1_error;
#line 372 "Python/generated_cases.c.h"
stack_pointer[-1] = res;
@@ -377,7 +377,7 @@
PyObject *right = stack_pointer[-1];
PyObject *left = stack_pointer[-2];
PyObject *prod;
- #line 288 "Python/bytecodes.c"
+ #line 289 "Python/bytecodes.c"
DEOPT_IF(!PyLong_CheckExact(left), BINARY_OP);
DEOPT_IF(!PyLong_CheckExact(right), BINARY_OP);
STAT_INC(BINARY_OP, hit);
@@ -396,7 +396,7 @@
PyObject *right = stack_pointer[-1];
PyObject *left = stack_pointer[-2];
PyObject *prod;
- #line 298 "Python/bytecodes.c"
+ #line 299 "Python/bytecodes.c"
DEOPT_IF(!PyFloat_CheckExact(left), BINARY_OP);
DEOPT_IF(!PyFloat_CheckExact(right), BINARY_OP);
STAT_INC(BINARY_OP, hit);
@@ -414,7 +414,7 @@
PyObject *right = stack_pointer[-1];
PyObject *left = stack_pointer[-2];
PyObject *sub;
- #line 307 "Python/bytecodes.c"
+ #line 308 "Python/bytecodes.c"
DEOPT_IF(!PyLong_CheckExact(left), BINARY_OP);
DEOPT_IF(!PyLong_CheckExact(right), BINARY_OP);
STAT_INC(BINARY_OP, hit);
@@ -433,7 +433,7 @@
PyObject *right = stack_pointer[-1];
PyObject *left = stack_pointer[-2];
PyObject *sub;
- #line 317 "Python/bytecodes.c"
+ #line 318 "Python/bytecodes.c"
DEOPT_IF(!PyFloat_CheckExact(left), BINARY_OP);
DEOPT_IF(!PyFloat_CheckExact(right), BINARY_OP);
STAT_INC(BINARY_OP, hit);
@@ -450,7 +450,7 @@
PyObject *right = stack_pointer[-1];
PyObject *left = stack_pointer[-2];
PyObject *res;
- #line 325 "Python/bytecodes.c"
+ #line 326 "Python/bytecodes.c"
DEOPT_IF(!PyUnicode_CheckExact(left), BINARY_OP);
DEOPT_IF(Py_TYPE(right) != Py_TYPE(left), BINARY_OP);
STAT_INC(BINARY_OP, hit);
@@ -468,7 +468,7 @@
TARGET(BINARY_OP_INPLACE_ADD_UNICODE) {
PyObject *right = stack_pointer[-1];
PyObject *left = stack_pointer[-2];
- #line 341 "Python/bytecodes.c"
+ #line 342 "Python/bytecodes.c"
DEOPT_IF(!PyUnicode_CheckExact(left), BINARY_OP);
DEOPT_IF(Py_TYPE(right) != Py_TYPE(left), BINARY_OP);
_Py_CODEUNIT true_next = next_instr[INLINE_CACHE_ENTRIES_BINARY_OP];
@@ -504,7 +504,7 @@
PyObject *right = stack_pointer[-1];
PyObject *left = stack_pointer[-2];
PyObject *sum;
- #line 370 "Python/bytecodes.c"
+ #line 371 "Python/bytecodes.c"
DEOPT_IF(!PyFloat_CheckExact(left), BINARY_OP);
DEOPT_IF(Py_TYPE(right) != Py_TYPE(left), BINARY_OP);
STAT_INC(BINARY_OP, hit);
@@ -522,7 +522,7 @@
PyObject *right = stack_pointer[-1];
PyObject *left = stack_pointer[-2];
PyObject *sum;
- #line 379 "Python/bytecodes.c"
+ #line 380 "Python/bytecodes.c"
DEOPT_IF(!PyLong_CheckExact(left), BINARY_OP);
DEOPT_IF(Py_TYPE(right) != Py_TYPE(left), BINARY_OP);
STAT_INC(BINARY_OP, hit);
@@ -543,7 +543,7 @@
PyObject *sub = stack_pointer[-1];
PyObject *container = stack_pointer[-2];
PyObject *res;
- #line 397 "Python/bytecodes.c"
+ #line 398 "Python/bytecodes.c"
#if ENABLE_SPECIALIZATION
_PyBinarySubscrCache *cache = (_PyBinarySubscrCache *)next_instr;
if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
@@ -558,7 +558,7 @@
#line 559 "Python/generated_cases.c.h"
Py_DECREF(container);
Py_DECREF(sub);
- #line 409 "Python/bytecodes.c"
+ #line 410 "Python/bytecodes.c"
if (res == NULL) goto pop_2_error;
#line 564 "Python/generated_cases.c.h"
STACK_SHRINK(1);
@@ -572,7 +572,7 @@
PyObject *start = stack_pointer[-2];
PyObject *container = stack_pointer[-3];
PyObject *res;
- #line 413 "Python/bytecodes.c"
+ #line 414 "Python/bytecodes.c"
PyObject *slice = _PyBuildSlice_ConsumeRefs(start, stop);
// Can't use ERROR_IF() here, because we haven't
// DECREF'ed container yet, and we still own slice.
@@ -596,7 +596,7 @@
PyObject *start = stack_pointer[-2];
PyObject *container = stack_pointer[-3];
PyObject *v = stack_pointer[-4];
- #line 428 "Python/bytecodes.c"
+ #line 429 "Python/bytecodes.c"
PyObject *slice = _PyBuildSlice_ConsumeRefs(start, stop);
int err;
if (slice == NULL) {
@@ -618,7 +618,7 @@
PyObject *sub = stack_pointer[-1];
PyObject *list = stack_pointer[-2];
PyObject *res;
- #line 443 "Python/bytecodes.c"
+ #line 444 "Python/bytecodes.c"
DEOPT_IF(!PyLong_CheckExact(sub), BINARY_SUBSCR);
DEOPT_IF(!PyList_CheckExact(list), BINARY_SUBSCR);
@@ -643,7 +643,7 @@
PyObject *sub = stack_pointer[-1];
PyObject *tuple = stack_pointer[-2];
PyObject *res;
- #line 459 "Python/bytecodes.c"
+ #line 460 "Python/bytecodes.c"
DEOPT_IF(!PyLong_CheckExact(sub), BINARY_SUBSCR);
DEOPT_IF(!PyTuple_CheckExact(tuple), BINARY_SUBSCR);
@@ -668,7 +668,7 @@
PyObject *sub = stack_pointer[-1];
PyObject *dict = stack_pointer[-2];
PyObject *res;
- #line 475 "Python/bytecodes.c"
+ #line 476 "Python/bytecodes.c"
DEOPT_IF(!PyDict_CheckExact(dict), BINARY_SUBSCR);
STAT_INC(BINARY_SUBSCR, hit);
res = PyDict_GetItemWithError(dict, sub);
@@ -679,7 +679,7 @@
#line 680 "Python/generated_cases.c.h"
Py_DECREF(dict);
Py_DECREF(sub);
- #line 483 "Python/bytecodes.c"
+ #line 484 "Python/bytecodes.c"
if (true) goto pop_2_error;
}
Py_INCREF(res); // Do this before DECREF'ing dict, sub
@@ -695,7 +695,7 @@
TARGET(BINARY_SUBSCR_GETITEM) {
PyObject *sub = stack_pointer[-1];
PyObject *container = stack_pointer[-2];
- #line 490 "Python/bytecodes.c"
+ #line 491 "Python/bytecodes.c"
PyTypeObject *tp = Py_TYPE(container);
DEOPT_IF(!PyType_HasFeature(tp, Py_TPFLAGS_HEAPTYPE), BINARY_SUBSCR);
PyHeapTypeObject *ht = (PyHeapTypeObject *)tp;
@@ -723,7 +723,7 @@
TARGET(LIST_APPEND) {
PyObject *v = stack_pointer[-1];
PyObject *list = stack_pointer[-(2 + (oparg-1))];
- #line 514 "Python/bytecodes.c"
+ #line 515 "Python/bytecodes.c"
if (_PyList_AppendTakeRef((PyListObject *)list, v) < 0) goto pop_1_error;
#line 729 "Python/generated_cases.c.h"
STACK_SHRINK(1);
@@ -734,11 +734,11 @@
TARGET(SET_ADD) {
PyObject *v = stack_pointer[-1];
PyObject *set = stack_pointer[-(2 + (oparg-1))];
- #line 519 "Python/bytecodes.c"
+ #line 520 "Python/bytecodes.c"
int err = PySet_Add(set, v);
#line 740 "Python/generated_cases.c.h"
Py_DECREF(v);
- #line 521 "Python/bytecodes.c"
+ #line 522 "Python/bytecodes.c"
if (err) goto pop_1_error;
#line 744 "Python/generated_cases.c.h"
STACK_SHRINK(1);
@@ -753,7 +753,7 @@
PyObject *container = stack_pointer[-2];
PyObject *v = stack_pointer[-3];
uint16_t counter = read_u16(&next_instr[0].cache);
- #line 532 "Python/bytecodes.c"
+ #line 533 "Python/bytecodes.c"
#if ENABLE_SPECIALIZATION
if (ADAPTIVE_COUNTER_IS_ZERO(counter)) {
next_instr--;
@@ -772,7 +772,7 @@
Py_DECREF(v);
Py_DECREF(container);
Py_DECREF(sub);
- #line 547 "Python/bytecodes.c"
+ #line 548 "Python/bytecodes.c"
if (err) goto pop_3_error;
#line 778 "Python/generated_cases.c.h"
STACK_SHRINK(3);
@@ -784,7 +784,7 @@
PyObject *sub = stack_pointer[-1];
PyObject *list = stack_pointer[-2];
PyObject *value = stack_pointer[-3];
- #line 551 "Python/bytecodes.c"
+ #line 552 "Python/bytecodes.c"
DEOPT_IF(!PyLong_CheckExact(sub), STORE_SUBSCR);
DEOPT_IF(!PyList_CheckExact(list), STORE_SUBSCR);
@@ -811,7 +811,7 @@
PyObject *sub = stack_pointer[-1];
PyObject *dict = stack_pointer[-2];
PyObject *value = stack_pointer[-3];
- #line 570 "Python/bytecodes.c"
+ #line 571 "Python/bytecodes.c"
DEOPT_IF(!PyDict_CheckExact(dict), STORE_SUBSCR);
STAT_INC(STORE_SUBSCR, hit);
int err = _PyDict_SetItem_Take2((PyDictObject *)dict, sub, value);
@@ -826,13 +826,13 @@
TARGET(DELETE_SUBSCR) {
PyObject *sub = stack_pointer[-1];
PyObject *container = stack_pointer[-2];
- #line 578 "Python/bytecodes.c"
+ #line 579 "Python/bytecodes.c"
/* del container[sub] */
int err = PyObject_DelItem(container, sub);
#line 833 "Python/generated_cases.c.h"
Py_DECREF(container);
Py_DECREF(sub);
- #line 581 "Python/bytecodes.c"
+ #line 582 "Python/bytecodes.c"
if (err) goto pop_2_error;
#line 838 "Python/generated_cases.c.h"
STACK_SHRINK(2);
@@ -842,12 +842,12 @@
TARGET(CALL_INTRINSIC_1) {
PyObject *value = stack_pointer[-1];
PyObject *res;
- #line 585 "Python/bytecodes.c"
+ #line 586 "Python/bytecodes.c"
assert(oparg <= MAX_INTRINSIC_1);
res = _PyIntrinsics_UnaryFunctions[oparg](tstate, value);
#line 849 "Python/generated_cases.c.h"
Py_DECREF(value);
- #line 588 "Python/bytecodes.c"
+ #line 589 "Python/bytecodes.c"
if (res == NULL) goto pop_1_error;
#line 853 "Python/generated_cases.c.h"
stack_pointer[-1] = res;
@@ -858,13 +858,13 @@
PyObject *value1 = stack_pointer[-1];
PyObject *value2 = stack_pointer[-2];
PyObject *res;
- #line 592 "Python/bytecodes.c"
+ #line 593 "Python/bytecodes.c"
assert(oparg <= MAX_INTRINSIC_2);
res = _PyIntrinsics_BinaryFunctions[oparg](tstate, value2, value1);
#line 865 "Python/generated_cases.c.h"
Py_DECREF(value2);
Py_DECREF(value1);
- #line 595 "Python/bytecodes.c"
+ #line 596 "Python/bytecodes.c"
if (res == NULL) goto pop_2_error;
#line 870 "Python/generated_cases.c.h"
STACK_SHRINK(1);
@@ -874,7 +874,7 @@
TARGET(RAISE_VARARGS) {
PyObject **args = (stack_pointer - oparg);
- #line 599 "Python/bytecodes.c"
+ #line 600 "Python/bytecodes.c"
PyObject *cause = NULL, *exc = NULL;
switch (oparg) {
case 2:
@@ -897,7 +897,7 @@
TARGET(INTERPRETER_EXIT) {
PyObject *retval = stack_pointer[-1];
- #line 619 "Python/bytecodes.c"
+ #line 620 "Python/bytecodes.c"
assert(frame == &entry_frame);
assert(_PyFrame_IsIncomplete(frame));
STACK_SHRINK(1); // Since we're not going to DISPATCH()
@@ -913,7 +913,7 @@
TARGET(RETURN_VALUE) {
PyObject *retval = stack_pointer[-1];
- #line 632 "Python/bytecodes.c"
+ #line 633 "Python/bytecodes.c"
STACK_SHRINK(1);
assert(EMPTY());
_PyFrame_SetStackPointer(frame, stack_pointer);
@@ -931,7 +931,7 @@
TARGET(INSTRUMENTED_RETURN_VALUE) {
PyObject *retval = stack_pointer[-1];
- #line 647 "Python/bytecodes.c"
+ #line 648 "Python/bytecodes.c"
int err = _Py_call_instrumentation_arg(
tstate, PY_MONITORING_EVENT_PY_RETURN,
frame, next_instr-1, retval);
@@ -952,7 +952,7 @@
}
TARGET(RETURN_CONST) {
- #line 666 "Python/bytecodes.c"
+ #line 667 "Python/bytecodes.c"
PyObject *retval = GETITEM(frame->f_code->co_consts, oparg);
Py_INCREF(retval);
assert(EMPTY());
@@ -970,7 +970,7 @@
}
TARGET(INSTRUMENTED_RETURN_CONST) {
- #line 682 "Python/bytecodes.c"
+ #line 683 "Python/bytecodes.c"
PyObject *retval = GETITEM(frame->f_code->co_consts, oparg);
int err = _Py_call_instrumentation_arg(
tstate, PY_MONITORING_EVENT_PY_RETURN,
@@ -994,7 +994,7 @@
TARGET(GET_AITER) {
PyObject *obj = stack_pointer[-1];
PyObject *iter;
- #line 702 "Python/bytecodes.c"
+ #line 703 "Python/bytecodes.c"
unaryfunc getter = NULL;
PyTypeObject *type = Py_TYPE(obj);
@@ -1009,14 +1009,14 @@
type->tp_name);
#line 1011 "Python/generated_cases.c.h"
Py_DECREF(obj);
- #line 715 "Python/bytecodes.c"
+ #line 716 "Python/bytecodes.c"
if (true) goto pop_1_error;
}
iter = (*getter)(obj);
#line 1018 "Python/generated_cases.c.h"
Py_DECREF(obj);
- #line 720 "Python/bytecodes.c"
+ #line 721 "Python/bytecodes.c"
if (iter == NULL) goto pop_1_error;
if (Py_TYPE(iter)->tp_as_async == NULL ||
@@ -1037,7 +1037,7 @@
TARGET(GET_ANEXT) {
PyObject *aiter = stack_pointer[-1];
PyObject *awaitable;
- #line 735 "Python/bytecodes.c"
+ #line 736 "Python/bytecodes.c"
unaryfunc getter = NULL;
PyObject *next_iter = NULL;
PyTypeObject *type = Py_TYPE(aiter);
@@ -1092,7 +1092,7 @@
PREDICTED(GET_AWAITABLE);
PyObject *iterable = stack_pointer[-1];
PyObject *iter;
- #line 782 "Python/bytecodes.c"
+ #line 783 "Python/bytecodes.c"
iter = _PyCoro_GetAwaitableIter(iterable);
if (iter == NULL) {
@@ -1101,7 +1101,7 @@
#line 1103 "Python/generated_cases.c.h"
Py_DECREF(iterable);
- #line 789 "Python/bytecodes.c"
+ #line 790 "Python/bytecodes.c"
if (iter != NULL && PyCoro_CheckExact(iter)) {
PyObject *yf = _PyGen_yf((PyGenObject*)iter);
@@ -1130,7 +1130,7 @@
PyObject *v = stack_pointer[-1];
PyObject *receiver = stack_pointer[-2];
PyObject *retval;
- #line 815 "Python/bytecodes.c"
+ #line 816 "Python/bytecodes.c"
#if ENABLE_SPECIALIZATION
_PySendCache *cache = (_PySendCache *)next_instr;
if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
@@ -1185,7 +1185,7 @@
TARGET(SEND_GEN) {
PyObject *v = stack_pointer[-1];
PyObject *receiver = stack_pointer[-2];
- #line 863 "Python/bytecodes.c"
+ #line 864 "Python/bytecodes.c"
PyGenObject *gen = (PyGenObject *)receiver;
DEOPT_IF(Py_TYPE(gen) != &PyGen_Type &&
Py_TYPE(gen) != &PyCoro_Type, SEND);
@@ -1205,7 +1205,7 @@
TARGET(INSTRUMENTED_YIELD_VALUE) {
PyObject *retval = stack_pointer[-1];
- #line 880 "Python/bytecodes.c"
+ #line 881 "Python/bytecodes.c"
assert(frame != &entry_frame);
PyGenObject *gen = _PyFrame_GetGenerator(frame);
gen->gi_frame_state = FRAME_SUSPENDED;
@@ -1227,7 +1227,7 @@
TARGET(YIELD_VALUE) {
PyObject *retval = stack_pointer[-1];
- #line 899 "Python/bytecodes.c"
+ #line 900 "Python/bytecodes.c"
// NOTE: It's important that YIELD_VALUE never raises an exception!
// The compiler treats any exception raised here as a failed close()
// or throw() call.
@@ -1248,7 +1248,7 @@
TARGET(POP_EXCEPT) {
PyObject *exc_value = stack_pointer[-1];
- #line 917 "Python/bytecodes.c"
+ #line 918 "Python/bytecodes.c"
_PyErr_StackItem *exc_info = tstate->exc_info;
Py_XSETREF(exc_info->exc_value, exc_value);
#line 1255 "Python/generated_cases.c.h"
@@ -1259,7 +1259,7 @@
TARGET(RERAISE) {
PyObject *exc = stack_pointer[-1];
PyObject **values = (stack_pointer - (1 + oparg));
- #line 922 "Python/bytecodes.c"
+ #line 923 "Python/bytecodes.c"
assert(oparg >= 0 && oparg <= 2);
if (oparg) {
PyObject *lasti = values[0];
@@ -1283,13 +1283,13 @@
TARGET(END_ASYNC_FOR) {
PyObject *exc = stack_pointer[-1];
PyObject *awaitable = stack_pointer[-2];
- #line 942 "Python/bytecodes.c"
+ #line 943 "Python/bytecodes.c"
assert(exc && PyExceptionInstance_Check(exc));
if (PyErr_GivenExceptionMatches(exc, PyExc_StopAsyncIteration)) {
#line 1290 "Python/generated_cases.c.h"
Py_DECREF(awaitable);
Py_DECREF(exc);
- #line 945 "Python/bytecodes.c"
+ #line 946 "Python/bytecodes.c"
}
else {
Py_INCREF(exc);
@@ -1307,7 +1307,7 @@
PyObject *sub_iter = stack_pointer[-3];
PyObject *none;
PyObject *value;
- #line 954 "Python/bytecodes.c"
+ #line 955 "Python/bytecodes.c"
assert(throwflag);
assert(exc_value && PyExceptionInstance_Check(exc_value));
if (PyErr_GivenExceptionMatches(exc_value, PyExc_StopIteration)) {
@@ -1316,7 +1316,7 @@
Py_DECREF(sub_iter);
Py_DECREF(last_sent_val);
Py_DECREF(exc_value);
- #line 959 "Python/bytecodes.c"
+ #line 960 "Python/bytecodes.c"
none = Py_NewRef(Py_None);
}
else {
@@ -1332,7 +1332,7 @@
TARGET(LOAD_ASSERTION_ERROR) {
PyObject *value;
- #line 968 "Python/bytecodes.c"
+ #line 969 "Python/bytecodes.c"
value = Py_NewRef(PyExc_AssertionError);
#line 1338 "Python/generated_cases.c.h"
STACK_GROW(1);
@@ -1342,7 +1342,7 @@
TARGET(LOAD_BUILD_CLASS) {
PyObject *bc;
- #line 972 "Python/bytecodes.c"
+ #line 973 "Python/bytecodes.c"
if (PyDict_CheckExact(BUILTINS())) {
bc = _PyDict_GetItemWithError(BUILTINS(),
&_Py_ID(__build_class__));
@@ -1372,7 +1372,7 @@
TARGET(STORE_NAME) {
PyObject *v = stack_pointer[-1];
- #line 996 "Python/bytecodes.c"
+ #line 997 "Python/bytecodes.c"
PyObject *name = GETITEM(frame->f_code->co_names, oparg);
PyObject *ns = LOCALS();
int err;
@@ -1381,7 +1381,7 @@
"no locals found when storing %R", name);
#line 1383 "Python/generated_cases.c.h"
Py_DECREF(v);
- #line 1003 "Python/bytecodes.c"
+ #line 1004 "Python/bytecodes.c"
if (true) goto pop_1_error;
}
if (PyDict_CheckExact(ns))
@@ -1390,7 +1390,7 @@
err = PyObject_SetItem(ns, name, v);
#line 1392 "Python/generated_cases.c.h"
Py_DECREF(v);
- #line 1010 "Python/bytecodes.c"
+ #line 1011 "Python/bytecodes.c"
if (err) goto pop_1_error;
#line 1396 "Python/generated_cases.c.h"
STACK_SHRINK(1);
@@ -1398,7 +1398,7 @@
}
TARGET(DELETE_NAME) {
- #line 1014 "Python/bytecodes.c"
+ #line 1015 "Python/bytecodes.c"
PyObject *name = GETITEM(frame->f_code->co_names, oparg);
PyObject *ns = LOCALS();
int err;
@@ -1423,7 +1423,7 @@
PREDICTED(UNPACK_SEQUENCE);
static_assert(INLINE_CACHE_ENTRIES_UNPACK_SEQUENCE == 1, "incorrect cache size");
PyObject *seq = stack_pointer[-1];
- #line 1040 "Python/bytecodes.c"
+ #line 1041 "Python/bytecodes.c"
#if ENABLE_SPECIALIZATION
_PyUnpackSequenceCache *cache = (_PyUnpackSequenceCache *)next_instr;
if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
@@ -1438,7 +1438,7 @@
int res = unpack_iterable(tstate, seq, oparg, -1, top);
#line 1440 "Python/generated_cases.c.h"
Py_DECREF(seq);
- #line 1053 "Python/bytecodes.c"
+ #line 1054 "Python/bytecodes.c"
if (res == 0) goto pop_1_error;
#line 1444 "Python/generated_cases.c.h"
STACK_SHRINK(1);
@@ -1450,7 +1450,7 @@
TARGET(UNPACK_SEQUENCE_TWO_TUPLE) {
PyObject *seq = stack_pointer[-1];
PyObject **values = stack_pointer - (1);
- #line 1057 "Python/bytecodes.c"
+ #line 1058 "Python/bytecodes.c"
DEOPT_IF(!PyTuple_CheckExact(seq), UNPACK_SEQUENCE);
DEOPT_IF(PyTuple_GET_SIZE(seq) != 2, UNPACK_SEQUENCE);
assert(oparg == 2);
@@ -1468,7 +1468,7 @@
TARGET(UNPACK_SEQUENCE_TUPLE) {
PyObject *seq = stack_pointer[-1];
PyObject **values = stack_pointer - (1);
- #line 1067 "Python/bytecodes.c"
+ #line 1068 "Python/bytecodes.c"
DEOPT_IF(!PyTuple_CheckExact(seq), UNPACK_SEQUENCE);
DEOPT_IF(PyTuple_GET_SIZE(seq) != oparg, UNPACK_SEQUENCE);
STAT_INC(UNPACK_SEQUENCE, hit);
@@ -1487,7 +1487,7 @@
TARGET(UNPACK_SEQUENCE_LIST) {
PyObject *seq = stack_pointer[-1];
PyObject **values = stack_pointer - (1);
- #line 1078 "Python/bytecodes.c"
+ #line 1079 "Python/bytecodes.c"
DEOPT_IF(!PyList_CheckExact(seq), UNPACK_SEQUENCE);
DEOPT_IF(PyList_GET_SIZE(seq) != oparg, UNPACK_SEQUENCE);
STAT_INC(UNPACK_SEQUENCE, hit);
@@ -1505,13 +1505,13 @@
TARGET(UNPACK_EX) {
PyObject *seq = stack_pointer[-1];
- #line 1089 "Python/bytecodes.c"
+ #line 1090 "Python/bytecodes.c"
int totalargs = 1 + (oparg & 0xFF) + (oparg >> 8);
PyObject **top = stack_pointer + totalargs - 1;
int res = unpack_iterable(tstate, seq, oparg & 0xFF, oparg >> 8, top);
#line 1513 "Python/generated_cases.c.h"
Py_DECREF(seq);
- #line 1093 "Python/bytecodes.c"
+ #line 1094 "Python/bytecodes.c"
if (res == 0) goto pop_1_error;
#line 1517 "Python/generated_cases.c.h"
STACK_GROW((oparg & 0xFF) + (oparg >> 8));
@@ -1524,7 +1524,7 @@
PyObject *owner = stack_pointer[-1];
PyObject *v = stack_pointer[-2];
uint16_t counter = read_u16(&next_instr[0].cache);
- #line 1104 "Python/bytecodes.c"
+ #line 1105 "Python/bytecodes.c"
#if ENABLE_SPECIALIZATION
if (ADAPTIVE_COUNTER_IS_ZERO(counter)) {
PyObject *name = GETITEM(frame->f_code->co_names, oparg);
@@ -1543,7 +1543,7 @@
#line 1544 "Python/generated_cases.c.h"
Py_DECREF(v);
Py_DECREF(owner);
- #line 1120 "Python/bytecodes.c"
+ #line 1121 "Python/bytecodes.c"
if (err) goto pop_2_error;
#line 1549 "Python/generated_cases.c.h"
STACK_SHRINK(2);
@@ -1553,12 +1553,12 @@
TARGET(DELETE_ATTR) {
PyObject *owner = stack_pointer[-1];
- #line 1124 "Python/bytecodes.c"
+ #line 1125 "Python/bytecodes.c"
PyObject *name = GETITEM(frame->f_code->co_names, oparg);
int err = PyObject_SetAttr(owner, name, (PyObject *)NULL);
#line 1560 "Python/generated_cases.c.h"
Py_DECREF(owner);
- #line 1127 "Python/bytecodes.c"
+ #line 1128 "Python/bytecodes.c"
if (err) goto pop_1_error;
#line 1564 "Python/generated_cases.c.h"
STACK_SHRINK(1);
@@ -1567,12 +1567,12 @@
TARGET(STORE_GLOBAL) {
PyObject *v = stack_pointer[-1];
- #line 1131 "Python/bytecodes.c"
+ #line 1132 "Python/bytecodes.c"
PyObject *name = GETITEM(frame->f_code->co_names, oparg);
int err = PyDict_SetItem(GLOBALS(), name, v);
#line 1574 "Python/generated_cases.c.h"
Py_DECREF(v);
- #line 1134 "Python/bytecodes.c"
+ #line 1135 "Python/bytecodes.c"
if (err) goto pop_1_error;
#line 1578 "Python/generated_cases.c.h"
STACK_SHRINK(1);
@@ -1580,7 +1580,7 @@
}
TARGET(DELETE_GLOBAL) {
- #line 1138 "Python/bytecodes.c"
+ #line 1139 "Python/bytecodes.c"
PyObject *name = GETITEM(frame->f_code->co_names, oparg);
int err;
err = PyDict_DelItem(GLOBALS(), name);
@@ -1598,7 +1598,7 @@
TARGET(LOAD_NAME) {
PyObject *v;
- #line 1152 "Python/bytecodes.c"
+ #line 1153 "Python/bytecodes.c"
PyObject *name = GETITEM(frame->f_code->co_names, oparg);
PyObject *locals = LOCALS();
if (locals == NULL) {
@@ -1668,7 +1668,7 @@
static_assert(INLINE_CACHE_ENTRIES_LOAD_GLOBAL == 4, "incorrect cache size");
PyObject *null = NULL;
PyObject *v;
- #line 1219 "Python/bytecodes.c"
+ #line 1220 "Python/bytecodes.c"
#if ENABLE_SPECIALIZATION
_PyLoadGlobalCache *cache = (_PyLoadGlobalCache *)next_instr;
if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
@@ -1734,7 +1734,7 @@
PyObject *res;
uint16_t index = read_u16(&next_instr[1].cache);
uint16_t version = read_u16(&next_instr[2].cache);
- #line 1273 "Python/bytecodes.c"
+ #line 1274 "Python/bytecodes.c"
DEOPT_IF(!PyDict_CheckExact(GLOBALS()), LOAD_GLOBAL);
PyDictObject *dict = (PyDictObject *)GLOBALS();
DEOPT_IF(dict->ma_keys->dk_version != version, LOAD_GLOBAL);
@@ -1760,7 +1760,7 @@
uint16_t index = read_u16(&next_instr[1].cache);
uint16_t mod_version = read_u16(&next_instr[2].cache);
uint16_t bltn_version = read_u16(&next_instr[3].cache);
- #line 1286 "Python/bytecodes.c"
+ #line 1287 "Python/bytecodes.c"
DEOPT_IF(!PyDict_CheckExact(GLOBALS()), LOAD_GLOBAL);
DEOPT_IF(!PyDict_CheckExact(BUILTINS()), LOAD_GLOBAL);
PyDictObject *mdict = (PyDictObject *)GLOBALS();
@@ -1785,7 +1785,7 @@
}
TARGET(DELETE_FAST) {
- #line 1303 "Python/bytecodes.c"
+ #line 1304 "Python/bytecodes.c"
PyObject *v = GETLOCAL(oparg);
if (v == NULL) goto unbound_local_error;
SETLOCAL(oparg, NULL);
@@ -1794,7 +1794,7 @@
}
TARGET(MAKE_CELL) {
- #line 1309 "Python/bytecodes.c"
+ #line 1310 "Python/bytecodes.c"
// "initial" is probably NULL but not if it's an arg (or set
// via PyFrame_LocalsToFast() before MAKE_CELL has run).
PyObject *initial = GETLOCAL(oparg);
@@ -1808,7 +1808,7 @@
}
TARGET(DELETE_DEREF) {
- #line 1320 "Python/bytecodes.c"
+ #line 1321 "Python/bytecodes.c"
PyObject *cell = GETLOCAL(oparg);
PyObject *oldobj = PyCell_GET(cell);
// Can't use ERROR_IF here.
@@ -1825,7 +1825,7 @@
TARGET(LOAD_CLASSDEREF) {
PyObject *value;
- #line 1333 "Python/bytecodes.c"
+ #line 1334 "Python/bytecodes.c"
PyObject *name, *locals = LOCALS();
assert(locals);
assert(oparg >= 0 && oparg < frame->f_code->co_nlocalsplus);
@@ -1865,7 +1865,7 @@
TARGET(LOAD_DEREF) {
PyObject *value;
- #line 1367 "Python/bytecodes.c"
+ #line 1368 "Python/bytecodes.c"
PyObject *cell = GETLOCAL(oparg);
value = PyCell_GET(cell);
if (value == NULL) {
@@ -1881,7 +1881,7 @@
TARGET(STORE_DEREF) {
PyObject *v = stack_pointer[-1];
- #line 1377 "Python/bytecodes.c"
+ #line 1378 "Python/bytecodes.c"
PyObject *cell = GETLOCAL(oparg);
PyObject *oldobj = PyCell_GET(cell);
PyCell_SET(cell, v);
@@ -1892,7 +1892,7 @@
}
TARGET(COPY_FREE_VARS) {
- #line 1384 "Python/bytecodes.c"
+ #line 1385 "Python/bytecodes.c"
/* Copy closure variables to free variables */
PyCodeObject *co = frame->f_code;
assert(PyFunction_Check(frame->f_funcobj));
@@ -1910,13 +1910,13 @@
TARGET(BUILD_STRING) {
PyObject **pieces = (stack_pointer - oparg);
PyObject *str;
- #line 1397 "Python/bytecodes.c"
+ #line 1398 "Python/bytecodes.c"
str = _PyUnicode_JoinArray(&_Py_STR(empty), pieces, oparg);
#line 1916 "Python/generated_cases.c.h"
for (int _i = oparg; --_i >= 0;) {
Py_DECREF(pieces[_i]);
}
- #line 1399 "Python/bytecodes.c"
+ #line 1400 "Python/bytecodes.c"
if (str == NULL) { STACK_SHRINK(oparg); goto error; }
#line 1922 "Python/generated_cases.c.h"
STACK_SHRINK(oparg);
@@ -1928,7 +1928,7 @@
TARGET(BUILD_TUPLE) {
PyObject **values = (stack_pointer - oparg);
PyObject *tup;
- #line 1403 "Python/bytecodes.c"
+ #line 1404 "Python/bytecodes.c"
tup = _PyTuple_FromArraySteal(values, oparg);
if (tup == NULL) { STACK_SHRINK(oparg); goto error; }
#line 1935 "Python/generated_cases.c.h"
@@ -1941,7 +1941,7 @@
TARGET(BUILD_LIST) {
PyObject **values = (stack_pointer - oparg);
PyObject *list;
- #line 1408 "Python/bytecodes.c"
+ #line 1409 "Python/bytecodes.c"
list = _PyList_FromArraySteal(values, oparg);
if (list == NULL) { STACK_SHRINK(oparg); goto error; }
#line 1948 "Python/generated_cases.c.h"
@@ -1954,7 +1954,7 @@
TARGET(LIST_EXTEND) {
PyObject *iterable = stack_pointer[-1];
PyObject *list = stack_pointer[-(2 + (oparg-1))];
- #line 1413 "Python/bytecodes.c"
+ #line 1414 "Python/bytecodes.c"
PyObject *none_val = _PyList_Extend((PyListObject *)list, iterable);
if (none_val == NULL) {
if (_PyErr_ExceptionMatches(tstate, PyExc_TypeError) &&
@@ -1967,7 +1967,7 @@
}
#line 1969 "Python/generated_cases.c.h"
Py_DECREF(iterable);
- #line 1424 "Python/bytecodes.c"
+ #line 1425 "Python/bytecodes.c"
if (true) goto pop_1_error;
}
Py_DECREF(none_val);
@@ -1980,11 +1980,11 @@
TARGET(SET_UPDATE) {
PyObject *iterable = stack_pointer[-1];
PyObject *set = stack_pointer[-(2 + (oparg-1))];
- #line 1431 "Python/bytecodes.c"
+ #line 1432 "Python/bytecodes.c"
int err = _PySet_Update(set, iterable);
#line 1986 "Python/generated_cases.c.h"
Py_DECREF(iterable);
- #line 1433 "Python/bytecodes.c"
+ #line 1434 "Python/bytecodes.c"
if (err < 0) goto pop_1_error;
#line 1990 "Python/generated_cases.c.h"
STACK_SHRINK(1);
@@ -1994,7 +1994,7 @@
TARGET(BUILD_SET) {
PyObject **values = (stack_pointer - oparg);
PyObject *set;
- #line 1437 "Python/bytecodes.c"
+ #line 1438 "Python/bytecodes.c"
set = PySet_New(NULL);
if (set == NULL)
goto error;
@@ -2019,7 +2019,7 @@
TARGET(BUILD_MAP) {
PyObject **values = (stack_pointer - oparg*2);
PyObject *map;
- #line 1454 "Python/bytecodes.c"
+ #line 1455 "Python/bytecodes.c"
map = _PyDict_FromItems(
values, 2,
values+1, 2,
@@ -2031,7 +2031,7 @@
for (int _i = oparg*2; --_i >= 0;) {
Py_DECREF(values[_i]);
}
- #line 1462 "Python/bytecodes.c"
+ #line 1463 "Python/bytecodes.c"
if (map == NULL) { STACK_SHRINK(oparg*2); goto error; }
#line 2037 "Python/generated_cases.c.h"
STACK_SHRINK(oparg*2);
@@ -2041,7 +2041,7 @@
}
TARGET(SETUP_ANNOTATIONS) {
- #line 1466 "Python/bytecodes.c"
+ #line 1467 "Python/bytecodes.c"
int err;
PyObject *ann_dict;
if (LOCALS() == NULL) {
@@ -2089,7 +2089,7 @@
PyObject *keys = stack_pointer[-1];
PyObject **values = (stack_pointer - (1 + oparg));
PyObject *map;
- #line 1508 "Python/bytecodes.c"
+ #line 1509 "Python/bytecodes.c"
if (!PyTuple_CheckExact(keys) ||
PyTuple_GET_SIZE(keys) != (Py_ssize_t)oparg) {
_PyErr_SetString(tstate, PyExc_SystemError,
@@ -2104,7 +2104,7 @@
Py_DECREF(values[_i]);
}
Py_DECREF(keys);
- #line 1518 "Python/bytecodes.c"
+ #line 1519 "Python/bytecodes.c"
if (map == NULL) { STACK_SHRINK(oparg); goto pop_1_error; }
#line 2110 "Python/generated_cases.c.h"
STACK_SHRINK(oparg);
@@ -2114,7 +2114,7 @@
TARGET(DICT_UPDATE) {
PyObject *update = stack_pointer[-1];
- #line 1522 "Python/bytecodes.c"
+ #line 1523 "Python/bytecodes.c"
PyObject *dict = PEEK(oparg + 1); // update is still on the stack
if (PyDict_Update(dict, update) < 0) {
if (_PyErr_ExceptionMatches(tstate, PyExc_AttributeError)) {
@@ -2124,7 +2124,7 @@
}
#line 2126 "Python/generated_cases.c.h"
Py_DECREF(update);
- #line 1530 "Python/bytecodes.c"
+ #line 1531 "Python/bytecodes.c"
if (true) goto pop_1_error;
}
#line 2131 "Python/generated_cases.c.h"
@@ -2135,14 +2135,14 @@
TARGET(DICT_MERGE) {
PyObject *update = stack_pointer[-1];
- #line 1536 "Python/bytecodes.c"
+ #line 1537 "Python/bytecodes.c"
PyObject *dict = PEEK(oparg + 1); // update is still on the stack
if (_PyDict_MergeEx(dict, update, 2) < 0) {
format_kwargs_error(tstate, PEEK(3 + oparg), update);
#line 2144 "Python/generated_cases.c.h"
Py_DECREF(update);
- #line 1541 "Python/bytecodes.c"
+ #line 1542 "Python/bytecodes.c"
if (true) goto pop_1_error;
}
#line 2149 "Python/generated_cases.c.h"
@@ -2155,7 +2155,7 @@
TARGET(MAP_ADD) {
PyObject *value = stack_pointer[-1];
PyObject *key = stack_pointer[-2];
- #line 1548 "Python/bytecodes.c"
+ #line 1549 "Python/bytecodes.c"
PyObject *dict = PEEK(oparg + 2); // key, value are still on the stack
assert(PyDict_CheckExact(dict));
/* dict[key] = value */
@@ -2167,13 +2167,85 @@
DISPATCH();
}
+ TARGET(LOAD_SUPER_ATTR) {
+ PREDICTED(LOAD_SUPER_ATTR);
+ static_assert(INLINE_CACHE_ENTRIES_LOAD_SUPER_ATTR == 9, "incorrect cache size");
+ PyObject *self = stack_pointer[-1];
+ PyObject *class = stack_pointer[-2];
+ PyObject *global_super = stack_pointer[-3];
+ PyObject *res2 = NULL;
+ PyObject *res;
+ #line 1563 "Python/bytecodes.c"
+ PyObject *name = GETITEM(frame->f_code->co_names, oparg >> 2);
+ int load_method = oparg & 1;
+ #if ENABLE_SPECIALIZATION
+ _PySuperAttrCache *cache = (_PySuperAttrCache *)next_instr;
+ if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
+ next_instr--;
+ _Py_Specialize_LoadSuperAttr(global_super, class, self, next_instr, name, load_method);
+ DISPATCH_SAME_OPARG();
+ }
+ STAT_INC(LOAD_SUPER_ATTR, deferred);
+ DECREMENT_ADAPTIVE_COUNTER(cache->counter);
+ #endif /* ENABLE_SPECIALIZATION */
+
+ // we make no attempt to optimize here; specializations should
+ // handle any case whose performance we care about
+ PyObject *stack[] = {class, self};
+ PyObject *super = PyObject_Vectorcall(global_super, stack, oparg & 2, NULL);
+ #line 2197 "Python/generated_cases.c.h"
+ Py_DECREF(global_super);
+ Py_DECREF(class);
+ Py_DECREF(self);
+ #line 1581 "Python/bytecodes.c"
+ if (super == NULL) goto pop_3_error;
+ res = PyObject_GetAttr(super, name);
+ Py_DECREF(super);
+ if (res == NULL) goto pop_3_error;
+ #line 2206 "Python/generated_cases.c.h"
+ STACK_SHRINK(2);
+ STACK_GROW(((oparg & 1) ? 1 : 0));
+ stack_pointer[-1] = res;
+ if (oparg & 1) { stack_pointer[-(1 + ((oparg & 1) ? 1 : 0))] = res2; }
+ next_instr += 9;
+ DISPATCH();
+ }
+
+ TARGET(LOAD_SUPER_ATTR_METHOD) {
+ PyObject *self = stack_pointer[-1];
+ PyObject *class = stack_pointer[-2];
+ PyObject *global_super = stack_pointer[-3];
+ PyObject *res2;
+ PyObject *res;
+ uint32_t class_version = read_u32(&next_instr[1].cache);
+ uint32_t self_type_version = read_u32(&next_instr[3].cache);
+ PyObject *method = read_obj(&next_instr[5].cache);
+ #line 1588 "Python/bytecodes.c"
+ DEOPT_IF(global_super != (PyObject *)&PySuper_Type, LOAD_SUPER_ATTR);
+ DEOPT_IF(!PyType_Check(class), LOAD_SUPER_ATTR);
+ DEOPT_IF(((PyTypeObject *)class)->tp_version_tag != class_version, LOAD_SUPER_ATTR);
+ PyTypeObject *self_type = Py_TYPE(self);
+ DEOPT_IF(self_type->tp_version_tag != self_type_version, LOAD_SUPER_ATTR);
+ res2 = method;
+ res = self; // transfer ownership
+ Py_INCREF(res2);
+ Py_DECREF(global_super);
+ Py_DECREF(class);
+ #line 2235 "Python/generated_cases.c.h"
+ STACK_SHRINK(1);
+ stack_pointer[-1] = res;
+ stack_pointer[-2] = res2;
+ next_instr += 9;
+ DISPATCH();
+ }
+
TARGET(LOAD_ATTR) {
PREDICTED(LOAD_ATTR);
static_assert(INLINE_CACHE_ENTRIES_LOAD_ATTR == 9, "incorrect cache size");
PyObject *owner = stack_pointer[-1];
PyObject *res2 = NULL;
PyObject *res;
- #line 1571 "Python/bytecodes.c"
+ #line 1615 "Python/bytecodes.c"
#if ENABLE_SPECIALIZATION
_PyAttrCache *cache = (_PyAttrCache *)next_instr;
if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
@@ -2207,9 +2279,9 @@
NULL | meth | arg1 | ... | argN
*/
- #line 2211 "Python/generated_cases.c.h"
+ #line 2283 "Python/generated_cases.c.h"
Py_DECREF(owner);
- #line 1605 "Python/bytecodes.c"
+ #line 1649 "Python/bytecodes.c"
if (meth == NULL) goto pop_1_error;
res2 = NULL;
res = meth;
@@ -2218,12 +2290,12 @@
else {
/* Classic, pushes one value. */
res = PyObject_GetAttr(owner, name);
- #line 2222 "Python/generated_cases.c.h"
+ #line 2294 "Python/generated_cases.c.h"
Py_DECREF(owner);
- #line 1614 "Python/bytecodes.c"
+ #line 1658 "Python/bytecodes.c"
if (res == NULL) goto pop_1_error;
}
- #line 2227 "Python/generated_cases.c.h"
+ #line 2299 "Python/generated_cases.c.h"
STACK_GROW(((oparg & 1) ? 1 : 0));
stack_pointer[-1] = res;
if (oparg & 1) { stack_pointer[-(1 + ((oparg & 1) ? 1 : 0))] = res2; }
@@ -2237,7 +2309,7 @@
PyObject *res;
uint32_t type_version = read_u32(&next_instr[1].cache);
uint16_t index = read_u16(&next_instr[3].cache);
- #line 1619 "Python/bytecodes.c"
+ #line 1663 "Python/bytecodes.c"
PyTypeObject *tp = Py_TYPE(owner);
assert(type_version != 0);
DEOPT_IF(tp->tp_version_tag != type_version, LOAD_ATTR);
@@ -2250,7 +2322,7 @@
STAT_INC(LOAD_ATTR, hit);
Py_INCREF(res);
res2 = NULL;
- #line 2254 "Python/generated_cases.c.h"
+ #line 2326 "Python/generated_cases.c.h"
Py_DECREF(owner);
STACK_GROW(((oparg & 1) ? 1 : 0));
stack_pointer[-1] = res;
@@ -2265,7 +2337,7 @@
PyObject *res;
uint32_t type_version = read_u32(&next_instr[1].cache);
uint16_t index = read_u16(&next_instr[3].cache);
- #line 1635 "Python/bytecodes.c"
+ #line 1679 "Python/bytecodes.c"
DEOPT_IF(!PyModule_CheckExact(owner), LOAD_ATTR);
PyDictObject *dict = (PyDictObject *)((PyModuleObject *)owner)->md_dict;
assert(dict != NULL);
@@ -2278,7 +2350,7 @@
STAT_INC(LOAD_ATTR, hit);
Py_INCREF(res);
res2 = NULL;
- #line 2282 "Python/generated_cases.c.h"
+ #line 2354 "Python/generated_cases.c.h"
Py_DECREF(owner);
STACK_GROW(((oparg & 1) ? 1 : 0));
stack_pointer[-1] = res;
@@ -2293,7 +2365,7 @@
PyObject *res;
uint32_t type_version = read_u32(&next_instr[1].cache);
uint16_t index = read_u16(&next_instr[3].cache);
- #line 1651 "Python/bytecodes.c"
+ #line 1695 "Python/bytecodes.c"
PyTypeObject *tp = Py_TYPE(owner);
assert(type_version != 0);
DEOPT_IF(tp->tp_version_tag != type_version, LOAD_ATTR);
@@ -2320,7 +2392,7 @@
STAT_INC(LOAD_ATTR, hit);
Py_INCREF(res);
res2 = NULL;
- #line 2324 "Python/generated_cases.c.h"
+ #line 2396 "Python/generated_cases.c.h"
Py_DECREF(owner);
STACK_GROW(((oparg & 1) ? 1 : 0));
stack_pointer[-1] = res;
@@ -2335,7 +2407,7 @@
PyObject *res;
uint32_t type_version = read_u32(&next_instr[1].cache);
uint16_t index = read_u16(&next_instr[3].cache);
- #line 1681 "Python/bytecodes.c"
+ #line 1725 "Python/bytecodes.c"
PyTypeObject *tp = Py_TYPE(owner);
assert(type_version != 0);
DEOPT_IF(tp->tp_version_tag != type_version, LOAD_ATTR);
@@ -2345,7 +2417,7 @@
STAT_INC(LOAD_ATTR, hit);
Py_INCREF(res);
res2 = NULL;
- #line 2349 "Python/generated_cases.c.h"
+ #line 2421 "Python/generated_cases.c.h"
Py_DECREF(owner);
STACK_GROW(((oparg & 1) ? 1 : 0));
stack_pointer[-1] = res;
@@ -2360,7 +2432,7 @@
PyObject *res;
uint32_t type_version = read_u32(&next_instr[1].cache);
PyObject *descr = read_obj(&next_instr[5].cache);
- #line 1694 "Python/bytecodes.c"
+ #line 1738 "Python/bytecodes.c"
DEOPT_IF(!PyType_Check(cls), LOAD_ATTR);
DEOPT_IF(((PyTypeObject *)cls)->tp_version_tag != type_version,
@@ -2372,7 +2444,7 @@
res = descr;
assert(res != NULL);
Py_INCREF(res);
- #line 2376 "Python/generated_cases.c.h"
+ #line 2448 "Python/generated_cases.c.h"
Py_DECREF(cls);
STACK_GROW(((oparg & 1) ? 1 : 0));
stack_pointer[-1] = res;
@@ -2386,7 +2458,7 @@
uint32_t type_version = read_u32(&next_instr[1].cache);
uint32_t func_version = read_u32(&next_instr[3].cache);
PyObject *fget = read_obj(&next_instr[5].cache);
- #line 1709 "Python/bytecodes.c"
+ #line 1753 "Python/bytecodes.c"
DEOPT_IF(tstate->interp->eval_frame, LOAD_ATTR);
PyTypeObject *cls = Py_TYPE(owner);
@@ -2410,7 +2482,7 @@
JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR);
frame->return_offset = 0;
DISPATCH_INLINED(new_frame);
- #line 2414 "Python/generated_cases.c.h"
+ #line 2486 "Python/generated_cases.c.h"
}
TARGET(LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN) {
@@ -2418,7 +2490,7 @@
uint32_t type_version = read_u32(&next_instr[1].cache);
uint32_t func_version = read_u32(&next_instr[3].cache);
PyObject *getattribute = read_obj(&next_instr[5].cache);
- #line 1735 "Python/bytecodes.c"
+ #line 1779 "Python/bytecodes.c"
DEOPT_IF(tstate->interp->eval_frame, LOAD_ATTR);
PyTypeObject *cls = Py_TYPE(owner);
DEOPT_IF(cls->tp_version_tag != type_version, LOAD_ATTR);
@@ -2444,7 +2516,7 @@
JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR);
frame->return_offset = 0;
DISPATCH_INLINED(new_frame);
- #line 2448 "Python/generated_cases.c.h"
+ #line 2520 "Python/generated_cases.c.h"
}
TARGET(STORE_ATTR_INSTANCE_VALUE) {
@@ -2452,7 +2524,7 @@
PyObject *value = stack_pointer[-2];
uint32_t type_version = read_u32(&next_instr[1].cache);
uint16_t index = read_u16(&next_instr[3].cache);
- #line 1763 "Python/bytecodes.c"
+ #line 1807 "Python/bytecodes.c"
PyTypeObject *tp = Py_TYPE(owner);
assert(type_version != 0);
DEOPT_IF(tp->tp_version_tag != type_version, STORE_ATTR);
@@ -2470,7 +2542,7 @@
Py_DECREF(old_value);
}
Py_DECREF(owner);
- #line 2474 "Python/generated_cases.c.h"
+ #line 2546 "Python/generated_cases.c.h"
STACK_SHRINK(2);
next_instr += 4;
DISPATCH();
@@ -2481,7 +2553,7 @@
PyObject *value = stack_pointer[-2];
uint32_t type_version = read_u32(&next_instr[1].cache);
uint16_t hint = read_u16(&next_instr[3].cache);
- #line 1783 "Python/bytecodes.c"
+ #line 1827 "Python/bytecodes.c"
PyTypeObject *tp = Py_TYPE(owner);
assert(type_version != 0);
DEOPT_IF(tp->tp_version_tag != type_version, STORE_ATTR);
@@ -2520,7 +2592,7 @@
/* PEP 509 */
dict->ma_version_tag = new_version;
Py_DECREF(owner);
- #line 2524 "Python/generated_cases.c.h"
+ #line 2596 "Python/generated_cases.c.h"
STACK_SHRINK(2);
next_instr += 4;
DISPATCH();
@@ -2531,7 +2603,7 @@
PyObject *value = stack_pointer[-2];
uint32_t type_version = read_u32(&next_instr[1].cache);
uint16_t index = read_u16(&next_instr[3].cache);
- #line 1824 "Python/bytecodes.c"
+ #line 1868 "Python/bytecodes.c"
PyTypeObject *tp = Py_TYPE(owner);
assert(type_version != 0);
DEOPT_IF(tp->tp_version_tag != type_version, STORE_ATTR);
@@ -2541,7 +2613,7 @@
*(PyObject **)addr = value;
Py_XDECREF(old_value);
Py_DECREF(owner);
- #line 2545 "Python/generated_cases.c.h"
+ #line 2617 "Python/generated_cases.c.h"
STACK_SHRINK(2);
next_instr += 4;
DISPATCH();
@@ -2553,7 +2625,7 @@
PyObject *right = stack_pointer[-1];
PyObject *left = stack_pointer[-2];
PyObject *res;
- #line 1843 "Python/bytecodes.c"
+ #line 1887 "Python/bytecodes.c"
#if ENABLE_SPECIALIZATION
_PyCompareOpCache *cache = (_PyCompareOpCache *)next_instr;
if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
@@ -2566,12 +2638,12 @@
#endif /* ENABLE_SPECIALIZATION */
assert((oparg >> 4) <= Py_GE);
res = PyObject_RichCompare(left, right, oparg>>4);
- #line 2570 "Python/generated_cases.c.h"
+ #line 2642 "Python/generated_cases.c.h"
Py_DECREF(left);
Py_DECREF(right);
- #line 1856 "Python/bytecodes.c"
+ #line 1900 "Python/bytecodes.c"
if (res == NULL) goto pop_2_error;
- #line 2575 "Python/generated_cases.c.h"
+ #line 2647 "Python/generated_cases.c.h"
STACK_SHRINK(1);
stack_pointer[-1] = res;
next_instr += 1;
@@ -2582,7 +2654,7 @@
PyObject *right = stack_pointer[-1];
PyObject *left = stack_pointer[-2];
PyObject *res;
- #line 1860 "Python/bytecodes.c"
+ #line 1904 "Python/bytecodes.c"
DEOPT_IF(!PyFloat_CheckExact(left), COMPARE_OP);
DEOPT_IF(!PyFloat_CheckExact(right), COMPARE_OP);
STAT_INC(COMPARE_OP, hit);
@@ -2594,7 +2666,7 @@
_Py_DECREF_SPECIALIZED(right, _PyFloat_ExactDealloc);
res = (sign_ish & oparg) ? Py_True : Py_False;
Py_INCREF(res);
- #line 2598 "Python/generated_cases.c.h"
+ #line 2670 "Python/generated_cases.c.h"
STACK_SHRINK(1);
stack_pointer[-1] = res;
next_instr += 1;
@@ -2605,7 +2677,7 @@
PyObject *right = stack_pointer[-1];
PyObject *left = stack_pointer[-2];
PyObject *res;
- #line 1875 "Python/bytecodes.c"
+ #line 1919 "Python/bytecodes.c"
DEOPT_IF(!PyLong_CheckExact(left), COMPARE_OP);
DEOPT_IF(!PyLong_CheckExact(right), COMPARE_OP);
DEOPT_IF(!_PyLong_IsCompact((PyLongObject *)left), COMPARE_OP);
@@ -2621,7 +2693,7 @@
_Py_DECREF_SPECIALIZED(right, (destructor)PyObject_Free);
res = (sign_ish & oparg) ? Py_True : Py_False;
Py_INCREF(res);
- #line 2625 "Python/generated_cases.c.h"
+ #line 2697 "Python/generated_cases.c.h"
STACK_SHRINK(1);
stack_pointer[-1] = res;
next_instr += 1;
@@ -2632,7 +2704,7 @@
PyObject *right = stack_pointer[-1];
PyObject *left = stack_pointer[-2];
PyObject *res;
- #line 1894 "Python/bytecodes.c"
+ #line 1938 "Python/bytecodes.c"
DEOPT_IF(!PyUnicode_CheckExact(left), COMPARE_OP);
DEOPT_IF(!PyUnicode_CheckExact(right), COMPARE_OP);
STAT_INC(COMPARE_OP, hit);
@@ -2645,7 +2717,7 @@
assert(COMPARISON_NOT_EQUALS + 1 == COMPARISON_EQUALS);
res = ((COMPARISON_NOT_EQUALS + eq) & oparg) ? Py_True : Py_False;
Py_INCREF(res);
- #line 2649 "Python/generated_cases.c.h"
+ #line 2721 "Python/generated_cases.c.h"
STACK_SHRINK(1);
stack_pointer[-1] = res;
next_instr += 1;
@@ -2656,14 +2728,14 @@
PyObject *right = stack_pointer[-1];
PyObject *left = stack_pointer[-2];
PyObject *b;
- #line 1909 "Python/bytecodes.c"
+ #line 1953 "Python/bytecodes.c"
int res = Py_Is(left, right) ^ oparg;
- #line 2662 "Python/generated_cases.c.h"
+ #line 2734 "Python/generated_cases.c.h"
Py_DECREF(left);
Py_DECREF(right);
- #line 1911 "Python/bytecodes.c"
+ #line 1955 "Python/bytecodes.c"
b = Py_NewRef(res ? Py_True : Py_False);
- #line 2667 "Python/generated_cases.c.h"
+ #line 2739 "Python/generated_cases.c.h"
STACK_SHRINK(1);
stack_pointer[-1] = b;
DISPATCH();
@@ -2673,15 +2745,15 @@
PyObject *right = stack_pointer[-1];
PyObject *left = stack_pointer[-2];
PyObject *b;
- #line 1915 "Python/bytecodes.c"
+ #line 1959 "Python/bytecodes.c"
int res = PySequence_Contains(right, left);
- #line 2679 "Python/generated_cases.c.h"
+ #line 2751 "Python/generated_cases.c.h"
Py_DECREF(left);
Py_DECREF(right);
- #line 1917 "Python/bytecodes.c"
+ #line 1961 "Python/bytecodes.c"
if (res < 0) goto pop_2_error;
b = Py_NewRef((res^oparg) ? Py_True : Py_False);
- #line 2685 "Python/generated_cases.c.h"
+ #line 2757 "Python/generated_cases.c.h"
STACK_SHRINK(1);
stack_pointer[-1] = b;
DISPATCH();
@@ -2692,12 +2764,12 @@
PyObject *exc_value = stack_pointer[-2];
PyObject *rest;
PyObject *match;
- #line 1922 "Python/bytecodes.c"
+ #line 1966 "Python/bytecodes.c"
if (check_except_star_type_valid(tstate, match_type) < 0) {
- #line 2698 "Python/generated_cases.c.h"
+ #line 2770 "Python/generated_cases.c.h"
Py_DECREF(exc_value);
Py_DECREF(match_type);
- #line 1924 "Python/bytecodes.c"
+ #line 1968 "Python/bytecodes.c"
if (true) goto pop_2_error;
}
@@ -2705,10 +2777,10 @@
rest = NULL;
int res = exception_group_match(exc_value, match_type,
&match, &rest);
- #line 2709 "Python/generated_cases.c.h"
+ #line 2781 "Python/generated_cases.c.h"
Py_DECREF(exc_value);
Py_DECREF(match_type);
- #line 1932 "Python/bytecodes.c"
+ #line 1976 "Python/bytecodes.c"
if (res < 0) goto pop_2_error;
assert((match == NULL) == (rest == NULL));
@@ -2717,7 +2789,7 @@
if (!Py_IsNone(match)) {
PyErr_SetHandledException(match);
}
- #line 2721 "Python/generated_cases.c.h"
+ #line 2793 "Python/generated_cases.c.h"
stack_pointer[-1] = match;
stack_pointer[-2] = rest;
DISPATCH();
@@ -2727,21 +2799,21 @@
PyObject *right = stack_pointer[-1];
PyObject *left = stack_pointer[-2];
PyObject *b;
- #line 1943 "Python/bytecodes.c"
+ #line 1987 "Python/bytecodes.c"
assert(PyExceptionInstance_Check(left));
if (check_except_type_valid(tstate, right) < 0) {
- #line 2734 "Python/generated_cases.c.h"
+ #line 2806 "Python/generated_cases.c.h"
Py_DECREF(right);
- #line 1946 "Python/bytecodes.c"
+ #line 1990 "Python/bytecodes.c"
if (true) goto pop_1_error;
}
int res = PyErr_GivenExceptionMatches(left, right);
- #line 2741 "Python/generated_cases.c.h"
+ #line 2813 "Python/generated_cases.c.h"
Py_DECREF(right);
- #line 1951 "Python/bytecodes.c"
+ #line 1995 "Python/bytecodes.c"
b = Py_NewRef(res ? Py_True : Py_False);
- #line 2745 "Python/generated_cases.c.h"
+ #line 2817 "Python/generated_cases.c.h"
stack_pointer[-1] = b;
DISPATCH();
}
@@ -2750,15 +2822,15 @@
PyObject *fromlist = stack_pointer[-1];
PyObject *level = stack_pointer[-2];
PyObject *res;
- #line 1955 "Python/bytecodes.c"
+ #line 1999 "Python/bytecodes.c"
PyObject *name = GETITEM(frame->f_code->co_names, oparg);
res = import_name(tstate, frame, name, fromlist, level);
- #line 2757 "Python/generated_cases.c.h"
+ #line 2829 "Python/generated_cases.c.h"
Py_DECREF(level);
Py_DECREF(fromlist);
- #line 1958 "Python/bytecodes.c"
+ #line 2002 "Python/bytecodes.c"
if (res == NULL) goto pop_2_error;
- #line 2762 "Python/generated_cases.c.h"
+ #line 2834 "Python/generated_cases.c.h"
STACK_SHRINK(1);
stack_pointer[-1] = res;
DISPATCH();
@@ -2767,29 +2839,29 @@
TARGET(IMPORT_FROM) {
PyObject *from = stack_pointer[-1];
PyObject *res;
- #line 1962 "Python/bytecodes.c"
+ #line 2006 "Python/bytecodes.c"
PyObject *name = GETITEM(frame->f_code->co_names, oparg);
res = import_from(tstate, from, name);
if (res == NULL) goto error;
- #line 2775 "Python/generated_cases.c.h"
+ #line 2847 "Python/generated_cases.c.h"
STACK_GROW(1);
stack_pointer[-1] = res;
DISPATCH();
}
TARGET(JUMP_FORWARD) {
- #line 1968 "Python/bytecodes.c"
+ #line 2012 "Python/bytecodes.c"
JUMPBY(oparg);
- #line 2784 "Python/generated_cases.c.h"
+ #line 2856 "Python/generated_cases.c.h"
DISPATCH();
}
TARGET(JUMP_BACKWARD) {
PREDICTED(JUMP_BACKWARD);
- #line 1972 "Python/bytecodes.c"
+ #line 2016 "Python/bytecodes.c"
assert(oparg < INSTR_OFFSET());
JUMPBY(-oparg);
- #line 2793 "Python/generated_cases.c.h"
+ #line 2865 "Python/generated_cases.c.h"
CHECK_EVAL_BREAKER();
DISPATCH();
}
@@ -2797,7 +2869,7 @@
TARGET(POP_JUMP_IF_FALSE) {
PREDICTED(POP_JUMP_IF_FALSE);
PyObject *cond = stack_pointer[-1];
- #line 1978 "Python/bytecodes.c"
+ #line 2022 "Python/bytecodes.c"
if (Py_IsTrue(cond)) {
_Py_DECREF_NO_DEALLOC(cond);
}
@@ -2807,9 +2879,9 @@
}
else {
int err = PyObject_IsTrue(cond);
- #line 2811 "Python/generated_cases.c.h"
+ #line 2883 "Python/generated_cases.c.h"
Py_DECREF(cond);
- #line 1988 "Python/bytecodes.c"
+ #line 2032 "Python/bytecodes.c"
if (err == 0) {
JUMPBY(oparg);
}
@@ -2817,14 +2889,14 @@
if (err < 0) goto pop_1_error;
}
}
- #line 2821 "Python/generated_cases.c.h"
+ #line 2893 "Python/generated_cases.c.h"
STACK_SHRINK(1);
DISPATCH();
}
TARGET(POP_JUMP_IF_TRUE) {
PyObject *cond = stack_pointer[-1];
- #line 1998 "Python/bytecodes.c"
+ #line 2042 "Python/bytecodes.c"
if (Py_IsFalse(cond)) {
_Py_DECREF_NO_DEALLOC(cond);
}
@@ -2834,9 +2906,9 @@
}
else {
int err = PyObject_IsTrue(cond);
- #line 2838 "Python/generated_cases.c.h"
+ #line 2910 "Python/generated_cases.c.h"
Py_DECREF(cond);
- #line 2008 "Python/bytecodes.c"
+ #line 2052 "Python/bytecodes.c"
if (err > 0) {
JUMPBY(oparg);
}
@@ -2844,67 +2916,67 @@
if (err < 0) goto pop_1_error;
}
}
- #line 2848 "Python/generated_cases.c.h"
+ #line 2920 "Python/generated_cases.c.h"
STACK_SHRINK(1);
DISPATCH();
}
TARGET(POP_JUMP_IF_NOT_NONE) {
PyObject *value = stack_pointer[-1];
- #line 2018 "Python/bytecodes.c"
+ #line 2062 "Python/bytecodes.c"
if (!Py_IsNone(value)) {
- #line 2857 "Python/generated_cases.c.h"
+ #line 2929 "Python/generated_cases.c.h"
Py_DECREF(value);
- #line 2020 "Python/bytecodes.c"
+ #line 2064 "Python/bytecodes.c"
JUMPBY(oparg);
}
else {
_Py_DECREF_NO_DEALLOC(value);
}
- #line 2865 "Python/generated_cases.c.h"
+ #line 2937 "Python/generated_cases.c.h"
STACK_SHRINK(1);
DISPATCH();
}
TARGET(POP_JUMP_IF_NONE) {
PyObject *value = stack_pointer[-1];
- #line 2028 "Python/bytecodes.c"
+ #line 2072 "Python/bytecodes.c"
if (Py_IsNone(value)) {
_Py_DECREF_NO_DEALLOC(value);
JUMPBY(oparg);
}
else {
- #line 2878 "Python/generated_cases.c.h"
+ #line 2950 "Python/generated_cases.c.h"
Py_DECREF(value);
- #line 2034 "Python/bytecodes.c"
+ #line 2078 "Python/bytecodes.c"
}
- #line 2882 "Python/generated_cases.c.h"
+ #line 2954 "Python/generated_cases.c.h"
STACK_SHRINK(1);
DISPATCH();
}
TARGET(JUMP_BACKWARD_NO_INTERRUPT) {
- #line 2038 "Python/bytecodes.c"
+ #line 2082 "Python/bytecodes.c"
/* This bytecode is used in the `yield from` or `await` loop.
* If there is an interrupt, we want it handled in the innermost
* generator or coroutine, so we deliberately do not check it here.
* (see bpo-30039).
*/
JUMPBY(-oparg);
- #line 2895 "Python/generated_cases.c.h"
+ #line 2967 "Python/generated_cases.c.h"
DISPATCH();
}
TARGET(GET_LEN) {
PyObject *obj = stack_pointer[-1];
PyObject *len_o;
- #line 2047 "Python/bytecodes.c"
+ #line 2091 "Python/bytecodes.c"
// PUSH(len(TOS))
Py_ssize_t len_i = PyObject_Length(obj);
if (len_i < 0) goto error;
len_o = PyLong_FromSsize_t(len_i);
if (len_o == NULL) goto error;
- #line 2908 "Python/generated_cases.c.h"
+ #line 2980 "Python/generated_cases.c.h"
STACK_GROW(1);
stack_pointer[-1] = len_o;
DISPATCH();
@@ -2915,16 +2987,16 @@
PyObject *type = stack_pointer[-2];
PyObject *subject = stack_pointer[-3];
PyObject *attrs;
- #line 2055 "Python/bytecodes.c"
+ #line 2099 "Python/bytecodes.c"
// Pop TOS and TOS1. Set TOS to a tuple of attributes on success, or
// None on failure.
assert(PyTuple_CheckExact(names));
attrs = match_class(tstate, subject, type, oparg, names);
- #line 2924 "Python/generated_cases.c.h"
+ #line 2996 "Python/generated_cases.c.h"
Py_DECREF(subject);
Py_DECREF(type);
Py_DECREF(names);
- #line 2060 "Python/bytecodes.c"
+ #line 2104 "Python/bytecodes.c"
if (attrs) {
assert(PyTuple_CheckExact(attrs)); // Success!
}
@@ -2932,7 +3004,7 @@
if (_PyErr_Occurred(tstate)) goto pop_3_error;
attrs = Py_NewRef(Py_None); // Failure!
}
- #line 2936 "Python/generated_cases.c.h"
+ #line 3008 "Python/generated_cases.c.h"
STACK_SHRINK(2);
stack_pointer[-1] = attrs;
DISPATCH();
@@ -2941,10 +3013,10 @@
TARGET(MATCH_MAPPING) {
PyObject *subject = stack_pointer[-1];
PyObject *res;
- #line 2070 "Python/bytecodes.c"
+ #line 2114 "Python/bytecodes.c"
int match = Py_TYPE(subject)->tp_flags & Py_TPFLAGS_MAPPING;
res = Py_NewRef(match ? Py_True : Py_False);
- #line 2948 "Python/generated_cases.c.h"
+ #line 3020 "Python/generated_cases.c.h"
STACK_GROW(1);
stack_pointer[-1] = res;
PREDICT(POP_JUMP_IF_FALSE);
@@ -2954,10 +3026,10 @@
TARGET(MATCH_SEQUENCE) {
PyObject *subject = stack_pointer[-1];
PyObject *res;
- #line 2076 "Python/bytecodes.c"
+ #line 2120 "Python/bytecodes.c"
int match = Py_TYPE(subject)->tp_flags & Py_TPFLAGS_SEQUENCE;
res = Py_NewRef(match ? Py_True : Py_False);
- #line 2961 "Python/generated_cases.c.h"
+ #line 3033 "Python/generated_cases.c.h"
STACK_GROW(1);
stack_pointer[-1] = res;
PREDICT(POP_JUMP_IF_FALSE);
@@ -2968,11 +3040,11 @@
PyObject *keys = stack_pointer[-1];
PyObject *subject = stack_pointer[-2];
PyObject *values_or_none;
- #line 2082 "Python/bytecodes.c"
+ #line 2126 "Python/bytecodes.c"
// On successful match, PUSH(values). Otherwise, PUSH(None).
values_or_none = match_keys(tstate, subject, keys);
if (values_or_none == NULL) goto error;
- #line 2976 "Python/generated_cases.c.h"
+ #line 3048 "Python/generated_cases.c.h"
STACK_GROW(1);
stack_pointer[-1] = values_or_none;
DISPATCH();
@@ -2981,14 +3053,14 @@
TARGET(GET_ITER) {
PyObject *iterable = stack_pointer[-1];
PyObject *iter;
- #line 2088 "Python/bytecodes.c"
+ #line 2132 "Python/bytecodes.c"
/* before: [obj]; after [getiter(obj)] */
iter = PyObject_GetIter(iterable);
- #line 2988 "Python/generated_cases.c.h"
+ #line 3060 "Python/generated_cases.c.h"
Py_DECREF(iterable);
- #line 2091 "Python/bytecodes.c"
+ #line 2135 "Python/bytecodes.c"
if (iter == NULL) goto pop_1_error;
- #line 2992 "Python/generated_cases.c.h"
+ #line 3064 "Python/generated_cases.c.h"
stack_pointer[-1] = iter;
DISPATCH();
}
@@ -2996,7 +3068,7 @@
TARGET(GET_YIELD_FROM_ITER) {
PyObject *iterable = stack_pointer[-1];
PyObject *iter;
- #line 2095 "Python/bytecodes.c"
+ #line 2139 "Python/bytecodes.c"
/* before: [obj]; after [getiter(obj)] */
if (PyCoro_CheckExact(iterable)) {
/* `iterable` is a coroutine */
@@ -3019,11 +3091,11 @@
if (iter == NULL) {
goto error;
}
- #line 3023 "Python/generated_cases.c.h"
+ #line 3095 "Python/generated_cases.c.h"
Py_DECREF(iterable);
- #line 2118 "Python/bytecodes.c"
+ #line 2162 "Python/bytecodes.c"
}
- #line 3027 "Python/generated_cases.c.h"
+ #line 3099 "Python/generated_cases.c.h"
stack_pointer[-1] = iter;
PREDICT(LOAD_CONST);
DISPATCH();
@@ -3034,7 +3106,7 @@
static_assert(INLINE_CACHE_ENTRIES_FOR_ITER == 1, "incorrect cache size");
PyObject *iter = stack_pointer[-1];
PyObject *next;
- #line 2137 "Python/bytecodes.c"
+ #line 2181 "Python/bytecodes.c"
#if ENABLE_SPECIALIZATION
_PyForIterCache *cache = (_PyForIterCache *)next_instr;
if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
@@ -3065,7 +3137,7 @@
DISPATCH();
}
// Common case: no jump, leave it to the code generator
- #line 3069 "Python/generated_cases.c.h"
+ #line 3141 "Python/generated_cases.c.h"
STACK_GROW(1);
stack_pointer[-1] = next;
next_instr += 1;
@@ -3073,7 +3145,7 @@
}
TARGET(INSTRUMENTED_FOR_ITER) {
- #line 2170 "Python/bytecodes.c"
+ #line 2214 "Python/bytecodes.c"
_Py_CODEUNIT *here = next_instr-1;
_Py_CODEUNIT *target;
PyObject *iter = TOP();
@@ -3099,14 +3171,14 @@
target = next_instr + INLINE_CACHE_ENTRIES_FOR_ITER + oparg + 1;
}
INSTRUMENTED_JUMP(here, target, PY_MONITORING_EVENT_BRANCH);
- #line 3103 "Python/generated_cases.c.h"
+ #line 3175 "Python/generated_cases.c.h"
DISPATCH();
}
TARGET(FOR_ITER_LIST) {
PyObject *iter = stack_pointer[-1];
PyObject *next;
- #line 2198 "Python/bytecodes.c"
+ #line 2242 "Python/bytecodes.c"
DEOPT_IF(Py_TYPE(iter) != &PyListIter_Type, FOR_ITER);
_PyListIterObject *it = (_PyListIterObject *)iter;
STAT_INC(FOR_ITER, hit);
@@ -3126,7 +3198,7 @@
DISPATCH();
end_for_iter_list:
// Common case: no jump, leave it to the code generator
- #line 3130 "Python/generated_cases.c.h"
+ #line 3202 "Python/generated_cases.c.h"
STACK_GROW(1);
stack_pointer[-1] = next;
next_instr += 1;
@@ -3136,7 +3208,7 @@
TARGET(FOR_ITER_TUPLE) {
PyObject *iter = stack_pointer[-1];
PyObject *next;
- #line 2220 "Python/bytecodes.c"
+ #line 2264 "Python/bytecodes.c"
_PyTupleIterObject *it = (_PyTupleIterObject *)iter;
DEOPT_IF(Py_TYPE(it) != &PyTupleIter_Type, FOR_ITER);
STAT_INC(FOR_ITER, hit);
@@ -3156,7 +3228,7 @@
DISPATCH();
end_for_iter_tuple:
// Common case: no jump, leave it to the code generator
- #line 3160 "Python/generated_cases.c.h"
+ #line 3232 "Python/generated_cases.c.h"
STACK_GROW(1);
stack_pointer[-1] = next;
next_instr += 1;
@@ -3166,7 +3238,7 @@
TARGET(FOR_ITER_RANGE) {
PyObject *iter = stack_pointer[-1];
PyObject *next;
- #line 2242 "Python/bytecodes.c"
+ #line 2286 "Python/bytecodes.c"
_PyRangeIterObject *r = (_PyRangeIterObject *)iter;
DEOPT_IF(Py_TYPE(r) != &PyRangeIter_Type, FOR_ITER);
STAT_INC(FOR_ITER, hit);
@@ -3184,7 +3256,7 @@
if (next == NULL) {
goto error;
}
- #line 3188 "Python/generated_cases.c.h"
+ #line 3260 "Python/generated_cases.c.h"
STACK_GROW(1);
stack_pointer[-1] = next;
next_instr += 1;
@@ -3193,7 +3265,7 @@
TARGET(FOR_ITER_GEN) {
PyObject *iter = stack_pointer[-1];
- #line 2262 "Python/bytecodes.c"
+ #line 2306 "Python/bytecodes.c"
PyGenObject *gen = (PyGenObject *)iter;
DEOPT_IF(Py_TYPE(gen) != &PyGen_Type, FOR_ITER);
DEOPT_IF(gen->gi_frame_state >= FRAME_EXECUTING, FOR_ITER);
@@ -3208,14 +3280,14 @@
assert(next_instr[oparg].op.code == END_FOR ||
next_instr[oparg].op.code == INSTRUMENTED_END_FOR);
DISPATCH_INLINED(gen_frame);
- #line 3212 "Python/generated_cases.c.h"
+ #line 3284 "Python/generated_cases.c.h"
}
TARGET(BEFORE_ASYNC_WITH) {
PyObject *mgr = stack_pointer[-1];
PyObject *exit;
PyObject *res;
- #line 2279 "Python/bytecodes.c"
+ #line 2323 "Python/bytecodes.c"
PyObject *enter = _PyObject_LookupSpecial(mgr, &_Py_ID(__aenter__));
if (enter == NULL) {
if (!_PyErr_Occurred(tstate)) {
@@ -3238,16 +3310,16 @@
Py_DECREF(enter);
goto error;
}
- #line 3242 "Python/generated_cases.c.h"
+ #line 3314 "Python/generated_cases.c.h"
Py_DECREF(mgr);
- #line 2302 "Python/bytecodes.c"
+ #line 2346 "Python/bytecodes.c"
res = _PyObject_CallNoArgs(enter);
Py_DECREF(enter);
if (res == NULL) {
Py_DECREF(exit);
if (true) goto pop_1_error;
}
- #line 3251 "Python/generated_cases.c.h"
+ #line 3323 "Python/generated_cases.c.h"
STACK_GROW(1);
stack_pointer[-1] = res;
stack_pointer[-2] = exit;
@@ -3259,7 +3331,7 @@
PyObject *mgr = stack_pointer[-1];
PyObject *exit;
PyObject *res;
- #line 2312 "Python/bytecodes.c"
+ #line 2356 "Python/bytecodes.c"
/* pop the context manager, push its __exit__ and the
* value returned from calling its __enter__
*/
@@ -3285,16 +3357,16 @@
Py_DECREF(enter);
goto error;
}
- #line 3289 "Python/generated_cases.c.h"
+ #line 3361 "Python/generated_cases.c.h"
Py_DECREF(mgr);
- #line 2338 "Python/bytecodes.c"
+ #line 2382 "Python/bytecodes.c"
res = _PyObject_CallNoArgs(enter);
Py_DECREF(enter);
if (res == NULL) {
Py_DECREF(exit);
if (true) goto pop_1_error;
}
- #line 3298 "Python/generated_cases.c.h"
+ #line 3370 "Python/generated_cases.c.h"
STACK_GROW(1);
stack_pointer[-1] = res;
stack_pointer[-2] = exit;
@@ -3306,7 +3378,7 @@
PyObject *lasti = stack_pointer[-3];
PyObject *exit_func = stack_pointer[-4];
PyObject *res;
- #line 2347 "Python/bytecodes.c"
+ #line 2391 "Python/bytecodes.c"
/* At the top of the stack are 4 values:
- val: TOP = exc_info()
- unused: SECOND = previous exception
@@ -3327,7 +3399,7 @@
res = PyObject_Vectorcall(exit_func, stack + 1,
3 | PY_VECTORCALL_ARGUMENTS_OFFSET, NULL);
if (res == NULL) goto error;
- #line 3331 "Python/generated_cases.c.h"
+ #line 3403 "Python/generated_cases.c.h"
STACK_GROW(1);
stack_pointer[-1] = res;
DISPATCH();
@@ -3336,7 +3408,7 @@
TARGET(PUSH_EXC_INFO) {
PyObject *new_exc = stack_pointer[-1];
PyObject *prev_exc;
- #line 2370 "Python/bytecodes.c"
+ #line 2414 "Python/bytecodes.c"
_PyErr_StackItem *exc_info = tstate->exc_info;
if (exc_info->exc_value != NULL) {
prev_exc = exc_info->exc_value;
@@ -3346,7 +3418,7 @@
}
assert(PyExceptionInstance_Check(new_exc));
exc_info->exc_value = Py_NewRef(new_exc);
- #line 3350 "Python/generated_cases.c.h"
+ #line 3422 "Python/generated_cases.c.h"
STACK_GROW(1);
stack_pointer[-1] = new_exc;
stack_pointer[-2] = prev_exc;
@@ -3360,7 +3432,7 @@
uint32_t type_version = read_u32(&next_instr[1].cache);
uint32_t keys_version = read_u32(&next_instr[3].cache);
PyObject *descr = read_obj(&next_instr[5].cache);
- #line 2382 "Python/bytecodes.c"
+ #line 2426 "Python/bytecodes.c"
/* Cached method object */
PyTypeObject *self_cls = Py_TYPE(self);
assert(type_version != 0);
@@ -3377,7 +3449,7 @@
assert(_PyType_HasFeature(Py_TYPE(res2), Py_TPFLAGS_METHOD_DESCRIPTOR));
res = self;
assert(oparg & 1);
- #line 3381 "Python/generated_cases.c.h"
+ #line 3453 "Python/generated_cases.c.h"
STACK_GROW(((oparg & 1) ? 1 : 0));
stack_pointer[-1] = res;
if (oparg & 1) { stack_pointer[-(1 + ((oparg & 1) ? 1 : 0))] = res2; }
@@ -3391,7 +3463,7 @@
PyObject *res;
uint32_t type_version = read_u32(&next_instr[1].cache);
PyObject *descr = read_obj(&next_instr[5].cache);
- #line 2401 "Python/bytecodes.c"
+ #line 2445 "Python/bytecodes.c"
PyTypeObject *self_cls = Py_TYPE(self);
DEOPT_IF(self_cls->tp_version_tag != type_version, LOAD_ATTR);
assert(self_cls->tp_dictoffset == 0);
@@ -3401,7 +3473,7 @@
res2 = Py_NewRef(descr);
res = self;
assert(oparg & 1);
- #line 3405 "Python/generated_cases.c.h"
+ #line 3477 "Python/generated_cases.c.h"
STACK_GROW(((oparg & 1) ? 1 : 0));
stack_pointer[-1] = res;
if (oparg & 1) { stack_pointer[-(1 + ((oparg & 1) ? 1 : 0))] = res2; }
@@ -3415,7 +3487,7 @@
PyObject *res;
uint32_t type_version = read_u32(&next_instr[1].cache);
PyObject *descr = read_obj(&next_instr[5].cache);
- #line 2413 "Python/bytecodes.c"
+ #line 2457 "Python/bytecodes.c"
PyTypeObject *self_cls = Py_TYPE(self);
DEOPT_IF(self_cls->tp_version_tag != type_version, LOAD_ATTR);
Py_ssize_t dictoffset = self_cls->tp_dictoffset;
@@ -3429,7 +3501,7 @@
res2 = Py_NewRef(descr);
res = self;
assert(oparg & 1);
- #line 3433 "Python/generated_cases.c.h"
+ #line 3505 "Python/generated_cases.c.h"
STACK_GROW(((oparg & 1) ? 1 : 0));
stack_pointer[-1] = res;
if (oparg & 1) { stack_pointer[-(1 + ((oparg & 1) ? 1 : 0))] = res2; }
@@ -3438,16 +3510,16 @@
}
TARGET(KW_NAMES) {
- #line 2429 "Python/bytecodes.c"
+ #line 2473 "Python/bytecodes.c"
assert(kwnames == NULL);
assert(oparg < PyTuple_GET_SIZE(frame->f_code->co_consts));
kwnames = GETITEM(frame->f_code->co_consts, oparg);
- #line 3446 "Python/generated_cases.c.h"
+ #line 3518 "Python/generated_cases.c.h"
DISPATCH();
}
TARGET(INSTRUMENTED_CALL) {
- #line 2435 "Python/bytecodes.c"
+ #line 2479 "Python/bytecodes.c"
int is_meth = PEEK(oparg+2) != NULL;
int total_args = oparg + is_meth;
PyObject *function = PEEK(total_args + 1);
@@ -3460,7 +3532,7 @@
_PyCallCache *cache = (_PyCallCache *)next_instr;
INCREMENT_ADAPTIVE_COUNTER(cache->counter);
GO_TO_INSTRUCTION(CALL);
- #line 3464 "Python/generated_cases.c.h"
+ #line 3536 "Python/generated_cases.c.h"
}
TARGET(CALL) {
@@ -3470,7 +3542,7 @@
PyObject *callable = stack_pointer[-(1 + oparg)];
PyObject *method = stack_pointer[-(2 + oparg)];
PyObject *res;
- #line 2480 "Python/bytecodes.c"
+ #line 2524 "Python/bytecodes.c"
int is_meth = method != NULL;
int total_args = oparg;
if (is_meth) {
@@ -3552,7 +3624,7 @@
Py_DECREF(args[i]);
}
if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; }
- #line 3556 "Python/generated_cases.c.h"
+ #line 3628 "Python/generated_cases.c.h"
STACK_SHRINK(oparg);
STACK_SHRINK(1);
stack_pointer[-1] = res;
@@ -3564,7 +3636,7 @@
TARGET(CALL_BOUND_METHOD_EXACT_ARGS) {
PyObject *callable = stack_pointer[-(1 + oparg)];
PyObject *method = stack_pointer[-(2 + oparg)];
- #line 2568 "Python/bytecodes.c"
+ #line 2612 "Python/bytecodes.c"
DEOPT_IF(method != NULL, CALL);
DEOPT_IF(Py_TYPE(callable) != &PyMethod_Type, CALL);
STAT_INC(CALL, hit);
@@ -3574,7 +3646,7 @@
PEEK(oparg + 2) = Py_NewRef(meth); // method
Py_DECREF(callable);
GO_TO_INSTRUCTION(CALL_PY_EXACT_ARGS);
- #line 3578 "Python/generated_cases.c.h"
+ #line 3650 "Python/generated_cases.c.h"
}
TARGET(CALL_PY_EXACT_ARGS) {
@@ -3583,7 +3655,7 @@
PyObject *callable = stack_pointer[-(1 + oparg)];
PyObject *method = stack_pointer[-(2 + oparg)];
uint32_t func_version = read_u32(&next_instr[1].cache);
- #line 2580 "Python/bytecodes.c"
+ #line 2624 "Python/bytecodes.c"
assert(kwnames == NULL);
DEOPT_IF(tstate->interp->eval_frame, CALL);
int is_meth = method != NULL;
@@ -3609,7 +3681,7 @@
JUMPBY(INLINE_CACHE_ENTRIES_CALL);
frame->return_offset = 0;
DISPATCH_INLINED(new_frame);
- #line 3613 "Python/generated_cases.c.h"
+ #line 3685 "Python/generated_cases.c.h"
}
TARGET(CALL_PY_WITH_DEFAULTS) {
@@ -3617,7 +3689,7 @@
PyObject *callable = stack_pointer[-(1 + oparg)];
PyObject *method = stack_pointer[-(2 + oparg)];
uint32_t func_version = read_u32(&next_instr[1].cache);
- #line 2608 "Python/bytecodes.c"
+ #line 2652 "Python/bytecodes.c"
assert(kwnames == NULL);
DEOPT_IF(tstate->interp->eval_frame, CALL);
int is_meth = method != NULL;
@@ -3653,7 +3725,7 @@
JUMPBY(INLINE_CACHE_ENTRIES_CALL);
frame->return_offset = 0;
DISPATCH_INLINED(new_frame);
- #line 3657 "Python/generated_cases.c.h"
+ #line 3729 "Python/generated_cases.c.h"
}
TARGET(CALL_NO_KW_TYPE_1) {
@@ -3661,7 +3733,7 @@
PyObject *callable = stack_pointer[-(1 + oparg)];
PyObject *null = stack_pointer[-(2 + oparg)];
PyObject *res;
- #line 2646 "Python/bytecodes.c"
+ #line 2690 "Python/bytecodes.c"
assert(kwnames == NULL);
assert(oparg == 1);
DEOPT_IF(null != NULL, CALL);
@@ -3671,7 +3743,7 @@
res = Py_NewRef(Py_TYPE(obj));
Py_DECREF(obj);
Py_DECREF(&PyType_Type); // I.e., callable
- #line 3675 "Python/generated_cases.c.h"
+ #line 3747 "Python/generated_cases.c.h"
STACK_SHRINK(oparg);
STACK_SHRINK(1);
stack_pointer[-1] = res;
@@ -3684,7 +3756,7 @@
PyObject *callable = stack_pointer[-(1 + oparg)];
PyObject *null = stack_pointer[-(2 + oparg)];
PyObject *res;
- #line 2658 "Python/bytecodes.c"
+ #line 2702 "Python/bytecodes.c"
assert(kwnames == NULL);
assert(oparg == 1);
DEOPT_IF(null != NULL, CALL);
@@ -3695,7 +3767,7 @@
Py_DECREF(arg);
Py_DECREF(&PyUnicode_Type); // I.e., callable
if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; }
- #line 3699 "Python/generated_cases.c.h"
+ #line 3771 "Python/generated_cases.c.h"
STACK_SHRINK(oparg);
STACK_SHRINK(1);
stack_pointer[-1] = res;
@@ -3709,7 +3781,7 @@
PyObject *callable = stack_pointer[-(1 + oparg)];
PyObject *null = stack_pointer[-(2 + oparg)];
PyObject *res;
- #line 2672 "Python/bytecodes.c"
+ #line 2716 "Python/bytecodes.c"
assert(kwnames == NULL);
assert(oparg == 1);
DEOPT_IF(null != NULL, CALL);
@@ -3720,7 +3792,7 @@
Py_DECREF(arg);
Py_DECREF(&PyTuple_Type); // I.e., tuple
if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; }
- #line 3724 "Python/generated_cases.c.h"
+ #line 3796 "Python/generated_cases.c.h"
STACK_SHRINK(oparg);
STACK_SHRINK(1);
stack_pointer[-1] = res;
@@ -3734,7 +3806,7 @@
PyObject *callable = stack_pointer[-(1 + oparg)];
PyObject *method = stack_pointer[-(2 + oparg)];
PyObject *res;
- #line 2686 "Python/bytecodes.c"
+ #line 2730 "Python/bytecodes.c"
int is_meth = method != NULL;
int total_args = oparg;
if (is_meth) {
@@ -3756,7 +3828,7 @@
}
Py_DECREF(tp);
if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; }
- #line 3760 "Python/generated_cases.c.h"
+ #line 3832 "Python/generated_cases.c.h"
STACK_SHRINK(oparg);
STACK_SHRINK(1);
stack_pointer[-1] = res;
@@ -3770,7 +3842,7 @@
PyObject *callable = stack_pointer[-(1 + oparg)];
PyObject *method = stack_pointer[-(2 + oparg)];
PyObject *res;
- #line 2711 "Python/bytecodes.c"
+ #line 2755 "Python/bytecodes.c"
/* Builtin METH_O functions */
assert(kwnames == NULL);
int is_meth = method != NULL;
@@ -3798,7 +3870,7 @@
Py_DECREF(arg);
Py_DECREF(callable);
if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; }
- #line 3802 "Python/generated_cases.c.h"
+ #line 3874 "Python/generated_cases.c.h"
STACK_SHRINK(oparg);
STACK_SHRINK(1);
stack_pointer[-1] = res;
@@ -3812,7 +3884,7 @@
PyObject *callable = stack_pointer[-(1 + oparg)];
PyObject *method = stack_pointer[-(2 + oparg)];
PyObject *res;
- #line 2742 "Python/bytecodes.c"
+ #line 2786 "Python/bytecodes.c"
/* Builtin METH_FASTCALL functions, without keywords */
assert(kwnames == NULL);
int is_meth = method != NULL;
@@ -3844,7 +3916,7 @@
'invalid'). In those cases an exception is set, so we must
handle it.
*/
- #line 3848 "Python/generated_cases.c.h"
+ #line 3920 "Python/generated_cases.c.h"
STACK_SHRINK(oparg);
STACK_SHRINK(1);
stack_pointer[-1] = res;
@@ -3858,7 +3930,7 @@
PyObject *callable = stack_pointer[-(1 + oparg)];
PyObject *method = stack_pointer[-(2 + oparg)];
PyObject *res;
- #line 2777 "Python/bytecodes.c"
+ #line 2821 "Python/bytecodes.c"
/* Builtin METH_FASTCALL | METH_KEYWORDS functions */
int is_meth = method != NULL;
int total_args = oparg;
@@ -3890,7 +3962,7 @@
}
Py_DECREF(callable);
if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; }
- #line 3894 "Python/generated_cases.c.h"
+ #line 3966 "Python/generated_cases.c.h"
STACK_SHRINK(oparg);
STACK_SHRINK(1);
stack_pointer[-1] = res;
@@ -3904,7 +3976,7 @@
PyObject *callable = stack_pointer[-(1 + oparg)];
PyObject *method = stack_pointer[-(2 + oparg)];
PyObject *res;
- #line 2812 "Python/bytecodes.c"
+ #line 2856 "Python/bytecodes.c"
assert(kwnames == NULL);
/* len(o) */
int is_meth = method != NULL;
@@ -3929,7 +4001,7 @@
Py_DECREF(callable);
Py_DECREF(arg);
if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; }
- #line 3933 "Python/generated_cases.c.h"
+ #line 4005 "Python/generated_cases.c.h"
STACK_SHRINK(oparg);
STACK_SHRINK(1);
stack_pointer[-1] = res;
@@ -3942,7 +4014,7 @@
PyObject *callable = stack_pointer[-(1 + oparg)];
PyObject *method = stack_pointer[-(2 + oparg)];
PyObject *res;
- #line 2839 "Python/bytecodes.c"
+ #line 2883 "Python/bytecodes.c"
assert(kwnames == NULL);
/* isinstance(o, o2) */
int is_meth = method != NULL;
@@ -3969,7 +4041,7 @@
Py_DECREF(cls);
Py_DECREF(callable);
if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; }
- #line 3973 "Python/generated_cases.c.h"
+ #line 4045 "Python/generated_cases.c.h"
STACK_SHRINK(oparg);
STACK_SHRINK(1);
stack_pointer[-1] = res;
@@ -3981,7 +4053,7 @@
PyObject **args = (stack_pointer - oparg);
PyObject *self = stack_pointer[-(1 + oparg)];
PyObject *method = stack_pointer[-(2 + oparg)];
- #line 2869 "Python/bytecodes.c"
+ #line 2913 "Python/bytecodes.c"
assert(kwnames == NULL);
assert(oparg == 1);
assert(method != NULL);
@@ -3999,14 +4071,14 @@
JUMPBY(INLINE_CACHE_ENTRIES_CALL + 1);
assert(next_instr[-1].op.code == POP_TOP);
DISPATCH();
- #line 4003 "Python/generated_cases.c.h"
+ #line 4075 "Python/generated_cases.c.h"
}
TARGET(CALL_NO_KW_METHOD_DESCRIPTOR_O) {
PyObject **args = (stack_pointer - oparg);
PyObject *method = stack_pointer[-(2 + oparg)];
PyObject *res;
- #line 2889 "Python/bytecodes.c"
+ #line 2933 "Python/bytecodes.c"
assert(kwnames == NULL);
int is_meth = method != NULL;
int total_args = oparg;
@@ -4037,7 +4109,7 @@
Py_DECREF(arg);
Py_DECREF(callable);
if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; }
- #line 4041 "Python/generated_cases.c.h"
+ #line 4113 "Python/generated_cases.c.h"
STACK_SHRINK(oparg);
STACK_SHRINK(1);
stack_pointer[-1] = res;
@@ -4050,7 +4122,7 @@
PyObject **args = (stack_pointer - oparg);
PyObject *method = stack_pointer[-(2 + oparg)];
PyObject *res;
- #line 2923 "Python/bytecodes.c"
+ #line 2967 "Python/bytecodes.c"
int is_meth = method != NULL;
int total_args = oparg;
if (is_meth) {
@@ -4079,7 +4151,7 @@
}
Py_DECREF(callable);
if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; }
- #line 4083 "Python/generated_cases.c.h"
+ #line 4155 "Python/generated_cases.c.h"
STACK_SHRINK(oparg);
STACK_SHRINK(1);
stack_pointer[-1] = res;
@@ -4092,7 +4164,7 @@
PyObject **args = (stack_pointer - oparg);
PyObject *method = stack_pointer[-(2 + oparg)];
PyObject *res;
- #line 2955 "Python/bytecodes.c"
+ #line 2999 "Python/bytecodes.c"
assert(kwnames == NULL);
assert(oparg == 0 || oparg == 1);
int is_meth = method != NULL;
@@ -4121,7 +4193,7 @@
Py_DECREF(self);
Py_DECREF(callable);
if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; }
- #line 4125 "Python/generated_cases.c.h"
+ #line 4197 "Python/generated_cases.c.h"
STACK_SHRINK(oparg);
STACK_SHRINK(1);
stack_pointer[-1] = res;
@@ -4134,7 +4206,7 @@
PyObject **args = (stack_pointer - oparg);
PyObject *method = stack_pointer[-(2 + oparg)];
PyObject *res;
- #line 2987 "Python/bytecodes.c"
+ #line 3031 "Python/bytecodes.c"
assert(kwnames == NULL);
int is_meth = method != NULL;
int total_args = oparg;
@@ -4162,7 +4234,7 @@
}
Py_DECREF(callable);
if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; }
- #line 4166 "Python/generated_cases.c.h"
+ #line 4238 "Python/generated_cases.c.h"
STACK_SHRINK(oparg);
STACK_SHRINK(1);
stack_pointer[-1] = res;
@@ -4172,9 +4244,9 @@
}
TARGET(INSTRUMENTED_CALL_FUNCTION_EX) {
- #line 3018 "Python/bytecodes.c"
+ #line 3062 "Python/bytecodes.c"
GO_TO_INSTRUCTION(CALL_FUNCTION_EX);
- #line 4178 "Python/generated_cases.c.h"
+ #line 4250 "Python/generated_cases.c.h"
}
TARGET(CALL_FUNCTION_EX) {
@@ -4183,7 +4255,7 @@
PyObject *callargs = stack_pointer[-(1 + ((oparg & 1) ? 1 : 0))];
PyObject *func = stack_pointer[-(2 + ((oparg & 1) ? 1 : 0))];
PyObject *result;
- #line 3022 "Python/bytecodes.c"
+ #line 3066 "Python/bytecodes.c"
// DICT_MERGE is called before this opcode if there are kwargs.
// It converts all dict subtypes in kwargs into regular dicts.
assert(kwargs == NULL || PyDict_CheckExact(kwargs));
@@ -4226,14 +4298,14 @@
else {
result = PyObject_Call(func, callargs, kwargs);
}
- #line 4230 "Python/generated_cases.c.h"
+ #line 4302 "Python/generated_cases.c.h"
Py_DECREF(func);
Py_DECREF(callargs);
Py_XDECREF(kwargs);
- #line 3065 "Python/bytecodes.c"
+ #line 3109 "Python/bytecodes.c"
assert(PEEK(3 + (oparg & 1)) == NULL);
if (result == NULL) { STACK_SHRINK(((oparg & 1) ? 1 : 0)); goto pop_3_error; }
- #line 4237 "Python/generated_cases.c.h"
+ #line 4309 "Python/generated_cases.c.h"
STACK_SHRINK(((oparg & 1) ? 1 : 0));
STACK_SHRINK(2);
stack_pointer[-1] = result;
@@ -4248,7 +4320,7 @@
PyObject *kwdefaults = (oparg & 0x02) ? stack_pointer[-(1 + ((oparg & 0x08) ? 1 : 0) + ((oparg & 0x04) ? 1 : 0) + ((oparg & 0x02) ? 1 : 0))] : NULL;
PyObject *defaults = (oparg & 0x01) ? stack_pointer[-(1 + ((oparg & 0x08) ? 1 : 0) + ((oparg & 0x04) ? 1 : 0) + ((oparg & 0x02) ? 1 : 0) + ((oparg & 0x01) ? 1 : 0))] : NULL;
PyObject *func;
- #line 3075 "Python/bytecodes.c"
+ #line 3119 "Python/bytecodes.c"
PyFunctionObject *func_obj = (PyFunctionObject *)
PyFunction_New(codeobj, GLOBALS());
@@ -4277,14 +4349,14 @@
func_obj->func_version = ((PyCodeObject *)codeobj)->co_version;
func = (PyObject *)func_obj;
- #line 4281 "Python/generated_cases.c.h"
+ #line 4353 "Python/generated_cases.c.h"
STACK_SHRINK(((oparg & 0x01) ? 1 : 0) + ((oparg & 0x02) ? 1 : 0) + ((oparg & 0x04) ? 1 : 0) + ((oparg & 0x08) ? 1 : 0));
stack_pointer[-1] = func;
DISPATCH();
}
TARGET(RETURN_GENERATOR) {
- #line 3106 "Python/bytecodes.c"
+ #line 3150 "Python/bytecodes.c"
assert(PyFunction_Check(frame->f_funcobj));
PyFunctionObject *func = (PyFunctionObject *)frame->f_funcobj;
PyGenObject *gen = (PyGenObject *)_Py_MakeCoro(func);
@@ -4305,7 +4377,7 @@
frame = cframe.current_frame = prev;
_PyFrame_StackPush(frame, (PyObject *)gen);
goto resume_frame;
- #line 4309 "Python/generated_cases.c.h"
+ #line 4381 "Python/generated_cases.c.h"
}
TARGET(BUILD_SLICE) {
@@ -4313,15 +4385,15 @@
PyObject *stop = stack_pointer[-(1 + ((oparg == 3) ? 1 : 0))];
PyObject *start = stack_pointer[-(2 + ((oparg == 3) ? 1 : 0))];
PyObject *slice;
- #line 3129 "Python/bytecodes.c"
+ #line 3173 "Python/bytecodes.c"
slice = PySlice_New(start, stop, step);
- #line 4319 "Python/generated_cases.c.h"
+ #line 4391 "Python/generated_cases.c.h"
Py_DECREF(start);
Py_DECREF(stop);
Py_XDECREF(step);
- #line 3131 "Python/bytecodes.c"
+ #line 3175 "Python/bytecodes.c"
if (slice == NULL) { STACK_SHRINK(((oparg == 3) ? 1 : 0)); goto pop_2_error; }
- #line 4325 "Python/generated_cases.c.h"
+ #line 4397 "Python/generated_cases.c.h"
STACK_SHRINK(((oparg == 3) ? 1 : 0));
STACK_SHRINK(1);
stack_pointer[-1] = slice;
@@ -4332,7 +4404,7 @@
PyObject *fmt_spec = ((oparg & FVS_MASK) == FVS_HAVE_SPEC) ? stack_pointer[-((((oparg & FVS_MASK) == FVS_HAVE_SPEC) ? 1 : 0))] : NULL;
PyObject *value = stack_pointer[-(1 + (((oparg & FVS_MASK) == FVS_HAVE_SPEC) ? 1 : 0))];
PyObject *result;
- #line 3135 "Python/bytecodes.c"
+ #line 3179 "Python/bytecodes.c"
/* Handles f-string value formatting. */
PyObject *(*conv_fn)(PyObject *);
int which_conversion = oparg & FVC_MASK;
@@ -4367,7 +4439,7 @@
Py_DECREF(value);
Py_XDECREF(fmt_spec);
if (result == NULL) { STACK_SHRINK((((oparg & FVS_MASK) == FVS_HAVE_SPEC) ? 1 : 0)); goto pop_1_error; }
- #line 4371 "Python/generated_cases.c.h"
+ #line 4443 "Python/generated_cases.c.h"
STACK_SHRINK((((oparg & FVS_MASK) == FVS_HAVE_SPEC) ? 1 : 0));
stack_pointer[-1] = result;
DISPATCH();
@@ -4376,10 +4448,10 @@
TARGET(COPY) {
PyObject *bottom = stack_pointer[-(1 + (oparg-1))];
PyObject *top;
- #line 3172 "Python/bytecodes.c"
+ #line 3216 "Python/bytecodes.c"
assert(oparg > 0);
top = Py_NewRef(bottom);
- #line 4383 "Python/generated_cases.c.h"
+ #line 4455 "Python/generated_cases.c.h"
STACK_GROW(1);
stack_pointer[-1] = top;
DISPATCH();
@@ -4391,7 +4463,7 @@
PyObject *rhs = stack_pointer[-1];
PyObject *lhs = stack_pointer[-2];
PyObject *res;
- #line 3177 "Python/bytecodes.c"
+ #line 3221 "Python/bytecodes.c"
#if ENABLE_SPECIALIZATION
_PyBinaryOpCache *cache = (_PyBinaryOpCache *)next_instr;
if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
@@ -4406,12 +4478,12 @@
assert((unsigned)oparg < Py_ARRAY_LENGTH(binary_ops));
assert(binary_ops[oparg]);
res = binary_ops[oparg](lhs, rhs);
- #line 4410 "Python/generated_cases.c.h"
+ #line 4482 "Python/generated_cases.c.h"
Py_DECREF(lhs);
Py_DECREF(rhs);
- #line 3192 "Python/bytecodes.c"
+ #line 3236 "Python/bytecodes.c"
if (res == NULL) goto pop_2_error;
- #line 4415 "Python/generated_cases.c.h"
+ #line 4487 "Python/generated_cases.c.h"
STACK_SHRINK(1);
stack_pointer[-1] = res;
next_instr += 1;
@@ -4421,16 +4493,16 @@
TARGET(SWAP) {
PyObject *top = stack_pointer[-1];
PyObject *bottom = stack_pointer[-(2 + (oparg-2))];
- #line 3197 "Python/bytecodes.c"
+ #line 3241 "Python/bytecodes.c"
assert(oparg >= 2);
- #line 4427 "Python/generated_cases.c.h"
+ #line 4499 "Python/generated_cases.c.h"
stack_pointer[-1] = bottom;
stack_pointer[-(2 + (oparg-2))] = top;
DISPATCH();
}
TARGET(INSTRUMENTED_LINE) {
- #line 3201 "Python/bytecodes.c"
+ #line 3245 "Python/bytecodes.c"
_Py_CODEUNIT *here = next_instr-1;
_PyFrame_SetStackPointer(frame, stack_pointer);
int original_opcode = _Py_call_instrumentation_line(
@@ -4450,11 +4522,11 @@
}
opcode = original_opcode;
DISPATCH_GOTO();
- #line 4454 "Python/generated_cases.c.h"
+ #line 4526 "Python/generated_cases.c.h"
}
TARGET(INSTRUMENTED_INSTRUCTION) {
- #line 3223 "Python/bytecodes.c"
+ #line 3267 "Python/bytecodes.c"
int next_opcode = _Py_call_instrumentation_instruction(
tstate, frame, next_instr-1);
if (next_opcode < 0) goto error;
@@ -4466,26 +4538,26 @@
assert(next_opcode > 0 && next_opcode < 256);
opcode = next_opcode;
DISPATCH_GOTO();
- #line 4470 "Python/generated_cases.c.h"
+ #line 4542 "Python/generated_cases.c.h"
}
TARGET(INSTRUMENTED_JUMP_FORWARD) {
- #line 3237 "Python/bytecodes.c"
+ #line 3281 "Python/bytecodes.c"
INSTRUMENTED_JUMP(next_instr-1, next_instr+oparg, PY_MONITORING_EVENT_JUMP);
- #line 4476 "Python/generated_cases.c.h"
+ #line 4548 "Python/generated_cases.c.h"
DISPATCH();
}
TARGET(INSTRUMENTED_JUMP_BACKWARD) {
- #line 3241 "Python/bytecodes.c"
+ #line 3285 "Python/bytecodes.c"
INSTRUMENTED_JUMP(next_instr-1, next_instr-oparg, PY_MONITORING_EVENT_JUMP);
- #line 4483 "Python/generated_cases.c.h"
+ #line 4555 "Python/generated_cases.c.h"
CHECK_EVAL_BREAKER();
DISPATCH();
}
TARGET(INSTRUMENTED_POP_JUMP_IF_TRUE) {
- #line 3246 "Python/bytecodes.c"
+ #line 3290 "Python/bytecodes.c"
PyObject *cond = POP();
int err = PyObject_IsTrue(cond);
Py_DECREF(cond);
@@ -4494,12 +4566,12 @@
assert(err == 0 || err == 1);
int offset = err*oparg;
INSTRUMENTED_JUMP(here, next_instr + offset, PY_MONITORING_EVENT_BRANCH);
- #line 4498 "Python/generated_cases.c.h"
+ #line 4570 "Python/generated_cases.c.h"
DISPATCH();
}
TARGET(INSTRUMENTED_POP_JUMP_IF_FALSE) {
- #line 3257 "Python/bytecodes.c"
+ #line 3301 "Python/bytecodes.c"
PyObject *cond = POP();
int err = PyObject_IsTrue(cond);
Py_DECREF(cond);
@@ -4508,12 +4580,12 @@
assert(err == 0 || err == 1);
int offset = (1-err)*oparg;
INSTRUMENTED_JUMP(here, next_instr + offset, PY_MONITORING_EVENT_BRANCH);
- #line 4512 "Python/generated_cases.c.h"
+ #line 4584 "Python/generated_cases.c.h"
DISPATCH();
}
TARGET(INSTRUMENTED_POP_JUMP_IF_NONE) {
- #line 3268 "Python/bytecodes.c"
+ #line 3312 "Python/bytecodes.c"
PyObject *value = POP();
_Py_CODEUNIT *here = next_instr-1;
int offset;
@@ -4526,12 +4598,12 @@
offset = 0;
}
INSTRUMENTED_JUMP(here, next_instr + offset, PY_MONITORING_EVENT_BRANCH);
- #line 4530 "Python/generated_cases.c.h"
+ #line 4602 "Python/generated_cases.c.h"
DISPATCH();
}
TARGET(INSTRUMENTED_POP_JUMP_IF_NOT_NONE) {
- #line 3283 "Python/bytecodes.c"
+ #line 3327 "Python/bytecodes.c"
PyObject *value = POP();
_Py_CODEUNIT *here = next_instr-1;
int offset;
@@ -4544,30 +4616,30 @@
offset = oparg;
}
INSTRUMENTED_JUMP(here, next_instr + offset, PY_MONITORING_EVENT_BRANCH);
- #line 4548 "Python/generated_cases.c.h"
+ #line 4620 "Python/generated_cases.c.h"
DISPATCH();
}
TARGET(EXTENDED_ARG) {
- #line 3298 "Python/bytecodes.c"
+ #line 3342 "Python/bytecodes.c"
assert(oparg);
opcode = next_instr->op.code;
oparg = oparg << 8 | next_instr->op.arg;
PRE_DISPATCH_GOTO();
DISPATCH_GOTO();
- #line 4559 "Python/generated_cases.c.h"
+ #line 4631 "Python/generated_cases.c.h"
}
TARGET(CACHE) {
- #line 3306 "Python/bytecodes.c"
+ #line 3350 "Python/bytecodes.c"
assert(0 && "Executing a cache.");
Py_UNREACHABLE();
- #line 4566 "Python/generated_cases.c.h"
+ #line 4638 "Python/generated_cases.c.h"
}
TARGET(RESERVED) {
- #line 3311 "Python/bytecodes.c"
+ #line 3355 "Python/bytecodes.c"
assert(0 && "Executing RESERVED instruction.");
Py_UNREACHABLE();
- #line 4573 "Python/generated_cases.c.h"
+ #line 4645 "Python/generated_cases.c.h"
}
diff --git a/Python/import.c b/Python/import.c
index 1db5b9333bbba1..0bf107b28d3990 100644
--- a/Python/import.c
+++ b/Python/import.c
@@ -413,8 +413,11 @@ remove_module(PyThreadState *tstate, PyObject *name)
Py_ssize_t
_PyImport_GetNextModuleIndex(void)
{
+ PyThread_acquire_lock(EXTENSIONS.mutex, WAIT_LOCK);
LAST_MODULE_INDEX++;
- return LAST_MODULE_INDEX;
+ Py_ssize_t index = LAST_MODULE_INDEX;
+ PyThread_release_lock(EXTENSIONS.mutex);
+ return index;
}
static const char *
@@ -591,11 +594,11 @@ _PyImport_ClearModulesByIndex(PyInterpreterState *interp)
/*
It may help to have a big picture view of what happens
when an extension is loaded. This includes when it is imported
- for the first time or via imp.load_dynamic().
+ for the first time.
- Here's a summary, using imp.load_dynamic() as the starting point:
+ Here's a summary, using importlib._bootstrap._load() as a starting point.
- 1. imp.load_dynamic() -> importlib._bootstrap._load()
+ 1. importlib._bootstrap._load()
2. _load(): acquire import lock
3. _load() -> importlib._bootstrap._load_unlocked()
4. _load_unlocked() -> importlib._bootstrap.module_from_spec()
@@ -703,6 +706,7 @@ _PyImport_ClearModulesByIndex(PyInterpreterState *interp)
const char *
_PyImport_ResolveNameWithPackageContext(const char *name)
{
+ PyThread_acquire_lock(EXTENSIONS.mutex, WAIT_LOCK);
if (PKGCONTEXT != NULL) {
const char *p = strrchr(PKGCONTEXT, '.');
if (p != NULL && strcmp(name, p+1) == 0) {
@@ -710,14 +714,17 @@ _PyImport_ResolveNameWithPackageContext(const char *name)
PKGCONTEXT = NULL;
}
}
+ PyThread_release_lock(EXTENSIONS.mutex);
return name;
}
const char *
_PyImport_SwapPackageContext(const char *newcontext)
{
+ PyThread_acquire_lock(EXTENSIONS.mutex, WAIT_LOCK);
const char *oldcontext = PKGCONTEXT;
PKGCONTEXT = newcontext;
+ PyThread_release_lock(EXTENSIONS.mutex);
return oldcontext;
}
@@ -865,13 +872,13 @@ gets even messier.
static inline void
extensions_lock_acquire(void)
{
- // XXX For now the GIL is sufficient.
+ PyThread_acquire_lock(_PyRuntime.imports.extensions.mutex, WAIT_LOCK);
}
static inline void
extensions_lock_release(void)
{
- // XXX For now the GIL is sufficient.
+ PyThread_release_lock(_PyRuntime.imports.extensions.mutex);
}
/* Magic for extension modules (built-in as well as dynamically
@@ -2021,9 +2028,9 @@ find_frozen(PyObject *nameobj, struct frozen_info *info)
}
static PyObject *
-unmarshal_frozen_code(struct frozen_info *info)
+unmarshal_frozen_code(PyInterpreterState *interp, struct frozen_info *info)
{
- if (info->get_code) {
+ if (info->get_code && _Py_IsMainInterpreter(interp)) {
PyObject *code = info->get_code();
assert(code != NULL);
return code;
@@ -2070,7 +2077,7 @@ PyImport_ImportFrozenModuleObject(PyObject *name)
set_frozen_error(status, name);
return -1;
}
- co = unmarshal_frozen_code(&info);
+ co = unmarshal_frozen_code(tstate->interp, &info);
if (co == NULL) {
return -1;
}
@@ -3528,7 +3535,8 @@ _imp_get_frozen_object_impl(PyObject *module, PyObject *name,
return NULL;
}
- PyObject *codeobj = unmarshal_frozen_code(&info);
+ PyInterpreterState *interp = _PyInterpreterState_GET();
+ PyObject *codeobj = unmarshal_frozen_code(interp, &info);
if (dataobj != Py_None) {
PyBuffer_Release(&buf);
}
@@ -3786,7 +3794,7 @@ _imp_source_hash_impl(PyObject *module, long key, Py_buffer *source)
PyDoc_STRVAR(doc_imp,
-"(Extremely) low-level import machinery bits as used by importlib and imp.");
+"(Extremely) low-level import machinery bits as used by importlib.");
static PyMethodDef imp_methods[] = {
_IMP_EXTENSION_SUFFIXES_METHODDEF
diff --git a/Python/instrumentation.c b/Python/instrumentation.c
index 8dc8b01fcb0496..c5bbbdacbb851e 100644
--- a/Python/instrumentation.c
+++ b/Python/instrumentation.c
@@ -16,14 +16,14 @@
static PyObject DISABLE =
{
- _PyObject_IMMORTAL_REFCNT,
- &PyBaseObject_Type
+ .ob_refcnt = _Py_IMMORTAL_REFCNT,
+ .ob_type = &PyBaseObject_Type
};
PyObject _PyInstrumentation_MISSING =
{
- _PyObject_IMMORTAL_REFCNT,
- &PyBaseObject_Type
+ .ob_refcnt = _Py_IMMORTAL_REFCNT,
+ .ob_type = &PyBaseObject_Type
};
static const int8_t EVENT_FOR_OPCODE[256] = {
@@ -113,18 +113,23 @@ static const uint8_t INSTRUMENTED_OPCODES[256] = {
};
static inline bool
-opcode_has_event(int opcode) {
- return opcode < INSTRUMENTED_LINE &&
- INSTRUMENTED_OPCODES[opcode] > 0;
+opcode_has_event(int opcode)
+{
+ return (
+ opcode < INSTRUMENTED_LINE &&
+ INSTRUMENTED_OPCODES[opcode] > 0
+ );
}
static inline bool
-is_instrumented(int opcode) {
+is_instrumented(int opcode)
+{
assert(opcode != 0);
assert(opcode != RESERVED);
return opcode >= MIN_INSTRUMENTED_OPCODE;
}
+#ifndef NDEBUG
static inline bool
monitors_equals(_Py_Monitors a, _Py_Monitors b)
{
@@ -135,6 +140,7 @@ monitors_equals(_Py_Monitors a, _Py_Monitors b)
}
return true;
}
+#endif
static inline _Py_Monitors
monitors_sub(_Py_Monitors a, _Py_Monitors b)
@@ -146,6 +152,7 @@ monitors_sub(_Py_Monitors a, _Py_Monitors b)
return res;
}
+#ifndef NDEBUG
static inline _Py_Monitors
monitors_and(_Py_Monitors a, _Py_Monitors b)
{
@@ -155,6 +162,7 @@ monitors_and(_Py_Monitors a, _Py_Monitors b)
}
return res;
}
+#endif
static inline _Py_Monitors
monitors_or(_Py_Monitors a, _Py_Monitors b)
@@ -335,7 +343,8 @@ dump_monitors(const char *prefix, _Py_Monitors monitors, FILE*out)
/* Like _Py_GetBaseOpcode but without asserts.
* Does its best to give the right answer, but won't abort
* if something is wrong */
-int get_base_opcode_best_attempt(PyCodeObject *code, int offset)
+static int
+get_base_opcode_best_attempt(PyCodeObject *code, int offset)
{
int opcode = _Py_OPCODE(_PyCode_CODE(code)[offset]);
if (INSTRUMENTED_OPCODES[opcode] != opcode) {
@@ -414,13 +423,15 @@ dump_instrumentation_data(PyCodeObject *code, int star, FILE*out)
assert(test); \
} while (0)
-bool valid_opcode(int opcode) {
+static bool
+valid_opcode(int opcode)
+{
if (opcode > 0 &&
opcode != RESERVED &&
opcode < 255 &&
_PyOpcode_OpName[opcode] &&
- _PyOpcode_OpName[opcode][0] != '<'
- ) {
+ _PyOpcode_OpName[opcode][0] != '<')
+ {
return true;
}
return false;
@@ -546,11 +557,11 @@ de_instrument(PyCodeObject *code, int i, int event)
opcode_ptr = &code->_co_monitoring->lines[i].original_opcode;
opcode = *opcode_ptr;
}
- if (opcode == INSTRUMENTED_INSTRUCTION) {
+ if (opcode == INSTRUMENTED_INSTRUCTION) {
opcode_ptr = &code->_co_monitoring->per_instruction_opcodes[i];
opcode = *opcode_ptr;
}
- int deinstrumented = DE_INSTRUMENT[opcode];
+ int deinstrumented = DE_INSTRUMENT[opcode];
if (deinstrumented == 0) {
return;
}
@@ -777,8 +788,7 @@ add_line_tools(PyCodeObject * code, int offset, int tools)
{
assert(tools_is_subset_for_event(code, PY_MONITORING_EVENT_LINE, tools));
assert(code->_co_monitoring);
- if (code->_co_monitoring->line_tools
- ) {
+ if (code->_co_monitoring->line_tools) {
code->_co_monitoring->line_tools[offset] |= tools;
}
else {
@@ -794,8 +804,7 @@ add_per_instruction_tools(PyCodeObject * code, int offset, int tools)
{
assert(tools_is_subset_for_event(code, PY_MONITORING_EVENT_INSTRUCTION, tools));
assert(code->_co_monitoring);
- if (code->_co_monitoring->per_instruction_tools
- ) {
+ if (code->_co_monitoring->per_instruction_tools) {
code->_co_monitoring->per_instruction_tools[offset] |= tools;
}
else {
@@ -810,11 +819,10 @@ static void
remove_per_instruction_tools(PyCodeObject * code, int offset, int tools)
{
assert(code->_co_monitoring);
- if (code->_co_monitoring->per_instruction_tools)
- {
+ if (code->_co_monitoring->per_instruction_tools) {
uint8_t *toolsptr = &code->_co_monitoring->per_instruction_tools[offset];
*toolsptr &= ~tools;
- if (*toolsptr == 0 ) {
+ if (*toolsptr == 0) {
de_instrument_per_instruction(code, offset);
}
}
@@ -839,7 +847,7 @@ call_one_instrument(
assert(tstate->tracing == 0);
PyObject *instrument = interp->monitoring_callables[tool][event];
if (instrument == NULL) {
- return 0;
+ return 0;
}
int old_what = tstate->what_event;
tstate->what_event = event;
@@ -861,16 +869,15 @@ static const int8_t MOST_SIGNIFICANT_BITS[16] = {
3, 3, 3, 3,
};
-/* We could use _Py_bit_length here, but that is designed for larger (32/64) bit ints,
- and can perform relatively poorly on platforms without the necessary intrinsics. */
+/* We could use _Py_bit_length here, but that is designed for larger (32/64)
+ * bit ints, and can perform relatively poorly on platforms without the
+ * necessary intrinsics. */
static inline int most_significant_bit(uint8_t bits) {
assert(bits != 0);
if (bits > 15) {
return MOST_SIGNIFICANT_BITS[bits>>4]+4;
}
- else {
- return MOST_SIGNIFICANT_BITS[bits];
- }
+ return MOST_SIGNIFICANT_BITS[bits];
}
static bool
@@ -998,8 +1005,8 @@ _Py_call_instrumentation_2args(
int
_Py_call_instrumentation_jump(
PyThreadState *tstate, int event,
- _PyInterpreterFrame *frame, _Py_CODEUNIT *instr, _Py_CODEUNIT *target
-) {
+ _PyInterpreterFrame *frame, _Py_CODEUNIT *instr, _Py_CODEUNIT *target)
+{
assert(event == PY_MONITORING_EVENT_JUMP ||
event == PY_MONITORING_EVENT_BRANCH);
assert(frame->prev_instr == instr);
@@ -1305,8 +1312,8 @@ initialize_line_tools(PyCodeObject *code, _Py_Monitors *all_events)
}
}
-static
-int allocate_instrumentation_data(PyCodeObject *code)
+static int
+allocate_instrumentation_data(PyCodeObject *code)
{
if (code->_co_monitoring == NULL) {
@@ -1400,7 +1407,7 @@ static const uint8_t super_instructions[256] = {
/* Should use instruction metadata for this */
static bool
-is_super_instruction(int opcode) {
+is_super_instruction(uint8_t opcode) {
return super_instructions[opcode] != 0;
}
@@ -1512,7 +1519,7 @@ _Py_Instrument(PyCodeObject *code, PyInterpreterState *interp)
#define C_RETURN_EVENTS \
((1 << PY_MONITORING_EVENT_C_RETURN) | \
- (1 << PY_MONITORING_EVENT_C_RAISE))
+ (1 << PY_MONITORING_EVENT_C_RAISE))
#define C_CALL_EVENTS \
(C_RETURN_EVENTS | (1 << PY_MONITORING_EVENT_CALL))
@@ -1557,8 +1564,8 @@ static int
check_tool(PyInterpreterState *interp, int tool_id)
{
if (tool_id < PY_MONITORING_SYS_PROFILE_ID &&
- interp->monitoring_tool_names[tool_id] == NULL
- ) {
+ interp->monitoring_tool_names[tool_id] == NULL)
+ {
PyErr_Format(PyExc_ValueError, "tool %d is not in use", tool_id);
return -1;
}
diff --git a/Python/legacy_tracing.c b/Python/legacy_tracing.c
index cf345bddda79b0..e509e63a087a52 100644
--- a/Python/legacy_tracing.c
+++ b/Python/legacy_tracing.c
@@ -324,7 +324,7 @@ sys_trace_exception_handled(
PyTypeObject _PyLegacyEventHandler_Type = {
- _PyVarObject_IMMORTAL_INIT(&PyType_Type, 0),
+ PyVarObject_HEAD_INIT(&PyType_Type, 0)
"sys.legacy_event_handler",
sizeof(_PyLegacyEventHandler),
.tp_dealloc = (destructor)PyObject_Free,
diff --git a/Python/makeopcodetargets.py b/Python/makeopcodetargets.py
index 5aa31803397ce4..2b402ae0b6a031 100755
--- a/Python/makeopcodetargets.py
+++ b/Python/makeopcodetargets.py
@@ -7,24 +7,18 @@
import sys
-try:
- from importlib.machinery import SourceFileLoader
-except ImportError:
- import imp
-
- def find_module(modname):
- """Finds and returns a module in the local dist/checkout.
- """
- modpath = os.path.join(
- os.path.dirname(os.path.dirname(__file__)), "Lib")
- return imp.load_module(modname, *imp.find_module(modname, [modpath]))
-else:
- def find_module(modname):
- """Finds and returns a module in the local dist/checkout.
- """
- modpath = os.path.join(
- os.path.dirname(os.path.dirname(__file__)), "Lib", modname + ".py")
- return SourceFileLoader(modname, modpath).load_module()
+# 2023-04-27(warsaw): Pre-Python 3.12, this would catch ImportErrors and try to
+# import imp, and then use imp.load_module(). The imp module was removed in
+# Python 3.12 (and long deprecated before that), and it's unclear under what
+# conditions this import will now fail, so the fallback was simply removed.
+from importlib.machinery import SourceFileLoader
+
+def find_module(modname):
+ """Finds and returns a module in the local dist/checkout.
+ """
+ modpath = os.path.join(
+ os.path.dirname(os.path.dirname(__file__)), "Lib", modname + ".py")
+ return SourceFileLoader(modname, modpath).load_module()
def write_contents(f):
diff --git a/Python/modsupport.c b/Python/modsupport.c
index 75698455c88166..be229c987b8a78 100644
--- a/Python/modsupport.c
+++ b/Python/modsupport.c
@@ -3,6 +3,7 @@
#include "Python.h"
#include "pycore_abstract.h" // _PyIndex_Check()
+#include "pycore_object.h" // _PyType_IsReady()
#define FLAG_SIZE_T 1
typedef double va_double;
@@ -693,7 +694,7 @@ PyModule_AddStringConstant(PyObject *m, const char *name, const char *value)
int
PyModule_AddType(PyObject *module, PyTypeObject *type)
{
- if (PyType_Ready(type) < 0) {
+ if (!_PyType_IsReady(type) && PyType_Ready(type) < 0) {
return -1;
}
diff --git a/Python/opcode_metadata.h b/Python/opcode_metadata.h
index 4681ed03aff582..77f0ae0c1a4c30 100644
--- a/Python/opcode_metadata.h
+++ b/Python/opcode_metadata.h
@@ -205,6 +205,10 @@ _PyOpcode_num_popped(int opcode, int oparg, bool jump) {
return 1;
case MAP_ADD:
return 2;
+ case LOAD_SUPER_ATTR:
+ return 3;
+ case LOAD_SUPER_ATTR_METHOD:
+ return 3;
case LOAD_ATTR:
return 1;
case LOAD_ATTR_INSTANCE_VALUE:
@@ -589,6 +593,10 @@ _PyOpcode_num_pushed(int opcode, int oparg, bool jump) {
return 0;
case MAP_ADD:
return 0;
+ case LOAD_SUPER_ATTR:
+ return ((oparg & 1) ? 1 : 0) + 1;
+ case LOAD_SUPER_ATTR_METHOD:
+ return 2;
case LOAD_ATTR:
return ((oparg & 1) ? 1 : 0) + 1;
case LOAD_ATTR_INSTANCE_VALUE:
@@ -771,7 +779,7 @@ _PyOpcode_num_pushed(int opcode, int oparg, bool jump) {
}
#endif
-enum InstructionFormat { INSTR_FMT_IB, INSTR_FMT_IBC, INSTR_FMT_IBC00, INSTR_FMT_IBC000, INSTR_FMT_IBC00000000, INSTR_FMT_IBIB, INSTR_FMT_IX, INSTR_FMT_IXC, INSTR_FMT_IXC000 };
+enum InstructionFormat { INSTR_FMT_IB, INSTR_FMT_IBC, INSTR_FMT_IBC00, INSTR_FMT_IBC000, INSTR_FMT_IBC00000000, INSTR_FMT_IBIB, INSTR_FMT_IX, INSTR_FMT_IXC, INSTR_FMT_IXC000, INSTR_FMT_IXC00000000 };
struct opcode_metadata {
bool valid_entry;
enum InstructionFormat instr_format;
@@ -879,6 +887,8 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[256] = {
[DICT_UPDATE] = { true, INSTR_FMT_IB },
[DICT_MERGE] = { true, INSTR_FMT_IB },
[MAP_ADD] = { true, INSTR_FMT_IB },
+ [LOAD_SUPER_ATTR] = { true, INSTR_FMT_IBC00000000 },
+ [LOAD_SUPER_ATTR_METHOD] = { true, INSTR_FMT_IXC00000000 },
[LOAD_ATTR] = { true, INSTR_FMT_IBC00000000 },
[LOAD_ATTR_INSTANCE_VALUE] = { true, INSTR_FMT_IBC00000000 },
[LOAD_ATTR_MODULE] = { true, INSTR_FMT_IBC00000000 },
diff --git a/Python/opcode_targets.h b/Python/opcode_targets.h
index 9d6616666f7ac1..042cee222f705c 100644
--- a/Python/opcode_targets.h
+++ b/Python/opcode_targets.h
@@ -65,29 +65,29 @@ static void *opcode_targets[256] = {
&&TARGET_FOR_ITER_TUPLE,
&&TARGET_FOR_ITER_RANGE,
&&TARGET_FOR_ITER_GEN,
+ &&TARGET_LOAD_SUPER_ATTR_METHOD,
&&TARGET_LOAD_ATTR_CLASS,
- &&TARGET_LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN,
&&TARGET_GET_ITER,
&&TARGET_GET_YIELD_FROM_ITER,
- &&TARGET_LOAD_ATTR_INSTANCE_VALUE,
+ &&TARGET_LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN,
&&TARGET_LOAD_BUILD_CLASS,
+ &&TARGET_LOAD_ATTR_INSTANCE_VALUE,
&&TARGET_LOAD_ATTR_MODULE,
- &&TARGET_LOAD_ATTR_PROPERTY,
&&TARGET_LOAD_ASSERTION_ERROR,
&&TARGET_RETURN_GENERATOR,
+ &&TARGET_LOAD_ATTR_PROPERTY,
&&TARGET_LOAD_ATTR_SLOT,
&&TARGET_LOAD_ATTR_WITH_HINT,
&&TARGET_LOAD_ATTR_METHOD_LAZY_DICT,
&&TARGET_LOAD_ATTR_METHOD_NO_DICT,
&&TARGET_LOAD_ATTR_METHOD_WITH_VALUES,
&&TARGET_LOAD_CONST__LOAD_FAST,
- &&TARGET_LOAD_FAST__LOAD_CONST,
&&TARGET_RETURN_VALUE,
- &&TARGET_LOAD_FAST__LOAD_FAST,
+ &&TARGET_LOAD_FAST__LOAD_CONST,
&&TARGET_SETUP_ANNOTATIONS,
+ &&TARGET_LOAD_FAST__LOAD_FAST,
&&TARGET_LOAD_GLOBAL_BUILTIN,
&&TARGET_LOAD_GLOBAL_MODULE,
- &&TARGET_STORE_ATTR_INSTANCE_VALUE,
&&TARGET_POP_EXCEPT,
&&TARGET_STORE_NAME,
&&TARGET_DELETE_NAME,
@@ -110,9 +110,9 @@ static void *opcode_targets[256] = {
&&TARGET_IMPORT_NAME,
&&TARGET_IMPORT_FROM,
&&TARGET_JUMP_FORWARD,
+ &&TARGET_STORE_ATTR_INSTANCE_VALUE,
&&TARGET_STORE_ATTR_SLOT,
&&TARGET_STORE_ATTR_WITH_HINT,
- &&TARGET_STORE_FAST__LOAD_FAST,
&&TARGET_POP_JUMP_IF_FALSE,
&&TARGET_POP_JUMP_IF_TRUE,
&&TARGET_LOAD_GLOBAL,
@@ -140,9 +140,9 @@ static void *opcode_targets[256] = {
&&TARGET_STORE_DEREF,
&&TARGET_DELETE_DEREF,
&&TARGET_JUMP_BACKWARD,
- &&TARGET_STORE_FAST__STORE_FAST,
+ &&TARGET_LOAD_SUPER_ATTR,
&&TARGET_CALL_FUNCTION_EX,
- &&TARGET_STORE_SUBSCR_DICT,
+ &&TARGET_STORE_FAST__LOAD_FAST,
&&TARGET_EXTENDED_ARG,
&&TARGET_LIST_APPEND,
&&TARGET_SET_ADD,
@@ -152,20 +152,20 @@ static void *opcode_targets[256] = {
&&TARGET_YIELD_VALUE,
&&TARGET_RESUME,
&&TARGET_MATCH_CLASS,
- &&TARGET_STORE_SUBSCR_LIST_INT,
- &&TARGET_UNPACK_SEQUENCE_LIST,
+ &&TARGET_STORE_FAST__STORE_FAST,
+ &&TARGET_STORE_SUBSCR_DICT,
&&TARGET_FORMAT_VALUE,
&&TARGET_BUILD_CONST_KEY_MAP,
&&TARGET_BUILD_STRING,
+ &&TARGET_STORE_SUBSCR_LIST_INT,
+ &&TARGET_UNPACK_SEQUENCE_LIST,
&&TARGET_UNPACK_SEQUENCE_TUPLE,
&&TARGET_UNPACK_SEQUENCE_TWO_TUPLE,
- &&TARGET_SEND_GEN,
- &&_unknown_opcode,
&&TARGET_LIST_EXTEND,
&&TARGET_SET_UPDATE,
&&TARGET_DICT_MERGE,
&&TARGET_DICT_UPDATE,
- &&_unknown_opcode,
+ &&TARGET_SEND_GEN,
&&_unknown_opcode,
&&_unknown_opcode,
&&_unknown_opcode,
diff --git a/Python/pylifecycle.c b/Python/pylifecycle.c
index d6627bc6b7e86b..ba248d208e425a 100644
--- a/Python/pylifecycle.c
+++ b/Python/pylifecycle.c
@@ -2,7 +2,6 @@
#include "Python.h"
-#include "pycore_bytesobject.h" // _PyBytes_InitTypes()
#include "pycore_ceval.h" // _PyEval_FiniGIL()
#include "pycore_context.h" // _PyContext_Init()
#include "pycore_exceptions.h" // _PyExc_InitTypes()
@@ -26,7 +25,6 @@
#include "pycore_sliceobject.h" // _PySlice_Fini()
#include "pycore_sysmodule.h" // _PySys_ClearAuditHooks()
#include "pycore_traceback.h" // _Py_DumpTracebackThreads()
-#include "pycore_tuple.h" // _PyTuple_InitTypes()
#include "pycore_typeobject.h" // _PyTypes_InitTypes()
#include "pycore_unicodeobject.h" // _PyUnicode_InitTypes()
#include "opcode.h"
@@ -547,11 +545,21 @@ pycore_init_runtime(_PyRuntimeState *runtime,
}
-static void
+static PyStatus
init_interp_settings(PyInterpreterState *interp, const _PyInterpreterConfig *config)
{
assert(interp->feature_flags == 0);
+ if (config->use_main_obmalloc) {
+ interp->feature_flags |= Py_RTFLAGS_USE_MAIN_OBMALLOC;
+ }
+ else if (!config->check_multi_interp_extensions) {
+ /* The reason: PyModuleDef.m_base.m_copy leaks objects between
+ interpreters. */
+ return _PyStatus_ERR("per-interpreter obmalloc does not support "
+ "single-phase init extension modules");
+ }
+
if (config->allow_fork) {
interp->feature_flags |= Py_RTFLAGS_FORK;
}
@@ -570,6 +578,8 @@ init_interp_settings(PyInterpreterState *interp, const _PyInterpreterConfig *con
if (config->check_multi_interp_extensions) {
interp->feature_flags |= Py_RTFLAGS_MULTI_INTERP_EXTENSIONS;
}
+
+ return _PyStatus_OK();
}
@@ -622,7 +632,10 @@ pycore_create_interpreter(_PyRuntimeState *runtime,
}
const _PyInterpreterConfig config = _PyInterpreterConfig_LEGACY_INIT;
- init_interp_settings(interp, &config);
+ status = init_interp_settings(interp, &config);
+ if (_PyStatus_EXCEPTION(status)) {
+ return status;
+ }
PyThreadState *tstate = _PyThreadState_New(interp);
if (tstate == NULL) {
@@ -669,11 +682,6 @@ pycore_init_types(PyInterpreterState *interp)
return status;
}
- status = _PyBytes_InitTypes(interp);
- if (_PyStatus_EXCEPTION(status)) {
- return status;
- }
-
status = _PyLong_InitTypes(interp);
if (_PyStatus_EXCEPTION(status)) {
return status;
@@ -689,11 +697,6 @@ pycore_init_types(PyInterpreterState *interp)
return status;
}
- status = _PyTuple_InitTypes(interp);
- if (_PyStatus_EXCEPTION(status)) {
- return status;
- }
-
if (_PyExc_InitTypes(interp) < 0) {
return _PyStatus_ERR("failed to initialize an exception type");
}
@@ -808,11 +811,6 @@ pycore_interp_init(PyThreadState *tstate)
PyStatus status;
PyObject *sysmod = NULL;
- // This is a temporary fix until we have immortal objects.
- // (See _PyType_InitCache() in typeobject.c.)
- extern void _PyType_FixCacheRefcounts(void);
- _PyType_FixCacheRefcounts();
-
// Create singletons before the first PyType_Ready() call, since
// PyType_Ready() uses singletons like the Unicode empty string (tp_doc)
// and the empty tuple singletons (tp_bases).
@@ -1673,6 +1671,8 @@ finalize_interp_types(PyInterpreterState *interp)
_PyFloat_FiniType(interp);
_PyLong_FiniTypes(interp);
_PyThread_FiniType(interp);
+ // XXX fini collections module static types (_PyStaticType_Dealloc())
+ // XXX fini IO module static types (_PyStaticType_Dealloc())
_PyErr_FiniTypes(interp);
_PyTypes_FiniTypes(interp);
@@ -1941,6 +1941,7 @@ Py_FinalizeEx(void)
}
_Py_FinalizeRefTotal(runtime);
#endif
+ _Py_FinalizeAllocatedBlocks(runtime);
#ifdef Py_TRACE_REFS
/* Display addresses (& refcnts) of all objects still alive.
@@ -2041,7 +2042,10 @@ new_interpreter(PyThreadState **tstate_p, const _PyInterpreterConfig *config)
goto error;
}
- init_interp_settings(interp, config);
+ status = init_interp_settings(interp, config);
+ if (_PyStatus_EXCEPTION(status)) {
+ goto error;
+ }
status = init_interp_create_gil(tstate);
if (_PyStatus_EXCEPTION(status)) {
@@ -2169,10 +2173,9 @@ add_main_module(PyInterpreterState *interp)
Py_DECREF(bimod);
}
- /* Main is a little special - imp.is_builtin("__main__") will return
- * False, but BuiltinImporter is still the most appropriate initial
- * setting for its __loader__ attribute. A more suitable value will
- * be set if __main__ gets further initialized later in the startup
+ /* Main is a little special - BuiltinImporter is the most appropriate
+ * initial setting for its __loader__ attribute. A more suitable value
+ * will be set if __main__ gets further initialized later in the startup
* process.
*/
loader = _PyDict_GetItemStringWithError(d, "__loader__");
diff --git a/Python/pystate.c b/Python/pystate.c
index 1e04887ef04a2c..f103a059f0f369 100644
--- a/Python/pystate.c
+++ b/Python/pystate.c
@@ -60,23 +60,43 @@ extern "C" {
For each of these functions, the GIL must be held by the current thread.
*/
+
+#ifdef HAVE_THREAD_LOCAL
+_Py_thread_local PyThreadState *_Py_tss_tstate = NULL;
+#endif
+
static inline PyThreadState *
-current_fast_get(_PyRuntimeState *runtime)
+current_fast_get(_PyRuntimeState *Py_UNUSED(runtime))
{
- return (PyThreadState*)_Py_atomic_load_relaxed(&runtime->tstate_current);
+#ifdef HAVE_THREAD_LOCAL
+ return _Py_tss_tstate;
+#else
+ // XXX Fall back to the PyThread_tss_*() API.
+# error "no supported thread-local variable storage classifier"
+#endif
}
static inline void
-current_fast_set(_PyRuntimeState *runtime, PyThreadState *tstate)
+current_fast_set(_PyRuntimeState *Py_UNUSED(runtime), PyThreadState *tstate)
{
assert(tstate != NULL);
- _Py_atomic_store_relaxed(&runtime->tstate_current, (uintptr_t)tstate);
+#ifdef HAVE_THREAD_LOCAL
+ _Py_tss_tstate = tstate;
+#else
+ // XXX Fall back to the PyThread_tss_*() API.
+# error "no supported thread-local variable storage classifier"
+#endif
}
static inline void
-current_fast_clear(_PyRuntimeState *runtime)
+current_fast_clear(_PyRuntimeState *Py_UNUSED(runtime))
{
- _Py_atomic_store_relaxed(&runtime->tstate_current, (uintptr_t)NULL);
+#ifdef HAVE_THREAD_LOCAL
+ _Py_tss_tstate = NULL;
+#else
+ // XXX Fall back to the PyThread_tss_*() API.
+# error "no supported thread-local variable storage classifier"
+#endif
}
#define tstate_verify_not_active(tstate) \
@@ -84,6 +104,12 @@ current_fast_clear(_PyRuntimeState *runtime)
_Py_FatalErrorFormat(__func__, "tstate %p is still current", tstate); \
}
+PyThreadState *
+_PyThreadState_GetCurrent(void)
+{
+ return current_fast_get(&_PyRuntime);
+}
+
//------------------------------------------------
// the thread state bound to the current OS thread
@@ -354,7 +380,7 @@ _Py_COMP_DIAG_IGNORE_DEPR_DECLS
static const _PyRuntimeState initial = _PyRuntimeState_INIT(_PyRuntime);
_Py_COMP_DIAG_POP
-#define NUMLOCKS 4
+#define NUMLOCKS 5
static int
alloc_for_runtime(PyThread_type_lock locks[NUMLOCKS])
@@ -408,6 +434,7 @@ init_runtime(_PyRuntimeState *runtime,
&runtime->xidregistry.mutex,
&runtime->getargs.mutex,
&runtime->unicode_state.ids.lock,
+ &runtime->imports.extensions.mutex,
};
for (int i = 0; i < NUMLOCKS; i++) {
assert(locks[i] != NULL);
@@ -492,6 +519,7 @@ _PyRuntimeState_Fini(_PyRuntimeState *runtime)
&runtime->xidregistry.mutex,
&runtime->getargs.mutex,
&runtime->unicode_state.ids.lock,
+ &runtime->imports.extensions.mutex,
};
for (int i = 0; i < NUMLOCKS; i++) {
FREE_LOCK(*lockptrs[i]);
@@ -520,6 +548,7 @@ _PyRuntimeState_ReInitThreads(_PyRuntimeState *runtime)
&runtime->xidregistry.mutex,
&runtime->getargs.mutex,
&runtime->unicode_state.ids.lock,
+ &runtime->imports.extensions.mutex,
};
int reinit_err = 0;
for (int i = 0; i < NUMLOCKS; i++) {
@@ -645,15 +674,23 @@ init_interpreter(PyInterpreterState *interp,
assert(next != NULL || (interp == runtime->interpreters.main));
interp->next = next;
+ /* Initialize obmalloc, but only for subinterpreters,
+ since the main interpreter is initialized statically. */
+ if (interp != &runtime->_main_interpreter) {
+ poolp temp[OBMALLOC_USED_POOLS_SIZE] = \
+ _obmalloc_pools_INIT(interp->obmalloc.pools);
+ memcpy(&interp->obmalloc.pools.used, temp, sizeof(temp));
+ }
+
_PyEval_InitState(&interp->ceval, pending_lock);
_PyGC_InitState(&interp->gc);
PyConfig_InitPythonConfig(&interp->config);
_PyType_InitCache(interp);
- for(int i = 0; i < PY_MONITORING_UNGROUPED_EVENTS; i++) {
+ for (int i = 0; i < PY_MONITORING_UNGROUPED_EVENTS; i++) {
interp->monitors.tools[i] = 0;
}
for (int t = 0; t < PY_MONITORING_TOOL_IDS; t++) {
- for(int e = 0; e < PY_MONITORING_EVENTS; e++) {
+ for (int e = 0; e < PY_MONITORING_EVENTS; e++) {
interp->monitoring_callables[t][e] = NULL;
}
@@ -797,11 +834,11 @@ interpreter_clear(PyInterpreterState *interp, PyThreadState *tstate)
Py_CLEAR(interp->audit_hooks);
- for(int i = 0; i < PY_MONITORING_UNGROUPED_EVENTS; i++) {
+ for (int i = 0; i < PY_MONITORING_UNGROUPED_EVENTS; i++) {
interp->monitors.tools[i] = 0;
}
for (int t = 0; t < PY_MONITORING_TOOL_IDS; t++) {
- for(int e = 0; e < PY_MONITORING_EVENTS; e++) {
+ for (int e = 0; e < PY_MONITORING_EVENTS; e++) {
Py_CLEAR(interp->monitoring_callables[t][e]);
}
}
@@ -915,11 +952,12 @@ PyInterpreterState_Delete(PyInterpreterState *interp)
_PyEval_FiniState(&interp->ceval);
-#ifdef Py_REF_DEBUG
- // XXX This call should be done at the end of clear_interpreter(),
+ // XXX These two calls should be done at the end of clear_interpreter(),
// but currently some objects get decref'ed after that.
+#ifdef Py_REF_DEBUG
_PyInterpreterState_FinalizeRefTotal(interp);
#endif
+ _PyInterpreterState_FinalizeAllocatedBlocks(interp);
HEAD_LOCK(runtime);
PyInterpreterState **p;
@@ -2294,11 +2332,11 @@ _PyCrossInterpreterData_InitWithSize(_PyCrossInterpreterData *data,
// where it was allocated, so the interpreter is required.
assert(interp != NULL);
_PyCrossInterpreterData_Init(data, interp, NULL, obj, new_object);
- data->data = PyMem_Malloc(size);
+ data->data = PyMem_RawMalloc(size);
if (data->data == NULL) {
return -1;
}
- data->free = PyMem_Free;
+ data->free = PyMem_RawFree;
return 0;
}
diff --git a/Python/specialize.c b/Python/specialize.c
index 3fa28f409892dc..b1cc66124cfa4a 100644
--- a/Python/specialize.c
+++ b/Python/specialize.c
@@ -96,6 +96,7 @@ _Py_GetSpecializationStats(void) {
return NULL;
}
int err = 0;
+ err += add_stat_dict(stats, LOAD_SUPER_ATTR, "load_super_attr");
err += add_stat_dict(stats, LOAD_ATTR, "load_attr");
err += add_stat_dict(stats, LOAD_GLOBAL, "load_global");
err += add_stat_dict(stats, BINARY_SUBSCR, "binary_subscr");
@@ -147,7 +148,7 @@ print_spec_stats(FILE *out, OpcodeStats *stats)
PRIu64 "\n", i, j, val);
}
}
- for(int j = 0; j < 256; j++) {
+ for (int j = 0; j < 256; j++) {
if (stats[i].pair_count[j]) {
fprintf(out, "opcode[%d].pair_count[%d] : %" PRIu64 "\n",
i, j, stats[i].pair_count[j]);
@@ -320,6 +321,14 @@ _PyCode_Quicken(PyCodeObject *code)
#define SPEC_FAIL_LOAD_GLOBAL_NON_DICT 17
#define SPEC_FAIL_LOAD_GLOBAL_NON_STRING_OR_SPLIT 18
+/* Super */
+
+#define SPEC_FAIL_SUPER_NOT_LOAD_METHOD 9
+#define SPEC_FAIL_SUPER_BAD_CLASS 10
+#define SPEC_FAIL_SUPER_SHADOWED 11
+#define SPEC_FAIL_SUPER_NOT_METHOD 12
+#define SPEC_FAIL_SUPER_ERROR_OR_NOT_FOUND 13
+
/* Attributes */
#define SPEC_FAIL_ATTR_OVERRIDING_DESCRIPTOR 9
@@ -505,6 +514,54 @@ specialize_module_load_attr(
/* Attribute specialization */
+void
+_Py_Specialize_LoadSuperAttr(PyObject *global_super, PyObject *cls, PyObject *self,
+ _Py_CODEUNIT *instr, PyObject *name, int load_method) {
+ assert(ENABLE_SPECIALIZATION);
+ assert(_PyOpcode_Caches[LOAD_SUPER_ATTR] == INLINE_CACHE_ENTRIES_LOAD_SUPER_ATTR);
+ _PySuperAttrCache *cache = (_PySuperAttrCache *)(instr + 1);
+ if (!load_method) {
+ SPECIALIZATION_FAIL(LOAD_SUPER_ATTR, SPEC_FAIL_SUPER_NOT_LOAD_METHOD);
+ goto fail;
+ }
+ if (global_super != (PyObject *)&PySuper_Type) {
+ SPECIALIZATION_FAIL(LOAD_SUPER_ATTR, SPEC_FAIL_SUPER_SHADOWED);
+ goto fail;
+ }
+ if (!PyType_Check(cls)) {
+ SPECIALIZATION_FAIL(LOAD_SUPER_ATTR, SPEC_FAIL_SUPER_BAD_CLASS);
+ goto fail;
+ }
+ PyTypeObject *tp = (PyTypeObject *)cls;
+ PyObject *res = _PySuper_LookupDescr(tp, self, name);
+ if (res == NULL) {
+ SPECIALIZATION_FAIL(LOAD_SUPER_ATTR, SPEC_FAIL_SUPER_ERROR_OR_NOT_FOUND);
+ PyErr_Clear();
+ goto fail;
+ }
+ if (_PyType_HasFeature(Py_TYPE(res), Py_TPFLAGS_METHOD_DESCRIPTOR)) {
+ write_u32(cache->class_version, tp->tp_version_tag);
+ write_u32(cache->self_type_version, Py_TYPE(self)->tp_version_tag);
+ write_obj(cache->method, res); // borrowed
+ instr->op.code = LOAD_SUPER_ATTR_METHOD;
+ Py_DECREF(res);
+ goto success;
+ }
+ Py_DECREF(res);
+ SPECIALIZATION_FAIL(LOAD_SUPER_ATTR, SPEC_FAIL_SUPER_NOT_METHOD);
+
+fail:
+ STAT_INC(LOAD_SUPER_ATTR, failure);
+ assert(!PyErr_Occurred());
+ instr->op.code = LOAD_SUPER_ATTR;
+ cache->counter = adaptive_counter_backoff(cache->counter);
+ return;
+success:
+ STAT_INC(LOAD_SUPER_ATTR, success);
+ assert(!PyErr_Occurred());
+ cache->counter = adaptive_counter_cooldown();
+}
+
typedef enum {
OVERRIDING, /* Is an overriding descriptor, and will remain so. */
METHOD, /* Attribute has Py_TPFLAGS_METHOD_DESCRIPTOR set */
diff --git a/Python/stdlib_module_names.h b/Python/stdlib_module_names.h
index e9f0061a59d3ba..27f42e5202e571 100644
--- a/Python/stdlib_module_names.h
+++ b/Python/stdlib_module_names.h
@@ -164,7 +164,6 @@ static const char* _Py_stdlib_module_names[] = {
"idlelib",
"imaplib",
"imghdr",
-"imp",
"importlib",
"inspect",
"io",
diff --git a/Python/sysmodule.c b/Python/sysmodule.c
index 4d693a1be1f89e..d673e40af5e1de 100644
--- a/Python/sysmodule.c
+++ b/Python/sysmodule.c
@@ -1871,9 +1871,23 @@ static Py_ssize_t
sys_getallocatedblocks_impl(PyObject *module)
/*[clinic end generated code: output=f0c4e873f0b6dcf7 input=dab13ee346a0673e]*/
{
- return _Py_GetAllocatedBlocks();
+ // It might make sense to return the count
+ // for just the current interpreter.
+ return _Py_GetGlobalAllocatedBlocks();
}
+/*[clinic input]
+sys.getunicodeinternedsize -> Py_ssize_t
+
+Return the number of elements of the unicode interned dictionary
+[clinic start generated code]*/
+
+static Py_ssize_t
+sys_getunicodeinternedsize_impl(PyObject *module)
+/*[clinic end generated code: output=ad0e4c9738ed4129 input=726298eaa063347a]*/
+{
+ return _PyUnicode_InternedSize();
+}
/*[clinic input]
sys._getframe
@@ -2243,6 +2257,7 @@ static PyMethodDef sys_methods[] = {
SYS_GETDEFAULTENCODING_METHODDEF
SYS_GETDLOPENFLAGS_METHODDEF
SYS_GETALLOCATEDBLOCKS_METHODDEF
+ SYS_GETUNICODEINTERNEDSIZE_METHODDEF
SYS_GETFILESYSTEMENCODING_METHODDEF
SYS_GETFILESYSTEMENCODEERRORS_METHODDEF
#ifdef Py_TRACE_REFS
@@ -3151,10 +3166,8 @@ _PySys_InitCore(PyThreadState *tstate, PyObject *sysdict)
SET_SYS("float_info", PyFloat_GetInfo());
SET_SYS("int_info", PyLong_GetInfo());
/* initialize hash_info */
- if (Hash_InfoType.tp_name == NULL) {
- if (_PyStructSequence_InitBuiltin(&Hash_InfoType, &hash_info_desc) < 0) {
- goto type_init_failed;
- }
+ if (_PyStructSequence_InitBuiltin(&Hash_InfoType, &hash_info_desc) < 0) {
+ goto type_init_failed;
}
SET_SYS("hash_info", get_hash_info(tstate));
SET_SYS("maxunicode", PyLong_FromLong(0x10FFFF));
@@ -3176,11 +3189,9 @@ _PySys_InitCore(PyThreadState *tstate, PyObject *sysdict)
#define ENSURE_INFO_TYPE(TYPE, DESC) \
do { \
- if (TYPE.tp_name == NULL) { \
- if (_PyStructSequence_InitBuiltinWithFlags( \
- &TYPE, &DESC, Py_TPFLAGS_DISALLOW_INSTANTIATION) < 0) { \
- goto type_init_failed; \
- } \
+ if (_PyStructSequence_InitBuiltinWithFlags( \
+ &TYPE, &DESC, Py_TPFLAGS_DISALLOW_INSTANTIATION) < 0) { \
+ goto type_init_failed; \
} \
} while (0)
@@ -3215,11 +3226,9 @@ _PySys_InitCore(PyThreadState *tstate, PyObject *sysdict)
SET_SYS("thread_info", PyThread_GetInfo());
/* initialize asyncgen_hooks */
- if (AsyncGenHooksType.tp_name == NULL) {
- if (_PyStructSequence_InitBuiltin(
- &AsyncGenHooksType, &asyncgen_hooks_desc) < 0) {
- goto type_init_failed;
- }
+ if (_PyStructSequence_InitBuiltin(
+ &AsyncGenHooksType, &asyncgen_hooks_desc) < 0) {
+ goto type_init_failed;
}
#ifdef __EMSCRIPTEN__
diff --git a/Python/thread.c b/Python/thread.c
index 4581f1af043a37..7fdedb0b9b7e26 100644
--- a/Python/thread.c
+++ b/Python/thread.c
@@ -137,10 +137,8 @@ PyThread_GetInfo(void)
int len;
#endif
- if (ThreadInfoType.tp_name == 0) {
- if (_PyStructSequence_InitBuiltin(&ThreadInfoType,
- &threadinfo_desc) < 0)
- return NULL;
+ if (_PyStructSequence_InitBuiltin(&ThreadInfoType, &threadinfo_desc) < 0) {
+ return NULL;
}
threadinfo = PyStructSequence_New(&ThreadInfoType);
diff --git a/Tools/build/deepfreeze.py b/Tools/build/deepfreeze.py
index aba5fecd8b1a99..b084d3e457f782 100644
--- a/Tools/build/deepfreeze.py
+++ b/Tools/build/deepfreeze.py
@@ -142,7 +142,7 @@ def block(self, prefix: str, suffix: str = "") -> None:
def object_head(self, typename: str) -> None:
with self.block(".ob_base =", ","):
- self.write(f".ob_refcnt = 999999999,")
+ self.write(f".ob_refcnt = _Py_IMMORTAL_REFCNT,")
self.write(f".ob_type = &{typename},")
def object_var_head(self, typename: str, size: int) -> None:
@@ -175,6 +175,12 @@ def generate_unicode(self, name: str, s: str) -> str:
return f"&_Py_STR({strings[s]})"
if s in identifiers:
return f"&_Py_ID({s})"
+ if len(s) == 1:
+ c = ord(s)
+ if c < 128:
+ return f"(PyObject *)&_Py_SINGLETON(strings).ascii[{c}]"
+ elif c < 256:
+ return f"(PyObject *)&_Py_SINGLETON(strings).latin1[{c - 128}]"
if re.match(r'\A[A-Za-z0-9_]+\Z', s):
name = f"const_str_{s}"
kind, ascii = analyze_character_width(s)
diff --git a/Tools/build/generate_stdlib_module_names.py b/Tools/build/generate_stdlib_module_names.py
index d15e5e2d5450d7..7e0e9602a10765 100644
--- a/Tools/build/generate_stdlib_module_names.py
+++ b/Tools/build/generate_stdlib_module_names.py
@@ -1,5 +1,5 @@
# This script lists the names of standard library modules
-# to update Python/stdlib_mod_names.h
+# to update Python/stdlib_module_names.h
import _imp
import os.path
import re
diff --git a/Tools/build/generate_token.py b/Tools/build/generate_token.py
index fc12835b7762ad..3bd307c1733867 100755
--- a/Tools/build/generate_token.py
+++ b/Tools/build/generate_token.py
@@ -80,6 +80,8 @@ def update_file(file, content):
(x) == NEWLINE || \\
(x) == INDENT || \\
(x) == DEDENT)
+#define ISSTRINGLIT(x) ((x) == STRING || \\
+ (x) == FSTRING_MIDDLE)
// Symbols exported for test_peg_generator
diff --git a/Tools/build/verify_ensurepip_wheels.py b/Tools/build/verify_ensurepip_wheels.py
index 044d1fd6b3cf2d..09fd5d9e3103ac 100755
--- a/Tools/build/verify_ensurepip_wheels.py
+++ b/Tools/build/verify_ensurepip_wheels.py
@@ -14,7 +14,7 @@
from pathlib import Path
from urllib.request import urlopen
-PACKAGE_NAMES = ("pip", "setuptools")
+PACKAGE_NAMES = ("pip",)
ENSURE_PIP_ROOT = Path(__file__).parent.parent.parent / "Lib/ensurepip"
WHEEL_DIR = ENSURE_PIP_ROOT / "_bundled"
ENSURE_PIP_INIT_PY_TEXT = (ENSURE_PIP_ROOT / "__init__.py").read_text(encoding="utf-8")
diff --git a/Tools/c-analyzer/TODO b/Tools/c-analyzer/TODO
index 43760369b1980e..27a535814ea52b 100644
--- a/Tools/c-analyzer/TODO
+++ b/Tools/c-analyzer/TODO
@@ -495,7 +495,6 @@ Python/import.c:PyImport_ImportModuleLevelObject():PyId___path__ _Py_IDENTIFIER(
Python/import.c:PyImport_ImportModuleLevelObject():PyId___spec__ _Py_IDENTIFIER(__spec__)
Python/import.c:PyImport_ImportModuleLevelObject():PyId__handle_fromlist _Py_IDENTIFIER(_handle_fromlist)
Python/import.c:PyImport_ImportModuleLevelObject():PyId__lock_unlock_module _Py_IDENTIFIER(_lock_unlock_module)
-Python/import.c:PyImport_ReloadModule():PyId_imp _Py_IDENTIFIER(imp)
Python/import.c:PyImport_ReloadModule():PyId_reload _Py_IDENTIFIER(reload)
Python/import.c:_PyImportZip_Init():PyId_zipimporter _Py_IDENTIFIER(zipimporter)
Python/import.c:import_find_and_load():PyId__find_and_load _Py_IDENTIFIER(_find_and_load)
diff --git a/Tools/c-analyzer/cpython/globals-to-fix.tsv b/Tools/c-analyzer/cpython/globals-to-fix.tsv
index 5c173b1041e3e4..4dfbbe72df56a0 100644
--- a/Tools/c-analyzer/cpython/globals-to-fix.tsv
+++ b/Tools/c-analyzer/cpython/globals-to-fix.tsv
@@ -341,7 +341,6 @@ Modules/_testcapi/vectorcall.c - MethodDescriptor2_Type -
##-----------------------
## static types
-Modules/_ctypes/_ctypes.c - DictRemover_Type -
Modules/_ctypes/_ctypes.c - PyCArrayType_Type -
Modules/_ctypes/_ctypes.c - PyCArray_Type -
Modules/_ctypes/_ctypes.c - PyCData_Type -
@@ -352,18 +351,14 @@ Modules/_ctypes/_ctypes.c - PyCPointer_Type -
Modules/_ctypes/_ctypes.c - PyCSimpleType_Type -
Modules/_ctypes/_ctypes.c - PyCStructType_Type -
Modules/_ctypes/_ctypes.c - Simple_Type -
-Modules/_ctypes/_ctypes.c - StructParam_Type -
Modules/_ctypes/_ctypes.c - Struct_Type -
Modules/_ctypes/_ctypes.c - UnionType_Type -
Modules/_ctypes/_ctypes.c - Union_Type -
-Modules/_ctypes/callbacks.c - PyCThunk_Type -
Modules/_ctypes/callproc.c - PyCArg_Type -
-Modules/_ctypes/cfield.c - PyCField_Type -
Modules/_ctypes/ctypes.h - PyCArg_Type -
Modules/_ctypes/ctypes.h - PyCArrayType_Type -
Modules/_ctypes/ctypes.h - PyCArray_Type -
Modules/_ctypes/ctypes.h - PyCData_Type -
-Modules/_ctypes/ctypes.h - PyCField_Type -
Modules/_ctypes/ctypes.h - PyCFuncPtrType_Type -
Modules/_ctypes/ctypes.h - PyCFuncPtr_Type -
Modules/_ctypes/ctypes.h - PyCPointerType_Type -
@@ -371,7 +366,6 @@ Modules/_ctypes/ctypes.h - PyCPointer_Type -
Modules/_ctypes/ctypes.h - PyCSimpleType_Type -
Modules/_ctypes/ctypes.h - PyCStgDict_Type -
Modules/_ctypes/ctypes.h - PyCStructType_Type -
-Modules/_ctypes/ctypes.h - PyCThunk_Type -
Modules/_ctypes/ctypes.h - PyExc_ArgError -
Modules/_ctypes/ctypes.h - _ctypes_conversion_encoding -
Modules/_ctypes/ctypes.h - _ctypes_conversion_errors -
@@ -454,6 +448,8 @@ Modules/_decimal/_decimal.c - SignalTuple -
Modules/_asynciomodule.c - fi_freelist -
Modules/_asynciomodule.c - fi_freelist_len -
Modules/_ctypes/_ctypes.c - _ctypes_ptrtype_cache -
+Modules/_ctypes/_ctypes.c - global_state -
+Modules/_ctypes/ctypes.h - global_state -
Modules/_tkinter.c - tcl_lock -
Modules/_tkinter.c - excInCmd -
Modules/_tkinter.c - valInCmd -
@@ -485,29 +481,6 @@ Modules/_decimal/_decimal.c - _py_float_abs -
Modules/_decimal/_decimal.c - _py_long_bit_length -
Modules/_decimal/_decimal.c - _py_float_as_integer_ratio -
Modules/_elementtree.c - expat_capi -
-Modules/cjkcodecs/_codecs_hk.c - big5_encmap -
-Modules/cjkcodecs/_codecs_hk.c - big5_decmap -
-Modules/cjkcodecs/_codecs_hk.c big5hkscs_codec_init initialized -
-Modules/cjkcodecs/_codecs_iso2022.c - cp949_encmap -
-Modules/cjkcodecs/_codecs_iso2022.c - ksx1001_decmap -
-Modules/cjkcodecs/_codecs_iso2022.c - jisxcommon_encmap -
-Modules/cjkcodecs/_codecs_iso2022.c - jisx0208_decmap -
-Modules/cjkcodecs/_codecs_iso2022.c - jisx0212_decmap -
-Modules/cjkcodecs/_codecs_iso2022.c - jisx0213_bmp_encmap -
-Modules/cjkcodecs/_codecs_iso2022.c - jisx0213_1_bmp_decmap -
-Modules/cjkcodecs/_codecs_iso2022.c - jisx0213_2_bmp_decmap -
-Modules/cjkcodecs/_codecs_iso2022.c - jisx0213_emp_encmap -
-Modules/cjkcodecs/_codecs_iso2022.c - jisx0213_1_emp_decmap -
-Modules/cjkcodecs/_codecs_iso2022.c - jisx0213_2_emp_decmap -
-Modules/cjkcodecs/_codecs_iso2022.c - gbcommon_encmap -
-Modules/cjkcodecs/_codecs_iso2022.c - gb2312_decmap -
-Modules/cjkcodecs/_codecs_iso2022.c ksx1001_init initialized -
-Modules/cjkcodecs/_codecs_iso2022.c jisx0208_init initialized -
-Modules/cjkcodecs/_codecs_iso2022.c jisx0212_init initialized -
-Modules/cjkcodecs/_codecs_iso2022.c jisx0213_init initialized -
-Modules/cjkcodecs/_codecs_iso2022.c gb2312_init initialized -
-Modules/cjkcodecs/cjkcodecs.h - codec_list -
-Modules/cjkcodecs/cjkcodecs.h - mapping_list -
Modules/readline.c - libedit_append_replace_history_offset -
Modules/readline.c - using_libedit_emulation -
Modules/readline.c - libedit_history_start -
diff --git a/Tools/c-analyzer/cpython/ignored.tsv b/Tools/c-analyzer/cpython/ignored.tsv
index a8ba88efc732fb..7a5d7d45f5184b 100644
--- a/Tools/c-analyzer/cpython/ignored.tsv
+++ b/Tools/c-analyzer/cpython/ignored.tsv
@@ -309,6 +309,7 @@ Objects/obmalloc.c - _PyMem -
Objects/obmalloc.c - _PyMem_Debug -
Objects/obmalloc.c - _PyMem_Raw -
Objects/obmalloc.c - _PyObject -
+Objects/obmalloc.c - last_final_leaks -
Objects/obmalloc.c - usedpools -
Objects/typeobject.c - name_op -
Objects/typeobject.c - slotdefs -
diff --git a/Tools/importbench/importbench.py b/Tools/importbench/importbench.py
index 6c4a537ad86e6c..619263b553c081 100644
--- a/Tools/importbench/importbench.py
+++ b/Tools/importbench/importbench.py
@@ -6,7 +6,7 @@
"""
from test.test_importlib import util
import decimal
-import imp
+from importlib.util import cache_from_source
import importlib
import importlib.machinery
import json
@@ -65,7 +65,7 @@ def source_wo_bytecode(seconds, repeat):
name = '__importlib_test_benchmark__'
# Clears out sys.modules and puts an entry at the front of sys.path.
with util.create_modules(name) as mapping:
- assert not os.path.exists(imp.cache_from_source(mapping[name]))
+ assert not os.path.exists(cache_from_source(mapping[name]))
sys.meta_path.append(importlib.machinery.PathFinder)
loader = (importlib.machinery.SourceFileLoader,
importlib.machinery.SOURCE_SUFFIXES)
@@ -80,7 +80,7 @@ def _wo_bytecode(module):
name = module.__name__
def benchmark_wo_bytecode(seconds, repeat):
"""Source w/o bytecode: {}"""
- bytecode_path = imp.cache_from_source(module.__file__)
+ bytecode_path = cache_from_source(module.__file__)
if os.path.exists(bytecode_path):
os.unlink(bytecode_path)
sys.dont_write_bytecode = True
@@ -108,9 +108,9 @@ def source_writing_bytecode(seconds, repeat):
sys.path_hooks.append(importlib.machinery.FileFinder.path_hook(loader))
def cleanup():
sys.modules.pop(name)
- os.unlink(imp.cache_from_source(mapping[name]))
+ os.unlink(cache_from_source(mapping[name]))
for result in bench(name, cleanup, repeat=repeat, seconds=seconds):
- assert not os.path.exists(imp.cache_from_source(mapping[name]))
+ assert not os.path.exists(cache_from_source(mapping[name]))
yield result
@@ -121,7 +121,7 @@ def writing_bytecode_benchmark(seconds, repeat):
assert not sys.dont_write_bytecode
def cleanup():
sys.modules.pop(name)
- os.unlink(imp.cache_from_source(module.__file__))
+ os.unlink(cache_from_source(module.__file__))
yield from bench(name, cleanup, repeat=repeat, seconds=seconds)
writing_bytecode_benchmark.__doc__ = (
@@ -141,7 +141,7 @@ def source_using_bytecode(seconds, repeat):
importlib.machinery.SOURCE_SUFFIXES)
sys.path_hooks.append(importlib.machinery.FileFinder.path_hook(loader))
py_compile.compile(mapping[name])
- assert os.path.exists(imp.cache_from_source(mapping[name]))
+ assert os.path.exists(cache_from_source(mapping[name]))
yield from bench(name, lambda: sys.modules.pop(name), repeat=repeat,
seconds=seconds)
diff --git a/Tools/peg_generator/pegen/c_generator.py b/Tools/peg_generator/pegen/c_generator.py
index e72ce7afdc4796..f57b6275f671d3 100644
--- a/Tools/peg_generator/pegen/c_generator.py
+++ b/Tools/peg_generator/pegen/c_generator.py
@@ -68,6 +68,7 @@ class NodeTypes(Enum):
KEYWORD = 4
SOFT_KEYWORD = 5
CUT_OPERATOR = 6
+ F_STRING_CHUNK = 7
BASE_NODETYPES = {
diff --git a/Tools/wasm/wasm_assets.py b/Tools/wasm/wasm_assets.py
index 9dc8bda4017e2c..1fc97fd5e70a10 100755
--- a/Tools/wasm/wasm_assets.py
+++ b/Tools/wasm/wasm_assets.py
@@ -6,7 +6,8 @@
- a stripped down, pyc-only stdlib zip file, e.g. {PREFIX}/lib/python311.zip
- os.py as marker module {PREFIX}/lib/python3.11/os.py
-- empty lib-dynload directory, to make sure it is copied into the bundle {PREFIX}/lib/python3.11/lib-dynload/.empty
+- empty lib-dynload directory, to make sure it is copied into the bundle:
+ {PREFIX}/lib/python3.11/lib-dynload/.empty
"""
import argparse
diff --git a/Tools/wasm/wasm_build.py b/Tools/wasm/wasm_build.py
index 493682c5b138a3..241a5d4eed5ae8 100755
--- a/Tools/wasm/wasm_build.py
+++ b/Tools/wasm/wasm_build.py
@@ -73,7 +73,7 @@
run "make clean -C '{SRCDIR}'".
"""
-INSTALL_NATIVE = f"""
+INSTALL_NATIVE = """
Builds require a C compiler (gcc, clang), make, pkg-config, and development
headers for dependencies like zlib.
@@ -598,7 +598,7 @@ def run_browser(self, bind="127.0.0.1", port=8000):
end = time.monotonic() + 3.0
while time.monotonic() < end and srv.returncode is None:
try:
- with socket.create_connection((bind, port), timeout=0.1) as s:
+ with socket.create_connection((bind, port), timeout=0.1) as _:
pass
except OSError:
time.sleep(0.01)
diff --git a/configure b/configure
index 4ae8258438e620..8133d47f61355b 100755
--- a/configure
+++ b/configure
@@ -892,6 +892,8 @@ PGO_PROF_USE_FLAG
PGO_PROF_GEN_FLAG
MERGE_FDATA
LLVM_BOLT
+ac_ct_READELF
+READELF
PREBOLT_RULE
LLVM_AR_FOUND
LLVM_AR
@@ -7916,6 +7918,112 @@ if test "$Py_BOLT" = 'true' ; then
DEF_MAKE_ALL_RULE="bolt-opt"
DEF_MAKE_RULE="build_all"
+
+ if test -n "$ac_tool_prefix"; then
+ for ac_prog in readelf
+ do
+ # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args.
+set dummy $ac_tool_prefix$ac_prog; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_READELF+:} false; then :
+ $as_echo_n "(cached) " >&6
+else
+ if test -n "$READELF"; then
+ ac_cv_prog_READELF="$READELF" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+ IFS=$as_save_IFS
+ test -z "$as_dir" && as_dir=.
+ for ac_exec_ext in '' $ac_executable_extensions; do
+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
+ ac_cv_prog_READELF="$ac_tool_prefix$ac_prog"
+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+ break 2
+ fi
+done
+ done
+IFS=$as_save_IFS
+
+fi
+fi
+READELF=$ac_cv_prog_READELF
+if test -n "$READELF"; then
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $READELF" >&5
+$as_echo "$READELF" >&6; }
+else
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+ test -n "$READELF" && break
+ done
+fi
+if test -z "$READELF"; then
+ ac_ct_READELF=$READELF
+ for ac_prog in readelf
+do
+ # Extract the first word of "$ac_prog", so it can be a program name with args.
+set dummy $ac_prog; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_ac_ct_READELF+:} false; then :
+ $as_echo_n "(cached) " >&6
+else
+ if test -n "$ac_ct_READELF"; then
+ ac_cv_prog_ac_ct_READELF="$ac_ct_READELF" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+ IFS=$as_save_IFS
+ test -z "$as_dir" && as_dir=.
+ for ac_exec_ext in '' $ac_executable_extensions; do
+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
+ ac_cv_prog_ac_ct_READELF="$ac_prog"
+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+ break 2
+ fi
+done
+ done
+IFS=$as_save_IFS
+
+fi
+fi
+ac_ct_READELF=$ac_cv_prog_ac_ct_READELF
+if test -n "$ac_ct_READELF"; then
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_READELF" >&5
+$as_echo "$ac_ct_READELF" >&6; }
+else
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+ test -n "$ac_ct_READELF" && break
+done
+
+ if test "x$ac_ct_READELF" = x; then
+ READELF=""notfound""
+ else
+ case $cross_compiling:$ac_tool_warned in
+yes:)
+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
+ac_tool_warned=yes ;;
+esac
+ READELF=$ac_ct_READELF
+ fi
+fi
+
+  if test "$READELF" = "notfound"
+ then
+ as_fn_error $? "readelf is required for a --enable-bolt build but could not be found." "$LINENO" 5
+ fi
+
# -fno-reorder-blocks-and-partition is required for bolt to work.
# Possibly GCC only.
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether C compiler accepts -fno-reorder-blocks-and-partition" >&5
diff --git a/configure.ac b/configure.ac
index 4d9eb46f5ce7d8..3f20d8980d8abc 100644
--- a/configure.ac
+++ b/configure.ac
@@ -1938,6 +1938,13 @@ if test "$Py_BOLT" = 'true' ; then
DEF_MAKE_ALL_RULE="bolt-opt"
DEF_MAKE_RULE="build_all"
+ AC_SUBST(READELF)
+ AC_CHECK_TOOLS(READELF, [readelf], "notfound")
+  if test "$READELF" = "notfound"
+ then
+ AC_MSG_ERROR([readelf is required for a --enable-bolt build but could not be found.])
+ fi
+
# -fno-reorder-blocks-and-partition is required for bolt to work.
# Possibly GCC only.
AX_CHECK_COMPILE_FLAG([-fno-reorder-blocks-and-partition],[