diff --git a/.github/workflows/mypy_primer_comment.yml b/.github/workflows/mypy_primer_comment.yml index 12ce91c12910..6e3bb590364f 100644 --- a/.github/workflows/mypy_primer_comment.yml +++ b/.github/workflows/mypy_primer_comment.yml @@ -78,7 +78,7 @@ jobs: if (data.trim()) { body = 'Diff from [mypy_primer](https://github.com/hauntsaninja/mypy_primer), showing the effect of this PR on open source code:\n```diff\n' + data + '```' } else { - body = 'According to [mypy_primer](https://github.com/hauntsaninja/mypy_primer), this change has no effect on the checked open source code. 🤖🎉' + body = "According to [mypy_primer](https://github.com/hauntsaninja/mypy_primer), this change doesn't affect type check results on a corpus of open source code. ✅" } const prNumber = parseInt(fs.readFileSync("pr_number.txt", { encoding: "utf8" })) await github.rest.issues.createComment({ diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index ed0c82ef5fa1..13cb2e7fa8f0 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -93,6 +93,9 @@ jobs: arch: x64 os: windows-latest toxenv: type + # We also run these checks with pre-commit in CI, + # but it's useful to run them with tox too, + # to ensure the tox env works as expected - name: Formatting with Black + isort and code style with flake8 python: '3.7' arch: x64 diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index d794c780ad3b..e83d694d38d0 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,16 +1,20 @@ repos: - repo: https://github.com/psf/black - rev: 22.12.0 # must match test-requirements.txt + rev: 23.3.0 # must match test-requirements.txt hooks: - id: black - repo: https://github.com/pycqa/isort - rev: 5.11.5 # must match test-requirements.txt + rev: 5.11.5 # must match test-requirements.txt (cannot use version 5.12 until python 3.7 support is dropped) hooks: - id: isort - repo: https://github.com/pycqa/flake8 - rev: 5.0.4 # must match test-requirements.txt + rev: 5.0.4 # must match test-requirements.txt (cannot use version 6 until python 3.7 support is dropped) hooks: - id: flake8 additional_dependencies: - flake8-bugbear==22.12.6 # must match test-requirements.txt - flake8-noqa==1.3.0 # must match test-requirements.txt + +ci: + # We run flake8 as part of our GitHub Actions suite in CI + skip: [flake8] diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 2b2e6cdb9734..72cb609eccfd 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -12,27 +12,33 @@ issue tracker, pull requests, and chat, is expected to treat other people with respect and more generally to follow the guidelines articulated in the [Python Community Code of Conduct](https://www.python.org/psf/codeofconduct/). - ## Getting started with development ### Setup -#### (1) Clone the mypy repository and enter into it -``` -git clone https://github.com/python/mypy.git +#### (1) Fork the mypy repository + +Within Github, navigate to and fork the repository. + +#### (2) Clone the mypy repository and enter into it + +```bash +git clone git@github.com:/mypy.git cd mypy ``` -#### (2) Create then activate a virtual environment -``` +#### (3) Create then activate a virtual environment + +```bash # On Windows, the commands may be slightly different. 
For more details, see # https://docs.python.org/3/library/venv.html#creating-virtual-environments python3 -m venv venv source venv/bin/activate ``` -#### (3) Install the test requirements and the project -``` +#### (4) Install the test requirements and the project + +```bash python3 -m pip install -r test-requirements.txt python3 -m pip install -e . hash -r # This resets shell PATH cache, not necessary on Windows @@ -47,12 +53,14 @@ your PR. However, if you wish to do so, you can run the full test suite like this: -``` + +```bash python3 runtests.py ``` You can also use `tox` to run tests (`tox` handles setting up the test environment for you): -``` + +```bash tox run -e py # Or some specific python version: @@ -63,6 +71,7 @@ tox run -e lint ``` Some useful commands for running specific tests include: + ```bash # Use mypy to check mypy's own code python3 runtests.py self @@ -90,6 +99,7 @@ see [the README in the test-data directory](test-data/unit/README.md). If you're looking for things to help with, browse our [issue tracker](https://github.com/python/mypy/issues)! In particular, look for: + - [good first issues](https://github.com/python/mypy/labels/good-first-issue) - [good second issues](https://github.com/python/mypy/labels/good-second-issue) - [documentation issues](https://github.com/python/mypy/labels/documentation) @@ -151,28 +161,27 @@ You may also find other pages in the [Mypy developer guide](https://github.com/python/mypy/wiki/Developer-Guides) helpful in developing your change. - ## Core developer guidelines Core developers should follow these rules when processing pull requests: -* Always wait for tests to pass before merging PRs. -* Use "[Squash and merge](https://github.com/blog/2141-squash-your-commits)" +- Always wait for tests to pass before merging PRs. +- Use "[Squash and merge](https://github.com/blog/2141-squash-your-commits)" to merge PRs. -* Delete branches for merged PRs (by core devs pushing to the main repo). -* Edit the final commit message before merging to conform to the following +- Delete branches for merged PRs (by core devs pushing to the main repo). +- Edit the final commit message before merging to conform to the following style (we wish to have a clean `git log` output): - * When merging a multi-commit PR make sure that the commit message doesn't + - When merging a multi-commit PR make sure that the commit message doesn't contain the local history from the committer and the review history from the PR. Edit the message to only describe the end state of the PR. - * Make sure there is a *single* newline at the end of the commit message. + - Make sure there is a *single* newline at the end of the commit message. This way there is a single empty line between commits in `git log` output. - * Split lines as needed so that the maximum line length of the commit + - Split lines as needed so that the maximum line length of the commit message is under 80 characters, including the subject line. - * Capitalize the subject and each paragraph. - * Make sure that the subject of the commit message has no trailing dot. - * Use the imperative mood in the subject line (e.g. "Fix typo in README"). - * If the PR fixes an issue, make sure something like "Fixes #xxx." occurs + - Capitalize the subject and each paragraph. + - Make sure that the subject of the commit message has no trailing dot. + - Use the imperative mood in the subject line (e.g. "Fix typo in README"). 
+ - If the PR fixes an issue, make sure something like "Fixes #xxx." occurs in the body of the message (not in the subject). - * Use Markdown for formatting. + - Use Markdown for formatting. diff --git a/README.md b/README.md index 6c9f01968f92..164957b1491a 100644 --- a/README.md +++ b/README.md @@ -40,10 +40,10 @@ To report a bug or request an enhancement: tracker for that library To discuss a new type system feature: + - discuss at [typing-sig mailing list](https://mail.python.org/archives/list/typing-sig@python.org/) - there is also some historical discussion [here](https://github.com/python/typing/issues) - What is mypy? ------------- @@ -82,6 +82,7 @@ See [the documentation](https://mypy.readthedocs.io/en/stable/index.html) for more examples and information. In particular, see: + - [type hints cheat sheet](https://mypy.readthedocs.io/en/stable/cheat_sheet_py3.html) - [getting started](https://mypy.readthedocs.io/en/stable/getting_started.html) - [list of error codes](https://mypy.readthedocs.io/en/stable/error_code_list.html) @@ -91,67 +92,75 @@ Quick start Mypy can be installed using pip: - python3 -m pip install -U mypy +```bash +python3 -m pip install -U mypy +``` If you want to run the latest version of the code, you can install from the repo directly: - python3 -m pip install -U git+https://github.com/python/mypy.git - # or if you don't have 'git' installed - python3 -m pip install -U https://github.com/python/mypy/zipball/master +```bash +python3 -m pip install -U git+https://github.com/python/mypy.git +# or if you don't have 'git' installed +python3 -m pip install -U https://github.com/python/mypy/zipball/master +``` Now you can type-check the [statically typed parts] of a program like this: - mypy PROGRAM +```bash +mypy PROGRAM +``` You can always use the Python interpreter to run your statically typed programs, even if mypy reports type errors: - python3 PROGRAM +```bash +python3 PROGRAM +``` You can also try mypy in an [online playground](https://mypy-play.net/) (developed by Yusuke Miyazaki). 
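To make the quick start concrete, here is a minimal sketch of the kind of annotated program that `mypy PROGRAM` would check (the file name and the comment wording are illustrative, not taken from the README):

```python
# hello.py - a tiny program with one deliberate type error
def greeting(name: str) -> str:
    return "Hello, " + name

greeting("Alice")  # OK
greeting(123)      # mypy flags an incompatible argument type here;
                   # the program still runs under python3 if you choose to
```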
If you are working with large code bases, you can run mypy in [daemon mode], that will give much faster (often sub-second) incremental updates: - dmypy run -- PROGRAM +```bash +dmypy run -- PROGRAM +``` [statically typed parts]: https://mypy.readthedocs.io/en/latest/getting_started.html#function-signatures-and-dynamic-vs-static-typing [daemon mode]: https://mypy.readthedocs.io/en/stable/mypy_daemon.html - Integrations ------------ Mypy can be integrated into popular IDEs: -* Vim: - * Using [Syntastic](https://github.com/vim-syntastic/syntastic): in `~/.vimrc` add +- Vim: + - Using [Syntastic](https://github.com/vim-syntastic/syntastic): in `~/.vimrc` add `let g:syntastic_python_checkers=['mypy']` - * Using [ALE](https://github.com/dense-analysis/ale): should be enabled by default when `mypy` is installed, + - Using [ALE](https://github.com/dense-analysis/ale): should be enabled by default when `mypy` is installed, or can be explicitly enabled by adding `let b:ale_linters = ['mypy']` in `~/vim/ftplugin/python.vim` -* Emacs: using [Flycheck](https://github.com/flycheck/) -* Sublime Text: [SublimeLinter-contrib-mypy](https://github.com/fredcallaway/SublimeLinter-contrib-mypy) -* Atom: [linter-mypy](https://atom.io/packages/linter-mypy) -* PyCharm: [mypy plugin](https://github.com/dropbox/mypy-PyCharm-plugin) (PyCharm integrates +- Emacs: using [Flycheck](https://github.com/flycheck/) +- Sublime Text: [SublimeLinter-contrib-mypy](https://github.com/fredcallaway/SublimeLinter-contrib-mypy) +- Atom: [linter-mypy](https://atom.io/packages/linter-mypy) +- PyCharm: [mypy plugin](https://github.com/dropbox/mypy-PyCharm-plugin) (PyCharm integrates [its own implementation](https://www.jetbrains.com/help/pycharm/type-hinting-in-product.html) of [PEP 484](https://peps.python.org/pep-0484/)) -* VS Code: provides [basic integration](https://code.visualstudio.com/docs/python/linting#_mypy) with mypy. -* pre-commit: use [pre-commit mirrors-mypy](https://github.com/pre-commit/mirrors-mypy). +- VS Code: provides [basic integration](https://code.visualstudio.com/docs/python/linting#_mypy) with mypy. +- pre-commit: use [pre-commit mirrors-mypy](https://github.com/pre-commit/mirrors-mypy). Web site and documentation -------------------------- Additional information is available at the web site: - http://www.mypy-lang.org/ + Jump straight to the documentation: - https://mypy.readthedocs.io/ + Follow along our changelog at: - https://mypy-lang.blogspot.com/ - + Contributing ------------ @@ -164,7 +173,6 @@ To get started with developing mypy, see [CONTRIBUTING.md](CONTRIBUTING.md). If you need help getting started, don't hesitate to ask on [gitter](https://gitter.im/python/typing). - Mypyc and compiled version of mypy ---------------------------------- @@ -174,10 +182,12 @@ mypy approximately 4 times faster than if interpreted! To install an interpreted mypy instead, use: - python3 -m pip install --no-binary mypy -U mypy +```bash +python3 -m pip install --no-binary mypy -U mypy +``` To use a compiled version of a development version of mypy, directly install a binary from -https://github.com/mypyc/mypy_mypyc-wheels/releases/latest. +. 
-To contribute to the mypyc project, check out https://github.com/mypyc/mypyc +To contribute to the mypyc project, check out diff --git a/build-requirements.txt b/build-requirements.txt index 52c518d53bc2..0b1e6d43103a 100644 --- a/build-requirements.txt +++ b/build-requirements.txt @@ -2,4 +2,4 @@ -r mypy-requirements.txt types-psutil types-setuptools -types-typed-ast>=1.5.8,<1.6.0 +types-typed-ast>=1.5.8.5,<1.6.0 diff --git a/conftest.py b/conftest.py index 0bd7b6a38031..4454b02e7f3a 100644 --- a/conftest.py +++ b/conftest.py @@ -12,7 +12,7 @@ def pytest_configure(config): # This function name is special to pytest. See -# http://doc.pytest.org/en/latest/writing_plugins.html#initialization-command-line-and-configuration-hooks +# https://doc.pytest.org/en/latest/how-to/writing_plugins.html#initialization-command-line-and-configuration-hooks def pytest_addoption(parser) -> None: parser.addoption( "--bench", action="store_true", default=False, help="Enable the benchmark test runs" diff --git a/docs/Makefile b/docs/Makefile index be69e9d88281..c87c4c1abcb2 100644 --- a/docs/Makefile +++ b/docs/Makefile @@ -9,7 +9,7 @@ BUILDDIR = build # User-friendly check for sphinx-build ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) -$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) +$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from https://www.sphinx-doc.org/) endif # Internal variables. diff --git a/docs/make.bat b/docs/make.bat index 1e3d84320174..3664bed34b7e 100755 --- a/docs/make.bat +++ b/docs/make.bat @@ -56,7 +56,7 @@ if errorlevel 9009 ( echo.may add the Sphinx directory to PATH. echo. echo.If you don't have Sphinx installed, grab it from - echo.http://sphinx-doc.org/ + echo.https://www.sphinx-doc.org/ exit /b 1 ) diff --git a/docs/source/additional_features.rst b/docs/source/additional_features.rst index ef5bf9e8936d..133310899b59 100644 --- a/docs/source/additional_features.rst +++ b/docs/source/additional_features.rst @@ -178,7 +178,7 @@ Caveats/Known Issues :py:meth:`__init__ ` will be replaced by ``Any``. * :ref:`Validator decorators ` - and `default decorators `_ + and `default decorators `_ are not type-checked against the attribute they are setting/validating. * Method definitions added by mypy currently overwrite any existing method diff --git a/docs/source/cheat_sheet_py3.rst b/docs/source/cheat_sheet_py3.rst index 5aa1770512b8..9533484e938b 100644 --- a/docs/source/cheat_sheet_py3.rst +++ b/docs/source/cheat_sheet_py3.rst @@ -21,7 +21,7 @@ See :ref:`type-inference-and-annotations` for more details. # You don't need to initialize a variable to annotate it a: int # Ok (no value at runtime until assigned) - # Doing so is useful in conditional branches + # Doing so can be useful in conditional branches child: bool if age < 18: child = True @@ -34,7 +34,7 @@ Useful built-in types .. code-block:: python - # For most types, just use the name of the type. 
+ # For most types, just use the name of the type in the annotation # Note that mypy can usually infer the type of a variable from its value, # so technically these annotations are redundant x: int = 1 @@ -75,10 +75,11 @@ Useful built-in types # Use Optional[X] for a value that could be None # Optional[X] is the same as X | None or Union[X, None] x: Optional[str] = "something" if some_condition() else None - # Mypy understands a value can't be None in an if-statement if x is not None: + # Mypy understands x won't be None here because of the if-statement print(x.upper()) - # If a value can never be None due to some invariants, use an assert + # If you know a value can never be None due to some logic that mypy doesn't + # understand, use an assert assert x is not None print(x.upper()) @@ -102,10 +103,10 @@ Functions def show(value: str, excitement: int = 10) -> None: print(value + "!" * excitement) - # Note that arguments without a type are dynamically typed (treated as Any) - # and that functions without any annotations not checked - def untyped(x): - x.anything() + 1 + "string" # no errors + # Note that arguments without a type are dynamically typed (treated as Any) + # and that functions without any annotations not checked + def untyped(x): + x.anything() + 1 + "string" # no errors # This is how you annotate a callable (function) value x: Callable[[int, float], float] = f @@ -259,6 +260,8 @@ When you're puzzled or when things are complicated In some cases type annotations can cause issues at runtime, see :ref:`runtime_troubles` for dealing with this. +See :ref:`silencing-type-errors` for details on how to silence errors. + Standard "duck types" ********************* @@ -294,37 +297,11 @@ that are common in idiomatic Python are standardized. f({3: 'yes', 4: 'no'}) - -You can even make your own duck types using :ref:`protocol-types`. - -Coroutines and asyncio -********************** - -See :ref:`async-and-await` for the full detail on typing coroutines and asynchronous code. - -.. code-block:: python - - import asyncio - - # A coroutine is typed like a normal function - async def countdown35(tag: str, count: int) -> str: - while count > 0: - print(f'T-minus {count} ({tag})') - await asyncio.sleep(0.1) - count -= 1 - return "Blastoff!" - - -Miscellaneous -************* - -.. code-block:: python - import sys from typing import IO - # Use IO[] for functions that should accept or return any - # object that comes from an open() call (IO[] does not + # Use IO[str] or IO[bytes] for functions that should accept or return + # objects that come from an open() call (note that IO does not # distinguish between reading, writing or other modes) def get_sys_IO(mode: str = 'w') -> IO[str]: if mode == 'w': @@ -334,19 +311,38 @@ Miscellaneous else: return sys.stdout - # Forward references are useful if you want to reference a class before - # it is defined + +You can even make your own duck types using :ref:`protocol-types`. + +Forward references +****************** + +.. code-block:: python + + # You may want to reference a class before it is defined. + # This is known as a "forward reference". def f(foo: A) -> int: # This will fail at runtime with 'A' is not defined ... - class A: + # However, if you add the following special import: + from __future__ import annotations + # It will work at runtime and type checking will succeed as long as there + # is a class of that name later on in the file + def f(foo: A) -> int: # Ok ... 
- # If you use the string literal 'A', it will pass as long as there is a - # class of that name later on in the file - def f(foo: 'A') -> int: # Ok + # Another option is to just put the type in quotes + def f(foo: 'A') -> int: # Also ok ... + class A: + # This can also come up if you need to reference a class in a type + # annotation inside the definition of that class + @classmethod + def create(cls) -> A: + ... + +See :ref:`forward-references` for more details. Decorators ********** @@ -365,3 +361,20 @@ Decorator functions can be expressed via generics. See def decorator_args(url: str) -> Callable[[F], F]: ... + +Coroutines and asyncio +********************** + +See :ref:`async-and-await` for the full detail on typing coroutines and asynchronous code. + +.. code-block:: python + + import asyncio + + # A coroutine is typed like a normal function + async def countdown(tag: str, count: int) -> str: + while count > 0: + print(f'T-minus {count} ({tag})') + await asyncio.sleep(0.1) + count -= 1 + return "Blastoff!" diff --git a/docs/source/conf.py b/docs/source/conf.py index 5faefdc92ed1..80097ef5b3a8 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -267,8 +267,8 @@ intersphinx_mapping = { "python": ("https://docs.python.org/3", None), "six": ("https://six.readthedocs.io", None), - "attrs": ("http://www.attrs.org/en/stable", None), - "cython": ("http://docs.cython.org/en/latest", None), + "attrs": ("https://www.attrs.org/en/stable/", None), + "cython": ("https://docs.cython.org/en/latest", None), "monkeytype": ("https://monkeytype.readthedocs.io/en/latest", None), "setuptools": ("https://setuptools.readthedocs.io/en/latest", None), } diff --git a/docs/source/error_code_list.rst b/docs/source/error_code_list.rst index 0388cd2165dd..54dc31f2cfcb 100644 --- a/docs/source/error_code_list.rst +++ b/docs/source/error_code_list.rst @@ -344,7 +344,7 @@ Check that assignment target is not a method [method-assign] In general, assigning to a method on class object or instance (a.k.a. monkey-patching) is ambiguous in terms of types, since Python's static type -system cannot express difference between bound and unbound callable types. +system cannot express the difference between bound and unbound callable types. Consider this example: .. code-block:: python @@ -355,18 +355,18 @@ Consider this example: def h(self: A) -> None: pass - A.f = h # type of h is Callable[[A], None] - A().f() # this works - A.f = A().g # type of A().g is Callable[[], None] - A().f() # but this also works at runtime + A.f = h # Type of h is Callable[[A], None] + A().f() # This works + A.f = A().g # Type of A().g is Callable[[], None] + A().f() # ...but this also works at runtime To prevent the ambiguity, mypy will flag both assignments by default. If this -error code is disabled, mypy will treat all method assignments r.h.s. as unbound, -so the second assignment will still generate an error. +error code is disabled, mypy will treat the assigned value in all method assignments as unbound, +so only the second assignment will still generate an error. .. note:: - This error code is a sub-error code of a wider ``[assignment]`` code. + This error code is a subcode of the more general ``[assignment]`` code. 
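As a quick illustration of the subcode relationship (a minimal sketch, not drawn from the docs above), either the narrow code or the wider ``[assignment]`` code can silence the same assignment:

```python
class A:
    def f(self) -> None: ...

def h(self: A) -> None: ...

# [method-assign] is a subcode of [assignment], so either ignore is accepted:
A.f = h  # type: ignore[method-assign]
# A.f = h  # type: ignore[assignment]  # the wider code also works
```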
Check type variable values [type-var] ------------------------------------- @@ -456,11 +456,11 @@ Example: Check TypedDict items [typeddict-item] -------------------------------------- -When constructing a ``TypedDict`` object, mypy checks that each key and value is compatible -with the ``TypedDict`` type that is inferred from the surrounding context. +When constructing a TypedDict object, mypy checks that each key and value is compatible +with the TypedDict type that is inferred from the surrounding context. -When getting a ``TypedDict`` item, mypy checks that the key -exists. When assigning to a ``TypedDict``, mypy checks that both the +When getting a TypedDict item, mypy checks that the key +exists. When assigning to a TypedDict, mypy checks that both the key and the value are valid. Example: @@ -480,10 +480,13 @@ Example: Check TypedDict Keys [typeddict-unknown-key] -------------------------------------------- -When constructing a ``TypedDict`` object, mypy checks whether the definition -contains unknown keys. For convenience's sake, mypy will not generate an error -when a ``TypedDict`` has extra keys if it's passed to a function as an argument. -However, it will generate an error when these are created. Example: +When constructing a TypedDict object, mypy checks whether the +definition contains unknown keys, to catch invalid keys and +misspellings. On the other hand, mypy will not generate an error when +a previously constructed TypedDict value with extra keys is passed +to a function as an argument, since TypedDict values support +structural subtyping ("static duck typing") and the keys are assumed +to have been validated at the point of construction. Example: .. code-block:: python @@ -502,13 +505,13 @@ However, it will generate an error when these are created. Example: a: Point = {"x": 1, "y": 4} b: Point3D = {"x": 2, "y": 5, "z": 6} - # OK - add_x_coordinates(a, b) + add_x_coordinates(a, b) # OK + # Error: Extra key "z" for TypedDict "Point" [typeddict-unknown-key] add_x_coordinates(a, {"x": 1, "y": 4, "z": 5}) - -Setting an unknown value on a ``TypedDict`` will also generate this error: +Setting a TypedDict item using an unknown key will also generate this +error, since it could be a misspelling: .. code-block:: python @@ -516,9 +519,9 @@ Setting an unknown value on a ``TypedDict`` will also generate this error: # Error: Extra key "z" for TypedDict "Point" [typeddict-unknown-key] a["z"] = 3 - -Whereas reading an unknown value will generate the more generic/serious -``typeddict-item``: +Reading an unknown key will generate the more general (and serious) +``typeddict-item`` error, which is likely to result in an exception at +runtime: .. code-block:: python @@ -528,7 +531,7 @@ Whereas reading an unknown value will generate the more generic/serious .. note:: - This error code is a sub-error code of a wider ``[typeddict-item]`` code. + This error code is a subcode of the wider ``[typeddict-item]`` code. Check that type of target is known [has-type] --------------------------------------------- @@ -810,8 +813,8 @@ Check that literal is used where expected [literal-required] There are some places where only a (string) literal value is expected for the purposes of static type checking, for example a ``TypedDict`` key, or a ``__match_args__`` item. Providing a ``str``-valued variable in such contexts -will result in an error. Note however, in many cases you can use ``Final``, -or ``Literal`` variables, for example: +will result in an error. 
Note that in many cases you can also use ``Final`` +or ``Literal`` variables. Example: .. code-block:: python @@ -905,6 +908,24 @@ Functions will always evaluate to true in boolean contexts. if f: # Error: Function "Callable[[], Any]" could always be true in boolean context [truthy-function] pass +Check for implicit bytes coercions [str-bytes-safe] +------------------------------------------------------------------- + +Warn about cases where a bytes object may be converted to a string in an unexpected manner. + +.. code-block:: python + + b = b"abc" + + # Error: If x = b'abc' then f"{x}" or "{}".format(x) produces "b'abc'", not "abc". + # If this is desired behavior, use f"{x!r}" or "{!r}".format(x). + # Otherwise, decode the bytes [str-bytes-safe] + print(f"The alphabet starts with {b}") + + # Okay + print(f"The alphabet starts with {b!r}") # The alphabet starts with b'abc' + print(f"The alphabet starts with {b.decode('utf-8')}") # The alphabet starts with abc + Report syntax errors [syntax] ----------------------------- diff --git a/docs/source/error_code_list2.rst b/docs/source/error_code_list2.rst index f160515f0a9e..8be2ac0b1d73 100644 --- a/docs/source/error_code_list2.rst +++ b/docs/source/error_code_list2.rst @@ -347,3 +347,47 @@ silence the error: async def g() -> None: _ = asyncio.create_task(f()) # No error + +Check that ``# type: ignore`` comment is used [unused-ignore] +------------------------------------------------------------- + +If you use :option:`--enable-error-code unused-ignore `, +or :option:`--warn-unused-ignores ` +mypy generates an error if you don't use a ``# type: ignore`` comment, i.e. if +there is a comment, but there would be no error generated by mypy on this line +anyway. + +Example: + +.. code-block:: python + + # Use "mypy --warn-unused-ignores ..." + + def add(a: int, b: int) -> int: + # Error: unused "type: ignore" comment + return a + b # type: ignore + +Note that due to a specific nature of this comment, the only way to selectively +silence it, is to include the error code explicitly. Also note that this error is +not shown if the ``# type: ignore`` is not used due to code being statically +unreachable (e.g. due to platform or version checks). + +Example: + +.. code-block:: python + + # Use "mypy --warn-unused-ignores ..." + + import sys + + try: + # The "[unused-ignore]" is needed to get a clean mypy run + # on both Python 3.8, and 3.9 where this module was added + import graphlib # type: ignore[import,unused-ignore] + except ImportError: + pass + + if sys.version_info >= (3, 9): + # The following will not generate an error on either + # Python 3.8, or Python 3.9 + 42 + "testing..." # type: ignore diff --git a/docs/source/error_codes.rst b/docs/source/error_codes.rst index 34bb8ab6b5e1..c8a2728b5697 100644 --- a/docs/source/error_codes.rst +++ b/docs/source/error_codes.rst @@ -114,13 +114,13 @@ So one can e.g. enable some code globally, disable it for all tests in the corresponding config section, and then re-enable it with an inline comment in some specific test. -Sub-error codes of other error codes ------------------------------------- +Subcodes of error codes +----------------------- -In rare cases (mostly for backwards compatibility reasons), some error -code may be covered by another, wider error code. For example, an error with +In some cases, mostly for backwards compatibility reasons, an error +code may be covered also by another, wider error code. 
For example, an error with code ``[method-assign]`` can be ignored by ``# type: ignore[assignment]``. Similar logic works for disabling error codes globally. If a given error code -is a sub code of another one, it must mentioned in the docs for the narrower -code. This hierarchy is not nested, there cannot be sub-error codes of other -sub-error codes. +is a subcode of another one, it will be mentioned in the documentation for the narrower +code. This hierarchy is not nested: there cannot be subcodes of other +subcodes. diff --git a/docs/source/existing_code.rst b/docs/source/existing_code.rst index 410d7af0c350..c66008f4b782 100644 --- a/docs/source/existing_code.rst +++ b/docs/source/existing_code.rst @@ -183,7 +183,7 @@ An excellent goal to aim for is to have your codebase pass when run against ``my This basically ensures that you will never have a type related error without an explicit circumvention somewhere (such as a ``# type: ignore`` comment). -The following config is equivalent to ``--strict`` (as of mypy 0.990): +The following config is equivalent to ``--strict`` (as of mypy 1.0): .. code-block:: text @@ -191,7 +191,6 @@ The following config is equivalent to ``--strict`` (as of mypy 0.990): warn_unused_configs = True warn_redundant_casts = True warn_unused_ignores = True - no_implicit_optional = True # Getting these passing should be easy strict_equality = True diff --git a/docs/source/faq.rst b/docs/source/faq.rst index d97929c2cfa6..195805382cd3 100644 --- a/docs/source/faq.rst +++ b/docs/source/faq.rst @@ -36,7 +36,7 @@ Here are some potential benefits of mypy-style static typing: grows, you can adapt tricky application logic to static typing to help maintenance. -See also the `front page `_ of the mypy web +See also the `front page `_ of the mypy web site. Would my project benefit from static typing? @@ -202,7 +202,7 @@ Mypy is a cool project. Can I help? *********************************** Any help is much appreciated! `Contact -`_ the developers if you would +`_ the developers if you would like to contribute. Any help related to development, design, publicity, documentation, testing, web site maintenance, financing, etc. can be helpful. You can learn a lot by contributing, and anybody diff --git a/docs/source/index.rst b/docs/source/index.rst index 7ab3edebad39..c9dc6bc1f8c9 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -109,7 +109,7 @@ Contents :caption: Project Links GitHub - Website + Website Indices and tables ================== diff --git a/docs/source/kinds_of_types.rst b/docs/source/kinds_of_types.rst index b575a6eac4c5..c3180850f119 100644 --- a/docs/source/kinds_of_types.rst +++ b/docs/source/kinds_of_types.rst @@ -200,6 +200,28 @@ using bidirectional type inference: If you want to give the argument or return value types explicitly, use an ordinary, perhaps nested function definition. +Callables can also be used against type objects, matching their +``__init__`` or ``__new__`` signature: + +.. code-block:: python + + from typing import Callable + + class C: + def __init__(self, app: str) -> None: + pass + + CallableType = Callable[[str], C] + + def class_or_callable(arg: CallableType) -> None: + inst = arg("my_app") + reveal_type(inst) # Revealed type is "C" + +This is useful if you want ``arg`` to be either a ``Callable`` returning an +instance of ``C`` or the type of ``C`` itself. This also works with +:ref:`callback protocols `. + + .. 
_union-types: Union types diff --git a/docs/source/more_types.rst b/docs/source/more_types.rst index ff5e8d384351..542ff1c57c71 100644 --- a/docs/source/more_types.rst +++ b/docs/source/more_types.rst @@ -824,11 +824,11 @@ classes are generic, self-type allows giving them precise signatures: Typing async/await ****************** -Mypy supports the ability to type coroutines that use the ``async/await`` -syntax introduced in Python 3.5. For more information regarding coroutines and -this new syntax, see :pep:`492`. +Mypy lets you type coroutines that use the ``async/await`` syntax. +For more information regarding coroutines, see :pep:`492` and the +`asyncio documentation `_. -Functions defined using ``async def`` are typed just like normal functions. +Functions defined using ``async def`` are typed similar to normal functions. The return type annotation should be the same as the type of the value you expect to get back when ``await``-ing the coroutine. @@ -839,65 +839,40 @@ expect to get back when ``await``-ing the coroutine. async def format_string(tag: str, count: int) -> str: return f'T-minus {count} ({tag})' - async def countdown_1(tag: str, count: int) -> str: + async def countdown(tag: str, count: int) -> str: while count > 0: - my_str = await format_string(tag, count) # has type 'str' + my_str = await format_string(tag, count) # type is inferred to be str print(my_str) await asyncio.sleep(0.1) count -= 1 return "Blastoff!" - loop = asyncio.get_event_loop() - loop.run_until_complete(countdown_1("Millennium Falcon", 5)) - loop.close() + asyncio.run(countdown("Millennium Falcon", 5)) -The result of calling an ``async def`` function *without awaiting* will be a -value of type :py:class:`Coroutine[Any, Any, T] `, which is a subtype of +The result of calling an ``async def`` function *without awaiting* will +automatically be inferred to be a value of type +:py:class:`Coroutine[Any, Any, T] `, which is a subtype of :py:class:`Awaitable[T] `: .. code-block:: python - my_coroutine = countdown_1("Millennium Falcon", 5) - reveal_type(my_coroutine) # has type 'Coroutine[Any, Any, str]' + my_coroutine = countdown("Millennium Falcon", 5) + reveal_type(my_coroutine) # Revealed type is "typing.Coroutine[Any, Any, builtins.str]" -.. note:: - - :ref:`reveal_type() ` displays the inferred static type of - an expression. - -You may also choose to create a subclass of :py:class:`~typing.Awaitable` instead: - -.. code-block:: python - - from typing import Any, Awaitable, Generator - import asyncio - - class MyAwaitable(Awaitable[str]): - def __init__(self, tag: str, count: int) -> None: - self.tag = tag - self.count = count +.. _async-iterators: - def __await__(self) -> Generator[Any, None, str]: - for i in range(n, 0, -1): - print(f'T-minus {i} ({tag})') - yield from asyncio.sleep(0.1) - return "Blastoff!" +Asynchronous iterators +---------------------- - def countdown_3(tag: str, count: int) -> Awaitable[str]: - return MyAwaitable(tag, count) - - loop = asyncio.get_event_loop() - loop.run_until_complete(countdown_3("Heart of Gold", 5)) - loop.close() - -To create an iterable coroutine, subclass :py:class:`~typing.AsyncIterator`: +If you have an asynchronous iterator, you can use the +:py:class:`~typing.AsyncIterator` type in your annotations: .. 
code-block:: python from typing import Optional, AsyncIterator import asyncio - class arange(AsyncIterator[int]): + class arange: def __init__(self, start: int, stop: int, step: int) -> None: self.start = start self.stop = stop @@ -914,35 +889,92 @@ To create an iterable coroutine, subclass :py:class:`~typing.AsyncIterator`: else: return self.count - async def countdown_4(tag: str, n: int) -> str: - async for i in arange(n, 0, -1): + async def run_countdown(tag: str, countdown: AsyncIterator[int]) -> str: + async for i in countdown: print(f'T-minus {i} ({tag})') await asyncio.sleep(0.1) return "Blastoff!" - loop = asyncio.get_event_loop() - loop.run_until_complete(countdown_4("Serenity", 5)) - loop.close() + asyncio.run(run_countdown("Serenity", arange(5, 0, -1))) -If you use coroutines in legacy code that was originally written for -Python 3.4, which did not support the ``async def`` syntax, you would -instead use the :py:func:`@asyncio.coroutine ` -decorator to convert a generator into a coroutine, and use a -generator type as the return type: +Async generators (introduced in :pep:`525`) are an easy way to create +async iterators: .. code-block:: python - from typing import Any, Generator + from typing import AsyncGenerator, Optional import asyncio - @asyncio.coroutine - def countdown_2(tag: str, count: int) -> Generator[Any, None, str]: - while count > 0: - print(f'T-minus {count} ({tag})') - yield from asyncio.sleep(0.1) - count -= 1 - return "Blastoff!" + # Could also type this as returning AsyncIterator[int] + async def arange(start: int, stop: int, step: int) -> AsyncGenerator[int, None]: + current = start + while (step > 0 and current < stop) or (step < 0 and current > stop): + yield current + current += step + + asyncio.run(run_countdown("Battlestar Galactica", arange(5, 0, -1))) + +One common confusion is that the presence of a ``yield`` statement in an +``async def`` function has an effect on the type of the function: + +.. code-block:: python + + from typing import AsyncIterator + + async def arange(stop: int) -> AsyncIterator[int]: + # When called, arange gives you an async iterator + # Equivalent to Callable[[int], AsyncIterator[int]] + i = 0 + while i < stop: + yield i + i += 1 + + async def coroutine(stop: int) -> AsyncIterator[int]: + # When called, coroutine gives you something you can await to get an async iterator + # Equivalent to Callable[[int], Coroutine[Any, Any, AsyncIterator[int]]] + return arange(stop) + + async def main() -> None: + reveal_type(arange(5)) # Revealed type is "typing.AsyncIterator[builtins.int]" + reveal_type(coroutine(5)) # Revealed type is "typing.Coroutine[Any, Any, typing.AsyncIterator[builtins.int]]" + + await arange(5) # Error: Incompatible types in "await" (actual type "AsyncIterator[int]", expected type "Awaitable[Any]") + reveal_type(await coroutine(5)) # Revealed type is "typing.AsyncIterator[builtins.int]" + +This can sometimes come up when trying to define base classes, Protocols or overloads: + +.. 
code-block:: python + + from typing import AsyncIterator, Protocol, overload + + class LauncherIncorrect(Protocol): + # Because launch does not have yield, this has type + # Callable[[], Coroutine[Any, Any, AsyncIterator[int]]] + # instead of + # Callable[[], AsyncIterator[int]] + async def launch(self) -> AsyncIterator[int]: + raise NotImplementedError + + class LauncherCorrect(Protocol): + def launch(self) -> AsyncIterator[int]: + raise NotImplementedError + + class LauncherAlsoCorrect(Protocol): + async def launch(self) -> AsyncIterator[int]: + raise NotImplementedError + if False: + yield 0 + + # The type of the overloads is independent of the implementation. + # In particular, their type is not affected by whether or not the + # implementation contains a `yield`. + # Use of `def`` makes it clear the type is Callable[..., AsyncIterator[int]], + # whereas with `async def` it would be Callable[..., Coroutine[Any, Any, AsyncIterator[int]]] + @overload + def launch(*, count: int = ...) -> AsyncIterator[int]: ... + @overload + def launch(*, time: float = ...) -> AsyncIterator[int]: ... - loop = asyncio.get_event_loop() - loop.run_until_complete(countdown_2("USS Enterprise", 5)) - loop.close() + async def launch(*, count: int = 0, time: float = 0) -> AsyncIterator[int]: + # The implementation of launch is an async generator and contains a yield + yield 0 diff --git a/docs/source/protocols.rst b/docs/source/protocols.rst index cb51809a66d5..95b870265f73 100644 --- a/docs/source/protocols.rst +++ b/docs/source/protocols.rst @@ -319,7 +319,7 @@ member: batch_proc([], bad_cb) # Error! Argument 2 has incompatible type because of # different name and kind in the callback -Callback protocols and :py:data:`~typing.Callable` types can be used interchangeably. +Callback protocols and :py:data:`~typing.Callable` types can be used mostly interchangeably. Argument names in :py:meth:`__call__ ` methods must be identical, unless a double underscore prefix is used. For example: diff --git a/docs/source/runtime_troubles.rst b/docs/source/runtime_troubles.rst index a62652111de6..909215a774a9 100644 --- a/docs/source/runtime_troubles.rst +++ b/docs/source/runtime_troubles.rst @@ -117,6 +117,8 @@ Since code inside ``if TYPE_CHECKING:`` is not executed at runtime, it provides a convenient way to tell mypy something without the code being evaluated at runtime. This is most useful for resolving :ref:`import cycles `. +.. _forward-references: + Class name forward references ----------------------------- @@ -275,10 +277,18 @@ sections, these can be dealt with by using :ref:`typing.TYPE_CHECKING .. code-block:: python + from __future__ import annotations from typing import TYPE_CHECKING if TYPE_CHECKING: from _typeshed import SupportsRichComparison + def f(x: SupportsRichComparison) -> None + +The ``from __future__ import annotations`` is required to avoid +a ``NameError`` when using the imported symbol. +For more information and caveats, see the section on +:ref:`future annotations `. + .. _generic-builtins: Using generic builtins diff --git a/docs/source/stubtest.rst b/docs/source/stubtest.rst index f3c036f56c06..59889252f056 100644 --- a/docs/source/stubtest.rst +++ b/docs/source/stubtest.rst @@ -85,7 +85,7 @@ Usage Running stubtest can be as simple as ``stubtest module_to_check``. Run :option:`stubtest --help` for a quick summary of options. 
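For a concrete starting point, a typical first run might look like the following, assuming a hypothetical installed package named ``mylib`` (the package name and allowlist file name are placeholders); the allowlist options used here are described further down:

```bash
# Record all current mismatches once, then check against that baseline:
stubtest mylib --generate-allowlist > stubtest_allowlist.txt
stubtest mylib --allowlist stubtest_allowlist.txt
```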
-Subtest must be able to import the code to be checked, so make sure that mypy +Stubtest must be able to import the code to be checked, so make sure that mypy is installed in the same environment as the library to be tested. In some cases, setting ``PYTHONPATH`` can help stubtest find the code to import. @@ -122,14 +122,29 @@ The rest of this section documents the command line interface of stubtest. allowlists. Allowlists can be created with --generate-allowlist. Allowlists support regular expressions. + The presence of an entry in the allowlist means stubtest will not generate + any errors for the corresponding definition. + .. option:: --generate-allowlist Print an allowlist (to stdout) to be used with --allowlist + When introducing stubtest to an existing project, this is an easy way to + silence all existing errors. + .. option:: --ignore-unused-allowlist Ignore unused allowlist entries + Without this option enabled, the default is for stubtest to complain if an + allowlist entry is not necessary for stubtest to pass successfully. + + Note if an allowlist entry is a regex that matches the empty string, + stubtest will never consider it unused. For example, to get + `--ignore-unused-allowlist` behaviour for a single allowlist entry like + ``foo.bar`` you could add an allowlist entry ``(foo\.bar)?``. + This can be useful when an error only occurs on a specific platform. + .. option:: --mypy-config-file FILE Use specified mypy config file to determine mypy plugins and mypy path diff --git a/misc/analyze_cache.py b/misc/analyze_cache.py index 8b805d8da0bc..45c44139b473 100644 --- a/misc/analyze_cache.py +++ b/misc/analyze_cache.py @@ -62,7 +62,7 @@ def load_json(data_path: str, meta_path: str) -> CacheData: def get_files(root: str) -> Iterable[CacheData]: - for (dirpath, dirnames, filenames) in os.walk(root): + for dirpath, dirnames, filenames in os.walk(root): for filename in filenames: if filename.endswith(".data.json"): meta_filename = filename.replace(".data.json", ".meta.json") diff --git a/misc/convert-cache.py b/misc/convert-cache.py index e5da9c2650d5..2a8a9579c11b 100755 --- a/misc/convert-cache.py +++ b/misc/convert-cache.py @@ -8,6 +8,7 @@ from __future__ import annotations import os +import re import sys sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) @@ -36,15 +37,23 @@ def main() -> None: input_dir = args.input_dir output_dir = args.output_dir or input_dir + assert os.path.isdir(output_dir), f"{output_dir} is not a directory" if args.to_sqlite: input: MetadataStore = FilesystemMetadataStore(input_dir) output: MetadataStore = SqliteMetadataStore(output_dir) else: + fnam = os.path.join(input_dir, "cache.db") + msg = f"{fnam} does not exist" + if not re.match(r"[0-9]+\.[0-9]+$", os.path.basename(input_dir)): + msg += f" (are you missing Python version at the end, e.g. {input_dir}/3.11)" + assert os.path.isfile(fnam), msg input, output = SqliteMetadataStore(input_dir), FilesystemMetadataStore(output_dir) for s in input.list_all(): if s.endswith(".json"): - assert output.write(s, input.read(s), input.getmtime(s)), "Failed to write cache file!" + assert output.write( + s, input.read(s), input.getmtime(s) + ), f"Failed to write cache file {s}!" output.commit() diff --git a/misc/fix_annotate.py b/misc/fix_annotate.py index 7fffba8a8507..fc8ac27466d5 100644 --- a/misc/fix_annotate.py +++ b/misc/fix_annotate.py @@ -38,7 +38,6 @@ def foo(self, bar, baz=12): class FixAnnotate(BaseFix): - # This fixer is compatible with the bottom matcher. 
BM_compatible = True diff --git a/misc/remove-eol-whitespace.sh b/misc/remove-eol-whitespace.sh index 3da6b9de64a5..5cf666997e34 100644 --- a/misc/remove-eol-whitespace.sh +++ b/misc/remove-eol-whitespace.sh @@ -3,6 +3,6 @@ # Remove trailing whitespace from all non-binary files in a git repo. # From https://gist.github.com/dpaluy/3690668; originally from here: -# http://unix.stackexchange.com/questions/36233/how-to-skip-file-in-sed-if-it-contains-regex/36240#36240 +# https://unix.stackexchange.com/questions/36233/how-to-skip-file-in-sed-if-it-contains-regex/36240#36240 git grep -I --name-only -z -e '' | xargs -0 sed -i -e 's/[ \t]\+\(\r\?\)$/\1/' diff --git a/misc/sync-typeshed.py b/misc/sync-typeshed.py index 86b0fd774e0c..fc6cbc1d88e7 100644 --- a/misc/sync-typeshed.py +++ b/misc/sync-typeshed.py @@ -179,9 +179,10 @@ def main() -> None: print("Created typeshed sync commit.") commits_to_cherry_pick = [ - "874afd970", # LiteralString reverts - "3a240111e", # sum reverts - "f968d6ce0", # ctypes reverts + "c844270a4", # LiteralString reverts + "9ebe5fd49", # sum reverts + "d1987191f", # ctypes reverts + "b1761f4c9", # ParamSpec for functools.wraps ] for commit in commits_to_cherry_pick: subprocess.run(["git", "cherry-pick", commit], check=True) diff --git a/mypy/api.py b/mypy/api.py index 589bfbbfa1a7..e2179dba30ca 100644 --- a/mypy/api.py +++ b/mypy/api.py @@ -47,11 +47,10 @@ import sys from io import StringIO -from typing import Callable, TextIO, cast +from typing import Callable, TextIO def _run(main_wrapper: Callable[[TextIO, TextIO], None]) -> tuple[str, str, int]: - stdout = StringIO() stderr = StringIO() @@ -59,7 +58,8 @@ def _run(main_wrapper: Callable[[TextIO, TextIO], None]) -> tuple[str, str, int] main_wrapper(stdout, stderr) exit_status = 0 except SystemExit as system_exit: - exit_status = cast(int, system_exit.code) + assert isinstance(system_exit.code, int) + exit_status = system_exit.code return stdout.getvalue(), stderr.getvalue(), exit_status diff --git a/mypy/applytype.py b/mypy/applytype.py index a81ed3cd1f16..55a51d4adbb6 100644 --- a/mypy/applytype.py +++ b/mypy/applytype.py @@ -8,6 +8,7 @@ from mypy.types import ( AnyType, CallableType, + Instance, Parameters, ParamSpecType, PartialType, @@ -75,7 +76,6 @@ def apply_generic_arguments( report_incompatible_typevar_value: Callable[[CallableType, Type, str, Context], None], context: Context, skip_unsatisfied: bool = False, - allow_erased_callables: bool = False, ) -> CallableType: """Apply generic type arguments to a callable type. @@ -110,7 +110,7 @@ def apply_generic_arguments( nt = id_to_type.get(param_spec.id) if nt is not None: nt = get_proper_type(nt) - if isinstance(nt, CallableType) or isinstance(nt, Parameters): + if isinstance(nt, (CallableType, Parameters)): callable = callable.expand_param_spec(nt) # Apply arguments to argument types. 
@@ -119,15 +119,9 @@ def apply_generic_arguments( star_index = callable.arg_kinds.index(ARG_STAR) callable = callable.copy_modified( arg_types=( - [ - expand_type(at, id_to_type, allow_erased_callables) - for at in callable.arg_types[:star_index] - ] + [expand_type(at, id_to_type) for at in callable.arg_types[:star_index]] + [callable.arg_types[star_index]] - + [ - expand_type(at, id_to_type, allow_erased_callables) - for at in callable.arg_types[star_index + 1 :] - ] + + [expand_type(at, id_to_type) for at in callable.arg_types[star_index + 1 :]] ) ) @@ -155,22 +149,30 @@ def apply_generic_arguments( assert False, f"mypy bug: unimplemented case, {expanded_tuple}" elif isinstance(unpacked_type, TypeVarTupleType): expanded_tvt = expand_unpack_with_variables(var_arg.typ, id_to_type) - assert isinstance(expanded_tvt, list) - for t in expanded_tvt: - assert not isinstance(t, UnpackType) - callable = replace_starargs(callable, expanded_tvt) + if isinstance(expanded_tvt, list): + for t in expanded_tvt: + assert not isinstance(t, UnpackType) + callable = replace_starargs(callable, expanded_tvt) + else: + assert isinstance(expanded_tvt, Instance) + assert expanded_tvt.type.fullname == "builtins.tuple" + callable = callable.copy_modified( + arg_types=( + callable.arg_types[:star_index] + + [expanded_tvt.args[0]] + + callable.arg_types[star_index + 1 :] + ) + ) else: assert False, "mypy bug: unhandled case applying unpack" else: callable = callable.copy_modified( - arg_types=[ - expand_type(at, id_to_type, allow_erased_callables) for at in callable.arg_types - ] + arg_types=[expand_type(at, id_to_type) for at in callable.arg_types] ) # Apply arguments to TypeGuard if any. if callable.type_guard is not None: - type_guard = expand_type(callable.type_guard, id_to_type, allow_erased_callables) + type_guard = expand_type(callable.type_guard, id_to_type) else: type_guard = None @@ -178,7 +180,7 @@ def apply_generic_arguments( remaining_tvars = [tv for tv in tvars if tv.id not in id_to_type] return callable.copy_modified( - ret_type=expand_type(callable.ret_type, id_to_type, allow_erased_callables), + ret_type=expand_type(callable.ret_type, id_to_type), variables=remaining_tvars, type_guard=type_guard, ) diff --git a/mypy/binder.py b/mypy/binder.py index d822aecec2f3..37c0b6bb9006 100644 --- a/mypy/binder.py +++ b/mypy/binder.py @@ -51,6 +51,9 @@ def __init__(self, id: int, conditional_frame: bool = False) -> None: # need this field. 
self.suppress_unreachable_warnings = False + def __repr__(self) -> str: + return f"Frame({self.id}, {self.types}, {self.unreachable}, {self.conditional_frame})" + Assigns = DefaultDict[Expression, List[Tuple[Type, Optional[Type]]]] @@ -63,7 +66,7 @@ class ConditionalTypeBinder: ``` class A: - a = None # type: Union[int, str] + a: Union[int, str] = None x = A() lst = [x] reveal_type(x.a) # Union[int, str] @@ -446,6 +449,7 @@ def top_frame_context(self) -> Iterator[Frame]: assert len(self.frames) == 1 yield self.push_frame() self.pop_frame(True, 0) + assert len(self.frames) == 1 def get_declaration(expr: BindableExpression) -> Type | None: diff --git a/mypy/build.py b/mypy/build.py index 6b0e4a9faa86..c239afb56236 100644 --- a/mypy/build.py +++ b/mypy/build.py @@ -235,17 +235,7 @@ def _build( source_set = BuildSourceSet(sources) cached_read = fscache.read - errors = Errors( - options.show_error_context, - options.show_column_numbers, - options.hide_error_codes, - options.pretty, - options.show_error_end, - lambda path: read_py_file(path, cached_read), - options.show_absolute_path, - options.many_errors_threshold, - options, - ) + errors = Errors(options, read_source=lambda path: read_py_file(path, cached_read)) plugin, snapshot = load_plugins(options, errors, stdout, extra_plugins) # Add catch-all .gitignore to cache dir if we created it @@ -845,6 +835,8 @@ def parse_file( Raise CompileError if there is a parse error. """ t0 = time.time() + if ignore_errors: + self.errors.ignored_files.add(path) tree = parse(source, path, id, self.errors, options=options) tree._fullname = id self.add_stats( @@ -1739,8 +1731,8 @@ def delete_cache(id: str, path: str, manager: BuildManager) -> None: Now we can execute steps A-C from the first section. Finding SCCs for step A shouldn't be hard; there's a recipe here: -http://code.activestate.com/recipes/578507/. There's also a plethora -of topsort recipes, e.g. http://code.activestate.com/recipes/577413/. +https://code.activestate.com/recipes/578507/. There's also a plethora +of topsort recipes, e.g. https://code.activestate.com/recipes/577413/. For single nodes, processing is simple. If the node was cached, we deserialize the cache data and fix up cross-references. Otherwise, we @@ -1921,7 +1913,7 @@ def __init__( self.caller_state = caller_state self.caller_line = caller_line if caller_state: - self.import_context = caller_state.import_context[:] + self.import_context = caller_state.import_context.copy() self.import_context.append((caller_state.xpath, caller_line)) else: self.import_context = [] @@ -2246,6 +2238,7 @@ def semantic_analysis_pass1(self) -> None: analyzer = SemanticAnalyzerPreAnalysis() with self.wrap_context(): analyzer.visit_file(self.tree, self.xpath, self.id, options) + self.manager.errors.set_unreachable_lines(self.xpath, self.tree.unreachable_lines) # TODO: Do this while constructing the AST? 
self.tree.names = SymbolTable() if not self.tree.is_stub: @@ -2413,7 +2406,9 @@ def finish_passes(self) -> None: self.update_fine_grained_deps(self.manager.fg_deps) if manager.options.export_ref_info: - write_undocumented_ref_info(self, manager.metastore, manager.options) + write_undocumented_ref_info( + self, manager.metastore, manager.options, self.type_map() + ) self.free_state() if not manager.options.fine_grained_incremental and not manager.options.preserve_asts: @@ -2578,7 +2573,10 @@ def dependency_lines(self) -> list[int]: return [self.dep_line_map.get(dep, 1) for dep in self.dependencies + self.suppressed] def generate_unused_ignore_notes(self) -> None: - if self.options.warn_unused_ignores: + if ( + self.options.warn_unused_ignores + or codes.UNUSED_IGNORE in self.options.enabled_error_codes + ) and codes.UNUSED_IGNORE not in self.options.disabled_error_codes: # If this file was initially loaded from the cache, it may have suppressed # dependencies due to imports with ignores on them. We need to generate # those errors to avoid spuriously flagging them as unused ignores. @@ -3299,7 +3297,7 @@ def process_graph(graph: Graph, manager: BuildManager) -> None: manager.trace(f"Queuing {fresh_msg} SCC ({scc_str})") fresh_scc_queue.append(scc) else: - if len(fresh_scc_queue) > 0: + if fresh_scc_queue: manager.log(f"Processing {len(fresh_scc_queue)} queued fresh SCCs") # Defer processing fresh SCCs until we actually run into a stale SCC # and need the earlier modules to be loaded. @@ -3515,7 +3513,7 @@ def strongly_connected_components( exactly once; vertices not part of a SCC are returned as singleton sets. - From http://code.activestate.com/recipes/578507/. + From https://code.activestate.com/recipes/578507/. """ identified: set[str] = set() stack: list[str] = [] @@ -3578,7 +3576,7 @@ def topsort(data: dict[T, set[T]]) -> Iterable[set[T]]: {B, C} {A} - From http://code.activestate.com/recipes/577413/. + From https://code.activestate.com/recipes/577413/. """ # TODO: Use a faster algorithm? for k, v in data.items(): @@ -3624,7 +3622,9 @@ def is_silent_import_module(manager: BuildManager, path: str) -> bool: ) -def write_undocumented_ref_info(state: State, metastore: MetadataStore, options: Options) -> None: +def write_undocumented_ref_info( + state: State, metastore: MetadataStore, options: Options, type_map: dict[Expression, Type] +) -> None: # This exports some dependency information in a rather ad-hoc fashion, which # can be helpful for some tools. This is all highly experimental and could be # removed at any time. 
@@ -3639,5 +3639,5 @@ def write_undocumented_ref_info(state: State, metastore: MetadataStore, options: ref_info_file = ".".join(data_file.split(".")[:-2]) + ".refs.json" assert not ref_info_file.startswith(".") - deps_json = get_undocumented_ref_info_json(state.tree) + deps_json = get_undocumented_ref_info_json(state.tree, type_map) metastore.write(ref_info_file, json.dumps(deps_json, separators=(",", ":"))) diff --git a/mypy/checker.py b/mypy/checker.py index 5f72e6f7f399..f81cb7a1fd32 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -26,7 +26,7 @@ import mypy.checkexpr from mypy import errorcodes as codes, message_registry, nodes, operators -from mypy.binder import ConditionalTypeBinder, get_declaration +from mypy.binder import ConditionalTypeBinder, Frame, get_declaration from mypy.checkmember import ( MemberContext, analyze_decorator_or_funcbase_access, @@ -41,7 +41,7 @@ from mypy.errors import Errors, ErrorWatcher, report_internal_error from mypy.expandtype import expand_self_type, expand_type, expand_type_by_instance from mypy.join import join_types -from mypy.literals import Key, literal, literal_hash +from mypy.literals import Key, extract_var_from_literal_hash, literal, literal_hash from mypy.maptype import map_instance_to_supertype from mypy.meet import is_overlapping_erased_types, is_overlapping_types from mypy.message_registry import ErrorMessage @@ -134,6 +134,7 @@ is_final_node, ) from mypy.options import Options +from mypy.patterns import AsPattern, StarredPattern from mypy.plugin import CheckerPluginInterface, Plugin from mypy.scope import Scope from mypy.semanal import is_trivial_body, refers_to_fullname, set_callable_name @@ -151,7 +152,7 @@ restrict_subtype_away, unify_generic_callable, ) -from mypy.traverser import all_return_statements, has_return_statement +from mypy.traverser import TraverserVisitor, all_return_statements, has_return_statement from mypy.treetransform import TransformVisitor from mypy.typeanal import check_for_explicit_any, has_any_from_unimported_type, make_optional_type from mypy.typeops import ( @@ -419,7 +420,7 @@ def __init__( self.expr_checker = mypy.checkexpr.ExpressionChecker( self, self.msg, self.plugin, per_line_checking_time_ns ) - self.pattern_checker = PatternChecker(self, self.msg, self.plugin) + self.pattern_checker = PatternChecker(self, self.msg, self.plugin, options) @property def type_context(self) -> list[Type | None]: @@ -483,7 +484,9 @@ def check_first_pass(self) -> None: "typing.Sequence", [self.named_type("builtins.str")] ) if not is_subtype(all_.type, seq_str): - str_seq_s, all_s = format_type_distinctly(seq_str, all_.type) + str_seq_s, all_s = format_type_distinctly( + seq_str, all_.type, options=self.options + ) self.fail( message_registry.ALL_MUST_BE_SEQ_STR.format(str_seq_s, all_s), all_node ) @@ -629,7 +632,8 @@ def _visit_overloaded_func_def(self, defn: OverloadedFuncDef) -> None: if defn.is_property: # HACK: Infer the type of the property. 
- self.visit_decorator(cast(Decorator, defn.items[0])) + assert isinstance(defn.items[0], Decorator) + self.visit_decorator(defn.items[0]) for fdef in defn.items: assert isinstance(fdef, Decorator) self.check_func_item(fdef.func, name=fdef.func.name, allow_empty=True) @@ -1177,7 +1181,8 @@ def check_func_def( msg = None elif typ.arg_names[i] in {"self", "cls"}: msg = message_registry.ERASED_SELF_TYPE_NOT_SUPERTYPE.format( - erased, ref_type + erased.str_with_options(self.options), + ref_type.str_with_options(self.options), ) else: msg = message_registry.MISSING_OR_INVALID_SELF_TYPE @@ -1203,6 +1208,21 @@ def check_func_def( # Type check body in a new scope. with self.binder.top_frame_context(): + # Copy some type narrowings from an outer function when it seems safe enough + # (i.e. we can't find an assignment that might change the type of the + # variable afterwards). + new_frame: Frame | None = None + for frame in old_binder.frames: + for key, narrowed_type in frame.types.items(): + key_var = extract_var_from_literal_hash(key) + if key_var is not None and not self.is_var_redefined_in_outer_context( + key_var, defn.line + ): + # It seems safe to propagate the type narrowing to a nested scope. + if new_frame is None: + new_frame = self.binder.push_frame() + new_frame.types[key] = narrowed_type + self.binder.declarations[key] = old_binder.declarations[key] with self.scope.push_function(defn): # We suppress reachability warnings when we use TypeVars with value # restrictions: we only want to report a warning if a certain statement is @@ -1214,6 +1234,8 @@ def check_func_def( self.binder.suppress_unreachable_warnings() self.accept(item.body) unreachable = self.binder.is_unreachable() + if new_frame is not None: + self.binder.pop_frame(True, 0) if not unreachable: if defn.is_generator or is_named_instance( @@ -1306,6 +1328,23 @@ def check_func_def( self.binder = old_binder + def is_var_redefined_in_outer_context(self, v: Var, after_line: int) -> bool: + """Can the variable be assigned to at module top level or outer function? + + Note that this doesn't do a full CFG analysis but uses a line number based + heuristic that isn't correct in some (rare) cases. 
+ """ + outers = self.tscope.outer_functions() + if not outers: + # Top-level function -- outer context is top level, and we can't reason about + # globals + return True + for outer in outers: + if isinstance(outer, FuncDef): + if find_last_var_assignment_line(outer.body, v) >= after_line: + return True + return False + def check_unbound_return_typevar(self, typ: CallableType) -> None: """Fails when the return typevar is not defined in arguments.""" if isinstance(typ.ret_type, TypeVarType) and typ.ret_type in typ.variables: @@ -1322,7 +1361,7 @@ def check_unbound_return_typevar(self, typ: CallableType) -> None: ): self.note( "Consider using the upper bound " - f"{format_type(typ.ret_type.upper_bound)} instead", + f"{format_type(typ.ret_type.upper_bound, self.options)} instead", context=typ.ret_type, ) @@ -1429,7 +1468,9 @@ def check___new___signature(self, fdef: FuncDef, typ: CallableType) -> None: get_proper_type(bound_type.ret_type), (AnyType, Instance, TupleType, UninhabitedType) ): self.fail( - message_registry.NON_INSTANCE_NEW_TYPE.format(format_type(bound_type.ret_type)), + message_registry.NON_INSTANCE_NEW_TYPE.format( + format_type(bound_type.ret_type, self.options) + ), fdef, ) else: @@ -1753,8 +1794,7 @@ def expand_typevars( result: list[tuple[FuncItem, CallableType]] = [] for substitutions in itertools.product(*subst): mapping = dict(substitutions) - expanded = cast(CallableType, expand_type(typ, mapping)) - result.append((expand_func(defn, mapping), expanded)) + result.append((expand_func(defn, mapping), expand_type(typ, mapping))) return result else: return [(defn, typ)] @@ -1876,23 +1916,6 @@ def check_method_override_for_base_with_name( original_class_or_static = False # a variable can't be class or static if isinstance(original_type, FunctionLike): - active_self_type = self.scope.active_self_type() - if isinstance(original_type, Overloaded) and active_self_type: - # If we have an overload, filter to overloads that match the self type. - # This avoids false positives for concrete subclasses of generic classes, - # see testSelfTypeOverrideCompatibility for an example. - # It's possible we might want to do this as part of bind_and_map_method - filtered_items = [ - item - for item in original_type.items - if not item.arg_types or is_subtype(active_self_type, item.arg_types[0]) - ] - # If we don't have any filtered_items, maybe it's always a valid override - # of the superclass? However if you get to that point you're in murky type - # territory anyway, so we just preserve the type and have the behaviour match - # that of older versions of mypy. - if filtered_items: - original_type = Overloaded(filtered_items) original_type = self.bind_and_map_method(base_attr, original_type, defn.info, base) if original_node and is_property(original_node): original_type = get_property_type(original_type) @@ -1964,10 +1987,32 @@ def bind_and_map_method( is_class_method = sym.node.func.is_class else: is_class_method = sym.node.is_class - bound = bind_self(typ, self.scope.active_self_type(), is_class_method) + + mapped_typ = cast(FunctionLike, map_type_from_supertype(typ, sub_info, super_info)) + active_self_type = self.scope.active_self_type() + if isinstance(mapped_typ, Overloaded) and active_self_type: + # If we have an overload, filter to overloads that match the self type. + # This avoids false positives for concrete subclasses of generic classes, + # see testSelfTypeOverrideCompatibility for an example. 
+ filtered_items = [] + for item in mapped_typ.items: + if not item.arg_types: + filtered_items.append(item) + item_arg = item.arg_types[0] + if isinstance(item_arg, TypeVarType): + item_arg = item_arg.upper_bound + if is_subtype(active_self_type, item_arg): + filtered_items.append(item) + # If we don't have any filtered_items, maybe it's always a valid override + # of the superclass? However if you get to that point you're in murky type + # territory anyway, so we just preserve the type and have the behaviour match + # that of older versions of mypy. + if filtered_items: + mapped_typ = Overloaded(filtered_items) + + return bind_self(mapped_typ, active_self_type, is_class_method) else: - bound = typ - return cast(FunctionLike, map_type_from_supertype(bound, sub_info, super_info)) + return cast(FunctionLike, map_type_from_supertype(typ, sub_info, super_info)) def get_op_other_domain(self, tp: FunctionLike) -> Type | None: if isinstance(tp, CallableType): @@ -2067,7 +2112,6 @@ def erase_override(t: Type) -> Type: if not is_subtype( original.arg_types[i], erase_override(override.arg_types[i]) ): - arg_type_in_super = original.arg_types[i] if isinstance(node, FuncDef): @@ -2347,7 +2391,10 @@ class Baz(int, Foo, Bar, enum.Flag): ... enum_base = base continue elif enum_base is not None and not base.type.is_enum: - self.fail(f'No non-enum mixin classes are allowed after "{enum_base}"', defn) + self.fail( + f'No non-enum mixin classes are allowed after "{enum_base.str_with_options(self.options)}"', + defn, + ) break def check_enum_new(self, defn: ClassDef) -> None: @@ -2372,7 +2419,7 @@ def has_new_method(info: TypeInfo) -> bool: if candidate and has_new: self.fail( "Only a single data type mixin is allowed for Enum subtypes, " - 'found extra "{}"'.format(base), + 'found extra "{}"'.format(base.str_with_options(self.options)), defn, ) elif candidate: @@ -2484,7 +2531,14 @@ class C(B, A[int]): ... # this is unsafe because... first_type = get_proper_type(self.determine_type_of_member(first)) second_type = get_proper_type(self.determine_type_of_member(second)) - if isinstance(first_type, FunctionLike) and isinstance(second_type, FunctionLike): + # start with the special case that Instance can be a subtype of FunctionLike + call = None + if isinstance(first_type, Instance): + call = find_member("__call__", first_type, first_type, is_operator=True) + if call and isinstance(second_type, FunctionLike): + second_sig = self.bind_and_map_method(second, second_type, ctx, base2) + ok = is_subtype(call, second_sig, ignore_pos_arg_names=True) + elif isinstance(first_type, FunctionLike) and isinstance(second_type, FunctionLike): if first_type.is_type_obj() and second_type.is_type_obj(): # For class objects only check the subtype relationship of the classes, # since we allow incompatible overrides of '__init__'/'__new__' @@ -2576,9 +2630,8 @@ def check_import(self, node: ImportBase) -> None: if lvalue_type is None: # TODO: This is broken. 
lvalue_type = AnyType(TypeOfAny.special_form) - message = message_registry.INCOMPATIBLE_IMPORT_OF.format( - cast(NameExpr, assign.rvalue).name - ) + assert isinstance(assign.rvalue, NameExpr) + message = message_registry.INCOMPATIBLE_IMPORT_OF.format(assign.rvalue.name) self.check_simple_assignment( lvalue_type, assign.rvalue, @@ -2706,7 +2759,7 @@ def check_assignment( new_syntax: bool = False, ) -> None: """Type check a single assignment: lvalue = rvalue.""" - if isinstance(lvalue, TupleExpr) or isinstance(lvalue, ListExpr): + if isinstance(lvalue, (TupleExpr, ListExpr)): self.check_assignment_to_multiple_lvalues( lvalue.items, rvalue, rvalue, infer_lvalue_type ) @@ -2947,7 +3000,6 @@ def check_compatibility_all_supers( and lvalue.kind in (MDEF, None) and len(lvalue_node.info.bases) > 0 # None for Vars defined via self ): - for base in lvalue_node.info.mro[1:]: tnode = base.names.get(lvalue_node.name) if tnode is not None: @@ -3284,7 +3336,7 @@ def check_assignment_to_multiple_lvalues( context: Context, infer_lvalue_type: bool = True, ) -> None: - if isinstance(rvalue, TupleExpr) or isinstance(rvalue, ListExpr): + if isinstance(rvalue, (TupleExpr, ListExpr)): # Recursively go into Tuple or List expression rhs instead of # using the type of rhs, because this allowed more fine grained # control in cases like: a, b = [int, str] where rhs would get @@ -3299,14 +3351,14 @@ def check_assignment_to_multiple_lvalues( rvalues.extend([TempNode(typ) for typ in typs.items]) elif self.type_is_iterable(typs) and isinstance(typs, Instance): if iterable_type is not None and iterable_type != self.iterable_item_type( - typs + typs, rvalue ): self.fail(message_registry.CONTIGUOUS_ITERABLE_EXPECTED, context) else: if last_idx is None or last_idx + 1 == idx_rval: rvalues.append(rval) last_idx = idx_rval - iterable_type = self.iterable_item_type(typs) + iterable_type = self.iterable_item_type(typs, rvalue) else: self.fail(message_registry.CONTIGUOUS_ITERABLE_EXPECTED, context) else: @@ -3632,8 +3684,10 @@ def check_multi_assignment_from_iterable( infer_lvalue_type: bool = True, ) -> None: rvalue_type = get_proper_type(rvalue_type) - if self.type_is_iterable(rvalue_type) and isinstance(rvalue_type, Instance): - item_type = self.iterable_item_type(rvalue_type) + if self.type_is_iterable(rvalue_type) and isinstance( + rvalue_type, (Instance, CallableType, TypeType, Overloaded) + ): + item_type = self.iterable_item_type(rvalue_type, context) for lv in lvalues: if isinstance(lv, StarExpr): items_type = self.named_generic_type("builtins.list", [item_type]) @@ -3656,8 +3710,8 @@ def check_lvalue(self, lvalue: Lvalue) -> tuple[Type | None, IndexExpr | None, V not isinstance(lvalue, NameExpr) or isinstance(lvalue.node, Var) ): if isinstance(lvalue, NameExpr): - inferred = cast(Var, lvalue.node) - assert isinstance(inferred, Var) + assert isinstance(lvalue.node, Var) + inferred = lvalue.node else: assert isinstance(lvalue, MemberExpr) self.expr_checker.accept(lvalue.expr) @@ -3670,7 +3724,7 @@ def check_lvalue(self, lvalue: Lvalue) -> tuple[Type | None, IndexExpr | None, V elif isinstance(lvalue, NameExpr): lvalue_type = self.expr_checker.analyze_ref_expr(lvalue, lvalue=True) self.store_type(lvalue, lvalue_type) - elif isinstance(lvalue, TupleExpr) or isinstance(lvalue, ListExpr): + elif isinstance(lvalue, (TupleExpr, ListExpr)): types = [ self.check_lvalue(sub_expr)[0] or # This type will be used as a context for further inference of rvalue, @@ -3967,7 +4021,12 @@ def check_member_assignment( dunder_set = 
attribute_type.type.get_method("__set__") if dunder_set is None: - self.fail(message_registry.DESCRIPTOR_SET_NOT_CALLABLE.format(attribute_type), context) + self.fail( + message_registry.DESCRIPTOR_SET_NOT_CALLABLE.format( + attribute_type.str_with_options(self.options) + ), + context, + ) return AnyType(TypeOfAny.from_error), get_type, False bound_method = analyze_decorator_or_funcbase_access( @@ -4121,7 +4180,9 @@ def visit_expression_stmt(self, s: ExpressionStmt) -> None: if error_note_and_code: error_note, code = error_note_and_code self.fail( - message_registry.TYPE_MUST_BE_USED.format(format_type(expr_type)), s, code=code + message_registry.TYPE_MUST_BE_USED.format(format_type(expr_type, self.options)), + s, + code=code, ) self.note(error_note, s, code=code) @@ -4951,7 +5012,9 @@ def _make_fake_typeinfo_and_full_name( # We use the pretty_names_list for error messages but can't # use it for the real name that goes into the symbol table # because it can have dots in it. - pretty_names_list = pretty_seq(format_type_distinctly(*base_classes, bare=True), "and") + pretty_names_list = pretty_seq( + format_type_distinctly(*base_classes, options=self.options, bare=True), "and" + ) try: info, full_name = _make_fake_typeinfo_and_full_name(base_classes, curr_module) with self.msg.filter_errors() as local_errors: @@ -4983,11 +5046,12 @@ def intersect_instance_callable(self, typ: Instance, callable_type: CallableType # In order for this to work in incremental mode, the type we generate needs to # have a valid fullname and a corresponding entry in a symbol table. We generate # a unique name inside the symbol table of the current module. - cur_module = cast(MypyFile, self.scope.stack[0]) + cur_module = self.scope.stack[0] + assert isinstance(cur_module, MypyFile) gen_name = gen_unique_name(f"", cur_module.names) # Synthesize a fake TypeInfo - short_name = format_type_bare(typ) + short_name = format_type_bare(typ, self.options) cdef, info = self.make_fake_typeinfo(cur_module.fullname, gen_name, short_name, [typ]) # Build up a fake FuncDef so we can populate the symbol table. @@ -5034,7 +5098,7 @@ def partition_by_callable( """ typ = get_proper_type(typ) - if isinstance(typ, FunctionLike) or isinstance(typ, TypeType): + if isinstance(typ, (FunctionLike, TypeType)): return [typ], [] if isinstance(typ, AnyType): @@ -5068,7 +5132,7 @@ def partition_by_callable( callables, uncallables = self.partition_by_callable( erase_to_union_or_bound(typ), unsound_partition ) - uncallables = [typ] if len(uncallables) else [] + uncallables = [typ] if uncallables else [] return callables, uncallables # A TupleType is callable if its fallback is, but needs special handling @@ -5083,7 +5147,7 @@ def partition_by_callable( callables, uncallables = self.partition_by_callable( method.type, unsound_partition=False ) - if len(callables) and not len(uncallables): + if callables and not uncallables: # Only consider the type callable if its __call__ method is # definitely callable. 
return [typ], [] @@ -5119,14 +5183,12 @@ def conditional_callable_type_map( callables, uncallables = self.partition_by_callable(current_type, unsound_partition=False) - if len(callables) and len(uncallables): - callable_map = {expr: UnionType.make_union(callables)} if len(callables) else None - uncallable_map = ( - {expr: UnionType.make_union(uncallables)} if len(uncallables) else None - ) + if callables and uncallables: + callable_map = {expr: UnionType.make_union(callables)} if callables else None + uncallable_map = {expr: UnionType.make_union(uncallables)} if uncallables else None return callable_map, uncallable_map - elif len(callables): + elif callables: return {}, None return None, {} @@ -5195,7 +5257,7 @@ def _check_for_truthy_type(self, t: Type, expr: Expression) -> None: return def format_expr_type() -> str: - typ = format_type(t) + typ = format_type(t, self.options) if isinstance(expr, MemberExpr): return f'Member "{expr.name}" has type {typ}' elif isinstance(expr, RefExpr) and expr.fullname: @@ -5210,14 +5272,16 @@ def format_expr_type() -> str: return f"Expression has type {typ}" if isinstance(t, FunctionLike): - self.fail(message_registry.FUNCTION_ALWAYS_TRUE.format(format_type(t)), expr) + self.fail( + message_registry.FUNCTION_ALWAYS_TRUE.format(format_type(t, self.options)), expr + ) elif isinstance(t, UnionType): self.fail(message_registry.TYPE_ALWAYS_TRUE_UNIONTYPE.format(format_expr_type()), expr) elif isinstance(t, Instance) and t.type.fullname == "typing.Iterable": _, info = self.make_fake_typeinfo("typing", "Collection", "Collection", []) self.fail( message_registry.ITERABLE_ALWAYS_TRUE.format( - format_expr_type(), format_type(Instance(info, t.args)) + format_expr_type(), format_type(Instance(info, t.args), self.options) ), expr, ) @@ -5806,7 +5870,14 @@ def refine_identity_comparison_expression( """ should_coerce = True if coerce_only_in_literal_context: - should_coerce = any(is_literal_type_like(operand_types[i]) for i in chain_indices) + + def should_coerce_inner(typ: Type) -> bool: + typ = get_proper_type(typ) + return is_literal_type_like(typ) or ( + isinstance(typ, Instance) and typ.type.is_enum + ) + + should_coerce = any(should_coerce_inner(operand_types[i]) for i in chain_indices) target: Type | None = None possible_target_indices = [] @@ -6002,7 +6073,9 @@ def check_subtype( note_msg = "" notes = notes or [] if subtype_label is not None or supertype_label is not None: - subtype_str, supertype_str = format_type_distinctly(orig_subtype, orig_supertype) + subtype_str, supertype_str = format_type_distinctly( + orig_subtype, orig_supertype, options=self.options + ) if subtype_label is not None: extra_info.append(subtype_label + " " + subtype_str) if supertype_label is not None: @@ -6195,7 +6268,8 @@ def lookup(self, name: str) -> SymbolTableNode: else: b = self.globals.get("__builtins__", None) if b: - table = cast(MypyFile, b.node).names + assert isinstance(b.node, MypyFile) + table = b.node.names if name in table: return table[name] raise KeyError(f"Failed lookup: {name}") @@ -6209,7 +6283,8 @@ def lookup_qualified(self, name: str) -> SymbolTableNode: for i in range(1, len(parts) - 1): sym = n.names.get(parts[i]) assert sym is not None, "Internal error: attempted lookup of unknown name" - n = cast(MypyFile, sym.node) + assert isinstance(sym.node, MypyFile) + n = sym.node last = parts[-1] if last in n.names: return n.names[last] @@ -6387,15 +6462,18 @@ def note( return self.msg.note(msg, context, offset=offset, code=code) - def iterable_item_type(self, 
instance: Instance) -> Type: - iterable = map_instance_to_supertype(instance, self.lookup_typeinfo("typing.Iterable")) - item_type = iterable.args[0] - if not isinstance(get_proper_type(item_type), AnyType): - # This relies on 'map_instance_to_supertype' returning 'Iterable[Any]' - # in case there is no explicit base class. - return item_type + def iterable_item_type( + self, it: Instance | CallableType | TypeType | Overloaded, context: Context + ) -> Type: + if isinstance(it, Instance): + iterable = map_instance_to_supertype(it, self.lookup_typeinfo("typing.Iterable")) + item_type = iterable.args[0] + if not isinstance(get_proper_type(item_type), AnyType): + # This relies on 'map_instance_to_supertype' returning 'Iterable[Any]' + # in case there is no explicit base class. + return item_type # Try also structural typing. - return self.analyze_iterable_item_type_without_expression(instance, instance)[1] + return self.analyze_iterable_item_type_without_expression(it, context)[1] def function_type(self, func: FuncBase) -> FunctionLike: return function_type(func, self.named_type("builtins.function")) @@ -6496,7 +6574,7 @@ def conditional_types_with_intersection( if intersection is None: continue out.append(intersection) - if len(out) == 0: + if not out: # Only report errors if no element in the union worked. if self.should_report_unreachable_issues(): for types, reason in errors: @@ -6512,7 +6590,8 @@ def is_writable_attribute(self, node: Node) -> bool: return False return True elif isinstance(node, OverloadedFuncDef) and node.is_property: - first_item = cast(Decorator, node.items[0]) + first_item = node.items[0] + assert isinstance(first_item, Decorator) return first_item.var.is_settable_property return False @@ -6763,6 +6842,7 @@ def conditional_types( def conditional_types_to_typemaps( expr: Expression, yes_type: Type | None, no_type: Type | None ) -> tuple[TypeMap, TypeMap]: + expr = collapse_walrus(expr) maps: list[TypeMap] = [] for typ in (yes_type, no_type): proper_type = get_proper_type(typ) @@ -6950,7 +7030,7 @@ def convert_to_typetype(type_map: TypeMap) -> TypeMap: def flatten(t: Expression) -> list[Expression]: """Flatten a nested sequence of tuples/lists into one list of nodes.""" - if isinstance(t, TupleExpr) or isinstance(t, ListExpr): + if isinstance(t, (TupleExpr, ListExpr)): return [b for a in t.items for b in flatten(a)] elif isinstance(t, StarExpr): return flatten(t.expr) @@ -7107,7 +7187,6 @@ def overload_can_never_match(signature: CallableType, other: CallableType) -> bo exp_signature = expand_type( signature, {tvar.id: erase_def_to_union_or_bound(tvar) for tvar in signature.variables} ) - assert isinstance(exp_signature, CallableType) return is_callable_compatible( exp_signature, other, is_compat=is_more_precise, ignore_return=True ) @@ -7335,7 +7414,7 @@ def add_mapping(self, keys: set[TKey], values: set[TValue]) -> None: Note that the given set of keys must be non-empty -- otherwise, nothing happens. """ - if len(keys) == 0: + if not keys: return subtree_roots = [self._lookup_or_make_root_id(key) for key in keys] @@ -7446,7 +7525,7 @@ def group_comparison_operands( if current_indices and (operator != last_operator or operator not in operators_to_group): # If some of the operands in the chain are assignable, defer adding it: we might # end up needing to merge it with other chains that appear later. 
- if len(current_hashes) == 0: + if not current_hashes: simplified_operator_list.append((last_operator, sorted(current_indices))) else: groups[last_operator].add_mapping(current_hashes, current_indices) @@ -7469,7 +7548,7 @@ def group_comparison_operands( current_hashes.add(right_hash) if last_operator is not None: - if len(current_hashes) == 0: + if not current_hashes: simplified_operator_list.append((last_operator, sorted(current_indices))) else: groups[last_operator].add_mapping(current_hashes, current_indices) @@ -7585,3 +7664,80 @@ def collapse_walrus(e: Expression) -> Expression: if isinstance(e, AssignmentExpr): return e.target return e + + +def find_last_var_assignment_line(n: Node, v: Var) -> int: + """Find the highest line number of a potential assignment to variable within node. + + This supports local and global variables. + + Return -1 if no assignment was found. + """ + visitor = VarAssignVisitor(v) + n.accept(visitor) + return visitor.last_line + + +class VarAssignVisitor(TraverserVisitor): + def __init__(self, v: Var) -> None: + self.last_line = -1 + self.lvalue = False + self.var_node = v + + def visit_assignment_stmt(self, s: AssignmentStmt) -> None: + self.lvalue = True + for lv in s.lvalues: + lv.accept(self) + self.lvalue = False + + def visit_name_expr(self, e: NameExpr) -> None: + if self.lvalue and e.node is self.var_node: + self.last_line = max(self.last_line, e.line) + + def visit_member_expr(self, e: MemberExpr) -> None: + old_lvalue = self.lvalue + self.lvalue = False + super().visit_member_expr(e) + self.lvalue = old_lvalue + + def visit_index_expr(self, e: IndexExpr) -> None: + old_lvalue = self.lvalue + self.lvalue = False + super().visit_index_expr(e) + self.lvalue = old_lvalue + + def visit_with_stmt(self, s: WithStmt) -> None: + self.lvalue = True + for lv in s.target: + if lv is not None: + lv.accept(self) + self.lvalue = False + s.body.accept(self) + + def visit_for_stmt(self, s: ForStmt) -> None: + self.lvalue = True + s.index.accept(self) + self.lvalue = False + s.body.accept(self) + if s.else_body: + s.else_body.accept(self) + + def visit_assignment_expr(self, e: AssignmentExpr) -> None: + self.lvalue = True + e.target.accept(self) + self.lvalue = False + e.value.accept(self) + + def visit_as_pattern(self, p: AsPattern) -> None: + if p.pattern is not None: + p.pattern.accept(self) + if p.name is not None: + self.lvalue = True + p.name.accept(self) + self.lvalue = False + + def visit_starred_pattern(self, p: StarredPattern) -> None: + if p.capture is not None: + self.lvalue = True + p.capture.accept(self) + self.lvalue = False diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index 38b5c2419d95..fce43fb68669 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -170,7 +170,7 @@ # Type of callback user for checking individual function arguments. See # check_args() below for details. 
ArgChecker: _TypeAlias = Callable[ - [Type, Type, ArgKind, Type, int, int, CallableType, Optional[Type], Context, Context], None, + [Type, Type, ArgKind, Type, int, int, CallableType, Optional[Type], Context, Context], None ] # Maximum nesting level for math union in overloads, setting this to large values @@ -686,7 +686,7 @@ def check_typeddict_call( context: Context, orig_callee: Type | None, ) -> Type: - if len(args) >= 1 and all([ak == ARG_NAMED for ak in arg_kinds]): + if args and all([ak == ARG_NAMED for ak in arg_kinds]): # ex: Point(x=42, y=1337) assert all(arg_name is not None for arg_name in arg_names) item_names = cast(List[str], arg_names) @@ -708,7 +708,7 @@ def check_typeddict_call( callee, unique_arg.analyzed, context, orig_callee ) - if len(args) == 0: + if not args: # ex: EmptyDict() return self.check_typeddict_call_with_kwargs(callee, {}, context, orig_callee) @@ -845,7 +845,7 @@ def check_typeddict_call_with_kwargs( # this may give a better error message. ret_type = callee - for (item_name, item_expected_type) in ret_type.items.items(): + for item_name, item_expected_type in ret_type.items.items(): if item_name in kwargs: item_value = kwargs[item_name] self.chk.check_simple_assignment( @@ -2126,7 +2126,9 @@ def check_argument_types( if actual_kind == nodes.ARG_STAR2 and not self.is_valid_keyword_var_arg( actual_type ): - is_mapping = is_subtype(actual_type, self.chk.named_type("typing.Mapping")) + is_mapping = is_subtype( + actual_type, self.chk.named_type("_typeshed.SupportsKeysAndGetItem") + ) self.msg.invalid_keyword_var_arg(actual_type, is_mapping, context) expanded_actual = mapper.expand_actual_type( actual_type, actual_kind, callee.arg_names[i], callee_arg_kind @@ -2337,10 +2339,8 @@ def plausible_overload_call_targets( def has_shape(typ: Type) -> bool: typ = get_proper_type(typ) - return ( - isinstance(typ, TupleType) - or isinstance(typ, TypedDictType) - or (isinstance(typ, Instance) and typ.type.is_named_tuple) + return isinstance(typ, (TupleType, TypedDictType)) or ( + isinstance(typ, Instance) and typ.type.is_named_tuple ) matches: list[CallableType] = [] @@ -2423,8 +2423,7 @@ def infer_overload_return_type( inferred_types.append(infer_type) type_maps.append(m) - if len(matches) == 0: - # No match was found + if not matches: return None elif any_causes_overload_ambiguity(matches, return_types, arg_types, arg_kinds, arg_names): # An argument of type or containing the type 'Any' caused ambiguity. 
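Side note on the `check_argument_types` and `is_valid_keyword_var_arg` hunks above: the `**kwargs` compatibility check is now keyed to the `_typeshed.SupportsKeysAndGetItem` protocol instead of `typing.Mapping`. The snippet below is a hypothetical user-level illustration (class and function names are invented) of code that implements only `keys()` and `__getitem__`, which should now be accepted for `**` unpacking; it also works at runtime, since CPython's call protocol only requires those two methods.

```python
# Hypothetical example: a non-Mapping object that satisfies the
# SupportsKeysAndGetItem protocol (keys() + __getitem__) used with **.
from typing import Dict, Iterator


class KwargsLike:
    def __init__(self, data: Dict[str, int]) -> None:
        self._data = data

    def keys(self) -> Iterator[str]:
        return iter(self._data)

    def __getitem__(self, key: str) -> int:
        return self._data[key]


def total(**kwargs: int) -> int:
    return sum(kwargs.values())


print(total(**KwargsLike({"a": 1, "b": 2})))  # prints 3
```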
@@ -3015,7 +3014,7 @@ def visit_comparison_expr(self, e: ComparisonExpr) -> Type: if not encountered_partial_type and not failed_out: iterable_type = UnionType.make_union(iterable_types) if not is_subtype(left_type, iterable_type): - if len(container_types) == 0: + if not container_types: self.msg.unsupported_operand_types("in", left_type, right_type, e) else: container_type = UnionType.make_union(container_types) @@ -3917,6 +3916,12 @@ def visit_assert_type_expr(self, expr: AssertTypeExpr) -> Type: always_allow_any=True, ) target_type = expr.type + proper_source_type = get_proper_type(source_type) + if ( + isinstance(proper_source_type, mypy.types.Instance) + and proper_source_type.last_known_value is not None + ): + source_type = proper_source_type.last_known_value if not is_same_type(source_type, target_type): if not self.chk.in_checked_function(): self.msg.note( @@ -3966,7 +3971,12 @@ def visit_type_application(self, tapp: TypeApplication) -> Type: if isinstance(tapp.expr, RefExpr) and isinstance(tapp.expr.node, TypeAlias): # Subscription of a (generic) alias in runtime context, expand the alias. item = expand_type_alias( - tapp.expr.node, tapp.types, self.chk.fail, tapp.expr.node.no_args, tapp + tapp.expr.node, + tapp.types, + self.chk.fail, + tapp.expr.node.no_args, + tapp, + self.chk.options, ) item = get_proper_type(item) if isinstance(item, Instance): @@ -4031,7 +4041,12 @@ class LongName(Generic[T]): ... disallow_any = self.chk.options.disallow_any_generics and self.is_callee item = get_proper_type( set_any_tvars( - alias, ctx.line, ctx.column, disallow_any=disallow_any, fail=self.msg.fail + alias, + ctx.line, + ctx.column, + self.chk.options, + disallow_any=disallow_any, + fail=self.msg.fail, ) ) if isinstance(item, Instance): @@ -4304,12 +4319,19 @@ def visit_dict_expr(self, e: DictExpr) -> Type: if dt: return dt + # Define type variables (used in constructors below). + kt = TypeVarType("KT", "KT", -1, [], self.object_type()) + vt = TypeVarType("VT", "VT", -2, [], self.object_type()) + # Collect function arguments, watching out for **expr. - args: list[Expression] = [] # Regular "key: value" - stargs: list[Expression] = [] # For "**expr" + args: list[Expression] = [] + expected_types: list[Type] = [] for key, value in e.items: if key is None: - stargs.append(value) + args.append(value) + expected_types.append( + self.chk.named_generic_type("_typeshed.SupportsKeysAndGetItem", [kt, vt]) + ) else: tup = TupleExpr([key, value]) if key.line >= 0: @@ -4318,48 +4340,23 @@ def visit_dict_expr(self, e: DictExpr) -> Type: else: tup.line = value.line tup.column = value.column + tup.end_line = value.end_line + tup.end_column = value.end_column args.append(tup) - # Define type variables (used in constructors below). - kt = TypeVarType("KT", "KT", -1, [], self.object_type()) - vt = TypeVarType("VT", "VT", -2, [], self.object_type()) - rv = None - # Call dict(*args), unless it's empty and stargs is not. - if args or not stargs: - # The callable type represents a function like this: - # - # def (*v: Tuple[kt, vt]) -> Dict[kt, vt]: ... - constructor = CallableType( - [TupleType([kt, vt], self.named_type("builtins.tuple"))], - [nodes.ARG_STAR], - [None], - self.chk.named_generic_type("builtins.dict", [kt, vt]), - self.named_type("builtins.function"), - name="", - variables=[kt, vt], - ) - rv = self.check_call(constructor, args, [nodes.ARG_POS] * len(args), e)[0] - else: - # dict(...) will be called below. 
- pass - # Call rv.update(arg) for each arg in **stargs, - # except if rv isn't set yet, then set rv = dict(arg). - if stargs: - for arg in stargs: - if rv is None: - constructor = CallableType( - [self.chk.named_generic_type("typing.Mapping", [kt, vt])], - [nodes.ARG_POS], - [None], - self.chk.named_generic_type("builtins.dict", [kt, vt]), - self.named_type("builtins.function"), - name="", - variables=[kt, vt], - ) - rv = self.check_call(constructor, [arg], [nodes.ARG_POS], arg)[0] - else: - self.check_method_call_by_name("update", rv, [arg], [nodes.ARG_POS], arg) - assert rv is not None - return rv + expected_types.append(TupleType([kt, vt], self.named_type("builtins.tuple"))) + + # The callable type represents a function like this (except we adjust for **expr): + # def (*v: Tuple[kt, vt]) -> Dict[kt, vt]: ... + constructor = CallableType( + expected_types, + [nodes.ARG_POS] * len(expected_types), + [None] * len(expected_types), + self.chk.named_generic_type("builtins.dict", [kt, vt]), + self.named_type("builtins.function"), + name="", + variables=[kt, vt], + ) + return self.check_call(constructor, args, [nodes.ARG_POS] * len(args), e)[0] def find_typeddict_context( self, context: Type | None, dict_expr: DictExpr @@ -4457,7 +4454,7 @@ def infer_lambda_type_using_context( is_ellipsis_args=False, arg_types=[AnyType(TypeOfAny.special_form)] * len(arg_kinds), arg_kinds=arg_kinds, - arg_names=e.arg_names[:], + arg_names=e.arg_names.copy(), ) if ARG_STAR in arg_kinds or ARG_STAR2 in arg_kinds: @@ -4638,7 +4635,11 @@ def _super_arg_types(self, e: SuperExpr) -> Type | tuple[Type, Type]: return type_type, instance_type def visit_slice_expr(self, e: SliceExpr) -> Type: - expected = make_optional_type(self.named_type("builtins.int")) + try: + supports_index = self.chk.named_type("typing_extensions.SupportsIndex") + except KeyError: + supports_index = self.chk.named_type("builtins.int") # thanks, fixture life + expected = make_optional_type(supports_index) for index in [e.begin_index, e.end_index, e.stride]: if index: t = self.accept(index) @@ -4916,15 +4917,8 @@ def named_type(self, name: str) -> Instance: def is_valid_var_arg(self, typ: Type) -> bool: """Is a type valid as a *args argument?""" typ = get_proper_type(typ) - return ( - isinstance(typ, TupleType) - or is_subtype( - typ, - self.chk.named_generic_type("typing.Iterable", [AnyType(TypeOfAny.special_form)]), - ) - or isinstance(typ, AnyType) - or isinstance(typ, ParamSpecType) - or isinstance(typ, UnpackType) + return isinstance(typ, (TupleType, AnyType, ParamSpecType, UnpackType)) or is_subtype( + typ, self.chk.named_generic_type("typing.Iterable", [AnyType(TypeOfAny.special_form)]) ) def is_valid_keyword_var_arg(self, typ: Type) -> bool: @@ -4933,14 +4927,14 @@ def is_valid_keyword_var_arg(self, typ: Type) -> bool: is_subtype( typ, self.chk.named_generic_type( - "typing.Mapping", + "_typeshed.SupportsKeysAndGetItem", [self.named_type("builtins.str"), AnyType(TypeOfAny.special_form)], ), ) or is_subtype( typ, self.chk.named_generic_type( - "typing.Mapping", [UninhabitedType(), UninhabitedType()] + "_typeshed.SupportsKeysAndGetItem", [UninhabitedType(), UninhabitedType()] ), ) or isinstance(typ, ParamSpecType) @@ -5474,7 +5468,7 @@ def any_causes_overload_ambiguity( def all_same_types(types: list[Type]) -> bool: - if len(types) == 0: + if not types: return True return all(is_same_type(t, types[0]) for t in types[1:]) @@ -5518,7 +5512,7 @@ def merge_typevars_in_callables_by_name( variables.append(tv) rename[tv.id] = unique_typevars[name] - 
target = cast(CallableType, expand_type(target, rename)) + target = expand_type(target, rename) output.append(target) return output, variables diff --git a/mypy/checkmember.py b/mypy/checkmember.py index a2c580e13446..c2c6b3555805 100644 --- a/mypy/checkmember.py +++ b/mypy/checkmember.py @@ -312,7 +312,8 @@ def analyze_instance_member_access( if method.is_property: assert isinstance(method, OverloadedFuncDef) - first_item = cast(Decorator, method.items[0]) + first_item = method.items[0] + assert isinstance(first_item, Decorator) return analyze_var(name, first_item.var, typ, info, mx) if mx.is_lvalue: mx.msg.cant_assign_to_method(mx.context) @@ -412,6 +413,13 @@ def analyze_type_type_member_access( upper_bound = get_proper_type(typ.item.upper_bound) if isinstance(upper_bound, Instance): item = upper_bound + elif isinstance(upper_bound, UnionType): + return _analyze_member_access( + name, + TypeType.make_normalized(upper_bound, line=typ.line, column=typ.column), + mx, + override_info, + ) elif isinstance(upper_bound, TupleType): item = tuple_fallback(upper_bound) elif isinstance(upper_bound, AnyType): @@ -629,7 +637,10 @@ def analyze_descriptor_access(descriptor_type: Type, mx: MemberContext) -> Type: dunder_get = descriptor_type.type.get_method("__get__") if dunder_get is None: mx.msg.fail( - message_registry.DESCRIPTOR_GET_NOT_CALLABLE.format(descriptor_type), mx.context + message_registry.DESCRIPTOR_GET_NOT_CALLABLE.format( + descriptor_type.str_with_options(mx.msg.options) + ), + mx.context, ) return AnyType(TypeOfAny.from_error) @@ -686,7 +697,10 @@ def analyze_descriptor_access(descriptor_type: Type, mx: MemberContext) -> Type: if not isinstance(inferred_dunder_get_type, CallableType): mx.msg.fail( - message_registry.DESCRIPTOR_GET_NOT_CALLABLE.format(descriptor_type), mx.context + message_registry.DESCRIPTOR_GET_NOT_CALLABLE.format( + descriptor_type.str_with_options(mx.msg.options) + ), + mx.context, ) return AnyType(TypeOfAny.from_error) @@ -1150,7 +1164,7 @@ class B(A[str]): pass t = freshen_all_functions_type_vars(t) t = bind_self(t, original_type, is_classmethod=True) assert isuper is not None - t = cast(CallableType, expand_type_by_instance(t, isuper)) + t = expand_type_by_instance(t, isuper) freeze_all_type_vars(t) return t.copy_modified(variables=list(tvars) + list(t.variables)) elif isinstance(t, Overloaded): diff --git a/mypy/checkpattern.py b/mypy/checkpattern.py index 603b392eee29..e132a23ff55f 100644 --- a/mypy/checkpattern.py +++ b/mypy/checkpattern.py @@ -15,7 +15,8 @@ from mypy.maptype import map_instance_to_supertype from mypy.meet import narrow_declared_type from mypy.messages import MessageBuilder -from mypy.nodes import ARG_POS, Expression, NameExpr, TypeAlias, TypeInfo, Var +from mypy.nodes import ARG_POS, Context, Expression, NameExpr, TypeAlias, TypeInfo, Var +from mypy.options import Options from mypy.patterns import ( AsPattern, ClassPattern, @@ -104,7 +105,11 @@ class PatternChecker(PatternVisitor[PatternType]): # non_sequence_match_type_names non_sequence_match_types: list[Type] - def __init__(self, chk: mypy.checker.TypeChecker, msg: MessageBuilder, plugin: Plugin) -> None: + options: Options + + def __init__( + self, chk: mypy.checker.TypeChecker, msg: MessageBuilder, plugin: Plugin, options: Options + ) -> None: self.chk = chk self.msg = msg self.plugin = plugin @@ -114,6 +119,7 @@ def __init__(self, chk: mypy.checker.TypeChecker, msg: MessageBuilder, plugin: P self.non_sequence_match_types = self.generate_types_from_names( non_sequence_match_type_names 
) + self.options = options def accept(self, o: Pattern, type_context: Type) -> PatternType: self.type_context.append(type_context) @@ -242,7 +248,7 @@ def visit_sequence_pattern(self, o: SequencePattern) -> PatternType: elif size_diff > 0 and star_position is None: return self.early_non_match() else: - inner_type = self.get_sequence_type(current_type) + inner_type = self.get_sequence_type(current_type, o) if inner_type is None: inner_type = self.chk.named_type("builtins.object") inner_types = [inner_type] * len(o.patterns) @@ -309,14 +315,14 @@ def visit_sequence_pattern(self, o: SequencePattern) -> PatternType: new_type = current_type return PatternType(new_type, rest_type, captures) - def get_sequence_type(self, t: Type) -> Type | None: + def get_sequence_type(self, t: Type, context: Context) -> Type | None: t = get_proper_type(t) if isinstance(t, AnyType): return AnyType(TypeOfAny.from_another_any, t) if isinstance(t, UnionType): - items = [self.get_sequence_type(item) for item in t.items] + items = [self.get_sequence_type(item, context) for item in t.items] not_none_items = [item for item in items if item is not None] - if len(not_none_items) > 0: + if not_none_items: return make_simplified_union(not_none_items) else: return None @@ -324,7 +330,7 @@ def get_sequence_type(self, t: Type) -> Type | None: if self.chk.type_is_iterable(t) and isinstance(t, (Instance, TupleType)): if isinstance(t, TupleType): t = tuple_fallback(t) - return self.chk.iterable_item_type(t) + return self.chk.iterable_item_type(t, context) else: return None @@ -458,8 +464,8 @@ def visit_class_pattern(self, o: ClassPattern) -> PatternType: elif isinstance(type_info, TypeAlias): typ = type_info.target else: - if isinstance(type_info, Var): - name = str(type_info.type) + if isinstance(type_info, Var) and type_info.type is not None: + name = type_info.type.str_with_options(self.options) else: name = type_info.name self.msg.fail(message_registry.CLASS_PATTERN_TYPE_REQUIRED.format(name), o.class_ref) @@ -508,7 +514,12 @@ def visit_class_pattern(self, o: ClassPattern) -> PatternType: ) has_local_errors = local_errors.has_new_errors() if has_local_errors: - self.msg.fail(message_registry.MISSING_MATCH_ARGS.format(typ), o) + self.msg.fail( + message_registry.MISSING_MATCH_ARGS.format( + typ.str_with_options(self.options) + ), + o, + ) return self.early_non_match() proper_match_args_type = get_proper_type(match_args_type) @@ -573,7 +584,10 @@ def visit_class_pattern(self, o: ClassPattern) -> PatternType: if has_local_errors or key_type is None: key_type = AnyType(TypeOfAny.from_error) self.msg.fail( - message_registry.CLASS_PATTERN_UNKNOWN_KEYWORD.format(typ, keyword), pattern + message_registry.CLASS_PATTERN_UNKNOWN_KEYWORD.format( + typ.str_with_options(self.options), keyword + ), + pattern, ) inner_type, inner_rest_type, inner_captures = self.accept(pattern, key_type) diff --git a/mypy/checkstrformat.py b/mypy/checkstrformat.py index e7602f33095d..974985d8b4fc 100644 --- a/mypy/checkstrformat.py +++ b/mypy/checkstrformat.py @@ -139,7 +139,6 @@ class ConversionSpecifier: def __init__( self, match: Match[str], start_pos: int = -1, non_standard_format_spec: bool = False ) -> None: - self.whole_seq = match.group() self.start_pos = start_pos @@ -435,9 +434,9 @@ def perform_special_format_checks( actual_type, "__str__" ): self.msg.fail( - 'On Python 3 formatting "b\'abc\'" with "{}" ' - 'produces "b\'abc\'", not "abc"; ' - 'use "{!r}" if this is desired behavior', + 'If x = b\'abc\' then f"{x}" or "{}".format(x) produces 
"b\'abc\'", ' + 'not "abc". If this is desired behavior, use f"{x!r}" or "{!r}".format(x). ' + "Otherwise, decode the bytes", call, code=codes.STR_BYTES_PY3, ) @@ -589,7 +588,7 @@ def apply_field_accessors( return repl assert spec.field - temp_errors = Errors() + temp_errors = Errors(self.chk.options) dummy = DUMMY_FIELD_NAME + spec.field[len(spec.key) :] temp_ast: Node = parse( dummy, fnam="", module=None, options=self.chk.options, errors=temp_errors @@ -844,10 +843,14 @@ def build_dict_type(self, expr: FormatStringExpr) -> Type: any_type = AnyType(TypeOfAny.special_form) if isinstance(expr, BytesExpr): bytes_type = self.chk.named_generic_type("builtins.bytes", []) - return self.chk.named_generic_type("typing.Mapping", [bytes_type, any_type]) + return self.chk.named_generic_type( + "_typeshed.SupportsKeysAndGetItem", [bytes_type, any_type] + ) elif isinstance(expr, StrExpr): str_type = self.chk.named_generic_type("builtins.str", []) - return self.chk.named_generic_type("typing.Mapping", [str_type, any_type]) + return self.chk.named_generic_type( + "_typeshed.SupportsKeysAndGetItem", [str_type, any_type] + ) else: assert False, "Unreachable" @@ -946,9 +949,8 @@ def check_s_special_cases(self, expr: FormatStringExpr, typ: Type, context: Cont # Couple special cases for string formatting. if has_type_component(typ, "builtins.bytes"): self.msg.fail( - 'On Python 3 formatting "b\'abc\'" with "%s" ' - 'produces "b\'abc\'", not "abc"; ' - 'use "%r" if this is desired behavior', + 'If x = b\'abc\' then "%s" % x produces "b\'abc\'", not "abc". ' + 'If this is desired behavior use "%r" % x. Otherwise, decode the bytes', context, code=codes.STR_BYTES_PY3, ) diff --git a/mypy/config_parser.py b/mypy/config_parser.py index 190782a3bded..05af2ba6e21e 100644 --- a/mypy/config_parser.py +++ b/mypy/config_parser.py @@ -34,7 +34,7 @@ from mypy.options import PER_MODULE_OPTIONS, Options _CONFIG_VALUE_TYPES: _TypeAlias = Union[ - str, bool, int, float, Dict[str, str], List[str], Tuple[int, int], + str, bool, int, float, Dict[str, str], List[str], Tuple[int, int] ] _INI_PARSER_CALLABLE: _TypeAlias = Callable[[Any], _CONFIG_VALUE_TYPES] @@ -538,10 +538,7 @@ def split_directive(s: str) -> tuple[list[str], list[str]]: def mypy_comments_to_config_map(line: str, template: Options) -> tuple[dict[str, str], list[str]]: - """Rewrite the mypy comment syntax into ini file syntax. 
- - Returns - """ + """Rewrite the mypy comment syntax into ini file syntax.""" options = {} entries, errors = split_directive(line) for entry in entries: diff --git a/mypy/constant_fold.py b/mypy/constant_fold.py index a22c1b9ba9e5..005d67b8f961 100644 --- a/mypy/constant_fold.py +++ b/mypy/constant_fold.py @@ -8,11 +8,21 @@ from typing import Union from typing_extensions import Final -from mypy.nodes import Expression, FloatExpr, IntExpr, NameExpr, OpExpr, StrExpr, UnaryExpr, Var +from mypy.nodes import ( + ComplexExpr, + Expression, + FloatExpr, + IntExpr, + NameExpr, + OpExpr, + StrExpr, + UnaryExpr, + Var, +) # All possible result types of constant folding -ConstantValue = Union[int, bool, float, str] -CONST_TYPES: Final = (int, bool, float, str) +ConstantValue = Union[int, bool, float, complex, str] +CONST_TYPES: Final = (int, bool, float, complex, str) def constant_fold_expr(expr: Expression, cur_mod_id: str) -> ConstantValue | None: @@ -39,6 +49,8 @@ def constant_fold_expr(expr: Expression, cur_mod_id: str) -> ConstantValue | Non return expr.value if isinstance(expr, FloatExpr): return expr.value + if isinstance(expr, ComplexExpr): + return expr.value elif isinstance(expr, NameExpr): if expr.name == "True": return True @@ -56,24 +68,60 @@ def constant_fold_expr(expr: Expression, cur_mod_id: str) -> ConstantValue | Non elif isinstance(expr, OpExpr): left = constant_fold_expr(expr.left, cur_mod_id) right = constant_fold_expr(expr.right, cur_mod_id) - if isinstance(left, int) and isinstance(right, int): - return constant_fold_binary_int_op(expr.op, left, right) - elif isinstance(left, str) and isinstance(right, str): - return constant_fold_binary_str_op(expr.op, left, right) + if left is not None and right is not None: + return constant_fold_binary_op(expr.op, left, right) elif isinstance(expr, UnaryExpr): value = constant_fold_expr(expr.expr, cur_mod_id) - if isinstance(value, int): - return constant_fold_unary_int_op(expr.op, value) + if value is not None: + return constant_fold_unary_op(expr.op, value) + return None + + +def constant_fold_binary_op( + op: str, left: ConstantValue, right: ConstantValue +) -> ConstantValue | None: + if isinstance(left, int) and isinstance(right, int): + return constant_fold_binary_int_op(op, left, right) + + # Float and mixed int/float arithmetic. + if isinstance(left, float) and isinstance(right, float): + return constant_fold_binary_float_op(op, left, right) + elif isinstance(left, float) and isinstance(right, int): + return constant_fold_binary_float_op(op, left, right) + elif isinstance(left, int) and isinstance(right, float): + return constant_fold_binary_float_op(op, left, right) + + # String concatenation and multiplication. + if op == "+" and isinstance(left, str) and isinstance(right, str): + return left + right + elif op == "*" and isinstance(left, str) and isinstance(right, int): + return left * right + elif op == "*" and isinstance(left, int) and isinstance(right, str): + return left * right + + # Complex construction. 
+ if op == "+" and isinstance(left, (int, float)) and isinstance(right, complex): + return left + right + elif op == "+" and isinstance(left, complex) and isinstance(right, (int, float)): + return left + right + elif op == "-" and isinstance(left, (int, float)) and isinstance(right, complex): + return left - right + elif op == "-" and isinstance(left, complex) and isinstance(right, (int, float)): + return left - right + return None -def constant_fold_binary_int_op(op: str, left: int, right: int) -> int | None: +def constant_fold_binary_int_op(op: str, left: int, right: int) -> int | float | None: if op == "+": return left + right if op == "-": return left - right elif op == "*": return left * right + elif op == "/": + if right != 0: + return left / right elif op == "//": if right != 0: return left // right @@ -100,17 +148,41 @@ def constant_fold_binary_int_op(op: str, left: int, right: int) -> int | None: return None -def constant_fold_unary_int_op(op: str, value: int) -> int | None: - if op == "-": - return -value - elif op == "~": - return ~value - elif op == "+": - return value +def constant_fold_binary_float_op(op: str, left: int | float, right: int | float) -> float | None: + assert not (isinstance(left, int) and isinstance(right, int)), (op, left, right) + if op == "+": + return left + right + elif op == "-": + return left - right + elif op == "*": + return left * right + elif op == "/": + if right != 0: + return left / right + elif op == "//": + if right != 0: + return left // right + elif op == "%": + if right != 0: + return left % right + elif op == "**": + if (left < 0 and right >= 1 or right == 0) or (left >= 0 and right >= 0): + try: + ret = left**right + except OverflowError: + return None + else: + assert isinstance(ret, float) + return ret + return None -def constant_fold_binary_str_op(op: str, left: str, right: str) -> str | None: - if op == "+": - return left + right +def constant_fold_unary_op(op: str, value: ConstantValue) -> int | float | None: + if op == "-" and isinstance(value, (int, float)): + return -value + elif op == "~" and isinstance(value, int): + return ~value + elif op == "+" and isinstance(value, (int, float)): + return value return None diff --git a/mypy/constraints.py b/mypy/constraints.py index a8f04094ca63..ded434af1972 100644 --- a/mypy/constraints.py +++ b/mypy/constraints.py @@ -695,7 +695,7 @@ def visit_instance(self, template: Instance) -> list[Constraint]: from_concat = bool(prefix.arg_types) or suffix.from_concatenate suffix = suffix.copy_modified(from_concatenate=from_concat) - if isinstance(suffix, Parameters) or isinstance(suffix, CallableType): + if isinstance(suffix, (Parameters, CallableType)): # no such thing as variance for ParamSpecs # TODO: is there a case I am missing? # TODO: constraints between prefixes @@ -765,7 +765,7 @@ def visit_instance(self, template: Instance) -> list[Constraint]: from_concat = bool(prefix.arg_types) or suffix.from_concatenate suffix = suffix.copy_modified(from_concatenate=from_concat) - if isinstance(suffix, Parameters) or isinstance(suffix, CallableType): + if isinstance(suffix, (Parameters, CallableType)): # no such thing as variance for ParamSpecs # TODO: is there a case I am missing? 
# TODO: constraints between prefixes @@ -949,7 +949,7 @@ def visit_callable_type(self, template: CallableType) -> list[Constraint]: ) # TODO: see above "FIX" comments for param_spec is None case - # TODO: this assume positional arguments + # TODO: this assumes positional arguments for t, a in zip(prefix.arg_types, cactual_prefix.arg_types): res.extend(infer_constraints(t, a, neg_op(self.direction))) @@ -1006,7 +1006,6 @@ def infer_against_overloaded( return infer_constraints(template, item, self.direction) def visit_tuple_type(self, template: TupleType) -> list[Constraint]: - actual = self.actual unpack_index = find_unpack_in_list(template.items) is_varlength_tuple = ( @@ -1065,7 +1064,7 @@ def visit_typeddict_type(self, template: TypedDictType) -> list[Constraint]: res: list[Constraint] = [] # NOTE: Non-matching keys are ignored. Compatibility is checked # elsewhere so this shouldn't be unsafe. - for (item_name, template_item_type, actual_item_type) in template.zip(actual): + for item_name, template_item_type, actual_item_type in template.zip(actual): res.extend(infer_constraints(template_item_type, actual_item_type, self.direction)) return res elif isinstance(actual, AnyType): @@ -1159,7 +1158,7 @@ def find_matching_overload_items( if not res: # Falling back to all items if we can't find a match is pretty arbitrary, but # it maintains backward compatibility. - res = items[:] + res = items.copy() return res diff --git a/mypy/defaults.py b/mypy/defaults.py index 02562b5f0963..d167997464f4 100644 --- a/mypy/defaults.py +++ b/mypy/defaults.py @@ -44,5 +44,5 @@ ] # Threshold after which we sometimes filter out most errors to avoid very -# verbose output -MANY_ERRORS_THRESHOLD: Final = 200 +# verbose output. The default is to show all errors. +MANY_ERRORS_THRESHOLD: Final = -1 diff --git a/mypy/dmypy_server.py b/mypy/dmypy_server.py index 3cc50f4ece36..1f038397f2ea 100644 --- a/mypy/dmypy_server.py +++ b/mypy/dmypy_server.py @@ -163,7 +163,6 @@ def ignore_suppressed_imports(module: str) -> bool: class Server: - # NOTE: the instance is constructed in the parent process but # serve() is called in the grandchild (by daemonize()). @@ -600,7 +599,7 @@ def fine_grained_increment_follow_imports(self, sources: list[BuildSource]) -> l messages = fine_grained_manager.update(changed, [], followed=True) # Follow deps from changed modules (still within graph). 
- worklist = changed[:] + worklist = changed.copy() while worklist: module = worklist.pop() if module[0] not in graph: @@ -707,7 +706,7 @@ def find_reachable_changed_modules( """ changed = [] new_files = [] - worklist = roots[:] + worklist = roots.copy() seen.update(source.module for source in worklist) while worklist: nxt = worklist.pop() @@ -828,7 +827,6 @@ def update_sources(self, sources: list[BuildSource]) -> None: def update_changed( self, sources: list[BuildSource], remove: list[str], update: list[str] ) -> ChangesAndRemovals: - changed_paths = self.fswatcher.update_changed(remove, update) return self._find_changed(sources, changed_paths) diff --git a/mypy/errorcodes.py b/mypy/errorcodes.py index 2eb2d5c624b6..6b63bad72683 100644 --- a/mypy/errorcodes.py +++ b/mypy/errorcodes.py @@ -113,7 +113,7 @@ def __str__(self) -> str: "str-format", "Check that string formatting/interpolation is type-safe", "General" ) STR_BYTES_PY3: Final = ErrorCode( - "str-bytes-safe", "Warn about dangerous coercions related to bytes and string types", "General" + "str-bytes-safe", "Warn about implicit coercions related to bytes and string types", "General" ) EXIT_RETURN: Final = ErrorCode( "exit-return", "Warn about too general return type for '__exit__'", "General" @@ -221,10 +221,13 @@ def __str__(self) -> str: USED_BEFORE_DEF: Final[ErrorCode] = ErrorCode( "used-before-def", "Warn about variables that are used before they are defined", "General" ) +UNUSED_IGNORE: Final = ErrorCode( + "unused-ignore", "Ensure that all type ignores are used", "General", default_enabled=False +) # Syntax errors are often blocking. -SYNTAX: Final = ErrorCode("syntax", "Report syntax errors", "General") +SYNTAX: Final[ErrorCode] = ErrorCode("syntax", "Report syntax errors", "General") # This is an internal marker code for a whole-file ignore. It is not intended to # be user-visible. diff --git a/mypy/errors.py b/mypy/errors.py index 2c2c1e5ca227..9d29259e943c 100644 --- a/mypy/errors.py +++ b/mypy/errors.py @@ -222,6 +222,9 @@ class Errors: # (path -> line -> error-codes) ignored_lines: dict[str, dict[int, list[str]]] + # Lines that are statically unreachable (e.g. due to platform/version check). + unreachable_lines: dict[str, set[int]] + # Lines on which an error was actually ignored. used_ignored_lines: dict[str, dict[int, list[str]]] @@ -258,28 +261,17 @@ class Errors: def __init__( self, - show_error_context: bool = False, - show_column_numbers: bool = False, - hide_error_codes: bool = False, - pretty: bool = False, - show_error_end: bool = False, + options: Options, + *, read_source: Callable[[str], list[str] | None] | None = None, - show_absolute_path: bool = False, - many_errors_threshold: int = -1, - options: Options | None = None, + hide_error_codes: bool | None = None, ) -> None: - self.show_error_context = show_error_context - self.show_column_numbers = show_column_numbers - self.hide_error_codes = hide_error_codes - self.show_absolute_path = show_absolute_path - self.pretty = pretty - self.show_error_end = show_error_end - if show_error_end: - assert show_column_numbers, "Inconsistent formatting, must be prevented by argparse" + self.options = options + self.hide_error_codes = ( + hide_error_codes if hide_error_codes is not None else options.hide_error_codes + ) # We use fscache to read source code when showing snippets. 
self.read_source = read_source - self.many_errors_threshold = many_errors_threshold - self.options = options self.initialize() def initialize(self) -> None: @@ -288,6 +280,7 @@ def initialize(self) -> None: self.import_ctx = [] self.function_or_member = [None] self.ignored_lines = {} + self.unreachable_lines = {} self.used_ignored_lines = defaultdict(lambda: defaultdict(list)) self.ignored_files = set() self.only_once_messages = set() @@ -308,7 +301,7 @@ def set_ignore_prefix(self, prefix: str) -> None: self.ignore_prefix = prefix def simplify_path(self, file: str) -> str: - if self.show_absolute_path: + if self.options.show_absolute_path: return os.path.abspath(file) else: file = os.path.normpath(file) @@ -336,6 +329,9 @@ def set_file_ignored_lines( if ignore_all: self.ignored_files.add(file) + def set_unreachable_lines(self, file: str, unreachable_lines: set[int]) -> None: + self.unreachable_lines[file] = unreachable_lines + def current_target(self) -> str | None: """Retrieves the current target from the associated scope. @@ -349,11 +345,11 @@ def current_module(self) -> str | None: def import_context(self) -> list[tuple[str, int]]: """Return a copy of the import context.""" - return self.import_ctx[:] + return self.import_ctx.copy() def set_import_context(self, ctx: list[tuple[str, int]]) -> None: """Replace the entire import context with a new value.""" - self.import_ctx = ctx[:] + self.import_ctx = ctx.copy() def report( self, @@ -534,13 +530,13 @@ def add_error_info(self, info: ErrorInfo) -> None: self._add_error_info(file, note) def has_many_errors(self) -> bool: - if self.many_errors_threshold < 0: + if self.options.many_errors_threshold < 0: return False - if len(self.error_info_map) >= self.many_errors_threshold: + if len(self.error_info_map) >= self.options.many_errors_threshold: return True if ( sum(len(errors) for errors in self.error_info_map.values()) - >= self.many_errors_threshold + >= self.options.many_errors_threshold ): return True return False @@ -634,23 +630,27 @@ def generate_unused_ignore_errors(self, file: str) -> None: ignored_lines = self.ignored_lines[file] used_ignored_lines = self.used_ignored_lines[file] for line, ignored_codes in ignored_lines.items(): + if line in self.unreachable_lines[file]: + continue + if codes.UNUSED_IGNORE.code in ignored_codes: + continue used_ignored_codes = used_ignored_lines[line] unused_ignored_codes = set(ignored_codes) - set(used_ignored_codes) # `ignore` is used - if len(ignored_codes) == 0 and len(used_ignored_codes) > 0: + if not ignored_codes and used_ignored_codes: continue # All codes appearing in `ignore[...]` are used - if len(ignored_codes) > 0 and len(unused_ignored_codes) == 0: + if ignored_codes and not unused_ignored_codes: continue # Display detail only when `ignore[...]` specifies more than one error code unused_codes_message = "" - if len(ignored_codes) > 1 and len(unused_ignored_codes) > 0: + if len(ignored_codes) > 1 and unused_ignored_codes: unused_codes_message = f"[{', '.join(sorted(unused_ignored_codes))}]" message = f'Unused "type: ignore{unused_codes_message}" comment' for unused in unused_ignored_codes: narrower = set(used_ignored_codes) & codes.sub_code_map[unused] if narrower: - message += f", use narrower [{', '.join(narrower)}] instead of [{unused}]" + message += f", use narrower [{', '.join(narrower)}] instead of [{unused}] code" # Don't use report since add_error_info will ignore the error! 
info = ErrorInfo( self.import_context(), @@ -664,7 +664,7 @@ def generate_unused_ignore_errors(self, file: str) -> None: -1, "error", message, - None, + codes.UNUSED_IGNORE, False, False, False, @@ -802,9 +802,9 @@ def format_messages( ) in errors: s = "" if file is not None: - if self.show_column_numbers and line >= 0 and column >= 0: + if self.options.show_column_numbers and line >= 0 and column >= 0: srcloc = f"{file}:{line}:{1 + column}" - if self.show_error_end and end_line >= 0 and end_column >= 0: + if self.options.show_error_end and end_line >= 0 and end_column >= 0: srcloc += f":{end_line}:{end_column}" elif line >= 0: srcloc = f"{file}:{line}" @@ -822,7 +822,7 @@ def format_messages( # displaying duplicate error codes. s = f"{s} [{code.code}]" a.append(s) - if self.pretty: + if self.options.pretty: # Add source code fragment and a location marker. if severity == "error" and source_lines and line > 0: source_line = source_lines[line - 1] @@ -853,7 +853,7 @@ def file_messages(self, path: str) -> list[str]: return [] self.flushed_files.add(path) source_lines = None - if self.pretty: + if self.options.pretty: assert self.read_source source_lines = self.read_source(path) return self.format_messages(self.error_info_map[path], source_lines) @@ -894,7 +894,7 @@ def render_messages(self, errors: list[ErrorInfo]) -> list[ErrorTuple]: for e in errors: # Report module import context, if different from previous message. - if not self.show_error_context: + if not self.options.show_error_context: pass elif e.import_ctx != prev_import_context: last = len(e.import_ctx) - 1 @@ -919,7 +919,7 @@ def render_messages(self, errors: list[ErrorInfo]) -> list[ErrorTuple]: file = self.simplify_path(e.file) # Report context within a source file. - if not self.show_error_context: + if not self.options.show_error_context: pass elif e.function_or_member != prev_function_or_member or e.type != prev_type: if e.function_or_member is None: diff --git a/mypy/expandtype.py b/mypy/expandtype.py index 7933283b24d6..fed38b27bbda 100644 --- a/mypy/expandtype.py +++ b/mypy/expandtype.py @@ -48,26 +48,30 @@ @overload -def expand_type( - typ: ProperType, env: Mapping[TypeVarId, Type], allow_erased_callables: bool = ... -) -> ProperType: +def expand_type(typ: CallableType, env: Mapping[TypeVarId, Type]) -> CallableType: ... @overload -def expand_type( - typ: Type, env: Mapping[TypeVarId, Type], allow_erased_callables: bool = ... -) -> Type: +def expand_type(typ: ProperType, env: Mapping[TypeVarId, Type]) -> ProperType: ... -def expand_type( - typ: Type, env: Mapping[TypeVarId, Type], allow_erased_callables: bool = False -) -> Type: +@overload +def expand_type(typ: Type, env: Mapping[TypeVarId, Type]) -> Type: + ... + + +def expand_type(typ: Type, env: Mapping[TypeVarId, Type]) -> Type: """Substitute any type variable references in a type given by a type environment. """ - return typ.accept(ExpandTypeVisitor(env, allow_erased_callables)) + return typ.accept(ExpandTypeVisitor(env)) + + +@overload +def expand_type_by_instance(typ: CallableType, instance: Instance) -> CallableType: + ... 
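The reworked `expand_type` overloads above declare that a `CallableType` argument yields a `CallableType` result, which is what allows the `cast(CallableType, ...)` wrapper in `freshen_function_type_vars` below to be dropped. A minimal, self-contained sketch of that `@overload` pattern, using toy stand-in classes (`Base`/`Special` are hypothetical names, not mypy's `Type`/`CallableType`):

```python
from __future__ import annotations

from typing import overload


class Base:  # stand-in for a general type node
    pass


class Special(Base):  # stand-in for a more specific subclass
    def copy_modified(self) -> "Special":
        return Special()


@overload
def expand(typ: Special) -> Special: ...
@overload
def expand(typ: Base) -> Base: ...


def expand(typ: Base) -> Base:
    # One implementation serves both overloads; the declared return
    # types tell the type checker that a Special input produces a
    # Special output.
    return typ


# With the narrower overload, callers can use Special-only methods on
# the result without an intermediate cast().
fresh = expand(Special()).copy_modified()
```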
@overload @@ -133,7 +137,7 @@ def freshen_function_type_vars(callee: F) -> F: tv = ParamSpecType.new_unification_variable(v) tvs.append(tv) tvmap[v.id] = tv - fresh = cast(CallableType, expand_type(callee, tvmap)).copy_modified(variables=tvs) + fresh = expand_type(callee, tvmap).copy_modified(variables=tvs) return cast(F, fresh) else: assert isinstance(callee, Overloaded) @@ -183,11 +187,8 @@ class ExpandTypeVisitor(TypeVisitor[Type]): variables: Mapping[TypeVarId, Type] # TypeVar id -> TypeVar value - def __init__( - self, variables: Mapping[TypeVarId, Type], allow_erased_callables: bool = False - ) -> None: + def __init__(self, variables: Mapping[TypeVarId, Type]) -> None: self.variables = variables - self.allow_erased_callables = allow_erased_callables def visit_unbound_type(self, t: UnboundType) -> Type: return t @@ -205,13 +206,12 @@ def visit_deleted_type(self, t: DeletedType) -> Type: return t def visit_erased_type(self, t: ErasedType) -> Type: - if not self.allow_erased_callables: - raise RuntimeError() # This may happen during type inference if some function argument # type is a generic callable, and its erased form will appear in inferred # constraints, then solver may check subtyping between them, which will trigger - # unify_generic_callables(), this is why we can get here. In all other cases it - # is a sign of a bug, since should never appear in any stored types. + # unify_generic_callables(), this is why we can get here. Another example is + # when inferring type of lambda in generic context, the lambda body contains + # a generic method in generic class. return t def visit_instance(self, t: Instance) -> Type: @@ -234,7 +234,10 @@ def visit_type_var(self, t: TypeVarType) -> Type: return repl def visit_param_spec(self, t: ParamSpecType) -> Type: - repl = get_proper_type(self.variables.get(t.id, t)) + # set prefix to something empty so we don't duplicate it + repl = get_proper_type( + self.variables.get(t.id, t.copy_modified(prefix=Parameters([], [], []))) + ) if isinstance(repl, Instance): # TODO: what does prefix mean in this case? # TODO: why does this case even happen? Instances aren't plural. @@ -346,7 +349,7 @@ def interpolate_args_for_unpack( ) return (arg_names, arg_kinds, arg_types) - def visit_callable_type(self, t: CallableType) -> Type: + def visit_callable_type(self, t: CallableType) -> CallableType: param_spec = t.param_spec() if param_spec is not None: repl = get_proper_type(self.variables.get(param_spec.id)) @@ -357,7 +360,7 @@ def visit_callable_type(self, t: CallableType) -> Type: # must expand both of them with all the argument types, # kinds and names in the replacement. The return type in # the replacement is ignored. - if isinstance(repl, CallableType) or isinstance(repl, Parameters): + if isinstance(repl, (CallableType, Parameters)): # Substitute *args: P.args, **kwargs: P.kwargs prefix = param_spec.prefix # we need to expand the types in the prefix, so might as well @@ -370,6 +373,23 @@ def visit_callable_type(self, t: CallableType) -> Type: ret_type=t.ret_type.accept(self), type_guard=(t.type_guard.accept(self) if t.type_guard is not None else None), ) + # TODO: Conceptually, the "len(t.arg_types) == 2" should not be here. However, this + # errors without it. Either figure out how to eliminate this or place an + # explanation for why this is necessary. + elif isinstance(repl, ParamSpecType) and len(t.arg_types) == 2: + # We're substituting one paramspec for another; this can mean that the prefix + # changes. (e.g. 
sub Concatenate[int, P] for Q) + prefix = repl.prefix + old_prefix = param_spec.prefix + + # Check assumptions. I'm not sure what order to place new prefix vs old prefix: + assert not old_prefix.arg_types or not prefix.arg_types + + t = t.copy_modified( + arg_types=prefix.arg_types + old_prefix.arg_types + t.arg_types, + arg_kinds=prefix.arg_kinds + old_prefix.arg_kinds + t.arg_kinds, + arg_names=prefix.arg_names + old_prefix.arg_names + t.arg_names, + ) var_arg = t.var_arg() if var_arg is not None and isinstance(var_arg.typ, UnpackType): @@ -453,9 +473,15 @@ def visit_union_type(self, t: UnionType) -> Type: # After substituting for type variables in t.items, some resulting types # might be subtypes of others, however calling make_simplified_union() # can cause recursion, so we just remove strict duplicates. - return UnionType.make_union( + simplified = UnionType.make_union( remove_trivial(flatten_nested_unions(expanded)), t.line, t.column ) + # This call to get_proper_type() is unfortunate but is required to preserve + # the invariant that ProperType will stay ProperType after applying expand_type(), + # otherwise a single item union of a type alias will break it. Note this should not + # cause infinite recursion since pathological aliases like A = Union[A, B] are + # banned at the semantic analysis level. + return get_proper_type(simplified) def visit_partial_type(self, t: PartialType) -> Type: return t diff --git a/mypy/fastparse.py b/mypy/fastparse.py index a993bd287f06..902bde110421 100644 --- a/mypy/fastparse.py +++ b/mypy/fastparse.py @@ -9,6 +9,7 @@ from mypy import defaults, errorcodes as codes, message_registry from mypy.errors import Errors +from mypy.message_registry import ErrorMessage from mypy.nodes import ( ARG_NAMED, ARG_NAMED_OPT, @@ -99,6 +100,7 @@ ) from mypy.reachability import infer_reachability_of_if_statement, mark_block_unreachable from mypy.sharedparse import argument_elide_name, special_function_elide_names +from mypy.traverser import TraverserVisitor from mypy.types import ( AnyType, CallableArgument, @@ -241,10 +243,6 @@ def ast3_parse( MISSING_FALLBACK: Final = FakeInfo("fallback can't be filled out until semanal") _dummy_fallback: Final = Instance(MISSING_FALLBACK, [], -1) -TYPE_COMMENT_SYNTAX_ERROR: Final = "syntax error in type comment" - -INVALID_TYPE_IGNORE: Final = 'Invalid "type: ignore" comment' - TYPE_IGNORE_PATTERN: Final = re.compile(r"[^#]*#\s*type:\s*ignore\s*(.*)") @@ -255,17 +253,21 @@ def parse( errors: Errors | None = None, options: Options | None = None, ) -> MypyFile: - """Parse a source file, without doing any semantic analysis. Return the parse tree. If errors is not provided, raise ParseError on failure. Otherwise, use the errors object to report parse errors. """ + ignore_errors = (options is not None and options.ignore_errors) or ( + errors is not None and fnam in errors.ignored_files + ) + # If errors are ignored, we can drop many function bodies to speed up type checking. 
+ strip_function_bodies = ignore_errors and (options is None or not options.preserve_asts) raise_on_error = False if options is None: options = Options() if errors is None: - errors = Errors(hide_error_codes=options.hide_error_codes) + errors = Errors(options) raise_on_error = True errors.set_file(fnam, module, options=options) is_stub_file = fnam.endswith(".pyi") @@ -282,7 +284,13 @@ def parse( warnings.filterwarnings("ignore", category=DeprecationWarning) ast = ast3_parse(source, fnam, "exec", feature_version=feature_version) - tree = ASTConverter(options=options, is_stub=is_stub_file, errors=errors).visit(ast) + tree = ASTConverter( + options=options, + is_stub=is_stub_file, + errors=errors, + ignore_errors=ignore_errors, + strip_function_bodies=strip_function_bodies, + ).visit(ast) tree.path = fnam tree.is_stub = is_stub_file except SyntaxError as e: @@ -343,8 +351,8 @@ def parse_type_comment( except SyntaxError: if errors is not None: stripped_type = type_comment.split("#", 2)[0].strip() - err_msg = f'{TYPE_COMMENT_SYNTAX_ERROR} "{stripped_type}"' - errors.report(line, column, err_msg, blocker=True, code=codes.SYNTAX) + err_msg = message_registry.TYPE_COMMENT_SYNTAX_ERROR_VALUE.format(stripped_type) + errors.report(line, column, err_msg.value, blocker=True, code=err_msg.code) return None, None else: raise @@ -355,7 +363,9 @@ def parse_type_comment( ignored: list[str] | None = parse_type_ignore_tag(tag) if ignored is None: if errors is not None: - errors.report(line, column, INVALID_TYPE_IGNORE, code=codes.SYNTAX) + errors.report( + line, column, message_registry.INVALID_TYPE_IGNORE.value, code=codes.SYNTAX + ) else: raise SyntaxError else: @@ -401,14 +411,24 @@ def is_no_type_check_decorator(expr: ast3.expr) -> bool: class ASTConverter: - def __init__(self, options: Options, is_stub: bool, errors: Errors) -> None: - # 'C' for class, 'F' for function - self.class_and_function_stack: list[Literal["C", "F"]] = [] + def __init__( + self, + options: Options, + is_stub: bool, + errors: Errors, + *, + ignore_errors: bool, + strip_function_bodies: bool, + ) -> None: + # 'C' for class, 'D' for function signature, 'F' for function, 'L' for lambda + self.class_and_function_stack: list[Literal["C", "D", "F", "L"]] = [] self.imports: list[ImportBase] = [] self.options = options self.is_stub = is_stub self.errors = errors + self.ignore_errors = ignore_errors + self.strip_function_bodies = strip_function_bodies self.type_ignores: dict[int, list[str]] = {} @@ -418,24 +438,16 @@ def __init__(self, options: Options, is_stub: bool, errors: Errors) -> None: def note(self, msg: str, line: int, column: int) -> None: self.errors.report(line, column, msg, severity="note", code=codes.SYNTAX) - def fail( - self, - msg: str, - line: int, - column: int, - blocker: bool = True, - code: codes.ErrorCode = codes.SYNTAX, - ) -> None: + def fail(self, msg: ErrorMessage, line: int, column: int, blocker: bool = True) -> None: if blocker or not self.options.ignore_errors: - self.errors.report(line, column, msg, blocker=blocker, code=code) + self.errors.report(line, column, msg.value, blocker=blocker, code=msg.code) def fail_merge_overload(self, node: IfStmt) -> None: self.fail( - "Condition can't be inferred, unable to merge overloads", + message_registry.FAILED_TO_MERGE_OVERLOADS, line=node.line, column=node.column, blocker=False, - code=codes.MISC, ) def visit(self, node: AST | None) -> Any: @@ -476,7 +488,12 @@ def get_lineno(self, node: ast3.expr | ast3.stmt) -> int: return node.lineno def translate_stmt_list( - 
self, stmts: Sequence[ast3.stmt], ismodule: bool = False + self, + stmts: Sequence[ast3.stmt], + *, + ismodule: bool = False, + can_strip: bool = False, + is_coroutine: bool = False, ) -> list[Statement]: # A "# type: ignore" comment before the first statement of a module # ignores the whole module: @@ -490,10 +507,7 @@ def translate_stmt_list( if ignores: joined_ignores = ", ".join(ignores) self.fail( - ( - "type ignore with error code is not supported for modules; " - f'use `# mypy: disable-error-code="{joined_ignores}"`' - ), + message_registry.TYPE_IGNORE_WITH_ERRCODE_ON_MODULE.format(joined_ignores), line=min(self.type_ignores), column=0, blocker=False, @@ -502,14 +516,45 @@ def translate_stmt_list( codes.FILE.code ) block = Block(self.fix_function_overloads(self.translate_stmt_list(stmts))) + self.set_block_lines(block, stmts) mark_block_unreachable(block) return [block] + stack = self.class_and_function_stack + if self.strip_function_bodies and len(stack) == 1 and stack[0] == "F": + return [] + res: list[Statement] = [] for stmt in stmts: node = self.visit(stmt) res.append(node) + if ( + self.strip_function_bodies + and can_strip + and stack[-2:] == ["C", "F"] + and not is_possible_trivial_body(res) + ): + # We only strip method bodies if they don't assign to an attribute, as + # this may define an attribute which has an externally visible effect. + visitor = FindAttributeAssign() + for s in res: + s.accept(visitor) + if visitor.found: + break + else: + if is_coroutine: + # Yields inside an async function affect the return type and should not + # be stripped. + yield_visitor = FindYield() + for s in res: + s.accept(yield_visitor) + if yield_visitor.found: + break + else: + return [] + else: + return [] return res def translate_type_comment( @@ -567,19 +612,38 @@ def from_comp_operator(self, op: ast3.cmpop) -> str: else: return op_name - def as_block(self, stmts: list[ast3.stmt], lineno: int) -> Block | None: + def set_block_lines(self, b: Block, stmts: Sequence[ast3.stmt]) -> None: + first, last = stmts[0], stmts[-1] + b.line = first.lineno + b.column = first.col_offset + b.end_line = getattr(last, "end_lineno", None) + b.end_column = getattr(last, "end_col_offset", None) + if not b.body: + return + new_first = b.body[0] + if isinstance(new_first, (Decorator, OverloadedFuncDef)): + # Decorated function lines are different between Python versions. + # copy the normalization we do for them to block first lines. + b.line = new_first.line + b.column = new_first.column + + def as_block(self, stmts: list[ast3.stmt]) -> Block | None: b = None if stmts: b = Block(self.fix_function_overloads(self.translate_stmt_list(stmts))) - b.set_line(lineno) + self.set_block_lines(b, stmts) return b - def as_required_block(self, stmts: list[ast3.stmt], lineno: int) -> Block: + def as_required_block( + self, stmts: list[ast3.stmt], *, can_strip: bool = False, is_coroutine: bool = False + ) -> Block: assert stmts # must be non-empty - b = Block(self.fix_function_overloads(self.translate_stmt_list(stmts))) - # TODO: in most call sites line is wrong (includes first line of enclosing statement) - # TODO: also we need to set the column, and the end position here. 
- b.set_line(lineno) + b = Block( + self.fix_function_overloads( + self.translate_stmt_list(stmts, can_strip=can_strip, is_coroutine=is_coroutine) + ) + ) + self.set_block_lines(b, stmts) return b def fix_function_overloads(self, stmts: list[Statement]) -> list[Statement]: @@ -665,7 +729,9 @@ def fix_function_overloads(self, stmts: list[Statement]) -> list[Statement]: if current_overload and current_overload_name == last_if_stmt_overload_name: # Remove last stmt (IfStmt) from ret if the overload names matched # Only happens if no executable block had been found in IfStmt - skipped_if_stmts.append(cast(IfStmt, ret.pop())) + popped = ret.pop() + assert isinstance(popped, IfStmt) + skipped_if_stmts.append(popped) if current_overload and skipped_if_stmts: # Add bare IfStmt (without overloads) to ret # Required for mypy to be able to still check conditions @@ -830,9 +896,6 @@ def _is_stripped_if_stmt(self, stmt: Statement) -> bool: # For elif, IfStmt are stored recursively in else_body return self._is_stripped_if_stmt(stmt.else_body.body[0]) - def in_method_scope(self) -> bool: - return self.class_and_function_stack[-2:] == ["C", "F"] - def translate_module_id(self, id: str) -> str: """Return the actual, internal module id for a source text id.""" if id == self.options.custom_typing_module: @@ -842,11 +905,11 @@ def translate_module_id(self, id: str) -> str: def visit_Module(self, mod: ast3.Module) -> MypyFile: self.type_ignores = {} for ti in mod.type_ignores: - parsed = parse_type_ignore_tag(ti.tag) # type: ignore[attr-defined] + parsed = parse_type_ignore_tag(ti.tag) if parsed is not None: self.type_ignores[ti.lineno] = parsed else: - self.fail(INVALID_TYPE_IGNORE, ti.lineno, -1, blocker=False) + self.fail(message_registry.INVALID_TYPE_IGNORE, ti.lineno, -1, blocker=False) body = self.fix_function_overloads(self.translate_stmt_list(mod.body, ismodule=True)) return MypyFile(body, self.imports, False, self.type_ignores) @@ -867,7 +930,7 @@ def do_func_def( self, n: ast3.FunctionDef | ast3.AsyncFunctionDef, is_coroutine: bool = False ) -> FuncDef | Decorator: """Helper shared between visit_FunctionDef and visit_AsyncFunctionDef.""" - self.class_and_function_stack.append("F") + self.class_and_function_stack.append("D") no_type_check = bool( n.decorator_list and any(is_no_type_check_decorator(d) for d in n.decorator_list) ) @@ -914,11 +977,12 @@ def do_func_def( return_type = TypeConverter(self.errors, line=lineno).visit(func_type_ast.returns) # add implicit self type - if self.in_method_scope() and len(arg_types) < len(args): + in_method_scope = self.class_and_function_stack[-2:] == ["C", "D"] + if in_method_scope and len(arg_types) < len(args): arg_types.insert(0, AnyType(TypeOfAny.special_form)) except SyntaxError: stripped_type = n.type_comment.split("#", 2)[0].strip() - err_msg = f'{TYPE_COMMENT_SYNTAX_ERROR} "{stripped_type}"' + err_msg = message_registry.TYPE_COMMENT_SYNTAX_ERROR_VALUE.format(stripped_type) self.fail(err_msg, lineno, n.col_offset) if n.type_comment and n.type_comment[0] not in ["(", "#"]: self.note( @@ -938,18 +1002,20 @@ def do_func_def( func_type = None if any(arg_types) or return_type: if len(arg_types) != 1 and any(isinstance(t, EllipsisType) for t in arg_types): + self.fail(message_registry.ELLIPSIS_WITH_OTHER_TYPEARGS, lineno, n.col_offset) + elif len(arg_types) > len(arg_kinds): self.fail( - "Ellipses cannot accompany other argument types in function type signature", + message_registry.TYPE_SIGNATURE_TOO_MANY_ARGS, lineno, n.col_offset, - ) - elif len(arg_types) > 
len(arg_kinds): - self.fail( - "Type signature has too many arguments", lineno, n.col_offset, blocker=False + blocker=False, ) elif len(arg_types) < len(arg_kinds): self.fail( - "Type signature has too few arguments", lineno, n.col_offset, blocker=False + message_registry.TYPE_SIGNATURE_TOO_FEW_ARGS, + lineno, + n.col_offset, + blocker=False, ) else: func_type = CallableType( @@ -964,7 +1030,10 @@ def do_func_def( end_line = getattr(n, "end_lineno", None) end_column = getattr(n, "end_col_offset", None) - func_def = FuncDef(n.name, args, self.as_required_block(n.body, lineno), func_type) + self.class_and_function_stack.pop() + self.class_and_function_stack.append("F") + body = self.as_required_block(n.body, can_strip=True, is_coroutine=is_coroutine) + func_def = FuncDef(n.name, args, body, func_type) if isinstance(func_def.type, CallableType): # semanal.py does some in-place modifications we want to avoid func_def.unanalyzed_type = func_def.type.copy_modified() @@ -992,9 +1061,6 @@ def do_func_def( func_def.is_decorated = True func_def.deco_line = deco_line func_def.set_line(lineno, n.col_offset, end_line, end_column) - # Set the line again after we updated it (to make value same in Python 3.7/3.8) - # Note that TODOs in as_required_block() apply here as well. - func_def.body.set_line(lineno) deco = Decorator(func_def, self.translate_expr_list(n.decorator_list), var) first = n.decorator_list[0] @@ -1092,7 +1158,7 @@ def make_argument( return argument def fail_arg(self, msg: str, arg: ast3.arg) -> None: - self.fail(msg, arg.lineno, arg.col_offset) + self.fail(ErrorMessage(msg), arg.lineno, arg.col_offset) # ClassDef(identifier name, # expr* bases, @@ -1105,7 +1171,7 @@ def visit_ClassDef(self, n: ast3.ClassDef) -> ClassDef: cdef = ClassDef( n.name, - self.as_required_block(n.body, n.lineno), + self.as_required_block(n.body), None, self.translate_expr_list(n.bases), metaclass=dict(keywords).get("metaclass"), @@ -1177,8 +1243,8 @@ def visit_For(self, n: ast3.For) -> ForStmt: node = ForStmt( self.visit(n.target), self.visit(n.iter), - self.as_required_block(n.body, n.lineno), - self.as_block(n.orelse, n.lineno), + self.as_required_block(n.body), + self.as_block(n.orelse), target_type, ) return self.set_line(node, n) @@ -1189,8 +1255,8 @@ def visit_AsyncFor(self, n: ast3.AsyncFor) -> ForStmt: node = ForStmt( self.visit(n.target), self.visit(n.iter), - self.as_required_block(n.body, n.lineno), - self.as_block(n.orelse, n.lineno), + self.as_required_block(n.body), + self.as_block(n.orelse), target_type, ) node.is_async = True @@ -1199,19 +1265,14 @@ def visit_AsyncFor(self, n: ast3.AsyncFor) -> ForStmt: # While(expr test, stmt* body, stmt* orelse) def visit_While(self, n: ast3.While) -> WhileStmt: node = WhileStmt( - self.visit(n.test), - self.as_required_block(n.body, n.lineno), - self.as_block(n.orelse, n.lineno), + self.visit(n.test), self.as_required_block(n.body), self.as_block(n.orelse) ) return self.set_line(node, n) # If(expr test, stmt* body, stmt* orelse) def visit_If(self, n: ast3.If) -> IfStmt: - lineno = n.lineno node = IfStmt( - [self.visit(n.test)], - [self.as_required_block(n.body, lineno)], - self.as_block(n.orelse, lineno), + [self.visit(n.test)], [self.as_required_block(n.body)], self.as_block(n.orelse) ) return self.set_line(node, n) @@ -1221,7 +1282,7 @@ def visit_With(self, n: ast3.With) -> WithStmt: node = WithStmt( [self.visit(i.context_expr) for i in n.items], [self.visit(i.optional_vars) for i in n.items], - self.as_required_block(n.body, n.lineno), + 
self.as_required_block(n.body), target_type, ) return self.set_line(node, n) @@ -1232,7 +1293,7 @@ def visit_AsyncWith(self, n: ast3.AsyncWith) -> WithStmt: s = WithStmt( [self.visit(i.context_expr) for i in n.items], [self.visit(i.optional_vars) for i in n.items], - self.as_required_block(n.body, n.lineno), + self.as_required_block(n.body), target_type, ) s.is_async = True @@ -1249,15 +1310,15 @@ def visit_Try(self, n: ast3.Try) -> TryStmt: self.set_line(NameExpr(h.name), h) if h.name is not None else None for h in n.handlers ] types = [self.visit(h.type) for h in n.handlers] - handlers = [self.as_required_block(h.body, h.lineno) for h in n.handlers] + handlers = [self.as_required_block(h.body) for h in n.handlers] node = TryStmt( - self.as_required_block(n.body, n.lineno), + self.as_required_block(n.body), vs, types, handlers, - self.as_block(n.orelse, n.lineno), - self.as_block(n.finalbody, n.lineno), + self.as_block(n.orelse), + self.as_block(n.finalbody), ) return self.set_line(node, n) @@ -1266,15 +1327,15 @@ def visit_TryStar(self, n: TryStar) -> TryStmt: self.set_line(NameExpr(h.name), h) if h.name is not None else None for h in n.handlers ] types = [self.visit(h.type) for h in n.handlers] - handlers = [self.as_required_block(h.body, h.lineno) for h in n.handlers] + handlers = [self.as_required_block(h.body) for h in n.handlers] node = TryStmt( - self.as_required_block(n.body, n.lineno), + self.as_required_block(n.body), vs, types, handlers, - self.as_block(n.orelse, n.lineno), - self.as_block(n.finalbody, n.lineno), + self.as_block(n.orelse), + self.as_block(n.finalbody), ) node.is_star = True return self.set_line(node, n) @@ -1408,9 +1469,9 @@ def visit_Lambda(self, n: ast3.Lambda) -> LambdaExpr: body.lineno = n.body.lineno body.col_offset = n.body.col_offset - e = LambdaExpr( - self.transform_args(n.args, n.lineno), self.as_required_block([body], n.lineno) - ) + self.class_and_function_stack.append("L") + e = LambdaExpr(self.transform_args(n.args, n.lineno), self.as_required_block([body])) + self.class_and_function_stack.pop() e.set_line(n.lineno, n.col_offset) # Overrides set_line -- can't use self.set_line return e @@ -1681,7 +1742,7 @@ def visit_Match(self, n: Match) -> MatchStmt: self.visit(n.subject), [self.visit(c.pattern) for c in n.cases], [self.visit(c.guard) for c in n.cases], - [self.as_required_block(c.body, n.lineno) for c in n.cases], + [self.as_required_block(c.body) for c in n.cases], ) return self.set_line(node, n) @@ -1705,7 +1766,8 @@ def visit_MatchStar(self, n: MatchStar) -> StarredPattern: if n.name is None: node = StarredPattern(None) else: - node = StarredPattern(NameExpr(n.name)) + name = self.set_line(NameExpr(n.name), n) + node = StarredPattern(name) return self.set_line(node, n) @@ -1817,9 +1879,9 @@ def parent(self) -> AST | None: return None return self.node_stack[-2] - def fail(self, msg: str, line: int, column: int) -> None: + def fail(self, msg: ErrorMessage, line: int, column: int) -> None: if self.errors: - self.errors.report(line, column, msg, blocker=True, code=codes.SYNTAX) + self.errors.report(line, column, msg.value, blocker=True, code=msg.code) def note(self, msg: str, line: int, column: int) -> None: if self.errors: @@ -1839,7 +1901,7 @@ def visit_Call(self, e: Call) -> Type: note = "Suggestion: use {0}[...] 
instead of {0}(...)".format(constructor) return self.invalid_type(e, note=note) if not constructor: - self.fail("Expected arg constructor name", e.lineno, e.col_offset) + self.fail(message_registry.ARG_CONSTRUCTOR_NAME_EXPECTED, e.lineno, e.col_offset) name: str | None = None default_type = AnyType(TypeOfAny.special_form) @@ -1852,15 +1914,13 @@ def visit_Call(self, e: Call) -> Type: elif i == 1: name = self._extract_argument_name(arg) else: - self.fail("Too many arguments for argument constructor", f.lineno, f.col_offset) + self.fail(message_registry.ARG_CONSTRUCTOR_TOO_MANY_ARGS, f.lineno, f.col_offset) for k in e.keywords: value = k.value if k.arg == "name": if name is not None: self.fail( - '"{}" gets multiple values for keyword argument "name"'.format( - constructor - ), + message_registry.MULTIPLE_VALUES_FOR_NAME_KWARG.format(constructor), f.lineno, f.col_offset, ) @@ -1868,9 +1928,7 @@ def visit_Call(self, e: Call) -> Type: elif k.arg == "type": if typ is not default_type: self.fail( - '"{}" gets multiple values for keyword argument "type"'.format( - constructor - ), + message_registry.MULTIPLE_VALUES_FOR_TYPE_KWARG.format(constructor), f.lineno, f.col_offset, ) @@ -1879,7 +1937,7 @@ def visit_Call(self, e: Call) -> Type: typ = converted else: self.fail( - f'Unexpected argument "{k.arg}" for argument constructor', + message_registry.ARG_CONSTRUCTOR_UNEXPECTED_ARG.format(k.arg), value.lineno, value.col_offset, ) @@ -1894,7 +1952,9 @@ def _extract_argument_name(self, n: ast3.expr) -> str | None: elif isinstance(n, NameConstant) and str(n.value) == "None": return None self.fail( - f"Expected string literal for argument name, got {type(n).__name__}", self.line, 0 + message_registry.ARG_NAME_EXPECTED_STRING_LITERAL.format(type(n).__name__), + self.line, + 0, ) return None @@ -2080,3 +2140,85 @@ def stringify_name(n: AST) -> str | None: if sv is not None: return f"{sv}.{n.attr}" return None # Can't do it. + + +class FindAttributeAssign(TraverserVisitor): + """Check if an AST contains attribute assignments (e.g. self.x = 0).""" + + def __init__(self) -> None: + self.lvalue = False + self.found = False + + def visit_assignment_stmt(self, s: AssignmentStmt) -> None: + self.lvalue = True + for lv in s.lvalues: + lv.accept(self) + self.lvalue = False + + def visit_with_stmt(self, s: WithStmt) -> None: + self.lvalue = True + for lv in s.target: + if lv is not None: + lv.accept(self) + self.lvalue = False + s.body.accept(self) + + def visit_for_stmt(self, s: ForStmt) -> None: + self.lvalue = True + s.index.accept(self) + self.lvalue = False + s.body.accept(self) + if s.else_body: + s.else_body.accept(self) + + def visit_expression_stmt(self, s: ExpressionStmt) -> None: + # No need to look inside these + pass + + def visit_call_expr(self, e: CallExpr) -> None: + # No need to look inside these + pass + + def visit_index_expr(self, e: IndexExpr) -> None: + # No need to look inside these + pass + + def visit_member_expr(self, e: MemberExpr) -> None: + if self.lvalue: + self.found = True + + +class FindYield(TraverserVisitor): + """Check if an AST contains yields or yield froms.""" + + def __init__(self) -> None: + self.found = False + + def visit_yield_expr(self, e: YieldExpr) -> None: + self.found = True + + def visit_yield_from_expr(self, e: YieldFromExpr) -> None: + self.found = True + + +def is_possible_trivial_body(s: list[Statement]) -> bool: + """Could the statements form a "trivial" function body, such as 'pass'? 
+ + This mimics mypy.semanal.is_trivial_body, but this runs before + semantic analysis so some checks must be conservative. + """ + l = len(s) + if l == 0: + return False + i = 0 + if isinstance(s[0], ExpressionStmt) and isinstance(s[0].expr, StrExpr): + # Skip docstring + i += 1 + if i == l: + return True + if l > i + 1: + return False + stmt = s[i] + return isinstance(stmt, (PassStmt, RaiseStmt)) or ( + isinstance(stmt, ExpressionStmt) and isinstance(stmt.expr, EllipsisExpr) + ) diff --git a/mypy/fixup.py b/mypy/fixup.py index 7b0f5f433d72..01e4c0a716fc 100644 --- a/mypy/fixup.py +++ b/mypy/fixup.py @@ -170,7 +170,7 @@ def visit_class_def(self, c: ClassDef) -> None: if isinstance(v, TypeVarType): for value in v.values: value.accept(self.type_fixer) - v.upper_bound.accept(self.type_fixer) + v.upper_bound.accept(self.type_fixer) def visit_type_var_expr(self, tv: TypeVarExpr) -> None: for value in tv.values: diff --git a/mypy/inspections.py b/mypy/inspections.py index d99e087b93a1..cb695a80eef2 100644 --- a/mypy/inspections.py +++ b/mypy/inspections.py @@ -247,7 +247,9 @@ def expr_type(self, expression: Expression) -> tuple[str, bool]: if expr_type is None: return self.missing_type(expression), False - type_str = format_type(expr_type, verbosity=self.verbosity) + type_str = format_type( + expr_type, self.fg_manager.manager.options, verbosity=self.verbosity + ) return self.add_prefixes(type_str, expression), True def object_type(self) -> Instance: diff --git a/mypy/ipc.py b/mypy/ipc.py index f07616df0fd0..21ef61918de5 100644 --- a/mypy/ipc.py +++ b/mypy/ipc.py @@ -169,7 +169,6 @@ def __exit__( class IPCServer(IPCBase): - BUFFER_SIZE: Final = 2**16 def __init__(self, name: str, timeout: float | None = None) -> None: diff --git a/mypy/literals.py b/mypy/literals.py index 9d91cf728b06..53ba559c56bb 100644 --- a/mypy/literals.py +++ b/mypy/literals.py @@ -139,6 +139,16 @@ def literal_hash(e: Expression) -> Key | None: return e.accept(_hasher) +def extract_var_from_literal_hash(key: Key) -> Var | None: + """If key refers to a Var node, return it. + + Return None otherwise. + """ + if len(key) == 2 and key[0] == "Var" and isinstance(key[1], Var): + return key[1] + return None + + class _Hasher(ExpressionVisitor[Optional[Key]]): def visit_int_expr(self, e: IntExpr) -> Key: return ("Literal", e.value) diff --git a/mypy/main.py b/mypy/main.py index 3f5e02ec3f79..943030f396c5 100644 --- a/mypy/main.py +++ b/mypy/main.py @@ -734,6 +734,14 @@ def add_invertible_flag( help="Disable strict Optional checks (inverse: --strict-optional)", ) + add_invertible_flag( + "--force-uppercase-builtins", default=False, help=argparse.SUPPRESS, group=none_group + ) + + add_invertible_flag( + "--force-union-syntax", default=False, help=argparse.SUPPRESS, group=none_group + ) + lint_group = parser.add_argument_group( title="Configuring warnings", description="Detect code that is sound but redundant or problematic.", @@ -1490,7 +1498,7 @@ def read_types_packages_to_install(cache_dir: str, after_run: bool) -> list[str] # No missing stubs. return [] with open(fnam) as f: - return [line.strip() for line in f.readlines()] + return [line.strip() for line in f] def install_types( diff --git a/mypy/meet.py b/mypy/meet.py index 3214b4b43975..29c4d3663503 100644 --- a/mypy/meet.py +++ b/mypy/meet.py @@ -702,7 +702,7 @@ def visit_unpack_type(self, t: UnpackType) -> ProperType: def visit_parameters(self, t: Parameters) -> ProperType: # TODO: is this the right variance? 
- if isinstance(self.s, Parameters) or isinstance(self.s, CallableType): + if isinstance(self.s, (Parameters, CallableType)): if len(t.arg_types) != len(self.s.arg_types): return self.default(self.s) return t.copy_modified( @@ -828,13 +828,13 @@ def visit_tuple_type(self, t: TupleType) -> ProperType: def visit_typeddict_type(self, t: TypedDictType) -> ProperType: if isinstance(self.s, TypedDictType): - for (name, l, r) in self.s.zip(t): + for name, l, r in self.s.zip(t): if not is_equivalent(l, r) or (name in t.required_keys) != ( name in self.s.required_keys ): return self.default(self.s) item_list: list[tuple[str, Type]] = [] - for (item_name, s_item_type, t_item_type) in self.s.zipall(t): + for item_name, s_item_type, t_item_type in self.s.zipall(t): if s_item_type is not None: item_list.append((item_name, s_item_type)) else: diff --git a/mypy/memprofile.py b/mypy/memprofile.py index 20e18c3c0bf2..48c0cb5ce022 100644 --- a/mypy/memprofile.py +++ b/mypy/memprofile.py @@ -103,7 +103,7 @@ def visit(o: object) -> None: objs.append(o) seen.add(id(o)) - for obj in objs[:]: + for obj in objs.copy(): if type(obj) is FakeInfo: # Processing these would cause a crash. continue diff --git a/mypy/message_registry.py b/mypy/message_registry.py index e00aca2869bd..b32edc06571a 100644 --- a/mypy/message_registry.py +++ b/mypy/message_registry.py @@ -43,7 +43,9 @@ def with_additional_msg(self, info: str) -> ErrorMessage: RETURN_VALUE_EXPECTED: Final = ErrorMessage("Return value expected", codes.RETURN_VALUE) NO_RETURN_EXPECTED: Final = ErrorMessage("Return statement in function which does not return") INVALID_EXCEPTION: Final = ErrorMessage("Exception must be derived from BaseException") -INVALID_EXCEPTION_TYPE: Final = ErrorMessage("Exception type must be derived from BaseException") +INVALID_EXCEPTION_TYPE: Final = ErrorMessage( + "Exception type must be derived from BaseException (or be a tuple of exception classes)" +) INVALID_EXCEPTION_GROUP: Final = ErrorMessage( "Exception type in except* cannot derive from BaseExceptionGroup" ) @@ -82,7 +84,7 @@ def with_additional_msg(self, info: str) -> ErrorMessage: INCOMPATIBLE_TYPES_IN_CAPTURE: Final = ErrorMessage("Incompatible types in capture pattern") MUST_HAVE_NONE_RETURN_TYPE: Final = ErrorMessage('The return type of "{}" must be None') TUPLE_INDEX_OUT_OF_RANGE: Final = ErrorMessage("Tuple index out of range") -INVALID_SLICE_INDEX: Final = ErrorMessage("Slice index must be an integer or None") +INVALID_SLICE_INDEX: Final = ErrorMessage("Slice index must be an integer, SupportsIndex or None") CANNOT_INFER_LAMBDA_TYPE: Final = ErrorMessage("Cannot infer type of lambda") CANNOT_ACCESS_INIT: Final = ( 'Accessing "__init__" on an instance is unsound, since instance.__init__ could be from' @@ -131,7 +133,7 @@ def with_additional_msg(self, info: str) -> ErrorMessage: "Expected TypedDict key to be string literal" ) MALFORMED_ASSERT: Final = ErrorMessage("Assertion is always true, perhaps remove parentheses?") -DUPLICATE_TYPE_SIGNATURES: Final = "Function has duplicate type signatures" +DUPLICATE_TYPE_SIGNATURES: Final = ErrorMessage("Function has duplicate type signatures") DESCRIPTOR_SET_NOT_CALLABLE: Final = ErrorMessage("{}.__set__ is not callable") DESCRIPTOR_GET_NOT_CALLABLE: Final = "{}.__get__ is not callable" MODULE_LEVEL_GETATTRIBUTE: Final = ErrorMessage( @@ -268,9 +270,49 @@ def with_additional_msg(self, info: str) -> ErrorMessage: ) CLASS_PATTERN_DUPLICATE_KEYWORD_PATTERN: Final = 'Duplicate keyword pattern "{}"' CLASS_PATTERN_UNKNOWN_KEYWORD: 
Final = 'Class "{}" has no attribute "{}"' +CLASS_PATTERN_CLASS_OR_STATIC_METHOD: Final = "Cannot have both classmethod and staticmethod" MULTIPLE_ASSIGNMENTS_IN_PATTERN: Final = 'Multiple assignments to name "{}" in pattern' CANNOT_MODIFY_MATCH_ARGS: Final = 'Cannot assign to "__match_args__"' DATACLASS_FIELD_ALIAS_MUST_BE_LITERAL: Final = ( '"alias" argument to dataclass field must be a string literal' ) + +# fastparse +FAILED_TO_MERGE_OVERLOADS: Final = ErrorMessage( + "Condition can't be inferred, unable to merge overloads" +) +TYPE_IGNORE_WITH_ERRCODE_ON_MODULE: Final = ErrorMessage( + "type ignore with error code is not supported for modules; " + 'use `# mypy: disable-error-code="{}"`', + codes.SYNTAX, +) +INVALID_TYPE_IGNORE: Final = ErrorMessage('Invalid "type: ignore" comment', codes.SYNTAX) +TYPE_COMMENT_SYNTAX_ERROR_VALUE: Final = ErrorMessage( + 'syntax error in type comment "{}"', codes.SYNTAX +) +ELLIPSIS_WITH_OTHER_TYPEARGS: Final = ErrorMessage( + "Ellipses cannot accompany other argument types in function type signature", codes.SYNTAX +) +TYPE_SIGNATURE_TOO_MANY_ARGS: Final = ErrorMessage( + "Type signature has too many arguments", codes.SYNTAX +) +TYPE_SIGNATURE_TOO_FEW_ARGS: Final = ErrorMessage( + "Type signature has too few arguments", codes.SYNTAX +) +ARG_CONSTRUCTOR_NAME_EXPECTED: Final = ErrorMessage("Expected arg constructor name", codes.SYNTAX) +ARG_CONSTRUCTOR_TOO_MANY_ARGS: Final = ErrorMessage( + "Too many arguments for argument constructor", codes.SYNTAX +) +MULTIPLE_VALUES_FOR_NAME_KWARG: Final = ErrorMessage( + '"{}" gets multiple values for keyword argument "name"', codes.SYNTAX +) +MULTIPLE_VALUES_FOR_TYPE_KWARG: Final = ErrorMessage( + '"{}" gets multiple values for keyword argument "type"', codes.SYNTAX +) +ARG_CONSTRUCTOR_UNEXPECTED_ARG: Final = ErrorMessage( + 'Unexpected argument "{}" for argument constructor', codes.SYNTAX +) +ARG_NAME_EXPECTED_STRING_LITERAL: Final = ErrorMessage( + "Expected string literal for argument name, got {}", codes.SYNTAX +) diff --git a/mypy/messages.py b/mypy/messages.py index ba2508033790..b40b32c487bd 100644 --- a/mypy/messages.py +++ b/mypy/messages.py @@ -19,6 +19,7 @@ from typing import Any, Callable, Collection, Iterable, Iterator, List, Sequence, cast from typing_extensions import Final +import mypy.typeops from mypy import errorcodes as codes, message_registry from mypy.erasetype import erase_type from mypy.errorcodes import ErrorCode @@ -51,6 +52,7 @@ reverse_builtin_aliases, ) from mypy.operators import op_methods, op_methods_to_symbols +from mypy.options import Options from mypy.subtypes import ( IS_CLASS_OR_STATIC, IS_CLASSVAR, @@ -80,6 +82,7 @@ TypeAliasType, TypedDictType, TypeOfAny, + TypeStrVisitor, TypeType, TypeVarTupleType, TypeVarType, @@ -134,6 +137,14 @@ "typing._SpecialForm": "typing-medium.pyi", } +UNSUPPORTED_NUMBERS_TYPES: Final = { + "numbers.Number", + "numbers.Complex", + "numbers.Real", + "numbers.Rational", + "numbers.Integral", +} + class MessageBuilder: """Helper class for reporting type checker error messages with parameters. 
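The fastparse changes above replace ad-hoc message strings with `ErrorMessage` constants from `mypy.message_registry`, so each message carries its error code centrally and gets formatted via `.format()` before being reported. A rough, self-contained sketch of that pattern, using simplified stand-ins rather than mypy's actual classes (here `code` is just a string):

```python
from __future__ import annotations

from dataclasses import dataclass


@dataclass(frozen=True)
class ErrorMessage:
    value: str
    code: str = "misc"

    def format(self, *args: object) -> "ErrorMessage":
        # Formatting returns a new message so the registry constant
        # itself stays immutable.
        return ErrorMessage(self.value.format(*args), code=self.code)


# A registry constant pairs the text with its error code in one place.
TYPE_COMMENT_SYNTAX_ERROR_VALUE = ErrorMessage(
    'syntax error in type comment "{}"', code="syntax"
)


def fail(msg: ErrorMessage, line: int, column: int) -> None:
    # A reporter only needs the unpacked pieces, mirroring how
    # ASTConverter.fail() forwards msg.value and msg.code to errors.report().
    print(f"{line}:{column}: error: {msg.value} [{msg.code}]")


fail(TYPE_COMMENT_SYNTAX_ERROR_VALUE.format("int]"), line=3, column=0)
```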
@@ -157,6 +168,7 @@ class MessageBuilder: def __init__(self, errors: Errors, modules: dict[str, MypyFile]) -> None: self.errors = errors + self.options = errors.options self.modules = modules self._disable_type_names = [] @@ -366,7 +378,7 @@ def has_no_attr( self.fail(f'Member "{member}" is not assignable', context) elif member == "__contains__": self.fail( - f"Unsupported right operand type for in ({format_type(original_type)})", + f"Unsupported right operand type for in ({format_type(original_type, self.options)})", context, code=codes.OPERATOR, ) @@ -379,19 +391,19 @@ def has_no_attr( break elif member == "__neg__": self.fail( - f"Unsupported operand type for unary - ({format_type(original_type)})", + f"Unsupported operand type for unary - ({format_type(original_type, self.options)})", context, code=codes.OPERATOR, ) elif member == "__pos__": self.fail( - f"Unsupported operand type for unary + ({format_type(original_type)})", + f"Unsupported operand type for unary + ({format_type(original_type, self.options)})", context, code=codes.OPERATOR, ) elif member == "__invert__": self.fail( - f"Unsupported operand type for ~ ({format_type(original_type)})", + f"Unsupported operand type for ~ ({format_type(original_type, self.options)})", context, code=codes.OPERATOR, ) @@ -401,13 +413,13 @@ def has_no_attr( if isinstance(original_type, CallableType) and original_type.is_type_obj(): self.fail( "The type {} is not generic and not indexable".format( - format_type(original_type) + format_type(original_type, self.options) ), context, ) else: self.fail( - f"Value of type {format_type(original_type)} is not indexable", + f"Value of type {format_type(original_type, self.options)} is not indexable", context, code=codes.INDEX, ) @@ -415,7 +427,7 @@ def has_no_attr( # Indexed set. self.fail( "Unsupported target for indexed assignment ({})".format( - format_type(original_type) + format_type(original_type, self.options) ), context, code=codes.INDEX, @@ -429,7 +441,7 @@ def has_no_attr( self.fail("Cannot call function of unknown type", context, code=codes.OPERATOR) else: self.fail( - message_registry.NOT_CALLABLE.format(format_type(original_type)), + message_registry.NOT_CALLABLE.format(format_type(original_type, self.options)), context, code=codes.OPERATOR, ) @@ -449,7 +461,7 @@ def has_no_attr( and not module_symbol_table[member].module_public ): self.fail( - f"{format_type(original_type, module_names=True)} does not " + f"{format_type(original_type, self.options, module_names=True)} does not " f'explicitly export attribute "{member}"', context, code=codes.ATTR_DEFINED, @@ -471,7 +483,7 @@ def has_no_attr( if matches: self.fail( '{} has no attribute "{}"; maybe {}?{}'.format( - format_type(original_type), + format_type(original_type, self.options), member, pretty_seq(matches, "or"), extra, @@ -483,7 +495,7 @@ def has_no_attr( if not failed: self.fail( '{} has no attribute "{}"{}'.format( - format_type(original_type), member, extra + format_type(original_type, self.options), member, extra ), context, code=codes.ATTR_DEFINED, @@ -491,7 +503,9 @@ def has_no_attr( elif isinstance(original_type, UnionType): # The checker passes "object" in lieu of "None" for attribute # checks, so we manually convert it back. 
- typ_format, orig_type_format = format_type_distinctly(typ, original_type) + typ_format, orig_type_format = format_type_distinctly( + typ, original_type, options=self.options + ) if typ_format == '"object"' and any( type(item) == NoneType for item in original_type.items ): @@ -506,8 +520,8 @@ def has_no_attr( elif isinstance(original_type, TypeVarType): bound = get_proper_type(original_type.upper_bound) if isinstance(bound, UnionType): - typ_fmt, bound_fmt = format_type_distinctly(typ, bound) - original_type_fmt = format_type(original_type) + typ_fmt, bound_fmt = format_type_distinctly(typ, bound, options=self.options) + original_type_fmt = format_type(original_type, self.options) self.fail( "Item {} of the upper bound {} of type variable {} has no " 'attribute "{}"{}'.format( @@ -516,6 +530,14 @@ def has_no_attr( context, code=codes.UNION_ATTR, ) + else: + self.fail( + '{} has no attribute "{}"{}'.format( + format_type(original_type, self.options), member, extra + ), + context, + code=codes.ATTR_DEFINED, + ) return AnyType(TypeOfAny.from_error) def unsupported_operand_types( @@ -535,13 +557,13 @@ def unsupported_operand_types( if isinstance(left_type, str): left_str = left_type else: - left_str = format_type(left_type) + left_str = format_type(left_type, self.options) right_str = "" if isinstance(right_type, str): right_str = right_type else: - right_str = format_type(right_type) + right_str = format_type(right_type, self.options) if self.are_type_names_disabled(): msg = f"Unsupported operand types for {op} (likely involving Union)" @@ -553,11 +575,11 @@ def unsupported_left_operand(self, op: str, typ: Type, context: Context) -> None if self.are_type_names_disabled(): msg = f"Unsupported left operand type for {op} (some union)" else: - msg = f"Unsupported left operand type for {op} ({format_type(typ)})" + msg = f"Unsupported left operand type for {op} ({format_type(typ, self.options)})" self.fail(msg, context, code=codes.OPERATOR) def not_callable(self, typ: Type, context: Context) -> Type: - self.fail(message_registry.NOT_CALLABLE.format(format_type(typ)), context) + self.fail(message_registry.NOT_CALLABLE.format(format_type(typ, self.options)), context) return AnyType(TypeOfAny.from_error) def untyped_function_call(self, callee: CallableType, context: Context) -> Type: @@ -597,7 +619,7 @@ def incompatible_argument( if callee_name is not None: name = callee_name if callee.bound_args and callee.bound_args[0] is not None: - base = format_type(callee.bound_args[0]) + base = format_type(callee.bound_args[0], self.options) else: base = extract_type(name) @@ -630,7 +652,7 @@ def incompatible_argument( return codes.INDEX else: arg_type_str, callee_type_str = format_type_distinctly( - arg_type, callee.arg_types[n - 1] + arg_type, callee.arg_types[n - 1], options=self.options ) info = ( f" (expression has type {arg_type_str}, " @@ -651,32 +673,34 @@ def incompatible_argument( name = callee_name[1:-1] n -= 1 actual_type_str, expected_type_str = format_type_distinctly( - arg_type, callee.arg_types[0] + arg_type, callee.arg_types[0], options=self.options ) msg = "{} item {} has incompatible type {}; expected {}".format( name.title(), n, actual_type_str, expected_type_str ) code = codes.LIST_ITEM - elif callee_name == "": + elif callee_name == "" and isinstance( + get_proper_type(callee.arg_types[n - 1]), TupleType + ): name = callee_name[1:-1] n -= 1 key_type, value_type = cast(TupleType, arg_type).items - expected_key_type, expected_value_type = cast(TupleType, callee.arg_types[0]).items + 
expected_key_type, expected_value_type = cast(TupleType, callee.arg_types[n]).items # don't increase verbosity unless there is need to do so if is_subtype(key_type, expected_key_type): - key_type_str = format_type(key_type) - expected_key_type_str = format_type(expected_key_type) + key_type_str = format_type(key_type, self.options) + expected_key_type_str = format_type(expected_key_type, self.options) else: key_type_str, expected_key_type_str = format_type_distinctly( - key_type, expected_key_type + key_type, expected_key_type, options=self.options ) if is_subtype(value_type, expected_value_type): - value_type_str = format_type(value_type) - expected_value_type_str = format_type(expected_value_type) + value_type_str = format_type(value_type, self.options) + expected_value_type_str = format_type(expected_value_type, self.options) else: value_type_str, expected_value_type_str = format_type_distinctly( - value_type, expected_value_type + value_type, expected_value_type, options=self.options ) msg = "{} entry {} has incompatible type {}: {}; expected {}: {}".format( @@ -688,23 +712,33 @@ def incompatible_argument( expected_value_type_str, ) code = codes.DICT_ITEM + elif callee_name == "": + value_type_str, expected_value_type_str = format_type_distinctly( + arg_type, callee.arg_types[n - 1], options=self.options + ) + msg = "Unpacked dict entry {} has incompatible type {}; expected {}".format( + n - 1, value_type_str, expected_value_type_str + ) + code = codes.DICT_ITEM elif callee_name == "": actual_type_str, expected_type_str = map( - strip_quotes, format_type_distinctly(arg_type, callee.arg_types[0]) + strip_quotes, + format_type_distinctly(arg_type, callee.arg_types[0], options=self.options), ) msg = "List comprehension has incompatible type List[{}]; expected List[{}]".format( actual_type_str, expected_type_str ) elif callee_name == "": actual_type_str, expected_type_str = map( - strip_quotes, format_type_distinctly(arg_type, callee.arg_types[0]) + strip_quotes, + format_type_distinctly(arg_type, callee.arg_types[0], options=self.options), ) msg = "Set comprehension has incompatible type Set[{}]; expected Set[{}]".format( actual_type_str, expected_type_str ) elif callee_name == "": actual_type_str, expected_type_str = format_type_distinctly( - arg_type, callee.arg_types[n - 1] + arg_type, callee.arg_types[n - 1], options=self.options ) msg = ( "{} expression in dictionary comprehension has incompatible type {}; " @@ -712,7 +746,7 @@ def incompatible_argument( ).format("Key" if n == 1 else "Value", actual_type_str, expected_type_str) elif callee_name == "": actual_type_str, expected_type_str = format_type_distinctly( - arg_type, callee.arg_types[0] + arg_type, callee.arg_types[0], options=self.options ) msg = "Generator has incompatible item type {}; expected {}".format( actual_type_str, expected_type_str @@ -726,7 +760,7 @@ def incompatible_argument( except IndexError: # Varargs callees expected_type = callee.arg_types[-1] arg_type_str, expected_type_str = format_type_distinctly( - arg_type, expected_type, bare=True + arg_type, expected_type, bare=True, options=self.options ) if arg_kind == ARG_STAR: arg_type_str = "*" + arg_type_str @@ -750,7 +784,7 @@ def incompatible_argument( arg_name = callee.arg_names[m - 1] assert arg_name is not None arg_type_str, expected_type_str = format_type_distinctly( - arg_type.items[arg_name], expected_type, bare=True + arg_type.items[arg_name], expected_type, bare=True, options=self.options ) arg_label = f'"{arg_name}"' if isinstance(outer_context, 
IndexExpr) and isinstance( @@ -776,6 +810,7 @@ def incompatible_argument( for type in get_proper_types(expected_types): if isinstance(arg_type, Instance) and isinstance(type, Instance): notes = append_invariance_notes(notes, arg_type, type) + notes = append_numbers_notes(notes, arg_type, type) object_type = get_proper_type(object_type) if isinstance(object_type, TypedDictType): code = codes.TYPEDDICT_ITEM @@ -860,7 +895,9 @@ def invalid_index_type( *, code: ErrorCode, ) -> None: - index_str, expected_str = format_type_distinctly(index_type, expected_type) + index_str, expected_str = format_type_distinctly( + index_type, expected_type, options=self.options + ) self.fail( "Invalid index type {} for {}; expected type {}".format( index_str, base_str, expected_str @@ -1030,7 +1067,7 @@ def no_variant_matches_arguments( name_str = f" of {name}" else: name_str = "" - arg_types_str = ", ".join(format_type(arg) for arg in arg_types) + arg_types_str = ", ".join(format_type(arg, self.options) for arg in arg_types) num_args = len(arg_types) if num_args == 0: self.fail( @@ -1053,7 +1090,7 @@ def no_variant_matches_arguments( self.note(f"Possible overload variant{plural_s(len(overload.items))}:", context, code=code) for item in overload.items: - self.note(pretty_callable(item), context, offset=4, code=code) + self.note(pretty_callable(item, self.options), context, offset=4, code=code) def wrong_number_values_to_unpack( self, provided: int, expected: int, context: Context @@ -1074,7 +1111,7 @@ def unpacking_strings_disallowed(self, context: Context) -> None: self.fail("Unpacking a string is disallowed", context) def type_not_iterable(self, type: Type, context: Context) -> None: - self.fail(f"{format_type(type)} object is not iterable", context) + self.fail(f"{format_type(type, self.options)} object is not iterable", context) def possible_missing_await(self, context: Context) -> None: self.note('Maybe you forgot to use "await"?', context) @@ -1157,7 +1194,11 @@ def pretty_callable_or_overload( if decorator is not None: self.note(decorator, context, offset=offset, allow_dups=allow_dups, code=code) self.note( - pretty_callable(tp), context, offset=offset, allow_dups=allow_dups, code=code + pretty_callable(tp, self.options), + context, + offset=offset, + allow_dups=allow_dups, + code=code, ) elif isinstance(tp, Overloaded): self.pretty_overload( @@ -1181,7 +1222,7 @@ def argument_incompatible_with_supertype( secondary_context: Context, ) -> None: target = self.override_target(name, name_in_supertype, supertype) - arg_type_in_supertype_f = format_type_bare(arg_type_in_supertype) + arg_type_in_supertype_f = format_type_bare(arg_type_in_supertype, self.options) self.fail( 'Argument {} of "{}" is incompatible with {}; ' 'supertype defines the argument type as "{}"'.format( @@ -1233,7 +1274,9 @@ def return_type_incompatible_with_supertype( context: Context, ) -> None: target = self.override_target(name, name_in_supertype, supertype) - override_str, original_str = format_type_distinctly(override, original) + override_str, original_str = format_type_distinctly( + override, original, options=self.options + ) self.fail( 'Return type {} of "{}" incompatible with return type {} in {}'.format( override_str, name, original_str, target @@ -1268,6 +1311,12 @@ def could_not_infer_type_arguments( callee_name = callable_name(callee_type) if callee_name is not None and n > 0: self.fail(f"Cannot infer type argument {n} of {callee_name}", context) + if callee_name == "": + # Invariance in key type causes more of these errors 
than we would want. + self.note( + "Try assigning the literal to a variable annotated as dict[, ]", + context, + ) else: self.fail("Cannot infer function type argument", context) @@ -1280,7 +1329,7 @@ def invalid_keyword_var_arg(self, typ: Type, is_mapping: bool, context: Context) self.fail("Keywords must be strings", context) else: self.fail( - f"Argument after ** must be a mapping, not {format_type(typ)}", + f"Argument after ** must be a mapping, not {format_type(typ, self.options)}", context, code=codes.ARG_TYPE, ) @@ -1301,7 +1350,7 @@ def first_argument_for_super_must_be_type(self, actual: Type, context: Context) # object. type_str = "a non-type instance" else: - type_str = format_type(actual) + type_str = format_type(actual, self.options) self.fail( f'Argument 1 for "super" must be a type object; got {type_str}', context, @@ -1373,7 +1422,7 @@ def cannot_determine_type_in_base(self, name: str, base: str, context: Context) def no_formal_self(self, name: str, item: CallableType, context: Context) -> None: self.fail( 'Attribute function "%s" with type %s does not accept self argument' - % (name, format_type(item)), + % (name, format_type(item, self.options)), context, ) @@ -1383,7 +1432,7 @@ def incompatible_self_argument( kind = "class attribute function" if is_classmethod else "attribute function" self.fail( 'Invalid self argument %s to %s "%s" with type %s' - % (format_type(arg), kind, name, format_type(sig)), + % (format_type(arg, self.options), kind, name, format_type(sig, self.options)), context, ) @@ -1476,7 +1525,7 @@ def incompatible_typevar_value( ) -> None: self.fail( message_registry.INCOMPATIBLE_TYPEVAR_VALUE.format( - typevar_name, callable_name(callee) or "function", format_type(typ) + typevar_name, callable_name(callee) or "function", format_type(typ, self.options) ), context, code=codes.TYPE_VAR, @@ -1486,7 +1535,7 @@ def dangerous_comparison(self, left: Type, right: Type, kind: str, ctx: Context) left_str = "element" if kind == "container" else "left operand" right_str = "container item" if kind == "container" else "right operand" message = "Non-overlapping {} check ({} type: {}, {} type: {})" - left_typ, right_typ = format_type_distinctly(left, right) + left_typ, right_typ = format_type_distinctly(left, right, options=self.options) self.fail( message.format(kind, left_str, left_typ, right_str, right_typ), ctx, @@ -1544,7 +1593,9 @@ def warn_both_operands_are_from_unions(self, context: Context) -> None: def warn_operand_was_from_union(self, side: str, original: Type, context: Context) -> None: self.note( - f"{side} operand is of type {format_type(original)}", context, code=codes.OPERATOR + f"{side} operand is of type {format_type(original, self.options)}", + context, + code=codes.OPERATOR, ) def operator_method_signatures_overlap( @@ -1558,7 +1609,10 @@ def operator_method_signatures_overlap( self.fail( 'Signatures of "{}" of "{}" and "{}" of {} ' "are unsafely overlapping".format( - reverse_method, reverse_class.name, forward_method, format_type(forward_class) + reverse_method, + reverse_class.name, + forward_method, + format_type(forward_class, self.options), ), context, ) @@ -1570,20 +1624,28 @@ def signatures_incompatible(self, method: str, other_method: str, context: Conte self.fail(f'Signatures of "{method}" and "{other_method}" are incompatible', context) def yield_from_invalid_operand_type(self, expr: Type, context: Context) -> Type: - text = format_type(expr) if format_type(expr) != "object" else expr + text = ( + format_type(expr, self.options) + if 
format_type(expr, self.options) != "object" + else expr + ) self.fail(f'"yield from" can\'t be applied to {text}', context) return AnyType(TypeOfAny.from_error) def invalid_signature(self, func_type: Type, context: Context) -> None: - self.fail(f"Invalid signature {format_type(func_type)}", context) + self.fail(f"Invalid signature {format_type(func_type, self.options)}", context) def invalid_signature_for_special_method( self, func_type: Type, context: Context, method_name: str ) -> None: - self.fail(f'Invalid signature {format_type(func_type)} for "{method_name}"', context) + self.fail( + f'Invalid signature {format_type(func_type, self.options)} for "{method_name}"', + context, + ) def reveal_type(self, typ: Type, context: Context) -> None: - self.note(f'Revealed type is "{typ}"', context) + visitor = TypeStrVisitor(options=self.options) + self.note(f'Revealed type is "{typ.accept(visitor)}"', context) def reveal_locals(self, type_map: dict[str, Type | None], context: Context) -> None: # To ensure that the output is predictable on Python < 3.6, @@ -1592,27 +1654,30 @@ def reveal_locals(self, type_map: dict[str, Type | None], context: Context) -> N if sorted_locals: self.note("Revealed local types are:", context) for k, v in sorted_locals.items(): - self.note(f" {k}: {v}", context) + visitor = TypeStrVisitor(options=self.options) + self.note(f" {k}: {v.accept(visitor) if v is not None else None}", context) else: self.note("There are no locals to reveal", context) def unsupported_type_type(self, item: Type, context: Context) -> None: - self.fail(f'Cannot instantiate type "Type[{format_type_bare(item)}]"', context) + self.fail( + f'Cannot instantiate type "Type[{format_type_bare(item, self.options)}]"', context + ) def redundant_cast(self, typ: Type, context: Context) -> None: - self.fail(f"Redundant cast to {format_type(typ)}", context, code=codes.REDUNDANT_CAST) - - def assert_type_fail(self, source_type: Type, target_type: Type, context: Context) -> None: self.fail( - f"Expression is of type {format_type(source_type)}, " - f"not {format_type(target_type)}", + f"Redundant cast to {format_type(typ, self.options)}", context, - code=codes.ASSERT_TYPE, + code=codes.REDUNDANT_CAST, ) + def assert_type_fail(self, source_type: Type, target_type: Type, context: Context) -> None: + (source, target) = format_type_distinctly(source_type, target_type, options=self.options) + self.fail(f"Expression is of type {source}, not {target}", context, code=codes.ASSERT_TYPE) + def unimported_type_becomes_any(self, prefix: str, typ: Type, ctx: Context) -> None: self.fail( - f"{prefix} becomes {format_type(typ)} due to an unfollowed import", + f"{prefix} becomes {format_type(typ, self.options)} due to an unfollowed import", ctx, code=codes.NO_ANY_UNIMPORTED, ) @@ -1677,7 +1742,7 @@ def unexpected_typeddict_keys( if missing: self.fail( "Missing {} for TypedDict {}".format( - format_key_list(missing, short=True), format_type(typ) + format_key_list(missing, short=True), format_type(typ, self.options) ), context, code=codes.TYPEDDICT_ITEM, @@ -1686,7 +1751,7 @@ def unexpected_typeddict_keys( if extra: self.fail( "Extra {} for TypedDict {}".format( - format_key_list(extra, short=True), format_type(typ) + format_key_list(extra, short=True), format_type(typ, self.options) ), context, code=codes.TYPEDDICT_UNKNOWN_KEY, @@ -1732,7 +1797,9 @@ def typeddict_key_not_found( else: err_code = codes.TYPEDDICT_UNKNOWN_KEY if setitem else codes.TYPEDDICT_ITEM self.fail( - f'TypedDict {format_type(typ)} has no key "{item_name}"', 
context, code=err_code + f'TypedDict {format_type(typ, self.options)} has no key "{item_name}"', + context, + code=err_code, ) matches = best_matches(item_name, typ.items.keys(), n=3) if matches: @@ -1741,7 +1808,7 @@ def typeddict_key_not_found( ) def typeddict_context_ambiguous(self, types: list[TypedDictType], context: Context) -> None: - formatted_types = ", ".join(list(format_type_distinctly(*types))) + formatted_types = ", ".join(list(format_type_distinctly(*types, options=self.options))) self.fail( f"Type of TypedDict is ambiguous, none of ({formatted_types}) matches cleanly", context ) @@ -1753,7 +1820,8 @@ def typeddict_key_cannot_be_deleted( self.fail(f'TypedDict key "{item_name}" cannot be deleted', context) else: self.fail( - f'Key "{item_name}" of TypedDict {format_type(typ)} cannot be deleted', context + f'Key "{item_name}" of TypedDict {format_type(typ, self.options)} cannot be deleted', + context, ) def typeddict_setdefault_arguments_inconsistent( @@ -1761,7 +1829,7 @@ def typeddict_setdefault_arguments_inconsistent( ) -> None: msg = 'Argument 2 to "setdefault" of "TypedDict" has incompatible type {}; expected {}' self.fail( - msg.format(format_type(default), format_type(expected)), + msg.format(format_type(default, self.options), format_type(expected, self.options)), context, code=codes.TYPEDDICT_ITEM, ) @@ -1774,11 +1842,13 @@ def disallowed_any_type(self, typ: Type, context: Context) -> None: if isinstance(typ, AnyType): message = 'Expression has type "Any"' else: - message = f'Expression type contains "Any" (has type {format_type(typ)})' + message = f'Expression type contains "Any" (has type {format_type(typ, self.options)})' self.fail(message, context) def incorrectly_returning_any(self, typ: Type, context: Context) -> None: - message = f"Returning Any from function declared to return {format_type(typ)}" + message = ( + f"Returning Any from function declared to return {format_type(typ, self.options)}" + ) self.fail(message, context, code=codes.NO_ANY_RETURN) def incorrect__exit__return(self, context: Context) -> None: @@ -1805,7 +1875,8 @@ def untyped_decorated_function(self, typ: Type, context: Context) -> None: self.fail("Function is untyped after decorator transformation", context) else: self.fail( - f'Type of decorated function contains type "Any" ({format_type(typ)})', context + f'Type of decorated function contains type "Any" ({format_type(typ, self.options)})', + context, ) def typed_function_untyped_decorator(self, func_name: str, context: Context) -> None: @@ -1824,14 +1895,14 @@ def bad_proto_variance( def concrete_only_assign(self, typ: Type, context: Context) -> None: self.fail( - f"Can only assign concrete classes to a variable of type {format_type(typ)}", + f"Can only assign concrete classes to a variable of type {format_type(typ, self.options)}", context, code=codes.TYPE_ABSTRACT, ) def concrete_only_call(self, typ: Type, context: Context) -> None: self.fail( - f"Only concrete class can be given where {format_type(typ)} is expected", + f"Only concrete class can be given where {format_type(typ, self.options)} is expected", context, code=codes.TYPE_ABSTRACT, ) @@ -1857,7 +1928,8 @@ def note_call( ) -> None: self.note( '"{}.__call__" has type {}'.format( - format_type_bare(subtype), format_type(call, verbosity=1) + format_type_bare(subtype, self.options), + format_type(call, self.options, verbosity=1), ), context, code=code, @@ -1999,7 +2071,7 @@ def report_protocol_problems( or not subtype.type.defn.type_vars or not supertype.type.defn.type_vars ): - 
type_name = format_type(subtype, module_names=True) + type_name = format_type(subtype, self.options, module_names=True) self.note(f"Following member(s) of {type_name} have conflicts:", context, code=code) for name, got, exp in conflict_types[:MAX_ITEMS]: exp = get_proper_type(exp) @@ -2008,7 +2080,9 @@ def report_protocol_problems( got, (CallableType, Overloaded) ): self.note( - "{}: expected {}, got {}".format(name, *format_type_distinctly(exp, got)), + "{}: expected {}, got {}".format( + name, *format_type_distinctly(exp, got, options=self.options) + ), context, offset=OFFSET, code=code, @@ -2017,7 +2091,7 @@ def report_protocol_problems( self.note("Expected:", context, offset=OFFSET, code=code) if isinstance(exp, CallableType): self.note( - pretty_callable(exp, skip_self=class_obj or is_module), + pretty_callable(exp, self.options, skip_self=class_obj or is_module), context, offset=2 * OFFSET, code=code, @@ -2030,7 +2104,7 @@ def report_protocol_problems( self.note("Got:", context, offset=OFFSET, code=code) if isinstance(got, CallableType): self.note( - pretty_callable(got, skip_self=class_obj or is_module), + pretty_callable(got, self.options, skip_self=class_obj or is_module), context, offset=2 * OFFSET, code=code, @@ -2115,7 +2189,7 @@ def pretty_overload( self.note(decorator, context, offset=offset, allow_dups=allow_dups, code=code) self.note( - pretty_callable(item, skip_self=skip_self), + pretty_callable(item, self.options, skip_self=skip_self), context, offset=offset, allow_dups=allow_dups, @@ -2188,11 +2262,14 @@ def format_long_tuple_type(self, typ: TupleType) -> str: """Format very long tuple type using an ellipsis notation""" item_cnt = len(typ.items) if item_cnt > 10: - return "Tuple[{}, {}, ... <{} more items>]".format( - format_type_bare(typ.items[0]), format_type_bare(typ.items[1]), str(item_cnt - 2) + return "{}[{}, {}, ... <{} more items>]".format( + "tuple" if self.options.use_lowercase_names() else "Tuple", + format_type_bare(typ.items[0], self.options), + format_type_bare(typ.items[1], self.options), + str(item_cnt - 2), ) else: - return format_type_bare(typ) + return format_type_bare(typ, self.options) def generate_incompatible_tuple_error( self, @@ -2203,13 +2280,15 @@ def generate_incompatible_tuple_error( ) -> None: """Generate error message for individual incompatible tuple pairs""" error_cnt = 0 - notes = [] # List[str] + notes: list[str] = [] for i, (lhs_t, rhs_t) in enumerate(zip(lhs_types, rhs_types)): if not is_subtype(lhs_t, rhs_t): if error_cnt < 3: notes.append( "Expression tuple item {} has type {}; {} expected; ".format( - str(i), format_type(rhs_t), format_type(lhs_t) + str(i), + format_type(rhs_t, self.options), + format_type(lhs_t, self.options), ) ) error_cnt += 1 @@ -2269,7 +2348,6 @@ def format_callable_args( arg_strings = [] for arg_name, arg_type, arg_kind in zip(arg_names, arg_types, arg_kinds): if arg_kind == ARG_POS and arg_name is None or verbosity == 0 and arg_kind.is_positional(): - arg_strings.append(format(arg_type)) else: constructor = ARG_CONSTRUCTOR_NAMES[arg_kind] @@ -2282,7 +2360,11 @@ def format_callable_args( def format_type_inner( - typ: Type, verbosity: int, fullnames: set[str] | None, module_names: bool = False + typ: Type, + verbosity: int, + options: Options, + fullnames: set[str] | None, + module_names: bool = False, ) -> str: """ Convert a type to a relatively short string suitable for error messages. 
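To make the new `Options`-aware formatting concrete, here is a standalone sketch of the two rendering decisions the patch threads through the `format_type*` helpers; `FormatOptions`, `render_optional`, and `render_tuple` are illustrative stand-ins, not mypy's own `Options` API, and they only mirror the version checks shown later in this patch.

```python
from __future__ import annotations

from dataclasses import dataclass


@dataclass
class FormatOptions:
    # Illustrative stand-in for mypy.options.Options, modelling only the two
    # decisions that matter for message formatting.
    python_version: tuple[int, int]
    force_uppercase_builtins: bool = False
    force_union_syntax: bool = False

    def use_lowercase_names(self) -> bool:
        return self.python_version >= (3, 9) and not self.force_uppercase_builtins

    def use_or_syntax(self) -> bool:
        return self.python_version >= (3, 10) and not self.force_union_syntax


def render_optional(inner: str, opts: FormatOptions) -> str:
    # Optional[X] versus X | None, as decided by use_or_syntax().
    return f"{inner} | None" if opts.use_or_syntax() else f"Optional[{inner}]"


def render_tuple(items: list[str], opts: FormatOptions) -> str:
    # Tuple[...] versus tuple[...], as decided by use_lowercase_names().
    name = "tuple" if opts.use_lowercase_names() else "Tuple"
    return f"{name}[{', '.join(items)}]"


print(render_optional("int", FormatOptions((3, 8))))         # Optional[int]
print(render_optional("int", FormatOptions((3, 11))))        # int | None
print(render_tuple(["int", "str"], FormatOptions((3, 11))))  # tuple[int, str]
```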
@@ -2293,11 +2375,17 @@ def format_type_inner( """ def format(typ: Type) -> str: - return format_type_inner(typ, verbosity, fullnames) + return format_type_inner(typ, verbosity, options, fullnames) def format_list(types: Sequence[Type]) -> str: return ", ".join(format(typ) for typ in types) + def format_union(types: Sequence[Type]) -> str: + formatted = [format(typ) for typ in types if format(typ) != "None"] + if any(format(typ) == "None" for typ in types): + formatted.append("None") + return " | ".join(formatted) + def format_literal_value(typ: LiteralType) -> str: if typ.is_enum_literal(): underlying_type = format(typ.fallback) @@ -2330,7 +2418,10 @@ def format_literal_value(typ: LiteralType) -> str: if itype.type.fullname == "typing._SpecialForm": # This is not a real type but used for some typing-related constructs. return "" - if verbosity >= 2 or (fullnames and itype.type.fullname in fullnames): + if itype.type.fullname in reverse_builtin_aliases and not options.use_lowercase_names(): + alias = reverse_builtin_aliases[itype.type.fullname] + base_str = alias.split(".")[-1] + elif verbosity >= 2 or (fullnames and itype.type.fullname in fullnames): base_str = itype.type.fullname else: base_str = itype.type.name @@ -2339,11 +2430,7 @@ def format_literal_value(typ: LiteralType) -> str: return base_str elif itype.type.fullname == "builtins.tuple": item_type_str = format(itype.args[0]) - return f"Tuple[{item_type_str}, ...]" - elif itype.type.fullname in reverse_builtin_aliases: - alias = reverse_builtin_aliases[itype.type.fullname] - alias = alias.split(".")[-1] - return f"{alias}[{format_list(itype.args)}]" + return f"{'tuple' if options.use_lowercase_names() else 'Tuple'}[{item_type_str}, ...]" else: # There are type arguments. Convert the arguments to strings. return f"{base_str}[{format_list(itype.args)}]" @@ -2369,14 +2456,17 @@ def format_literal_value(typ: LiteralType) -> str: # Prefer the name of the fallback class (if not tuple), as it's more informative. if typ.partial_fallback.type.fullname != "builtins.tuple": return format(typ.partial_fallback) - s = f"Tuple[{format_list(typ.items)}]" + if options.use_lowercase_names(): + s = f"tuple[{format_list(typ.items)}]" + else: + s = f"Tuple[{format_list(typ.items)}]" return s elif isinstance(typ, TypedDictType): # If the TypedDictType is named, return the name if not typ.is_anonymous(): return format(typ.fallback) items = [] - for (item_name, item_type) in typ.items.items(): + for item_name, item_type in typ.items.items(): modifier = "" if item_name in typ.required_keys else "?" 
items.append(f"{item_name!r}{modifier}: {format(item_type)}") s = f"TypedDict({{{', '.join(items)}}})" @@ -2394,9 +2484,17 @@ def format_literal_value(typ: LiteralType) -> str: ) if len(union_items) == 1 and isinstance(get_proper_type(union_items[0]), NoneType): - return f"Optional[{literal_str}]" + return ( + f"{literal_str} | None" + if options.use_or_syntax() + else f"Optional[{literal_str}]" + ) elif union_items: - return f"Union[{format_list(union_items)}, {literal_str}]" + return ( + f"{literal_str} | {format_union(union_items)}" + if options.use_or_syntax() + else f"Union[{format_list(union_items)}, {literal_str}]" + ) else: return literal_str else: @@ -2407,10 +2505,17 @@ def format_literal_value(typ: LiteralType) -> str: ) if print_as_optional: rest = [t for t in typ.items if not isinstance(get_proper_type(t), NoneType)] - return f"Optional[{format(rest[0])}]" + return ( + f"{format(rest[0])} | None" + if options.use_or_syntax() + else f"Optional[{format(rest[0])}]" + ) else: - s = f"Union[{format_list(typ.items)}]" - + s = ( + format_union(typ.items) + if options.use_or_syntax() + else f"Union[{format_list(typ.items)}]" + ) return s elif isinstance(typ, NoneType): return "None" @@ -2424,7 +2529,8 @@ def format_literal_value(typ: LiteralType) -> str: else: return "" elif isinstance(typ, TypeType): - return f"Type[{format(typ.item)}]" + type_name = "type" if options.use_lowercase_names() else "Type" + return f"{type_name}[{format(typ.item)}]" elif isinstance(typ, FunctionLike): func = typ if func.is_type_obj(): @@ -2451,7 +2557,7 @@ def format_literal_value(typ: LiteralType) -> str: # error messages. return "overloaded function" elif isinstance(typ, UnboundType): - return str(typ) + return typ.accept(TypeStrVisitor(options=options)) elif isinstance(typ, Parameters): args = format_callable_args(typ.arg_types, typ.arg_kinds, typ.arg_names, format, verbosity) return f"[{args}]" @@ -2508,7 +2614,9 @@ def find_type_overlaps(*types: Type) -> set[str]: return overlaps -def format_type(typ: Type, verbosity: int = 0, module_names: bool = False) -> str: +def format_type( + typ: Type, options: Options, verbosity: int = 0, module_names: bool = False +) -> str: """ Convert a type to a relatively short string suitable for error messages. @@ -2519,10 +2627,12 @@ def format_type(typ: Type, verbosity: int = 0, module_names: bool = False) -> st modification of the formatted string is required, callers should use format_type_bare. """ - return quote_type_string(format_type_bare(typ, verbosity, module_names)) + return quote_type_string(format_type_bare(typ, options, verbosity, module_names)) -def format_type_bare(typ: Type, verbosity: int = 0, module_names: bool = False) -> str: +def format_type_bare( + typ: Type, options: Options, verbosity: int = 0, module_names: bool = False +) -> str: """ Convert a type to a relatively short string suitable for error messages. @@ -2534,10 +2644,10 @@ def format_type_bare(typ: Type, verbosity: int = 0, module_names: bool = False) instead. (The caller may want to use quote_type_string after processing has happened, to maintain consistent quoting in messages.) """ - return format_type_inner(typ, verbosity, find_type_overlaps(typ), module_names) + return format_type_inner(typ, verbosity, options, find_type_overlaps(typ), module_names) -def format_type_distinctly(*types: Type, bare: bool = False) -> tuple[str, ...]: +def format_type_distinctly(*types: Type, options: Options, bare: bool = False) -> tuple[str, ...]: """Jointly format types to distinct strings. 
Increase the verbosity of the type strings until they become distinct @@ -2552,7 +2662,8 @@ def format_type_distinctly(*types: Type, bare: bool = False) -> tuple[str, ...]: overlapping = find_type_overlaps(*types) for verbosity in range(2): strs = [ - format_type_inner(type, verbosity=verbosity, fullnames=overlapping) for type in types + format_type_inner(type, verbosity=verbosity, options=options, fullnames=overlapping) + for type in types ] if len(set(strs)) == len(strs): break @@ -2572,7 +2683,7 @@ def pretty_class_or_static_decorator(tp: CallableType) -> str | None: return None -def pretty_callable(tp: CallableType, skip_self: bool = False) -> str: +def pretty_callable(tp: CallableType, options: Options, skip_self: bool = False) -> str: """Return a nice easily-readable representation of a callable type. For example: def [T <: int] f(self, x: int, y: T) -> None @@ -2598,7 +2709,7 @@ def [T <: int] f(self, x: int, y: T) -> None name = tp.arg_names[i] if name: s += name + ": " - type_str = format_type_bare(tp.arg_types[i]) + type_str = format_type_bare(tp.arg_types[i], options) if tp.arg_kinds[i] == ARG_STAR2 and tp.unpack_kwargs: type_str = f"Unpack[{type_str}]" s += type_str @@ -2640,9 +2751,9 @@ def [T <: int] f(self, x: int, y: T) -> None s += " -> " if tp.type_guard is not None: - s += f"TypeGuard[{format_type_bare(tp.type_guard)}]" + s += f"TypeGuard[{format_type_bare(tp.type_guard, options)}]" else: - s += format_type_bare(tp.ret_type) + s += format_type_bare(tp.ret_type, options) if tp.variables: tvars = [] @@ -2653,11 +2764,12 @@ def [T <: int] f(self, x: int, y: T) -> None isinstance(upper_bound, Instance) and upper_bound.type.fullname != "builtins.object" ): - tvars.append(f"{tvar.name} <: {format_type_bare(upper_bound)}") + tvars.append(f"{tvar.name} <: {format_type_bare(upper_bound, options)}") elif tvar.values: tvars.append( "{} in ({})".format( - tvar.name, ", ".join([format_type_bare(tp) for tp in tvar.values]) + tvar.name, + ", ".join([format_type_bare(tp, options) for tp in tvar.values]), ) ) else: @@ -2705,7 +2817,7 @@ def get_conflict_protocol_types( continue supertype = find_member(member, right, left) assert supertype is not None - subtype = find_member(member, left, left, class_obj=class_obj) + subtype = mypy.typeops.get_protocol_member(left, member, class_obj) if not subtype: continue is_compat = is_subtype(subtype, supertype, ignore_pos_arg_names=True) @@ -2775,7 +2887,7 @@ def strip_quotes(s: str) -> str: def format_string_list(lst: list[str]) -> str: - assert len(lst) > 0 + assert lst if len(lst) == 1: return lst[0] elif len(lst) <= 5: @@ -2902,6 +3014,17 @@ def append_invariance_notes( return notes +def append_numbers_notes( + notes: list[str], arg_type: Instance, expected_type: Instance +) -> list[str]: + """Explain if an unsupported type from "numbers" is used in a subtype check.""" + if expected_type.type.fullname in UNSUPPORTED_NUMBERS_TYPES: + notes.append('Types from "numbers" aren\'t supported for static type checking') + notes.append("See https://peps.python.org/pep-0484/#the-numeric-tower") + notes.append("Consider using a protocol instead, such as typing.SupportsFloat") + return notes + + def make_inferred_type_note( context: Context, subtype: Type, supertype: Type, supertype_str: str ) -> str: diff --git a/mypy/modulefinder.py b/mypy/modulefinder.py index 265d76ed5bb6..e0406bffcc7b 100644 --- a/mypy/modulefinder.py +++ b/mypy/modulefinder.py @@ -751,12 +751,19 @@ def get_search_dirs(python_executable: str | None) -> tuple[list[str], list[str] else: # 
Use subprocess to get the package directory of given Python # executable + env = {**dict(os.environ), "PYTHONSAFEPATH": "1"} try: sys_path, site_packages = ast.literal_eval( subprocess.check_output( - [python_executable, pyinfo.__file__, "getsearchdirs"], stderr=subprocess.PIPE + [python_executable, pyinfo.__file__, "getsearchdirs"], + env=env, + stderr=subprocess.PIPE, ).decode() ) + except subprocess.CalledProcessError as err: + print(err.stderr) + print(err.stdout) + raise except OSError as err: reason = os.strerror(err.errno) raise CompileError( diff --git a/mypy/mro.py b/mypy/mro.py index cc9f88a9d045..f34f3fa0c46d 100644 --- a/mypy/mro.py +++ b/mypy/mro.py @@ -44,7 +44,7 @@ def linearize_hierarchy( def merge(seqs: list[list[TypeInfo]]) -> list[TypeInfo]: - seqs = [s[:] for s in seqs] + seqs = [s.copy() for s in seqs] result: list[TypeInfo] = [] while True: seqs = [s for s in seqs if s] diff --git a/mypy/nodes.py b/mypy/nodes.py index e4d8514ad6e2..ea68fa7abf23 100644 --- a/mypy/nodes.py +++ b/mypy/nodes.py @@ -20,11 +20,12 @@ Union, cast, ) -from typing_extensions import Final, TypeAlias as _TypeAlias +from typing_extensions import Final, TypeAlias as _TypeAlias, TypeGuard from mypy_extensions import trait import mypy.strconv +from mypy.options import Options from mypy.util import short_type from mypy.visitor import ExpressionVisitor, NodeVisitor, StatementVisitor @@ -173,7 +174,7 @@ def set_line( def get_nongen_builtins(python_version: tuple[int, int]) -> dict[str, str]: - # After 3.9 with pep585 generic builtins are allowed. + # After 3.9 with pep585 generic builtins are allowed return _nongen_builtins if python_version < (3, 9) else {} @@ -190,11 +191,16 @@ class Node(Context): __slots__ = () def __str__(self) -> str: - ans = self.accept(mypy.strconv.StrConv()) + ans = self.accept(mypy.strconv.StrConv(options=Options())) if ans is None: return repr(self) return ans + def str_with_options(self, options: Options) -> str: + ans = self.accept(mypy.strconv.StrConv(options=options)) + assert ans + return ans + def accept(self, visitor: NodeVisitor[T]) -> T: raise RuntimeError("Not implemented") @@ -281,6 +287,7 @@ class MypyFile(SymbolNode): "names", "imports", "ignored_lines", + "unreachable_lines", "is_stub", "is_cache_skeleton", "is_partial_stub_package", @@ -307,6 +314,8 @@ class MypyFile(SymbolNode): # If the value is empty, ignore all errors; otherwise, the list contains all # error codes to ignore. ignored_lines: dict[int, list[str]] + # Lines that are statically unreachable (e.g. due to platform/version check). + unreachable_lines: set[int] # Is this file represented by a stub file (.pyi)? is_stub: bool # Is this loaded from the cache and thus missing the actual body of the file? @@ -339,6 +348,7 @@ def __init__( self.ignored_lines = ignored_lines else: self.ignored_lines = {} + self.unreachable_lines = set() self.path = "" self.is_stub = False @@ -556,7 +566,7 @@ def __init__(self, items: list[OverloadPart]) -> None: self.items = items self.unanalyzed_items = items.copy() self.impl = None - if len(items) > 0: + if items: # TODO: figure out how to reliably set end position (we don't know the impl here). self.set_line(items[0].line, items[0].column) self.is_final = False @@ -989,7 +999,7 @@ def __init__(self, name: str, type: mypy.types.Type | None = None) -> None: # If constant value is a simple literal, # store the literal value (unboxed) for the benefit of # tools like mypyc. 
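For context on the `final_value` hunk that follows: widening the stored literal to include `complex` means module-level finals like the ones below can have their unboxed values recorded for tools such as mypyc (a minimal, assumed example).

```python
from typing import Final

# Literal values whose unboxed form can be stored on the Var node;
# complex now joins int, float, bool, and str.
ROTATION: Final = 1j
SCALE: Final = 2.5
NAME: Final = "demo"
```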
- self.final_value: int | float | bool | str | None = None + self.final_value: int | float | complex | bool | str | None = None # Where the value was set (only for class attributes) self.final_unset_in_class = False self.final_set_in_init = False @@ -1635,6 +1645,10 @@ def accept(self, visitor: ExpressionVisitor[T]) -> T: return visitor.visit_str_expr(self) +def is_StrExpr_list(seq: list[Expression]) -> TypeGuard[list[StrExpr]]: + return all(isinstance(item, StrExpr) for item in seq) + + class BytesExpr(Expression): """Bytes literal""" @@ -2179,7 +2193,8 @@ def name(self) -> str: def expr(self) -> Expression: """Return the expression (the body) of the lambda.""" - ret = cast(ReturnStmt, self.body.body[-1]) + ret = self.body.body[-1] + assert isinstance(ret, ReturnStmt) expr = ret.expr assert expr is not None # lambda can't have empty body return expr @@ -3078,7 +3093,7 @@ def protocol_members(self) -> list[str]: for base in self.mro[:-1]: # we skip "object" since everyone implements it if base.is_protocol: for name, node in base.names.items(): - if isinstance(node.node, (TypeAlias, TypeVarExpr)): + if isinstance(node.node, (TypeAlias, TypeVarExpr, MypyFile)): # These are auxiliary definitions (and type aliases are prohibited). continue members.add(name) @@ -3178,22 +3193,21 @@ def __str__(self) -> str: This includes the most important information about the type. """ - return self.dump() + options = Options() + return self.dump( + str_conv=mypy.strconv.StrConv(options=options), + type_str_conv=mypy.types.TypeStrVisitor(options=options), + ) def dump( - self, - str_conv: mypy.strconv.StrConv | None = None, - type_str_conv: mypy.types.TypeStrVisitor | None = None, + self, str_conv: mypy.strconv.StrConv, type_str_conv: mypy.types.TypeStrVisitor ) -> str: """Return a string dump of the contents of the TypeInfo.""" - if not str_conv: - str_conv = mypy.strconv.StrConv() + base: str = "" def type_str(typ: mypy.types.Type) -> str: - if type_str_conv: - return typ.accept(type_str_conv) - return str(typ) + return typ.accept(type_str_conv) head = "TypeInfo" + str_conv.format_id(self) if self.bases: @@ -3321,7 +3335,6 @@ def deserialize(cls, data: JsonDict) -> TypeInfo: class FakeInfo(TypeInfo): - __slots__ = ("msg",) # types.py defines a single instance of this class, called types.NOT_READY. @@ -3910,7 +3923,7 @@ def serialize(self) -> JsonDict: "eq_default": self.eq_default, "order_default": self.order_default, "kw_only_default": self.kw_only_default, - "frozen_only_default": self.frozen_default, + "frozen_default": self.frozen_default, "field_specifiers": list(self.field_specifiers), } diff --git a/mypy/options.py b/mypy/options.py index 077e0d4ed90a..45591597ba69 100644 --- a/mypy/options.py +++ b/mypy/options.py @@ -3,6 +3,7 @@ import pprint import re import sys +import sysconfig from typing import Any, Callable, Dict, Mapping, Pattern from typing_extensions import Final @@ -86,7 +87,15 @@ def __init__(self) -> None: # The executable used to search for PEP 561 packages. If this is None, # then mypy does not search for PEP 561 packages. self.python_executable: str | None = sys.executable - self.platform = sys.platform + + # When cross compiling to emscripten, we need to rely on MACHDEP because + # sys.platform is the host build platform, not emscripten. 
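A runnable sketch of the platform detection being added here: `sysconfig`'s `MACHDEP` reflects the build target, so it is preferred when it reports emscripten; `effective_platform` is just an illustrative helper name.

```python
import sys
import sysconfig


def effective_platform() -> str:
    # When cross-compiling to emscripten, MACHDEP is "emscripten" while
    # sys.platform still reports the host build platform.
    machdep = sysconfig.get_config_var("MACHDEP")
    if machdep == "emscripten":
        return machdep
    return sys.platform


print(effective_platform())  # e.g. "linux", "darwin", "win32", or "emscripten"
```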
+ MACHDEP = sysconfig.get_config_var("MACHDEP") + if MACHDEP == "emscripten": + self.platform = MACHDEP + else: + self.platform = sys.platform + self.custom_typing_module: str | None = None self.custom_typeshed_dir: str | None = None # The abspath() version of the above, we compute it once as an optimization. @@ -346,6 +355,19 @@ def __init__(self) -> None: self.disable_bytearray_promotion = False self.disable_memoryview_promotion = False + self.force_uppercase_builtins = False + self.force_union_syntax = False + + def use_lowercase_names(self) -> bool: + if self.python_version >= (3, 9): + return not self.force_uppercase_builtins + return False + + def use_or_syntax(self) -> bool: + if self.python_version >= (3, 10): + return not self.force_union_syntax + return False + # To avoid breaking plugin compatibility, keep providing new_semantic_analyzer @property def new_semantic_analyzer(self) -> bool: diff --git a/mypy/partially_defined.py b/mypy/partially_defined.py index 085384989705..47cbd671f168 100644 --- a/mypy/partially_defined.py +++ b/mypy/partially_defined.py @@ -153,7 +153,7 @@ def done(self) -> BranchState: all_vars.update(b.must_be_defined) # For the rest of the things, we only care about branches that weren't skipped. non_skipped_branches = [b for b in self.branches if not b.skipped] - if len(non_skipped_branches) > 0: + if non_skipped_branches: must_be_defined = non_skipped_branches[0].must_be_defined for b in non_skipped_branches[1:]: must_be_defined.intersection_update(b.must_be_defined) @@ -660,7 +660,7 @@ def visit_import(self, o: Import) -> None: else: # When you do `import x.y`, only `x` becomes defined. names = mod.split(".") - if len(names) > 0: + if names: # `names` should always be nonempty, but we don't want mypy # to crash on invalid code. self.tracker.record_definition(names[0]) diff --git a/mypy/plugins/attrs.py b/mypy/plugins/attrs.py index 6fda965ade8b..e4328d764be6 100644 --- a/mypy/plugins/attrs.py +++ b/mypy/plugins/attrs.py @@ -2,12 +2,19 @@ from __future__ import annotations -from typing import Iterable, List, cast +from collections import defaultdict +from functools import reduce +from typing import Iterable, List, Mapping, cast from typing_extensions import Final, Literal import mypy.plugin # To avoid circular imports. +from mypy.applytype import apply_generic_arguments +from mypy.checker import TypeChecker from mypy.errorcodes import LITERAL_REQ +from mypy.expandtype import expand_type, expand_type_by_instance from mypy.exprtotype import TypeTranslationError, expr_to_unanalyzed_type +from mypy.meet import meet_types +from mypy.messages import format_type_bare from mypy.nodes import ( ARG_NAMED, ARG_NAMED_OPT, @@ -21,6 +28,7 @@ Decorator, Expression, FuncDef, + IndexExpr, JsonDict, LambdaExpr, ListExpr, @@ -32,6 +40,7 @@ SymbolTableNode, TempNode, TupleExpr, + TypeApplication, TypeInfo, TypeVarExpr, Var, @@ -47,7 +56,7 @@ deserialize_and_fixup_type, ) from mypy.server.trigger import make_wildcard_trigger -from mypy.typeops import make_simplified_union, map_type_from_supertype +from mypy.typeops import get_type_vars, make_simplified_union, map_type_from_supertype from mypy.types import ( AnyType, CallableType, @@ -56,10 +65,12 @@ LiteralType, NoneType, Overloaded, + ProperType, TupleType, Type, TypeOfAny, TypeVarType, + UninhabitedType, UnionType, get_proper_type, ) @@ -77,13 +88,15 @@ SELF_TVAR_NAME: Final = "_AT" MAGIC_ATTR_NAME: Final = "__attrs_attrs__" MAGIC_ATTR_CLS_NAME_TEMPLATE: Final = "__{}_AttrsAttributes__" # The tuple subclass pattern. 
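Background for the `ATTRS_INIT_NAME` constant introduced next: with recent versions of attrs, a class created with `init=False` still gets its initializer generated under the name `__attrs_init__`, which is what the plugin now refers to by name. A small runtime illustration (requires the `attrs` package):

```python
import attr


@attr.s(init=False, auto_attribs=True)
class Point:
    x: int
    y: int


p = Point.__new__(Point)
p.__attrs_init__(1, 2)  # the generated initializer, named __attrs_init__
print(p.x, p.y)         # 1 2
```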
+ATTRS_INIT_NAME: Final = "__attrs_init__" class Converter: """Holds information about a `converter=` argument""" - def __init__(self, init_type: Type | None = None) -> None: + def __init__(self, init_type: Type | None = None, ret_type: Type | None = None) -> None: self.init_type = init_type + self.ret_type = ret_type class Attribute: @@ -112,11 +125,20 @@ def __init__( def argument(self, ctx: mypy.plugin.ClassDefContext) -> Argument: """Return this attribute as an argument to __init__.""" assert self.init - init_type: Type | None = None if self.converter: if self.converter.init_type: init_type = self.converter.init_type + if init_type and self.init_type and self.converter.ret_type: + # The converter return type should be the same type as the attribute type. + # Copy type vars from attr type to converter. + converter_vars = get_type_vars(self.converter.ret_type) + init_vars = get_type_vars(self.init_type) + if converter_vars and len(converter_vars) == len(init_vars): + variables = { + binder.id: arg for binder, arg in zip(converter_vars, init_vars) + } + init_type = expand_type(init_type, variables) else: ctx.api.fail("Cannot determine __init__ type from converter", self.context) init_type = AnyType(TypeOfAny.from_error) @@ -282,7 +304,7 @@ def attr_class_maker_callback( it will add an __init__ or all the compare methods. For frozen=True it will turn the attrs into properties. - See http://www.attrs.org/en/stable/how-does-it-work.html for information on how attrs works. + See https://www.attrs.org/en/stable/how-does-it-work.html for information on how attrs works. If this returns False, some required metadata was not ready yet and we need another pass. @@ -330,7 +352,7 @@ def attr_class_maker_callback( adder = MethodAdder(ctx) # If __init__ is not being generated, attrs still generates it as __attrs_init__ instead. - _add_init(ctx, attributes, adder, "__init__" if init else "__attrs_init__") + _add_init(ctx, attributes, adder, "__init__" if init else ATTRS_INIT_NAME) if order: _add_order(ctx, adder) if frozen: @@ -511,7 +533,6 @@ def _cleanup_decorator(stmt: Decorator, attr_map: dict[str, Attribute]) -> None: and isinstance(func_decorator.expr, NameExpr) and func_decorator.expr.name in attr_map ): - if func_decorator.name == "default": attr_map[func_decorator.expr.name].has_default = True @@ -650,6 +671,26 @@ def _parse_converter( from mypy.checkmember import type_object_type # To avoid import cycle. converter_type = type_object_type(converter_expr.node, ctx.api.named_type) + elif ( + isinstance(converter_expr, IndexExpr) + and isinstance(converter_expr.analyzed, TypeApplication) + and isinstance(converter_expr.base, RefExpr) + and isinstance(converter_expr.base.node, TypeInfo) + ): + # The converter is a generic type. + from mypy.checkmember import type_object_type # To avoid import cycle. + + converter_type = type_object_type(converter_expr.base.node, ctx.api.named_type) + if isinstance(converter_type, CallableType): + converter_type = apply_generic_arguments( + converter_type, + converter_expr.analyzed.types, + ctx.api.msg.incompatible_typevar_value, + converter_type, + ) + else: + converter_type = None + if isinstance(converter_expr, LambdaExpr): # TODO: should we send a fail if converter_expr.min_args > 1? 
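The converter handling above and below is aimed at cases like the following, where a generic converter is applied to a field whose type mentions the class's own type variable; this is an assumed use case with made-up names (`Box`, `to_list`), not code from the diff, and it requires the `attrs` package.

```python
from typing import Generic, Iterable, List, TypeVar

import attr

T = TypeVar("T")
W = TypeVar("W")


def to_list(values: Iterable[W]) -> List[W]:
    return list(values)


@attr.s(auto_attribs=True)
class Box(Generic[T]):
    # The converter's return type List[W] lines up with the field type
    # List[T], so the __init__ argument should come out as Iterable[T].
    items: List[T] = attr.ib(converter=to_list)


box: Box[int] = Box(items=(1, 2, 3))
print(box.items)  # [1, 2, 3]
```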
converter_info.init_type = AnyType(TypeOfAny.unannotated) @@ -668,6 +709,8 @@ def _parse_converter( converter_type = get_proper_type(converter_type) if isinstance(converter_type, CallableType) and converter_type.arg_types: converter_info.init_type = converter_type.arg_types[0] + if not is_attr_converters_optional: + converter_info.ret_type = converter_type.ret_type elif isinstance(converter_type, Overloaded): types: list[Type] = [] for item in converter_type.items: @@ -888,3 +931,132 @@ def add_method( """ self_type = self_type if self_type is not None else self.self_type add_method(self.ctx, method_name, args, ret_type, self_type, tvd) + + +def _get_attrs_init_type(typ: Instance) -> CallableType | None: + """ + If `typ` refers to an attrs class, gets the type of its initializer method. + """ + magic_attr = typ.type.get(MAGIC_ATTR_NAME) + if magic_attr is None or not magic_attr.plugin_generated: + return None + init_method = typ.type.get_method("__init__") or typ.type.get_method(ATTRS_INIT_NAME) + if not isinstance(init_method, FuncDef) or not isinstance(init_method.type, CallableType): + return None + return init_method.type + + +def _fail_not_attrs_class(ctx: mypy.plugin.FunctionSigContext, t: Type, parent_t: Type) -> None: + t_name = format_type_bare(t, ctx.api.options) + if parent_t is t: + msg = ( + f'Argument 1 to "evolve" has a variable type "{t_name}" not bound to an attrs class' + if isinstance(t, TypeVarType) + else f'Argument 1 to "evolve" has incompatible type "{t_name}"; expected an attrs class' + ) + else: + pt_name = format_type_bare(parent_t, ctx.api.options) + msg = ( + f'Argument 1 to "evolve" has type "{pt_name}" whose item "{t_name}" is not bound to an attrs class' + if isinstance(t, TypeVarType) + else f'Argument 1 to "evolve" has incompatible type "{pt_name}" whose item "{t_name}" is not an attrs class' + ) + + ctx.api.fail(msg, ctx.context) + + +def _get_expanded_attr_types( + ctx: mypy.plugin.FunctionSigContext, + typ: ProperType, + display_typ: ProperType, + parent_typ: ProperType, +) -> list[Mapping[str, Type]] | None: + """ + For a given type, determine what attrs classes it can be: for each class, return the field types. + For generic classes, the field types are expanded. + If the type contains Any or a non-attrs type, returns None; in the latter case, also reports an error. + """ + if isinstance(typ, AnyType): + return None + elif isinstance(typ, UnionType): + ret: list[Mapping[str, Type]] | None = [] + for item in typ.relevant_items(): + item = get_proper_type(item) + item_types = _get_expanded_attr_types(ctx, item, item, parent_typ) + if ret is not None and item_types is not None: + ret += item_types + else: + ret = None # but keep iterating to emit all errors + return ret + elif isinstance(typ, TypeVarType): + return _get_expanded_attr_types( + ctx, get_proper_type(typ.upper_bound), display_typ, parent_typ + ) + elif isinstance(typ, Instance): + init_func = _get_attrs_init_type(typ) + if init_func is None: + _fail_not_attrs_class(ctx, display_typ, parent_typ) + return None + init_func = expand_type_by_instance(init_func, typ) + # [1:] to skip the self argument of AttrClass.__init__ + field_names = cast(List[str], init_func.arg_names[1:]) + field_types = init_func.arg_types[1:] + return [dict(zip(field_names, field_types))] + else: + _fail_not_attrs_class(ctx, display_typ, parent_typ) + return None + + +def _meet_fields(types: list[Mapping[str, Type]]) -> Mapping[str, Type]: + """ + "Meets" the fields of a list of attrs classes, i.e. 
for each field, its new type will be the lower bound. + """ + field_to_types = defaultdict(list) + for fields in types: + for name, typ in fields.items(): + field_to_types[name].append(typ) + + return { + name: get_proper_type(reduce(meet_types, f_types)) + if len(f_types) == len(types) + else UninhabitedType() + for name, f_types in field_to_types.items() + } + + +def evolve_function_sig_callback(ctx: mypy.plugin.FunctionSigContext) -> CallableType: + """ + Generates a signature for the 'attr.evolve' function that's specific to the call site + and dependent on the type of the first argument. + """ + if len(ctx.args) != 2: + # Ideally the name and context should be callee's, but we don't have it in FunctionSigContext. + ctx.api.fail(f'"{ctx.default_signature.name}" has unexpected type annotation', ctx.context) + return ctx.default_signature + + if len(ctx.args[0]) != 1: + return ctx.default_signature # leave it to the type checker to complain + + inst_arg = ctx.args[0][0] + + # + assert isinstance(ctx.api, TypeChecker) + inst_type = ctx.api.expr_checker.accept(inst_arg) + # + + inst_type = get_proper_type(inst_type) + inst_type_str = format_type_bare(inst_type, ctx.api.options) + + attr_types = _get_expanded_attr_types(ctx, inst_type, inst_type, inst_type) + if attr_types is None: + return ctx.default_signature + fields = _meet_fields(attr_types) + + return CallableType( + arg_names=["inst", *fields.keys()], + arg_kinds=[ARG_POS] + [ARG_NAMED_OPT] * len(fields), + arg_types=[inst_type, *fields.values()], + ret_type=inst_type, + fallback=ctx.default_signature.fallback, + name=f"{ctx.default_signature.name} of {inst_type_str}", + ) diff --git a/mypy/plugins/common.py b/mypy/plugins/common.py index 0acf3e3a6369..67796ef15cf3 100644 --- a/mypy/plugins/common.py +++ b/mypy/plugins/common.py @@ -1,5 +1,6 @@ from __future__ import annotations +from mypy.argmap import map_actuals_to_formals from mypy.fixup import TypeFixer from mypy.nodes import ( ARG_POS, @@ -13,6 +14,7 @@ Expression, FuncDef, JsonDict, + NameExpr, Node, PassStmt, RefExpr, @@ -22,6 +24,7 @@ from mypy.plugin import CheckerPluginInterface, ClassDefContext, SemanticAnalyzerPluginInterface from mypy.semanal_shared import ( ALLOW_INCOMPATIBLE_OVERRIDE, + parse_bool, require_bool_literal_argument, set_callable_name, ) @@ -29,13 +32,19 @@ try_getting_str_literals as try_getting_str_literals, ) from mypy.types import ( + AnyType, CallableType, + Instance, + LiteralType, + NoneType, Overloaded, Type, + TypeOfAny, TypeType, TypeVarType, deserialize_type, get_proper_type, + is_optional, ) from mypy.typevars import fill_typevars from mypy.util import get_unique_redefinition_name @@ -87,6 +96,71 @@ def _get_argument(call: CallExpr, name: str) -> Expression | None: return None +def find_shallow_matching_overload_item(overload: Overloaded, call: CallExpr) -> CallableType: + """Perform limited lookup of a matching overload item. + + Full overload resolution is only supported during type checking, but plugins + sometimes need to resolve overloads. This can be used in some such use cases. + + Resolve overloads based on these things only: + + * Match using argument kinds and names + * If formal argument has type None, only accept the "None" expression in the callee + * If formal argument has type Literal[True] or Literal[False], only accept the + relevant bool literal + + Return the first matching overload item, or the last one if nothing matches. 
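To make the matching rules above concrete, here is a hypothetical field specifier (not taken from the diff): the shallow matcher would select the first overload for `field(init=False)` because of the `Literal[False]` parameter, and calls it cannot distinguish simply get the last item.

```python
from typing import Any, Literal, overload


@overload
def field(*, init: Literal[False]) -> Any: ...
@overload
def field(*, default: int, init: bool = True) -> Any: ...
def field(**kwargs: Any) -> Any:
    return kwargs


field(init=False)  # matched to the Literal[False] overload
field(default=0)   # no shallow match on the first item, so the last one is used
```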
+ """ + for item in overload.items[:-1]: + ok = True + mapped = map_actuals_to_formals( + call.arg_kinds, + call.arg_names, + item.arg_kinds, + item.arg_names, + lambda i: AnyType(TypeOfAny.special_form), + ) + + # Look for extra actuals + matched_actuals = set() + for actuals in mapped: + matched_actuals.update(actuals) + if any(i not in matched_actuals for i in range(len(call.args))): + ok = False + + for arg_type, kind, actuals in zip(item.arg_types, item.arg_kinds, mapped): + if kind.is_required() and not actuals: + # Missing required argument + ok = False + break + elif actuals: + args = [call.args[i] for i in actuals] + arg_type = get_proper_type(arg_type) + arg_none = any(isinstance(arg, NameExpr) and arg.name == "None" for arg in args) + if isinstance(arg_type, NoneType): + if not arg_none: + ok = False + break + elif ( + arg_none + and not is_optional(arg_type) + and not ( + isinstance(arg_type, Instance) + and arg_type.type.fullname == "builtins.object" + ) + and not isinstance(arg_type, AnyType) + ): + ok = False + break + elif isinstance(arg_type, LiteralType) and type(arg_type.value) is bool: + if not any(parse_bool(arg) == arg_type.value for arg in args): + ok = False + break + if ok: + return item + return overload.items[-1] + + def _get_callee_type(call: CallExpr) -> CallableType | None: """Return the type of the callee, regardless of its syntatic form.""" @@ -103,8 +177,7 @@ def _get_callee_type(call: CallExpr) -> CallableType | None: if isinstance(callee_node, (Var, SYMBOL_FUNCBASE_TYPES)) and callee_node.type: callee_node_type = get_proper_type(callee_node.type) if isinstance(callee_node_type, Overloaded): - # We take the last overload. - return callee_node_type.items[-1] + return find_shallow_matching_overload_item(callee_node_type, call) elif isinstance(callee_node_type, CallableType): return callee_node_type diff --git a/mypy/plugins/ctypes.py b/mypy/plugins/ctypes.py index edfbe506fcca..b6dbec13ce90 100644 --- a/mypy/plugins/ctypes.py +++ b/mypy/plugins/ctypes.py @@ -30,10 +30,10 @@ def _find_simplecdata_base_arg( None is returned if _SimpleCData appears nowhere in tp's (direct or indirect) bases. """ - if tp.type.has_base("ctypes._SimpleCData"): + if tp.type.has_base("_ctypes._SimpleCData"): simplecdata_base = map_instance_to_supertype( tp, - api.named_generic_type("ctypes._SimpleCData", [AnyType(TypeOfAny.special_form)]).type, + api.named_generic_type("_ctypes._SimpleCData", [AnyType(TypeOfAny.special_form)]).type, ) assert len(simplecdata_base.args) == 1, "_SimpleCData takes exactly one type argument" return get_proper_type(simplecdata_base.args[0]) @@ -88,7 +88,7 @@ def _autounboxed_cdata(tp: Type) -> ProperType: return make_simplified_union([_autounboxed_cdata(t) for t in tp.items]) elif isinstance(tp, Instance): for base in tp.type.bases: - if base.type.fullname == "ctypes._SimpleCData": + if base.type.fullname == "_ctypes._SimpleCData": # If tp has _SimpleCData as a direct base class, # the auto-unboxed type is the single type argument of the _SimpleCData type. 
assert len(base.args) == 1 @@ -102,7 +102,7 @@ def _get_array_element_type(tp: Type) -> ProperType | None: """Get the element type of the Array type tp, or None if not specified.""" tp = get_proper_type(tp) if isinstance(tp, Instance): - assert tp.type.fullname == "ctypes.Array" + assert tp.type.fullname == "_ctypes.Array" if len(tp.args) == 1: return get_proper_type(tp.args[0]) return None @@ -123,7 +123,9 @@ def array_constructor_callback(ctx: mypy.plugin.FunctionContext) -> Type: ctx.api.msg.fail( "Array constructor argument {} of type {}" " is not convertible to the array element type {}".format( - arg_num, format_type(arg_type), format_type(et) + arg_num, + format_type(arg_type, ctx.api.options), + format_type(et, ctx.api.options), ), ctx.context, ) @@ -134,7 +136,9 @@ def array_constructor_callback(ctx: mypy.plugin.FunctionContext) -> Type: ctx.api.msg.fail( "Array constructor argument {} of type {}" " is not convertible to the array element type {}".format( - arg_num, format_type(arg_type), format_type(it) + arg_num, + format_type(arg_type, ctx.api.options), + format_type(it, ctx.api.options), ), ctx.context, ) @@ -209,7 +213,9 @@ def array_value_callback(ctx: mypy.plugin.AttributeContext) -> Type: else: ctx.api.msg.fail( 'Array attribute "value" is only available' - ' with element type "c_char" or "c_wchar", not {}'.format(format_type(et)), + ' with element type "c_char" or "c_wchar", not {}'.format( + format_type(et, ctx.api.options) + ), ctx.context, ) return make_simplified_union(types) @@ -232,7 +238,7 @@ def array_raw_callback(ctx: mypy.plugin.AttributeContext) -> Type: else: ctx.api.msg.fail( 'Array attribute "raw" is only available' - ' with element type "c_char", not {}'.format(format_type(et)), + ' with element type "c_char", not {}'.format(format_type(et, ctx.api.options)), ctx.context, ) return make_simplified_union(types) diff --git a/mypy/plugins/dataclasses.py b/mypy/plugins/dataclasses.py index 7694134ac09e..a577784217aa 100644 --- a/mypy/plugins/dataclasses.py +++ b/mypy/plugins/dataclasses.py @@ -2,11 +2,11 @@ from __future__ import annotations -from typing import Optional +from typing import Iterator, Optional from typing_extensions import Final from mypy import errorcodes, message_registry -from mypy.expandtype import expand_type +from mypy.expandtype import expand_type, expand_type_by_instance from mypy.nodes import ( ARG_NAMED, ARG_NAMED_OPT, @@ -17,11 +17,14 @@ MDEF, Argument, AssignmentStmt, + Block, CallExpr, ClassDef, Context, DataclassTransformSpec, Expression, + FuncDef, + IfStmt, JsonDict, NameExpr, Node, @@ -37,6 +40,7 @@ ) from mypy.plugin import ClassDefContext, SemanticAnalyzerPluginInterface from mypy.plugins.common import ( + _get_callee_type, _get_decorator_bool_argument, add_attribute_to_class, add_method_to_class, @@ -45,7 +49,7 @@ from mypy.semanal_shared import find_dataclass_transform_spec, require_bool_literal_argument from mypy.server.trigger import make_wildcard_trigger from mypy.state import state -from mypy.typeops import map_type_from_supertype +from mypy.typeops import map_type_from_supertype, try_getting_literals_from_type from mypy.types import ( AnyType, CallableType, @@ -87,6 +91,7 @@ def __init__( type: Type | None, info: TypeInfo, kw_only: bool, + is_neither_frozen_nor_nonfrozen: bool, ) -> None: self.name = name self.alias = alias @@ -95,9 +100,10 @@ def __init__( self.has_default = has_default self.line = line self.column = column - self.type = type + self.type = type # Type as __init__ argument self.info = info self.kw_only = 
kw_only + self.is_neither_frozen_nor_nonfrozen = is_neither_frozen_nor_nonfrozen def to_argument(self, current_info: TypeInfo) -> Argument: arg_kind = ARG_POS @@ -138,6 +144,7 @@ def serialize(self) -> JsonDict: "column": self.column, "type": self.type.serialize(), "kw_only": self.kw_only, + "is_neither_frozen_nor_nonfrozen": self.is_neither_frozen_nor_nonfrozen, } @classmethod @@ -213,7 +220,6 @@ def transform(self) -> bool: and ("__init__" not in info.names or info.names["__init__"].plugin_generated) and attributes ): - with state.strict_optional_set(self._api.options.strict_optional): args = [ attr.to_argument(info) @@ -290,7 +296,11 @@ def transform(self) -> bool: parent_decorator_arguments = [] for parent in info.mro[1:-1]: parent_args = parent.metadata.get("dataclass") - if parent_args: + + # Ignore parent classes that directly specify a dataclass transform-decorated metaclass + # when searching for usage of the frozen parameter. PEP 681 states that a class that + # directly specifies such a metaclass must be treated as neither frozen nor non-frozen. + if parent_args and not _has_direct_dataclass_transform_metaclass(parent): parent_decorator_arguments.append(parent_args) if decorator_arguments["frozen"]: @@ -380,6 +390,22 @@ def reset_init_only_vars(self, info: TypeInfo, attributes: list[DataclassAttribu # recreate a symbol node for this attribute. lvalue.node = None + def _get_assignment_statements_from_if_statement( + self, stmt: IfStmt + ) -> Iterator[AssignmentStmt]: + for body in stmt.body: + if not body.is_unreachable: + yield from self._get_assignment_statements_from_block(body) + if stmt.else_body is not None and not stmt.else_body.is_unreachable: + yield from self._get_assignment_statements_from_block(stmt.else_body) + + def _get_assignment_statements_from_block(self, block: Block) -> Iterator[AssignmentStmt]: + for stmt in block.body: + if isinstance(stmt, AssignmentStmt): + yield stmt + elif isinstance(stmt, IfStmt): + yield from self._get_assignment_statements_from_if_statement(stmt) + def collect_attributes(self) -> list[DataclassAttribute] | None: """Collect all attributes declared in the dataclass and its parents. @@ -438,10 +464,10 @@ def collect_attributes(self) -> list[DataclassAttribute] | None: # Second, collect attributes belonging to the current class. current_attr_names: set[str] = set() kw_only = self._get_bool_arg("kw_only", self._spec.kw_only_default) - for stmt in cls.defs.body: + for stmt in self._get_assignment_statements_from_block(cls.defs): # Any assignment that doesn't use the new type declaration # syntax can be ignored out of hand. - if not (isinstance(stmt, AssignmentStmt) and stmt.new_syntax): + if not stmt.new_syntax: continue # a: int, b: str = 1, 'foo' is not supported syntax so we @@ -491,7 +517,7 @@ def collect_attributes(self) -> list[DataclassAttribute] | None: is_in_init_param = field_args.get("init") if is_in_init_param is None: - is_in_init = True + is_in_init = self._get_default_init_value_for_field_specifier(stmt.rvalue) else: is_in_init = bool(self._api.parse_bool(is_in_init_param)) @@ -510,9 +536,12 @@ def collect_attributes(self) -> list[DataclassAttribute] | None: elif not isinstance(stmt.rvalue, TempNode): has_default = True - if not has_default: - # Make all non-default attributes implicit because they are de-facto set - # on self in the generated __init__(), not in the class body. 
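The `_get_assignment_statements_from_*` helpers above let attribute collection descend into `if` statements, so conditionally declared fields are still found; an assumed example of the kind of class this supports:

```python
import sys
from dataclasses import dataclass


@dataclass
class Config:
    name: str
    # A field declared under a version check is still collected; only the
    # reachable branch matters during type checking.
    if sys.version_info >= (3, 10):
        strict: bool = True
    else:
        strict: bool = False


print(Config("demo").strict)
```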
+ if not has_default and self._spec is _TRANSFORM_SPEC_FOR_DATACLASSES: + # Make all non-default dataclass attributes implicit because they are de-facto + # set on self in the generated __init__(), not in the class body. On the other + # hand, we don't know how custom dataclass transforms initialize attributes, + # so we don't treat them as implicit. This is required to support descriptors + # (https://github.com/python/mypy/issues/14868). sym.implicit = True is_kw_only = kw_only @@ -553,6 +582,7 @@ def collect_attributes(self) -> list[DataclassAttribute] | None: ) current_attr_names.add(lhs.name) + init_type = self._infer_dataclass_attr_init_type(sym, lhs.name, stmt) found_attrs[lhs.name] = DataclassAttribute( name=lhs.name, alias=alias, @@ -561,9 +591,12 @@ def collect_attributes(self) -> list[DataclassAttribute] | None: has_default=has_default, line=stmt.line, column=stmt.column, - type=sym.type, + type=init_type, info=cls.info, kw_only=is_kw_only, + is_neither_frozen_nor_nonfrozen=_has_direct_dataclass_transform_metaclass( + cls.info + ), ) all_attrs = list(found_attrs.values()) @@ -606,6 +639,13 @@ def _freeze(self, attributes: list[DataclassAttribute]) -> None: """ info = self._cls.info for attr in attributes: + # Classes that directly specify a dataclass_transform metaclass must be neither frozen + # non non-frozen per PEP681. Though it is surprising, this means that attributes from + # such a class must be writable even if the rest of the class heirarchy is frozen. This + # matches the behavior of Pyright (the reference implementation). + if attr.is_neither_frozen_nor_nonfrozen: + continue + sym_node = info.names.get(attr.name) if sym_node is not None: var = sym_node.node @@ -720,6 +760,74 @@ def _get_bool_arg(self, name: str, default: bool) -> bool: return require_bool_literal_argument(self._api, expression, name, default) return default + def _get_default_init_value_for_field_specifier(self, call: Expression) -> bool: + """ + Find a default value for the `init` parameter of the specifier being called. If the + specifier's type signature includes an `init` parameter with a type of `Literal[True]` or + `Literal[False]`, return the appropriate boolean value from the literal. Otherwise, + fall back to the standard default of `True`. + """ + if not isinstance(call, CallExpr): + return True + + specifier_type = _get_callee_type(call) + if specifier_type is None: + return True + + parameter = specifier_type.argument_by_name("init") + if parameter is None: + return True + + literals = try_getting_literals_from_type(parameter.typ, bool, "builtins.bool") + if literals is None or len(literals) != 1: + return True + + return literals[0] + + def _infer_dataclass_attr_init_type( + self, sym: SymbolTableNode, name: str, context: Context + ) -> Type | None: + """Infer __init__ argument type for an attribute. + + In particular, possibly use the signature of __set__. + """ + default = sym.type + if sym.implicit: + return default + t = get_proper_type(sym.type) + + # Perform a simple-minded inference from the signature of __set__, if present. + # We can't use mypy.checkmember here, since this plugin runs before type checking. + # We only support some basic scanerios here, which is hopefully sufficient for + # the vast majority of use cases. 
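The descriptor scenario this inference targets (see the issue referenced above) looks roughly like the following; `UpperStr` and `Record` are hypothetical, and the point is that the `__init__` argument type for `x` should come from `__set__`'s value parameter (`str`), not from the descriptor type itself.

```python
from __future__ import annotations

from dataclasses import dataclass


class UpperStr:
    """Data descriptor that stores strings upper-cased."""

    def __set_name__(self, owner: type, name: str) -> None:
        self._name = "_" + name

    def __get__(self, obj: object, objtype: type | None = None) -> str:
        return getattr(obj, self._name, "") if obj is not None else ""

    def __set__(self, obj: object, value: str) -> None:
        setattr(obj, self._name, value.upper())


@dataclass
class Record:
    x: UpperStr = UpperStr()


r = Record(x="hello")  # accepted as a str, per the __set__ signature
print(r.x)             # HELLO
```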
+ if not isinstance(t, Instance): + return default + setter = t.type.get("__set__") + if setter: + if isinstance(setter.node, FuncDef): + super_info = t.type.get_containing_type_info("__set__") + assert super_info + if setter.type: + setter_type = get_proper_type( + map_type_from_supertype(setter.type, t.type, super_info) + ) + else: + return AnyType(TypeOfAny.unannotated) + if isinstance(setter_type, CallableType) and setter_type.arg_kinds == [ + ARG_POS, + ARG_POS, + ARG_POS, + ]: + return expand_type_by_instance(setter_type.arg_types[2], t) + else: + self._api.fail( + f'Unsupported signature for "__set__" in "{t.type.name}"', context + ) + else: + self._api.fail(f'Unsupported "__set__" in "{t.type.name}"', context) + + return default + def add_dataclass_tag(info: TypeInfo) -> None: # The value is ignored, only the existence matters. @@ -769,3 +877,10 @@ def _is_dataclasses_decorator(node: Node) -> bool: if isinstance(node, RefExpr): return node.fullname in dataclass_makers return False + + +def _has_direct_dataclass_transform_metaclass(info: TypeInfo) -> bool: + return ( + info.declared_metaclass is not None + and info.declared_metaclass.type.dataclass_transform_spec is not None + ) diff --git a/mypy/plugins/default.py b/mypy/plugins/default.py index 4d6f46860939..1edc91a1183c 100644 --- a/mypy/plugins/default.py +++ b/mypy/plugins/default.py @@ -10,6 +10,7 @@ AttributeContext, ClassDefContext, FunctionContext, + FunctionSigContext, MethodContext, MethodSigContext, Plugin, @@ -40,12 +41,21 @@ class DefaultPlugin(Plugin): def get_function_hook(self, fullname: str) -> Callable[[FunctionContext], Type] | None: from mypy.plugins import ctypes, singledispatch - if fullname == "ctypes.Array": + if fullname == "_ctypes.Array": return ctypes.array_constructor_callback elif fullname == "functools.singledispatch": return singledispatch.create_singledispatch_function_callback return None + def get_function_signature_hook( + self, fullname: str + ) -> Callable[[FunctionSigContext], FunctionLike] | None: + from mypy.plugins import attrs + + if fullname in ("attr.evolve", "attrs.evolve", "attr.assoc", "attrs.assoc"): + return attrs.evolve_function_sig_callback + return None + def get_method_signature_hook( self, fullname: str ) -> Callable[[MethodSigContext], FunctionLike] | None: @@ -59,7 +69,7 @@ def get_method_signature_hook( return typed_dict_pop_signature_callback elif fullname in {n + ".update" for n in TPDICT_FB_NAMES}: return typed_dict_update_signature_callback - elif fullname == "ctypes.Array.__setitem__": + elif fullname == "_ctypes.Array.__setitem__": return ctypes.array_setitem_callback elif fullname == singledispatch.SINGLEDISPATCH_CALLABLE_CALL_METHOD: return singledispatch.call_singledispatch_function_callback @@ -82,9 +92,9 @@ def get_method_hook(self, fullname: str) -> Callable[[MethodContext], Type] | No return typed_dict_pop_callback elif fullname in {n + ".__delitem__" for n in TPDICT_FB_NAMES}: return typed_dict_delitem_callback - elif fullname == "ctypes.Array.__getitem__": + elif fullname == "_ctypes.Array.__getitem__": return ctypes.array_getitem_callback - elif fullname == "ctypes.Array.__iter__": + elif fullname == "_ctypes.Array.__iter__": return ctypes.array_iter_callback elif fullname == singledispatch.SINGLEDISPATCH_REGISTER_METHOD: return singledispatch.singledispatch_register_callback @@ -95,9 +105,9 @@ def get_method_hook(self, fullname: str) -> Callable[[MethodContext], Type] | No def get_attribute_hook(self, fullname: str) -> Callable[[AttributeContext], Type] | 
None: from mypy.plugins import ctypes, enums - if fullname == "ctypes.Array.value": + if fullname == "_ctypes.Array.value": return ctypes.array_value_callback - elif fullname == "ctypes.Array.raw": + elif fullname == "_ctypes.Array.raw": return ctypes.array_raw_callback elif fullname in enums.ENUM_NAME_ACCESS: return enums.enum_name_callback diff --git a/mypy/plugins/singledispatch.py b/mypy/plugins/singledispatch.py index cd6a3a9fa1cc..a44493f900b1 100644 --- a/mypy/plugins/singledispatch.py +++ b/mypy/plugins/singledispatch.py @@ -5,6 +5,7 @@ from mypy.messages import format_type from mypy.nodes import ARG_POS, Argument, Block, ClassDef, Context, SymbolTable, TypeInfo, Var +from mypy.options import Options from mypy.plugin import CheckerPluginInterface, FunctionContext, MethodContext, MethodSigContext from mypy.plugins.common import add_method_to_class from mypy.subtypes import is_subtype @@ -99,7 +100,6 @@ def create_singledispatch_function_callback(ctx: FunctionContext) -> Type: """Called for functools.singledispatch""" func_type = get_proper_type(get_first_arg(ctx.arg_types)) if isinstance(func_type, CallableType): - if len(func_type.arg_kinds) < 1: fail( ctx, "Singledispatch function requires at least one argument", func_type.definition @@ -143,7 +143,7 @@ def singledispatch_register_callback(ctx: MethodContext) -> Type: return register_callable elif isinstance(first_arg_type, CallableType): # TODO: do more checking for registered functions - register_function(ctx, ctx.type, first_arg_type) + register_function(ctx, ctx.type, first_arg_type, ctx.api.options) # The typeshed stubs for register say that the function returned is Callable[..., T], even # though the function returned is the same as the one passed in. We return the type of the # function so that mypy can properly type check cases where the registered function is used @@ -155,7 +155,11 @@ def singledispatch_register_callback(ctx: MethodContext) -> Type: def register_function( - ctx: PluginContext, singledispatch_obj: Instance, func: Type, register_arg: Type | None = None + ctx: PluginContext, + singledispatch_obj: Instance, + func: Type, + options: Options, + register_arg: Type | None = None, ) -> None: """Register a function""" @@ -176,11 +180,10 @@ def register_function( fallback_dispatch_type = fallback.arg_types[0] if not is_subtype(dispatch_type, fallback_dispatch_type): - fail( ctx, "Dispatch type {} must be subtype of fallback function first argument {}".format( - format_type(dispatch_type), format_type(fallback_dispatch_type) + format_type(dispatch_type, options), format_type(fallback_dispatch_type, options) ), func.definition, ) @@ -203,7 +206,9 @@ def call_singledispatch_function_after_register_argument(ctx: MethodContext) -> type_args = RegisterCallableInfo(*register_callable.args) # type: ignore[arg-type] func = get_first_arg(ctx.arg_types) if func is not None: - register_function(ctx, type_args.singledispatch_obj, func, type_args.register_type) + register_function( + ctx, type_args.singledispatch_obj, func, ctx.api.options, type_args.register_type + ) # see call to register_function in the callback for register return func return ctx.default_return_type diff --git a/mypy/pyinfo.py b/mypy/pyinfo.py index 5929bfb696b5..778b0b163ce6 100644 --- a/mypy/pyinfo.py +++ b/mypy/pyinfo.py @@ -7,20 +7,22 @@ library found in Python 3.7. This file is run each mypy run, so it should be kept as fast as possible. 
""" -import os -import site import sys -import sysconfig if __name__ == "__main__": # HACK: We don't want to pick up mypy.types as the top-level types # module. This could happen if this file is run as a script. - # This workaround fixes it. - old_sys_path = sys.path - sys.path = sys.path[1:] - import types # noqa: F401 + # This workaround fixes this for Python versions before 3.11. + if sys.version_info < (3, 11): + old_sys_path = sys.path + sys.path = sys.path[1:] + import types # noqa: F401 + + sys.path = old_sys_path - sys.path = old_sys_path +import os +import site +import sysconfig def getsitepackages() -> list[str]: @@ -31,9 +33,7 @@ def getsitepackages() -> list[str]: if hasattr(site, "getusersitepackages") and site.ENABLE_USER_SITE: res.insert(0, site.getusersitepackages()) else: - from distutils.sysconfig import get_python_lib - - res = [get_python_lib()] + res = [sysconfig.get_paths()["purelib"]] return res diff --git a/mypy/refinfo.py b/mypy/refinfo.py index 4262824f8f97..3df1e575a35c 100644 --- a/mypy/refinfo.py +++ b/mypy/refinfo.py @@ -2,13 +2,24 @@ from __future__ import annotations -from mypy.nodes import LDEF, MemberExpr, MypyFile, NameExpr, RefExpr +from mypy.nodes import LDEF, Expression, MemberExpr, MypyFile, NameExpr, RefExpr from mypy.traverser import TraverserVisitor +from mypy.typeops import tuple_fallback +from mypy.types import ( + FunctionLike, + Instance, + TupleType, + Type, + TypeType, + TypeVarLikeType, + get_proper_type, +) class RefInfoVisitor(TraverserVisitor): - def __init__(self) -> None: + def __init__(self, type_map: dict[Expression, Type]) -> None: super().__init__() + self.type_map = type_map self.data: list[dict[str, object]] = [] def visit_name_expr(self, expr: NameExpr) -> None: @@ -23,13 +34,36 @@ def record_ref_expr(self, expr: RefExpr) -> None: fullname = None if expr.kind != LDEF and "." 
in expr.fullname: fullname = expr.fullname - elif isinstance(expr, MemberExpr) and not expr.fullname: - fullname = f"*.{expr.name}" + elif isinstance(expr, MemberExpr): + typ = self.type_map.get(expr.expr) + if typ: + tfn = type_fullname(typ) + if tfn: + fullname = f"{tfn}.{expr.name}" + if not fullname: + fullname = f"*.{expr.name}" if fullname is not None: self.data.append({"line": expr.line, "column": expr.column, "target": fullname}) -def get_undocumented_ref_info_json(tree: MypyFile) -> list[dict[str, object]]: - visitor = RefInfoVisitor() +def type_fullname(typ: Type) -> str | None: + typ = get_proper_type(typ) + if isinstance(typ, Instance): + return typ.type.fullname + elif isinstance(typ, TypeType): + return type_fullname(typ.item) + elif isinstance(typ, FunctionLike) and typ.is_type_obj(): + return type_fullname(typ.fallback) + elif isinstance(typ, TupleType): + return type_fullname(tuple_fallback(typ)) + elif isinstance(typ, TypeVarLikeType): + return type_fullname(typ.upper_bound) + return None + + +def get_undocumented_ref_info_json( + tree: MypyFile, type_map: dict[Expression, Type] +) -> list[dict[str, object]]: + visitor = RefInfoVisitor(type_map) tree.accept(visitor) return visitor.data diff --git a/mypy/report.py b/mypy/report.py index 75c372200ca3..81d49baf50da 100644 --- a/mypy/report.py +++ b/mypy/report.py @@ -12,7 +12,7 @@ import tokenize from abc import ABCMeta, abstractmethod from operator import attrgetter -from typing import Any, Callable, Dict, Iterator, Tuple, cast +from typing import Any, Callable, Dict, Iterator, Tuple from typing_extensions import Final, TypeAlias as _TypeAlias from urllib.request import pathname2url @@ -44,7 +44,7 @@ ) ReporterClasses: _TypeAlias = Dict[ - str, Tuple[Callable[["Reports", str], "AbstractReporter"], bool], + str, Tuple[Callable[["Reports", str], "AbstractReporter"], bool] ] reporter_classes: Final[ReporterClasses] = {} @@ -704,8 +704,9 @@ def __init__(self, reports: Reports, output_dir: str) -> None: super().__init__(reports, output_dir) memory_reporter = reports.add_report("memory-xml", "") + assert isinstance(memory_reporter, MemoryXmlReporter) # The dependency will be called first. - self.memory_xml = cast(MemoryXmlReporter, memory_reporter) + self.memory_xml = memory_reporter class XmlReporter(AbstractXmlReporter): @@ -859,7 +860,6 @@ def on_file( type_map: dict[Expression, Type], options: Options, ) -> None: - try: path = os.path.relpath(tree.path) except ValueError: diff --git a/mypy/scope.py b/mypy/scope.py index 19a690df8220..021dd9a7d8a5 100644 --- a/mypy/scope.py +++ b/mypy/scope.py @@ -21,6 +21,7 @@ def __init__(self) -> None: self.module: str | None = None self.classes: list[TypeInfo] = [] self.function: FuncBase | None = None + self.functions: list[FuncBase] = [] # Number of nested scopes ignored (that don't get their own separate targets) self.ignored = 0 @@ -65,12 +66,14 @@ def module_scope(self, prefix: str) -> Iterator[None]: @contextmanager def function_scope(self, fdef: FuncBase) -> Iterator[None]: + self.functions.append(fdef) if not self.function: self.function = fdef else: # Nested functions are part of the topmost function target. self.ignored += 1 yield + self.functions.pop() if self.ignored: # Leave a scope that's included in the enclosing target. 
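The `functions` stack added to `mypy/scope.py` just above follows a simple push/pop pattern so that `outer_functions()` can report every enclosing function except the current one. A self-contained sketch of that pattern (`ScopeSketch` is a toy stand-in, not mypy's actual `Scope` class):

```python
from __future__ import annotations

from contextlib import contextmanager
from typing import Iterator


class ScopeSketch:
    """Toy stand-in for the function stack added to mypy.scope.Scope."""

    def __init__(self) -> None:
        self.functions: list[str] = []

    @contextmanager
    def function_scope(self, name: str) -> Iterator[None]:
        self.functions.append(name)
        try:
            yield
        finally:
            self.functions.pop()

    def outer_functions(self) -> list[str]:
        # Every enclosing function except the innermost (current) one.
        return self.functions[:-1]


scope = ScopeSketch()
with scope.function_scope("outer"):
    with scope.function_scope("inner"):
        assert scope.outer_functions() == ["outer"]
```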
self.ignored -= 1 @@ -78,6 +81,9 @@ def function_scope(self, fdef: FuncBase) -> Iterator[None]: assert self.function self.function = None + def outer_functions(self) -> list[FuncBase]: + return self.functions[:-1] + def enter_class(self, info: TypeInfo) -> None: """Enter a class target scope.""" if not self.function: diff --git a/mypy/semanal.py b/mypy/semanal.py index 2720d2606e92..6ba32d9cd420 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -216,6 +216,7 @@ calculate_tuple_fallback, find_dataclass_transform_spec, has_placeholder, + parse_bool, require_bool_literal_argument, set_callable_name as set_callable_name, ) @@ -509,6 +510,7 @@ def prepare_typing_namespace(self, file_node: MypyFile, aliases: dict[str, str]) They will be replaced with real aliases when corresponding targets are ready. """ + # This is all pretty unfortunate. typeshed now has a # sys.version_info check for OrderedDict, and we shouldn't # take it out, because it is correct and a typechecker should @@ -715,7 +717,7 @@ def create_alias(self, tree: MypyFile, target_name: str, alias: str, name: str) target = self.named_type_or_none(target_name, []) assert target is not None # Transform List to List[Any], etc. - fix_instance_types(target, self.fail, self.note, self.options.python_version) + fix_instance_types(target, self.fail, self.note, self.options) alias_node = TypeAlias( target, alias, @@ -867,11 +869,8 @@ def analyze_func_def(self, defn: FuncDef) -> None: assert isinstance(result, ProperType) if isinstance(result, CallableType): # type guards need to have a positional argument, to spec - if ( - result.type_guard - and ARG_POS not in result.arg_kinds[self.is_class_scope() :] - and not defn.is_static - ): + skip_self = self.is_class_scope() and not defn.is_static + if result.type_guard and ARG_POS not in result.arg_kinds[skip_self:]: self.fail( "TypeGuard functions must have a positional argument", result, @@ -1314,7 +1313,8 @@ def analyze_property_with_multi_part_definition(self, defn: OverloadedFuncDef) - """ defn.is_property = True items = defn.items - first_item = cast(Decorator, defn.items[0]) + first_item = defn.items[0] + assert isinstance(first_item, Decorator) deleted_items = [] for i, item in enumerate(items[1:]): if isinstance(item, Decorator): @@ -1357,7 +1357,8 @@ def analyze_function_body(self, defn: FuncItem) -> None: # Bind the type variables again to visit the body. 
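To illustrate the reworked `TypeGuard` check above: the positional-argument requirement now skips the implicit first argument only for non-static methods in class scope. Both definitions below are fine under that rule (a sketch; `Checks` is a made-up name, and `TypeGuard` is imported from `typing_extensions` for older Python versions):

```python
from typing_extensions import TypeGuard


class Checks:
    def is_str(self, value: object) -> TypeGuard[str]:
        # `self` is skipped; `value` satisfies the positional-argument rule.
        return isinstance(value, str)

    @staticmethod
    def is_int(value: object) -> TypeGuard[int]:
        # No `self` to skip for a static method; `value` is the checked argument.
        return isinstance(value, int)
```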
if defn.type: a = self.type_analyzer() - typ = cast(CallableType, defn.type) + typ = defn.type + assert isinstance(typ, CallableType) a.bind_function_type_variables(typ, defn) for i in range(len(typ.arg_types)): store_argument_type(defn, i, typ, self.named_type) @@ -1547,6 +1548,8 @@ def visit_decorator(self, dec: Decorator) -> None: self.fail("Only instance methods can be decorated with @property", dec) if dec.func.abstract_status == IS_ABSTRACT and dec.func.is_final: self.fail(f"Method {dec.func.name} is both abstract and final", dec) + if dec.func.is_static and dec.func.is_class: + self.fail(message_registry.CLASS_PATTERN_CLASS_OR_STATIC_METHOD, dec) def check_decorated_function_is_method(self, decorator: str, context: Context) -> None: if not self.type or self.is_func_scope(): @@ -2616,11 +2619,14 @@ def report_missing_module_attribute( typing_extensions = self.modules.get("typing_extensions") if typing_extensions and source_id in typing_extensions.names: self.msg.note( - f"Use `from typing_extensions import {source_id}` instead", context + f"Use `from typing_extensions import {source_id}` instead", + context, + code=codes.ATTR_DEFINED, ) self.msg.note( "See https://mypy.readthedocs.io/en/stable/runtime_troubles.html#using-new-additions-to-the-typing-module", context, + code=codes.ATTR_DEFINED, ) def process_import_over_existing_name( @@ -3350,7 +3356,7 @@ def analyze_simple_literal_type(self, rvalue: Expression, is_final: bool) -> Typ return None value = constant_fold_expr(rvalue, self.cur_mod_id) - if value is None: + if value is None or isinstance(value, complex): return None if isinstance(value, bool): @@ -3531,7 +3537,7 @@ def check_and_set_up_type_alias(self, s: AssignmentStmt) -> bool: # if the expected number of arguments is non-zero, so that aliases like A = List work. # However, eagerly expanding aliases like Text = str is a nice performance optimization. no_args = isinstance(res, Instance) and not res.args # type: ignore[misc] - fix_instance_types(res, self.fail, self.note, self.options.python_version) + fix_instance_types(res, self.fail, self.note, self.options) # Aliases defined within functions can't be accessed outside # the function, since the symbol table will no longer # exist. Work around by expanding them eagerly when used. @@ -3641,7 +3647,7 @@ def analyze_lvalue( has_explicit_value=has_explicit_value, ) elif isinstance(lval, MemberExpr): - self.analyze_member_lvalue(lval, explicit_type, is_final) + self.analyze_member_lvalue(lval, explicit_type, is_final, has_explicit_value) if explicit_type and not self.is_self_member_ref(lval): self.fail("Type cannot be declared in assignment to non-self attribute", lval) elif isinstance(lval, IndexExpr): @@ -3818,7 +3824,9 @@ def analyze_tuple_or_list_lvalue(self, lval: TupleExpr, explicit_type: bool = Fa has_explicit_value=True, ) - def analyze_member_lvalue(self, lval: MemberExpr, explicit_type: bool, is_final: bool) -> None: + def analyze_member_lvalue( + self, lval: MemberExpr, explicit_type: bool, is_final: bool, has_explicit_value: bool + ) -> None: """Analyze lvalue that is a member expression. Arguments: @@ -3847,12 +3855,18 @@ def analyze_member_lvalue(self, lval: MemberExpr, explicit_type: bool, is_final: and explicit_type ): self.attribute_already_defined(lval.name, lval, cur_node) - # If the attribute of self is not defined in superclasses, create a new Var, ... 
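The new `visit_decorator` check above flags functions marked as both a static method and a class method. A hedged sketch of the kind of definition that now produces an error (the class body itself still executes; `Registry` is a made-up name):

```python
class Registry:
    @classmethod
    @staticmethod
    def create() -> "Registry":
        # mypy now reports an error here: a method cannot be both a
        # classmethod and a staticmethod.
        return Registry()
```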
+ if self.type.is_protocol and has_explicit_value and cur_node is not None: + # Make this variable non-abstract, it would be safer to do this only if we + # are inside __init__, but we do this always to preserve historical behaviour. + if isinstance(cur_node.node, Var): + cur_node.node.is_abstract_var = False if ( + # If the attribute of self is not defined, create a new Var, ... node is None - or (isinstance(node.node, Var) and node.node.is_abstract_var) + # ... or if it is defined as abstract in a *superclass*. + or (cur_node is None and isinstance(node.node, Var) and node.node.is_abstract_var) # ... also an explicit declaration on self also creates a new Var. - # Note that `explicit_type` might has been erased for bare `Final`, + # Note that `explicit_type` might have been erased for bare `Final`, # so we also check if `is_final` is passed. or (cur_node is None and (explicit_type or is_final)) ): @@ -4419,7 +4433,6 @@ def process__slots__(self, s: AssignmentStmt) -> None: and s.lvalues[0].name == "__slots__" and s.lvalues[0].kind == MDEF ): - # We understand `__slots__` defined as string, tuple, list, set, and dict: if not isinstance(s.rvalue, (StrExpr, ListExpr, TupleExpr, SetExpr, DictExpr)): # For example, `__slots__` can be defined as a variable, @@ -5108,15 +5121,10 @@ def analyze_type_application_args(self, expr: IndexExpr) -> list[Type] | None: return None types.append(analyzed) - if has_param_spec and num_args == 1 and len(types) > 0: + if has_param_spec and num_args == 1 and types: first_arg = get_proper_type(types[0]) if not ( - len(types) == 1 - and ( - isinstance(first_arg, Parameters) - or isinstance(first_arg, ParamSpecType) - or isinstance(first_arg, AnyType) - ) + len(types) == 1 and isinstance(first_arg, (Parameters, ParamSpecType, AnyType)) ): types = [Parameters(types, [ARG_POS] * len(types), [None] * len(types))] @@ -6460,12 +6468,8 @@ def is_initial_mangled_global(self, name: str) -> bool: return name == unmangle(name) + "'" def parse_bool(self, expr: Expression) -> bool | None: - if isinstance(expr, NameExpr): - if expr.fullname == "builtins.True": - return True - if expr.fullname == "builtins.False": - return False - return None + # This wrapper is preserved for plugins. + return parse_bool(expr) def parse_str_literal(self, expr: Expression) -> str | None: """Attempt to find the string literal value of the given expression. Returns `None` if no @@ -6685,7 +6689,7 @@ def is_trivial_body(block: Block) -> bool: "..." (ellipsis), or "raise NotImplementedError()". A trivial body may also start with a statement containing just a string (e.g. a docstring). - Note: functions that raise other kinds of exceptions do not count as + Note: Functions that raise other kinds of exceptions do not count as "trivial". We use this function to help us determine when it's ok to relax certain checks on body, but functions that raise arbitrary exceptions are more likely to do non-trivial work. For example: @@ -6695,11 +6699,18 @@ def halt(self, reason: str = ...) -> NoReturn: A function that raises just NotImplementedError is much less likely to be this complex. + + Note: If you update this, you may also need to update + mypy.fastparse.is_possible_trivial_body! """ body = block.body + if not body: + # Functions have empty bodies only if the body is stripped or the function is + # generated or deserialized. In these cases the body is unknown. 
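Regarding the protocol tweak above: giving a protocol member an explicit value through an assignment on `self` now marks it as non-abstract. A rough sketch of the affected pattern, with made-up names (`HasName`, `Named`); the comments describe mypy's bookkeeping, not runtime behaviour:

```python
from typing import Protocol


class HasName(Protocol):
    name: str

    def __init__(self) -> None:
        # The explicit value assigned here makes `name` non-abstract for mypy.
        self.name = "unnamed"


class Named(HasName):
    """Explicit subclass of the protocol; mypy no longer treats `name` as abstract here."""
```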
+ return False # Skip a docstring - if body and isinstance(body[0], ExpressionStmt) and isinstance(body[0].expr, StrExpr): + if isinstance(body[0], ExpressionStmt) and isinstance(body[0].expr, StrExpr): body = block.body[1:] if len(body) == 0: diff --git a/mypy/semanal_enum.py b/mypy/semanal_enum.py index c7b8e44f65aa..f8d321ffada9 100644 --- a/mypy/semanal_enum.py +++ b/mypy/semanal_enum.py @@ -27,6 +27,7 @@ TupleExpr, TypeInfo, Var, + is_StrExpr_list, ) from mypy.options import Options from mypy.semanal_shared import SemanticAnalyzerInterface @@ -177,8 +178,8 @@ def parse_enum_call_args( items.append(field) elif isinstance(names, (TupleExpr, ListExpr)): seq_items = names.items - if all(isinstance(seq_item, StrExpr) for seq_item in seq_items): - items = [cast(StrExpr, seq_item).value for seq_item in seq_items] + if is_StrExpr_list(seq_items): + items = [seq_item.value for seq_item in seq_items] elif all( isinstance(seq_item, (TupleExpr, ListExpr)) and len(seq_item.items) == 2 @@ -231,7 +232,7 @@ def parse_enum_call_args( % class_name, call, ) - if len(items) == 0: + if not items: return self.fail_enum_call_arg(f"{class_name}() needs at least one item", call) if not values: values = [None] * len(items) diff --git a/mypy/semanal_main.py b/mypy/semanal_main.py index 912851520958..8e8c455dd686 100644 --- a/mypy/semanal_main.py +++ b/mypy/semanal_main.py @@ -75,6 +75,7 @@ "abc", "collections", "collections.abc", + "typing_extensions", ] @@ -190,7 +191,7 @@ def process_top_levels(graph: Graph, scc: list[str], patches: Patches) -> None: # Initially all namespaces in the SCC are incomplete (well they are empty). state.manager.incomplete_namespaces.update(scc) - worklist = scc[:] + worklist = scc.copy() # HACK: process core stuff first. This is mostly needed to support defining # named tuples in builtin SCC. if all(m in worklist for m in core_modules): diff --git a/mypy/semanal_namedtuple.py b/mypy/semanal_namedtuple.py index 1194557836b1..a9f12ceae5c2 100644 --- a/mypy/semanal_namedtuple.py +++ b/mypy/semanal_namedtuple.py @@ -41,6 +41,7 @@ TypeInfo, TypeVarExpr, Var, + is_StrExpr_list, ) from mypy.options import Options from mypy.semanal_shared import ( @@ -280,7 +281,7 @@ def check_namedtuple( # two methods of a class can define a named tuple with the same name, # and they will be stored in the same namespace (see below). name += "@" + str(call.line) - if len(defaults) > 0: + if defaults: default_items = { arg_name: default for arg_name, default in zip(items[-len(defaults) :], defaults) } @@ -373,7 +374,7 @@ def parse_namedtuple_args( if not isinstance(args[0], StrExpr): self.fail(f'"{type_name}()" expects a string literal as the first argument', call) return None - typename = cast(StrExpr, call.args[0]).value + typename = args[0].value types: list[Type] = [] tvar_defs = [] if not isinstance(args[1], (ListExpr, TupleExpr)): @@ -392,10 +393,10 @@ def parse_namedtuple_args( listexpr = args[1] if fullname == "collections.namedtuple": # The fields argument contains just names, with implicit Any types. 
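For reference, these are the functional `Enum` and `namedtuple` forms recognised by the parsing code touched above (plain string items, sequences of names, and name/value pairs); the names are illustrative only:

```python
from collections import namedtuple
from enum import Enum

Color = Enum("Color", ["RED", "GREEN", "BLUE"])    # list of string items
Animal = Enum("Animal", [("CAT", 1), ("DOG", 2)])  # (name, value) pairs
Point = namedtuple("Point", "x y")                 # field names in one string
Point3 = namedtuple("Point3", ["x", "y", "z"])     # field names as a list
```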
- if any(not isinstance(item, StrExpr) for item in listexpr.items): + if not is_StrExpr_list(listexpr.items): self.fail('String literal expected as "namedtuple()" item', call) return None - items = [cast(StrExpr, item).value for item in listexpr.items] + items = [item.value for item in listexpr.items] else: type_exprs = [ t.items[1] diff --git a/mypy/semanal_newtype.py b/mypy/semanal_newtype.py index cb1055a62186..a8380309d310 100644 --- a/mypy/semanal_newtype.py +++ b/mypy/semanal_newtype.py @@ -105,7 +105,11 @@ def process_newtype_declaration(self, s: AssignmentStmt) -> bool: else: if old_type is not None: message = "Argument 2 to NewType(...) must be subclassable (got {})" - self.fail(message.format(format_type(old_type)), s, code=codes.VALID_NEWTYPE) + self.fail( + message.format(format_type(old_type, self.options)), + s, + code=codes.VALID_NEWTYPE, + ) # Otherwise the error was already reported. old_type = AnyType(TypeOfAny.from_error) object_type = self.api.named_type("builtins.object") diff --git a/mypy/semanal_pass1.py b/mypy/semanal_pass1.py index 55430be00a1e..659f33e65ead 100644 --- a/mypy/semanal_pass1.py +++ b/mypy/semanal_pass1.py @@ -62,6 +62,7 @@ def visit_file(self, file: MypyFile, fnam: str, mod_id: str, options: Options) - self.cur_mod_node = file self.options = options self.is_global_scope = True + self.unreachable_lines: set[int] = set() for i, defn in enumerate(file.defs): defn.accept(self) @@ -69,8 +70,14 @@ def visit_file(self, file: MypyFile, fnam: str, mod_id: str, options: Options) - # We've encountered an assert that's always false, # e.g. assert sys.platform == 'lol'. Truncate the # list of statements. This mutates file.defs too. + if i < len(file.defs) - 1: + next_def, last = file.defs[i + 1], file.defs[-1] + if last.end_line is not None: + # We are on a Python version recent enough to support end lines. + self.unreachable_lines |= set(range(next_def.line, last.end_line + 1)) del file.defs[i + 1 :] break + file.unreachable_lines = self.unreachable_lines def visit_func_def(self, node: FuncDef) -> None: old_global_scope = self.is_global_scope @@ -118,6 +125,9 @@ def visit_if_stmt(self, s: IfStmt) -> None: def visit_block(self, b: Block) -> None: if b.is_unreachable: + if b.end_line is not None: + # We are on a Python version recent enough to support end lines. 
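The pass updated above now records the whole line span made unreachable by an always-false module-level assert (when end lines are available), in addition to truncating the remaining statements. A sketch of the kind of module this applies to, using a Windows platform check as an assumed example:

```python
import sys

# When checking with --platform win32, this assert is always false, so mypy
# truncates everything below and now also records those lines as unreachable.
assert sys.platform != "win32"

import fcntl  # POSIX-only module; unreachable when targeting win32

LOCK_MODE = fcntl.LOCK_EX
```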
+ self.unreachable_lines |= set(range(b.line, b.end_line + 1)) return super().visit_block(b) diff --git a/mypy/semanal_shared.py b/mypy/semanal_shared.py index 03efbe6ca1b8..c86ed828b2b9 100644 --- a/mypy/semanal_shared.py +++ b/mypy/semanal_shared.py @@ -18,6 +18,7 @@ Decorator, Expression, FuncDef, + NameExpr, Node, OverloadedFuncDef, RefExpr, @@ -451,7 +452,7 @@ def require_bool_literal_argument( default: bool | None = None, ) -> bool | None: """Attempt to interpret an expression as a boolean literal, and fail analysis if we can't.""" - value = api.parse_bool(expression) + value = parse_bool(expression) if value is None: api.fail( f'"{name}" argument must be a True or False literal', expression, code=LITERAL_REQ @@ -459,3 +460,12 @@ def require_bool_literal_argument( return default return value + + +def parse_bool(expr: Expression) -> bool | None: + if isinstance(expr, NameExpr): + if expr.fullname == "builtins.True": + return True + if expr.fullname == "builtins.False": + return False + return None diff --git a/mypy/semanal_typeargs.py b/mypy/semanal_typeargs.py index b9965236c379..5d66c03aa33e 100644 --- a/mypy/semanal_typeargs.py +++ b/mypy/semanal_typeargs.py @@ -141,7 +141,9 @@ def validate_args( is_error = True self.fail( message_registry.INVALID_TYPEVAR_ARG_BOUND.format( - format_type(arg), name, format_type(tvar.upper_bound) + format_type(arg, self.options), + name, + format_type(tvar.upper_bound, self.options), ), ctx, code=codes.TYPE_VAR, @@ -152,7 +154,7 @@ def validate_args( ): self.fail( "Can only replace ParamSpec with a parameter types list or" - f" another ParamSpec, got {format_type(arg)}", + f" another ParamSpec, got {format_type(arg, self.options)}", ctx, ) return is_error @@ -170,7 +172,9 @@ def visit_unpack_type(self, typ: UnpackType) -> None: # TODO: Infer something when it can't be unpacked to allow rest of # typechecking to work. 
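Several messages above now pass `Options` into `format_type()`, which controls how types are rendered in errors such as the TypeVar-bound violation. A minimal program that triggers that particular check (`Box` and `T` are illustrative names):

```python
from typing import Generic, TypeVar

T = TypeVar("T", bound=int)


class Box(Generic[T]):
    value: T


b: Box[str]  # mypy: type argument "str" must be a subtype of the bound "int"
```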
- self.fail(message_registry.INVALID_UNPACK.format(proper_type), typ) + self.fail( + message_registry.INVALID_UNPACK.format(format_type(proper_type, self.options)), typ + ) def check_type_var_values( self, name: str, actuals: list[Type], arg_name: str, valids: list[Type], context: Context diff --git a/mypy/semanal_typeddict.py b/mypy/semanal_typeddict.py index acb93edb7d2d..04e0c85d5b68 100644 --- a/mypy/semanal_typeddict.py +++ b/mypy/semanal_typeddict.py @@ -469,7 +469,7 @@ def parse_typeddict_fields_with_types( seen_keys = set() items: list[str] = [] types: list[Type] = [] - for (field_name_expr, field_type_expr) in dict_items: + for field_name_expr, field_type_expr in dict_items: if isinstance(field_name_expr, StrExpr): key = field_name_expr.value items.append(key) diff --git a/mypy/server/astdiff.py b/mypy/server/astdiff.py index c942a5eb3b0f..83ae64fbc1a8 100644 --- a/mypy/server/astdiff.py +++ b/mypy/server/astdiff.py @@ -52,7 +52,7 @@ class level -- these are handled at attribute level (say, 'mod.Cls.method' from __future__ import annotations -from typing import Sequence, Tuple, Union, cast +from typing import Sequence, Tuple, Union from typing_extensions import TypeAlias as _TypeAlias from mypy.expandtype import expand_type @@ -442,7 +442,7 @@ def normalize_callable_variables(self, typ: CallableType) -> CallableType: tv = v.copy_modified(id=tid) tvs.append(tv) tvmap[v.id] = tv - return cast(CallableType, expand_type(typ, tvmap)).copy_modified(variables=tvs) + return expand_type(typ, tvmap).copy_modified(variables=tvs) def visit_tuple_type(self, typ: TupleType) -> SnapshotItem: return ("TupleType", snapshot_types(typ.items)) diff --git a/mypy/server/astmerge.py b/mypy/server/astmerge.py index 1ec6d572a82c..0cc6377bfb0f 100644 --- a/mypy/server/astmerge.py +++ b/mypy/server/astmerge.py @@ -358,7 +358,8 @@ def fixup_and_reset_typeinfo(self, node: TypeInfo) -> TypeInfo: if node in self.replacements: # The subclass relationships may change, so reset all caches relevant to the # old MRO. - new = cast(TypeInfo, self.replacements[node]) + new = self.replacements[node] + assert isinstance(new, TypeInfo) type_state.reset_all_subtype_caches_for(new) return self.fixup(node) diff --git a/mypy/server/deps.py b/mypy/server/deps.py index 50b66b70b8aa..2659f942817d 100644 --- a/mypy/server/deps.py +++ b/mypy/server/deps.py @@ -1027,7 +1027,7 @@ def visit_tuple_type(self, typ: TupleType) -> list[str]: def visit_type_type(self, typ: TypeType) -> list[str]: triggers = self.get_type_triggers(typ.item) if not self.use_logical_deps: - old_triggers = triggers[:] + old_triggers = triggers.copy() for trigger in old_triggers: triggers.append(trigger.rstrip(">") + ".__init__>") triggers.append(trigger.rstrip(">") + ".__new__>") diff --git a/mypy/server/update.py b/mypy/server/update.py index 00b823c99dfd..7b439eb0ab9f 100644 --- a/mypy/server/update.py +++ b/mypy/server/update.py @@ -187,7 +187,7 @@ def __init__(self, result: BuildResult) -> None: # Merge in any root dependencies that may not have been loaded merge_dependencies(manager.load_fine_grained_deps(FAKE_ROOT_MODULE), self.deps) self.previous_targets_with_errors = manager.errors.targets() - self.previous_messages: list[str] = result.errors[:] + self.previous_messages: list[str] = result.errors.copy() # Module, if any, that had blocking errors in the last run as (id, path) tuple. self.blocking_error: tuple[str, str] | None = None # Module that we haven't processed yet but that are known to be stale. 
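Earlier in this chunk, `parse_typeddict_fields_with_types()` walks the name/type pairs of a call-based TypedDict. For reference, this is the form being parsed (assuming `typing_extensions` is available; `Movie` is an illustrative name):

```python
from typing_extensions import TypedDict

Movie = TypedDict("Movie", {"name": str, "year": int}, total=False)

m: Movie = {"name": "Blade Runner"}
```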
@@ -302,7 +302,7 @@ def update( break messages = sort_messages_preserving_file_order(messages, self.previous_messages) - self.previous_messages = messages[:] + self.previous_messages = messages.copy() return messages def trigger(self, target: str) -> list[str]: @@ -322,7 +322,7 @@ def trigger(self, target: str) -> list[str]: ) # Preserve state needed for the next update. self.previous_targets_with_errors = self.manager.errors.targets() - self.previous_messages = self.manager.errors.new_messages()[:] + self.previous_messages = self.manager.errors.new_messages().copy() return self.update(changed_modules, []) def flush_cache(self) -> None: @@ -986,6 +986,7 @@ def key(node: FineGrainedDeferredNode) -> int: manager.errors.set_file_ignored_lines( file_node.path, file_node.ignored_lines, options.ignore_errors or state.ignore_all ) + manager.errors.set_unreachable_lines(file_node.path, file_node.unreachable_lines) targets = set() for node in nodes: diff --git a/mypy/stats.py b/mypy/stats.py index b3a32c1ce72c..5f4b9d4d201f 100644 --- a/mypy/stats.py +++ b/mypy/stats.py @@ -5,7 +5,7 @@ import os from collections import Counter from contextlib import contextmanager -from typing import Iterator, cast +from typing import Iterator from typing_extensions import Final from mypy import nodes @@ -154,10 +154,12 @@ def visit_func_def(self, o: FuncDef) -> None: ) return for defn in o.expanded: - self.visit_func_def(cast(FuncDef, defn)) + assert isinstance(defn, FuncDef) + self.visit_func_def(defn) else: if o.type: - sig = cast(CallableType, o.type) + assert isinstance(o.type, CallableType) + sig = o.type arg_types = sig.arg_types if sig.arg_names and sig.arg_names[0] == "self" and not self.inferred: arg_types = arg_types[1:] diff --git a/mypy/strconv.py b/mypy/strconv.py index b2e9da5dbf6a..c428addd43aa 100644 --- a/mypy/strconv.py +++ b/mypy/strconv.py @@ -7,11 +7,13 @@ from typing import TYPE_CHECKING, Any, Sequence import mypy.nodes +from mypy.options import Options from mypy.util import IdMapper, short_type from mypy.visitor import NodeVisitor if TYPE_CHECKING: import mypy.patterns + import mypy.types class StrConv(NodeVisitor[str]): @@ -26,12 +28,20 @@ class StrConv(NodeVisitor[str]): IntExpr(1))) """ - def __init__(self, show_ids: bool = False) -> None: + __slots__ = ["options", "show_ids", "id_mapper"] + + def __init__(self, *, show_ids: bool = False, options: Options) -> None: + self.options = options self.show_ids = show_ids self.id_mapper: IdMapper | None = None if show_ids: self.id_mapper = IdMapper() + def stringify_type(self, t: mypy.types.Type) -> str: + import mypy.types + + return t.accept(mypy.types.TypeStrVisitor(id_mapper=self.id_mapper, options=self.options)) + def get_id(self, o: object) -> int | None: if self.id_mapper: return self.id_mapper.id(o) @@ -144,7 +154,7 @@ def visit_func_def(self, o: mypy.nodes.FuncDef) -> str: return self.dump(a, o) def visit_overloaded_func_def(self, o: mypy.nodes.OverloadedFuncDef) -> str: - a: Any = o.items[:] + a: Any = o.items.copy() if o.type: a.insert(0, o.type) if o.impl: @@ -168,11 +178,11 @@ def visit_class_def(self, o: mypy.nodes.ClassDef) -> str: if o.type_vars: a.insert(1, ("TypeVars", o.type_vars)) if o.metaclass: - a.insert(1, f"Metaclass({o.metaclass})") + a.insert(1, f"Metaclass({o.metaclass.accept(self)})") if o.decorators: a.insert(1, ("Decorators", o.decorators)) if o.info and o.info._promote: - a.insert(1, f"Promote({o.info._promote})") + a.insert(1, f"Promote([{','.join(self.stringify_type(p) for p in o.info._promote)}])") if o.info and 
o.info.tuple_type: a.insert(1, ("TupleType", [o.info.tuple_type])) if o.info and o.info.fallback_to_any: @@ -473,7 +483,7 @@ def visit_type_var_expr(self, o: mypy.nodes.TypeVarExpr) -> str: if o.values: a += [("Values", o.values)] if not mypy.types.is_named_instance(o.upper_bound, "builtins.object"): - a += [f"UpperBound({o.upper_bound})"] + a += [f"UpperBound({self.stringify_type(o.upper_bound)})"] return self.dump(a, o) def visit_paramspec_expr(self, o: mypy.nodes.ParamSpecExpr) -> str: @@ -485,7 +495,7 @@ def visit_paramspec_expr(self, o: mypy.nodes.ParamSpecExpr) -> str: if o.variance == mypy.nodes.CONTRAVARIANT: a += ["Variance(CONTRAVARIANT)"] if not mypy.types.is_named_instance(o.upper_bound, "builtins.object"): - a += [f"UpperBound({o.upper_bound})"] + a += [f"UpperBound({self.stringify_type(o.upper_bound)})"] return self.dump(a, o) def visit_type_var_tuple_expr(self, o: mypy.nodes.TypeVarTupleExpr) -> str: @@ -497,14 +507,14 @@ def visit_type_var_tuple_expr(self, o: mypy.nodes.TypeVarTupleExpr) -> str: if o.variance == mypy.nodes.CONTRAVARIANT: a += ["Variance(CONTRAVARIANT)"] if not mypy.types.is_named_instance(o.upper_bound, "builtins.object"): - a += [f"UpperBound({o.upper_bound})"] + a += [f"UpperBound({self.stringify_type(o.upper_bound)})"] return self.dump(a, o) def visit_type_alias_expr(self, o: mypy.nodes.TypeAliasExpr) -> str: - return f"TypeAliasExpr({o.type})" + return f"TypeAliasExpr({self.stringify_type(o.type)})" def visit_namedtuple_expr(self, o: mypy.nodes.NamedTupleExpr) -> str: - return f"NamedTupleExpr:{o.line}({o.info.name}, {o.info.tuple_type})" + return f"NamedTupleExpr:{o.line}({o.info.name}, {self.stringify_type(o.info.tuple_type) if o.info.tuple_type is not None else None})" def visit_enum_call_expr(self, o: mypy.nodes.EnumCallExpr) -> str: return f"EnumCallExpr:{o.line}({o.info.name}, {o.items})" @@ -513,7 +523,7 @@ def visit_typeddict_expr(self, o: mypy.nodes.TypedDictExpr) -> str: return f"TypedDictExpr:{o.line}({o.info.name})" def visit__promote_expr(self, o: mypy.nodes.PromoteExpr) -> str: - return f"PromoteExpr:{o.line}({o.type})" + return f"PromoteExpr:{o.line}({self.stringify_type(o.type)})" def visit_newtype_expr(self, o: mypy.nodes.NewTypeExpr) -> str: return f"NewTypeExpr:{o.line}({o.name}, {self.dump([o.old_type], o)})" @@ -614,7 +624,9 @@ def dump_tagged(nodes: Sequence[object], tag: str | None, str_conv: StrConv) -> elif isinstance(n, mypy.nodes.Node): a.append(indent(n.accept(str_conv), 2)) elif isinstance(n, Type): - a.append(indent(n.accept(TypeStrVisitor(str_conv.id_mapper)), 2)) + a.append( + indent(n.accept(TypeStrVisitor(str_conv.id_mapper, options=str_conv.options)), 2) + ) elif n is not None: a.append(indent(str(n), 2)) if tag: diff --git a/mypy/stubgen.py b/mypy/stubgen.py index 6cb4669887fe..071a238b5714 100755 --- a/mypy/stubgen.py +++ b/mypy/stubgen.py @@ -43,12 +43,13 @@ import argparse import glob +import keyword import os import os.path import sys import traceback from collections import defaultdict -from typing import Iterable, List, Mapping, cast +from typing import Iterable, Mapping from typing_extensions import Final import mypy.build @@ -80,6 +81,7 @@ ClassDef, ComparisonExpr, Decorator, + DictExpr, EllipsisExpr, Expression, FloatExpr, @@ -125,6 +127,8 @@ from mypy.traverser import all_yield_expressions, has_return_statement, has_yield_expression from mypy.types import ( OVERLOAD_NAMES, + TPDICT_NAMES, + TYPED_NAMEDTUPLE_NAMES, AnyType, CallableType, Instance, @@ -306,7 +310,7 @@ class AnnotationPrinter(TypeStrVisitor): # 
TODO: Generate valid string representation for callable types. # TODO: Use short names for Instances. def __init__(self, stubgen: StubGenerator) -> None: - super().__init__() + super().__init__(options=mypy.options.Options()) self.stubgen = stubgen def visit_any(self, t: AnyType) -> str: @@ -396,14 +400,24 @@ def visit_str_expr(self, node: StrExpr) -> str: def visit_index_expr(self, node: IndexExpr) -> str: base = node.base.accept(self) index = node.index.accept(self) + if len(index) > 2 and index.startswith("(") and index.endswith(")"): + index = index[1:-1] return f"{base}[{index}]" def visit_tuple_expr(self, node: TupleExpr) -> str: - return ", ".join(n.accept(self) for n in node.items) + return f"({', '.join(n.accept(self) for n in node.items)})" def visit_list_expr(self, node: ListExpr) -> str: return f"[{', '.join(n.accept(self) for n in node.items)}]" + def visit_dict_expr(self, o: DictExpr) -> str: + dict_items = [] + for key, value in o.items: + # This is currently only used for TypedDict where all keys are strings. + assert isinstance(key, StrExpr) + dict_items.append(f"{key.accept(self)}: {value.accept(self)}") + return f"{{{', '.join(dict_items)}}}" + def visit_ellipsis(self, node: EllipsisExpr) -> str: return "..." @@ -640,6 +654,7 @@ def visit_mypy_file(self, o: MypyFile) -> None: "_typeshed": ["Incomplete"], "typing": ["Any", "TypeVar"], "collections.abc": ["Generator"], + "typing_extensions": ["TypedDict"], } for pkg, imports in known_imports.items(): for t in imports: @@ -848,6 +863,9 @@ def process_name_expr_decorator(self, expr: NameExpr, context: Decorator) -> tup self.add_decorator("property") self.add_decorator("abc.abstractmethod") is_abstract = True + elif self.refers_to_fullname(name, "functools.cached_property"): + self.import_tracker.require_name(name) + self.add_decorator(name) elif self.refers_to_fullname(name, OVERLOAD_NAMES): self.add_decorator(name) self.add_typing_import("overload") @@ -887,12 +905,20 @@ def process_member_expr_decorator( ): if expr.name == "abstractproperty": self.import_tracker.require_name(expr.expr.name) - self.add_decorator("%s" % ("property")) - self.add_decorator("{}.{}".format(expr.expr.name, "abstractmethod")) + self.add_decorator("property") + self.add_decorator(f"{expr.expr.name}.abstractmethod") else: self.import_tracker.require_name(expr.expr.name) self.add_decorator(f"{expr.expr.name}.{expr.name}") is_abstract = True + elif expr.name == "cached_property" and isinstance(expr.expr, NameExpr): + explicit_name = expr.expr.name + reverse = self.import_tracker.reverse_alias.get(explicit_name) + if reverse == "functools" or (reverse is None and explicit_name == "functools"): + if reverse is not None: + self.import_tracker.add_import(reverse, alias=explicit_name) + self.import_tracker.require_name(explicit_name) + self.add_decorator(f"{explicit_name}.{expr.name}") elif expr.name == "coroutine": if ( isinstance(expr.expr, MemberExpr) @@ -986,6 +1012,37 @@ def get_base_types(self, cdef: ClassDef) -> list[str]: elif isinstance(base, IndexExpr): p = AliasPrinter(self) base_types.append(base.accept(p)) + elif isinstance(base, CallExpr): + # namedtuple(typename, fields), NamedTuple(typename, fields) calls can + # be used as a base class. The first argument is a string literal that + # is usually the same as the class name. 
+ # + # Note: + # A call-based named tuple as a base class cannot be safely converted to + # a class-based NamedTuple definition because class attributes defined + # in the body of the class inheriting from the named tuple call are not + # namedtuple fields at runtime. + if self.is_namedtuple(base): + nt_fields = self._get_namedtuple_fields(base) + assert isinstance(base.args[0], StrExpr) + typename = base.args[0].value + if nt_fields is not None: + # A valid namedtuple() call, use NamedTuple() instead with + # Incomplete as field types + fields_str = ", ".join(f"({f!r}, {t})" for f, t in nt_fields) + base_types.append(f"NamedTuple({typename!r}, [{fields_str}])") + self.add_typing_import("NamedTuple") + else: + # Invalid namedtuple() call, cannot determine fields + base_types.append("Incomplete") + elif self.is_typed_namedtuple(base): + p = AliasPrinter(self) + base_types.append(base.accept(p)) + else: + # At this point, we don't know what the base class is, so we + # just use Incomplete as the base class. + base_types.append("Incomplete") + self.import_tracker.require_name("Incomplete") return base_types def visit_block(self, o: Block) -> None: @@ -998,10 +1055,20 @@ def visit_assignment_stmt(self, o: AssignmentStmt) -> None: foundl = [] for lvalue in o.lvalues: - if isinstance(lvalue, NameExpr) and self.is_namedtuple(o.rvalue): - assert isinstance(o.rvalue, CallExpr) + if ( + isinstance(lvalue, NameExpr) + and isinstance(o.rvalue, CallExpr) + and (self.is_namedtuple(o.rvalue) or self.is_typed_namedtuple(o.rvalue)) + ): self.process_namedtuple(lvalue, o.rvalue) continue + if ( + isinstance(lvalue, NameExpr) + and isinstance(o.rvalue, CallExpr) + and self.is_typeddict(o.rvalue) + ): + self.process_typeddict(lvalue, o.rvalue) + continue if ( isinstance(lvalue, NameExpr) and not self.is_private_name(lvalue.name) @@ -1012,7 +1079,7 @@ def visit_assignment_stmt(self, o: AssignmentStmt) -> None: ): self.process_typealias(lvalue, o.rvalue) continue - if isinstance(lvalue, TupleExpr) or isinstance(lvalue, ListExpr): + if isinstance(lvalue, (TupleExpr, ListExpr)): items = lvalue.items if isinstance(o.unanalyzed_type, TupleType): # type: ignore[misc] annotations: Iterable[Type | None] = o.unanalyzed_type.items @@ -1038,36 +1105,148 @@ def visit_assignment_stmt(self, o: AssignmentStmt) -> None: if all(foundl): self._state = VAR - def is_namedtuple(self, expr: Expression) -> bool: - if not isinstance(expr, CallExpr): - return False + def is_namedtuple(self, expr: CallExpr) -> bool: + callee = expr.callee + return ( + isinstance(callee, NameExpr) + and (self.refers_to_fullname(callee.name, "collections.namedtuple")) + ) or ( + isinstance(callee, MemberExpr) + and isinstance(callee.expr, NameExpr) + and f"{callee.expr.name}.{callee.name}" == "collections.namedtuple" + ) + + def is_typed_namedtuple(self, expr: CallExpr) -> bool: callee = expr.callee - return (isinstance(callee, NameExpr) and callee.name.endswith("namedtuple")) or ( - isinstance(callee, MemberExpr) and callee.name == "namedtuple" + return ( + isinstance(callee, NameExpr) + and self.refers_to_fullname(callee.name, TYPED_NAMEDTUPLE_NAMES) + ) or ( + isinstance(callee, MemberExpr) + and isinstance(callee.expr, NameExpr) + and f"{callee.expr.name}.{callee.name}" in TYPED_NAMEDTUPLE_NAMES ) + def _get_namedtuple_fields(self, call: CallExpr) -> list[tuple[str, str]] | None: + if self.is_namedtuple(call): + fields_arg = call.args[1] + if isinstance(fields_arg, StrExpr): + field_names = fields_arg.value.replace(",", " ").split() + elif 
isinstance(fields_arg, (ListExpr, TupleExpr)): + field_names = [] + for field in fields_arg.items: + if not isinstance(field, StrExpr): + return None + field_names.append(field.value) + else: + return None # Invalid namedtuple fields type + if field_names: + self.import_tracker.require_name("Incomplete") + return [(field_name, "Incomplete") for field_name in field_names] + elif self.is_typed_namedtuple(call): + fields_arg = call.args[1] + if not isinstance(fields_arg, (ListExpr, TupleExpr)): + return None + fields: list[tuple[str, str]] = [] + b = AliasPrinter(self) + for field in fields_arg.items: + if not (isinstance(field, TupleExpr) and len(field.items) == 2): + return None + field_name, field_type = field.items + if not isinstance(field_name, StrExpr): + return None + fields.append((field_name.value, field_type.accept(b))) + return fields + else: + return None # Not a named tuple call + def process_namedtuple(self, lvalue: NameExpr, rvalue: CallExpr) -> None: if self._state != EMPTY: self.add("\n") - if isinstance(rvalue.args[1], StrExpr): - items = rvalue.args[1].value.replace(",", " ").split() - elif isinstance(rvalue.args[1], (ListExpr, TupleExpr)): - list_items = cast(List[StrExpr], rvalue.args[1].items) - items = [item.value for item in list_items] - else: + fields = self._get_namedtuple_fields(rvalue) + if fields is None: self.add(f"{self._indent}{lvalue.name}: Incomplete") self.import_tracker.require_name("Incomplete") return self.import_tracker.require_name("NamedTuple") self.add(f"{self._indent}class {lvalue.name}(NamedTuple):") - if len(items) == 0: + if len(fields) == 0: self.add(" ...\n") + self._state = EMPTY_CLASS else: - self.import_tracker.require_name("Incomplete") self.add("\n") - for item in items: - self.add(f"{self._indent} {item}: Incomplete\n") - self._state = CLASS + for f_name, f_type in fields: + self.add(f"{self._indent} {f_name}: {f_type}\n") + self._state = CLASS + + def is_typeddict(self, expr: CallExpr) -> bool: + callee = expr.callee + return ( + isinstance(callee, NameExpr) and self.refers_to_fullname(callee.name, TPDICT_NAMES) + ) or ( + isinstance(callee, MemberExpr) + and isinstance(callee.expr, NameExpr) + and f"{callee.expr.name}.{callee.name}" in TPDICT_NAMES + ) + + def process_typeddict(self, lvalue: NameExpr, rvalue: CallExpr) -> None: + if self._state != EMPTY: + self.add("\n") + + if not isinstance(rvalue.args[0], StrExpr): + self.add(f"{self._indent}{lvalue.name}: Incomplete") + self.import_tracker.require_name("Incomplete") + return + + items: list[tuple[str, Expression]] = [] + total: Expression | None = None + if len(rvalue.args) > 1 and rvalue.arg_kinds[1] == ARG_POS: + if not isinstance(rvalue.args[1], DictExpr): + self.add(f"{self._indent}{lvalue.name}: Incomplete") + self.import_tracker.require_name("Incomplete") + return + for attr_name, attr_type in rvalue.args[1].items: + if not isinstance(attr_name, StrExpr): + self.add(f"{self._indent}{lvalue.name}: Incomplete") + self.import_tracker.require_name("Incomplete") + return + items.append((attr_name.value, attr_type)) + if len(rvalue.args) > 2: + if rvalue.arg_kinds[2] != ARG_NAMED or rvalue.arg_names[2] != "total": + self.add(f"{self._indent}{lvalue.name}: Incomplete") + self.import_tracker.require_name("Incomplete") + return + total = rvalue.args[2] + else: + for arg_name, arg in zip(rvalue.arg_names[1:], rvalue.args[1:]): + if not isinstance(arg_name, str): + self.add(f"{self._indent}{lvalue.name}: Incomplete") + self.import_tracker.require_name("Incomplete") + return + if 
arg_name == "total": + total = arg + else: + items.append((arg_name, arg)) + self.import_tracker.require_name("TypedDict") + p = AliasPrinter(self) + if any(not key.isidentifier() or keyword.iskeyword(key) for key, _ in items): + # Keep the call syntax if there are non-identifier or keyword keys. + self.add(f"{self._indent}{lvalue.name} = {rvalue.accept(p)}\n") + self._state = VAR + else: + bases = "TypedDict" + # TODO: Add support for generic TypedDicts. Requires `Generic` as base class. + if total is not None: + bases += f", total={total.accept(p)}" + self.add(f"{self._indent}class {lvalue.name}({bases}):") + if len(items) == 0: + self.add(" ...\n") + self._state = EMPTY_CLASS + else: + self.add("\n") + for key, key_type in items: + self.add(f"{self._indent} {key}: {key_type.accept(p)}\n") + self._state = CLASS def is_alias_expression(self, expr: Expression, top_level: bool = True) -> bool: """Return True for things that look like target for an alias. @@ -1575,6 +1754,14 @@ def mypy_options(stubgen_options: Options) -> MypyOptions: options.python_version = stubgen_options.pyversion options.show_traceback = True options.transform_source = remove_misplaced_type_comments + options.preserve_asts = True + + # Override cache_dir if provided in the environment + environ_cache_dir = os.getenv("MYPY_CACHE_DIR", "") + if environ_cache_dir.strip(): + options.cache_dir = environ_cache_dir + options.cache_dir = os.path.expanduser(options.cache_dir) + return options @@ -1588,7 +1775,7 @@ def parse_source_file(mod: StubSource, mypy_options: MypyOptions) -> None: with open(mod.path, "rb") as f: data = f.read() source = mypy.util.decode_python_encoding(data) - errors = Errors() + errors = Errors(mypy_options) mod.ast = mypy.parse.parse( source, fnam=mod.path, module=mod.module, errors=errors, options=mypy_options ) @@ -1654,8 +1841,8 @@ def generate_stub_from_ast( file.write("".join(gen.output())) -def get_sig_generators(options: Options) -> List[SignatureGenerator]: - sig_generators: List[SignatureGenerator] = [ +def get_sig_generators(options: Options) -> list[SignatureGenerator]: + sig_generators: list[SignatureGenerator] = [ DocstringSignatureGenerator(), FallbackSignatureGenerator(), ] @@ -1707,6 +1894,7 @@ def generate_stubs(options: Options) -> None: ) # Separately analyse C modules using different logic. 
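The stubgen changes above convert call-based `namedtuple` and `TypedDict` definitions into class-based stubs when the fields can be determined. A hedged before/after illustration; `example_module.py` and the names in it are made up, and the exact import lines in the generated stub may differ:

```python
# example_module.py -- input handled by the new stubgen code paths
from collections import namedtuple
from typing_extensions import TypedDict

Point = namedtuple("Point", ["x", "y"])
Movie = TypedDict("Movie", {"name": str, "year": int}, total=False)

# `stubgen example_module.py` is now expected to emit roughly:
#
#     class Point(NamedTuple):
#         x: Incomplete
#         y: Incomplete
#
#     class Movie(TypedDict, total=False):
#         name: str
#         year: int
```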
+ all_modules = sorted(m.module for m in (py_modules + c_modules)) for mod in c_modules: if any(py_mod.module.startswith(mod.module + ".") for py_mod in py_modules + c_modules): target = mod.module.replace(".", "/") + "/__init__.pyi" @@ -1715,7 +1903,9 @@ def generate_stubs(options: Options) -> None: target = os.path.join(options.output_dir, target) files.append(target) with generate_guarded(mod.module, target, options.ignore_errors, options.verbose): - generate_stub_for_c_module(mod.module, target, sig_generators=sig_generators) + generate_stub_for_c_module( + mod.module, target, known_modules=all_modules, sig_generators=sig_generators + ) num_modules = len(py_modules) + len(c_modules) if not options.quiet and num_modules > 0: print("Processed %d modules" % num_modules) diff --git a/mypy/stubgenc.py b/mypy/stubgenc.py index add33e66cee3..4fc9f8c6fdfa 100755 --- a/mypy/stubgenc.py +++ b/mypy/stubgenc.py @@ -44,6 +44,17 @@ class SignatureGenerator: """Abstract base class for extracting a list of FunctionSigs for each function.""" + def remove_self_type( + self, inferred: list[FunctionSig] | None, self_var: str + ) -> list[FunctionSig] | None: + """Remove type annotation from self/cls argument""" + if inferred: + for signature in inferred: + if signature.args: + if signature.args[0].name == self_var: + signature.args[0].type = None + return inferred + @abstractmethod def get_function_sig( self, func: object, module_name: str, name: str @@ -52,7 +63,7 @@ def get_function_sig( @abstractmethod def get_method_sig( - self, func: object, module_name: str, class_name: str, name: str, self_var: str + self, cls: type, func: object, module_name: str, class_name: str, name: str, self_var: str ) -> list[FunctionSig] | None: pass @@ -83,7 +94,7 @@ def get_function_sig( return None def get_method_sig( - self, func: object, module_name: str, class_name: str, name: str, self_var: str + self, cls: type, func: object, module_name: str, class_name: str, name: str, self_var: str ) -> list[FunctionSig] | None: if ( name in ("__new__", "__init__") @@ -94,10 +105,11 @@ def get_method_sig( FunctionSig( name=name, args=infer_arg_sig_from_anon_docstring(self.class_sigs[class_name]), - ret_type="None" if name == "__init__" else "Any", + ret_type=infer_method_ret_type(name), ) ] - return self.get_function_sig(func, module_name, name) + inferred = self.get_function_sig(func, module_name, name) + return self.remove_self_type(inferred, self_var) class DocstringSignatureGenerator(SignatureGenerator): @@ -114,9 +126,19 @@ def get_function_sig( return inferred def get_method_sig( - self, func: object, module_name: str, class_name: str, name: str, self_var: str + self, + cls: type, + func: object, + module_name: str, + class_name: str, + func_name: str, + self_var: str, ) -> list[FunctionSig] | None: - return self.get_function_sig(func, module_name, name) + inferred = self.get_function_sig(func, module_name, func_name) + if not inferred and func_name == "__init__": + # look for class-level constructor signatures of the form () + inferred = self.get_function_sig(cls, module_name, class_name) + return self.remove_self_type(inferred, self_var) class FallbackSignatureGenerator(SignatureGenerator): @@ -132,19 +154,22 @@ def get_function_sig( ] def get_method_sig( - self, func: object, module_name: str, class_name: str, name: str, self_var: str + self, cls: type, func: object, module_name: str, class_name: str, name: str, self_var: str ) -> list[FunctionSig] | None: return [ FunctionSig( name=name, - args=infer_method_sig(name, 
self_var), - ret_type="None" if name == "__init__" else "Any", + args=infer_method_args(name, self_var), + ret_type=infer_method_ret_type(name), ) ] def generate_stub_for_c_module( - module_name: str, target: str, sig_generators: Iterable[SignatureGenerator] + module_name: str, + target: str, + known_modules: list[str], + sig_generators: Iterable[SignatureGenerator], ) -> None: """Generate stub for C module. @@ -166,11 +191,17 @@ def generate_stub_for_c_module( imports: list[str] = [] functions: list[str] = [] done = set() - items = sorted(module.__dict__.items(), key=lambda x: x[0]) + items = sorted(get_members(module), key=lambda x: x[0]) for name, obj in items: if is_c_function(obj): generate_c_function_stub( - module, name, obj, functions, imports=imports, sig_generators=sig_generators + module, + name, + obj, + output=functions, + known_modules=known_modules, + imports=imports, + sig_generators=sig_generators, ) done.add(name) types: list[str] = [] @@ -179,7 +210,13 @@ def generate_stub_for_c_module( continue if is_c_type(obj): generate_c_type_stub( - module, name, obj, types, imports=imports, sig_generators=sig_generators + module, + name, + obj, + output=types, + known_modules=known_modules, + imports=imports, + sig_generators=sig_generators, ) done.add(name) variables = [] @@ -187,7 +224,9 @@ def generate_stub_for_c_module( if name.startswith("__") and name.endswith("__"): continue if name not in done and not inspect.ismodule(obj): - type_str = strip_or_import(get_type_fullname(type(obj)), module, imports) + type_str = strip_or_import( + get_type_fullname(type(obj)), module, known_modules, imports + ) variables.append(f"{name}: {type_str}") output = sorted(set(imports)) for line in variables: @@ -215,7 +254,23 @@ def add_typing_import(output: list[str]) -> list[str]: if names: return [f"from typing import {', '.join(names)}", ""] + output else: - return output[:] + return output.copy() + + +def get_members(obj: object) -> list[tuple[str, Any]]: + obj_dict: Mapping[str, Any] = getattr(obj, "__dict__") # noqa: B009 + results = [] + for name in obj_dict: + if is_skipped_attribute(name): + continue + # Try to get the value via getattr + try: + value = getattr(obj, name) + except AttributeError: + continue + else: + results.append((name, value)) + return results def is_c_function(obj: object) -> bool: @@ -257,10 +312,13 @@ def generate_c_function_stub( module: ModuleType, name: str, obj: object, + *, + known_modules: list[str], + sig_generators: Iterable[SignatureGenerator], output: list[str], imports: list[str], - sig_generators: Iterable[SignatureGenerator], self_var: str | None = None, + cls: type | None = None, class_name: str | None = None, ) -> None: """Generate stub for a single function or method. 
@@ -273,13 +331,16 @@ def generate_c_function_stub( inferred: list[FunctionSig] | None = None if class_name: # method: + assert cls is not None, "cls should be provided for methods" assert self_var is not None, "self_var should be provided for methods" for sig_gen in sig_generators: - inferred = sig_gen.get_method_sig(obj, module.__name__, class_name, name, self_var) + inferred = sig_gen.get_method_sig( + cls, obj, module.__name__, class_name, name, self_var + ) if inferred: # add self/cls var, if not present for sig in inferred: - if not sig.args or sig.args[0].name != self_var: + if not sig.args or sig.args[0].name not in ("self", "cls"): sig.args.insert(0, ArgSig(name=self_var)) break else: @@ -295,7 +356,6 @@ def generate_c_function_stub( "if FallbackSignatureGenerator is provided" ) - is_classmethod = self_var == "cls" is_overloaded = len(inferred) > 1 if inferred else False if is_overloaded: imports.append("from typing import overload") @@ -303,35 +363,35 @@ def generate_c_function_stub( for signature in inferred: args: list[str] = [] for arg in signature.args: - if arg.name == self_var: - arg_def = self_var - else: - arg_def = arg.name - if arg_def == "None": - arg_def = "_none" # None is not a valid argument name + arg_def = arg.name + if arg_def == "None": + arg_def = "_none" # None is not a valid argument name - if arg.type: - arg_def += ": " + strip_or_import(arg.type, module, imports) + if arg.type: + arg_def += ": " + strip_or_import(arg.type, module, known_modules, imports) - if arg.default: - arg_def += " = ..." + if arg.default: + arg_def += " = ..." args.append(arg_def) if is_overloaded: output.append("@overload") - if is_classmethod: + # a sig generator indicates @classmethod by specifying the cls arg + if class_name and signature.args and signature.args[0].name == "cls": output.append("@classmethod") output.append( "def {function}({args}) -> {ret}: ...".format( function=name, args=", ".join(args), - ret=strip_or_import(signature.ret_type, module, imports), + ret=strip_or_import(signature.ret_type, module, known_modules, imports), ) ) -def strip_or_import(typ: str, module: ModuleType, imports: list[str]) -> str: +def strip_or_import( + typ: str, module: ModuleType, known_modules: list[str], imports: list[str] +) -> str: """Strips unnecessary module names from typ. If typ represents a type that is inside module or is a type coming from builtins, remove @@ -340,21 +400,33 @@ def strip_or_import(typ: str, module: ModuleType, imports: list[str]) -> str: Arguments: typ: name of the type module: in which this type is used + known_modules: other modules being processed imports: list of import statements (may be modified during the call) """ + local_modules = ["builtins"] + if module: + local_modules.append(module.__name__) + stripped_type = typ if any(c in typ for c in "[,"): for subtyp in re.split(r"[\[,\]]", typ): - strip_or_import(subtyp.strip(), module, imports) - if module: - stripped_type = re.sub(r"(^|[\[, ]+)" + re.escape(module.__name__ + "."), r"\1", typ) - elif module and typ.startswith(module.__name__ + "."): - stripped_type = typ[len(module.__name__) + 1 :] + stripped_subtyp = strip_or_import(subtyp.strip(), module, known_modules, imports) + if stripped_subtyp != subtyp: + stripped_type = re.sub( + r"(^|[\[, ]+)" + re.escape(subtyp) + r"($|[\], ]+)", + r"\1" + stripped_subtyp + r"\2", + stripped_type, + ) elif "." 
in typ: - arg_module = typ[: typ.rindex(".")] - if arg_module == "builtins": - stripped_type = typ[len("builtins") + 1 :] + for module_name in local_modules + list(reversed(known_modules)): + if typ.startswith(module_name + "."): + if module_name in local_modules: + stripped_type = typ[len(module_name) + 1 :] + arg_module = module_name + break else: + arg_module = typ[: typ.rindex(".")] + if arg_module not in local_modules: imports.append(f"import {arg_module}") if stripped_type == "NoneType": stripped_type = "None" @@ -373,6 +445,7 @@ def generate_c_property_stub( ro_properties: list[str], readonly: bool, module: ModuleType | None = None, + known_modules: list[str] | None = None, imports: list[str] | None = None, ) -> None: """Generate property stub using introspection of 'obj'. @@ -392,10 +465,6 @@ def infer_prop_type(docstr: str | None) -> str | None: else: return None - # Ignore special properties/attributes. - if is_skipped_attribute(name): - return - inferred = infer_prop_type(getattr(obj, "__doc__", None)) if not inferred: fget = getattr(obj, "fget", None) @@ -403,8 +472,8 @@ def infer_prop_type(docstr: str | None) -> str | None: if not inferred: inferred = "Any" - if module is not None and imports is not None: - inferred = strip_or_import(inferred, module, imports) + if module is not None and imports is not None and known_modules is not None: + inferred = strip_or_import(inferred, module, known_modules, imports) if is_static_property(obj): trailing_comment = " # read-only" if readonly else "" @@ -422,6 +491,7 @@ def generate_c_type_stub( class_name: str, obj: type, output: list[str], + known_modules: list[str], imports: list[str], sig_generators: Iterable[SignatureGenerator], ) -> None: @@ -430,69 +500,75 @@ def generate_c_type_stub( The result lines will be appended to 'output'. If necessary, any required names will be added to 'imports'. """ - # typeshed gives obj.__dict__ the not quite correct type Dict[str, Any] - # (it could be a mappingproxy!), which makes mypyc mad, so obfuscate it. - obj_dict: Mapping[str, Any] = getattr(obj, "__dict__") # noqa: B009 - items = sorted(obj_dict.items(), key=lambda x: method_name_sort_key(x[0])) + raw_lookup = getattr(obj, "__dict__") # noqa: B009 + items = sorted(get_members(obj), key=lambda x: method_name_sort_key(x[0])) + names = set(x[0] for x in items) methods: list[str] = [] types: list[str] = [] static_properties: list[str] = [] rw_properties: list[str] = [] ro_properties: list[str] = [] - done: set[str] = set() + attrs: list[tuple[str, Any]] = [] for attr, value in items: + # use unevaluated descriptors when dealing with property inspection + raw_value = raw_lookup.get(attr, value) if is_c_method(value) or is_c_classmethod(value): - done.add(attr) - if not is_skipped_attribute(attr): - if attr == "__new__": - # TODO: We should support __new__. - if "__init__" in obj_dict: - # Avoid duplicate functions if both are present. - # But is there any case where .__new__() has a - # better signature than __init__() ? - continue - attr = "__init__" - if is_c_classmethod(value): - self_var = "cls" - else: - self_var = "self" - generate_c_function_stub( - module, - attr, - value, - methods, - imports=imports, - self_var=self_var, - class_name=class_name, - sig_generators=sig_generators, - ) - elif is_c_property(value): - done.add(attr) - generate_c_property_stub( + if attr == "__new__": + # TODO: We should support __new__. + if "__init__" in names: + # Avoid duplicate functions if both are present. 
+ # But is there any case where .__new__() has a + # better signature than __init__() ? + continue + attr = "__init__" + if is_c_classmethod(value): + self_var = "cls" + else: + self_var = "self" + generate_c_function_stub( + module, attr, value, + output=methods, + known_modules=known_modules, + imports=imports, + self_var=self_var, + cls=obj, + class_name=class_name, + sig_generators=sig_generators, + ) + elif is_c_property(raw_value): + generate_c_property_stub( + attr, + raw_value, static_properties, rw_properties, ro_properties, - is_c_property_readonly(value), + is_c_property_readonly(raw_value), module=module, + known_modules=known_modules, imports=imports, ) elif is_c_type(value): generate_c_type_stub( - module, attr, value, types, imports=imports, sig_generators=sig_generators + module, + attr, + value, + types, + imports=imports, + known_modules=known_modules, + sig_generators=sig_generators, ) - done.add(attr) + else: + attrs.append((attr, value)) - for attr, value in items: - if is_skipped_attribute(attr): - continue - if attr not in done: - static_properties.append( - "{}: ClassVar[{}] = ...".format( - attr, strip_or_import(get_type_fullname(type(value)), module, imports) - ) + for attr, value in attrs: + static_properties.append( + "{}: ClassVar[{}] = ...".format( + attr, + strip_or_import(get_type_fullname(type(value)), module, known_modules, imports), ) + ) all_bases = type.mro(obj) if all_bases[-1] is object: # TODO: Is this always object? @@ -510,7 +586,8 @@ def generate_c_type_stub( bases.append(base) if bases: bases_str = "(%s)" % ", ".join( - strip_or_import(get_type_fullname(base), module, imports) for base in bases + strip_or_import(get_type_fullname(base), module, known_modules, imports) + for base in bases ) else: bases_str = "" @@ -559,6 +636,7 @@ def is_pybind_skipped_attribute(attr: str) -> bool: def is_skipped_attribute(attr: str) -> bool: return attr in ( + "__class__", "__getattribute__", "__str__", "__repr__", @@ -571,7 +649,7 @@ def is_skipped_attribute(attr: str) -> bool: ) -def infer_method_sig(name: str, self_var: str | None = None) -> list[ArgSig]: +def infer_method_args(name: str, self_var: str | None = None) -> list[ArgSig]: args: list[ArgSig] | None = None if name.startswith("__") and name.endswith("__"): name = name[2:-2] @@ -673,3 +751,18 @@ def infer_method_sig(name: str, self_var: str | None = None) -> list[ArgSig]: if args is None: args = [ArgSig(name="*args"), ArgSig(name="**kwargs")] return [ArgSig(name=self_var or "self")] + args + + +def infer_method_ret_type(name: str) -> str: + if name.startswith("__") and name.endswith("__"): + name = name[2:-2] + if name in ("float", "bool", "bytes", "int"): + return name + # Note: __eq__ and co may return arbitrary types, but bool is good enough for stubgen. + elif name in ("eq", "ne", "lt", "le", "gt", "ge", "contains"): + return "bool" + elif name in ("len", "hash", "sizeof", "trunc", "floor", "ceil"): + return "int" + elif name in ("init", "setitem"): + return "None" + return "Any" diff --git a/mypy/stubinfo.py b/mypy/stubinfo.py index 31e2b34d9fe6..e6e549ad280f 100644 --- a/mypy/stubinfo.py +++ b/mypy/stubinfo.py @@ -20,8 +20,6 @@ def stub_package_name(prefix: str) -> str: # Package name can have one or two components ('a' or 'a.b'). 
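The fallback inference above is now split into `infer_method_args` and `infer_method_ret_type`, so stubgen can guess both the argument list and the return type of a dunder from its name alone. A minimal sketch of how the two helpers combine into a stub line, assuming they are importable from `mypy.stubgenc`; the expected outputs match the unit tests further down in this diff:

```python
# Minimal sketch: build a fallback stub line for a dunder method from its
# name alone, using the two helpers introduced above. The import path is
# an assumption (the helpers live in mypy's C-extension stub generator).
from mypy.stubgenc import infer_method_args, infer_method_ret_type


def fallback_stub_line(name: str) -> str:
    args = ", ".join(arg.name for arg in infer_method_args(name))
    return f"def {name}({args}) -> {infer_method_ret_type(name)}: ..."


print(fallback_stub_line("__hash__"))     # def __hash__(self) -> int: ...
print(fallback_stub_line("__setitem__"))  # def __setitem__(self, index, object) -> None: ...
print(fallback_stub_line("__getitem__"))  # def __getitem__(self, index) -> Any: ...
```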
legacy_bundled_packages = { "aiofiles": "types-aiofiles", - "backports": "types-backports", - "backports_abc": "types-backports_abc", "bleach": "types-bleach", "boto": "types-boto", "cachetools": "types-cachetools", diff --git a/mypy/stubtest.py b/mypy/stubtest.py index cd173f63e2a1..7b4a3b223e00 100644 --- a/mypy/stubtest.py +++ b/mypy/stubtest.py @@ -25,8 +25,8 @@ from contextlib import redirect_stderr, redirect_stdout from functools import singledispatch from pathlib import Path -from typing import Any, Generic, Iterator, TypeVar, Union, cast -from typing_extensions import get_origin +from typing import Any, Generic, Iterator, TypeVar, Union +from typing_extensions import get_origin, is_typeddict import mypy.build import mypy.modulefinder @@ -419,14 +419,29 @@ class SubClass(runtime): # type: ignore[misc] # Examples: ctypes.Array, ctypes._SimpleCData pass + # Runtime class might be annotated with `@final`: + try: + runtime_final = getattr(runtime, "__final__", False) + except Exception: + runtime_final = False + + if runtime_final and not stub.is_final: + yield Error( + object_path, + "has `__final__` attribute, but isn't marked with @final in the stub", + stub, + runtime, + stub_desc=repr(stub), + ) + def _verify_metaclass( - stub: nodes.TypeInfo, runtime: type[Any], object_path: list[str] + stub: nodes.TypeInfo, runtime: type[Any], object_path: list[str], *, is_runtime_typeddict: bool ) -> Iterator[Error]: # We exclude protocols, because of how complex their implementation is in different versions of - # python. Enums are also hard, ignoring. + # python. Enums are also hard, as are runtime TypedDicts; ignoring. # TODO: check that metaclasses are identical? - if not stub.is_protocol and not stub.is_enum: + if not stub.is_protocol and not stub.is_enum and not is_runtime_typeddict: runtime_metaclass = type(runtime) if runtime_metaclass is not type and stub.metaclass_type is None: # This means that runtime has a custom metaclass, but a stub does not. @@ -470,18 +485,18 @@ def verify_typeinfo( return yield from _verify_final(stub, runtime, object_path) - yield from _verify_metaclass(stub, runtime, object_path) + is_runtime_typeddict = stub.typeddict_type is not None and is_typeddict(runtime) + yield from _verify_metaclass( + stub, runtime, object_path, is_runtime_typeddict=is_runtime_typeddict + ) # Check everything already defined on the stub class itself (i.e. not inherited) to_check = set(stub.names) # Check all public things on the runtime class to_check.update( - # cast to workaround mypyc complaints - m - for m in cast(Any, vars)(runtime) - if not is_probably_private(m) and m not in IGNORABLE_CLASS_DUNDERS + m for m in vars(runtime) if not is_probably_private(m) and m not in IGNORABLE_CLASS_DUNDERS ) - # Special-case the __init__ method for Protocols + # Special-case the __init__ method for Protocols and the __new__ method for TypedDicts # # TODO: On Python <3.11, __init__ methods on Protocol classes # are silently discarded and replaced. @@ -489,6 +504,8 @@ def verify_typeinfo( # Ideally, we'd figure out a good way of validating Protocol __init__ methods on 3.11+. 
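The class-level check added above keys off the `__final__` attribute that `typing.final` (Python 3.11+) and `typing_extensions.final` set on decorated objects at runtime, which is also what `_verify_final_method` reads later in this diff. A small standalone sketch of that runtime behaviour:

```python
# Sketch of the runtime marker the new stubtest checks read back with
# getattr(runtime, "__final__", False): typing.final (3.11+) and
# typing_extensions.final set __final__ = True on the decorated object.
from typing_extensions import final


@final
class Plugin:
    @final
    def run(self) -> None: ...


print(getattr(Plugin, "__final__", False))      # True
print(getattr(Plugin.run, "__final__", False))  # True
```

Note that `property` objects reject attribute assignment, so `@final` stacked on top of `@property` leaves no runtime trace (the commented-out `K2` case later in this diff); with the opposite ordering the marker lands on `fget`, which is why `_verify_readonly_property` now forwards `runtime.fget` to `_verify_final_method`.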
if stub.is_protocol: to_check.discard("__init__") + if is_runtime_typeddict: + to_check.discard("__new__") for entry in sorted(to_check): mangled_entry = entry @@ -517,8 +534,21 @@ def verify_typeinfo( yield from verify(stub_to_verify, runtime_attr, object_path + [entry]) +def _static_lookup_runtime(object_path: list[str]) -> MaybeMissing[Any]: + static_runtime = importlib.import_module(object_path[0]) + for entry in object_path[1:]: + try: + static_runtime = inspect.getattr_static(static_runtime, entry) + except AttributeError: + # This can happen with mangled names, ignore for now. + # TODO: pass more information about ancestors of nodes/objects to verify, so we don't + # have to do this hacky lookup. Would be useful in several places. + return MISSING + return static_runtime + + def _verify_static_class_methods( - stub: nodes.FuncBase, runtime: Any, object_path: list[str] + stub: nodes.FuncBase, runtime: Any, static_runtime: MaybeMissing[Any], object_path: list[str] ) -> Iterator[str]: if stub.name in ("__new__", "__init_subclass__", "__class_getitem__"): # Special cased by Python, so don't bother checking @@ -533,16 +563,8 @@ def _verify_static_class_methods( yield "stub is a classmethod but runtime is not" return - # Look the object up statically, to avoid binding by the descriptor protocol - static_runtime = importlib.import_module(object_path[0]) - for entry in object_path[1:]: - try: - static_runtime = inspect.getattr_static(static_runtime, entry) - except AttributeError: - # This can happen with mangled names, ignore for now. - # TODO: pass more information about ancestors of nodes/objects to verify, so we don't - # have to do this hacky lookup. Would be useful in a couple other places too. - return + if static_runtime is MISSING: + return if isinstance(static_runtime, classmethod) and not stub.is_class: yield "runtime is a classmethod but stub is not" @@ -933,11 +955,16 @@ def verify_funcitem( if not callable(runtime): return + # Look the object up statically, to avoid binding by the descriptor protocol + static_runtime = _static_lookup_runtime(object_path) + if isinstance(stub, nodes.FuncDef): for error_text in _verify_abstract_status(stub, runtime): yield Error(object_path, error_text, stub, runtime) + for error_text in _verify_final_method(stub, runtime, static_runtime): + yield Error(object_path, error_text, stub, runtime) - for message in _verify_static_class_methods(stub, runtime, object_path): + for message in _verify_static_class_methods(stub, runtime, static_runtime, object_path): yield Error(object_path, "is inconsistent, " + message, stub, runtime) signature = safe_inspect_signature(runtime) @@ -1040,9 +1067,26 @@ def verify_overloadedfuncdef( if not callable(runtime): return - for message in _verify_static_class_methods(stub, runtime, object_path): + # mypy doesn't allow overloads where one overload is abstract but another isn't, + # so it should be okay to just check whether the first overload is abstract or not. + # + # TODO: Mypy *does* allow properties where e.g. the getter is abstract but the setter is not; + # and any property with a setter is represented as an OverloadedFuncDef internally; + # not sure exactly what (if anything) we should do about that. 
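`_static_lookup_runtime`, factored out above, deliberately walks the object path with `inspect.getattr_static` rather than `getattr`, because normal attribute access triggers the descriptor protocol and would hide whether the runtime object is really a `classmethod` or `staticmethod`. A quick standalone illustration of the difference:

```python
# Plain getattr() binds the descriptor and yields a bound method, while
# inspect.getattr_static() returns the raw classmethod object that
# _verify_static_class_methods inspects with isinstance checks.
import inspect


class C:
    @classmethod
    def make(cls) -> "C":
        return cls()


print(type(getattr(C, "make")).__name__)                           # method
print(type(inspect.getattr_static(C, "make")).__name__)            # classmethod
print(isinstance(inspect.getattr_static(C, "make"), classmethod))  # True
```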
+ first_part = stub.items[0] + if isinstance(first_part, nodes.Decorator) and first_part.is_overload: + for msg in _verify_abstract_status(first_part.func, runtime): + yield Error(object_path, msg, stub, runtime) + + # Look the object up statically, to avoid binding by the descriptor protocol + static_runtime = _static_lookup_runtime(object_path) + + for message in _verify_static_class_methods(stub, runtime, static_runtime, object_path): yield Error(object_path, "is inconsistent, " + message, stub, runtime) + # TODO: Should call _verify_final_method here, + # but overloaded final methods in stubs cause a stubtest crash: see #14950 + signature = safe_inspect_signature(runtime) if not signature: return @@ -1062,7 +1106,7 @@ def verify_overloadedfuncdef( "is inconsistent, " + message, stub, runtime, - stub_desc=str(stub.type) + f"\nInferred signature: {stub_sig}", + stub_desc=(str(stub.type)) + f"\nInferred signature: {stub_sig}", runtime_desc="def " + str(signature), ) @@ -1103,6 +1147,7 @@ def verify_paramspecexpr( def _verify_readonly_property(stub: nodes.Decorator, runtime: Any) -> Iterator[str]: assert stub.func.is_property if isinstance(runtime, property): + yield from _verify_final_method(stub.func, runtime.fget, MISSING) return if inspect.isdatadescriptor(runtime): # It's enough like a property... @@ -1131,6 +1176,17 @@ def _verify_abstract_status(stub: nodes.FuncDef, runtime: Any) -> Iterator[str]: yield f"is inconsistent, runtime {item_type} is abstract but stub is not" +def _verify_final_method( + stub: nodes.FuncDef, runtime: Any, static_runtime: MaybeMissing[Any] +) -> Iterator[str]: + if stub.is_final: + return + if getattr(runtime, "__final__", False) or ( + static_runtime is not MISSING and getattr(static_runtime, "__final__", False) + ): + yield "is decorated with @final at runtime, but not in the stub" + + def _resolve_funcitem_from_decorator(dec: nodes.OverloadPart) -> nodes.FuncItem | None: """Returns a FuncItem that corresponds to the output of the decorator. @@ -1342,7 +1398,7 @@ def verify_typealias( "__origin__", "__args__", "__orig_bases__", - "__final__", + "__final__", # Has a specialized check # Consider removing __slots__? "__slots__", } @@ -1611,7 +1667,7 @@ def strip_comments(s: str) -> str: return s.strip() with open(allowlist_file) as f: - for line in f.readlines(): + for line in f: entry = strip_comments(line) if entry: yield entry diff --git a/mypy/subtypes.py b/mypy/subtypes.py index c3d5517d43dd..6f9d6e84c34e 100644 --- a/mypy/subtypes.py +++ b/mypy/subtypes.py @@ -278,11 +278,7 @@ def _is_subtype( left = get_proper_type(left) right = get_proper_type(right) - if not proper_subtype and ( - isinstance(right, AnyType) - or isinstance(right, UnboundType) - or isinstance(right, ErasedType) - ): + if not proper_subtype and isinstance(right, (AnyType, UnboundType, ErasedType)): # TODO: should we consider all types proper subtypes of UnboundType and/or # ErasedType as we do for non-proper subtyping. return True @@ -388,8 +384,7 @@ def _is_subtype(self, left: Type, right: Type) -> bool: return is_proper_subtype(left, right, subtype_context=self.subtype_context) return is_subtype(left, right, subtype_context=self.subtype_context) - # visit_x(left) means: is left (which is an instance of X) a subtype of - # right? + # visit_x(left) means: is left (which is an instance of X) a subtype of right? def visit_unbound_type(self, left: UnboundType) -> bool: # This can be called if there is a bad type annotation. 
The result probably @@ -445,6 +440,8 @@ def visit_instance(self, left: Instance) -> bool: if isinstance(right, Instance): if type_state.is_cached_subtype_check(self._subtype_kind, left, right): return True + if type_state.is_cached_negative_subtype_check(self._subtype_kind, left, right): + return False if not self.subtype_context.ignore_promotions: for base in left.type.mro: if base._promote and any( @@ -599,11 +596,17 @@ def check_mixed( nominal = False if nominal: type_state.record_subtype_cache_entry(self._subtype_kind, left, right) + else: + type_state.record_negative_subtype_cache_entry(self._subtype_kind, left, right) return nominal if right.type.is_protocol and is_protocol_implementation( left, right, proper_subtype=self.proper_subtype ): return True + # We record negative cache entry here, and not in the protocol check like we do for + # positive cache, to avoid accidentally adding a type that is not a structural + # subtype, but is a nominal subtype (involving type: ignore override). + type_state.record_negative_subtype_cache_entry(self._subtype_kind, left, right) return False if isinstance(right, TypeType): item = right.item @@ -661,7 +664,7 @@ def visit_unpack_type(self, left: UnpackType) -> bool: return False def visit_parameters(self, left: Parameters) -> bool: - if isinstance(self.right, Parameters) or isinstance(self.right, CallableType): + if isinstance(self.right, (Parameters, CallableType)): right = self.right if isinstance(right, CallableType): right = right.with_unpacked_kwargs() @@ -910,13 +913,9 @@ def visit_union_type(self, left: UnionType) -> bool: for item in _flattened(self.right.relevant_items()): p_item = get_proper_type(item) - if isinstance(p_item, LiteralType): - fast_check.add(p_item) - elif isinstance(p_item, Instance): - if p_item.last_known_value is None: - fast_check.add(p_item) - else: - fast_check.add(p_item.last_known_value) + fast_check.add(p_item) + if isinstance(p_item, Instance) and p_item.last_known_value is not None: + fast_check.add(p_item.last_known_value) for item in left.relevant_items(): p_item = get_proper_type(item) @@ -1027,7 +1026,7 @@ def f(self) -> A: ... if not members_right.issubset(members_left): return False assuming = right.type.assuming_proper if proper_subtype else right.type.assuming - for (l, r) in reversed(assuming): + for l, r in reversed(assuming): if l == left and r == right: return True with pop_on_exit(assuming, left, right): @@ -1039,23 +1038,8 @@ def f(self) -> A: ... # We always bind self to the subtype. (Similarly to nominal types). supertype = get_proper_type(find_member(member, right, left)) assert supertype is not None - if member == "__call__" and class_obj: - # Special case: class objects always have __call__ that is just the constructor. - # TODO: move this helper function to typeops.py? - import mypy.checkmember - def named_type(fullname: str) -> Instance: - return Instance(left.type.mro[-1], []) - - subtype: ProperType | None = mypy.checkmember.type_object_type( - left.type, named_type - ) - elif member == "__call__" and left.type.is_metaclass(): - # Special case: we want to avoid falling back to metaclass __call__ - # if constructor signature didn't match, this can cause many false negatives. 
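The `visit_instance` hunk above pairs the existing positive subtype cache with a negative one, so a failed `Instance` vs `Instance` check is not re-derived on every query; as the new comment explains, the protocol case records its negative entry only after the nominal path has been tried. The general shape of such a two-sided cache, as a standalone sketch (mypy's real caches live on `type_state` and are additionally keyed by subtype kind):

```python
# Standalone sketch of a positive/negative cache around an expensive
# pairwise check; hypothetical names, shown only for the control flow.
from typing import Callable, Hashable

Pair = tuple[Hashable, Hashable]


class PairCache:
    def __init__(self) -> None:
        self._positive: set[Pair] = set()
        self._negative: set[Pair] = set()

    def check(
        self, left: Hashable, right: Hashable, compute: Callable[[Hashable, Hashable], bool]
    ) -> bool:
        if (left, right) in self._positive:
            return True
        if (left, right) in self._negative:
            return False
        result = compute(left, right)
        (self._positive if result else self._negative).add((left, right))
        return result
```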
- subtype = None - else: - subtype = get_proper_type(find_member(member, left, left, class_obj=class_obj)) + subtype = mypy.typeops.get_protocol_member(left, member, class_obj) # Useful for debugging: # print(member, 'of', left, 'has type', subtype) # print(member, 'of', right, 'has type', supertype) @@ -1730,7 +1714,7 @@ def report(*args: Any) -> None: # (probably also because solver needs subtyping). See also comment in # ExpandTypeVisitor.visit_erased_type(). applied = mypy.applytype.apply_generic_arguments( - type, non_none_inferred_vars, report, context=target, allow_erased_callables=True + type, non_none_inferred_vars, report, context=target ) if had_errors: return None @@ -1803,6 +1787,9 @@ def covers_at_runtime(item: Type, supertype: Type) -> bool: # Special case useful for selecting TypedDicts from unions using isinstance(x, dict). if supertype.type.fullname == "builtins.dict": return True + elif isinstance(item, TypeVarType): + if is_proper_subtype(item.upper_bound, supertype, ignore_promotions=True): + return True elif isinstance(item, Instance) and supertype.type.fullname == "builtins.int": # "int" covers all native int types if item.type.fullname in MYPYC_NATIVE_INT_NAMES: diff --git a/mypy/suggestions.py b/mypy/suggestions.py index 9ac033ba3bdf..2e3744025325 100644 --- a/mypy/suggestions.py +++ b/mypy/suggestions.py @@ -54,6 +54,7 @@ TypeInfo, reverse_builtin_aliases, ) +from mypy.options import Options from mypy.plugin import FunctionContext, MethodContext, Plugin from mypy.server.update import FineGrainedBuildManager from mypy.state import state @@ -735,7 +736,7 @@ def format_signature(self, sig: PyAnnotateSignature) -> str: def format_type(self, cur_module: str | None, typ: Type) -> str: if self.use_fixme and isinstance(get_proper_type(typ), AnyType): return self.use_fixme - return typ.accept(TypeFormatter(cur_module, self.graph)) + return typ.accept(TypeFormatter(cur_module, self.graph, self.manager.options)) def score_type(self, t: Type, arg_pos: bool) -> int: """Generate a score for a type that we use to pick which type to use. @@ -809,8 +810,8 @@ class TypeFormatter(TypeStrVisitor): """Visitor used to format types""" # TODO: Probably a lot - def __init__(self, module: str | None, graph: Graph) -> None: - super().__init__() + def __init__(self, module: str | None, graph: Graph, options: Options) -> None: + super().__init__(options=options) self.module = module self.graph = graph diff --git a/mypy/test/data.py b/mypy/test/data.py index 3ee368869095..976e68c38a98 100644 --- a/mypy/test/data.py +++ b/mypy/test/data.py @@ -105,6 +105,12 @@ def parse_test_case(case: DataDrivenTestCase) -> None: src_path = join(os.path.dirname(case.file), item.arg) with open(src_path, encoding="utf8") as f: files.append((join(base_path, "typing.pyi"), f.read())) + elif item.id == "_typeshed": + # Use an alternative stub file for the _typeshed module. + assert item.arg is not None + src_path = join(os.path.dirname(case.file), item.arg) + with open(src_path, encoding="utf8") as f: + files.append((join(base_path, "_typeshed.pyi"), f.read())) elif re.match(r"stale[0-9]*$", item.id): passnum = 1 if item.id == "stale" else int(item.id[len("stale") :]) assert passnum > 0 @@ -207,6 +213,16 @@ def parse_test_case(case: DataDrivenTestCase) -> None: for file_path, contents in files: expand_errors(contents.split("\n"), output, file_path) + seen_files = set() + for file, _ in files: + if file in seen_files: + raise ValueError( + f"{case.file}, line {first_item.line}: Duplicated filename {file}. 
Did you include" + " it multiple times?" + ) + + seen_files.add(file) + case.input = input case.output = output case.output2 = output2 @@ -359,12 +375,14 @@ def reportinfo(self) -> tuple[str, int, str]: return self.file, self.line, self.name def repr_failure(self, excinfo: Any, style: Any | None = None) -> str: - if excinfo.errisinstance(SystemExit): + if isinstance(excinfo.value, SystemExit): # We assume that before doing exit() (which raises SystemExit) we've printed # enough context about what happened so that a stack trace is not useful. # In particular, uncaught exceptions during semantic analysis or type checking # call exit() and they already print out a stack trace. excrepr = excinfo.exconly() + elif isinstance(excinfo.value, pytest.fail.Exception) and not excinfo.value.pytrace: + excrepr = excinfo.exconly() else: self.parent._prunetraceback(excinfo) excrepr = excinfo.getrepr(style="short") @@ -474,7 +492,7 @@ def strip_list(l: list[str]) -> list[str]: # Strip spaces at end of line r.append(re.sub(r"\s+$", "", s)) - while len(r) > 0 and r[-1] == "": + while r and r[-1] == "": r.pop() return r @@ -594,7 +612,7 @@ def pytest_addoption(parser: Any) -> None: # This function name is special to pytest. See -# http://doc.pytest.org/en/latest/writing_plugins.html#collection-hooks +# https://doc.pytest.org/en/latest/how-to/writing_plugins.html#collection-hooks def pytest_pycollect_makeitem(collector: Any, name: str, obj: object) -> Any | None: """Called by pytest on each object in modules configured in conftest.py files. diff --git a/mypy/test/helpers.py b/mypy/test/helpers.py index 145027404ff7..ca9b02eac805 100644 --- a/mypy/test/helpers.py +++ b/mypy/test/helpers.py @@ -51,8 +51,6 @@ def assert_string_arrays_equal(expected: list[str], actual: list[str], msg: str) Display any differences in a human-readable form. """ - __tracebackhide__ = True - actual = clean_up(actual) actual = [line.replace("can't", "cannot") for line in actual] expected = [line.replace("can't", "cannot") for line in expected] @@ -117,7 +115,7 @@ def assert_string_arrays_equal(expected: list[str], actual: list[str], msg: str) # long lines. show_align_message(expected[first_diff], actual[first_diff]) - raise AssertionError(msg) + pytest.fail(msg, pytrace=False) def assert_module_equivalence(name: str, expected: Iterable[str], actual: Iterable[str]) -> None: @@ -258,7 +256,7 @@ def local_sys_path_set() -> Iterator[None]: This can be used by test cases that do runtime imports, for example by the stubgen tests. """ - old_sys_path = sys.path[:] + old_sys_path = sys.path.copy() if not ("" in sys.path or "." in sys.path): sys.path.insert(0, "") try: @@ -384,6 +382,8 @@ def parse_options( options.strict_optional = False options.error_summary = False options.hide_error_codes = True + options.force_uppercase_builtins = True + options.force_union_syntax = True # Allow custom python version to override testfile_pyversion. 
if all(flag.split("=")[0] not in ["--python-version", "-2", "--py2"] for flag in flag_list): diff --git a/mypy/test/testcheck.py b/mypy/test/testcheck.py index 5f128283a190..bdd722c5d6ff 100644 --- a/mypy/test/testcheck.py +++ b/mypy/test/testcheck.py @@ -125,6 +125,10 @@ def run_case_once( options.hide_error_codes = False if "abstract" not in testcase.file: options.allow_empty_bodies = not testcase.name.endswith("_no_empty") + if "lowercase" not in testcase.file: + options.force_uppercase_builtins = True + if "union-error" not in testcase.file: + options.force_union_syntax = True if incremental_step and options.incremental: # Don't overwrite # flags: --no-incremental in incremental test cases diff --git a/mypy/test/testcmdline.py b/mypy/test/testcmdline.py index 2e8b0dc9a1cd..30ecef07a821 100644 --- a/mypy/test/testcmdline.py +++ b/mypy/test/testcmdline.py @@ -61,6 +61,10 @@ def test_python_cmdline(testcase: DataDrivenTestCase, step: int) -> None: args.append("--hide-error-codes") if "--disallow-empty-bodies" not in args: args.append("--allow-empty-bodies") + if "--no-force-uppercase-builtins" not in args: + args.append("--force-uppercase-builtins") + if "--no-force-union-syntax" not in args: + args.append("--force-union-syntax") # Type check the program. fixed = [python3_path, "-m", "mypy"] env = os.environ.copy() diff --git a/mypy/test/testdeps.py b/mypy/test/testdeps.py index faf4956a0273..f9a059672de8 100644 --- a/mypy/test/testdeps.py +++ b/mypy/test/testdeps.py @@ -50,7 +50,7 @@ def run_case(self, testcase: DataDrivenTestCase) -> None: type_state.add_all_protocol_deps(deps) for source, targets in sorted(deps.items()): - if source.startswith((" {', '.join(sorted(targets))}" diff --git a/mypy/test/testfinegrained.py b/mypy/test/testfinegrained.py index b19c49bf60bc..5b4c816b5c38 100644 --- a/mypy/test/testfinegrained.py +++ b/mypy/test/testfinegrained.py @@ -18,7 +18,7 @@ import re import sys import unittest -from typing import Any, cast +from typing import Any import pytest @@ -169,7 +169,8 @@ def get_options(self, source: str, testcase: DataDrivenTestCase, build_cache: bo def run_check(self, server: Server, sources: list[BuildSource]) -> list[str]: response = server.check(sources, export_types=True, is_tty=False, terminal_width=-1) - out = cast(str, response["out"] or response["err"]) + out = response["out"] or response["err"] + assert isinstance(out, str) return out.splitlines() def build(self, options: Options, sources: list[BuildSource]) -> list[str]: diff --git a/mypy/test/testgraph.py b/mypy/test/testgraph.py index b145d92aea6c..ce7697142ff2 100644 --- a/mypy/test/testgraph.py +++ b/mypy/test/testgraph.py @@ -41,9 +41,9 @@ def test_scc(self) -> None: assert_equal(sccs, {frozenset({"A"}), frozenset({"B", "C"}), frozenset({"D"})}) def _make_manager(self) -> BuildManager: - errors = Errors() options = Options() options.use_builtins_fixtures = True + errors = Errors(options) fscache = FileSystemCache() search_paths = SearchPaths((), (), (), ()) manager = BuildManager( diff --git a/mypy/test/testmerge.py b/mypy/test/testmerge.py index 11e9a3c3d7e7..0582c9ed5882 100644 --- a/mypy/test/testmerge.py +++ b/mypy/test/testmerge.py @@ -20,6 +20,7 @@ TypeVarExpr, Var, ) +from mypy.options import Options from mypy.server.subexpr import get_subexpressions from mypy.server.update import FineGrainedBuildManager from mypy.strconv import StrConv @@ -41,10 +42,10 @@ class ASTMergeSuite(DataSuite): def setup(self) -> None: super().setup() - self.str_conv = StrConv(show_ids=True) + self.str_conv = 
StrConv(show_ids=True, options=Options()) assert self.str_conv.id_mapper is not None self.id_mapper: IdMapper = self.str_conv.id_mapper - self.type_str_conv = TypeStrVisitor(self.id_mapper) + self.type_str_conv = TypeStrVisitor(self.id_mapper, options=Options()) def run_case(self, testcase: DataDrivenTestCase) -> None: name = testcase.name @@ -102,7 +103,11 @@ def build(self, source: str, testcase: DataDrivenTestCase) -> BuildResult | None options.export_types = True options.show_traceback = True options.allow_empty_bodies = True + options.force_uppercase_builtins = True main_path = os.path.join(test_temp_dir, "main") + + self.str_conv.options = options + self.type_str_conv.options = options with open(main_path, "w", encoding="utf8") as f: f.write(source) try: @@ -218,7 +223,12 @@ def dump_types( if type_map: a.append(f"## {module_id}") for expr in sorted( - type_map, key=lambda n: (n.line, short_type(n), str(n) + str(type_map[n])) + type_map, + key=lambda n: ( + n.line, + short_type(n), + n.str_with_options(self.str_conv.options) + str(type_map[n]), + ), ): typ = type_map[expr] a.append(f"{short_type(expr)}:{expr.line}: {self.format_type(typ)}") diff --git a/mypy/test/testparse.py b/mypy/test/testparse.py index 6a2d1e145251..0140eb072821 100644 --- a/mypy/test/testparse.py +++ b/mypy/test/testparse.py @@ -7,11 +7,13 @@ from pytest import skip from mypy import defaults +from mypy.config_parser import parse_mypy_comments from mypy.errors import CompileError from mypy.options import Options from mypy.parse import parse from mypy.test.data import DataDrivenTestCase, DataSuite from mypy.test.helpers import assert_string_arrays_equal, find_test_files, parse_options +from mypy.util import get_mypy_comments class ParserSuite(DataSuite): @@ -32,6 +34,7 @@ def test_parser(testcase: DataDrivenTestCase) -> None: The argument contains the description of the test case. """ options = Options() + options.force_uppercase_builtins = True options.hide_error_codes = True if testcase.file.endswith("python310.test"): @@ -39,15 +42,18 @@ def test_parser(testcase: DataDrivenTestCase) -> None: else: options.python_version = defaults.PYTHON3_VERSION + source = "\n".join(testcase.input) + + # Apply mypy: comments to options. + comments = get_mypy_comments(source) + changes, _ = parse_mypy_comments(comments, options) + options = options.apply_changes(changes) + try: n = parse( - bytes("\n".join(testcase.input), "ascii"), - fnam="main", - module="__main__", - errors=None, - options=options, + bytes(source, "ascii"), fnam="main", module="__main__", errors=None, options=options ) - a = str(n).split("\n") + a = n.str_with_options(options).split("\n") except CompileError as e: a = e.messages assert_string_arrays_equal( diff --git a/mypy/test/testpep561.py b/mypy/test/testpep561.py index 1602bae6a51f..ed8674e8d5bb 100644 --- a/mypy/test/testpep561.py +++ b/mypy/test/testpep561.py @@ -92,7 +92,7 @@ def test_pep561(testcase: DataDrivenTestCase) -> None: use_pip = False elif arg == "editable": editable = True - assert pkgs != [], "No packages to install for PEP 561 test?" + assert pkgs, "No packages to install for PEP 561 test?" 
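Many of the test-harness edits in this stretch of the diff do the same thing: they pin two new presentation options so the expected-output files keep their historical spelling. Roughly, with both options enabled mypy renders builtin generics capitalized (`List[int]` rather than `list[int]`) and unions in `Union[...]` form rather than `int | str`; a minimal sketch of setting them programmatically, mirroring the `--force-uppercase-builtins` / `--force-union-syntax` flags passed in the command-line tests above:

```python
# Sketch: the two presentation options pinned throughout the test suites.
from mypy.options import Options

options = Options()
options.force_uppercase_builtins = True  # spell builtins as List[int], Dict[str, int]
options.force_union_syntax = True        # spell unions as Union[int, str]
```

Data files that opt into the modern spellings (those with `lowercase` or `union-error` in their names) skip the override, as the `testcheck.py` hunk above shows.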
with virtualenv(python) as venv: venv_dir, python_executable = venv for pkg in pkgs: diff --git a/mypy/test/testpythoneval.py b/mypy/test/testpythoneval.py index 02dd11655382..62ba54591d9d 100644 --- a/mypy/test/testpythoneval.py +++ b/mypy/test/testpythoneval.py @@ -54,6 +54,7 @@ def test_python_evaluation(testcase: DataDrivenTestCase, cache_dir: str) -> None "--no-error-summary", "--hide-error-codes", "--allow-empty-bodies", + "--force-uppercase-builtins", ] interpreter = python3_path mypy_cmdline.append(f"--python-version={'.'.join(map(str, PYTHON3_VERSION))}") diff --git a/mypy/test/testsemanal.py b/mypy/test/testsemanal.py index 3276f21540df..3455f41aa20a 100644 --- a/mypy/test/testsemanal.py +++ b/mypy/test/testsemanal.py @@ -46,6 +46,7 @@ def get_semanal_options(program_text: str, testcase: DataDrivenTestCase) -> Opti options.show_traceback = True options.python_version = PYTHON3_VERSION options.enable_incomplete_feature = [TYPE_VAR_TUPLE, UNPACK] + options.force_uppercase_builtins = True return options @@ -78,7 +79,7 @@ def test_semanal(testcase: DataDrivenTestCase) -> None: # output. for module in sorted(result.files.keys()): if module in testcase.test_modules: - a += str(result.files[module]).split("\n") + a += result.files[module].str_with_options(options).split("\n") except CompileError as e: a = e.messages if testcase.normalize_output: diff --git a/mypy/test/teststubgen.py b/mypy/test/teststubgen.py index c7b576f89389..b21e06c0896a 100644 --- a/mypy/test/teststubgen.py +++ b/mypy/test/teststubgen.py @@ -38,7 +38,8 @@ generate_c_function_stub, generate_c_property_stub, generate_c_type_stub, - infer_method_sig, + infer_method_args, + infer_method_ret_type, is_c_property_readonly, ) from mypy.stubutil import common_dir_prefix, remove_misplaced_type_comments, walk_packages @@ -768,16 +769,18 @@ class StubgencSuite(unittest.TestCase): """ def test_infer_hash_sig(self) -> None: - assert_equal(infer_method_sig("__hash__"), [self_arg]) + assert_equal(infer_method_args("__hash__"), [self_arg]) + assert_equal(infer_method_ret_type("__hash__"), "int") def test_infer_getitem_sig(self) -> None: - assert_equal(infer_method_sig("__getitem__"), [self_arg, ArgSig(name="index")]) + assert_equal(infer_method_args("__getitem__"), [self_arg, ArgSig(name="index")]) def test_infer_setitem_sig(self) -> None: assert_equal( - infer_method_sig("__setitem__"), + infer_method_args("__setitem__"), [self_arg, ArgSig(name="index"), ArgSig(name="object")], ) + assert_equal(infer_method_ret_type("__setitem__"), "None") def test_infer_binary_op_sig(self) -> None: for op in ( @@ -794,11 +797,19 @@ def test_infer_binary_op_sig(self) -> None: "mul", "rmul", ): - assert_equal(infer_method_sig(f"__{op}__"), [self_arg, ArgSig(name="other")]) + assert_equal(infer_method_args(f"__{op}__"), [self_arg, ArgSig(name="other")]) + + def test_infer_equality_op_sig(self) -> None: + for op in ("eq", "ne", "lt", "le", "gt", "ge", "contains"): + assert_equal(infer_method_ret_type(f"__{op}__"), "bool") def test_infer_unary_op_sig(self) -> None: for op in ("neg", "pos"): - assert_equal(infer_method_sig(f"__{op}__"), [self_arg]) + assert_equal(infer_method_args(f"__{op}__"), [self_arg]) + + def test_infer_cast_sig(self) -> None: + for op in ("float", "bool", "bytes", "int"): + assert_equal(infer_method_ret_type(f"__{op}__"), op) def test_generate_c_type_stub_no_crash_for_object(self) -> None: output: list[str] = [] @@ -809,7 +820,8 @@ def test_generate_c_type_stub_no_crash_for_object(self) -> None: "alias", object, output, - 
imports, + imports=imports, + known_modules=[mod.__name__], sig_generators=get_sig_generators(parse_options([])), ) assert_equal(imports, []) @@ -828,7 +840,8 @@ class TestClassVariableCls: "C", TestClassVariableCls, output, - imports, + imports=imports, + known_modules=[mod.__name__], sig_generators=get_sig_generators(parse_options([])), ) assert_equal(imports, []) @@ -846,7 +859,8 @@ class TestClass(KeyError): "C", TestClass, output, - imports, + imports=imports, + known_modules=[mod.__name__], sig_generators=get_sig_generators(parse_options([])), ) assert_equal(output, ["class C(KeyError): ..."]) @@ -861,7 +875,8 @@ def test_generate_c_type_inheritance_same_module(self) -> None: "C", TestClass, output, - imports, + imports=imports, + known_modules=[mod.__name__], sig_generators=get_sig_generators(parse_options([])), ) assert_equal(output, ["class C(TestBaseClass): ..."]) @@ -881,7 +896,8 @@ class TestClass(argparse.Action): "C", TestClass, output, - imports, + imports=imports, + known_modules=[mod.__name__], sig_generators=get_sig_generators(parse_options([])), ) assert_equal(output, ["class C(argparse.Action): ..."]) @@ -899,7 +915,8 @@ class TestClass(type): "C", TestClass, output, - imports, + imports=imports, + known_modules=[mod.__name__], sig_generators=get_sig_generators(parse_options([])), ) assert_equal(output, ["class C(type): ..."]) @@ -919,10 +936,12 @@ def test(self, arg0: str) -> None: mod, "test", TestClass.test, - output, - imports, + output=output, + imports=imports, self_var="self", + cls=TestClass, class_name="TestClass", + known_modules=[mod.__name__], sig_generators=get_sig_generators(parse_options([])), ) assert_equal(output, ["def test(self, arg0: int) -> Any: ..."]) @@ -942,10 +961,12 @@ def test(self, arg0: str) -> None: mod, "test", TestClass.test, - output, - imports, + output=output, + imports=imports, self_var="self", + cls=TestClass, class_name="TestClass", + known_modules=[mod.__name__], sig_generators=get_sig_generators(parse_options([])), ) assert_equal(output, ["def test(self, arg0: int) -> Any: ..."]) @@ -964,10 +985,12 @@ def test(cls, arg0: str) -> None: mod, "test", TestClass.test, - output, - imports, + output=output, + imports=imports, self_var="cls", + cls=TestClass, class_name="TestClass", + known_modules=[mod.__name__], sig_generators=get_sig_generators(parse_options([])), ) assert_equal(output, ["@classmethod", "def test(cls, *args, **kwargs) -> Any: ..."]) @@ -990,10 +1013,12 @@ def test(self, arg0: str) -> None: mod, "test", TestClass.test, - output, - imports, + output=output, + imports=imports, self_var="cls", + cls=TestClass, class_name="TestClass", + known_modules=[mod.__name__], sig_generators=get_sig_generators(parse_options([])), ) assert_equal( @@ -1023,10 +1048,12 @@ def test(self, arg0: str = "") -> None: mod, "test", TestClass.test, - output, - imports, + output=output, + imports=imports, self_var="self", + cls=TestClass, class_name="TestClass", + known_modules=[mod.__name__], sig_generators=get_sig_generators(parse_options([])), ) assert_equal(output, ["def test(self, arg0: str = ...) -> Any: ..."]) @@ -1034,6 +1061,7 @@ def test(self, arg0: str = "") -> None: def test_generate_c_function_other_module_arg(self) -> None: """Test that if argument references type from other module, module will be imported.""" + # Provide different type in python spec than in docstring to make sure, that docstring # information is used. 
def test(arg0: str) -> None: @@ -1048,22 +1076,24 @@ def test(arg0: str) -> None: mod, "test", test, - output, - imports, + output=output, + imports=imports, + known_modules=[mod.__name__], sig_generators=get_sig_generators(parse_options([])), ) assert_equal(output, ["def test(arg0: argparse.Action) -> Any: ..."]) assert_equal(imports, ["import argparse"]) - def test_generate_c_function_same_module_arg(self) -> None: - """Test that if argument references type from same module but using full path, no module + def test_generate_c_function_same_module(self) -> None: + """Test that if annotation references type from same module but using full path, no module will be imported, and type specification will be striped to local reference. """ + # Provide different type in python spec than in docstring to make sure, that docstring # information is used. def test(arg0: str) -> None: """ - test(arg0: argparse.Action) + test(arg0: argparse.Action) -> argparse.Action """ output: list[str] = [] @@ -1073,19 +1103,20 @@ def test(arg0: str) -> None: mod, "test", test, - output, - imports, + output=output, + imports=imports, + known_modules=[mod.__name__], sig_generators=get_sig_generators(parse_options([])), ) - assert_equal(output, ["def test(arg0: Action) -> Any: ..."]) + assert_equal(output, ["def test(arg0: Action) -> Action: ..."]) assert_equal(imports, []) - def test_generate_c_function_other_module_ret(self) -> None: - """Test that if return type references type from other module, module will be imported.""" + def test_generate_c_function_other_module(self) -> None: + """Test that if annotation references type from other module, module will be imported.""" def test(arg0: str) -> None: """ - test(arg0: str) -> argparse.Action + test(arg0: argparse.Action) -> argparse.Action """ output: list[str] = [] @@ -1095,21 +1126,24 @@ def test(arg0: str) -> None: mod, "test", test, - output, - imports, + output=output, + imports=imports, + known_modules=[mod.__name__], sig_generators=get_sig_generators(parse_options([])), ) - assert_equal(output, ["def test(arg0: str) -> argparse.Action: ..."]) - assert_equal(imports, ["import argparse"]) + assert_equal(output, ["def test(arg0: argparse.Action) -> argparse.Action: ..."]) + assert_equal(set(imports), {"import argparse"}) - def test_generate_c_function_same_module_ret(self) -> None: - """Test that if return type references type from same module but using full path, - no module will be imported, and type specification will be striped to local reference. + def test_generate_c_function_same_module_nested(self) -> None: + """Test that if annotation references type from same module but using full path, no module + will be imported, and type specification will be stripped to local reference. """ + # Provide different type in python spec than in docstring to make sure, that docstring + # information is used. 
def test(arg0: str) -> None: """ - test(arg0: str) -> argparse.Action + test(arg0: list[argparse.Action]) -> list[argparse.Action] """ output: list[str] = [] @@ -1119,13 +1153,65 @@ def test(arg0: str) -> None: mod, "test", test, - output, - imports, + output=output, + imports=imports, + known_modules=[mod.__name__], sig_generators=get_sig_generators(parse_options([])), ) - assert_equal(output, ["def test(arg0: str) -> Action: ..."]) + assert_equal(output, ["def test(arg0: list[Action]) -> list[Action]: ..."]) assert_equal(imports, []) + def test_generate_c_function_same_module_compound(self) -> None: + """Test that if annotation references type from same module but using full path, no module + will be imported, and type specification will be stripped to local reference. + """ + + # Provide different type in python spec than in docstring to make sure, that docstring + # information is used. + def test(arg0: str) -> None: + """ + test(arg0: Union[argparse.Action, NoneType]) -> Tuple[argparse.Action, NoneType] + """ + + output: list[str] = [] + imports: list[str] = [] + mod = ModuleType("argparse", "") + generate_c_function_stub( + mod, + "test", + test, + output=output, + imports=imports, + known_modules=[mod.__name__], + sig_generators=get_sig_generators(parse_options([])), + ) + assert_equal(output, ["def test(arg0: Union[Action,None]) -> Tuple[Action,None]: ..."]) + assert_equal(imports, []) + + def test_generate_c_function_other_module_nested(self) -> None: + """Test that if annotation references type from other module, module will be imported, + and the import will be restricted to one of the known modules.""" + + def test(arg0: str) -> None: + """ + test(arg0: foo.bar.Action) -> other.Thing + """ + + output: list[str] = [] + imports: list[str] = [] + mod = ModuleType(self.__module__, "") + generate_c_function_stub( + mod, + "test", + test, + output=output, + imports=imports, + known_modules=["foo", "foo.spangle", "bar"], + sig_generators=get_sig_generators(parse_options([])), + ) + assert_equal(output, ["def test(arg0: foo.bar.Action) -> other.Thing: ..."]) + assert_equal(set(imports), {"import foo", "import other"}) + def test_generate_c_property_with_pybind11(self) -> None: """Signatures included by PyBind11 inside property.fget are read.""" @@ -1190,10 +1276,12 @@ def test(self, arg0: str) -> None: mod, "test", TestClass.test, - output, - imports, + output=output, + imports=imports, self_var="self", + cls=TestClass, class_name="TestClass", + known_modules=[mod.__name__], sig_generators=get_sig_generators(parse_options([])), ) assert_equal(output, ["def test(self, arg0: List[int]) -> Any: ..."]) @@ -1213,10 +1301,12 @@ def test(self, arg0: str) -> None: mod, "test", TestClass.test, - output, - imports, + output=output, + imports=imports, self_var="self", + cls=TestClass, class_name="TestClass", + known_modules=[mod.__name__], sig_generators=get_sig_generators(parse_options([])), ) assert_equal(output, ["def test(self, arg0: Dict[str,int]) -> Any: ..."]) @@ -1236,10 +1326,12 @@ def test(self, arg0: str) -> None: mod, "test", TestClass.test, - output, - imports, + output=output, + imports=imports, self_var="self", + cls=TestClass, class_name="TestClass", + known_modules=[mod.__name__], sig_generators=get_sig_generators(parse_options([])), ) assert_equal(output, ["def test(self, arg0: Dict[str,List[int]]) -> Any: ..."]) @@ -1259,10 +1351,12 @@ def test(self, arg0: str) -> None: mod, "test", TestClass.test, - output, - imports, + output=output, + imports=imports, self_var="self", + 
cls=TestClass, class_name="TestClass", + known_modules=[mod.__name__], sig_generators=get_sig_generators(parse_options([])), ) assert_equal(output, ["def test(self, arg0: Dict[argparse.Action,int]) -> Any: ..."]) @@ -1282,10 +1376,12 @@ def test(self, arg0: str) -> None: mod, "test", TestClass.test, - output, - imports, + output=output, + imports=imports, self_var="self", + cls=TestClass, class_name="TestClass", + known_modules=[mod.__name__], sig_generators=get_sig_generators(parse_options([])), ) assert_equal(output, ["def test(self, arg0: Dict[str,argparse.Action]) -> Any: ..."]) @@ -1310,10 +1406,12 @@ def __init__(self, arg0: str) -> None: mod, "__init__", TestClass.__init__, - output, - imports, + output=output, + imports=imports, self_var="self", + cls=TestClass, class_name="TestClass", + known_modules=[mod.__name__], sig_generators=get_sig_generators(parse_options([])), ) assert_equal( @@ -1329,6 +1427,42 @@ def __init__(self, arg0: str) -> None: ) assert_equal(set(imports), {"from typing import overload"}) + def test_generate_c_type_with_overload_shiboken(self) -> None: + class TestClass: + """ + TestClass(self: TestClass, arg0: str) -> None + TestClass(self: TestClass, arg0: str, arg1: str) -> None + """ + + def __init__(self, arg0: str) -> None: + pass + + output: list[str] = [] + imports: list[str] = [] + mod = ModuleType(TestClass.__module__, "") + generate_c_function_stub( + mod, + "__init__", + TestClass.__init__, + output=output, + imports=imports, + self_var="self", + cls=TestClass, + class_name="TestClass", + known_modules=[mod.__name__], + sig_generators=get_sig_generators(parse_options([])), + ) + assert_equal( + output, + [ + "@overload", + "def __init__(self, arg0: str) -> None: ...", + "@overload", + "def __init__(self, arg0: str, arg1: str) -> None: ...", + ], + ) + assert_equal(set(imports), {"from typing import overload"}) + class ArgSigSuite(unittest.TestCase): def test_repr(self) -> None: diff --git a/mypy/test/teststubtest.py b/mypy/test/teststubtest.py index 6bb4dfb2c937..94db9f55aa20 100644 --- a/mypy/test/teststubtest.py +++ b/mypy/test/teststubtest.py @@ -19,7 +19,7 @@ @contextlib.contextmanager def use_tmp_dir(mod_name: str) -> Iterator[str]: current = os.getcwd() - current_syspath = sys.path[:] + current_syspath = sys.path.copy() with tempfile.TemporaryDirectory() as tmp: try: os.chdir(tmp) @@ -27,7 +27,7 @@ def use_tmp_dir(mod_name: str) -> Iterator[str]: sys.path.insert(0, tmp) yield tmp finally: - sys.path = current_syspath[:] + sys.path = current_syspath.copy() if mod_name in sys.modules: del sys.modules[mod_name] @@ -765,6 +765,20 @@ def read_write_attr(self, val): self._val = val @collect_cases def test_type_alias(self) -> Iterator[Case]: + yield Case( + stub=""" + import collections.abc + import re + import typing + from typing import Callable, Dict, Generic, Iterable, List, Match, Tuple, TypeVar, Union + """, + runtime=""" + import collections.abc + import re + from typing import Callable, Dict, Generic, Iterable, List, Match, Tuple, TypeVar, Union + """, + error=None, + ) yield Case( stub=""" class X: @@ -778,27 +792,18 @@ class Y: ... """, error="Y.f", ) - yield Case( - stub=""" - from typing import Tuple - A = Tuple[int, str] - """, - runtime="A = (int, str)", - error="A", - ) + yield Case(stub="A = Tuple[int, str]", runtime="A = (int, str)", error="A") # Error if an alias isn't present at runtime... yield Case(stub="B = str", runtime="", error="B") # ... 
but only if the alias isn't private yield Case(stub="_C = int", runtime="", error=None) yield Case( stub=""" - from typing import Tuple D = tuple[str, str] E = Tuple[int, int, int] F = Tuple[str, int] """, runtime=""" - from typing import List, Tuple D = Tuple[str, str] E = Tuple[int, int, int] F = List[str] @@ -807,13 +812,11 @@ class Y: ... ) yield Case( stub=""" - from typing import Union G = str | int H = Union[str, bool] I = str | int """, runtime=""" - from typing import Union G = Union[str, int] H = Union[str, bool] I = str @@ -822,16 +825,12 @@ class Y: ... ) yield Case( stub=""" - import typing - from collections.abc import Iterable - from typing import Dict K = dict[str, str] L = Dict[int, int] - KK = Iterable[str] + KK = collections.abc.Iterable[str] LL = typing.Iterable[str] """, runtime=""" - from typing import Iterable, Dict K = Dict[str, str] L = Dict[int, int] KK = Iterable[str] @@ -841,14 +840,12 @@ class Y: ... ) yield Case( stub=""" - from typing import Generic, TypeVar _T = TypeVar("_T") class _Spam(Generic[_T]): def foo(self) -> None: ... IntFood = _Spam[int] """, runtime=""" - from typing import Generic, TypeVar _T = TypeVar("_T") class _Bacon(Generic[_T]): def foo(self, arg): pass @@ -859,14 +856,11 @@ def foo(self, arg): pass yield Case(stub="StrList = list[str]", runtime="StrList = ['foo', 'bar']", error="StrList") yield Case( stub=""" - import collections.abc - from typing import Callable - N = Callable[[str], bool] + N = typing.Callable[[str], bool] O = collections.abc.Callable[[int], str] - P = Callable[[str], bool] + P = typing.Callable[[str], bool] """, runtime=""" - from typing import Callable N = Callable[[str], bool] O = Callable[[int], str] P = int @@ -897,17 +891,7 @@ class Bar: pass """, error=None, ) - yield Case( - stub=""" - from typing import Match - M = Match[str] - """, - runtime=""" - from typing import Match - M = Match[str] - """, - error=None, - ) + yield Case(stub="M = Match[str]", runtime="M = Match[str]", error=None) yield Case( stub=""" class Baz: @@ -940,37 +924,32 @@ def fizz(self): pass if sys.version_info >= (3, 10): yield Case( stub=""" - import collections.abc - import re - from typing import Callable, Dict, Match, Iterable, Tuple, Union Q = Dict[str, str] R = dict[int, int] S = Tuple[int, int] T = tuple[str, str] U = int | str V = Union[int, str] - W = Callable[[str], bool] + W = typing.Callable[[str], bool] Z = collections.abc.Callable[[str], bool] - QQ = Iterable[str] + QQ = typing.Iterable[str] RR = collections.abc.Iterable[str] - MM = Match[str] + MM = typing.Match[str] MMM = re.Match[str] """, runtime=""" - from collections.abc import Callable, Iterable - from re import Match Q = dict[str, str] R = dict[int, int] S = tuple[int, int] T = tuple[str, str] U = int | str V = int | str - W = Callable[[str], bool] - Z = Callable[[str], bool] - QQ = Iterable[str] - RR = Iterable[str] - MM = Match[str] - MMM = Match[str] + W = collections.abc.Callable[[str], bool] + Z = collections.abc.Callable[[str], bool] + QQ = collections.abc.Iterable[str] + RR = collections.abc.Iterable[str] + MM = re.Match[str] + MMM = re.Match[str] """, error=None, ) @@ -1139,6 +1118,263 @@ def test_not_subclassable(self) -> Iterator[Case]: error="CannotBeSubclassed", ) + @collect_cases + def test_has_runtime_final_decorator(self) -> Iterator[Case]: + yield Case( + stub="from typing_extensions import final", + runtime=""" + import functools + from typing_extensions import final + """, + error=None, + ) + yield Case( + stub=""" + @final + class A: ... 
+ """, + runtime=""" + @final + class A: ... + """, + error=None, + ) + yield Case( # Runtime can miss `@final` decorator + stub=""" + @final + class B: ... + """, + runtime=""" + class B: ... + """, + error=None, + ) + yield Case( # Stub cannot miss `@final` decorator + stub=""" + class C: ... + """, + runtime=""" + @final + class C: ... + """, + error="C", + ) + yield Case( + stub=""" + class D: + @final + def foo(self) -> None: ... + @final + @staticmethod + def bar() -> None: ... + @staticmethod + @final + def bar2() -> None: ... + @final + @classmethod + def baz(cls) -> None: ... + @classmethod + @final + def baz2(cls) -> None: ... + @property + @final + def eggs(self) -> int: ... + @final + @property + def eggs2(self) -> int: ... + @final + def ham(self, obj: int) -> int: ... + """, + runtime=""" + class D: + @final + def foo(self): pass + @final + @staticmethod + def bar(): pass + @staticmethod + @final + def bar2(): pass + @final + @classmethod + def baz(cls): pass + @classmethod + @final + def baz2(cls): pass + @property + @final + def eggs(self): return 42 + @final + @property + def eggs2(self): pass + @final + @functools.lru_cache() + def ham(self, obj): return obj * 2 + """, + error=None, + ) + # Stub methods are allowed to have @final even if the runtime doesn't... + yield Case( + stub=""" + class E: + @final + def foo(self) -> None: ... + @final + @staticmethod + def bar() -> None: ... + @staticmethod + @final + def bar2() -> None: ... + @final + @classmethod + def baz(cls) -> None: ... + @classmethod + @final + def baz2(cls) -> None: ... + @property + @final + def eggs(self) -> int: ... + @final + @property + def eggs2(self) -> int: ... + @final + def ham(self, obj: int) -> int: ... + """, + runtime=""" + class E: + def foo(self): pass + @staticmethod + def bar(): pass + @staticmethod + def bar2(): pass + @classmethod + def baz(cls): pass + @classmethod + def baz2(cls): pass + @property + def eggs(self): return 42 + @property + def eggs2(self): return 42 + @functools.lru_cache() + def ham(self, obj): return obj * 2 + """, + error=None, + ) + # ...But if the runtime has @final, the stub must have it as well + yield Case( + stub=""" + class F: + def foo(self) -> None: ... + """, + runtime=""" + class F: + @final + def foo(self): pass + """, + error="F.foo", + ) + yield Case( + stub=""" + class G: + @staticmethod + def foo() -> None: ... + """, + runtime=""" + class G: + @final + @staticmethod + def foo(): pass + """, + error="G.foo", + ) + yield Case( + stub=""" + class H: + @staticmethod + def foo() -> None: ... + """, + runtime=""" + class H: + @staticmethod + @final + def foo(): pass + """, + error="H.foo", + ) + yield Case( + stub=""" + class I: + @classmethod + def foo(cls) -> None: ... + """, + runtime=""" + class I: + @final + @classmethod + def foo(cls): pass + """, + error="I.foo", + ) + yield Case( + stub=""" + class J: + @classmethod + def foo(cls) -> None: ... + """, + runtime=""" + class J: + @classmethod + @final + def foo(cls): pass + """, + error="J.foo", + ) + yield Case( + stub=""" + class K: + @property + def foo(self) -> int: ... + """, + runtime=""" + class K: + @property + @final + def foo(self): return 42 + """, + error="K.foo", + ) + # This test wouldn't pass, + # because the runtime can't set __final__ on instances of builtins.property, + # so stubtest has non way of knowing that the runtime was decorated with @final: + # + # yield Case( + # stub=""" + # class K2: + # @property + # def foo(self) -> int: ... 
+ # """, + # runtime=""" + # class K2: + # @final + # @property + # def foo(self): return 42 + # """, + # error="K2.foo", + # ) + yield Case( + stub=""" + class L: + def foo(self, obj: int) -> int: ... + """, + runtime=""" + class L: + @final + @functools.lru_cache() + def foo(self, obj): return obj * 2 + """, + error="L.foo", + ) + @collect_cases def test_name_mangling(self) -> Iterator[Case]: yield Case( @@ -1336,24 +1572,36 @@ class _Options(TypedDict): ) @collect_cases - def test_protocol(self) -> Iterator[Case]: + def test_runtime_typing_objects(self) -> Iterator[Case]: + yield Case( + stub="from typing_extensions import Protocol, TypedDict", + runtime="from typing_extensions import Protocol, TypedDict", + error=None, + ) yield Case( stub=""" - from typing_extensions import Protocol - class X(Protocol): bar: int def foo(self, x: int, y: bytes = ...) -> str: ... """, runtime=""" - from typing_extensions import Protocol - class X(Protocol): bar: int def foo(self, x: int, y: bytes = ...) -> str: ... """, error=None, ) + yield Case( + stub=""" + class Y(TypedDict): + a: int + """, + runtime=""" + class Y(TypedDict): + a: int + """, + error=None, + ) @collect_cases def test_type_var(self) -> Iterator[Case]: @@ -1436,7 +1684,10 @@ def test_metaclass_abcmeta(self) -> Iterator[Case]: @collect_cases def test_abstract_methods(self) -> Iterator[Case]: yield Case( - stub="from abc import abstractmethod", + stub=""" + from abc import abstractmethod + from typing import overload + """, runtime="from abc import abstractmethod", error=None, ) @@ -1465,15 +1716,64 @@ def some(self) -> None: ... """, error=None, ) - # Runtime can miss `@abstractmethod`: yield Case( stub=""" class A3: + @overload + def some(self, other: int) -> str: ... + @overload + def some(self, other: str) -> int: ... + """, + runtime=""" + class A3: + @abstractmethod + def some(self, other) -> None: ... + """, + error="A3.some", + ) + yield Case( + stub=""" + class A4: + @overload + @abstractmethod + def some(self, other: int) -> str: ... + @overload + @abstractmethod + def some(self, other: str) -> int: ... + """, + runtime=""" + class A4: + @abstractmethod + def some(self, other) -> None: ... + """, + error=None, + ) + yield Case( + stub=""" + class A5: + @abstractmethod + @overload + def some(self, other: int) -> str: ... + @abstractmethod + @overload + def some(self, other: str) -> int: ... + """, + runtime=""" + class A5: + @abstractmethod + def some(self, other) -> None: ... + """, + error=None, + ) + # Runtime can miss `@abstractmethod`: + yield Case( + stub=""" + class A6: @abstractmethod def some(self) -> None: ... """, runtime=""" - class A3: + class A6: def some(self) -> None: ... 
""", error=None, diff --git a/mypy/test/testtransform.py b/mypy/test/testtransform.py index c765bae12062..ba9fe8668fb4 100644 --- a/mypy/test/testtransform.py +++ b/mypy/test/testtransform.py @@ -40,6 +40,7 @@ def test_transform(testcase: DataDrivenTestCase) -> None: options.semantic_analysis_only = True options.enable_incomplete_feature = [TYPE_VAR_TUPLE, UNPACK] options.show_traceback = True + options.force_uppercase_builtins = True result = build.build( sources=[BuildSource("main", None, src)], options=options, alt_lib_path=test_temp_dir ) @@ -53,7 +54,7 @@ def test_transform(testcase: DataDrivenTestCase) -> None: t = TypeAssertTransformVisitor() t.test_only = True file = t.mypyfile(result.files[module]) - a += str(file).split("\n") + a += file.str_with_options(options).split("\n") except CompileError as e: a = e.messages if testcase.normalize_output: diff --git a/mypy/test/testtypegen.py b/mypy/test/testtypegen.py index 3f09254f081a..4933bd3522a0 100644 --- a/mypy/test/testtypegen.py +++ b/mypy/test/testtypegen.py @@ -35,6 +35,7 @@ def run_case(self, testcase: DataDrivenTestCase) -> None: options.export_types = True options.preserve_asts = True options.allow_empty_bodies = True + options.force_uppercase_builtins = True result = build.build( sources=[BuildSource("main", None, src)], options=options, @@ -66,8 +67,11 @@ def run_case(self, testcase: DataDrivenTestCase) -> None: # Include node in output. keys.append(node) - for key in sorted(keys, key=lambda n: (n.line, short_type(n), str(n) + str(map[n]))): - ts = str(map[key]).replace("*", "") # Remove erased tags + for key in sorted( + keys, + key=lambda n: (n.line, short_type(n), str(n) + map[n].str_with_options(options)), + ): + ts = map[key].str_with_options(options).replace("*", "") # Remove erased tags ts = ts.replace("__main__.", "") a.append(f"{short_type(key)}({key.line}) : {ts}") except CompileError as e: diff --git a/mypy/test/testtypes.py b/mypy/test/testtypes.py index ee0256e2057a..601cdf27466e 100644 --- a/mypy/test/testtypes.py +++ b/mypy/test/testtypes.py @@ -7,7 +7,22 @@ from mypy.indirection import TypeIndirectionVisitor from mypy.join import join_simple, join_types from mypy.meet import meet_types, narrow_declared_type -from mypy.nodes import ARG_OPT, ARG_POS, ARG_STAR, ARG_STAR2, CONTRAVARIANT, COVARIANT, INVARIANT +from mypy.nodes import ( + ARG_NAMED, + ARG_OPT, + ARG_POS, + ARG_STAR, + ARG_STAR2, + CONTRAVARIANT, + COVARIANT, + INVARIANT, + ArgKind, + CallExpr, + Expression, + NameExpr, +) +from mypy.options import Options +from mypy.plugins.common import find_shallow_matching_overload_item from mypy.state import state from mypy.subtypes import is_more_precise, is_proper_subtype, is_same_type, is_subtype from mypy.test.helpers import Suite, assert_equal, assert_type, skip @@ -110,10 +125,14 @@ def test_callable_type_with_var_args(self) -> None: assert_equal(str(c3), "def (X? =, *Y?) 
-> Any") def test_tuple_type(self) -> None: - assert_equal(str(TupleType([], self.fx.std_tuple)), "Tuple[]") - assert_equal(str(TupleType([self.x], self.fx.std_tuple)), "Tuple[X?]") + options = Options() + options.force_uppercase_builtins = True + assert_equal(TupleType([], self.fx.std_tuple).str_with_options(options), "Tuple[]") + assert_equal(TupleType([self.x], self.fx.std_tuple).str_with_options(options), "Tuple[X?]") assert_equal( - str(TupleType([self.x, AnyType(TypeOfAny.special_form)], self.fx.std_tuple)), + TupleType( + [self.x, AnyType(TypeOfAny.special_form)], self.fx.std_tuple + ).str_with_options(options), "Tuple[X?, Any]", ) @@ -1287,3 +1306,135 @@ def assert_union_result(self, t: ProperType, expected: list[Type]) -> None: t2 = remove_instance_last_known_values(t) assert type(t2) is UnionType assert t2.items == expected + + +class ShallowOverloadMatchingSuite(Suite): + def setUp(self) -> None: + self.fx = TypeFixture() + + def test_simple(self) -> None: + fx = self.fx + ov = self.make_overload([[("x", fx.anyt, ARG_NAMED)], [("y", fx.anyt, ARG_NAMED)]]) + # Match first only + self.assert_find_shallow_matching_overload_item(ov, make_call(("foo", "x")), 0) + # Match second only + self.assert_find_shallow_matching_overload_item(ov, make_call(("foo", "y")), 1) + # No match -- invalid keyword arg name + self.assert_find_shallow_matching_overload_item(ov, make_call(("foo", "z")), 1) + # No match -- missing arg + self.assert_find_shallow_matching_overload_item(ov, make_call(), 1) + # No match -- extra arg + self.assert_find_shallow_matching_overload_item( + ov, make_call(("foo", "x"), ("foo", "z")), 1 + ) + + def test_match_using_types(self) -> None: + fx = self.fx + ov = self.make_overload( + [ + [("x", fx.nonet, ARG_POS)], + [("x", fx.lit_false, ARG_POS)], + [("x", fx.lit_true, ARG_POS)], + [("x", fx.anyt, ARG_POS)], + ] + ) + self.assert_find_shallow_matching_overload_item(ov, make_call(("None", None)), 0) + self.assert_find_shallow_matching_overload_item(ov, make_call(("builtins.False", None)), 1) + self.assert_find_shallow_matching_overload_item(ov, make_call(("builtins.True", None)), 2) + self.assert_find_shallow_matching_overload_item(ov, make_call(("foo", None)), 3) + + def test_none_special_cases(self) -> None: + fx = self.fx + ov = self.make_overload( + [[("x", fx.callable(fx.nonet), ARG_POS)], [("x", fx.nonet, ARG_POS)]] + ) + self.assert_find_shallow_matching_overload_item(ov, make_call(("None", None)), 1) + self.assert_find_shallow_matching_overload_item(ov, make_call(("func", None)), 0) + ov = self.make_overload([[("x", fx.str_type, ARG_POS)], [("x", fx.nonet, ARG_POS)]]) + self.assert_find_shallow_matching_overload_item(ov, make_call(("None", None)), 1) + self.assert_find_shallow_matching_overload_item(ov, make_call(("func", None)), 0) + ov = self.make_overload( + [[("x", UnionType([fx.str_type, fx.a]), ARG_POS)], [("x", fx.nonet, ARG_POS)]] + ) + self.assert_find_shallow_matching_overload_item(ov, make_call(("None", None)), 1) + self.assert_find_shallow_matching_overload_item(ov, make_call(("func", None)), 0) + ov = self.make_overload([[("x", fx.o, ARG_POS)], [("x", fx.nonet, ARG_POS)]]) + self.assert_find_shallow_matching_overload_item(ov, make_call(("None", None)), 0) + self.assert_find_shallow_matching_overload_item(ov, make_call(("func", None)), 0) + ov = self.make_overload( + [[("x", UnionType([fx.str_type, fx.nonet]), ARG_POS)], [("x", fx.nonet, ARG_POS)]] + ) + self.assert_find_shallow_matching_overload_item(ov, make_call(("None", None)), 0) + 
self.assert_find_shallow_matching_overload_item(ov, make_call(("func", None)), 0) + ov = self.make_overload([[("x", fx.anyt, ARG_POS)], [("x", fx.nonet, ARG_POS)]]) + self.assert_find_shallow_matching_overload_item(ov, make_call(("None", None)), 0) + self.assert_find_shallow_matching_overload_item(ov, make_call(("func", None)), 0) + + def test_optional_arg(self) -> None: + fx = self.fx + ov = self.make_overload( + [[("x", fx.anyt, ARG_NAMED)], [("y", fx.anyt, ARG_OPT)], [("z", fx.anyt, ARG_NAMED)]] + ) + self.assert_find_shallow_matching_overload_item(ov, make_call(), 1) + self.assert_find_shallow_matching_overload_item(ov, make_call(("foo", "x")), 0) + self.assert_find_shallow_matching_overload_item(ov, make_call(("foo", "y")), 1) + self.assert_find_shallow_matching_overload_item(ov, make_call(("foo", "z")), 2) + + def test_two_args(self) -> None: + fx = self.fx + ov = self.make_overload( + [ + [("x", fx.nonet, ARG_OPT), ("y", fx.anyt, ARG_OPT)], + [("x", fx.anyt, ARG_OPT), ("y", fx.anyt, ARG_OPT)], + ] + ) + self.assert_find_shallow_matching_overload_item(ov, make_call(), 0) + self.assert_find_shallow_matching_overload_item(ov, make_call(("None", "x")), 0) + self.assert_find_shallow_matching_overload_item(ov, make_call(("foo", "x")), 1) + self.assert_find_shallow_matching_overload_item( + ov, make_call(("foo", "y"), ("None", "x")), 0 + ) + self.assert_find_shallow_matching_overload_item( + ov, make_call(("foo", "y"), ("bar", "x")), 1 + ) + + def assert_find_shallow_matching_overload_item( + self, ov: Overloaded, call: CallExpr, expected_index: int + ) -> None: + c = find_shallow_matching_overload_item(ov, call) + assert c in ov.items + assert ov.items.index(c) == expected_index + + def make_overload(self, items: list[list[tuple[str, Type, ArgKind]]]) -> Overloaded: + result = [] + for item in items: + arg_types = [] + arg_names = [] + arg_kinds = [] + for name, typ, kind in item: + arg_names.append(name) + arg_types.append(typ) + arg_kinds.append(kind) + result.append( + CallableType( + arg_types, arg_kinds, arg_names, ret_type=NoneType(), fallback=self.fx.o + ) + ) + return Overloaded(result) + + +def make_call(*items: tuple[str, str | None]) -> CallExpr: + args: list[Expression] = [] + arg_names = [] + arg_kinds = [] + for arg, name in items: + shortname = arg.split(".")[-1] + n = NameExpr(shortname) + n.fullname = arg + args.append(n) + arg_names.append(name) + if name: + arg_kinds.append(ARG_NAMED) + else: + arg_kinds.append(ARG_POS) + return CallExpr(NameExpr("f"), args, arg_kinds, arg_names) diff --git a/mypy/test/typefixture.py b/mypy/test/typefixture.py index d12e7abab0e2..1013b87c213f 100644 --- a/mypy/test/typefixture.py +++ b/mypy/test/typefixture.py @@ -136,6 +136,7 @@ def make_type_var( self.type_type = Instance(self.type_typei, []) # type self.function = Instance(self.functioni, []) # function TODO self.str_type = Instance(self.str_type_info, []) + self.bool_type = Instance(self.bool_type_info, []) self.a = Instance(self.ai, []) # A self.b = Instance(self.bi, []) # B self.c = Instance(self.ci, []) # C @@ -197,6 +198,9 @@ def make_type_var( self.lit_str2_inst = Instance(self.str_type_info, [], last_known_value=self.lit_str2) self.lit_str3_inst = Instance(self.str_type_info, [], last_known_value=self.lit_str3) + self.lit_false = LiteralType(False, self.bool_type) + self.lit_true = LiteralType(True, self.bool_type) + self.type_a = TypeType.make_normalized(self.a) self.type_b = TypeType.make_normalized(self.b) self.type_c = TypeType.make_normalized(self.c) diff --git 
a/mypy/treetransform.py b/mypy/treetransform.py index 535f50d5cf5e..424fa6b96415 100644 --- a/mypy/treetransform.py +++ b/mypy/treetransform.py @@ -145,7 +145,7 @@ def __init__(self) -> None: def visit_mypy_file(self, node: MypyFile) -> MypyFile: assert self.test_only, "This visitor should not be used for whole files." # NOTE: The 'names' and 'imports' instance variables will be empty! - ignored_lines = {line: codes[:] for line, codes in node.ignored_lines.items()} + ignored_lines = {line: codes.copy() for line, codes in node.ignored_lines.items()} new = MypyFile(self.statements(node.defs), [], node.is_bom, ignored_lines=ignored_lines) new._fullname = node._fullname new.path = node.path @@ -153,10 +153,10 @@ def visit_mypy_file(self, node: MypyFile) -> MypyFile: return new def visit_import(self, node: Import) -> Import: - return Import(node.ids[:]) + return Import(node.ids.copy()) def visit_import_from(self, node: ImportFrom) -> ImportFrom: - return ImportFrom(node.id, node.relative, node.names[:]) + return ImportFrom(node.id, node.relative, node.names.copy()) def visit_import_all(self, node: ImportAll) -> ImportAll: return ImportAll(node.id, node.relative) @@ -233,6 +233,9 @@ def copy_function_attributes(self, new: FuncItem, original: FuncItem) -> None: new.max_pos = original.max_pos new.is_overload = original.is_overload new.is_generator = original.is_generator + new.is_coroutine = original.is_coroutine + new.is_async_generator = original.is_async_generator + new.is_awaitable_coroutine = original.is_awaitable_coroutine new.line = original.line def visit_overloaded_func_def(self, node: OverloadedFuncDef) -> OverloadedFuncDef: @@ -267,10 +270,10 @@ def visit_class_def(self, node: ClassDef) -> ClassDef: return new def visit_global_decl(self, node: GlobalDecl) -> GlobalDecl: - return GlobalDecl(node.names[:]) + return GlobalDecl(node.names.copy()) def visit_nonlocal_decl(self, node: NonlocalDecl) -> NonlocalDecl: - return NonlocalDecl(node.names[:]) + return NonlocalDecl(node.names.copy()) def visit_block(self, node: Block) -> Block: return Block(self.statements(node.body)) @@ -513,8 +516,8 @@ def visit_call_expr(self, node: CallExpr) -> CallExpr: return CallExpr( self.expr(node.callee), self.expressions(node.args), - node.arg_kinds[:], - node.arg_names[:], + node.arg_kinds.copy(), + node.arg_names.copy(), self.optional_expr(node.analyzed), ) diff --git a/mypy/typeanal.py b/mypy/typeanal.py index f3329af6207a..cb2f8410ee62 100644 --- a/mypy/typeanal.py +++ b/mypy/typeanal.py @@ -404,6 +404,7 @@ def visit_unbound_type_nonoptional(self, t: UnboundType, defining_literal: bool) self.fail, node.no_args, t, + self.options, unexpanded_type=t, disallow_any=disallow_any, ) @@ -419,7 +420,7 @@ def visit_unbound_type_nonoptional(self, t: UnboundType, defining_literal: bool) self.fail, self.note, disallow_any=disallow_any, - python_version=self.options.python_version, + options=self.options, use_generic_error=True, unexpanded_type=t, ) @@ -654,9 +655,7 @@ def try_analyze_special_unbound_type(self, t: UnboundType, fullname: str) -> Typ def get_omitted_any(self, typ: Type, fullname: str | None = None) -> AnyType: disallow_any = not self.is_typeshed_stub and self.options.disallow_any_generics - return get_omitted_any( - disallow_any, self.fail, self.note, typ, self.options.python_version, fullname - ) + return get_omitted_any(disallow_any, self.fail, self.note, typ, self.options, fullname) def analyze_type_with_type_info( self, info: TypeInfo, args: Sequence[Type], ctx: Context @@ -690,8 +689,13 @@ def 
analyze_type_with_type_info( instance.args = tuple(self.pack_paramspec_args(instance.args)) if info.has_type_var_tuple_type: - # - 1 to allow for the empty type var tuple case. - valid_arg_length = len(instance.args) >= len(info.type_vars) - 1 + if instance.args: + # -1 to account for empty tuple + valid_arg_length = len(instance.args) >= len(info.type_vars) - 1 + # Empty case is special cased and we want to infer a Tuple[Any, ...] + # instead of the empty tuple, so no - 1 here. + else: + valid_arg_length = False else: valid_arg_length = len(instance.args) == len(info.type_vars) @@ -702,7 +706,7 @@ def analyze_type_with_type_info( self.fail, self.note, disallow_any=self.options.disallow_any_generics and not self.is_typeshed_stub, - python_version=self.options.python_version, + options=self.options, ) tup = info.tuple_type @@ -717,6 +721,7 @@ def analyze_type_with_type_info( self.fail, False, ctx, + self.options, use_standard_error=True, ) return tup.copy_modified(items=self.anal_array(tup.items), fallback=instance) @@ -732,6 +737,7 @@ def analyze_type_with_type_info( self.fail, False, ctx, + self.options, use_standard_error=True, ) # Create a named TypedDictType @@ -1109,7 +1115,13 @@ def visit_type_type(self, t: TypeType) -> Type: return TypeType.make_normalized(self.anal_type(t.item), line=t.line) def visit_placeholder_type(self, t: PlaceholderType) -> Type: - n = None if not t.fullname else self.api.lookup_fully_qualified(t.fullname) + n = ( + None + # No dot in fullname indicates we are at function scope, and recursive + # types are not supported there anyway, so we just give up. + if not t.fullname or "." not in t.fullname + else self.api.lookup_fully_qualified(t.fullname) + ) if not n or isinstance(n.node, PlaceholderNode): self.api.defer() # Still incomplete return t @@ -1600,12 +1612,12 @@ def get_omitted_any( fail: MsgCallback, note: MsgCallback, orig_type: Type, - python_version: tuple[int, int], + options: Options, fullname: str | None = None, unexpanded_type: Type | None = None, ) -> AnyType: if disallow_any: - nongen_builtins = get_nongen_builtins(python_version) + nongen_builtins = get_nongen_builtins(options.python_version) if fullname in nongen_builtins: typ = orig_type # We use a dedicated error message for builtin generics (as the most common case). 
@@ -1617,7 +1629,7 @@ def get_omitted_any( ) else: typ = unexpanded_type or orig_type - type_str = typ.name if isinstance(typ, UnboundType) else format_type_bare(typ) + type_str = typ.name if isinstance(typ, UnboundType) else format_type_bare(typ, options) fail( message_registry.BARE_GENERIC.format(quote_type_string(type_str)), @@ -1630,7 +1642,10 @@ def get_omitted_any( ) # Ideally, we'd check whether the type is quoted or `from __future__ annotations` # is set before issuing this note - if python_version < (3, 9) and base_fullname in GENERIC_STUB_NOT_AT_RUNTIME_TYPES: + if ( + options.python_version < (3, 9) + and base_fullname in GENERIC_STUB_NOT_AT_RUNTIME_TYPES + ): # Recommend `from __future__ import annotations` or to put type in quotes # (string literal escaping) for classes not generic at runtime note( @@ -1649,12 +1664,24 @@ def get_omitted_any( return any_type +def fix_type_var_tuple_argument(any_type: Type, t: Instance) -> None: + if t.type.has_type_var_tuple_type: + args = list(t.args) + assert t.type.type_var_tuple_prefix is not None + tvt = t.type.defn.type_vars[t.type.type_var_tuple_prefix] + assert isinstance(tvt, TypeVarTupleType) + args[t.type.type_var_tuple_prefix] = UnpackType( + Instance(tvt.tuple_fallback.type, [any_type]) + ) + t.args = tuple(args) + + def fix_instance( t: Instance, fail: MsgCallback, note: MsgCallback, disallow_any: bool, - python_version: tuple[int, int], + options: Options, use_generic_error: bool = False, unexpanded_type: Type | None = None, ) -> None: @@ -1667,10 +1694,10 @@ def fix_instance( fullname: str | None = None else: fullname = t.type.fullname - any_type = get_omitted_any( - disallow_any, fail, note, t, python_version, fullname, unexpanded_type - ) + any_type = get_omitted_any(disallow_any, fail, note, t, options, fullname, unexpanded_type) t.args = (any_type,) * len(t.type.type_vars) + fix_type_var_tuple_argument(any_type, t) + return # Invalid number of type parameters. fail( @@ -1682,6 +1709,7 @@ def fix_instance( # otherwise the type checker may crash as it expects # things to be right. t.args = tuple(AnyType(TypeOfAny.from_error) for _ in t.type.type_vars) + fix_type_var_tuple_argument(AnyType(TypeOfAny.from_error), t) t.invalid = True @@ -1691,6 +1719,7 @@ def expand_type_alias( fail: MsgCallback, no_args: bool, ctx: Context, + options: Options, *, unexpanded_type: Type | None = None, disallow_any: bool = False, @@ -1714,6 +1743,7 @@ def expand_type_alias( node, ctx.line, ctx.column, + options, disallow_any=disallow_any, fail=fail, unexpanded_type=unexpanded_type, @@ -1743,7 +1773,7 @@ def expand_type_alias( else: msg = f"Bad number of arguments for type alias, expected: {exp_len}, given: {act_len}" fail(msg, ctx, code=codes.TYPE_ARG) - return set_any_tvars(node, ctx.line, ctx.column, from_error=True) + return set_any_tvars(node, ctx.line, ctx.column, options, from_error=True) # TODO: we need to check args validity w.r.t alias.alias_tvars. # Otherwise invalid instantiations will be allowed in runtime context. # Note: in type context, these will be still caught by semanal_typeargs. 
@@ -1764,6 +1794,7 @@ def set_any_tvars( node: TypeAlias, newline: int, newcolumn: int, + options: Options, *, from_error: bool = False, disallow_any: bool = False, @@ -1780,7 +1811,7 @@ def set_any_tvars( type_str = ( unexpanded_type.name if isinstance(unexpanded_type, UnboundType) - else format_type_bare(unexpanded_type) + else format_type_bare(unexpanded_type, options) ) else: type_str = node.name @@ -2023,24 +2054,20 @@ def make_optional_type(t: Type) -> Type: return UnionType([t, NoneType()], t.line, t.column) -def fix_instance_types( - t: Type, fail: MsgCallback, note: MsgCallback, python_version: tuple[int, int] -) -> None: +def fix_instance_types(t: Type, fail: MsgCallback, note: MsgCallback, options: Options) -> None: """Recursively fix all instance types (type argument count) in a given type. For example 'Union[Dict, List[str, int]]' will be transformed into 'Union[Dict[Any, Any], List[Any]]' in place. """ - t.accept(InstanceFixer(fail, note, python_version)) + t.accept(InstanceFixer(fail, note, options)) class InstanceFixer(TypeTraverserVisitor): - def __init__( - self, fail: MsgCallback, note: MsgCallback, python_version: tuple[int, int] - ) -> None: + def __init__(self, fail: MsgCallback, note: MsgCallback, options: Options) -> None: self.fail = fail self.note = note - self.python_version = python_version + self.options = options def visit_instance(self, typ: Instance) -> None: super().visit_instance(typ) @@ -2050,7 +2077,7 @@ def visit_instance(self, typ: Instance) -> None: self.fail, self.note, disallow_any=False, - python_version=self.python_version, + options=self.options, use_generic_error=True, ) diff --git a/mypy/typeops.py b/mypy/typeops.py index 8c01fb118076..8ed59b6fbe55 100644 --- a/mypy/typeops.py +++ b/mypy/typeops.py @@ -1050,3 +1050,23 @@ def fixup_partial_type(typ: Type) -> Type: return UnionType.make_union([AnyType(TypeOfAny.unannotated), NoneType()]) else: return Instance(typ.type, [AnyType(TypeOfAny.unannotated)] * len(typ.type.type_vars)) + + +def get_protocol_member(left: Instance, member: str, class_obj: bool) -> ProperType | None: + if member == "__call__" and class_obj: + # Special case: class objects always have __call__ that is just the constructor. + from mypy.checkmember import type_object_type + + def named_type(fullname: str) -> Instance: + return Instance(left.type.mro[-1], []) + + return type_object_type(left.type, named_type) + + if member == "__call__" and left.type.is_metaclass(): + # Special case: we want to avoid falling back to metaclass __call__ + # if constructor signature didn't match, this can cause many false negatives. + return None + + from mypy.subtypes import find_member + + return get_proper_type(find_member(member, left, left, class_obj=class_obj)) diff --git a/mypy/types.py b/mypy/types.py index 9858559ad5c1..d050f4cc81a2 100644 --- a/mypy/types.py +++ b/mypy/types.py @@ -33,6 +33,7 @@ FuncItem, SymbolNode, ) +from mypy.options import Options from mypy.state import state from mypy.util import IdMapper @@ -183,7 +184,7 @@ class TypeOfAny: # Does this Any come from an error? from_error: Final = 5 # Is this a type that can't be represented in mypy's type system? For instance, type of - # call to NewType...). Even though these types aren't real Anys, we treat them as such. + # call to NewType(...). Even though these types aren't real Anys, we treat them as such. # Also used for variables named '_'. special_form: Final = 6 # Does this Any come from interaction with another Any? 
@@ -256,7 +257,10 @@ def accept(self, visitor: TypeVisitor[T]) -> T: raise RuntimeError("Not implemented") def __repr__(self) -> str: - return self.accept(TypeStrVisitor()) + return self.accept(TypeStrVisitor(options=Options())) + + def str_with_options(self, options: Options) -> str: + return self.accept(TypeStrVisitor(options=options)) def serialize(self) -> JsonDict | str: raise NotImplementedError(f"Cannot serialize {self.__class__.__name__} instance") @@ -505,7 +509,6 @@ def is_meta_var(self) -> bool: class TypeVarLikeType(ProperType): - __slots__ = ("name", "fullname", "id", "upper_bound") name: str # Name (may be qualified) @@ -950,7 +953,8 @@ def __init__( def accept(self, visitor: TypeVisitor[T]) -> T: assert isinstance(visitor, SyntheticTypeVisitor) - return cast(T, visitor.visit_callable_argument(self)) + ret: T = visitor.visit_callable_argument(self) + return ret def serialize(self) -> JsonDict: assert False, "Synthetic types don't serialize" @@ -975,7 +979,8 @@ def __init__(self, items: list[Type], line: int = -1, column: int = -1) -> None: def accept(self, visitor: TypeVisitor[T]) -> T: assert isinstance(visitor, SyntheticTypeVisitor) - return cast(T, visitor.visit_type_list(self)) + ret: T = visitor.visit_type_list(self) + return ret def serialize(self) -> JsonDict: assert False, "Synthetic types don't serialize" @@ -1661,7 +1666,7 @@ def __hash__(self) -> int: ) def __eq__(self, other: object) -> bool: - if isinstance(other, Parameters) or isinstance(other, CallableType): + if isinstance(other, (Parameters, CallableType)): return ( self.arg_types == other.arg_types and self.arg_names == other.arg_names @@ -1775,7 +1780,7 @@ def copy_modified( self: CT, arg_types: Bogus[Sequence[Type]] = _dummy, arg_kinds: Bogus[list[ArgKind]] = _dummy, - arg_names: Bogus[list[str | None]] = _dummy, + arg_names: Bogus[Sequence[str | None]] = _dummy, ret_type: Bogus[Type] = _dummy, fallback: Bogus[Instance] = _dummy, name: Bogus[str | None] = _dummy, @@ -1976,20 +1981,15 @@ def param_spec(self) -> ParamSpecType | None: arg_type = self.arg_types[-2] if not isinstance(arg_type, ParamSpecType): return None + # sometimes paramspectypes are analyzed in from mysterious places, # e.g. def f(prefix..., *args: P.args, **kwargs: P.kwargs) -> ...: ... 
prefix = arg_type.prefix if not prefix.arg_types: # TODO: confirm that all arg kinds are positional prefix = Parameters(self.arg_types[:-2], self.arg_kinds[:-2], self.arg_names[:-2]) - return ParamSpecType( - arg_type.name, - arg_type.fullname, - arg_type.id, - ParamSpecFlavor.BARE, - arg_type.upper_bound, - prefix=prefix, - ) + + return arg_type.copy_modified(flavor=ParamSpecFlavor.BARE, prefix=prefix) def expand_param_spec( self, c: CallableType | Parameters, no_prefix: bool = False @@ -2409,17 +2409,17 @@ def names_are_wider_than(self, other: TypedDictType) -> bool: def zip(self, right: TypedDictType) -> Iterable[tuple[str, Type, Type]]: left = self - for (item_name, left_item_type) in left.items.items(): + for item_name, left_item_type in left.items.items(): right_item_type = right.items.get(item_name) if right_item_type is not None: yield (item_name, left_item_type, right_item_type) def zipall(self, right: TypedDictType) -> Iterable[tuple[str, Type | None, Type | None]]: left = self - for (item_name, left_item_type) in left.items.items(): + for item_name, left_item_type in left.items.items(): right_item_type = right.items.get(item_name) yield (item_name, left_item_type, right_item_type) - for (item_name, right_item_type) in right.items.items(): + for item_name, right_item_type in right.items.items(): if item_name in left.items: continue yield (item_name, None, right_item_type) @@ -2489,7 +2489,8 @@ def simple_name(self) -> str: def accept(self, visitor: TypeVisitor[T]) -> T: assert isinstance(visitor, SyntheticTypeVisitor) - return cast(T, visitor.visit_raw_expression_type(self)) + ret: T = visitor.visit_raw_expression_type(self) + return ret def serialize(self) -> JsonDict: assert False, "Synthetic types don't serialize" @@ -2736,7 +2737,8 @@ class EllipsisType(ProperType): def accept(self, visitor: TypeVisitor[T]) -> T: assert isinstance(visitor, SyntheticTypeVisitor) - return cast(T, visitor.visit_ellipsis_type(self)) + ret: T = visitor.visit_ellipsis_type(self) + return ret def serialize(self) -> JsonDict: assert False, "Synthetic types don't serialize" @@ -2845,7 +2847,8 @@ def __init__(self, fullname: str | None, args: list[Type], line: int) -> None: def accept(self, visitor: TypeVisitor[T]) -> T: assert isinstance(visitor, SyntheticTypeVisitor) - return cast(T, visitor.visit_placeholder_type(self)) + ret: T = visitor.visit_placeholder_type(self) + return ret def __hash__(self) -> int: return hash((self.fullname, tuple(self.args))) @@ -2945,9 +2948,10 @@ class TypeStrVisitor(SyntheticTypeVisitor[str]): - Represent the NoneType type as None. """ - def __init__(self, id_mapper: IdMapper | None = None) -> None: + def __init__(self, id_mapper: IdMapper | None = None, *, options: Options) -> None: self.id_mapper = id_mapper self.any_as_dots = False + self.options = options def visit_unbound_type(self, t: UnboundType) -> str: s = t.name + "?" @@ -2989,7 +2993,7 @@ def visit_instance(self, t: Instance) -> str: if t.last_known_value and not t.args: # Instances with a literal fallback should never be generic. If they are, # something went wrong so we fall back to showing the full Instance repr. - s = f"{t.last_known_value}?" + s = f"{t.last_known_value.accept(self)}?" 
else: s = t.type.fullname or t.type.name or "" @@ -3137,11 +3141,12 @@ def visit_overloaded(self, t: Overloaded) -> str: def visit_tuple_type(self, t: TupleType) -> str: s = self.list_str(t.items) + tuple_name = "tuple" if self.options.use_lowercase_names() else "Tuple" if t.partial_fallback and t.partial_fallback.type: fallback_name = t.partial_fallback.type.fullname if fallback_name != "builtins.tuple": - return f"Tuple[{s}, fallback={t.partial_fallback.accept(self)}]" - return f"Tuple[{s}]" + return f"{tuple_name}[{s}, fallback={t.partial_fallback.accept(self)}]" + return f"{tuple_name}[{s}]" def visit_typeddict_type(self, t: TypedDictType) -> str: def item_str(name: str, typ: str) -> str: @@ -3285,7 +3290,7 @@ def visit_callable_type(self, t: CallableType) -> Type: # TODO: this branch duplicates the one in expand_type(), find a way to reuse it # without import cycle types <-> typeanal <-> expandtype. repl = get_proper_type(self.replacements.get(param_spec.id)) - if isinstance(repl, CallableType) or isinstance(repl, Parameters): + if isinstance(repl, (CallableType, Parameters)): prefix = param_spec.prefix t = t.expand_param_spec(repl, no_prefix=True) return t.copy_modified( diff --git a/mypy/typeshed/stdlib/VERSIONS b/mypy/typeshed/stdlib/VERSIONS index bd1abd204885..d24aa35faf6e 100644 --- a/mypy/typeshed/stdlib/VERSIONS +++ b/mypy/typeshed/stdlib/VERSIONS @@ -152,6 +152,7 @@ importlib: 2.7- importlib.metadata: 3.8- importlib.metadata._meta: 3.10- importlib.resources: 3.7- +importlib.resources.abc: 3.11- inspect: 2.7- io: 2.7- ipaddress: 3.3- diff --git a/mypy/typeshed/stdlib/_compression.pyi b/mypy/typeshed/stdlib/_compression.pyi index 817f251586b2..24e11261140b 100644 --- a/mypy/typeshed/stdlib/_compression.pyi +++ b/mypy/typeshed/stdlib/_compression.pyi @@ -17,7 +17,7 @@ class DecompressReader(RawIOBase): self, fp: _Reader, decomp_factory: Callable[..., object], - trailing_error: type[Exception] | tuple[type[Exception], ...] = ..., + trailing_error: type[Exception] | tuple[type[Exception], ...] = (), **decomp_args: Any, ) -> None: ... def readinto(self, b: WriteableBuffer) -> int: ... diff --git a/mypy/typeshed/stdlib/_ctypes.pyi b/mypy/typeshed/stdlib/_ctypes.pyi index 0ad2fcb571b8..b0c044352a3c 100644 --- a/mypy/typeshed/stdlib/_ctypes.pyi +++ b/mypy/typeshed/stdlib/_ctypes.pyi @@ -1,6 +1,16 @@ import sys -from ctypes import _CArgObject, _PointerLike -from typing_extensions import TypeAlias +from _typeshed import ReadableBuffer, WriteableBuffer +from abc import abstractmethod +from collections.abc import Iterable, Iterator, Mapping, Sequence +from ctypes import CDLL, _CArgObject, _PointerLike +from typing import Any, Generic, TypeVar, overload +from typing_extensions import Self, TypeAlias + +if sys.version_info >= (3, 9): + from types import GenericAlias + +_T = TypeVar("_T") +_CT = TypeVar("_CT", bound=_CData) FUNCFLAG_CDECL: int FUNCFLAG_PYTHONAPI: int @@ -27,3 +37,90 @@ if sys.platform == "win32": FUNCFLAG_HRESULT: int FUNCFLAG_STDCALL: int + +class _CDataMeta(type): + # By default mypy complains about the following two methods, because strictly speaking cls + # might not be a Type[_CT]. However this can never actually happen, because the only class that + # uses _CDataMeta as its metaclass is _CData. So it's safe to ignore the errors here. + def __mul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] + def __rmul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ... 
# type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] + +class _CData(metaclass=_CDataMeta): + _b_base_: int + _b_needsfree_: bool + _objects: Mapping[Any, int] | None + @classmethod + def from_buffer(cls, source: WriteableBuffer, offset: int = ...) -> Self: ... + @classmethod + def from_buffer_copy(cls, source: ReadableBuffer, offset: int = ...) -> Self: ... + @classmethod + def from_address(cls, address: int) -> Self: ... + @classmethod + def from_param(cls, obj: Any) -> Self | _CArgObject: ... + @classmethod + def in_dll(cls, library: CDLL, name: str) -> Self: ... + +class _SimpleCData(Generic[_T], _CData): + value: _T + # The TypeVar can be unsolved here, + # but we can't use overloads without creating many, many mypy false-positive errors + def __init__(self, value: _T = ...) -> None: ... # pyright: ignore[reportInvalidTypeVarUse] + +class _CField: + offset: int + size: int + +class _StructUnionMeta(_CDataMeta): + _fields_: Sequence[tuple[str, type[_CData]] | tuple[str, type[_CData], int]] + _pack_: int + _anonymous_: Sequence[str] + def __getattr__(self, name: str) -> _CField: ... + +class _StructUnionBase(_CData, metaclass=_StructUnionMeta): + def __init__(self, *args: Any, **kw: Any) -> None: ... + def __getattr__(self, name: str) -> Any: ... + def __setattr__(self, name: str, value: Any) -> None: ... + +class Union(_StructUnionBase): ... +class Structure(_StructUnionBase): ... + +class Array(Generic[_CT], _CData): + @property + @abstractmethod + def _length_(self) -> int: ... + @_length_.setter + def _length_(self, value: int) -> None: ... + @property + @abstractmethod + def _type_(self) -> type[_CT]: ... + @_type_.setter + def _type_(self, value: type[_CT]) -> None: ... + raw: bytes # Note: only available if _CT == c_char + value: Any # Note: bytes if _CT == c_char, str if _CT == c_wchar, unavailable otherwise + # TODO These methods cannot be annotated correctly at the moment. + # All of these "Any"s stand for the array's element type, but it's not possible to use _CT + # here, because of a special feature of ctypes. + # By default, when accessing an element of an Array[_CT], the returned object has type _CT. + # However, when _CT is a "simple type" like c_int, ctypes automatically "unboxes" the object + # and converts it to the corresponding Python primitive. For example, when accessing an element + # of an Array[c_int], a Python int object is returned, not a c_int. + # This behavior does *not* apply to subclasses of "simple types". + # If MyInt is a subclass of c_int, then accessing an element of an Array[MyInt] returns + # a MyInt, not an int. + # This special behavior is not easy to model in a stub, so for now all places where + # the array element type would belong are annotated with Any instead. + def __init__(self, *args: Any) -> None: ... + @overload + def __getitem__(self, __key: int) -> Any: ... + @overload + def __getitem__(self, __key: slice) -> list[Any]: ... + @overload + def __setitem__(self, __key: int, __value: Any) -> None: ... + @overload + def __setitem__(self, __key: slice, __value: Iterable[Any]) -> None: ... + def __iter__(self) -> Iterator[Any]: ... + # Can't inherit from Sized because the metaclass conflict between + # Sized and _CData prevents using _CDataMeta. + def __len__(self) -> int: ... + if sys.version_info >= (3, 9): + def __class_getitem__(cls, item: Any) -> GenericAlias: ... 
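
The `Array` stub added above relies on a ctypes runtime quirk that its comments describe in prose. Here is a minimal sketch (an editor's illustration, not part of the patch): element access on an array of a "simple type" such as `c_int` is unboxed to the corresponding Python primitive, while element access on an array of a `c_int` *subclass* returns the subclass instance, which is why the stub has to annotate the element positions with `Any`.

```python
from ctypes import c_int


class MyInt(c_int):
    """A trivial subclass of a 'simple' ctypes type."""


plain = (c_int * 3)(1, 2, 3)   # an Array[c_int]
subbed = (MyInt * 3)(1, 2, 3)  # an Array[MyInt]

# Simple types are unboxed to the corresponding Python primitive on access...
print(type(plain[0]))   # <class 'int'>
# ...but subclasses of simple types are returned as-is, not unboxed.
print(type(subbed[0]))  # <class '__main__.MyInt'>
```
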
diff --git a/mypy/typeshed/stdlib/_dummy_thread.pyi b/mypy/typeshed/stdlib/_dummy_thread.pyi index e371dd0e9933..541096734a91 100644 --- a/mypy/typeshed/stdlib/_dummy_thread.pyi +++ b/mypy/typeshed/stdlib/_dummy_thread.pyi @@ -7,7 +7,7 @@ __all__ = ["error", "start_new_thread", "exit", "get_ident", "allocate_lock", "i TIMEOUT_MAX: int error = RuntimeError -def start_new_thread(function: Callable[..., object], args: tuple[Any, ...], kwargs: dict[str, Any] = ...) -> None: ... +def start_new_thread(function: Callable[..., object], args: tuple[Any, ...], kwargs: dict[str, Any] = {}) -> None: ... def exit() -> NoReturn: ... def get_ident() -> int: ... def allocate_lock() -> LockType: ... diff --git a/mypy/typeshed/stdlib/_dummy_threading.pyi b/mypy/typeshed/stdlib/_dummy_threading.pyi index 9a49dfa9649e..abcf3a13a496 100644 --- a/mypy/typeshed/stdlib/_dummy_threading.pyi +++ b/mypy/typeshed/stdlib/_dummy_threading.pyi @@ -62,7 +62,7 @@ class Thread: group: None = None, target: Callable[..., object] | None = None, name: str | None = None, - args: Iterable[Any] = ..., + args: Iterable[Any] = (), kwargs: Mapping[str, Any] | None = None, *, daemon: bool | None = None, diff --git a/mypy/typeshed/stdlib/_sitebuiltins.pyi b/mypy/typeshed/stdlib/_sitebuiltins.pyi index 3bda2d88425d..94ad701d4a73 100644 --- a/mypy/typeshed/stdlib/_sitebuiltins.pyi +++ b/mypy/typeshed/stdlib/_sitebuiltins.pyi @@ -10,7 +10,7 @@ class Quitter: class _Printer: MAXLINES: ClassVar[Literal[23]] - def __init__(self, name: str, data: str, files: Iterable[str] = ..., dirs: Iterable[str] = ...) -> None: ... + def __init__(self, name: str, data: str, files: Iterable[str] = (), dirs: Iterable[str] = ()) -> None: ... def __call__(self) -> None: ... class _Helper: diff --git a/mypy/typeshed/stdlib/_winapi.pyi b/mypy/typeshed/stdlib/_winapi.pyi index e21402b801c5..ca1e61f0f19f 100644 --- a/mypy/typeshed/stdlib/_winapi.pyi +++ b/mypy/typeshed/stdlib/_winapi.pyi @@ -54,7 +54,10 @@ if sys.platform == "win32": HIGH_PRIORITY_CLASS: Literal[0x80] INFINITE: Literal[0xFFFFFFFF] if sys.version_info >= (3, 8): - INVALID_HANDLE_VALUE: Literal[0xFFFFFFFFFFFFFFFF] + # Ignore the flake8 error -- flake8-pyi assumes + # most numbers this long will be implementation details, + # but here we can see that it's a power of 2 + INVALID_HANDLE_VALUE: Literal[0xFFFFFFFFFFFFFFFF] # noqa: Y054 IDLE_PRIORITY_CLASS: Literal[0x40] NORMAL_PRIORITY_CLASS: Literal[0x20] REALTIME_PRIORITY_CLASS: Literal[0x100] diff --git a/mypy/typeshed/stdlib/abc.pyi b/mypy/typeshed/stdlib/abc.pyi index 068dab4752be..ec04d8f85d12 100644 --- a/mypy/typeshed/stdlib/abc.pyi +++ b/mypy/typeshed/stdlib/abc.pyi @@ -2,12 +2,13 @@ import _typeshed import sys from _typeshed import SupportsWrite from collections.abc import Callable -from typing import Any, Generic, TypeVar -from typing_extensions import Literal +from typing import Any, TypeVar +from typing_extensions import Concatenate, Literal, ParamSpec _T = TypeVar("_T") _R_co = TypeVar("_R_co", covariant=True) _FuncT = TypeVar("_FuncT", bound=Callable[..., Any]) +_P = ParamSpec("_P") # These definitions have special processing in mypy class ABCMeta(type): @@ -28,13 +29,13 @@ class ABCMeta(type): def abstractmethod(funcobj: _FuncT) -> _FuncT: ... -class abstractclassmethod(classmethod[_R_co], Generic[_R_co]): +class abstractclassmethod(classmethod[_T, _P, _R_co]): __isabstractmethod__: Literal[True] - def __init__(self: abstractclassmethod[_R_co], callable: Callable[..., _R_co]) -> None: ... 
+ def __init__(self, callable: Callable[Concatenate[_T, _P], _R_co]) -> None: ... -class abstractstaticmethod(staticmethod[_R_co], Generic[_R_co]): +class abstractstaticmethod(staticmethod[_P, _R_co]): __isabstractmethod__: Literal[True] - def __init__(self, callable: Callable[..., _R_co]) -> None: ... + def __init__(self, callable: Callable[_P, _R_co]) -> None: ... class abstractproperty(property): __isabstractmethod__: Literal[True] diff --git a/mypy/typeshed/stdlib/argparse.pyi b/mypy/typeshed/stdlib/argparse.pyi index eb0b707bafaa..1e956069314b 100644 --- a/mypy/typeshed/stdlib/argparse.pyi +++ b/mypy/typeshed/stdlib/argparse.pyi @@ -135,7 +135,7 @@ class ArgumentParser(_AttributeHolder, _ActionsContainer): usage: str | None = None, description: str | None = None, epilog: str | None = None, - parents: Sequence[ArgumentParser] = ..., + parents: Sequence[ArgumentParser] = [], formatter_class: _FormatterClass = ..., prefix_chars: str = "-", fromfile_prefix_chars: str | None = None, @@ -152,7 +152,7 @@ class ArgumentParser(_AttributeHolder, _ActionsContainer): usage: str | None = None, description: str | None = None, epilog: str | None = None, - parents: Sequence[ArgumentParser] = ..., + parents: Sequence[ArgumentParser] = [], formatter_class: _FormatterClass = ..., prefix_chars: str = "-", fromfile_prefix_chars: str | None = None, diff --git a/mypy/typeshed/stdlib/asyncio/base_events.pyi b/mypy/typeshed/stdlib/asyncio/base_events.pyi index 3b8f286710b9..992f6af5c4a8 100644 --- a/mypy/typeshed/stdlib/asyncio/base_events.pyi +++ b/mypy/typeshed/stdlib/asyncio/base_events.pyi @@ -417,9 +417,9 @@ class BaseEventLoop(AbstractEventLoop): async def sock_connect(self, sock: socket, address: _Address) -> None: ... async def sock_accept(self, sock: socket) -> tuple[socket, _RetAddress]: ... if sys.version_info >= (3, 11): - async def sock_recvfrom(self, sock: socket, bufsize: int) -> bytes: ... - async def sock_recvfrom_into(self, sock: socket, buf: WriteableBuffer, nbytes: int = 0) -> int: ... - async def sock_sendto(self, sock: socket, data: ReadableBuffer, address: _Address) -> None: ... + async def sock_recvfrom(self, sock: socket, bufsize: int) -> tuple[bytes, _RetAddress]: ... + async def sock_recvfrom_into(self, sock: socket, buf: WriteableBuffer, nbytes: int = 0) -> tuple[int, _RetAddress]: ... + async def sock_sendto(self, sock: socket, data: ReadableBuffer, address: _Address) -> int: ... # Signal handling. def add_signal_handler(self, sig: int, callback: Callable[..., Any], *args: Any) -> None: ... def remove_signal_handler(self, sig: int) -> bool: ... diff --git a/mypy/typeshed/stdlib/asyncio/events.pyi b/mypy/typeshed/stdlib/asyncio/events.pyi index f97afe873c9f..34576b091edb 100644 --- a/mypy/typeshed/stdlib/asyncio/events.pyi +++ b/mypy/typeshed/stdlib/asyncio/events.pyi @@ -569,11 +569,11 @@ class AbstractEventLoop: async def sock_accept(self, sock: socket) -> tuple[socket, _RetAddress]: ... if sys.version_info >= (3, 11): @abstractmethod - async def sock_recvfrom(self, sock: socket, bufsize: int) -> bytes: ... + async def sock_recvfrom(self, sock: socket, bufsize: int) -> tuple[bytes, _RetAddress]: ... @abstractmethod - async def sock_recvfrom_into(self, sock: socket, buf: WriteableBuffer, nbytes: int = 0) -> int: ... + async def sock_recvfrom_into(self, sock: socket, buf: WriteableBuffer, nbytes: int = 0) -> tuple[int, _RetAddress]: ... @abstractmethod - async def sock_sendto(self, sock: socket, data: ReadableBuffer, address: _Address) -> None: ... 
+ async def sock_sendto(self, sock: socket, data: ReadableBuffer, address: _Address) -> int: ... # Signal handling. @abstractmethod def add_signal_handler(self, sig: int, callback: Callable[..., object], *args: Any) -> None: ... diff --git a/mypy/typeshed/stdlib/asyncio/windows_utils.pyi b/mypy/typeshed/stdlib/asyncio/windows_utils.pyi index f3a82e2b8462..9f88718b7b70 100644 --- a/mypy/typeshed/stdlib/asyncio/windows_utils.pyi +++ b/mypy/typeshed/stdlib/asyncio/windows_utils.pyi @@ -15,7 +15,7 @@ if sys.platform == "win32": BUFSIZE: Literal[8192] PIPE = subprocess.PIPE STDOUT = subprocess.STDOUT - def pipe(*, duplex: bool = False, overlapped: tuple[bool, bool] = ..., bufsize: int = 8192) -> tuple[int, int]: ... + def pipe(*, duplex: bool = False, overlapped: tuple[bool, bool] = (True, True), bufsize: int = 8192) -> tuple[int, int]: ... class PipeHandle: def __init__(self, handle: int) -> None: ... diff --git a/mypy/typeshed/stdlib/builtins.pyi b/mypy/typeshed/stdlib/builtins.pyi index 9947c52f1c7c..c72da4aba335 100644 --- a/mypy/typeshed/stdlib/builtins.pyi +++ b/mypy/typeshed/stdlib/builtins.pyi @@ -54,7 +54,7 @@ from typing import ( # noqa: Y022 overload, type_check_only, ) -from typing_extensions import Literal, Self, SupportsIndex, TypeAlias, TypeGuard, final +from typing_extensions import Concatenate, Literal, ParamSpec, Self, SupportsIndex, TypeAlias, TypeGuard, final if sys.version_info >= (3, 9): from types import GenericAlias @@ -75,6 +75,7 @@ _SupportsNextT = TypeVar("_SupportsNextT", bound=SupportsNext[Any], covariant=Tr _SupportsAnextT = TypeVar("_SupportsAnextT", bound=SupportsAnext[Any], covariant=True) _AwaitableT = TypeVar("_AwaitableT", bound=Awaitable[Any]) _AwaitableT_co = TypeVar("_AwaitableT_co", bound=Awaitable[Any], covariant=True) +_P = ParamSpec("_P") class object: __doc__: str | None @@ -107,38 +108,40 @@ class object: def __reduce_ex__(self, __protocol: SupportsIndex) -> str | tuple[Any, ...]: ... else: def __reduce_ex__(self, __protocol: int) -> str | tuple[Any, ...]: ... + if sys.version_info >= (3, 11): + def __getstate__(self) -> object: ... def __dir__(self) -> Iterable[str]: ... def __init_subclass__(cls) -> None: ... @classmethod def __subclasshook__(cls, __subclass: type) -> bool: ... -class staticmethod(Generic[_R_co]): +class staticmethod(Generic[_P, _R_co]): @property - def __func__(self) -> Callable[..., _R_co]: ... + def __func__(self) -> Callable[_P, _R_co]: ... @property def __isabstractmethod__(self) -> bool: ... - def __init__(self: staticmethod[_R_co], __f: Callable[..., _R_co]) -> None: ... - def __get__(self, __instance: _T, __owner: type[_T] | None = None) -> Callable[..., _R_co]: ... + def __init__(self, __f: Callable[_P, _R_co]) -> None: ... + def __get__(self, __instance: _T, __owner: type[_T] | None = None) -> Callable[_P, _R_co]: ... if sys.version_info >= (3, 10): __name__: str __qualname__: str @property - def __wrapped__(self) -> Callable[..., _R_co]: ... - def __call__(self, *args: Any, **kwargs: Any) -> _R_co: ... + def __wrapped__(self) -> Callable[_P, _R_co]: ... + def __call__(self, *args: _P.args, **kwargs: _P.kwargs) -> _R_co: ... -class classmethod(Generic[_R_co]): +class classmethod(Generic[_T, _P, _R_co]): @property - def __func__(self) -> Callable[..., _R_co]: ... + def __func__(self) -> Callable[Concatenate[_T, _P], _R_co]: ... @property def __isabstractmethod__(self) -> bool: ... - def __init__(self: classmethod[_R_co], __f: Callable[..., _R_co]) -> None: ... 
- def __get__(self, __instance: _T, __owner: type[_T] | None = None) -> Callable[..., _R_co]: ... + def __init__(self, __f: Callable[Concatenate[_T, _P], _R_co]) -> None: ... + def __get__(self, __instance: _T, __owner: type[_T] | None = None) -> Callable[_P, _R_co]: ... if sys.version_info >= (3, 10): __name__: str __qualname__: str @property - def __wrapped__(self) -> Callable[..., _R_co]: ... + def __wrapped__(self) -> Callable[Concatenate[_T, _P], _R_co]: ... class type: @property @@ -939,10 +942,12 @@ class dict(MutableMapping[_KT, _VT], Generic[_KT, _VT]): def __init__(self, __iterable: Iterable[tuple[_KT, _VT]]) -> None: ... @overload def __init__(self: dict[str, _VT], __iterable: Iterable[tuple[str, _VT]], **kwargs: _VT) -> None: ... - # Next overload is for dict(string.split(sep) for string in iterable) + # Next two overloads are for dict(string.split(sep) for string in iterable) # Cannot be Iterable[Sequence[_T]] or otherwise dict(["foo", "bar", "baz"]) is not an error @overload def __init__(self: dict[str, str], __iterable: Iterable[list[str]]) -> None: ... + @overload + def __init__(self: dict[bytes, bytes], __iterable: Iterable[list[bytes]]) -> None: ... def __new__(cls, *args: Any, **kwargs: Any) -> Self: ... def copy(self) -> dict[_KT, _VT]: ... def keys(self) -> dict_keys[_KT, _VT]: ... @@ -1746,7 +1751,7 @@ def __import__( name: str, globals: Mapping[str, object] | None = None, locals: Mapping[str, object] | None = None, - fromlist: Sequence[str] = ..., + fromlist: Sequence[str] = (), level: int = 0, ) -> types.ModuleType: ... def __build_class__(__func: Callable[[], _Cell | Any], __name: str, *bases: Any, metaclass: Any = ..., **kwds: Any) -> Any: ... diff --git a/mypy/typeshed/stdlib/codecs.pyi b/mypy/typeshed/stdlib/codecs.pyi index 5a22853b6aee..3f6d2d3d16b7 100644 --- a/mypy/typeshed/stdlib/codecs.pyi +++ b/mypy/typeshed/stdlib/codecs.pyi @@ -272,8 +272,9 @@ class StreamRecoder(BinaryIO): def readlines(self, sizehint: int | None = None) -> list[bytes]: ... def __next__(self) -> bytes: ... def __iter__(self) -> Self: ... + # Base class accepts more types than just bytes def write(self, data: bytes) -> None: ... # type: ignore[override] - def writelines(self, list: Iterable[bytes]) -> None: ... + def writelines(self, list: Iterable[bytes]) -> None: ... # type: ignore[override] def reset(self) -> None: ... def __getattr__(self, name: str) -> Any: ... def __enter__(self) -> Self: ... diff --git a/mypy/typeshed/stdlib/collections/__init__.pyi b/mypy/typeshed/stdlib/collections/__init__.pyi index 1a40421146cc..d5ca17c749eb 100644 --- a/mypy/typeshed/stdlib/collections/__init__.pyi +++ b/mypy/typeshed/stdlib/collections/__init__.pyi @@ -62,6 +62,8 @@ class UserDict(MutableMapping[_KT, _VT], Generic[_KT, _VT]): def __init__(self: UserDict[str, _VT], __iterable: Iterable[tuple[str, _VT]], **kwargs: _VT) -> None: ... @overload def __init__(self: UserDict[str, str], __iterable: Iterable[list[str]]) -> None: ... + @overload + def __init__(self: UserDict[bytes, bytes], __iterable: Iterable[list[bytes]]) -> None: ... def __len__(self) -> int: ... def __getitem__(self, key: _KT) -> _VT: ... def __setitem__(self, key: _KT, item: _VT) -> None: ... 
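
The new `Iterable[list[bytes]]` overloads on `dict.__init__` and `UserDict.__init__` above mirror the existing `list[str]` overload. A minimal sketch (an editor's illustration, not part of the patch) of the call pattern they are meant to accept:

```python
# dict(x.split(sep) for x in iterable) -- the str case was already covered;
# the bytes case is what the added overload handles.
d1 = dict(s.split("=") for s in ["a=1", "b=2"])      # dict[str, str]
d2 = dict(b.split(b"=") for b in [b"a=1", b"b=2"])   # dict[bytes, bytes]

# The overloads deliberately use list[...] rather than Sequence[...] so that
# dict(["foo", "bar", "baz"]) is still flagged by type checkers, as the
# stub comment notes.
```
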
diff --git a/mypy/typeshed/stdlib/concurrent/futures/process.pyi b/mypy/typeshed/stdlib/concurrent/futures/process.pyi index 85af2e7f84c7..000e7a43503a 100644 --- a/mypy/typeshed/stdlib/concurrent/futures/process.pyi +++ b/mypy/typeshed/stdlib/concurrent/futures/process.pyi @@ -153,9 +153,9 @@ def _chain_from_iterable_of_lists(iterable: Iterable[MutableSequence[Any]]) -> A class BrokenProcessPool(BrokenExecutor): ... class ProcessPoolExecutor(Executor): - _mp_context: BaseContext | None = ... - _initializer: Callable[..., None] | None = ... - _initargs: tuple[Any, ...] = ... + _mp_context: BaseContext | None + _initializer: Callable[..., None] | None + _initargs: tuple[Any, ...] _executor_manager_thread: _ThreadWakeup _processes: MutableMapping[int, Process] _shutdown_thread: bool @@ -174,7 +174,7 @@ class ProcessPoolExecutor(Executor): max_workers: int | None = None, mp_context: BaseContext | None = None, initializer: Callable[..., object] | None = None, - initargs: tuple[Any, ...] = ..., + initargs: tuple[Any, ...] = (), *, max_tasks_per_child: int | None = None, ) -> None: ... @@ -184,7 +184,7 @@ class ProcessPoolExecutor(Executor): max_workers: int | None = None, mp_context: BaseContext | None = None, initializer: Callable[..., object] | None = None, - initargs: tuple[Any, ...] = ..., + initargs: tuple[Any, ...] = (), ) -> None: ... if sys.version_info >= (3, 9): def _start_executor_manager_thread(self) -> None: ... diff --git a/mypy/typeshed/stdlib/concurrent/futures/thread.pyi b/mypy/typeshed/stdlib/concurrent/futures/thread.pyi index e43dd3dfa33a..0b00d524aa3d 100644 --- a/mypy/typeshed/stdlib/concurrent/futures/thread.pyi +++ b/mypy/typeshed/stdlib/concurrent/futures/thread.pyi @@ -44,16 +44,16 @@ class ThreadPoolExecutor(Executor): _broken: bool _shutdown: bool _shutdown_lock: Lock - _thread_name_prefix: str | None = ... - _initializer: Callable[..., None] | None = ... - _initargs: tuple[Any, ...] = ... + _thread_name_prefix: str | None + _initializer: Callable[..., None] | None + _initargs: tuple[Any, ...] _work_queue: queue.SimpleQueue[_WorkItem[Any]] def __init__( self, max_workers: int | None = None, thread_name_prefix: str = "", initializer: Callable[..., object] | None = None, - initargs: tuple[Any, ...] = ..., + initargs: tuple[Any, ...] = (), ) -> None: ... def _adjust_thread_count(self) -> None: ... def _initializer_failed(self) -> None: ... 
diff --git a/mypy/typeshed/stdlib/configparser.pyi b/mypy/typeshed/stdlib/configparser.pyi index 2c5b68385767..6f9f788310d1 100644 --- a/mypy/typeshed/stdlib/configparser.pyi +++ b/mypy/typeshed/stdlib/configparser.pyi @@ -17,7 +17,6 @@ __all__ = [ "ParsingError", "MissingSectionHeaderError", "ConfigParser", - "SafeConfigParser", "RawConfigParser", "Interpolation", "BasicInterpolation", @@ -29,6 +28,9 @@ __all__ = [ "MAX_INTERPOLATION_DEPTH", ] +if sys.version_info < (3, 12): + __all__ += ["SafeConfigParser"] + _Section: TypeAlias = Mapping[str, str] _Parser: TypeAlias = MutableMapping[str, _Section] _ConverterCallback: TypeAlias = Callable[[str], Any] @@ -69,8 +71,8 @@ class RawConfigParser(_Parser): dict_type: type[Mapping[str, str]] = ..., *, allow_no_value: Literal[True], - delimiters: Sequence[str] = ..., - comment_prefixes: Sequence[str] = ..., + delimiters: Sequence[str] = ("=", ":"), + comment_prefixes: Sequence[str] = ("#", ";"), inline_comment_prefixes: Sequence[str] | None = None, strict: bool = True, empty_lines_in_values: bool = True, @@ -85,8 +87,8 @@ class RawConfigParser(_Parser): dict_type: type[Mapping[str, str]], allow_no_value: Literal[True], *, - delimiters: Sequence[str] = ..., - comment_prefixes: Sequence[str] = ..., + delimiters: Sequence[str] = ("=", ":"), + comment_prefixes: Sequence[str] = ("#", ";"), inline_comment_prefixes: Sequence[str] | None = None, strict: bool = True, empty_lines_in_values: bool = True, @@ -101,8 +103,8 @@ class RawConfigParser(_Parser): dict_type: type[Mapping[str, str]] = ..., allow_no_value: bool = False, *, - delimiters: Sequence[str] = ..., - comment_prefixes: Sequence[str] = ..., + delimiters: Sequence[str] = ("=", ":"), + comment_prefixes: Sequence[str] = ("#", ";"), inline_comment_prefixes: Sequence[str] | None = None, strict: bool = True, empty_lines_in_values: bool = True, diff --git a/mypy/typeshed/stdlib/copy.pyi b/mypy/typeshed/stdlib/copy.pyi index f68965d3dc91..8a2dcc508e5d 100644 --- a/mypy/typeshed/stdlib/copy.pyi +++ b/mypy/typeshed/stdlib/copy.pyi @@ -8,7 +8,7 @@ _T = TypeVar("_T") PyStringMap: Any # Note: memo and _nil are internal kwargs. -def deepcopy(x: _T, memo: dict[int, Any] | None = None, _nil: Any = ...) -> _T: ... +def deepcopy(x: _T, memo: dict[int, Any] | None = None, _nil: Any = []) -> _T: ... def copy(x: _T) -> _T: ... class Error(Exception): ... 
diff --git a/mypy/typeshed/stdlib/ctypes/__init__.pyi b/mypy/typeshed/stdlib/ctypes/__init__.pyi index 2ae5b22f3074..c85d65c9f474 100644 --- a/mypy/typeshed/stdlib/ctypes/__init__.pyi +++ b/mypy/typeshed/stdlib/ctypes/__init__.pyi @@ -1,10 +1,20 @@ import sys -from _ctypes import RTLD_GLOBAL as RTLD_GLOBAL, RTLD_LOCAL as RTLD_LOCAL -from _typeshed import ReadableBuffer, WriteableBuffer -from abc import abstractmethod -from collections.abc import Callable, Iterable, Iterator, Mapping, Sequence +from _ctypes import ( + RTLD_GLOBAL as RTLD_GLOBAL, + RTLD_LOCAL as RTLD_LOCAL, + Array as Array, + Structure as Structure, + Union as Union, + _CData as _CData, + _CDataMeta as _CDataMeta, + _CField as _CField, + _SimpleCData as _SimpleCData, + _StructUnionBase as _StructUnionBase, + _StructUnionMeta as _StructUnionMeta, +) +from collections.abc import Callable, Sequence from typing import Any, ClassVar, Generic, TypeVar, overload -from typing_extensions import Self, TypeAlias +from typing_extensions import TypeAlias if sys.version_info >= (3, 9): from types import GenericAlias @@ -65,28 +75,6 @@ if sys.platform == "win32": pydll: LibraryLoader[PyDLL] pythonapi: PyDLL -class _CDataMeta(type): - # By default mypy complains about the following two methods, because strictly speaking cls - # might not be a Type[_CT]. However this can never actually happen, because the only class that - # uses _CDataMeta as its metaclass is _CData. So it's safe to ignore the errors here. - def __mul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] - def __rmul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] - -class _CData(metaclass=_CDataMeta): - _b_base_: int - _b_needsfree_: bool - _objects: Mapping[Any, int] | None - @classmethod - def from_buffer(cls, source: WriteableBuffer, offset: int = ...) -> Self: ... - @classmethod - def from_buffer_copy(cls, source: ReadableBuffer, offset: int = ...) -> Self: ... - @classmethod - def from_address(cls, address: int) -> Self: ... - @classmethod - def from_param(cls, obj: Any) -> Self | _CArgObject: ... - @classmethod - def in_dll(cls, library: CDLL, name: str) -> Self: ... - class _CanCastTo(_CData): ... class _PointerLike(_CanCastTo): ... @@ -190,12 +178,6 @@ if sys.platform == "win32": def wstring_at(address: _CVoidConstPLike, size: int = -1) -> str: ... -class _SimpleCData(Generic[_T], _CData): - value: _T - # The TypeVar can be unsolved here, - # but we can't use overloads without creating many, many mypy false-positive errors - def __init__(self, value: _T = ...) -> None: ... # pyright: ignore[reportInvalidTypeVarUse] - class c_byte(_SimpleCData[int]): ... class c_char(_SimpleCData[bytes]): @@ -239,64 +221,5 @@ if sys.platform == "win32": class HRESULT(_SimpleCData[int]): ... # TODO undocumented class py_object(_CanCastTo, _SimpleCData[_T]): ... - -class _CField: - offset: int - size: int - -class _StructUnionMeta(_CDataMeta): - _fields_: Sequence[tuple[str, type[_CData]] | tuple[str, type[_CData], int]] - _pack_: int - _anonymous_: Sequence[str] - def __getattr__(self, name: str) -> _CField: ... - -class _StructUnionBase(_CData, metaclass=_StructUnionMeta): - def __init__(self, *args: Any, **kw: Any) -> None: ... - def __getattr__(self, name: str) -> Any: ... - def __setattr__(self, name: str, value: Any) -> None: ... - -class Union(_StructUnionBase): ... -class Structure(_StructUnionBase): ... 
class BigEndianStructure(Structure): ... class LittleEndianStructure(Structure): ... - -class Array(Generic[_CT], _CData): - @property - @abstractmethod - def _length_(self) -> int: ... - @_length_.setter - def _length_(self, value: int) -> None: ... - @property - @abstractmethod - def _type_(self) -> type[_CT]: ... - @_type_.setter - def _type_(self, value: type[_CT]) -> None: ... - raw: bytes # Note: only available if _CT == c_char - value: Any # Note: bytes if _CT == c_char, str if _CT == c_wchar, unavailable otherwise - # TODO These methods cannot be annotated correctly at the moment. - # All of these "Any"s stand for the array's element type, but it's not possible to use _CT - # here, because of a special feature of ctypes. - # By default, when accessing an element of an Array[_CT], the returned object has type _CT. - # However, when _CT is a "simple type" like c_int, ctypes automatically "unboxes" the object - # and converts it to the corresponding Python primitive. For example, when accessing an element - # of an Array[c_int], a Python int object is returned, not a c_int. - # This behavior does *not* apply to subclasses of "simple types". - # If MyInt is a subclass of c_int, then accessing an element of an Array[MyInt] returns - # a MyInt, not an int. - # This special behavior is not easy to model in a stub, so for now all places where - # the array element type would belong are annotated with Any instead. - def __init__(self, *args: Any) -> None: ... - @overload - def __getitem__(self, __key: int) -> Any: ... - @overload - def __getitem__(self, __key: slice) -> list[Any]: ... - @overload - def __setitem__(self, __key: int, __value: Any) -> None: ... - @overload - def __setitem__(self, __key: slice, __value: Iterable[Any]) -> None: ... - def __iter__(self) -> Iterator[Any]: ... - # Can't inherit from Sized because the metaclass conflict between - # Sized and _CData prevents using _CDataMeta. - def __len__(self) -> int: ... - if sys.version_info >= (3, 9): - def __class_getitem__(cls, item: Any) -> GenericAlias: ... diff --git a/mypy/typeshed/stdlib/dataclasses.pyi b/mypy/typeshed/stdlib/dataclasses.pyi index d254a594d8e8..13cffcd70c0e 100644 --- a/mypy/typeshed/stdlib/dataclasses.pyi +++ b/mypy/typeshed/stdlib/dataclasses.pyi @@ -247,12 +247,32 @@ class InitVar(Generic[_T], metaclass=_InitVarMeta): @overload def __class_getitem__(cls, type: Any) -> InitVar[Any]: ... -if sys.version_info >= (3, 11): +if sys.version_info >= (3, 12): + def make_dataclass( + cls_name: str, + fields: Iterable[str | tuple[str, type] | tuple[str, type, Any]], + *, + bases: tuple[type, ...] = (), + namespace: dict[str, Any] | None = None, + init: bool = True, + repr: bool = True, + eq: bool = True, + order: bool = False, + unsafe_hash: bool = False, + frozen: bool = False, + match_args: bool = True, + kw_only: bool = False, + slots: bool = False, + weakref_slot: bool = False, + module: str | None = None, + ) -> type: ... + +elif sys.version_info >= (3, 11): def make_dataclass( cls_name: str, fields: Iterable[str | tuple[str, type] | tuple[str, type, Any]], *, - bases: tuple[type, ...] = ..., + bases: tuple[type, ...] = (), namespace: dict[str, Any] | None = None, init: bool = True, repr: bool = True, @@ -271,7 +291,7 @@ elif sys.version_info >= (3, 10): cls_name: str, fields: Iterable[str | tuple[str, type] | tuple[str, type, Any]], *, - bases: tuple[type, ...] = ..., + bases: tuple[type, ...] 
= (), namespace: dict[str, Any] | None = None, init: bool = True, repr: bool = True, @@ -289,7 +309,7 @@ else: cls_name: str, fields: Iterable[str | tuple[str, type] | tuple[str, type, Any]], *, - bases: tuple[type, ...] = ..., + bases: tuple[type, ...] = (), namespace: dict[str, Any] | None = None, init: bool = True, repr: bool = True, diff --git a/mypy/typeshed/stdlib/datetime.pyi b/mypy/typeshed/stdlib/datetime.pyi index f78737e98910..2bb2264c97b1 100644 --- a/mypy/typeshed/stdlib/datetime.pyi +++ b/mypy/typeshed/stdlib/datetime.pyi @@ -2,7 +2,7 @@ import sys from abc import abstractmethod from time import struct_time from typing import ClassVar, NamedTuple, NoReturn, TypeVar, overload -from typing_extensions import Literal, Self, TypeAlias, final +from typing_extensions import Literal, Self, SupportsIndex, TypeAlias, final if sys.version_info >= (3, 11): __all__ = ("date", "datetime", "time", "timedelta", "timezone", "tzinfo", "MINYEAR", "MAXYEAR", "UTC") @@ -49,7 +49,7 @@ class date: min: ClassVar[date] max: ClassVar[date] resolution: ClassVar[timedelta] - def __new__(cls, year: int, month: int, day: int) -> Self: ... + def __new__(cls, year: SupportsIndex, month: SupportsIndex, day: SupportsIndex) -> Self: ... @classmethod def fromtimestamp(cls, __timestamp: float) -> Self: ... @classmethod @@ -81,7 +81,7 @@ class date: def isoformat(self) -> str: ... def timetuple(self) -> struct_time: ... def toordinal(self) -> int: ... - def replace(self, year: int = ..., month: int = ..., day: int = ...) -> Self: ... + def replace(self, year: SupportsIndex = ..., month: SupportsIndex = ..., day: SupportsIndex = ...) -> Self: ... def __le__(self, __value: date) -> bool: ... def __lt__(self, __value: date) -> bool: ... def __ge__(self, __value: date) -> bool: ... @@ -119,10 +119,10 @@ class time: resolution: ClassVar[timedelta] def __new__( cls, - hour: int = ..., - minute: int = ..., - second: int = ..., - microsecond: int = ..., + hour: SupportsIndex = ..., + minute: SupportsIndex = ..., + second: SupportsIndex = ..., + microsecond: SupportsIndex = ..., tzinfo: _TzInfo | None = ..., *, fold: int = ..., @@ -160,10 +160,10 @@ class time: def dst(self) -> timedelta | None: ... def replace( self, - hour: int = ..., - minute: int = ..., - second: int = ..., - microsecond: int = ..., + hour: SupportsIndex = ..., + minute: SupportsIndex = ..., + second: SupportsIndex = ..., + microsecond: SupportsIndex = ..., tzinfo: _TzInfo | None = ..., *, fold: int = ..., @@ -223,13 +223,13 @@ class datetime(date): max: ClassVar[datetime] def __new__( cls, - year: int, - month: int, - day: int, - hour: int = ..., - minute: int = ..., - second: int = ..., - microsecond: int = ..., + year: SupportsIndex, + month: SupportsIndex, + day: SupportsIndex, + hour: SupportsIndex = ..., + minute: SupportsIndex = ..., + second: SupportsIndex = ..., + microsecond: SupportsIndex = ..., tzinfo: _TzInfo | None = ..., *, fold: int = ..., @@ -280,13 +280,13 @@ class datetime(date): def timetz(self) -> _Time: ... 
def replace( self, - year: int = ..., - month: int = ..., - day: int = ..., - hour: int = ..., - minute: int = ..., - second: int = ..., - microsecond: int = ..., + year: SupportsIndex = ..., + month: SupportsIndex = ..., + day: SupportsIndex = ..., + hour: SupportsIndex = ..., + minute: SupportsIndex = ..., + second: SupportsIndex = ..., + microsecond: SupportsIndex = ..., tzinfo: _TzInfo | None = ..., *, fold: int = ..., diff --git a/mypy/typeshed/stdlib/dis.pyi b/mypy/typeshed/stdlib/dis.pyi index ac0c5356f5f9..d153771e676b 100644 --- a/mypy/typeshed/stdlib/dis.pyi +++ b/mypy/typeshed/stdlib/dis.pyi @@ -39,10 +39,10 @@ _HaveCodeType: TypeAlias = types.MethodType | types.FunctionType | types.CodeTyp if sys.version_info >= (3, 11): class Positions(NamedTuple): - lineno: int | None = ... - end_lineno: int | None = ... - col_offset: int | None = ... - end_col_offset: int | None = ... + lineno: int | None = None + end_lineno: int | None = None + col_offset: int | None = None + end_col_offset: int | None = None if sys.version_info >= (3, 11): class Instruction(NamedTuple): @@ -54,7 +54,7 @@ if sys.version_info >= (3, 11): offset: int starts_line: int | None is_jump_target: bool - positions: Positions | None = ... + positions: Positions | None = None else: class Instruction(NamedTuple): diff --git a/mypy/typeshed/stdlib/distutils/cmd.pyi b/mypy/typeshed/stdlib/distutils/cmd.pyi index a9aade0206dd..61fce37b80bc 100644 --- a/mypy/typeshed/stdlib/distutils/cmd.pyi +++ b/mypy/typeshed/stdlib/distutils/cmd.pyi @@ -5,6 +5,7 @@ from distutils.dist import Distribution from typing import Any class Command: + distribution: Distribution sub_commands: list[tuple[str, Callable[[Command], bool] | None]] def __init__(self, dist: Distribution) -> None: ... @abstractmethod diff --git a/mypy/typeshed/stdlib/distutils/command/build_ext.pyi b/mypy/typeshed/stdlib/distutils/command/build_ext.pyi index 80cd78936cb9..5eb541fb9101 100644 --- a/mypy/typeshed/stdlib/distutils/command/build_ext.pyi +++ b/mypy/typeshed/stdlib/distutils/command/build_ext.pyi @@ -43,8 +43,8 @@ class build_ext(Command): def build_extension(self, ext) -> None: ... def swig_sources(self, sources, extension): ... def find_swig(self): ... - def get_ext_fullpath(self, ext_name): ... - def get_ext_fullname(self, ext_name): ... - def get_ext_filename(self, ext_name): ... + def get_ext_fullpath(self, ext_name: str) -> str: ... + def get_ext_fullname(self, ext_name: str) -> str: ... + def get_ext_filename(self, ext_name: str) -> str: ... def get_export_symbols(self, ext): ... def get_libraries(self, ext): ... diff --git a/mypy/typeshed/stdlib/distutils/command/config.pyi b/mypy/typeshed/stdlib/distutils/command/config.pyi index 81fdf76b2b59..7077c9a4c158 100644 --- a/mypy/typeshed/stdlib/distutils/command/config.pyi +++ b/mypy/typeshed/stdlib/distutils/command/config.pyi @@ -74,7 +74,7 @@ class config(Command): library_dirs: Sequence[str] | None = None, headers: Sequence[str] | None = None, include_dirs: Sequence[str] | None = None, - other_libraries: list[str] = ..., + other_libraries: list[str] = [], ) -> bool: ... 
def check_header( self, header: str, include_dirs: Sequence[str] | None = None, library_dirs: Sequence[str] | None = None, lang: str = "c" diff --git a/mypy/typeshed/stdlib/distutils/dist.pyi b/mypy/typeshed/stdlib/distutils/dist.pyi index dfffdc5e11bb..b296b11f73ba 100644 --- a/mypy/typeshed/stdlib/distutils/dist.pyi +++ b/mypy/typeshed/stdlib/distutils/dist.pyi @@ -2,10 +2,14 @@ from _typeshed import FileDescriptorOrPath, Incomplete, SupportsWrite from collections.abc import Iterable, Mapping from distutils.cmd import Command from re import Pattern -from typing import IO, Any +from typing import IO, Any, ClassVar, TypeVar, overload +from typing_extensions import TypeAlias command_re: Pattern[str] +_OptionsList: TypeAlias = list[tuple[str, str | None, str, int] | tuple[str, str | None, str]] +_CommandT = TypeVar("_CommandT", bound=Command) + class DistributionMetadata: def __init__(self, path: FileDescriptorOrPath | None = None) -> None: ... name: str | None @@ -59,22 +63,22 @@ class Distribution: def __init__(self, attrs: Mapping[str, Any] | None = None) -> None: ... def get_option_dict(self, command: str) -> dict[str, tuple[str, str]]: ... def parse_config_files(self, filenames: Iterable[str] | None = None) -> None: ... - def get_command_obj(self, command: str, create: bool = ...) -> Command | None: ... - global_options: Incomplete - common_usage: str - display_options: Incomplete - display_option_names: Incomplete - negative_opt: Incomplete + def get_command_obj(self, command: str, create: bool = True) -> Command | None: ... + global_options: ClassVar[_OptionsList] + common_usage: ClassVar[str] + display_options: ClassVar[_OptionsList] + display_option_names: ClassVar[list[str]] + negative_opt: ClassVar[dict[str, str]] verbose: int dry_run: int help: int - command_packages: Incomplete - script_name: Incomplete - script_args: Incomplete - command_options: Incomplete - dist_files: Incomplete + command_packages: list[str] | None + script_name: str | None + script_args: list[str] | None + command_options: dict[str, dict[str, tuple[str, str]]] + dist_files: list[tuple[str, str, str]] packages: Incomplete - package_data: Incomplete + package_data: dict[str, list[str]] package_dir: Incomplete py_modules: Incomplete libraries: Incomplete @@ -101,16 +105,42 @@ class Distribution: def print_commands(self) -> None: ... def get_command_list(self): ... def get_command_packages(self): ... - def get_command_class(self, command): ... - def reinitialize_command(self, command, reinit_subcommands: int = 0): ... - def announce(self, msg, level: int = ...) -> None: ... + def get_command_class(self, command: str) -> type[Command]: ... + @overload + def reinitialize_command(self, command: str, reinit_subcommands: bool = False) -> Command: ... + @overload + def reinitialize_command(self, command: _CommandT, reinit_subcommands: bool = False) -> _CommandT: ... + def announce(self, msg, level: int = 2) -> None: ... def run_commands(self) -> None: ... - def run_command(self, command) -> None: ... - def has_pure_modules(self): ... - def has_ext_modules(self): ... - def has_c_libraries(self): ... - def has_modules(self): ... - def has_headers(self): ... - def has_scripts(self): ... - def has_data_files(self): ... - def is_pure(self): ... + def run_command(self, command: str) -> None: ... + def has_pure_modules(self) -> bool: ... + def has_ext_modules(self) -> bool: ... + def has_c_libraries(self) -> bool: ... + def has_modules(self) -> bool: ... + def has_headers(self) -> bool: ... 
+ def has_scripts(self) -> bool: ... + def has_data_files(self) -> bool: ... + def is_pure(self) -> bool: ... + + # Getter methods generated in __init__ + def get_name(self) -> str: ... + def get_version(self) -> str: ... + def get_fullname(self) -> str: ... + def get_author(self) -> str: ... + def get_author_email(self) -> str: ... + def get_maintainer(self) -> str: ... + def get_maintainer_email(self) -> str: ... + def get_contact(self) -> str: ... + def get_contact_email(self) -> str: ... + def get_url(self) -> str: ... + def get_license(self) -> str: ... + def get_licence(self) -> str: ... + def get_description(self) -> str: ... + def get_long_description(self) -> str: ... + def get_keywords(self) -> str | list[str]: ... + def get_platforms(self) -> str | list[str]: ... + def get_classifiers(self) -> str | list[str]: ... + def get_download_url(self) -> str: ... + def get_requires(self) -> list[str]: ... + def get_provides(self) -> list[str]: ... + def get_obsoletes(self) -> list[str]: ... diff --git a/mypy/typeshed/stdlib/distutils/fancy_getopt.pyi b/mypy/typeshed/stdlib/distutils/fancy_getopt.pyi index c15bb8a167dd..f9916d4511b2 100644 --- a/mypy/typeshed/stdlib/distutils/fancy_getopt.pyi +++ b/mypy/typeshed/stdlib/distutils/fancy_getopt.pyi @@ -31,4 +31,4 @@ def wrap_text(text: str, width: int) -> list[str]: ... def translate_longopt(opt: str) -> str: ... class OptionDummy: - def __init__(self, options: Iterable[str] = ...) -> None: ... + def __init__(self, options: Iterable[str] = []) -> None: ... diff --git a/mypy/typeshed/stdlib/email/message.pyi b/mypy/typeshed/stdlib/email/message.pyi index 14e018073103..18852f4d3bb2 100644 --- a/mypy/typeshed/stdlib/email/message.pyi +++ b/mypy/typeshed/stdlib/email/message.pyi @@ -80,9 +80,9 @@ class Message: @overload def get_content_charset(self, failobj: _T) -> str | _T: ... @overload - def get_charsets(self, failobj: None = None) -> list[str] | None: ... + def get_charsets(self, failobj: None = None) -> list[str | None]: ... @overload - def get_charsets(self, failobj: _T) -> list[str] | _T: ... + def get_charsets(self, failobj: _T) -> list[str | _T]: ... def walk(self) -> Generator[Self, None, None]: ... def get_content_disposition(self) -> str | None: ... def as_string(self, unixfrom: bool = False, maxheaderlen: int = 0, policy: Policy | None = None) -> str: ... @@ -105,7 +105,7 @@ class Message: class MIMEPart(Message): def __init__(self, policy: Policy | None = None) -> None: ... - def get_body(self, preferencelist: Sequence[str] = ...) -> Message | None: ... + def get_body(self, preferencelist: Sequence[str] = ("related", "html", "plain")) -> Message | None: ... def iter_attachments(self) -> Iterator[Message]: ... def iter_parts(self) -> Iterator[Message]: ... def get_content(self, *args: Any, content_manager: ContentManager | None = None, **kw: Any) -> Any: ... diff --git a/mypy/typeshed/stdlib/enum.pyi b/mypy/typeshed/stdlib/enum.pyi index 5a39c456b4b4..383c336ed2c7 100644 --- a/mypy/typeshed/stdlib/enum.pyi +++ b/mypy/typeshed/stdlib/enum.pyi @@ -208,13 +208,6 @@ def unique(enumeration: _EnumerationT) -> _EnumerationT: ... _auto_null: Any -# subclassing IntFlag so it picks up all implemented base functions, best modeling behavior of enum.auto() -class auto(IntFlag): - _value_: Any - @_magic_enum_attr - def value(self) -> Any: ... - def __new__(cls) -> Self: ... 
- class Flag(Enum): _name_: str | None # type: ignore[assignment] _value_: int @@ -235,27 +228,6 @@ class Flag(Enum): __rand__ = __and__ __rxor__ = __xor__ -if sys.version_info >= (3, 11): - # The body of the class is the same, but the base classes are different. - class IntFlag(int, ReprEnum, Flag, boundary=KEEP): # type: ignore[misc] # complaints about incompatible bases - def __new__(cls, value: int) -> Self: ... - def __or__(self, other: int) -> Self: ... - def __and__(self, other: int) -> Self: ... - def __xor__(self, other: int) -> Self: ... - __ror__ = __or__ - __rand__ = __and__ - __rxor__ = __xor__ - -else: - class IntFlag(int, Flag): # type: ignore[misc] # complaints about incompatible bases - def __new__(cls, value: int) -> Self: ... - def __or__(self, other: int) -> Self: ... - def __and__(self, other: int) -> Self: ... - def __xor__(self, other: int) -> Self: ... - __ror__ = __or__ - __rand__ = __and__ - __rxor__ = __xor__ - if sys.version_info >= (3, 11): class StrEnum(str, ReprEnum): def __new__(cls, value: str) -> Self: ... @@ -289,3 +261,31 @@ if sys.version_info >= (3, 11): def global_enum(cls: _EnumerationT, update_str: bool = False) -> _EnumerationT: ... def global_enum_repr(self: Enum) -> str: ... def global_flag_repr(self: Flag) -> str: ... + +if sys.version_info >= (3, 11): + # The body of the class is the same, but the base classes are different. + class IntFlag(int, ReprEnum, Flag, boundary=KEEP): # type: ignore[misc] # complaints about incompatible bases + def __new__(cls, value: int) -> Self: ... + def __or__(self, other: int) -> Self: ... + def __and__(self, other: int) -> Self: ... + def __xor__(self, other: int) -> Self: ... + __ror__ = __or__ + __rand__ = __and__ + __rxor__ = __xor__ + +else: + class IntFlag(int, Flag): # type: ignore[misc] # complaints about incompatible bases + def __new__(cls, value: int) -> Self: ... + def __or__(self, other: int) -> Self: ... + def __and__(self, other: int) -> Self: ... + def __xor__(self, other: int) -> Self: ... + __ror__ = __or__ + __rand__ = __and__ + __rxor__ = __xor__ + +# subclassing IntFlag so it picks up all implemented base functions, best modeling behavior of enum.auto() +class auto(IntFlag): + _value_: Any + @_magic_enum_attr + def value(self) -> Any: ... + def __new__(cls) -> Self: ... diff --git a/mypy/typeshed/stdlib/ftplib.pyi b/mypy/typeshed/stdlib/ftplib.pyi index 76d9dc02a5da..36a213d48680 100644 --- a/mypy/typeshed/stdlib/ftplib.pyi +++ b/mypy/typeshed/stdlib/ftplib.pyi @@ -106,7 +106,7 @@ class FTP: def nlst(self, *args: str) -> list[str]: ... # Technically only the last arg can be a Callable but ... def dir(self, *args: str | Callable[[str], object]) -> None: ... - def mlsd(self, path: str = "", facts: Iterable[str] = ...) -> Iterator[tuple[str, dict[str, str]]]: ... + def mlsd(self, path: str = "", facts: Iterable[str] = []) -> Iterator[tuple[str, dict[str, str]]]: ... def rename(self, fromname: str, toname: str) -> str: ... def delete(self, filename: str) -> str: ... def cwd(self, dirname: str) -> str: ... 
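As a reading aid for the reordered enum.pyi declarations above (where `auto` is modelled as an `IntFlag` subclass and the bitwise dunders return `Self`), a tiny runtime sketch of standard `enum` behaviour, unrelated to the stub internals:

```python
from enum import IntFlag, auto

class Permission(IntFlag):
    READ = auto()     # 1
    WRITE = auto()    # 2
    EXECUTE = auto()  # 4

combined = Permission.READ | Permission.WRITE
print(int(combined))                 # 3
print(Permission.READ in combined)   # True
print(type(combined) is Permission)  # True: |, & and ^ keep returning the flag type
```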
diff --git a/mypy/typeshed/stdlib/functools.pyi b/mypy/typeshed/stdlib/functools.pyi index fe36a134f74e..d01fd8ce55cb 100644 --- a/mypy/typeshed/stdlib/functools.pyi +++ b/mypy/typeshed/stdlib/functools.pyi @@ -3,7 +3,7 @@ import types from _typeshed import IdentityFunction, SupportsAllComparisons, SupportsItems from collections.abc import Callable, Hashable, Iterable, Sequence, Sized from typing import Any, Generic, NamedTuple, TypeVar, overload -from typing_extensions import Literal, Self, TypeAlias, final +from typing_extensions import Literal, Self, TypeAlias, TypedDict, final if sys.version_info >= (3, 9): from types import GenericAlias @@ -44,12 +44,20 @@ class _CacheInfo(NamedTuple): maxsize: int | None currsize: int +if sys.version_info >= (3, 9): + class _CacheParameters(TypedDict): + maxsize: int + typed: bool + @final class _lru_cache_wrapper(Generic[_T]): __wrapped__: Callable[..., _T] def __call__(self, *args: Hashable, **kwargs: Hashable) -> _T: ... def cache_info(self) -> _CacheInfo: ... def cache_clear(self) -> None: ... + if sys.version_info >= (3, 9): + def cache_parameters(self) -> _CacheParameters: ... + def __copy__(self) -> _lru_cache_wrapper[_T]: ... def __deepcopy__(self, __memo: Any) -> _lru_cache_wrapper[_T]: ... @@ -67,8 +75,17 @@ WRAPPER_ASSIGNMENTS: tuple[ ] WRAPPER_UPDATES: tuple[Literal["__dict__"]] -def update_wrapper(wrapper: _T, wrapped: _AnyCallable, assigned: Sequence[str] = ..., updated: Sequence[str] = ...) -> _T: ... -def wraps(wrapped: _AnyCallable, assigned: Sequence[str] = ..., updated: Sequence[str] = ...) -> IdentityFunction: ... +def update_wrapper( + wrapper: _T, + wrapped: _AnyCallable, + assigned: Sequence[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__"), + updated: Sequence[str] = ("__dict__",), +) -> _T: ... +def wraps( + wrapped: _AnyCallable, + assigned: Sequence[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__"), + updated: Sequence[str] = ("__dict__",), +) -> IdentityFunction: ... def total_ordering(cls: type[_T]) -> type[_T]: ... def cmp_to_key(mycmp: Callable[[_T, _T], int]) -> Callable[[_T], SupportsAllComparisons]: ... @@ -144,7 +161,7 @@ if sys.version_info >= (3, 8): attrname: str | None def __init__(self, func: Callable[[Any], _T]) -> None: ... @overload - def __get__(self, instance: None, owner: type[Any] | None = None) -> cached_property[_T]: ... + def __get__(self, instance: None, owner: type[Any] | None = None) -> Self: ... @overload def __get__(self, instance: object, owner: type[Any] | None = None) -> _T: ... def __set_name__(self, owner: type[Any], name: str) -> None: ... diff --git a/mypy/typeshed/stdlib/getopt.pyi b/mypy/typeshed/stdlib/getopt.pyi index 14d63dbd6f99..bc9d4da4796b 100644 --- a/mypy/typeshed/stdlib/getopt.pyi +++ b/mypy/typeshed/stdlib/getopt.pyi @@ -1,7 +1,7 @@ __all__ = ["GetoptError", "error", "getopt", "gnu_getopt"] -def getopt(args: list[str], shortopts: str, longopts: list[str] = ...) -> tuple[list[tuple[str, str]], list[str]]: ... -def gnu_getopt(args: list[str], shortopts: str, longopts: list[str] = ...) -> tuple[list[tuple[str, str]], list[str]]: ... +def getopt(args: list[str], shortopts: str, longopts: list[str] = []) -> tuple[list[tuple[str, str]], list[str]]: ... +def gnu_getopt(args: list[str], shortopts: str, longopts: list[str] = []) -> tuple[list[tuple[str, str]], list[str]]: ... 
class GetoptError(Exception): msg: str diff --git a/mypy/typeshed/stdlib/http/client.pyi b/mypy/typeshed/stdlib/http/client.pyi index 1f16bdc2dbab..9c7c0c1c4a12 100644 --- a/mypy/typeshed/stdlib/http/client.pyi +++ b/mypy/typeshed/stdlib/http/client.pyi @@ -101,7 +101,7 @@ class HTTPMessage(email.message.Message): def parse_headers(fp: io.BufferedIOBase, _class: Callable[[], email.message.Message] = ...) -> HTTPMessage: ... -class HTTPResponse(io.BufferedIOBase, BinaryIO): +class HTTPResponse(io.BufferedIOBase, BinaryIO): # type: ignore[misc] # incompatible method definitions in the base classes msg: HTTPMessage headers: HTTPMessage version: int @@ -158,7 +158,7 @@ class HTTPConnection: method: str, url: str, body: _DataType | str | None = None, - headers: Mapping[str, str] = ..., + headers: Mapping[str, str] = {}, *, encode_chunked: bool = False, ) -> None: ... diff --git a/mypy/typeshed/stdlib/http/cookiejar.pyi b/mypy/typeshed/stdlib/http/cookiejar.pyi index 7f2c9c6cc8f4..482cbca1d88a 100644 --- a/mypy/typeshed/stdlib/http/cookiejar.pyi +++ b/mypy/typeshed/stdlib/http/cookiejar.pyi @@ -99,7 +99,7 @@ class DefaultCookiePolicy(CookiePolicy): strict_ns_domain: int = 0, strict_ns_set_initial_dollar: bool = False, strict_ns_set_path: bool = False, - secure_protocols: Sequence[str] = ..., + secure_protocols: Sequence[str] = ("https", "wss"), ) -> None: ... else: def __init__( diff --git a/mypy/typeshed/stdlib/importlib/__init__.pyi b/mypy/typeshed/stdlib/importlib/__init__.pyi index 1747b274136e..8d73319f8c3d 100644 --- a/mypy/typeshed/stdlib/importlib/__init__.pyi +++ b/mypy/typeshed/stdlib/importlib/__init__.pyi @@ -9,7 +9,7 @@ def __import__( name: str, globals: Mapping[str, object] | None = None, locals: Mapping[str, object] | None = None, - fromlist: Sequence[str] = ..., + fromlist: Sequence[str] = (), level: int = 0, ) -> ModuleType: ... diff --git a/mypy/typeshed/stdlib/importlib/abc.pyi b/mypy/typeshed/stdlib/importlib/abc.pyi index 4fe2fed27092..4bf46104ba6d 100644 --- a/mypy/typeshed/stdlib/importlib/abc.pyi +++ b/mypy/typeshed/stdlib/importlib/abc.pyi @@ -1,3 +1,4 @@ +import _ast import sys import types from _typeshed import ( @@ -7,6 +8,7 @@ from _typeshed import ( OpenBinaryModeWriting, OpenTextMode, ReadableBuffer, + StrPath, ) from abc import ABCMeta, abstractmethod from collections.abc import Iterator, Mapping, Sequence @@ -52,7 +54,9 @@ class InspectLoader(Loader): def get_source(self, fullname: str) -> str | None: ... def exec_module(self, module: types.ModuleType) -> None: ... @staticmethod - def source_to_code(data: ReadableBuffer | str, path: str = "") -> types.CodeType: ... + def source_to_code( + data: ReadableBuffer | str | _ast.Module | _ast.Expression | _ast.Interactive, path: ReadableBuffer | StrPath = "" + ) -> types.CodeType: ... 
class ExecutionLoader(InspectLoader): @abstractmethod diff --git a/mypy/typeshed/stdlib/importlib/resources.pyi b/mypy/typeshed/stdlib/importlib/resources/__init__.pyi similarity index 100% rename from mypy/typeshed/stdlib/importlib/resources.pyi rename to mypy/typeshed/stdlib/importlib/resources/__init__.pyi diff --git a/mypy/typeshed/stdlib/importlib/resources/abc.pyi b/mypy/typeshed/stdlib/importlib/resources/abc.pyi new file mode 100644 index 000000000000..a36c952d01ac --- /dev/null +++ b/mypy/typeshed/stdlib/importlib/resources/abc.pyi @@ -0,0 +1,12 @@ +import sys + +if sys.version_info >= (3, 11): + # These are all actually defined in this file on 3.11+, + # and re-exported from importlib.abc, + # but it's much less code duplication for typeshed if we pretend that they're still defined + # in importlib.abc on 3.11+, and re-exported from this file + from importlib.abc import ( + ResourceReader as ResourceReader, + Traversable as Traversable, + TraversableResources as TraversableResources, + ) diff --git a/mypy/typeshed/stdlib/inspect.pyi b/mypy/typeshed/stdlib/inspect.pyi index 2525ef4968ec..a2252e38ee8c 100644 --- a/mypy/typeshed/stdlib/inspect.pyi +++ b/mypy/typeshed/stdlib/inspect.pyi @@ -161,12 +161,20 @@ TPFLAGS_IS_ABSTRACT: Literal[1048576] modulesbyfile: dict[str, Any] +_GetMembersPredicateTypeGuard: TypeAlias = Callable[[Any], TypeGuard[_T]] _GetMembersPredicate: TypeAlias = Callable[[Any], bool] +_GetMembersReturnTypeGuard: TypeAlias = list[tuple[str, _T]] _GetMembersReturn: TypeAlias = list[tuple[str, Any]] +@overload +def getmembers(object: object, predicate: _GetMembersPredicateTypeGuard[_T]) -> _GetMembersReturnTypeGuard[_T]: ... +@overload def getmembers(object: object, predicate: _GetMembersPredicate | None = None) -> _GetMembersReturn: ... if sys.version_info >= (3, 11): + @overload + def getmembers_static(object: object, predicate: _GetMembersPredicateTypeGuard[_T]) -> _GetMembersReturnTypeGuard[_T]: ... + @overload def getmembers_static(object: object, predicate: _GetMembersPredicate | None = None) -> _GetMembersReturn: ... def getmodulename(path: str) -> str | None: ... @@ -442,9 +450,9 @@ if sys.version_info < (3, 11): varargs: str | None = None, varkw: str | None = None, defaults: tuple[Any, ...] | None = None, - kwonlyargs: Sequence[str] | None = ..., - kwonlydefaults: Mapping[str, Any] | None = ..., - annotations: Mapping[str, Any] = ..., + kwonlyargs: Sequence[str] | None = (), + kwonlydefaults: Mapping[str, Any] | None = {}, + annotations: Mapping[str, Any] = {}, formatarg: Callable[[str], str] = ..., formatvarargs: Callable[[str], str] = ..., formatvarkw: Callable[[str], str] = ..., diff --git a/mypy/typeshed/stdlib/io.pyi b/mypy/typeshed/stdlib/io.pyi index c3e07bacbe5a..c114f839594f 100644 --- a/mypy/typeshed/stdlib/io.pyi +++ b/mypy/typeshed/stdlib/io.pyi @@ -90,7 +90,7 @@ class BufferedIOBase(IOBase): def read(self, __size: int | None = ...) -> bytes: ... def read1(self, __size: int = ...) -> bytes: ... -class FileIO(RawIOBase, BinaryIO): +class FileIO(RawIOBase, BinaryIO): # type: ignore[misc] # incompatible definitions of writelines in the base classes mode: str name: FileDescriptorOrPath # type: ignore[assignment] def __init__( @@ -102,7 +102,7 @@ class FileIO(RawIOBase, BinaryIO): def read(self, __size: int = -1) -> bytes: ... def __enter__(self) -> Self: ... 
-class BytesIO(BufferedIOBase, BinaryIO): +class BytesIO(BufferedIOBase, BinaryIO): # type: ignore[misc] # incompatible definitions of methods in the base classes def __init__(self, initial_bytes: ReadableBuffer = ...) -> None: ... # BytesIO does not contain a "name" field. This workaround is necessary # to allow BytesIO sub-classes to add this field, as it is defined @@ -113,17 +113,17 @@ class BytesIO(BufferedIOBase, BinaryIO): def getbuffer(self) -> memoryview: ... def read1(self, __size: int | None = -1) -> bytes: ... -class BufferedReader(BufferedIOBase, BinaryIO): +class BufferedReader(BufferedIOBase, BinaryIO): # type: ignore[misc] # incompatible definitions of methods in the base classes def __enter__(self) -> Self: ... def __init__(self, raw: RawIOBase, buffer_size: int = ...) -> None: ... def peek(self, __size: int = 0) -> bytes: ... -class BufferedWriter(BufferedIOBase, BinaryIO): +class BufferedWriter(BufferedIOBase, BinaryIO): # type: ignore[misc] # incompatible definitions of writelines in the base classes def __enter__(self) -> Self: ... def __init__(self, raw: RawIOBase, buffer_size: int = ...) -> None: ... def write(self, __buffer: ReadableBuffer) -> int: ... -class BufferedRandom(BufferedReader, BufferedWriter): +class BufferedRandom(BufferedReader, BufferedWriter): # type: ignore[misc] # incompatible definitions of methods in the base classes def __enter__(self) -> Self: ... def seek(self, __target: int, __whence: int = 0) -> int: ... # stubtest needs this @@ -144,7 +144,7 @@ class TextIOBase(IOBase): def readlines(self, __hint: int = -1) -> list[str]: ... # type: ignore[override] def read(self, __size: int | None = ...) -> str: ... -class TextIOWrapper(TextIOBase, TextIO): +class TextIOWrapper(TextIOBase, TextIO): # type: ignore[misc] # incompatible definitions of write in the base classes def __init__( self, buffer: IO[bytes], diff --git a/mypy/typeshed/stdlib/itertools.pyi b/mypy/typeshed/stdlib/itertools.pyi index c7b92c3aebb5..4b5d624c78d7 100644 --- a/mypy/typeshed/stdlib/itertools.pyi +++ b/mypy/typeshed/stdlib/itertools.pyi @@ -272,7 +272,7 @@ if sys.version_info >= (3, 10): def __next__(self) -> _T_co: ... if sys.version_info >= (3, 12): - class batched(Iterator[_T_co], Generic[_T_co]): + class batched(Iterator[tuple[_T_co, ...]], Generic[_T_co]): def __new__(cls, iterable: Iterable[_T_co], n: int) -> Self: ... def __iter__(self) -> Self: ... def __next__(self) -> tuple[_T_co, ...]: ... diff --git a/mypy/typeshed/stdlib/lib2to3/btm_matcher.pyi b/mypy/typeshed/stdlib/lib2to3/btm_matcher.pyi new file mode 100644 index 000000000000..4c87b664eb20 --- /dev/null +++ b/mypy/typeshed/stdlib/lib2to3/btm_matcher.pyi @@ -0,0 +1,28 @@ +from _typeshed import Incomplete, SupportsGetItem +from collections import defaultdict +from collections.abc import Iterable + +from .fixer_base import BaseFix +from .pytree import Leaf, Node + +class BMNode: + count: Incomplete + transition_table: Incomplete + fixers: Incomplete + id: Incomplete + content: str + def __init__(self) -> None: ... + +class BottomMatcher: + match: Incomplete + root: Incomplete + nodes: Incomplete + fixers: Incomplete + logger: Incomplete + def __init__(self) -> None: ... + def add_fixer(self, fixer: BaseFix) -> None: ... + def add(self, pattern: SupportsGetItem[int | slice, Incomplete] | None, start: BMNode) -> list[BMNode]: ... + def run(self, leaves: Iterable[Leaf]) -> defaultdict[BaseFix, list[Node | Leaf]]: ... + def print_ac(self) -> None: ... + +def type_repr(type_num: int) -> str | int: ... 
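The itertools.pyi change above turns `batched` into an `Iterator[tuple[_T_co, ...]]`, matching what iteration yields at runtime. A minimal sketch, guarded on the version since `batched` only exists on Python 3.12+:

```python
import sys

if sys.version_info >= (3, 12):
    from itertools import batched

    for chunk in batched("ABCDEFG", 3):
        print(chunk)
    # ('A', 'B', 'C')
    # ('D', 'E', 'F')
    # ('G',)   <- the last batch may be shorter; every item is a tuple
```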
diff --git a/mypy/typeshed/stdlib/lib2to3/fixer_base.pyi b/mypy/typeshed/stdlib/lib2to3/fixer_base.pyi new file mode 100644 index 000000000000..eef386f709ac --- /dev/null +++ b/mypy/typeshed/stdlib/lib2to3/fixer_base.pyi @@ -0,0 +1,43 @@ +from _typeshed import Incomplete, StrPath +from abc import ABCMeta, abstractmethod +from collections.abc import MutableMapping +from typing import ClassVar, TypeVar +from typing_extensions import Literal + +from .pytree import Base, Leaf, Node + +_N = TypeVar("_N", bound=Base) + +class BaseFix: + PATTERN: ClassVar[str | None] + pattern: Incomplete | None + pattern_tree: Incomplete | None + options: Incomplete | None + filename: Incomplete | None + numbers: Incomplete + used_names: Incomplete + order: ClassVar[Literal["post", "pre"]] + explicit: ClassVar[bool] + run_order: ClassVar[int] + keep_line_order: ClassVar[bool] + BM_compatible: ClassVar[bool] + syms: Incomplete + log: Incomplete + def __init__(self, options: MutableMapping[str, Incomplete], log: list[str]) -> None: ... + def compile_pattern(self) -> None: ... + def set_filename(self, filename: StrPath) -> None: ... + def match(self, node: _N) -> Literal[False] | dict[str, _N]: ... + @abstractmethod + def transform(self, node: Base, results: dict[str, Base]) -> Node | Leaf | None: ... + def new_name(self, template: str = "xxx_todo_changeme") -> str: ... + first_log: bool + def log_message(self, message: str) -> None: ... + def cannot_convert(self, node: Base, reason: str | None = None) -> None: ... + def warning(self, node: Base, reason: str) -> None: ... + def start_tree(self, tree: Node, filename: StrPath) -> None: ... + def finish_tree(self, tree: Node, filename: StrPath) -> None: ... + +class ConditionalFix(BaseFix, metaclass=ABCMeta): + skip_on: ClassVar[str | None] + def start_tree(self, __tree: Node, __filename: StrPath) -> None: ... + def should_skip(self, node: Base) -> bool: ... diff --git a/mypy/typeshed/stdlib/lib2to3/fixes/__init__.pyi b/mypy/typeshed/stdlib/lib2to3/fixes/__init__.pyi new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/mypy/typeshed/stdlib/lib2to3/fixes/fix_apply.pyi b/mypy/typeshed/stdlib/lib2to3/fixes/fix_apply.pyi new file mode 100644 index 000000000000..7c5451c15220 --- /dev/null +++ b/mypy/typeshed/stdlib/lib2to3/fixes/fix_apply.pyi @@ -0,0 +1,9 @@ +from typing import ClassVar +from typing_extensions import Literal + +from .. import fixer_base + +class FixApply(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[str] + def transform(self, node, results): ... diff --git a/mypy/typeshed/stdlib/lib2to3/fixes/fix_asserts.pyi b/mypy/typeshed/stdlib/lib2to3/fixes/fix_asserts.pyi new file mode 100644 index 000000000000..bf73009e9dbf --- /dev/null +++ b/mypy/typeshed/stdlib/lib2to3/fixes/fix_asserts.pyi @@ -0,0 +1,11 @@ +from typing import ClassVar +from typing_extensions import Literal + +from ..fixer_base import BaseFix + +NAMES: dict[str, str] + +class FixAsserts(BaseFix): + BM_compatible: ClassVar[Literal[False]] + PATTERN: ClassVar[str] + def transform(self, node, results) -> None: ... diff --git a/mypy/typeshed/stdlib/lib2to3/fixes/fix_basestring.pyi b/mypy/typeshed/stdlib/lib2to3/fixes/fix_basestring.pyi new file mode 100644 index 000000000000..84a354d32777 --- /dev/null +++ b/mypy/typeshed/stdlib/lib2to3/fixes/fix_basestring.pyi @@ -0,0 +1,9 @@ +from typing import ClassVar +from typing_extensions import Literal + +from .. 
import fixer_base + +class FixBasestring(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[Literal["'basestring'"]] + def transform(self, node, results): ... diff --git a/mypy/typeshed/stdlib/lib2to3/fixes/fix_buffer.pyi b/mypy/typeshed/stdlib/lib2to3/fixes/fix_buffer.pyi new file mode 100644 index 000000000000..857c1e2241b9 --- /dev/null +++ b/mypy/typeshed/stdlib/lib2to3/fixes/fix_buffer.pyi @@ -0,0 +1,9 @@ +from typing import ClassVar +from typing_extensions import Literal + +from .. import fixer_base + +class FixBuffer(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[str] + def transform(self, node, results) -> None: ... diff --git a/mypy/typeshed/stdlib/lib2to3/fixes/fix_dict.pyi b/mypy/typeshed/stdlib/lib2to3/fixes/fix_dict.pyi new file mode 100644 index 000000000000..2e66911195bf --- /dev/null +++ b/mypy/typeshed/stdlib/lib2to3/fixes/fix_dict.pyi @@ -0,0 +1,17 @@ +from _typeshed import Incomplete +from typing import ClassVar +from typing_extensions import Literal + +from .. import fixer_base + +iter_exempt: set[str] + +class FixDict(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[str] + def transform(self, node, results): ... + P1: ClassVar[str] + p1: ClassVar[Incomplete] + P2: ClassVar[str] + p2: ClassVar[Incomplete] + def in_special_context(self, node, isiter): ... diff --git a/mypy/typeshed/stdlib/lib2to3/fixes/fix_except.pyi b/mypy/typeshed/stdlib/lib2to3/fixes/fix_except.pyi new file mode 100644 index 000000000000..b87aacd342e9 --- /dev/null +++ b/mypy/typeshed/stdlib/lib2to3/fixes/fix_except.pyi @@ -0,0 +1,15 @@ +from collections.abc import Generator, Iterable +from typing import ClassVar, TypeVar +from typing_extensions import Literal + +from .. import fixer_base +from ..pytree import Base + +_N = TypeVar("_N", bound=Base) + +def find_excepts(nodes: Iterable[_N]) -> Generator[tuple[_N, _N], None, None]: ... + +class FixExcept(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[str] + def transform(self, node, results): ... diff --git a/mypy/typeshed/stdlib/lib2to3/fixes/fix_exec.pyi b/mypy/typeshed/stdlib/lib2to3/fixes/fix_exec.pyi new file mode 100644 index 000000000000..306937eb9759 --- /dev/null +++ b/mypy/typeshed/stdlib/lib2to3/fixes/fix_exec.pyi @@ -0,0 +1,9 @@ +from typing import ClassVar +from typing_extensions import Literal + +from .. import fixer_base + +class FixExec(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[str] + def transform(self, node, results): ... diff --git a/mypy/typeshed/stdlib/lib2to3/fixes/fix_execfile.pyi b/mypy/typeshed/stdlib/lib2to3/fixes/fix_execfile.pyi new file mode 100644 index 000000000000..fb245e5a1c1c --- /dev/null +++ b/mypy/typeshed/stdlib/lib2to3/fixes/fix_execfile.pyi @@ -0,0 +1,9 @@ +from typing import ClassVar +from typing_extensions import Literal + +from .. import fixer_base + +class FixExecfile(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[str] + def transform(self, node, results): ... 
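The new fixer_base.pyi stub above describes the interface every lib2to3 fixer implements: a `PATTERN` class attribute plus a `transform()` hook. For orientation, a minimal sketch of a custom fixer written against that interface; the fixer name and pattern here are hypothetical and not part of lib2to3:

```python
from lib2to3 import fixer_base
from lib2to3.fixer_util import Name

class FixHypotheticalRename(fixer_base.BaseFix):
    BM_compatible = True
    PATTERN = "'old_name'"  # match NAME leaves whose value is exactly "old_name"

    def transform(self, node, results):
        # Return a replacement leaf; copying node.prefix preserves the surrounding
        # whitespace and comments, as the stdlib fixers (e.g. FixBasestring) do.
        return Name("new_name", prefix=node.prefix)
```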
diff --git a/mypy/typeshed/stdlib/lib2to3/fixes/fix_exitfunc.pyi b/mypy/typeshed/stdlib/lib2to3/fixes/fix_exitfunc.pyi new file mode 100644 index 000000000000..10341d7985a8 --- /dev/null +++ b/mypy/typeshed/stdlib/lib2to3/fixes/fix_exitfunc.pyi @@ -0,0 +1,14 @@ +from _typeshed import Incomplete, StrPath +from lib2to3 import fixer_base +from typing import ClassVar +from typing_extensions import Literal + +from ..pytree import Node + +class FixExitfunc(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[str] + def __init__(self, *args) -> None: ... + sys_import: Incomplete | None + def start_tree(self, tree: Node, filename: StrPath) -> None: ... + def transform(self, node, results) -> None: ... diff --git a/mypy/typeshed/stdlib/lib2to3/fixes/fix_filter.pyi b/mypy/typeshed/stdlib/lib2to3/fixes/fix_filter.pyi new file mode 100644 index 000000000000..3998a1dd001e --- /dev/null +++ b/mypy/typeshed/stdlib/lib2to3/fixes/fix_filter.pyi @@ -0,0 +1,10 @@ +from typing import ClassVar +from typing_extensions import Literal + +from .. import fixer_base + +class FixFilter(fixer_base.ConditionalFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[str] + skip_on: ClassVar[Literal["future_builtins.filter"]] + def transform(self, node, results): ... diff --git a/mypy/typeshed/stdlib/lib2to3/fixes/fix_funcattrs.pyi b/mypy/typeshed/stdlib/lib2to3/fixes/fix_funcattrs.pyi new file mode 100644 index 000000000000..59919446ffdd --- /dev/null +++ b/mypy/typeshed/stdlib/lib2to3/fixes/fix_funcattrs.pyi @@ -0,0 +1,9 @@ +from typing import ClassVar +from typing_extensions import Literal + +from .. import fixer_base + +class FixFuncattrs(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[str] + def transform(self, node, results) -> None: ... diff --git a/mypy/typeshed/stdlib/lib2to3/fixes/fix_future.pyi b/mypy/typeshed/stdlib/lib2to3/fixes/fix_future.pyi new file mode 100644 index 000000000000..8eb5ca35dcc3 --- /dev/null +++ b/mypy/typeshed/stdlib/lib2to3/fixes/fix_future.pyi @@ -0,0 +1,9 @@ +from typing import ClassVar +from typing_extensions import Literal + +from .. import fixer_base + +class FixFuture(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[str] + def transform(self, node, results): ... diff --git a/mypy/typeshed/stdlib/lib2to3/fixes/fix_getcwdu.pyi b/mypy/typeshed/stdlib/lib2to3/fixes/fix_getcwdu.pyi new file mode 100644 index 000000000000..d18a38f69be0 --- /dev/null +++ b/mypy/typeshed/stdlib/lib2to3/fixes/fix_getcwdu.pyi @@ -0,0 +1,9 @@ +from typing import ClassVar +from typing_extensions import Literal + +from .. import fixer_base + +class FixGetcwdu(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[str] + def transform(self, node, results) -> None: ... diff --git a/mypy/typeshed/stdlib/lib2to3/fixes/fix_has_key.pyi b/mypy/typeshed/stdlib/lib2to3/fixes/fix_has_key.pyi new file mode 100644 index 000000000000..1e6b58dd3512 --- /dev/null +++ b/mypy/typeshed/stdlib/lib2to3/fixes/fix_has_key.pyi @@ -0,0 +1,9 @@ +from typing import ClassVar +from typing_extensions import Literal + +from .. import fixer_base + +class FixHasKey(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[str] + def transform(self, node, results): ... 
diff --git a/mypy/typeshed/stdlib/lib2to3/fixes/fix_idioms.pyi b/mypy/typeshed/stdlib/lib2to3/fixes/fix_idioms.pyi new file mode 100644 index 000000000000..8f02252f7bb9 --- /dev/null +++ b/mypy/typeshed/stdlib/lib2to3/fixes/fix_idioms.pyi @@ -0,0 +1,16 @@ +from typing import ClassVar +from typing_extensions import Literal + +from .. import fixer_base + +CMP: str +TYPE: str + +class FixIdioms(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[False]] + PATTERN: ClassVar[str] + def match(self, node): ... + def transform(self, node, results): ... + def transform_isinstance(self, node, results): ... + def transform_while(self, node, results) -> None: ... + def transform_sort(self, node, results) -> None: ... diff --git a/mypy/typeshed/stdlib/lib2to3/fixes/fix_import.pyi b/mypy/typeshed/stdlib/lib2to3/fixes/fix_import.pyi new file mode 100644 index 000000000000..436e7f1915b2 --- /dev/null +++ b/mypy/typeshed/stdlib/lib2to3/fixes/fix_import.pyi @@ -0,0 +1,17 @@ +from _typeshed import StrPath +from collections.abc import Generator +from typing import ClassVar +from typing_extensions import Literal + +from .. import fixer_base +from ..pytree import Node + +def traverse_imports(names) -> Generator[str, None, None]: ... + +class FixImport(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[str] + skip: bool + def start_tree(self, tree: Node, name: StrPath) -> None: ... + def transform(self, node, results): ... + def probably_a_local_import(self, imp_name): ... diff --git a/mypy/typeshed/stdlib/lib2to3/fixes/fix_imports.pyi b/mypy/typeshed/stdlib/lib2to3/fixes/fix_imports.pyi new file mode 100644 index 000000000000..277a172d3af9 --- /dev/null +++ b/mypy/typeshed/stdlib/lib2to3/fixes/fix_imports.pyi @@ -0,0 +1,22 @@ +from _typeshed import StrPath +from collections.abc import Generator +from typing import ClassVar +from typing_extensions import Literal + +from .. import fixer_base +from ..pytree import Node + +MAPPING: dict[str, str] + +def alternates(members): ... +def build_pattern(mapping=...) -> Generator[str, None, None]: ... + +class FixImports(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + mapping = MAPPING + def build_pattern(self): ... + def compile_pattern(self) -> None: ... + def match(self, node): ... + replace: dict[str, str] + def start_tree(self, tree: Node, filename: StrPath) -> None: ... + def transform(self, node, results) -> None: ... diff --git a/mypy/typeshed/stdlib/lib2to3/fixes/fix_imports2.pyi b/mypy/typeshed/stdlib/lib2to3/fixes/fix_imports2.pyi new file mode 100644 index 000000000000..8d55433085dd --- /dev/null +++ b/mypy/typeshed/stdlib/lib2to3/fixes/fix_imports2.pyi @@ -0,0 +1,6 @@ +from . import fix_imports + +MAPPING: dict[str, str] + +class FixImports2(fix_imports.FixImports): + mapping = MAPPING diff --git a/mypy/typeshed/stdlib/lib2to3/fixes/fix_input.pyi b/mypy/typeshed/stdlib/lib2to3/fixes/fix_input.pyi new file mode 100644 index 000000000000..df52f8d77427 --- /dev/null +++ b/mypy/typeshed/stdlib/lib2to3/fixes/fix_input.pyi @@ -0,0 +1,12 @@ +from _typeshed import Incomplete +from typing import ClassVar +from typing_extensions import Literal + +from .. import fixer_base + +context: Incomplete + +class FixInput(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[str] + def transform(self, node, results): ... 
diff --git a/mypy/typeshed/stdlib/lib2to3/fixes/fix_intern.pyi b/mypy/typeshed/stdlib/lib2to3/fixes/fix_intern.pyi new file mode 100644 index 000000000000..f4e71b6da5f2 --- /dev/null +++ b/mypy/typeshed/stdlib/lib2to3/fixes/fix_intern.pyi @@ -0,0 +1,10 @@ +from typing import ClassVar +from typing_extensions import Literal + +from .. import fixer_base + +class FixIntern(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + order: ClassVar[Literal["pre"]] + PATTERN: ClassVar[str] + def transform(self, node, results): ... diff --git a/mypy/typeshed/stdlib/lib2to3/fixes/fix_isinstance.pyi b/mypy/typeshed/stdlib/lib2to3/fixes/fix_isinstance.pyi new file mode 100644 index 000000000000..e776ea043714 --- /dev/null +++ b/mypy/typeshed/stdlib/lib2to3/fixes/fix_isinstance.pyi @@ -0,0 +1,9 @@ +from typing import ClassVar +from typing_extensions import Literal + +from .. import fixer_base + +class FixIsinstance(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[str] + def transform(self, node, results) -> None: ... diff --git a/mypy/typeshed/stdlib/lib2to3/fixes/fix_itertools.pyi b/mypy/typeshed/stdlib/lib2to3/fixes/fix_itertools.pyi new file mode 100644 index 000000000000..a19f7b5e8a00 --- /dev/null +++ b/mypy/typeshed/stdlib/lib2to3/fixes/fix_itertools.pyi @@ -0,0 +1,10 @@ +from typing import ClassVar +from typing_extensions import Literal + +from .. import fixer_base + +class FixItertools(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + it_funcs: str + PATTERN: ClassVar[str] + def transform(self, node, results) -> None: ... diff --git a/mypy/typeshed/stdlib/lib2to3/fixes/fix_itertools_imports.pyi b/mypy/typeshed/stdlib/lib2to3/fixes/fix_itertools_imports.pyi new file mode 100644 index 000000000000..1ea0b506aaa2 --- /dev/null +++ b/mypy/typeshed/stdlib/lib2to3/fixes/fix_itertools_imports.pyi @@ -0,0 +1,8 @@ +from lib2to3 import fixer_base +from typing import ClassVar +from typing_extensions import Literal + +class FixItertoolsImports(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[str] + def transform(self, node, results): ... diff --git a/mypy/typeshed/stdlib/lib2to3/fixes/fix_long.pyi b/mypy/typeshed/stdlib/lib2to3/fixes/fix_long.pyi new file mode 100644 index 000000000000..c47f4528de47 --- /dev/null +++ b/mypy/typeshed/stdlib/lib2to3/fixes/fix_long.pyi @@ -0,0 +1,8 @@ +from lib2to3 import fixer_base +from typing import ClassVar +from typing_extensions import Literal + +class FixLong(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[Literal["'long'"]] + def transform(self, node, results) -> None: ... diff --git a/mypy/typeshed/stdlib/lib2to3/fixes/fix_map.pyi b/mypy/typeshed/stdlib/lib2to3/fixes/fix_map.pyi new file mode 100644 index 000000000000..66e311cba8a8 --- /dev/null +++ b/mypy/typeshed/stdlib/lib2to3/fixes/fix_map.pyi @@ -0,0 +1,10 @@ +from typing import ClassVar +from typing_extensions import Literal + +from .. import fixer_base + +class FixMap(fixer_base.ConditionalFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[str] + skip_on: ClassVar[Literal["future_builtins.map"]] + def transform(self, node, results): ... 
diff --git a/mypy/typeshed/stdlib/lib2to3/fixes/fix_metaclass.pyi b/mypy/typeshed/stdlib/lib2to3/fixes/fix_metaclass.pyi new file mode 100644 index 000000000000..44626b47072d --- /dev/null +++ b/mypy/typeshed/stdlib/lib2to3/fixes/fix_metaclass.pyi @@ -0,0 +1,18 @@ +from collections.abc import Generator +from typing import ClassVar +from typing_extensions import Literal + +from .. import fixer_base +from ..pytree import Base + +def has_metaclass(parent): ... +def fixup_parse_tree(cls_node) -> None: ... +def fixup_simple_stmt(parent, i, stmt_node) -> None: ... +def remove_trailing_newline(node) -> None: ... +def find_metas(cls_node) -> Generator[tuple[Base, int, Base], None, None]: ... +def fixup_indent(suite) -> None: ... + +class FixMetaclass(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[str] + def transform(self, node, results) -> None: ... diff --git a/mypy/typeshed/stdlib/lib2to3/fixes/fix_methodattrs.pyi b/mypy/typeshed/stdlib/lib2to3/fixes/fix_methodattrs.pyi new file mode 100644 index 000000000000..9bda7992dc8b --- /dev/null +++ b/mypy/typeshed/stdlib/lib2to3/fixes/fix_methodattrs.pyi @@ -0,0 +1,11 @@ +from typing import ClassVar +from typing_extensions import Literal + +from .. import fixer_base + +MAP: dict[str, str] + +class FixMethodattrs(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[str] + def transform(self, node, results) -> None: ... diff --git a/mypy/typeshed/stdlib/lib2to3/fixes/fix_ne.pyi b/mypy/typeshed/stdlib/lib2to3/fixes/fix_ne.pyi new file mode 100644 index 000000000000..95dfacccf219 --- /dev/null +++ b/mypy/typeshed/stdlib/lib2to3/fixes/fix_ne.pyi @@ -0,0 +1,9 @@ +from typing import ClassVar +from typing_extensions import Literal + +from .. import fixer_base + +class FixNe(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[False]] + def match(self, node): ... + def transform(self, node, results): ... diff --git a/mypy/typeshed/stdlib/lib2to3/fixes/fix_next.pyi b/mypy/typeshed/stdlib/lib2to3/fixes/fix_next.pyi new file mode 100644 index 000000000000..a5757d65064a --- /dev/null +++ b/mypy/typeshed/stdlib/lib2to3/fixes/fix_next.pyi @@ -0,0 +1,20 @@ +from _typeshed import StrPath +from typing import ClassVar +from typing_extensions import Literal + +from .. import fixer_base +from ..pytree import Node + +bind_warning: str + +class FixNext(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[str] + order: ClassVar[Literal["pre"]] + shadowed_next: bool + def start_tree(self, tree: Node, filename: StrPath) -> None: ... + def transform(self, node, results) -> None: ... + +def is_assign_target(node): ... +def find_assign(node): ... +def is_subtree(root, node): ... diff --git a/mypy/typeshed/stdlib/lib2to3/fixes/fix_nonzero.pyi b/mypy/typeshed/stdlib/lib2to3/fixes/fix_nonzero.pyi new file mode 100644 index 000000000000..adf268fdb8e2 --- /dev/null +++ b/mypy/typeshed/stdlib/lib2to3/fixes/fix_nonzero.pyi @@ -0,0 +1,9 @@ +from typing import ClassVar +from typing_extensions import Literal + +from .. import fixer_base + +class FixNonzero(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[str] + def transform(self, node, results) -> None: ... 
diff --git a/mypy/typeshed/stdlib/lib2to3/fixes/fix_numliterals.pyi b/mypy/typeshed/stdlib/lib2to3/fixes/fix_numliterals.pyi new file mode 100644 index 000000000000..6842e42e45f0 --- /dev/null +++ b/mypy/typeshed/stdlib/lib2to3/fixes/fix_numliterals.pyi @@ -0,0 +1,9 @@ +from typing import ClassVar +from typing_extensions import Literal + +from .. import fixer_base + +class FixNumliterals(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[False]] + def match(self, node): ... + def transform(self, node, results): ... diff --git a/mypy/typeshed/stdlib/lib2to3/fixes/fix_operator.pyi b/mypy/typeshed/stdlib/lib2to3/fixes/fix_operator.pyi new file mode 100644 index 000000000000..6da150a51c0c --- /dev/null +++ b/mypy/typeshed/stdlib/lib2to3/fixes/fix_operator.pyi @@ -0,0 +1,13 @@ +from lib2to3 import fixer_base +from typing import ClassVar +from typing_extensions import Literal + +def invocation(s): ... + +class FixOperator(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + order: ClassVar[Literal["pre"]] + methods: str + obj: str + PATTERN: ClassVar[str] + def transform(self, node, results): ... diff --git a/mypy/typeshed/stdlib/lib2to3/fixes/fix_paren.pyi b/mypy/typeshed/stdlib/lib2to3/fixes/fix_paren.pyi new file mode 100644 index 000000000000..c730cdc5d0b2 --- /dev/null +++ b/mypy/typeshed/stdlib/lib2to3/fixes/fix_paren.pyi @@ -0,0 +1,9 @@ +from typing import ClassVar +from typing_extensions import Literal + +from .. import fixer_base + +class FixParen(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[str] + def transform(self, node, results) -> None: ... diff --git a/mypy/typeshed/stdlib/lib2to3/fixes/fix_print.pyi b/mypy/typeshed/stdlib/lib2to3/fixes/fix_print.pyi new file mode 100644 index 000000000000..2261c9489299 --- /dev/null +++ b/mypy/typeshed/stdlib/lib2to3/fixes/fix_print.pyi @@ -0,0 +1,13 @@ +from _typeshed import Incomplete +from typing import ClassVar +from typing_extensions import Literal + +from .. import fixer_base + +parend_expr: Incomplete + +class FixPrint(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[str] + def transform(self, node, results): ... + def add_kwarg(self, l_nodes, s_kwd, n_expr) -> None: ... diff --git a/mypy/typeshed/stdlib/lib2to3/fixes/fix_raise.pyi b/mypy/typeshed/stdlib/lib2to3/fixes/fix_raise.pyi new file mode 100644 index 000000000000..756a05ea3ddd --- /dev/null +++ b/mypy/typeshed/stdlib/lib2to3/fixes/fix_raise.pyi @@ -0,0 +1,9 @@ +from typing import ClassVar +from typing_extensions import Literal + +from .. import fixer_base + +class FixRaise(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[str] + def transform(self, node, results): ... diff --git a/mypy/typeshed/stdlib/lib2to3/fixes/fix_raw_input.pyi b/mypy/typeshed/stdlib/lib2to3/fixes/fix_raw_input.pyi new file mode 100644 index 000000000000..61d6ad7676ef --- /dev/null +++ b/mypy/typeshed/stdlib/lib2to3/fixes/fix_raw_input.pyi @@ -0,0 +1,9 @@ +from typing import ClassVar +from typing_extensions import Literal + +from .. import fixer_base + +class FixRawInput(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[str] + def transform(self, node, results) -> None: ... 
diff --git a/mypy/typeshed/stdlib/lib2to3/fixes/fix_reduce.pyi b/mypy/typeshed/stdlib/lib2to3/fixes/fix_reduce.pyi new file mode 100644 index 000000000000..4ea07fdde00b --- /dev/null +++ b/mypy/typeshed/stdlib/lib2to3/fixes/fix_reduce.pyi @@ -0,0 +1,9 @@ +from lib2to3 import fixer_base +from typing import ClassVar +from typing_extensions import Literal + +class FixReduce(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + order: ClassVar[Literal["pre"]] + PATTERN: ClassVar[str] + def transform(self, node, results) -> None: ... diff --git a/mypy/typeshed/stdlib/lib2to3/fixes/fix_reload.pyi b/mypy/typeshed/stdlib/lib2to3/fixes/fix_reload.pyi new file mode 100644 index 000000000000..8045ac507890 --- /dev/null +++ b/mypy/typeshed/stdlib/lib2to3/fixes/fix_reload.pyi @@ -0,0 +1,10 @@ +from typing import ClassVar +from typing_extensions import Literal + +from .. import fixer_base + +class FixReload(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + order: ClassVar[Literal["pre"]] + PATTERN: ClassVar[str] + def transform(self, node, results): ... diff --git a/mypy/typeshed/stdlib/lib2to3/fixes/fix_renames.pyi b/mypy/typeshed/stdlib/lib2to3/fixes/fix_renames.pyi new file mode 100644 index 000000000000..2ceca053e903 --- /dev/null +++ b/mypy/typeshed/stdlib/lib2to3/fixes/fix_renames.pyi @@ -0,0 +1,18 @@ +from collections.abc import Generator +from typing import ClassVar +from typing_extensions import Literal + +from .. import fixer_base + +MAPPING: dict[str, dict[str, str]] +LOOKUP: dict[tuple[str, str], str] + +def alternates(members): ... +def build_pattern() -> Generator[str, None, None]: ... + +class FixRenames(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + order: ClassVar[Literal["pre"]] + PATTERN: ClassVar[str] + def match(self, node): ... + def transform(self, node, results) -> None: ... diff --git a/mypy/typeshed/stdlib/lib2to3/fixes/fix_repr.pyi b/mypy/typeshed/stdlib/lib2to3/fixes/fix_repr.pyi new file mode 100644 index 000000000000..6f3305846d18 --- /dev/null +++ b/mypy/typeshed/stdlib/lib2to3/fixes/fix_repr.pyi @@ -0,0 +1,9 @@ +from typing import ClassVar +from typing_extensions import Literal + +from .. import fixer_base + +class FixRepr(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[str] + def transform(self, node, results): ... diff --git a/mypy/typeshed/stdlib/lib2to3/fixes/fix_set_literal.pyi b/mypy/typeshed/stdlib/lib2to3/fixes/fix_set_literal.pyi new file mode 100644 index 000000000000..dd18413d6d5a --- /dev/null +++ b/mypy/typeshed/stdlib/lib2to3/fixes/fix_set_literal.pyi @@ -0,0 +1,8 @@ +from lib2to3 import fixer_base +from typing import ClassVar +from typing_extensions import Literal + +class FixSetLiteral(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[str] + def transform(self, node, results): ... diff --git a/mypy/typeshed/stdlib/lib2to3/fixes/fix_standarderror.pyi b/mypy/typeshed/stdlib/lib2to3/fixes/fix_standarderror.pyi new file mode 100644 index 000000000000..fd23af5a711e --- /dev/null +++ b/mypy/typeshed/stdlib/lib2to3/fixes/fix_standarderror.pyi @@ -0,0 +1,9 @@ +from typing import ClassVar +from typing_extensions import Literal + +from .. import fixer_base + +class FixStandarderror(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[str] + def transform(self, node, results): ... 
diff --git a/mypy/typeshed/stdlib/lib2to3/fixes/fix_sys_exc.pyi b/mypy/typeshed/stdlib/lib2to3/fixes/fix_sys_exc.pyi new file mode 100644 index 000000000000..3dbcd38c4b26 --- /dev/null +++ b/mypy/typeshed/stdlib/lib2to3/fixes/fix_sys_exc.pyi @@ -0,0 +1,10 @@ +from typing import ClassVar +from typing_extensions import Literal + +from .. import fixer_base + +class FixSysExc(fixer_base.BaseFix): + exc_info: ClassVar[list[str]] + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[str] + def transform(self, node, results): ... diff --git a/mypy/typeshed/stdlib/lib2to3/fixes/fix_throw.pyi b/mypy/typeshed/stdlib/lib2to3/fixes/fix_throw.pyi new file mode 100644 index 000000000000..50e37d44a58b --- /dev/null +++ b/mypy/typeshed/stdlib/lib2to3/fixes/fix_throw.pyi @@ -0,0 +1,9 @@ +from typing import ClassVar +from typing_extensions import Literal + +from .. import fixer_base + +class FixThrow(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[str] + def transform(self, node, results) -> None: ... diff --git a/mypy/typeshed/stdlib/lib2to3/fixes/fix_tuple_params.pyi b/mypy/typeshed/stdlib/lib2to3/fixes/fix_tuple_params.pyi new file mode 100644 index 000000000000..48eadf75341c --- /dev/null +++ b/mypy/typeshed/stdlib/lib2to3/fixes/fix_tuple_params.pyi @@ -0,0 +1,18 @@ +from _typeshed import Incomplete +from typing import ClassVar +from typing_extensions import Literal + +from .. import fixer_base + +def is_docstring(stmt): ... + +class FixTupleParams(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[str] + def transform(self, node, results): ... + def transform_lambda(self, node, results) -> None: ... + +def simplify_args(node): ... +def find_params(node): ... +def map_to_index(param_list, prefix=..., d: Incomplete | None = ...): ... +def tuple_name(param_list): ... diff --git a/mypy/typeshed/stdlib/lib2to3/fixes/fix_types.pyi b/mypy/typeshed/stdlib/lib2to3/fixes/fix_types.pyi new file mode 100644 index 000000000000..6ac1344b1e6c --- /dev/null +++ b/mypy/typeshed/stdlib/lib2to3/fixes/fix_types.pyi @@ -0,0 +1,9 @@ +from typing import ClassVar +from typing_extensions import Literal + +from .. import fixer_base + +class FixTypes(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[str] + def transform(self, node, results): ... diff --git a/mypy/typeshed/stdlib/lib2to3/fixes/fix_unicode.pyi b/mypy/typeshed/stdlib/lib2to3/fixes/fix_unicode.pyi new file mode 100644 index 000000000000..af63d1865f2d --- /dev/null +++ b/mypy/typeshed/stdlib/lib2to3/fixes/fix_unicode.pyi @@ -0,0 +1,13 @@ +from _typeshed import StrPath +from typing import ClassVar +from typing_extensions import Literal + +from .. import fixer_base +from ..pytree import Node + +class FixUnicode(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[Literal["STRING | 'unicode' | 'unichr'"]] # type: ignore[name-defined] # Name "STRING" is not defined + unicode_literals: bool + def start_tree(self, tree: Node, filename: StrPath) -> None: ... + def transform(self, node, results): ... 
diff --git a/mypy/typeshed/stdlib/lib2to3/fixes/fix_urllib.pyi b/mypy/typeshed/stdlib/lib2to3/fixes/fix_urllib.pyi new file mode 100644 index 000000000000..a37e63b31101 --- /dev/null +++ b/mypy/typeshed/stdlib/lib2to3/fixes/fix_urllib.pyi @@ -0,0 +1,15 @@ +from collections.abc import Generator +from typing_extensions import Literal + +from .fix_imports import FixImports + +MAPPING: dict[str, list[tuple[Literal["urllib.request", "urllib.parse", "urllib.error"], list[str]]]] + +def build_pattern() -> Generator[str, None, None]: ... + +class FixUrllib(FixImports): + def build_pattern(self): ... + def transform_import(self, node, results) -> None: ... + def transform_member(self, node, results): ... + def transform_dot(self, node, results) -> None: ... + def transform(self, node, results) -> None: ... diff --git a/mypy/typeshed/stdlib/lib2to3/fixes/fix_ws_comma.pyi b/mypy/typeshed/stdlib/lib2to3/fixes/fix_ws_comma.pyi new file mode 100644 index 000000000000..6231d90c65f1 --- /dev/null +++ b/mypy/typeshed/stdlib/lib2to3/fixes/fix_ws_comma.pyi @@ -0,0 +1,13 @@ +from typing import ClassVar +from typing_extensions import Literal + +from .. import fixer_base +from ..pytree import Leaf + +class FixWsComma(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[False]] + PATTERN: ClassVar[str] + COMMA: Leaf + COLON: Leaf + SEPS: tuple[Leaf, Leaf] + def transform(self, node, results): ... diff --git a/mypy/typeshed/stdlib/lib2to3/fixes/fix_xrange.pyi b/mypy/typeshed/stdlib/lib2to3/fixes/fix_xrange.pyi new file mode 100644 index 000000000000..89d300ef063a --- /dev/null +++ b/mypy/typeshed/stdlib/lib2to3/fixes/fix_xrange.pyi @@ -0,0 +1,21 @@ +from _typeshed import Incomplete, StrPath +from typing import ClassVar +from typing_extensions import Literal + +from .. import fixer_base +from ..pytree import Node + +class FixXrange(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[str] + transformed_xranges: set[Incomplete] | None + def start_tree(self, tree: Node, filename: StrPath) -> None: ... + def finish_tree(self, tree: Node, filename: StrPath) -> None: ... + def transform(self, node, results): ... + def transform_xrange(self, node, results) -> None: ... + def transform_range(self, node, results): ... + P1: ClassVar[str] + p1: ClassVar[Incomplete] + P2: ClassVar[str] + p2: ClassVar[Incomplete] + def in_special_context(self, node): ... diff --git a/mypy/typeshed/stdlib/lib2to3/fixes/fix_xreadlines.pyi b/mypy/typeshed/stdlib/lib2to3/fixes/fix_xreadlines.pyi new file mode 100644 index 000000000000..39757155e5d9 --- /dev/null +++ b/mypy/typeshed/stdlib/lib2to3/fixes/fix_xreadlines.pyi @@ -0,0 +1,9 @@ +from typing import ClassVar +from typing_extensions import Literal + +from .. import fixer_base + +class FixXreadlines(fixer_base.BaseFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[str] + def transform(self, node, results) -> None: ... diff --git a/mypy/typeshed/stdlib/lib2to3/fixes/fix_zip.pyi b/mypy/typeshed/stdlib/lib2to3/fixes/fix_zip.pyi new file mode 100644 index 000000000000..0c70717aa2ac --- /dev/null +++ b/mypy/typeshed/stdlib/lib2to3/fixes/fix_zip.pyi @@ -0,0 +1,10 @@ +from typing import ClassVar +from typing_extensions import Literal + +from .. import fixer_base + +class FixZip(fixer_base.ConditionalFix): + BM_compatible: ClassVar[Literal[True]] + PATTERN: ClassVar[str] + skip_on: ClassVar[Literal["future_builtins.zip"]] + def transform(self, node, results): ... 
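The fixer stubs above all expose the same small surface (`BM_compatible`, `PATTERN`, `transform`), and they are driven by `lib2to3.refactor.RefactoringTool`, whose stub changes appear further down in this diff. As a minimal, hedged sketch of that runtime usage (assuming a Python version that still ships `lib2to3`; it is deprecated since 3.9 and removed in 3.13):

```python
from lib2to3.refactor import RefactoringTool, get_fixers_from_package

# Collect the stock 2to3 fixers bundled with the stdlib
# (e.g. "lib2to3.fixes.fix_print", "lib2to3.fixes.fix_xrange", ...).
fixers = get_fixers_from_package("lib2to3.fixes")
tool = RefactoringTool(fixers)

# refactor_string() parses the source, applies every matching fixer,
# and returns a lib2to3.pytree.Node (or None if parsing fails).
tree = tool.refactor_string("print 'hello'\n", "<example>")
print(tree)  # -> print('hello')
```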
diff --git a/mypy/typeshed/stdlib/lib2to3/main.pyi b/mypy/typeshed/stdlib/lib2to3/main.pyi new file mode 100644 index 000000000000..cfcaeeaf64ee --- /dev/null +++ b/mypy/typeshed/stdlib/lib2to3/main.pyi @@ -0,0 +1,43 @@ +from _typeshed import FileDescriptorOrPath +from collections.abc import Container, Iterable, Iterator, Mapping, Sequence +from logging import _ExcInfoType +from typing import AnyStr +from typing_extensions import Literal + +from . import refactor as refactor + +def diff_texts(a: str, b: str, filename: str) -> Iterator[str]: ... + +class StdoutRefactoringTool(refactor.MultiprocessRefactoringTool): + nobackups: bool + show_diffs: bool + def __init__( + self, + fixers: Iterable[str], + options: Mapping[str, object] | None, + explicit: Container[str] | None, + nobackups: bool, + show_diffs: bool, + input_base_dir: str = "", + output_dir: str = "", + append_suffix: str = "", + ) -> None: ... + # Same as super.log_error and Logger.error + def log_error( # type: ignore[override] + self, + msg: str, + *args: Iterable[str], + exc_info: _ExcInfoType = None, + stack_info: bool = False, + stacklevel: int = 1, + extra: Mapping[str, object] | None = None, + ) -> None: ... + # Same as super.write_file but without default values + def write_file( # type: ignore[override] + self, new_text: str, filename: FileDescriptorOrPath, old_text: str, encoding: str | None + ) -> None: ... + # filename has to be str + def print_output(self, old: str, new: str, filename: str, equal: bool) -> None: ... # type: ignore[override] + +def warn(msg: object) -> None: ... +def main(fixer_pkg: str, args: Sequence[AnyStr] | None = None) -> Literal[0, 1, 2]: ... diff --git a/mypy/typeshed/stdlib/lib2to3/pgen2/__init__.pyi b/mypy/typeshed/stdlib/lib2to3/pgen2/__init__.pyi index acc1cc429be9..de8a874f434d 100644 --- a/mypy/typeshed/stdlib/lib2to3/pgen2/__init__.pyi +++ b/mypy/typeshed/stdlib/lib2to3/pgen2/__init__.pyi @@ -1,8 +1,9 @@ from collections.abc import Callable -from lib2to3.pgen2.grammar import Grammar -from lib2to3.pytree import _RawNode from typing import Any from typing_extensions import TypeAlias +from ..pytree import _RawNode +from .grammar import Grammar + # This is imported in several lib2to3/pgen2 submodules _Convert: TypeAlias = Callable[[Grammar, _RawNode], Any] # noqa: Y047 diff --git a/mypy/typeshed/stdlib/lib2to3/pgen2/driver.pyi b/mypy/typeshed/stdlib/lib2to3/pgen2/driver.pyi index 9f6e4d6774ad..dea13fb9d0f8 100644 --- a/mypy/typeshed/stdlib/lib2to3/pgen2/driver.pyi +++ b/mypy/typeshed/stdlib/lib2to3/pgen2/driver.pyi @@ -1,10 +1,11 @@ from _typeshed import StrPath from collections.abc import Iterable -from lib2to3.pgen2 import _Convert -from lib2to3.pgen2.grammar import Grammar -from lib2to3.pytree import _NL from logging import Logger -from typing import IO, Any +from typing import IO + +from ..pytree import _NL +from . import _Convert +from .grammar import Grammar __all__ = ["Driver", "load_grammar"] @@ -13,7 +14,9 @@ class Driver: logger: Logger convert: _Convert def __init__(self, grammar: Grammar, convert: _Convert | None = None, logger: Logger | None = None) -> None: ... - def parse_tokens(self, tokens: Iterable[Any], debug: bool = False) -> _NL: ... + def parse_tokens( + self, tokens: Iterable[tuple[int, str, tuple[int, int], tuple[int, int], str]], debug: bool = False + ) -> _NL: ... def parse_stream_raw(self, stream: IO[str], debug: bool = False) -> _NL: ... def parse_stream(self, stream: IO[str], debug: bool = False) -> _NL: ... 
def parse_file(self, filename: StrPath, encoding: str | None = None, debug: bool = False) -> _NL: ... diff --git a/mypy/typeshed/stdlib/lib2to3/pgen2/parse.pyi b/mypy/typeshed/stdlib/lib2to3/pgen2/parse.pyi index 51eb671f4236..320c5f018d43 100644 --- a/mypy/typeshed/stdlib/lib2to3/pgen2/parse.pyi +++ b/mypy/typeshed/stdlib/lib2to3/pgen2/parse.pyi @@ -1,11 +1,12 @@ +from _typeshed import Incomplete from collections.abc import Sequence -from lib2to3.pgen2 import _Convert -from lib2to3.pgen2.grammar import _DFAS, Grammar -from lib2to3.pytree import _NL, _RawNode -from typing import Any from typing_extensions import TypeAlias -_Context: TypeAlias = Sequence[Any] +from ..pytree import _NL, _RawNode +from . import _Convert +from .grammar import _DFAS, Grammar + +_Context: TypeAlias = Sequence[Incomplete] class ParseError(Exception): msg: str diff --git a/mypy/typeshed/stdlib/lib2to3/pgen2/pgen.pyi b/mypy/typeshed/stdlib/lib2to3/pgen2/pgen.pyi index d346739d4d58..6d9f776c61ae 100644 --- a/mypy/typeshed/stdlib/lib2to3/pgen2/pgen.pyi +++ b/mypy/typeshed/stdlib/lib2to3/pgen2/pgen.pyi @@ -1,8 +1,9 @@ -from _typeshed import StrPath +from _typeshed import Incomplete, StrPath from collections.abc import Iterable, Iterator -from lib2to3.pgen2 import grammar -from lib2to3.pgen2.tokenize import _TokenInfo -from typing import IO, Any, NoReturn +from typing import IO, NoReturn, overload + +from . import grammar +from .tokenize import _TokenInfo class PgenGrammar(grammar.Grammar): ... @@ -26,19 +27,22 @@ class ParserGenerator: def parse_alt(self) -> tuple[NFAState, NFAState]: ... def parse_item(self) -> tuple[NFAState, NFAState]: ... def parse_atom(self) -> tuple[NFAState, NFAState]: ... - def expect(self, type: int, value: Any | None = None) -> str: ... + def expect(self, type: int, value: str | None = None) -> str: ... def gettoken(self) -> None: ... - def raise_error(self, msg: str, *args: Any) -> NoReturn: ... + @overload + def raise_error(self, msg: object) -> NoReturn: ... + @overload + def raise_error(self, msg: str, *args: object) -> NoReturn: ... class NFAState: arcs: list[tuple[str | None, NFAState]] def addarc(self, next: NFAState, label: str | None = None) -> None: ... class DFAState: - nfaset: dict[NFAState, Any] + nfaset: dict[NFAState, Incomplete] isfinal: bool arcs: dict[str, DFAState] - def __init__(self, nfaset: dict[NFAState, Any], final: NFAState) -> None: ... + def __init__(self, nfaset: dict[NFAState, Incomplete], final: NFAState) -> None: ... def addarc(self, next: DFAState, label: str) -> None: ... def unifystate(self, old: DFAState, new: DFAState) -> None: ... def __eq__(self, other: DFAState) -> bool: ... # type: ignore[override] diff --git a/mypy/typeshed/stdlib/lib2to3/pgen2/tokenize.pyi b/mypy/typeshed/stdlib/lib2to3/pgen2/tokenize.pyi index 2a9c3fbba821..af54de1b51d3 100644 --- a/mypy/typeshed/stdlib/lib2to3/pgen2/tokenize.pyi +++ b/mypy/typeshed/stdlib/lib2to3/pgen2/tokenize.pyi @@ -1,7 +1,8 @@ from collections.abc import Callable, Iterable, Iterator -from lib2to3.pgen2.token import * from typing_extensions import TypeAlias +from .token import * + __all__ = [ "AMPER", "AMPEREQUAL", diff --git a/mypy/typeshed/stdlib/lib2to3/pygram.pyi b/mypy/typeshed/stdlib/lib2to3/pygram.pyi index 00fdbd1a124e..2d1e90e79927 100644 --- a/mypy/typeshed/stdlib/lib2to3/pygram.pyi +++ b/mypy/typeshed/stdlib/lib2to3/pygram.pyi @@ -1,5 +1,6 @@ import sys -from lib2to3.pgen2.grammar import Grammar + +from .pgen2.grammar import Grammar class Symbols: def __init__(self, grammar: Grammar) -> None: ... 
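The `pgen2.driver` and `pygram` stubs above cover the lower-level parsing entry points that the refactoring machinery builds on. A small sketch of using them directly, assuming the same still-shipping `lib2to3` (`Driver.parse_string` exists at runtime even though only the other `parse_*` methods are visible in the excerpt above):

```python
from lib2to3 import pygram, pytree
from lib2to3.pgen2 import driver

# Build a driver for the Python 2/3-compatible grammar and convert the raw
# parse results into pytree.Node / pytree.Leaf objects.
d = driver.Driver(pygram.python_grammar, convert=pytree.convert)

tree = d.parse_string("x = 1\n")
print(type(tree).__name__, repr(str(tree)))  # Node 'x = 1\n'
```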
diff --git a/mypy/typeshed/stdlib/lib2to3/pytree.pyi b/mypy/typeshed/stdlib/lib2to3/pytree.pyi index 4f756c9768db..d14446f38565 100644 --- a/mypy/typeshed/stdlib/lib2to3/pytree.pyi +++ b/mypy/typeshed/stdlib/lib2to3/pytree.pyi @@ -1,16 +1,19 @@ -from collections.abc import Iterator -from lib2to3.pgen2.grammar import Grammar -from typing import Any -from typing_extensions import Self, TypeAlias +from _typeshed import Incomplete, SupportsGetItem, SupportsLenAndGetItem, Unused +from abc import abstractmethod +from collections.abc import Iterable, Iterator, MutableSequence +from typing_extensions import Final, Self, TypeAlias + +from .fixer_base import BaseFix +from .pgen2.grammar import Grammar _NL: TypeAlias = Node | Leaf _Context: TypeAlias = tuple[str, int, int] _Results: TypeAlias = dict[str, _NL] _RawNode: TypeAlias = tuple[int, str, _Context, list[_NL] | None] -HUGE: int +HUGE: Final = 0x7FFFFFFF -def type_repr(type_num: int) -> str: ... +def type_repr(type_num: int) -> str | int: ... class Base: type: int @@ -20,10 +23,14 @@ class Base: was_changed: bool was_checked: bool def __eq__(self, other: object) -> bool: ... - def _eq(self, other: Self) -> bool: ... + @abstractmethod + def _eq(self, other: Base) -> bool: ... + @abstractmethod def clone(self) -> Self: ... - def post_order(self) -> Iterator[_NL]: ... - def pre_order(self) -> Iterator[_NL]: ... + @abstractmethod + def post_order(self) -> Iterator[Self]: ... + @abstractmethod + def pre_order(self) -> Iterator[Self]: ... def replace(self, new: _NL | list[_NL]) -> None: ... def get_lineno(self) -> int: ... def changed(self) -> None: ... @@ -37,15 +44,23 @@ class Base: def get_suffix(self) -> str: ... class Node(Base): - fixers_applied: list[Any] + fixers_applied: MutableSequence[BaseFix] | None + # Is Unbound until set in refactor.RefactoringTool + future_features: frozenset[Incomplete] + # Is Unbound until set in pgen2.parse.Parser.pop + used_names: set[str] def __init__( self, type: int, - children: list[_NL], - context: Any | None = None, + children: Iterable[_NL], + context: Unused = None, prefix: str | None = None, - fixers_applied: list[Any] | None = None, + fixers_applied: MutableSequence[BaseFix] | None = None, ) -> None: ... + def _eq(self, other: Base) -> bool: ... + def clone(self) -> Node: ... + def post_order(self) -> Iterator[Self]: ... + def pre_order(self) -> Iterator[Self]: ... def set_child(self, i: int, child: _NL) -> None: ... def insert_child(self, i: int, child: _NL) -> None: ... def append_child(self, child: _NL) -> None: ... @@ -55,10 +70,19 @@ class Leaf(Base): lineno: int column: int value: str - fixers_applied: list[Any] + fixers_applied: MutableSequence[BaseFix] def __init__( - self, type: int, value: str, context: _Context | None = None, prefix: str | None = None, fixers_applied: list[Any] = ... + self, + type: int, + value: str, + context: _Context | None = None, + prefix: str | None = None, + fixers_applied: MutableSequence[BaseFix] = [], ) -> None: ... + def _eq(self, other: Base) -> bool: ... + def clone(self) -> Leaf: ... + def post_order(self) -> Iterator[Self]: ... + def pre_order(self) -> Iterator[Self]: ... def __unicode__(self) -> str: ... def convert(gr: Grammar, raw_node: _RawNode) -> _NL: ... @@ -69,8 +93,8 @@ class BasePattern: name: str | None def optimize(self) -> BasePattern: ... # sic, subclasses are free to optimize themselves into different patterns def match(self, node: _NL, results: _Results | None = None) -> bool: ... 
- def match_seq(self, nodes: list[_NL], results: _Results | None = None) -> bool: ... - def generate_matches(self, nodes: list[_NL]) -> Iterator[tuple[int, _Results]]: ... + def match_seq(self, nodes: SupportsLenAndGetItem[_NL], results: _Results | None = None) -> bool: ... + def generate_matches(self, nodes: SupportsGetItem[int, _NL]) -> Iterator[tuple[int, _Results]]: ... class LeafPattern(BasePattern): def __init__(self, type: int | None = None, content: str | None = None, name: str | None = None) -> None: ... @@ -87,4 +111,6 @@ class WildcardPattern(BasePattern): class NegatedPattern(BasePattern): def __init__(self, content: str | None = None) -> None: ... -def generate_matches(patterns: list[BasePattern], nodes: list[_NL]) -> Iterator[tuple[int, _Results]]: ... +def generate_matches( + patterns: SupportsGetItem[int | slice, BasePattern] | None, nodes: SupportsGetItem[int | slice, _NL] +) -> Iterator[tuple[int, _Results]]: ... diff --git a/mypy/typeshed/stdlib/lib2to3/refactor.pyi b/mypy/typeshed/stdlib/lib2to3/refactor.pyi index f1d89679aee7..d750d9c4a6cf 100644 --- a/mypy/typeshed/stdlib/lib2to3/refactor.pyi +++ b/mypy/typeshed/stdlib/lib2to3/refactor.pyi @@ -1,12 +1,16 @@ +from _typeshed import FileDescriptorOrPath, StrPath, SupportsGetItem from collections.abc import Container, Generator, Iterable, Mapping -from logging import Logger -from typing import Any, ClassVar, NoReturn -from typing_extensions import TypeAlias +from logging import Logger, _ExcInfoType +from multiprocessing import JoinableQueue +from multiprocessing.synchronize import Lock +from typing import Any, ClassVar, NoReturn, overload +from typing_extensions import Final +from .btm_matcher import BottomMatcher +from .fixer_base import BaseFix +from .pgen2.driver import Driver from .pgen2.grammar import Grammar - -_Driver: TypeAlias = Any # really lib2to3.driver.Driver -_BottomMatcher: TypeAlias = Any # really lib2to3.btm_matcher.BottomMatcher +from .pytree import Node def get_all_fix_names(fixer_pkg: str, remove_prefix: bool = True) -> list[str]: ... def get_fixers_from_package(pkg_name: str) -> list[str]: ... @@ -21,53 +25,59 @@ class RefactoringTool: options: dict[str, Any] grammar: Grammar write_unchanged_files: bool - errors: list[Any] + errors: list[tuple[str, Iterable[str], dict[str, _ExcInfoType]]] logger: Logger - fixer_log: list[Any] + fixer_log: list[str] wrote: bool - driver: _Driver - pre_order: Any - post_order: Any - files: list[Any] - BM: _BottomMatcher - bmi_pre_order: list[Any] - bmi_post_order: list[Any] + driver: Driver + pre_order: list[BaseFix] + post_order: list[BaseFix] + files: list[StrPath] + BM: BottomMatcher + bmi_pre_order: list[BaseFix] + bmi_post_order: list[BaseFix] def __init__( - self, fixer_names: Iterable[str], options: Mapping[str, Any] | None = None, explicit: Container[str] | None = None + self, fixer_names: Iterable[str], options: Mapping[str, object] | None = None, explicit: Container[str] | None = None ) -> None: ... - def get_fixers(self) -> tuple[list[Any], list[Any]]: ... - def log_error(self, msg: str, *args: Any, **kwds: Any) -> NoReturn: ... - def log_message(self, msg: str, *args: Any) -> None: ... - def log_debug(self, msg: str, *args: Any) -> None: ... - def print_output(self, old_text: str, new_text: str, filename: str, equal): ... + def get_fixers(self) -> tuple[list[BaseFix], list[BaseFix]]: ... + def log_error(self, msg: str, *args: Iterable[str], **kwargs: _ExcInfoType) -> NoReturn: ... + @overload + def log_message(self, msg: object) -> None: ... 
+ @overload + def log_message(self, msg: str, *args: object) -> None: ... + @overload + def log_debug(self, msg: object) -> None: ... + @overload + def log_debug(self, msg: str, *args: object) -> None: ... + def print_output(self, old_text: str, new_text: str, filename: StrPath, equal: bool) -> None: ... def refactor(self, items: Iterable[str], write: bool = False, doctests_only: bool = False) -> None: ... def refactor_dir(self, dir_name: str, write: bool = False, doctests_only: bool = False) -> None: ... - def _read_python_source(self, filename: str) -> tuple[str, str]: ... - def refactor_file(self, filename: str, write: bool = False, doctests_only: bool = False) -> None: ... - def refactor_string(self, data: str, name: str): ... + def _read_python_source(self, filename: FileDescriptorOrPath) -> tuple[str, str]: ... + def refactor_file(self, filename: StrPath, write: bool = False, doctests_only: bool = False) -> None: ... + def refactor_string(self, data: str, name: str) -> Node | None: ... def refactor_stdin(self, doctests_only: bool = False) -> None: ... - def refactor_tree(self, tree, name: str) -> bool: ... - def traverse_by(self, fixers, traversal) -> None: ... + def refactor_tree(self, tree: Node, name: str) -> bool: ... + def traverse_by(self, fixers: SupportsGetItem[int, Iterable[BaseFix]] | None, traversal: Iterable[Node]) -> None: ... def processed_file( - self, new_text: str, filename: str, old_text: str | None = None, write: bool = False, encoding: str | None = None + self, new_text: str, filename: StrPath, old_text: str | None = None, write: bool = False, encoding: str | None = None ) -> None: ... - def write_file(self, new_text: str, filename: str, old_text: str, encoding: str | None = None) -> None: ... - PS1: ClassVar[str] - PS2: ClassVar[str] - def refactor_docstring(self, input: str, filename: str) -> str: ... - def refactor_doctest(self, block: list[str], lineno: int, indent: int, filename: str) -> list[str]: ... + def write_file(self, new_text: str, filename: FileDescriptorOrPath, old_text: str, encoding: str | None = None) -> None: ... + PS1: Final = ">>> " + PS2: Final = "... " + def refactor_docstring(self, input: str, filename: StrPath) -> str: ... + def refactor_doctest(self, block: list[str], lineno: int, indent: int, filename: StrPath) -> list[str]: ... def summarize(self) -> None: ... - def parse_block(self, block: Iterable[str], lineno: int, indent: int): ... + def parse_block(self, block: Iterable[str], lineno: int, indent: int) -> Node: ... def wrap_toks( self, block: Iterable[str], lineno: int, indent: int - ) -> Generator[tuple[Any, Any, tuple[int, int], tuple[int, int], str], None, None]: ... + ) -> Generator[tuple[int, str, tuple[int, int], tuple[int, int], str], None, None]: ... def gen_lines(self, block: Iterable[str], indent: int) -> Generator[str, None, None]: ... class MultiprocessingUnsupported(Exception): ... class MultiprocessRefactoringTool(RefactoringTool): - queue: Any | None - output_lock: Any | None + queue: JoinableQueue[None | tuple[Iterable[str], bool | int]] | None + output_lock: Lock | None def refactor( self, items: Iterable[str], write: bool = False, doctests_only: bool = False, num_processes: int = 1 ) -> None: ... diff --git a/mypy/typeshed/stdlib/locale.pyi b/mypy/typeshed/stdlib/locale.pyi index 0b0dd9456e52..c6cc7cacfb1d 100644 --- a/mypy/typeshed/stdlib/locale.pyi +++ b/mypy/typeshed/stdlib/locale.pyi @@ -114,7 +114,9 @@ class Error(Exception): ... 
def setlocale(category: int, locale: _str | Iterable[_str | None] | None = None) -> _str: ... def localeconv() -> Mapping[_str, int | _str | list[int]]: ... def nl_langinfo(__key: int) -> _str: ... -def getdefaultlocale(envvars: tuple[_str, ...] = ...) -> tuple[_str | None, _str | None]: ... +def getdefaultlocale( + envvars: tuple[_str, ...] = ("LC_ALL", "LC_CTYPE", "LANG", "LANGUAGE") +) -> tuple[_str | None, _str | None]: ... def getlocale(category: int = ...) -> tuple[_str | None, _str | None]: ... def getpreferredencoding(do_setlocale: bool = True) -> _str: ... def normalize(localename: _str) -> _str: ... diff --git a/mypy/typeshed/stdlib/logging/__init__.pyi b/mypy/typeshed/stdlib/logging/__init__.pyi index 3c547a6e0ff8..6ebd305aacb8 100644 --- a/mypy/typeshed/stdlib/logging/__init__.pyi +++ b/mypy/typeshed/stdlib/logging/__init__.pyi @@ -80,7 +80,7 @@ _levelToName: dict[int, str] _nameToLevel: dict[str, int] class Filterer: - filters: list[Filter] + filters: list[_FilterType] def addFilter(self, filter: _FilterType) -> None: ... def removeFilter(self, filter: _FilterType) -> None: ... def filter(self, record: LogRecord) -> bool: ... @@ -389,7 +389,7 @@ class LogRecord: msecs: float # Only created when logging.Formatter.format is called. See #6132. message: str - msg: str + msg: str | Any # The runtime accepts any object, but will be a str in 99% of cases name: str pathname: str process: int | None diff --git a/mypy/typeshed/stdlib/logging/config.pyi b/mypy/typeshed/stdlib/logging/config.pyi index f76f655a6196..e92658f7f1b3 100644 --- a/mypy/typeshed/stdlib/logging/config.pyi +++ b/mypy/typeshed/stdlib/logging/config.pyi @@ -1,36 +1,59 @@ import sys from _typeshed import StrOrBytesPath -from collections.abc import Callable, Sequence +from collections.abc import Callable, Hashable, Iterable, Sequence from configparser import RawConfigParser from re import Pattern from threading import Thread -from typing import IO, Any +from typing import IO, Any, overload +from typing_extensions import Literal, SupportsIndex, TypeAlias, TypedDict -from . import _Level - -if sys.version_info >= (3, 8): - from typing import Literal, TypedDict -else: - from typing_extensions import Literal, TypedDict +from . import Filter, Filterer, Formatter, Handler, Logger, _FilterType, _FormatStyle, _Level DEFAULT_LOGGING_CONFIG_PORT: int RESET_ERROR: int # undocumented IDENTIFIER: Pattern[str] # undocumented -class _RootLoggerConfiguration(TypedDict, total=False): - level: _Level - filters: Sequence[str] - handlers: Sequence[str] +if sys.version_info >= (3, 11): + class _RootLoggerConfiguration(TypedDict, total=False): + level: _Level + filters: Sequence[str | _FilterType] + handlers: Sequence[str] + +else: + class _RootLoggerConfiguration(TypedDict, total=False): + level: _Level + filters: Sequence[str] + handlers: Sequence[str] class _LoggerConfiguration(_RootLoggerConfiguration, TypedDict, total=False): propagate: bool +if sys.version_info >= (3, 8): + _FormatterConfigurationTypedDict = TypedDict( + "_FormatterConfigurationTypedDict", {"class": str, "format": str, "datefmt": str, "style": _FormatStyle}, total=False + ) +else: + _FormatterConfigurationTypedDict = TypedDict( + "_FormatterConfigurationTypedDict", + {"class": str, "format": str, "datefmt": str, "style": _FormatStyle, "validate": bool}, + total=False, + ) + +class _FilterConfigurationTypedDict(TypedDict): + name: str + +# Formatter and filter configs can specify custom factories via the special `()` key. 
+# If that is the case, the dictionary can contain any additional keys +# https://docs.python.org/3/library/logging.config.html#user-defined-objects +_FormatterConfiguration: TypeAlias = _FormatterConfigurationTypedDict | dict[str, Any] +_FilterConfiguration: TypeAlias = _FilterConfigurationTypedDict | dict[str, Any] +# Handler config can have additional keys even when not providing a custom factory so we just use `dict`. +_HandlerConfiguration: TypeAlias = dict[str, Any] + class _OptionalDictConfigArgs(TypedDict, total=False): - # these two can have custom factories (key: `()`) which can have extra keys - formatters: dict[str, dict[str, Any]] - filters: dict[str, dict[str, Any]] - # type checkers would warn about extra keys if this was a TypedDict - handlers: dict[str, dict[str, Any]] + formatters: dict[str, _FormatterConfiguration] + filters: dict[str, _FilterConfiguration] + handlers: dict[str, _HandlerConfiguration] loggers: dict[str, _LoggerConfiguration] root: _RootLoggerConfiguration | None incremental: bool @@ -64,3 +87,57 @@ else: def valid_ident(s: str) -> Literal[True]: ... # undocumented def listen(port: int = 9030, verify: Callable[[bytes], bytes | None] | None = None) -> Thread: ... def stopListening() -> None: ... + +class ConvertingMixin: # undocumented + def convert_with_key(self, key: Any, value: Any, replace: bool = True) -> Any: ... + def convert(self, value: Any) -> Any: ... + +class ConvertingDict(dict[Hashable, Any], ConvertingMixin): # undocumented + def __getitem__(self, key: Hashable) -> Any: ... + def get(self, key: Hashable, default: Any = None) -> Any: ... + def pop(self, key: Hashable, default: Any = None) -> Any: ... + +class ConvertingList(list[Any], ConvertingMixin): # undocumented + @overload + def __getitem__(self, key: SupportsIndex) -> Any: ... + @overload + def __getitem__(self, key: slice) -> Any: ... + def pop(self, idx: SupportsIndex = -1) -> Any: ... + +class ConvertingTuple(tuple[Any, ...], ConvertingMixin): # undocumented + @overload + def __getitem__(self, key: SupportsIndex) -> Any: ... + @overload + def __getitem__(self, key: slice) -> Any: ... + +class BaseConfigurator: # undocumented + CONVERT_PATTERN: Pattern[str] + WORD_PATTERN: Pattern[str] + DOT_PATTERN: Pattern[str] + INDEX_PATTERN: Pattern[str] + DIGIT_PATTERN: Pattern[str] + value_converters: dict[str, str] + importer: Callable[..., Any] + + def __init__(self, config: _DictConfigArgs | dict[str, Any]) -> None: ... + def resolve(self, s: str) -> Any: ... + def ext_convert(self, value: str) -> Any: ... + def cfg_convert(self, value: str) -> Any: ... + def convert(self, value: Any) -> Any: ... + def configure_custom(self, config: dict[str, Any]) -> Any: ... + def as_tuple(self, value: list[Any] | tuple[Any]) -> tuple[Any]: ... + +class DictConfigurator(BaseConfigurator): + def configure(self) -> None: ... # undocumented + def configure_formatter(self, config: _FormatterConfiguration) -> Formatter | Any: ... # undocumented + def configure_filter(self, config: _FilterConfiguration) -> Filter | Any: ... # undocumented + def add_filters(self, filterer: Filterer, filters: Iterable[_FilterType]) -> None: ... # undocumented + def configure_handler(self, config: _HandlerConfiguration) -> Handler | Any: ... # undocumented + def add_handlers(self, logger: Logger, handlers: Iterable[str]) -> None: ... # undocumented + def common_logger_config( + self, logger: Logger, config: _LoggerConfiguration, incremental: bool = False + ) -> None: ... 
# undocumented + def configure_logger(self, name: str, config: _LoggerConfiguration, incremental: bool = False) -> None: ... # undocumented + def configure_root(self, config: _LoggerConfiguration, incremental: bool = False) -> None: ... # undocumented + +dictConfigClass = DictConfigurator diff --git a/mypy/typeshed/stdlib/logging/handlers.pyi b/mypy/typeshed/stdlib/logging/handlers.pyi index 7e0bfd705895..8a0373435d21 100644 --- a/mypy/typeshed/stdlib/logging/handlers.pyi +++ b/mypy/typeshed/stdlib/logging/handlers.pyi @@ -177,7 +177,9 @@ class SysLogHandler(Handler): priority_names: ClassVar[dict[str, int]] # undocumented facility_names: ClassVar[dict[str, int]] # undocumented priority_map: ClassVar[dict[str, str]] # undocumented - def __init__(self, address: tuple[str, int] | str = ..., facility: int = 1, socktype: SocketKind | None = None) -> None: ... + def __init__( + self, address: tuple[str, int] | str = ("localhost", 514), facility: int = 1, socktype: SocketKind | None = None + ) -> None: ... if sys.version_info >= (3, 11): def createSocket(self) -> None: ... diff --git a/mypy/typeshed/stdlib/lzma.pyi b/mypy/typeshed/stdlib/lzma.pyi index 34bd6f3f8db1..8e296bb5b357 100644 --- a/mypy/typeshed/stdlib/lzma.pyi +++ b/mypy/typeshed/stdlib/lzma.pyi @@ -104,7 +104,7 @@ class LZMACompressor: class LZMAError(Exception): ... -class LZMAFile(io.BufferedIOBase, IO[bytes]): +class LZMAFile(io.BufferedIOBase, IO[bytes]): # type: ignore[misc] # incompatible definitions of writelines in the base classes def __init__( self, filename: _PathOrFile | None = None, diff --git a/mypy/typeshed/stdlib/mailcap.pyi b/mypy/typeshed/stdlib/mailcap.pyi index 5905f5826bf7..ce549e01f528 100644 --- a/mypy/typeshed/stdlib/mailcap.pyi +++ b/mypy/typeshed/stdlib/mailcap.pyi @@ -6,6 +6,6 @@ _Cap: TypeAlias = dict[str, str | int] __all__ = ["getcaps", "findmatch"] def findmatch( - caps: Mapping[str, list[_Cap]], MIMEtype: str, key: str = "view", filename: str = "/dev/null", plist: Sequence[str] = ... + caps: Mapping[str, list[_Cap]], MIMEtype: str, key: str = "view", filename: str = "/dev/null", plist: Sequence[str] = [] ) -> tuple[str | None, _Cap | None]: ... def getcaps() -> dict[str, list[_Cap]]: ... diff --git a/mypy/typeshed/stdlib/mimetypes.pyi b/mypy/typeshed/stdlib/mimetypes.pyi index fd3908680009..128a05fa5752 100644 --- a/mypy/typeshed/stdlib/mimetypes.pyi +++ b/mypy/typeshed/stdlib/mimetypes.pyi @@ -43,7 +43,7 @@ class MimeTypes: encodings_map: dict[str, str] types_map: tuple[dict[str, str], dict[str, str]] types_map_inv: tuple[dict[str, str], dict[str, str]] - def __init__(self, filenames: tuple[str, ...] = ..., strict: bool = True) -> None: ... + def __init__(self, filenames: tuple[str, ...] = (), strict: bool = True) -> None: ... def guess_extension(self, type: str, strict: bool = True) -> str | None: ... if sys.version_info >= (3, 8): def guess_type(self, url: StrPath, strict: bool = True) -> tuple[str | None, str | None]: ... diff --git a/mypy/typeshed/stdlib/modulefinder.pyi b/mypy/typeshed/stdlib/modulefinder.pyi index 6f1917644b06..06bb50d26286 100644 --- a/mypy/typeshed/stdlib/modulefinder.pyi +++ b/mypy/typeshed/stdlib/modulefinder.pyi @@ -44,8 +44,8 @@ class ModuleFinder: self, path: list[str] | None = None, debug: int = 0, - excludes: Container[str] = ..., - replace_paths: Sequence[tuple[str, str]] = ..., + excludes: Container[str] = [], + replace_paths: Sequence[tuple[str, str]] = [], ) -> None: ... def msg(self, level: int, str: str, *args: Any) -> None: ... 
# undocumented diff --git a/mypy/typeshed/stdlib/multiprocessing/context.pyi b/mypy/typeshed/stdlib/multiprocessing/context.pyi index c498649a7b61..fe3b98024548 100644 --- a/mypy/typeshed/stdlib/multiprocessing/context.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/context.pyi @@ -72,7 +72,7 @@ class BaseContext: self, processes: int | None = None, initializer: Callable[..., object] | None = None, - initargs: Iterable[Any] = ..., + initargs: Iterable[Any] = (), maxtasksperchild: int | None = None, ) -> _Pool: ... @overload diff --git a/mypy/typeshed/stdlib/multiprocessing/dummy/__init__.pyi b/mypy/typeshed/stdlib/multiprocessing/dummy/__init__.pyi index 5b2a33772de6..967b57ded6c8 100644 --- a/mypy/typeshed/stdlib/multiprocessing/dummy/__init__.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/dummy/__init__.pyi @@ -50,8 +50,8 @@ class DummyProcess(threading.Thread): group: Any = None, target: Callable[..., object] | None = None, name: str | None = None, - args: Iterable[Any] = ..., - kwargs: Mapping[str, Any] = ..., + args: Iterable[Any] = (), + kwargs: Mapping[str, Any] = {}, ) -> None: ... Process = DummyProcess @@ -69,9 +69,7 @@ class Value: def Array(typecode: Any, sequence: Sequence[Any], lock: Any = True) -> array.array[Any]: ... def Manager() -> Any: ... -def Pool( - processes: int | None = None, initializer: Callable[..., object] | None = None, initargs: Iterable[Any] = ... -) -> Any: ... +def Pool(processes: int | None = None, initializer: Callable[..., object] | None = None, initargs: Iterable[Any] = ()) -> Any: ... def active_children() -> list[Any]: ... current_process = threading.current_thread diff --git a/mypy/typeshed/stdlib/multiprocessing/forkserver.pyi b/mypy/typeshed/stdlib/multiprocessing/forkserver.pyi index df435f00ebe7..9a15f2683b7d 100644 --- a/mypy/typeshed/stdlib/multiprocessing/forkserver.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/forkserver.pyi @@ -24,7 +24,7 @@ def main( def read_signed(fd: int) -> Any: ... def write_signed(fd: int, n: int) -> None: ... -_forkserver: ForkServer = ... +_forkserver: ForkServer ensure_running = _forkserver.ensure_running get_inherited_fds = _forkserver.get_inherited_fds connect_to_new_process = _forkserver.connect_to_new_process diff --git a/mypy/typeshed/stdlib/multiprocessing/managers.pyi b/mypy/typeshed/stdlib/multiprocessing/managers.pyi index ad147fca36ed..27a903fb9987 100644 --- a/mypy/typeshed/stdlib/multiprocessing/managers.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/managers.pyi @@ -54,7 +54,7 @@ class BaseProxy: manager_owned: bool = False, ) -> None: ... def __deepcopy__(self, memo: Any | None) -> Any: ... - def _callmethod(self, methodname: str, args: tuple[Any, ...] = ..., kwds: dict[Any, Any] = ...) -> None: ... + def _callmethod(self, methodname: str, args: tuple[Any, ...] = (), kwds: dict[Any, Any] = {}) -> None: ... def _getvalue(self) -> Any: ... def __reduce__(self) -> tuple[Any, tuple[Any, Any, str, dict[Any, Any]]]: ... @@ -150,7 +150,7 @@ class BaseManager: def get_server(self) -> Server: ... def connect(self) -> None: ... - def start(self, initializer: Callable[..., object] | None = None, initargs: Iterable[Any] = ...) -> None: ... + def start(self, initializer: Callable[..., object] | None = None, initargs: Iterable[Any] = ()) -> None: ... def shutdown(self) -> None: ... # only available after start() was called def join(self, timeout: float | None = None) -> None: ... 
# undocumented @property @@ -197,6 +197,8 @@ class SyncManager(BaseManager): @overload def dict(self, __iterable: Iterable[list[str]]) -> DictProxy[str, str]: ... @overload + def dict(self, __iterable: Iterable[list[bytes]]) -> DictProxy[bytes, bytes]: ... + @overload def list(self, __sequence: Sequence[_T]) -> ListProxy[_T]: ... @overload def list(self) -> ListProxy[Any]: ... diff --git a/mypy/typeshed/stdlib/multiprocessing/pool.pyi b/mypy/typeshed/stdlib/multiprocessing/pool.pyi index a19dd555e254..dd4b865a3574 100644 --- a/mypy/typeshed/stdlib/multiprocessing/pool.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/pool.pyi @@ -72,16 +72,16 @@ class Pool: self, processes: int | None = None, initializer: Callable[..., object] | None = None, - initargs: Iterable[Any] = ..., + initargs: Iterable[Any] = (), maxtasksperchild: int | None = None, context: Any | None = None, ) -> None: ... - def apply(self, func: Callable[..., _T], args: Iterable[Any] = ..., kwds: Mapping[str, Any] = ...) -> _T: ... + def apply(self, func: Callable[..., _T], args: Iterable[Any] = (), kwds: Mapping[str, Any] = {}) -> _T: ... def apply_async( self, func: Callable[..., _T], - args: Iterable[Any] = ..., - kwds: Mapping[str, Any] = ..., + args: Iterable[Any] = (), + kwds: Mapping[str, Any] = {}, callback: Callable[[_T], object] | None = None, error_callback: Callable[[BaseException], object] | None = None, ) -> AsyncResult[_T]: ... @@ -115,7 +115,7 @@ class Pool: class ThreadPool(Pool): def __init__( - self, processes: int | None = None, initializer: Callable[..., object] | None = None, initargs: Iterable[Any] = ... + self, processes: int | None = None, initializer: Callable[..., object] | None = None, initargs: Iterable[Any] = () ) -> None: ... # undocumented diff --git a/mypy/typeshed/stdlib/multiprocessing/process.pyi b/mypy/typeshed/stdlib/multiprocessing/process.pyi index ef1b4b596d33..9863013fc05f 100644 --- a/mypy/typeshed/stdlib/multiprocessing/process.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/process.pyi @@ -17,8 +17,8 @@ class BaseProcess: group: None = None, target: Callable[..., object] | None = None, name: str | None = None, - args: Iterable[Any] = ..., - kwargs: Mapping[str, Any] = ..., + args: Iterable[Any] = (), + kwargs: Mapping[str, Any] = {}, *, daemon: bool | None = None, ) -> None: ... diff --git a/mypy/typeshed/stdlib/multiprocessing/queues.pyi b/mypy/typeshed/stdlib/multiprocessing/queues.pyi index f821b6df4b37..a26ab7173232 100644 --- a/mypy/typeshed/stdlib/multiprocessing/queues.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/queues.pyi @@ -1,4 +1,3 @@ -import queue import sys from typing import Any, Generic, TypeVar @@ -9,19 +8,24 @@ __all__ = ["Queue", "SimpleQueue", "JoinableQueue"] _T = TypeVar("_T") -class Queue(queue.Queue[_T]): +class Queue(Generic[_T]): # FIXME: `ctx` is a circular dependency and it's not actually optional. # It's marked as such to be able to use the generic Queue in __init__.pyi. def __init__(self, maxsize: int = 0, *, ctx: Any = ...) -> None: ... - def get(self, block: bool = True, timeout: float | None = None) -> _T: ... def put(self, obj: _T, block: bool = True, timeout: float | None = None) -> None: ... - def put_nowait(self, obj: _T) -> None: ... + def get(self, block: bool = True, timeout: float | None = None) -> _T: ... + def qsize(self) -> int: ... + def empty(self) -> bool: ... + def full(self) -> bool: ... def get_nowait(self) -> _T: ... + def put_nowait(self, obj: _T) -> None: ... def close(self) -> None: ... def join_thread(self) -> None: ... 
def cancel_join_thread(self) -> None: ... -class JoinableQueue(Queue[_T]): ... +class JoinableQueue(Queue[_T]): + def task_done(self) -> None: ... + def join(self) -> None: ... class SimpleQueue(Generic[_T]): def __init__(self, *, ctx: Any = ...) -> None: ... diff --git a/mypy/typeshed/stdlib/multiprocessing/resource_tracker.pyi b/mypy/typeshed/stdlib/multiprocessing/resource_tracker.pyi index e2b940796126..7f726a00d73a 100644 --- a/mypy/typeshed/stdlib/multiprocessing/resource_tracker.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/resource_tracker.pyi @@ -9,7 +9,7 @@ class ResourceTracker: def register(self, name: Sized, rtype: Incomplete) -> None: ... def unregister(self, name: Sized, rtype: Incomplete) -> None: ... -_resource_tracker: ResourceTracker = ... +_resource_tracker: ResourceTracker ensure_running = _resource_tracker.ensure_running register = _resource_tracker.register unregister = _resource_tracker.unregister diff --git a/mypy/typeshed/stdlib/multiprocessing/synchronize.pyi b/mypy/typeshed/stdlib/multiprocessing/synchronize.pyi index 6c2e18954343..a4e36cfa0b6e 100644 --- a/mypy/typeshed/stdlib/multiprocessing/synchronize.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/synchronize.pyi @@ -14,9 +14,6 @@ class Barrier(threading.Barrier): self, parties: int, action: Callable[[], object] | None = None, timeout: float | None = None, *ctx: BaseContext ) -> None: ... -class BoundedSemaphore(Semaphore): - def __init__(self, value: int = 1, *, ctx: BaseContext) -> None: ... - class Condition(AbstractContextManager[bool]): def __init__(self, lock: _LockLike | None = None, *, ctx: BaseContext) -> None: ... def notify(self, n: int = 1) -> None: ... @@ -36,6 +33,14 @@ class Event: def clear(self) -> None: ... def wait(self, timeout: float | None = None) -> bool: ... +# Not part of public API +class SemLock(AbstractContextManager[bool]): + def acquire(self, block: bool = ..., timeout: float | None = ...) -> bool: ... + def release(self) -> None: ... + def __exit__( + self, __exc_type: type[BaseException] | None, __exc_val: BaseException | None, __exc_tb: TracebackType | None + ) -> None: ... + class Lock(SemLock): def __init__(self, *, ctx: BaseContext) -> None: ... @@ -45,10 +50,5 @@ class RLock(SemLock): class Semaphore(SemLock): def __init__(self, value: int = 1, *, ctx: BaseContext) -> None: ... -# Not part of public API -class SemLock(AbstractContextManager[bool]): - def acquire(self, block: bool = ..., timeout: float | None = ...) -> bool: ... - def release(self) -> None: ... - def __exit__( - self, __exc_type: type[BaseException] | None, __exc_val: BaseException | None, __exc_tb: TracebackType | None - ) -> None: ... +class BoundedSemaphore(Semaphore): + def __init__(self, value: int = 1, *, ctx: BaseContext) -> None: ... diff --git a/mypy/typeshed/stdlib/multiprocessing/util.pyi b/mypy/typeshed/stdlib/multiprocessing/util.pyi index 006ec3a9f6ce..7ca650511e51 100644 --- a/mypy/typeshed/stdlib/multiprocessing/util.pyi +++ b/mypy/typeshed/stdlib/multiprocessing/util.pyi @@ -50,14 +50,14 @@ class Finalize: self, obj: Incomplete | None, callback: Callable[..., Incomplete], - args: Sequence[Any] = ..., + args: Sequence[Any] = (), kwargs: Mapping[str, Any] | None = None, exitpriority: int | None = None, ) -> None: ... def __call__( self, wr: Unused = None, - _finalizer_registry: MutableMapping[Incomplete, Incomplete] = ..., + _finalizer_registry: MutableMapping[Incomplete, Incomplete] = {}, sub_debug: Callable[..., object] = ..., getpid: Callable[[], int] = ..., ) -> Incomplete: ... 
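The `multiprocessing.queues` stub above now spells out `task_done()` and `join()` on `JoinableQueue` instead of inheriting them from `queue.Queue`. A minimal, hedged sketch of the producer/consumer pattern those two methods support (names and item values are illustrative):

```python
from multiprocessing import JoinableQueue, Process

def worker(q):
    while True:
        item = q.get()
        print("processed", item)
        q.task_done()   # lets q.join() in the parent make progress

if __name__ == "__main__":
    q = JoinableQueue()
    Process(target=worker, args=(q,), daemon=True).start()
    for item in range(3):
        q.put(item)
    q.join()            # blocks until task_done() was called for every put()
```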
diff --git a/mypy/typeshed/stdlib/pathlib.pyi b/mypy/typeshed/stdlib/pathlib.pyi index 114678ed574d..7aec66b584e3 100644 --- a/mypy/typeshed/stdlib/pathlib.pyi +++ b/mypy/typeshed/stdlib/pathlib.pyi @@ -173,9 +173,9 @@ class Path(PurePath): def resolve(self, strict: bool = False) -> Self: ... def rglob(self, pattern: str) -> Generator[Self, None, None]: ... def rmdir(self) -> None: ... - def symlink_to(self, target: str | Path, target_is_directory: bool = False) -> None: ... + def symlink_to(self, target: StrOrBytesPath, target_is_directory: bool = False) -> None: ... if sys.version_info >= (3, 10): - def hardlink_to(self, target: str | Path) -> None: ... + def hardlink_to(self, target: StrOrBytesPath) -> None: ... def touch(self, mode: int = 0o666, exist_ok: bool = True) -> None: ... if sys.version_info >= (3, 8): diff --git a/mypy/typeshed/stdlib/pdb.pyi b/mypy/typeshed/stdlib/pdb.pyi index e2871bb54fa0..405c45ca01ac 100644 --- a/mypy/typeshed/stdlib/pdb.pyi +++ b/mypy/typeshed/stdlib/pdb.pyi @@ -128,7 +128,7 @@ class Pdb(Bdb, Cmd): def _select_frame(self, number: int) -> None: ... def _getval_except(self, arg: str, frame: FrameType | None = None) -> object: ... def _print_lines( - self, lines: Sequence[str], start: int, breaks: Sequence[int] = ..., frame: FrameType | None = None + self, lines: Sequence[str], start: int, breaks: Sequence[int] = (), frame: FrameType | None = None ) -> None: ... def _cmdloop(self) -> None: ... def do_display(self, arg: str) -> bool | None: ... diff --git a/mypy/typeshed/stdlib/pickle.pyi b/mypy/typeshed/stdlib/pickle.pyi index 57c4cb03e484..55ff38585b95 100644 --- a/mypy/typeshed/stdlib/pickle.pyi +++ b/mypy/typeshed/stdlib/pickle.pyi @@ -121,7 +121,7 @@ if sys.version_info >= (3, 8): fix_imports: bool = True, encoding: str = "ASCII", errors: str = "strict", - buffers: Iterable[Any] | None = ..., + buffers: Iterable[Any] | None = (), ) -> Any: ... def loads( __data: ReadableBuffer, @@ -129,7 +129,7 @@ if sys.version_info >= (3, 8): fix_imports: bool = True, encoding: str = "ASCII", errors: str = "strict", - buffers: Iterable[Any] | None = ..., + buffers: Iterable[Any] | None = (), ) -> Any: ... else: diff --git a/mypy/typeshed/stdlib/platform.pyi b/mypy/typeshed/stdlib/platform.pyi index 291f302b4c7d..483d6c454c2e 100644 --- a/mypy/typeshed/stdlib/platform.pyi +++ b/mypy/typeshed/stdlib/platform.pyi @@ -33,10 +33,10 @@ if sys.version_info >= (3, 8): def win32_is_iot() -> bool: ... def mac_ver( - release: str = "", versioninfo: tuple[str, str, str] = ..., machine: str = "" + release: str = "", versioninfo: tuple[str, str, str] = ("", "", ""), machine: str = "" ) -> tuple[str, tuple[str, str, str], str]: ... def java_ver( - release: str = "", vendor: str = "", vminfo: tuple[str, str, str] = ..., osinfo: tuple[str, str, str] = ... + release: str = "", vendor: str = "", vminfo: tuple[str, str, str] = ("", "", ""), osinfo: tuple[str, str, str] = ("", "", "") ) -> tuple[str, str, tuple[str, str, str], tuple[str, str, str]]: ... def system_alias(system: str, release: str, version: str) -> tuple[str, str, str]: ... def architecture(executable: str = sys.executable, bits: str = "", linkage: str = "") -> tuple[str, str]: ... 
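The `platform` stub above replaces the `...` defaults of `mac_ver()` and `java_ver()` with the concrete empty tuples the runtime uses. A quick sketch of what those calls report; the concrete values shown in the comments are what a typical Linux box would print and are illustrative only:

```python
import platform

bits, linkage = platform.architecture()   # e.g. ("64bit", "ELF") on Linux
print(platform.system(), platform.release(), bits, linkage)

# On anything other than macOS this is the fallback assembled from the
# defaults in the signature: ("", ("", "", ""), "")
print(platform.mac_ver())
```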
diff --git a/mypy/typeshed/stdlib/pydoc.pyi b/mypy/typeshed/stdlib/pydoc.pyi index c6893d50c66a..ed97f1918e01 100644 --- a/mypy/typeshed/stdlib/pydoc.pyi +++ b/mypy/typeshed/stdlib/pydoc.pyi @@ -30,7 +30,7 @@ def visiblename(name: str, all: Container[str] | None = None, obj: object = None def classify_class_attrs(object: object) -> list[tuple[str, str, type, str]]: ... def ispackage(path: str) -> bool: ... def source_synopsis(file: IO[AnyStr]) -> AnyStr | None: ... -def synopsis(filename: str, cache: MutableMapping[str, tuple[int, str]] = ...) -> str | None: ... +def synopsis(filename: str, cache: MutableMapping[str, tuple[int, str]] = {}) -> str | None: ... class ErrorDuringImport(Exception): filename: str @@ -40,7 +40,7 @@ class ErrorDuringImport(Exception): def __init__(self, filename: str, exc_info: OptExcInfo) -> None: ... def importfile(path: str) -> ModuleType: ... -def safeimport(path: str, forceload: bool = ..., cache: MutableMapping[str, ModuleType] = ...) -> ModuleType: ... +def safeimport(path: str, forceload: bool = ..., cache: MutableMapping[str, ModuleType] = {}) -> ModuleType | None: ... class Doc: PYTHONDOCS: str @@ -70,7 +70,7 @@ class HTMLRepr(Repr): def repr_unicode(self, x: AnyStr, level: complex) -> str: ... class HTMLDoc(Doc): - _repr_instance: HTMLRepr = ... + _repr_instance: HTMLRepr repr = _repr_instance.repr escape = _repr_instance.escape def page(self, title: str, contents: str) -> str: ... @@ -113,9 +113,9 @@ class HTMLDoc(Doc): self, text: str, escape: Callable[[str], str] | None = None, - funcs: Mapping[str, str] = ..., - classes: Mapping[str, str] = ..., - methods: Mapping[str, str] = ..., + funcs: Mapping[str, str] = {}, + classes: Mapping[str, str] = {}, + methods: Mapping[str, str] = {}, ) -> str: ... def formattree( self, tree: list[tuple[type, tuple[type, ...]] | list[Any]], modname: str, parent: type | None = None @@ -126,8 +126,8 @@ class HTMLDoc(Doc): object: object, name: str | None = None, mod: str | None = None, - funcs: Mapping[str, str] = ..., - classes: Mapping[str, str] = ..., + funcs: Mapping[str, str] = {}, + classes: Mapping[str, str] = {}, *ignored: Any, ) -> str: ... def formatvalue(self, object: object) -> str: ... @@ -136,9 +136,9 @@ class HTMLDoc(Doc): object: object, name: str | None = None, mod: str | None = None, - funcs: Mapping[str, str] = ..., - classes: Mapping[str, str] = ..., - methods: Mapping[str, str] = ..., + funcs: Mapping[str, str] = {}, + classes: Mapping[str, str] = {}, + methods: Mapping[str, str] = {}, cl: type | None = None, ) -> str: ... def docproperty(self, object: object, name: str | None = None, mod: str | None = None, cl: Any | None = None) -> str: ... # type: ignore[override] @@ -154,7 +154,7 @@ class TextRepr(Repr): def repr_instance(self, x: object, level: complex) -> str: ... class TextDoc(Doc): - _repr_instance: TextRepr = ... + _repr_instance: TextRepr repr = _repr_instance.repr def bold(self, text: str) -> str: ... def indent(self, text: str, prefix: str = " ") -> str: ... diff --git a/mypy/typeshed/stdlib/random.pyi b/mypy/typeshed/stdlib/random.pyi index 4849878691f5..5434f22407cc 100644 --- a/mypy/typeshed/stdlib/random.pyi +++ b/mypy/typeshed/stdlib/random.pyi @@ -100,9 +100,7 @@ class SystemRandom(Random): def getstate(self, *args: Any, **kwds: Any) -> NoReturn: ... def setstate(self, *args: Any, **kwds: Any) -> NoReturn: ... -# ----- random function stubs ----- - -_inst: Random = ... 
+_inst: Random seed = _inst.seed random = _inst.random uniform = _inst.uniform diff --git a/mypy/typeshed/stdlib/reprlib.pyi b/mypy/typeshed/stdlib/reprlib.pyi index 21c8a5cd4e0c..68ada6569348 100644 --- a/mypy/typeshed/stdlib/reprlib.pyi +++ b/mypy/typeshed/stdlib/reprlib.pyi @@ -1,3 +1,4 @@ +import sys from array import array from collections import deque from collections.abc import Callable @@ -22,6 +23,30 @@ class Repr: maxlong: int maxstring: int maxother: int + if sys.version_info >= (3, 11): + fillvalue: str + if sys.version_info >= (3, 12): + indent: str | int | None + + if sys.version_info >= (3, 12): + def __init__( + self, + *, + maxlevel: int = 6, + maxtuple: int = 6, + maxlist: int = 6, + maxarray: int = 5, + maxdict: int = 4, + maxset: int = 6, + maxfrozenset: int = 6, + maxdeque: int = 6, + maxstring: int = 30, + maxlong: int = 40, + maxother: int = 30, + fillvalue: str = "...", + indent: str | int | None = None, + ) -> None: ... + def repr(self, x: Any) -> str: ... def repr1(self, x: Any, level: int) -> str: ... def repr_tuple(self, x: tuple[Any, ...], level: int) -> str: ... diff --git a/mypy/typeshed/stdlib/sched.pyi b/mypy/typeshed/stdlib/sched.pyi index a8ec78d68fd2..75dd63d0414a 100644 --- a/mypy/typeshed/stdlib/sched.pyi +++ b/mypy/typeshed/stdlib/sched.pyi @@ -30,10 +30,10 @@ class scheduler: def __init__(self, timefunc: Callable[[], float] = ..., delayfunc: Callable[[float], object] = ...) -> None: ... def enterabs( - self, time: float, priority: Any, action: _ActionCallback, argument: tuple[Any, ...] = ..., kwargs: dict[str, Any] = ... + self, time: float, priority: Any, action: _ActionCallback, argument: tuple[Any, ...] = (), kwargs: dict[str, Any] = ... ) -> Event: ... def enter( - self, delay: float, priority: Any, action: _ActionCallback, argument: tuple[Any, ...] = ..., kwargs: dict[str, Any] = ... + self, delay: float, priority: Any, action: _ActionCallback, argument: tuple[Any, ...] = (), kwargs: dict[str, Any] = ... ) -> Event: ... def run(self, blocking: bool = True) -> float | None: ... def cancel(self, event: Event) -> None: ... diff --git a/mypy/typeshed/stdlib/shlex.pyi b/mypy/typeshed/stdlib/shlex.pyi index fa04932db676..c4fd23d60666 100644 --- a/mypy/typeshed/stdlib/shlex.pyi +++ b/mypy/typeshed/stdlib/shlex.pyi @@ -29,7 +29,7 @@ class shlex(Iterable[str]): debug: int lineno: int token: str - eof: str + eof: str | None @property def punctuation_chars(self) -> str: ... def __init__( @@ -39,12 +39,12 @@ class shlex(Iterable[str]): posix: bool = False, punctuation_chars: bool | str = False, ) -> None: ... - def get_token(self) -> str: ... + def get_token(self) -> str | None: ... def push_token(self, tok: str) -> None: ... - def read_token(self) -> str: ... - def sourcehook(self, newfile: str) -> tuple[str, TextIO]: ... + def read_token(self) -> str | None: ... + def sourcehook(self, newfile: str) -> tuple[str, TextIO] | None: ... def push_source(self, newstream: str | TextIO, newfile: str | None = None) -> None: ... def pop_source(self) -> None: ... - def error_leader(self, infile: str | None = None, lineno: int | None = None) -> None: ... + def error_leader(self, infile: str | None = None, lineno: int | None = None) -> str: ... def __iter__(self) -> Self: ... def __next__(self) -> str: ... diff --git a/mypy/typeshed/stdlib/shutil.pyi b/mypy/typeshed/stdlib/shutil.pyi index 0e4f521e5e34..e8eb468337e1 100644 --- a/mypy/typeshed/stdlib/shutil.pyi +++ b/mypy/typeshed/stdlib/shutil.pyi @@ -182,4 +182,4 @@ def register_unpack_format( ) -> None: ... 
def unregister_unpack_format(name: str) -> None: ... def get_unpack_formats() -> list[tuple[str, list[str], str]]: ... -def get_terminal_size(fallback: tuple[int, int] = ...) -> os.terminal_size: ... +def get_terminal_size(fallback: tuple[int, int] = (80, 24)) -> os.terminal_size: ... diff --git a/mypy/typeshed/stdlib/signal.pyi b/mypy/typeshed/stdlib/signal.pyi index e411d47016b6..4c961a0c9aab 100644 --- a/mypy/typeshed/stdlib/signal.pyi +++ b/mypy/typeshed/stdlib/signal.pyi @@ -53,6 +53,8 @@ class Signals(IntEnum): SIGPWR: int SIGRTMAX: int SIGRTMIN: int + if sys.version_info >= (3, 11): + SIGSTKFLT: int class Handlers(IntEnum): SIG_DFL: int @@ -147,6 +149,8 @@ else: SIGPWR: Signals SIGRTMAX: Signals SIGRTMIN: Signals + if sys.version_info >= (3, 11): + SIGSTKFLT: Signals @final class struct_siginfo(structseq[int], tuple[int, int, int, int, int, int, int]): if sys.version_info >= (3, 10): diff --git a/mypy/typeshed/stdlib/smtplib.pyi b/mypy/typeshed/stdlib/smtplib.pyi index 0d7595fc1d6d..4228ad551eba 100644 --- a/mypy/typeshed/stdlib/smtplib.pyi +++ b/mypy/typeshed/stdlib/smtplib.pyi @@ -111,8 +111,8 @@ class SMTP: def help(self, args: str = "") -> bytes: ... def rset(self) -> _Reply: ... def noop(self) -> _Reply: ... - def mail(self, sender: str, options: Sequence[str] = ...) -> _Reply: ... - def rcpt(self, recip: str, options: Sequence[str] = ...) -> _Reply: ... + def mail(self, sender: str, options: Sequence[str] = ()) -> _Reply: ... + def rcpt(self, recip: str, options: Sequence[str] = ()) -> _Reply: ... def data(self, msg: ReadableBuffer | str) -> _Reply: ... def verify(self, address: str) -> _Reply: ... vrfy = verify @@ -134,16 +134,16 @@ class SMTP: from_addr: str, to_addrs: str | Sequence[str], msg: _BufferWithLen | str, - mail_options: Sequence[str] = ..., - rcpt_options: Sequence[str] = ..., + mail_options: Sequence[str] = (), + rcpt_options: Sequence[str] = (), ) -> _SendErrs: ... def send_message( self, msg: _Message, from_addr: str | None = None, to_addrs: str | Sequence[str] | None = None, - mail_options: Sequence[str] = ..., - rcpt_options: Sequence[str] = ..., + mail_options: Sequence[str] = (), + rcpt_options: Sequence[str] = (), ) -> _SendErrs: ... def close(self) -> None: ... def quit(self) -> _Reply: ... diff --git a/mypy/typeshed/stdlib/sqlite3/dbapi2.pyi b/mypy/typeshed/stdlib/sqlite3/dbapi2.pyi index da58c3aa97fd..24974f787c62 100644 --- a/mypy/typeshed/stdlib/sqlite3/dbapi2.pyi +++ b/mypy/typeshed/stdlib/sqlite3/dbapi2.pyi @@ -377,7 +377,7 @@ class Cursor(Iterator[Any]): def rowcount(self) -> int: ... def __init__(self, __cursor: Connection) -> None: ... def close(self) -> None: ... - def execute(self, __sql: str, __parameters: _Parameters = ...) -> Self: ... + def execute(self, __sql: str, __parameters: _Parameters = ()) -> Self: ... def executemany(self, __sql: str, __seq_of_parameters: Iterable[_Parameters]) -> Self: ... def executescript(self, __sql_script: str) -> Cursor: ... def fetchall(self) -> list[Any]: ... @@ -390,14 +390,13 @@ class Cursor(Iterator[Any]): def __iter__(self) -> Self: ... def __next__(self) -> Any: ... -class DataError(DatabaseError): ... -class DatabaseError(Error): ... - class Error(Exception): if sys.version_info >= (3, 11): sqlite_errorcode: int sqlite_errorname: str +class DatabaseError(Error): ... +class DataError(DatabaseError): ... class IntegrityError(DatabaseError): ... class InterfaceError(Error): ... class InternalError(DatabaseError): ... 
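The reordered `sqlite3` exception classes above follow the DB-API 2.0 hierarchy (`Error` -> `DatabaseError` -> `DataError`, `IntegrityError`, ...), which is why `Error` must now be defined first in the stub. A small sketch showing that hierarchy at runtime; the broken SQL is deliberate:

```python
import sqlite3

# DB-API hierarchy: Error -> DatabaseError -> DataError / IntegrityError / ...
assert issubclass(sqlite3.DataError, sqlite3.DatabaseError)
assert issubclass(sqlite3.DatabaseError, sqlite3.Error)

try:
    sqlite3.connect(":memory:").execute("SELEC 1")   # deliberate syntax error
except sqlite3.DatabaseError as exc:                 # also catches OperationalError
    print(type(exc).__name__, "-", exc)
```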
diff --git a/mypy/typeshed/stdlib/string.pyi b/mypy/typeshed/stdlib/string.pyi index dc9a449e0e39..1a875a071bf5 100644 --- a/mypy/typeshed/stdlib/string.pyi +++ b/mypy/typeshed/stdlib/string.pyi @@ -47,8 +47,8 @@ class Template(metaclass=_TemplateMetaclass): flags: ClassVar[RegexFlag] pattern: ClassVar[Pattern[str]] def __init__(self, template: str) -> None: ... - def substitute(self, __mapping: Mapping[str, object] = ..., **kwds: object) -> str: ... - def safe_substitute(self, __mapping: Mapping[str, object] = ..., **kwds: object) -> str: ... + def substitute(self, __mapping: Mapping[str, object] = {}, **kwds: object) -> str: ... + def safe_substitute(self, __mapping: Mapping[str, object] = {}, **kwds: object) -> str: ... if sys.version_info >= (3, 11): def get_identifiers(self) -> list[str]: ... def is_valid(self) -> bool: ... diff --git a/mypy/typeshed/stdlib/subprocess.pyi b/mypy/typeshed/stdlib/subprocess.pyi index 3c8041811ef3..346e4d5513d8 100644 --- a/mypy/typeshed/stdlib/subprocess.pyi +++ b/mypy/typeshed/stdlib/subprocess.pyi @@ -1870,7 +1870,7 @@ class Popen(Generic[AnyStr]): creationflags: int = 0, restore_signals: bool = True, start_new_session: bool = False, - pass_fds: Collection[int] = ..., + pass_fds: Collection[int] = (), *, text: bool | None = None, encoding: str, @@ -1901,7 +1901,7 @@ class Popen(Generic[AnyStr]): creationflags: int = 0, restore_signals: bool = True, start_new_session: bool = False, - pass_fds: Collection[int] = ..., + pass_fds: Collection[int] = (), *, text: bool | None = None, encoding: str | None = None, @@ -1933,7 +1933,7 @@ class Popen(Generic[AnyStr]): creationflags: int = 0, restore_signals: bool = True, start_new_session: bool = False, - pass_fds: Collection[int] = ..., + pass_fds: Collection[int] = (), # where the *real* keyword only args start text: bool | None = None, encoding: str | None = None, @@ -1964,7 +1964,7 @@ class Popen(Generic[AnyStr]): creationflags: int = 0, restore_signals: bool = True, start_new_session: bool = False, - pass_fds: Collection[int] = ..., + pass_fds: Collection[int] = (), *, text: Literal[True], encoding: str | None = None, @@ -1995,7 +1995,7 @@ class Popen(Generic[AnyStr]): creationflags: int = 0, restore_signals: bool = True, start_new_session: bool = False, - pass_fds: Collection[int] = ..., + pass_fds: Collection[int] = (), *, text: Literal[None, False] = None, encoding: None = None, @@ -2026,7 +2026,7 @@ class Popen(Generic[AnyStr]): creationflags: int = 0, restore_signals: bool = True, start_new_session: bool = False, - pass_fds: Collection[int] = ..., + pass_fds: Collection[int] = (), *, text: bool | None = None, encoding: str | None = None, @@ -2059,7 +2059,7 @@ class Popen(Generic[AnyStr]): creationflags: int = 0, restore_signals: bool = True, start_new_session: bool = False, - pass_fds: Collection[int] = ..., + pass_fds: Collection[int] = (), *, text: bool | None = None, encoding: str, @@ -2089,7 +2089,7 @@ class Popen(Generic[AnyStr]): creationflags: int = 0, restore_signals: bool = True, start_new_session: bool = False, - pass_fds: Collection[int] = ..., + pass_fds: Collection[int] = (), *, text: bool | None = None, encoding: str | None = None, @@ -2120,7 +2120,7 @@ class Popen(Generic[AnyStr]): creationflags: int = 0, restore_signals: bool = True, start_new_session: bool = False, - pass_fds: Collection[int] = ..., + pass_fds: Collection[int] = (), # where the *real* keyword only args start text: bool | None = None, encoding: str | None = None, @@ -2150,7 +2150,7 @@ class Popen(Generic[AnyStr]): 
creationflags: int = 0, restore_signals: bool = True, start_new_session: bool = False, - pass_fds: Collection[int] = ..., + pass_fds: Collection[int] = (), *, text: Literal[True], encoding: str | None = None, @@ -2180,7 +2180,7 @@ class Popen(Generic[AnyStr]): creationflags: int = 0, restore_signals: bool = True, start_new_session: bool = False, - pass_fds: Collection[int] = ..., + pass_fds: Collection[int] = (), *, text: Literal[None, False] = None, encoding: None = None, @@ -2210,7 +2210,7 @@ class Popen(Generic[AnyStr]): creationflags: int = 0, restore_signals: bool = True, start_new_session: bool = False, - pass_fds: Collection[int] = ..., + pass_fds: Collection[int] = (), *, text: bool | None = None, encoding: str | None = None, @@ -2242,7 +2242,7 @@ class Popen(Generic[AnyStr]): creationflags: int = 0, restore_signals: bool = True, start_new_session: bool = False, - pass_fds: Collection[int] = ..., + pass_fds: Collection[int] = (), *, text: bool | None = None, encoding: str, @@ -2271,7 +2271,7 @@ class Popen(Generic[AnyStr]): creationflags: int = 0, restore_signals: bool = True, start_new_session: bool = False, - pass_fds: Collection[int] = ..., + pass_fds: Collection[int] = (), *, text: bool | None = None, encoding: str | None = None, @@ -2301,7 +2301,7 @@ class Popen(Generic[AnyStr]): creationflags: int = 0, restore_signals: bool = True, start_new_session: bool = False, - pass_fds: Collection[int] = ..., + pass_fds: Collection[int] = (), # where the *real* keyword only args start text: bool | None = None, encoding: str | None = None, @@ -2330,7 +2330,7 @@ class Popen(Generic[AnyStr]): creationflags: int = 0, restore_signals: bool = True, start_new_session: bool = False, - pass_fds: Collection[int] = ..., + pass_fds: Collection[int] = (), *, text: Literal[True], encoding: str | None = None, @@ -2359,7 +2359,7 @@ class Popen(Generic[AnyStr]): creationflags: int = 0, restore_signals: bool = True, start_new_session: bool = False, - pass_fds: Collection[int] = ..., + pass_fds: Collection[int] = (), *, text: Literal[None, False] = None, encoding: None = None, @@ -2388,7 +2388,7 @@ class Popen(Generic[AnyStr]): creationflags: int = 0, restore_signals: bool = True, start_new_session: bool = False, - pass_fds: Collection[int] = ..., + pass_fds: Collection[int] = (), *, text: bool | None = None, encoding: str | None = None, @@ -2418,7 +2418,7 @@ class Popen(Generic[AnyStr]): creationflags: int = 0, restore_signals: bool = True, start_new_session: bool = False, - pass_fds: Collection[int] = ..., + pass_fds: Collection[int] = (), *, text: bool | None = None, encoding: str, @@ -2443,7 +2443,7 @@ class Popen(Generic[AnyStr]): creationflags: int = 0, restore_signals: bool = True, start_new_session: bool = False, - pass_fds: Collection[int] = ..., + pass_fds: Collection[int] = (), *, text: bool | None = None, encoding: str | None = None, @@ -2469,7 +2469,7 @@ class Popen(Generic[AnyStr]): creationflags: int = 0, restore_signals: bool = True, start_new_session: bool = False, - pass_fds: Collection[int] = ..., + pass_fds: Collection[int] = (), # where the *real* keyword only args start text: bool | None = None, encoding: str | None = None, @@ -2494,7 +2494,7 @@ class Popen(Generic[AnyStr]): creationflags: int = 0, restore_signals: bool = True, start_new_session: bool = False, - pass_fds: Collection[int] = ..., + pass_fds: Collection[int] = (), *, text: Literal[True], encoding: str | None = None, @@ -2519,7 +2519,7 @@ class Popen(Generic[AnyStr]): creationflags: int = 0, restore_signals: bool = True, 
start_new_session: bool = False, - pass_fds: Collection[int] = ..., + pass_fds: Collection[int] = (), *, text: Literal[None, False] = None, encoding: None = None, @@ -2544,7 +2544,7 @@ class Popen(Generic[AnyStr]): creationflags: int = 0, restore_signals: bool = True, start_new_session: bool = False, - pass_fds: Collection[int] = ..., + pass_fds: Collection[int] = (), *, text: bool | None = None, encoding: str | None = None, diff --git a/mypy/typeshed/stdlib/sys.pyi b/mypy/typeshed/stdlib/sys.pyi index 6e97fbb328b2..786db72c78ec 100644 --- a/mypy/typeshed/stdlib/sys.pyi +++ b/mypy/typeshed/stdlib/sys.pyi @@ -6,7 +6,7 @@ from importlib.abc import PathEntryFinder from importlib.machinery import ModuleSpec from io import TextIOWrapper from types import FrameType, ModuleType, TracebackType -from typing import Any, NoReturn, Protocol, TextIO, TypeVar, overload +from typing import Any, NoReturn, Protocol, TextIO, TypeVar from typing_extensions import Final, Literal, TypeAlias, final _T = TypeVar("_T") @@ -201,6 +201,20 @@ class _int_info(structseq[int], tuple[int, int, int, int]): @property def str_digits_check_threshold(self) -> int: ... +_ThreadInfoName: TypeAlias = Literal["nt", "pthread", "pthread-stubs", "solaris"] +_ThreadInfoLock: TypeAlias = Literal["semaphore", "mutex+cond"] | None + +@final +class _thread_info(_UninstantiableStructseq, tuple[_ThreadInfoName, _ThreadInfoLock, str | None]): + @property + def name(self) -> _ThreadInfoName: ... + @property + def lock(self) -> _ThreadInfoLock: ... + @property + def version(self) -> str | None: ... + +thread_info: _thread_info + @final class _version_info(_UninstantiableStructseq, tuple[int, int, int, str, int]): @property @@ -239,10 +253,7 @@ def getfilesystemencoding() -> str: ... def getfilesystemencodeerrors() -> str: ... def getrefcount(__object: Any) -> int: ... def getrecursionlimit() -> int: ... -@overload -def getsizeof(obj: object) -> int: ... -@overload -def getsizeof(obj: object, default: int) -> int: ... +def getsizeof(obj: object, default: int = ...) -> int: ... def getswitchinterval() -> float: ... def getprofile() -> ProfileFunction | None: ... def setprofile(profilefunc: ProfileFunction | None) -> None: ... diff --git a/mypy/typeshed/stdlib/tempfile.pyi b/mypy/typeshed/stdlib/tempfile.pyi index dbff6d632d02..cd27e91fbc75 100644 --- a/mypy/typeshed/stdlib/tempfile.pyi +++ b/mypy/typeshed/stdlib/tempfile.pyi @@ -1,6 +1,6 @@ import io import sys -from _typeshed import BytesPath, GenericPath, StrPath, WriteableBuffer +from _typeshed import BytesPath, GenericPath, ReadableBuffer, StrPath, WriteableBuffer from collections.abc import Iterable, Iterator from types import TracebackType from typing import IO, Any, AnyStr, Generic, overload @@ -215,7 +215,17 @@ class _TemporaryFileWrapper(Generic[AnyStr], IO[AnyStr]): def tell(self) -> int: ... def truncate(self, size: int | None = ...) -> int: ... def writable(self) -> bool: ... + @overload + def write(self: _TemporaryFileWrapper[str], s: str) -> int: ... + @overload + def write(self: _TemporaryFileWrapper[bytes], s: ReadableBuffer) -> int: ... + @overload def write(self, s: AnyStr) -> int: ... + @overload + def writelines(self: _TemporaryFileWrapper[str], lines: Iterable[str]) -> None: ... + @overload + def writelines(self: _TemporaryFileWrapper[bytes], lines: Iterable[ReadableBuffer]) -> None: ... + @overload def writelines(self, lines: Iterable[AnyStr]) -> None: ... 
if sys.version_info >= (3, 11): @@ -392,8 +402,18 @@ class SpooledTemporaryFile(IO[AnyStr], _SpooledTemporaryFileBase): def seek(self, offset: int, whence: int = ...) -> int: ... def tell(self) -> int: ... def truncate(self, size: int | None = None) -> None: ... # type: ignore[override] + @overload + def write(self: SpooledTemporaryFile[str], s: str) -> int: ... + @overload + def write(self: SpooledTemporaryFile[bytes], s: ReadableBuffer) -> int: ... + @overload def write(self, s: AnyStr) -> int: ... - def writelines(self, iterable: Iterable[AnyStr]) -> None: ... # type: ignore[override] + @overload + def writelines(self: SpooledTemporaryFile[str], iterable: Iterable[str]) -> None: ... + @overload + def writelines(self: SpooledTemporaryFile[bytes], iterable: Iterable[ReadableBuffer]) -> None: ... + @overload + def writelines(self, iterable: Iterable[AnyStr]) -> None: ... def __iter__(self) -> Iterator[AnyStr]: ... # type: ignore[override] # These exist at runtime only on 3.11+. def readable(self) -> bool: ... diff --git a/mypy/typeshed/stdlib/threading.pyi b/mypy/typeshed/stdlib/threading.pyi index c017978808dd..6275e4552630 100644 --- a/mypy/typeshed/stdlib/threading.pyi +++ b/mypy/typeshed/stdlib/threading.pyi @@ -78,7 +78,7 @@ class Thread: group: None = None, target: Callable[..., object] | None = None, name: str | None = None, - args: Iterable[Any] = ..., + args: Iterable[Any] = (), kwargs: Mapping[str, Any] | None = None, *, daemon: bool | None = None, diff --git a/mypy/typeshed/stdlib/tkinter/__init__.pyi b/mypy/typeshed/stdlib/tkinter/__init__.pyi index 7b4b06be4ecb..3291b0c9dd98 100644 --- a/mypy/typeshed/stdlib/tkinter/__init__.pyi +++ b/mypy/typeshed/stdlib/tkinter/__init__.pyi @@ -173,10 +173,7 @@ EXCEPTION = _tkinter.EXCEPTION # Some widgets have an option named -compound that accepts different values # than the _Compound defined here. Many other options have similar things. _Anchor: TypeAlias = Literal["nw", "n", "ne", "w", "center", "e", "sw", "s", "se"] # manual page: Tk_GetAnchor -_Bitmap: TypeAlias = str # manual page: Tk_GetBitmap _ButtonCommand: TypeAlias = str | Callable[[], Any] # accepts string of tcl code, return value is returned from Button.invoke() -_CanvasItemId: TypeAlias = int -_Color: TypeAlias = str # typically '#rrggbb', '#rgb' or color names. _Compound: TypeAlias = Literal["top", "left", "center", "right", "bottom", "none"] # -compound in manual page named 'options' # manual page: Tk_GetCursor _Cursor: TypeAlias = str | tuple[str] | tuple[str, str] | tuple[str, str, str] | tuple[str, str, str, str] @@ -405,7 +402,7 @@ class Misc: def winfo_pointery(self) -> int: ... def winfo_reqheight(self) -> int: ... def winfo_reqwidth(self) -> int: ... - def winfo_rgb(self, color: _Color) -> tuple[int, int, int]: ... + def winfo_rgb(self, color: str) -> tuple[int, int, int]: ... def winfo_rootx(self) -> int: ... def winfo_rooty(self) -> int: ... def winfo_screen(self) -> str: ... 
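Reviewer note: the `_Color` alias is being dropped in favor of plain `str`, so `winfo_rgb` above now takes any Tk color string (a color name or a `#rrggbb`/`#rgb` value). A small sketch of the runtime behaviour, assuming a display is available; the example colors are illustrative:

```python
import tkinter

root = tkinter.Tk()
# Tk reports 16-bit values per channel.
print(root.winfo_rgb("red"))      # (65535, 0, 0)
print(root.winfo_rgb("#00ff00"))  # (0, 65535, 0)
root.destroy()
```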
@@ -504,7 +501,7 @@ class Misc: def grid_columnconfigure( self, index: _GridIndex, - cnf: _GridIndexInfo = ..., + cnf: _GridIndexInfo = {}, *, minsize: _ScreenUnits = ..., pad: _ScreenUnits = ..., @@ -514,7 +511,7 @@ class Misc: def grid_rowconfigure( self, index: _GridIndex, - cnf: _GridIndexInfo = ..., + cnf: _GridIndexInfo = {}, *, minsize: _ScreenUnits = ..., pad: _ScreenUnits = ..., @@ -745,15 +742,15 @@ class Tk(Misc, Wm): self, cnf: dict[str, Any] | None = None, *, - background: _Color = ..., + background: str = ..., bd: _ScreenUnits = ..., - bg: _Color = ..., + bg: str = ..., border: _ScreenUnits = ..., borderwidth: _ScreenUnits = ..., cursor: _Cursor = ..., height: _ScreenUnits = ..., - highlightbackground: _Color = ..., - highlightcolor: _Color = ..., + highlightbackground: str = ..., + highlightcolor: str = ..., highlightthickness: _ScreenUnits = ..., menu: Menu = ..., padx: _ScreenUnits = ..., @@ -825,7 +822,7 @@ class Pack: # replaced by **kwargs. def pack_configure( self, - cnf: Mapping[str, Any] | None = ..., + cnf: Mapping[str, Any] | None = {}, *, after: Misc = ..., anchor: _Anchor = ..., @@ -861,7 +858,7 @@ class _PlaceInfo(_InMiscNonTotal): # empty dict if widget hasn't been placed class Place: def place_configure( self, - cnf: Mapping[str, Any] | None = ..., + cnf: Mapping[str, Any] | None = {}, *, anchor: _Anchor = ..., bordermode: Literal["inside", "outside", "ignore"] = ..., @@ -896,7 +893,7 @@ class _GridInfo(_InMiscNonTotal): # empty dict if widget hasn't been gridded class Grid: def grid_configure( self, - cnf: Mapping[str, Any] | None = ..., + cnf: Mapping[str, Any] | None = {}, *, column: int = ..., columnspan: int = ..., @@ -920,7 +917,7 @@ class Grid: class BaseWidget(Misc): master: Misc widgetName: Incomplete - def __init__(self, master, widgetName, cnf=..., kw=..., extra=...) -> None: ... + def __init__(self, master, widgetName, cnf={}, kw={}, extra=()) -> None: ... def destroy(self) -> None: ... # This class represents any widget except Toplevel or Tk. 
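Reviewer note: the geometry-manager methods above (`grid_columnconfigure`, `grid_rowconfigure`, `pack_configure`, `place_configure`, `grid_configure`) now spell out their real `cnf` default (`{}`) instead of `...`; in practice they are called with keyword options only. A sketch under that assumption (widget names and option values are illustrative, and a display is required):

```python
import tkinter

root = tkinter.Tk()
# cnf is left at its default {}; options are passed as keywords.
root.grid_columnconfigure(0, weight=1, minsize=100)

label = tkinter.Label(root, text="hello")
label.grid(row=0, column=0, sticky="nsew", padx=5)  # grid() is grid_configure()

root.update_idletasks()
root.destroy()
```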
@@ -947,11 +944,11 @@ class Toplevel(BaseWidget, Wm): def __init__( self, master: Misc | None = None, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = {}, *, - background: _Color = ..., + background: str = ..., bd: _ScreenUnits = ..., - bg: _Color = ..., + bg: str = ..., border: _ScreenUnits = ..., borderwidth: _ScreenUnits = ..., class_: str = ..., @@ -959,8 +956,8 @@ class Toplevel(BaseWidget, Wm): container: bool = ..., cursor: _Cursor = ..., height: _ScreenUnits = ..., - highlightbackground: _Color = ..., - highlightcolor: _Color = ..., + highlightbackground: str = ..., + highlightcolor: str = ..., highlightthickness: _ScreenUnits = ..., menu: Menu = ..., name: str = ..., @@ -978,15 +975,15 @@ class Toplevel(BaseWidget, Wm): self, cnf: dict[str, Any] | None = None, *, - background: _Color = ..., + background: str = ..., bd: _ScreenUnits = ..., - bg: _Color = ..., + bg: str = ..., border: _ScreenUnits = ..., borderwidth: _ScreenUnits = ..., cursor: _Cursor = ..., height: _ScreenUnits = ..., - highlightbackground: _Color = ..., - highlightcolor: _Color = ..., + highlightbackground: str = ..., + highlightcolor: str = ..., highlightthickness: _ScreenUnits = ..., menu: Menu = ..., padx: _ScreenUnits = ..., @@ -1003,30 +1000,30 @@ class Button(Widget): def __init__( self, master: Misc | None = None, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = {}, *, - activebackground: _Color = ..., - activeforeground: _Color = ..., + activebackground: str = ..., + activeforeground: str = ..., anchor: _Anchor = ..., - background: _Color = ..., + background: str = ..., bd: _ScreenUnits = ..., # same as borderwidth - bg: _Color = ..., # same as background - bitmap: _Bitmap = ..., + bg: str = ..., # same as background + bitmap: str = ..., border: _ScreenUnits = ..., # same as borderwidth borderwidth: _ScreenUnits = ..., command: _ButtonCommand = ..., compound: _Compound = ..., cursor: _Cursor = ..., default: Literal["normal", "active", "disabled"] = ..., - disabledforeground: _Color = ..., - fg: _Color = ..., # same as foreground + disabledforeground: str = ..., + fg: str = ..., # same as foreground font: _FontDescription = ..., - foreground: _Color = ..., + foreground: str = ..., # width and height must be int for buttons containing just text, but # ints are also valid _ScreenUnits height: _ScreenUnits = ..., - highlightbackground: _Color = ..., - highlightcolor: _Color = ..., + highlightbackground: str = ..., + highlightcolor: str = ..., highlightthickness: _ScreenUnits = ..., image: _ImageSpec = ..., justify: Literal["left", "center", "right"] = ..., @@ -1053,26 +1050,26 @@ class Button(Widget): self, cnf: dict[str, Any] | None = None, *, - activebackground: _Color = ..., - activeforeground: _Color = ..., + activebackground: str = ..., + activeforeground: str = ..., anchor: _Anchor = ..., - background: _Color = ..., + background: str = ..., bd: _ScreenUnits = ..., - bg: _Color = ..., - bitmap: _Bitmap = ..., + bg: str = ..., + bitmap: str = ..., border: _ScreenUnits = ..., borderwidth: _ScreenUnits = ..., command: _ButtonCommand = ..., compound: _Compound = ..., cursor: _Cursor = ..., default: Literal["normal", "active", "disabled"] = ..., - disabledforeground: _Color = ..., - fg: _Color = ..., + disabledforeground: str = ..., + fg: str = ..., font: _FontDescription = ..., - foreground: _Color = ..., + foreground: str = ..., height: _ScreenUnits = ..., - highlightbackground: _Color = ..., - highlightcolor: _Color = ..., + highlightbackground: str = ..., + 
highlightcolor: str = ..., highlightthickness: _ScreenUnits = ..., image: _ImageSpec = ..., justify: Literal["left", "center", "right"] = ..., @@ -1100,11 +1097,11 @@ class Canvas(Widget, XView, YView): def __init__( self, master: Misc | None = None, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = {}, *, - background: _Color = ..., + background: str = ..., bd: _ScreenUnits = ..., - bg: _Color = ..., + bg: str = ..., border: _ScreenUnits = ..., borderwidth: _ScreenUnits = ..., closeenough: float = ..., @@ -1113,10 +1110,10 @@ class Canvas(Widget, XView, YView): # canvas manual page has a section named COORDINATES, and the first # part of it describes _ScreenUnits. height: _ScreenUnits = ..., - highlightbackground: _Color = ..., - highlightcolor: _Color = ..., + highlightbackground: str = ..., + highlightcolor: str = ..., highlightthickness: _ScreenUnits = ..., - insertbackground: _Color = ..., + insertbackground: str = ..., insertborderwidth: _ScreenUnits = ..., insertofftime: int = ..., insertontime: int = ..., @@ -1127,9 +1124,9 @@ class Canvas(Widget, XView, YView): # Setting scrollregion to None doesn't reset it back to empty, # but setting it to () does. scrollregion: tuple[_ScreenUnits, _ScreenUnits, _ScreenUnits, _ScreenUnits] | tuple[()] = ..., - selectbackground: _Color = ..., + selectbackground: str = ..., selectborderwidth: _ScreenUnits = ..., - selectforeground: _Color = ..., + selectforeground: str = ..., # man page says that state can be 'hidden', but it can't state: Literal["normal", "disabled"] = ..., takefocus: _TakeFocusValue = ..., @@ -1144,19 +1141,19 @@ class Canvas(Widget, XView, YView): self, cnf: dict[str, Any] | None = None, *, - background: _Color = ..., + background: str = ..., bd: _ScreenUnits = ..., - bg: _Color = ..., + bg: str = ..., border: _ScreenUnits = ..., borderwidth: _ScreenUnits = ..., closeenough: float = ..., confine: bool = ..., cursor: _Cursor = ..., height: _ScreenUnits = ..., - highlightbackground: _Color = ..., - highlightcolor: _Color = ..., + highlightbackground: str = ..., + highlightcolor: str = ..., highlightthickness: _ScreenUnits = ..., - insertbackground: _Color = ..., + insertbackground: str = ..., insertborderwidth: _ScreenUnits = ..., insertofftime: int = ..., insertontime: int = ..., @@ -1164,9 +1161,9 @@ class Canvas(Widget, XView, YView): offset=..., # undocumented relief: _Relief = ..., scrollregion: tuple[_ScreenUnits, _ScreenUnits, _ScreenUnits, _ScreenUnits] | tuple[()] = ..., - selectbackground: _Color = ..., + selectbackground: str = ..., selectborderwidth: _ScreenUnits = ..., - selectforeground: _Color = ..., + selectforeground: str = ..., state: Literal["normal", "disabled"] = ..., takefocus: _TakeFocusValue = ..., width: _ScreenUnits = ..., @@ -1179,38 +1176,31 @@ class Canvas(Widget, XView, YView): def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure def addtag(self, *args): ... # internal method - def addtag_above(self, newtag: str, tagOrId: str | _CanvasItemId) -> None: ... + def addtag_above(self, newtag: str, tagOrId: str | int) -> None: ... def addtag_all(self, newtag: str) -> None: ... - def addtag_below(self, newtag: str, tagOrId: str | _CanvasItemId) -> None: ... + def addtag_below(self, newtag: str, tagOrId: str | int) -> None: ... 
def addtag_closest( - self, - newtag: str, - x: _ScreenUnits, - y: _ScreenUnits, - halo: _ScreenUnits | None = None, - start: str | _CanvasItemId | None = None, + self, newtag: str, x: _ScreenUnits, y: _ScreenUnits, halo: _ScreenUnits | None = None, start: str | int | None = None ) -> None: ... def addtag_enclosed(self, newtag: str, x1: _ScreenUnits, y1: _ScreenUnits, x2: _ScreenUnits, y2: _ScreenUnits) -> None: ... def addtag_overlapping(self, newtag: str, x1: _ScreenUnits, y1: _ScreenUnits, x2: _ScreenUnits, y2: _ScreenUnits) -> None: ... - def addtag_withtag(self, newtag: str, tagOrId: str | _CanvasItemId) -> None: ... + def addtag_withtag(self, newtag: str, tagOrId: str | int) -> None: ... def find(self, *args): ... # internal method - def find_above(self, tagOrId: str | _CanvasItemId) -> tuple[_CanvasItemId, ...]: ... - def find_all(self) -> tuple[_CanvasItemId, ...]: ... - def find_below(self, tagOrId: str | _CanvasItemId) -> tuple[_CanvasItemId, ...]: ... + def find_above(self, tagOrId: str | int) -> tuple[int, ...]: ... + def find_all(self) -> tuple[int, ...]: ... + def find_below(self, tagOrId: str | int) -> tuple[int, ...]: ... def find_closest( - self, x: _ScreenUnits, y: _ScreenUnits, halo: _ScreenUnits | None = None, start: str | _CanvasItemId | None = None - ) -> tuple[_CanvasItemId, ...]: ... - def find_enclosed( - self, x1: _ScreenUnits, y1: _ScreenUnits, x2: _ScreenUnits, y2: _ScreenUnits - ) -> tuple[_CanvasItemId, ...]: ... - def find_overlapping(self, x1: _ScreenUnits, y1: _ScreenUnits, x2: _ScreenUnits, y2: float) -> tuple[_CanvasItemId, ...]: ... - def find_withtag(self, tagOrId: str | _CanvasItemId) -> tuple[_CanvasItemId, ...]: ... + self, x: _ScreenUnits, y: _ScreenUnits, halo: _ScreenUnits | None = None, start: str | int | None = None + ) -> tuple[int, ...]: ... + def find_enclosed(self, x1: _ScreenUnits, y1: _ScreenUnits, x2: _ScreenUnits, y2: _ScreenUnits) -> tuple[int, ...]: ... + def find_overlapping(self, x1: _ScreenUnits, y1: _ScreenUnits, x2: _ScreenUnits, y2: float) -> tuple[int, ...]: ... + def find_withtag(self, tagOrId: str | int) -> tuple[int, ...]: ... # Incompatible with Misc.bbox(), tkinter violates LSP - def bbox(self, *args: str | _CanvasItemId) -> tuple[int, int, int, int]: ... # type: ignore[override] + def bbox(self, *args: str | int) -> tuple[int, int, int, int]: ... # type: ignore[override] @overload def tag_bind( self, - tagOrId: str | _CanvasItemId, + tagOrId: str | int, sequence: str | None = None, func: Callable[[Event[Canvas]], object] | None = None, add: Literal["", "+"] | bool | None = None, @@ -1220,22 +1210,22 @@ class Canvas(Widget, XView, YView): self, tagOrId: str | int, sequence: str | None, func: str, add: Literal["", "+"] | bool | None = None ) -> None: ... @overload - def tag_bind(self, tagOrId: str | _CanvasItemId, *, func: str, add: Literal["", "+"] | bool | None = None) -> None: ... - def tag_unbind(self, tagOrId: str | _CanvasItemId, sequence: str, funcid: str | None = None) -> None: ... + def tag_bind(self, tagOrId: str | int, *, func: str, add: Literal["", "+"] | bool | None = None) -> None: ... + def tag_unbind(self, tagOrId: str | int, sequence: str, funcid: str | None = None) -> None: ... def canvasx(self, screenx, gridspacing: Incomplete | None = None): ... def canvasy(self, screeny, gridspacing: Incomplete | None = None): ... @overload - def coords(self, __tagOrId: str | _CanvasItemId) -> list[float]: ... + def coords(self, __tagOrId: str | int) -> list[float]: ... 
@overload - def coords(self, __tagOrId: str | _CanvasItemId, __args: list[int] | list[float] | tuple[float, ...]) -> None: ... + def coords(self, __tagOrId: str | int, __args: list[int] | list[float] | tuple[float, ...]) -> None: ... @overload - def coords(self, __tagOrId: str | _CanvasItemId, __x1: float, __y1: float, *args: float) -> None: ... + def coords(self, __tagOrId: str | int, __x1: float, __y1: float, *args: float) -> None: ... # create_foo() methods accept coords as a list or tuple, or as separate arguments. # Lists and tuples can be flat as in [1, 2, 3, 4], or nested as in [(1, 2), (3, 4)]. # Keyword arguments should be the same in all overloads of each method. - def create_arc(self, *args, **kw) -> _CanvasItemId: ... - def create_bitmap(self, *args, **kw) -> _CanvasItemId: ... - def create_image(self, *args, **kw) -> _CanvasItemId: ... + def create_arc(self, *args, **kw) -> int: ... + def create_bitmap(self, *args, **kw) -> int: ... + def create_image(self, *args, **kw) -> int: ... @overload def create_line( self, @@ -1244,58 +1234,58 @@ class Canvas(Widget, XView, YView): __x1: float, __y1: float, *, - activedash: str | list[int] | tuple[int, ...] = ..., - activefill: _Color = ..., + activedash: str | int | list[int] | tuple[int, ...] = ..., + activefill: str = ..., activestipple: str = ..., activewidth: _ScreenUnits = ..., arrow: Literal["first", "last", "both"] = ..., arrowshape: tuple[float, float, float] = ..., capstyle: Literal["round", "projecting", "butt"] = ..., - dash: str | list[int] | tuple[int, ...] = ..., + dash: str | int | list[int] | tuple[int, ...] = ..., dashoffset: _ScreenUnits = ..., - disableddash: str | list[int] | tuple[int, ...] = ..., - disabledfill: _Color = ..., - disabledstipple: _Bitmap = ..., + disableddash: str | int | list[int] | tuple[int, ...] = ..., + disabledfill: str = ..., + disabledstipple: str = ..., disabledwidth: _ScreenUnits = ..., - fill: _Color = ..., + fill: str = ..., joinstyle: Literal["round", "bevel", "miter"] = ..., offset: _ScreenUnits = ..., smooth: bool = ..., splinesteps: float = ..., state: Literal["normal", "active", "disabled"] = ..., - stipple: _Bitmap = ..., + stipple: str = ..., tags: str | list[str] | tuple[str, ...] = ..., width: _ScreenUnits = ..., - ) -> _CanvasItemId: ... + ) -> int: ... @overload def create_line( self, __xy_pair_0: tuple[float, float], __xy_pair_1: tuple[float, float], *, - activedash: str | list[int] | tuple[int, ...] = ..., - activefill: _Color = ..., + activedash: str | int | list[int] | tuple[int, ...] = ..., + activefill: str = ..., activestipple: str = ..., activewidth: _ScreenUnits = ..., arrow: Literal["first", "last", "both"] = ..., arrowshape: tuple[float, float, float] = ..., capstyle: Literal["round", "projecting", "butt"] = ..., - dash: str | list[int] | tuple[int, ...] = ..., + dash: str | int | list[int] | tuple[int, ...] = ..., dashoffset: _ScreenUnits = ..., - disableddash: str | list[int] | tuple[int, ...] = ..., - disabledfill: _Color = ..., - disabledstipple: _Bitmap = ..., + disableddash: str | int | list[int] | tuple[int, ...] = ..., + disabledfill: str = ..., + disabledstipple: str = ..., disabledwidth: _ScreenUnits = ..., - fill: _Color = ..., + fill: str = ..., joinstyle: Literal["round", "bevel", "miter"] = ..., offset: _ScreenUnits = ..., smooth: bool = ..., splinesteps: float = ..., state: Literal["normal", "active", "disabled"] = ..., - stipple: _Bitmap = ..., + stipple: str = ..., tags: str | list[str] | tuple[str, ...] 
= ..., width: _ScreenUnits = ..., - ) -> _CanvasItemId: ... + ) -> int: ... @overload def create_line( self, @@ -1308,29 +1298,29 @@ class Canvas(Widget, XView, YView): | list[tuple[float, float]] ), *, - activedash: str | list[int] | tuple[int, ...] = ..., - activefill: _Color = ..., + activedash: str | int | list[int] | tuple[int, ...] = ..., + activefill: str = ..., activestipple: str = ..., activewidth: _ScreenUnits = ..., arrow: Literal["first", "last", "both"] = ..., arrowshape: tuple[float, float, float] = ..., capstyle: Literal["round", "projecting", "butt"] = ..., - dash: str | list[int] | tuple[int, ...] = ..., + dash: str | int | list[int] | tuple[int, ...] = ..., dashoffset: _ScreenUnits = ..., - disableddash: str | list[int] | tuple[int, ...] = ..., - disabledfill: _Color = ..., - disabledstipple: _Bitmap = ..., + disableddash: str | int | list[int] | tuple[int, ...] = ..., + disabledfill: str = ..., + disabledstipple: str = ..., disabledwidth: _ScreenUnits = ..., - fill: _Color = ..., + fill: str = ..., joinstyle: Literal["round", "bevel", "miter"] = ..., offset: _ScreenUnits = ..., smooth: bool = ..., splinesteps: float = ..., state: Literal["normal", "active", "disabled"] = ..., - stipple: _Bitmap = ..., + stipple: str = ..., tags: str | list[str] | tuple[str, ...] = ..., width: _ScreenUnits = ..., - ) -> _CanvasItemId: ... + ) -> int: ... @overload def create_oval( self, @@ -1339,60 +1329,60 @@ class Canvas(Widget, XView, YView): __x1: float, __y1: float, *, - activedash: str | list[int] | tuple[int, ...] = ..., - activefill: _Color = ..., - activeoutline: _Color = ..., - activeoutlinestipple: _Color = ..., + activedash: str | int | list[int] | tuple[int, ...] = ..., + activefill: str = ..., + activeoutline: str = ..., + activeoutlinestipple: str = ..., activestipple: str = ..., activewidth: _ScreenUnits = ..., - dash: str | list[int] | tuple[int, ...] = ..., + dash: str | int | list[int] | tuple[int, ...] = ..., dashoffset: _ScreenUnits = ..., - disableddash: str | list[int] | tuple[int, ...] = ..., - disabledfill: _Color = ..., - disabledoutline: _Color = ..., - disabledoutlinestipple: _Color = ..., - disabledstipple: _Bitmap = ..., + disableddash: str | int | list[int] | tuple[int, ...] = ..., + disabledfill: str = ..., + disabledoutline: str = ..., + disabledoutlinestipple: str = ..., + disabledstipple: str = ..., disabledwidth: _ScreenUnits = ..., - fill: _Color = ..., + fill: str = ..., offset: _ScreenUnits = ..., - outline: _Color = ..., + outline: str = ..., outlineoffset: _ScreenUnits = ..., - outlinestipple: _Bitmap = ..., + outlinestipple: str = ..., state: Literal["normal", "active", "disabled"] = ..., - stipple: _Bitmap = ..., + stipple: str = ..., tags: str | list[str] | tuple[str, ...] = ..., width: _ScreenUnits = ..., - ) -> _CanvasItemId: ... + ) -> int: ... @overload def create_oval( self, __xy_pair_0: tuple[float, float], __xy_pair_1: tuple[float, float], *, - activedash: str | list[int] | tuple[int, ...] = ..., - activefill: _Color = ..., - activeoutline: _Color = ..., - activeoutlinestipple: _Color = ..., + activedash: str | int | list[int] | tuple[int, ...] = ..., + activefill: str = ..., + activeoutline: str = ..., + activeoutlinestipple: str = ..., activestipple: str = ..., activewidth: _ScreenUnits = ..., - dash: str | list[int] | tuple[int, ...] = ..., + dash: str | int | list[int] | tuple[int, ...] = ..., dashoffset: _ScreenUnits = ..., - disableddash: str | list[int] | tuple[int, ...] 
= ..., - disabledfill: _Color = ..., - disabledoutline: _Color = ..., - disabledoutlinestipple: _Color = ..., - disabledstipple: _Bitmap = ..., + disableddash: str | int | list[int] | tuple[int, ...] = ..., + disabledfill: str = ..., + disabledoutline: str = ..., + disabledoutlinestipple: str = ..., + disabledstipple: str = ..., disabledwidth: _ScreenUnits = ..., - fill: _Color = ..., + fill: str = ..., offset: _ScreenUnits = ..., - outline: _Color = ..., + outline: str = ..., outlineoffset: _ScreenUnits = ..., - outlinestipple: _Bitmap = ..., + outlinestipple: str = ..., state: Literal["normal", "active", "disabled"] = ..., - stipple: _Bitmap = ..., + stipple: str = ..., tags: str | list[str] | tuple[str, ...] = ..., width: _ScreenUnits = ..., - ) -> _CanvasItemId: ... + ) -> int: ... @overload def create_oval( self, @@ -1405,30 +1395,30 @@ class Canvas(Widget, XView, YView): | list[tuple[float, float]] ), *, - activedash: str | list[int] | tuple[int, ...] = ..., - activefill: _Color = ..., - activeoutline: _Color = ..., - activeoutlinestipple: _Color = ..., + activedash: str | int | list[int] | tuple[int, ...] = ..., + activefill: str = ..., + activeoutline: str = ..., + activeoutlinestipple: str = ..., activestipple: str = ..., activewidth: _ScreenUnits = ..., - dash: str | list[int] | tuple[int, ...] = ..., + dash: str | int | list[int] | tuple[int, ...] = ..., dashoffset: _ScreenUnits = ..., - disableddash: str | list[int] | tuple[int, ...] = ..., - disabledfill: _Color = ..., - disabledoutline: _Color = ..., - disabledoutlinestipple: _Color = ..., - disabledstipple: _Bitmap = ..., + disableddash: str | int | list[int] | tuple[int, ...] = ..., + disabledfill: str = ..., + disabledoutline: str = ..., + disabledoutlinestipple: str = ..., + disabledstipple: str = ..., disabledwidth: _ScreenUnits = ..., - fill: _Color = ..., + fill: str = ..., offset: _ScreenUnits = ..., - outline: _Color = ..., + outline: str = ..., outlineoffset: _ScreenUnits = ..., - outlinestipple: _Bitmap = ..., + outlinestipple: str = ..., state: Literal["normal", "active", "disabled"] = ..., - stipple: _Bitmap = ..., + stipple: str = ..., tags: str | list[str] | tuple[str, ...] = ..., width: _ScreenUnits = ..., - ) -> _CanvasItemId: ... + ) -> int: ... @overload def create_polygon( self, @@ -1437,66 +1427,66 @@ class Canvas(Widget, XView, YView): __x1: float, __y1: float, *xy_pairs: float, - activedash: str | list[int] | tuple[int, ...] = ..., - activefill: _Color = ..., - activeoutline: _Color = ..., - activeoutlinestipple: _Color = ..., + activedash: str | int | list[int] | tuple[int, ...] = ..., + activefill: str = ..., + activeoutline: str = ..., + activeoutlinestipple: str = ..., activestipple: str = ..., activewidth: _ScreenUnits = ..., - dash: str | list[int] | tuple[int, ...] = ..., + dash: str | int | list[int] | tuple[int, ...] = ..., dashoffset: _ScreenUnits = ..., - disableddash: str | list[int] | tuple[int, ...] = ..., - disabledfill: _Color = ..., - disabledoutline: _Color = ..., - disabledoutlinestipple: _Color = ..., - disabledstipple: _Bitmap = ..., + disableddash: str | int | list[int] | tuple[int, ...] 
= ..., + disabledfill: str = ..., + disabledoutline: str = ..., + disabledoutlinestipple: str = ..., + disabledstipple: str = ..., disabledwidth: _ScreenUnits = ..., - fill: _Color = ..., + fill: str = ..., joinstyle: Literal["round", "bevel", "miter"] = ..., offset: _ScreenUnits = ..., - outline: _Color = ..., + outline: str = ..., outlineoffset: _ScreenUnits = ..., - outlinestipple: _Bitmap = ..., + outlinestipple: str = ..., smooth: bool = ..., splinesteps: float = ..., state: Literal["normal", "active", "disabled"] = ..., - stipple: _Bitmap = ..., + stipple: str = ..., tags: str | list[str] | tuple[str, ...] = ..., width: _ScreenUnits = ..., - ) -> _CanvasItemId: ... + ) -> int: ... @overload def create_polygon( self, __xy_pair_0: tuple[float, float], __xy_pair_1: tuple[float, float], *xy_pairs: tuple[float, float], - activedash: str | list[int] | tuple[int, ...] = ..., - activefill: _Color = ..., - activeoutline: _Color = ..., - activeoutlinestipple: _Color = ..., + activedash: str | int | list[int] | tuple[int, ...] = ..., + activefill: str = ..., + activeoutline: str = ..., + activeoutlinestipple: str = ..., activestipple: str = ..., activewidth: _ScreenUnits = ..., - dash: str | list[int] | tuple[int, ...] = ..., + dash: str | int | list[int] | tuple[int, ...] = ..., dashoffset: _ScreenUnits = ..., - disableddash: str | list[int] | tuple[int, ...] = ..., - disabledfill: _Color = ..., - disabledoutline: _Color = ..., - disabledoutlinestipple: _Color = ..., - disabledstipple: _Bitmap = ..., + disableddash: str | int | list[int] | tuple[int, ...] = ..., + disabledfill: str = ..., + disabledoutline: str = ..., + disabledoutlinestipple: str = ..., + disabledstipple: str = ..., disabledwidth: _ScreenUnits = ..., - fill: _Color = ..., + fill: str = ..., joinstyle: Literal["round", "bevel", "miter"] = ..., offset: _ScreenUnits = ..., - outline: _Color = ..., + outline: str = ..., outlineoffset: _ScreenUnits = ..., - outlinestipple: _Bitmap = ..., + outlinestipple: str = ..., smooth: bool = ..., splinesteps: float = ..., state: Literal["normal", "active", "disabled"] = ..., - stipple: _Bitmap = ..., + stipple: str = ..., tags: str | list[str] | tuple[str, ...] = ..., width: _ScreenUnits = ..., - ) -> _CanvasItemId: ... + ) -> int: ... @overload def create_polygon( self, @@ -1509,33 +1499,33 @@ class Canvas(Widget, XView, YView): | list[tuple[float, float]] ), *, - activedash: str | list[int] | tuple[int, ...] = ..., - activefill: _Color = ..., - activeoutline: _Color = ..., - activeoutlinestipple: _Color = ..., + activedash: str | int | list[int] | tuple[int, ...] = ..., + activefill: str = ..., + activeoutline: str = ..., + activeoutlinestipple: str = ..., activestipple: str = ..., activewidth: _ScreenUnits = ..., - dash: str | list[int] | tuple[int, ...] = ..., + dash: str | int | list[int] | tuple[int, ...] = ..., dashoffset: _ScreenUnits = ..., - disableddash: str | list[int] | tuple[int, ...] = ..., - disabledfill: _Color = ..., - disabledoutline: _Color = ..., - disabledoutlinestipple: _Color = ..., - disabledstipple: _Bitmap = ..., + disableddash: str | int | list[int] | tuple[int, ...] 
= ..., + disabledfill: str = ..., + disabledoutline: str = ..., + disabledoutlinestipple: str = ..., + disabledstipple: str = ..., disabledwidth: _ScreenUnits = ..., - fill: _Color = ..., + fill: str = ..., joinstyle: Literal["round", "bevel", "miter"] = ..., offset: _ScreenUnits = ..., - outline: _Color = ..., + outline: str = ..., outlineoffset: _ScreenUnits = ..., - outlinestipple: _Bitmap = ..., + outlinestipple: str = ..., smooth: bool = ..., splinesteps: float = ..., state: Literal["normal", "active", "disabled"] = ..., - stipple: _Bitmap = ..., + stipple: str = ..., tags: str | list[str] | tuple[str, ...] = ..., width: _ScreenUnits = ..., - ) -> _CanvasItemId: ... + ) -> int: ... @overload def create_rectangle( self, @@ -1544,60 +1534,60 @@ class Canvas(Widget, XView, YView): __x1: float, __y1: float, *, - activedash: str | list[int] | tuple[int, ...] = ..., - activefill: _Color = ..., - activeoutline: _Color = ..., - activeoutlinestipple: _Color = ..., + activedash: str | int | list[int] | tuple[int, ...] = ..., + activefill: str = ..., + activeoutline: str = ..., + activeoutlinestipple: str = ..., activestipple: str = ..., activewidth: _ScreenUnits = ..., - dash: str | list[int] | tuple[int, ...] = ..., + dash: str | int | list[int] | tuple[int, ...] = ..., dashoffset: _ScreenUnits = ..., - disableddash: str | list[int] | tuple[int, ...] = ..., - disabledfill: _Color = ..., - disabledoutline: _Color = ..., - disabledoutlinestipple: _Color = ..., - disabledstipple: _Bitmap = ..., + disableddash: str | int | list[int] | tuple[int, ...] = ..., + disabledfill: str = ..., + disabledoutline: str = ..., + disabledoutlinestipple: str = ..., + disabledstipple: str = ..., disabledwidth: _ScreenUnits = ..., - fill: _Color = ..., + fill: str = ..., offset: _ScreenUnits = ..., - outline: _Color = ..., + outline: str = ..., outlineoffset: _ScreenUnits = ..., - outlinestipple: _Bitmap = ..., + outlinestipple: str = ..., state: Literal["normal", "active", "disabled"] = ..., - stipple: _Bitmap = ..., + stipple: str = ..., tags: str | list[str] | tuple[str, ...] = ..., width: _ScreenUnits = ..., - ) -> _CanvasItemId: ... + ) -> int: ... @overload def create_rectangle( self, __xy_pair_0: tuple[float, float], __xy_pair_1: tuple[float, float], *, - activedash: str | list[int] | tuple[int, ...] = ..., - activefill: _Color = ..., - activeoutline: _Color = ..., - activeoutlinestipple: _Color = ..., + activedash: str | int | list[int] | tuple[int, ...] = ..., + activefill: str = ..., + activeoutline: str = ..., + activeoutlinestipple: str = ..., activestipple: str = ..., activewidth: _ScreenUnits = ..., - dash: str | list[int] | tuple[int, ...] = ..., + dash: str | int | list[int] | tuple[int, ...] = ..., dashoffset: _ScreenUnits = ..., - disableddash: str | list[int] | tuple[int, ...] = ..., - disabledfill: _Color = ..., - disabledoutline: _Color = ..., - disabledoutlinestipple: _Color = ..., - disabledstipple: _Bitmap = ..., + disableddash: str | int | list[int] | tuple[int, ...] = ..., + disabledfill: str = ..., + disabledoutline: str = ..., + disabledoutlinestipple: str = ..., + disabledstipple: str = ..., disabledwidth: _ScreenUnits = ..., - fill: _Color = ..., + fill: str = ..., offset: _ScreenUnits = ..., - outline: _Color = ..., + outline: str = ..., outlineoffset: _ScreenUnits = ..., - outlinestipple: _Bitmap = ..., + outlinestipple: str = ..., state: Literal["normal", "active", "disabled"] = ..., - stipple: _Bitmap = ..., + stipple: str = ..., tags: str | list[str] | tuple[str, ...] 
= ..., width: _ScreenUnits = ..., - ) -> _CanvasItemId: ... + ) -> int: ... @overload def create_rectangle( self, @@ -1610,71 +1600,71 @@ class Canvas(Widget, XView, YView): | list[tuple[float, float]] ), *, - activedash: str | list[int] | tuple[int, ...] = ..., - activefill: _Color = ..., - activeoutline: _Color = ..., - activeoutlinestipple: _Color = ..., + activedash: str | int | list[int] | tuple[int, ...] = ..., + activefill: str = ..., + activeoutline: str = ..., + activeoutlinestipple: str = ..., activestipple: str = ..., activewidth: _ScreenUnits = ..., - dash: str | list[int] | tuple[int, ...] = ..., + dash: str | int | list[int] | tuple[int, ...] = ..., dashoffset: _ScreenUnits = ..., - disableddash: str | list[int] | tuple[int, ...] = ..., - disabledfill: _Color = ..., - disabledoutline: _Color = ..., - disabledoutlinestipple: _Color = ..., - disabledstipple: _Bitmap = ..., + disableddash: str | int | list[int] | tuple[int, ...] = ..., + disabledfill: str = ..., + disabledoutline: str = ..., + disabledoutlinestipple: str = ..., + disabledstipple: str = ..., disabledwidth: _ScreenUnits = ..., - fill: _Color = ..., + fill: str = ..., offset: _ScreenUnits = ..., - outline: _Color = ..., + outline: str = ..., outlineoffset: _ScreenUnits = ..., - outlinestipple: _Bitmap = ..., + outlinestipple: str = ..., state: Literal["normal", "active", "disabled"] = ..., - stipple: _Bitmap = ..., + stipple: str = ..., tags: str | list[str] | tuple[str, ...] = ..., width: _ScreenUnits = ..., - ) -> _CanvasItemId: ... + ) -> int: ... @overload def create_text( self, __x: float, __y: float, *, - activefill: _Color = ..., + activefill: str = ..., activestipple: str = ..., anchor: _Anchor = ..., - disabledfill: _Color = ..., - disabledstipple: _Bitmap = ..., - fill: _Color = ..., + disabledfill: str = ..., + disabledstipple: str = ..., + fill: str = ..., font: _FontDescription = ..., justify: Literal["left", "center", "right"] = ..., offset: _ScreenUnits = ..., state: Literal["normal", "active", "disabled"] = ..., - stipple: _Bitmap = ..., + stipple: str = ..., tags: str | list[str] | tuple[str, ...] = ..., text: float | str = ..., width: _ScreenUnits = ..., - ) -> _CanvasItemId: ... + ) -> int: ... @overload def create_text( self, __coords: tuple[float, float] | list[int] | list[float], *, - activefill: _Color = ..., + activefill: str = ..., activestipple: str = ..., anchor: _Anchor = ..., - disabledfill: _Color = ..., - disabledstipple: _Bitmap = ..., - fill: _Color = ..., + disabledfill: str = ..., + disabledstipple: str = ..., + fill: str = ..., font: _FontDescription = ..., justify: Literal["left", "center", "right"] = ..., offset: _ScreenUnits = ..., state: Literal["normal", "active", "disabled"] = ..., - stipple: _Bitmap = ..., + stipple: str = ..., tags: str | list[str] | tuple[str, ...] = ..., text: float | str = ..., width: _ScreenUnits = ..., - ) -> _CanvasItemId: ... + ) -> int: ... @overload def create_window( self, @@ -1687,7 +1677,7 @@ class Canvas(Widget, XView, YView): tags: str | list[str] | tuple[str, ...] = ..., width: _ScreenUnits = ..., window: Widget = ..., - ) -> _CanvasItemId: ... + ) -> int: ... @overload def create_window( self, @@ -1699,39 +1689,39 @@ class Canvas(Widget, XView, YView): tags: str | list[str] | tuple[str, ...] = ..., width: _ScreenUnits = ..., window: Widget = ..., - ) -> _CanvasItemId: ... + ) -> int: ... def dchars(self, *args) -> None: ... - def delete(self, *tagsOrCanvasIds: str | _CanvasItemId) -> None: ... 
+ def delete(self, *tagsOrCanvasIds: str | int) -> None: ... @overload def dtag(self, __tag: str, __tag_to_delete: str | None = ...) -> None: ... @overload - def dtag(self, __id: _CanvasItemId, __tag_to_delete: str) -> None: ... + def dtag(self, __id: int, __tag_to_delete: str) -> None: ... def focus(self, *args): ... - def gettags(self, __tagOrId: str | _CanvasItemId) -> tuple[str, ...]: ... + def gettags(self, __tagOrId: str | int) -> tuple[str, ...]: ... def icursor(self, *args) -> None: ... def index(self, *args): ... def insert(self, *args) -> None: ... def itemcget(self, tagOrId, option): ... # itemconfigure kwargs depend on item type, which is not known when type checking def itemconfigure( - self, tagOrId: str | _CanvasItemId, cnf: dict[str, Any] | None = None, **kw: Any + self, tagOrId: str | int, cnf: dict[str, Any] | None = None, **kw: Any ) -> dict[str, tuple[str, str, str, str, str]] | None: ... itemconfig = itemconfigure def move(self, *args) -> None: ... if sys.version_info >= (3, 8): - def moveto(self, tagOrId: str | _CanvasItemId, x: Literal[""] | float = "", y: Literal[""] | float = "") -> None: ... + def moveto(self, tagOrId: str | int, x: Literal[""] | float = "", y: Literal[""] | float = "") -> None: ... - def postscript(self, cnf=..., **kw): ... + def postscript(self, cnf={}, **kw): ... # tkinter does: # lower = tag_lower # lift = tkraise = tag_raise # # But mypy doesn't like aliasing here (maybe because Misc defines the same names) - def tag_lower(self, __first: str | _CanvasItemId, __second: str | _CanvasItemId | None = ...) -> None: ... - def lower(self, __first: str | _CanvasItemId, __second: str | _CanvasItemId | None = ...) -> None: ... # type: ignore[override] - def tag_raise(self, __first: str | _CanvasItemId, __second: str | _CanvasItemId | None = ...) -> None: ... - def tkraise(self, __first: str | _CanvasItemId, __second: str | _CanvasItemId | None = ...) -> None: ... # type: ignore[override] - def lift(self, __first: str | _CanvasItemId, __second: str | _CanvasItemId | None = ...) -> None: ... # type: ignore[override] + def tag_lower(self, __first: str | int, __second: str | int | None = ...) -> None: ... + def lower(self, __first: str | int, __second: str | int | None = ...) -> None: ... # type: ignore[override] + def tag_raise(self, __first: str | int, __second: str | int | None = ...) -> None: ... + def tkraise(self, __first: str | int, __second: str | int | None = ...) -> None: ... # type: ignore[override] + def lift(self, __first: str | int, __second: str | int | None = ...) -> None: ... # type: ignore[override] def scale(self, *args) -> None: ... def scan_mark(self, x, y) -> None: ... def scan_dragto(self, x, y, gain: int = 10) -> None: ... 
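Reviewer note: with the `_CanvasItemId` alias removed, the `Canvas.create_*` methods above are annotated as returning a plain `int` item id, which can then be passed anywhere a `tagOrId: str | int` is accepted. A small usage sketch, assuming a display is available (coordinates, colors, and the tag name are illustrative):

```python
import tkinter

root = tkinter.Tk()
canvas = tkinter.Canvas(root, width=200, height=100)
canvas.pack()

item: int = canvas.create_rectangle(10, 10, 90, 60, fill="blue", tags=("box",))
canvas.itemconfigure(item, outline="red")   # int id used as tagOrId
canvas.coords(item, 20, 20, 120, 80)        # move/resize by id
print(canvas.find_withtag("box"))           # tuple of int ids, typically (1,)
canvas.delete(item)
root.destroy()
```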
@@ -1746,27 +1736,27 @@ class Checkbutton(Widget): def __init__( self, master: Misc | None = None, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = {}, *, - activebackground: _Color = ..., - activeforeground: _Color = ..., + activebackground: str = ..., + activeforeground: str = ..., anchor: _Anchor = ..., - background: _Color = ..., + background: str = ..., bd: _ScreenUnits = ..., - bg: _Color = ..., - bitmap: _Bitmap = ..., + bg: str = ..., + bitmap: str = ..., border: _ScreenUnits = ..., borderwidth: _ScreenUnits = ..., command: _ButtonCommand = ..., compound: _Compound = ..., cursor: _Cursor = ..., - disabledforeground: _Color = ..., - fg: _Color = ..., + disabledforeground: str = ..., + fg: str = ..., font: _FontDescription = ..., - foreground: _Color = ..., + foreground: str = ..., height: _ScreenUnits = ..., - highlightbackground: _Color = ..., - highlightcolor: _Color = ..., + highlightbackground: str = ..., + highlightcolor: str = ..., highlightthickness: _ScreenUnits = ..., image: _ImageSpec = ..., indicatoron: bool = ..., @@ -1789,7 +1779,7 @@ class Checkbutton(Widget): padx: _ScreenUnits = ..., pady: _ScreenUnits = ..., relief: _Relief = ..., - selectcolor: _Color = ..., + selectcolor: str = ..., selectimage: _ImageSpec = ..., state: Literal["normal", "active", "disabled"] = ..., takefocus: _TakeFocusValue = ..., @@ -1807,25 +1797,25 @@ class Checkbutton(Widget): self, cnf: dict[str, Any] | None = None, *, - activebackground: _Color = ..., - activeforeground: _Color = ..., + activebackground: str = ..., + activeforeground: str = ..., anchor: _Anchor = ..., - background: _Color = ..., + background: str = ..., bd: _ScreenUnits = ..., - bg: _Color = ..., - bitmap: _Bitmap = ..., + bg: str = ..., + bitmap: str = ..., border: _ScreenUnits = ..., borderwidth: _ScreenUnits = ..., command: _ButtonCommand = ..., compound: _Compound = ..., cursor: _Cursor = ..., - disabledforeground: _Color = ..., - fg: _Color = ..., + disabledforeground: str = ..., + fg: str = ..., font: _FontDescription = ..., - foreground: _Color = ..., + foreground: str = ..., height: _ScreenUnits = ..., - highlightbackground: _Color = ..., - highlightcolor: _Color = ..., + highlightbackground: str = ..., + highlightcolor: str = ..., highlightthickness: _ScreenUnits = ..., image: _ImageSpec = ..., indicatoron: bool = ..., @@ -1837,7 +1827,7 @@ class Checkbutton(Widget): padx: _ScreenUnits = ..., pady: _ScreenUnits = ..., relief: _Relief = ..., - selectcolor: _Color = ..., + selectcolor: str = ..., selectimage: _ImageSpec = ..., state: Literal["normal", "active", "disabled"] = ..., takefocus: _TakeFocusValue = ..., @@ -1865,24 +1855,24 @@ class Entry(Widget, XView): def __init__( self, master: Misc | None = None, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = {}, *, - background: _Color = ..., + background: str = ..., bd: _ScreenUnits = ..., - bg: _Color = ..., + bg: str = ..., border: _ScreenUnits = ..., borderwidth: _ScreenUnits = ..., cursor: _Cursor = ..., - disabledbackground: _Color = ..., - disabledforeground: _Color = ..., + disabledbackground: str = ..., + disabledforeground: str = ..., exportselection: bool = ..., - fg: _Color = ..., + fg: str = ..., font: _FontDescription = ..., - foreground: _Color = ..., - highlightbackground: _Color = ..., - highlightcolor: _Color = ..., + foreground: str = ..., + highlightbackground: str = ..., + highlightcolor: str = ..., highlightthickness: _ScreenUnits = ..., - insertbackground: _Color = ..., + insertbackground: str = ..., 
insertborderwidth: _ScreenUnits = ..., insertofftime: int = ..., insertontime: int = ..., @@ -1891,11 +1881,11 @@ class Entry(Widget, XView): invcmd: _EntryValidateCommand = ..., # same as invalidcommand justify: Literal["left", "center", "right"] = ..., name: str = ..., - readonlybackground: _Color = ..., + readonlybackground: str = ..., relief: _Relief = ..., - selectbackground: _Color = ..., + selectbackground: str = ..., selectborderwidth: _ScreenUnits = ..., - selectforeground: _Color = ..., + selectforeground: str = ..., show: str = ..., state: Literal["normal", "disabled", "readonly"] = ..., takefocus: _TakeFocusValue = ..., @@ -1911,22 +1901,22 @@ class Entry(Widget, XView): self, cnf: dict[str, Any] | None = None, *, - background: _Color = ..., + background: str = ..., bd: _ScreenUnits = ..., - bg: _Color = ..., + bg: str = ..., border: _ScreenUnits = ..., borderwidth: _ScreenUnits = ..., cursor: _Cursor = ..., - disabledbackground: _Color = ..., - disabledforeground: _Color = ..., + disabledbackground: str = ..., + disabledforeground: str = ..., exportselection: bool = ..., - fg: _Color = ..., + fg: str = ..., font: _FontDescription = ..., - foreground: _Color = ..., - highlightbackground: _Color = ..., - highlightcolor: _Color = ..., + foreground: str = ..., + highlightbackground: str = ..., + highlightcolor: str = ..., highlightthickness: _ScreenUnits = ..., - insertbackground: _Color = ..., + insertbackground: str = ..., insertborderwidth: _ScreenUnits = ..., insertofftime: int = ..., insertontime: int = ..., @@ -1934,11 +1924,11 @@ class Entry(Widget, XView): invalidcommand: _EntryValidateCommand = ..., invcmd: _EntryValidateCommand = ..., justify: Literal["left", "center", "right"] = ..., - readonlybackground: _Color = ..., + readonlybackground: str = ..., relief: _Relief = ..., - selectbackground: _Color = ..., + selectbackground: str = ..., selectborderwidth: _ScreenUnits = ..., - selectforeground: _Color = ..., + selectforeground: str = ..., show: str = ..., state: Literal["normal", "disabled", "readonly"] = ..., takefocus: _TakeFocusValue = ..., @@ -1976,11 +1966,11 @@ class Frame(Widget): def __init__( self, master: Misc | None = None, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = {}, *, - background: _Color = ..., + background: str = ..., bd: _ScreenUnits = ..., - bg: _Color = ..., + bg: str = ..., border: _ScreenUnits = ..., borderwidth: _ScreenUnits = ..., class_: str = ..., # can't be changed with configure() @@ -1988,8 +1978,8 @@ class Frame(Widget): container: bool = ..., # can't be changed with configure() cursor: _Cursor = ..., height: _ScreenUnits = ..., - highlightbackground: _Color = ..., - highlightcolor: _Color = ..., + highlightbackground: str = ..., + highlightcolor: str = ..., highlightthickness: _ScreenUnits = ..., name: str = ..., padx: _ScreenUnits = ..., @@ -2004,15 +1994,15 @@ class Frame(Widget): self, cnf: dict[str, Any] | None = None, *, - background: _Color = ..., + background: str = ..., bd: _ScreenUnits = ..., - bg: _Color = ..., + bg: str = ..., border: _ScreenUnits = ..., borderwidth: _ScreenUnits = ..., cursor: _Cursor = ..., height: _ScreenUnits = ..., - highlightbackground: _Color = ..., - highlightcolor: _Color = ..., + highlightbackground: str = ..., + highlightcolor: str = ..., highlightthickness: _ScreenUnits = ..., padx: _ScreenUnits = ..., pady: _ScreenUnits = ..., @@ -2028,26 +2018,26 @@ class Label(Widget): def __init__( self, master: Misc | None = None, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, 
Any] | None = {}, *, - activebackground: _Color = ..., - activeforeground: _Color = ..., + activebackground: str = ..., + activeforeground: str = ..., anchor: _Anchor = ..., - background: _Color = ..., + background: str = ..., bd: _ScreenUnits = ..., - bg: _Color = ..., - bitmap: _Bitmap = ..., + bg: str = ..., + bitmap: str = ..., border: _ScreenUnits = ..., borderwidth: _ScreenUnits = ..., compound: _Compound = ..., cursor: _Cursor = ..., - disabledforeground: _Color = ..., - fg: _Color = ..., + disabledforeground: str = ..., + fg: str = ..., font: _FontDescription = ..., - foreground: _Color = ..., + foreground: str = ..., height: _ScreenUnits = ..., - highlightbackground: _Color = ..., - highlightcolor: _Color = ..., + highlightbackground: str = ..., + highlightcolor: str = ..., highlightthickness: _ScreenUnits = ..., image: _ImageSpec = ..., justify: Literal["left", "center", "right"] = ..., @@ -2068,24 +2058,24 @@ class Label(Widget): self, cnf: dict[str, Any] | None = None, *, - activebackground: _Color = ..., - activeforeground: _Color = ..., + activebackground: str = ..., + activeforeground: str = ..., anchor: _Anchor = ..., - background: _Color = ..., + background: str = ..., bd: _ScreenUnits = ..., - bg: _Color = ..., - bitmap: _Bitmap = ..., + bg: str = ..., + bitmap: str = ..., border: _ScreenUnits = ..., borderwidth: _ScreenUnits = ..., compound: _Compound = ..., cursor: _Cursor = ..., - disabledforeground: _Color = ..., - fg: _Color = ..., + disabledforeground: str = ..., + fg: str = ..., font: _FontDescription = ..., - foreground: _Color = ..., + foreground: str = ..., height: _ScreenUnits = ..., - highlightbackground: _Color = ..., - highlightcolor: _Color = ..., + highlightbackground: str = ..., + highlightcolor: str = ..., highlightthickness: _ScreenUnits = ..., image: _ImageSpec = ..., justify: Literal["left", "center", "right"] = ..., @@ -2108,23 +2098,23 @@ class Listbox(Widget, XView, YView): def __init__( self, master: Misc | None = None, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = {}, *, activestyle: Literal["dotbox", "none", "underline"] = ..., - background: _Color = ..., + background: str = ..., bd: _ScreenUnits = ..., - bg: _Color = ..., + bg: str = ..., border: _ScreenUnits = ..., borderwidth: _ScreenUnits = ..., cursor: _Cursor = ..., - disabledforeground: _Color = ..., + disabledforeground: str = ..., exportselection: int = ..., - fg: _Color = ..., + fg: str = ..., font: _FontDescription = ..., - foreground: _Color = ..., + foreground: str = ..., height: int = ..., - highlightbackground: _Color = ..., - highlightcolor: _Color = ..., + highlightbackground: str = ..., + highlightcolor: str = ..., highlightthickness: _ScreenUnits = ..., justify: Literal["left", "center", "right"] = ..., # There's no tkinter.ListVar, but seems like bare tkinter.Variable @@ -2139,9 +2129,9 @@ class Listbox(Widget, XView, YView): listvariable: Variable = ..., name: str = ..., relief: _Relief = ..., - selectbackground: _Color = ..., + selectbackground: str = ..., selectborderwidth: _ScreenUnits = ..., - selectforeground: _Color = ..., + selectforeground: str = ..., # from listbox man page: "The value of the [selectmode] option may be # arbitrary, but the default bindings expect it to be ..." 
# @@ -2161,27 +2151,27 @@ class Listbox(Widget, XView, YView): cnf: dict[str, Any] | None = None, *, activestyle: Literal["dotbox", "none", "underline"] = ..., - background: _Color = ..., + background: str = ..., bd: _ScreenUnits = ..., - bg: _Color = ..., + bg: str = ..., border: _ScreenUnits = ..., borderwidth: _ScreenUnits = ..., cursor: _Cursor = ..., - disabledforeground: _Color = ..., + disabledforeground: str = ..., exportselection: bool = ..., - fg: _Color = ..., + fg: str = ..., font: _FontDescription = ..., - foreground: _Color = ..., + foreground: str = ..., height: int = ..., - highlightbackground: _Color = ..., - highlightcolor: _Color = ..., + highlightbackground: str = ..., + highlightcolor: str = ..., highlightthickness: _ScreenUnits = ..., justify: Literal["left", "center", "right"] = ..., listvariable: Variable = ..., relief: _Relief = ..., - selectbackground: _Color = ..., + selectbackground: str = ..., selectborderwidth: _ScreenUnits = ..., - selectforeground: _Color = ..., + selectforeground: str = ..., selectmode: str = ..., setgrid: bool = ..., state: Literal["normal", "disabled"] = ..., @@ -2221,25 +2211,25 @@ class Menu(Widget): def __init__( self, master: Misc | None = None, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = {}, *, - activebackground: _Color = ..., + activebackground: str = ..., activeborderwidth: _ScreenUnits = ..., - activeforeground: _Color = ..., - background: _Color = ..., + activeforeground: str = ..., + background: str = ..., bd: _ScreenUnits = ..., - bg: _Color = ..., + bg: str = ..., border: _ScreenUnits = ..., borderwidth: _ScreenUnits = ..., cursor: _Cursor = ..., - disabledforeground: _Color = ..., - fg: _Color = ..., + disabledforeground: str = ..., + fg: str = ..., font: _FontDescription = ..., - foreground: _Color = ..., + foreground: str = ..., name: str = ..., postcommand: Callable[[], object] | str = ..., relief: _Relief = ..., - selectcolor: _Color = ..., + selectcolor: str = ..., takefocus: _TakeFocusValue = ..., tearoff: int = ..., # I guess tearoffcommand arguments are supposed to be widget objects, @@ -2254,22 +2244,22 @@ class Menu(Widget): self, cnf: dict[str, Any] | None = None, *, - activebackground: _Color = ..., + activebackground: str = ..., activeborderwidth: _ScreenUnits = ..., - activeforeground: _Color = ..., - background: _Color = ..., + activeforeground: str = ..., + background: str = ..., bd: _ScreenUnits = ..., - bg: _Color = ..., + bg: str = ..., border: _ScreenUnits = ..., borderwidth: _ScreenUnits = ..., cursor: _Cursor = ..., - disabledforeground: _Color = ..., - fg: _Color = ..., + disabledforeground: str = ..., + fg: str = ..., font: _FontDescription = ..., - foreground: _Color = ..., + foreground: str = ..., postcommand: Callable[[], object] | str = ..., relief: _Relief = ..., - selectcolor: _Color = ..., + selectcolor: str = ..., takefocus: _TakeFocusValue = ..., tearoff: bool = ..., tearoffcommand: Callable[[str, str], object] | str = ..., @@ -2281,22 +2271,22 @@ class Menu(Widget): config = configure def tk_popup(self, x: int, y: int, entry: str | int = "") -> None: ... def activate(self, index: str | int) -> None: ... - def add(self, itemType, cnf=..., **kw): ... # docstring says "Internal function." - def insert(self, index, itemType, cnf=..., **kw): ... # docstring says "Internal function." + def add(self, itemType, cnf={}, **kw): ... # docstring says "Internal function." + def insert(self, index, itemType, cnf={}, **kw): ... # docstring says "Internal function." 
def add_cascade( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = {}, *, accelerator: str = ..., - activebackground: _Color = ..., - activeforeground: _Color = ..., - background: _Color = ..., - bitmap: _Bitmap = ..., + activebackground: str = ..., + activeforeground: str = ..., + background: str = ..., + bitmap: str = ..., columnbreak: int = ..., command: Callable[[], object] | str = ..., compound: _Compound = ..., font: _FontDescription = ..., - foreground: _Color = ..., + foreground: str = ..., hidemargin: bool = ..., image: _ImageSpec = ..., label: str = ..., @@ -2306,25 +2296,25 @@ class Menu(Widget): ) -> None: ... def add_checkbutton( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = {}, *, accelerator: str = ..., - activebackground: _Color = ..., - activeforeground: _Color = ..., - background: _Color = ..., - bitmap: _Bitmap = ..., + activebackground: str = ..., + activeforeground: str = ..., + background: str = ..., + bitmap: str = ..., columnbreak: int = ..., command: Callable[[], object] | str = ..., compound: _Compound = ..., font: _FontDescription = ..., - foreground: _Color = ..., + foreground: str = ..., hidemargin: bool = ..., image: _ImageSpec = ..., indicatoron: bool = ..., label: str = ..., offvalue: Any = ..., onvalue: Any = ..., - selectcolor: _Color = ..., + selectcolor: str = ..., selectimage: _ImageSpec = ..., state: Literal["normal", "active", "disabled"] = ..., underline: int = ..., @@ -2332,18 +2322,18 @@ class Menu(Widget): ) -> None: ... def add_command( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = {}, *, accelerator: str = ..., - activebackground: _Color = ..., - activeforeground: _Color = ..., - background: _Color = ..., - bitmap: _Bitmap = ..., + activebackground: str = ..., + activeforeground: str = ..., + background: str = ..., + bitmap: str = ..., columnbreak: int = ..., command: Callable[[], object] | str = ..., compound: _Compound = ..., font: _FontDescription = ..., - foreground: _Color = ..., + foreground: str = ..., hidemargin: bool = ..., image: _ImageSpec = ..., label: str = ..., @@ -2352,45 +2342,45 @@ class Menu(Widget): ) -> None: ... def add_radiobutton( self, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = {}, *, accelerator: str = ..., - activebackground: _Color = ..., - activeforeground: _Color = ..., - background: _Color = ..., - bitmap: _Bitmap = ..., + activebackground: str = ..., + activeforeground: str = ..., + background: str = ..., + bitmap: str = ..., columnbreak: int = ..., command: Callable[[], object] | str = ..., compound: _Compound = ..., font: _FontDescription = ..., - foreground: _Color = ..., + foreground: str = ..., hidemargin: bool = ..., image: _ImageSpec = ..., indicatoron: bool = ..., label: str = ..., - selectcolor: _Color = ..., + selectcolor: str = ..., selectimage: _ImageSpec = ..., state: Literal["normal", "active", "disabled"] = ..., underline: int = ..., value: Any = ..., variable: Variable = ..., ) -> None: ... - def add_separator(self, cnf: dict[str, Any] | None = ..., *, background: _Color = ...) -> None: ... + def add_separator(self, cnf: dict[str, Any] | None = {}, *, background: str = ...) -> None: ... 
def insert_cascade( self, index: str | int, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = {}, *, accelerator: str = ..., - activebackground: _Color = ..., - activeforeground: _Color = ..., - background: _Color = ..., - bitmap: _Bitmap = ..., + activebackground: str = ..., + activeforeground: str = ..., + background: str = ..., + bitmap: str = ..., columnbreak: int = ..., command: Callable[[], object] | str = ..., compound: _Compound = ..., font: _FontDescription = ..., - foreground: _Color = ..., + foreground: str = ..., hidemargin: bool = ..., image: _ImageSpec = ..., label: str = ..., @@ -2401,25 +2391,25 @@ class Menu(Widget): def insert_checkbutton( self, index: str | int, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = {}, *, accelerator: str = ..., - activebackground: _Color = ..., - activeforeground: _Color = ..., - background: _Color = ..., - bitmap: _Bitmap = ..., + activebackground: str = ..., + activeforeground: str = ..., + background: str = ..., + bitmap: str = ..., columnbreak: int = ..., command: Callable[[], object] | str = ..., compound: _Compound = ..., font: _FontDescription = ..., - foreground: _Color = ..., + foreground: str = ..., hidemargin: bool = ..., image: _ImageSpec = ..., indicatoron: bool = ..., label: str = ..., offvalue: Any = ..., onvalue: Any = ..., - selectcolor: _Color = ..., + selectcolor: str = ..., selectimage: _ImageSpec = ..., state: Literal["normal", "active", "disabled"] = ..., underline: int = ..., @@ -2428,18 +2418,18 @@ class Menu(Widget): def insert_command( self, index: str | int, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = {}, *, accelerator: str = ..., - activebackground: _Color = ..., - activeforeground: _Color = ..., - background: _Color = ..., - bitmap: _Bitmap = ..., + activebackground: str = ..., + activeforeground: str = ..., + background: str = ..., + bitmap: str = ..., columnbreak: int = ..., command: Callable[[], object] | str = ..., compound: _Compound = ..., font: _FontDescription = ..., - foreground: _Color = ..., + foreground: str = ..., hidemargin: bool = ..., image: _ImageSpec = ..., label: str = ..., @@ -2449,30 +2439,30 @@ class Menu(Widget): def insert_radiobutton( self, index: str | int, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = {}, *, accelerator: str = ..., - activebackground: _Color = ..., - activeforeground: _Color = ..., - background: _Color = ..., - bitmap: _Bitmap = ..., + activebackground: str = ..., + activeforeground: str = ..., + background: str = ..., + bitmap: str = ..., columnbreak: int = ..., command: Callable[[], object] | str = ..., compound: _Compound = ..., font: _FontDescription = ..., - foreground: _Color = ..., + foreground: str = ..., hidemargin: bool = ..., image: _ImageSpec = ..., indicatoron: bool = ..., label: str = ..., - selectcolor: _Color = ..., + selectcolor: str = ..., selectimage: _ImageSpec = ..., state: Literal["normal", "active", "disabled"] = ..., underline: int = ..., value: Any = ..., variable: Variable = ..., ) -> None: ... - def insert_separator(self, index: str | int, cnf: dict[str, Any] | None = ..., *, background: _Color = ...) -> None: ... + def insert_separator(self, index: str | int, cnf: dict[str, Any] | None = {}, *, background: str = ...) -> None: ... def delete(self, index1: str | int, index2: str | int | None = None) -> None: ... def entrycget(self, index: str | int, option: str) -> Any: ... 
def entryconfigure( @@ -2491,27 +2481,27 @@ class Menubutton(Widget): def __init__( self, master: Misc | None = None, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = {}, *, - activebackground: _Color = ..., - activeforeground: _Color = ..., + activebackground: str = ..., + activeforeground: str = ..., anchor: _Anchor = ..., - background: _Color = ..., + background: str = ..., bd: _ScreenUnits = ..., - bg: _Color = ..., - bitmap: _Bitmap = ..., + bg: str = ..., + bitmap: str = ..., border: _ScreenUnits = ..., borderwidth: _ScreenUnits = ..., compound: _Compound = ..., cursor: _Cursor = ..., direction: Literal["above", "below", "left", "right", "flush"] = ..., - disabledforeground: _Color = ..., - fg: _Color = ..., + disabledforeground: str = ..., + fg: str = ..., font: _FontDescription = ..., - foreground: _Color = ..., + foreground: str = ..., height: _ScreenUnits = ..., - highlightbackground: _Color = ..., - highlightcolor: _Color = ..., + highlightbackground: str = ..., + highlightcolor: str = ..., highlightthickness: _ScreenUnits = ..., image: _ImageSpec = ..., indicatoron: bool = ..., @@ -2534,25 +2524,25 @@ class Menubutton(Widget): self, cnf: dict[str, Any] | None = None, *, - activebackground: _Color = ..., - activeforeground: _Color = ..., + activebackground: str = ..., + activeforeground: str = ..., anchor: _Anchor = ..., - background: _Color = ..., + background: str = ..., bd: _ScreenUnits = ..., - bg: _Color = ..., - bitmap: _Bitmap = ..., + bg: str = ..., + bitmap: str = ..., border: _ScreenUnits = ..., borderwidth: _ScreenUnits = ..., compound: _Compound = ..., cursor: _Cursor = ..., direction: Literal["above", "below", "left", "right", "flush"] = ..., - disabledforeground: _Color = ..., - fg: _Color = ..., + disabledforeground: str = ..., + fg: str = ..., font: _FontDescription = ..., - foreground: _Color = ..., + foreground: str = ..., height: _ScreenUnits = ..., - highlightbackground: _Color = ..., - highlightcolor: _Color = ..., + highlightbackground: str = ..., + highlightcolor: str = ..., highlightthickness: _ScreenUnits = ..., image: _ImageSpec = ..., indicatoron: bool = ..., @@ -2577,21 +2567,21 @@ class Message(Widget): def __init__( self, master: Misc | None = None, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = {}, *, anchor: _Anchor = ..., aspect: int = ..., - background: _Color = ..., + background: str = ..., bd: _ScreenUnits = ..., - bg: _Color = ..., + bg: str = ..., border: _ScreenUnits = ..., borderwidth: _ScreenUnits = ..., cursor: _Cursor = ..., - fg: _Color = ..., + fg: str = ..., font: _FontDescription = ..., - foreground: _Color = ..., - highlightbackground: _Color = ..., - highlightcolor: _Color = ..., + foreground: str = ..., + highlightbackground: str = ..., + highlightcolor: str = ..., highlightthickness: _ScreenUnits = ..., justify: Literal["left", "center", "right"] = ..., name: str = ..., @@ -2611,17 +2601,17 @@ class Message(Widget): *, anchor: _Anchor = ..., aspect: int = ..., - background: _Color = ..., + background: str = ..., bd: _ScreenUnits = ..., - bg: _Color = ..., + bg: str = ..., border: _ScreenUnits = ..., borderwidth: _ScreenUnits = ..., cursor: _Cursor = ..., - fg: _Color = ..., + fg: str = ..., font: _FontDescription = ..., - foreground: _Color = ..., - highlightbackground: _Color = ..., - highlightcolor: _Color = ..., + foreground: str = ..., + highlightbackground: str = ..., + highlightcolor: str = ..., highlightthickness: _ScreenUnits = ..., justify: Literal["left", "center", "right"] = ..., 
padx: _ScreenUnits = ..., @@ -2640,27 +2630,27 @@ class Radiobutton(Widget): def __init__( self, master: Misc | None = None, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = {}, *, - activebackground: _Color = ..., - activeforeground: _Color = ..., + activebackground: str = ..., + activeforeground: str = ..., anchor: _Anchor = ..., - background: _Color = ..., + background: str = ..., bd: _ScreenUnits = ..., - bg: _Color = ..., - bitmap: _Bitmap = ..., + bg: str = ..., + bitmap: str = ..., border: _ScreenUnits = ..., borderwidth: _ScreenUnits = ..., command: _ButtonCommand = ..., compound: _Compound = ..., cursor: _Cursor = ..., - disabledforeground: _Color = ..., - fg: _Color = ..., + disabledforeground: str = ..., + fg: str = ..., font: _FontDescription = ..., - foreground: _Color = ..., + foreground: str = ..., height: _ScreenUnits = ..., - highlightbackground: _Color = ..., - highlightcolor: _Color = ..., + highlightbackground: str = ..., + highlightcolor: str = ..., highlightthickness: _ScreenUnits = ..., image: _ImageSpec = ..., indicatoron: bool = ..., @@ -2671,7 +2661,7 @@ class Radiobutton(Widget): padx: _ScreenUnits = ..., pady: _ScreenUnits = ..., relief: _Relief = ..., - selectcolor: _Color = ..., + selectcolor: str = ..., selectimage: _ImageSpec = ..., state: Literal["normal", "active", "disabled"] = ..., takefocus: _TakeFocusValue = ..., @@ -2690,25 +2680,25 @@ class Radiobutton(Widget): self, cnf: dict[str, Any] | None = None, *, - activebackground: _Color = ..., - activeforeground: _Color = ..., + activebackground: str = ..., + activeforeground: str = ..., anchor: _Anchor = ..., - background: _Color = ..., + background: str = ..., bd: _ScreenUnits = ..., - bg: _Color = ..., - bitmap: _Bitmap = ..., + bg: str = ..., + bitmap: str = ..., border: _ScreenUnits = ..., borderwidth: _ScreenUnits = ..., command: _ButtonCommand = ..., compound: _Compound = ..., cursor: _Cursor = ..., - disabledforeground: _Color = ..., - fg: _Color = ..., + disabledforeground: str = ..., + fg: str = ..., font: _FontDescription = ..., - foreground: _Color = ..., + foreground: str = ..., height: _ScreenUnits = ..., - highlightbackground: _Color = ..., - highlightcolor: _Color = ..., + highlightbackground: str = ..., + highlightcolor: str = ..., highlightthickness: _ScreenUnits = ..., image: _ImageSpec = ..., indicatoron: bool = ..., @@ -2718,7 +2708,7 @@ class Radiobutton(Widget): padx: _ScreenUnits = ..., pady: _ScreenUnits = ..., relief: _Relief = ..., - selectcolor: _Color = ..., + selectcolor: str = ..., selectimage: _ImageSpec = ..., state: Literal["normal", "active", "disabled"] = ..., takefocus: _TakeFocusValue = ..., @@ -2744,12 +2734,12 @@ class Scale(Widget): def __init__( self, master: Misc | None = None, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = {}, *, - activebackground: _Color = ..., - background: _Color = ..., + activebackground: str = ..., + background: str = ..., bd: _ScreenUnits = ..., - bg: _Color = ..., + bg: str = ..., bigincrement: float = ..., border: _ScreenUnits = ..., borderwidth: _ScreenUnits = ..., @@ -2757,12 +2747,12 @@ class Scale(Widget): command: str | Callable[[str], object] = ..., cursor: _Cursor = ..., digits: int = ..., - fg: _Color = ..., + fg: str = ..., font: _FontDescription = ..., - foreground: _Color = ..., + foreground: str = ..., from_: float = ..., - highlightbackground: _Color = ..., - highlightcolor: _Color = ..., + highlightbackground: str = ..., + highlightcolor: str = ..., highlightthickness: _ScreenUnits = 
..., label: str = ..., length: _ScreenUnits = ..., @@ -2779,7 +2769,7 @@ class Scale(Widget): takefocus: _TakeFocusValue = ..., tickinterval: float = ..., to: float = ..., - troughcolor: _Color = ..., + troughcolor: str = ..., variable: IntVar | DoubleVar = ..., width: _ScreenUnits = ..., ) -> None: ... @@ -2788,22 +2778,22 @@ class Scale(Widget): self, cnf: dict[str, Any] | None = None, *, - activebackground: _Color = ..., - background: _Color = ..., + activebackground: str = ..., + background: str = ..., bd: _ScreenUnits = ..., - bg: _Color = ..., + bg: str = ..., bigincrement: float = ..., border: _ScreenUnits = ..., borderwidth: _ScreenUnits = ..., command: str | Callable[[str], object] = ..., cursor: _Cursor = ..., digits: int = ..., - fg: _Color = ..., + fg: str = ..., font: _FontDescription = ..., - foreground: _Color = ..., + foreground: str = ..., from_: float = ..., - highlightbackground: _Color = ..., - highlightcolor: _Color = ..., + highlightbackground: str = ..., + highlightcolor: str = ..., highlightthickness: _ScreenUnits = ..., label: str = ..., length: _ScreenUnits = ..., @@ -2819,7 +2809,7 @@ class Scale(Widget): takefocus: _TakeFocusValue = ..., tickinterval: float = ..., to: float = ..., - troughcolor: _Color = ..., + troughcolor: str = ..., variable: IntVar | DoubleVar = ..., width: _ScreenUnits = ..., ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @@ -2835,13 +2825,13 @@ class Scrollbar(Widget): def __init__( self, master: Misc | None = None, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = {}, *, - activebackground: _Color = ..., + activebackground: str = ..., activerelief: _Relief = ..., - background: _Color = ..., + background: str = ..., bd: _ScreenUnits = ..., - bg: _Color = ..., + bg: str = ..., border: _ScreenUnits = ..., borderwidth: _ScreenUnits = ..., # There are many ways how the command may get called. Search for @@ -2851,8 +2841,8 @@ class Scrollbar(Widget): command: Callable[..., tuple[float, float] | None] | str = ..., cursor: _Cursor = ..., elementborderwidth: _ScreenUnits = ..., - highlightbackground: _Color = ..., - highlightcolor: _Color = ..., + highlightbackground: str = ..., + highlightcolor: str = ..., highlightthickness: _ScreenUnits = ..., jump: bool = ..., name: str = ..., @@ -2861,7 +2851,7 @@ class Scrollbar(Widget): repeatdelay: int = ..., repeatinterval: int = ..., takefocus: _TakeFocusValue = ..., - troughcolor: _Color = ..., + troughcolor: str = ..., width: _ScreenUnits = ..., ) -> None: ... @overload @@ -2869,18 +2859,18 @@ class Scrollbar(Widget): self, cnf: dict[str, Any] | None = None, *, - activebackground: _Color = ..., + activebackground: str = ..., activerelief: _Relief = ..., - background: _Color = ..., + background: str = ..., bd: _ScreenUnits = ..., - bg: _Color = ..., + bg: str = ..., border: _ScreenUnits = ..., borderwidth: _ScreenUnits = ..., command: Callable[..., tuple[float, float] | None] | str = ..., cursor: _Cursor = ..., elementborderwidth: _ScreenUnits = ..., - highlightbackground: _Color = ..., - highlightcolor: _Color = ..., + highlightbackground: str = ..., + highlightcolor: str = ..., highlightthickness: _ScreenUnits = ..., jump: bool = ..., orient: Literal["horizontal", "vertical"] = ..., @@ -2888,7 +2878,7 @@ class Scrollbar(Widget): repeatdelay: int = ..., repeatinterval: int = ..., takefocus: _TakeFocusValue = ..., - troughcolor: _Color = ..., + troughcolor: str = ..., width: _ScreenUnits = ..., ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... 
@overload @@ -2907,30 +2897,30 @@ class Text(Widget, XView, YView): def __init__( self, master: Misc | None = None, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = {}, *, autoseparators: bool = ..., - background: _Color = ..., + background: str = ..., bd: _ScreenUnits = ..., - bg: _Color = ..., + bg: str = ..., blockcursor: bool = ..., border: _ScreenUnits = ..., borderwidth: _ScreenUnits = ..., cursor: _Cursor = ..., endline: int | Literal[""] = ..., exportselection: bool = ..., - fg: _Color = ..., + fg: str = ..., font: _FontDescription = ..., - foreground: _Color = ..., + foreground: str = ..., # width is always int, but height is allowed to be ScreenUnits. # This doesn't make any sense to me, and this isn't documented. # The docs seem to say that both should be integers. height: _ScreenUnits = ..., - highlightbackground: _Color = ..., - highlightcolor: _Color = ..., + highlightbackground: str = ..., + highlightcolor: str = ..., highlightthickness: _ScreenUnits = ..., - inactiveselectbackground: _Color = ..., - insertbackground: _Color = ..., + inactiveselectbackground: str = ..., + insertbackground: str = ..., insertborderwidth: _ScreenUnits = ..., insertofftime: int = ..., insertontime: int = ..., @@ -2941,9 +2931,9 @@ class Text(Widget, XView, YView): padx: _ScreenUnits = ..., pady: _ScreenUnits = ..., relief: _Relief = ..., - selectbackground: _Color = ..., + selectbackground: str = ..., selectborderwidth: _ScreenUnits = ..., - selectforeground: _Color = ..., + selectforeground: str = ..., setgrid: bool = ..., spacing1: _ScreenUnits = ..., spacing2: _ScreenUnits = ..., @@ -2966,24 +2956,24 @@ class Text(Widget, XView, YView): cnf: dict[str, Any] | None = None, *, autoseparators: bool = ..., - background: _Color = ..., + background: str = ..., bd: _ScreenUnits = ..., - bg: _Color = ..., + bg: str = ..., blockcursor: bool = ..., border: _ScreenUnits = ..., borderwidth: _ScreenUnits = ..., cursor: _Cursor = ..., endline: int | Literal[""] = ..., exportselection: bool = ..., - fg: _Color = ..., + fg: str = ..., font: _FontDescription = ..., - foreground: _Color = ..., + foreground: str = ..., height: _ScreenUnits = ..., - highlightbackground: _Color = ..., - highlightcolor: _Color = ..., + highlightbackground: str = ..., + highlightcolor: str = ..., highlightthickness: _ScreenUnits = ..., - inactiveselectbackground: _Color = ..., - insertbackground: _Color = ..., + inactiveselectbackground: str = ..., + insertbackground: str = ..., insertborderwidth: _ScreenUnits = ..., insertofftime: int = ..., insertontime: int = ..., @@ -2993,9 +2983,9 @@ class Text(Widget, XView, YView): padx: _ScreenUnits = ..., pady: _ScreenUnits = ..., relief: _Relief = ..., - selectbackground: _Color = ..., + selectbackground: str = ..., selectborderwidth: _ScreenUnits = ..., - selectforeground: _Color = ..., + selectforeground: str = ..., setgrid: bool = ..., spacing1: _ScreenUnits = ..., spacing2: _ScreenUnits = ..., @@ -3078,7 +3068,7 @@ class Text(Widget, XView, YView): # TODO: image_* methods def image_cget(self, index, option): ... def image_configure(self, index, cnf: Incomplete | None = None, **kw): ... - def image_create(self, index, cnf=..., **kw): ... + def image_create(self, index, cnf={}, **kw): ... def image_names(self): ... def index(self, index: _TextIndex) -> str: ... def insert(self, index: _TextIndex, chars: str, *args: str | list[str] | tuple[str, ...]) -> None: ... 
@@ -3092,7 +3082,7 @@ class Text(Widget, XView, YView): def mark_next(self, index: _TextIndex) -> str | None: ... def mark_previous(self, index: _TextIndex) -> str | None: ... # **kw of peer_create is same as the kwargs of Text.__init__ - def peer_create(self, newPathName: str | Text, cnf: dict[str, Any] = ..., **kw) -> None: ... + def peer_create(self, newPathName: str | Text, cnf: dict[str, Any] = {}, **kw) -> None: ... def peer_names(self) -> tuple[_tkinter.Tcl_Obj, ...]: ... def replace(self, index1: _TextIndex, index2: _TextIndex, chars: str, *args: str | list[str] | tuple[str, ...]) -> None: ... def scan_mark(self, x: int, y: int) -> None: ... @@ -3132,33 +3122,33 @@ class Text(Widget, XView, YView): tagName: str, cnf: dict[str, Any] | None = None, *, - background: _Color = ..., - bgstipple: _Bitmap = ..., + background: str = ..., + bgstipple: str = ..., borderwidth: _ScreenUnits = ..., border: _ScreenUnits = ..., # alias for borderwidth elide: bool = ..., - fgstipple: _Bitmap = ..., + fgstipple: str = ..., font: _FontDescription = ..., - foreground: _Color = ..., + foreground: str = ..., justify: Literal["left", "right", "center"] = ..., lmargin1: _ScreenUnits = ..., lmargin2: _ScreenUnits = ..., - lmargincolor: _Color = ..., + lmargincolor: str = ..., offset: _ScreenUnits = ..., overstrike: bool = ..., - overstrikefg: _Color = ..., + overstrikefg: str = ..., relief: _Relief = ..., rmargin: _ScreenUnits = ..., - rmargincolor: _Color = ..., - selectbackground: _Color = ..., - selectforeground: _Color = ..., + rmargincolor: str = ..., + selectbackground: str = ..., + selectforeground: str = ..., spacing1: _ScreenUnits = ..., spacing2: _ScreenUnits = ..., spacing3: _ScreenUnits = ..., tabs: Any = ..., # the exact type is kind of complicated, see manual page tabstyle: Literal["tabular", "wordprocessor"] = ..., underline: bool = ..., - underlinefg: _Color = ..., + underlinefg: str = ..., wrap: Literal["none", "char", "word"] = ..., # be careful with "none" vs None ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload @@ -3181,7 +3171,7 @@ class Text(Widget, XView, YView): def window_cget(self, index, option): ... def window_configure(self, index, cnf: Incomplete | None = None, **kw): ... window_config = window_configure - def window_create(self, index, cnf=..., **kw) -> None: ... + def window_create(self, index, cnf={}, **kw) -> None: ... def window_names(self): ... def yview_pickplace(self, *what): ... # deprecated @@ -3222,7 +3212,7 @@ class Image(_Image): name: Incomplete tk: _tkinter.TkappType def __init__( - self, imgtype, name: Incomplete | None = None, cnf=..., master: Misc | _tkinter.TkappType | None = None, **kw + self, imgtype, name: Incomplete | None = None, cnf={}, master: Misc | _tkinter.TkappType | None = None, **kw ) -> None: ... def __del__(self) -> None: ... def __setitem__(self, key, value) -> None: ... @@ -3238,7 +3228,7 @@ class PhotoImage(Image, _PhotoImageLike): def __init__( self, name: str | None = None, - cnf: dict[str, Any] = ..., + cnf: dict[str, Any] = {}, master: Misc | _tkinter.TkappType | None = None, *, data: str | bytes = ..., # not same as data argument of put() @@ -3273,11 +3263,11 @@ class PhotoImage(Image, _PhotoImageLike): data: ( str | list[str] - | list[list[_Color]] - | list[tuple[_Color, ...]] + | list[list[str]] + | list[tuple[str, ...]] | tuple[str, ...] - | tuple[list[_Color], ...] - | tuple[tuple[_Color, ...], ...] + | tuple[list[str], ...] + | tuple[tuple[str, ...], ...] ), to: tuple[int, int] | None = None, ) -> None: ... 
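The hunk above narrows the nested `data` sequences accepted by `PhotoImage.put()` from `_Color` to plain `str`. A minimal illustrative sketch, not part of the diff and assuming a working display and Tcl/Tk, of how `put()` is called with ordinary color strings, which is all the removed alias ever covered:

```python
# Illustrative sketch only; needs a display and a working Tcl/Tk installation.
import tkinter

root = tkinter.Tk()
img = tkinter.PhotoImage(width=2, height=2)

# put() takes nested sequences of ordinary color strings ("red", "#00ff00", ...),
# so annotating them as str loses nothing compared to the old _Color alias.
img.put([["red", "#00ff00"], ["blue", "white"]], to=(0, 0))

tkinter.Label(root, image=img).pack()
root.update()  # render once without entering the full mainloop
```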
@@ -3291,13 +3281,13 @@ class BitmapImage(Image, _BitmapImageLike): def __init__( self, name: Incomplete | None = None, - cnf: dict[str, Any] = ..., + cnf: dict[str, Any] = {}, master: Misc | _tkinter.TkappType | None = None, *, - background: _Color = ..., + background: str = ..., data: str | bytes = ..., file: StrOrBytesPath = ..., - foreground: _Color = ..., + foreground: str = ..., maskdata: str = ..., maskfile: StrOrBytesPath = ..., ) -> None: ... @@ -3309,34 +3299,34 @@ class Spinbox(Widget, XView): def __init__( self, master: Misc | None = None, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = {}, *, - activebackground: _Color = ..., - background: _Color = ..., + activebackground: str = ..., + background: str = ..., bd: _ScreenUnits = ..., - bg: _Color = ..., + bg: str = ..., border: _ScreenUnits = ..., borderwidth: _ScreenUnits = ..., - buttonbackground: _Color = ..., + buttonbackground: str = ..., buttoncursor: _Cursor = ..., buttondownrelief: _Relief = ..., buttonuprelief: _Relief = ..., # percent substitutions don't seem to be supported, it's similar to Entry's validation stuff command: Callable[[], object] | str | list[str] | tuple[str, ...] = ..., cursor: _Cursor = ..., - disabledbackground: _Color = ..., - disabledforeground: _Color = ..., + disabledbackground: str = ..., + disabledforeground: str = ..., exportselection: bool = ..., - fg: _Color = ..., + fg: str = ..., font: _FontDescription = ..., - foreground: _Color = ..., + foreground: str = ..., format: str = ..., from_: float = ..., - highlightbackground: _Color = ..., - highlightcolor: _Color = ..., + highlightbackground: str = ..., + highlightcolor: str = ..., highlightthickness: _ScreenUnits = ..., increment: float = ..., - insertbackground: _Color = ..., + insertbackground: str = ..., insertborderwidth: _ScreenUnits = ..., insertofftime: int = ..., insertontime: int = ..., @@ -3345,13 +3335,13 @@ class Spinbox(Widget, XView): invcmd: _EntryValidateCommand = ..., justify: Literal["left", "center", "right"] = ..., name: str = ..., - readonlybackground: _Color = ..., + readonlybackground: str = ..., relief: _Relief = ..., repeatdelay: int = ..., repeatinterval: int = ..., - selectbackground: _Color = ..., + selectbackground: str = ..., selectborderwidth: _ScreenUnits = ..., - selectforeground: _Color = ..., + selectforeground: str = ..., state: Literal["normal", "disabled", "readonly"] = ..., takefocus: _TakeFocusValue = ..., textvariable: Variable = ..., @@ -3369,31 +3359,31 @@ class Spinbox(Widget, XView): self, cnf: dict[str, Any] | None = None, *, - activebackground: _Color = ..., - background: _Color = ..., + activebackground: str = ..., + background: str = ..., bd: _ScreenUnits = ..., - bg: _Color = ..., + bg: str = ..., border: _ScreenUnits = ..., borderwidth: _ScreenUnits = ..., - buttonbackground: _Color = ..., + buttonbackground: str = ..., buttoncursor: _Cursor = ..., buttondownrelief: _Relief = ..., buttonuprelief: _Relief = ..., command: Callable[[], object] | str | list[str] | tuple[str, ...] 
= ..., cursor: _Cursor = ..., - disabledbackground: _Color = ..., - disabledforeground: _Color = ..., + disabledbackground: str = ..., + disabledforeground: str = ..., exportselection: bool = ..., - fg: _Color = ..., + fg: str = ..., font: _FontDescription = ..., - foreground: _Color = ..., + foreground: str = ..., format: str = ..., from_: float = ..., - highlightbackground: _Color = ..., - highlightcolor: _Color = ..., + highlightbackground: str = ..., + highlightcolor: str = ..., highlightthickness: _ScreenUnits = ..., increment: float = ..., - insertbackground: _Color = ..., + insertbackground: str = ..., insertborderwidth: _ScreenUnits = ..., insertofftime: int = ..., insertontime: int = ..., @@ -3401,13 +3391,13 @@ class Spinbox(Widget, XView): invalidcommand: _EntryValidateCommand = ..., invcmd: _EntryValidateCommand = ..., justify: Literal["left", "center", "right"] = ..., - readonlybackground: _Color = ..., + readonlybackground: str = ..., relief: _Relief = ..., repeatdelay: int = ..., repeatinterval: int = ..., - selectbackground: _Color = ..., + selectbackground: str = ..., selectborderwidth: _ScreenUnits = ..., - selectforeground: _Color = ..., + selectforeground: str = ..., state: Literal["normal", "disabled", "readonly"] = ..., takefocus: _TakeFocusValue = ..., textvariable: Variable = ..., @@ -3449,23 +3439,23 @@ class LabelFrame(Widget): def __init__( self, master: Misc | None = None, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = {}, *, - background: _Color = ..., + background: str = ..., bd: _ScreenUnits = ..., - bg: _Color = ..., + bg: str = ..., border: _ScreenUnits = ..., borderwidth: _ScreenUnits = ..., class_: str = ..., # can't be changed with configure() colormap: Literal["new", ""] | Misc = ..., # can't be changed with configure() container: bool = ..., # undocumented, can't be changed with configure() cursor: _Cursor = ..., - fg: _Color = ..., + fg: str = ..., font: _FontDescription = ..., - foreground: _Color = ..., + foreground: str = ..., height: _ScreenUnits = ..., - highlightbackground: _Color = ..., - highlightcolor: _Color = ..., + highlightbackground: str = ..., + highlightcolor: str = ..., highlightthickness: _ScreenUnits = ..., # 'ne' and 'en' are valid labelanchors, but only 'ne' is a valid _Anchor. 
labelanchor: Literal["nw", "n", "ne", "en", "e", "es", "se", "s", "sw", "ws", "w", "wn"] = ..., @@ -3484,18 +3474,18 @@ class LabelFrame(Widget): self, cnf: dict[str, Any] | None = None, *, - background: _Color = ..., + background: str = ..., bd: _ScreenUnits = ..., - bg: _Color = ..., + bg: str = ..., border: _ScreenUnits = ..., borderwidth: _ScreenUnits = ..., cursor: _Cursor = ..., - fg: _Color = ..., + fg: str = ..., font: _FontDescription = ..., - foreground: _Color = ..., + foreground: str = ..., height: _ScreenUnits = ..., - highlightbackground: _Color = ..., - highlightcolor: _Color = ..., + highlightbackground: str = ..., + highlightcolor: str = ..., highlightthickness: _ScreenUnits = ..., labelanchor: Literal["nw", "n", "ne", "en", "e", "es", "se", "s", "sw", "ws", "w", "wn"] = ..., labelwidget: Misc = ..., @@ -3514,11 +3504,11 @@ class PanedWindow(Widget): def __init__( self, master: Misc | None = None, - cnf: dict[str, Any] | None = ..., + cnf: dict[str, Any] | None = {}, *, - background: _Color = ..., + background: str = ..., bd: _ScreenUnits = ..., - bg: _Color = ..., + bg: str = ..., border: _ScreenUnits = ..., borderwidth: _ScreenUnits = ..., cursor: _Cursor = ..., @@ -3528,7 +3518,7 @@ class PanedWindow(Widget): name: str = ..., opaqueresize: bool = ..., orient: Literal["horizontal", "vertical"] = ..., - proxybackground: _Color = ..., + proxybackground: str = ..., proxyborderwidth: _ScreenUnits = ..., proxyrelief: _Relief = ..., relief: _Relief = ..., @@ -3544,9 +3534,9 @@ class PanedWindow(Widget): self, cnf: dict[str, Any] | None = None, *, - background: _Color = ..., + background: str = ..., bd: _ScreenUnits = ..., - bg: _Color = ..., + bg: str = ..., border: _ScreenUnits = ..., borderwidth: _ScreenUnits = ..., cursor: _Cursor = ..., @@ -3555,7 +3545,7 @@ class PanedWindow(Widget): height: _ScreenUnits = ..., opaqueresize: bool = ..., orient: Literal["horizontal", "vertical"] = ..., - proxybackground: _Color = ..., + proxybackground: str = ..., proxyborderwidth: _ScreenUnits = ..., proxyrelief: _Relief = ..., relief: _Relief = ..., diff --git a/mypy/typeshed/stdlib/tkinter/colorchooser.pyi b/mypy/typeshed/stdlib/tkinter/colorchooser.pyi index 4300d94f58e8..09bc8cbb4f1e 100644 --- a/mypy/typeshed/stdlib/tkinter/colorchooser.pyi +++ b/mypy/typeshed/stdlib/tkinter/colorchooser.pyi @@ -1,5 +1,5 @@ import sys -from tkinter import Misc, _Color +from tkinter import Misc from tkinter.commondialog import Dialog from typing import ClassVar @@ -11,10 +11,10 @@ class Chooser(Dialog): if sys.version_info >= (3, 9): def askcolor( - color: str | bytes | None = None, *, initialcolor: _Color = ..., parent: Misc = ..., title: str = ... + color: str | bytes | None = None, *, initialcolor: str = ..., parent: Misc = ..., title: str = ... ) -> tuple[None, None] | tuple[tuple[int, int, int], str]: ... else: def askcolor( - color: str | bytes | None = None, *, initialcolor: _Color = ..., parent: Misc = ..., title: str = ... + color: str | bytes | None = None, *, initialcolor: str = ..., parent: Misc = ..., title: str = ... ) -> tuple[None, None] | tuple[tuple[float, float, float], str]: ... 
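The `colorchooser.askcolor()` stub above keeps the same shape, with `initialcolor` now typed as a plain `str`. A minimal illustrative sketch, not part of the diff and requiring a display plus user interaction, of the call:

```python
# Illustrative sketch only; opens a native dialog, so it needs a display.
import tkinter
from tkinter import colorchooser

root = tkinter.Tk()
root.withdraw()  # the dialog does not need a visible main window

# Color arguments are ordinary strings such as "red" or "#rrggbb".
rgb, hex_code = colorchooser.askcolor(initialcolor="#ff8800", title="Pick a color")
if hex_code is not None:  # (None, None) is returned when the dialog is cancelled
    print(rgb, hex_code)  # e.g. (255, 136, 0) '#ff8800' on Python 3.9+
```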
diff --git a/mypy/typeshed/stdlib/tkinter/dialog.pyi b/mypy/typeshed/stdlib/tkinter/dialog.pyi index 8825188c767e..7bc77ac6d8b5 100644 --- a/mypy/typeshed/stdlib/tkinter/dialog.pyi +++ b/mypy/typeshed/stdlib/tkinter/dialog.pyi @@ -12,5 +12,5 @@ DIALOG_ICON: str class Dialog(Widget): widgetName: str num: int - def __init__(self, master: Incomplete | None = None, cnf: Mapping[str, Any] = ..., **kw: Incomplete) -> None: ... + def __init__(self, master: Incomplete | None = None, cnf: Mapping[str, Any] = {}, **kw: Incomplete) -> None: ... def destroy(self) -> None: ... diff --git a/mypy/typeshed/stdlib/tkinter/simpledialog.pyi b/mypy/typeshed/stdlib/tkinter/simpledialog.pyi index 2c57cce7371c..45dce21a6b1c 100644 --- a/mypy/typeshed/stdlib/tkinter/simpledialog.pyi +++ b/mypy/typeshed/stdlib/tkinter/simpledialog.pyi @@ -14,7 +14,7 @@ class SimpleDialog: self, master: Misc | None, text: str = "", - buttons: list[str] = ..., + buttons: list[str] = [], default: int | None = None, cancel: int | None = None, title: str | None = None, diff --git a/mypy/typeshed/stdlib/tkinter/tix.pyi b/mypy/typeshed/stdlib/tkinter/tix.pyi index 5dd6f040fab7..672c5ab67403 100644 --- a/mypy/typeshed/stdlib/tkinter/tix.pyi +++ b/mypy/typeshed/stdlib/tkinter/tix.pyi @@ -54,117 +54,117 @@ class TixWidget(tkinter.Widget): master: tkinter.Misc | None = None, widgetName: str | None = None, static_options: list[str] | None = None, - cnf: dict[str, Any] = ..., - kw: dict[str, Any] = ..., + cnf: dict[str, Any] = {}, + kw: dict[str, Any] = {}, ) -> None: ... def __getattr__(self, name: str): ... def set_silent(self, value: str) -> None: ... def subwidget(self, name: str) -> tkinter.Widget: ... def subwidgets_all(self) -> list[tkinter.Widget]: ... def config_all(self, option: Any, value: Any) -> None: ... - def image_create(self, imgtype: str, cnf: dict[str, Any] = ..., master: tkinter.Widget | None = None, **kw) -> None: ... + def image_create(self, imgtype: str, cnf: dict[str, Any] = {}, master: tkinter.Widget | None = None, **kw) -> None: ... def image_delete(self, imgname: str) -> None: ... class TixSubWidget(TixWidget): def __init__(self, master: tkinter.Widget, name: str, destroy_physically: int = 1, check_intermediate: int = 1) -> None: ... class DisplayStyle: - def __init__(self, itemtype: str, cnf: dict[str, Any] = ..., *, master: tkinter.Widget | None = None, **kw) -> None: ... + def __init__(self, itemtype: str, cnf: dict[str, Any] = {}, *, master: tkinter.Widget | None = None, **kw) -> None: ... def __getitem__(self, key: str): ... def __setitem__(self, key: str, value: Any) -> None: ... def delete(self) -> None: ... - def config(self, cnf: dict[str, Any] = ..., **kw): ... + def config(self, cnf: dict[str, Any] = {}, **kw): ... class Balloon(TixWidget): - def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = ..., **kw) -> None: ... - def bind_widget(self, widget: tkinter.Widget, cnf: dict[str, Any] = ..., **kw) -> None: ... + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... + def bind_widget(self, widget: tkinter.Widget, cnf: dict[str, Any] = {}, **kw) -> None: ... def unbind_widget(self, widget: tkinter.Widget) -> None: ... class ButtonBox(TixWidget): - def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = ..., **kw) -> None: ... - def add(self, name: str, cnf: dict[str, Any] = ..., **kw) -> tkinter.Widget: ... + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... 
+ def add(self, name: str, cnf: dict[str, Any] = {}, **kw) -> tkinter.Widget: ... def invoke(self, name: str) -> None: ... class ComboBox(TixWidget): - def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = ..., **kw) -> None: ... + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... def add_history(self, str: str) -> None: ... def append_history(self, str: str) -> None: ... def insert(self, index: int, str: str) -> None: ... def pick(self, index: int) -> None: ... class Control(TixWidget): - def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = ..., **kw) -> None: ... + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... def decrement(self) -> None: ... def increment(self) -> None: ... def invoke(self) -> None: ... class LabelEntry(TixWidget): - def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = ..., **kw) -> None: ... + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... class LabelFrame(TixWidget): - def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = ..., **kw) -> None: ... + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... class Meter(TixWidget): - def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = ..., **kw) -> None: ... + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... class OptionMenu(TixWidget): - def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = ..., **kw) -> None: ... - def add_command(self, name: str, cnf: dict[str, Any] = ..., **kw) -> None: ... - def add_separator(self, name: str, cnf: dict[str, Any] = ..., **kw) -> None: ... + def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = {}, **kw) -> None: ... + def add_command(self, name: str, cnf: dict[str, Any] = {}, **kw) -> None: ... + def add_separator(self, name: str, cnf: dict[str, Any] = {}, **kw) -> None: ... def delete(self, name: str) -> None: ... def disable(self, name: str) -> None: ... def enable(self, name: str) -> None: ... class PopupMenu(TixWidget): - def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = ..., **kw) -> None: ... + def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = {}, **kw) -> None: ... def bind_widget(self, widget: tkinter.Widget) -> None: ... def unbind_widget(self, widget: tkinter.Widget) -> None: ... def post_widget(self, widget: tkinter.Widget, x: int, y: int) -> None: ... class Select(TixWidget): - def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = ..., **kw) -> None: ... - def add(self, name: str, cnf: dict[str, Any] = ..., **kw) -> tkinter.Widget: ... + def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = {}, **kw) -> None: ... + def add(self, name: str, cnf: dict[str, Any] = {}, **kw) -> tkinter.Widget: ... def invoke(self, name: str) -> None: ... class StdButtonBox(TixWidget): - def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = ..., **kw) -> None: ... + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... def invoke(self, name: str) -> None: ... class DirList(TixWidget): - def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = ..., **kw) -> None: ... 
+ def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = {}, **kw) -> None: ... def chdir(self, dir: str) -> None: ... class DirTree(TixWidget): - def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = ..., **kw) -> None: ... + def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = {}, **kw) -> None: ... def chdir(self, dir: str) -> None: ... class DirSelectDialog(TixWidget): - def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = ..., **kw) -> None: ... + def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = {}, **kw) -> None: ... def popup(self) -> None: ... def popdown(self) -> None: ... class DirSelectBox(TixWidget): - def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = ..., **kw) -> None: ... + def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = {}, **kw) -> None: ... class ExFileSelectBox(TixWidget): - def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = ..., **kw) -> None: ... + def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = {}, **kw) -> None: ... def filter(self) -> None: ... def invoke(self) -> None: ... class FileSelectBox(TixWidget): - def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = ..., **kw) -> None: ... + def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = {}, **kw) -> None: ... def apply_filter(self) -> None: ... def invoke(self) -> None: ... class FileEntry(TixWidget): - def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = ..., **kw) -> None: ... + def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = {}, **kw) -> None: ... def invoke(self) -> None: ... def file_dialog(self) -> None: ... class HList(TixWidget, tkinter.XView, tkinter.YView): - def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = ..., **kw) -> None: ... - def add(self, entry: str, cnf: dict[str, Any] = ..., **kw) -> tkinter.Widget: ... - def add_child(self, parent: str | None = None, cnf: dict[str, Any] = ..., **kw) -> tkinter.Widget: ... + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... + def add(self, entry: str, cnf: dict[str, Any] = {}, **kw) -> tkinter.Widget: ... + def add_child(self, parent: str | None = None, cnf: dict[str, Any] = {}, **kw) -> tkinter.Widget: ... def anchor_set(self, entry: str) -> None: ... def anchor_clear(self) -> None: ... # FIXME: Overload, certain combos return, others don't @@ -177,16 +177,16 @@ class HList(TixWidget, tkinter.XView, tkinter.YView): def dragsite_clear(self) -> None: ... def dropsite_set(self, index: int) -> None: ... def dropsite_clear(self) -> None: ... - def header_create(self, col: int, cnf: dict[str, Any] = ..., **kw) -> None: ... - def header_configure(self, col: int, cnf: dict[str, Any] = ..., **kw) -> Incomplete | None: ... + def header_create(self, col: int, cnf: dict[str, Any] = {}, **kw) -> None: ... + def header_configure(self, col: int, cnf: dict[str, Any] = {}, **kw) -> Incomplete | None: ... def header_cget(self, col: int, opt): ... def header_exists(self, col: int) -> bool: ... def header_exist(self, col: int) -> bool: ... def header_delete(self, col: int) -> None: ... def header_size(self, col: int) -> int: ... def hide_entry(self, entry: str) -> None: ... - def indicator_create(self, entry: str, cnf: dict[str, Any] = ..., **kw) -> None: ... 
- def indicator_configure(self, entry: str, cnf: dict[str, Any] = ..., **kw) -> Incomplete | None: ... + def indicator_create(self, entry: str, cnf: dict[str, Any] = {}, **kw) -> None: ... + def indicator_configure(self, entry: str, cnf: dict[str, Any] = {}, **kw) -> Incomplete | None: ... def indicator_cget(self, entry: str, opt): ... def indicator_exists(self, entry: str) -> bool: ... def indicator_delete(self, entry: str) -> None: ... @@ -204,21 +204,21 @@ class HList(TixWidget, tkinter.XView, tkinter.YView): def info_prev(self, entry: str) -> str: ... def info_selection(self) -> tuple[str, ...]: ... def item_cget(self, entry: str, col: int, opt): ... - def item_configure(self, entry: str, col: int, cnf: dict[str, Any] = ..., **kw) -> Incomplete | None: ... - def item_create(self, entry: str, col: int, cnf: dict[str, Any] = ..., **kw) -> None: ... + def item_configure(self, entry: str, col: int, cnf: dict[str, Any] = {}, **kw) -> Incomplete | None: ... + def item_create(self, entry: str, col: int, cnf: dict[str, Any] = {}, **kw) -> None: ... def item_exists(self, entry: str, col: int) -> bool: ... def item_delete(self, entry: str, col: int) -> None: ... def entrycget(self, entry: str, opt): ... - def entryconfigure(self, entry: str, cnf: dict[str, Any] = ..., **kw) -> Incomplete | None: ... + def entryconfigure(self, entry: str, cnf: dict[str, Any] = {}, **kw) -> Incomplete | None: ... def nearest(self, y: int) -> str: ... def see(self, entry: str) -> None: ... - def selection_clear(self, cnf: dict[str, Any] = ..., **kw) -> None: ... + def selection_clear(self, cnf: dict[str, Any] = {}, **kw) -> None: ... def selection_includes(self, entry: str) -> bool: ... def selection_set(self, first: str, last: str | None = None) -> None: ... def show_entry(self, entry: str) -> None: ... class CheckList(TixWidget): - def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = ..., **kw) -> None: ... + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... def autosetmode(self) -> None: ... def close(self, entrypath: str) -> None: ... def getmode(self, entrypath: str) -> str: ... @@ -228,7 +228,7 @@ class CheckList(TixWidget): def setstatus(self, entrypath: str, mode: str = "on") -> None: ... class Tree(TixWidget): - def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = ..., **kw) -> None: ... + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... def autosetmode(self) -> None: ... def close(self, entrypath: str) -> None: ... def getmode(self, entrypath: str) -> str: ... @@ -236,7 +236,7 @@ class Tree(TixWidget): def setmode(self, entrypath: str, mode: str = "none") -> None: ... class TList(TixWidget, tkinter.XView, tkinter.YView): - def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = ..., **kw) -> None: ... + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... def active_set(self, index: int) -> None: ... def active_clear(self) -> None: ... def anchor_set(self, index: int) -> None: ... @@ -246,7 +246,7 @@ class TList(TixWidget, tkinter.XView, tkinter.YView): def dragsite_clear(self) -> None: ... def dropsite_set(self, index: int) -> None: ... def dropsite_clear(self) -> None: ... - def insert(self, index: int, cnf: dict[str, Any] = ..., **kw) -> None: ... + def insert(self, index: int, cnf: dict[str, Any] = {}, **kw) -> None: ... def info_active(self) -> int: ... 
def info_anchor(self) -> int: ... def info_down(self, index: int) -> int: ... @@ -257,29 +257,29 @@ class TList(TixWidget, tkinter.XView, tkinter.YView): def info_up(self, index: int) -> int: ... def nearest(self, x: int, y: int) -> int: ... def see(self, index: int) -> None: ... - def selection_clear(self, cnf: dict[str, Any] = ..., **kw) -> None: ... + def selection_clear(self, cnf: dict[str, Any] = {}, **kw) -> None: ... def selection_includes(self, index: int) -> bool: ... def selection_set(self, first: int, last: int | None = None) -> None: ... class PanedWindow(TixWidget): - def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = ..., **kw) -> None: ... - def add(self, name: str, cnf: dict[str, Any] = ..., **kw) -> None: ... + def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = {}, **kw) -> None: ... + def add(self, name: str, cnf: dict[str, Any] = {}, **kw) -> None: ... def delete(self, name: str) -> None: ... def forget(self, name: str) -> None: ... # type: ignore[override] def panecget(self, entry: str, opt): ... - def paneconfigure(self, entry: str, cnf: dict[str, Any] = ..., **kw) -> Incomplete | None: ... + def paneconfigure(self, entry: str, cnf: dict[str, Any] = {}, **kw) -> Incomplete | None: ... def panes(self) -> list[tkinter.Widget]: ... class ListNoteBook(TixWidget): - def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = ..., **kw) -> None: ... - def add(self, name: str, cnf: dict[str, Any] = ..., **kw) -> None: ... + def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = {}, **kw) -> None: ... + def add(self, name: str, cnf: dict[str, Any] = {}, **kw) -> None: ... def page(self, name: str) -> tkinter.Widget: ... def pages(self) -> list[tkinter.Widget]: ... def raise_page(self, name: str) -> None: ... class NoteBook(TixWidget): - def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = ..., **kw) -> None: ... - def add(self, name: str, cnf: dict[str, Any] = ..., **kw) -> None: ... + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... + def add(self, name: str, cnf: dict[str, Any] = {}, **kw) -> None: ... def delete(self, name: str) -> None: ... def page(self, name: str) -> tkinter.Widget: ... def pages(self) -> list[tkinter.Widget]: ... @@ -287,12 +287,12 @@ class NoteBook(TixWidget): def raised(self) -> bool: ... class InputOnly(TixWidget): - def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = ..., **kw) -> None: ... + def __init__(self, master: tkinter.Widget | None = None, cnf: dict[str, Any] = {}, **kw) -> None: ... class Form: def __setitem__(self, key: str, value: Any) -> None: ... - def config(self, cnf: dict[str, Any] = ..., **kw) -> None: ... - def form(self, cnf: dict[str, Any] = ..., **kw) -> None: ... + def config(self, cnf: dict[str, Any] = {}, **kw) -> None: ... + def form(self, cnf: dict[str, Any] = {}, **kw) -> None: ... def check(self) -> bool: ... def forget(self) -> None: ... def grid(self, xsize: int = 0, ysize: int = 0) -> tuple[int, int] | None: ... 
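Throughout the tix hunks above, `cnf: dict[str, Any] = ...` becomes `cnf: dict[str, Any] = {}`. In a `.pyi` stub the default is never evaluated, so spelling out the literal `{}` simply documents the runtime value; the shared-mutable-default pitfall only applies to executable code. A small sketch for contrast (the `Widgetish` class and its method are invented for the example):

```python
from typing import Any

# In a stub file the real default can be written directly, because nothing runs:
#
#     def config(self, cnf: dict[str, Any] = {}, **kw) -> None: ...
#
# In executable code the equivalent signature would share one dict between calls,
# so the usual runtime idiom stays None with an explicit fallback:
class Widgetish:
    def config(self, cnf: dict[str, Any] | None = None, **kw: Any) -> None:
        options = {} if cnf is None else dict(cnf)
        options.update(kw)
        print(options)

Widgetish().config(background="red")  # prints {'background': 'red'}
```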
diff --git a/mypy/typeshed/stdlib/tkinter/ttk.pyi b/mypy/typeshed/stdlib/tkinter/ttk.pyi index 61ebc0e2734f..009fdf51a440 100644 --- a/mypy/typeshed/stdlib/tkinter/ttk.pyi +++ b/mypy/typeshed/stdlib/tkinter/ttk.pyi @@ -177,12 +177,12 @@ class Entry(Widget, tkinter.Entry): master: tkinter.Misc | None = None, widget: str | None = None, *, - background: tkinter._Color = ..., # undocumented + background: str = ..., # undocumented class_: str = ..., cursor: tkinter._Cursor = ..., exportselection: bool = ..., font: _FontDescription = ..., - foreground: tkinter._Color = ..., + foreground: str = ..., invalidcommand: tkinter._EntryValidateCommand = ..., justify: Literal["left", "center", "right"] = ..., name: str = ..., @@ -201,11 +201,11 @@ class Entry(Widget, tkinter.Entry): self, cnf: dict[str, Any] | None = None, *, - background: tkinter._Color = ..., + background: str = ..., cursor: tkinter._Cursor = ..., exportselection: bool = ..., font: _FontDescription = ..., - foreground: tkinter._Color = ..., + foreground: str = ..., invalidcommand: tkinter._EntryValidateCommand = ..., justify: Literal["left", "center", "right"] = ..., show: str = ..., @@ -226,11 +226,11 @@ class Entry(Widget, tkinter.Entry): self, cnf: dict[str, Any] | None = None, *, - background: tkinter._Color = ..., + background: str = ..., cursor: tkinter._Cursor = ..., exportselection: bool = ..., font: _FontDescription = ..., - foreground: tkinter._Color = ..., + foreground: str = ..., invalidcommand: tkinter._EntryValidateCommand = ..., justify: Literal["left", "center", "right"] = ..., show: str = ..., @@ -254,12 +254,12 @@ class Combobox(Entry): self, master: tkinter.Misc | None = None, *, - background: tkinter._Color = ..., # undocumented + background: str = ..., # undocumented class_: str = ..., cursor: tkinter._Cursor = ..., exportselection: bool = ..., font: _FontDescription = ..., # undocumented - foreground: tkinter._Color = ..., # undocumented + foreground: str = ..., # undocumented height: int = ..., invalidcommand: tkinter._EntryValidateCommand = ..., # undocumented justify: Literal["left", "center", "right"] = ..., @@ -281,11 +281,11 @@ class Combobox(Entry): self, cnf: dict[str, Any] | None = None, *, - background: tkinter._Color = ..., + background: str = ..., cursor: tkinter._Cursor = ..., exportselection: bool = ..., font: _FontDescription = ..., - foreground: tkinter._Color = ..., + foreground: str = ..., height: int = ..., invalidcommand: tkinter._EntryValidateCommand = ..., justify: Literal["left", "center", "right"] = ..., @@ -309,11 +309,11 @@ class Combobox(Entry): self, cnf: dict[str, Any] | None = None, *, - background: tkinter._Color = ..., + background: str = ..., cursor: tkinter._Cursor = ..., exportselection: bool = ..., font: _FontDescription = ..., - foreground: tkinter._Color = ..., + foreground: str = ..., height: int = ..., invalidcommand: tkinter._EntryValidateCommand = ..., justify: Literal["left", "center", "right"] = ..., @@ -376,14 +376,14 @@ class Label(Widget): master: tkinter.Misc | None = None, *, anchor: tkinter._Anchor = ..., - background: tkinter._Color = ..., + background: str = ..., border: tkinter._ScreenUnits = ..., # alias for borderwidth borderwidth: tkinter._ScreenUnits = ..., # undocumented class_: str = ..., compound: _TtkCompound = ..., cursor: tkinter._Cursor = ..., font: _FontDescription = ..., - foreground: tkinter._Color = ..., + foreground: str = ..., image: tkinter._ImageSpec = ..., justify: Literal["left", "center", "right"] = ..., name: str = ..., @@ -404,13 +404,13 
@@ class Label(Widget): cnf: dict[str, Any] | None = None, *, anchor: tkinter._Anchor = ..., - background: tkinter._Color = ..., + background: str = ..., border: tkinter._ScreenUnits = ..., borderwidth: tkinter._ScreenUnits = ..., compound: _TtkCompound = ..., cursor: tkinter._Cursor = ..., font: _FontDescription = ..., - foreground: tkinter._Color = ..., + foreground: str = ..., image: tkinter._ImageSpec = ..., justify: Literal["left", "center", "right"] = ..., padding: _Padding = ..., @@ -865,13 +865,13 @@ class Spinbox(Entry): self, master: tkinter.Misc | None = None, *, - background: tkinter._Color = ..., # undocumented + background: str = ..., # undocumented class_: str = ..., command: Callable[[], object] | str | list[str] | tuple[str, ...] = ..., cursor: tkinter._Cursor = ..., exportselection: bool = ..., # undocumented font: _FontDescription = ..., # undocumented - foreground: tkinter._Color = ..., # undocumented + foreground: str = ..., # undocumented format: str = ..., from_: float = ..., increment: float = ..., @@ -896,12 +896,12 @@ class Spinbox(Entry): self, cnf: dict[str, Any] | None = None, *, - background: tkinter._Color = ..., + background: str = ..., command: Callable[[], object] | str | list[str] | tuple[str, ...] = ..., cursor: tkinter._Cursor = ..., exportselection: bool = ..., font: _FontDescription = ..., - foreground: tkinter._Color = ..., + foreground: str = ..., format: str = ..., from_: float = ..., increment: float = ..., @@ -934,8 +934,8 @@ class _TreeviewItemDict(TypedDict): class _TreeviewTagDict(TypedDict): # There is also 'text' and 'anchor', but they don't seem to do anything, using them is likely a bug - foreground: tkinter._Color - background: tkinter._Color + foreground: str + background: str font: _FontDescription image: str # not wrapped in list :D @@ -1130,7 +1130,7 @@ class Treeview(Widget, tkinter.XView, tkinter.YView): @overload def tag_bind(self, tagname: str, *, callback: str) -> None: ... @overload - def tag_configure(self, tagname: str, option: Literal["foreground", "background"]) -> tkinter._Color: ... + def tag_configure(self, tagname: str, option: Literal["foreground", "background"]) -> str: ... @overload def tag_configure(self, tagname: str, option: Literal["font"]) -> _FontDescription: ... @overload @@ -1142,8 +1142,8 @@ class Treeview(Widget, tkinter.XView, tkinter.YView): option: None = None, *, # There is also 'text' and 'anchor', but they don't seem to do anything, using them is likely a bug - foreground: tkinter._Color = ..., - background: tkinter._Color = ..., + foreground: str = ..., + background: str = ..., font: _FontDescription = ..., image: tkinter._ImageSpec = ..., ) -> _TreeviewTagDict | Any: ... # can be None but annoying to check diff --git a/mypy/typeshed/stdlib/trace.pyi b/mypy/typeshed/stdlib/trace.pyi index f79b38f1ce82..3764a5b06024 100644 --- a/mypy/typeshed/stdlib/trace.pyi +++ b/mypy/typeshed/stdlib/trace.pyi @@ -34,8 +34,8 @@ class Trace: trace: int = 1, countfuncs: int = 0, countcallers: int = 0, - ignoremods: Sequence[str] = ..., - ignoredirs: Sequence[str] = ..., + ignoremods: Sequence[str] = (), + ignoredirs: Sequence[str] = (), infile: StrPath | None = None, outfile: StrPath | None = None, timing: bool = False, diff --git a/mypy/typeshed/stdlib/turtle.pyi b/mypy/typeshed/stdlib/turtle.pyi index 8017c8290fb9..5df3e4b90cb5 100644 --- a/mypy/typeshed/stdlib/turtle.pyi +++ b/mypy/typeshed/stdlib/turtle.pyi @@ -405,7 +405,9 @@ class RawTurtle(TPen, TNavigator): def begin_fill(self) -> None: ... 
def end_fill(self) -> None: ... def dot(self, size: int | None = None, *color: _Color) -> None: ... - def write(self, arg: object, move: bool = False, align: str = "left", font: tuple[str, int, str] = ...) -> None: ... + def write( + self, arg: object, move: bool = False, align: str = "left", font: tuple[str, int, str] = ("Arial", 8, "normal") + ) -> None: ... def begin_poly(self) -> None: ... def end_poly(self) -> None: ... def get_poly(self) -> _PolygonCoords | None: ... @@ -674,7 +676,7 @@ def filling() -> bool: ... def begin_fill() -> None: ... def end_fill() -> None: ... def dot(size: int | None = None, *color: _Color) -> None: ... -def write(arg: object, move: bool = False, align: str = "left", font: tuple[str, int, str] = ...) -> None: ... +def write(arg: object, move: bool = False, align: str = "left", font: tuple[str, int, str] = ("Arial", 8, "normal")) -> None: ... def begin_poly() -> None: ... def end_poly() -> None: ... def get_poly() -> _PolygonCoords | None: ... diff --git a/mypy/typeshed/stdlib/types.pyi b/mypy/typeshed/stdlib/types.pyi index 2b3e58b8a7f6..43475d91279d 100644 --- a/mypy/typeshed/stdlib/types.pyi +++ b/mypy/typeshed/stdlib/types.pyi @@ -56,6 +56,9 @@ if sys.version_info >= (3, 9): if sys.version_info >= (3, 10): __all__ += ["EllipsisType", "NoneType", "NotImplementedType", "UnionType"] +if sys.version_info >= (3, 12): + __all__ += ["get_original_bases"] + # Note, all classes "defined" here require special handling. _T1 = TypeVar("_T1") @@ -554,15 +557,18 @@ class MemberDescriptorType: def new_class( name: str, - bases: Iterable[object] = ..., + bases: Iterable[object] = (), kwds: dict[str, Any] | None = None, exec_body: Callable[[dict[str, Any]], object] | None = None, ) -> type: ... def resolve_bases(bases: Iterable[object]) -> tuple[Any, ...]: ... def prepare_class( - name: str, bases: tuple[type, ...] = ..., kwds: dict[str, Any] | None = None + name: str, bases: tuple[type, ...] = (), kwds: dict[str, Any] | None = None ) -> tuple[type, dict[str, Any], dict[str, Any]]: ... +if sys.version_info >= (3, 12): + def get_original_bases(__cls: type) -> tuple[Any, ...]: ... + # Actually a different type, but `property` is special and we want that too. DynamicClassAttribute = property diff --git a/mypy/typeshed/stdlib/typing.pyi b/mypy/typeshed/stdlib/typing.pyi index efd61ad8bf43..6fc677dcbdc9 100644 --- a/mypy/typeshed/stdlib/typing.pyi +++ b/mypy/typeshed/stdlib/typing.pyi @@ -2,7 +2,7 @@ import collections # Needed by aliases like DefaultDict, see mypy issue 2986 import sys import typing_extensions from _collections_abc import dict_items, dict_keys, dict_values -from _typeshed import IdentityFunction, Incomplete, SupportsKeysAndGetItem +from _typeshed import IdentityFunction, Incomplete, ReadableBuffer, SupportsKeysAndGetItem from abc import ABCMeta, abstractmethod from contextlib import AbstractAsyncContextManager, AbstractContextManager from re import Match as Match, Pattern as Pattern @@ -167,20 +167,14 @@ _T = TypeVar("_T") def overload(func: _F) -> _F: ... -# Unlike the vast majority module-level objects in stub files, -# these `_SpecialForm` objects in typing need the default value `= ...`, -# due to the fact that they are used elswhere in the same file. -# Otherwise, flake8 erroneously flags them as undefined. -# `_SpecialForm` objects in typing.py that are not used elswhere in the same file -# do not need the default value assignment. -Union: _SpecialForm = ... -Generic: _SpecialForm = ... 
+Union: _SpecialForm +Generic: _SpecialForm # Protocol is only present in 3.8 and later, but mypy needs it unconditionally -Protocol: _SpecialForm = ... -Callable: _SpecialForm = ... -Type: _SpecialForm = ... -NoReturn: _SpecialForm = ... -ClassVar: _SpecialForm = ... +Protocol: _SpecialForm +Callable: _SpecialForm +Type: _SpecialForm +NoReturn: _SpecialForm +ClassVar: _SpecialForm Optional: _SpecialForm Tuple: _SpecialForm @@ -193,7 +187,7 @@ if sys.version_info >= (3, 8): if sys.version_info >= (3, 11): Self: _SpecialForm - Never: _SpecialForm = ... + Never: _SpecialForm Unpack: _SpecialForm Required: _SpecialForm NotRequired: _SpecialForm @@ -687,8 +681,22 @@ class IO(Iterator[AnyStr], Generic[AnyStr]): @abstractmethod def writable(self) -> bool: ... @abstractmethod + @overload + def write(self: IO[str], __s: str) -> int: ... + @abstractmethod + @overload + def write(self: IO[bytes], __s: ReadableBuffer) -> int: ... + @abstractmethod + @overload def write(self, __s: AnyStr) -> int: ... @abstractmethod + @overload + def writelines(self: IO[str], __lines: Iterable[str]) -> None: ... + @abstractmethod + @overload + def writelines(self: IO[bytes], __lines: Iterable[ReadableBuffer]) -> None: ... + @abstractmethod + @overload def writelines(self, __lines: Iterable[AnyStr]) -> None: ... @abstractmethod def __next__(self) -> AnyStr: ... @@ -784,7 +792,7 @@ if sys.version_info >= (3, 11): order_default: bool = False, kw_only_default: bool = False, frozen_default: bool = False, # on 3.11, runtime accepts it as part of kwargs - field_specifiers: tuple[type[Any] | Callable[..., Any], ...] = ..., + field_specifiers: tuple[type[Any] | Callable[..., Any], ...] = (), **kwargs: Any, ) -> IdentityFunction: ... diff --git a/mypy/typeshed/stdlib/typing_extensions.pyi b/mypy/typeshed/stdlib/typing_extensions.pyi index 3233acc99f3e..d567d8b96faf 100644 --- a/mypy/typeshed/stdlib/typing_extensions.pyi +++ b/mypy/typeshed/stdlib/typing_extensions.pyi @@ -70,6 +70,7 @@ __all__ = [ "assert_never", "assert_type", "dataclass_transform", + "deprecated", "final", "IntVar", "is_typeddict", @@ -112,7 +113,7 @@ class _SpecialForm: # typing.Protocol and typing_extensions.Protocol so they can properly # warn users about potential runtime exceptions when using typing.Protocol # on older versions of Python. -Protocol: _SpecialForm = ... +Protocol: _SpecialForm def runtime_checkable(cls: _TC) -> _TC: ... @@ -226,7 +227,7 @@ if sys.version_info >= (3, 11): ) else: Self: _SpecialForm - Never: _SpecialForm = ... + Never: _SpecialForm def reveal_type(__obj: _T) -> _T: ... def assert_never(__arg: Never) -> Never: ... def assert_type(__val: _T, __typ: Any) -> _T: ... @@ -244,7 +245,7 @@ else: order_default: bool = False, kw_only_default: bool = False, frozen_default: bool = False, - field_specifiers: tuple[type[Any] | Callable[..., Any], ...] = ..., + field_specifiers: tuple[type[Any] | Callable[..., Any], ...] = (), **kwargs: object, ) -> IdentityFunction: ... @@ -326,3 +327,4 @@ class TypeVarTuple: def __iter__(self) -> Any: ... # Unpack[Self] def override(__arg: _F) -> _F: ... +def deprecated(__msg: str, *, category: type[Warning] | None = ..., stacklevel: int = 1) -> Callable[[_T], _T]: ... 
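The typing_extensions stub above gains a `deprecated` decorator (PEP 702) alongside the removal of the `= ...` defaults on the `_SpecialForm` objects. As a rough sketch of how code checked against the new stub can use it (the `old_api`/`new_api` names are invented, and running it needs a typing_extensions release recent enough to ship `deprecated` at runtime):

```python
from typing_extensions import deprecated


def new_api() -> int:
    return 42


@deprecated("old_api() is deprecated; call new_api() instead")
def old_api() -> int:
    # Calling this emits a DeprecationWarning at runtime; type checkers that
    # understand the stub can also flag call sites statically.
    return new_api()


print(old_api())  # still works, but warns and is reported by the checker
```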
diff --git a/mypy/typeshed/stdlib/unittest/mock.pyi b/mypy/typeshed/stdlib/unittest/mock.pyi index 953480549fb2..1f554da52d5d 100644 --- a/mypy/typeshed/stdlib/unittest/mock.pyi +++ b/mypy/typeshed/stdlib/unittest/mock.pyi @@ -47,7 +47,8 @@ else: "seal", ) -__version__: Final[str] +if sys.version_info < (3, 9): + __version__: Final[str] FILTER_DIR: Any @@ -67,14 +68,14 @@ _CallValue: TypeAlias = str | tuple[Any, ...] | Mapping[str, Any] | _ArgsKwargs class _Call(tuple[Any, ...]): def __new__( - cls, value: _CallValue = ..., name: str | None = "", parent: Any | None = None, two: bool = False, from_kall: bool = True + cls, value: _CallValue = (), name: str | None = "", parent: Any | None = None, two: bool = False, from_kall: bool = True ) -> Self: ... name: Any parent: Any from_kall: Any def __init__( self, - value: _CallValue = ..., + value: _CallValue = (), name: str | None = None, parent: Any | None = None, two: bool = False, @@ -259,7 +260,7 @@ class _patch_dict: in_dict: Any values: Any clear: Any - def __init__(self, in_dict: Any, values: Any = ..., clear: Any = False, **kwargs: Any) -> None: ... + def __init__(self, in_dict: Any, values: Any = (), clear: Any = False, **kwargs: Any) -> None: ... def __call__(self, f: Any) -> Any: ... if sys.version_info >= (3, 10): def decorate_callable(self, f: _F) -> _F: ... diff --git a/mypy/typeshed/stdlib/unittest/suite.pyi b/mypy/typeshed/stdlib/unittest/suite.pyi index f6b8ef003518..c10cbc75d7fd 100644 --- a/mypy/typeshed/stdlib/unittest/suite.pyi +++ b/mypy/typeshed/stdlib/unittest/suite.pyi @@ -8,7 +8,7 @@ _TestType: TypeAlias = unittest.case.TestCase | TestSuite class BaseTestSuite(Iterable[_TestType]): _tests: list[unittest.case.TestCase] _removed_tests: int - def __init__(self, tests: Iterable[_TestType] = ...) -> None: ... + def __init__(self, tests: Iterable[_TestType] = ()) -> None: ... def __call__(self, result: unittest.result.TestResult) -> unittest.result.TestResult: ... def addTest(self, test: _TestType) -> None: ... def addTests(self, tests: Iterable[_TestType]) -> None: ... diff --git a/mypy/typeshed/stdlib/urllib/parse.pyi b/mypy/typeshed/stdlib/urllib/parse.pyi index 8e179ca765b1..116754091d1a 100644 --- a/mypy/typeshed/stdlib/urllib/parse.pyi +++ b/mypy/typeshed/stdlib/urllib/parse.pyi @@ -196,14 +196,15 @@ else: url: bytes | bytearray | None, scheme: bytes | bytearray | None | Literal[""] = "", allow_fragments: bool = True ) -> SplitResultBytes: ... +# Requires an iterable of length 6 @overload -def urlunparse( - components: tuple[AnyStr | None, AnyStr | None, AnyStr | None, AnyStr | None, AnyStr | None, AnyStr | None] -) -> AnyStr: ... +def urlunparse(components: Iterable[None]) -> Literal[b""]: ... @overload -def urlunparse(components: Sequence[AnyStr | None]) -> AnyStr: ... +def urlunparse(components: Iterable[AnyStr | None]) -> AnyStr: ... + +# Requires an iterable of length 5 @overload -def urlunsplit(components: tuple[AnyStr | None, AnyStr | None, AnyStr | None, AnyStr | None, AnyStr | None]) -> AnyStr: ... +def urlunsplit(components: Iterable[None]) -> Literal[b""]: ... @overload -def urlunsplit(components: Sequence[AnyStr | None]) -> AnyStr: ... +def urlunsplit(components: Iterable[AnyStr | None]) -> AnyStr: ... def unwrap(url: str) -> str: ... 
diff --git a/mypy/typeshed/stdlib/urllib/request.pyi b/mypy/typeshed/stdlib/urllib/request.pyi index 09ce27961999..8f99c5837871 100644 --- a/mypy/typeshed/stdlib/urllib/request.pyi +++ b/mypy/typeshed/stdlib/urllib/request.pyi @@ -102,7 +102,7 @@ class Request: self, url: str, data: _DataType = None, - headers: MutableMapping[str, str] = ..., + headers: MutableMapping[str, str] = {}, origin_req_host: str | None = None, unverifiable: bool = False, method: str | None = None, diff --git a/mypy/typeshed/stdlib/uuid.pyi b/mypy/typeshed/stdlib/uuid.pyi index 249257783626..935e44e80dfa 100644 --- a/mypy/typeshed/stdlib/uuid.pyi +++ b/mypy/typeshed/stdlib/uuid.pyi @@ -73,9 +73,20 @@ else: def getnode(*, getters: Unused = None) -> int: ... # undocumented def uuid1(node: _Int | None = None, clock_seq: _Int | None = None) -> UUID: ... -def uuid3(namespace: UUID, name: str) -> UUID: ... + +if sys.version_info >= (3, 12): + def uuid3(namespace: UUID, name: str | bytes) -> UUID: ... + +else: + def uuid3(namespace: UUID, name: str) -> UUID: ... + def uuid4() -> UUID: ... -def uuid5(namespace: UUID, name: str) -> UUID: ... + +if sys.version_info >= (3, 12): + def uuid5(namespace: UUID, name: str | bytes) -> UUID: ... + +else: + def uuid5(namespace: UUID, name: str) -> UUID: ... NAMESPACE_DNS: UUID NAMESPACE_URL: UUID diff --git a/mypy/typeshed/stdlib/weakref.pyi b/mypy/typeshed/stdlib/weakref.pyi index 1e0aac814dfb..0bbab52f9b08 100644 --- a/mypy/typeshed/stdlib/weakref.pyi +++ b/mypy/typeshed/stdlib/weakref.pyi @@ -53,7 +53,7 @@ class WeakValueDictionary(MutableMapping[_KT, _VT]): def __init__(self: WeakValueDictionary[_KT, _VT], __other: Mapping[_KT, _VT] | Iterable[tuple[_KT, _VT]]) -> None: ... @overload def __init__( - self: WeakValueDictionary[str, _VT], __other: Mapping[str, _VT] | Iterable[tuple[str, _VT]] = ..., **kwargs: _VT + self: WeakValueDictionary[str, _VT], __other: Mapping[str, _VT] | Iterable[tuple[str, _VT]] = (), **kwargs: _VT ) -> None: ... def __len__(self) -> int: ... def __getitem__(self, key: _KT) -> _VT: ... diff --git a/mypy/typeshed/stdlib/xml/dom/domreg.pyi b/mypy/typeshed/stdlib/xml/dom/domreg.pyi index a46d3ff090e6..346a4bf63bd4 100644 --- a/mypy/typeshed/stdlib/xml/dom/domreg.pyi +++ b/mypy/typeshed/stdlib/xml/dom/domreg.pyi @@ -5,6 +5,4 @@ well_known_implementations: dict[str, str] registered: dict[str, Callable[[], DOMImplementation]] def registerDOMImplementation(name: str, factory: Callable[[], DOMImplementation]) -> None: ... -def getDOMImplementation( - name: str | None = None, features: str | Iterable[tuple[str, str | None]] = ... -) -> DOMImplementation: ... +def getDOMImplementation(name: str | None = None, features: str | Iterable[tuple[str, str | None]] = ()) -> DOMImplementation: ... diff --git a/mypy/typeshed/stdlib/xml/dom/minicompat.pyi b/mypy/typeshed/stdlib/xml/dom/minicompat.pyi index 4507b3d98ee7..4d83bef025d9 100644 --- a/mypy/typeshed/stdlib/xml/dom/minicompat.pyi +++ b/mypy/typeshed/stdlib/xml/dom/minicompat.pyi @@ -1,5 +1,6 @@ from collections.abc import Iterable from typing import Any, TypeVar +from typing_extensions import Literal __all__ = ["NodeList", "EmptyNodeList", "StringTypes", "defproperty"] @@ -8,11 +9,13 @@ _T = TypeVar("_T") StringTypes: tuple[type[str]] class NodeList(list[_T]): - length: int + @property + def length(self) -> int: ... def item(self, index: int) -> _T | None: ... -class EmptyNodeList(tuple[Any, ...]): - length: int +class EmptyNodeList(tuple[()]): + @property + def length(self) -> Literal[0]: ... 
def item(self, index: int) -> None: ... def __add__(self, other: Iterable[_T]) -> NodeList[_T]: ... # type: ignore[override] def __radd__(self, other: Iterable[_T]) -> NodeList[_T]: ... diff --git a/mypy/typeshed/stdlib/xml/dom/minidom.pyi b/mypy/typeshed/stdlib/xml/dom/minidom.pyi index ecc7bb6bcdf7..ec17f0a41497 100644 --- a/mypy/typeshed/stdlib/xml/dom/minidom.pyi +++ b/mypy/typeshed/stdlib/xml/dom/minidom.pyi @@ -1,7 +1,7 @@ import sys import xml.dom from _typeshed import Incomplete, ReadableBuffer, SupportsRead, SupportsWrite -from typing import NoReturn, TypeVar +from typing import NoReturn, TypeVar, overload from typing_extensions import Literal, Self from xml.dom.minicompat import NodeList from xml.dom.xmlbuilder import DocumentLS, DOMImplementationLS @@ -9,8 +9,10 @@ from xml.sax.xmlreader import XMLReader _N = TypeVar("_N", bound=Node) -def parse(file: str | SupportsRead[ReadableBuffer | str], parser: XMLReader | None = None, bufsize: int | None = None): ... -def parseString(string: str | ReadableBuffer, parser: XMLReader | None = None): ... +def parse( + file: str | SupportsRead[ReadableBuffer | str], parser: XMLReader | None = None, bufsize: int | None = None +) -> Document: ... +def parseString(string: str | ReadableBuffer, parser: XMLReader | None = None) -> Document: ... def getDOMImplementation(features=None) -> DOMImplementation | None: ... class Node(xml.dom.Node): @@ -28,13 +30,69 @@ class Node(xml.dom.Node): def localName(self) -> str | None: ... def __bool__(self) -> Literal[True]: ... if sys.version_info >= (3, 9): - def toxml(self, encoding: str | None = None, standalone: bool | None = None): ... + @overload + def toxml(self, encoding: str, standalone: bool | None = None) -> bytes: ... + @overload + def toxml(self, encoding: None = None, standalone: bool | None = None) -> str: ... + @overload def toprettyxml( - self, indent: str = "\t", newl: str = "\n", encoding: str | None = None, standalone: bool | None = None - ): ... + self, + indent: str = "\t", + newl: str = "\n", + # Handle any case where encoding is not provided or where it is passed with None + encoding: None = None, + standalone: bool | None = None, + ) -> str: ... + @overload + def toprettyxml( + self, + indent: str, + newl: str, + # Handle cases where encoding is passed as str *positionally* + encoding: str, + standalone: bool | None = None, + ) -> bytes: ... + @overload + def toprettyxml( + self, + indent: str = "\t", + newl: str = "\n", + # Handle all cases where encoding is passed as a keyword argument; because standalone + # comes after, it will also have to be a keyword arg if encoding is + *, + encoding: str, + standalone: bool | None = None, + ) -> bytes: ... else: - def toxml(self, encoding: str | None = None): ... - def toprettyxml(self, indent: str = "\t", newl: str = "\n", encoding: str | None = None): ... + @overload + def toxml(self, encoding: str) -> bytes: ... + @overload + def toxml(self, encoding: None = None) -> str: ... + @overload + def toprettyxml( + self, + indent: str = "\t", + newl: str = "\n", + # Handle any case where encoding is not provided or where it is passed with None + encoding: None = None, + ) -> str: ... + @overload + def toprettyxml( + self, + indent: str, + newl: str, + # Handle cases where encoding is passed as str *positionally* + encoding: str, + ) -> bytes: ... + @overload + def toprettyxml( + self, + indent: str = "\t", + newl: str = "\n", + # Handle all cases where encoding is passed as a keyword argument + *, + encoding: str, + ) -> bytes: ... 
def hasChildNodes(self) -> bool: ... def insertBefore(self, newChild, refChild): ... @@ -147,8 +205,8 @@ class Element(Node): removeAttributeNodeNS: Incomplete def hasAttribute(self, name: str) -> bool: ... def hasAttributeNS(self, namespaceURI: str, localName) -> bool: ... - def getElementsByTagName(self, name: str) -> NodeList[Node]: ... - def getElementsByTagNameNS(self, namespaceURI: str, localName): ... + def getElementsByTagName(self, name: str) -> NodeList[Element]: ... + def getElementsByTagNameNS(self, namespaceURI: str, localName: str) -> NodeList[Element]: ... def writexml(self, writer: SupportsWrite[str], indent: str = "", addindent: str = "", newl: str = "") -> None: ... def hasAttributes(self) -> bool: ... def setIdAttribute(self, name) -> None: ... @@ -218,7 +276,7 @@ class CDATASection(Text): def writexml(self, writer: SupportsWrite[str], indent: str = "", addindent: str = "", newl: str = "") -> None: ... class ReadOnlySequentialNamedNodeMap: - def __init__(self, seq=...) -> None: ... + def __init__(self, seq=()) -> None: ... def __len__(self) -> int: ... def getNamedItem(self, name): ... def getNamedItemNS(self, namespaceURI: str, localName): ... @@ -318,9 +376,9 @@ class Document(Node, DocumentLS): def createAttribute(self, qName) -> Attr: ... def createElementNS(self, namespaceURI: str, qualifiedName: str): ... def createAttributeNS(self, namespaceURI: str, qualifiedName: str) -> Attr: ... - def getElementById(self, id): ... - def getElementsByTagName(self, name: str) -> NodeList[Node]: ... - def getElementsByTagNameNS(self, namespaceURI: str, localName): ... + def getElementById(self, id: str) -> Element | None: ... + def getElementsByTagName(self, name: str) -> NodeList[Element]: ... + def getElementsByTagNameNS(self, namespaceURI: str, localName: str) -> NodeList[Element]: ... def isSupported(self, feature: str, version: str | None) -> bool: ... def importNode(self, node, deep): ... if sys.version_info >= (3, 9): diff --git a/mypy/typeshed/stdlib/xml/sax/__init__.pyi b/mypy/typeshed/stdlib/xml/sax/__init__.pyi index ca981a00d25f..8bcf902df8d8 100644 --- a/mypy/typeshed/stdlib/xml/sax/__init__.pyi +++ b/mypy/typeshed/stdlib/xml/sax/__init__.pyi @@ -28,7 +28,7 @@ class SAXReaderNotAvailable(SAXNotSupportedException): ... default_parser_list: list[str] if sys.version_info >= (3, 8): - def make_parser(parser_list: Iterable[str] = ...) -> XMLReader: ... + def make_parser(parser_list: Iterable[str] = ()) -> XMLReader: ... def parse( source: StrPath | _SupportsReadClose[bytes] | _SupportsReadClose[str], handler: ContentHandler, @@ -36,7 +36,7 @@ if sys.version_info >= (3, 8): ) -> None: ... else: - def make_parser(parser_list: list[str] = ...) -> XMLReader: ... + def make_parser(parser_list: list[str] = []) -> XMLReader: ... def parse( source: str | _SupportsReadClose[bytes] | _SupportsReadClose[str], handler: ContentHandler, diff --git a/mypy/typeshed/stdlib/xml/sax/saxutils.pyi b/mypy/typeshed/stdlib/xml/sax/saxutils.pyi index 67a06d2fcda2..0d9223770c6a 100644 --- a/mypy/typeshed/stdlib/xml/sax/saxutils.pyi +++ b/mypy/typeshed/stdlib/xml/sax/saxutils.pyi @@ -4,9 +4,9 @@ from collections.abc import Mapping from io import RawIOBase, TextIOBase from xml.sax import handler, xmlreader -def escape(data: str, entities: Mapping[str, str] = ...) -> str: ... -def unescape(data: str, entities: Mapping[str, str] = ...) -> str: ... -def quoteattr(data: str, entities: Mapping[str, str] = ...) -> str: ... +def escape(data: str, entities: Mapping[str, str] = {}) -> str: ... 
+def unescape(data: str, entities: Mapping[str, str] = {}) -> str: ... +def quoteattr(data: str, entities: Mapping[str, str] = {}) -> str: ... class XMLGenerator(handler.ContentHandler): def __init__( diff --git a/mypy/typeshed/stdlib/xmlrpc/client.pyi b/mypy/typeshed/stdlib/xmlrpc/client.pyi index 7bf701ae716d..8c32f3080749 100644 --- a/mypy/typeshed/stdlib/xmlrpc/client.pyi +++ b/mypy/typeshed/stdlib/xmlrpc/client.pyi @@ -230,7 +230,7 @@ class Transport: if sys.version_info >= (3, 8): def __init__( - self, use_datetime: bool = False, use_builtin_types: bool = False, *, headers: Iterable[tuple[str, str]] = ... + self, use_datetime: bool = False, use_builtin_types: bool = False, *, headers: Iterable[tuple[str, str]] = () ) -> None: ... else: def __init__(self, use_datetime: bool = False, use_builtin_types: bool = False) -> None: ... @@ -259,7 +259,7 @@ class SafeTransport(Transport): use_datetime: bool = False, use_builtin_types: bool = False, *, - headers: Iterable[tuple[str, str]] = ..., + headers: Iterable[tuple[str, str]] = (), context: Any | None = None, ) -> None: ... else: @@ -288,7 +288,7 @@ class ServerProxy: use_datetime: bool = False, use_builtin_types: bool = False, *, - headers: Iterable[tuple[str, str]] = ..., + headers: Iterable[tuple[str, str]] = (), context: Any | None = None, ) -> None: ... else: diff --git a/mypy/typeshed/stdlib/xmlrpc/server.pyi b/mypy/typeshed/stdlib/xmlrpc/server.pyi index 800c205513c6..832fe265e0a5 100644 --- a/mypy/typeshed/stdlib/xmlrpc/server.pyi +++ b/mypy/typeshed/stdlib/xmlrpc/server.pyi @@ -108,9 +108,9 @@ class ServerHTMLDoc(pydoc.HTMLDoc): # undocumented object: object, name: str, mod: str | None = None, - funcs: Mapping[str, str] = ..., - classes: Mapping[str, str] = ..., - methods: Mapping[str, str] = ..., + funcs: Mapping[str, str] = {}, + classes: Mapping[str, str] = {}, + methods: Mapping[str, str] = {}, cl: type | None = None, ) -> str: ... def docserver(self, server_name: str, package_documentation: str, methods: dict[str, str]) -> str: ... diff --git a/mypy/typeshed/stdlib/zipfile.pyi b/mypy/typeshed/stdlib/zipfile.pyi index b969d0cf9e6a..92f1dc49adbc 100644 --- a/mypy/typeshed/stdlib/zipfile.pyi +++ b/mypy/typeshed/stdlib/zipfile.pyi @@ -211,7 +211,7 @@ class ZipInfo: compress_size: int file_size: int orig_filename: str # undocumented - def __init__(self, filename: str = "NoName", date_time: _DateTuple = ...) -> None: ... + def __init__(self, filename: str = "NoName", date_time: _DateTuple = (1980, 1, 1, 0, 0, 0)) -> None: ... if sys.version_info >= (3, 8): @classmethod def from_file(cls, filename: StrPath, arcname: StrPath | None = None, *, strict_timestamps: bool = True) -> Self: ... @@ -222,10 +222,11 @@ class ZipInfo: def is_dir(self) -> bool: ... def FileHeader(self, zip64: bool | None = None) -> bytes: ... -class _PathOpenProtocol(Protocol): - def __call__(self, mode: _ReadWriteMode = ..., pwd: bytes | None = ..., *, force_zip64: bool = ...) -> IO[bytes]: ... - if sys.version_info >= (3, 8): + if sys.version_info < (3, 9): + class _PathOpenProtocol(Protocol): + def __call__(self, mode: _ReadWriteMode = "r", pwd: bytes | None = ..., *, force_zip64: bool = ...) -> IO[bytes]: ... + class Path: @property def name(self) -> str: ... @@ -245,7 +246,12 @@ if sys.version_info >= (3, 8): def __init__(self, root: ZipFile | StrPath | IO[bytes], at: str = "") -> None: ... 
if sys.version_info >= (3, 9): def open( - self, mode: _ReadWriteBinaryMode = "r", *args: Any, pwd: bytes | None = None, **kwargs: Any + self, + mode: _ReadWriteBinaryMode = "r", + encoding: str | None = None, + *args: Any, + pwd: bytes | None = None, + **kwargs: Any, ) -> IO[bytes]: ... else: @property diff --git a/mypy/typestate.py b/mypy/typestate.py index 9cbad17aa7bd..9f65481e5e94 100644 --- a/mypy/typestate.py +++ b/mypy/typestate.py @@ -12,6 +12,9 @@ from mypy.server.trigger import make_trigger from mypy.types import Instance, Type, TypeVarId, get_proper_type +MAX_NEGATIVE_CACHE_TYPES: Final = 1000 +MAX_NEGATIVE_CACHE_ENTRIES: Final = 10000 + # Represents that the 'left' instance is a subtype of the 'right' instance SubtypeRelationship: _TypeAlias = Tuple[Instance, Instance] @@ -42,6 +45,9 @@ class TypeState: # We need the caches, since subtype checks for structural types are very slow. _subtype_caches: Final[SubtypeCache] + # Same as above but for negative subtyping results. + _negative_subtype_caches: Final[SubtypeCache] + # This contains protocol dependencies generated after running a full build, # or after an update. These dependencies are special because: # * They are a global property of the program; i.e. some dependencies for imported @@ -95,6 +101,7 @@ class TypeState: def __init__(self) -> None: self._subtype_caches = {} + self._negative_subtype_caches = {} self.proto_deps = {} self._attempted_protocols = {} self._checked_against_members = {} @@ -105,7 +112,7 @@ def __init__(self) -> None: self.infer_unions = False def is_assumed_subtype(self, left: Type, right: Type) -> bool: - for (l, r) in reversed(self._assuming): + for l, r in reversed(self._assuming): if get_proper_type(l) == get_proper_type(left) and get_proper_type( r ) == get_proper_type(right): @@ -113,7 +120,7 @@ def is_assumed_subtype(self, left: Type, right: Type) -> bool: return False def is_assumed_proper_subtype(self, left: Type, right: Type) -> bool: - for (l, r) in reversed(self._assuming_proper): + for l, r in reversed(self._assuming_proper): if get_proper_type(l) == get_proper_type(left) and get_proper_type( r ) == get_proper_type(right): @@ -128,11 +135,14 @@ def get_assumptions(self, is_proper: bool) -> list[tuple[Type, Type]]: def reset_all_subtype_caches(self) -> None: """Completely reset all known subtype caches.""" self._subtype_caches.clear() + self._negative_subtype_caches.clear() def reset_subtype_caches_for(self, info: TypeInfo) -> None: """Reset subtype caches (if any) for a given supertype TypeInfo.""" if info in self._subtype_caches: self._subtype_caches[info].clear() + if info in self._negative_subtype_caches: + self._negative_subtype_caches[info].clear() def reset_all_subtype_caches_for(self, info: TypeInfo) -> None: """Reset subtype caches (if any) for a given supertype TypeInfo and its MRO.""" @@ -154,6 +164,23 @@ def is_cached_subtype_check(self, kind: SubtypeKind, left: Instance, right: Inst return False return (left, right) in subcache + def is_cached_negative_subtype_check( + self, kind: SubtypeKind, left: Instance, right: Instance + ) -> bool: + if left.last_known_value is not None or right.last_known_value is not None: + # If there is a literal last known value, give up. There + # will be an unbounded number of potential types to cache, + # making caching less effective. 
+ return False + info = right.type + cache = self._negative_subtype_caches.get(info) + if cache is None: + return False + subcache = cache.get(kind) + if subcache is None: + return False + return (left, right) in subcache + def record_subtype_cache_entry( self, kind: SubtypeKind, left: Instance, right: Instance ) -> None: @@ -164,6 +191,21 @@ def record_subtype_cache_entry( cache = self._subtype_caches.setdefault(right.type, dict()) cache.setdefault(kind, set()).add((left, right)) + def record_negative_subtype_cache_entry( + self, kind: SubtypeKind, left: Instance, right: Instance + ) -> None: + if left.last_known_value is not None or right.last_known_value is not None: + # These are unlikely to match, due to the large space of + # possible values. Avoid uselessly increasing cache sizes. + return + if len(self._negative_subtype_caches) > MAX_NEGATIVE_CACHE_TYPES: + self._negative_subtype_caches.clear() + cache = self._negative_subtype_caches.setdefault(right.type, dict()) + subcache = cache.setdefault(kind, set()) + if len(subcache) > MAX_NEGATIVE_CACHE_ENTRIES: + subcache.clear() + cache.setdefault(kind, set()).add((left, right)) + def reset_protocol_deps(self) -> None: """Reset dependencies after a full run or before a daemon shutdown.""" self.proto_deps = {} diff --git a/mypy/typevartuples.py b/mypy/typevartuples.py index 29b85dae72eb..cb7650ebb57d 100644 --- a/mypy/typevartuples.py +++ b/mypy/typevartuples.py @@ -52,14 +52,17 @@ def split_with_mapped_and_template( template: tuple[Type, ...], template_prefix_len: int, template_suffix_len: int, -) -> tuple[ - tuple[Type, ...], - tuple[Type, ...], - tuple[Type, ...], - tuple[Type, ...], - tuple[Type, ...], - tuple[Type, ...], -] | None: +) -> ( + tuple[ + tuple[Type, ...], + tuple[Type, ...], + tuple[Type, ...], + tuple[Type, ...], + tuple[Type, ...], + tuple[Type, ...], + ] + | None +): split_result = fully_split_with_mapped_and_template( mapped, mapped_prefix_len, @@ -101,18 +104,21 @@ def fully_split_with_mapped_and_template( template: tuple[Type, ...], template_prefix_len: int, template_suffix_len: int, -) -> tuple[ - tuple[Type, ...], - tuple[Type, ...], - tuple[Type, ...], - tuple[Type, ...], - tuple[Type, ...], - tuple[Type, ...], - tuple[Type, ...], - tuple[Type, ...], - tuple[Type, ...], - tuple[Type, ...], -] | None: +) -> ( + tuple[ + tuple[Type, ...], + tuple[Type, ...], + tuple[Type, ...], + tuple[Type, ...], + tuple[Type, ...], + tuple[Type, ...], + tuple[Type, ...], + tuple[Type, ...], + tuple[Type, ...], + tuple[Type, ...], + ] + | None +): if mapped_prefix_len is not None: assert mapped_suffix_len is not None mapped_prefix, mapped_middle, mapped_suffix = split_with_prefix_and_suffix( diff --git a/mypy/version.py b/mypy/version.py index da01cd54dd4b..826ba0020100 100644 --- a/mypy/version.py +++ b/mypy/version.py @@ -8,7 +8,7 @@ # - Release versions have the form "1.2.3". # - Dev versions have the form "1.2.3+dev" (PLUS sign to conform to PEP 440). # - Before 1.0 we had the form "0.NNN". 
-__version__ = "1.2.0+dev" +__version__ = "1.4.0+dev" base_version = __version__ mypy_dir = os.path.abspath(os.path.dirname(os.path.dirname(__file__))) diff --git a/mypyc/analysis/dataflow.py b/mypyc/analysis/dataflow.py index 21c4da8981d1..ee2ff06b0f03 100644 --- a/mypyc/analysis/dataflow.py +++ b/mypyc/analysis/dataflow.py @@ -18,6 +18,10 @@ ComparisonOp, ControlOp, Extend, + Float, + FloatComparisonOp, + FloatNeg, + FloatOp, GetAttr, GetElementPtr, Goto, @@ -85,7 +89,6 @@ def get_cfg(blocks: list[BasicBlock]) -> CFG: pred_map: dict[BasicBlock, list[BasicBlock]] = {} exits = set() for block in blocks: - assert not any( isinstance(op, ControlOp) for op in block.ops[:-1] ), "Control-flow ops must be at the end of blocks" @@ -142,7 +145,7 @@ def cleanup_cfg(blocks: list[BasicBlock]) -> None: # Then delete any blocks that have no predecessors changed = False cfg = get_cfg(blocks) - orig_blocks = blocks[:] + orig_blocks = blocks.copy() blocks.clear() for i, block in enumerate(orig_blocks): if i == 0 or cfg.pred[block]: @@ -245,9 +248,18 @@ def visit_load_global(self, op: LoadGlobal) -> GenAndKill[T]: def visit_int_op(self, op: IntOp) -> GenAndKill[T]: return self.visit_register_op(op) + def visit_float_op(self, op: FloatOp) -> GenAndKill[T]: + return self.visit_register_op(op) + + def visit_float_neg(self, op: FloatNeg) -> GenAndKill[T]: + return self.visit_register_op(op) + def visit_comparison_op(self, op: ComparisonOp) -> GenAndKill[T]: return self.visit_register_op(op) + def visit_float_comparison_op(self, op: FloatComparisonOp) -> GenAndKill[T]: + return self.visit_register_op(op) + def visit_load_mem(self, op: LoadMem) -> GenAndKill[T]: return self.visit_register_op(op) @@ -444,7 +456,7 @@ def analyze_undefined_regs( def non_trivial_sources(op: Op) -> set[Value]: result = set() for source in op.sources(): - if not isinstance(source, Integer): + if not isinstance(source, (Integer, Float)): result.add(source) return result @@ -454,7 +466,7 @@ def visit_branch(self, op: Branch) -> GenAndKill[Value]: return non_trivial_sources(op), set() def visit_return(self, op: Return) -> GenAndKill[Value]: - if not isinstance(op.value, Integer): + if not isinstance(op.value, (Integer, Float)): return {op.value}, set() else: return set(), set() diff --git a/mypyc/analysis/ircheck.py b/mypyc/analysis/ircheck.py index 719faebfcee8..2e6b7320e898 100644 --- a/mypyc/analysis/ircheck.py +++ b/mypyc/analysis/ircheck.py @@ -16,6 +16,9 @@ ControlOp, DecRef, Extend, + FloatComparisonOp, + FloatNeg, + FloatOp, GetAttr, GetElementPtr, Goto, @@ -43,6 +46,7 @@ TupleSet, Unbox, Unreachable, + Value, ) from mypyc.ir.pprint import format_func from mypyc.ir.rtypes import ( @@ -54,6 +58,7 @@ bytes_rprimitive, dict_rprimitive, int_rprimitive, + is_float_rprimitive, is_object_rprimitive, list_rprimitive, range_rprimitive, @@ -221,6 +226,14 @@ def check_compatibility(self, op: Op, t: RType, s: RType) -> None: if not can_coerce_to(t, s) or not can_coerce_to(s, t): self.fail(source=op, desc=f"{t.name} and {s.name} are not compatible") + def expect_float(self, op: Op, v: Value) -> None: + if not is_float_rprimitive(v.type): + self.fail(op, f"Float expected (actual type is {v.type})") + + def expect_non_float(self, op: Op, v: Value) -> None: + if is_float_rprimitive(v.type): + self.fail(op, "Float not expected") + def visit_goto(self, op: Goto) -> None: self.check_control_op_targets(op) @@ -376,10 +389,24 @@ def visit_load_global(self, op: LoadGlobal) -> None: pass def visit_int_op(self, op: IntOp) -> None: - pass + 
self.expect_non_float(op, op.lhs) + self.expect_non_float(op, op.rhs) def visit_comparison_op(self, op: ComparisonOp) -> None: self.check_compatibility(op, op.lhs.type, op.rhs.type) + self.expect_non_float(op, op.lhs) + self.expect_non_float(op, op.rhs) + + def visit_float_op(self, op: FloatOp) -> None: + self.expect_float(op, op.lhs) + self.expect_float(op, op.rhs) + + def visit_float_neg(self, op: FloatNeg) -> None: + self.expect_float(op, op.src) + + def visit_float_comparison_op(self, op: FloatComparisonOp) -> None: + self.expect_float(op, op.lhs) + self.expect_float(op, op.rhs) def visit_load_mem(self, op: LoadMem) -> None: pass diff --git a/mypyc/analysis/selfleaks.py b/mypyc/analysis/selfleaks.py index 16c1050acf91..288c366e50e5 100644 --- a/mypyc/analysis/selfleaks.py +++ b/mypyc/analysis/selfleaks.py @@ -14,6 +14,9 @@ Cast, ComparisonOp, Extend, + FloatComparisonOp, + FloatNeg, + FloatOp, GetAttr, GetElementPtr, Goto, @@ -160,6 +163,15 @@ def visit_int_op(self, op: IntOp) -> GenAndKill: def visit_comparison_op(self, op: ComparisonOp) -> GenAndKill: return CLEAN + def visit_float_op(self, op: FloatOp) -> GenAndKill: + return CLEAN + + def visit_float_neg(self, op: FloatNeg) -> GenAndKill: + return CLEAN + + def visit_float_comparison_op(self, op: FloatComparisonOp) -> GenAndKill: + return CLEAN + def visit_load_mem(self, op: LoadMem) -> GenAndKill: return CLEAN diff --git a/mypyc/build.py b/mypyc/build.py index 8e1ee8078c11..5fc041e2dcf2 100644 --- a/mypyc/build.py +++ b/mypyc/build.py @@ -225,7 +225,7 @@ def generate_c( if compiler_options.verbose: print(f"Parsed and typechecked in {t1 - t0:.3f}s") - errors = Errors() + errors = Errors(options) modules, ctext = emitmodule.compile_modules_to_c( result, compiler_options=compiler_options, errors=errors, groups=groups ) diff --git a/mypyc/codegen/emit.py b/mypyc/codegen/emit.py index 6e0c89dd0ecf..56ce9637307b 100644 --- a/mypyc/codegen/emit.py +++ b/mypyc/codegen/emit.py @@ -2,7 +2,9 @@ from __future__ import annotations +import pprint import sys +import textwrap from typing import Callable from typing_extensions import Final @@ -191,10 +193,31 @@ def reg(self, reg: Value) -> str: def attr(self, name: str) -> str: return ATTR_PREFIX + name - def emit_line(self, line: str = "") -> None: + def object_annotation(self, obj: object, line: str) -> str: + """Build a C comment with an object's string represention. + + If the comment exceeds the line length limit, it's wrapped into a + multiline string (with the extra lines indented to be aligned with + the first line's comment). 
+ + If it contains illegal characters, an empty string is returned.""" + line_width = self._indent + len(line) + formatted = pprint.pformat(obj, compact=True, width=max(90 - line_width, 20)) + if any(x in formatted for x in ("/*", "*/", "\0")): + return "" + + if "\n" in formatted: + first_line, rest = formatted.split("\n", maxsplit=1) + comment_continued = textwrap.indent(rest, (line_width + 3) * " ") + return f" /* {first_line}\n{comment_continued} */" + else: + return f" /* {formatted} */" + + def emit_line(self, line: str = "", *, ann: object = None) -> None: if line.startswith("}"): self.dedent() - self.fragments.append(self._indent * " " + line + "\n") + comment = self.object_annotation(ann, line) if ann is not None else "" + self.fragments.append(self._indent * " " + line + comment + "\n") if line.endswith("{"): self.indent() @@ -895,6 +918,16 @@ def emit_unbox( self.emit_line(f"{dest} = CPyLong_AsInt32({src});") # TODO: Handle 'optional' # TODO: Handle 'failure' + elif is_float_rprimitive(typ): + if declare_dest: + self.emit_line("double {};".format(dest)) + # TODO: Don't use __float__ and __index__ + self.emit_line(f"{dest} = PyFloat_AsDouble({src});") + self.emit_lines( + f"if ({dest} == -1.0 && PyErr_Occurred()) {{", f"{dest} = -113.0;", "}" + ) + # TODO: Handle 'optional' + # TODO: Handle 'failure' elif isinstance(typ, RTuple): self.declare_tuple_struct(typ) if declare_dest: @@ -983,6 +1016,8 @@ def emit_box( self.emit_line(f"{declaration}{dest} = PyLong_FromLong({src});") elif is_int64_rprimitive(typ): self.emit_line(f"{declaration}{dest} = PyLong_FromLongLong({src});") + elif is_float_rprimitive(typ): + self.emit_line(f"{declaration}{dest} = PyFloat_FromDouble({src});") elif isinstance(typ, RTuple): self.declare_tuple_struct(typ) self.emit_line(f"{declaration}{dest} = PyTuple_New({len(typ.types)});") @@ -1107,3 +1142,35 @@ def _emit_traceback( self.emit_line(line) if DEBUG_ERRORS: self.emit_line('assert(PyErr_Occurred() != NULL && "failure w/o err!");') + + +def c_array_initializer(components: list[str], *, indented: bool = False) -> str: + """Construct an initializer for a C array variable. + + Components are C expressions valid in an initializer. + + For example, if components are ["1", "2"], the result + would be "{1, 2}", which can be used like this: + + int a[] = {1, 2}; + + If the result is long, split it into multiple lines. 
+ """ + indent = " " * 4 if indented else "" + res = [] + current: list[str] = [] + cur_len = 0 + for c in components: + if not current or cur_len + 2 + len(indent) + len(c) < 70: + current.append(c) + cur_len += len(c) + 2 + else: + res.append(indent + ", ".join(current)) + current = [c] + cur_len = len(c) + if not res: + # Result fits on a single line + return "{%s}" % ", ".join(current) + # Multi-line result + res.append(indent + ", ".join(current)) + return "{\n " + ",\n ".join(res) + "\n" + indent + "}" diff --git a/mypyc/codegen/emitclass.py b/mypyc/codegen/emitclass.py index a9b51b8ff1a4..6a272d1aee2b 100644 --- a/mypyc/codegen/emitclass.py +++ b/mypyc/codegen/emitclass.py @@ -1004,6 +1004,7 @@ def generate_readonly_getter( emitter.ctype_spaced(rtype), NATIVE_PREFIX, func_ir.cname(emitter.names) ) ) + emitter.emit_error_check("retval", rtype, "return NULL;") emitter.emit_box("retval", "retbox", rtype, declare_dest=True) emitter.emit_line("return retbox;") else: @@ -1016,7 +1017,6 @@ def generate_readonly_getter( def generate_property_setter( cl: ClassIR, attr: str, arg_type: RType, func_ir: FuncIR, emitter: Emitter ) -> None: - emitter.emit_line("static int") emitter.emit_line( "{}({} *self, PyObject *value, void *closure)".format( diff --git a/mypyc/codegen/emitfunc.py b/mypyc/codegen/emitfunc.py index e7fb7db80413..f2406ff1a257 100644 --- a/mypyc/codegen/emitfunc.py +++ b/mypyc/codegen/emitfunc.py @@ -5,7 +5,7 @@ from typing_extensions import Final from mypyc.analysis.blockfreq import frequently_executed_blocks -from mypyc.codegen.emit import DEBUG_ERRORS, Emitter, TracebackAndGotoHandler +from mypyc.codegen.emit import DEBUG_ERRORS, Emitter, TracebackAndGotoHandler, c_array_initializer from mypyc.common import MODULE_PREFIX, NATIVE_PREFIX, REG_PREFIX, STATIC_PREFIX, TYPE_PREFIX from mypyc.ir.class_ir import ClassIR from mypyc.ir.func_ir import FUNC_CLASSMETHOD, FUNC_STATICMETHOD, FuncDecl, FuncIR, all_values @@ -25,6 +25,10 @@ ComparisonOp, DecRef, Extend, + Float, + FloatComparisonOp, + FloatNeg, + FloatOp, GetAttr, GetElementPtr, Goto, @@ -258,12 +262,12 @@ def visit_assign_multi(self, op: AssignMulti) -> None: # RArray values can only be assigned to once, so we can always # declare them on initialization. self.emit_line( - "%s%s[%d] = {%s};" + "%s%s[%d] = %s;" % ( self.emitter.ctype_spaced(typ.item_type), dest, len(op.src), - ", ".join(self.reg(s) for s in op.src), + c_array_initializer([self.reg(s) for s in op.src], indented=True), ) ) @@ -278,15 +282,12 @@ def visit_load_error_value(self, op: LoadErrorValue) -> None: def visit_load_literal(self, op: LoadLiteral) -> None: index = self.literals.literal_index(op.value) - s = repr(op.value) - if not any(x in s for x in ("/*", "*/", "\0")): - ann = " /* %s */" % s - else: - ann = "" if not is_int_rprimitive(op.type): - self.emit_line("%s = CPyStatics[%d];%s" % (self.reg(op), index, ann)) + self.emit_line("%s = CPyStatics[%d];" % (self.reg(op), index), ann=op.value) else: - self.emit_line("%s = (CPyTagged)CPyStatics[%d] | 1;%s" % (self.reg(op), index, ann)) + self.emit_line( + "%s = (CPyTagged)CPyStatics[%d] | 1;" % (self.reg(op), index), ann=op.value + ) def get_attr_expr(self, obj: str, op: GetAttr | SetAttr, decl_cl: ClassIR) -> str: """Generate attribute accessor for normal (non-property) access. 
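The `visit_load_literal` hunk above (and the `visit_load_static` / `visit_load_global` hunks that follow) delegate the repr-as-C-comment logic to the new `Emitter.object_annotation` helper from the emit.py change earlier in this diff. A standalone sketch of that wrapping behaviour, with an invented register name and literal purely for illustration:

```python
import pprint
import textwrap


def object_annotation(obj: object, line: str, indent: int = 4) -> str:
    # Mirrors Emitter.object_annotation: pretty-print the object and wrap it
    # in a C block comment, indenting continuation lines under the first one.
    line_width = indent + len(line)
    formatted = pprint.pformat(obj, compact=True, width=max(90 - line_width, 20))
    if any(x in formatted for x in ("/*", "*/", "\0")):
        # The repr contains characters that are unsafe inside a C comment; skip it.
        return ""
    if "\n" in formatted:
        first_line, rest = formatted.split("\n", maxsplit=1)
        continued = textwrap.indent(rest, (line_width + 3) * " ")
        return f" /* {first_line}\n{continued} */"
    return f" /* {formatted} */"


line = "cpy_r_r0 = CPyStatics[5];"
print(line + object_annotation(("foo", 1, 2.5), line))
# cpy_r_r0 = CPyStatics[5]; /* ('foo', 1, 2.5) */
```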
@@ -464,12 +465,7 @@ def visit_load_static(self, op: LoadStatic) -> None: name = self.emitter.static_name(op.identifier, op.module_name, prefix) if op.namespace == NAMESPACE_TYPE: name = "(PyObject *)%s" % name - ann = "" - if op.ann: - s = repr(op.ann) - if not any(x in s for x in ("/*", "*/", "\0")): - ann = " /* %s */" % s - self.emit_line(f"{dest} = {name};{ann}") + self.emit_line(f"{dest} = {name};", ann=op.ann) def visit_init_static(self, op: InitStatic) -> None: value = self.reg(op.value) @@ -632,12 +628,7 @@ def visit_extend(self, op: Extend) -> None: def visit_load_global(self, op: LoadGlobal) -> None: dest = self.reg(op) - ann = "" - if op.ann: - s = repr(op.ann) - if not any(x in s for x in ("/*", "*/", "\0")): - ann = " /* %s */" % s - self.emit_line(f"{dest} = {op.identifier};{ann}") + self.emit_line(f"{dest} = {op.identifier};", ann=op.ann) def visit_int_op(self, op: IntOp) -> None: dest = self.reg(op) @@ -671,6 +662,27 @@ def visit_comparison_op(self, op: ComparisonOp) -> None: lhs_cast = self.emit_signed_int_cast(op.lhs.type) self.emit_line(f"{dest} = {lhs_cast}{lhs} {op.op_str[op.op]} {rhs_cast}{rhs};") + def visit_float_op(self, op: FloatOp) -> None: + dest = self.reg(op) + lhs = self.reg(op.lhs) + rhs = self.reg(op.rhs) + if op.op != FloatOp.MOD: + self.emit_line("%s = %s %s %s;" % (dest, lhs, op.op_str[op.op], rhs)) + else: + # TODO: This may set errno as a side effect, that is a little sketchy. + self.emit_line("%s = fmod(%s, %s);" % (dest, lhs, rhs)) + + def visit_float_neg(self, op: FloatNeg) -> None: + dest = self.reg(op) + src = self.reg(op.src) + self.emit_line(f"{dest} = -{src};") + + def visit_float_comparison_op(self, op: FloatComparisonOp) -> None: + dest = self.reg(op) + lhs = self.reg(op.lhs) + rhs = self.reg(op.rhs) + self.emit_line("%s = %s %s %s;" % (dest, lhs, op.op_str[op.op], rhs)) + def visit_load_mem(self, op: LoadMem) -> None: dest = self.reg(op) src = self.reg(op.src) @@ -702,7 +714,13 @@ def visit_get_element_ptr(self, op: GetElementPtr) -> None: def visit_load_address(self, op: LoadAddress) -> None: typ = op.type dest = self.reg(op) - src = self.reg(op.src) if isinstance(op.src, Register) else op.src + if isinstance(op.src, Register): + src = self.reg(op.src) + elif isinstance(op.src, LoadStatic): + prefix = self.PREFIX_MAP[op.src.namespace] + src = self.emitter.static_name(op.src.identifier, op.src.module_name, prefix) + else: + src = op.src self.emit_line(f"{dest} = ({typ._ctype})&{src};") def visit_keep_alive(self, op: KeepAlive) -> None: @@ -732,6 +750,13 @@ def reg(self, reg: Value) -> str: elif val <= -(1 << 31): s += "LL" return s + elif isinstance(reg, Float): + r = repr(reg.value) + if r == "inf": + return "INFINITY" + elif r == "-inf": + return "-INFINITY" + return r else: return self.emitter.reg(reg) @@ -744,8 +769,8 @@ def c_error_value(self, rtype: RType) -> str: def c_undefined_value(self, rtype: RType) -> str: return self.emitter.c_undefined_value(rtype) - def emit_line(self, line: str) -> None: - self.emitter.emit_line(line) + def emit_line(self, line: str, *, ann: object = None) -> None: + self.emitter.emit_line(line, ann=ann) def emit_lines(self, *lines: str) -> None: self.emitter.emit_lines(*lines) diff --git a/mypyc/codegen/emitmodule.py b/mypyc/codegen/emitmodule.py index a8226314039d..0e80ff6da1f2 100644 --- a/mypyc/codegen/emitmodule.py +++ b/mypyc/codegen/emitmodule.py @@ -26,7 +26,7 @@ from mypy.plugin import Plugin, ReportConfigContext from mypy.util import hash_digest from mypyc.codegen.cstring import 
c_string_initializer -from mypyc.codegen.emit import Emitter, EmitterContext, HeaderDeclaration +from mypyc.codegen.emit import Emitter, EmitterContext, HeaderDeclaration, c_array_initializer from mypyc.codegen.emitclass import generate_class, generate_class_type_decl from mypyc.codegen.emitfunc import generate_native_function, native_function_header from mypyc.codegen.emitwrapper import ( @@ -296,11 +296,11 @@ def compile_ir_to_c( # compiled into a separate extension module. ctext: dict[str | None, list[tuple[str, str]]] = {} for group_sources, group_name in groups: - group_modules = [ - (source.module, modules[source.module]) + group_modules = { + source.module: modules[source.module] for source in group_sources if source.module in modules - ] + } if not group_modules: ctext[group_name] = [] continue @@ -465,7 +465,7 @@ def group_dir(group_name: str) -> str: class GroupGenerator: def __init__( self, - modules: list[tuple[str, ModuleIR]], + modules: dict[str, ModuleIR], source_paths: dict[str, str], group_name: str | None, group_map: dict[str, str | None], @@ -512,7 +512,7 @@ def generate_c_for_modules(self) -> list[tuple[str, str]]: multi_file = self.use_shared_lib and self.multi_file # Collect all literal refs in IR. - for _, module in self.modules: + for module in self.modules.values(): for fn in module.functions: collect_literals(fn, self.context.literals) @@ -528,7 +528,7 @@ def generate_c_for_modules(self) -> list[tuple[str, str]]: self.generate_literal_tables() - for module_name, module in self.modules: + for module_name, module in self.modules.items(): if multi_file: emitter = Emitter(self.context) emitter.emit_line(f'#include "__native{self.short_group_suffix}.h"') @@ -582,7 +582,7 @@ def generate_c_for_modules(self) -> list[tuple[str, str]]: declarations.emit_line("int CPyGlobalsInit(void);") declarations.emit_line() - for module_name, module in self.modules: + for module_name, module in self.modules.items(): self.declare_finals(module_name, module.final_names, declarations) for cl in module.classes: generate_class_type_decl(cl, emitter, ext_declarations, declarations) @@ -790,7 +790,7 @@ def generate_shared_lib_init(self, emitter: Emitter) -> None: "", ) - for mod, _ in self.modules: + for mod in self.modules: name = exported_name(mod) emitter.emit_lines( f"extern PyObject *CPyInit_{name}(void);", @@ -1023,12 +1023,13 @@ def module_internal_static_name(self, module_name: str, emitter: Emitter) -> str return emitter.static_name(module_name + "_internal", None, prefix=MODULE_PREFIX) def declare_module(self, module_name: str, emitter: Emitter) -> None: - # We declare two globals for each module: + # We declare two globals for each compiled module: # one used internally in the implementation of module init to cache results # and prevent infinite recursion in import cycles, and one used # by other modules to refer to it. 
- internal_static_name = self.module_internal_static_name(module_name, emitter) - self.declare_global("CPyModule *", internal_static_name, initializer="NULL") + if module_name in self.modules: + internal_static_name = self.module_internal_static_name(module_name, emitter) + self.declare_global("CPyModule *", internal_static_name, initializer="NULL") static_name = emitter.static_name(module_name, None, prefix=MODULE_PREFIX) self.declare_global("CPyModule *", static_name) self.simple_inits.append((static_name, "Py_None")) @@ -1126,37 +1127,6 @@ def collect_literals(fn: FuncIR, literals: Literals) -> None: literals.record_literal(op.value) -def c_array_initializer(components: list[str]) -> str: - """Construct an initializer for a C array variable. - - Components are C expressions valid in an initializer. - - For example, if components are ["1", "2"], the result - would be "{1, 2}", which can be used like this: - - int a[] = {1, 2}; - - If the result is long, split it into multiple lines. - """ - res = [] - current: list[str] = [] - cur_len = 0 - for c in components: - if not current or cur_len + 2 + len(c) < 70: - current.append(c) - cur_len += len(c) + 2 - else: - res.append(", ".join(current)) - current = [c] - cur_len = len(c) - if not res: - # Result fits on a single line - return "{%s}" % ", ".join(current) - # Multi-line result - res.append(", ".join(current)) - return "{\n " + ",\n ".join(res) + "\n}" - - def c_string_array_initializer(components: list[bytes]) -> str: result = [] result.append("{\n") diff --git a/mypyc/codegen/emitwrapper.py b/mypyc/codegen/emitwrapper.py index ed03bb7948cc..791e856c274a 100644 --- a/mypyc/codegen/emitwrapper.py +++ b/mypyc/codegen/emitwrapper.py @@ -179,7 +179,7 @@ def generate_wrapper_function( nargs = "nargs" parse_fn = "CPyArg_ParseStackAndKeywords" # Special case some common signatures - if len(real_args) == 0: + if not real_args: # No args parse_fn = "CPyArg_ParseStackAndKeywordsNoArgs" elif len(real_args) == 1 and len(groups[ARG_POS]) == 1: diff --git a/mypyc/common.py b/mypyc/common.py index c8da5ff63bab..05e13370cb98 100644 --- a/mypyc/common.py +++ b/mypyc/common.py @@ -69,6 +69,7 @@ "getargs.c", "getargsfast.c", "int_ops.c", + "float_ops.c", "str_ops.c", "bytes_ops.c", "list_ops.c", diff --git a/mypyc/doc/float_operations.rst b/mypyc/doc/float_operations.rst index c1e4d284c4ba..915c184ae8e7 100644 --- a/mypyc/doc/float_operations.rst +++ b/mypyc/doc/float_operations.rst @@ -7,18 +7,40 @@ These ``float`` operations have fast, optimized implementations. Other floating point operations use generic implementations that are often slower. -.. note:: - - At the moment, only a few float operations are optimized. This will - improve in future mypyc releases. - Construction ------------ * Float literal -* ``float(string)`` +* ``float(x: int)`` +* ``float(x: i64)`` +* ``float(x: i32)`` +* ``float(x: str)`` +* ``float(x: float)`` (no-op) + +Operators +--------- + +* Arithmetic (``+``, ``-``, ``*``, ``/``, ``//``, ``%``) +* Comparisons (``==``, ``!=``, ``<``, etc.) +* Augmented assignment (``x += y``, etc.) 
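Since the operator list above now covers ordinary float arithmetic, a small compiled function can stay entirely on the fast path. A hedged example (the function name is made up; it uses only operations from the operator list above and the function list below):

```python
import math


def hypot3(x: float, y: float, z: float) -> float:
    # +, * and == compile to native float ops; math.sqrt() is one of the
    # optimized math functions listed below.
    s = x * x + y * y + z * z
    if s == 0.0:
        return 0.0
    return math.sqrt(s)
```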
Functions --------- +* ``int(f)`` +* ``i32(f)`` (convert to ``i32``) +* ``i64(f)`` (convert to ``i64``) * ``abs(f)`` +* ``math.sin(f)`` +* ``math.cos(f)`` +* ``math.tan(f)`` +* ``math.sqrt(f)`` +* ``math.exp(f)`` +* ``math.log(f)`` +* ``math.floor(f)`` +* ``math.ceil(f)`` +* ``math.fabs(f)`` +* ``math.pow(x, y)`` +* ``math.copysign(x, y)`` +* ``math.isinf(f)`` +* ``math.isnan(f)`` diff --git a/mypyc/doc/int_operations.rst b/mypyc/doc/int_operations.rst index 038b6e5dbc63..058fdbd511dd 100644 --- a/mypyc/doc/int_operations.rst +++ b/mypyc/doc/int_operations.rst @@ -3,32 +3,133 @@ Native integer operations ========================= -Operations on ``int`` values that are listed here have fast, optimized +Mypyc supports these integer types: + +* ``int`` (arbitrary-precision integer) +* ``i64`` (64-bit signed integer) +* ``i32`` (32-bit signed integer) + +``i64`` and ``i32`` are *native integer types* and must be imported +from the ``mypy_extensions`` module. ``int`` corresponds to the Python +``int`` type, but uses a more efficient runtime representation (tagged +pointer). Native integer types are value types. All integer types have +optimized primitive operations, but the native integer types are more +efficient than ``int``, since they don't require range or bounds +checks. + +Operations on integers that are listed here have fast, optimized implementations. Other integer operations use generic implementations -that are often slower. Some operations involving integers and other -types are documented elsewhere, such as list indexing. +that are generally slower. Some operations involving integers and other +types, such as list indexing, are documented elsewhere. Construction ------------ +``int`` type: + * Integer literal * ``int(x: float)`` +* ``int(x: i64)`` +* ``int(x: i32)`` * ``int(x: str)`` * ``int(x: str, base: int)`` +* ``int(x: int)`` (no-op) + +``i64`` type: + +* ``i64(x: int)`` +* ``i64(x: float)`` +* ``i64(x: i32)`` +* ``i64(x: str)`` +* ``i64(x: str, base: int)`` +* ``i64(x: i64)`` (no-op) + +``i32`` type: + +* ``i32(x: int)`` +* ``i32(x: float)`` +* ``i32(x: i64)`` (truncate) +* ``i32(x: str)`` +* ``i32(x: str, base: int)`` +* ``i32(x: i32)`` (no-op) + +Conversions from ``int`` to a native integer type raise +``OverflowError`` if the value is too large or small. Conversions from +a wider native integer type to a narrower one truncate the value and never +fail. More generally, operations between native integer types don't +check for overflow. + +Implicit conversions +-------------------- + +``int`` values can be implicitly converted to a native integer type, +for convenience. This means that these are equivalent:: + + def implicit() -> None: + # Implicit conversion of 0 (int) to i64 + x: i64 = 0 + + def explicit() -> None: + # Explicit conversion of 0 (int) to i64 + x = i64(0) + +Similarly, a native integer value can be implicitly converted to an +arbitrary-precision integer. These two functions are equivalent:: + + def implicit(x: i64) -> int: + # Implicit conversion from i64 to int + return x + + def explicit(x: i64) -> int: + # Explicit conversion from i64 to int + return int(x) Operators --------- -* Arithmetic (``+``, ``-``, ``*``, ``//``, ``%``) +* Arithmetic (``+``, ``-``, ``*``, ``//``, ``/``, ``%``) * Bitwise operations (``&``, ``|``, ``^``, ``<<``, ``>>``, ``~``) * Comparisons (``==``, ``!=``, ``<``, etc.) * Augmented assignment (``x += y``, etc.) +If one of the above native integer operations overflows or underflows, +the behavior is undefined. 
Native integer types should only be used if +all possible values are small enough for the type. For this reason, +the arbitrary-precision ``int`` type is recommended unless the +performance of integer operations is critical. + +It's a compile-time error to mix different native integer types in a +binary operation such as addition. An explicit conversion is required:: + + def add(x: i64, y: i32) -> None: + a = x + y # Error (i64 + i32) + b = x + i64(y) # OK + +You can freely mix a native integer value and an arbitrary-precision +``int`` value in an operation. The native integer type is "sticky" +and the ``int`` operand is coerced to the native integer type:: + + def example(x: i64, y: int) -> None: + a = x * y + # Type of "a" is "i64" + ... + b = 1 - x + # Similarly, type of "b" is "i64" + Statements ---------- -For loop over range: +For loop over a range is compiled efficiently, if the ``range(...)`` object +is constructed in the for statement (after ``in``): * ``for x in range(end)`` * ``for x in range(start, end)`` * ``for x in range(start, end, step)`` + +If one of the arguments to ``range`` in a for loop is a native integer +type, the type of the loop variable is inferred to have this native +integer type, instead of ``int``:: + + for x in range(i64(n)): + # Type of "x" is "i64" + ... diff --git a/mypyc/doc/introduction.rst b/mypyc/doc/introduction.rst index 874071bac23f..53c86ecdab1b 100644 --- a/mypyc/doc/introduction.rst +++ b/mypyc/doc/introduction.rst @@ -10,7 +10,7 @@ The compiled language is a strict, *gradually typed* Python variant. It restricts the use of some dynamic Python features to gain performance, but it's mostly compatible with standard Python. -Mypyc uses `mypy `_ to perform type +Mypyc uses `mypy `_ to perform type checking and type inference. Most type system features in the stdlib `typing `_ module are supported. diff --git a/mypyc/doc/make.bat b/mypyc/doc/make.bat index 2119f51099bf..153be5e2f6f9 100644 --- a/mypyc/doc/make.bat +++ b/mypyc/doc/make.bat @@ -21,7 +21,7 @@ if errorlevel 9009 ( echo.may add the Sphinx directory to PATH. echo. echo.If you don't have Sphinx installed, grab it from - echo.http://sphinx-doc.org/ + echo.https://www.sphinx-doc.org/ exit /b 1 ) diff --git a/mypyc/doc/performance_tips_and_tricks.rst b/mypyc/doc/performance_tips_and_tricks.rst index 668d32827402..ae0b2950814c 100644 --- a/mypyc/doc/performance_tips_and_tricks.rst +++ b/mypyc/doc/performance_tips_and_tricks.rst @@ -103,8 +103,6 @@ These things also tend to be relatively slow: * Using generator functions -* Using floating point numbers (they are relatively unoptimized) - * Using callable values (i.e. 
not leveraging early binding to call functions or methods) @@ -160,6 +158,8 @@ Here are examples of features that are fast, in no particular order * Many integer operations +* Many ``float`` operations + * Booleans * :ref:`Native list operations `, such as indexing, diff --git a/mypyc/doc/using_type_annotations.rst b/mypyc/doc/using_type_annotations.rst index a01246ab0914..6c9277786751 100644 --- a/mypyc/doc/using_type_annotations.rst +++ b/mypyc/doc/using_type_annotations.rst @@ -30,6 +30,8 @@ mypyc, and many operations on these types have efficient implementations: * ``int`` (:ref:`native operations `) +* ``i64`` (:ref:`documentation `, :ref:`native operations `) +* ``i32`` (:ref:`documentation `, :ref:`native operations `) * ``float`` (:ref:`native operations `) * ``bool`` (:ref:`native operations `) * ``str`` (:ref:`native operations `) @@ -271,7 +273,8 @@ Value and heap types In CPython, memory for all objects is dynamically allocated on the heap. All Python types are thus *heap types*. In compiled code, some types are *value types* -- no object is (necessarily) allocated on the -heap. ``bool``, ``None`` and fixed-length tuples are value types. +heap. ``bool``, ``float``, ``None``, :ref:`native integer types ` +and fixed-length tuples are value types. ``int`` is a hybrid. For typical integer values, it is a value type. Large enough integer values, those that require more than 63 @@ -287,9 +290,9 @@ Value types have a few differences from heap types: * Similarly, mypyc transparently changes from a heap-based representation to a value representation (unboxing). -* Object identity of integers and tuples is not preserved. You should - use ``==`` instead of ``is`` if you are comparing two integers or - fixed-length tuples. +* Object identity of integers, floating point values and tuples is not + preserved. You should use ``==`` instead of ``is`` if you are comparing + two integers, floats or fixed-length tuples. * When an instance of a subclass of a value type is converted to the base type, it is implicitly converted to an instance of the target @@ -312,3 +315,81 @@ Example:: x = a[0] # True is converted to 1 on assignment x = True + +Since integers and floating point values have a different runtime +representations and neither can represent all the values of the other +type, type narrowing of floating point values through assignment is +disallowed in compiled code. For consistency, mypyc rejects assigning +an integer value to a float variable even in variable initialization. +An explicit conversion is required. + +Examples:: + + def narrowing(n: int) -> None: + # Error: Incompatible value representations in assignment + # (expression has type "int", variable has type "float") + x: float = 0 + + y: float = 0.0 # Ok + + if f(): + y = n # Error + if f(): + y = float(n) # Ok + +.. _native-ints: + +Native integer types +-------------------- + +You can use the native integer types ``i64`` (64-bit signed integer) +and ``i32`` (32-bit signed integer) if you know that integer values +will always fit within fixed bounds. These types are faster than the +arbitrary-precision ``int`` type, since they don't require overflow +checks on operations. ``i32`` may also use less memory than ``int`` +values. The types are imported from the ``mypy_extensions`` module +(installed via ``pip install mypy_extensions``). 
+ +Example:: + + from mypy_extensions import i64 + + def sum_list(l: list[i64]) -> i64: + s: i64 = 0 + for n in l: + s += n + return s + + # Implicit conversions from int to i64 + print(sum_list([1, 3, 5])) + +.. note:: + + Since there are no overflow checks when performing native integer + arithmetic, the above function could result in an overflow or other + undefined behavior if the sum might not fit within 64 bits. + + The behavior when running as interpreted Python program will be + different if there are overflows. Declaring native integer types + have no effect unless code is compiled. Native integer types are + effectively equivalent to ``int`` when interpreted. + +Native integer types have these additional properties: + +* Values can be implicitly converted between ``int`` and a native + integer type (both ways). + +* Conversions between different native integer types must be explicit. + A conversion to a narrower native integer type truncates the value + without a runtime overflow check. + +* If a binary operation (such as ``+``) or an augmented assignment + (such as ``+=``) mixes native integer and ``int`` values, the + ``int`` operand is implicitly coerced to the native integer type + (native integer types are "sticky"). + +* You can't mix different native integer types in binary + operations. Instead, convert between types explicitly. + +For more information about native integer types, refer to +:ref:`native integer operations `. diff --git a/mypyc/errors.py b/mypyc/errors.py index 1dd269fe25f3..8bc9b2714f75 100644 --- a/mypyc/errors.py +++ b/mypyc/errors.py @@ -1,13 +1,14 @@ from __future__ import annotations import mypy.errors +from mypy.options import Options class Errors: - def __init__(self) -> None: + def __init__(self, options: Options) -> None: self.num_errors = 0 self.num_warnings = 0 - self._errors = mypy.errors.Errors(hide_error_codes=True) + self._errors = mypy.errors.Errors(options, hide_error_codes=True) def error(self, msg: str, path: str, line: int) -> None: self._errors.report(line, None, msg, severity="error", file=path) diff --git a/mypyc/ir/module_ir.py b/mypyc/ir/module_ir.py index 4b6a177af149..dcf6f8768547 100644 --- a/mypyc/ir/module_ir.py +++ b/mypyc/ir/module_ir.py @@ -23,7 +23,7 @@ def __init__( final_names: list[tuple[str, RType]], ) -> None: self.fullname = fullname - self.imports = imports[:] + self.imports = imports.copy() self.functions = functions self.classes = classes self.final_names = final_names diff --git a/mypyc/ir/ops.py b/mypyc/ir/ops.py index 51a0bffcf3f1..351f7c01efe2 100644 --- a/mypyc/ir/ops.py +++ b/mypyc/ir/ops.py @@ -25,6 +25,7 @@ RVoid, bit_rprimitive, bool_rprimitive, + float_rprimitive, int_rprimitive, is_bit_rprimitive, is_bool_rprimitive, @@ -190,6 +191,25 @@ def __init__(self, value: int, rtype: RType = short_int_rprimitive, line: int = self.type = rtype self.line = line + def numeric_value(self) -> int: + if is_short_int_rprimitive(self.type) or is_int_rprimitive(self.type): + return self.value // 2 + return self.value + + +class Float(Value): + """Float literal. + + Floating point literals are treated as constant values and are generally + not included in data flow analyses and such, unlike Register and + Op subclasses. + """ + + def __init__(self, value: float, line: int = -1) -> None: + self.value = value + self.type = float_rprimitive + self.line = line + class Op(Value): """Abstract base class for all IR operations. 
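To make the ``ops.py`` hunk above concrete, here is a rough, self-contained sketch of how the new ``Float`` literal value and ``Integer.numeric_value()`` behave, based only on the definitions shown in this diff:

```python
from mypyc.ir.ops import Float, Integer
from mypyc.ir.rtypes import float_rprimitive, int_rprimitive

# Float literals carry a plain Python float and are typed as float_rprimitive.
half = Float(0.5)
assert half.value == 0.5 and half.type is float_rprimitive

# For tagged int types the stored value is doubled, so numeric_value()
# recovers the actual integer by halving it (6 // 2 == 3).
three = Integer(6, int_rprimitive)
assert three.numeric_value() == 3
```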
@@ -279,7 +299,7 @@ def __init__(self, dest: Register, src: list[Value], line: int = -1) -> None: self.src = src def sources(self) -> list[Value]: - return self.src[:] + return self.src.copy() def stolen(self) -> list[Value]: return [] @@ -522,7 +542,7 @@ def __init__(self, fn: FuncDecl, args: Sequence[Value], line: int) -> None: super().__init__(line) def sources(self) -> list[Value]: - return list(self.args[:]) + return list(self.args.copy()) def accept(self, visitor: OpVisitor[T]) -> T: return visitor.visit_call(self) @@ -550,7 +570,7 @@ def __init__(self, obj: Value, method: str, args: list[Value], line: int = -1) - super().__init__(line) def sources(self) -> list[Value]: - return self.args[:] + [self.obj] + return self.args.copy() + [self.obj] def accept(self, visitor: OpVisitor[T]) -> T: return visitor.visit_method_call(self) @@ -770,7 +790,7 @@ def __init__(self, items: list[Value], line: int) -> None: self.type = self.tuple_type def sources(self) -> list[Value]: - return self.items[:] + return self.items.copy() def accept(self, visitor: OpVisitor[T]) -> T: return visitor.visit_tuple_set(self) @@ -895,6 +915,7 @@ class RaiseStandardError(RegisterOp): UNBOUND_LOCAL_ERROR: Final = "UnboundLocalError" RUNTIME_ERROR: Final = "RuntimeError" NAME_ERROR: Final = "NameError" + ZERO_DIVISION_ERROR: Final = "ZeroDivisionError" def __init__(self, class_name: str, value: str | Value | None, line: int) -> None: super().__init__(line) @@ -1042,7 +1063,7 @@ class IntOp(RegisterOp): """Binary arithmetic or bitwise op on integer operands (e.g., r1 = r2 + r3). These ops are low-level and are similar to the corresponding C - operations (and unlike Python operations). + operations. The left and right values must have low-level integer types with compatible representations. Fixed-width integers, short_int_rprimitive, @@ -1156,6 +1177,94 @@ def accept(self, visitor: OpVisitor[T]) -> T: return visitor.visit_comparison_op(self) +class FloatOp(RegisterOp): + """Binary float arithmetic op (e.g., r1 = r2 + r3). + + These ops are low-level and are similar to the corresponding C + operations (and somewhat different from Python operations). + + The left and right values must be floats. + """ + + error_kind = ERR_NEVER + + ADD: Final = 0 + SUB: Final = 1 + MUL: Final = 2 + DIV: Final = 3 + MOD: Final = 4 + + op_str: Final = {ADD: "+", SUB: "-", MUL: "*", DIV: "/", MOD: "%"} + + def __init__(self, lhs: Value, rhs: Value, op: int, line: int = -1) -> None: + super().__init__(line) + self.type = float_rprimitive + self.lhs = lhs + self.rhs = rhs + self.op = op + + def sources(self) -> List[Value]: + return [self.lhs, self.rhs] + + def accept(self, visitor: "OpVisitor[T]") -> T: + return visitor.visit_float_op(self) + + +# We can't have this in the FloatOp class body, because of +# https://github.com/mypyc/mypyc/issues/932. 
+float_op_to_id: Final = {op: op_id for op_id, op in FloatOp.op_str.items()} + + +class FloatNeg(RegisterOp): + """Float negation op (r1 = -r2).""" + + error_kind = ERR_NEVER + + def __init__(self, src: Value, line: int = -1) -> None: + super().__init__(line) + self.type = float_rprimitive + self.src = src + + def sources(self) -> List[Value]: + return [self.src] + + def accept(self, visitor: "OpVisitor[T]") -> T: + return visitor.visit_float_neg(self) + + +class FloatComparisonOp(RegisterOp): + """Low-level comparison op for floats.""" + + error_kind = ERR_NEVER + + EQ: Final = 200 + NEQ: Final = 201 + LT: Final = 202 + GT: Final = 203 + LE: Final = 204 + GE: Final = 205 + + op_str: Final = {EQ: "==", NEQ: "!=", LT: "<", GT: ">", LE: "<=", GE: ">="} + + def __init__(self, lhs: Value, rhs: Value, op: int, line: int = -1) -> None: + super().__init__(line) + self.type = bit_rprimitive + self.lhs = lhs + self.rhs = rhs + self.op = op + + def sources(self) -> List[Value]: + return [self.lhs, self.rhs] + + def accept(self, visitor: "OpVisitor[T]") -> T: + return visitor.visit_float_comparison_op(self) + + +# We can't have this in the FloatOp class body, because of +# https://github.com/mypyc/mypyc/issues/932. +float_comparison_op_to_id: Final = {op: op_id for op_id, op in FloatComparisonOp.op_str.items()} + + class LoadMem(RegisterOp): """Read a memory location: result = *(type *)src. @@ -1239,13 +1348,14 @@ class LoadAddress(RegisterOp): Attributes: type: Type of the loaded address(e.g. ptr/object_ptr) src: Source value (str for globals like 'PyList_Type', - Register for temporary values or locals) + Register for temporary values or locals, LoadStatic + for statics.) """ error_kind = ERR_NEVER is_borrowed = True - def __init__(self, type: RType, src: str | Register, line: int = -1) -> None: + def __init__(self, type: RType, src: str | Register | LoadStatic, line: int = -1) -> None: super().__init__(line) self.type = type self.src = src @@ -1285,7 +1395,7 @@ def __init__(self, src: list[Value]) -> None: self.src = src def sources(self) -> list[Value]: - return self.src[:] + return self.src.copy() def accept(self, visitor: OpVisitor[T]) -> T: return visitor.visit_keep_alive(self) @@ -1405,6 +1515,18 @@ def visit_int_op(self, op: IntOp) -> T: def visit_comparison_op(self, op: ComparisonOp) -> T: raise NotImplementedError + @abstractmethod + def visit_float_op(self, op: FloatOp) -> T: + raise NotImplementedError + + @abstractmethod + def visit_float_neg(self, op: FloatNeg) -> T: + raise NotImplementedError + + @abstractmethod + def visit_float_comparison_op(self, op: FloatComparisonOp) -> T: + raise NotImplementedError + @abstractmethod def visit_load_mem(self, op: LoadMem) -> T: raise NotImplementedError @@ -1428,6 +1550,7 @@ def visit_keep_alive(self, op: KeepAlive) -> T: # TODO: Should the following definition live somewhere else? + # We do a three-pass deserialization scheme in order to resolve name # references. # 1. Create an empty ClassIR for each class in an SCC. 
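As a short sketch of how the float IR ops introduced above fit together: the values here are constructed directly for illustration (real IR building goes through the low-level builder), assuming the names are exported from ``mypyc.ir.ops`` as defined in the hunks above.

```python
from mypyc.ir.ops import Float, FloatComparisonOp, FloatNeg, FloatOp, float_op_to_id

# r0 = 2.5 * 4.0 ; r1 = -r0 ; r2 = r1 < 0.0
r0 = FloatOp(Float(2.5), Float(4.0), FloatOp.MUL, line=1)
r1 = FloatNeg(r0, line=1)
r2 = FloatComparisonOp(r1, Float(0.0), FloatComparisonOp.LT, line=1)

# op_str maps op ids to their printed form; float_op_to_id is the reverse map.
assert FloatOp.op_str[r0.op] == "*"
assert float_op_to_id["*"] == FloatOp.MUL
assert FloatComparisonOp.op_str[r2.op] == "<"
```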
diff --git a/mypyc/ir/pprint.py b/mypyc/ir/pprint.py index cb9e4a2d2541..4d10a91835ca 100644 --- a/mypyc/ir/pprint.py +++ b/mypyc/ir/pprint.py @@ -23,6 +23,10 @@ ControlOp, DecRef, Extend, + Float, + FloatComparisonOp, + FloatNeg, + FloatOp, GetAttr, GetElementPtr, Goto, @@ -241,6 +245,15 @@ def visit_comparison_op(self, op: ComparisonOp) -> str: "%r = %r %s %r%s", op, op.lhs, ComparisonOp.op_str[op.op], op.rhs, sign_format ) + def visit_float_op(self, op: FloatOp) -> str: + return self.format("%r = %r %s %r", op, op.lhs, FloatOp.op_str[op.op], op.rhs) + + def visit_float_neg(self, op: FloatNeg) -> str: + return self.format("%r = -%r", op, op.src) + + def visit_float_comparison_op(self, op: FloatComparisonOp) -> str: + return self.format("%r = %r %s %r", op, op.lhs, op.op_str[op.op], op.rhs) + def visit_load_mem(self, op: LoadMem) -> str: return self.format("%r = load_mem %r :: %t*", op, op.src, op.type) @@ -253,6 +266,11 @@ def visit_get_element_ptr(self, op: GetElementPtr) -> str: def visit_load_address(self, op: LoadAddress) -> str: if isinstance(op.src, Register): return self.format("%r = load_address %r", op, op.src) + elif isinstance(op.src, LoadStatic): + name = op.src.identifier + if op.src.module_name is not None: + name = f"{op.src.module_name}.{name}" + return self.format("%r = load_address %s :: %s", op, name, op.src.namespace) else: return self.format("%r = load_address %s", op, op.src) @@ -289,6 +307,8 @@ def format(self, fmt: str, *args: Any) -> str: assert isinstance(arg, Value) if isinstance(arg, Integer): result.append(str(arg.value)) + elif isinstance(arg, Float): + result.append(repr(arg.value)) else: result.append(self.names[arg]) elif typespec == "d": @@ -445,7 +465,7 @@ def generate_names_for_ir(args: list[Register], blocks: list[BasicBlock]) -> dic continue if isinstance(value, Register) and value.name: name = value.name - elif isinstance(value, Integer): + elif isinstance(value, (Integer, Float)): continue else: name = "r%d" % temp_index diff --git a/mypyc/ir/rtypes.py b/mypyc/ir/rtypes.py index babfe0770f35..7ff82ac9b297 100644 --- a/mypyc/ir/rtypes.py +++ b/mypyc/ir/rtypes.py @@ -221,6 +221,8 @@ def __init__( self.c_undefined = "2" elif ctype in ("PyObject **", "void *"): self.c_undefined = "NULL" + elif ctype == "double": + self.c_undefined = "-113.0" else: assert False, "Unrecognized ctype: %r" % ctype @@ -366,7 +368,14 @@ def __hash__(self) -> int: # Floats are represent as 'float' PyObject * values. (In the future # we'll likely switch to a more efficient, unboxed representation.) -float_rprimitive: Final = RPrimitive("builtins.float", is_unboxed=False, is_refcounted=True) +float_rprimitive: Final = RPrimitive( + "builtins.float", + is_unboxed=True, + is_refcounted=False, + ctype="double", + size=8, + error_overlap=True, +) # An unboxed Python bool value. This actually has three possible values # (0 -> False, 1 -> True, 2 -> error). 
If you only need True/False, use @@ -527,6 +536,8 @@ def visit_rprimitive(self, t: RPrimitive) -> str: return "8" # "8 byte integer" elif t._ctype == "int32_t": return "4" # "4 byte integer" + elif t._ctype == "double": + return "F" assert not t.is_unboxed, f"{t} unexpected unboxed type" return "O" @@ -809,17 +820,11 @@ def make_simplified_union(items: list[RType]) -> RType: items = flatten_nested_unions(items) assert items - # Remove duplicate items using set + list to preserve item order - seen = set() - new_items = [] - for item in items: - if item not in seen: - new_items.append(item) - seen.add(item) - if len(new_items) > 1: - return RUnion(new_items) + unique_items = dict.fromkeys(items) + if len(unique_items) > 1: + return RUnion(list(unique_items)) else: - return new_items[0] + return next(iter(unique_items)) def accept(self, visitor: RTypeVisitor[T]) -> T: return visitor.visit_runion(self) diff --git a/mypyc/irbuild/builder.py b/mypyc/irbuild/builder.py index a49429f1c6ec..b23e561a2179 100644 --- a/mypyc/irbuild/builder.py +++ b/mypyc/irbuild/builder.py @@ -52,6 +52,7 @@ ProperType, TupleType, Type, + TypedDictType, TypeOfAny, UninhabitedType, UnionType, @@ -89,13 +90,14 @@ RType, RUnion, bitmap_rprimitive, - c_int_rprimitive, c_pyssize_t_rprimitive, dict_rprimitive, int_rprimitive, + is_float_rprimitive, is_list_rprimitive, is_none_rprimitive, is_object_rprimitive, + is_tagged, is_tuple_rprimitive, none_rprimitive, object_rprimitive, @@ -124,12 +126,7 @@ from mypyc.primitives.dict_ops import dict_get_item_op, dict_set_item_op from mypyc.primitives.generic_ops import iter_op, next_op, py_setattr_op from mypyc.primitives.list_ops import list_get_item_unsafe_op, list_pop_last, to_list -from mypyc.primitives.misc_ops import ( - check_unpack_count_op, - get_module_dict_op, - import_extra_args_op, - import_op, -) +from mypyc.primitives.misc_ops import check_unpack_count_op, get_module_dict_op, import_op from mypyc.primitives.registry import CFunctionDescription, function_ops # These int binary operations can borrow their operands safely, since the @@ -191,6 +188,8 @@ def __init__( self.encapsulating_funcs = pbv.encapsulating_funcs self.nested_fitems = pbv.nested_funcs.keys() self.fdefs_to_decorators = pbv.funcs_to_decorators + self.module_import_groups = pbv.module_import_groups + self.singledispatch_impls = singledispatch_impls self.visitor = visitor @@ -392,22 +391,6 @@ def add_to_non_ext_dict( key_unicode = self.load_str(key) self.call_c(dict_set_item_op, [non_ext.dict, key_unicode, val], line) - def gen_import_from( - self, id: str, globals_dict: Value, imported: list[str], line: int - ) -> Value: - self.imports[id] = None - - null_dict = Integer(0, dict_rprimitive, line) - names_to_import = self.new_list_op([self.load_str(name) for name in imported], line) - zero_int = Integer(0, c_int_rprimitive, line) - value = self.call_c( - import_extra_args_op, - [self.load_str(id), globals_dict, null_dict, names_to_import, zero_int], - line, - ) - self.add(InitStatic(value, id, namespace=NAMESPACE_MODULE)) - return value - def gen_import(self, id: str, line: int) -> None: self.imports[id] = None @@ -548,16 +531,14 @@ def load_final_static( error_msg=f'value for final name "{error_name}" was not set', ) - def load_final_literal_value(self, val: int | str | bytes | float | bool, line: int) -> Value: - """Load value of a final name or class-level attribute.""" + def load_literal_value(self, val: int | str | bytes | float | complex | bool) -> Value: + """Load value of a final name, class-level 
attribute, or constant folded expression.""" if isinstance(val, bool): if val: return self.true() else: return self.false() elif isinstance(val, int): - # TODO: take care of negative integer initializers - # (probably easier to fix this in mypy itself). return self.builder.load_int(val) elif isinstance(val, float): return self.builder.load_float(val) @@ -565,8 +546,10 @@ def load_final_literal_value(self, val: int | str | bytes | float | bool, line: return self.builder.load_str(val) elif isinstance(val, bytes): return self.builder.load_bytes(val) + elif isinstance(val, complex): + return self.builder.load_complex(val) else: - assert False, "Unsupported final literal value" + assert False, "Unsupported literal value" def get_assignment_target( self, lvalue: Lvalue, line: int = -1, *, for_read: bool = False @@ -665,13 +648,13 @@ def read( def assign(self, target: Register | AssignmentTarget, rvalue_reg: Value, line: int) -> None: if isinstance(target, Register): - self.add(Assign(target, self.coerce(rvalue_reg, target.type, line))) + self.add(Assign(target, self.coerce_rvalue(rvalue_reg, target.type, line))) elif isinstance(target, AssignmentTargetRegister): - rvalue_reg = self.coerce(rvalue_reg, target.type, line) + rvalue_reg = self.coerce_rvalue(rvalue_reg, target.type, line) self.add(Assign(target.register, rvalue_reg)) elif isinstance(target, AssignmentTargetAttr): if isinstance(target.obj_type, RInstance): - rvalue_reg = self.coerce(rvalue_reg, target.type, line) + rvalue_reg = self.coerce_rvalue(rvalue_reg, target.type, line) self.add(SetAttr(target.obj, target.attr, rvalue_reg, line)) else: key = self.load_str(target.attr) @@ -698,6 +681,18 @@ def assign(self, target: Register | AssignmentTarget, rvalue_reg: Value, line: i else: assert False, "Unsupported assignment target" + def coerce_rvalue(self, rvalue: Value, rtype: RType, line: int) -> Value: + if is_float_rprimitive(rtype) and is_tagged(rvalue.type): + typename = rvalue.type.short_name() + if typename == "short_int": + typename = "int" + self.error( + "Incompatible value representations in assignment " + + f'(expression has type "{typename}", variable has type "float")', + line, + ) + return self.coerce(rvalue, rtype, line) + def process_sequence_assignment( self, target: AssignmentTargetTuple, rvalue: Value, line: int ) -> None: @@ -741,7 +736,6 @@ def process_iterator_tuple_assignment_helper( def process_iterator_tuple_assignment( self, target: AssignmentTargetTuple, rvalue_reg: Value, line: int ) -> None: - iterator = self.call_c(iter_op, [rvalue_reg], line) # This may be the whole lvalue list if there is no starred value @@ -899,8 +893,12 @@ def get_dict_base_type(self, expr: Expression) -> list[Instance]: dict_types = [] for t in types: - assert isinstance(t, Instance), t - dict_base = next(base for base in t.type.mro if base.fullname == "builtins.dict") + if isinstance(t, TypedDictType): + t = t.fallback + dict_base = next(base for base in t.type.mro if base.fullname == "typing.Mapping") + else: + assert isinstance(t, Instance), t + dict_base = next(base for base in t.type.mro if base.fullname == "builtins.dict") dict_types.append(map_instance_to_supertype(t, dict_base)) return dict_types @@ -1010,7 +1008,7 @@ def emit_load_final( line: line number where loading occurs """ if final_var.final_value is not None: # this is safe even for non-native names - return self.load_final_literal_value(final_var.final_value, line) + return self.load_literal_value(final_var.final_value) elif native: return 
self.load_final_static(fullname, self.mapper.type_to_rtype(typ), line, name) else: @@ -1022,7 +1020,6 @@ def is_module_member_expr(self, expr: MemberExpr) -> bool: def call_refexpr_with_args( self, expr: CallExpr, callee: RefExpr, arg_values: list[Value] ) -> Value: - # Handle data-driven special-cased primitive call ops. if callee.fullname and expr.arg_kinds == [ARG_POS] * len(arg_values): fullname = get_call_target_fullname(callee) diff --git a/mypyc/irbuild/callable_class.py b/mypyc/irbuild/callable_class.py index d3ee54a208cd..599dbb81f767 100644 --- a/mypyc/irbuild/callable_class.py +++ b/mypyc/irbuild/callable_class.py @@ -17,7 +17,7 @@ def setup_callable_class(builder: IRBuilder) -> None: - """Generate an (incomplete) callable class representing function. + """Generate an (incomplete) callable class representing a function. This can be a nested function or a function within a non-extension class. Also set up the 'self' variable for that class. diff --git a/mypyc/irbuild/constant_fold.py b/mypyc/irbuild/constant_fold.py index 4e9eb53b9222..dc21be4689e2 100644 --- a/mypyc/irbuild/constant_fold.py +++ b/mypyc/irbuild/constant_fold.py @@ -13,17 +13,26 @@ from typing import Union from typing_extensions import Final -from mypy.constant_fold import ( - constant_fold_binary_int_op, - constant_fold_binary_str_op, - constant_fold_unary_int_op, +from mypy.constant_fold import constant_fold_binary_op, constant_fold_unary_op +from mypy.nodes import ( + BytesExpr, + ComplexExpr, + Expression, + FloatExpr, + IntExpr, + MemberExpr, + NameExpr, + OpExpr, + StrExpr, + UnaryExpr, + Var, ) -from mypy.nodes import Expression, IntExpr, MemberExpr, NameExpr, OpExpr, StrExpr, UnaryExpr, Var from mypyc.irbuild.builder import IRBuilder +from mypyc.irbuild.util import bytes_from_str # All possible result types of constant folding -ConstantValue = Union[int, str] -CONST_TYPES: Final = (int, str) +ConstantValue = Union[int, float, complex, str, bytes] +CONST_TYPES: Final = (int, float, complex, str, bytes) def constant_fold_expr(builder: IRBuilder, expr: Expression) -> ConstantValue | None: @@ -33,31 +42,55 @@ def constant_fold_expr(builder: IRBuilder, expr: Expression) -> ConstantValue | """ if isinstance(expr, IntExpr): return expr.value + if isinstance(expr, FloatExpr): + return expr.value if isinstance(expr, StrExpr): return expr.value + if isinstance(expr, BytesExpr): + return bytes_from_str(expr.value) + if isinstance(expr, ComplexExpr): + return expr.value elif isinstance(expr, NameExpr): node = expr.node if isinstance(node, Var) and node.is_final: - value = node.final_value - if isinstance(value, (CONST_TYPES)): - return value + final_value = node.final_value + if isinstance(final_value, (CONST_TYPES)): + return final_value elif isinstance(expr, MemberExpr): final = builder.get_final_ref(expr) if final is not None: fn, final_var, native = final if final_var.is_final: - value = final_var.final_value - if isinstance(value, (CONST_TYPES)): - return value + final_value = final_var.final_value + if isinstance(final_value, (CONST_TYPES)): + return final_value elif isinstance(expr, OpExpr): left = constant_fold_expr(builder, expr.left) right = constant_fold_expr(builder, expr.right) - if isinstance(left, int) and isinstance(right, int): - return constant_fold_binary_int_op(expr.op, left, right) - elif isinstance(left, str) and isinstance(right, str): - return constant_fold_binary_str_op(expr.op, left, right) + if left is not None and right is not None: + return constant_fold_binary_op_extended(expr.op, left, 
right) elif isinstance(expr, UnaryExpr): value = constant_fold_expr(builder, expr.expr) - if isinstance(value, int): - return constant_fold_unary_int_op(expr.op, value) + if value is not None and not isinstance(value, bytes): + return constant_fold_unary_op(expr.op, value) + return None + + +def constant_fold_binary_op_extended( + op: str, left: ConstantValue, right: ConstantValue +) -> ConstantValue | None: + """Like mypy's constant_fold_binary_op(), but includes bytes support. + + mypy cannot use constant folded bytes easily so it's simpler to only support them in mypyc. + """ + if not isinstance(left, bytes) and not isinstance(right, bytes): + return constant_fold_binary_op(op, left, right) + + if op == "+" and isinstance(left, bytes) and isinstance(right, bytes): + return left + right + elif op == "*" and isinstance(left, bytes) and isinstance(right, int): + return left * right + elif op == "*" and isinstance(left, int) and isinstance(right, bytes): + return left * right + return None diff --git a/mypyc/irbuild/expression.py b/mypyc/irbuild/expression.py index 5997bdbd0a43..281cbb5cd726 100644 --- a/mypyc/irbuild/expression.py +++ b/mypyc/irbuild/expression.py @@ -6,7 +6,7 @@ from __future__ import annotations -from typing import Callable, Sequence, cast +from typing import Callable, Sequence from mypy.nodes import ( ARG_POS, @@ -90,7 +90,6 @@ tokenizer_printf_style, ) from mypyc.irbuild.specialize import apply_function_specialization, apply_method_specialization -from mypyc.irbuild.util import bytes_from_str from mypyc.primitives.bytes_ops import bytes_slice_op from mypyc.primitives.dict_ops import dict_get_item_op, dict_new_op, dict_set_item_op from mypyc.primitives.generic_ops import iter_op @@ -289,6 +288,9 @@ def transform_call_expr(builder: IRBuilder, expr: CallExpr) -> Value: callee = callee.analyzed.expr # Unwrap type application if isinstance(callee, MemberExpr): + if isinstance(callee.expr, RefExpr) and isinstance(callee.expr.node, MypyFile): + # Call a module-level function, not a method. 
+ return translate_call(builder, expr, callee) return apply_method_specialization(builder, expr, callee) or translate_method_call( builder, expr, callee ) @@ -381,7 +383,7 @@ def translate_method_call(builder: IRBuilder, expr: CallExpr, callee: MemberExpr def call_classmethod(builder: IRBuilder, ir: ClassIR, expr: CallExpr, callee: MemberExpr) -> Value: decl = ir.method_decl(callee.name) args = [] - arg_kinds, arg_names = expr.arg_kinds[:], expr.arg_names[:] + arg_kinds, arg_names = expr.arg_kinds.copy(), expr.arg_names.copy() # Add the class argument for class methods in extension classes if decl.kind == FUNC_CLASSMETHOD and ir.is_ext_class: args.append(builder.load_native_type_object(ir.fullname)) @@ -448,7 +450,7 @@ def translate_super_method_call(builder: IRBuilder, expr: CallExpr, callee: Supe decl = base.method_decl(callee.name) arg_values = [builder.accept(arg) for arg in expr.args] - arg_kinds, arg_names = expr.arg_kinds[:], expr.arg_names[:] + arg_kinds, arg_names = expr.arg_kinds.copy(), expr.arg_names.copy() if decl.kind != FUNC_STATICMETHOD: # Grab first argument @@ -492,7 +494,7 @@ def transform_op_expr(builder: IRBuilder, expr: OpExpr) -> Value: return builder.shortcircuit_expr(expr) # Special case for string formatting - if expr.op == "%" and (isinstance(expr.left, StrExpr) or isinstance(expr.left, BytesExpr)): + if expr.op == "%" and isinstance(expr.left, (StrExpr, BytesExpr)): ret = translate_printf_style_formatting(builder, expr.left, expr.right) if ret is not None: return ret @@ -562,10 +564,8 @@ def try_constant_fold(builder: IRBuilder, expr: Expression) -> Value | None: Return None otherwise. """ value = constant_fold_expr(builder, expr) - if isinstance(value, int): - return builder.load_int(value) - elif isinstance(value, str): - return builder.load_str(value) + if value is not None: + return builder.load_literal_value(value) return None @@ -647,10 +647,6 @@ def set_literal_values(builder: IRBuilder, items: Sequence[Expression]) -> list[ values.append(True) elif item.fullname == "builtins.False": values.append(False) - elif isinstance(item, (BytesExpr, FloatExpr, ComplexExpr)): - # constant_fold_expr() doesn't handle these (yet?) 
- v = bytes_from_str(item.value) if isinstance(item, BytesExpr) else item.value - values.append(v) elif isinstance(item, TupleExpr): tuple_values = set_literal_values(builder, item.items) if tuple_values is not None: @@ -670,7 +666,6 @@ def precompute_set_literal(builder: IRBuilder, s: SetExpr) -> Value | None: Supported items: - Anything supported by irbuild.constant_fold.constant_fold_expr() - None, True, and False - - Float, byte, and complex literals - Tuple literals with only items listed above """ values = set_literal_values(builder, s.items) @@ -704,7 +699,9 @@ def transform_comparison_expr(builder: IRBuilder, e: ComparisonExpr) -> Value: lhs = e.operands[0] mypy_file = builder.graph["builtins"].tree assert mypy_file is not None - bool_type = Instance(cast(TypeInfo, mypy_file.names["bool"].node), []) + info = mypy_file.names["bool"].node + assert isinstance(info, TypeInfo) + bool_type = Instance(info, []) exprs = [] for item in items: expr = ComparisonExpr([cmp_op], [lhs, item]) @@ -804,10 +801,10 @@ def transform_basic_comparison( if ( is_int_rprimitive(left.type) and is_int_rprimitive(right.type) - and op in int_comparison_op_mapping.keys() + and op in int_comparison_op_mapping ): return builder.compare_tagged(left, right, op, line) - if is_fixed_width_rtype(left.type) and op in int_comparison_op_mapping.keys(): + if is_fixed_width_rtype(left.type) and op in int_comparison_op_mapping: if right.type == left.type: op_id = ComparisonOp.signed_ops[op] return builder.builder.comparison_op(left, right, op_id, line) @@ -818,7 +815,7 @@ def transform_basic_comparison( ) elif ( is_fixed_width_rtype(right.type) - and op in int_comparison_op_mapping.keys() + and op in int_comparison_op_mapping and isinstance(left, Integer) ): op_id = ComparisonOp.signed_ops[op] diff --git a/mypyc/irbuild/format_str_tokenizer.py b/mypyc/irbuild/format_str_tokenizer.py index 5ab38d0f2264..480c683aa164 100644 --- a/mypyc/irbuild/format_str_tokenizer.py +++ b/mypyc/irbuild/format_str_tokenizer.py @@ -13,6 +13,7 @@ from mypy.errors import Errors from mypy.messages import MessageBuilder from mypy.nodes import Context, Expression +from mypy.options import Options from mypyc.ir.ops import Integer, Value from mypyc.ir.rtypes import ( c_pyssize_t_rprimitive, @@ -108,7 +109,9 @@ def tokenizer_format_call(format_str: str) -> tuple[list[str], list[FormatOp]] | """ # Creates an empty MessageBuilder here. # It wouldn't be used since the code has passed the type-checking. - specifiers = parse_format_value(format_str, EMPTY_CONTEXT, MessageBuilder(Errors(), {})) + specifiers = parse_format_value( + format_str, EMPTY_CONTEXT, MessageBuilder(Errors(Options()), {}) + ) if specifiers is None: return None format_ops = generate_format_ops(specifiers) diff --git a/mypyc/irbuild/function.py b/mypyc/irbuild/function.py index 02155d70e928..822350ea829b 100644 --- a/mypyc/irbuild/function.py +++ b/mypyc/irbuild/function.py @@ -435,7 +435,6 @@ def handle_ext_method(builder: IRBuilder, cdef: ClassDef, fdef: FuncDef) -> None class_ir.method_decls[name].sig, base.method_decls[name].sig ) ): - # TODO: Support contravariant subtyping in the input argument for # property setters. Need to make a special glue method for handling this, # similar to gen_glue_property. 
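Elsewhere in this diff, ``mypyc.errors.Errors`` (and the underlying ``mypy.errors.Errors``, as updated in ``format_str_tokenizer.py`` above) now requires an ``Options`` instance. A minimal usage sketch under that assumption; the message and path are placeholders:

```python
from mypy.options import Options
from mypyc.errors import Errors

# The wrapper now threads Options through to mypy.errors.Errors
# (previously it was constructed without them).
errors = Errors(Options())
errors.error("unsupported operation", path="example.py", line=3)
```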
@@ -516,7 +515,7 @@ def gen_func_ns(builder: IRBuilder) -> str: return "_".join( info.name + ("" if not info.class_name else "_" + info.class_name) for info in builder.fn_infos - if info.name and info.name != "" + if info.name and info.name != "" ) @@ -643,7 +642,7 @@ def f(builder: IRBuilder, x: object) -> int: ... args = args[: -base_sig.num_bitmap_args] arg_kinds = arg_kinds[: -base_sig.num_bitmap_args] arg_names = arg_names[: -base_sig.num_bitmap_args] - bitmap_args = builder.builder.args[-base_sig.num_bitmap_args :] + bitmap_args = list(builder.builder.args[-base_sig.num_bitmap_args :]) # We can do a passthrough *args/**kwargs with a native call, but if the # args need to get distributed out to arguments, we just let python handle it diff --git a/mypyc/irbuild/ll_builder.py b/mypyc/irbuild/ll_builder.py index 2391ccc4d0ed..aa152d32a144 100644 --- a/mypyc/irbuild/ll_builder.py +++ b/mypyc/irbuild/ll_builder.py @@ -46,6 +46,10 @@ Cast, ComparisonOp, Extend, + Float, + FloatComparisonOp, + FloatNeg, + FloatOp, GetAttr, GetElementPtr, Goto, @@ -64,9 +68,12 @@ SetMem, Truncate, TupleGet, + TupleSet, Unbox, Unreachable, Value, + float_comparison_op_to_id, + float_op_to_id, int_op_to_id, ) from mypyc.ir.rtypes import ( @@ -96,6 +103,7 @@ is_bytes_rprimitive, is_dict_rprimitive, is_fixed_width_rtype, + is_float_rprimitive, is_int32_rprimitive, is_int64_rprimitive, is_int_rprimitive, @@ -126,6 +134,7 @@ dict_update_in_display_op, ) from mypyc.primitives.exc_ops import err_occurred_op, keep_propagating_op +from mypyc.primitives.float_ops import copysign_op, int_to_float_op from mypyc.primitives.generic_ops import ( generic_len_op, generic_ssize_t_len_op, @@ -257,7 +266,7 @@ def self(self) -> Register: def flush_keep_alives(self) -> None: if self.keep_alives: - self.add(KeepAlive(self.keep_alives[:])) + self.add(KeepAlive(self.keep_alives.copy())) self.keep_alives = [] # Type conversions @@ -340,11 +349,39 @@ def coerce( is_bool_rprimitive(src_type) or is_bit_rprimitive(src_type) ) and is_fixed_width_rtype(target_type): return self.add(Extend(src, target_type, signed=False)) - else: - # To go from one unboxed type to another, we go through a boxed - # in-between value, for simplicity. - tmp = self.box(src) - return self.unbox_or_cast(tmp, target_type, line) + elif isinstance(src, Integer) and is_float_rprimitive(target_type): + if is_tagged(src_type): + return Float(float(src.value // 2)) + return Float(float(src.value)) + elif is_tagged(src_type) and is_float_rprimitive(target_type): + return self.int_to_float(src, line) + elif ( + isinstance(src_type, RTuple) + and isinstance(target_type, RTuple) + and len(src_type.types) == len(target_type.types) + ): + # Coerce between two tuple types by coercing each item separately + values = [] + for i in range(len(src_type.types)): + v = None + if isinstance(src, TupleSet): + item = src.items[i] + # We can't reuse register values, since they can be modified. + if not isinstance(item, Register): + v = item + if v is None: + v = TupleGet(src, i) + self.add(v) + values.append(v) + return self.add( + TupleSet( + [self.coerce(v, t, line) for v, t in zip(values, target_type.types)], line + ) + ) + # To go between any other unboxed types, we go through a boxed + # in-between value, for simplicity. 
+ tmp = self.box(src) + return self.unbox_or_cast(tmp, target_type, line) if (not src_type.is_unboxed and target_type.is_unboxed) or not is_subtype( src_type, target_type ): @@ -839,10 +876,8 @@ def _py_vector_call( ): if arg_values: # Create a C array containing all arguments as boxed values. - array = Register(RArray(object_rprimitive, len(arg_values))) coerced_args = [self.coerce(arg, object_rprimitive, line) for arg in arg_values] - self.add(AssignMulti(array, coerced_args)) - arg_ptr = self.add(LoadAddress(object_pointer_rprimitive, array)) + arg_ptr = self.setup_rarray(object_rprimitive, coerced_args, object_ptr=True) else: arg_ptr = Integer(0, object_pointer_rprimitive) num_pos = num_positional_args(arg_values, arg_kinds) @@ -916,13 +951,10 @@ def _py_vector_method_call( not kind.is_star() and not kind.is_optional() for kind in arg_kinds ): method_name_reg = self.load_str(method_name) - array = Register(RArray(object_rprimitive, len(arg_values) + 1)) - self_arg = self.coerce(obj, object_rprimitive, line) - coerced_args = [self_arg] + [ - self.coerce(arg, object_rprimitive, line) for arg in arg_values + coerced_args = [ + self.coerce(arg, object_rprimitive, line) for arg in [obj] + arg_values ] - self.add(AssignMulti(array, coerced_args)) - arg_ptr = self.add(LoadAddress(object_pointer_rprimitive, array)) + arg_ptr = self.setup_rarray(object_rprimitive, coerced_args, object_ptr=True) num_pos = num_positional_args(arg_values, arg_kinds) keywords = self._vectorcall_keywords(arg_names) value = self.call_c( @@ -1027,6 +1059,8 @@ def native_args_to_positional( elif not lst: if is_fixed_width_rtype(arg.type): output_arg = Integer(0, arg.type) + elif is_float_rprimitive(arg.type): + output_arg = Float(0.0) else: output_arg = self.add(LoadErrorValue(arg.type, is_borrowed=True)) else: @@ -1166,7 +1200,7 @@ def load_int(self, value: int) -> Value: def load_float(self, value: float) -> Value: """Load a float literal value.""" - return self.add(LoadLiteral(value, float_rprimitive)) + return Float(value) def load_str(self, value: str) -> Value: """Load a str literal value. @@ -1328,6 +1362,24 @@ def binary_op(self, lreg: Value, rreg: Value, op: str, line: int) -> Value: if is_tagged(rtype) and is_subtype(ltype, rtype): lreg = self.coerce(lreg, short_int_rprimitive, line) return self.compare_tagged(lreg, rreg, op, line) + if is_float_rprimitive(ltype) or is_float_rprimitive(rtype): + if isinstance(lreg, Integer): + lreg = Float(float(lreg.numeric_value())) + elif isinstance(rreg, Integer): + rreg = Float(float(rreg.numeric_value())) + elif is_int_rprimitive(lreg.type): + lreg = self.int_to_float(lreg, line) + elif is_int_rprimitive(rreg.type): + rreg = self.int_to_float(rreg, line) + if is_float_rprimitive(lreg.type) and is_float_rprimitive(rreg.type): + if op in float_comparison_op_to_id: + return self.compare_floats(lreg, rreg, float_comparison_op_to_id[op], line) + if op.endswith("="): + base_op = op[:-1] + else: + base_op = op + if base_op in float_op_to_id: + return self.float_op(lreg, rreg, base_op, line) call_c_ops_candidates = binary_ops.get(op, []) target = self.matching_call_c(call_c_ops_candidates, [lreg, rreg], line) @@ -1556,6 +1608,12 @@ def unary_op(self, value: Value, expr_op: str, line: int) -> Value: return self.int_op(typ, value, Integer(-1, typ), IntOp.XOR, line) elif expr_op == "+": return value + if is_float_rprimitive(typ): + if expr_op == "-": + return self.add(FloatNeg(value, line)) + elif expr_op == "+": + return value + if isinstance(value, Integer): # TODO: Overflow? 
Unsigned? num = value.value @@ -1564,6 +1622,8 @@ def unary_op(self, value: Value, expr_op: str, line: int) -> Value: return Integer(-num, typ, value.line) if is_tagged(typ) and expr_op == "+": return value + if isinstance(value, Float): + return Float(-value.value, value.line) if isinstance(typ, RInstance): if expr_op == "-": method = "__neg__" @@ -1631,7 +1691,7 @@ def new_list_op(self, values: list[Value], line: int) -> Value: # for-loop and inline the SetMem operation, which is faster # than list_build_op, however generates more code. result_list = self.call_c(new_list_op, length, line) - if len(values) == 0: + if not values: return result_list args = [self.coerce(item, object_rprimitive, line) for item in values] ob_item_ptr = self.add(GetElementPtr(result_list, PyListObject, "ob_item", line)) @@ -1651,6 +1711,16 @@ def new_list_op(self, values: list[Value], line: int) -> Value: def new_set_op(self, values: list[Value], line: int) -> Value: return self.call_c(new_set_op, values, line) + def setup_rarray( + self, item_type: RType, values: Sequence[Value], *, object_ptr: bool = False + ) -> Value: + """Declare and initialize a new RArray, returning its address.""" + array = Register(RArray(item_type, len(values))) + self.add(AssignMulti(array, list(values))) + return self.add( + LoadAddress(object_pointer_rprimitive if object_ptr else c_pointer_rprimitive, array) + ) + def shortcircuit_helper( self, op: str, @@ -1713,6 +1783,8 @@ def bool_value(self, value: Value) -> Value: ): # Directly call the __bool__ method on classes that have it. result = self.gen_method_call(value, "__bool__", [], bool_rprimitive, value.line) + elif is_float_rprimitive(value.type): + result = self.compare_floats(value, Float(0.0), FloatComparisonOp.NEQ, value.line) else: value_type = optional_value_type(value.type) if value_type is not None: @@ -1890,6 +1962,62 @@ def int_op(self, type: RType, lhs: Value, rhs: Value, op: int, line: int = -1) - """ return self.add(IntOp(type, lhs, rhs, op, line)) + def float_op(self, lhs: Value, rhs: Value, op: str, line: int) -> Value: + """Generate a native float binary arithmetic operation. + + This follows Python semantics (e.g. raise exception on division by zero). + Add a FloatOp directly if you want low-level semantics. + + Args: + op: Binary operator (e.g. '+' or '*') + """ + op_id = float_op_to_id[op] + if op_id in (FloatOp.DIV, FloatOp.MOD): + if not (isinstance(rhs, Float) and rhs.value != 0.0): + c = self.compare_floats(rhs, Float(0.0), FloatComparisonOp.EQ, line) + err, ok = BasicBlock(), BasicBlock() + self.add(Branch(c, err, ok, Branch.BOOL, rare=True)) + self.activate_block(err) + if op_id == FloatOp.DIV: + msg = "float division by zero" + else: + msg = "float modulo" + self.add(RaiseStandardError(RaiseStandardError.ZERO_DIVISION_ERROR, msg, line)) + self.add(Unreachable()) + self.activate_block(ok) + if op_id == FloatOp.MOD: + # Adjust the result to match Python semantics (FloatOp follows C semantics). 
+ return self.float_mod(lhs, rhs, line) + else: + return self.add(FloatOp(lhs, rhs, op_id, line)) + + def float_mod(self, lhs: Value, rhs: Value, line: int) -> Value: + """Perform x % y on floats using Python semantics.""" + mod = self.add(FloatOp(lhs, rhs, FloatOp.MOD, line)) + res = Register(float_rprimitive) + self.add(Assign(res, mod)) + tricky, adjust, copysign, done = BasicBlock(), BasicBlock(), BasicBlock(), BasicBlock() + is_zero = self.add(FloatComparisonOp(res, Float(0.0), FloatComparisonOp.EQ, line)) + self.add(Branch(is_zero, copysign, tricky, Branch.BOOL)) + self.activate_block(tricky) + same_signs = self.is_same_float_signs(lhs, rhs, line) + self.add(Branch(same_signs, done, adjust, Branch.BOOL)) + self.activate_block(adjust) + adj = self.float_op(res, rhs, "+", line) + self.add(Assign(res, adj)) + self.add(Goto(done)) + self.activate_block(copysign) + # If the remainder is zero, CPython ensures the result has the + # same sign as the denominator. + adj = self.call_c(copysign_op, [Float(0.0), rhs], line) + self.add(Assign(res, adj)) + self.add(Goto(done)) + self.activate_block(done) + return res + + def compare_floats(self, lhs: Value, rhs: Value, op: int, line: int) -> Value: + return self.add(FloatComparisonOp(lhs, rhs, op, line)) + def fixed_width_int_op(self, type: RType, lhs: Value, rhs: Value, op: int, line: int) -> Value: """Generate a binary op using Python fixed-width integer semantics. @@ -1932,13 +2060,12 @@ def inline_fixed_width_divide(self, type: RType, lhs: Value, rhs: Value, line: i res = Register(type) div = self.int_op(type, lhs, rhs, IntOp.DIV, line) self.add(Assign(res, div)) - diff_signs = self.is_different_native_int_signs(type, lhs, rhs, line) + same_signs = self.is_same_native_int_signs(type, lhs, rhs, line) tricky, adjust, done = BasicBlock(), BasicBlock(), BasicBlock() - self.add(Branch(diff_signs, done, tricky, Branch.BOOL)) + self.add(Branch(same_signs, done, tricky, Branch.BOOL)) self.activate_block(tricky) mul = self.int_op(type, res, rhs, IntOp.MUL, line) mul_eq = self.add(ComparisonOp(mul, lhs, ComparisonOp.EQ, line)) - adjust = BasicBlock() self.add(Branch(mul_eq, done, adjust, Branch.BOOL)) self.activate_block(adjust) adj = self.int_op(type, res, Integer(1, type), IntOp.SUB, line) @@ -1952,12 +2079,11 @@ def inline_fixed_width_mod(self, type: RType, lhs: Value, rhs: Value, line: int) res = Register(type) mod = self.int_op(type, lhs, rhs, IntOp.MOD, line) self.add(Assign(res, mod)) - diff_signs = self.is_different_native_int_signs(type, lhs, rhs, line) + same_signs = self.is_same_native_int_signs(type, lhs, rhs, line) tricky, adjust, done = BasicBlock(), BasicBlock(), BasicBlock() - self.add(Branch(diff_signs, done, tricky, Branch.BOOL)) + self.add(Branch(same_signs, done, tricky, Branch.BOOL)) self.activate_block(tricky) is_zero = self.add(ComparisonOp(res, Integer(0, type), ComparisonOp.EQ, line)) - adjust = BasicBlock() self.add(Branch(is_zero, done, adjust, Branch.BOOL)) self.activate_block(adjust) adj = self.int_op(type, res, rhs, IntOp.ADD, line) @@ -1966,11 +2092,16 @@ def inline_fixed_width_mod(self, type: RType, lhs: Value, rhs: Value, line: int) self.activate_block(done) return res - def is_different_native_int_signs(self, type: RType, a: Value, b: Value, line: int) -> Value: + def is_same_native_int_signs(self, type: RType, a: Value, b: Value, line: int) -> Value: neg1 = self.add(ComparisonOp(a, Integer(0, type), ComparisonOp.SLT, line)) neg2 = self.add(ComparisonOp(b, Integer(0, type), ComparisonOp.SLT, line)) return 
self.add(ComparisonOp(neg1, neg2, ComparisonOp.EQ, line)) + def is_same_float_signs(self, a: Value, b: Value, line: int) -> Value: + neg1 = self.add(FloatComparisonOp(a, Float(0.0), FloatComparisonOp.LT, line)) + neg2 = self.add(FloatComparisonOp(b, Float(0.0), FloatComparisonOp.LT, line)) + return self.add(ComparisonOp(neg1, neg2, ComparisonOp.EQ, line)) + def comparison_op(self, lhs: Value, rhs: Value, op: int, line: int) -> Value: return self.add(ComparisonOp(lhs, rhs, op, line)) @@ -2042,6 +2173,9 @@ def new_tuple_with_length(self, length: Value, line: int) -> Value: """ return self.call_c(new_tuple_with_length_op, [length], line) + def int_to_float(self, n: Value, line: int) -> Value: + return self.call_c(int_to_float_op, [n], line) + # Internal helpers def decompose_union_helper( diff --git a/mypyc/irbuild/main.py b/mypyc/irbuild/main.py index 9bbb90aad207..85b905393af1 100644 --- a/mypyc/irbuild/main.py +++ b/mypyc/irbuild/main.py @@ -130,7 +130,7 @@ def transform_mypy_file(builder: IRBuilder, mypyfile: MypyFile) -> None: ir = builder.mapper.type_to_ir[cls.info] builder.classes.append(ir) - builder.enter("") + builder.enter("") # Make sure we have a builtins import builder.gen_import("builtins", -1) diff --git a/mypyc/irbuild/prebuildvisitor.py b/mypyc/irbuild/prebuildvisitor.py index d99453955002..519b3445e925 100644 --- a/mypyc/irbuild/prebuildvisitor.py +++ b/mypyc/irbuild/prebuildvisitor.py @@ -1,22 +1,25 @@ from __future__ import annotations from mypy.nodes import ( + Block, Decorator, Expression, FuncDef, FuncItem, + Import, LambdaExpr, MemberExpr, MypyFile, NameExpr, + Node, SymbolNode, Var, ) -from mypy.traverser import TraverserVisitor +from mypy.traverser import ExtendedTraverserVisitor from mypyc.errors import Errors -class PreBuildVisitor(TraverserVisitor): +class PreBuildVisitor(ExtendedTraverserVisitor): """Mypy file AST visitor run before building the IR. This collects various things, including: @@ -26,6 +29,7 @@ class PreBuildVisitor(TraverserVisitor): * Find non-local variables (free variables) * Find property setters * Find decorators of functions + * Find module import groups The main IR build pass uses this information. """ @@ -68,10 +72,26 @@ def __init__( # Map function to indices of decorators to remove self.decorators_to_remove: dict[FuncDef, list[int]] = decorators_to_remove + # A mapping of import groups (a series of Import nodes with + # nothing inbetween) where each group is keyed by its first + # import node. 
+ self.module_import_groups: dict[Import, list[Import]] = {} + self._current_import_group: Import | None = None + self.errors: Errors = errors self.current_file: MypyFile = current_file + def visit(self, o: Node) -> bool: + if not isinstance(o, Import): + self._current_import_group = None + return True + + def visit_block(self, block: Block) -> None: + self._current_import_group = None + super().visit_block(block) + self._current_import_group = None + def visit_decorator(self, dec: Decorator) -> None: if dec.decorators: # Only add the function being decorated if there exist @@ -123,6 +143,14 @@ def visit_func(self, func: FuncItem) -> None: super().visit_func(func) self.funcs.pop() + def visit_import(self, imp: Import) -> None: + if self._current_import_group is not None: + self.module_import_groups[self._current_import_group].append(imp) + else: + self.module_import_groups[imp] = [imp] + self._current_import_group = imp + super().visit_import(imp) + def visit_name_expr(self, expr: NameExpr) -> None: if isinstance(expr.node, (Var, FuncDef)): self.visit_symbol_node(expr.node) diff --git a/mypyc/irbuild/prepare.py b/mypyc/irbuild/prepare.py index b3d10887ce21..5e6520048197 100644 --- a/mypyc/irbuild/prepare.py +++ b/mypyc/irbuild/prepare.py @@ -85,7 +85,7 @@ def build_type_map( ) class_ir.is_ext_class = is_extension_class(cdef) if class_ir.is_ext_class: - class_ir.deletable = cdef.info.deletable_attributes[:] + class_ir.deletable = cdef.info.deletable_attributes.copy() # If global optimizations are disabled, turn of tracking of class children if not options.global_opts: class_ir.children = None @@ -465,7 +465,6 @@ def prepare_init_method(cdef: ClassDef, ir: ClassIR, module_name: str, mapper: M def prepare_non_ext_class_def( path: str, module_name: str, cdef: ClassDef, errors: Errors, mapper: Mapper ) -> None: - ir = mapper.type_to_ir[cdef.info] info = cdef.info diff --git a/mypyc/irbuild/specialize.py b/mypyc/irbuild/specialize.py index 8cb24c5b47da..ff9df0cd597b 100644 --- a/mypyc/irbuild/specialize.py +++ b/mypyc/irbuild/specialize.py @@ -55,6 +55,7 @@ is_bool_rprimitive, is_dict_rprimitive, is_fixed_width_rtype, + is_float_rprimitive, is_int32_rprimitive, is_int64_rprimitive, is_int_rprimitive, @@ -728,3 +729,15 @@ def translate_bool(builder: IRBuilder, expr: CallExpr, callee: RefExpr) -> Value arg = expr.args[0] src = builder.accept(arg) return builder.builder.bool_value(src) + + +@specialize_function("builtins.float") +def translate_float(builder: IRBuilder, expr: CallExpr, callee: RefExpr) -> Optional[Value]: + if len(expr.args) != 1 or expr.arg_kinds[0] != ARG_POS: + return None + arg = expr.args[0] + arg_type = builder.node_type(arg) + if is_float_rprimitive(arg_type): + # No-op float conversion. 
+ return builder.accept(arg) + return None diff --git a/mypyc/irbuild/statement.py b/mypyc/irbuild/statement.py index b9754ba1a147..63297618108c 100644 --- a/mypyc/irbuild/statement.py +++ b/mypyc/irbuild/statement.py @@ -43,13 +43,17 @@ YieldFromExpr, ) from mypyc.ir.ops import ( + NAMESPACE_MODULE, NO_TRACEBACK_LINE_NO, Assign, BasicBlock, Branch, + InitStatic, Integer, LoadAddress, LoadErrorValue, + LoadLiteral, + LoadStatic, MethodCall, RaiseStandardError, Register, @@ -60,6 +64,7 @@ ) from mypyc.ir.rtypes import ( RInstance, + c_pyssize_t_rprimitive, exc_rtuple, is_tagged, none_rprimitive, @@ -96,7 +101,8 @@ from mypyc.primitives.misc_ops import ( check_stop_op, coro_op, - import_from_op, + import_from_many_op, + import_many_op, send_op, type_op, yield_from_except_op, @@ -145,7 +151,7 @@ def transform_return_stmt(builder: IRBuilder, stmt: ReturnStmt) -> None: def transform_assignment_stmt(builder: IRBuilder, stmt: AssignmentStmt) -> None: lvalues = stmt.lvalues - assert len(lvalues) >= 1 + assert lvalues builder.disallow_class_assignments(lvalues, stmt.line) first_lvalue = lvalues[0] if stmt.type and isinstance(stmt.rvalue, TempNode): @@ -169,7 +175,7 @@ def transform_assignment_stmt(builder: IRBuilder, stmt: AssignmentStmt) -> None: temp = Register(rvalue_reg.type) builder.assign(temp, rvalue_reg, stmt.line) temps.append(temp) - for (left, temp) in zip(first_lvalue.items, temps): + for left, temp in zip(first_lvalue.items, temps): assignment_target = builder.get_assignment_target(left) builder.assign(assignment_target, temp, stmt.line) builder.flush_keep_alives() @@ -214,35 +220,93 @@ def transform_operator_assignment_stmt(builder: IRBuilder, stmt: OperatorAssignm builder.flush_keep_alives() +def import_globals_id_and_name(module_id: str, as_name: str | None) -> tuple[str, str]: + """Compute names for updating the globals dict with the appropriate module. + + * For 'import foo.bar as baz' we add 'foo.bar' with the name 'baz' + * For 'import foo.bar' we add 'foo' with the name 'foo' + + Typically we then ignore these entries and access things directly + via the module static, but we will use the globals version for + modules that mypy couldn't find, since it doesn't analyze module + references from those properly.""" + if as_name: + globals_id = module_id + globals_name = as_name + else: + globals_id = globals_name = module_id.split(".")[0] + + return globals_id, globals_name + + def transform_import(builder: IRBuilder, node: Import) -> None: if node.is_mypy_only: return - globals = builder.load_globals_dict() - for node_id, as_name in node.ids: - builder.gen_import(node_id, node.line) - - # Update the globals dict with the appropriate module: - # * For 'import foo.bar as baz' we add 'foo.bar' with the name 'baz' - # * For 'import foo.bar' we add 'foo' with the name 'foo' - # Typically we then ignore these entries and access things directly - # via the module static, but we will use the globals version for modules - # that mypy couldn't find, since it doesn't analyze module references - # from those properly. - - # TODO: Don't add local imports to the global namespace - - # Miscompiling imports inside of functions, like below in import from. - if as_name: - name = as_name - base = node_id - else: - base = name = node_id.split(".")[0] - obj = builder.get_module(base, node.line) + # Imports (not from imports!) are processed in an odd way so they can be + # table-driven and compact. Here's how it works: + # + # Import nodes are divided in groups (in the prebuild visitor). 
Each group + # consists of consecutive Import nodes: + # + # import mod <| group #1 + # import mod2 | + # + # def foo() -> None: + # import mod3 <- group #2 (*) + # + # import mod4 <| group #3 + # import mod5 | + # + # Every time we encounter the first import of a group, build IR to call a + # helper function that will perform all of the group's imports in one go. + if not node.is_top_level: + # (*) Unless the import is within a function. In that case, prioritize + # speed over codesize when generating IR. + globals = builder.load_globals_dict() + for mod_id, as_name in node.ids: + builder.gen_import(mod_id, node.line) + globals_id, globals_name = import_globals_id_and_name(mod_id, as_name) + builder.gen_method_call( + globals, + "__setitem__", + [builder.load_str(globals_name), builder.get_module(globals_id, node.line)], + result_type=None, + line=node.line, + ) + return - builder.gen_method_call( - globals, "__setitem__", [builder.load_str(name), obj], result_type=None, line=node.line - ) + if node not in builder.module_import_groups: + return + + modules = [] + static_ptrs = [] + # To show the right line number on failure, we have to add the traceback + # entry within the helper function (which is admittedly ugly). To drive + # this, we need the line number corresponding to each module. + mod_lines = [] + for import_node in builder.module_import_groups[node]: + for mod_id, as_name in import_node.ids: + builder.imports[mod_id] = None + modules.append((mod_id, *import_globals_id_and_name(mod_id, as_name))) + mod_static = LoadStatic(object_rprimitive, mod_id, namespace=NAMESPACE_MODULE) + static_ptrs.append(builder.add(LoadAddress(object_pointer_rprimitive, mod_static))) + mod_lines.append(Integer(import_node.line, c_pyssize_t_rprimitive)) + + static_array_ptr = builder.builder.setup_rarray(object_pointer_rprimitive, static_ptrs) + import_line_ptr = builder.builder.setup_rarray(c_pyssize_t_rprimitive, mod_lines) + builder.call_c( + import_many_op, + [ + builder.add(LoadLiteral(tuple(modules), object_rprimitive)), + static_array_ptr, + builder.load_globals_dict(), + builder.load_str(builder.module_path), + builder.load_str(builder.fn_info.name), + import_line_ptr, + ], + NO_TRACEBACK_LINE_NO, + ) def transform_import_from(builder: IRBuilder, node: ImportFrom) -> None: @@ -258,29 +322,25 @@ def transform_import_from(builder: IRBuilder, node: ImportFrom) -> None: module_package = "" id = importlib.util.resolve_name("." * node.relative + node.id, module_package) - - globals = builder.load_globals_dict() - imported_names = [name for name, _ in node.names] - module = builder.gen_import_from(id, globals, imported_names, node.line) - - # Copy everything into our module's dict. + builder.imports[id] = None + + names = [name for name, _ in node.names] + as_names = [as_name or name for name, as_name in node.names] + names_literal = builder.add(LoadLiteral(tuple(names), object_rprimitive)) + if as_names == names: + # Reuse names tuple to reduce verbosity. + as_names_literal = names_literal + else: + as_names_literal = builder.add(LoadLiteral(tuple(as_names), object_rprimitive)) # Note that we miscompile import from inside of functions here, - # since that case *shouldn't* load it into the globals dict. + # since that case *shouldn't* load everything into the globals dict. # This probably doesn't matter much and the code runs basically right. 
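+    # For example, "from testmodule import g, h" passes names=('g', 'h') and
+    # reuses that same tuple for as_names, while "from testmodule import h as two"
+    # passes names=('h',) and as_names=('two',) (see irbuild-basic.test).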
- for name, maybe_as_name in node.names: - as_name = maybe_as_name or name - obj = builder.call_c( - import_from_op, - [module, builder.load_str(id), builder.load_str(name), builder.load_str(as_name)], - node.line, - ) - builder.gen_method_call( - globals, - "__setitem__", - [builder.load_str(as_name), obj], - result_type=None, - line=node.line, - ) + module = builder.call_c( + import_from_many_op, + [builder.load_str(id), names_literal, as_names_literal, builder.load_globals_dict()], + node.line, + ) + builder.add(InitStatic(module, id, namespace=NAMESPACE_MODULE)) def transform_import_all(builder: IRBuilder, node: ImportAll) -> None: diff --git a/mypyc/lib-rt/CPy.h b/mypyc/lib-rt/CPy.h index 016a6d3ea9e0..7a3e16fe9d65 100644 --- a/mypyc/lib-rt/CPy.h +++ b/mypyc/lib-rt/CPy.h @@ -147,9 +147,9 @@ CPyTagged CPyTagged_Lshift(CPyTagged left, CPyTagged right); bool CPyTagged_IsEq_(CPyTagged left, CPyTagged right); bool CPyTagged_IsLt_(CPyTagged left, CPyTagged right); PyObject *CPyTagged_Str(CPyTagged n); +CPyTagged CPyTagged_FromFloat(double f); PyObject *CPyLong_FromStrWithBase(PyObject *o, CPyTagged base); PyObject *CPyLong_FromStr(PyObject *o); -PyObject *CPyLong_FromFloat(PyObject *o); PyObject *CPyBool_Str(bool b); int64_t CPyLong_AsInt64(PyObject *o); int64_t CPyInt64_Divide(int64_t x, int64_t y); @@ -158,6 +158,7 @@ int32_t CPyLong_AsInt32(PyObject *o); int32_t CPyInt32_Divide(int32_t x, int32_t y); int32_t CPyInt32_Remainder(int32_t x, int32_t y); void CPyInt32_Overflow(void); +double CPyTagged_TrueDivide(CPyTagged x, CPyTagged y); static inline int CPyTagged_CheckLong(CPyTagged x) { return x & CPY_INT_TAG; @@ -283,6 +284,24 @@ static inline bool CPyTagged_IsLe(CPyTagged left, CPyTagged right) { } +// Float operations + + +double CPyFloat_FloorDivide(double x, double y); +double CPyFloat_Pow(double x, double y); +double CPyFloat_Sin(double x); +double CPyFloat_Cos(double x); +double CPyFloat_Tan(double x); +double CPyFloat_Sqrt(double x); +double CPyFloat_Exp(double x); +double CPyFloat_Log(double x); +CPyTagged CPyFloat_Floor(double x); +CPyTagged CPyFloat_Ceil(double x); +double CPyFloat_FromTagged(CPyTagged x); +bool CPyFloat_IsInf(double x); +bool CPyFloat_IsNaN(double x); + + // Generic operations (that work with arbitrary types) @@ -452,7 +471,6 @@ PyObject *CPyBytes_Join(PyObject *sep, PyObject *iter); int CPyBytes_Compare(PyObject *left, PyObject *right); - // Set operations @@ -604,8 +622,10 @@ PyObject *CPy_Super(PyObject *builtins, PyObject *self); PyObject *CPy_CallReverseOpMethod(PyObject *left, PyObject *right, const char *op, _Py_Identifier *method); -PyObject *CPyImport_ImportFrom(PyObject *module, PyObject *package_name, - PyObject *import_name, PyObject *as_name); +bool CPyImport_ImportMany(PyObject *modules, CPyModule **statics[], PyObject *globals, + PyObject *tb_path, PyObject *tb_function, Py_ssize_t *tb_lines); +PyObject *CPyImport_ImportFromMany(PyObject *mod_id, PyObject *names, PyObject *as_names, + PyObject *globals); PyObject *CPySingledispatch_RegisterFunction(PyObject *singledispatch_func, PyObject *cls, PyObject *func); diff --git a/mypyc/lib-rt/float_ops.c b/mypyc/lib-rt/float_ops.c new file mode 100644 index 000000000000..d8c6f25955fa --- /dev/null +++ b/mypyc/lib-rt/float_ops.c @@ -0,0 +1,192 @@ +// Float primitive operations +// +// These are registered in mypyc.primitives.float_ops. 
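+//
+// Error handling convention used by most functions here: set a Python
+// exception and return CPY_FLOAT_ERROR (-113.0, defined in mypyc_util.h).
+// Since -113.0 is also a valid float result, a caller that sees it must
+// confirm the failure with PyErr_Occurred(). A rough sketch of the
+// caller-side pattern (the real check is emitted as IR, see
+// mypyc/test-data/exceptions.test):
+//
+//     double r = CPyFloat_Log(x);
+//     if (r == CPY_FLOAT_ERROR && PyErr_Occurred()) {
+//         /* handle/propagate the error */
+//     }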
+ +#include +#include "CPy.h" + + +static double CPy_DomainError(void) { + PyErr_SetString(PyExc_ValueError, "math domain error"); + return CPY_FLOAT_ERROR; +} + +static double CPy_MathRangeError(void) { + PyErr_SetString(PyExc_OverflowError, "math range error"); + return CPY_FLOAT_ERROR; +} + +double CPyFloat_FromTagged(CPyTagged x) { + if (CPyTagged_CheckShort(x)) { + return CPyTagged_ShortAsSsize_t(x); + } + double result = PyFloat_AsDouble(CPyTagged_LongAsObject(x)); + if (unlikely(result == -1.0) && PyErr_Occurred()) { + return CPY_FLOAT_ERROR; + } + return result; +} + +double CPyFloat_Sin(double x) { + double v = sin(x); + if (unlikely(isnan(v)) && !isnan(x)) { + return CPy_DomainError(); + } + return v; +} + +double CPyFloat_Cos(double x) { + double v = cos(x); + if (unlikely(isnan(v)) && !isnan(x)) { + return CPy_DomainError(); + } + return v; +} + +double CPyFloat_Tan(double x) { + if (unlikely(isinf(x))) { + return CPy_DomainError(); + } + return tan(x); +} + +double CPyFloat_Sqrt(double x) { + if (x < 0.0) { + return CPy_DomainError(); + } + return sqrt(x); +} + +double CPyFloat_Exp(double x) { + double v = exp(x); + if (unlikely(v == INFINITY) && x != INFINITY) { + return CPy_MathRangeError(); + } + return v; +} + +double CPyFloat_Log(double x) { + if (x <= 0.0) { + return CPy_DomainError(); + } + return log(x); +} + +CPyTagged CPyFloat_Floor(double x) { + double v = floor(x); + return CPyTagged_FromFloat(v); +} + +CPyTagged CPyFloat_Ceil(double x) { + double v = ceil(x); + return CPyTagged_FromFloat(v); +} + +bool CPyFloat_IsInf(double x) { + return isinf(x) != 0; +} + +bool CPyFloat_IsNaN(double x) { + return isnan(x) != 0; +} + +// From CPython 3.10.0, Objects/floatobject.c +static void +_float_div_mod(double vx, double wx, double *floordiv, double *mod) +{ + double div; + *mod = fmod(vx, wx); + /* fmod is typically exact, so vx-mod is *mathematically* an + exact multiple of wx. But this is fp arithmetic, and fp + vx - mod is an approximation; the result is that div may + not be an exact integral value after the division, although + it will always be very close to one. + */ + div = (vx - *mod) / wx; + if (*mod) { + /* ensure the remainder has the same sign as the denominator */ + if ((wx < 0) != (*mod < 0)) { + *mod += wx; + div -= 1.0; + } + } + else { + /* the remainder is zero, and in the presence of signed zeroes + fmod returns different results across platforms; ensure + it has the same sign as the denominator. */ + *mod = copysign(0.0, wx); + } + /* snap quotient to nearest integral value */ + if (div) { + *floordiv = floor(div); + if (div - *floordiv > 0.5) { + *floordiv += 1.0; + } + } + else { + /* div is zero - get the same sign as the true quotient */ + *floordiv = copysign(0.0, vx / wx); /* zero w/ sign of vx/wx */ + } +} + +double CPyFloat_FloorDivide(double x, double y) { + double mod, floordiv; + if (y == 0) { + PyErr_SetString(PyExc_ZeroDivisionError, "float floor division by zero"); + return CPY_FLOAT_ERROR; + } + _float_div_mod(x, y, &floordiv, &mod); + return floordiv; +} + +// Adapted from CPython 3.10.7 +double CPyFloat_Pow(double x, double y) { + if (!isfinite(x) || !isfinite(y)) { + if (isnan(x)) + return y == 0.0 ? 1.0 : x; /* NaN**0 = 1 */ + else if (isnan(y)) + return x == 1.0 ? 1.0 : y; /* 1**NaN = 1 */ + else if (isinf(x)) { + int odd_y = isfinite(y) && fmod(fabs(y), 2.0) == 1.0; + if (y > 0.0) + return odd_y ? x : fabs(x); + else if (y == 0.0) + return 1.0; + else /* y < 0. */ + return odd_y ? 
copysign(0.0, x) : 0.0; + } + else if (isinf(y)) { + if (fabs(x) == 1.0) + return 1.0; + else if (y > 0.0 && fabs(x) > 1.0) + return y; + else if (y < 0.0 && fabs(x) < 1.0) { + #if PY_VERSION_HEX < 0x030B0000 + if (x == 0.0) { /* 0**-inf: divide-by-zero */ + return CPy_DomainError(); + } + #endif + return -y; /* result is +inf */ + } else + return 0.0; + } + } + double r = pow(x, y); + if (!isfinite(r)) { + if (isnan(r)) { + return CPy_DomainError(); + } + /* + an infinite result here arises either from: + (A) (+/-0.)**negative (-> divide-by-zero) + (B) overflow of x**y with x and y finite + */ + else if (isinf(r)) { + if (x == 0.0) + return CPy_DomainError(); + else + return CPy_MathRangeError(); + } + } + return r; +} diff --git a/mypyc/lib-rt/int_ops.c b/mypyc/lib-rt/int_ops.c index 5ea2f65d5776..843d9b0d2230 100644 --- a/mypyc/lib-rt/int_ops.c +++ b/mypyc/lib-rt/int_ops.c @@ -293,13 +293,14 @@ PyObject *CPyLong_FromStr(PyObject *o) { return CPyLong_FromStrWithBase(o, base); } -PyObject *CPyLong_FromFloat(PyObject *o) { - if (PyLong_Check(o)) { - CPy_INCREF(o); - return o; - } else { - return PyLong_FromDouble(PyFloat_AS_DOUBLE(o)); +CPyTagged CPyTagged_FromFloat(double f) { + if (f < ((double)CPY_TAGGED_MAX + 1.0) && f > (CPY_TAGGED_MIN - 1.0)) { + return (Py_ssize_t)f << 1; } + PyObject *o = PyLong_FromDouble(f); + if (o == NULL) + return CPY_INT_TAG; + return CPyTagged_StealFromObject(o); } PyObject *CPyBool_Str(bool b) { @@ -639,3 +640,22 @@ int32_t CPyInt32_Remainder(int32_t x, int32_t y) { void CPyInt32_Overflow() { PyErr_SetString(PyExc_OverflowError, "int too large to convert to i32"); } + +double CPyTagged_TrueDivide(CPyTagged x, CPyTagged y) { + if (unlikely(y == 0)) { + PyErr_SetString(PyExc_ZeroDivisionError, "division by zero"); + return CPY_FLOAT_ERROR; + } + if (likely(!CPyTagged_CheckLong(x) && !CPyTagged_CheckLong(y))) { + return (double)((Py_ssize_t)x >> 1) / (double)((Py_ssize_t)y >> 1); + } else { + PyObject *xo = CPyTagged_AsObject(x); + PyObject *yo = CPyTagged_AsObject(y); + PyObject *result = PyNumber_TrueDivide(xo, yo); + if (result == NULL) { + return CPY_FLOAT_ERROR; + } + return PyFloat_AsDouble(result); + } + return 1.0; +} diff --git a/mypyc/lib-rt/misc_ops.c b/mypyc/lib-rt/misc_ops.c index 5fda78704bbc..88a76fb210d7 100644 --- a/mypyc/lib-rt/misc_ops.c +++ b/mypyc/lib-rt/misc_ops.c @@ -669,9 +669,62 @@ CPy_Super(PyObject *builtins, PyObject *self) { return result; } +static bool import_single(PyObject *mod_id, PyObject **mod_static, + PyObject *globals_id, PyObject *globals_name, PyObject *globals) { + if (*mod_static == Py_None) { + CPyModule *mod = PyImport_Import(mod_id); + if (mod == NULL) { + return false; + } + *mod_static = mod; + } + + PyObject *mod_dict = PyImport_GetModuleDict(); + CPyModule *globals_mod = CPyDict_GetItem(mod_dict, globals_id); + if (globals_mod == NULL) { + return false; + } + int ret = CPyDict_SetItem(globals, globals_name, globals_mod); + Py_DECREF(globals_mod); + if (ret < 0) { + return false; + } + + return true; +} + +// Table-driven import helper. See transform_import() in irbuild for the details. 
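+//
+// A rough sketch of the expected arguments (mirroring transform_import()):
+//   modules:   tuple of (module_id, globals_id, globals_name) triples, e.g.
+//              ("collections.abc", "collections", "collections") for
+//              "import collections.abc", and
+//              ("collections.abc", "collections.abc", "abc2") for
+//              "import collections.abc as abc2"
+//   statics:   addresses of the module statics to initialize, one per triple
+//   tb_path, tb_function, tb_lines: identify where to point the traceback
+//              entry if one of the imports raises, since the failing line is
+//              only known here inside the helper.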
+bool CPyImport_ImportMany(PyObject *modules, CPyModule **statics[], PyObject *globals, + PyObject *tb_path, PyObject *tb_function, Py_ssize_t *tb_lines) { + for (Py_ssize_t i = 0; i < PyTuple_GET_SIZE(modules); i++) { + PyObject *module = PyTuple_GET_ITEM(modules, i); + PyObject *mod_id = PyTuple_GET_ITEM(module, 0); + PyObject *globals_id = PyTuple_GET_ITEM(module, 1); + PyObject *globals_name = PyTuple_GET_ITEM(module, 2); + + if (!import_single(mod_id, statics[i], globals_id, globals_name, globals)) { + assert(PyErr_Occurred() && "error indicator should be set on bad import!"); + PyObject *typ, *val, *tb; + PyErr_Fetch(&typ, &val, &tb); + const char *path = PyUnicode_AsUTF8(tb_path); + if (path == NULL) { + path = ""; + } + const char *function = PyUnicode_AsUTF8(tb_function); + if (function == NULL) { + function = ""; + } + PyErr_Restore(typ, val, tb); + CPy_AddTraceback(path, function, tb_lines[i], globals); + return false; + } + } + return true; +} + // This helper function is a simplification of cpython/ceval.c/import_from() -PyObject *CPyImport_ImportFrom(PyObject *module, PyObject *package_name, - PyObject *import_name, PyObject *as_name) { +static PyObject *CPyImport_ImportFrom(PyObject *module, PyObject *package_name, + PyObject *import_name, PyObject *as_name) { // check if the imported module has an attribute by that name PyObject *x = PyObject_GetAttr(module, import_name); if (x == NULL) { @@ -702,6 +755,31 @@ PyObject *CPyImport_ImportFrom(PyObject *module, PyObject *package_name, return NULL; } +PyObject *CPyImport_ImportFromMany(PyObject *mod_id, PyObject *names, PyObject *as_names, + PyObject *globals) { + PyObject *mod = PyImport_ImportModuleLevelObject(mod_id, globals, 0, names, 0); + if (mod == NULL) { + return NULL; + } + + for (Py_ssize_t i = 0; i < PyTuple_GET_SIZE(names); i++) { + PyObject *name = PyTuple_GET_ITEM(names, i); + PyObject *as_name = PyTuple_GET_ITEM(as_names, i); + PyObject *obj = CPyImport_ImportFrom(mod, mod_id, name, as_name); + if (obj == NULL) { + Py_DECREF(mod); + return NULL; + } + int ret = CPyDict_SetItem(globals, as_name, obj); + Py_DECREF(obj); + if (ret < 0) { + Py_DECREF(mod); + return NULL; + } + } + return mod; +} + // From CPython static PyObject * CPy_BinopTypeError(PyObject *left, PyObject *right, const char *op) { diff --git a/mypyc/lib-rt/mypyc_util.h b/mypyc/lib-rt/mypyc_util.h index 0fae239cbb9e..13672087fbbc 100644 --- a/mypyc/lib-rt/mypyc_util.h +++ b/mypyc/lib-rt/mypyc_util.h @@ -56,6 +56,9 @@ typedef PyObject CPyModule; // Error value for fixed-width (low-level) integers #define CPY_LL_INT_ERROR -113 +// Error value for floats +#define CPY_FLOAT_ERROR -113.0 + typedef void (*CPyVTableItem)(void); static inline CPyTagged CPyTagged_ShortFromInt(int x) { diff --git a/mypyc/lib-rt/setup.py b/mypyc/lib-rt/setup.py index e04d7041ad72..a31b705cd723 100644 --- a/mypyc/lib-rt/setup.py +++ b/mypyc/lib-rt/setup.py @@ -7,12 +7,14 @@ import sys from distutils.core import Extension, setup +from typing import Any +kwargs: dict[str, Any] if sys.platform == "darwin": kwargs = {"language": "c++"} compile_args = [] else: - kwargs = {} # type: ignore + kwargs = {} compile_args = ["--std=c++11"] setup( @@ -21,7 +23,15 @@ ext_modules=[ Extension( "test_capi", - ["test_capi.cc", "init.c", "int_ops.c", "list_ops.c", "exc_ops.c", "generic_ops.c"], + [ + "test_capi.cc", + "init.c", + "int_ops.c", + "float_ops.c", + "list_ops.c", + "exc_ops.c", + "generic_ops.c", + ], depends=["CPy.h", "mypyc_util.h", "pythonsupport.h"], 
extra_compile_args=["-Wno-unused-function", "-Wno-sign-compare"] + compile_args, library_dirs=["../external/googletest/make"], diff --git a/mypyc/primitives/float_ops.py b/mypyc/primitives/float_ops.py index 535606df6176..14e8d4caf09c 100644 --- a/mypyc/primitives/float_ops.py +++ b/mypyc/primitives/float_ops.py @@ -2,18 +2,41 @@ from __future__ import annotations -from mypyc.ir.ops import ERR_MAGIC -from mypyc.ir.rtypes import float_rprimitive, object_rprimitive, str_rprimitive -from mypyc.primitives.registry import function_op, load_address_op +from mypyc.ir.ops import ERR_MAGIC, ERR_MAGIC_OVERLAPPING, ERR_NEVER +from mypyc.ir.rtypes import ( + bool_rprimitive, + float_rprimitive, + int_rprimitive, + object_rprimitive, + str_rprimitive, +) +from mypyc.primitives.registry import binary_op, function_op, load_address_op # Get the 'builtins.float' type object. load_address_op(name="builtins.float", type=object_rprimitive, src="PyFloat_Type") +binary_op( + name="//", + arg_types=[float_rprimitive, float_rprimitive], + return_type=float_rprimitive, + c_function_name="CPyFloat_FloorDivide", + error_kind=ERR_MAGIC_OVERLAPPING, +) + +# float(int) +int_to_float_op = function_op( + name="builtins.float", + arg_types=[int_rprimitive], + return_type=float_rprimitive, + c_function_name="CPyFloat_FromTagged", + error_kind=ERR_MAGIC_OVERLAPPING, +) + # float(str) function_op( name="builtins.float", arg_types=[str_rprimitive], - return_type=float_rprimitive, + return_type=object_rprimitive, c_function_name="PyFloat_FromString", error_kind=ERR_MAGIC, ) @@ -23,6 +46,123 @@ name="builtins.abs", arg_types=[float_rprimitive], return_type=float_rprimitive, - c_function_name="PyNumber_Absolute", + c_function_name="fabs", + error_kind=ERR_NEVER, +) + +# math.sin(float) +function_op( + name="math.sin", + arg_types=[float_rprimitive], + return_type=float_rprimitive, + c_function_name="CPyFloat_Sin", + error_kind=ERR_MAGIC_OVERLAPPING, +) + +# math.cos(float) +function_op( + name="math.cos", + arg_types=[float_rprimitive], + return_type=float_rprimitive, + c_function_name="CPyFloat_Cos", + error_kind=ERR_MAGIC_OVERLAPPING, +) + +# math.tan(float) +function_op( + name="math.tan", + arg_types=[float_rprimitive], + return_type=float_rprimitive, + c_function_name="CPyFloat_Tan", + error_kind=ERR_MAGIC_OVERLAPPING, +) + +# math.sqrt(float) +function_op( + name="math.sqrt", + arg_types=[float_rprimitive], + return_type=float_rprimitive, + c_function_name="CPyFloat_Sqrt", + error_kind=ERR_MAGIC_OVERLAPPING, +) + +# math.exp(float) +function_op( + name="math.exp", + arg_types=[float_rprimitive], + return_type=float_rprimitive, + c_function_name="CPyFloat_Exp", + error_kind=ERR_MAGIC_OVERLAPPING, +) + +# math.log(float) +function_op( + name="math.log", + arg_types=[float_rprimitive], + return_type=float_rprimitive, + c_function_name="CPyFloat_Log", + error_kind=ERR_MAGIC_OVERLAPPING, +) + +# math.floor(float) +function_op( + name="math.floor", + arg_types=[float_rprimitive], + return_type=int_rprimitive, + c_function_name="CPyFloat_Floor", error_kind=ERR_MAGIC, ) + +# math.ceil(float) +function_op( + name="math.ceil", + arg_types=[float_rprimitive], + return_type=int_rprimitive, + c_function_name="CPyFloat_Ceil", + error_kind=ERR_MAGIC, +) + +# math.fabs(float) +function_op( + name="math.fabs", + arg_types=[float_rprimitive], + return_type=float_rprimitive, + c_function_name="fabs", + error_kind=ERR_NEVER, +) + +# math.pow(float, float) +pow_op = function_op( + name="math.pow", + arg_types=[float_rprimitive, 
float_rprimitive], + return_type=float_rprimitive, + c_function_name="CPyFloat_Pow", + error_kind=ERR_MAGIC_OVERLAPPING, +) + +# math.copysign(float, float) +copysign_op = function_op( + name="math.copysign", + arg_types=[float_rprimitive, float_rprimitive], + return_type=float_rprimitive, + c_function_name="copysign", + error_kind=ERR_NEVER, +) + +# math.isinf(float) +function_op( + name="math.isinf", + arg_types=[float_rprimitive], + return_type=bool_rprimitive, + c_function_name="CPyFloat_IsInf", + error_kind=ERR_NEVER, +) + +# math.isnan(float) +function_op( + name="math.isnan", + arg_types=[float_rprimitive], + return_type=bool_rprimitive, + c_function_name="CPyFloat_IsNaN", + error_kind=ERR_NEVER, +) diff --git a/mypyc/primitives/int_ops.py b/mypyc/primitives/int_ops.py index 7eda9bab7e3c..eff4b4ffd8ab 100644 --- a/mypyc/primitives/int_ops.py +++ b/mypyc/primitives/int_ops.py @@ -50,8 +50,8 @@ function_op( name=int_name, arg_types=[float_rprimitive], - return_type=object_rprimitive, - c_function_name="CPyLong_FromFloat", + return_type=int_rprimitive, + c_function_name="CPyTagged_FromFloat", error_kind=ERR_MAGIC, ) @@ -126,6 +126,10 @@ def int_binary_op( int_binary_op(">>", "CPyTagged_Rshift", error_kind=ERR_MAGIC) int_binary_op("<<", "CPyTagged_Lshift", error_kind=ERR_MAGIC) +int_binary_op( + "/", "CPyTagged_TrueDivide", return_type=float_rprimitive, error_kind=ERR_MAGIC_OVERLAPPING +) + # This should work because assignment operators are parsed differently # and the code in irbuild that handles it does the assignment # regardless of whether or not the operator works in place anyway. @@ -157,6 +161,7 @@ def int_unary_op(name: str, c_function_name: str) -> CFunctionDescription: # Primitives related to integer comparison operations: + # Description for building int comparison ops # # Fields: diff --git a/mypyc/primitives/misc_ops.py b/mypyc/primitives/misc_ops.py index 07df9c69714b..5a8cc111ebc2 100644 --- a/mypyc/primitives/misc_ops.py +++ b/mypyc/primitives/misc_ops.py @@ -7,10 +7,10 @@ bit_rprimitive, bool_rprimitive, c_int_rprimitive, + c_pointer_rprimitive, c_pyssize_t_rprimitive, dict_rprimitive, int_rprimitive, - list_rprimitive, object_pointer_rprimitive, object_rprimitive, str_rprimitive, @@ -112,7 +112,7 @@ is_borrowed=True, ) -# Import a module +# Import a module (plain) import_op = custom_op( arg_types=[str_rprimitive], return_type=object_rprimitive, @@ -120,25 +120,26 @@ error_kind=ERR_MAGIC, ) -# Import with extra arguments (used in from import handling) -import_extra_args_op = custom_op( +# Table-driven import op. 
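+# Arguments (see CPyImport_ImportMany in misc_ops.c):
+#   (modules tuple, module statics array pointer, globals dict,
+#    traceback path string, traceback function name, line numbers array pointer)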
+import_many_op = custom_op( arg_types=[ - str_rprimitive, - dict_rprimitive, - dict_rprimitive, - list_rprimitive, - c_int_rprimitive, + object_rprimitive, + c_pointer_rprimitive, + object_rprimitive, + object_rprimitive, + object_rprimitive, + c_pointer_rprimitive, ], - return_type=object_rprimitive, - c_function_name="PyImport_ImportModuleLevelObject", - error_kind=ERR_MAGIC, + return_type=bit_rprimitive, + c_function_name="CPyImport_ImportMany", + error_kind=ERR_FALSE, ) -# Import-from helper op -import_from_op = custom_op( - arg_types=[object_rprimitive, str_rprimitive, str_rprimitive, str_rprimitive], +# From import helper op +import_from_many_op = custom_op( + arg_types=[object_rprimitive, object_rprimitive, object_rprimitive, object_rprimitive], return_type=object_rprimitive, - c_function_name="CPyImport_ImportFrom", + c_function_name="CPyImport_ImportFromMany", error_kind=ERR_MAGIC, ) diff --git a/mypyc/rt_subtype.py b/mypyc/rt_subtype.py index f3fe1a442d22..004e56ed75bc 100644 --- a/mypyc/rt_subtype.py +++ b/mypyc/rt_subtype.py @@ -51,7 +51,7 @@ def visit_rinstance(self, left: RInstance) -> bool: return is_subtype(left, self.right) def visit_runion(self, left: RUnion) -> bool: - return is_subtype(left, self.right) + return not self.right.is_unboxed and is_subtype(left, self.right) def visit_rprimitive(self, left: RPrimitive) -> bool: if is_short_int_rprimitive(left) and is_int_rprimitive(self.right): diff --git a/mypyc/test-data/exceptions.test b/mypyc/test-data/exceptions.test index 187551249676..16bf8ba1eb89 100644 --- a/mypyc/test-data/exceptions.test +++ b/mypyc/test-data/exceptions.test @@ -570,6 +570,34 @@ L0: c.x = r1 return 1 +[case testExceptionWithOverlappingFloatErrorValue] +def f() -> float: + return 0.0 + +def g() -> float: + return f() +[out] +def f(): +L0: + return 0.0 +def g(): + r0 :: float + r1 :: bit + r2 :: object + r3 :: float +L0: + r0 = f() + r1 = r0 == -113.0 + if r1 goto L2 else goto L1 :: bool +L1: + return r0 +L2: + r2 = PyErr_Occurred() + if not is_error(r2) goto L3 (error at g:5) else goto L1 +L3: + r3 = :: float + return r3 + [case testExceptionWithLowLevelIntAttribute] from mypy_extensions import i32, i64 @@ -639,3 +667,47 @@ L5: L6: r6 = :: int64 return r6 + +[case testExceptionWithFloatAttribute] +class C: + def __init__(self, x: float, y: float) -> None: + self.x = x + if x: + self.y = y + +def f(c: C) -> float: + return c.x + c.y +[out] +def C.__init__(self, x, y): + self :: __main__.C + x, y :: float + r0 :: bit +L0: + self.x = x + r0 = x != 0.0 + if r0 goto L1 else goto L2 :: bool +L1: + self.y = y +L2: + return 1 +def f(c): + c :: __main__.C + r0, r1 :: float + r2 :: bit + r3 :: float + r4 :: object + r5 :: float +L0: + r0 = c.x + r1 = c.y + r2 = r1 == -113.0 + if r2 goto L2 else goto L1 :: bool +L1: + r3 = r0 + r1 + return r3 +L2: + r4 = PyErr_Occurred() + if not is_error(r4) goto L3 (error at f:8) else goto L1 +L3: + r5 = :: float + return r5 diff --git a/mypyc/test-data/fixtures/ir.py b/mypyc/test-data/fixtures/ir.py index 27e225f273bc..bf06613ad2a8 100644 --- a/mypyc/test-data/fixtures/ir.py +++ b/mypyc/test-data/fixtures/ir.py @@ -1,6 +1,7 @@ # These builtins stubs are used implicitly in AST to IR generation # test cases. 
+import _typeshed from typing import ( TypeVar, Generic, List, Iterator, Iterable, Dict, Optional, Tuple, Any, Set, overload, Mapping, Union, Callable, Sequence, FrozenSet, Protocol @@ -86,6 +87,8 @@ def __init__(self) -> None: pass @overload def __init__(self, x: object) -> None: pass def __add__(self, x: str) -> str: pass + def __mul__(self, x: int) -> str: pass + def __rmul__(self, x: int) -> str: pass def __eq__(self, x: object) -> bool: pass def __ne__(self, x: object) -> bool: pass def __lt__(self, x: str) -> bool: ... @@ -111,19 +114,31 @@ def encode(self, x: str=..., y: str=...) -> bytes: ... class float: def __init__(self, x: object) -> None: pass def __add__(self, n: float) -> float: pass + def __radd__(self, n: float) -> float: pass def __sub__(self, n: float) -> float: pass + def __rsub__(self, n: float) -> float: pass def __mul__(self, n: float) -> float: pass def __truediv__(self, n: float) -> float: pass + def __floordiv__(self, n: float) -> float: pass + def __mod__(self, n: float) -> float: pass def __pow__(self, n: float) -> float: pass def __neg__(self) -> float: pass def __pos__(self) -> float: pass def __abs__(self) -> float: pass def __invert__(self) -> float: pass + def __eq__(self, x: object) -> bool: pass + def __ne__(self, x: object) -> bool: pass + def __lt__(self, x: float) -> bool: ... + def __le__(self, x: float) -> bool: ... + def __gt__(self, x: float) -> bool: ... + def __ge__(self, x: float) -> bool: ... class complex: def __init__(self, x: object, y: object = None) -> None: pass def __add__(self, n: complex) -> complex: pass + def __radd__(self, n: float) -> complex: pass def __sub__(self, n: complex) -> complex: pass + def __rsub__(self, n: float) -> complex: pass def __mul__(self, n: complex) -> complex: pass def __truediv__(self, n: complex) -> complex: pass def __neg__(self) -> complex: pass @@ -134,6 +149,8 @@ def __init__(self) -> None: ... @overload def __init__(self, x: object) -> None: ... def __add__(self, x: bytes) -> bytes: ... + def __mul__(self, x: int) -> bytes: ... + def __rmul__(self, x: int) -> bytes: ... def __eq__(self, x: object) -> bool: ... def __ne__(self, x: object) -> bool: ... 
@overload @@ -288,6 +305,7 @@ class ValueError(Exception): pass class AttributeError(Exception): pass class ImportError(Exception): pass class NameError(Exception): pass +class UnboundLocalError(NameError): pass class LookupError(Exception): pass class KeyError(LookupError): pass class IndexError(LookupError): pass diff --git a/mypyc/test-data/fixtures/testutil.py b/mypyc/test-data/fixtures/testutil.py index 7b4fcc9fc1ca..5a4b1d0f549e 100644 --- a/mypyc/test-data/fixtures/testutil.py +++ b/mypyc/test-data/fixtures/testutil.py @@ -2,10 +2,43 @@ from contextlib import contextmanager from collections.abc import Iterator +import math from typing import ( Any, Iterator, TypeVar, Generator, Optional, List, Tuple, Sequence, Union, Callable, Awaitable, ) +from typing_extensions import Final + +FLOAT_MAGIC: Final = -113.0 + +# Various different float values +float_vals = [ + float(n) * 0.25 for n in range(-10, 10) +] + [ + -0.0, + 1.0/3.0, + math.sqrt(2.0), + 1.23e200, + -2.34e200, + 5.43e-100, + -6.532e-200, + float('inf'), + -float('inf'), + float('nan'), + FLOAT_MAGIC, + math.pi, + 2.0 * math.pi, + math.pi / 2.0, + -math.pi / 2.0, + -1.7976931348623158e+308, # Smallest finite value + -2.2250738585072014e-308, # Closest to zero negative normal value + -7.5491e-312, # Arbitrary negative subnormal value + -5e-324, # Closest to zero negative subnormal value + 1.7976931348623158e+308, # Largest finite value + 2.2250738585072014e-308, # Closest to zero positive normal value + -6.3492e-312, # Arbitrary positive subnormal value + 5e-324, # Closest to zero positive subnormal value +] @contextmanager def assertRaises(typ: type, msg: str = '') -> Iterator[None]: @@ -17,6 +50,12 @@ def assertRaises(typ: type, msg: str = '') -> Iterator[None]: else: assert False, f"Expected {typ.__name__} but got no exception" +def assertDomainError() -> Any: + return assertRaises(ValueError, "math domain error") + +def assertMathRangeError() -> Any: + return assertRaises(OverflowError, "math range error") + T = TypeVar('T') U = TypeVar('U') V = TypeVar('V') diff --git a/mypyc/test-data/fixtures/typing-full.pyi b/mypyc/test-data/fixtures/typing-full.pyi index c36b1001106e..52bca09a1dec 100644 --- a/mypyc/test-data/fixtures/typing-full.pyi +++ b/mypyc/test-data/fixtures/typing-full.pyi @@ -125,6 +125,7 @@ class Sequence(Iterable[T_co], Container[T_co]): def __getitem__(self, n: Any) -> T_co: pass class Mapping(Iterable[T], Generic[T, T_co], metaclass=ABCMeta): + def keys(self) -> Iterable[T]: pass # Approximate return type def __getitem__(self, key: T) -> T_co: pass @overload def get(self, k: T) -> Optional[T_co]: pass diff --git a/mypyc/test-data/irbuild-any.test b/mypyc/test-data/irbuild-any.test index 8d4e085179ae..8274e3d5c619 100644 --- a/mypyc/test-data/irbuild-any.test +++ b/mypyc/test-data/irbuild-any.test @@ -187,15 +187,14 @@ def f() -> None: def f(): r0, r1 :: object r2, a :: int - r3, r4, b :: float + r3, b :: float L0: r0 = object 1 r1 = PyNumber_Absolute(r0) r2 = unbox(int, r1) a = r2 - r3 = 1.1 - r4 = PyNumber_Absolute(r3) - b = r4 + r3 = fabs(1.1) + b = r3 return 1 [case testFunctionBasedOps] @@ -228,13 +227,12 @@ L0: def f3(): r0, r1, r2, r3 :: object r4 :: int - r5 :: object + r5 :: float L0: r0 = object 2 r1 = object 5 r2 = object 3 r3 = PyNumber_Power(r0, r1, r2) r4 = unbox(int, r3) - r5 = box(int, r4) + r5 = CPyFloat_FromTagged(r4) return r5 - diff --git a/mypyc/test-data/irbuild-basic.test b/mypyc/test-data/irbuild-basic.test index a06977d037b2..556e0a4bbc50 100644 --- 
a/mypyc/test-data/irbuild-basic.test +++ b/mypyc/test-data/irbuild-basic.test @@ -682,14 +682,106 @@ L0: r5 = unbox(int, r4) return r5 -[case testFromImport] -from testmodule import g +[case testImport_toplevel] +import sys +import enum as enum2 +import collections.abc +import collections.abc as abc2 +_ = "filler" +import single +single.hello() + +[file single.py] +def hello() -> None: + print("hello, world") + +[out] +def __top_level__(): + r0, r1 :: object + r2 :: bit + r3 :: str + r4 :: object + r5, r6, r7, r8 :: object_ptr + r9 :: object_ptr[4] + r10 :: c_ptr + r11 :: native_int[4] + r12 :: c_ptr + r13 :: object + r14 :: dict + r15, r16 :: str + r17 :: bit + r18 :: str + r19 :: dict + r20 :: str + r21 :: int32 + r22 :: bit + r23 :: object_ptr + r24 :: object_ptr[1] + r25 :: c_ptr + r26 :: native_int[1] + r27 :: c_ptr + r28 :: object + r29 :: dict + r30, r31 :: str + r32 :: bit + r33 :: object + r34 :: str + r35, r36 :: object +L0: + r0 = builtins :: module + r1 = load_address _Py_NoneStruct + r2 = r0 != r1 + if r2 goto L2 else goto L1 :: bool +L1: + r3 = 'builtins' + r4 = PyImport_Import(r3) + builtins = r4 :: module +L2: + r5 = load_address sys :: module + r6 = load_address enum :: module + r7 = load_address collections.abc :: module + r8 = load_address collections.abc :: module + r9 = [r5, r6, r7, r8] + r10 = load_address r9 + r11 = [1, 2, 3, 4] + r12 = load_address r11 + r13 = (('sys', 'sys', 'sys'), ('enum', 'enum', 'enum2'), ('collections.abc', 'collections', 'collections'), ('collections.abc', 'collections.abc', 'abc2')) + r14 = __main__.globals :: static + r15 = 'main' + r16 = '' + r17 = CPyImport_ImportMany(r13, r10, r14, r15, r16, r12) + r18 = 'filler' + r19 = __main__.globals :: static + r20 = '_' + r21 = CPyDict_SetItem(r19, r20, r18) + r22 = r21 >= 0 :: signed + r23 = load_address single :: module + r24 = [r23] + r25 = load_address r24 + r26 = [6] + r27 = load_address r26 + r28 = (('single', 'single', 'single'),) + r29 = __main__.globals :: static + r30 = 'main' + r31 = '' + r32 = CPyImport_ImportMany(r28, r25, r29, r30, r31, r27) + r33 = single :: module + r34 = 'hello' + r35 = CPyObject_GetAttr(r33, r34) + r36 = PyObject_CallFunctionObjArgs(r35, 0) + return 1 + +[case testFromImport_toplevel] +from testmodule import g, h +from testmodule import h as two def f(x: int) -> int: - return g(x) + return g(x) + h() + two() [file testmodule.py] def g(x: int) -> int: return x + 1 +def h() -> int: + return 2 [out] def f(x): x :: int @@ -697,6 +789,14 @@ def f(x): r1 :: str r2, r3, r4 :: object r5 :: int + r6 :: dict + r7 :: str + r8, r9 :: object + r10, r11 :: int + r12 :: dict + r13 :: str + r14, r15 :: object + r16, r17 :: int L0: r0 = __main__.globals :: static r1 = 'g' @@ -704,7 +804,52 @@ L0: r3 = box(int, x) r4 = PyObject_CallFunctionObjArgs(r2, r3, 0) r5 = unbox(int, r4) - return r5 + r6 = __main__.globals :: static + r7 = 'h' + r8 = CPyDict_GetItem(r6, r7) + r9 = PyObject_CallFunctionObjArgs(r8, 0) + r10 = unbox(int, r9) + r11 = CPyTagged_Add(r5, r10) + r12 = __main__.globals :: static + r13 = 'two' + r14 = CPyDict_GetItem(r12, r13) + r15 = PyObject_CallFunctionObjArgs(r14, 0) + r16 = unbox(int, r15) + r17 = CPyTagged_Add(r11, r16) + return r17 +def __top_level__(): + r0, r1 :: object + r2 :: bit + r3 :: str + r4, r5 :: object + r6 :: str + r7 :: dict + r8, r9, r10 :: object + r11 :: str + r12 :: dict + r13 :: object +L0: + r0 = builtins :: module + r1 = load_address _Py_NoneStruct + r2 = r0 != r1 + if r2 goto L2 else goto L1 :: bool +L1: + r3 = 'builtins' + r4 = 
PyImport_Import(r3) + builtins = r4 :: module +L2: + r5 = ('g', 'h') + r6 = 'testmodule' + r7 = __main__.globals :: static + r8 = CPyImport_ImportFromMany(r6, r5, r5, r7) + testmodule = r8 :: module + r9 = ('h',) + r10 = ('two',) + r11 = 'testmodule' + r12 = __main__.globals :: static + r13 = CPyImport_ImportFromMany(r11, r9, r10, r12) + testmodule = r13 :: module + return 1 [case testPrintFullname] import builtins @@ -1016,35 +1161,27 @@ def assign_and_return_float_sum() -> float: return f1 * f2 + f3 [out] def assign_and_return_float_sum(): - r0, f1, r1, f2, r2, f3 :: float - r3 :: object - r4 :: float - r5 :: object - r6 :: float -L0: - r0 = 1.0 - f1 = r0 - r1 = 2.0 - f2 = r1 - r2 = 3.0 - f3 = r2 - r3 = PyNumber_Multiply(f1, f2) - r4 = cast(float, r3) - r5 = PyNumber_Add(r4, f3) - r6 = cast(float, r5) - return r6 + f1, f2, f3, r0, r1 :: float +L0: + f1 = 1.0 + f2 = 2.0 + f3 = 3.0 + r0 = f1 * f2 + r1 = r0 + f3 + return r1 [case testLoadComplex] def load() -> complex: - return 5j+1.0 + real = 1 + return 5j+real [out] def load(): - r0 :: object - r1 :: float - r2 :: object + real :: int + r0, r1, r2 :: object L0: + real = 2 r0 = 5j - r1 = 1.0 + r1 = box(int, real) r2 = PyNumber_Add(r0, r1) return r2 @@ -1176,10 +1313,8 @@ L0: r5 = unbox(int, r4) return r5 def return_float(): - r0 :: float L0: - r0 = 5.0 - return r0 + return 5.0 def return_callable_type(): r0 :: dict r1 :: str @@ -1196,7 +1331,7 @@ L0: r0 = return_callable_type() f = r0 r1 = PyObject_CallFunctionObjArgs(f, 0) - r2 = cast(float, r1) + r2 = unbox(float, r1) return r2 [case testCallableTypesWithKeywordArgs] @@ -2276,79 +2411,61 @@ def __top_level__(): r0, r1 :: object r2 :: bit r3 :: str - r4 :: object - r5 :: dict - r6, r7, r8 :: str - r9 :: list - r10, r11, r12, r13 :: ptr + r4, r5 :: object + r6 :: str + r7 :: dict + r8 :: object + r9, r10 :: str + r11 :: object + r12 :: tuple[str, object] + r13 :: object r14 :: str r15 :: object - r16, r17, r18 :: str + r16 :: tuple[str, object] + r17 :: object + r18 :: tuple[object, object] r19 :: object - r20 :: str - r21 :: int32 - r22 :: bit - r23, r24, r25 :: str - r26 :: object - r27 :: str - r28 :: int32 - r29 :: bit - r30, r31, r32 :: str - r33 :: object - r34 :: str - r35 :: int32 - r36 :: bit - r37, r38 :: str - r39 :: object - r40 :: tuple[str, object] - r41 :: object - r42 :: str - r43 :: object - r44 :: tuple[str, object] - r45 :: object - r46 :: tuple[object, object] - r47 :: object - r48 :: dict - r49 :: str - r50, r51 :: object + r20 :: dict + r21 :: str + r22, r23 :: object + r24 :: dict + r25 :: str + r26 :: int32 + r27 :: bit + r28 :: str + r29 :: dict + r30 :: str + r31, r32, r33 :: object + r34 :: tuple + r35 :: dict + r36 :: str + r37 :: int32 + r38 :: bit + r39 :: dict + r40 :: str + r41, r42, r43 :: object + r44 :: dict + r45 :: str + r46 :: int32 + r47 :: bit + r48 :: str + r49 :: dict + r50 :: str + r51 :: object r52 :: dict r53 :: str - r54 :: int32 - r55 :: bit - r56 :: str - r57 :: dict - r58 :: str - r59, r60, r61 :: object - r62 :: tuple - r63 :: dict - r64 :: str - r65 :: int32 - r66 :: bit - r67 :: dict - r68 :: str - r69, r70, r71 :: object - r72 :: dict - r73 :: str - r74 :: int32 - r75 :: bit - r76 :: str - r77 :: dict - r78 :: str - r79 :: object - r80 :: dict - r81 :: str - r82, r83 :: object - r84 :: dict - r85 :: str - r86 :: int32 - r87 :: bit - r88 :: list - r89, r90, r91 :: object - r92, r93, r94, r95 :: ptr - r96 :: dict - r97 :: str - r98 :: int32 - r99 :: bit + r54, r55 :: object + r56 :: dict + r57 :: str + r58 :: int32 + r59 :: bit + r60 :: 
list + r61, r62, r63 :: object + r64, r65, r66, r67 :: ptr + r68 :: dict + r69 :: str + r70 :: int32 + r71 :: bit L0: r0 = builtins :: module r1 = load_address _Py_NoneStruct @@ -2359,110 +2476,78 @@ L1: r4 = PyImport_Import(r3) builtins = r4 :: module L2: - r5 = __main__.globals :: static - r6 = 'List' - r7 = 'NewType' - r8 = 'NamedTuple' - r9 = PyList_New(3) - r10 = get_element_ptr r9 ob_item :: PyListObject - r11 = load_mem r10 :: ptr* - set_mem r11, r6 :: builtins.object* - r12 = r11 + WORD_SIZE*1 - set_mem r12, r7 :: builtins.object* - r13 = r11 + WORD_SIZE*2 - set_mem r13, r8 :: builtins.object* - keep_alive r9 - r14 = 'typing' - r15 = PyImport_ImportModuleLevelObject(r14, r5, 0, r9, 0) - typing = r15 :: module - r16 = 'typing' - r17 = 'List' - r18 = 'List' - r19 = CPyImport_ImportFrom(r15, r16, r17, r18) - r20 = 'List' - r21 = CPyDict_SetItem(r5, r20, r19) - r22 = r21 >= 0 :: signed - r23 = 'typing' - r24 = 'NewType' - r25 = 'NewType' - r26 = CPyImport_ImportFrom(r15, r23, r24, r25) - r27 = 'NewType' - r28 = CPyDict_SetItem(r5, r27, r26) - r29 = r28 >= 0 :: signed - r30 = 'typing' - r31 = 'NamedTuple' - r32 = 'NamedTuple' - r33 = CPyImport_ImportFrom(r15, r30, r31, r32) - r34 = 'NamedTuple' - r35 = CPyDict_SetItem(r5, r34, r33) - r36 = r35 >= 0 :: signed - r37 = 'Lol' - r38 = 'a' - r39 = load_address PyLong_Type - r40 = (r38, r39) - r41 = box(tuple[str, object], r40) - r42 = 'b' - r43 = load_address PyUnicode_Type - r44 = (r42, r43) - r45 = box(tuple[str, object], r44) - r46 = (r41, r45) - r47 = box(tuple[object, object], r46) - r48 = __main__.globals :: static - r49 = 'NamedTuple' - r50 = CPyDict_GetItem(r48, r49) - r51 = PyObject_CallFunctionObjArgs(r50, r37, r47, 0) + r5 = ('List', 'NewType', 'NamedTuple') + r6 = 'typing' + r7 = __main__.globals :: static + r8 = CPyImport_ImportFromMany(r6, r5, r5, r7) + typing = r8 :: module + r9 = 'Lol' + r10 = 'a' + r11 = load_address PyLong_Type + r12 = (r10, r11) + r13 = box(tuple[str, object], r12) + r14 = 'b' + r15 = load_address PyUnicode_Type + r16 = (r14, r15) + r17 = box(tuple[str, object], r16) + r18 = (r13, r17) + r19 = box(tuple[object, object], r18) + r20 = __main__.globals :: static + r21 = 'NamedTuple' + r22 = CPyDict_GetItem(r20, r21) + r23 = PyObject_CallFunctionObjArgs(r22, r9, r19, 0) + r24 = __main__.globals :: static + r25 = 'Lol' + r26 = CPyDict_SetItem(r24, r25, r23) + r27 = r26 >= 0 :: signed + r28 = '' + r29 = __main__.globals :: static + r30 = 'Lol' + r31 = CPyDict_GetItem(r29, r30) + r32 = object 1 + r33 = PyObject_CallFunctionObjArgs(r31, r32, r28, 0) + r34 = cast(tuple, r33) + r35 = __main__.globals :: static + r36 = 'x' + r37 = CPyDict_SetItem(r35, r36, r34) + r38 = r37 >= 0 :: signed + r39 = __main__.globals :: static + r40 = 'List' + r41 = CPyDict_GetItem(r39, r40) + r42 = load_address PyLong_Type + r43 = PyObject_GetItem(r41, r42) + r44 = __main__.globals :: static + r45 = 'Foo' + r46 = CPyDict_SetItem(r44, r45, r43) + r47 = r46 >= 0 :: signed + r48 = 'Bar' + r49 = __main__.globals :: static + r50 = 'Foo' + r51 = CPyDict_GetItem(r49, r50) r52 = __main__.globals :: static - r53 = 'Lol' - r54 = CPyDict_SetItem(r52, r53, r51) - r55 = r54 >= 0 :: signed - r56 = '' - r57 = __main__.globals :: static - r58 = 'Lol' - r59 = CPyDict_GetItem(r57, r58) - r60 = object 1 - r61 = PyObject_CallFunctionObjArgs(r59, r60, r56, 0) - r62 = cast(tuple, r61) - r63 = __main__.globals :: static - r64 = 'x' - r65 = CPyDict_SetItem(r63, r64, r62) - r66 = r65 >= 0 :: signed - r67 = __main__.globals :: static - r68 = 'List' - r69 = 
CPyDict_GetItem(r67, r68) - r70 = load_address PyLong_Type - r71 = PyObject_GetItem(r69, r70) - r72 = __main__.globals :: static - r73 = 'Foo' - r74 = CPyDict_SetItem(r72, r73, r71) - r75 = r74 >= 0 :: signed - r76 = 'Bar' - r77 = __main__.globals :: static - r78 = 'Foo' - r79 = CPyDict_GetItem(r77, r78) - r80 = __main__.globals :: static - r81 = 'NewType' - r82 = CPyDict_GetItem(r80, r81) - r83 = PyObject_CallFunctionObjArgs(r82, r76, r79, 0) - r84 = __main__.globals :: static - r85 = 'Bar' - r86 = CPyDict_SetItem(r84, r85, r83) - r87 = r86 >= 0 :: signed - r88 = PyList_New(3) - r89 = object 1 - r90 = object 2 - r91 = object 3 - r92 = get_element_ptr r88 ob_item :: PyListObject - r93 = load_mem r92 :: ptr* - set_mem r93, r89 :: builtins.object* - r94 = r93 + WORD_SIZE*1 - set_mem r94, r90 :: builtins.object* - r95 = r93 + WORD_SIZE*2 - set_mem r95, r91 :: builtins.object* - keep_alive r88 - r96 = __main__.globals :: static - r97 = 'y' - r98 = CPyDict_SetItem(r96, r97, r88) - r99 = r98 >= 0 :: signed + r53 = 'NewType' + r54 = CPyDict_GetItem(r52, r53) + r55 = PyObject_CallFunctionObjArgs(r54, r48, r51, 0) + r56 = __main__.globals :: static + r57 = 'Bar' + r58 = CPyDict_SetItem(r56, r57, r55) + r59 = r58 >= 0 :: signed + r60 = PyList_New(3) + r61 = object 1 + r62 = object 2 + r63 = object 3 + r64 = get_element_ptr r60 ob_item :: PyListObject + r65 = load_mem r64 :: ptr* + set_mem r65, r61 :: builtins.object* + r66 = r65 + WORD_SIZE*1 + set_mem r66, r62 :: builtins.object* + r67 = r65 + WORD_SIZE*2 + set_mem r67, r63 :: builtins.object* + keep_alive r60 + r68 = __main__.globals :: static + r69 = 'y' + r70 = CPyDict_SetItem(r68, r69, r60) + r71 = r70 >= 0 :: signed return 1 [case testChainedConditional] @@ -2786,31 +2871,23 @@ def __top_level__(): r0, r1 :: object r2 :: bit r3 :: str - r4 :: object - r5 :: dict + r4, r5 :: object r6 :: str - r7 :: list - r8, r9 :: ptr + r7 :: dict + r8 :: object + r9 :: dict r10 :: str r11 :: object - r12, r13, r14 :: str - r15 :: object - r16 :: str - r17 :: int32 - r18 :: bit - r19 :: dict - r20 :: str - r21 :: object - r22 :: dict - r23 :: str - r24, r25 :: object - r26 :: dict - r27 :: str - r28, r29 :: object - r30 :: dict - r31 :: str - r32 :: int32 - r33 :: bit + r12 :: dict + r13 :: str + r14, r15 :: object + r16 :: dict + r17 :: str + r18, r19 :: object + r20 :: dict + r21 :: str + r22 :: int32 + r23 :: bit L0: r0 = builtins :: module r1 = load_address _Py_NoneStruct @@ -2821,38 +2898,26 @@ L1: r4 = PyImport_Import(r3) builtins = r4 :: module L2: - r5 = __main__.globals :: static - r6 = 'Callable' - r7 = PyList_New(1) - r8 = get_element_ptr r7 ob_item :: PyListObject - r9 = load_mem r8 :: ptr* - set_mem r9, r6 :: builtins.object* - keep_alive r7 - r10 = 'typing' - r11 = PyImport_ImportModuleLevelObject(r10, r5, 0, r7, 0) - typing = r11 :: module - r12 = 'typing' - r13 = 'Callable' - r14 = 'Callable' - r15 = CPyImport_ImportFrom(r11, r12, r13, r14) - r16 = 'Callable' - r17 = CPyDict_SetItem(r5, r16, r15) - r18 = r17 >= 0 :: signed - r19 = __main__.globals :: static - r20 = 'c' - r21 = CPyDict_GetItem(r19, r20) - r22 = __main__.globals :: static - r23 = 'b' - r24 = CPyDict_GetItem(r22, r23) - r25 = PyObject_CallFunctionObjArgs(r24, r21, 0) - r26 = __main__.globals :: static - r27 = 'a' - r28 = CPyDict_GetItem(r26, r27) - r29 = PyObject_CallFunctionObjArgs(r28, r25, 0) - r30 = __main__.globals :: static - r31 = 'c' - r32 = CPyDict_SetItem(r30, r31, r29) - r33 = r32 >= 0 :: signed + r5 = ('Callable',) + r6 = 'typing' + r7 = __main__.globals :: static + 
r8 = CPyImport_ImportFromMany(r6, r5, r5, r7) + typing = r8 :: module + r9 = __main__.globals :: static + r10 = 'c' + r11 = CPyDict_GetItem(r9, r10) + r12 = __main__.globals :: static + r13 = 'b' + r14 = CPyDict_GetItem(r12, r13) + r15 = PyObject_CallFunctionObjArgs(r14, r11, 0) + r16 = __main__.globals :: static + r17 = 'a' + r18 = CPyDict_GetItem(r16, r17) + r19 = PyObject_CallFunctionObjArgs(r18, r15, 0) + r20 = __main__.globals :: static + r21 = 'c' + r22 = CPyDict_SetItem(r20, r21, r19) + r23 = r22 >= 0 :: signed return 1 [case testDecoratorsSimple_toplevel] @@ -2927,18 +2992,10 @@ def __top_level__(): r0, r1 :: object r2 :: bit r3 :: str - r4 :: object - r5 :: dict + r4, r5 :: object r6 :: str - r7 :: list - r8, r9 :: ptr - r10 :: str - r11 :: object - r12, r13, r14 :: str - r15 :: object - r16 :: str - r17 :: int32 - r18 :: bit + r7 :: dict + r8 :: object L0: r0 = builtins :: module r1 = load_address _Py_NoneStruct @@ -2949,23 +3006,11 @@ L1: r4 = PyImport_Import(r3) builtins = r4 :: module L2: - r5 = __main__.globals :: static - r6 = 'Callable' - r7 = PyList_New(1) - r8 = get_element_ptr r7 ob_item :: PyListObject - r9 = load_mem r8 :: ptr* - set_mem r9, r6 :: builtins.object* - keep_alive r7 - r10 = 'typing' - r11 = PyImport_ImportModuleLevelObject(r10, r5, 0, r7, 0) - typing = r11 :: module - r12 = 'typing' - r13 = 'Callable' - r14 = 'Callable' - r15 = CPyImport_ImportFrom(r11, r12, r13, r14) - r16 = 'Callable' - r17 = CPyDict_SetItem(r5, r16, r15) - r18 = r17 >= 0 :: signed + r5 = ('Callable',) + r6 = 'typing' + r7 = __main__.globals :: static + r8 = CPyImport_ImportFromMany(r6, r5, r5, r7) + typing = r8 :: module return 1 [case testAnyAllG] @@ -3426,24 +3471,85 @@ L0: r2 = truncate r0: int32 to builtins.bool return r2 -[case testLocalImportSubmodule] -def f() -> int: +[case testLocalImports] +def root() -> None: + import dataclasses + import enum + +def submodule() -> int: import p.m return p.x [file p/__init__.py] x = 1 [file p/m.py] [out] -def f(): +def root(): r0 :: dict r1, r2 :: object r3 :: bit r4 :: str r5 :: object - r6 :: dict - r7 :: str - r8 :: object - r9 :: str + r6 :: str + r7 :: dict + r8 :: str + r9 :: object + r10 :: int32 + r11 :: bit + r12 :: dict + r13, r14 :: object + r15 :: bit + r16 :: str + r17 :: object + r18 :: str + r19 :: dict + r20 :: str + r21 :: object + r22 :: int32 + r23 :: bit +L0: + r0 = __main__.globals :: static + r1 = dataclasses :: module + r2 = load_address _Py_NoneStruct + r3 = r1 != r2 + if r3 goto L2 else goto L1 :: bool +L1: + r4 = 'dataclasses' + r5 = PyImport_Import(r4) + dataclasses = r5 :: module +L2: + r6 = 'dataclasses' + r7 = PyImport_GetModuleDict() + r8 = 'dataclasses' + r9 = CPyDict_GetItem(r7, r8) + r10 = CPyDict_SetItem(r0, r6, r9) + r11 = r10 >= 0 :: signed + r12 = __main__.globals :: static + r13 = enum :: module + r14 = load_address _Py_NoneStruct + r15 = r13 != r14 + if r15 goto L4 else goto L3 :: bool +L3: + r16 = 'enum' + r17 = PyImport_Import(r16) + enum = r17 :: module +L4: + r18 = 'enum' + r19 = PyImport_GetModuleDict() + r20 = 'enum' + r21 = CPyDict_GetItem(r19, r20) + r22 = CPyDict_SetItem(r12, r18, r21) + r23 = r22 >= 0 :: signed + return 1 +def submodule(): + r0 :: dict + r1, r2 :: object + r3 :: bit + r4 :: str + r5 :: object + r6 :: str + r7 :: dict + r8 :: str + r9 :: object r10 :: int32 r11 :: bit r12 :: dict @@ -3463,11 +3569,11 @@ L1: r5 = PyImport_Import(r4) p.m = r5 :: module L2: - r6 = PyImport_GetModuleDict() - r7 = 'p' - r8 = CPyDict_GetItem(r6, r7) - r9 = 'p' - r10 = CPyDict_SetItem(r0, r9, r8) + 
r6 = 'p' + r7 = PyImport_GetModuleDict() + r8 = 'p' + r9 = CPyDict_GetItem(r7, r8) + r10 = CPyDict_SetItem(r0, r6, r9) r11 = r10 >= 0 :: signed r12 = PyImport_GetModuleDict() r13 = 'p' @@ -3573,7 +3679,7 @@ def f() -> None: def f(): i, r0 :: int r1, i__redef__, r2 :: str - r3, i__redef____redef__ :: float + i__redef____redef__ :: float L0: i = 0 r0 = CPyTagged_Add(i, 2) @@ -3582,8 +3688,7 @@ L0: i__redef__ = r1 r2 = CPyStr_Append(i__redef__, i__redef__) i__redef__ = r2 - r3 = 0.0 - i__redef____redef__ = r3 + i__redef____redef__ = 0.0 return 1 [case testNewType] diff --git a/mypyc/test-data/irbuild-classes.test b/mypyc/test-data/irbuild-classes.test index 0f98fc69e5f3..0a7076e5f0ad 100644 --- a/mypyc/test-data/irbuild-classes.test +++ b/mypyc/test-data/irbuild-classes.test @@ -200,84 +200,63 @@ def __top_level__(): r0, r1 :: object r2 :: bit r3 :: str - r4 :: object - r5 :: dict - r6, r7 :: str - r8 :: list - r9, r10, r11 :: ptr - r12 :: str - r13 :: object - r14, r15, r16 :: str - r17 :: object - r18 :: str - r19 :: int32 - r20 :: bit - r21, r22, r23 :: str - r24 :: object - r25 :: str - r26 :: int32 - r27 :: bit - r28 :: dict - r29 :: str - r30 :: list - r31, r32 :: ptr - r33 :: str - r34 :: object - r35, r36, r37 :: str - r38 :: object + r4, r5 :: object + r6 :: str + r7 :: dict + r8, r9 :: object + r10 :: str + r11 :: dict + r12 :: object + r13 :: str + r14 :: dict + r15 :: str + r16, r17 :: object + r18 :: dict + r19 :: str + r20 :: int32 + r21 :: bit + r22 :: object + r23 :: str + r24, r25 :: object + r26 :: bool + r27 :: str + r28 :: tuple + r29 :: int32 + r30 :: bit + r31 :: dict + r32 :: str + r33 :: int32 + r34 :: bit + r35 :: object + r36 :: str + r37, r38 :: object r39 :: str - r40 :: int32 - r41 :: bit - r42 :: str + r40 :: tuple + r41 :: int32 + r42 :: bit r43 :: dict r44 :: str - r45, r46 :: object - r47 :: dict - r48 :: str - r49 :: int32 - r50 :: bit + r45 :: int32 + r46 :: bit + r47, r48 :: object + r49 :: dict + r50 :: str r51 :: object - r52 :: str - r53, r54 :: object - r55 :: bool - r56 :: str - r57 :: tuple - r58 :: int32 - r59 :: bit - r60 :: dict - r61 :: str - r62 :: int32 - r63 :: bit - r64 :: object - r65 :: str - r66, r67 :: object - r68 :: str - r69 :: tuple - r70 :: int32 - r71 :: bit - r72 :: dict - r73 :: str - r74 :: int32 - r75 :: bit - r76, r77 :: object - r78 :: dict - r79 :: str - r80 :: object - r81 :: dict - r82 :: str - r83, r84 :: object - r85 :: tuple - r86 :: str - r87, r88 :: object - r89 :: bool - r90, r91 :: str - r92 :: tuple - r93 :: int32 - r94 :: bit - r95 :: dict - r96 :: str - r97 :: int32 - r98 :: bit + r52 :: dict + r53 :: str + r54, r55 :: object + r56 :: tuple + r57 :: str + r58, r59 :: object + r60 :: bool + r61, r62 :: str + r63 :: tuple + r64 :: int32 + r65 :: bit + r66 :: dict + r67 :: str + r68 :: int32 + r69 :: bit L0: r0 = builtins :: module r1 = load_address _Py_NoneStruct @@ -288,110 +267,76 @@ L1: r4 = PyImport_Import(r3) builtins = r4 :: module L2: - r5 = __main__.globals :: static - r6 = 'TypeVar' - r7 = 'Generic' - r8 = PyList_New(2) - r9 = get_element_ptr r8 ob_item :: PyListObject - r10 = load_mem r9 :: ptr* - set_mem r10, r6 :: builtins.object* - r11 = r10 + WORD_SIZE*1 - set_mem r11, r7 :: builtins.object* - keep_alive r8 - r12 = 'typing' - r13 = PyImport_ImportModuleLevelObject(r12, r5, 0, r8, 0) - typing = r13 :: module - r14 = 'typing' + r5 = ('TypeVar', 'Generic') + r6 = 'typing' + r7 = __main__.globals :: static + r8 = CPyImport_ImportFromMany(r6, r5, r5, r7) + typing = r8 :: module + r9 = ('trait',) + r10 = 
'mypy_extensions' + r11 = __main__.globals :: static + r12 = CPyImport_ImportFromMany(r10, r9, r9, r11) + mypy_extensions = r12 :: module + r13 = 'T' + r14 = __main__.globals :: static r15 = 'TypeVar' - r16 = 'TypeVar' - r17 = CPyImport_ImportFrom(r13, r14, r15, r16) - r18 = 'TypeVar' - r19 = CPyDict_SetItem(r5, r18, r17) - r20 = r19 >= 0 :: signed - r21 = 'typing' - r22 = 'Generic' - r23 = 'Generic' - r24 = CPyImport_ImportFrom(r13, r21, r22, r23) - r25 = 'Generic' - r26 = CPyDict_SetItem(r5, r25, r24) - r27 = r26 >= 0 :: signed - r28 = __main__.globals :: static - r29 = 'trait' - r30 = PyList_New(1) - r31 = get_element_ptr r30 ob_item :: PyListObject - r32 = load_mem r31 :: ptr* - set_mem r32, r29 :: builtins.object* - keep_alive r30 - r33 = 'mypy_extensions' - r34 = PyImport_ImportModuleLevelObject(r33, r28, 0, r30, 0) - mypy_extensions = r34 :: module - r35 = 'mypy_extensions' - r36 = 'trait' - r37 = 'trait' - r38 = CPyImport_ImportFrom(r34, r35, r36, r37) - r39 = 'trait' - r40 = CPyDict_SetItem(r28, r39, r38) - r41 = r40 >= 0 :: signed - r42 = 'T' + r16 = CPyDict_GetItem(r14, r15) + r17 = PyObject_CallFunctionObjArgs(r16, r13, 0) + r18 = __main__.globals :: static + r19 = 'T' + r20 = CPyDict_SetItem(r18, r19, r17) + r21 = r20 >= 0 :: signed + r22 = :: object + r23 = '__main__' + r24 = __main__.C_template :: type + r25 = CPyType_FromTemplate(r24, r22, r23) + r26 = C_trait_vtable_setup() + r27 = '__mypyc_attrs__' + r28 = PyTuple_Pack(0) + r29 = PyObject_SetAttr(r25, r27, r28) + r30 = r29 >= 0 :: signed + __main__.C = r25 :: type + r31 = __main__.globals :: static + r32 = 'C' + r33 = CPyDict_SetItem(r31, r32, r25) + r34 = r33 >= 0 :: signed + r35 = :: object + r36 = '__main__' + r37 = __main__.S_template :: type + r38 = CPyType_FromTemplate(r37, r35, r36) + r39 = '__mypyc_attrs__' + r40 = PyTuple_Pack(0) + r41 = PyObject_SetAttr(r38, r39, r40) + r42 = r41 >= 0 :: signed + __main__.S = r38 :: type r43 = __main__.globals :: static - r44 = 'TypeVar' - r45 = CPyDict_GetItem(r43, r44) - r46 = PyObject_CallFunctionObjArgs(r45, r42, 0) - r47 = __main__.globals :: static - r48 = 'T' - r49 = CPyDict_SetItem(r47, r48, r46) - r50 = r49 >= 0 :: signed - r51 = :: object - r52 = '__main__' - r53 = __main__.C_template :: type - r54 = CPyType_FromTemplate(r53, r51, r52) - r55 = C_trait_vtable_setup() - r56 = '__mypyc_attrs__' - r57 = PyTuple_Pack(0) - r58 = PyObject_SetAttr(r54, r56, r57) - r59 = r58 >= 0 :: signed - __main__.C = r54 :: type - r60 = __main__.globals :: static - r61 = 'C' - r62 = CPyDict_SetItem(r60, r61, r54) - r63 = r62 >= 0 :: signed - r64 = :: object - r65 = '__main__' - r66 = __main__.S_template :: type - r67 = CPyType_FromTemplate(r66, r64, r65) - r68 = '__mypyc_attrs__' - r69 = PyTuple_Pack(0) - r70 = PyObject_SetAttr(r67, r68, r69) - r71 = r70 >= 0 :: signed - __main__.S = r67 :: type - r72 = __main__.globals :: static - r73 = 'S' - r74 = CPyDict_SetItem(r72, r73, r67) - r75 = r74 >= 0 :: signed - r76 = __main__.C :: type - r77 = __main__.S :: type - r78 = __main__.globals :: static - r79 = 'Generic' - r80 = CPyDict_GetItem(r78, r79) - r81 = __main__.globals :: static - r82 = 'T' - r83 = CPyDict_GetItem(r81, r82) - r84 = PyObject_GetItem(r80, r83) - r85 = PyTuple_Pack(3, r76, r77, r84) - r86 = '__main__' - r87 = __main__.D_template :: type - r88 = CPyType_FromTemplate(r87, r85, r86) - r89 = D_trait_vtable_setup() - r90 = '__mypyc_attrs__' - r91 = '__dict__' - r92 = PyTuple_Pack(1, r91) - r93 = PyObject_SetAttr(r88, r90, r92) - r94 = r93 >= 0 :: signed - __main__.D = r88 :: type - 
r95 = __main__.globals :: static - r96 = 'D' - r97 = CPyDict_SetItem(r95, r96, r88) - r98 = r97 >= 0 :: signed + r44 = 'S' + r45 = CPyDict_SetItem(r43, r44, r38) + r46 = r45 >= 0 :: signed + r47 = __main__.C :: type + r48 = __main__.S :: type + r49 = __main__.globals :: static + r50 = 'Generic' + r51 = CPyDict_GetItem(r49, r50) + r52 = __main__.globals :: static + r53 = 'T' + r54 = CPyDict_GetItem(r52, r53) + r55 = PyObject_GetItem(r51, r54) + r56 = PyTuple_Pack(3, r47, r48, r55) + r57 = '__main__' + r58 = __main__.D_template :: type + r59 = CPyType_FromTemplate(r58, r56, r57) + r60 = D_trait_vtable_setup() + r61 = '__mypyc_attrs__' + r62 = '__dict__' + r63 = PyTuple_Pack(1, r62) + r64 = PyObject_SetAttr(r59, r61, r63) + r65 = r64 >= 0 :: signed + __main__.D = r59 :: type + r66 = __main__.globals :: static + r67 = 'D' + r68 = CPyDict_SetItem(r66, r67, r59) + r69 = r68 >= 0 :: signed return 1 [case testIsInstance] diff --git a/mypyc/test-data/irbuild-constant-fold.test b/mypyc/test-data/irbuild-constant-fold.test index 7d9127887aa6..c7c5c054e7ce 100644 --- a/mypyc/test-data/irbuild-constant-fold.test +++ b/mypyc/test-data/irbuild-constant-fold.test @@ -3,6 +3,7 @@ def bin_ops() -> None: add = 15 + 47 add_mul = (2 + 3) * 5 sub = 7 - 11 + div = 3 / 2 bit_and = 6 & 10 bit_or = 6 | 10 bit_xor = 6 ^ 10 @@ -25,11 +26,14 @@ def pow() -> None: p3 = 0**0 [out] def bin_ops(): - add, add_mul, sub, bit_and, bit_or, bit_xor, lshift, rshift, lshift0, rshift0 :: int + add, add_mul, sub :: int + div :: float + bit_and, bit_or, bit_xor, lshift, rshift, lshift0, rshift0 :: int L0: add = 124 add_mul = 50 sub = -8 + div = 1.5 bit_and = 4 bit_or = 28 bit_xor = 24 @@ -117,44 +121,28 @@ L0: [case testIntConstantFoldingUnsupportedCases] def error_cases() -> None: - div_by_zero = 5 // 0 + div_by_zero = 5 / 0 + floor_div_by_zero = 5 // 0 mod_by_zero = 5 % 0 lshift_neg = 6 << -1 rshift_neg = 7 >> -1 -def unsupported_div() -> None: - x = 4 / 6 - y = 10 / 5 def unsupported_pow() -> None: p = 3 ** (-1) [out] def error_cases(): - r0, div_by_zero, r1, mod_by_zero, r2, lshift_neg, r3, rshift_neg :: int + r0, div_by_zero :: float + r1, floor_div_by_zero, r2, mod_by_zero, r3, lshift_neg, r4, rshift_neg :: int L0: - r0 = CPyTagged_FloorDivide(10, 0) + r0 = CPyTagged_TrueDivide(10, 0) div_by_zero = r0 - r1 = CPyTagged_Remainder(10, 0) - mod_by_zero = r1 - r2 = CPyTagged_Lshift(12, -2) - lshift_neg = r2 - r3 = CPyTagged_Rshift(14, -2) - rshift_neg = r3 - return 1 -def unsupported_div(): - r0, r1, r2 :: object - r3, x :: float - r4, r5, r6 :: object - r7, y :: float -L0: - r0 = object 4 - r1 = object 6 - r2 = PyNumber_TrueDivide(r0, r1) - r3 = cast(float, r2) - x = r3 - r4 = object 10 - r5 = object 5 - r6 = PyNumber_TrueDivide(r4, r5) - r7 = cast(float, r6) - y = r7 + r1 = CPyTagged_FloorDivide(10, 0) + floor_div_by_zero = r1 + r2 = CPyTagged_Remainder(10, 0) + mod_by_zero = r2 + r3 = CPyTagged_Lshift(12, -2) + lshift_neg = r3 + r4 = CPyTagged_Rshift(14, -2) + rshift_neg = r4 return 1 def unsupported_pow(): r0, r1, r2 :: object @@ -163,7 +151,7 @@ L0: r0 = object 3 r1 = object -1 r2 = CPyNumber_Power(r0, r1) - r3 = cast(float, r2) + r3 = unbox(float, r2) p = r3 return 1 @@ -233,20 +221,216 @@ L0: a = 12 return 1 +[case testFloatConstantFolding] +def bin_ops() -> None: + add = 0.5 + 0.5 + add_mul = (1.5 + 3.5) * 5.0 + sub = 7.0 - 7.5 + div = 3.0 / 2.0 + floor_div = 3.0 // 2.0 +def unary_ops() -> None: + neg1 = -5.5 + neg2 = --1.5 + neg3 = -0.0 + pos = +5.5 +def pow() -> None: + p0 = 16.0**0 + p1 = 16.0**0.5 + p2 = (-5.0)**3 + p3 
= 0.0**0.0 +def error_cases() -> None: + div = 2.0 / 0.0 + floor_div = 2.0 // 0.0 + power_imag = (-2.0)**0.5 + power_overflow = 2.0**10000.0 +[out] +def bin_ops(): + add, add_mul, sub, div, floor_div :: float +L0: + add = 1.0 + add_mul = 25.0 + sub = -0.5 + div = 1.5 + floor_div = 1.0 + return 1 +def unary_ops(): + neg1, neg2, neg3, pos :: float +L0: + neg1 = -5.5 + neg2 = 1.5 + neg3 = -0.0 + pos = 5.5 + return 1 +def pow(): + p0, p1, p2, p3 :: float +L0: + p0 = 1.0 + p1 = 4.0 + p2 = -125.0 + p3 = 1.0 + return 1 +def error_cases(): + r0 :: bit + r1 :: bool + r2, div, r3, floor_div :: float + r4, r5, r6 :: object + r7, power_imag :: float + r8, r9, r10 :: object + r11, power_overflow :: float +L0: + r0 = 0.0 == 0.0 + if r0 goto L1 else goto L2 :: bool +L1: + r1 = raise ZeroDivisionError('float division by zero') + unreachable +L2: + r2 = 2.0 / 0.0 + div = r2 + r3 = CPyFloat_FloorDivide(2.0, 0.0) + floor_div = r3 + r4 = box(float, -2.0) + r5 = box(float, 0.5) + r6 = CPyNumber_Power(r4, r5) + r7 = unbox(float, r6) + power_imag = r7 + r8 = box(float, 2.0) + r9 = box(float, 10000.0) + r10 = CPyNumber_Power(r8, r9) + r11 = unbox(float, r10) + power_overflow = r11 + return 1 + +[case testMixedFloatIntConstantFolding] +def bin_ops() -> None: + add = 1 + 0.5 + sub = 1 - 0.5 + mul = 0.5 * 5 + div = 5 / 0.5 + floor_div = 9.5 // 5 +def error_cases() -> None: + div = 2.0 / 0 + floor_div = 2.0 // 0 + power_overflow = 2.0**10000 +[out] +def bin_ops(): + add, sub, mul, div, floor_div :: float +L0: + add = 1.5 + sub = 0.5 + mul = 2.5 + div = 10.0 + floor_div = 1.0 + return 1 +def error_cases(): + r0 :: bit + r1 :: bool + r2, div, r3, floor_div :: float + r4, r5, r6 :: object + r7, power_overflow :: float +L0: + r0 = 0.0 == 0.0 + if r0 goto L1 else goto L2 :: bool +L1: + r1 = raise ZeroDivisionError('float division by zero') + unreachable +L2: + r2 = 2.0 / 0.0 + div = r2 + r3 = CPyFloat_FloorDivide(2.0, 0.0) + floor_div = r3 + r4 = box(float, 2.0) + r5 = box(float, 10000.0) + r6 = CPyNumber_Power(r4, r5) + r7 = unbox(float, r6) + power_overflow = r7 + return 1 + [case testStrConstantFolding] from typing_extensions import Final S: Final = 'z' +N: Final = 2 def f() -> None: x = 'foo' + 'bar' y = 'x' + 'y' + S + mul = "foobar" * 2 + mul2 = N * "foobar" [out] def f(): - r0, x, r1, y :: str + r0, x, r1, y, r2, mul, r3, mul2 :: str L0: r0 = 'foobar' x = r0 r1 = 'xyz' y = r1 + r2 = 'foobarfoobar' + mul = r2 + r3 = 'foobarfoobar' + mul2 = r3 + return 1 + +[case testBytesConstantFolding] +from typing_extensions import Final + +N: Final = 2 + +def f() -> None: + # Unfortunately, mypy doesn't store the bytes value of final refs. 
+ x = b'foo' + b'bar' + mul = b"foobar" * 2 + mul2 = N * b"foobar" +[out] +def f(): + r0, x, r1, mul, r2, mul2 :: bytes +L0: + r0 = b'foobar' + x = r0 + r1 = b'foobarfoobar' + mul = r1 + r2 = b'foobarfoobar' + mul2 = r2 return 1 + +[case testComplexConstantFolding] +from typing_extensions import Final + +N: Final = 1 +FLOAT_N: Final = 1.5 + +def integral() -> None: + pos = 1+2j + pos_2 = 2j+N + neg = 1-2j + neg_2 = 2j-N +def floating() -> None: + pos = 1.5+2j + pos_2 = 2j+FLOAT_N + neg = 1.5-2j + neg_2 = 2j-FLOAT_N +[out] +def integral(): + r0, pos, r1, pos_2, r2, neg, r3, neg_2 :: object +L0: + r0 = (1+2j) + pos = r0 + r1 = (1+2j) + pos_2 = r1 + r2 = (1-2j) + neg = r2 + r3 = (-1+2j) + neg_2 = r3 + return 1 +def floating(): + r0, pos, r1, pos_2, r2, neg, r3, neg_2 :: object +L0: + r0 = (1.5+2j) + pos = r0 + r1 = (1.5+2j) + pos_2 = r1 + r2 = (1.5-2j) + neg = r2 + r3 = (-1.5+2j) + neg_2 = r3 + return 1 + diff --git a/mypyc/test-data/irbuild-dict.test b/mypyc/test-data/irbuild-dict.test index 99643b9451f0..d1fc4f956ce7 100644 --- a/mypyc/test-data/irbuild-dict.test +++ b/mypyc/test-data/irbuild-dict.test @@ -219,6 +219,12 @@ L0: [case testDictIterationMethods] from typing import Dict, Union +from typing_extensions import TypedDict + +class Person(TypedDict): + name: str + age: int + def print_dict_methods(d1: Dict[int, int], d2: Dict[int, int]) -> None: for v in d1.values(): if v in d2: @@ -229,6 +235,10 @@ def union_of_dicts(d: Union[Dict[str, int], Dict[str, str]]) -> None: new = {} for k, v in d.items(): new[k] = int(v) +def typeddict(d: Person) -> None: + for k, v in d.items(): + if k == "name": + name = v [out] def print_dict_methods(d1, d2): d1, d2 :: dict @@ -370,6 +380,65 @@ L4: r19 = CPy_NoErrOccured() L5: return 1 +def typeddict(d): + d :: dict + r0 :: short_int + r1 :: native_int + r2 :: short_int + r3 :: object + r4 :: tuple[bool, short_int, object, object] + r5 :: short_int + r6 :: bool + r7, r8 :: object + r9, k :: str + v :: object + r10 :: str + r11 :: int32 + r12 :: bit + r13 :: object + r14, r15, r16 :: bit + name :: object + r17, r18 :: bit +L0: + r0 = 0 + r1 = PyDict_Size(d) + r2 = r1 << 1 + r3 = CPyDict_GetItemsIter(d) +L1: + r4 = CPyDict_NextItem(r3, r0) + r5 = r4[1] + r0 = r5 + r6 = r4[0] + if r6 goto L2 else goto L9 :: bool +L2: + r7 = r4[2] + r8 = r4[3] + r9 = cast(str, r7) + k = r9 + v = r8 + r10 = 'name' + r11 = PyUnicode_Compare(k, r10) + r12 = r11 == -1 + if r12 goto L3 else goto L5 :: bool +L3: + r13 = PyErr_Occurred() + r14 = r13 != 0 + if r14 goto L4 else goto L5 :: bool +L4: + r15 = CPy_KeepPropagating() +L5: + r16 = r11 == 0 + if r16 goto L6 else goto L7 :: bool +L6: + name = v +L7: +L8: + r17 = CPyDict_CheckSize(d, r2) + goto L1 +L9: + r18 = CPy_NoErrOccured() +L10: + return 1 [case testDictLoadAddress] def f() -> None: diff --git a/mypyc/test-data/irbuild-dunders.test b/mypyc/test-data/irbuild-dunders.test index 82f04dcdf687..3c140d927c0f 100644 --- a/mypyc/test-data/irbuild-dunders.test +++ b/mypyc/test-data/irbuild-dunders.test @@ -184,10 +184,8 @@ L0: return 6 def C.__float__(self): self :: __main__.C - r0 :: float L0: - r0 = 4.0 - return r0 + return 4.0 def C.__pos__(self): self :: __main__.C L0: @@ -223,4 +221,3 @@ L0: r6 = c.__bool__() r7 = c.__complex__() return 1 - diff --git a/mypyc/test-data/irbuild-float.test b/mypyc/test-data/irbuild-float.test new file mode 100644 index 000000000000..e3a60852574b --- /dev/null +++ b/mypyc/test-data/irbuild-float.test @@ -0,0 +1,497 @@ +[case testFloatAdd] +def f(x: float, y: float) -> float: + return x + y +def 
g(x: float) -> float: + z = x - 1.5 + return 2.5 * z +[out] +def f(x, y): + x, y, r0 :: float +L0: + r0 = x + y + return r0 +def g(x): + x, r0, z, r1 :: float +L0: + r0 = x - 1.5 + z = r0 + r1 = 2.5 * z + return r1 + +[case testFloatBoxAndUnbox] +from typing import Any +def f(x: float) -> object: + return x +def g(x: Any) -> float: + return x +[out] +def f(x): + x :: float + r0 :: object +L0: + r0 = box(float, x) + return r0 +def g(x): + x :: object + r0 :: float +L0: + r0 = unbox(float, x) + return r0 + +[case testFloatNegAndPos] +def f(x: float) -> float: + y = +x * -0.5 + return -y +[out] +def f(x): + x, r0, y, r1 :: float +L0: + r0 = x * -0.5 + y = r0 + r1 = -y + return r1 + +[case testFloatCoerceFromInt] +def from_int(x: int) -> float: + return x + +def from_literal() -> float: + return 5 + +def from_literal_neg() -> float: + return -2 +[out] +def from_int(x): + x :: int + r0 :: float +L0: + r0 = CPyFloat_FromTagged(x) + return r0 +def from_literal(): +L0: + return 5.0 +def from_literal_neg(): +L0: + return -2.0 + +[case testConvertBetweenFloatAndInt] +def to_int(x: float) -> int: + return int(x) +def from_int(x: int) -> float: + return float(x) +[out] +def to_int(x): + x :: float + r0 :: int +L0: + r0 = CPyTagged_FromFloat(x) + return r0 +def from_int(x): + x :: int + r0 :: float +L0: + r0 = CPyFloat_FromTagged(x) + return r0 + +[case testFloatOperatorAssignment] +def f(x: float, y: float) -> float: + x += y + x -= 5.0 + return x +[out] +def f(x, y): + x, y, r0, r1 :: float +L0: + r0 = x + y + x = r0 + r1 = x - 5.0 + x = r1 + return x + +[case testFloatOperatorAssignmentWithInt] +def f(x: float, y: int) -> None: + x += y + x -= 5 +[out] +def f(x, y): + x :: float + y :: int + r0, r1, r2 :: float +L0: + r0 = CPyFloat_FromTagged(y) + r1 = x + r0 + x = r1 + r2 = x - 5.0 + x = r2 + return 1 + +[case testFloatComparison] +def lt(x: float, y: float) -> bool: + return x < y +def eq(x: float, y: float) -> bool: + return x == y +[out] +def lt(x, y): + x, y :: float + r0 :: bit +L0: + r0 = x < y + return r0 +def eq(x, y): + x, y :: float + r0 :: bit +L0: + r0 = x == y + return r0 + +[case testFloatOpWithLiteralInt] +def f(x: float) -> None: + y = x * 2 + z = 1 - y + b = z < 3 + c = 0 == z +[out] +def f(x): + x, r0, y, r1, z :: float + r2 :: bit + b :: bool + r3 :: bit + c :: bool +L0: + r0 = x * 2.0 + y = r0 + r1 = 1.0 - y + z = r1 + r2 = z < 3.0 + b = r2 + r3 = 0.0 == z + c = r3 + return 1 + +[case testFloatCallFunctionWithLiteralInt] +def f(x: float) -> None: pass + +def g() -> None: + f(3) + f(-2) +[out] +def f(x): + x :: float +L0: + return 1 +def g(): + r0, r1 :: None +L0: + r0 = f(3.0) + r1 = f(-2.0) + return 1 + +[case testFloatAsBool] +def f(x: float) -> int: + if x: + return 2 + else: + return 5 +[out] +def f(x): + x :: float + r0 :: bit +L0: + r0 = x != 0.0 + if r0 goto L1 else goto L2 :: bool +L1: + return 4 +L2: + return 10 +L3: + unreachable + +[case testCallSqrtViaMathModule] +import math + +def f(x: float) -> float: + return math.sqrt(x) +[out] +def f(x): + x, r0 :: float +L0: + r0 = CPyFloat_Sqrt(x) + return r0 + +[case testFloatFinalConstant] +from typing_extensions import Final + +X: Final = 123.0 +Y: Final = -1.0 + +def f() -> float: + a = X + return a + Y +[out] +def f(): + a, r0 :: float +L0: + a = 123.0 + r0 = a + -1.0 + return r0 + +[case testFloatDefaultArg] +def f(x: float = 1.5) -> float: + return x +[out] +def f(x, __bitmap): + x :: float + __bitmap, r0 :: uint32 + r1 :: bit +L0: + r0 = __bitmap & 1 + r1 = r0 == 0 + if r1 goto L1 else goto L2 :: bool +L1: + x = 1.5 
+L2: + return x + +[case testFloatMixedOperations] +def f(x: float, y: int) -> None: + if x < y: + z = x + y + x -= y + z = y + z + if y == x: + x -= 1 +[out] +def f(x, y): + x :: float + y :: int + r0 :: float + r1 :: bit + r2, r3, z, r4, r5, r6, r7, r8 :: float + r9 :: bit + r10 :: float +L0: + r0 = CPyFloat_FromTagged(y) + r1 = x < r0 + if r1 goto L1 else goto L2 :: bool +L1: + r2 = CPyFloat_FromTagged(y) + r3 = x + r2 + z = r3 + r4 = CPyFloat_FromTagged(y) + r5 = x - r4 + x = r5 + r6 = CPyFloat_FromTagged(y) + r7 = r6 + z + z = r7 +L2: + r8 = CPyFloat_FromTagged(y) + r9 = r8 == x + if r9 goto L3 else goto L4 :: bool +L3: + r10 = x - 1.0 + x = r10 +L4: + return 1 + +[case testFloatDivideSimple] +def f(x: float, y: float) -> float: + z = x / y + z = z / 2.0 + return z / 3 +[out] +def f(x, y): + x, y :: float + r0 :: bit + r1 :: bool + r2, z, r3, r4 :: float +L0: + r0 = y == 0.0 + if r0 goto L1 else goto L2 :: bool +L1: + r1 = raise ZeroDivisionError('float division by zero') + unreachable +L2: + r2 = x / y + z = r2 + r3 = z / 2.0 + z = r3 + r4 = z / 3.0 + return r4 + +[case testFloatDivideIntOperand] +def f(n: int, m: int) -> float: + return n / m +[out] +def f(n, m): + n, m :: int + r0 :: float +L0: + r0 = CPyTagged_TrueDivide(n, m) + return r0 + +[case testFloatResultOfIntDivide] +def f(f: float, n: int) -> float: + x = f / n + return n / x +[out] +def f(f, n): + f :: float + n :: int + r0 :: float + r1 :: bit + r2 :: bool + r3, x, r4 :: float + r5 :: bit + r6 :: bool + r7 :: float +L0: + r0 = CPyFloat_FromTagged(n) + r1 = r0 == 0.0 + if r1 goto L1 else goto L2 :: bool +L1: + r2 = raise ZeroDivisionError('float division by zero') + unreachable +L2: + r3 = f / r0 + x = r3 + r4 = CPyFloat_FromTagged(n) + r5 = x == 0.0 + if r5 goto L3 else goto L4 :: bool +L3: + r6 = raise ZeroDivisionError('float division by zero') + unreachable +L4: + r7 = r4 / x + return r7 + +[case testFloatExplicitConversions] +def f(f: float, n: int) -> int: + x = float(n) + y = float(x) # no-op + return int(y) +[out] +def f(f, n): + f :: float + n :: int + r0, x, y :: float + r1 :: int +L0: + r0 = CPyFloat_FromTagged(n) + x = r0 + y = x + r1 = CPyTagged_FromFloat(y) + return r1 + +[case testFloatModulo] +def f(x: float, y: float) -> float: + return x % y +[out] +def f(x, y): + x, y :: float + r0 :: bit + r1 :: bool + r2, r3 :: float + r4, r5, r6, r7 :: bit + r8, r9 :: float +L0: + r0 = y == 0.0 + if r0 goto L1 else goto L2 :: bool +L1: + r1 = raise ZeroDivisionError('float modulo') + unreachable +L2: + r2 = x % y + r3 = r2 + r4 = r3 == 0.0 + if r4 goto L5 else goto L3 :: bool +L3: + r5 = x < 0.0 + r6 = y < 0.0 + r7 = r5 == r6 + if r7 goto L6 else goto L4 :: bool +L4: + r8 = r3 + y + r3 = r8 + goto L6 +L5: + r9 = copysign(0.0, y) + r3 = r9 +L6: + return r3 + +[case testFloatFloorDivide] +def f(x: float, y: float) -> float: + return x // y +def g(x: float, y: int) -> float: + return x // y +[out] +def f(x, y): + x, y, r0 :: float +L0: + r0 = CPyFloat_FloorDivide(x, y) + return r0 +def g(x, y): + x :: float + y :: int + r0, r1 :: float +L0: + r0 = CPyFloat_FromTagged(y) + r1 = CPyFloat_FloorDivide(x, r0) + return r1 + +[case testFloatNarrowToIntDisallowed] +class C: + x: float + +def narrow_local(x: float, n: int) -> int: + x = n # E: Incompatible value representations in assignment (expression has type "int", variable has type "float") + return x + +def narrow_tuple_lvalue(x: float, y: float, n: int) -> int: + x, y = 1.0, n # E: Incompatible value representations in assignment (expression has type "int", variable has 
type "float") + return y + +def narrow_multiple_lvalues(x: float, y: float, n: int) -> int: + x = a = n # E: Incompatible value representations in assignment (expression has type "int", variable has type "float") + a = y = n # E: Incompatible value representations in assignment (expression has type "int", variable has type "float") + return x + y + +def narrow_attribute(c: C, n: int) -> int: + c.x = n # E: Incompatible value representations in assignment (expression has type "int", variable has type "float") + return c.x + +def narrow_using_int_literal(x: float) -> int: + x = 1 # E: Incompatible value representations in assignment (expression has type "int", variable has type "float") + return x + +def narrow_using_declaration(n: int) -> int: + x: float + x = n # E: Incompatible value representations in assignment (expression has type "int", variable has type "float") + return x + +[case testFloatInitializeFromInt] +def init(n: int) -> None: + # These are strictly speaking safe, since these don't narrow, but for consistency with + # narrowing assignments, generate errors here + x: float = n # E: Incompatible value representations in assignment (expression has type "int", variable has type "float") + y: float = 5 # E: Incompatible value representations in assignment (expression has type "int", variable has type "float") + +[case testFloatCoerceTupleFromIntValues] +from __future__ import annotations + +def f(x: int) -> None: + t: tuple[float, float, float] = (x, 2.5, -7) +[out] +def f(x): + x :: int + r0 :: tuple[int, float, int] + r1 :: int + r2 :: float + r3, t :: tuple[float, float, float] +L0: + r0 = (x, 2.5, -14) + r1 = r0[0] + r2 = CPyFloat_FromTagged(r1) + r3 = (r2, 2.5, -7.0) + t = r3 + return 1 diff --git a/mypyc/test-data/irbuild-i32.test b/mypyc/test-data/irbuild-i32.test index 7ea3c0864728..725e183657b1 100644 --- a/mypyc/test-data/irbuild-i32.test +++ b/mypyc/test-data/irbuild-i32.test @@ -481,7 +481,7 @@ L0: z = -3 return 1 -[case testI32ExplicitConversionFromVariousTypes] +[case testI32ExplicitConversionFromVariousTypes_64bit] from mypy_extensions import i32 def bool_to_i32(b: bool) -> i32: @@ -526,9 +526,62 @@ L0: return r0 def float_to_i32(x): x :: float - r0 :: object - r1 :: int32 + r0 :: int + r1 :: native_int + r2, r3, r4 :: bit + r5 :: native_int + r6, r7 :: int32 L0: - r0 = CPyLong_FromFloat(x) - r1 = unbox(int32, r0) - return r1 + r0 = CPyTagged_FromFloat(x) + r1 = r0 & 1 + r2 = r1 == 0 + if r2 goto L1 else goto L4 :: bool +L1: + r3 = r0 < 4294967296 :: signed + if r3 goto L2 else goto L4 :: bool +L2: + r4 = r0 >= -4294967296 :: signed + if r4 goto L3 else goto L4 :: bool +L3: + r5 = r0 >> 1 + r6 = truncate r5: native_int to int32 + r7 = r6 + goto L5 +L4: + CPyInt32_Overflow() + unreachable +L5: + return r7 + +[case testI32ExplicitConversionFromFloat_32bit] +from mypy_extensions import i32 + +def float_to_i32(x: float) -> i32: + return i32(x) +[out] +def float_to_i32(x): + x :: float + r0 :: int + r1 :: native_int + r2 :: bit + r3, r4 :: int32 + r5 :: ptr + r6 :: c_ptr + r7 :: int32 +L0: + r0 = CPyTagged_FromFloat(x) + r1 = r0 & 1 + r2 = r1 == 0 + if r2 goto L1 else goto L2 :: bool +L1: + r3 = r0 >> 1 + r4 = r3 + goto L3 +L2: + r5 = r0 ^ 1 + r6 = r5 + r7 = CPyLong_AsInt32(r6) + r4 = r7 + keep_alive r0 +L3: + return r4 diff --git a/mypyc/test-data/irbuild-i64.test b/mypyc/test-data/irbuild-i64.test index f616893d8fe5..a18171c41d57 100644 --- a/mypyc/test-data/irbuild-i64.test +++ b/mypyc/test-data/irbuild-i64.test @@ -650,7 +650,6 @@ def f(x: i64, y: i64) -> Tuple[i64, 
i64]: return x, y def g() -> Tuple[i64, i64]: - # TODO: Avoid boxing and unboxing return 1, 2 def h() -> i64: @@ -666,13 +665,11 @@ L0: return r0 def g(): r0 :: tuple[int, int] - r1 :: object - r2 :: tuple[int64, int64] + r1 :: tuple[int64, int64] L0: r0 = (2, 4) - r1 = box(tuple[int, int], r0) - r2 = unbox(tuple[int64, int64], r1) - return r2 + r1 = (1, 2) + return r1 def h(): r0 :: tuple[int64, int64] r1, x, r2, y :: int64 @@ -1844,7 +1841,7 @@ L2: L3: return r4 -[case testI64ExplicitConversionFromVariousTypes] +[case testI64ExplicitConversionFromVariousTypes_64bit] from mypy_extensions import i64 def bool_to_i64(b: bool) -> i64: @@ -1900,12 +1897,123 @@ L0: return r0 def float_to_i64(x): x :: float - r0 :: object - r1 :: int64 + r0 :: int + r1 :: native_int + r2 :: bit + r3, r4 :: int64 + r5 :: ptr + r6 :: c_ptr + r7 :: int64 L0: - r0 = CPyLong_FromFloat(x) - r1 = unbox(int64, r0) - return r1 + r0 = CPyTagged_FromFloat(x) + r1 = r0 & 1 + r2 = r1 == 0 + if r2 goto L1 else goto L2 :: bool +L1: + r3 = r0 >> 1 + r4 = r3 + goto L3 +L2: + r5 = r0 ^ 1 + r6 = r5 + r7 = CPyLong_AsInt64(r6) + r4 = r7 + keep_alive r0 +L3: + return r4 + +[case testI64ExplicitConversionFromFloat_32bit] +from mypy_extensions import i64 + +def float_to_i64(x: float) -> i64: + return i64(x) +[out] +def float_to_i64(x): + x :: float + r0 :: int + r1 :: native_int + r2 :: bit + r3, r4, r5 :: int64 + r6 :: ptr + r7 :: c_ptr + r8 :: int64 +L0: + r0 = CPyTagged_FromFloat(x) + r1 = r0 & 1 + r2 = r1 == 0 + if r2 goto L1 else goto L2 :: bool +L1: + r3 = extend signed r0: builtins.int to int64 + r4 = r3 >> 1 + r5 = r4 + goto L3 +L2: + r6 = r0 ^ 1 + r7 = r6 + r8 = CPyLong_AsInt64(r7) + r5 = r8 + keep_alive r0 +L3: + return r5 + +[case testI64ConvertToFloat_64bit] +from mypy_extensions import i64 + +def i64_to_float(x: i64) -> float: + return float(x) +[out] +def i64_to_float(x): + x :: int64 + r0, r1 :: bit + r2, r3, r4 :: int + r5 :: float +L0: + r0 = x <= 4611686018427387903 :: signed + if r0 goto L1 else goto L2 :: bool +L1: + r1 = x >= -4611686018427387904 :: signed + if r1 goto L3 else goto L2 :: bool +L2: + r2 = CPyTagged_FromInt64(x) + r3 = r2 + goto L4 +L3: + r4 = x << 1 + r3 = r4 +L4: + r5 = CPyFloat_FromTagged(r3) + return r5 + +[case testI64ConvertToFloat_32bit] +from mypy_extensions import i64 + +def i64_to_float(x: i64) -> float: + return float(x) +[out] +def i64_to_float(x): + x :: int64 + r0, r1 :: bit + r2, r3 :: int + r4 :: native_int + r5 :: int + r6 :: float +L0: + r0 = x <= 1073741823 :: signed + if r0 goto L1 else goto L2 :: bool +L1: + r1 = x >= -1073741824 :: signed + if r1 goto L3 else goto L2 :: bool +L2: + r2 = CPyTagged_FromInt64(x) + r3 = r2 + goto L4 +L3: + r4 = truncate x: int64 to native_int + r5 = r4 << 1 + r3 = r5 +L4: + r6 = CPyFloat_FromTagged(r3) + return r6 [case testI64IsinstanceNarrowing] from typing import Union @@ -1970,3 +2078,78 @@ L2: r6 = r5.a keep_alive x return r6 + +[case testI64ConvertBetweenTuples_64bit] +from __future__ import annotations +from mypy_extensions import i64 + +def f(t: tuple[int, i64, int]) -> None: + tt: tuple[int, i64, i64] = t + +def g(n: int) -> None: + t: tuple[i64, i64] = (1, n) +[out] +def f(t): + t :: tuple[int, int64, int] + r0 :: int + r1 :: int64 + r2 :: int + r3 :: native_int + r4 :: bit + r5, r6 :: int64 + r7 :: ptr + r8 :: c_ptr + r9 :: int64 + r10, tt :: tuple[int, int64, int64] +L0: + r0 = t[0] + r1 = t[1] + r2 = t[2] + r3 = r2 & 1 + r4 = r3 == 0 + if r4 goto L1 else goto L2 :: bool +L1: + r5 = r2 >> 1 + r6 = r5 + goto L3 +L2: + r7 = r2 ^ 1 + r8 = 
r7 + r9 = CPyLong_AsInt64(r8) + r6 = r9 + keep_alive r2 +L3: + r10 = (r0, r1, r6) + tt = r10 + return 1 +def g(n): + n :: int + r0 :: tuple[int, int] + r1 :: int + r2 :: native_int + r3 :: bit + r4, r5 :: int64 + r6 :: ptr + r7 :: c_ptr + r8 :: int64 + r9, t :: tuple[int64, int64] +L0: + r0 = (2, n) + r1 = r0[1] + r2 = r1 & 1 + r3 = r2 == 0 + if r3 goto L1 else goto L2 :: bool +L1: + r4 = r1 >> 1 + r5 = r4 + goto L3 +L2: + r6 = r1 ^ 1 + r7 = r6 + r8 = CPyLong_AsInt64(r7) + r5 = r8 + keep_alive r1 +L3: + r9 = (1, r5) + t = r9 + return 1 diff --git a/mypyc/test-data/run-bools.test b/mypyc/test-data/run-bools.test index 522296592c54..6d4244286185 100644 --- a/mypyc/test-data/run-bools.test +++ b/mypyc/test-data/run-bools.test @@ -221,3 +221,9 @@ def test_mixed_comparisons_i64() -> None: assert neq_mixed_i64(n, x) == (n != int(x)) assert lt_mixed_i64(x, n) == (int(x) < n) assert gt_mixed_i64(n, x) == (n > int(x)) + +[case testBoolMixInt] +y = False +print((y or 0) and True) +[out] +0 \ No newline at end of file diff --git a/mypyc/test-data/run-dicts.test b/mypyc/test-data/run-dicts.test index 41675e7fcc91..58b862e3f303 100644 --- a/mypyc/test-data/run-dicts.test +++ b/mypyc/test-data/run-dicts.test @@ -95,7 +95,13 @@ assert get_content_set(od) == ({1, 3}, {2, 4}, {(1, 2), (3, 4)}) [typing fixtures/typing-full.pyi] [case testDictIterationMethodsRun] -from typing import Dict +from typing import Dict, Union +from typing_extensions import TypedDict + +class ExtensionDict(TypedDict): + python: str + c: str + def print_dict_methods(d1: Dict[int, int], d2: Dict[int, int], d3: Dict[int, int]) -> None: @@ -107,13 +113,27 @@ def print_dict_methods(d1: Dict[int, int], for v in d3.values(): print(v) +def print_dict_methods_special(d1: Union[Dict[int, int], Dict[str, str]], + d2: ExtensionDict) -> None: + for k in d1.keys(): + print(k) + for k, v in d1.items(): + print(k) + print(v) + for v2 in d2.values(): + print(v2) + for k2, v2 in d2.items(): + print(k2) + print(v2) + + def clear_during_iter(d: Dict[int, int]) -> None: for k in d: d.clear() class Custom(Dict[int, int]): pass [file driver.py] -from native import print_dict_methods, Custom, clear_during_iter +from native import print_dict_methods, print_dict_methods_special, Custom, clear_during_iter from collections import OrderedDict print_dict_methods({}, {}, {}) print_dict_methods({1: 2}, {3: 4, 5: 6}, {7: 8}) @@ -124,6 +144,7 @@ print('==') d = OrderedDict([(1, 2), (3, 4)]) print_dict_methods(d, d, d) print('==') +print_dict_methods_special({1: 2}, {"python": ".py", "c": ".c"}) d.move_to_end(1) print_dict_methods(d, d, d) clear_during_iter({}) # OK @@ -185,6 +206,15 @@ else: 2 4 == +1 +1 +2 +.py +.c +python +.py +c +.c 3 1 3 diff --git a/mypyc/test-data/run-floats.test b/mypyc/test-data/run-floats.test index 1b67a1190cd8..2c101100549d 100644 --- a/mypyc/test-data/run-floats.test +++ b/mypyc/test-data/run-floats.test @@ -1,30 +1,516 @@ # Test cases for floats (compile and run) -[case testStrToFloat] +[case testFloatOps] +from __future__ import annotations +from typing import Any, cast +from typing_extensions import Final +from testutil import assertRaises, float_vals, FLOAT_MAGIC +import math + +def test_arithmetic() -> None: + zero = float(0.0) + one = zero + 1.0 + x = one + one / 2.0 + assert x == 1.5 + assert x - one == 0.5 + assert x * x == 2.25 + assert x / 2.0 == 0.75 + assert x * (-0.5) == -0.75 + assert -x == -1.5 + for x in float_vals: + assert repr(-x) == repr(getattr(x, "__neg__")()) + + for y in float_vals: + assert repr(x + y) == 
repr(getattr(x, "__add__")(y)) + assert repr(x - y) == repr(getattr(x, "__sub__")(y)) + assert repr(x * y) == repr(getattr(x, "__mul__")(y)) + if y != 0: + assert repr(x / y) == repr(getattr(x, "__truediv__")(y)) + +def test_mod() -> None: + zero = float(0.0) + one = zero + 1.0 + x = one + one / 2.0 + assert x % 0.4 == 0.29999999999999993 + assert (-x) % 0.4 == 0.10000000000000009 + assert x % -0.4 == -0.10000000000000009 + assert (-x) % -0.4 == -0.29999999999999993 + for x in float_vals: + for y in float_vals: + if y != 0: + assert repr(x % y) == repr(getattr(x, "__mod__")(y)) + +def test_floor_div() -> None: + for x in float_vals: + for y in float_vals: + if y != 0: + assert repr(x // y) == repr(getattr(x, "__floordiv__")(y)) + else: + with assertRaises(ZeroDivisionError, "float floor division by zero"): + x // y + +def test_mixed_arithmetic() -> None: + zf = float(0.0) + zn = int() + assert (zf + 5.5) + (zn + 1) == 6.5 + assert (zn - 2) - (zf - 5.5) == 3.5 + x = zf + 3.4 + x += zn + 2 + assert x == 5.4 + +def test_arithmetic_errors() -> None: + zero = float(0.0) + one = zero + 1.0 + with assertRaises(ZeroDivisionError, "float division by zero"): + print(one / zero) + with assertRaises(ZeroDivisionError, "float modulo"): + print(one % zero) + +def test_comparisons() -> None: + zero = float(0.0) + one = zero + 1.0 + x = one + one / 2.0 + assert x < (1.51 + zero) + assert not (x < (1.49 + zero)) + assert x > (1.49 + zero) + assert not (x > (1.51 + zero)) + assert x <= (1.5 + zero) + assert not (x <= (1.49 + zero)) + assert x >= (1.5 + zero) + assert not (x >= (1.51 + zero)) + for x in float_vals: + for y in float_vals: + assert (x <= y) == getattr(x, "__le__")(y) + assert (x < y) == getattr(x, "__lt__")(y) + assert (x >= y) == getattr(x, "__ge__")(y) + assert (x > y) == getattr(x, "__gt__")(y) + assert (x == y) == getattr(x, "__eq__")(y) + assert (x != y) == getattr(x, "__ne__")(y) + +def test_mixed_comparisons() -> None: + zf = float(0.0) + zn = int() + if (zf + 1.0) == (zn + 1): + assert True + else: + assert False + if (zf + 1.1) == (zn + 1): + assert False + else: + assert True + assert (zf + 1.1) != (zn + 1) + assert (zf + 1.1) > (zn + 1) + assert not (zf + 0.9) > (zn + 1) + assert (zn + 1) < (zf + 1.1) + +def test_boxing_and_unboxing() -> None: + x = 1.5 + boxed: Any = x + assert repr(boxed) == "1.5" + assert type(boxed) is float + y: float = boxed + assert y == x + boxed_int: Any = 5 + assert [type(boxed_int)] == [int] # Avoid mypy type narrowing + z: float = boxed_int + assert z == 5.0 + for xx in float_vals: + bb: Any = xx + yy: float = bb + assert repr(xx) == repr(bb) + assert repr(xx) == repr(yy) + for b in True, False: + boxed_bool: Any = b + assert type(boxed_bool) is bool + zz: float = boxed_bool + assert zz == int(b) + +def test_unboxing_failure() -> None: + boxed: Any = '1.5' + with assertRaises(TypeError): + x: float = boxed + +def identity(x: float) -> float: + return x + +def test_coerce_from_int_literal() -> None: + assert identity(34) == 34.0 + assert identity(-1) == -1.0 + +def test_coerce_from_short_tagged_int() -> None: + n = int() - 17 + assert identity(n) == -17.0 + for i in range(-300, 300): + assert identity(i) == float(i) + +def test_coerce_from_long_tagged_int() -> None: + n = int() + 2**100 + x = identity(n) + assert repr(x) == '1.2676506002282294e+30' + n = int() - 2**100 + y = identity(n) + assert repr(y) == '-1.2676506002282294e+30' + +def test_coerce_from_very_long_tagged_int() -> None: + n = int() + 10**1000 + with assertRaises(OverflowError, "int too 
large to convert to float"): + identity(n) + with assertRaises(OverflowError, "int too large to convert to float"): + identity(int(n)) + n = int() - 10**1000 + with assertRaises(OverflowError, "int too large to convert to float"): + identity(n) + with assertRaises(OverflowError, "int too large to convert to float"): + identity(int(n)) + +def test_explicit_conversion_from_int() -> None: + float_any: Any = float + a = [0, 1, 2, 3, -1, -2, 13257, -928745] + for n in range(1, 100): + for delta in -1, 0, 1, 2342345: + a.append(2**n + delta) + a.append(-2**n + delta) + for x in a: + assert repr(float(x)) == repr(float_any(x)) + +def test_explicit_conversion_to_int() -> None: + int_any: Any = int + for x in float_vals: + if math.isinf(x): + with assertRaises(OverflowError, "cannot convert float infinity to integer"): + int(x) + elif math.isnan(x): + with assertRaises(ValueError, "cannot convert float NaN to integer"): + int(x) + else: + assert repr(int(x)) == repr(int_any(x)) + + # Test some edge cases + assert 2**30 == int(2.0**30 + int()) + assert 2**30 - 1 == int(1073741823.9999999 + int()) # math.nextafter(2.0**30, 0)) + assert -2**30 - 1 == int(-2.0**30 - 1 + int()) + assert -2**30 == int(-1073741824.9999998 + int()) # math.nextafter(-2.0**30 - 1, 0) + assert 2**62 == int(2.0**62 + int()) + assert 2**62 == int(2.0**62 - 1 + int()) + assert -2**62 == int(-2.0**62 + int()) + assert -2**62 == int(-2.0**62 - 1 + int()) + def str_to_float(x: str) -> float: return float(x) -[file driver.py] -from native import str_to_float +def test_str_to_float() -> None: + assert str_to_float("1") == 1.0 + assert str_to_float("1.234567") == 1.234567 + assert str_to_float("44324") == 44324.0 + assert str_to_float("23.4") == 23.4 + assert str_to_float("-43.44e-4") == -43.44e-4 + assert str_to_float("-43.44e-4") == -43.44e-4 + assert math.isinf(str_to_float("inf")) + assert math.isinf(str_to_float("-inf")) + assert str_to_float("inf") > 0.0 + assert str_to_float("-inf") < 0.0 + assert math.isnan(str_to_float("nan")) + assert math.isnan(str_to_float("NaN")) + assert repr(str_to_float("-0.0")) == "-0.0" -assert str_to_float("1") == 1.0 -assert str_to_float("1.234567") == 1.234567 -assert str_to_float("44324") == 44324.0 -assert str_to_float("23.4") == 23.4 -assert str_to_float("-43.44e-4") == -43.44e-4 - -[case testFloatArithmetic] def test_abs() -> None: assert abs(0.0) == 0.0 assert abs(-1.234567) == 1.234567 assert abs(44324.732) == 44324.732 assert abs(-23.4) == 23.4 assert abs(-43.44e-4) == 43.44e-4 + abs_any: Any = abs + for x in float_vals: + assert repr(abs(x)) == repr(abs_any(x)) def test_float_min_max() -> None: - x: float = 20.0 - y: float = 30.0 - assert min(x, y) == 20.0 - assert min(y, x) == 20.0 - assert max(x, y) == 30.0 - assert max(y, x) == 30.0 + for x in float_vals: + for y in float_vals: + min_any: Any = min + assert repr(min(x, y)) == repr(min_any(x, y)) + max_any: Any = max + assert repr(max(x, y)) == repr(max_any(x, y)) + +def default(x: float = 2) -> float: + return x + 1 + +def test_float_default_value() -> None: + assert default(1.2) == 2.2 + for i in range(-200, 200): + assert default(float(i)) == i + 1 + assert default() == 3.0 + +def test_float_default_value_wrapper() -> None: + f: Any = default + assert f(1.2) == 2.2 + for i in range(-200, 200): + assert f(float(i)) == i + 1 + assert f() == 3.0 + +class C: + def __init__(self, x: float) -> None: + self.x = x + +def test_float_attr() -> None: + for i in range(-200, 200): + f = float(i) + c = C(f) + assert c.x == f + a: Any = c + assert 
a.x == f + c.x = FLOAT_MAGIC + assert c.x == FLOAT_MAGIC + assert a.x == FLOAT_MAGIC + a.x = 1.0 + assert a.x == 1.0 + a.x = FLOAT_MAGIC + assert a.x == FLOAT_MAGIC + +class D: + def __init__(self, x: float) -> None: + if x: + self.x = x + +def test_float_attr_maybe_undefned() -> None: + for i in range(-200, 200): + if i == 0: + d = D(0.0) + with assertRaises(AttributeError): + d.x + a: Any = d + with assertRaises(AttributeError): + a.x + d.x = FLOAT_MAGIC + assert d.x == FLOAT_MAGIC + assert a.x == FLOAT_MAGIC + d.x = 0.0 + assert d.x == 0.0 + assert a.x == 0.0 + a.x = FLOAT_MAGIC + assert a.x == FLOAT_MAGIC + d = D(0.0) + a = cast(Any, d) + a.x = FLOAT_MAGIC + assert d.x == FLOAT_MAGIC + else: + f = float(i) + d = D(f) + assert d.x == f + a2: Any = d + assert a2.x == f + +def f(x: float) -> float: + return x + 1 + +def test_return_values() -> None: + a: Any = f + for i in range(-200, 200): + x = float(i) + assert f(x) == x + 1 + assert a(x) == x + 1 + for x in float_vals: + if not math.isnan(x): + assert f(x) == x + 1 + else: + assert math.isnan(f(x)) + +def exc() -> float: + raise IndexError('x') + +def test_exception() -> None: + with assertRaises(IndexError): + exc() + a: Any = exc + with assertRaises(IndexError): + a() + +def test_undefined_local_var() -> None: + if not int(): + x = -113.0 + assert x == -113.0 + if int(): + y = -113.0 + with assertRaises(UnboundLocalError, 'local variable "y" referenced before assignment'): + print(y) + if not int(): + x2 = -1.0 + assert x2 == -1.0 + if int(): + y2 = -1.0 + with assertRaises(UnboundLocalError, 'local variable "y2" referenced before assignment'): + print(y2) + +def test_tuples() -> None: + t1: tuple[float, float] = (1.5, 2.5) + assert t1 == tuple([1.5, 2.5]) + n = int() + 5 + t2: tuple[float, float, float, float] = (n, 1.5, -7, -113) + assert t2 == tuple([5.0, 1.5, -7.0, -113.0]) + +[case testFloatGlueMethodsAndInheritance] +from typing import Any +from typing_extensions import Final + +from mypy_extensions import trait + +from testutil import assertRaises + +MAGIC: Final = -113.0 + +class Base: + def foo(self) -> float: + return 5.0 + + def bar(self, x: float = 2.0) -> float: + return x + 1 + + def hoho(self, x: float) -> float: + return x - 1 + +class Derived(Base): + def foo(self, x: float = 5.0) -> float: + return x + 10 + + def bar(self, x: float = 3, y: float = 20) -> float: + return x + y + 2 + + def hoho(self, x: float = 7) -> float: + return x - 2 + +def test_derived_adds_bitmap() -> None: + b: Base = Derived() + assert b.foo() == 15 + +def test_derived_adds_another_default_arg() -> None: + b: Base = Derived() + assert b.bar() == 25 + assert b.bar(1) == 23 + assert b.bar(MAGIC) == MAGIC + 22 + +def test_derived_switches_arg_to_have_default() -> None: + b: Base = Derived() + assert b.hoho(5) == 3 + assert b.hoho(MAGIC) == MAGIC - 2 + +@trait +class T: + @property + def x(self) -> float: ... + @property + def y(self) -> float: ... 
+ +class C(T): + x: float = 1.0 + y: float = 4 + +def test_read_only_property_in_trait_implemented_as_attribute() -> None: + c = C() + c.x = 5.5 + assert c.x == 5.5 + c.x = MAGIC + assert c.x == MAGIC + assert c.y == 4 + c.y = 6.5 + assert c.y == 6.5 + t: T = C() + assert t.y == 4 + t = c + assert t.x == MAGIC + c.x = 55.5 + assert t.x == 55.5 + assert t.y == 6.5 + a: Any = c + assert a.x == 55.5 + assert a.y == 6.5 + a.x = 7.0 + a.y = 8.0 + assert a.x == 7 + assert a.y == 8 + +class D(T): + xx: float + + @property + def x(self) -> float: + return self.xx + + @property + def y(self) -> float: + raise TypeError + +def test_read_only_property_in_trait_implemented_as_property() -> None: + d = D() + d.xx = 5.0 + assert d.x == 5 + d.xx = MAGIC + assert d.x == MAGIC + with assertRaises(TypeError): + d.y + t: T = d + assert t.x == MAGIC + d.xx = 6.0 + assert t.x == 6 + with assertRaises(TypeError): + t.y + +@trait +class T2: + x: float + y: float + +class C2(T2): + pass + +def test_inherit_trait_attribute() -> None: + c = C2() + c.x = 5.0 + assert c.x == 5 + c.x = MAGIC + assert c.x == MAGIC + with assertRaises(AttributeError): + c.y + c.y = 6.0 + assert c.y == 6.0 + t: T2 = C2() + with assertRaises(AttributeError): + t.y + t = c + assert t.x == MAGIC + c.x = 55.0 + assert t.x == 55 + assert t.y == 6 + a: Any = c + assert a.x == 55 + assert a.y == 6 + a.x = 7.0 + a.y = 8.0 + assert a.x == 7 + assert a.y == 8 + +class D2(T2): + x: float + y: float = 4 + +def test_implement_trait_attribute() -> None: + d = D2() + d.x = 5.0 + assert d.x == 5 + d.x = MAGIC + assert d.x == MAGIC + assert d.y == 4 + d.y = 6.0 + assert d.y == 6 + t: T2 = D2() + assert t.y == 4 + t = d + assert t.x == MAGIC + d.x = 55.0 + assert t.x == 55 + assert t.y == 6 + a: Any = d + assert a.x == 55 + assert a.y == 6 + a.x = 7.0 + a.y = 8.0 + assert a.x == 7 + assert a.y == 8 diff --git a/mypyc/test-data/run-i64.test b/mypyc/test-data/run-i64.test index cd4ac19532d2..bcde39fed5ff 100644 --- a/mypyc/test-data/run-i64.test +++ b/mypyc/test-data/run-i64.test @@ -315,7 +315,8 @@ def test_explicit_conversion_from_float() -> None: assert from_float(0.0) == 0 assert from_float(1.456) == 1 assert from_float(-1234.567) == -1234 - assert from_float(2**63 - 1) == 2**63 - 1 + # Subtract 1024 due to limited precision of 64-bit floats + assert from_float(2**63 - 1024) == 2**63 - 1024 assert from_float(-2**63) == -2**63 # The error message could be better, but this is acceptable with assertRaises(OverflowError, "int too large to convert to i64"): diff --git a/mypyc/test-data/run-imports.test b/mypyc/test-data/run-imports.test index c6d5bdb3d864..c5839d57820e 100644 --- a/mypyc/test-data/run-imports.test +++ b/mypyc/test-data/run-imports.test @@ -2,6 +2,8 @@ [case testImports] import testmodule +import pkg2.mod +import pkg2.mod2 as mm2 def f(x: int) -> int: return testmodule.factorial(5) @@ -13,15 +15,21 @@ def g(x: int) -> int: def test_import_basics() -> None: assert f(5) == 120 assert g(5) == 5 + assert "pkg2.mod" not in globals(), "the root module should be in globals!" + assert pkg2.mod.x == 1 + assert "mod2" not in globals(), "pkg2.mod2 is aliased to mm2!" + assert mm2.y == 2 def test_import_submodule_within_function() -> None: import pkg.mod assert pkg.x == 1 assert pkg.mod.y == 2 + assert "pkg.mod" not in globals(), "the root module should be in globals!" def test_import_as_submodule_within_function() -> None: import pkg.mod as mm assert mm.y == 2 + assert "pkg.mod" not in globals(), "the root module should be in globals!" 
# TODO: Don't add local imports to globals() # @@ -57,6 +65,11 @@ def foo(x: int) -> int: x = 1 [file pkg/mod.py] y = 2 +[file pkg2/__init__.py] +[file pkg2/mod.py] +x = 1 +[file pkg2/mod2.py] +y = 2 [file nob.py] z = 3 @@ -192,3 +205,61 @@ a.x = 10 x = 20 [file driver.py] import native + +[case testLazyImport] +import shared + +def do_import() -> None: + import a + +assert shared.counter == 0 +do_import() +assert shared.counter == 1 + +[file a.py] +import shared +shared.counter += 1 + +[file shared.py] +counter = 0 + +[case testDelayedImport] +import a +print("inbetween") +import b + +[file a.py] +print("first") + +[file b.py] +print("last") + +[out] +first +inbetween +last + +[case testImportErrorLineNumber] +try: + import enum + import dataclasses, missing # type: ignore[import] +except ImportError as e: + line = e.__traceback__.tb_lineno # type: ignore[attr-defined] + assert line == 3, f"traceback's line number is {line}, expected 3" + +[case testImportGroupIsolation] +def func() -> None: + import second + +import first +func() + +[file first.py] +print("first") + +[file second.py] +print("second") + +[out] +first +second diff --git a/mypyc/test-data/run-integers.test b/mypyc/test-data/run-integers.test index c65f36110b46..d575e141b567 100644 --- a/mypyc/test-data/run-integers.test +++ b/mypyc/test-data/run-integers.test @@ -173,6 +173,7 @@ assert test_isinstance_int_and_not_bool(1) == True [case testIntOps] from typing import Any +from testutil import assertRaises def check_and(x: int, y: int) -> None: # eval() can be trusted to calculate expected result @@ -390,7 +391,7 @@ def test_no_op_conversion() -> None: for x in 1, 55, -1, -7, 1 << 50, 1 << 101, -(1 << 50), -(1 << 101): assert no_op_conversion(x) == x -def test_divide() -> None: +def test_floor_divide() -> None: for x in range(-100, 100): for y in range(-100, 100): if y != 0: @@ -470,6 +471,25 @@ def test_floor_divide_by_literal() -> None: assert div_by_3(i) == i_boxed // int('3') assert div_by_4(i) == i_boxed // int('4') +def test_true_divide() -> None: + for x in range(-150, 100): + for y in range(-150, 100): + if y != 0: + assert x / y == getattr(x, "__truediv__")(y) + large1 = (123 + int())**123 + large2 = (121 + int())**121 + assert large1 / large2 == getattr(large1, "__truediv__")(large2) + assert large1 / 135 == getattr(large1, "__truediv__")(135) + assert large1 / -2 == getattr(large1, "__truediv__")(-2) + assert 17 / large2 == getattr(17, "__truediv__")(large2) + + huge = 10**1000 + int() + with assertRaises(OverflowError, "integer division result too large for a float"): + huge / 2 + with assertRaises(OverflowError, "integer division result too large for a float"): + huge / -2 + assert 1 / huge == 0.0 + [case testIntMinMax] def test_int_min_max() -> None: x: int = 200 diff --git a/mypyc/test-data/run-math.test b/mypyc/test-data/run-math.test new file mode 100644 index 000000000000..64d5c1812afa --- /dev/null +++ b/mypyc/test-data/run-math.test @@ -0,0 +1,88 @@ +# Test cases for the math module (compile and run) + +[case testMathOps] +from typing import Any, Callable +from typing_extensions import Final +import math +from testutil import assertRaises, float_vals, assertDomainError, assertMathRangeError + +pymath: Any = math + +def validate_one_arg(test: Callable[[float], float], validate: Callable[[float], float]) -> None: + """Ensure that test and validate behave the same for various float args.""" + for x in float_vals: + try: + expected = validate(x) + except Exception as e: + try: + test(x) + assert False, f"no 
exception raised for {x!r}, expected {e!r}" + except Exception as e2: + assert repr(e) == repr(e2), f"actual for {x!r}: {e2!r}, expected: {e!r}" + continue + actual = test(x) + assert repr(actual) == repr(expected), ( + f"actual for {x!r}: {actual!r}, expected {expected!r}") + +def validate_two_arg(test: Callable[[float, float], float], + validate: Callable[[float, float], float]) -> None: + """Ensure that test and validate behave the same for various float args.""" + for x in float_vals: + for y in float_vals: + args = f"({x!r}, {y!r})" + try: + expected = validate(x, y) + except Exception as e: + try: + test(x, y) + assert False, f"no exception raised for {args}, expected {e!r}" + except Exception as e2: + assert repr(e) == repr(e2), f"actual for {args}: {e2!r}, expected: {e!r}" + continue + try: + actual = test(x, y) + except Exception as e: + assert False, f"no exception expected for {args}, got {e!r}" + assert repr(actual) == repr(expected), ( + f"actual for {args}: {actual!r}, expected {expected!r}") + +def test_sqrt() -> None: + validate_one_arg(lambda x: math.sqrt(x), pymath.sqrt) + +def test_sin() -> None: + validate_one_arg(lambda x: math.sin(x), pymath.sin) + +def test_cos() -> None: + validate_one_arg(lambda x: math.cos(x), pymath.cos) + +def test_tan() -> None: + validate_one_arg(lambda x: math.tan(x), pymath.tan) + +def test_exp() -> None: + validate_one_arg(lambda x: math.exp(x), pymath.exp) + +def test_log() -> None: + validate_one_arg(lambda x: math.log(x), pymath.log) + +def test_floor() -> None: + validate_one_arg(lambda x: math.floor(x), pymath.floor) + +def test_ceil() -> None: + validate_one_arg(lambda x: math.ceil(x), pymath.ceil) + +def test_fabs() -> None: + validate_one_arg(lambda x: math.fabs(x), pymath.fabs) + +def test_pow() -> None: + validate_two_arg(lambda x, y: math.pow(x, y), pymath.pow) + +def test_copysign() -> None: + validate_two_arg(lambda x, y: math.copysign(x, y), pymath.copysign) + +def test_isinf() -> None: + for x in float_vals: + assert repr(math.isinf(x)) == repr(pymath.isinf(x)) + +def test_isnan() -> None: + for x in float_vals: + assert repr(math.isnan(x)) == repr(pymath.isnan(x)) diff --git a/mypyc/test-data/run-multimodule.test b/mypyc/test-data/run-multimodule.test index 418af66ba060..70c73dc2088b 100644 --- a/mypyc/test-data/run-multimodule.test +++ b/mypyc/test-data/run-multimodule.test @@ -11,21 +11,23 @@ -- about how this is specified (e.g. .2 file name suffixes). [case testMultiModulePackage] -from p.other import g +from p.other import g, _i as i def f(x: int) -> int: from p.other import h - return h(g(x + 1)) + return i(h(g(x + 1))) [file p/__init__.py] [file p/other.py] def g(x: int) -> int: return x + 2 def h(x: int) -> int: return x + 1 +def _i(x: int) -> int: + return x + 3 [file driver.py] import native from native import f from p.other import g -assert f(3) == 7 +assert f(3) == 10 assert g(2) == 4 try: f(1.1) diff --git a/mypyc/test-data/run-strings.test b/mypyc/test-data/run-strings.test index 4a20c13ce789..be668435d073 100644 --- a/mypyc/test-data/run-strings.test +++ b/mypyc/test-data/run-strings.test @@ -232,8 +232,7 @@ def test_fstring_basics() -> None: x = bytes([1, 2, 3, 4]) # assert f'bytes: {x}' == "bytes: b'\\x01\\x02\\x03\\x04'" - # error: On Python 3 formatting "b'abc'" with "{}" produces "b'abc'", not "abc"; - # use "{!r}" if this is desired behavior behavior + # error: If x = b'abc' then f"{x}" or "{}".format(x) produces "b'abc'", not "abc". If this is desired behavior, use f"{x!r}" or "{!r}".format(x). 
Otherwise, decode the bytes float_num = 123.4 assert f'{float_num}' == '123.4' diff --git a/mypyc/test/test_emit.py b/mypyc/test/test_emit.py index 7351cd7fb13e..54bf4eef3c74 100644 --- a/mypyc/test/test_emit.py +++ b/mypyc/test/test_emit.py @@ -22,6 +22,16 @@ def test_reg(self) -> None: emitter = Emitter(self.context, names) assert emitter.reg(self.n) == "cpy_r_n" + def test_object_annotation(self) -> None: + emitter = Emitter(self.context, {}) + assert emitter.object_annotation("hello, world", "line;") == " /* 'hello, world' */" + assert ( + emitter.object_annotation(list(range(30)), "line;") + == """\ + /* [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, + 23, 24, 25, 26, 27, 28, 29] */""" + ) + def test_emit_line(self) -> None: emitter = Emitter(self.context, {}) emitter.emit_line("line;") @@ -29,3 +39,13 @@ def test_emit_line(self) -> None: emitter.emit_line("f();") emitter.emit_line("}") assert emitter.fragments == ["line;\n", "a {\n", " f();\n", "}\n"] + emitter = Emitter(self.context, {}) + emitter.emit_line("CPyStatics[0];", ann="hello, world") + emitter.emit_line("CPyStatics[1];", ann=list(range(30))) + assert emitter.fragments[0] == "CPyStatics[0]; /* 'hello, world' */\n" + assert ( + emitter.fragments[1] + == """\ +CPyStatics[1]; /* [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, + 21, 22, 23, 24, 25, 26, 27, 28, 29] */\n""" + ) diff --git a/mypyc/test/test_irbuild.py b/mypyc/test/test_irbuild.py index cb5e690eed55..86bdf7c590d8 100644 --- a/mypyc/test/test_irbuild.py +++ b/mypyc/test/test_irbuild.py @@ -31,6 +31,7 @@ "irbuild-set.test", "irbuild-str.test", "irbuild-bytes.test", + "irbuild-float.test", "irbuild-statements.test", "irbuild-nested.test", "irbuild-classes.test", diff --git a/mypyc/test/test_run.py b/mypyc/test/test_run.py index cd4ea8396cce..dc054ac9002f 100644 --- a/mypyc/test/test_run.py +++ b/mypyc/test/test_run.py @@ -42,6 +42,7 @@ "run-i64.test", "run-i32.test", "run-floats.test", + "run-math.test", "run-bools.test", "run-strings.test", "run-bytes.test", @@ -141,9 +142,9 @@ class TestRun(MypycDataSuite): def run_case(self, testcase: DataDrivenTestCase) -> None: # setup.py wants to be run from the root directory of the package, which we accommodate # by chdiring into tmp/ - with use_custom_builtins(os.path.join(self.data_prefix, ICODE_GEN_BUILTINS), testcase), ( - chdir_manager("tmp") - ): + with use_custom_builtins( + os.path.join(self.data_prefix, ICODE_GEN_BUILTINS), testcase + ), chdir_manager("tmp"): self.run_case_inner(testcase) def run_case_inner(self, testcase: DataDrivenTestCase) -> None: @@ -240,7 +241,7 @@ def run_case_step(self, testcase: DataDrivenTestCase, incremental_step: int) -> groups=groups, alt_lib_path=".", ) - errors = Errors() + errors = Errors(options) ir, cfiles = emitmodule.compile_modules_to_c( result, compiler_options=compiler_options, errors=errors, groups=groups ) diff --git a/mypyc/test/test_typeops.py b/mypyc/test/test_typeops.py index f414edd1a2bb..0d9860d88ffe 100644 --- a/mypyc/test/test_typeops.py +++ b/mypyc/test/test_typeops.py @@ -54,6 +54,13 @@ def test_bool(self) -> None: assert not is_runtime_subtype(bool_rprimitive, bit_rprimitive) assert not is_runtime_subtype(bool_rprimitive, int_rprimitive) + def test_union(self) -> None: + bool_int_mix = RUnion([bool_rprimitive, int_rprimitive]) + assert not is_runtime_subtype(bool_int_mix, short_int_rprimitive) + assert not is_runtime_subtype(bool_int_mix, int_rprimitive) + assert not 
is_runtime_subtype(short_int_rprimitive, bool_int_mix) + assert not is_runtime_subtype(int_rprimitive, bool_int_mix) + class TestUnionSimplification(unittest.TestCase): def test_simple_type_result(self) -> None: diff --git a/mypyc/test/testutil.py b/mypyc/test/testutil.py index 609ffc27385e..796811a6363c 100644 --- a/mypyc/test/testutil.py +++ b/mypyc/test/testutil.py @@ -121,7 +121,7 @@ def build_ir_for_single_file2( if result.errors: raise CompileError(result.errors) - errors = Errors() + errors = Errors(options) modules = build_ir( [result.files["__main__"]], result.graph, diff --git a/mypyc/transform/exceptions.py b/mypyc/transform/exceptions.py index 2851955ff38f..bf5e60659f8f 100644 --- a/mypyc/transform/exceptions.py +++ b/mypyc/transform/exceptions.py @@ -23,6 +23,7 @@ Branch, CallC, ComparisonOp, + Float, GetAttr, Integer, LoadErrorValue, @@ -33,7 +34,7 @@ TupleGet, Value, ) -from mypyc.ir.rtypes import RTuple, bool_rprimitive +from mypyc.ir.rtypes import RTuple, bool_rprimitive, is_float_rprimitive from mypyc.primitives.exc_ops import err_occurred_op from mypyc.primitives.registry import CFunctionDescription @@ -173,7 +174,11 @@ def insert_overlapping_error_value_check(ops: list[Op], target: Value) -> Compar ops.append(item) return insert_overlapping_error_value_check(ops, item) else: - errvalue = Integer(int(typ.c_undefined), rtype=typ) + errvalue: Value + if is_float_rprimitive(target.type): + errvalue = Float(float(typ.c_undefined)) + else: + errvalue = Integer(int(typ.c_undefined), rtype=typ) op = ComparisonOp(target, errvalue, ComparisonOp.EQ) ops.append(op) return op diff --git a/mypyc/transform/refcount.py b/mypyc/transform/refcount.py index 13f6a121e7f1..f2ab438f6576 100644 --- a/mypyc/transform/refcount.py +++ b/mypyc/transform/refcount.py @@ -70,7 +70,7 @@ def insert_ref_count_opcodes(ir: FuncIR) -> None: defined = analyze_must_defined_regs(ir.blocks, cfg, args, values, strict_errors=True) ordering = make_value_ordering(ir) cache: BlockCache = {} - for block in ir.blocks[:]: + for block in ir.blocks.copy(): if isinstance(block.ops[-1], (Branch, Goto)): insert_branch_inc_and_decrefs( block, diff --git a/pyproject.toml b/pyproject.toml index 328b9bf159a1..20301bf64216 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -13,7 +13,7 @@ requires = [ # the following is from build-requirements.txt "types-psutil", "types-setuptools", - "types-typed-ast>=1.5.8,<1.6.0", + "types-typed-ast>=1.5.8.5,<1.6.0", ] build-backend = "setuptools.build_meta" diff --git a/pytest.ini b/pytest.ini index b164c14b6414..a123b0f11328 100644 --- a/pytest.ini +++ b/pytest.ini @@ -9,7 +9,7 @@ python_files = test*.py # logic by implementing `pytest_pycollect_makeitem` in mypy.test.data; # the test files import that module, and pytest sees the magic name # and invokes it at the relevant moment. 
See -# http://doc.pytest.org/en/latest/writing_plugins.html#collection-hooks +# https://doc.pytest.org/en/latest/how-to/writing_plugins.html#collection-hooks # Both our plugin and unittest provide their own collection logic, # So we can disable the default python collector by giving it empty diff --git a/runtests.py b/runtests.py index ade0a8adee5e..f99fe5dc8b16 100755 --- a/runtests.py +++ b/runtests.py @@ -129,7 +129,7 @@ def main() -> None: exit(1) if not args: - args = DEFAULT_COMMANDS[:] + args = DEFAULT_COMMANDS.copy() status = 0 diff --git a/setup.py b/setup.py index 5d5ea06fb714..061bb9ddf5b5 100644 --- a/setup.py +++ b/setup.py @@ -202,7 +202,7 @@ def run(self): long_description=long_description, author="Jukka Lehtosalo", author_email="jukka.lehtosalo@iki.fi", - url="http://www.mypy-lang.org/", + url="https://www.mypy-lang.org/", license="MIT License", py_modules=[], ext_modules=ext_modules, @@ -236,7 +236,7 @@ def run(self): python_requires=">=3.7", include_package_data=True, project_urls={ - "News": "http://mypy-lang.org/news.html", + "News": "https://mypy-lang.org/news.html", "Documentation": "https://mypy.readthedocs.io/en/stable/index.html", "Repository": "https://github.com/python/mypy", }, diff --git a/test-data/unit/check-assert-type-fail.test b/test-data/unit/check-assert-type-fail.test new file mode 100644 index 000000000000..2811e71978c8 --- /dev/null +++ b/test-data/unit/check-assert-type-fail.test @@ -0,0 +1,28 @@ +[case testAssertTypeFail1] +import typing +import array as arr +class array: + pass +def f(si: arr.array[int]): + typing.assert_type(si, array) # E: Expression is of type "array.array[int]", not "__main__.array" +[builtins fixtures/tuple.pyi] + +[case testAssertTypeFail2] +import typing +import array as arr +class array: + class array: + i = 1 +def f(si: arr.array[int]): + typing.assert_type(si, array.array) # E: Expression is of type "array.array[int]", not "__main__.array.array" +[builtins fixtures/tuple.pyi] + +[case testAssertTypeFail3] +import typing +import array as arr +class array: + class array: + i = 1 +def f(si: arr.array[int]): + typing.assert_type(si, int) # E: Expression is of type "array[int]", not "int" +[builtins fixtures/tuple.pyi] \ No newline at end of file diff --git a/test-data/unit/check-async-await.test b/test-data/unit/check-async-await.test index 7356fa59c86d..83a66ef4a815 100644 --- a/test-data/unit/check-async-await.test +++ b/test-data/unit/check-async-await.test @@ -944,6 +944,49 @@ async def bar(x: Union[A, B]) -> None: [builtins fixtures/async_await.pyi] [typing fixtures/typing-async.pyi] +[case testAsyncIteratorWithIgnoredErrors] +from m import L + +async def func(l: L) -> None: + reveal_type(l.get_iterator) # N: Revealed type is "def () -> typing.AsyncIterator[builtins.str]" + reveal_type(l.get_iterator2) # N: Revealed type is "def () -> typing.AsyncIterator[builtins.str]" + async for i in l.get_iterator(): + reveal_type(i) # N: Revealed type is "builtins.str" + +[file m.py] +# mypy: ignore-errors=True +from typing import AsyncIterator + +class L: + async def some_func(self, i: int) -> str: + return 'x' + + async def get_iterator(self) -> AsyncIterator[str]: + yield await self.some_func(0) + + async def get_iterator2(self) -> AsyncIterator[str]: + if self: + a = (yield 'x') + +[builtins fixtures/async_await.pyi] +[typing fixtures/typing-async.pyi] + +[case testAsyncIteratorWithIgnoredErrorsAndYieldFrom] +from m import L + +async def func(l: L) -> None: + reveal_type(l.get_iterator) + +[file m.py] +# mypy: 
ignore-errors=True +from typing import AsyncIterator + +class L: + async def get_iterator(self) -> AsyncIterator[str]: + yield from ['x'] # E: "yield from" in async function +[builtins fixtures/async_await.pyi] +[typing fixtures/typing-async.pyi] + [case testInvalidComprehensionNoCrash] # flags: --show-error-codes async def foo(x: int) -> int: ... @@ -959,3 +1002,45 @@ async def good() -> None: y = [await foo(x) for x in [1, 2, 3]] # OK [builtins fixtures/async_await.pyi] [typing fixtures/typing-async.pyi] + +[case testNestedAsyncFunctionAndTypeVarAvalues] +from typing import TypeVar + +T = TypeVar('T', int, str) + +def f(x: T) -> None: + async def g() -> T: + return x +[builtins fixtures/async_await.pyi] +[typing fixtures/typing-async.pyi] + +[case testNestedAsyncGeneratorAndTypeVarAvalues] +from typing import AsyncGenerator, TypeVar + +T = TypeVar('T', int, str) + +def f(x: T) -> None: + async def g() -> AsyncGenerator[T, None]: + yield x +[builtins fixtures/async_await.pyi] +[typing fixtures/typing-async.pyi] + +[case testNestedDecoratedCoroutineAndTypeVarValues] +from typing import Generator, TypeVar +from types import coroutine + +T = TypeVar('T', int, str) + +def f(x: T) -> None: + @coroutine + def inner() -> Generator[T, None, None]: + yield x + reveal_type(inner) # N: Revealed type is "def () -> typing.AwaitableGenerator[builtins.int, None, None, typing.Generator[builtins.int, None, None]]" \ + # N: Revealed type is "def () -> typing.AwaitableGenerator[builtins.str, None, None, typing.Generator[builtins.str, None, None]]" + +@coroutine +def coro() -> Generator[int, None, None]: + yield 1 +reveal_type(coro) # N: Revealed type is "def () -> typing.AwaitableGenerator[builtins.int, None, None, typing.Generator[builtins.int, None, None]]" +[builtins fixtures/async_await.pyi] +[typing fixtures/typing-async.pyi] diff --git a/test-data/unit/check-classes.test b/test-data/unit/check-classes.test index da95674ed08f..6476ad1566dc 100644 --- a/test-data/unit/check-classes.test +++ b/test-data/unit/check-classes.test @@ -387,7 +387,7 @@ main:7: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#incom [case testEqMethodsOverridingWithNonObjects] class A: def __eq__(self, other: A) -> bool: pass # Fail -[builtins fixtures/attr.pyi] +[builtins fixtures/plugin_attrs.pyi] [out] main:2: error: Argument 1 of "__eq__" is incompatible with supertype "object"; supertype defines the argument type as "object" main:2: note: This violates the Liskov substitution principle @@ -1395,6 +1395,13 @@ main:8: note: def f(cls) -> None main:8: note: Subclass: main:8: note: def f(self) -> None +[case testClassMethodAndStaticMethod] +class C: + @classmethod # E: Cannot have both classmethod and staticmethod + @staticmethod + def foo(cls) -> None: pass +[builtins fixtures/classmethod.pyi] + -- Properties -- ---------- @@ -3464,9 +3471,9 @@ class ProUser(User): pass class BasicUser(User): pass U = TypeVar('U', bound=Union[ProUser, BasicUser]) def process(cls: Type[U]): - cls.foo() # E: "Type[U]" has no attribute "foo" + cls.foo() obj = cls() - cls.bar(obj) # E: "Type[U]" has no attribute "bar" + cls.bar(obj) cls.mro() # Defined in class type cls.error # E: "Type[U]" has no attribute "error" [builtins fixtures/classmethod.pyi] @@ -7797,3 +7804,58 @@ class Element(Generic[_T]): class Bar(Foo): ... 
e: Element[Bar] reveal_type(e.elements) # N: Revealed type is "typing.Sequence[__main__.Element[__main__.Bar]]" + +[case testIterableUnpackingWithGetAttr] +from typing import Union, Tuple + +class C: + def __getattr__(self, name): + pass + +class D: + def f(self) -> C: + return C() + + def g(self) -> None: + # iter(x) looks up `__iter__` on the type of x rather than x itself, + # so this is correct behaviour. + # Instances of C should not be treated as being iterable, + # despite having a __getattr__ method + # that could allow for arbitrary attributes to be accessed on instances, + # since `type(C()).__iter__` still raises AttributeError at runtime, + # and that's what matters. + a, b = self.f() # E: "C" has no attribute "__iter__" (not iterable) +[builtins fixtures/tuple.pyi] + +[case testUsingNumbersType] +from numbers import Number, Complex, Real, Rational, Integral + +def f1(x: Number) -> None: pass +f1(1) # E: Argument 1 to "f1" has incompatible type "int"; expected "Number" \ + # N: Types from "numbers" aren't supported for static type checking \ + # N: See https://peps.python.org/pep-0484/#the-numeric-tower \ + # N: Consider using a protocol instead, such as typing.SupportsFloat + +def f2(x: Complex) -> None: pass +f2(1) # E: Argument 1 to "f2" has incompatible type "int"; expected "Complex" \ + # N: Types from "numbers" aren't supported for static type checking \ + # N: See https://peps.python.org/pep-0484/#the-numeric-tower \ + # N: Consider using a protocol instead, such as typing.SupportsFloat + +def f3(x: Real) -> None: pass +f3(1) # E: Argument 1 to "f3" has incompatible type "int"; expected "Real" \ + # N: Types from "numbers" aren't supported for static type checking \ + # N: See https://peps.python.org/pep-0484/#the-numeric-tower \ + # N: Consider using a protocol instead, such as typing.SupportsFloat + +def f4(x: Rational) -> None: pass +f4(1) # E: Argument 1 to "f4" has incompatible type "int"; expected "Rational" \ + # N: Types from "numbers" aren't supported for static type checking \ + # N: See https://peps.python.org/pep-0484/#the-numeric-tower \ + # N: Consider using a protocol instead, such as typing.SupportsFloat + +def f5(x: Integral) -> None: pass +f5(1) # E: Argument 1 to "f5" has incompatible type "int"; expected "Integral" \ + # N: Types from "numbers" aren't supported for static type checking \ + # N: See https://peps.python.org/pep-0484/#the-numeric-tower \ + # N: Consider using a protocol instead, such as typing.SupportsFloat diff --git a/test-data/unit/check-ctypes.test b/test-data/unit/check-ctypes.test index beb1afd779c0..1eefdd3c66c1 100644 --- a/test-data/unit/check-ctypes.test +++ b/test-data/unit/check-ctypes.test @@ -7,7 +7,7 @@ class MyCInt(ctypes.c_int): intarr4 = ctypes.c_int * 4 a = intarr4(1, ctypes.c_int(2), MyCInt(3), 4) intarr4(1, 2, 3, "invalid") # E: Array constructor argument 4 of type "str" is not convertible to the array element type "c_int" -reveal_type(a) # N: Revealed type is "ctypes.Array[ctypes.c_int]" +reveal_type(a) # N: Revealed type is "_ctypes.Array[ctypes.c_int]" reveal_type(a[0]) # N: Revealed type is "builtins.int" reveal_type(a[1:3]) # N: Revealed type is "builtins.list[builtins.int]" a[0] = 42 @@ -33,7 +33,7 @@ myintarr4 = MyCInt * 4 mya = myintarr4(1, 2, MyCInt(3), 4) myintarr4(1, ctypes.c_int(2), MyCInt(3), "invalid") # E: Array constructor argument 2 of type "c_int" is not convertible to the array element type "MyCInt" \ # E: Array constructor argument 4 of type "str" is not convertible to the array element type 
"MyCInt" -reveal_type(mya) # N: Revealed type is "ctypes.Array[__main__.MyCInt]" +reveal_type(mya) # N: Revealed type is "_ctypes.Array[__main__.MyCInt]" reveal_type(mya[0]) # N: Revealed type is "__main__.MyCInt" reveal_type(mya[1:3]) # N: Revealed type is "builtins.list[__main__.MyCInt]" mya[0] = 42 @@ -63,7 +63,7 @@ class MyCInt(ctypes.c_int): pass mya: ctypes.Array[Union[MyCInt, ctypes.c_uint]] -reveal_type(mya) # N: Revealed type is "ctypes.Array[Union[__main__.MyCInt, ctypes.c_uint]]" +reveal_type(mya) # N: Revealed type is "_ctypes.Array[Union[__main__.MyCInt, ctypes.c_uint]]" reveal_type(mya[0]) # N: Revealed type is "Union[__main__.MyCInt, builtins.int]" reveal_type(mya[1:3]) # N: Revealed type is "builtins.list[Union[__main__.MyCInt, builtins.int]]" # The behavior here is not strictly correct, but intentional. @@ -161,10 +161,10 @@ intarr4 = ctypes.c_int * 4 intarr6 = ctypes.c_int * 6 int_values = [1, 2, 3, 4] c_int_values = [ctypes.c_int(1), ctypes.c_int(2), ctypes.c_int(3), ctypes.c_int(4)] -reveal_type(intarr4(*int_values)) # N: Revealed type is "ctypes.Array[ctypes.c_int]" -reveal_type(intarr4(*c_int_values)) # N: Revealed type is "ctypes.Array[ctypes.c_int]" -reveal_type(intarr6(1, ctypes.c_int(2), *int_values)) # N: Revealed type is "ctypes.Array[ctypes.c_int]" -reveal_type(intarr6(1, ctypes.c_int(2), *c_int_values)) # N: Revealed type is "ctypes.Array[ctypes.c_int]" +reveal_type(intarr4(*int_values)) # N: Revealed type is "_ctypes.Array[ctypes.c_int]" +reveal_type(intarr4(*c_int_values)) # N: Revealed type is "_ctypes.Array[ctypes.c_int]" +reveal_type(intarr6(1, ctypes.c_int(2), *int_values)) # N: Revealed type is "_ctypes.Array[ctypes.c_int]" +reveal_type(intarr6(1, ctypes.c_int(2), *c_int_values)) # N: Revealed type is "_ctypes.Array[ctypes.c_int]" [typing fixtures/typing-medium.pyi] float_values = [1.0, 2.0, 3.0, 4.0] diff --git a/test-data/unit/check-dataclass-transform.test b/test-data/unit/check-dataclass-transform.test index ec87bd4757ed..be6b46d70846 100644 --- a/test-data/unit/check-dataclass-transform.test +++ b/test-data/unit/check-dataclass-transform.test @@ -328,6 +328,38 @@ Foo(a=1, b='bye') [typing fixtures/typing-full.pyi] [builtins fixtures/dataclasses.pyi] +[case testDataclassTransformFieldSpecifierImplicitInit] +# flags: --python-version 3.11 +from typing import dataclass_transform, Literal, overload + +def init(*, init: Literal[True] = True): ... +def no_init(*, init: Literal[False] = False): ... + +@overload +def field_overload(*, custom: None, init: Literal[True] = True): ... +@overload +def field_overload(*, custom: str, init: Literal[False] = False): ... +def field_overload(*, custom, init): ... 
+ +@dataclass_transform(field_specifiers=(init, no_init, field_overload)) +def my_dataclass(cls): return cls + +@my_dataclass +class Foo: + a: int = init() + b: int = field_overload(custom=None) + + bad1: int = no_init() + bad2: int = field_overload(custom="bad2") + +reveal_type(Foo) # N: Revealed type is "def (a: builtins.int, b: builtins.int) -> __main__.Foo" +Foo(a=1, b=2) +Foo(a=1, b=2, bad1=0) # E: Unexpected keyword argument "bad1" for "Foo" +Foo(a=1, b=2, bad2=0) # E: Unexpected keyword argument "bad2" for "Foo" + +[typing fixtures/typing-full.pyi] +[builtins fixtures/dataclasses.pyi] + [case testDataclassTransformOverloadsDecoratorOnOverload] # flags: --python-version 3.11 from typing import dataclass_transform, overload, Any, Callable, Type, Literal @@ -415,7 +447,11 @@ from typing import dataclass_transform @dataclass_transform(frozen_default=True) class Dataclass(type): ... -class Person(metaclass=Dataclass, kw_only=True): +# Note that PEP 681 states that a class that directly specifies a dataclass_transform-decorated +# metaclass should be treated as neither frozen nor unfrozen. For Person to have frozen semantics, +# it may not directly specify the metaclass. +class BaseDataclass(metaclass=Dataclass): ... +class Person(BaseDataclass, kw_only=True): name: str age: int @@ -451,3 +487,569 @@ Foo(1) # E: Too many arguments for "Foo" [typing fixtures/typing-full.pyi] [builtins fixtures/dataclasses.pyi] + +[case testDataclassTransformTypeCheckingInFunction] +# flags: --python-version 3.11 +from typing import dataclass_transform, Type, TYPE_CHECKING + +@dataclass_transform() +def model(cls: Type) -> Type: + return cls + +@model +class FunctionModel: + if TYPE_CHECKING: + string_: str + integer_: int + else: + string_: tuple + integer_: tuple + +FunctionModel(string_="abc", integer_=1) +FunctionModel(string_="abc", integer_=tuple()) # E: Argument "integer_" to "FunctionModel" has incompatible type "Tuple[, ...]"; expected "int" + +[typing fixtures/typing-full.pyi] +[builtins fixtures/dataclasses.pyi] + +[case testDataclassTransformNegatedTypeCheckingInFunction] +# flags: --python-version 3.11 +from typing import dataclass_transform, Type, TYPE_CHECKING + +@dataclass_transform() +def model(cls: Type) -> Type: + return cls + +@model +class FunctionModel: + if not TYPE_CHECKING: + string_: tuple + integer_: tuple + else: + string_: str + integer_: int + +FunctionModel(string_="abc", integer_=1) +FunctionModel(string_="abc", integer_=tuple()) # E: Argument "integer_" to "FunctionModel" has incompatible type "Tuple[, ...]"; expected "int" + +[typing fixtures/typing-full.pyi] +[builtins fixtures/dataclasses.pyi] + + +[case testDataclassTransformTypeCheckingInBaseClass] +# flags: --python-version 3.11 +from typing import dataclass_transform, TYPE_CHECKING + +@dataclass_transform() +class ModelBase: + ... + +class BaseClassModel(ModelBase): + if TYPE_CHECKING: + string_: str + integer_: int + else: + string_: tuple + integer_: tuple + +BaseClassModel(string_="abc", integer_=1) +BaseClassModel(string_="abc", integer_=tuple()) # E: Argument "integer_" to "BaseClassModel" has incompatible type "Tuple[, ...]"; expected "int" + +[typing fixtures/typing-full.pyi] +[builtins fixtures/dataclasses.pyi] + +[case testDataclassTransformNegatedTypeCheckingInBaseClass] +# flags: --python-version 3.11 +from typing import dataclass_transform, TYPE_CHECKING + +@dataclass_transform() +class ModelBase: + ... 
+ +class BaseClassModel(ModelBase): + if not TYPE_CHECKING: + string_: tuple + integer_: tuple + else: + string_: str + integer_: int + +BaseClassModel(string_="abc", integer_=1) +BaseClassModel(string_="abc", integer_=tuple()) # E: Argument "integer_" to "BaseClassModel" has incompatible type "Tuple[, ...]"; expected "int" + +[typing fixtures/typing-full.pyi] +[builtins fixtures/dataclasses.pyi] + +[case testDataclassTransformTypeCheckingInMetaClass] +# flags: --python-version 3.11 +from typing import dataclass_transform, Type, TYPE_CHECKING + +@dataclass_transform() +class ModelMeta(type): + ... + +class ModelBaseWithMeta(metaclass=ModelMeta): + ... + +class MetaClassModel(ModelBaseWithMeta): + if TYPE_CHECKING: + string_: str + integer_: int + else: + string_: tuple + integer_: tuple + +MetaClassModel(string_="abc", integer_=1) +MetaClassModel(string_="abc", integer_=tuple()) # E: Argument "integer_" to "MetaClassModel" has incompatible type "Tuple[, ...]"; expected "int" + +[typing fixtures/typing-full.pyi] +[builtins fixtures/dataclasses.pyi] + +[case testDataclassTransformNegatedTypeCheckingInMetaClass] +# flags: --python-version 3.11 +from typing import dataclass_transform, Type, TYPE_CHECKING + +@dataclass_transform() +class ModelMeta(type): + ... + +class ModelBaseWithMeta(metaclass=ModelMeta): + ... + +class MetaClassModel(ModelBaseWithMeta): + if not TYPE_CHECKING: + string_: tuple + integer_: tuple + else: + string_: str + integer_: int + +MetaClassModel(string_="abc", integer_=1) +MetaClassModel(string_="abc", integer_=tuple()) # E: Argument "integer_" to "MetaClassModel" has incompatible type "Tuple[, ...]"; expected "int" + +[typing fixtures/typing-full.pyi] +[builtins fixtures/dataclasses.pyi] + +[case testDataclassTransformStaticConditionalAttributes] +# flags: --python-version 3.11 --always-true TRUTH +from typing import dataclass_transform, Type, TYPE_CHECKING + +TRUTH = False # Is set to --always-true + +@dataclass_transform() +def model(cls: Type) -> Type: + return cls + +@model +class FunctionModel: + if TYPE_CHECKING: + present_1: int + else: + skipped_1: int + if True: # Mypy does not know if it is True or False, so the block is used + present_2: int + if False: # Mypy does not know if it is True or False, so the block is used + present_3: int + if not TRUTH: + skipped_2: int + else: + present_4: int + +FunctionModel( + present_1=1, + present_2=2, + present_3=3, + present_4=4, +) +FunctionModel() # E: Missing positional arguments "present_1", "present_2", "present_3", "present_4" in call to "FunctionModel" +FunctionModel( # E: Unexpected keyword argument "skipped_1" for "FunctionModel" + present_1=1, + present_2=2, + present_3=3, + present_4=4, + skipped_1=5, +) + +[typing fixtures/typing-full.pyi] +[builtins fixtures/dataclasses.pyi] + + +[case testDataclassTransformStaticDeterministicConditionalElifAttributes] +# flags: --python-version 3.11 --always-true TRUTH --always-false LIE +from typing import dataclass_transform, Type, TYPE_CHECKING + +TRUTH = False # Is set to --always-true +LIE = True # Is set to --always-false + +@dataclass_transform() +def model(cls: Type) -> Type: + return cls + +@model +class FunctionModel: + if TYPE_CHECKING: + present_1: int + elif TRUTH: + skipped_1: int + else: + skipped_2: int + if LIE: + skipped_3: int + elif TRUTH: + present_2: int + else: + skipped_4: int + if LIE: + skipped_5: int + elif LIE: + skipped_6: int + else: + present_3: int + +FunctionModel( + present_1=1, + present_2=2, + present_3=3, +) + +[typing 
fixtures/typing-full.pyi]
+[builtins fixtures/dataclasses.pyi]
+
+[case testDataclassTransformStaticNotDeterministicConditionalElifAttributes]
+# flags: --python-version 3.11 --always-true TRUTH --always-false LIE
+from typing import dataclass_transform, Type, TYPE_CHECKING
+
+TRUTH = False # Is set to --always-true
+LIE = True # Is set to --always-false
+
+@dataclass_transform()
+def model(cls: Type) -> Type:
+    return cls
+
+@model
+class FunctionModel:
+    if 123: # Mypy does not know if it is True or False, so this block is used
+        present_1: int
+    elif TRUTH: # Mypy does not know if the previous condition is True or False, so it uses this block too
+        present_2: int
+    else: # The previous condition is for sure True, so this block is skipped
+        skipped_1: int
+    if 123:
+        present_3: int
+    elif 123:
+        present_4: int
+    else:
+        present_5: int
+    if 123: # Mypy does not know if it is True or False, so this block is used
+        present_6: int
+    elif LIE: # This is for sure False, so the block is skipped
+        skipped_2: int
+    else: # None of the conditions above is for sure True, so this block is used
+        present_7: int
+
+FunctionModel(
+    present_1=1,
+    present_2=2,
+    present_3=3,
+    present_4=4,
+    present_5=5,
+    present_6=6,
+    present_7=7,
+)
+
+[typing fixtures/typing-full.pyi]
+[builtins fixtures/dataclasses.pyi]
+
+[case testDataclassTransformFunctionConditionalAttributes]
+# flags: --python-version 3.11
+from typing import dataclass_transform, Type
+
+@dataclass_transform()
+def model(cls: Type) -> Type:
+    return cls
+
+def condition() -> bool:
+    return True
+
+@model
+class FunctionModel:
+    if condition():
+        x: int
+        y: int
+        z1: int
+    else:
+        x: str # E: Name "x" already defined on line 14
+        y: int # E: Name "y" already defined on line 15
+        z2: int
+
+FunctionModel(x=1, y=2, z1=3, z2=4)
+
+[typing fixtures/typing-full.pyi]
+[builtins fixtures/dataclasses.pyi]
+
+
+[case testDataclassTransformNegatedFunctionConditionalAttributes]
+# flags: --python-version 3.11
+from typing import dataclass_transform, Type
+
+@dataclass_transform()
+def model(cls: Type) -> Type:
+    return cls
+
+def condition() -> bool:
+    return True
+
+@model
+class FunctionModel:
+    if not condition():
+        x: int
+        y: int
+        z1: int
+    else:
+        x: str # E: Name "x" already defined on line 14
+        y: int # E: Name "y" already defined on line 15
+        z2: int
+
+FunctionModel(x=1, y=2, z1=3, z2=4)
+
+[typing fixtures/typing-full.pyi]
+[builtins fixtures/dataclasses.pyi]
+
+[case testDataclassTransformDirectMetaclassNeitherFrozenNorNotFrozen]
+# flags: --python-version 3.11
+from typing import dataclass_transform, Type
+
+@dataclass_transform()
+class Meta(type): ...
+class Base(metaclass=Meta):
+    base: int
+class Foo(Base, frozen=True):
+    foo: int
+class Bar(Base, frozen=False):
+    bar: int
+
+
+foo = Foo(0, 1)
+foo.foo = 5 # E: Property "foo" defined in "Foo" is read-only
+foo.base = 6
+reveal_type(foo.base) # N: Revealed type is "builtins.int"
+bar = Bar(0, 1)
+bar.bar = 5
+bar.base = 6
+reveal_type(bar.base) # N: Revealed type is "builtins.int"
+
+[typing fixtures/typing-full.pyi]
+[builtins fixtures/dataclasses.pyi]
+
+[case testDataclassTransformSimpleDescriptor]
+# flags: --python-version 3.11
+
+from typing import dataclass_transform, overload, Any
+
+@dataclass_transform()
+def my_dataclass(cls): ...
+
+class Desc:
+    @overload
+    def __get__(self, instance: None, owner: Any) -> Desc: ...
+    @overload
+    def __get__(self, instance: object, owner: Any) -> str: ...
+    def __get__(self, instance: object | None, owner: Any) -> Desc | str: ...
+ + def __set__(self, instance: Any, value: str) -> None: ... + +@my_dataclass +class C: + x: Desc + y: int + +C(x='x', y=1) +C(x=1, y=1) # E: Argument "x" to "C" has incompatible type "int"; expected "str" +reveal_type(C(x='x', y=1).x) # N: Revealed type is "builtins.str" +reveal_type(C(x='x', y=1).y) # N: Revealed type is "builtins.int" +reveal_type(C.x) # N: Revealed type is "__main__.Desc" + +[typing fixtures/typing-full.pyi] +[builtins fixtures/dataclasses.pyi] + +[case testDataclassTransformUnannotatedDescriptor] +# flags: --python-version 3.11 + +from typing import dataclass_transform, overload, Any + +@dataclass_transform() +def my_dataclass(cls): ... + +class Desc: + @overload + def __get__(self, instance: None, owner: Any) -> Desc: ... + @overload + def __get__(self, instance: object, owner: Any) -> str: ... + def __get__(self, instance: object | None, owner: Any) -> Desc | str: ... + + def __set__(*args, **kwargs): ... + +@my_dataclass +class C: + x: Desc + y: int + +C(x='x', y=1) +C(x=1, y=1) +reveal_type(C(x='x', y=1).x) # N: Revealed type is "builtins.str" +reveal_type(C(x='x', y=1).y) # N: Revealed type is "builtins.int" +reveal_type(C.x) # N: Revealed type is "__main__.Desc" + +[typing fixtures/typing-full.pyi] +[builtins fixtures/dataclasses.pyi] + +[case testDataclassTransformGenericDescriptor] +# flags: --python-version 3.11 + +from typing import dataclass_transform, overload, Any, TypeVar, Generic + +@dataclass_transform() +def my_dataclass(frozen: bool = False): ... + +T = TypeVar("T") + +class Desc(Generic[T]): + @overload + def __get__(self, instance: None, owner: Any) -> Desc[T]: ... + @overload + def __get__(self, instance: object, owner: Any) -> T: ... + def __get__(self, instance: object | None, owner: Any) -> Desc | T: ... + + def __set__(self, instance: Any, value: T) -> None: ... + +@my_dataclass() +class C: + x: Desc[str] + +C(x='x') +C(x=1) # E: Argument "x" to "C" has incompatible type "int"; expected "str" +reveal_type(C(x='x').x) # N: Revealed type is "builtins.str" +reveal_type(C.x) # N: Revealed type is "__main__.Desc[builtins.str]" + +@my_dataclass() +class D(C): + y: Desc[int] + +d = D(x='x', y=1) +reveal_type(d.x) # N: Revealed type is "builtins.str" +reveal_type(d.y) # N: Revealed type is "builtins.int" +reveal_type(D.x) # N: Revealed type is "__main__.Desc[builtins.str]" +reveal_type(D.y) # N: Revealed type is "__main__.Desc[builtins.int]" + +@my_dataclass(frozen=True) +class F: + x: Desc[str] = Desc() + +F(x='x') +F(x=1) # E: Argument "x" to "F" has incompatible type "int"; expected "str" +reveal_type(F(x='x').x) # N: Revealed type is "builtins.str" +reveal_type(F.x) # N: Revealed type is "__main__.Desc[builtins.str]" + +[typing fixtures/typing-full.pyi] +[builtins fixtures/dataclasses.pyi] + +[case testDataclassTransformGenericDescriptorWithInheritance] +# flags: --python-version 3.11 + +from typing import dataclass_transform, overload, Any, TypeVar, Generic + +@dataclass_transform() +def my_dataclass(cls): ... + +T = TypeVar("T") + +class Desc(Generic[T]): + @overload + def __get__(self, instance: None, owner: Any) -> Desc[T]: ... + @overload + def __get__(self, instance: object, owner: Any) -> T: ... + def __get__(self, instance: object | None, owner: Any) -> Desc | T: ... + + def __set__(self, instance: Any, value: T) -> None: ... 
+
+class Desc2(Desc[str]):
+    pass
+
+@my_dataclass
+class C:
+    x: Desc2
+
+C(x='x')
+C(x=1) # E: Argument "x" to "C" has incompatible type "int"; expected "str"
+reveal_type(C(x='x').x) # N: Revealed type is "builtins.str"
+reveal_type(C.x) # N: Revealed type is "__main__.Desc[builtins.str]"
+
+[typing fixtures/typing-full.pyi]
+[builtins fixtures/dataclasses.pyi]
+
+[case testDataclassTransformDescriptorWithDifferentGetSetTypes]
+# flags: --python-version 3.11
+
+from typing import dataclass_transform, overload, Any
+
+@dataclass_transform()
+def my_dataclass(cls): ...
+
+class Desc:
+    @overload
+    def __get__(self, instance: None, owner: Any) -> int: ...
+    @overload
+    def __get__(self, instance: object, owner: Any) -> str: ...
+    def __get__(self, instance, owner): ...
+
+    def __set__(self, instance: Any, value: bytes) -> None: ...
+
+@my_dataclass
+class C:
+    x: Desc
+
+c = C(x=b'x')
+C(x=1) # E: Argument "x" to "C" has incompatible type "int"; expected "bytes"
+reveal_type(c.x) # N: Revealed type is "builtins.str"
+reveal_type(C.x) # N: Revealed type is "builtins.int"
+c.x = b'x'
+c.x = 1 # E: Incompatible types in assignment (expression has type "int", variable has type "bytes")
+
+[typing fixtures/typing-full.pyi]
+[builtins fixtures/dataclasses.pyi]
+
+[case testDataclassTransformUnsupportedDescriptors]
+# flags: --python-version 3.11
+
+from typing import dataclass_transform, overload, Any
+
+@dataclass_transform()
+def my_dataclass(cls): ...
+
+class Desc:
+    @overload
+    def __get__(self, instance: None, owner: Any) -> int: ...
+    @overload
+    def __get__(self, instance: object, owner: Any) -> str: ...
+    def __get__(self, instance, owner): ...
+
+    def __set__(*args, **kwargs) -> None: ...
+
+class Desc2:
+    @overload
+    def __get__(self, instance: None, owner: Any) -> int: ...
+    @overload
+    def __get__(self, instance: object, owner: Any) -> str: ...
+    def __get__(self, instance, owner): ...
+
+    @overload
+    def __set__(self, instance: Any, value: bytes) -> None: ...
+    @overload
+    def __set__(self) -> None: ...
+    def __set__(self, *args, **kwargs) -> None: ...
+
+@my_dataclass
+class C:
+    x: Desc # E: Unsupported signature for "__set__" in "Desc"
+    y: Desc2 # E: Unsupported "__set__" in "Desc2"
+[typing fixtures/typing-full.pyi]
+[builtins fixtures/dataclasses.pyi]
diff --git a/test-data/unit/check-dataclasses.test b/test-data/unit/check-dataclasses.test
index 4d85be391186..9a68651ed5f6 100644
--- a/test-data/unit/check-dataclasses.test
+++ b/test-data/unit/check-dataclasses.test
@@ -2001,3 +2001,52 @@ class Bar(Foo): ...
e: Element[Bar] reveal_type(e.elements) # N: Revealed type is "typing.Sequence[__main__.Element[__main__.Bar]]" [builtins fixtures/dataclasses.pyi] + + +[case testIfConditionsInDefinition] +# flags: --python-version 3.11 --always-true TRUTH +from dataclasses import dataclass +from typing import TYPE_CHECKING + +TRUTH = False # Is set to --always-true + +@dataclass +class Foo: + if TYPE_CHECKING: + present_1: int + else: + skipped_1: int + if True: # Mypy does not know if it is True or False, so the block is used + present_2: int + if False: # Mypy does not know if it is True or False, so the block is used + present_3: int + if not TRUTH: + skipped_2: int + elif 123: + present_4: int + elif TRUTH: + present_5: int + else: + skipped_3: int + +Foo( + present_1=1, + present_2=2, + present_3=3, + present_4=4, + present_5=5, +) +[builtins fixtures/dataclasses.pyi] + +[case testProtocolNoCrash] +from typing import Protocol, Union, ClassVar +from dataclasses import dataclass, field + +DEFAULT = 0 + +@dataclass +class Test(Protocol): + x: int + def reset(self) -> None: + self.x = DEFAULT +[builtins fixtures/dataclasses.pyi] diff --git a/test-data/unit/check-enum.test b/test-data/unit/check-enum.test index 80a7ca7ff99f..b62ed3d94210 100644 --- a/test-data/unit/check-enum.test +++ b/test-data/unit/check-enum.test @@ -1345,7 +1345,6 @@ class Foo(bytes, Enum): a = Foo.A reveal_type(a.value) # N: Revealed type is "Any" reveal_type(a._value_) # N: Revealed type is "Any" -[builtins fixtures/__new__.pyi] [builtins fixtures/primitives.pyi] [typing fixtures/typing-medium.pyi] @@ -1368,7 +1367,6 @@ class Bar(Foo): a = Bar.A reveal_type(a.value) # N: Revealed type is "Any" reveal_type(a._value_) # N: Revealed type is "Any" -[builtins fixtures/__new__.pyi] [builtins fixtures/primitives.pyi] [typing fixtures/typing-medium.pyi] diff --git a/test-data/unit/check-errorcodes.test b/test-data/unit/check-errorcodes.test index 124d6952fe5f..fc498c9aa6c0 100644 --- a/test-data/unit/check-errorcodes.test +++ b/test-data/unit/check-errorcodes.test @@ -83,7 +83,7 @@ b = 'x'.foobar(c) # type: int # type: ignore[name-defined, xyz] # E: "str" has [case testErrorCodeWarnUnusedIgnores1] # flags: --warn-unused-ignores -x # type: ignore[name-defined, attr-defined] # E: Unused "type: ignore[attr-defined]" comment +x # type: ignore[name-defined, attr-defined] # E: Unused "type: ignore[attr-defined]" comment [unused-ignore] [case testErrorCodeWarnUnusedIgnores2] # flags: --warn-unused-ignores @@ -91,19 +91,19 @@ x # type: ignore[name-defined, attr-defined] # E: Unused "type: ignore[attr-defi [case testErrorCodeWarnUnusedIgnores3] # flags: --warn-unused-ignores -"x".foobar(y) # type: ignore[name-defined, attr-defined, xyz] # E: Unused "type: ignore[xyz]" comment +"x".foobar(y) # type: ignore[name-defined, attr-defined, xyz] # E: Unused "type: ignore[xyz]" comment [unused-ignore] [case testErrorCodeWarnUnusedIgnores4] # flags: --warn-unused-ignores -"x".foobar(y) # type: ignore[name-defined, attr-defined, valid-type] # E: Unused "type: ignore[valid-type]" comment +"x".foobar(y) # type: ignore[name-defined, attr-defined, valid-type] # E: Unused "type: ignore[valid-type]" comment [unused-ignore] [case testErrorCodeWarnUnusedIgnores5] # flags: --warn-unused-ignores -"x".foobar(y) # type: ignore[name-defined, attr-defined, valid-type, xyz] # E: Unused "type: ignore[valid-type, xyz]" comment +"x".foobar(y) # type: ignore[name-defined, attr-defined, valid-type, xyz] # E: Unused "type: ignore[valid-type, xyz]" comment [unused-ignore] [case 
testErrorCodeWarnUnusedIgnores6_NoDetailWhenSingleErrorCode] # flags: --warn-unused-ignores -"x" # type: ignore[name-defined] # E: Unused "type: ignore" comment +"x" # type: ignore[name-defined] # E: Unused "type: ignore" comment [unused-ignore] [case testErrorCodeMissingWhenRequired] # flags: --enable-error-code ignore-without-code @@ -114,9 +114,11 @@ z # type: ignore[name-defined] [case testErrorCodeMissingDoesntTrampleUnusedIgnoresWarning] # flags: --enable-error-code ignore-without-code --warn-unused-ignores -"x" # type: ignore # E: Unused "type: ignore" comment -"y" # type: ignore[ignore-without-code] # E: Unused "type: ignore" comment -z # type: ignore[ignore-without-code] # E: Unused "type: ignore" comment # E: Name "z" is not defined [name-defined] # N: Error code "name-defined" not covered by "type: ignore" comment +"x" # type: ignore # E: Unused "type: ignore" comment [unused-ignore] +"y" # type: ignore[ignore-without-code] # E: Unused "type: ignore" comment [unused-ignore] +z # type: ignore[ignore-without-code] # E: Unused "type: ignore" comment [unused-ignore] \ + # E: Name "z" is not defined [name-defined] \ + # N: Error code "name-defined" not covered by "type: ignore" comment [case testErrorCodeMissingWholeFileIgnores] # flags: --enable-error-code ignore-without-code @@ -638,8 +640,8 @@ def g() -> int: '%d' % 'no' # E: Incompatible types in string interpolation (expression has type "str", placeholder has type "Union[int, float, SupportsInt]") [str-format] '%d + %d' % (1, 2, 3) # E: Not all arguments converted during string formatting [str-format] -'{}'.format(b'abc') # E: On Python 3 formatting "b'abc'" with "{}" produces "b'abc'", not "abc"; use "{!r}" if this is desired behavior [str-bytes-safe] -'%s' % b'abc' # E: On Python 3 formatting "b'abc'" with "%s" produces "b'abc'", not "abc"; use "%r" if this is desired behavior [str-bytes-safe] +'{}'.format(b'abc') # E: If x = b'abc' then f"{x}" or "{}".format(x) produces "b'abc'", not "abc". If this is desired behavior, use f"{x!r}" or "{!r}".format(x). Otherwise, decode the bytes [str-bytes-safe] +'%s' % b'abc' # E: If x = b'abc' then "%s" % x produces "b'abc'", not "abc". If this is desired behavior use "%r" % x. Otherwise, decode the bytes [str-bytes-safe] [builtins fixtures/primitives.pyi] [typing fixtures/typing-medium.pyi] @@ -805,6 +807,22 @@ j = [x for x in lst if False] # E: If condition in comprehension is a k = [x for x in lst if isinstance(x, int) or foo()] # E: If condition in comprehension is always true [redundant-expr] [builtins fixtures/isinstancelist.pyi] +[case testRedundantExprTruthiness] +# flags: --enable-error-code redundant-expr +from typing import List + +def maybe() -> bool: ... 
+ +class Foo: + def __init__(self, x: List[int]) -> None: + self.x = x or [] + + def method(self) -> int: + if not self.x or maybe(): + return 1 + return 2 +[builtins fixtures/list.pyi] + [case testNamedTupleNameMismatch] from typing import NamedTuple @@ -1054,4 +1072,8 @@ A.f = h # OK class A: def f(self) -> None: pass def h(self: A) -> None: pass -A.f = h # type: ignore[assignment] # E: Unused "type: ignore" comment, use narrower [method-assign] instead of [assignment] +A.f = h # type: ignore[assignment] # E: Unused "type: ignore" comment, use narrower [method-assign] instead of [assignment] code [unused-ignore] + +[case testUnusedIgnoreEnableCode] +# flags: --enable-error-code=unused-ignore +x = 1 # type: ignore # E: Unused "type: ignore" comment [unused-ignore] diff --git a/test-data/unit/check-expressions.test b/test-data/unit/check-expressions.test index 49a3f0d4aaa7..1fa551f6a2e4 100644 --- a/test-data/unit/check-expressions.test +++ b/test-data/unit/check-expressions.test @@ -937,6 +937,8 @@ reveal_type(returned) # N: Revealed type is "builtins.int" assert_type(a, str) # E: Expression is of type "int", not "str" assert_type(a, Any) # E: Expression is of type "int", not "Any" assert_type(a, Literal[1]) # E: Expression is of type "int", not "Literal[1]" +assert_type(42, Literal[42]) +assert_type(42, int) # E: Expression is of type "Literal[42]", not "int" [builtins fixtures/tuple.pyi] [case testAssertTypeGeneric] @@ -1115,11 +1117,28 @@ o[:] # E: Value of type "object" is not indexable [case testNonIntSliceBounds] from typing import Any -a, o = None, None # type: (Any, object) -a[o:1] # E: Slice index must be an integer or None -a[1:o] # E: Slice index must be an integer or None -a[o:] # E: Slice index must be an integer or None -a[:o] # E: Slice index must be an integer or None +a: Any +o: object +a[o:1] # E: Slice index must be an integer, SupportsIndex or None +a[1:o] # E: Slice index must be an integer, SupportsIndex or None +a[o:] # E: Slice index must be an integer, SupportsIndex or None +a[:o] # E: Slice index must be an integer, SupportsIndex or None +[builtins fixtures/slice.pyi] + +[case testSliceSupportsIndex] +import typing_extensions +class Index: + def __init__(self, value: int) -> None: + self.value = value + def __index__(self) -> int: + return self.value + +c = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] +reveal_type(c[Index(0):Index(5)]) # N: Revealed type is "builtins.list[builtins.int]" +[file typing_extensions.pyi] +from typing import Protocol +class SupportsIndex(Protocol): + def __index__(self) -> int: ... [builtins fixtures/slice.pyi] [case testNoneSliceBounds] @@ -1769,13 +1788,35 @@ b = {'z': 26, *a} # E: invalid syntax [case testDictWithStarStarExpr] -from typing import Dict +from typing import Dict, Iterable + +class Thing: + def keys(self) -> Iterable[str]: + ... + def __getitem__(self, key: str) -> int: + ... 
+ a = {'a': 1} b = {'z': 26, **a} c = {**b} d = {**a, **b, 'c': 3} -e = {1: 'a', **a} # E: Argument 1 to "update" of "dict" has incompatible type "Dict[str, int]"; expected "Mapping[int, str]" -f = {**b} # type: Dict[int, int] # E: List item 0 has incompatible type "Dict[str, int]"; expected "Mapping[int, int]" +e = {1: 'a', **a} # E: Cannot infer type argument 1 of \ + # N: Try assigning the literal to a variable annotated as dict[, ] +f = {**b} # type: Dict[int, int] # E: Unpacked dict entry 0 has incompatible type "Dict[str, int]"; expected "SupportsKeysAndGetItem[int, int]" +g = {**Thing()} +h = {**a, **Thing()} +i = {**Thing()} # type: Dict[int, int] # E: Unpacked dict entry 0 has incompatible type "Thing"; expected "SupportsKeysAndGetItem[int, int]" \ + # N: Following member(s) of "Thing" have conflicts: \ + # N: Expected: \ + # N: def __getitem__(self, int, /) -> int \ + # N: Got: \ + # N: def __getitem__(self, str, /) -> int \ + # N: Expected: \ + # N: def keys(self) -> Iterable[int] \ + # N: Got: \ + # N: def keys(self) -> Iterable[str] +j = {1: 'a', **Thing()} # E: Cannot infer type argument 1 of \ + # N: Try assigning the literal to a variable annotated as dict[, ] [builtins fixtures/dict.pyi] [typing fixtures/typing-medium.pyi] diff --git a/test-data/unit/check-flags.test b/test-data/unit/check-flags.test index 0ac39ebf9c10..244e728f3ab6 100644 --- a/test-data/unit/check-flags.test +++ b/test-data/unit/check-flags.test @@ -1311,7 +1311,7 @@ import attr class Unannotated: foo = attr.ib() -[builtins fixtures/attr.pyi] +[builtins fixtures/plugin_attrs.pyi] [case testDisallowIncompleteDefsAttrsWithAnnotations] # flags: --disallow-incomplete-defs @@ -1321,7 +1321,7 @@ import attr class Annotated: bar: int = attr.ib() -[builtins fixtures/attr.pyi] +[builtins fixtures/plugin_attrs.pyi] [case testDisallowIncompleteDefsAttrsPartialAnnotations] # flags: --disallow-incomplete-defs @@ -1332,7 +1332,7 @@ class PartiallyAnnotated: # E: Function is missing a type annotation for one or bar: int = attr.ib() baz = attr.ib() -[builtins fixtures/attr.pyi] +[builtins fixtures/plugin_attrs.pyi] [case testAlwaysTrueAlwaysFalseFlags] # flags: --always-true=YOLO --always-true=YOLO1 --always-false=BLAH1 --always-false BLAH --ignore-missing-imports diff --git a/test-data/unit/check-formatting.test b/test-data/unit/check-formatting.test index 5c0d0ed65292..588b2c11714e 100644 --- a/test-data/unit/check-formatting.test +++ b/test-data/unit/check-formatting.test @@ -30,8 +30,8 @@ xb: bytes xs: str '%s' % xs # OK -'%s' % xb # E: On Python 3 formatting "b'abc'" with "%s" produces "b'abc'", not "abc"; use "%r" if this is desired behavior -'%(name)s' % {'name': b'value'} # E: On Python 3 formatting "b'abc'" with "%s" produces "b'abc'", not "abc"; use "%r" if this is desired behavior +'%s' % xb # E: If x = b'abc' then "%s" % x produces "b'abc'", not "abc". If this is desired behavior use "%r" % x. Otherwise, decode the bytes +'%(name)s' % {'name': b'value'} # E: If x = b'abc' then "%s" % x produces "b'abc'", not "abc". If this is desired behavior use "%r" % x. Otherwise, decode the bytes [builtins fixtures/primitives.pyi] [case testStringInterpolationCount] @@ -125,14 +125,29 @@ b'%(x)s' % {b'x': b'data'} [typing fixtures/typing-medium.pyi] [case testStringInterpolationMappingDictTypes] -from typing import Any, Dict +from typing import Any, Dict, Iterable + +class StringThing: + def keys(self) -> Iterable[str]: + ... + def __getitem__(self, __key: str) -> str: + ... 
+ +class BytesThing: + def keys(self) -> Iterable[bytes]: + ... + def __getitem__(self, __key: bytes) -> str: + ... + a = None # type: Any ds, do, di = None, None, None # type: Dict[str, int], Dict[object, int], Dict[int, int] -'%(a)' % 1 # E: Format requires a mapping (expression has type "int", expected type for mapping is "Mapping[str, Any]") +'%(a)' % 1 # E: Format requires a mapping (expression has type "int", expected type for mapping is "SupportsKeysAndGetItem[str, Any]") '%()d' % a '%()d' % ds -'%()d' % do # E: Format requires a mapping (expression has type "Dict[object, int]", expected type for mapping is "Mapping[str, Any]") -b'%()d' % ds # E: Format requires a mapping (expression has type "Dict[str, int]", expected type for mapping is "Mapping[bytes, Any]") +'%()d' % do # E: Format requires a mapping (expression has type "Dict[object, int]", expected type for mapping is "SupportsKeysAndGetItem[str, Any]") +b'%()d' % ds # E: Format requires a mapping (expression has type "Dict[str, int]", expected type for mapping is "SupportsKeysAndGetItem[bytes, Any]") +'%()s' % StringThing() +b'%()s' % BytesThing() [builtins fixtures/primitives.pyi] [case testStringInterpolationMappingInvalidSpecifiers] @@ -435,21 +450,21 @@ N = NewType('N', bytes) n: N '{}'.format(a) -'{}'.format(b) # E: On Python 3 formatting "b'abc'" with "{}" produces "b'abc'", not "abc"; use "{!r}" if this is desired behavior -'{}'.format(x) # E: On Python 3 formatting "b'abc'" with "{}" produces "b'abc'", not "abc"; use "{!r}" if this is desired behavior -'{}'.format(n) # E: On Python 3 formatting "b'abc'" with "{}" produces "b'abc'", not "abc"; use "{!r}" if this is desired behavior +'{}'.format(b) # E: If x = b'abc' then f"{x}" or "{}".format(x) produces "b'abc'", not "abc". If this is desired behavior, use f"{x!r}" or "{!r}".format(x). Otherwise, decode the bytes +'{}'.format(x) # E: If x = b'abc' then f"{x}" or "{}".format(x) produces "b'abc'", not "abc". If this is desired behavior, use f"{x!r}" or "{!r}".format(x). Otherwise, decode the bytes +'{}'.format(n) # E: If x = b'abc' then f"{x}" or "{}".format(x) produces "b'abc'", not "abc". If this is desired behavior, use f"{x!r}" or "{!r}".format(x). Otherwise, decode the bytes -f'{b}' # E: On Python 3 formatting "b'abc'" with "{}" produces "b'abc'", not "abc"; use "{!r}" if this is desired behavior -f'{x}' # E: On Python 3 formatting "b'abc'" with "{}" produces "b'abc'", not "abc"; use "{!r}" if this is desired behavior -f'{n}' # E: On Python 3 formatting "b'abc'" with "{}" produces "b'abc'", not "abc"; use "{!r}" if this is desired behavior +f'{b}' # E: If x = b'abc' then f"{x}" or "{}".format(x) produces "b'abc'", not "abc". If this is desired behavior, use f"{x!r}" or "{!r}".format(x). Otherwise, decode the bytes +f'{x}' # E: If x = b'abc' then f"{x}" or "{}".format(x) produces "b'abc'", not "abc". If this is desired behavior, use f"{x!r}" or "{!r}".format(x). Otherwise, decode the bytes +f'{n}' # E: If x = b'abc' then f"{x}" or "{}".format(x) produces "b'abc'", not "abc". If this is desired behavior, use f"{x!r}" or "{!r}".format(x). Otherwise, decode the bytes class C(Generic[B]): x: B def meth(self) -> None: - '{}'.format(self.x) # E: On Python 3 formatting "b'abc'" with "{}" produces "b'abc'", not "abc"; use "{!r}" if this is desired behavior + '{}'.format(self.x) # E: If x = b'abc' then f"{x}" or "{}".format(x) produces "b'abc'", not "abc". If this is desired behavior, use f"{x!r}" or "{!r}".format(x). 
Otherwise, decode the bytes def func(x: A) -> A: - '{}'.format(x) # E: On Python 3 formatting "b'abc'" with "{}" produces "b'abc'", not "abc"; use "{!r}" if this is desired behavior + '{}'.format(x) # E: If x = b'abc' then f"{x}" or "{}".format(x) produces "b'abc'", not "abc". If this is desired behavior, use f"{x!r}" or "{!r}".format(x). Otherwise, decode the bytes return x '{!r}'.format(a) diff --git a/test-data/unit/check-functools.test b/test-data/unit/check-functools.test index f95b823a5291..ebfe86f2b241 100644 --- a/test-data/unit/check-functools.test +++ b/test-data/unit/check-functools.test @@ -20,7 +20,6 @@ Ord() <= 1 # E: Unsupported operand types for <= ("Ord" and "int") Ord() == 1 Ord() > 1 # E: Unsupported operand types for > ("Ord" and "int") Ord() >= 1 # E: Unsupported operand types for >= ("Ord" and "int") -[builtins fixtures/ops.pyi] [builtins fixtures/dict.pyi] [case testTotalOrderingLambda] @@ -43,7 +42,6 @@ Ord() <= 1 # E: Unsupported operand types for <= ("Ord" and "int") Ord() == 1 Ord() > 1 # E: Unsupported operand types for > ("Ord" and "int") Ord() >= 1 # E: Unsupported operand types for >= ("Ord" and "int") -[builtins fixtures/ops.pyi] [builtins fixtures/dict.pyi] [case testTotalOrderingNonCallable] @@ -59,8 +57,6 @@ class Ord(object): Ord() <= Ord() # E: Unsupported left operand type for <= ("Ord") Ord() > Ord() # E: "int" not callable Ord() >= Ord() # E: Unsupported left operand type for >= ("Ord") - -[builtins fixtures/ops.pyi] [builtins fixtures/dict.pyi] [case testTotalOrderingReturnNotBool] @@ -79,8 +75,6 @@ reveal_type(Ord() <= Ord()) # N: Revealed type is "Any" reveal_type(Ord() == Ord()) # N: Revealed type is "builtins.bool" reveal_type(Ord() > Ord()) # N: Revealed type is "Any" reveal_type(Ord() >= Ord()) # N: Revealed type is "Any" - -[builtins fixtures/ops.pyi] [builtins fixtures/dict.pyi] [case testTotalOrderingAllowsAny] @@ -105,7 +99,6 @@ Ord() <= 1 # E: Unsupported left operand type for <= ("Ord") Ord() == 1 Ord() > 1 Ord() >= 1 # E: Unsupported left operand type for >= ("Ord") -[builtins fixtures/ops.pyi] [builtins fixtures/dict.pyi] [case testCachedProperty] @@ -151,5 +144,4 @@ def f(d: D[C]) -> None: reveal_type(d.__gt__) # N: Revealed type is "def (other: Any) -> builtins.bool" d: D[int] # E: Type argument "int" of "D" must be a subtype of "C" -[builtins fixtures/ops.pyi] [builtins fixtures/dict.pyi] diff --git a/test-data/unit/check-generic-subtyping.test b/test-data/unit/check-generic-subtyping.test index 1f06bc7c540a..a34e054fd827 100644 --- a/test-data/unit/check-generic-subtyping.test +++ b/test-data/unit/check-generic-subtyping.test @@ -990,6 +990,7 @@ main:13: note: Revealed type is "builtins.dict[builtins.int, builtins.str]" main:14: error: Keywords must be strings main:14: error: Argument 1 to "func_with_kwargs" has incompatible type "**X1[str, int]"; expected "int" [builtins fixtures/dict.pyi] +[typing fixtures/typing-medium.pyi] [case testSubtypingMappingUnpacking3] from typing import Generic, TypeVar, Mapping, Iterable diff --git a/test-data/unit/check-generics.test b/test-data/unit/check-generics.test index a62028ca94ea..06b80be85096 100644 --- a/test-data/unit/check-generics.test +++ b/test-data/unit/check-generics.test @@ -2698,3 +2698,38 @@ def func(var: T) -> T: reveal_type(func(1)) # N: Revealed type is "builtins.int" [builtins fixtures/tuple.pyi] + +[case testGenericLambdaGenericMethodNoCrash] +from typing import TypeVar, Union, Callable, Generic + +S = TypeVar("S") +T = TypeVar("T") + +def f(x: Callable[[G[T]], int]) -> T: 
... + +class G(Generic[T]): + def g(self, x: S) -> Union[S, T]: ... + +f(lambda x: x.g(0)) # E: Cannot infer type argument 1 of "f" + +[case testDictStarInference] +class B: ... +class C1(B): ... +class C2(B): ... + +dict1 = {"a": C1()} +dict2 = {"a": C2(), **dict1} +reveal_type(dict2) # N: Revealed type is "builtins.dict[builtins.str, __main__.B]" +[builtins fixtures/dict.pyi] + +[case testDictStarAnyKeyJoinValue] +from typing import Any + +class B: ... +class C1(B): ... +class C2(B): ... + +dict1: Any +dict2 = {"a": C1(), **{x: C2() for x in dict1}} +reveal_type(dict2) # N: Revealed type is "builtins.dict[Any, __main__.B]" +[builtins fixtures/dict.pyi] diff --git a/test-data/unit/check-ignore.test b/test-data/unit/check-ignore.test index 982fb67f4e7f..fa451f373e70 100644 --- a/test-data/unit/check-ignore.test +++ b/test-data/unit/check-ignore.test @@ -220,29 +220,24 @@ def f() -> None: pass yield # type: ignore # E: "yield" outside function [case testIgnoreWholeModule1] -# flags: --warn-unused-ignores -# type: ignore -IGNORE # type: ignore # E: Unused "type: ignore" comment - -[case testIgnoreWholeModule2] # type: ignore if True: IGNORE -[case testIgnoreWholeModule3] +[case testIgnoreWholeModule2] # type: ignore @d class C: ... IGNORE -[case testIgnoreWholeModule4] +[case testIgnoreWholeModule3] # type: ignore @d def f(): ... IGNORE -[case testIgnoreWholeModule5] +[case testIgnoreWholeModule4] # type: ignore import MISSING @@ -279,3 +274,19 @@ class CD(six.with_metaclass(M)): # E: Multiple metaclass definitions 42 + 'no way' # type: ignore [builtins fixtures/tuple.pyi] + +[case testUnusedIgnoreTryExcept] +# flags: --warn-unused-ignores +try: + import foo # type: ignore # E: Unused "type: ignore" comment + import bar # type: ignore[import] # E: Unused "type: ignore" comment + import foobar # type: ignore[unused-ignore] + import barfoo # type: ignore[import,unused-ignore] + import missing # type: ignore[import,unused-ignore] +except Exception: + pass +[file foo.py] +[file bar.py] +[file foobar.py] +[file barfoo.py] +[builtins fixtures/exception.pyi] diff --git a/test-data/unit/check-incremental.test b/test-data/unit/check-incremental.test index ec0c5d5e4805..ec48ff43cc64 100644 --- a/test-data/unit/check-incremental.test +++ b/test-data/unit/check-incremental.test @@ -3069,7 +3069,7 @@ from attr import attrib, attrs class A: a: int -[builtins fixtures/attr.pyi] +[builtins fixtures/plugin_attrs.pyi] [rechecked] [stale] [out2] @@ -3410,7 +3410,7 @@ class C: b: int = attr.ib(converter=int) c: A = attr.ib(converter=A) d: int = attr.ib(converter=parse) -[builtins fixtures/attr.pyi] +[builtins fixtures/plugin_attrs.pyi] [out1] main:6: note: Revealed type is "def (a: Union[builtins.float, builtins.str], b: Union[builtins.str, builtins.bytes, builtins.int], c: builtins.str, d: Union[builtins.int, builtins.str]) -> a.C" main:10: note: Revealed type is "def (a: Union[builtins.float, builtins.str], b: Union[builtins.str, builtins.bytes, builtins.int], c: builtins.str, d: Union[builtins.int, builtins.str], x: builtins.str) -> __main__.D" @@ -3699,8 +3699,8 @@ cache_fine_grained = False [file mypy.ini.2] \[mypy] cache_fine_grained = True -[rechecked a, builtins, typing] -[stale a, builtins, typing] +[rechecked _typeshed, a, builtins, typing] +[stale _typeshed, a, builtins, typing] [builtins fixtures/tuple.pyi] [case testIncrementalPackageNameOverload] @@ -3751,8 +3751,8 @@ Signature: 8a477f597d28d172789f06886806bc55 [file b.py.2] # uh -- Every file should get reloaded, since the cache was invalidated 
-[stale a, b, builtins, typing] -[rechecked a, b, builtins, typing] +[stale _typeshed, a, b, builtins, typing] +[rechecked _typeshed, a, b, builtins, typing] [builtins fixtures/tuple.pyi] [case testIncrementalBustedFineGrainedCache2] @@ -3764,8 +3764,8 @@ import b [file b.py.2] # uh -- Every file should get reloaded, since the settings changed -[stale a, b, builtins, typing] -[rechecked a, b, builtins, typing] +[stale _typeshed, a, b, builtins, typing] +[rechecked _typeshed, a, b, builtins, typing] [builtins fixtures/tuple.pyi] [case testIncrementalBustedFineGrainedCache3] @@ -3780,8 +3780,8 @@ import b [file b.py.2] # uh -- Every file should get reloaded, since the cache was invalidated -[stale a, b, builtins, typing] -[rechecked a, b, builtins, typing] +[stale _typeshed, a, b, builtins, typing] +[rechecked _typeshed, a, b, builtins, typing] [builtins fixtures/tuple.pyi] [case testIncrementalWorkingFineGrainedCache] @@ -6403,3 +6403,29 @@ def g(d: Dict[TValue]) -> TValue: tmp/b.py:6: error: TypedDict "a.Dict[TValue]" has no key "x" [out2] tmp/b.py:6: error: TypedDict "a.Dict[TValue]" has no key "y" + +[case testParamSpecNoCrash] +import m +[file m.py] +from typing import Callable, TypeVar +from lib import C + +T = TypeVar("T") +def test(x: Callable[..., T]) -> T: ... +test(C) # type: ignore + +[file m.py.2] +from typing import Callable, TypeVar +from lib import C + +T = TypeVar("T") +def test(x: Callable[..., T]) -> T: ... +test(C) # type: ignore +# touch +[file lib.py] +from typing import ParamSpec, Generic, Callable + +P = ParamSpec("P") +class C(Generic[P]): + def __init__(self, fn: Callable[P, int]) -> None: ... +[builtins fixtures/dict.pyi] diff --git a/test-data/unit/check-inference.test b/test-data/unit/check-inference.test index cfb553820d9e..166e173e7301 100644 --- a/test-data/unit/check-inference.test +++ b/test-data/unit/check-inference.test @@ -270,6 +270,120 @@ def f() -> None: class A: pass [out] +[case testClassObjectsNotUnpackableWithoutIterableMetaclass] +from typing import Type + +class Foo: ... +A: Type[Foo] = Foo +a, b = Foo # E: "Type[Foo]" object is not iterable +c, d = A # E: "Type[Foo]" object is not iterable + +class Meta(type): ... +class Bar(metaclass=Meta): ... +B: Type[Bar] = Bar +e, f = Bar # E: "Type[Bar]" object is not iterable +g, h = B # E: "Type[Bar]" object is not iterable + +reveal_type(a) # E: Cannot determine type of "a" # N: Revealed type is "Any" +reveal_type(b) # E: Cannot determine type of "b" # N: Revealed type is "Any" +reveal_type(c) # E: Cannot determine type of "c" # N: Revealed type is "Any" +reveal_type(d) # E: Cannot determine type of "d" # N: Revealed type is "Any" +reveal_type(e) # E: Cannot determine type of "e" # N: Revealed type is "Any" +reveal_type(f) # E: Cannot determine type of "f" # N: Revealed type is "Any" +reveal_type(g) # E: Cannot determine type of "g" # N: Revealed type is "Any" +reveal_type(h) # E: Cannot determine type of "h" # N: Revealed type is "Any" +[out] + +[case testInferringLvarTypesUnpackedFromIterableClassObject] +from typing import Iterator, Type, TypeVar, Union, overload +class Meta(type): + def __iter__(cls) -> Iterator[int]: + yield from [1, 2, 3] + +class Meta2(type): + def __iter__(cls) -> Iterator[str]: + yield from ["foo", "bar", "baz"] + +class Meta3(type): ... + +class Foo(metaclass=Meta): ... +class Bar(metaclass=Meta2): ... +class Baz(metaclass=Meta3): ... +class Spam: ... + +class Eggs(metaclass=Meta): + @overload + def __init__(self, x: int) -> None: ... 
+ @overload + def __init__(self, x: int, y: int, z: int) -> None: ... + def __init__(self, x: int, y: int = ..., z: int = ...) -> None: ... + +A: Type[Foo] = Foo +B: Type[Union[Foo, Bar]] = Foo +C: Union[Type[Foo], Type[Bar]] = Foo +D: Type[Union[Foo, Baz]] = Foo +E: Type[Union[Foo, Spam]] = Foo +F: Type[Eggs] = Eggs +G: Type[Union[Foo, Eggs]] = Foo + +a, b, c = Foo +d, e, f = A +g, h, i = B +j, k, l = C +m, n, o = D # E: "Type[Baz]" object is not iterable +p, q, r = E # E: "Type[Spam]" object is not iterable +s, t, u = Eggs +v, w, x = F +y, z, aa = G + +for var in [a, b, c, d, e, f, s, t, u, v, w, x, y, z, aa]: + reveal_type(var) # N: Revealed type is "builtins.int" + +for var2 in [g, h, i, j, k, l]: + reveal_type(var2) # N: Revealed type is "Union[builtins.int, builtins.str]" + +for var3 in [m, n, o, p, q, r]: + reveal_type(var3) # N: Revealed type is "Union[builtins.int, Any]" + +T = TypeVar("T", bound=Type[Foo]) + +def check(x: T) -> T: + a, b, c = x + for var in [a, b, c]: + reveal_type(var) # N: Revealed type is "builtins.int" + return x + +T2 = TypeVar("T2", bound=Type[Union[Foo, Bar]]) + +def check2(x: T2) -> T2: + a, b, c = x + for var in [a, b, c]: + reveal_type(var) # N: Revealed type is "Union[builtins.int, builtins.str]" + return x + +T3 = TypeVar("T3", bound=Union[Type[Foo], Type[Bar]]) + +def check3(x: T3) -> T3: + a, b, c = x + for var in [a, b, c]: + reveal_type(var) # N: Revealed type is "Union[builtins.int, builtins.str]" + return x +[out] + +[case testInferringLvarTypesUnpackedFromIterableClassObjectWithGenericIter] +from typing import Iterator, Type, TypeVar + +T = TypeVar("T") +class Meta(type): + def __iter__(self: Type[T]) -> Iterator[T]: ... +class Foo(metaclass=Meta): ... + +A, B, C = Foo +reveal_type(A) # N: Revealed type is "__main__.Foo" +reveal_type(B) # N: Revealed type is "__main__.Foo" +reveal_type(C) # N: Revealed type is "__main__.Foo" +[out] + [case testInferringLvarTypesInMultiDefWithInvalidTuple] from typing import Tuple t = None # type: Tuple[object, object, object] @@ -1557,7 +1671,9 @@ a() # E: "Dict[str, int]" not callable [case testInferDictInitializedToEmptyUsingUpdateError] a = {} # E: Need type annotation for "a" (hint: "a: Dict[, ] = ...") -a.update([1, 2]) # E: Argument 1 to "update" of "dict" has incompatible type "List[int]"; expected "Mapping[Any, Any]" +a.update([1, 2]) # E: Argument 1 to "update" of "dict" has incompatible type "List[int]"; expected "SupportsKeysAndGetItem[Any, Any]" \ + # N: "list" is missing following "SupportsKeysAndGetItem" protocol member: \ + # N: keys a() # E: "Dict[Any, Any]" not callable [builtins fixtures/dict.pyi] diff --git a/test-data/unit/check-inline-config.test b/test-data/unit/check-inline-config.test index ee847deabd40..db04536dd4f9 100644 --- a/test-data/unit/check-inline-config.test +++ b/test-data/unit/check-inline-config.test @@ -211,6 +211,111 @@ enable_error_code = ignore-without-code, truthy-bool \[mypy-tests.*] disable_error_code = ignore-without-code +[case testIgnoreErrorsSimple] +# mypy: ignore-errors=True + +def f() -> None: + while 1(): + pass + +[case testIgnoreErrorsInImportedModule] +from m import C +c = C() +reveal_type(c.x) # N: Revealed type is "builtins.int" + +[file m.py] +# mypy: ignore-errors=True + +class C: + def f(self) -> None: + self.x = 1 + +[case testIgnoreErrorsWithLambda] +# mypy: ignore-errors=True + +def f(self, x=lambda: 1) -> None: + pass + +class C: + def f(self) -> None: + l = lambda: 1 + self.x = 1 + +[case testIgnoreErrorsWithUnsafeSuperCall_no_empty] +# flags: 
--strict-optional + +from m import C + +class D(C): + def m(self) -> None: + super().m1() + super().m2() \ + # E: Call to abstract method "m2" of "C" with trivial body via super() is unsafe + super().m3() \ + # E: Call to abstract method "m3" of "C" with trivial body via super() is unsafe + super().m4() \ + # E: Call to abstract method "m4" of "C" with trivial body via super() is unsafe + super().m5() \ + # E: Call to abstract method "m5" of "C" with trivial body via super() is unsafe + super().m6() \ + # E: Call to abstract method "m6" of "C" with trivial body via super() is unsafe + super().m7() + + def m1(self) -> int: + return 0 + + def m2(self) -> int: + return 0 + + def m3(self) -> int: + return 0 + + def m4(self) -> int: + return 0 + + def m5(self) -> int: + return 0 + + def m6(self) -> int: + return 0 + +[file m.py] +# mypy: ignore-errors=True +import abc + +class C: + @abc.abstractmethod + def m1(self) -> int: + """x""" + return 0 + + @abc.abstractmethod + def m2(self) -> int: + """doc""" + + @abc.abstractmethod + def m3(self) -> int: + pass + + @abc.abstractmethod + def m4(self) -> int: ... + + @abc.abstractmethod + def m5(self) -> int: + """doc""" + ... + + @abc.abstractmethod + def m6(self) -> int: + raise NotImplementedError() + + @abc.abstractmethod + def m7(self) -> int: + raise NotImplementedError() + pass + +[builtins fixtures/exception.pyi] + [case testInlineErrorCodesMultipleCodes] # mypy: disable-error-code="truthy-bool, ignore-without-code" class Foo: diff --git a/test-data/unit/check-isinstance.test b/test-data/unit/check-isinstance.test index 0722ee8d91e5..2d010b8ba38d 100644 --- a/test-data/unit/check-isinstance.test +++ b/test-data/unit/check-isinstance.test @@ -2670,10 +2670,9 @@ if type(x) == int == str: else: reveal_type(x) # N: Revealed type is "Union[builtins.int, builtins.str]" -# mypy shows an error about "Unsupported left operand type for !=" if we don't include this -[builtins fixtures/typing-medium.pyi] # mypy thinks int isn't defined unless we include this [builtins fixtures/primitives.pyi] + [case testTypeNotEqualsCheck] from typing import Union @@ -2683,8 +2682,6 @@ if type(x) != int: else: reveal_type(x) # N: Revealed type is "builtins.int" -# mypy shows an error about "Unsupported left operand type for !=" if we don't include this -[builtins fixtures/typing-medium.pyi] # mypy thinks int isn't defined unless we include this [builtins fixtures/primitives.pyi] diff --git a/test-data/unit/check-kwargs.test b/test-data/unit/check-kwargs.test index e0fe389bbbd9..81fdc444aced 100644 --- a/test-data/unit/check-kwargs.test +++ b/test-data/unit/check-kwargs.test @@ -499,7 +499,7 @@ g(**{}) [case testKeywordUnpackWithDifferentTypes] # https://github.com/python/mypy/issues/11144 -from typing import Dict, Generic, TypeVar, Mapping +from typing import Dict, Generic, TypeVar, Mapping, Iterable T = TypeVar("T") T2 = TypeVar("T2") @@ -516,6 +516,12 @@ class C(Generic[T, T2]): class D: ... +class E: + def keys(self) -> Iterable[str]: + ... + def __getitem__(self, key: str) -> float: + ... + def foo(**i: float) -> float: ... 
@@ -523,7 +529,8 @@ a: A[str, str] b: B[str, str] c: C[str, float] d: D -e = {"a": "b"} +e: E +f = {"a": "b"} foo(k=1.5) foo(**a) @@ -531,6 +538,7 @@ foo(**b) foo(**c) foo(**d) foo(**e) +foo(**f) # Correct: @@ -544,9 +552,9 @@ foo(**good1) foo(**good2) foo(**good3) [out] -main:29: error: Argument 1 to "foo" has incompatible type "**A[str, str]"; expected "float" -main:30: error: Argument 1 to "foo" has incompatible type "**B[str, str]"; expected "float" -main:31: error: Argument after ** must be a mapping, not "C[str, float]" -main:32: error: Argument after ** must be a mapping, not "D" -main:33: error: Argument 1 to "foo" has incompatible type "**Dict[str, str]"; expected "float" +main:36: error: Argument 1 to "foo" has incompatible type "**A[str, str]"; expected "float" +main:37: error: Argument 1 to "foo" has incompatible type "**B[str, str]"; expected "float" +main:38: error: Argument after ** must be a mapping, not "C[str, float]" +main:39: error: Argument after ** must be a mapping, not "D" +main:41: error: Argument 1 to "foo" has incompatible type "**Dict[str, str]"; expected "float" [builtins fixtures/dict.pyi] diff --git a/test-data/unit/check-lowercase.test b/test-data/unit/check-lowercase.test new file mode 100644 index 000000000000..d1ebbdd282fa --- /dev/null +++ b/test-data/unit/check-lowercase.test @@ -0,0 +1,51 @@ + +[case testTupleLowercaseSettingOff] +# flags: --python-version 3.9 --force-uppercase-builtins +x = (3,) +x = 3 # E: Incompatible types in assignment (expression has type "int", variable has type "Tuple[int]") +[builtins fixtures/tuple.pyi] + +[case testTupleLowercaseSettingOn] +# flags: --python-version 3.9 --no-force-uppercase-builtins +x = (3,) +x = 3 # E: Incompatible types in assignment (expression has type "int", variable has type "tuple[int]") +[builtins fixtures/tuple.pyi] + +[case testListLowercaseSettingOff] +# flags: --python-version 3.9 --force-uppercase-builtins +x = [3] +x = 3 # E: Incompatible types in assignment (expression has type "int", variable has type "List[int]") + +[case testListLowercaseSettingOn] +# flags: --python-version 3.9 --no-force-uppercase-builtins +x = [3] +x = 3 # E: Incompatible types in assignment (expression has type "int", variable has type "list[int]") + +[case testDictLowercaseSettingOff] +# flags: --python-version 3.9 --force-uppercase-builtins +x = {"key": "value"} +x = 3 # E: Incompatible types in assignment (expression has type "int", variable has type "Dict[str, str]") + +[case testDictLowercaseSettingOn] +# flags: --python-version 3.9 --no-force-uppercase-builtins +x = {"key": "value"} +x = 3 # E: Incompatible types in assignment (expression has type "int", variable has type "dict[str, str]") + +[case testSetLowercaseSettingOff] +# flags: --python-version 3.9 --force-uppercase-builtins +x = {3} +x = 3 # E: Incompatible types in assignment (expression has type "int", variable has type "Set[int]") +[builtins fixtures/set.pyi] + +[case testSetLowercaseSettingOn] +# flags: --python-version 3.9 --no-force-uppercase-builtins +x = {3} +x = 3 # E: Incompatible types in assignment (expression has type "int", variable has type "set[int]") +[builtins fixtures/set.pyi] + +[case testTypeLowercaseSettingOff] +# flags: --python-version 3.9 --no-force-uppercase-builtins +x: type[type] +y: int + +y = x # E: Incompatible types in assignment (expression has type "type[type]", variable has type "int") diff --git a/test-data/unit/check-modules.test b/test-data/unit/check-modules.test index 4b8308310ae6..d02dcdc7eb99 100644 --- 
a/test-data/unit/check-modules.test +++ b/test-data/unit/check-modules.test @@ -39,7 +39,7 @@ try: pass except m.Err: pass -except m.Bad: # E: Exception type must be derived from BaseException +except m.Bad: # E: Exception type must be derived from BaseException (or be a tuple of exception classes) pass [file m.py] class Err(BaseException): pass @@ -53,7 +53,7 @@ try: pass except Err: pass -except Bad: # E: Exception type must be derived from BaseException +except Bad: # E: Exception type must be derived from BaseException (or be a tuple of exception classes) pass [file m.py] class Err(BaseException): pass @@ -3216,7 +3216,6 @@ class Bar(Foo): from a import Foo class Bar(Foo): def frobnicate(self, *args) -> None: pass # type: ignore[override] # I know -[builtins fixtures/tuple.pyi] [builtins fixtures/dict.pyi] [out1] tmp/b.py:3: error: Signature of "frobnicate" incompatible with supertype "Foo" diff --git a/test-data/unit/check-multiple-inheritance.test b/test-data/unit/check-multiple-inheritance.test index a8d053f9504e..d03f2e35e1c4 100644 --- a/test-data/unit/check-multiple-inheritance.test +++ b/test-data/unit/check-multiple-inheritance.test @@ -668,3 +668,41 @@ class D1(B[str], C1): ... class D2(B[Union[int, str]], C2): ... class D3(C2, B[str]): ... class D4(B[str], C2): ... # E: Definition of "foo" in base class "A" is incompatible with definition in base class "C2" + + +[case testMultipleInheritanceOverridingOfFunctionsWithCallableInstances] +from typing import Any, Callable + +def dec1(f: Callable[[Any, int], None]) -> Callable[[Any, int], None]: ... + +class F: + def __call__(self, x: int) -> None: ... + +def dec2(f: Callable[[Any, int], None]) -> F: ... + +class B1: + def f(self, x: int) -> None: ... + +class B2: + @dec1 + def f(self, x: int) -> None: ... + +class B3: + @dec2 + def f(self, x: int) -> None: ... + +class B4: + f = F() + +class C12(B1, B2): ... +class C13(B1, B3): ... # E: Definition of "f" in base class "B1" is incompatible with definition in base class "B3" +class C14(B1, B4): ... # E: Definition of "f" in base class "B1" is incompatible with definition in base class "B4" +class C21(B2, B1): ... +class C23(B2, B3): ... # E: Definition of "f" in base class "B2" is incompatible with definition in base class "B3" +class C24(B2, B4): ... # E: Definition of "f" in base class "B2" is incompatible with definition in base class "B4" +class C31(B3, B1): ... +class C32(B3, B2): ... +class C34(B3, B4): ... +class C41(B4, B1): ... +class C42(B4, B2): ... +class C43(B4, B3): ... diff --git a/test-data/unit/check-narrowing.test b/test-data/unit/check-narrowing.test index f05e2aaf5c19..c329ccf840a8 100644 --- a/test-data/unit/check-narrowing.test +++ b/test-data/unit/check-narrowing.test @@ -703,47 +703,47 @@ class FlipFlopStr: def mutate(self) -> None: self.state = "state-2" if self.state == "state-1" else "state-1" -def test1(switch: FlipFlopEnum) -> None: + +def test1(switch: FlipFlopStr) -> None: # Naively, we might assume the 'assert' here would narrow the type to - # Literal[State.A]. However, doing this ends up breaking a fair number of real-world + # Literal["state-1"]. However, doing this ends up breaking a fair number of real-world # code (usually test cases) that looks similar to this function: e.g. checks # to make sure a field was mutated to some particular value. # # And since mypy can't really reason about state mutation, we take a conservative # approach and avoid narrowing anything here. 
- assert switch.state == State.A - reveal_type(switch.state) # N: Revealed type is "__main__.State" + assert switch.state == "state-1" + reveal_type(switch.state) # N: Revealed type is "builtins.str" switch.mutate() - assert switch.state == State.B - reveal_type(switch.state) # N: Revealed type is "__main__.State" + assert switch.state == "state-2" + reveal_type(switch.state) # N: Revealed type is "builtins.str" def test2(switch: FlipFlopEnum) -> None: - # So strictly speaking, we ought to do the same thing with 'is' comparisons - # for the same reasons as above. But in practice, not too many people seem to - # know that doing 'some_enum is MyEnum.Value' is idiomatic. So in practice, - # this is probably good enough for now. + # This is the same thing as 'test1', except we use enums, which we allow to be narrowed + # to literals. - assert switch.state is State.A + assert switch.state == State.A reveal_type(switch.state) # N: Revealed type is "Literal[__main__.State.A]" switch.mutate() - assert switch.state is State.B # E: Non-overlapping identity check (left operand type: "Literal[State.A]", right operand type: "Literal[State.B]") + assert switch.state == State.B # E: Non-overlapping equality check (left operand type: "Literal[State.A]", right operand type: "Literal[State.B]") reveal_type(switch.state) # E: Statement is unreachable -def test3(switch: FlipFlopStr) -> None: - # This is the same thing as 'test1', except we try using str literals. +def test3(switch: FlipFlopEnum) -> None: + # Same thing, but using 'is' comparisons. Previously mypy's behaviour differed + # here, narrowing when using 'is', but not when using '=='. - assert switch.state == "state-1" - reveal_type(switch.state) # N: Revealed type is "builtins.str" + assert switch.state is State.A + reveal_type(switch.state) # N: Revealed type is "Literal[__main__.State.A]" switch.mutate() - assert switch.state == "state-2" - reveal_type(switch.state) # N: Revealed type is "builtins.str" + assert switch.state is State.B # E: Non-overlapping identity check (left operand type: "Literal[State.A]", right operand type: "Literal[State.B]") + reveal_type(switch.state) # E: Statement is unreachable [builtins fixtures/primitives.pyi] [case testNarrowingEqualityRequiresExplicitStrLiteral] @@ -795,6 +795,7 @@ reveal_type(x_union) # N: Revealed type is "Union[Literal['A'], Literal['B' [case testNarrowingEqualityRequiresExplicitEnumLiteral] # flags: --strict-optional +from typing import Union from typing_extensions import Literal, Final from enum import Enum @@ -805,19 +806,19 @@ class Foo(Enum): A_final: Final = Foo.A A_literal: Literal[Foo.A] -# See comments in testNarrowingEqualityRequiresExplicitStrLiteral and -# testNarrowingEqualityFlipFlop for more on why we can't narrow here. +# Note this is unlike testNarrowingEqualityRequiresExplicitStrLiteral +# See also testNarrowingEqualityFlipFlop x1: Foo if x1 == Foo.A: - reveal_type(x1) # N: Revealed type is "__main__.Foo" + reveal_type(x1) # N: Revealed type is "Literal[__main__.Foo.A]" else: - reveal_type(x1) # N: Revealed type is "__main__.Foo" + reveal_type(x1) # N: Revealed type is "Literal[__main__.Foo.B]" x2: Foo if x2 == A_final: - reveal_type(x2) # N: Revealed type is "__main__.Foo" + reveal_type(x2) # N: Revealed type is "Literal[__main__.Foo.A]" else: - reveal_type(x2) # N: Revealed type is "__main__.Foo" + reveal_type(x2) # N: Revealed type is "Literal[__main__.Foo.B]" # But we let this narrow since there's an explicit literal in the RHS. 
x3: Foo @@ -825,6 +826,14 @@ if x3 == A_literal: reveal_type(x3) # N: Revealed type is "Literal[__main__.Foo.A]" else: reveal_type(x3) # N: Revealed type is "Literal[__main__.Foo.B]" + + +class SingletonFoo(Enum): + A = "A" + +def bar(x: Union[SingletonFoo, Foo], y: SingletonFoo) -> None: + if x == y: + reveal_type(x) # N: Revealed type is "Literal[__main__.SingletonFoo.A]" [builtins fixtures/primitives.pyi] [case testNarrowingEqualityDisabledForCustomEquality] diff --git a/test-data/unit/check-optional.test b/test-data/unit/check-optional.test index 754c6b52ff19..ae247b0047f1 100644 --- a/test-data/unit/check-optional.test +++ b/test-data/unit/check-optional.test @@ -1040,3 +1040,306 @@ x: Optional[List[int]] if 3 in x: pass +[case testNarrowedVariableInNestedFunctionBasic] +from typing import Optional + +def can_narrow(x: Optional[str]) -> None: + if x is None: + x = "a" + def nested() -> str: + return reveal_type(x) # N: Revealed type is "builtins.str" + nested() + +def foo(a): pass + +class C: + def can_narrow_in_method(self, x: Optional[str]) -> None: + if x is None: + x = "a" + def nested() -> str: + return reveal_type(x) # N: Revealed type is "builtins.str" + # Reading the variable is fine + y = x + with foo(x): + foo(x) + for a in foo(x): + foo(x) + nested() + +def can_narrow_lambda(x: Optional[str]) -> None: + if x is None: + x = "a" + nested = lambda: x + reveal_type(nested()) # N: Revealed type is "builtins.str" + +def cannot_narrow_if_reassigned(x: Optional[str]) -> None: + if x is None: + x = "a" + def nested() -> str: + return x # E: Incompatible return value type (got "Optional[str]", expected "str") + if int(): + x = None + nested() + +x: Optional[str] = "x" + +def narrow_global_in_func() -> None: + global x + if x is None: + x = "a" + def nested() -> str: + # This should perhaps not be narrowed, since the nested function could outlive + # the outer function, and since other functions could also assign to x, but + # this seems like a minor issue. + return x + nested() + +x = "y" + +def narrowing_global_at_top_level_not_propagated() -> str: + def nested() -> str: + return x # E: Incompatible return value type (got "Optional[str]", expected "str") + return x # E: Incompatible return value type (got "Optional[str]", expected "str") + +[case testNarrowedVariableInNestedFunctionMore1] +from typing import Optional, overload + +class C: + a: Optional[str] + +def attribute_narrowing(c: C) -> None: + # This case is not supported, since we can't keep track of assignments to attributes. + c.a = "x" + def nested() -> str: + return c.a # E: Incompatible return value type (got "Optional[str]", expected "str") + nested() + +def assignment_in_for(x: Optional[str]) -> None: + if x is None: + x = "e" + def nested() -> str: + return x # E: Incompatible return value type (got "Optional[str]", expected "str") + for x in ["x"]: + pass + +def foo(): pass + +def assignment_in_with(x: Optional[str]) -> None: + if x is None: + x = "e" + def nested() -> str: + return x # E: Incompatible return value type (got "Optional[str]", expected "str") + with foo() as x: + pass + +g: Optional[str] + +def assign_to_global() -> None: + global g + g = "x" + # This is unsafe, but we don't generate an error, for convenience. Besides, + # this is probably a very rare case. 
+ def nested() -> str: + return g + +def assign_to_nonlocal(x: Optional[str]) -> None: + def nested() -> str: + nonlocal x + + if x is None: + x = "a" + + def nested2() -> str: + return x # E: Incompatible return value type (got "Optional[str]", expected "str") + + return nested2() + nested() + x = None + +def dec(f): + return f + +@dec +def decorated_outer(x: Optional[str]) -> None: + if x is None: + x = "a" + def nested() -> str: + return x + nested() + +@dec +def decorated_outer_bad(x: Optional[str]) -> None: + if x is None: + x = "a" + def nested() -> str: + return x # E: Incompatible return value type (got "Optional[str]", expected "str") + x = None + nested() + +def decorated_inner(x: Optional[str]) -> None: + if x is None: + x = "a" + @dec + def nested() -> str: + return x + nested() + +def decorated_inner_bad(x: Optional[str]) -> None: + if x is None: + x = "a" + @dec + def nested() -> str: + return x # E: Incompatible return value type (got "Optional[str]", expected "str") + x = None + nested() + +@overload +def overloaded_outer(x: None) -> None: ... +@overload +def overloaded_outer(x: str) -> None: ... +def overloaded_outer(x: Optional[str]) -> None: + if x is None: + x = "a" + def nested() -> str: + return x + nested() + +@overload +def overloaded_outer_bad(x: None) -> None: ... +@overload +def overloaded_outer_bad(x: str) -> None: ... +def overloaded_outer_bad(x: Optional[str]) -> None: + if x is None: + x = "a" + def nested() -> str: + return x # E: Incompatible return value type (got "Optional[str]", expected "str") + x = None + nested() + +[case testNarrowedVariableInNestedFunctionMore2] +from typing import Optional + +def narrow_multiple(x: Optional[str], y: Optional[int]) -> None: + z: Optional[str] = x + if x is None: + x = "" + if y is None: + y = 1 + if int(): + if z is None: + z = "" + def nested() -> None: + a: str = x + b: int = y + c: str = z + nested() + +def narrow_multiple_partial(x: Optional[str], y: Optional[int]) -> None: + z: Optional[str] = x + if x is None: + x = "" + if isinstance(y, int): + if z is None: + z = "" + def nested() -> None: + a: str = x + b: int = y + c: str = z # E: Incompatible types in assignment (expression has type "Optional[str]", variable has type "str") + z = None + nested() + +def multiple_nested_functions(x: Optional[str], y: Optional[str]) -> None: + if x is None: + x = "" + def nested1() -> str: + return x + if y is None: + y = "" + def nested2() -> str: + a: str = y + return x + +class C: + a: str + def __setitem__(self, key, value): pass + +def narrowed_variable_used_in_lvalue_but_not_assigned(c: Optional[C]) -> None: + if c is None: + c = C() + def nested() -> C: + return c + c.a = "x" + c[1] = 2 + cc = C() + cc[c] = 3 + nested() + +def narrow_with_multi_lvalues_1(x: Optional[str]) -> None: + if x is None: + x = "" + + def nested() -> str: + return x + + y = z = None + +def narrow_with_multi_lvalue_2(x: Optional[str]) -> None: + if x is None: + x = "" + + def nested() -> str: + return x # E: Incompatible return value type (got "Optional[str]", expected "str") + + x = y = None + +def narrow_with_multi_lvalue_3(x: Optional[str]) -> None: + if x is None: + x = "" + + def nested() -> str: + return x # E: Incompatible return value type (got "Optional[str]", expected "str") + + y = x = None + +def narrow_with_multi_assign_1(x: Optional[str]) -> None: + if x is None: + x = "" + + def nested() -> str: + return x + + y, z = None, None + +def narrow_with_multi_assign_2(x: Optional[str]) -> None: + if x is None: + x = "" + + def nested() 
-> str: + return x # E: Incompatible return value type (got "Optional[str]", expected "str") + + x, y = None, None + +def narrow_with_multi_assign_3(x: Optional[str]) -> None: + if x is None: + x = "" + + def nested() -> str: + return x # E: Incompatible return value type (got "Optional[str]", expected "str") + + y, x = None, None + +[builtins fixtures/isinstance.pyi] + +[case testNestedFunctionSpecialCase] +class C: + def __enter__(self, *args): ... + def __exit__(self, *args) -> bool: ... + +def f(x: object) -> None: + if x is not None: + pass + + def nested() -> None: + with C(): + pass +[builtins fixtures/tuple.pyi] diff --git a/test-data/unit/check-parameter-specification.test b/test-data/unit/check-parameter-specification.test index 56fc3b6faa14..fe66b18fbfea 100644 --- a/test-data/unit/check-parameter-specification.test +++ b/test-data/unit/check-parameter-specification.test @@ -539,7 +539,6 @@ def three(**kwargs: int) -> int: ... @expects_int_first # Accepted def four(*args: int) -> int: ... -[builtins fixtures/tuple.pyi] [builtins fixtures/dict.pyi] [case testParamSpecTwiceSolving] @@ -1471,3 +1470,53 @@ def test(f: Concat[T, ...]) -> None: ... class Defer: ... [builtins fixtures/paramspec.pyi] + +[case testNoParamSpecDoubling] +# https://github.com/python/mypy/issues/12734 +from typing import Callable, ParamSpec +from typing_extensions import Concatenate + +P = ParamSpec("P") +Q = ParamSpec("Q") + +def foo(f: Callable[P, int]) -> Callable[P, int]: + return f + +def bar(f: Callable[Concatenate[str, Q], int]) -> Callable[Concatenate[str, Q], int]: + return foo(f) +[builtins fixtures/paramspec.pyi] + +[case testAlreadyExpandedCallableWithParamSpecReplacement] +from typing import Callable, Any, overload +from typing_extensions import Concatenate, ParamSpec + +P = ParamSpec("P") + +@overload +def command() -> Callable[[Callable[Concatenate[object, object, P], object]], None]: # E: Overloaded function signatures 1 and 2 overlap with incompatible return types + ... + +@overload +def command( + cls: int = ..., +) -> Callable[[Callable[Concatenate[object, P], object]], None]: + ... + +def command( + cls: int = 42, +) -> Any: + ... +[builtins fixtures/paramspec.pyi] + +[case testCopiedParamSpecComparison] +# minimized from https://github.com/python/mypy/issues/12909 +from typing import Callable +from typing_extensions import ParamSpec + +P = ParamSpec("P") + +def identity(func: Callable[P, None]) -> Callable[P, None]: ... + +@identity +def f(f: Callable[P, None], *args: P.args, **kwargs: P.kwargs) -> None: ... 
+[builtins fixtures/paramspec.pyi] diff --git a/test-data/unit/check-attr.test b/test-data/unit/check-plugin-attrs.test similarity index 84% rename from test-data/unit/check-attr.test rename to test-data/unit/check-plugin-attrs.test index f6ef289e792e..ce1d670431c7 100644 --- a/test-data/unit/check-attr.test +++ b/test-data/unit/check-plugin-attrs.test @@ -210,7 +210,7 @@ A(1) != 1 1 >= A(1) # E: Unsupported operand types for <= ("A" and "int") 1 == A(1) 1 != A(1) -[builtins fixtures/attr.pyi] +[builtins fixtures/plugin_attrs.pyi] [case testAttrsEqFalse] from attr import attrib, attrs @@ -241,7 +241,7 @@ A(1) != 1 1 >= A(1) # E: Unsupported left operand type for >= ("int") 1 == A(1) 1 != A(1) -[builtins fixtures/attr.pyi] +[builtins fixtures/plugin_attrs.pyi] [case testAttrsOrderFalse] from attr import attrib, attrs @@ -270,7 +270,7 @@ A(1) != 1 1 >= A(1) # E: Unsupported left operand type for >= ("int") 1 == A(1) 1 != A(1) -[builtins fixtures/attr.pyi] +[builtins fixtures/plugin_attrs.pyi] [case testAttrsCmpEqOrderValues] from attr import attrib, attrs @@ -289,7 +289,7 @@ class Mixed: @attrs(order=True, eq=False) # E: eq must be True if order is True class Confused: ... -[builtins fixtures/attr.pyi] +[builtins fixtures/plugin_attrs.pyi] [case testAttrsInheritance] @@ -469,6 +469,56 @@ A([1], '2') # E: Cannot infer type argument 1 of "A" [builtins fixtures/list.pyi] +[case testAttrsGenericWithConverter] +from typing import TypeVar, Generic, List, Iterable, Iterator, Callable +import attr +T = TypeVar('T') + +def int_gen() -> Iterator[int]: + yield 1 + +def list_converter(x: Iterable[T]) -> List[T]: + return list(x) + +@attr.s(auto_attribs=True) +class A(Generic[T]): + x: List[T] = attr.ib(converter=list_converter) + y: T = attr.ib() + def foo(self) -> List[T]: + return [self.y] + def bar(self) -> T: + return self.x[0] + def problem(self) -> T: + return self.x # E: Incompatible return value type (got "List[T]", expected "T") +reveal_type(A) # N: Revealed type is "def [T] (x: typing.Iterable[T`1], y: T`1) -> __main__.A[T`1]" +a1 = A([1], 2) +reveal_type(a1) # N: Revealed type is "__main__.A[builtins.int]" +reveal_type(a1.x) # N: Revealed type is "builtins.list[builtins.int]" +reveal_type(a1.y) # N: Revealed type is "builtins.int" + +a2 = A(int_gen(), 2) +reveal_type(a2) # N: Revealed type is "__main__.A[builtins.int]" +reveal_type(a2.x) # N: Revealed type is "builtins.list[builtins.int]" +reveal_type(a2.y) # N: Revealed type is "builtins.int" + + +def get_int() -> int: + return 1 + +class Other(Generic[T]): + def __init__(self, x: T) -> None: + pass + +@attr.s(auto_attribs=True) +class B(Generic[T]): + x: Other[Callable[..., T]] = attr.ib(converter=Other[Callable[..., T]]) + +b1 = B(get_int) +reveal_type(b1) # N: Revealed type is "__main__.B[builtins.int]" +reveal_type(b1.x) # N: Revealed type is "__main__.Other[def (*Any, **Any) -> builtins.int]" + +[builtins fixtures/list.pyi] + [case testAttrsUntypedGenericInheritance] from typing import Generic, TypeVar @@ -919,7 +969,7 @@ class C: o = C("1", "2", "3") o = C(1, 2, "3") -[builtins fixtures/attr.pyi] +[builtins fixtures/plugin_attrs.pyi] [case testAttrsCmpWithSubclasses] import attr @@ -1158,7 +1208,7 @@ class A: A(None, None) -[builtins fixtures/attr.pyi] +[builtins fixtures/plugin_attrs.pyi] [case testAttrsOptionalConverterNewPackage] # flags: --strict-optional @@ -1178,7 +1228,7 @@ class A: A(None, None) -[builtins fixtures/attr.pyi] +[builtins fixtures/plugin_attrs.pyi] [case testAttrsTypeVarNoCollision] @@ -1191,7 +1241,7 @@ T = 
TypeVar("T", bytes, str) @attr.s(auto_attribs=True) class A(Generic[T]): v: T -[builtins fixtures/attr.pyi] +[builtins fixtures/plugin_attrs.pyi] [case testAttrsKwOnlyAttrib] import attr @@ -1201,7 +1251,7 @@ class A: A() # E: Missing named argument "a" for "A" A(15) # E: Too many positional arguments for "A" A(a=15) -[builtins fixtures/attr.pyi] +[builtins fixtures/plugin_attrs.pyi] [case testAttrsKwOnlyClass] import attr @@ -1211,7 +1261,7 @@ class A: b: bool A() # E: Missing named argument "a" for "A" # E: Missing named argument "b" for "A" A(b=True, a=15) -[builtins fixtures/attr.pyi] +[builtins fixtures/plugin_attrs.pyi] [case testAttrsKwOnlyClassNoInit] import attr @@ -1220,7 +1270,7 @@ class B: a = attr.ib(init=False) b = attr.ib() B(b=True) -[builtins fixtures/attr.pyi] +[builtins fixtures/plugin_attrs.pyi] [case testAttrsKwOnlyWithDefault] import attr @@ -1230,7 +1280,7 @@ class C: b = attr.ib(kw_only=True) c = attr.ib(16, kw_only=True) C(b=17) -[builtins fixtures/attr.pyi] +[builtins fixtures/plugin_attrs.pyi] [case testAttrsKwOnlyClassWithMixedDefaults] import attr @@ -1240,7 +1290,7 @@ class D: b = attr.ib() c = attr.ib(15) D(b=17) -[builtins fixtures/attr.pyi] +[builtins fixtures/plugin_attrs.pyi] [case testAttrsKwOnlySubclass] @@ -1252,7 +1302,7 @@ class A2: class B2(A2): b = attr.ib(kw_only=True) B2(b=1) -[builtins fixtures/attr.pyi] +[builtins fixtures/plugin_attrs.pyi] [case testAttrsNonKwOnlyAfterKwOnly] import attr @@ -1267,7 +1317,7 @@ class C: a = attr.ib(kw_only=True) b = attr.ib(15) -[builtins fixtures/attr.pyi] +[builtins fixtures/plugin_attrs.pyi] [case testAttrsDisallowUntypedWorksForward] # flags: --disallow-untyped-defs @@ -1441,7 +1491,7 @@ reveal_type(A.__attrs_attrs__[0]) # N: Revealed type is "attr.Attribute[builtin reveal_type(A.__attrs_attrs__.b) # N: Revealed type is "attr.Attribute[builtins.int]" A.__attrs_attrs__.x # E: "____main___A_AttrsAttributes__" has no attribute "x" -[builtins fixtures/attr.pyi] +[builtins fixtures/plugin_attrs.pyi] [case testAttrsBareClassHasMagicAttribute] import attr @@ -1456,7 +1506,7 @@ reveal_type(A.__attrs_attrs__[0]) # N: Revealed type is "attr.Attribute[Any]" reveal_type(A.__attrs_attrs__.b) # N: Revealed type is "attr.Attribute[Any]" A.__attrs_attrs__.x # E: "____main___A_AttrsAttributes__" has no attribute "x" -[builtins fixtures/attr.pyi] +[builtins fixtures/plugin_attrs.pyi] [case testAttrsNGClassHasMagicAttribute] import attr @@ -1471,7 +1521,7 @@ reveal_type(A.__attrs_attrs__[0]) # N: Revealed type is "attr.Attribute[builtin reveal_type(A.__attrs_attrs__.b) # N: Revealed type is "attr.Attribute[builtins.int]" A.__attrs_attrs__.x # E: "____main___A_AttrsAttributes__" has no attribute "x" -[builtins fixtures/attr.pyi] +[builtins fixtures/plugin_attrs.pyi] [case testAttrsMagicAttributeProtocol] import attr @@ -1496,7 +1546,7 @@ takes_attrs_instance(A(1, "")) takes_attrs_cls(A(1, "")) # E: Argument 1 to "takes_attrs_cls" has incompatible type "A"; expected "Type[AttrsInstance]" takes_attrs_instance(A) # E: Argument 1 to "takes_attrs_instance" has incompatible type "Type[A]"; expected "AttrsInstance" # N: ClassVar protocol member AttrsInstance.__attrs_attrs__ can never be matched by a class object -[builtins fixtures/attr.pyi] +[builtins fixtures/plugin_attrs.pyi] [case testAttrsInitMethodAlwaysGenerates] from typing import Tuple @@ -1514,7 +1564,7 @@ reveal_type(A) # N: Revealed type is "def (bc: Tuple[builtins.int, builtins.str reveal_type(A.__init__) # N: Revealed type is "def (self: __main__.A, bc: 
Tuple[builtins.int, builtins.str])" reveal_type(A.__attrs_init__) # N: Revealed type is "def (self: __main__.A, b: builtins.int, c: builtins.str)" -[builtins fixtures/attr.pyi] +[builtins fixtures/plugin_attrs.pyi] [case testAttrsClassWithSlots] import attr @@ -1544,7 +1594,7 @@ class C: def __attrs_post_init__(self) -> None: self.b = 1 # E: Trying to assign name "b" that is not in "__slots__" of type "__main__.C" self.c = 2 # E: Trying to assign name "c" that is not in "__slots__" of type "__main__.C" -[builtins fixtures/attr.pyi] +[builtins fixtures/plugin_attrs.pyi] [case testAttrsWithMatchArgs] # flags: --python-version 3.10 @@ -1560,7 +1610,7 @@ class ToMatch: reveal_type(ToMatch(x=1, y=2, z=3).__match_args__) # N: Revealed type is "Tuple[Literal['x']?, Literal['y']?]" reveal_type(ToMatch(1, 2, z=3).__match_args__) # N: Revealed type is "Tuple[Literal['x']?, Literal['y']?]" -[builtins fixtures/attr.pyi] +[builtins fixtures/plugin_attrs.pyi] [case testAttrsWithMatchArgsDefaultCase] # flags: --python-version 3.10 @@ -1581,7 +1631,7 @@ class ToMatch2: t2: ToMatch2 reveal_type(t2.__match_args__) # N: Revealed type is "Tuple[Literal['x']?, Literal['y']?]" -[builtins fixtures/attr.pyi] +[builtins fixtures/plugin_attrs.pyi] [case testAttrsWithMatchArgsOverrideExisting] # flags: --python-version 3.10 @@ -1604,7 +1654,7 @@ class WithoutMatch: y: int reveal_type(WithoutMatch(x=1, y=2).__match_args__) # N: Revealed type is "Tuple[Literal['a']?, Literal['b']?]" -[builtins fixtures/attr.pyi] +[builtins fixtures/plugin_attrs.pyi] [case testAttrsWithMatchArgsOldVersion] # flags: --python-version 3.9 @@ -1618,7 +1668,7 @@ n: NoMatchArgs reveal_type(n.__match_args__) # E: "NoMatchArgs" has no attribute "__match_args__" \ # N: Revealed type is "Any" -[builtins fixtures/attr.pyi] +[builtins fixtures/plugin_attrs.pyi] [case testAttrsMultipleInheritance] # flags: --python-version 3.10 @@ -1634,7 +1684,7 @@ class B: class AB(A, B): pass -[builtins fixtures/attr.pyi] +[builtins fixtures/plugin_attrs.pyi] [case testAttrsForwardReferenceInTypeVarBound] from typing import TypeVar, Generic @@ -1648,7 +1698,7 @@ class D(Generic[T]): class C: pass -[builtins fixtures/attr.pyi] +[builtins fixtures/plugin_attrs.pyi] [case testComplexTypeInAttrIb] import a @@ -1664,8 +1714,6 @@ class C: # Note that for this test, the 'Value of type "int" is not indexable' errors are silly, # and a consequence of Callable etc. being set to an int in the test stub. 
 b = attr.ib(type=Callable[[], C])
-[builtins fixtures/bool.pyi]
-
 [file b.py]
 import attr
 import a
@@ -1867,3 +1915,229 @@ D(1, "").a = 2  # E: Cannot assign to final attribute "a"
 D(1, "").b = "2"  # E: Cannot assign to final attribute "b"

 [builtins fixtures/property.pyi]
+
+[case testEvolve]
+import attr
+
+class Base:
+    pass
+
+class Derived(Base):
+    pass
+
+class Other:
+    pass
+
+@attr.s(auto_attribs=True)
+class C:
+    name: str
+    b: Base
+
+c = C(name='foo', b=Derived())
+c = attr.evolve(c)
+c = attr.evolve(c, name='foo')
+c = attr.evolve(c, 'foo')  # E: Too many positional arguments for "evolve" of "C"
+c = attr.evolve(c, b=Derived())
+c = attr.evolve(c, b=Base())
+c = attr.evolve(c, b=Other())  # E: Argument "b" to "evolve" of "C" has incompatible type "Other"; expected "Base"
+c = attr.evolve(c, name=42)  # E: Argument "name" to "evolve" of "C" has incompatible type "int"; expected "str"
+c = attr.evolve(c, foobar=42)  # E: Unexpected keyword argument "foobar" for "evolve" of "C"
+
+# test passing instance as 'inst' kw
+c = attr.evolve(inst=c, name='foo')
+c = attr.evolve(not_inst=c, name='foo')  # E: Missing positional argument "inst" in call to "evolve"
+
+# test determining type of first argument's expression from something that's not NameExpr
+def f() -> C:
+    return c
+
+c = attr.evolve(f(), name='foo')
+
+[builtins fixtures/plugin_attrs.pyi]
+
+[case testEvolveFromNonAttrs]
+import attr
+
+attr.evolve(42, name='foo')  # E: Argument 1 to "evolve" has incompatible type "int"; expected an attrs class
+attr.evolve(None, name='foo')  # E: Argument 1 to "evolve" has incompatible type "None"; expected an attrs class
+[case testEvolveFromAny]
+from typing import Any
+import attr
+
+any: Any = 42
+ret = attr.evolve(any, name='foo')
+reveal_type(ret)  # N: Revealed type is "Any"
+
+[typing fixtures/typing-medium.pyi]
+
+[case testEvolveGeneric]
+import attrs
+from typing import Generic, TypeVar
+
+T = TypeVar('T')
+
+@attrs.define
+class A(Generic[T]):
+    x: T
+
+
+a = A(x=42)
+reveal_type(a)  # N: Revealed type is "__main__.A[builtins.int]"
+a2 = attrs.evolve(a, x=42)
+reveal_type(a2)  # N: Revealed type is "__main__.A[builtins.int]"
+a2 = attrs.evolve(a, x='42')  # E: Argument "x" to "evolve" of "A[int]" has incompatible type "str"; expected "int"
+reveal_type(a2)  # N: Revealed type is "__main__.A[builtins.int]"
+
+[builtins fixtures/plugin_attrs.pyi]
+
+[case testEvolveUnion]
+# flags: --python-version 3.10
+from typing import Generic, TypeVar
+import attrs
+
+T = TypeVar('T')
+
+
+@attrs.define
+class A(Generic[T]):
+    x: T  # exercises meet(T=int, int) = int
+    y: bool  # exercises meet(bool, int) = bool
+    z: str  # exercises meet(str, bytes) = <nothing>
+    w: dict  # exercises meet(dict, <nothing>) = <nothing>
+
+
+@attrs.define
+class B:
+    x: int
+    y: bool
+    z: bytes
+
+
+a_or_b: A[int] | B
+a2 = attrs.evolve(a_or_b, x=42, y=True)
+a2 = attrs.evolve(a_or_b, x=42, y=True, z='42')  # E: Argument "z" to "evolve" of "Union[A[int], B]" has incompatible type "str"; expected <nothing>
+a2 = attrs.evolve(a_or_b, x=42, y=True, w={})  # E: Argument "w" to "evolve" of "Union[A[int], B]" has incompatible type "Dict[<nothing>, <nothing>]"; expected <nothing>
+
+[builtins fixtures/plugin_attrs.pyi]
+
+[case testEvolveUnionOfTypeVar]
+# flags: --python-version 3.10
+import attrs
+from typing import TypeVar
+
+@attrs.define
+class A:
+    x: int
+    y: int
+    z: str
+    w: dict
+
+
+class B:
+    pass
+
+TA = TypeVar('TA', bound=A)
+TB = TypeVar('TB', bound=B)
+
+def f(b_or_t: TA | TB | int) -> None:
+    a2 = attrs.evolve(b_or_t)  # E: Argument 1 to "evolve" has type "Union[TA, TB, int]" whose item
"TB" is not bound to an attrs class # E: Argument 1 to "evolve" has incompatible type "Union[TA, TB, int]" whose item "int" is not an attrs class + + +[builtins fixtures/plugin_attrs.pyi] + +[case testEvolveTypeVarBound] +import attrs +from typing import TypeVar + +@attrs.define +class A: + x: int + +@attrs.define +class B(A): + pass + +TA = TypeVar('TA', bound=A) + +def f(t: TA) -> TA: + t2 = attrs.evolve(t, x=42) + reveal_type(t2) # N: Revealed type is "TA`-1" + t3 = attrs.evolve(t, x='42') # E: Argument "x" to "evolve" of "TA" has incompatible type "str"; expected "int" + return t2 + +f(A(x=42)) +f(B(x=42)) + +[builtins fixtures/plugin_attrs.pyi] + +[case testEvolveTypeVarBoundNonAttrs] +import attrs +from typing import Union, TypeVar + +TInt = TypeVar('TInt', bound=int) +TAny = TypeVar('TAny') +TNone = TypeVar('TNone', bound=None) +TUnion = TypeVar('TUnion', bound=Union[str, int]) + +def f(t: TInt) -> None: + _ = attrs.evolve(t, x=42) # E: Argument 1 to "evolve" has a variable type "TInt" not bound to an attrs class + +def g(t: TAny) -> None: + _ = attrs.evolve(t, x=42) # E: Argument 1 to "evolve" has a variable type "TAny" not bound to an attrs class + +def h(t: TNone) -> None: + _ = attrs.evolve(t, x=42) # E: Argument 1 to "evolve" has a variable type "TNone" not bound to an attrs class + +def x(t: TUnion) -> None: + _ = attrs.evolve(t, x=42) # E: Argument 1 to "evolve" has incompatible type "TUnion" whose item "str" is not an attrs class # E: Argument 1 to "evolve" has incompatible type "TUnion" whose item "int" is not an attrs class + +[builtins fixtures/plugin_attrs.pyi] + +[case testEvolveTypeVarConstrained] +import attrs +from typing import TypeVar + +@attrs.define +class A: + x: int + +@attrs.define +class B: + x: str # conflicting with A.x + +T = TypeVar('T', A, B) + +def f(t: T) -> T: + t2 = attrs.evolve(t, x=42) # E: Argument "x" to "evolve" of "B" has incompatible type "int"; expected "str" + reveal_type(t2) # N: Revealed type is "__main__.A" # N: Revealed type is "__main__.B" + t2 = attrs.evolve(t, x='42') # E: Argument "x" to "evolve" of "A" has incompatible type "str"; expected "int" + return t2 + +f(A(x=42)) +f(B(x='42')) + +[builtins fixtures/plugin_attrs.pyi] + +[case testEvolveVariants] +from typing import Any +import attr +import attrs + + +@attr.s(auto_attribs=True) +class C: + name: str + +c = C(name='foo') + +c = attr.assoc(c, name='test') +c = attr.assoc(c, name=42) # E: Argument "name" to "assoc" of "C" has incompatible type "int"; expected "str" + +c = attrs.evolve(c, name='test') +c = attrs.evolve(c, name=42) # E: Argument "name" to "evolve" of "C" has incompatible type "int"; expected "str" + +c = attrs.assoc(c, name='test') +c = attrs.assoc(c, name=42) # E: Argument "name" to "assoc" of "C" has incompatible type "int"; expected "str" + +[builtins fixtures/plugin_attrs.pyi] +[typing fixtures/typing-medium.pyi] diff --git a/test-data/unit/check-protocols.test b/test-data/unit/check-protocols.test index c787b34bf26b..6976b8ee0a39 100644 --- a/test-data/unit/check-protocols.test +++ b/test-data/unit/check-protocols.test @@ -2280,7 +2280,6 @@ def func2(arg: Optional[A]) -> None: ... x: B func1(x) func2(x) -[builtins fixtures/tuple.pyi] [builtins fixtures/dict.pyi] [out] main:14: error: Argument 1 to "func1" has incompatible type "B"; expected "A" @@ -3191,7 +3190,6 @@ class NoneCompatible3(Protocol): class C(NoneCompatible3): ... 
C() # E: Cannot instantiate abstract class "C" with abstract attributes "f", "g" and "h" -[builtins fixtures/tuple.pyi] [builtins fixtures/classmethod.pyi] [case testEmptyBodyWithFinal] @@ -3524,7 +3522,12 @@ class C: def test(arg: P) -> None: ... test(B) # OK test(C) # E: Argument 1 to "test" has incompatible type "Type[C]"; expected "P" \ - # N: "C" has constructor incompatible with "__call__" of "P" + # N: "C" has constructor incompatible with "__call__" of "P" \ + # N: Following member(s) of "C" have conflicts: \ + # N: Expected: \ + # N: def __call__(x: int, y: int) -> Any \ + # N: Got: \ + # N: def __init__(x: int, y: str) -> C [case testProtocolClassObjectPureCallback] from typing import Any, ClassVar, Protocol @@ -3540,7 +3543,36 @@ class C: def test(arg: P) -> None: ... test(B) # OK test(C) # E: Argument 1 to "test" has incompatible type "Type[C]"; expected "P" \ - # N: "C" has constructor incompatible with "__call__" of "P" + # N: "C" has constructor incompatible with "__call__" of "P" \ + # N: Following member(s) of "C" have conflicts: \ + # N: Expected: \ + # N: def __call__(x: int, y: int) -> Any \ + # N: Got: \ + # N: def __init__(x: int, y: str) -> C +[builtins fixtures/type.pyi] + +[case testProtocolClassObjectCallableError] +from typing import Protocol, Any, Callable + +class P(Protocol): + def __call__(self, app: int) -> Callable[[str], None]: + ... + +class C: + def __init__(self, app: str) -> None: + pass + + def __call__(self, el: str) -> None: + return None + +p: P = C # E: Incompatible types in assignment (expression has type "Type[C]", variable has type "P") \ + # N: Following member(s) of "C" have conflicts: \ + # N: Expected: \ + # N: def __call__(app: int) -> Callable[[str], None] \ + # N: Got: \ + # N: def __init__(app: str) -> C \ + # N: "P.__call__" has type "Callable[[Arg(int, 'app')], Callable[[str], None]]" + [builtins fixtures/type.pyi] [case testProtocolTypeTypeAttribute] @@ -3998,3 +4030,38 @@ TF = TypeVar("TF", bound=Foo) def outer(cls: Type[TF]) -> TF: reveal_type(test(cls)) # N: Revealed type is "TF`-1" return cls() + +[case testProtocolImportNotMember] +import m +import lib + +class Bad: + x: int +class Good: + x: lib.C + +x: m.P = Bad() # E: Incompatible types in assignment (expression has type "Bad", variable has type "P") \ + # N: Following member(s) of "Bad" have conflicts: \ + # N: x: expected "C", got "int" +x = Good() + +[file m.py] +from typing import Protocol + +class P(Protocol): + import lib + x: lib.C + +[file lib.py] +class C: ... + +[case testAllowDefaultConstructorInProtocols] +from typing import Protocol + +class P(Protocol): + x: int + def __init__(self, x: int) -> None: + self.x = x + +class C(P): ... 
+C(0) # OK diff --git a/test-data/unit/check-python310.test b/test-data/unit/check-python310.test index 7a934348aaf2..15454fc3e216 100644 --- a/test-data/unit/check-python310.test +++ b/test-data/unit/check-python310.test @@ -1841,3 +1841,95 @@ class D: pass X = None | C Y = None | D [builtins fixtures/type.pyi] + +[case testMatchStatementWalrus] +class A: + a = 1 + +def returns_a_or_none() -> A | None: + return A() + +def returns_a() -> A: + return A() + +def f() -> None: + match x := returns_a_or_none(): + case A(): + reveal_type(x.a) # N: Revealed type is "builtins.int" + match x := returns_a(): + case A(): + reveal_type(x.a) # N: Revealed type is "builtins.int" + y = returns_a_or_none() + match y: + case A(): + reveal_type(y.a) # N: Revealed type is "builtins.int" + +[case testNarrowedVariableInNestedModifiedInMatch] +# flags: --strict-optional +from typing import Optional + +def match_stmt_error1(x: Optional[str]) -> None: + if x is None: + x = "a" + def nested() -> str: + return x # E: Incompatible return value type (got "Optional[str]", expected "str") + match object(): + case str(x): + pass + nested() + +def foo(x): pass + +def match_stmt_ok1(x: Optional[str]) -> None: + if x is None: + x = "a" + def nested() -> str: + return x + match foo(x): + case str(y): + z = x + nested() + +def match_stmt_error2(x: Optional[str]) -> None: + if x is None: + x = "a" + def nested() -> str: + return x # E: Incompatible return value type (got "Optional[str]", expected "str") + match [None]: + case [x]: + pass + nested() + +def match_stmt_error3(x: Optional[str]) -> None: + if x is None: + x = "a" + def nested() -> str: + return x # E: Incompatible return value type (got "Optional[str]", expected "str") + match {'a': None}: + case {'a': x}: + pass + nested() + +def match_stmt_error4(x: Optional[list[str]]) -> None: + if x is None: + x = ["a"] + def nested() -> list[str]: + return x # E: Incompatible return value type (got "Optional[List[str]]", expected "List[str]") + match ["a"]: + case [*x]: + pass + nested() + +class C: + a: str + +def match_stmt_error5(x: Optional[str]) -> None: + if x is None: + x = "a" + def nested() -> str: + return x # E: Incompatible return value type (got "Optional[str]", expected "str") + match C(): + case C(a=x): + pass + nested() +[builtins fixtures/tuple.pyi] diff --git a/test-data/unit/check-python311.test b/test-data/unit/check-python311.test index 7196f10f8863..5870c7e17bcc 100644 --- a/test-data/unit/check-python311.test +++ b/test-data/unit/check-python311.test @@ -34,7 +34,7 @@ except* (RuntimeError, Custom) as e: class Bad: ... 
try: pass -except* (RuntimeError, Bad) as e: # E: Exception type must be derived from BaseException +except* (RuntimeError, Bad) as e: # E: Exception type must be derived from BaseException (or be a tuple of exception classes) reveal_type(e) # N: Revealed type is "builtins.ExceptionGroup[Any]" [builtins fixtures/exception.pyi] @@ -63,3 +63,17 @@ class Variadic(Generic[Unpack[Ts]]): variadic: Variadic[int, str] reveal_type(variadic) # N: Revealed type is "__main__.Variadic[builtins.int, builtins.str]" [builtins fixtures/tuple.pyi] + +[case testAsyncGeneratorWithinComprehension] +# flags: --python-version 3.11 +from typing import Any, Generator, List + +async def asynciter(iterable): + for x in iterable: + yield x + +async def coro() -> Generator[List[Any], None, None]: + return ([i async for i in asynciter([0,j])] for j in [3, 5]) +reveal_type(coro) # N: Revealed type is "def () -> typing.Coroutine[Any, Any, typing.Generator[builtins.list[Any], None, None]]" +[builtins fixtures/async_await.pyi] +[typing fixtures/typing-async.pyi] diff --git a/test-data/unit/check-python38.test b/test-data/unit/check-python38.test index b9f9f2173ae1..5b077c45580a 100644 --- a/test-data/unit/check-python38.test +++ b/test-data/unit/check-python38.test @@ -775,3 +775,55 @@ main:9: note: Revealed type is "builtins.int" class C: [(j := i) for i in [1, 2, 3]] # E: Assignment expression within a comprehension cannot be used in a class body [builtins fixtures/list.pyi] + +[case testNarrowedVariableInNestedModifiedInWalrus] +# flags: --strict-optional +from typing import Optional + +def walrus_with_nested_error(x: Optional[str]) -> None: + if x is None: + x = "a" + def nested() -> str: + return x # E: Incompatible return value type (got "Optional[str]", expected "str") + if x := None: + pass + nested() + +def walrus_with_nested_ok(x: Optional[str]) -> None: + if x is None: + x = "a" + def nested() -> str: + return x + if y := x: + pass + nested() + +[case testIgnoreWholeModule] +# flags: --warn-unused-ignores +# type: ignore +IGNORE # type: ignore + +[case testUnusedIgnoreVersionCheck] +# flags: --warn-unused-ignores +import sys + +if sys.version_info < (3, 6): + 42 # type: ignore +else: + 42 # type: ignore # E: Unused "type: ignore" comment +[builtins fixtures/ops.pyi] + +[case testDictExpressionErrorLocations] +# flags: --pretty +from typing import Dict + +other: Dict[str, str] +dct: Dict[str, int] = {"a": "b", **other} +[builtins fixtures/dict.pyi] +[out] +main:5: error: Dict entry 0 has incompatible type "str": "str"; expected "str": "int" + dct: Dict[str, int] = {"a": "b", **other} + ^~~~~~~~ +main:5: error: Unpacked dict entry 1 has incompatible type "Dict[str, str]"; expected "SupportsKeysAndGetItem[str, int]" + dct: Dict[str, int] = {"a": "b", **other} + ^~~~~ diff --git a/test-data/unit/check-python39.test b/test-data/unit/check-python39.test index 105051a840bb..09d789ea423e 100644 --- a/test-data/unit/check-python39.test +++ b/test-data/unit/check-python39.test @@ -17,3 +17,12 @@ decorator_list: List[Callable[..., Callable[[int], str]]] def f(x: float) -> float: ... 
reveal_type(f) # N: Revealed type is "def (builtins.int) -> builtins.str" [builtins fixtures/list.pyi] + +[case testStarredExpressionsInForLoop] +# flags: --python-version 3.9 + +a = b = c = [1, 2, 3] +for x in *a, *b, *c: + reveal_type(x) # N: Revealed type is "builtins.int" +[builtins fixtures/tuple.pyi] + diff --git a/test-data/unit/check-recursive-types.test b/test-data/unit/check-recursive-types.test index b7b4372ecc12..418ab5f1f0d5 100644 --- a/test-data/unit/check-recursive-types.test +++ b/test-data/unit/check-recursive-types.test @@ -897,3 +897,29 @@ Example = NamedTuple("Example", [("rec", List["Example"])]) e: Example reveal_type(e) # N: Revealed type is "Tuple[builtins.list[...], fallback=__main__.Example]" [builtins fixtures/tuple.pyi] + +[case testRecursiveBoundFunctionScopeNoCrash] +from typing import TypeVar, Union, Dict + +def dummy() -> None: + A = Union[str, Dict[str, "A"]] # E: Cannot resolve name "A" (possible cyclic definition) \ + # N: Recursive types are not allowed at function scope + T = TypeVar("T", bound=A) + + def bar(x: T) -> T: + pass + reveal_type(bar) # N: Revealed type is "def [T <: Union[builtins.str, builtins.dict[builtins.str, Any]]] (x: T`-1) -> T`-1" +[builtins fixtures/dict.pyi] + +[case testForwardBoundFunctionScopeWorks] +from typing import TypeVar, Dict + +def dummy() -> None: + A = Dict[str, "B"] + B = Dict[str, str] + T = TypeVar("T", bound=A) + + def bar(x: T) -> T: + pass + reveal_type(bar) # N: Revealed type is "def [T <: builtins.dict[builtins.str, builtins.dict[builtins.str, builtins.str]]] (x: T`-1) -> T`-1" +[builtins fixtures/dict.pyi] diff --git a/test-data/unit/check-selftype.test b/test-data/unit/check-selftype.test index 555cef3641f8..53c24584cb73 100644 --- a/test-data/unit/check-selftype.test +++ b/test-data/unit/check-selftype.test @@ -208,7 +208,31 @@ class J(A[int]): [builtins fixtures/tuple.pyi] -[case testSelfTypeOverrideCompatibilityTypeVar-xfail] +[case testSelfTypeOverrideCompatibilityGeneric] +from typing import TypeVar, Generic, overload + +T = TypeVar("T", str, int, None) + +class A(Generic[T]): + @overload + def f(self, s: T) -> T: ... + @overload + def f(self: A[str], s: bytes) -> str: ... + def f(self, s: object): ... + +class B(A[int]): + def f(self, s: int) -> int: ... + +class C(A[None]): + def f(self, s: int) -> int: ... # E: Signature of "f" incompatible with supertype "A" \ + # N: Superclass: \ + # N: @overload \ + # N: def f(self, s: None) -> None \ + # N: Subclass: \ + # N: def f(self, s: int) -> int +[builtins fixtures/tuple.pyi] + +[case testSelfTypeOverrideCompatibilityTypeVar] from typing import overload, TypeVar, Union AT = TypeVar("AT", bound="A") @@ -242,6 +266,26 @@ class B(A): def f(*a, **kw): ... [builtins fixtures/dict.pyi] +[case testSelfTypeOverrideCompatibilitySelfTypeVar] +from typing import Any, Generic, Self, TypeVar, overload + +T_co = TypeVar('T_co', covariant=True) + +class Config(Generic[T_co]): + @overload + def get(self, instance: None) -> Self: ... + @overload + def get(self, instance: Any) -> T_co: ... + def get(self, *a, **kw): ... + +class MultiConfig(Config[T_co]): + @overload + def get(self, instance: None) -> Self: ... + @overload + def get(self, instance: Any) -> T_co: ... + def get(self, *a, **kw): ... 
+[builtins fixtures/dict.pyi] + [case testSelfTypeSuper] from typing import TypeVar, cast @@ -1805,3 +1849,21 @@ class C(Generic[T]): reveal_type(self.val) # N: Revealed type is "__main__.A" \ # N: Revealed type is "__main__.B" self.val = x + +[case testNarrowSelfType] +from typing import Self, Union + +class A: ... +class B: + def f1(self, v: Union[Self, A]) -> A: + if isinstance(v, B): + return A() + else: + return v + def f2(self, v: Union[Self, A]) -> A: + if isinstance(v, B): + return A() + else: + return B() # E: Incompatible return value type (got "B", expected "A") + +[builtins fixtures/isinstancelist.pyi] diff --git a/test-data/unit/check-singledispatch.test b/test-data/unit/check-singledispatch.test index 8fe049437c57..45bf1ca9cbf2 100644 --- a/test-data/unit/check-singledispatch.test +++ b/test-data/unit/check-singledispatch.test @@ -180,9 +180,6 @@ def f(arg) -> None: @f.register def g(arg: Mapping) -> None: pass - -[builtins fixtures/args.pyi] -[builtins fixtures/list.pyi] [builtins fixtures/dict.pyi] [case testIncorrectArgumentsInSingledispatchFunctionDefinition] diff --git a/test-data/unit/check-slots.test b/test-data/unit/check-slots.test index 8beb0d8bf3f7..b7ce5e596101 100644 --- a/test-data/unit/check-slots.test +++ b/test-data/unit/check-slots.test @@ -26,7 +26,6 @@ class WithVariable: self.a = 1 self.b = 2 self.c = 3 -[builtins fixtures/tuple.pyi] [builtins fixtures/list.pyi] @@ -332,7 +331,6 @@ b.extra = 'extra' main:22: error: Trying to assign name "c" that is not in "__slots__" of type "__main__.B" main:43: error: Trying to assign name "c" that is not in "__slots__" of type "__main__.B" main:47: error: "B" has no attribute "extra" -[builtins fixtures/tuple.pyi] [builtins fixtures/property.pyi] @@ -363,7 +361,6 @@ a.c = custom_obj a.d = custom_obj a.e = custom_obj [out] -[builtins fixtures/tuple.pyi] [builtins fixtures/dict.pyi] @@ -473,7 +470,6 @@ class A: self.a = 1 self.b = 2 self.missing = 3 -[builtins fixtures/tuple.pyi] [builtins fixtures/list.pyi] @@ -486,7 +482,6 @@ class A: self.a = 1 self.b = 2 self.missing = 3 -[builtins fixtures/tuple.pyi] [builtins fixtures/set.pyi] @@ -499,7 +494,6 @@ class A: self.a = 1 self.b = 2 self.missing = 3 -[builtins fixtures/tuple.pyi] [builtins fixtures/dict.pyi] diff --git a/test-data/unit/check-statements.test b/test-data/unit/check-statements.test index b9551870ddfc..3cb8864f9207 100644 --- a/test-data/unit/check-statements.test +++ b/test-data/unit/check-statements.test @@ -659,9 +659,9 @@ class E2(E1): pass try: pass except (E1, E2): pass -except (E1, object): pass # E: Exception type must be derived from BaseException -except (object, E2): pass # E: Exception type must be derived from BaseException -except (E1, (E2,)): pass # E: Exception type must be derived from BaseException +except (E1, object): pass # E: Exception type must be derived from BaseException (or be a tuple of exception classes) +except (object, E2): pass # E: Exception type must be derived from BaseException (or be a tuple of exception classes) +except (E1, (E2,)): pass # E: Exception type must be derived from BaseException (or be a tuple of exception classes) except (E1, E2): pass except ((E1, E2)): pass @@ -690,7 +690,7 @@ except (E1, E2) as e1: except (E2, E1) as e2: a = e2 # type: E1 b = e2 # type: E2 # E: Incompatible types in assignment (expression has type "E1", variable has type "E2") -except (E1, E2, int) as e3: # E: Exception type must be derived from BaseException +except (E1, E2, int) as e3: # E: Exception type must be derived from 
BaseException (or be a tuple of exception classes) pass [builtins fixtures/exception.pyi] @@ -750,13 +750,13 @@ def nested_union(exc: Union[Type[E1], Union[Type[E2], Type[E3]]]) -> None: def error_in_union(exc: Union[Type[E1], int]) -> None: try: pass - except exc as e: # E: Exception type must be derived from BaseException + except exc as e: # E: Exception type must be derived from BaseException (or be a tuple of exception classes) pass def error_in_variadic(exc: Tuple[int, ...]) -> None: try: pass - except exc as e: # E: Exception type must be derived from BaseException + except exc as e: # E: Exception type must be derived from BaseException (or be a tuple of exception classes) pass [builtins fixtures/tuple.pyi] @@ -784,15 +784,15 @@ except E1 as e1: reveal_type(e1) # N: Revealed type is "Any" except E2 as e2: reveal_type(e2) # N: Revealed type is "__main__.E2" -except NotBaseDerived as e3: # E: Exception type must be derived from BaseException +except NotBaseDerived as e3: # E: Exception type must be derived from BaseException (or be a tuple of exception classes) pass -except (NotBaseDerived, E1) as e4: # E: Exception type must be derived from BaseException +except (NotBaseDerived, E1) as e4: # E: Exception type must be derived from BaseException (or be a tuple of exception classes) pass -except (NotBaseDerived, E2) as e5: # E: Exception type must be derived from BaseException +except (NotBaseDerived, E2) as e5: # E: Exception type must be derived from BaseException (or be a tuple of exception classes) pass -except (NotBaseDerived, E1, E2) as e6: # E: Exception type must be derived from BaseException +except (NotBaseDerived, E1, E2) as e6: # E: Exception type must be derived from BaseException (or be a tuple of exception classes) pass -except (E1, E2, NotBaseDerived) as e6: # E: Exception type must be derived from BaseException +except (E1, E2, NotBaseDerived) as e6: # E: Exception type must be derived from BaseException (or be a tuple of exception classes) pass [builtins fixtures/exception.pyi] @@ -953,8 +953,8 @@ except a as b: import typing def exc() -> BaseException: pass try: pass -except exc as e: pass # E: Exception type must be derived from BaseException -except BaseException() as b: pass # E: Exception type must be derived from BaseException +except exc as e: pass # E: Exception type must be derived from BaseException (or be a tuple of exception classes) +except BaseException() as b: pass # E: Exception type must be derived from BaseException (or be a tuple of exception classes) [builtins fixtures/exception.pyi] [case testTupleValueAsExceptionType] @@ -980,7 +980,7 @@ except exs2 as e2: exs3 = (E1, (E1_1, (E1_2,))) try: pass -except exs3 as e3: pass # E: Exception type must be derived from BaseException +except exs3 as e3: pass # E: Exception type must be derived from BaseException (or be a tuple of exception classes) [builtins fixtures/exception.pyi] [case testInvalidTupleValueAsExceptionType] @@ -991,7 +991,7 @@ class E2(E1): pass exs1 = (E1, E2, int) try: pass -except exs1 as e: pass # E: Exception type must be derived from BaseException +except exs1 as e: pass # E: Exception type must be derived from BaseException (or be a tuple of exception classes) [builtins fixtures/exception.pyi] [case testOverloadedExceptionType] @@ -1034,7 +1034,7 @@ def h(e: Type[int]): [builtins fixtures/exception.pyi] [out] main:9: note: Revealed type is "builtins.BaseException" -main:12: error: Exception type must be derived from BaseException +main:12: error: Exception type must be derived from 
BaseException (or be a tuple of exception classes) -- Del statement diff --git a/test-data/unit/check-tuples.test b/test-data/unit/check-tuples.test index 266bfbf97888..e843532a2560 100644 --- a/test-data/unit/check-tuples.test +++ b/test-data/unit/check-tuples.test @@ -1248,7 +1248,7 @@ t = (0, "") x = 0 y = "" reveal_type(t[x:]) # N: Revealed type is "builtins.tuple[Union[builtins.int, builtins.str], ...]" -t[y:] # E: Slice index must be an integer or None +t[y:] # E: Slice index must be an integer, SupportsIndex or None [builtins fixtures/tuple.pyi] [case testInferTupleTypeFallbackAgainstInstance] diff --git a/test-data/unit/check-type-aliases.test b/test-data/unit/check-type-aliases.test index d7cccd2d6ba6..9dd56ad309f3 100644 --- a/test-data/unit/check-type-aliases.test +++ b/test-data/unit/check-type-aliases.test @@ -1028,3 +1028,18 @@ RHSAlias3: type = tuple[int, ...] WrongTypeElement = str | tuple[float, 1] # E: Invalid type: try using Literal[1] instead? WrongEllipsis = str | tuple[float, float, ...] # E: Unexpected "..." [builtins fixtures/tuple.pyi] + +[case testCompiledNoCrashOnSingleItemUnion] +# flags: --no-strict-optional +from typing import Callable, Union, Generic, TypeVar + +Alias = Callable[[], int] + +T = TypeVar("T") +class C(Generic[T]): + attr: Union[Alias, None] = None + + @classmethod + def test(cls) -> None: + cls.attr +[builtins fixtures/classmethod.pyi] diff --git a/test-data/unit/check-typeguard.test b/test-data/unit/check-typeguard.test index 39bcb091f09e..a5ab35649320 100644 --- a/test-data/unit/check-typeguard.test +++ b/test-data/unit/check-typeguard.test @@ -602,12 +602,30 @@ def func(names: Tuple[str, ...]): from typing_extensions import TypeGuard class Z: - def typeguard(self, *, x: object) -> TypeGuard[int]: # E: TypeGuard functions must have a positional argument + def typeguard1(self, *, x: object) -> TypeGuard[int]: # line 4 + ... + + @staticmethod + def typeguard2(x: object) -> TypeGuard[int]: + ... + + @staticmethod # line 11 + def typeguard3(*, x: object) -> TypeGuard[int]: ... -def bad_typeguard(*, x: object) -> TypeGuard[int]: # E: TypeGuard functions must have a positional argument +def bad_typeguard(*, x: object) -> TypeGuard[int]: # line 15 ... 
-[builtins fixtures/tuple.pyi] + +# In Python 3.8 the line number associated with FunctionDef nodes changed +[builtins fixtures/classmethod.pyi] +[out] +main:4: error: TypeGuard functions must have a positional argument +main:11: error: TypeGuard functions must have a positional argument +main:15: error: TypeGuard functions must have a positional argument +[out version>=3.8] +main:4: error: TypeGuard functions must have a positional argument +main:12: error: TypeGuard functions must have a positional argument +main:15: error: TypeGuard functions must have a positional argument [case testTypeGuardWithKeywordArg] from typing_extensions import TypeGuard @@ -640,7 +658,6 @@ if Y().typeguard(x): reveal_type(x) # N: Revealed type is "builtins.int" if Y.typeguard(x): reveal_type(x) # N: Revealed type is "builtins.int" -[builtins fixtures/tuple.pyi] [builtins fixtures/classmethod.pyi] [case testTypeGuardKwargFollowingThroughOverloaded] diff --git a/test-data/unit/check-typevar-tuple.test b/test-data/unit/check-typevar-tuple.test index 9afe709ed19b..753773269244 100644 --- a/test-data/unit/check-typevar-tuple.test +++ b/test-data/unit/check-typevar-tuple.test @@ -117,8 +117,7 @@ variadic_single: Variadic[int] reveal_type(variadic_single) # N: Revealed type is "__main__.Variadic[builtins.int]" empty: Variadic[()] -# TODO: fix pretty printer to be better. -reveal_type(empty) # N: Revealed type is "__main__.Variadic" +reveal_type(empty) # N: Revealed type is "__main__.Variadic[Unpack[builtins.tuple[Any, ...]]]" bad: Variadic[Unpack[Tuple[int, ...]], str, Unpack[Tuple[bool, ...]]] # E: More than one Unpack in a type is not allowed reveal_type(bad) # N: Revealed type is "__main__.Variadic[Unpack[builtins.tuple[builtins.int, ...]], builtins.str]" @@ -512,3 +511,62 @@ call_prefix(target=func_prefix, args=(0, 'foo')) call_prefix(target=func2_prefix, args=(0, 'foo')) # E: Argument "target" to "call_prefix" has incompatible type "Callable[[str, int, str], None]"; expected "Callable[[bytes, int, str], None]" [builtins fixtures/tuple.pyi] +[case testTypeVarTuplePep646UnspecifiedParameters] +from typing import Tuple, Generic, TypeVar +from typing_extensions import Unpack, TypeVarTuple + +Ts = TypeVarTuple("Ts") + +class Array(Generic[Unpack[Ts]]): + ... + +def takes_any_array(arr: Array) -> None: + ... + +x: Array[int, bool] +takes_any_array(x) + +T = TypeVar("T") + +class Array2(Generic[T, Unpack[Ts]]): + ... + +def takes_empty_array2(arr: Array2[int]) -> None: + ... + +y: Array2[int] +takes_empty_array2(y) +[builtins fixtures/tuple.pyi] + +[case testTypeVarTuplePep646CallableStarArgs] +from typing import Tuple, Callable +from typing_extensions import Unpack, TypeVarTuple + +Ts = TypeVarTuple("Ts") + +def call( + target: Callable[[Unpack[Ts]], None], + *args: Unpack[Ts], +) -> None: + ... + # TODO: exposes unhandled case in checkexpr + # target(*args) + +class A: + def func(self, arg1: int, arg2: str) -> None: ... + def func2(self, arg1: int, arg2: int) -> None: ... + def func3(self, *args: int) -> None: ... + +vargs: Tuple[int, ...] +vargs_str: Tuple[str, ...] 
+ +call(A().func) # E: Argument 1 to "call" has incompatible type "Callable[[int, str], None]"; expected "Callable[[VarArg(object)], None]" +call(A().func, 0, 'foo') +call(A().func, 0, 'foo', 0) # E: Argument 1 to "call" has incompatible type "Callable[[int, str], None]"; expected "Callable[[VarArg(object)], None]" +call(A().func, 0) # E: Argument 1 to "call" has incompatible type "Callable[[int, str], None]"; expected "Callable[[VarArg(object)], None]" +call(A().func, 0, 1) # E: Argument 1 to "call" has incompatible type "Callable[[int, str], None]"; expected "Callable[[int, object], None]" +call(A().func2, 0, 0) +call(A().func3, 0, 1, 2) +call(A().func3) + +[builtins fixtures/tuple.pyi] diff --git a/test-data/unit/check-typevar-values.test b/test-data/unit/check-typevar-values.test index a4a4d68bd9fe..83340c52b63b 100644 --- a/test-data/unit/check-typevar-values.test +++ b/test-data/unit/check-typevar-values.test @@ -660,8 +660,6 @@ T = TypeVar("T", bound=Union[Data, Dict[str, str]]) def f(data: T) -> None: reveal_type(data["x"]) # N: Revealed type is "Union[builtins.int, builtins.str]" - -[builtins fixtures/tuple.pyi] [builtins fixtures/dict.pyi] [case testTypeVarWithTypedDictValueInIndexExpression] @@ -677,7 +675,6 @@ T = TypeVar("T", Data, Dict[str, str]) def f(data: T) -> None: _: Union[str, int] = data["x"] -[builtins fixtures/tuple.pyi] [builtins fixtures/dict.pyi] [case testSelfTypeVarIndexExpr] @@ -699,8 +696,6 @@ class Indexable: def m(self: T) -> T: return self["foo"] - -[builtins fixtures/tuple.pyi] [builtins fixtures/classmethod.pyi] [case testTypeVarWithValueDeferral] diff --git a/test-data/unit/check-union-error-syntax.test b/test-data/unit/check-union-error-syntax.test new file mode 100644 index 000000000000..2928cc312709 --- /dev/null +++ b/test-data/unit/check-union-error-syntax.test @@ -0,0 +1,61 @@ +[case testUnionErrorSyntax] +# flags: --python-version 3.10 --no-force-union-syntax +from typing import Union +x : Union[bool, str] +x = 3 # E: Incompatible types in assignment (expression has type "int", variable has type "bool | str") + +[case testOrErrorSyntax] +# flags: --python-version 3.10 --force-union-syntax +from typing import Union +x : Union[bool, str] +x = 3 # E: Incompatible types in assignment (expression has type "int", variable has type "Union[bool, str]") + +[case testOrNoneErrorSyntax] +# flags: --python-version 3.10 --no-force-union-syntax +from typing import Union +x : Union[bool, None] +x = 3 # E: Incompatible types in assignment (expression has type "int", variable has type "bool | None") + +[case testOptionalErrorSyntax] +# flags: --python-version 3.10 --force-union-syntax +from typing import Union +x : Union[bool, None] +x = 3 # E: Incompatible types in assignment (expression has type "int", variable has type "Optional[bool]") + +[case testNoneAsFinalItem] +# flags: --python-version 3.10 --no-force-union-syntax +from typing import Union +x : Union[bool, None, str] +x = 3 # E: Incompatible types in assignment (expression has type "int", variable has type "bool | str | None") + +[case testLiteralOrErrorSyntax] +# flags: --python-version 3.10 --no-force-union-syntax +from typing import Union +from typing_extensions import Literal +x : Union[Literal[1], Literal[2], str] +x = 3 # E: Incompatible types in assignment (expression has type "Literal[3]", variable has type "Literal[1, 2] | str") +[builtins fixtures/tuple.pyi] + +[case testLiteralUnionErrorSyntax] +# flags: --python-version 3.10 --force-union-syntax +from typing import Union +from 
typing_extensions import Literal +x : Union[Literal[1], Literal[2], str] +x = 3 # E: Incompatible types in assignment (expression has type "Literal[3]", variable has type "Union[str, Literal[1, 2]]") +[builtins fixtures/tuple.pyi] + +[case testLiteralOrNoneErrorSyntax] +# flags: --python-version 3.10 --no-force-union-syntax +from typing import Union +from typing_extensions import Literal +x : Union[Literal[1], None] +x = 3 # E: Incompatible types in assignment (expression has type "Literal[3]", variable has type "Literal[1] | None") +[builtins fixtures/tuple.pyi] + +[case testLiteralOptionalErrorSyntax] +# flags: --python-version 3.10 --force-union-syntax +from typing import Union +from typing_extensions import Literal +x : Union[Literal[1], None] +x = 3 # E: Incompatible types in assignment (expression has type "Literal[3]", variable has type "Optional[Literal[1]]") +[builtins fixtures/tuple.pyi] diff --git a/test-data/unit/check-unreachable-code.test b/test-data/unit/check-unreachable-code.test index 6522391899de..a9e025632e24 100644 --- a/test-data/unit/check-unreachable-code.test +++ b/test-data/unit/check-unreachable-code.test @@ -1446,4 +1446,4 @@ class Foo: def f() -> None: Foo()['a'] = 'a' x = 0 # This should not be reported as unreachable -[builtins fixtures/exception.pyi] \ No newline at end of file +[builtins fixtures/exception.pyi] diff --git a/test-data/unit/check-varargs.test b/test-data/unit/check-varargs.test index d598fe13b7e9..92b9f7f04f26 100644 --- a/test-data/unit/check-varargs.test +++ b/test-data/unit/check-varargs.test @@ -762,9 +762,9 @@ class Person(TypedDict): name: str age: int -def foo(x: Unpack[Person]) -> None: # E: TypedDict('__main__.Person', {'name': builtins.str, 'age': builtins.int}) cannot be unpacked (must be tuple or TypeVarTuple) +def foo(x: Unpack[Person]) -> None: # E: "Person" cannot be unpacked (must be tuple or TypeVarTuple) ... -def bar(x: int, *args: Unpack[Person]) -> None: # E: TypedDict('__main__.Person', {'name': builtins.str, 'age': builtins.int}) cannot be unpacked (must be tuple or TypeVarTuple) +def bar(x: int, *args: Unpack[Person]) -> None: # E: "Person" cannot be unpacked (must be tuple or TypeVarTuple) ... def baz(**kwargs: Unpack[Person]) -> None: # OK ... 
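Editorial note (not part of the patch): the check-varargs hunk above only reworded the error message, but the underlying rule is that a TypedDict may be unpacked into `**kwargs` while positional unpacking stays rejected. A minimal sketch of that pattern, assuming `typing_extensions` is installed; the names `Person` and `greet` here are illustrative, not from the patch:

```python
from typing_extensions import TypedDict, Unpack


class Person(TypedDict):
    name: str
    age: int


def greet(**kwargs: Unpack[Person]) -> str:
    # kwargs is typed as Person, so kwargs["name"] is str and kwargs["age"] is int.
    return f"{kwargs['name']} ({kwargs['age']})"


# By contrast, `def f(x: Unpack[Person])` is rejected with the message shown
# above: "Person" cannot be unpacked (must be tuple or TypeVarTuple).
print(greet(name="Ada", age=36))
```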
diff --git a/test-data/unit/daemon.test b/test-data/unit/daemon.test index 7586c8763d33..c60068a44bec 100644 --- a/test-data/unit/daemon.test +++ b/test-data/unit/daemon.test @@ -312,7 +312,7 @@ def bar() -> None: foo(arg='xyz') [case testDaemonGetType_python38] -$ dmypy start --log-file log.txt -- --follow-imports=error --no-error-summary +$ dmypy start --log-file log.txt -- --follow-imports=error --no-error-summary --python-version 3.8 Daemon started $ dmypy inspect foo:1:2:3:4 Command "inspect" is only valid after a "check" command (that produces no parse errors) diff --git a/test-data/unit/fine-grained-attr.test b/test-data/unit/fine-grained-attr.test index 3fd40b774c7b..145bfe57e4b2 100644 --- a/test-data/unit/fine-grained-attr.test +++ b/test-data/unit/fine-grained-attr.test @@ -17,7 +17,7 @@ from attr import define @define class A: a: float -[builtins fixtures/attr.pyi] +[builtins fixtures/plugin_attrs.pyi] [out] == main:5: error: Incompatible return value type (got "Attribute[float]", expected "Attribute[int]") @@ -32,7 +32,7 @@ from attr import define class A: a: float b: int -[builtins fixtures/attr.pyi] +[builtins fixtures/plugin_attrs.pyi] [file m.py] from c import A @@ -54,7 +54,7 @@ import attr @attr.s class Entry: var: int = attr.ib() -[builtins fixtures/attr.pyi] +[builtins fixtures/plugin_attrs.pyi] [file m.py] from typing import Any, ClassVar, Protocol diff --git a/test-data/unit/fine-grained-dataclass-transform.test b/test-data/unit/fine-grained-dataclass-transform.test index 7dc852f1d733..cc297bc344aa 100644 --- a/test-data/unit/fine-grained-dataclass-transform.test +++ b/test-data/unit/fine-grained-dataclass-transform.test @@ -86,7 +86,56 @@ class A(Dataclass): [out] main:7: error: Unexpected keyword argument "x" for "B" -builtins.pyi:12: note: "B" defined here +builtins.pyi:13: note: "B" defined here main:7: error: Unexpected keyword argument "y" for "B" -builtins.pyi:12: note: "B" defined here +builtins.pyi:13: note: "B" defined here +== + +[case frozenInheritanceViaDefault] +# flags: --python-version 3.11 +from foo import Foo + +foo = Foo(base=0, foo=1) + +[file transform.py] +from typing import dataclass_transform, Type + +@dataclass_transform(frozen_default=True) +def dataclass(cls: Type) -> Type: return cls + +[file base.py] +from transform import dataclass + +@dataclass +class Base: + base: int + +[file foo.py] +from base import Base +from transform import dataclass + +@dataclass +class Foo(Base): + foo: int + +[file foo.py.2] +from base import Base +from transform import dataclass + +@dataclass +class Foo(Base): + foo: int + bar: int = 0 + +[typing fixtures/typing-full.pyi] +[builtins fixtures/dataclasses.pyi] + +# If the frozen parameter is being maintained correctly, we *don't* expect to see issues; if it's +# broken in incremental mode, then we'll see an error about inheriting a non-frozen class from a +# frozen one. +# +# Ideally we'd also add a `foo.foo = 2` to confirm that frozen semantics are actually being +# enforced, but incremental tests currently can't start with an error, which makes it tricky to +# write such a test case. 
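Editorial note (not part of the patch): for readers unfamiliar with the flag exercised by `frozenInheritanceViaDefault`, here is a hedged sketch of the PEP 681 pattern, assuming a recent `typing_extensions`. The decorator body is a stand-in; only a type checker enforces the frozen semantics declared by `frozen_default=True`:

```python
from typing import Type, TypeVar

from typing_extensions import dataclass_transform

T = TypeVar("T")


@dataclass_transform(frozen_default=True)
def frozen_model(cls: Type[T]) -> Type[T]:
    # A real implementation would synthesize __init__ and block mutation;
    # for static checking, the decorator's declared defaults are what matter.
    return cls


@frozen_model
class Base:
    base: int


@frozen_model
class Foo(Base):  # OK: Foo is frozen by default, matching its frozen base.
    foo: int
```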
+[out] == diff --git a/test-data/unit/fine-grained-modules.test b/test-data/unit/fine-grained-modules.test index f76ced64341b..163e859276cb 100644 --- a/test-data/unit/fine-grained-modules.test +++ b/test-data/unit/fine-grained-modules.test @@ -1279,12 +1279,12 @@ a.py:2: error: Too many arguments for "foo" [case testAddModuleAfterCache3-only_when_cache] # cmd: mypy main a.py -# cmd2: mypy main a.py b.py c.py d.py e.py f.py g.py h.py -# cmd3: mypy main a.py b.py c.py d.py e.py f.py g.py h.py +# cmd2: mypy main a.py b.py c.py d.py e.py f.py g.py h.py i.py j.py +# cmd3: mypy main a.py b.py c.py d.py e.py f.py g.py h.py i.py j.py # flags: --ignore-missing-imports --follow-imports=skip import a [file a.py] -import b, c, d, e, f, g, h +import b, c, d, e, f, g, h, i, j b.foo(10) [file b.py.2] def foo() -> None: pass @@ -1294,6 +1294,8 @@ def foo() -> None: pass [file f.py.2] [file g.py.2] [file h.py.2] +[file i.py.2] +[file j.py.2] -- No files should be stale or reprocessed in the first step since the large number -- of missing files will force build to give up on cache loading. @@ -1806,7 +1808,7 @@ import b [file b.py] [file c.py] x = 1 -[file b.py] +[file b.py.2] 1+'x' [file c.py.2] x = '2' diff --git a/test-data/unit/fine-grained.test b/test-data/unit/fine-grained.test index 58339828677d..88a11be31f34 100644 --- a/test-data/unit/fine-grained.test +++ b/test-data/unit/fine-grained.test @@ -989,7 +989,6 @@ import attr class A: a: int other: int -[builtins fixtures/list.pyi] [file a.py.3] import attr @@ -1048,7 +1047,7 @@ import attr @attr.s(kw_only=True) class A: a = attr.ib(15) # type: int -[builtins fixtures/attr.pyi] +[builtins fixtures/plugin_attrs.pyi] [out] == main:2: error: Too many positional arguments for "B" @@ -7547,7 +7546,7 @@ def d() -> Dict[int, int]: pass [builtins fixtures/dict.pyi] [out] == -main:5: error: Argument 1 to "update" of "dict" has incompatible type "Dict[int, int]"; expected "Mapping[int, str]" +main:5: error: Unpacked dict entry 1 has incompatible type "Dict[int, int]"; expected "SupportsKeysAndGetItem[int, str]" [case testAwaitAndAsyncDef-only_when_nocache] from a import g diff --git a/test-data/unit/fixtures/args.pyi b/test-data/unit/fixtures/args.pyi index 9985ccf84817..0020d9ceff46 100644 --- a/test-data/unit/fixtures/args.pyi +++ b/test-data/unit/fixtures/args.pyi @@ -1,5 +1,6 @@ # Builtins stub used to support *args, **kwargs. +import _typeshed from typing import TypeVar, Generic, Iterable, Sequence, Tuple, Dict, Any, overload, Mapping Tco = TypeVar('Tco', covariant=True) diff --git a/test-data/unit/fixtures/dataclasses.pyi b/test-data/unit/fixtures/dataclasses.pyi index e9394c84ba7d..710b8659d265 100644 --- a/test-data/unit/fixtures/dataclasses.pyi +++ b/test-data/unit/fixtures/dataclasses.pyi @@ -1,3 +1,4 @@ +import _typeshed from typing import ( Generic, Iterator, Iterable, Mapping, Optional, Sequence, Tuple, TypeVar, Union, overload, diff --git a/test-data/unit/fixtures/dict.pyi b/test-data/unit/fixtures/dict.pyi index 153832411f50..19d175ff79ab 100644 --- a/test-data/unit/fixtures/dict.pyi +++ b/test-data/unit/fixtures/dict.pyi @@ -1,5 +1,7 @@ # Builtins stub used in dictionary-related test cases. 
+from _typeshed import SupportsKeysAndGetItem +import _typeshed from typing import ( TypeVar, Generic, Iterable, Iterator, Mapping, Tuple, overload, Optional, Union, Sequence ) @@ -25,7 +27,7 @@ class dict(Mapping[KT, VT]): def __setitem__(self, k: KT, v: VT) -> None: pass def __iter__(self) -> Iterator[KT]: pass def __contains__(self, item: object) -> int: pass - def update(self, a: Mapping[KT, VT]) -> None: pass + def update(self, a: SupportsKeysAndGetItem[KT, VT]) -> None: pass @overload def get(self, k: KT) -> Optional[VT]: pass @overload diff --git a/test-data/unit/fixtures/paramspec.pyi b/test-data/unit/fixtures/paramspec.pyi index 0686924aad6f..5e4b8564e238 100644 --- a/test-data/unit/fixtures/paramspec.pyi +++ b/test-data/unit/fixtures/paramspec.pyi @@ -1,5 +1,6 @@ # builtins stub for paramspec-related test cases +import _typeshed from typing import ( Sequence, Generic, TypeVar, Iterable, Iterator, Tuple, Mapping, Optional, Union, Type, overload, Protocol diff --git a/test-data/unit/fixtures/attr.pyi b/test-data/unit/fixtures/plugin_attrs.pyi similarity index 90% rename from test-data/unit/fixtures/attr.pyi rename to test-data/unit/fixtures/plugin_attrs.pyi index 3bd4f0ec7cbe..f62104809e74 100644 --- a/test-data/unit/fixtures/attr.pyi +++ b/test-data/unit/fixtures/plugin_attrs.pyi @@ -1,4 +1,4 @@ -# Builtins stub used to support @attr.s tests. +# Builtins stub used to support attrs plugin tests. from typing import Union, overload class object: @@ -9,13 +9,13 @@ class object: class type: pass class bytes: pass class function: pass -class bool: pass class float: pass class int: @overload def __init__(self, x: Union[str, bytes, int] = ...) -> None: ... @overload def __init__(self, x: Union[str, bytes], base: int) -> None: ... +class bool(int): pass class complex: @overload def __init__(self, real: float = ..., im: float = ...) -> None: ... diff --git a/test-data/unit/fixtures/primitives.pyi b/test-data/unit/fixtures/primitives.pyi index 90d76b9d76dd..b74252857d6f 100644 --- a/test-data/unit/fixtures/primitives.pyi +++ b/test-data/unit/fixtures/primitives.pyi @@ -1,4 +1,5 @@ # builtins stub with non-generic primitive types +import _typeshed from typing import Generic, TypeVar, Sequence, Iterator, Mapping, Iterable, Tuple, Union T = TypeVar('T') diff --git a/test-data/unit/fixtures/slice.pyi b/test-data/unit/fixtures/slice.pyi index b5a4549da068..b22a12b5213f 100644 --- a/test-data/unit/fixtures/slice.pyi +++ b/test-data/unit/fixtures/slice.pyi @@ -15,3 +15,5 @@ class str: pass class slice: pass class ellipsis: pass class dict: pass +class list(Generic[T]): + def __getitem__(self, x: slice) -> list[T]: pass diff --git a/test-data/unit/fixtures/tuple.pyi b/test-data/unit/fixtures/tuple.pyi index 0261731304b1..e270f3d79d3e 100644 --- a/test-data/unit/fixtures/tuple.pyi +++ b/test-data/unit/fixtures/tuple.pyi @@ -1,5 +1,6 @@ # Builtins stub used in tuple-related test cases. 
+import _typeshed from typing import Iterable, Iterator, TypeVar, Generic, Sequence, Optional, overload, Tuple, Type T = TypeVar("T") diff --git a/test-data/unit/fixtures/typing-async.pyi b/test-data/unit/fixtures/typing-async.pyi index b061337845c2..b207dd599c33 100644 --- a/test-data/unit/fixtures/typing-async.pyi +++ b/test-data/unit/fixtures/typing-async.pyi @@ -108,6 +108,7 @@ class Sequence(Iterable[T_co], Container[T_co]): def __getitem__(self, n: Any) -> T_co: pass class Mapping(Iterable[T], Generic[T, T_co], metaclass=ABCMeta): + def keys(self) -> Iterable[T]: pass # Approximate return type def __getitem__(self, key: T) -> T_co: pass @overload def get(self, k: T) -> Optional[T_co]: pass diff --git a/test-data/unit/fixtures/typing-full.pyi b/test-data/unit/fixtures/typing-full.pyi index 1471473249dc..2f0d51dd2b92 100644 --- a/test-data/unit/fixtures/typing-full.pyi +++ b/test-data/unit/fixtures/typing-full.pyi @@ -136,6 +136,7 @@ class MutableSequence(Sequence[T]): def __setitem__(self, n: Any, o: T) -> None: pass class Mapping(Iterable[T], Generic[T, T_co], metaclass=ABCMeta): + def keys(self) -> Iterable[T]: pass # Approximate return type def __getitem__(self, key: T) -> T_co: pass @overload def get(self, k: T) -> Optional[T_co]: pass diff --git a/test-data/unit/fixtures/typing-medium.pyi b/test-data/unit/fixtures/typing-medium.pyi index 863b0703989d..03be1d0a664d 100644 --- a/test-data/unit/fixtures/typing-medium.pyi +++ b/test-data/unit/fixtures/typing-medium.pyi @@ -55,6 +55,7 @@ class Sequence(Iterable[T_co]): def __getitem__(self, n: Any) -> T_co: pass class Mapping(Iterable[T], Generic[T, T_co]): + def keys(self) -> Iterable[T]: pass # Approximate return type def __getitem__(self, key: T) -> T_co: pass class SupportsInt(Protocol): diff --git a/test-data/unit/fixtures/typing-namedtuple.pyi b/test-data/unit/fixtures/typing-namedtuple.pyi index 1a31549463b6..c8658a815a13 100644 --- a/test-data/unit/fixtures/typing-namedtuple.pyi +++ b/test-data/unit/fixtures/typing-namedtuple.pyi @@ -7,13 +7,16 @@ Literal = 0 Optional = 0 Self = 0 +T = TypeVar('T') T_co = TypeVar('T_co', covariant=True) KT = TypeVar('KT') class Iterable(Generic[T_co]): pass class Iterator(Iterable[T_co]): pass class Sequence(Iterable[T_co]): pass -class Mapping(Iterable[KT], Generic[KT, T_co]): pass +class Mapping(Iterable[KT], Generic[KT, T_co]): + def keys(self) -> Iterable[T]: pass # Approximate return type + def __getitem__(self, key: T) -> T_co: pass class Tuple(Sequence): pass class NamedTuple(Tuple): diff --git a/test-data/unit/fixtures/typing-typeddict.pyi b/test-data/unit/fixtures/typing-typeddict.pyi index 92ae402b9ea5..24a2f1328981 100644 --- a/test-data/unit/fixtures/typing-typeddict.pyi +++ b/test-data/unit/fixtures/typing-typeddict.pyi @@ -49,6 +49,7 @@ class Sequence(Iterable[T_co]): def __getitem__(self, n: Any) -> T_co: pass # type: ignore[misc] class Mapping(Iterable[T], Generic[T, T_co], metaclass=ABCMeta): + def keys(self) -> Iterable[T]: pass # Approximate return type def __getitem__(self, key: T) -> T_co: pass @overload def get(self, k: T) -> Optional[T_co]: pass diff --git a/test-data/unit/lib-stub/_typeshed.pyi b/test-data/unit/lib-stub/_typeshed.pyi new file mode 100644 index 000000000000..054ad0ec0c46 --- /dev/null +++ b/test-data/unit/lib-stub/_typeshed.pyi @@ -0,0 +1,8 @@ +from typing import Protocol, TypeVar, Iterable + +_KT = TypeVar("_KT") +_VT_co = TypeVar("_VT_co", covariant=True) + +class SupportsKeysAndGetItem(Protocol[_KT, _VT_co]): + def keys(self) -> Iterable[_KT]: pass + 
def __getitem__(self, __key: _KT) -> _VT_co: pass diff --git a/test-data/unit/lib-stub/attr/__init__.pyi b/test-data/unit/lib-stub/attr/__init__.pyi index 795e5d3f4f69..1a3838aa3ab1 100644 --- a/test-data/unit/lib-stub/attr/__init__.pyi +++ b/test-data/unit/lib-stub/attr/__init__.pyi @@ -244,3 +244,6 @@ def field( order: Optional[bool] = ..., on_setattr: Optional[object] = ..., ) -> Any: ... + +def evolve(inst: _T, **changes: Any) -> _T: ... +def assoc(inst: _T, **changes: Any) -> _T: ... diff --git a/test-data/unit/lib-stub/attrs/__init__.pyi b/test-data/unit/lib-stub/attrs/__init__.pyi index d25774045132..8e9aa1fdced5 100644 --- a/test-data/unit/lib-stub/attrs/__init__.pyi +++ b/test-data/unit/lib-stub/attrs/__init__.pyi @@ -126,3 +126,6 @@ def field( order: Optional[bool] = ..., on_setattr: Optional[object] = ..., ) -> Any: ... + +def evolve(inst: _T, **changes: Any) -> _T: ... +def assoc(inst: _T, **changes: Any) -> _T: ... diff --git a/test-data/unit/lib-stub/builtins.pyi b/test-data/unit/lib-stub/builtins.pyi index c2ac78c41661..17d519cc8eea 100644 --- a/test-data/unit/lib-stub/builtins.pyi +++ b/test-data/unit/lib-stub/builtins.pyi @@ -2,6 +2,8 @@ # # Use [builtins fixtures/...pyi] if you need more features. +import _typeshed + class object: def __init__(self) -> None: pass diff --git a/test-data/unit/lib-stub/contextlib.pyi b/test-data/unit/lib-stub/contextlib.pyi index e2a0cccd562a..ca9e91cf4d65 100644 --- a/test-data/unit/lib-stub/contextlib.pyi +++ b/test-data/unit/lib-stub/contextlib.pyi @@ -1,6 +1,5 @@ -import sys -from typing import Generic, TypeVar, Callable, Iterator -from typing import ContextManager as ContextManager +from typing import AsyncIterator, Generic, TypeVar, Callable, Iterator +from typing import ContextManager as ContextManager, AsyncContextManager as AsyncContextManager _T = TypeVar('_T') @@ -11,7 +10,4 @@ class GeneratorContextManager(ContextManager[_T], Generic[_T]): def contextmanager(func: Callable[..., Iterator[_T]]) -> Callable[..., GeneratorContextManager[_T]]: ... -if sys.version_info >= (3, 7): - from typing import AsyncIterator - from typing import AsyncContextManager as AsyncContextManager - def asynccontextmanager(func: Callable[..., AsyncIterator[_T]]) -> Callable[..., AsyncContextManager[_T]]: ... +def asynccontextmanager(func: Callable[..., AsyncIterator[_T]]) -> Callable[..., AsyncContextManager[_T]]: ... diff --git a/test-data/unit/lib-stub/math.pyi b/test-data/unit/lib-stub/math.pyi new file mode 100644 index 000000000000..587b04a56de8 --- /dev/null +++ b/test-data/unit/lib-stub/math.pyi @@ -0,0 +1,16 @@ +pi: float +def sqrt(__x: float) -> float: ... +def sin(__x: float) -> float: ... +def cos(__x: float) -> float: ... +def tan(__x: float) -> float: ... +def exp(__x: float) -> float: ... +def log(__x: float) -> float: ... +def floor(__x: float) -> int: ... +def ceil(__x: float) -> int: ... +def fabs(__x: float) -> float: ... +def pow(__x: float, __y: float) -> float: ... +def copysign(__x: float, __y: float) -> float: ... +def isinf(__x: float) -> bool: ... +def isnan(__x: float) -> bool: ... +def isfinite(__x: float) -> bool: ... +def nextafter(__x: float, __y: float) -> float: ... 
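Editorial note (not part of the patch): the `_typeshed` stub above, together with the `dict.update()` fixture change, replaces the old `Mapping` requirement with the structural `SupportsKeysAndGetItem` protocol, which is what produces the new "Unpacked dict entry ... expected SupportsKeysAndGetItem" messages. A small runnable sketch of why that matters; `PairStore` and `merge_into` are illustrative names, and the runtime behaviour relies on CPython's `dict.update()` only needing `keys()` and `__getitem__()`:

```python
from typing import Dict, Iterable, Protocol, TypeVar

_KT = TypeVar("_KT")
_VT_co = TypeVar("_VT_co", covariant=True)


class SupportsKeysAndGetItem(Protocol[_KT, _VT_co]):
    def keys(self) -> Iterable[_KT]: ...
    def __getitem__(self, key: _KT) -> _VT_co: ...


class PairStore:
    """Not a Mapping subclass, but structurally provides keys() and __getitem__()."""

    def __init__(self) -> None:
        self._data = {"a": 1, "b": 2}

    def keys(self) -> Iterable[str]:
        return self._data.keys()

    def __getitem__(self, key: str) -> int:
        return self._data[key]


def merge_into(target: Dict[str, int], source: SupportsKeysAndGetItem[str, int]) -> None:
    # dict.update() accepts any object with keys() and __getitem__(),
    # which is exactly what the narrower protocol expresses.
    target.update(source)


d: Dict[str, int] = {}
merge_into(d, PairStore())
print(d)  # {'a': 1, 'b': 2}
```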
diff --git a/test-data/unit/lib-stub/mypy_extensions.pyi b/test-data/unit/lib-stub/mypy_extensions.pyi index d79be8719417..56fac31e7219 100644 --- a/test-data/unit/lib-stub/mypy_extensions.pyi +++ b/test-data/unit/lib-stub/mypy_extensions.pyi @@ -3,7 +3,6 @@ from typing import ( Any, Dict, Type, TypeVar, Optional, Any, Generic, Mapping, NoReturn as NoReturn, Iterator, Union, Protocol ) -import sys _T = TypeVar('_T') _U = TypeVar('_U') @@ -33,8 +32,6 @@ class _TypedDict(Mapping[str, object]): # Mypy expects that 'default' has a type variable type. def pop(self, k: NoReturn, default: _T = ...) -> object: ... def update(self: _T, __m: _T) -> None: ... - if sys.version_info < (3, 0): - def has_key(self, k: str) -> bool: ... def __delitem__(self, k: NoReturn) -> None: ... def TypedDict(typename: str, fields: Dict[str, Type[_T]], *, total: Any = ...) -> Type[dict]: ... @@ -50,68 +47,67 @@ mypyc_attr: Any class FlexibleAlias(Generic[_T, _U]): ... -if sys.version_info >= (3, 0): - class __SupportsInt(Protocol[T_co]): - def __int__(self) -> int: pass - - _Int = Union[int, i32, i64] - - class i32: - def __init__(self, x: Union[_Int, str, bytes, SupportsInt], base: int = 10) -> None: ... - def __add__(self, x: i32) -> i32: ... - def __radd__(self, x: i32) -> i32: ... - def __sub__(self, x: i32) -> i32: ... - def __rsub__(self, x: i32) -> i32: ... - def __mul__(self, x: i32) -> i32: ... - def __rmul__(self, x: i32) -> i32: ... - def __floordiv__(self, x: i32) -> i32: ... - def __rfloordiv__(self, x: i32) -> i32: ... - def __mod__(self, x: i32) -> i32: ... - def __rmod__(self, x: i32) -> i32: ... - def __and__(self, x: i32) -> i32: ... - def __rand__(self, x: i32) -> i32: ... - def __or__(self, x: i32) -> i32: ... - def __ror__(self, x: i32) -> i32: ... - def __xor__(self, x: i32) -> i32: ... - def __rxor__(self, x: i32) -> i32: ... - def __lshift__(self, x: i32) -> i32: ... - def __rlshift__(self, x: i32) -> i32: ... - def __rshift__(self, x: i32) -> i32: ... - def __rrshift__(self, x: i32) -> i32: ... - def __neg__(self) -> i32: ... - def __invert__(self) -> i32: ... - def __pos__(self) -> i32: ... - def __lt__(self, x: i32) -> bool: ... - def __le__(self, x: i32) -> bool: ... - def __ge__(self, x: i32) -> bool: ... - def __gt__(self, x: i32) -> bool: ... - - class i64: - def __init__(self, x: Union[_Int, str, bytes, SupportsInt], base: int = 10) -> None: ... - def __add__(self, x: i64) -> i64: ... - def __radd__(self, x: i64) -> i64: ... - def __sub__(self, x: i64) -> i64: ... - def __rsub__(self, x: i64) -> i64: ... - def __mul__(self, x: i64) -> i64: ... - def __rmul__(self, x: i64) -> i64: ... - def __floordiv__(self, x: i64) -> i64: ... - def __rfloordiv__(self, x: i64) -> i64: ... - def __mod__(self, x: i64) -> i64: ... - def __rmod__(self, x: i64) -> i64: ... - def __and__(self, x: i64) -> i64: ... - def __rand__(self, x: i64) -> i64: ... - def __or__(self, x: i64) -> i64: ... - def __ror__(self, x: i64) -> i64: ... - def __xor__(self, x: i64) -> i64: ... - def __rxor__(self, x: i64) -> i64: ... - def __lshift__(self, x: i64) -> i64: ... - def __rlshift__(self, x: i64) -> i64: ... - def __rshift__(self, x: i64) -> i64: ... - def __rrshift__(self, x: i64) -> i64: ... - def __neg__(self) -> i64: ... - def __invert__(self) -> i64: ... - def __pos__(self) -> i64: ... - def __lt__(self, x: i64) -> bool: ... - def __le__(self, x: i64) -> bool: ... - def __ge__(self, x: i64) -> bool: ... - def __gt__(self, x: i64) -> bool: ... 
+class __SupportsInt(Protocol[T_co]): + def __int__(self) -> int: pass + +_Int = Union[int, i32, i64] + +class i32: + def __init__(self, x: Union[_Int, str, bytes, SupportsInt], base: int = 10) -> None: ... + def __add__(self, x: i32) -> i32: ... + def __radd__(self, x: i32) -> i32: ... + def __sub__(self, x: i32) -> i32: ... + def __rsub__(self, x: i32) -> i32: ... + def __mul__(self, x: i32) -> i32: ... + def __rmul__(self, x: i32) -> i32: ... + def __floordiv__(self, x: i32) -> i32: ... + def __rfloordiv__(self, x: i32) -> i32: ... + def __mod__(self, x: i32) -> i32: ... + def __rmod__(self, x: i32) -> i32: ... + def __and__(self, x: i32) -> i32: ... + def __rand__(self, x: i32) -> i32: ... + def __or__(self, x: i32) -> i32: ... + def __ror__(self, x: i32) -> i32: ... + def __xor__(self, x: i32) -> i32: ... + def __rxor__(self, x: i32) -> i32: ... + def __lshift__(self, x: i32) -> i32: ... + def __rlshift__(self, x: i32) -> i32: ... + def __rshift__(self, x: i32) -> i32: ... + def __rrshift__(self, x: i32) -> i32: ... + def __neg__(self) -> i32: ... + def __invert__(self) -> i32: ... + def __pos__(self) -> i32: ... + def __lt__(self, x: i32) -> bool: ... + def __le__(self, x: i32) -> bool: ... + def __ge__(self, x: i32) -> bool: ... + def __gt__(self, x: i32) -> bool: ... + +class i64: + def __init__(self, x: Union[_Int, str, bytes, SupportsInt], base: int = 10) -> None: ... + def __add__(self, x: i64) -> i64: ... + def __radd__(self, x: i64) -> i64: ... + def __sub__(self, x: i64) -> i64: ... + def __rsub__(self, x: i64) -> i64: ... + def __mul__(self, x: i64) -> i64: ... + def __rmul__(self, x: i64) -> i64: ... + def __floordiv__(self, x: i64) -> i64: ... + def __rfloordiv__(self, x: i64) -> i64: ... + def __mod__(self, x: i64) -> i64: ... + def __rmod__(self, x: i64) -> i64: ... + def __and__(self, x: i64) -> i64: ... + def __rand__(self, x: i64) -> i64: ... + def __or__(self, x: i64) -> i64: ... + def __ror__(self, x: i64) -> i64: ... + def __xor__(self, x: i64) -> i64: ... + def __rxor__(self, x: i64) -> i64: ... + def __lshift__(self, x: i64) -> i64: ... + def __rlshift__(self, x: i64) -> i64: ... + def __rshift__(self, x: i64) -> i64: ... + def __rrshift__(self, x: i64) -> i64: ... + def __neg__(self) -> i64: ... + def __invert__(self) -> i64: ... + def __pos__(self) -> i64: ... + def __lt__(self, x: i64) -> bool: ... + def __le__(self, x: i64) -> bool: ... + def __ge__(self, x: i64) -> bool: ... + def __gt__(self, x: i64) -> bool: ... diff --git a/test-data/unit/lib-stub/numbers.pyi b/test-data/unit/lib-stub/numbers.pyi new file mode 100644 index 000000000000..fad173c9a8b6 --- /dev/null +++ b/test-data/unit/lib-stub/numbers.pyi @@ -0,0 +1,10 @@ +# Test fixture for numbers +# +# The numbers module isn't properly supported, but we want to test that mypy +# can tell that it doesn't work as expected. + +class Number: pass +class Complex: pass +class Real: pass +class Rational: pass +class Integral: pass diff --git a/test-data/unit/lib-stub/typing.pyi b/test-data/unit/lib-stub/typing.pyi index a306b70f74d7..b35b64a383c9 100644 --- a/test-data/unit/lib-stub/typing.pyi +++ b/test-data/unit/lib-stub/typing.pyi @@ -50,7 +50,9 @@ class Sequence(Iterable[T_co]): def __getitem__(self, n: Any) -> T_co: pass # Mapping type is oversimplified intentionally. 
-class Mapping(Iterable[T], Generic[T, T_co]): pass +class Mapping(Iterable[T], Generic[T, T_co]): + def keys(self) -> Iterable[T]: pass # Approximate return type + def __getitem__(self, key: T) -> T_co: pass class Awaitable(Protocol[T]): def __await__(self) -> Generator[Any, Any, T]: pass diff --git a/test-data/unit/lib-stub/typing_extensions.pyi b/test-data/unit/lib-stub/typing_extensions.pyi index 22b895971521..3202c3d49e01 100644 --- a/test-data/unit/lib-stub/typing_extensions.pyi +++ b/test-data/unit/lib-stub/typing_extensions.pyi @@ -1,5 +1,5 @@ import typing -from typing import Any, Mapping, Iterator, NoReturn as NoReturn, Dict, Type +from typing import Any, Mapping, Iterable, Iterator, NoReturn as NoReturn, Dict, Tuple, Type from typing import TYPE_CHECKING as TYPE_CHECKING from typing import NewType as NewType, overload as overload @@ -50,9 +50,16 @@ class _TypedDict(Mapping[str, object]): # Mypy expects that 'default' has a type variable type. def pop(self, k: NoReturn, default: _T = ...) -> object: ... def update(self: _T, __m: _T) -> None: ... + def items(self) -> Iterable[Tuple[str, object]]: ... + def keys(self) -> Iterable[str]: ... + def values(self) -> Iterable[object]: ... if sys.version_info < (3, 0): def has_key(self, k: str) -> bool: ... def __delitem__(self, k: NoReturn) -> None: ... + # Stubtest's tests need the following items: + __required_keys__: frozenset[str] + __optional_keys__: frozenset[str] + __total__: bool def TypedDict(typename: str, fields: Dict[str, Type[_T]], *, total: Any = ...) -> Type[dict]: ... diff --git a/test-data/unit/merge.test b/test-data/unit/merge.test index 42d38c89482c..19b1839f86c0 100644 --- a/test-data/unit/merge.test +++ b/test-data/unit/merge.test @@ -39,7 +39,7 @@ MypyFile:1<1>( FuncDef:1<2>( f def () -> builtins.int<3> - Block:1<4>( + Block:2<4>( PassStmt:2<5>()))) ==> MypyFile:1<0>( @@ -50,7 +50,7 @@ MypyFile:1<1>( FuncDef:1<2>( f def () -> builtins.int<3> - Block:1<6>( + Block:2<6>( PassStmt:2<7>()))) [case testClass] @@ -77,7 +77,7 @@ MypyFile:1<1>( Var(self) Var(x)) def (self: target.A<4>, x: builtins.str<5>) -> builtins.int<6> - Block:2<7>( + Block:3<7>( PassStmt:3<8>())))) ==> MypyFile:1<0>( @@ -93,7 +93,7 @@ MypyFile:1<1>( Var(self) Var(x)) def (self: target.A<4>, x: builtins.int<6>) -> builtins.str<5> - Block:2<10>( + Block:3<10>( PassStmt:3<11>())))) [case testClass_typeinfo] @@ -149,7 +149,7 @@ MypyFile:1<1>( Args( Var(self)) def (self: target.A<4>) -> target.B<5> - Block:2<6>( + Block:3<6>( ReturnStmt:3<7>( CallExpr:3<8>( NameExpr(B [target.B<5>]) @@ -173,7 +173,7 @@ MypyFile:1<1>( Args( Var(self)) def (self: target.A<4>) -> target.B<5> - Block:3<14>( + Block:4<14>( ExpressionStmt:4<15>( IntExpr(1)) ReturnStmt:5<16>( @@ -204,7 +204,7 @@ MypyFile:1<1>( Args( Var(self)) def (self: target.A<4>) - Block:2<5>( + Block:3<5>( ExpressionStmt:3<6>( CallExpr:3<7>( MemberExpr:3<8>( @@ -224,7 +224,7 @@ MypyFile:1<1>( Args( Var(self)) def (self: target.A<4>) - Block:2<11>( + Block:3<11>( ExpressionStmt:3<12>( CallExpr:3<13>( MemberExpr:3<14>( @@ -257,7 +257,7 @@ MypyFile:1<1>( Args( Var(self)) def (self: target.A<4>) - Block:2<5>( + Block:3<5>( AssignmentStmt:3<6>( MemberExpr:3<8>( NameExpr(self [l<9>]) @@ -280,7 +280,7 @@ MypyFile:1<1>( Args( Var(self)) def (self: target.A<4>) - Block:2<13>( + Block:3<13>( AssignmentStmt:3<14>( MemberExpr:3<15>( NameExpr(self [l<16>]) diff --git a/test-data/unit/parse.test b/test-data/unit/parse.test index ff892ce0ce05..10ceaa947fd4 100644 --- a/test-data/unit/parse.test +++ 
b/test-data/unit/parse.test @@ -95,7 +95,6 @@ MypyFile:1( StrExpr(x\n\')) ExpressionStmt:2( StrExpr(x\n\"))) ---" fix syntax highlight [case testBytes] b'foo' @@ -128,7 +127,6 @@ MypyFile:1( MypyFile:1( ExpressionStmt:1( StrExpr('))) ---' [case testOctalEscapes] '\0\1\177\1234' @@ -203,7 +201,7 @@ def main(): MypyFile:1( FuncDef:1( main - Block:1( + Block:2( ExpressionStmt:2( IntExpr(1))))) @@ -214,7 +212,7 @@ def f(): MypyFile:1( FuncDef:1( f - Block:1( + Block:2( PassStmt:2()))) [case testIf] @@ -288,7 +286,7 @@ while 1: MypyFile:1( WhileStmt:1( IntExpr(1) - Block:1( + Block:2( PassStmt:2()))) [case testReturn] @@ -298,7 +296,7 @@ def f(): MypyFile:1( FuncDef:1( f - Block:1( + Block:2( ReturnStmt:2( IntExpr(1))))) @@ -310,7 +308,7 @@ def f(): MypyFile:1( FuncDef:1( f - Block:1( + Block:2( ReturnStmt:2()))) [case testBreak] @@ -320,7 +318,7 @@ while 1: MypyFile:1( WhileStmt:1( IntExpr(1) - Block:1( + Block:2( BreakStmt:2()))) [case testLargeBlock] @@ -340,7 +338,7 @@ MypyFile:1( IntExpr(1)) WhileStmt:3( IntExpr(2) - Block:3( + Block:4( PassStmt:4())) AssignmentStmt:5( NameExpr(y) @@ -358,7 +356,7 @@ MypyFile:1( f Args( Var(self)) - Block:2( + Block:3( PassStmt:3())))) [case testGlobalVarWithType] @@ -384,7 +382,7 @@ def f(): MypyFile:1( FuncDef:1( f - Block:1( + Block:2( AssignmentStmt:2( NameExpr(x) IntExpr(0) @@ -413,7 +411,7 @@ MypyFile:1( Args( Var(y)) def (y: str?) -> int? - Block:1( + Block:2( ReturnStmt:2())) ClassDef:3( A @@ -424,14 +422,14 @@ MypyFile:1( Var(a) Var(b)) def (self: Any, a: int?, b: Any?) -> x? - Block:4( + Block:5( PassStmt:5())) FuncDef:6( g Args( Var(self)) def (self: Any) -> Any? - Block:6( + Block:7( PassStmt:7())))) [case testFuncWithNoneReturn] @@ -442,7 +440,7 @@ MypyFile:1( FuncDef:1( f def () -> None? - Block:1( + Block:2( PassStmt:2()))) [case testVarDefWithGenericType] @@ -469,7 +467,7 @@ MypyFile:1( Args( Var(y)) def (y: t?[Any?, x?]) -> a?[b?[c?], d?] 
- Block:1( + Block:2( PassStmt:2()))) [case testParsingExpressionsWithLessAndGreaterThan] @@ -589,7 +587,7 @@ MypyFile:1( __init__ Args( Var(self)) - Block:2( + Block:3( AssignmentStmt:3( MemberExpr:3( NameExpr(self) @@ -785,7 +783,7 @@ MypyFile:1( ForStmt:1( NameExpr(x) NameExpr(y) - Block:1( + Block:2( PassStmt:2())) ForStmt:3( TupleExpr:3( @@ -794,7 +792,7 @@ MypyFile:1( NameExpr(y) NameExpr(w))) NameExpr(z) - Block:3( + Block:4( ExpressionStmt:4( IntExpr(1)))) ForStmt:5( @@ -804,7 +802,7 @@ MypyFile:1( NameExpr(y) NameExpr(w))) NameExpr(z) - Block:5( + Block:6( ExpressionStmt:6( IntExpr(1))))) @@ -818,7 +816,7 @@ MypyFile:1( x) FuncDef:2( f - Block:2( + Block:3( GlobalDecl:3( x y)))) @@ -831,10 +829,10 @@ def f(): MypyFile:1( FuncDef:1( f - Block:1( + Block:2( FuncDef:2( g - Block:2( + Block:3( NonlocalDecl:3( x y)))))) @@ -854,9 +852,9 @@ except: [out] MypyFile:1( TryStmt:1( - Block:1( + Block:2( PassStmt:2()) - Block:3( + Block:4( RaiseStmt:4()))) [case testRaiseFrom] @@ -1051,7 +1049,7 @@ MypyFile:1( Import:2(x) FuncDef:3( f - Block:3( + Block:4( ImportFrom:4(x, [y]) ImportAll:5(z)))) @@ -1074,7 +1072,7 @@ MypyFile:1( default( Var(x) IntExpr(1))) - Block:1( + Block:2( PassStmt:2())) FuncDef:3( g @@ -1091,7 +1089,7 @@ MypyFile:1( TupleExpr:3( IntExpr(1) IntExpr(2)))) - Block:3( + Block:4( PassStmt:4()))) [case testTryFinally] @@ -1102,7 +1100,7 @@ finally: [out] MypyFile:1( TryStmt:1( - Block:1( + Block:2( ExpressionStmt:2( IntExpr(1))) Finally( @@ -1117,11 +1115,11 @@ except x: [out] MypyFile:1( TryStmt:1( - Block:1( + Block:2( ExpressionStmt:2( IntExpr(1))) NameExpr(x) - Block:3( + Block:4( ExpressionStmt:4( IntExpr(2))))) @@ -1135,18 +1133,18 @@ except x.y: [out] MypyFile:1( TryStmt:1( - Block:1( + Block:2( ExpressionStmt:2( IntExpr(1))) NameExpr(x) NameExpr(y) - Block:3( + Block:4( ExpressionStmt:4( IntExpr(2))) MemberExpr:5( NameExpr(x) y) - Block:5( + Block:6( ExpressionStmt:6( IntExpr(3))))) @@ -1300,7 +1298,7 @@ def f(): MypyFile:1( FuncDef:1( f - Block:1( + Block:2( ExpressionStmt:2( YieldExpr:2( OpExpr:2( @@ -1315,7 +1313,7 @@ def f(): MypyFile:1( FuncDef:1( f - Block:1( + Block:2( ExpressionStmt:2( YieldFromExpr:2( CallExpr:2( @@ -1329,7 +1327,7 @@ def f(): MypyFile:1( FuncDef:1( f - Block:1( + Block:2( AssignmentStmt:2( NameExpr(a) YieldFromExpr:2( @@ -1389,7 +1387,7 @@ MypyFile:1( f Args( Var(x)) - Block:1( + Block:2( PassStmt:2()))) [case testLambda] @@ -1460,7 +1458,7 @@ MypyFile:1( NameExpr(i) NameExpr(j)) NameExpr(x) - Block:1( + Block:2( PassStmt:2()))) [case testForAndTrailingCommaAfterIndexVar] @@ -1472,7 +1470,7 @@ MypyFile:1( TupleExpr:1( NameExpr(i)) NameExpr(x) - Block:1( + Block:2( PassStmt:2()))) [case testListComprehensionAndTrailingCommaAfterIndexVar] @@ -1498,7 +1496,7 @@ MypyFile:1( NameExpr(i) NameExpr(j)) NameExpr(x) - Block:1( + Block:2( PassStmt:2()))) [case testGeneratorWithCondition] @@ -1630,7 +1628,7 @@ MypyFile:1( StrExpr(foo)))) Target( NameExpr(f)) - Block:1( + Block:2( PassStmt:2()))) [case testWithStatementWithoutTarget] @@ -1641,7 +1639,7 @@ MypyFile:1( WithStmt:1( Expr( NameExpr(foo)) - Block:1( + Block:2( PassStmt:2()))) [case testHexOctBinLiterals] @@ -1673,7 +1671,7 @@ while 1: MypyFile:1( WhileStmt:1( IntExpr(1) - Block:1( + Block:2( ContinueStmt:2()))) [case testStrLiteralConcatenate] @@ -1702,19 +1700,19 @@ except: [out] MypyFile:1( TryStmt:1( - Block:1( + Block:2( ExpressionStmt:2( IntExpr(1))) - Block:3( + Block:4( PassStmt:4())) TryStmt:5( - Block:5( + Block:6( ExpressionStmt:6( IntExpr(1))) NameExpr(x) - Block:7( + Block:8( 
PassStmt:8()) - Block:9( + Block:10( ExpressionStmt:10( IntExpr(2))))) @@ -1728,10 +1726,10 @@ else: [out] MypyFile:1( TryStmt:1( - Block:1( + Block:2( PassStmt:2()) NameExpr(x) - Block:3( + Block:4( ExpressionStmt:4( IntExpr(1))) Else( @@ -1748,19 +1746,19 @@ except (a, b, c) as e: [out] MypyFile:1( TryStmt:1( - Block:1( + Block:2( PassStmt:2()) TupleExpr:3( NameExpr(x) NameExpr(y)) - Block:3( + Block:4( PassStmt:4()) TupleExpr:5( NameExpr(a) NameExpr(b) NameExpr(c)) NameExpr(e) - Block:5( + Block:6( PassStmt:6()))) [case testNestedFunctions] @@ -1774,19 +1772,19 @@ def h() -> int: MypyFile:1( FuncDef:1( f - Block:1( + Block:2( FuncDef:2( g - Block:2( + Block:3( PassStmt:3())))) FuncDef:4( h def () -> int? - Block:4( + Block:5( FuncDef:5( g def () -> int? - Block:5( + Block:6( PassStmt:6()))))) [case testStatementsAndDocStringsInClassBody] @@ -1808,7 +1806,7 @@ MypyFile:1( f Args( Var(self)) - Block:4( + Block:5( PassStmt:5())))) [case testSingleLineClass] @@ -1830,7 +1828,7 @@ MypyFile:1( NameExpr(property) FuncDef:2( f - Block:2( + Block:3( PassStmt:3())))) [case testComplexDecorator] @@ -1851,7 +1849,7 @@ MypyFile:1( FuncDef:3( f def () -> int? - Block:3( + Block:4( PassStmt:4())))) [case testKeywordArgInCall] @@ -2036,7 +2034,7 @@ def f(): MypyFile:1( FuncDef:1( f - Block:1( + Block:2( ExpressionStmt:2( YieldExpr:2())))) @@ -2122,7 +2120,7 @@ MypyFile:1( ForStmt:1( NameExpr(x) NameExpr(y) - Block:1( + Block:2( PassStmt:2()) Else( ExpressionStmt:4( @@ -2137,7 +2135,7 @@ else: MypyFile:1( WhileStmt:1( NameExpr(x) - Block:1( + Block:2( PassStmt:2()) Else( ExpressionStmt:4( @@ -2159,7 +2157,7 @@ MypyFile:1( NameExpr(a)) Target( NameExpr(b)) - Block:1( + Block:2( PassStmt:2())) WithStmt:3( Expr( @@ -2170,7 +2168,7 @@ MypyFile:1( CallExpr:3( NameExpr(y) Args())) - Block:3( + Block:4( PassStmt:4()))) [case testOperatorAssignment] @@ -2264,10 +2262,10 @@ finally: [out] MypyFile:1( TryStmt:1( - Block:1( + Block:2( PassStmt:2()) NameExpr(x) - Block:3( + Block:4( ExpressionStmt:4( NameExpr(x))) Finally( @@ -2642,7 +2640,7 @@ def f(): MypyFile:1( FuncDef:1( f - Block:1( + Block:2( OverloadedFuncDef:2( Decorator:2( Var(g) @@ -2670,14 +2668,14 @@ MypyFile:1( FuncDef:1( f def () -> A? - Block:1( + Block:2( PassStmt:2())) FuncDef:3( g Args( Var(x)) def (x: A?) -> B? - Block:3( + Block:4( PassStmt:4()))) [case testCommentMethodAnnotation] @@ -2695,7 +2693,7 @@ MypyFile:1( Args( Var(self)) def (self: Any) -> A? - Block:2( + Block:3( PassStmt:3())) FuncDef:4( g @@ -2703,7 +2701,7 @@ MypyFile:1( Var(xself) Var(x)) def (xself: Any, x: A?) -> B? - Block:4( + Block:5( PassStmt:5())))) [case testCommentMethodAnnotationAndNestedFunction] @@ -2720,13 +2718,13 @@ MypyFile:1( Args( Var(self)) def (self: Any) -> A? - Block:2( + Block:3( FuncDef:3( g Args( Var(x)) def (x: A?) -> B? - Block:3( + Block:4( PassStmt:4())))))) [case testCommentFunctionAnnotationOnSeparateLine] @@ -2740,7 +2738,7 @@ MypyFile:1( Args( Var(x)) def (x: X?) -> Y? - Block:1( + Block:3( PassStmt:3()))) [case testCommentFunctionAnnotationOnSeparateLine2] @@ -2756,7 +2754,7 @@ MypyFile:1( Args( Var(x)) def (x: X?) -> Y? - Block:1( + Block:5( PassStmt:5()))) [case testCommentFunctionAnnotationAndVarArg] @@ -2771,7 +2769,7 @@ MypyFile:1( def (x: X?, *y: Y?) -> Z? 
VarArg( Var(y)) - Block:1( + Block:2( PassStmt:2()))) [case testCommentFunctionAnnotationAndAllVarArgs] @@ -2788,7 +2786,7 @@ MypyFile:1( Var(y)) DictVarArg( Var(z)) - Block:1( + Block:2( PassStmt:2()))) [case testClassDecorator] @@ -2826,11 +2824,11 @@ def y(): MypyFile:1( FuncDef:1( x - Block:1( + Block:2( PassStmt:2())) FuncDef:4( y - Block:4( + Block:5( PassStmt:5()))) [case testEmptySuperClass] @@ -2907,7 +2905,7 @@ MypyFile:1( StarExpr:1( NameExpr(a)) NameExpr(b) - Block:1( + Block:2( PassStmt:2())) ForStmt:4( TupleExpr:4( @@ -2915,7 +2913,7 @@ MypyFile:1( StarExpr:4( NameExpr(b))) NameExpr(c) - Block:4( + Block:5( PassStmt:5())) ForStmt:7( TupleExpr:7( @@ -2923,7 +2921,7 @@ MypyFile:1( NameExpr(a)) NameExpr(b)) NameExpr(c) - Block:7( + Block:8( PassStmt:8()))) [case testStarExprInGeneratorExpr] @@ -3032,7 +3030,7 @@ while 2: MypyFile:1( WhileStmt:1( IntExpr(2) - Block:1( + Block:2( IfStmt:2( If( IntExpr(1)) @@ -3075,7 +3073,7 @@ while 2: MypyFile:1( WhileStmt:1( IntExpr(2) - Block:1( + Block:2( IfStmt:2( If( IntExpr(1)) @@ -3300,7 +3298,7 @@ def f(): MypyFile:1( FuncDef:1( f - Block:1( + Block:2( AssignmentStmt:2( NameExpr(x) YieldExpr:2( @@ -3341,7 +3339,7 @@ def f(): MypyFile:1( FuncDef:1( f - Block:1( + Block:2( ExpressionStmt:2( YieldExpr:2())))) @@ -3484,3 +3482,358 @@ MypyFile:1( NameExpr(y) NameExpr(y)) StrExpr())))))))))))) + +[case testStripFunctionBodiesIfIgnoringErrors] +# mypy: ignore-errors=True +def f(self): + self.x = 1 # Cannot define an attribute + return 1 +[out] +MypyFile:1( + FuncDef:2( + f + Args( + Var(self)) + Block:3())) + +[case testStripMethodBodiesIfIgnoringErrors] +# mypy: ignore-errors=True +class C: + def f(self): + x = self.x + for x in y: + pass + with a as y: + pass + while self.foo(): + self.bah() + a[self.x] = 1 +[out] +MypyFile:1( + ClassDef:2( + C + FuncDef:3( + f + Args( + Var(self)) + Block:4()))) + +[case testDoNotStripModuleTopLevelOrClassBody] +# mypy: ignore-errors=True +f() +class C: + x = 5 +[out] +MypyFile:1( + ExpressionStmt:2( + CallExpr:2( + NameExpr(f) + Args())) + ClassDef:3( + C + AssignmentStmt:4( + NameExpr(x) + IntExpr(5)))) + +[case testDoNotStripMethodThatAssignsToAttribute] +# mypy: ignore-errors=True +class C: + def m1(self): + self.x = 0 + def m2(self): + a, self.y = 0 +[out] +MypyFile:1( + ClassDef:2( + C + FuncDef:3( + m1 + Args( + Var(self)) + Block:4( + AssignmentStmt:4( + MemberExpr:4( + NameExpr(self) + x) + IntExpr(0)))) + FuncDef:5( + m2 + Args( + Var(self)) + Block:6( + AssignmentStmt:6( + TupleExpr:6( + NameExpr(a) + MemberExpr:6( + NameExpr(self) + y)) + IntExpr(0)))))) + +[case testDoNotStripMethodThatAssignsToAttributeWithinStatement] +# mypy: ignore-errors=True +class C: + def m1(self): + for x in y: + self.x = 0 + def m2(self): + with x: + self.y = 0 + def m3(self): + if x: + self.y = 0 + else: + x = 4 +[out] +MypyFile:1( + ClassDef:2( + C + FuncDef:3( + m1 + Args( + Var(self)) + Block:4( + ForStmt:4( + NameExpr(x) + NameExpr(y) + Block:5( + AssignmentStmt:5( + MemberExpr:5( + NameExpr(self) + x) + IntExpr(0)))))) + FuncDef:6( + m2 + Args( + Var(self)) + Block:7( + WithStmt:7( + Expr( + NameExpr(x)) + Block:8( + AssignmentStmt:8( + MemberExpr:8( + NameExpr(self) + y) + IntExpr(0)))))) + FuncDef:9( + m3 + Args( + Var(self)) + Block:10( + IfStmt:10( + If( + NameExpr(x)) + Then( + AssignmentStmt:11( + MemberExpr:11( + NameExpr(self) + y) + IntExpr(0))) + Else( + AssignmentStmt:13( + NameExpr(x) + IntExpr(4)))))))) + +[case testDoNotStripMethodThatDefinesAttributeWithoutAssignment] +# mypy: ignore-errors=True 
+class C: + def m1(self): + with y as self.x: + pass + def m2(self): + for self.y in x: + pass +[out] +MypyFile:1( + ClassDef:2( + C + FuncDef:3( + m1 + Args( + Var(self)) + Block:4( + WithStmt:4( + Expr( + NameExpr(y)) + Target( + MemberExpr:4( + NameExpr(self) + x)) + Block:5( + PassStmt:5())))) + FuncDef:6( + m2 + Args( + Var(self)) + Block:7( + ForStmt:7( + MemberExpr:7( + NameExpr(self) + y) + NameExpr(x) + Block:8( + PassStmt:8())))))) + +[case testStripDecoratedFunctionOrMethod] +# mypy: ignore-errors=True +@deco +def f(): + x = 0 + +class C: + @deco + def m1(self): + x = 0 + + @deco + def m2(self): + self.x = 0 +[out] +MypyFile:1( + Decorator:2( + Var(f) + NameExpr(deco) + FuncDef:3( + f + Block:4())) + ClassDef:6( + C + Decorator:7( + Var(m1) + NameExpr(deco) + FuncDef:8( + m1 + Args( + Var(self)) + Block:9())) + Decorator:11( + Var(m2) + NameExpr(deco) + FuncDef:12( + m2 + Args( + Var(self)) + Block:13( + AssignmentStmt:13( + MemberExpr:13( + NameExpr(self) + x) + IntExpr(0))))))) + +[case testStripOverloadedMethod] +# mypy: ignore-errors=True +class C: + @overload + def m1(self, x: int) -> None: ... + @overload + def m1(self, x: str) -> None: ... + def m1(self, x): + x = 0 + + @overload + def m2(self, x: int) -> None: ... + @overload + def m2(self, x: str) -> None: ... + def m2(self, x): + self.x = 0 +[out] +MypyFile:1( + ClassDef:2( + C + OverloadedFuncDef:3( + Decorator:3( + Var(m1) + NameExpr(overload) + FuncDef:4( + m1 + Args( + Var(self) + Var(x)) + def (self: Any, x: int?) -> None? + Block:4( + ExpressionStmt:4( + Ellipsis)))) + Decorator:5( + Var(m1) + NameExpr(overload) + FuncDef:6( + m1 + Args( + Var(self) + Var(x)) + def (self: Any, x: str?) -> None? + Block:6( + ExpressionStmt:6( + Ellipsis)))) + FuncDef:7( + m1 + Args( + Var(self) + Var(x)) + Block:8())) + OverloadedFuncDef:10( + Decorator:10( + Var(m2) + NameExpr(overload) + FuncDef:11( + m2 + Args( + Var(self) + Var(x)) + def (self: Any, x: int?) -> None? + Block:11( + ExpressionStmt:11( + Ellipsis)))) + Decorator:12( + Var(m2) + NameExpr(overload) + FuncDef:13( + m2 + Args( + Var(self) + Var(x)) + def (self: Any, x: str?) -> None? 
+ Block:13( + ExpressionStmt:13( + Ellipsis)))) + FuncDef:14( + m2 + Args( + Var(self) + Var(x)) + Block:15( + AssignmentStmt:15( + MemberExpr:15( + NameExpr(self) + x) + IntExpr(0))))))) + +[case testStripMethodInNestedClass] +# mypy: ignore-errors=True +class C: + class D: + def m1(self): + self.x = 1 + def m2(self): + return self.x +[out] +MypyFile:1( + ClassDef:2( + C + ClassDef:3( + D + FuncDef:4( + m1 + Args( + Var(self)) + Block:5( + AssignmentStmt:5( + MemberExpr:5( + NameExpr(self) + x) + IntExpr(1)))) + FuncDef:6( + m2 + Args( + Var(self)) + Block:7())))) diff --git a/test-data/unit/pythoneval.test b/test-data/unit/pythoneval.test index a3413e071184..034c2190dd5e 100644 --- a/test-data/unit/pythoneval.test +++ b/test-data/unit/pythoneval.test @@ -268,7 +268,9 @@ def bin(f: IO[bytes]) -> None: txt(sys.stdout) bin(sys.stdout) [out] -_program.py:5: error: Argument 1 to "write" of "IO" has incompatible type "bytes"; expected "str" +_program.py:5: error: No overload variant of "write" of "IO" matches argument type "bytes" +_program.py:5: note: Possible overload variants: +_program.py:5: note: def write(self, str, /) -> int _program.py:10: error: Argument 1 to "bin" has incompatible type "TextIO"; expected "IO[bytes]" [case testBuiltinOpen] @@ -1348,7 +1350,7 @@ def f() -> Dict[int, str]: def d() -> Dict[int, int]: return {} [out] -_testDictWithStarStarSpecialCase.py:4: error: Argument 1 to "update" of "MutableMapping" has incompatible type "Dict[int, int]"; expected "SupportsKeysAndGetItem[int, str]" +_testDictWithStarStarSpecialCase.py:4: error: Unpacked dict entry 1 has incompatible type "Dict[int, int]"; expected "SupportsKeysAndGetItem[int, str]" [case testLoadsOfOverloads] from typing import overload, Any, TypeVar, Iterable, List, Dict, Callable, Union @@ -1878,6 +1880,23 @@ _testEnumIterMetaInference.py:8: note: Revealed type is "typing.Iterator[_E`-1]" _testEnumIterMetaInference.py:9: note: Revealed type is "_E`-1" _testEnumIterMetaInference.py:13: note: Revealed type is "socket.SocketKind" +[case testEnumUnpackedViaMetaclass] +from enum import Enum + +class FooEnum(Enum): + A = 1 + B = 2 + C = 3 + +a, b, c = FooEnum +reveal_type(a) +reveal_type(b) +reveal_type(c) +[out] +_testEnumUnpackedViaMetaclass.py:9: note: Revealed type is "_testEnumUnpackedViaMetaclass.FooEnum" +_testEnumUnpackedViaMetaclass.py:10: note: Revealed type is "_testEnumUnpackedViaMetaclass.FooEnum" +_testEnumUnpackedViaMetaclass.py:11: note: Revealed type is "_testEnumUnpackedViaMetaclass.FooEnum" + [case testNativeIntTypes] # Spot check various native int operations with full stubs. 
from mypy_extensions import i64, i32 diff --git a/test-data/unit/semanal-basic.test b/test-data/unit/semanal-basic.test index 20443517e03e..169769f06a00 100644 --- a/test-data/unit/semanal-basic.test +++ b/test-data/unit/semanal-basic.test @@ -82,7 +82,7 @@ MypyFile:1( Args( Var(x) Var(y)) - Block:1( + Block:2( ExpressionStmt:2( TupleExpr:2( NameExpr(x [l]) @@ -96,7 +96,7 @@ def f(): MypyFile:1( FuncDef:1( f - Block:1( + Block:2( AssignmentStmt:2( NameExpr(x* [l]) IntExpr(1)) @@ -113,7 +113,7 @@ def g(): pass MypyFile:1( FuncDef:1( f - Block:1( + Block:2( ExpressionStmt:2( NameExpr(x [__main__.x])) ExpressionStmt:3( @@ -149,7 +149,7 @@ MypyFile:1( f Args( Var(y)) - Block:3( + Block:4( AssignmentStmt:4( NameExpr(y [l]) IntExpr(1)) @@ -174,7 +174,7 @@ MypyFile:1( builtins.int) FuncDef:2( f - Block:2( + Block:3( AssignmentStmt:3( NameExpr(x* [l]) IntExpr(2)) @@ -197,7 +197,7 @@ MypyFile:1( default( Var(y) NameExpr(object [builtins.object]))) - Block:1( + Block:2( ExpressionStmt:2( TupleExpr:2( NameExpr(x [l]) @@ -214,7 +214,7 @@ MypyFile:1( Var(x)) VarArg( Var(y)) - Block:1( + Block:2( ExpressionStmt:2( TupleExpr:2( NameExpr(x [l]) @@ -234,7 +234,7 @@ MypyFile:1( NameExpr(None [builtins.None])) FuncDef:2( f - Block:2( + Block:3( GlobalDecl:3( x) AssignmentStmt:4( @@ -262,7 +262,7 @@ MypyFile:1( NameExpr(None [builtins.None]))) FuncDef:2( f - Block:2( + Block:3( GlobalDecl:3( x y) @@ -283,12 +283,12 @@ MypyFile:1( NameExpr(None [builtins.None])) FuncDef:2( f - Block:2( + Block:3( GlobalDecl:3( x))) FuncDef:4( g - Block:4( + Block:5( AssignmentStmt:5( NameExpr(x* [l]) NameExpr(None [builtins.None]))))) @@ -306,12 +306,12 @@ MypyFile:1( NameExpr(None [builtins.None])) FuncDef:2( f - Block:2( + Block:3( GlobalDecl:3( x))) FuncDef:4( g - Block:4( + Block:5( AssignmentStmt:5( NameExpr(x* [l]) NameExpr(None [builtins.None]))))) @@ -333,7 +333,7 @@ MypyFile:1( f Args( Var(self)) - Block:3( + Block:4( GlobalDecl:4( x) AssignmentStmt:5( @@ -374,13 +374,13 @@ def g(): MypyFile:1( FuncDef:1( g - Block:1( + Block:2( AssignmentStmt:2( NameExpr(x* [l]) NameExpr(None [builtins.None])) FuncDef:3( f - Block:3( + Block:4( NonlocalDecl:4( x) AssignmentStmt:5( @@ -400,7 +400,7 @@ MypyFile:1( FuncDef:1( f def () - Block:1( + Block:2( AssignmentStmt:2( NameExpr(a* [l]) IntExpr(0)) @@ -422,7 +422,7 @@ def g(): MypyFile:1( FuncDef:1( g - Block:1( + Block:2( AssignmentStmt:2( TupleExpr:2( NameExpr(x* [l]) @@ -434,7 +434,7 @@ MypyFile:1( f Args( Var(z)) - Block:3( + Block:4( NonlocalDecl:4( x y) @@ -453,12 +453,12 @@ MypyFile:1( f Args( Var(x)) - Block:1( + Block:2( FuncDef:2( g Args( Var(y)) - Block:2( + Block:3( AssignmentStmt:3( NameExpr(z* [l]) OpExpr:3( @@ -478,10 +478,10 @@ MypyFile:1( f Args( Var(x)) - Block:1( + Block:2( FuncDef:2( g - Block:2( + Block:3( AssignmentStmt:3( NameExpr(x* [l]) IntExpr(1))))))) diff --git a/test-data/unit/semanal-classes.test b/test-data/unit/semanal-classes.test index 86f8b8656fb6..951791e23490 100644 --- a/test-data/unit/semanal-classes.test +++ b/test-data/unit/semanal-classes.test @@ -27,7 +27,7 @@ MypyFile:1( Args( Var(self) Var(x)) - Block:2( + Block:3( AssignmentStmt:3( NameExpr(y* [l]) NameExpr(x [l])))) @@ -35,7 +35,7 @@ MypyFile:1( f Args( Var(self)) - Block:4( + Block:5( AssignmentStmt:5( NameExpr(y* [l]) NameExpr(self [l])))))) @@ -53,7 +53,7 @@ MypyFile:1( __init__ Args( Var(self)) - Block:2( + Block:3( AssignmentStmt:3( MemberExpr:3( NameExpr(self [l]) @@ -79,7 +79,7 @@ MypyFile:1( f Args( Var(self)) - Block:2( + Block:3( AssignmentStmt:3( MemberExpr:3( NameExpr(self 
[l]) @@ -89,7 +89,7 @@ MypyFile:1( __init__ Args( Var(self)) - Block:4( + Block:5( AssignmentStmt:5( MemberExpr:5( NameExpr(self [l]) @@ -113,7 +113,7 @@ MypyFile:1( Args( Var(x) Var(self)) - Block:2( + Block:3( AssignmentStmt:3( MemberExpr:3( NameExpr(self [l]) @@ -125,7 +125,7 @@ MypyFile:1( __init__ Args( Var(x)) - Block:5( + Block:6( AssignmentStmt:6( NameExpr(self* [l]) NameExpr(x [l])) @@ -147,7 +147,7 @@ MypyFile:1( __init__ Args( Var(x)) - Block:2( + Block:3( AssignmentStmt:3( MemberExpr:3( NameExpr(x [l]) @@ -167,7 +167,7 @@ MypyFile:1( __init__ Args( Var(self)) - Block:2( + Block:3( AssignmentStmt:3( MemberExpr:3( NameExpr(self [l]) @@ -309,7 +309,7 @@ MypyFile:1( ListExpr:2( IntExpr(1) IntExpr(2)) - Block:2( + Block:3( AssignmentStmt:3( NameExpr(y* [m]) NameExpr(x [__main__.A.x])))))) @@ -322,7 +322,7 @@ def f(): MypyFile:1( FuncDef:1( f - Block:1( + Block:2( ClassDef:2( A PassStmt:2()) @@ -369,7 +369,7 @@ MypyFile:1( FuncDef:1( f def () - Block:1( + Block:2( ClassDef:2( A PassStmt:2()) @@ -390,7 +390,7 @@ MypyFile:1( f Args( Var(x)) - Block:1( + Block:2( ClassDef:2( A AssignmentStmt:3( @@ -400,7 +400,7 @@ MypyFile:1( g Args( Var(self)) - Block:4( + Block:5( AssignmentStmt:5( NameExpr(z* [l]) NameExpr(x [l])))))))) diff --git a/test-data/unit/semanal-errors.test b/test-data/unit/semanal-errors.test index a4ed905dcb9f..d09ed87d3afc 100644 --- a/test-data/unit/semanal-errors.test +++ b/test-data/unit/semanal-errors.test @@ -1453,7 +1453,7 @@ from typing import Tuple heterogenous_tuple: Tuple[Unpack[Tuple[int, str]]] homogenous_tuple: Tuple[Unpack[Tuple[int, ...]]] -bad: Tuple[Unpack[int]] # E: builtins.int cannot be unpacked (must be tuple or TypeVarTuple) +bad: Tuple[Unpack[int]] # E: "int" cannot be unpacked (must be tuple or TypeVarTuple) [builtins fixtures/tuple.pyi] [case testTypeVarTuple] diff --git a/test-data/unit/semanal-expressions.test b/test-data/unit/semanal-expressions.test index fa07e533a842..4c9baf6b1b75 100644 --- a/test-data/unit/semanal-expressions.test +++ b/test-data/unit/semanal-expressions.test @@ -212,7 +212,7 @@ MypyFile:1( Args( Var(a)) def (a: Any) - Block:1( + Block:2( ExpressionStmt:2( ListComprehension:2( GeneratorExpr:2( diff --git a/test-data/unit/semanal-lambda.test b/test-data/unit/semanal-lambda.test index 1cde1a794dc2..cc2307b97217 100644 --- a/test-data/unit/semanal-lambda.test +++ b/test-data/unit/semanal-lambda.test @@ -5,7 +5,7 @@ def g(): MypyFile:1( FuncDef:1( g - Block:1( + Block:2( ReturnStmt:2( LambdaExpr:2( Args( @@ -52,7 +52,7 @@ def g(): MypyFile:1( FuncDef:1( g - Block:1( + Block:2( ReturnStmt:2( LambdaExpr:2( Block:2( diff --git a/test-data/unit/semanal-modules.test b/test-data/unit/semanal-modules.test index 116747ae5cb9..d52dd953aea2 100644 --- a/test-data/unit/semanal-modules.test +++ b/test-data/unit/semanal-modules.test @@ -552,7 +552,7 @@ MypyFile:1( FuncDef:1( f def () - Block:1( + Block:2( Import:2(_x) ExpressionStmt:3( MemberExpr:3( @@ -603,7 +603,7 @@ MypyFile:1( FuncDef:1( f def () - Block:1( + Block:2( Import:2(x) Import:3(x) ExpressionStmt:4( @@ -917,7 +917,7 @@ MypyFile:1( FuncDef:3( f def () - Block:3( + Block:4( ImportFrom:4(x, [a]) ImportFrom:5(x, [a]))) Import:6(x) diff --git a/test-data/unit/semanal-statements.test b/test-data/unit/semanal-statements.test index 013452068cf1..f602c236c949 100644 --- a/test-data/unit/semanal-statements.test +++ b/test-data/unit/semanal-statements.test @@ -76,7 +76,7 @@ MypyFile:1( IntExpr(1)) WhileStmt:2( NameExpr(x [__main__.x]) - Block:2( + Block:3( ExpressionStmt:3( NameExpr(y 
[__main__.y]))))) @@ -88,7 +88,7 @@ MypyFile:1( ForStmt:1( NameExpr(x* [__main__.x]) NameExpr(object [builtins.object]) - Block:1( + Block:2( ExpressionStmt:2( NameExpr(x [__main__.x]))))) @@ -100,11 +100,11 @@ def f(): MypyFile:1( FuncDef:1( f - Block:1( + Block:2( ForStmt:2( NameExpr(x* [l]) NameExpr(f [__main__.f]) - Block:2( + Block:3( ExpressionStmt:3( NameExpr(x [l]))))))) @@ -118,7 +118,7 @@ MypyFile:1( NameExpr(x* [__main__.x]) NameExpr(y* [__main__.y])) ListExpr:1() - Block:1( + Block:2( ExpressionStmt:2( TupleExpr:2( NameExpr(x [__main__.x]) @@ -133,7 +133,7 @@ MypyFile:1( ForStmt:1( NameExpr(x* [__main__.x]) ListExpr:1() - Block:1( + Block:2( PassStmt:2())) ExpressionStmt:3( NameExpr(x [__main__.x]))) @@ -147,11 +147,11 @@ def f(): MypyFile:1( FuncDef:1( f - Block:1( + Block:2( ForStmt:2( NameExpr(x* [l]) ListExpr:2() - Block:2( + Block:3( PassStmt:3())) ExpressionStmt:4( NameExpr(x [l]))))) @@ -167,12 +167,12 @@ MypyFile:1( ForStmt:2( NameExpr(x'* [__main__.x']) NameExpr(None [builtins.None]) - Block:2( + Block:3( PassStmt:3())) ForStmt:4( NameExpr(x* [__main__.x]) NameExpr(None [builtins.None]) - Block:4( + Block:5( PassStmt:5()))) [case testReusingForLoopIndexVariable2] @@ -186,16 +186,16 @@ def f(): MypyFile:1( FuncDef:2( f - Block:2( + Block:3( ForStmt:3( NameExpr(x* [l]) NameExpr(None [builtins.None]) - Block:3( + Block:4( PassStmt:4())) ForStmt:5( NameExpr(x'* [l]) NameExpr(None [builtins.None]) - Block:5( + Block:6( PassStmt:6()))))) [case testLoopWithElse] @@ -212,14 +212,14 @@ MypyFile:1( ForStmt:1( NameExpr(x* [__main__.x]) ListExpr:1() - Block:1( + Block:2( PassStmt:2()) Else( ExpressionStmt:4( NameExpr(x [__main__.x])))) WhileStmt:5( IntExpr(1) - Block:5( + Block:6( PassStmt:6()) Else( ExpressionStmt:8( @@ -234,12 +234,12 @@ for x in []: MypyFile:1( WhileStmt:1( IntExpr(1) - Block:1( + Block:2( BreakStmt:2())) ForStmt:3( NameExpr(x* [__main__.x]) ListExpr:3() - Block:3( + Block:4( BreakStmt:4()))) [case testContinue] @@ -251,12 +251,12 @@ for x in []: MypyFile:1( WhileStmt:1( IntExpr(1) - Block:1( + Block:2( ContinueStmt:2())) ForStmt:3( NameExpr(x* [__main__.x]) ListExpr:3() - Block:3( + Block:4( ContinueStmt:4()))) [case testIf] @@ -426,7 +426,7 @@ def f(): MypyFile:1( FuncDef:1( f - Block:1( + Block:2( AssignmentStmt:2( NameExpr(x* [l]) IntExpr(1)) @@ -531,7 +531,7 @@ MypyFile:1( f Args( Var(x)) - Block:1( + Block:2( DelStmt:2( NameExpr(x [l]))))) @@ -545,7 +545,7 @@ MypyFile:1( Args( Var(x) Var(y)) - Block:1( + Block:2( DelStmt:2( TupleExpr:2( NameExpr(x [l]) @@ -579,19 +579,19 @@ MypyFile:1( c PassStmt:1()) TryStmt:2( - Block:2( + Block:3( ExpressionStmt:3( NameExpr(c [__main__.c]))) NameExpr(object [builtins.object]) - Block:4( + Block:5( ExpressionStmt:5( NameExpr(c [__main__.c]))) NameExpr(c [__main__.c]) NameExpr(e* [__main__.e]) - Block:6( + Block:7( ExpressionStmt:7( NameExpr(e [__main__.e]))) - Block:8( + Block:9( ExpressionStmt:9( NameExpr(c [__main__.c]))) Finally( @@ -608,9 +608,9 @@ else: [out] MypyFile:1( TryStmt:1( - Block:1( + Block:2( PassStmt:2()) - Block:3( + Block:4( PassStmt:4()) Else( ExpressionStmt:6( @@ -624,7 +624,7 @@ finally: [out] MypyFile:1( TryStmt:1( - Block:1( + Block:2( PassStmt:2()) Finally( PassStmt:4()))) @@ -641,13 +641,13 @@ MypyFile:1( c PassStmt:1()) TryStmt:2( - Block:2( + Block:3( PassStmt:3()) TupleExpr:4( NameExpr(c [__main__.c]) NameExpr(object [builtins.object])) NameExpr(e* [__main__.e]) - Block:4( + Block:5( ExpressionStmt:5( NameExpr(e [__main__.e]))))) @@ -665,7 +665,7 @@ MypyFile:1( WithStmt:1( Expr( 
NameExpr(object [builtins.object])) - Block:1( + Block:2( ExpressionStmt:2( NameExpr(object [builtins.object]))))) @@ -679,7 +679,7 @@ MypyFile:1( NameExpr(object [builtins.object])) Target( NameExpr(x* [__main__.x])) - Block:1( + Block:2( ExpressionStmt:2( NameExpr(x [__main__.x]))))) @@ -691,13 +691,13 @@ def f(): MypyFile:1( FuncDef:1( f - Block:1( + Block:2( WithStmt:2( Expr( NameExpr(f [__main__.f])) Target( NameExpr(x* [l])) - Block:2( + Block:3( ExpressionStmt:3( NameExpr(x [l]))))))) @@ -713,7 +713,7 @@ MypyFile:1( NameExpr(object [builtins.object])) Expr( NameExpr(object [builtins.object])) - Block:1( + Block:2( PassStmt:2())) WithStmt:3( Expr( @@ -724,7 +724,7 @@ MypyFile:1( NameExpr(object [builtins.object])) Target( NameExpr(b* [__main__.b])) - Block:3( + Block:4( PassStmt:4()))) [case testVariableInBlock] @@ -736,7 +736,7 @@ while object: MypyFile:1( WhileStmt:1( NameExpr(object [builtins.object]) - Block:1( + Block:2( AssignmentStmt:2( NameExpr(x* [__main__.x]) NameExpr(None [builtins.None])) @@ -757,11 +757,11 @@ except object as o: [out] MypyFile:1( TryStmt:1( - Block:1( + Block:2( PassStmt:2()) NameExpr(object [builtins.object]) NameExpr(o* [__main__.o]) - Block:3( + Block:4( AssignmentStmt:4( NameExpr(x* [__main__.x]) NameExpr(None [builtins.None])) @@ -777,11 +777,11 @@ except object as o: [out] MypyFile:1( TryStmt:1( - Block:1( + Block:2( PassStmt:2()) NameExpr(object [builtins.object]) NameExpr(o* [__main__.o]) - Block:3( + Block:4( AssignmentStmt:4( NameExpr(o [__main__.o]) CallExpr:4( @@ -806,15 +806,15 @@ MypyFile:1( builtins.BaseException) PassStmt:1()) TryStmt:2( - Block:2( + Block:3( PassStmt:3()) NameExpr(BaseException [builtins.BaseException]) NameExpr(e* [__main__.e]) - Block:4( + Block:5( PassStmt:5()) NameExpr(Err [__main__.Err]) NameExpr(f* [__main__.f]) - Block:6( + Block:7( AssignmentStmt:7( NameExpr(f [__main__.f]) CallExpr:7( @@ -860,7 +860,7 @@ MypyFile:1( NameExpr(decorate [__main__.decorate]) FuncDef:3( g - Block:3( + Block:4( ExpressionStmt:4( CallExpr:4( NameExpr(g [__main__.g]) @@ -877,13 +877,13 @@ MypyFile:1( FuncDef:1( f def () - Block:1( + Block:2( TryStmt:2( - Block:2( + Block:3( PassStmt:3()) NameExpr(object [builtins.object]) NameExpr(o* [l]) - Block:4( + Block:5( PassStmt:5()))))) [case testReuseExceptionVariable] @@ -899,17 +899,17 @@ MypyFile:1( FuncDef:1( f def () - Block:1( + Block:2( TryStmt:2( - Block:2( + Block:3( PassStmt:3()) NameExpr(object [builtins.object]) NameExpr(o* [l]) - Block:4( + Block:5( PassStmt:5()) NameExpr(object [builtins.object]) NameExpr(o [l]) - Block:6( + Block:7( PassStmt:7()))))) [case testWithMultiple] @@ -924,11 +924,11 @@ MypyFile:1( f Args( Var(a)) - Block:1( + Block:2( PassStmt:2())) FuncDef:3( main - Block:3( + Block:4( WithStmt:4( Expr( CallExpr:4( @@ -944,7 +944,7 @@ MypyFile:1( NameExpr(a [l])))) Target( NameExpr(b* [l])) - Block:4( + Block:5( AssignmentStmt:5( NameExpr(x* [l]) TupleExpr:5( @@ -1030,7 +1030,7 @@ MypyFile:1( f Args( Var(a)) - Block:2( + Block:3( ExpressionStmt:3( CallExpr:3( NameExpr(f [__main__.f]) @@ -1082,7 +1082,7 @@ MypyFile:1( IntExpr(0)) Target( NameExpr(y'* [__main__.y'])) - Block:1( + Block:2( AssignmentStmt:2( NameExpr(z* [__main__.z]) NameExpr(y' [__main__.y'])))) @@ -1091,7 +1091,7 @@ MypyFile:1( IntExpr(1)) Target( NameExpr(y* [__main__.y])) - Block:3( + Block:4( AssignmentStmt:4( NameExpr(y [__main__.y]) IntExpr(1))))) @@ -1109,7 +1109,7 @@ MypyFile:1( IntExpr(0)) Target( NameExpr(y* [__main__.y])) - Block:1( + Block:2( AssignmentStmt:2( NameExpr(z* [__main__.z]) 
NameExpr(y [__main__.y])))) @@ -1121,7 +1121,7 @@ MypyFile:1( IntExpr(1)) Target( NameExpr(y [__main__.y])) - Block:4( + Block:5( AssignmentStmt:5( NameExpr(y [__main__.y]) IntExpr(1))))) diff --git a/test-data/unit/semanal-types.test b/test-data/unit/semanal-types.test index 05fc08d8a49e..71a5c6dd87b5 100644 --- a/test-data/unit/semanal-types.test +++ b/test-data/unit/semanal-types.test @@ -31,7 +31,7 @@ MypyFile:1( PassStmt:1()) FuncDef:2( f - Block:2( + Block:3( AssignmentStmt:3( NameExpr(x [l]) NameExpr(None [builtins.None]) @@ -69,7 +69,7 @@ MypyFile:1( __init__ Args( Var(self)) - Block:3( + Block:4( AssignmentStmt:4( MemberExpr:4( NameExpr(self [l]) @@ -120,7 +120,7 @@ MypyFile:1( Var(x) Var(y)) def (x: Any, y: __main__.A) - Block:4( + Block:5( AssignmentStmt:5( NameExpr(z* [l]) TupleExpr:5( @@ -255,7 +255,7 @@ MypyFile:1( IntExpr(1)) FuncDef:6( f - Block:6( + Block:7( AssignmentStmt:7( NameExpr(b [l]) NameExpr(None [builtins.None]) @@ -417,7 +417,7 @@ MypyFile:1( Args( Var(x)) def [t] (x: t`-1) - Block:3( + Block:4( AssignmentStmt:4( NameExpr(y [l]) NameExpr(None [builtins.None]) @@ -591,7 +591,7 @@ MypyFile:1( FuncDef:3( f def () - Block:3( + Block:4( FuncDef:4( g def [t] () -> t`-1 @@ -841,7 +841,7 @@ MypyFile:1( ImportFrom:1(typing, [overload]) FuncDef:2( f - Block:2( + Block:3( OverloadedFuncDef:3( FuncDef:8( g @@ -1113,7 +1113,7 @@ MypyFile:1( ImportFrom:1(typing, [TypeVar]) FuncDef:2( f - Block:2( + Block:3( AssignmentStmt:3( NameExpr(T* [l]) TypeVarExpr:3()) @@ -1196,7 +1196,7 @@ MypyFile:1( Var(self) Var(x)) def (self: __main__.A[_m.T`1], x: _m.T`1) -> Any - Block:5( + Block:6( AssignmentStmt:6( NameExpr(b [l]) NameExpr(None [builtins.None]) @@ -1217,7 +1217,7 @@ MypyFile:1( Args( Var(x)) def [_m.T] (x: _m.T`-1) - Block:2( + Block:3( AssignmentStmt:3( NameExpr(a [l]) NameExpr(None [builtins.None]) @@ -1235,7 +1235,7 @@ MypyFile:1( Args( Var(x)) def (x: builtins.int) -> Any - Block:2( + Block:3( AssignmentStmt:3( NameExpr(x [l]) IntExpr(1))))) @@ -1256,7 +1256,7 @@ MypyFile:1( Var(self) Var(x)) def (self: __main__.A, x: builtins.int) -> builtins.str - Block:3( + Block:4( AssignmentStmt:4( NameExpr(x [l]) IntExpr(1)))))) @@ -1582,7 +1582,7 @@ MypyFile:1( Args( Var(x)) def [Ts] (x: def (*Unpack[Ts`-1])) - Block:5( + Block:6( PassStmt:6()))) [builtins fixtures/tuple.pyi] diff --git a/test-data/unit/stubgen.test b/test-data/unit/stubgen.test index 8e4285b7de2e..eba838f32cd5 100644 --- a/test-data/unit/stubgen.test +++ b/test-data/unit/stubgen.test @@ -319,6 +319,62 @@ class A: def f(self, x) -> None: ... def h(self) -> None: ... +[case testFunctoolsCachedProperty] +import functools + +class A: + @functools.cached_property + def x(self): + return 'x' +[out] +import functools + +class A: + @functools.cached_property + def x(self): ... + +[case testFunctoolsCachedPropertyAlias] +import functools as ft + +class A: + @ft.cached_property + def x(self): + return 'x' +[out] +import functools as ft + +class A: + @ft.cached_property + def x(self): ... + +[case testCachedProperty] +from functools import cached_property + +class A: + @cached_property + def x(self): + return 'x' +[out] +from functools import cached_property + +class A: + @cached_property + def x(self): ... + +[case testCachedPropertyAlias] +from functools import cached_property as cp + +class A: + @cp + def x(self): + return 'x' +[out] +from functools import cached_property as cp + +class A: + @cp + def x(self): ... + [case testStaticMethod] class A: @staticmethod @@ -585,8 +641,9 @@ class A: def _bar(cls) -> None: ... 
[case testNamedtuple] -import collections, x +import collections, typing, x X = collections.namedtuple('X', ['a', 'b']) +Y = typing.NamedTuple('Y', [('a', int), ('b', str)]) [out] from _typeshed import Incomplete from typing import NamedTuple @@ -595,14 +652,21 @@ class X(NamedTuple): a: Incomplete b: Incomplete +class Y(NamedTuple): + a: int + b: str + [case testEmptyNamedtuple] -import collections +import collections, typing X = collections.namedtuple('X', []) +Y = typing.NamedTuple('Y', []) [out] from typing import NamedTuple class X(NamedTuple): ... +class Y(NamedTuple): ... + [case testNamedtupleAltSyntax] from collections import namedtuple, xx X = namedtuple('X', 'a b') @@ -641,8 +705,10 @@ class X(NamedTuple): [case testNamedtupleWithUnderscore] from collections import namedtuple as _namedtuple +from typing import NamedTuple as _NamedTuple def f(): ... X = _namedtuple('X', 'a b') +Y = _NamedTuple('Y', [('a', int), ('b', str)]) def g(): ... [out] from _typeshed import Incomplete @@ -654,6 +720,10 @@ class X(NamedTuple): a: Incomplete b: Incomplete +class Y(NamedTuple): + a: int + b: str + def g() -> None: ... [case testNamedtupleBaseClass] @@ -672,10 +742,14 @@ class Y(_X): ... [case testNamedtupleAltSyntaxFieldsTuples] from collections import namedtuple, xx +from typing import NamedTuple X = namedtuple('X', ()) Y = namedtuple('Y', ('a',)) Z = namedtuple('Z', ('a', 'b', 'c', 'd', 'e')) xx +R = NamedTuple('R', ()) +S = NamedTuple('S', (('a', int),)) +T = NamedTuple('T', (('a', int), ('b', str))) [out] from _typeshed import Incomplete from typing import NamedTuple @@ -692,13 +766,62 @@ class Z(NamedTuple): d: Incomplete e: Incomplete +class R(NamedTuple): ... + +class S(NamedTuple): + a: int + +class T(NamedTuple): + a: int + b: str + [case testDynamicNamedTuple] from collections import namedtuple +from typing import NamedTuple N = namedtuple('N', ['x', 'y'] + ['z']) +M = NamedTuple('M', [('x', int), ('y', str)] + [('z', float)]) +class X(namedtuple('X', ['a', 'b'] + ['c'])): ... [out] from _typeshed import Incomplete N: Incomplete +M: Incomplete +class X(Incomplete): ... + +[case testNamedTupleInClassBases] +import collections, typing +from collections import namedtuple +from typing import NamedTuple +class X(namedtuple('X', ['a', 'b'])): ... +class Y(NamedTuple('Y', [('a', int), ('b', str)])): ... +class R(collections.namedtuple('R', ['a', 'b'])): ... +class S(typing.NamedTuple('S', [('a', int), ('b', str)])): ... +[out] +import typing +from _typeshed import Incomplete +from typing import NamedTuple + +class X(NamedTuple('X', [('a', Incomplete), ('b', Incomplete)])): ... +class Y(NamedTuple('Y', [('a', int), ('b', str)])): ... +class R(NamedTuple('R', [('a', Incomplete), ('b', Incomplete)])): ... +class S(typing.NamedTuple('S', [('a', int), ('b', str)])): ... + +[case testNotNamedTuple] +from not_collections import namedtuple +from not_typing import NamedTuple +from collections import notnamedtuple +from typing import NotNamedTuple +X = namedtuple('X', ['a', 'b']) +Y = notnamedtuple('Y', ['a', 'b']) +Z = NamedTuple('Z', [('a', int), ('b', str)]) +W = NotNamedTuple('W', [('a', int), ('b', str)]) +[out] +from _typeshed import Incomplete + +X: Incomplete +Y: Incomplete +Z: Incomplete +W: Incomplete [case testArbitraryBaseClass] import x @@ -2793,3 +2916,116 @@ def f(x: str | None) -> None: ... a: str | int def f(x: str | None) -> None: ... 
+ +[case testTypeddict] +import typing, x +X = typing.TypedDict('X', {'a': int, 'b': str}) +Y = typing.TypedDict('X', {'a': int, 'b': str}, total=False) +[out] +from typing_extensions import TypedDict + +class X(TypedDict): + a: int + b: str + +class Y(TypedDict, total=False): + a: int + b: str + +[case testTypeddictKeywordSyntax] +from typing import TypedDict + +X = TypedDict('X', a=int, b=str) +Y = TypedDict('X', a=int, b=str, total=False) +[out] +from typing import TypedDict + +class X(TypedDict): + a: int + b: str + +class Y(TypedDict, total=False): + a: int + b: str + +[case testTypeddictWithNonIdentifierOrKeywordKeys] +from typing import TypedDict +X = TypedDict('X', {'a-b': int, 'c': str}) +Y = TypedDict('X', {'a-b': int, 'c': str}, total=False) +Z = TypedDict('X', {'a': int, 'in': str}) +[out] +from typing import TypedDict + +X = TypedDict('X', {'a-b': int, 'c': str}) + +Y = TypedDict('X', {'a-b': int, 'c': str}, total=False) + +Z = TypedDict('X', {'a': int, 'in': str}) + +[case testEmptyTypeddict] +import typing +X = typing.TypedDict('X', {}) +Y = typing.TypedDict('Y', {}, total=False) +Z = typing.TypedDict('Z') +W = typing.TypedDict('W', total=False) +[out] +from typing_extensions import TypedDict + +class X(TypedDict): ... + +class Y(TypedDict, total=False): ... + +class Z(TypedDict): ... + +class W(TypedDict, total=False): ... + +[case testTypeddictAliased] +from typing import TypedDict as t_TypedDict +from typing_extensions import TypedDict as te_TypedDict +def f(): ... +X = t_TypedDict('X', {'a': int, 'b': str}) +Y = te_TypedDict('Y', {'a': int, 'b': str}) +def g(): ... +[out] +from typing_extensions import TypedDict + +def f() -> None: ... + +class X(TypedDict): + a: int + b: str + +class Y(TypedDict): + a: int + b: str + +def g() -> None: ... 
+ +[case testNotTypeddict] +from x import TypedDict +import y +X = TypedDict('X', {'a': int, 'b': str}) +Y = y.TypedDict('Y', {'a': int, 'b': str}) +[out] +from _typeshed import Incomplete + +X: Incomplete +Y: Incomplete + +[case testTypeddictWithWrongAttributesType] +from typing import TypedDict +R = TypedDict("R", {"a": int, **{"b": str, "c": bytes}}) +S = TypedDict("S", [("b", str), ("c", bytes)]) +T = TypedDict("T", {"a": int}, b=str, total=False) +U = TypedDict("U", {"a": int}, totale=False) +V = TypedDict("V", {"a": int}, {"b": str}) +W = TypedDict("W", **{"a": int, "b": str}) +[out] +from _typeshed import Incomplete + +R: Incomplete +S: Incomplete +T: Incomplete +U: Incomplete +V: Incomplete +W: Incomplete diff --git a/test-requirements.txt b/test-requirements.txt index a7394e6d1472..8d0866eeb20c 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -1,7 +1,7 @@ -r mypy-requirements.txt -r build-requirements.txt attrs>=18.0 -black==22.12.0 # must match version in .pre-commit-config.yaml +black==23.3.0 # must match version in .pre-commit-config.yaml filelock>=3.3.0 flake8==5.0.4 # must match version in .pre-commit-config.yaml flake8-bugbear==22.12.6 # must match version in .pre-commit-config.yaml diff --git a/tox.ini b/tox.ini index 443f05dc8bcf..1e7419918ebf 100644 --- a/tox.ini +++ b/tox.ini @@ -6,9 +6,9 @@ envlist = py38, py39, py310, + docs, lint, type, - docs, isolated_build = true [testenv] @@ -21,6 +21,23 @@ passenv = deps = -rtest-requirements.txt commands = python -m pytest {posargs} +[testenv:dev] +description = generate a DEV environment, that has all project libraries +usedevelop = True +deps = + -rtest-requirements.txt + -rdocs/requirements-docs.txt +commands = + python -m pip list --format=columns + python -c 'import sys; print(sys.executable)' + +[testenv:docs] +description = invoke sphinx-build to build the HTML docs +deps = -rdocs/requirements-docs.txt +commands = + sphinx-build -d "{toxworkdir}/docs_doctree" docs/source "{toxworkdir}/docs_out" --color -W -bhtml {posargs} + python -c 'import pathlib; print("documentation available under file://\{0\}".format(pathlib.Path(r"{toxworkdir}") / "docs_out" / "index.html"))' + [testenv:lint] description = check the code style commands = @@ -37,20 +54,3 @@ passenv = commands = python -m mypy --config-file mypy_self_check.ini -p mypy -p mypyc python -m mypy --config-file mypy_self_check.ini misc --exclude misc/fix_annotate.py --exclude misc/async_matrix.py --exclude misc/sync-typeshed.py - -[testenv:docs] -description = invoke sphinx-build to build the HTML docs -deps = -rdocs/requirements-docs.txt -commands = - sphinx-build -d "{toxworkdir}/docs_doctree" docs/source "{toxworkdir}/docs_out" --color -W -bhtml {posargs} - python -c 'import pathlib; print("documentation available under file://\{0\}".format(pathlib.Path(r"{toxworkdir}") / "docs_out" / "index.html"))' - -[testenv:dev] -description = generate a DEV environment, that has all project libraries -usedevelop = True -deps = - -rtest-requirements.txt - -rdocs/requirements-docs.txt -commands = - python -m pip list --format=columns - python -c 'import sys; print(sys.executable)'
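The tox.ini hunk above reorders the `envlist` and moves the `docs` and `dev` environments ahead of the lint/type sections without changing their contents. As a rough sketch of how those environments might be exercised locally (assuming tox 4 is installed and the env names defined in that hunk; the commands below are illustrative, not part of the diff):

```bash
# Build the HTML docs with sphinx-build into {toxworkdir}/docs_out,
# as defined by the [testenv:docs] section above
tox run -e docs

# Create a development environment with the project installed in
# editable mode plus test and docs requirements ([testenv:dev])
tox run -e dev

# Run the style checks and the mypy self-check, matching the
# remaining entries in the reordered envlist
tox run -e lint
tox run -e type
```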