diff --git a/.appveyor.yml b/.appveyor.yml index 80cc236b7..da4a38d6a 100644 --- a/.appveyor.yml +++ b/.appveyor.yml @@ -31,8 +31,7 @@ install: - rustup-init.exe -yv --default-toolchain stable --default-host i686-pc-windows-msvc - "SET PATH=%APPDATA%\\Python;%APPDATA%\\Python\\Scripts;%PYTHON%;%PYTHON%\\Scripts;c:\\MinGW\\bin;%PATH%;C:\\Users\\appveyor\\.cargo\\bin" - "copy c:\\MinGW\\bin\\mingw32-make.exe c:\\MinGW\\bin\\make.exe" - - python -m pip install --user --upgrade setuptools pip - - python -m pip install .[tests] + - make install build: false diff --git a/.github/workflows/run-tests.yml b/.github/workflows/run-tests.yml index 92737a7ad..2927a0edb 100644 --- a/.github/workflows/run-tests.yml +++ b/.github/workflows/run-tests.yml @@ -2,11 +2,12 @@ name: Coconut Test Suite on: [push] jobs: build: - runs-on: ubuntu-20.04 + runs-on: ubuntu-latest strategy: matrix: python-version: - '2.7' + - '3.4' - '3.5' - '3.6' - '3.7' @@ -24,9 +25,10 @@ jobs: steps: - uses: actions/checkout@v3 - name: Setup python - uses: actions/setup-python@v4 + uses: MatteoH2O1999/setup-python@v1.3.1 with: python-version: ${{ matrix.python-version }} + cache: pip - run: make install - run: make test-all - run: make build diff --git a/.gitignore b/.gitignore index a15a13895..243d558fd 100644 --- a/.gitignore +++ b/.gitignore @@ -131,12 +131,13 @@ __pypackages__/ .vscode # Coconut -coconut/tests/dest/ -docs/ +/coconut/tests/dest/ +/docs/ pyston/ pyprover/ bbopt/ coconut-prelude/ index.rst vprof.json -coconut/icoconut/coconut/ +/coconut/icoconut/coconut/ +__coconut_cache__/ diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 3ff5b942f..12b79fd46 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -28,7 +28,7 @@ _Note: Don't forget to add yourself to the "Authors:" section in the moduledocs First, you'll want to set up a local copy of Coconut's recommended development environment. 
For that, just run `git checkout develop`, make sure your default `python` installation is some variant of Python 3, and run `make dev`. That should switch you to the `develop` branch, install all possible dependencies, bind the `coconut` command to your local copy, and set up [pre-commit](http://pre-commit.com/), which will check your code for errors for you whenever you `git commit`. -Then, you should be able to use the Coconut command-line for trying out simple things, and to run a paired-down version of the test suite locally, just `make test-basic`. +Then, you should be able to use the Coconut command-line for trying out simple things, and to run a pared-down version of the test suite locally, just `make test-univ`. After you've tested your changes locally, you'll want to add more permanent tests to Coconut's test suite. Coconut's test suite is primarily written in Coconut itself, so testing new features just means using them inside of one of Coconut's `.coco` test files, with some `assert` statements to check validity. @@ -154,7 +154,7 @@ After you've tested your changes locally, you'll want to add more permanent test 1. Preparation: 1. Run `make check-reqs` and update dependencies as necessary - 2. Run `make format` + 2. Run `sudo make format` 3. Make sure `make test`, `make test-py2`, and `make test-easter-eggs` are passing 4. Ensure that `coconut --watch` can successfully compile files when they're modified 5. Check changes in [`compiled-cocotest`](https://github.com/evhub/compiled-cocotest), [`pyprover`](https://github.com/evhub/pyprover), and [`coconut-prelude`](https://github.com/evhub/coconut-prelude) diff --git a/DOCS.md b/DOCS.md index 9b4d61a9a..9cf16df75 100644 --- a/DOCS.md +++ b/DOCS.md @@ -15,7 +15,7 @@ depth: 2 This documentation covers all the features of the [Coconut Programming Language](http://evhub.github.io/coconut/), and is intended as a reference/specification, not a tutorialized introduction.
For a full introduction and tutorial of Coconut, see [the tutorial](./HELP.md). -Coconut is a variant of [Python](https://www.python.org/) built for **simple, elegant, Pythonic functional programming**. Coconut syntax is a strict superset of Python 3 syntax. Thus, users familiar with Python will already be familiar with most of Coconut. +Coconut is a variant of [Python](https://www.python.org/) built for **simple, elegant, Pythonic functional programming**. Coconut syntax is a strict superset of the latest Python 3 syntax. Thus, users familiar with Python will already be familiar with most of Coconut. The Coconut compiler turns Coconut code into Python code. The primary method of accessing the Coconut compiler is through the Coconut command-line utility, which also features an interpreter for real-time compilation. In addition to the command-line utility, Coconut also supports the use of IPython/Jupyter notebooks. @@ -143,74 +143,77 @@ dest destination directory for compiled files (defaults to ##### Optional Arguments ``` - -h, --help show this help message and exit - --and source [dest ...] 
- add an additional source/dest pair to compile (dest is optional) - -v, -V, --version print Coconut and Python version information - -t version, --target version - specify target Python version (defaults to universal) - -i, --interact force the interpreter to start (otherwise starts if no other command - is given) (implies --run) - -p, --package compile source as part of a package (defaults to only if source is a - directory) - -a, --standalone, --stand-alone - compile source as standalone files (defaults to only if source is a - single file) - -l, --line-numbers, --linenumbers - add line number comments for ease of debugging - -k, --keep-lines, --keeplines - include source code in comments for ease of debugging - -w, --watch watch a directory and recompile on changes - -r, --run execute compiled Python - -n, --no-write, --nowrite - disable writing compiled Python - -d, --display print compiled Python - -q, --quiet suppress all informational output (combine with --display to write - runnable code to stdout) - -s, --strict enforce code cleanliness standards - --no-tco, --notco disable tail call optimization - --no-wrap-types, --nowraptypes - disable wrapping type annotations in strings and turn off 'from - __future__ import annotations' behavior - -c code, --code code run Coconut passed in as a string (can also be piped into stdin) - -j processes, --jobs processes - number of additional processes to use (defaults to 'sys') (0 is no - additional processes; 'sys' uses machine default) - -f, --force force re-compilation even when source code and compilation parameters - haven't changed - --minify reduce size of compiled Python - --jupyter ..., --ipython ... - run Jupyter/IPython with Coconut as the kernel (remaining args passed - to Jupyter) - --mypy ... run MyPy on compiled Python (remaining args passed to MyPy) (implies - --package) - --argv ..., --args ... 
- set sys.argv to source plus remaining args for use in the Coconut - script being run - --tutorial open Coconut's tutorial in the default web browser - --docs, --documentation - open Coconut's documentation in the default web browser - --style name set Pygments syntax highlighting style (or 'list' to list styles) - (defaults to COCONUT_STYLE environment variable if it exists, - otherwise 'default') - --history-file path set history file (or '' for no file) (can be modified by setting - COCONUT_HOME environment variable) - --vi-mode, --vimode enable vi mode in the interpreter (currently set to False) (can be - modified by setting COCONUT_VI_MODE environment variable) - --recursion-limit limit, --recursionlimit limit - set maximum recursion depth in compiler (defaults to 1920) (when - increasing --recursion-limit, you may also need to increase --stack- - size) - --stack-size kbs, --stacksize kbs - run the compiler in a separate thread with the given stack size in - kilobytes - --site-install, --siteinstall - set up coconut.api to be imported on Python start - --site-uninstall, --siteuninstall - revert the effects of --site-install - --verbose print verbose debug output - --trace print verbose parsing data (only available in coconut-develop) - --profile collect and print timing info (only available in coconut-develop) +-h, --help show this help message and exit +--and source [dest ...] 
+ add an additional source/dest pair to compile (dest is optional) +-v, -V, --version print Coconut and Python version information +-t version, --target version + specify target Python version (defaults to universal) +-i, --interact force the interpreter to start (otherwise starts if no other command + is given) (implies --run) +-p, --package compile source as part of a package (defaults to only if source is a + directory) +-a, --standalone, --stand-alone + compile source as standalone files (defaults to only if source is a + single file) +-l, --line-numbers, --linenumbers + force enable line number comments (--line-numbers are enabled by + default unless --minify is passed) +--no-line-numbers, --nolinenumbers + disable line number comments (opposite of --line-numbers) +-k, --keep-lines, --keeplines + include source code in comments for ease of debugging +-w, --watch watch a directory and recompile on changes +-r, --run execute compiled Python +-n, --no-write, --nowrite + disable writing compiled Python +-d, --display print compiled Python +-q, --quiet suppress all informational output (combine with --display to write + runnable code to stdout) +-s, --strict enforce code cleanliness standards +--no-tco, --notco disable tail call optimization +--no-wrap-types, --nowraptypes + disable wrapping type annotations in strings and turn off 'from + __future__ import annotations' behavior +-c code, --code code run Coconut passed in as a string (can also be piped into stdin) +-j processes, --jobs processes + number of additional processes to use (defaults to 'sys') (0 is no + additional processes; 'sys' uses machine default) +-f, --force force re-compilation even when source code and compilation parameters + haven't changed +--minify reduce size of compiled Python +--jupyter ..., --ipython ... + run Jupyter/IPython with Coconut as the kernel (remaining args passed + to Jupyter) +--mypy ... 
run MyPy on compiled Python (remaining args passed to MyPy) (implies + --package --line-numbers) +--argv ..., --args ... + set sys.argv to source plus remaining args for use in the Coconut + script being run +--tutorial open Coconut's tutorial in the default web browser +--docs, --documentation + open Coconut's documentation in the default web browser +--style name set Pygments syntax highlighting style (or 'list' to list styles) + (defaults to COCONUT_STYLE environment variable if it exists, + otherwise 'default') +--history-file path set history file (or '' for no file) (can be modified by setting + COCONUT_HOME environment variable) +--vi-mode, --vimode enable vi mode in the interpreter (currently set to False) (can be + modified by setting COCONUT_VI_MODE environment variable) +--recursion-limit limit, --recursionlimit limit + set maximum recursion depth in compiler (defaults to 1920) (when + increasing --recursion-limit, you may also need to increase --stack- + size; setting them to approximately equal values is recommended) +--stack-size kbs, --stacksize kbs + run the compiler in a separate thread with the given stack size in + kilobytes +--site-install, --siteinstall + set up coconut.api to be imported on Python start +--site-uninstall, --siteuninstall + revert the effects of --site-install +--verbose print verbose debug output +--trace print verbose parsing data (only available in coconut-develop) +--profile collect and print timing info (only available in coconut-develop) ``` #### Coconut Scripts @@ -221,7 +224,7 @@ coconut-run ``` as an alias for ``` -coconut --quiet --target sys --line-numbers --keep-lines --run --argv +coconut --quiet --target sys --keep-lines --run --argv ``` which will quietly compile and run ``, passing any additional arguments to the script, mimicking how the `python` command works. 
@@ -232,6 +235,10 @@ which will quietly compile and run ``, passing any additional arguments To pass additional compilation arguments to `coconut-run` (e.g. `--no-tco`), put them before the `` file. +`coconut-run` will always enable [automatic compilation](#automatic-compilation), such that Coconut source files can be directly imported from any Coconut files run via `coconut-run`. Additionally, compilation parameters (e.g. `--no-tco`) used in `coconut-run` will be passed along and used for any auto compilation. + +On Python 3.4+, `coconut-run` will use a `__coconut_cache__` directory to cache the compiled Python. Note that `__coconut_cache__` will always be removed from `__file__`. + #### Naming Source Files Coconut source files should, so the compiler can recognize them, use the extension `.coco` (preferred), `.coc`, or `.coconut`. @@ -294,21 +301,23 @@ _Note: Coconut also universalizes many magic methods, including making `__bool__ If the version of Python that the compiled code will be running on is known ahead of time, a target should be specified with `--target`. The given target will only affect the compiled code and whether or not the Python-3-specific syntax detailed above is allowed. Where Python syntax differs across versions, Coconut syntax will always follow the latest Python 3 across all targets. 
The supported targets are: -- `universal` (default) (will work on _any_ of the below), -- `2`, `2.6` (will work on any Python `>= 2.6` but `< 3`), -- `2.7` (will work on any Python `>= 2.7` but `< 3`), -- `3`, `3.2` (will work on any Python `>= 3.2`), -- `3.3` (will work on any Python `>= 3.3`), -- `3.4` (will work on any Python `>= 3.4`), -- `3.5` (will work on any Python `>= 3.5`), -- `3.6` (will work on any Python `>= 3.6`), -- `3.7` (will work on any Python `>= 3.7`), -- `3.8` (will work on any Python `>= 3.8`), -- `3.9` (will work on any Python `>= 3.9`), -- `3.10` (will work on any Python `>= 3.10`), -- `3.11` (will work on any Python `>= 3.11`), -- `3.12` (will work on any Python `>= 3.12`), and -- `sys` (chooses the target corresponding to the current Python version). +- `universal`, `univ` (the default): will work on _any_ of the below +- `2`, `2.6`: will work on any Python `>= 2.6` but `< 3` +- `2.7`: will work on any Python `>= 2.7` but `< 3` +- `3`, `3.2`: will work on any Python `>= 3.2` +- `3.3`: will work on any Python `>= 3.3` +- `3.4`: will work on any Python `>= 3.4` +- `3.5`: will work on any Python `>= 3.5` +- `3.6`: will work on any Python `>= 3.6` +- `3.7`: will work on any Python `>= 3.7` +- `3.8`: will work on any Python `>= 3.8` +- `3.9`: will work on any Python `>= 3.9` +- `3.10`: will work on any Python `>= 3.10` +- `3.11`: will work on any Python `>= 3.11` +- `3.12`: will work on any Python `>= 3.12` +- `3.13`: will work on any Python `>= 3.13` +- `sys`: chooses the target corresponding to the current Python version +- `psf`: will work on any Python not considered [end-of-life](https://devguide.python.org/versions/) by the PSF (Python Software Foundation) _Note: Periods are optional in target specifications, such that the target `27` is equivalent to the target `2.7`._ @@ -324,18 +333,21 @@ If the `--strict` (`-s` for short) flag is enabled, Coconut will perform additio The style issues which will cause `--strict` to throw an error are: 
-- mixing of tabs and spaces (without `--strict` will show a warning). -- use of `from __future__` imports (Coconut does these automatically) (without `--strict` will show a warning). -- inheriting from `object` in classes (Coconut does this automatically) (without `--strict` will show a warning). -- semicolons at end of lines (without `--strict` will show a warning). -- use of `u` to denote Unicode strings (all Coconut strings are Unicode strings) (without `--strict` will show a warning). -- commas after [statement lambdas](#statement-lambdas) (not recommended as it can be unclear whether the comma is inside or outside the lambda) (without `--strict` will show a warning). -- missing new line at end of file. -- trailing whitespace at end of lines. -- use of the Python-style `lambda` statement (use [Coconut's lambda syntax](#lambdas) instead). -- use of backslash continuation (use [parenthetical continuation](#enhanced-parenthetical-continuation) instead). -- Python-3.10/PEP-634-style dotted names in pattern-matching (Coconut style is to preface these with `==`). -- use of `:` instead of `<:` to specify upper bounds in [Coconut's type parameter syntax](#type-parameter-syntax). 
+- mixing of tabs and spaces +- use of `from __future__` imports (Coconut does these automatically) +- inheriting from `object` in classes (Coconut does this automatically) +- semicolons at end of lines +- use of `u` to denote Unicode strings (all Coconut strings are Unicode strings) +- `f`-strings with no format expressions in them +- commas after [statement lambdas](#statement-lambdas) (not recommended as it can be unclear whether the comma is inside or outside the lambda) +- missing new line at end of file +- trailing whitespace at end of lines +- use of the Python-style `lambda` statement (use [Coconut's lambda syntax](#lambdas) instead) +- use of backslash continuation (use [parenthetical continuation](#enhanced-parenthetical-continuation) instead) +- Python-3.10/PEP-634-style dotted names in pattern-matching (Coconut style is to preface these with `==`) +- use of `:` instead of `<:` to specify upper bounds in [Coconut's type parameter syntax](#type-parameter-syntax) + +Note that many of the above style issues will still show a warning if `--strict` is not present. ## Integrations @@ -389,7 +401,7 @@ If you use [IPython](http://ipython.org/) (the Python kernel for the [Jupyter](h If Coconut is used as a kernel, all code in the console or notebook will be sent directly to Coconut instead of Python to be evaluated. Otherwise, the Coconut kernel behaves exactly like the IPython kernel, including support for `%magic` commands. -Simply installing Coconut should add a `Coconut` kernel to your Jupyter/IPython notebooks. 
If you are having issues accessing the Coconut kernel, however, the command `coconut --jupyter` will re-install the `Coconut` kernel to ensure it is using the current Python as well as add the additional kernels `Coconut (Default Python)`, `Coconut (Default Python 2)`, and `Coconut (Default Python 3)` which will use, respectively, the Python accessible as `python`, `python2`, and `python3` (these kernels are accessible in the console as `coconut_py`, `coconut_py2`, and `coconut_py3`). Furthermore, the Coconut kernel fully supports [`nb_conda_kernels`](https://github.com/Anaconda-Platform/nb_conda_kernels) to enable accessing the Coconut kernel in one Conda environment from another Conda environment. +Simply installing Coconut should add a `Coconut` kernel to your Jupyter/IPython notebooks. If you are having issues accessing the Coconut kernel, however, the special command `coconut --jupyter install` will re-install the `Coconut` kernel to ensure it is using the current Python as well as add the additional kernels `Coconut (Default Python)`, `Coconut (Default Python 2)`, and `Coconut (Default Python 3)` which will use, respectively, the Python accessible as `python`, `python2`, and `python3` (these kernels are accessible in the console as `coconut_py`, `coconut_py2`, and `coconut_py3`). Coconut also supports `coconut --jupyter install --user` for user installation. Furthermore, the Coconut kernel fully supports [`nb_conda_kernels`](https://github.com/Anaconda-Platform/nb_conda_kernels) to enable accessing the Coconut kernel in one Conda environment from another Conda environment. The Coconut kernel will always compile using the parameters: `--target sys --line-numbers --keep-lines --no-wrap-types`. @@ -475,9 +487,9 @@ user@computer ~ $ $(ls -la) |> .splitlines() |> len 30 ``` -Note that the way that Coconut integrates with `xonsh`, `@()` syntax and the `execx` command will only work with Python code, not Coconut code. 
+Compilation always uses the same parameters as in the [Coconut Jupyter kernel](#kernel). -Additionally, Coconut will only compile individual commands—Coconut will not touch the `.xonshrc` or any other `.xsh` files. +Note that the way that Coconut integrates with `xonsh`, `@()` syntax and the `execx` command will only work with Python code, not Coconut code. Additionally, Coconut will only compile individual commands—Coconut will not touch the `.xonshrc` or any other `.xsh` files. ## Operators @@ -524,7 +536,7 @@ and left (short-circuits) or left (short-circuits) x if c else y, ternary left (short-circuits) if c then x else y --> right +=> right ====================== ========================== ``` @@ -532,19 +544,21 @@ For example, since addition has a higher precedence than piping, expressions of ### Lambdas -Coconut provides the simple, clean `->` operator as an alternative to Python's `lambda` statements. The syntax for the `->` operator is `(parameters) -> expression` (or `parameter -> expression` for one-argument lambdas). The operator has the same precedence as the old statement, which means it will often be necessary to surround the lambda in parentheses, and is right-associative. +Coconut provides the simple, clean `=>` operator as an alternative to Python's `lambda` statements. The syntax for the `=>` operator is `(parameters) => expression` (or `parameter => expression` for one-argument lambdas). The operator has the same precedence as the old statement, which means it will often be necessary to surround the lambda in parentheses, and is right-associative. -Additionally, Coconut also supports an implicit usage of the `->` operator of the form `(-> expression)`, which is equivalent to `((_=None) -> expression)`, which allows an implicit lambda to be used both when no arguments are required, and when one argument (assigned to `_`) is required. 
+Additionally, Coconut also supports an implicit usage of the `=>` operator of the form `(=> expression)`, which is equivalent to `((_=None) => expression)`, which allows an implicit lambda to be used both when no arguments are required, and when one argument (assigned to `_`) is required. _Note: If normal lambda syntax is insufficient, Coconut also supports an extended lambda syntax in the form of [statement lambdas](#statement-lambdas). Statement lambdas support full statements rather than just expressions and allow for the use of [pattern-matching function definition](#pattern-matching-functions)._ +_Deprecated: `->` can be used as an alternative to `=>`, though `->`-based lambdas are disabled inside type annotations to avoid conflicting with Coconut's [enhanced type annotation syntax](#enhanced-type-annotation)._ + ##### Rationale In Python, lambdas are ugly and bulky, requiring the entire word `lambda` to be written out every time one is constructed. This is fine if in-line functions are very rarely needed, but in functional programming in-line functions are an essential tool. ##### Python Docs -Lambda forms (lambda expressions) have the same syntactic position as expressions. They are a shorthand to create anonymous functions; the expression `(arguments) -> expression` yields a function object. The unnamed object behaves like a function object defined with: +Lambda forms (lambda expressions) have the same syntactic position as expressions. They are a shorthand to create anonymous functions; the expression `(arguments) => expression` yields a function object. 
The unnamed object behaves like a function object defined with: ```coconut def (arguments): return expression @@ -555,7 +569,7 @@ Note that functions created with lambda forms cannot contain statements or annot **Coconut:** ```coconut -dubsums = map((x, y) -> 2*(x+y), range(0, 10), range(10, 20)) +dubsums = map((x, y) => 2*(x+y), range(0, 10), range(10, 20)) dubsums |> list |> print ``` @@ -567,20 +581,20 @@ print(list(dubsums)) #### Implicit Lambdas -Coconut also supports implicit lambdas, which allow a lambda to take either no arguments or a single argument. Implicit lambdas are formed with the usual Coconut lambda operator `->`, in the form `(-> expression)`. This is equivalent to `((_=None) -> expression)`. When an argument is passed to an implicit lambda, it will be assigned to `_`, replacing the default value `None`. +Coconut also supports implicit lambdas, which allow a lambda to take either no arguments or a single argument. Implicit lambdas are formed with the usual Coconut lambda operator `=>`, in the form `(=> expression)`. This is equivalent to `((_=None) => expression)`. When an argument is passed to an implicit lambda, it will be assigned to `_`, replacing the default value `None`. Below are two examples of implicit lambdas. The first uses the implicit argument `_`, while the second does not. **Single Argument Example:** ```coconut -square = (-> _**2) +square = (=> _**2) ``` **No-Argument Example:** ```coconut import random -get_random_number = (-> random.random()) +get_random_number = (=> random.random()) ``` _Note: Nesting implicit lambdas can lead to problems with the scope of the `_` parameter to each lambda. 
It is recommended that nesting implicit lambdas be avoided._ @@ -655,7 +669,7 @@ The None-aware pipe operators here are equivalent to a [monadic bind](https://en For working with `async` functions in pipes, all non-starred pipes support piping into `await` to await the awaitable piped into them, such that `x |> await` is equivalent to `await x`. -Additionally, all pipe operators support a lambda as the last argument, despite lambdas having a lower precedence. Thus, `a |> x -> b |> c` is equivalent to `a |> (x -> b |> c)`, not `a |> (x -> b) |> c`. +Additionally, all pipe operators support a lambda as the last argument, despite lambdas having a lower precedence. Thus, `a |> x => b |> c` is equivalent to `a |> (x => b |> c)`, not `a |> (x => b) |> c`. _Note: To visually spread operations across several lines, just use [parenthetical continuation](#enhanced-parenthetical-continuation)._ @@ -732,6 +746,8 @@ All function composition operators also have in-place versions (e.g. `..=`). Since all forms of function composition always call the first function in the composition (`f` in `f ..> g` and `g` in `f <.. g`) with exactly the arguments passed into the composition, all forms of function composition will preserve all metadata attached to the first function in the composition, including the function's [signature](https://docs.python.org/3/library/inspect.html#inspect.signature) and any of that function's attributes. +_Note: for composing `async` functions, see [`and_then` and `and_then_await`](#and_then-and-and_then_await)._ + ##### Example **Coconut:** @@ -759,7 +775,7 @@ Coconut's iterator slicing is very similar to Python's `itertools.islice`, but u **Coconut:** ```coconut -map(x -> x*2, range(10**100))$[-1] |> print +map(x => x*2, range(10**100))$[-1] |> print ``` **Python:** @@ -809,7 +825,7 @@ x `f` y => f(x, y) x `f` => f(x) `f` => f() ``` -Additionally, infix notation supports a lambda as the last argument, despite lambdas having a lower precedence. 
Thus, ``a `func` b -> c`` is equivalent to `func(a, b -> c)`. +Additionally, infix notation supports a lambda as the last argument, despite lambdas having a lower precedence. Thus, ``a `func` b => c`` is equivalent to `func(a, b => c)`. Coconut also supports infix function definition to make defining functions that are intended for infix usage simpler. The syntax for infix function definition is ```coconut @@ -1043,6 +1059,7 @@ _Note: these are only the default, built-in unicode operators. Coconut supports ##### Full List ``` +⇒ (\u21d2) => "=>" → (\u2192) => "->" × (\xd7) => "*" (only multiplication) ↑ (\u2191) => "**" (only exponentiation) @@ -1060,18 +1077,18 @@ _Note: these are only the default, built-in unicode operators. Coconut supports » (\xbb) => ">>" … (\u2026) => "..." λ (\u03bb) => "lambda" -↦ (\u21a6) => "|>" -↤ (\u21a4) => "<|" -*↦ (*\u21a6) => "|*>" -↤* (\u21a4*) => "<*|" -**↦ (**\u21a6) => "|**>" -↤** (\u21a4**) => "<**|" -?↦ (?\u21a6) => "|?>" -↤? (?\u21a4) => " "|?*>" -↤*? (\u21a4*?) => "<*?|" -?**↦ (?**\u21a6) => "|?**>" -↤**? (\u21a4**?) => "<**?|" +↦ (\u21a6) => "|>" +↤ (\u21a4) => "<|" +*↦ (*\u21a6) => "|*>" +↤* (\u21a4*) => "<*|" +**↦ (**\u21a6) => "|**>" +↤** (\u21a4**) => "<**|" +?↦ (?\u21a6) => "|?>" +↤? (?\u21a4) => " "|?*>" +↤*? (\u21a4*?) => "<*?|" +?**↦ (?**\u21a6) => "|?**>" +↤**? (\u21a4**?) => "<**?|" ∘ (\u2218) => ".." ∘> (\u2218>) => "..>" <∘ (<\u2218) => "<.." @@ -1085,6 +1102,7 @@ _Note: these are only the default, built-in unicode operators. Coconut supports <*?∘ (<*?\u2218) => "<*?.." ∘?**> (\u2218?**>) => "..?**>" <**?∘ (<**?\u2218) => "<**?.." 
+⏨ (\u23e8) => "e" (in scientific notation) ``` ## Keywords @@ -1291,7 +1309,7 @@ _Showcases how to match against iterators, namely that the empty iterator case ( ``` def odd_primes(p=3) = - (p,) :: filter(-> _ % p != 0, odd_primes(p + 2)) + (p,) :: filter(=> _ % p != 0, odd_primes(p + 2)) def primes() = (2,) :: odd_primes() @@ -1328,7 +1346,7 @@ match : ``` where `` is any `match` pattern, `` is the item to match against, `` is an optional additional check, and `` is simply code that is executed if the header above it succeeds. Note the absence of an `in` in the `match` statements: that's because the `` in `case ` is taking its place. If no `else` is present and no match succeeds, then the `case` statement is simply skipped over as with [`match` statements](#match) (though unlike [destructuring assignments](#destructuring-assignment)). -Additionally, `cases` can be used as the top-level keyword instead of `match`, and in such a `case` block `match` is allowed for each case rather than `case`. _DEPRECATED: Coconut also supports `case` instead of `cases` as the top-level keyword for backwards-compatibility purposes._ +Additionally, `cases` can be used as the top-level keyword instead of `match`, and in such a `case` block `match` is allowed for each case rather than `case`. _Deprecated: Coconut also supports `case` instead of `cases` as the top-level keyword for backwards-compatibility purposes._ ##### Examples @@ -1661,21 +1679,23 @@ The statement lambda syntax is an extension of the [normal lambda syntax](#lambd The syntax for a statement lambda is ``` -[async|match|copyclosure] def (arguments) -> statement; statement; ... +[async|match|copyclosure] def (arguments) => statement; statement; ... ``` where `arguments` can be standard function arguments or [pattern-matching function definition](#pattern-matching-functions) arguments and `statement` can be an assignment statement or a keyword statement. 
Note that the `async`, `match`, and [`copyclosure`](#copyclosure-functions) keywords can be combined and can be in any order. If the last `statement` (not followed by a semicolon) in a statement lambda is an `expression`, it will automatically be returned. -Statement lambdas also support implicit lambda syntax such that `def -> _` is equivalent to `def (_=None) -> _` as well as explicitly marking them as pattern-matching such that `match def (x) -> x` will be a pattern-matching function. +Statement lambdas also support implicit lambda syntax such that `def => _` is equivalent to `def (_=None) => _` as well as explicitly marking them as pattern-matching such that `match def (x) => x` will be a pattern-matching function. Note that statement lambdas have a lower precedence than normal lambdas and thus capture things like trailing commas. To avoid confusion, statement lambdas should always be wrapped in their own set of parentheses. +_Deprecated: Statement lambdas also support `->` instead of `=>`. Note that when using `->`, any lambdas in the body of the statement lambda must also use `->` rather than `=>`._ + ##### Example **Coconut:** ```coconut -L |> map$(def (x) -> +L |> map$(def (x) => y = 1/x; y*(1 - y)) ``` @@ -1693,12 +1713,12 @@ map(_lambda, L) Another case where statement lambdas would be used over standard lambdas is when the parameters to the lambda are typed with type annotations. Statement lambdas use the standard Python syntax for adding type annotations to their parameters: ```coconut -f = def (c: str) -> print(c) +f = def (c: str) -> None => print(c) -g = def (a: int, b: int) -> a ** b +g = def (a: int, b: int) -> int => a ** b ``` -However, statement lambdas do not support return type annotations. 
+_Deprecated: if the deprecated `->` is used in place of `=>`, then return type annotations will not be available._ ### Operator Functions @@ -1714,7 +1734,7 @@ A very common thing to do in functional programming is to make use of function v (::) => (itertools.chain) # will not evaluate its arguments lazily ($) => (functools.partial) (.) => (getattr) -(,) => (*args) -> args # (but pickleable) +(,) => (*args) => args # (but pickleable) (+) => (operator.add) (-) => # 1 arg: operator.neg, 2 args: operator.sub (*) => (operator.mul) @@ -1760,8 +1780,8 @@ A very common thing to do in functional programming is to make use of function v (is not) => (operator.is_not) (in) => (operator.contains) (not in) => # negative containment -(assert) => def (cond, msg=None) -> assert cond, msg # (but a better msg if msg is None) -(raise) => def (exc=None, from_exc=None) -> raise exc from from_exc # or just raise if exc is None +(assert) => def (cond, msg=None) => assert cond, msg # (but a better msg if msg is None) +(raise) => def (exc=None, from_exc=None) => raise exc from from_exc # or just raise if exc is None # there are two operator functions that don't require parentheses: .[] => (operator.getitem) .$[] => # iterator slicing operator @@ -1813,7 +1833,7 @@ Additionally, Coconut also supports implicit operator function partials for arbi ``` based on Coconut's [infix notation](#infix-functions) where `` is the name of the function. Additionally, `` `` `` can instead be a [custom operator](#custom-operators) (in that case, no backticks should be used). 
-_DEPRECATED: Coconut also supports `obj.` as an implicit partial for `getattr$(obj)`, but its usage is deprecated and will show a warning to switch to `getattr$(obj)` instead._ +_Deprecated: Coconut also supports `obj.` as an implicit partial for `getattr$(obj)`, but its usage is deprecated and will show a warning to switch to `getattr$(obj)` instead._ ##### Example @@ -1833,11 +1853,11 @@ mod(5, 3) ### Enhanced Type Annotation -Since Coconut syntax is a superset of Python 3 syntax, it supports [Python 3 function type annotation syntax](https://www.python.org/dev/peps/pep-0484/) and [Python 3.6 variable type annotation syntax](https://www.python.org/dev/peps/pep-0526/). By default, Coconut compiles all type annotations into Python-2-compatible type comments. If you want to keep the type annotations instead, simply pass a `--target` that supports them. +Since Coconut syntax is a superset of the latest Python 3 syntax, it supports [Python 3 function type annotation syntax](https://www.python.org/dev/peps/pep-0484/) and [Python 3.6 variable type annotation syntax](https://www.python.org/dev/peps/pep-0526/). By default, Coconut compiles all type annotations into Python-2-compatible type comments. If you want to keep the type annotations instead, simply pass a `--target` that supports them. Since not all supported Python versions support the [`typing`](https://docs.python.org/3/library/typing.html) module, Coconut provides the [`TYPE_CHECKING`](#type_checking) built-in for hiding your `typing` imports and `TypeVar` definitions from being executed at runtime. Coconut will also automatically use [`typing_extensions`](https://pypi.org/project/typing-extensions/) over `typing` objects at runtime when importing them from `typing`, even when they aren't natively supported on the current Python version (this works even if you just do `import typing` and then `typing.`). 
-Furthermore, when compiling type annotations to Python 3 versions without [PEP 563](https://www.python.org/dev/peps/pep-0563/) support, Coconut wraps annotation in strings to prevent them from being evaluated at runtime (note that `--no-wrap-types` disables all wrapping, including via PEP 563 support). +Furthermore, when compiling type annotations to Python 3 versions without [PEP 563](https://www.python.org/dev/peps/pep-0563/) support, Coconut wraps annotation in strings to prevent them from being evaluated at runtime (to avoid this, e.g. if you want to use annotations at runtime, `--no-wrap-types` will disable all wrapping, including via PEP 563 support). Only on `--target 3.13` does `--no-wrap-types` do nothing, since there [PEP 649](https://peps.python.org/pep-0649/) support is used instead. Additionally, Coconut adds special syntax for making type annotations easier and simpler to write. When inside of a type annotation, Coconut treats certain syntax constructs differently, compiling them to type annotations instead of what they would normally represent. Specifically, Coconut applies the following transformations: ```coconut @@ -2088,6 +2108,8 @@ Due to potential confusion, some syntactic constructs are explicitly disallowed - Multiplying two or more numeric literals with implicit coefficient syntax is prohibited, so `10 20` is not allowed. - `await` is not allowed in front of implicit function application and coefficient syntax. To use `await`, simply parenthesize the expression, as in `await (f x)`. 
+_Note: implicit function application and coefficient syntax is disabled when [using Coconut in `xonsh`](#xonsh-support) due to conflicting with console commands._ + ##### Examples **Coconut:** @@ -2585,7 +2607,7 @@ That includes type parameters for classes, [`data` types](#data), and [all types _Warning: until `mypy` adds support for `infer_variance=True` in `TypeVar`, `TypeVar`s created this way will always be invariant._ -Additionally, Coconut supports the alternative bounds syntax of `type NewType[T <: bound] = ...` rather than `type NewType[T: bound] = ...`, to make it more clear that it is an upper bound rather than a type. In `--strict` mode, `<:` is required over `:` for all type parameter bounds. _DEPRECATED: `<=` can also be used as an alternative to `<:`._ +Additionally, Coconut supports the alternative bounds syntax of `type NewType[T <: bound] = ...` rather than `type NewType[T: bound] = ...`, to make it more clear that it is an upper bound rather than a type. In `--strict` mode, `<:` is required over `:` for all type parameter bounds. _Deprecated: `<=` can also be used as an alternative to `<:`._ Note that the `<:` syntax should only be used for [type bounds](https://peps.python.org/pep-0695/#upper-bound-specification), not [type constraints](https://peps.python.org/pep-0695/#constrained-type-specification)—for type constraints, Coconut style prefers the vanilla Python `:` syntax, which helps to disambiguate between the two cases, as they are functionally different but otherwise hard to tell apart at a glance. This is enforced in `--strict` mode. @@ -2677,27 +2699,6 @@ data Node(left, right) from Tree **Python:** _Can't be done without a series of method definitions for each data type. See the compiled code for the Python syntax._ -### Decorators - -Unlike Python, which only supports a single variable or function call in a decorator, Coconut supports any expression as in [PEP 614](https://www.python.org/dev/peps/pep-0614/). 
- -##### Example - -**Coconut:** -```coconut -@ wrapper1 .. wrapper2$(arg) -def func(x) = x**2 -``` - -**Python:** -```coconut_python -def wrapper(func): - return wrapper1(wrapper2(arg, func)) -@wrapper -def func(x): - return x**2 -``` - ### Statement Nesting Coconut supports the nesting of compound statements on the same line. This allows the mixing of `match` and `if` statements together, as well as compound `try` statements. @@ -2768,7 +2769,9 @@ global state_c; state_c += 1 ### Code Passthrough -Coconut supports the ability to pass arbitrary code through the compiler without being touched, for compatibility with other variants of Python, such as [Cython](http://cython.org/) or [Mython](http://mython.org/). Anything placed between `\(` and the corresponding close parenthesis will be passed through, as well as any line starting with `\\`, which will have the additional effect of allowing indentation under it. +Coconut supports the ability to pass arbitrary code through the compiler without being touched, for compatibility with other variants of Python, such as [Cython](http://cython.org/) or [Mython](http://mython.org/). When using Coconut to compile to another variant of Python, make sure you [name your source file properly](#naming-source-files) to ensure the resulting compiled code has the right file extension for the intended usage. + +Anything placed between `\(` and the corresponding close parenthesis will be passed through, as well as any line starting with `\\`, which will have the additional effect of allowing indentation under it. ##### Example @@ -2786,7 +2789,7 @@ cdef f(x): ### Enhanced Parenthetical Continuation -Since Coconut syntax is a superset of Python 3 syntax, Coconut supports the same line continuation syntax as Python. That means both backslash line continuation and implied line continuation inside of parentheses, brackets, or braces will all work. 
+Since Coconut syntax is a superset of the latest Python 3 syntax, Coconut supports the same line continuation syntax as Python. That means both backslash line continuation and implied line continuation inside of parentheses, brackets, or braces will all work. In Python, however, there are some cases (such as multiple `with` statements) where only backslash continuation, and not parenthetical continuation, is supported. Coconut adds support for parenthetical continuation in all these cases. This also includes support as per [PEP 679](https://peps.python.org/pep-0679) for parenthesized `assert` statements. @@ -2921,9 +2924,9 @@ _Simple example of adding a new pattern to a pattern-matching function._ ```coconut "[A], [B]" |> windowsof$(3) |> map$(addpattern( - (def (("[","A","]")) -> "A"), - (def (("[","B","]")) -> "B"), - (def ((_,_,_)) -> None), + (def (("[","A","]")) => "A"), + (def (("[","B","]")) => "B"), + (def ((_,_,_)) => None), )) |> filter$((.is None) ..> (not)) |> list |> print ``` _An example of a case where using the `addpattern` function is necessary over the [`addpattern` keyword](#addpattern-functions) due to the use of in-line pattern-matching [statement lambdas](#statement-lambdas)._ @@ -3269,7 +3272,7 @@ In Haskell, `fmap(func, obj)` takes a data type `obj` and returns a new data typ The behavior of `fmap` for a given object can be overridden by defining an `__fmap__(self, func)` magic method that will be called whenever `fmap` is invoked on that object. Note that `__fmap__` implementations should always satisfy the [Functor Laws](https://wiki.haskell.org/Functor). -For `dict`, or any other `collections.abc.Mapping`, `fmap` will map over the mapping's `.items()` instead of the default iteration through its `.keys()`, with the new mapping reconstructed from the mapped over items. 
_DEPRECATED: `fmap$(starmap_over_mappings=True)` will `starmap` over the `.items()` instead of `map` over them._ +For `dict`, or any other `collections.abc.Mapping`, `fmap` will map over the mapping's `.items()` instead of the default iteration through its `.keys()`, with the new mapping reconstructed from the mapped over items. _Deprecated: `fmap$(starmap_over_mappings=True)` will `starmap` over the `.items()` instead of `map` over them._ For [`numpy`](#numpy-integration) objects, `fmap` will use [`np.vectorize`](https://docs.scipy.org/doc/numpy/reference/generated/numpy.vectorize.html) to produce the result. @@ -3283,20 +3286,20 @@ async def fmap_over_async_iters(func, async_iter): ``` such that `fmap` can effectively be used as an async map. -_DEPRECATED: `fmap(func, obj, fallback_to_init=True)` will fall back to `obj.__class__(map(func, obj))` if no `fmap` implementation is available rather than raise `TypeError`._ +_Deprecated: `fmap(func, obj, fallback_to_init=True)` will fall back to `obj.__class__(map(func, obj))` if no `fmap` implementation is available rather than raise `TypeError`._ ##### Example **Coconut:** ```coconut -[1, 2, 3] |> fmap$(x -> x+1) == [2, 3, 4] +[1, 2, 3] |> fmap$(x => x+1) == [2, 3, 4] class Maybe data Nothing() from Maybe data Just(n) from Maybe -Just(3) |> fmap$(x -> x*2) == Just(6) -Nothing() |> fmap$(x -> x*2) == Nothing() +Just(3) |> fmap$(x => x*2) == Just(6) +Nothing() |> fmap$(x => x*2) == Nothing() ``` **Python:** @@ -3314,7 +3317,7 @@ def call(f, /, *args, **kwargs) = f(*args, **kwargs) `call` is primarily useful as an [operator function](#operator-functions) for function application when writing in a point-free style. -_DEPRECATED: `of` is available as a deprecated alias for `call`. Note that deprecated features are disabled in `--strict` mode._ +_Deprecated: `of` is available as a deprecated alias for `call`. 
Note that deprecated features are disabled in `--strict` mode._ #### `safe_call` @@ -3335,12 +3338,60 @@ def safe_call(f, /, *args, **kwargs): **Coconut:** ```coconut -res, err = safe_call(-> 1 / 0) |> fmap$(.+1) +res, err = safe_call(=> 1 / 0) |> fmap$(.+1) ``` **Python:** _Can't be done without a complex `Expected` definition. See the compiled code for the Python syntax._ +#### `ident` + +**ident**(_x_, *, _side\_effect_=`None`) + +Coconut's `ident` is the identity function, generally equivalent to `x => x`. + +`ident` also accepts one keyword-only argument, `side_effect`, which specifies a function to call on the argument before it is returned. Thus, `ident` is effectively equivalent to: +```coconut +def ident(x, *, side_effect=None): + if side_effect is not None: + side_effect(x) + return x +``` + +`ident` is primarily useful when writing in a point-free style (e.g. in combination with [`lift`](#lift)) or for debugging [pipes](#pipes) where `ident$(side_effect=print)` can let you see what is being piped. + +#### `const` + +**const**(_value_) + +Coconut's `const` simply constructs a function that, whatever its arguments, just returns the given value. Thus, `const` is equivalent to a pickleable version of +```coconut +def const(value) = (*args, **kwargs) => value +``` + +`const` is primarily useful when writing in a point-free style (e.g. in combination with [`lift`](#lift)). + +#### `flip` + +**flip**(_func_, _nargs_=`None`) + +Coconut's `flip(f, nargs=None)` is a higher-order function that, given a function `f`, returns a new function with reversed argument order. If `nargs` is passed, only the first `nargs` arguments are reversed. + +For the binary case, `flip` works as +```coconut +flip(f, 2)(x, y) == f(y, x) +``` +such that `flip$(?, 2)` implements the `C` combinator (`flip` in Haskell). 
+ +In the general case, `flip` is equivalent to a pickleable version of +```coconut +def flip(f, nargs=None) = + (*args, **kwargs) => ( + f(*args[::-1], **kwargs) if nargs is None + else f(*(args[nargs-1::-1] + args[nargs:]), **kwargs) + ) +``` + #### `lift` **lift**(_func_) @@ -3358,8 +3409,8 @@ such that in this case `lift` implements the `S'` combinator (`liftA2` or `liftM In the general case, `lift` is equivalent to a pickleable version of ```coconut def lift(f) = ( - (*func_args, **func_kwargs) -> - (*args, **kwargs) -> + (*func_args, **func_kwargs) => + (*args, **kwargs) => f( *(g(*args, **kwargs) for g in func_args), **{k: h(*args, **kwargs) for k, h in func_kwargs.items()} @@ -3373,7 +3424,7 @@ def lift(f) = ( **Coconut:** ```coconut -xs_and_xsp1 = ident `lift(zip)` map$(->_+1) +xs_and_xsp1 = ident `lift(zip)` map$(=>_+1) min_and_max = lift(,)(min, max) plus_and_times = (+) `lift(,)` (*) ``` @@ -3388,54 +3439,58 @@ def plus_and_times(x, y): return x + y, x * y ``` -#### `flip` +#### `and_then` and `and_then_await` -**flip**(_func_, _nargs_=`None`) +Coconut provides the `and_then` and `and_then_await` built-ins for composing `async` functions. Specifically: +* To forwards compose an async function `async_f` with a normal function `g` (such that `g` is called on the result of `await`ing `async_f`), write ``async_f `and_then` g``. +* To forwards compose an async function `async_f` with another async function `async_g` (such that `async_g` is called on the result of `await`ing `async_f`, and then `async_g` is itself awaited), write ``async_f `and_then_await` async_g``. +* To forwards compose a normal function `f` with an async function `async_g` (such that `async_g` is called on the result of `f`), just write `f ..> async_g`. -Coconut's `flip(f, nargs=None)` is a higher-order function that, given a function `f`, returns a new function with reversed argument order. If `nargs` is passed, only the first `nargs` arguments are reversed. 
+Note that all of the above will always result in the resulting composition being an `async` function. -For the binary case, `flip` works as +The built-ins are effectively equivalent to: ```coconut -flip(f, 2)(x, y) == f(y, x) -``` -such that `flip$(?, 2)` implements the `C` combinator (`flip` in Haskell). +def and_then[**T, U, V]( + first_async_func: async (**T) -> U, + second_func: U -> V, +) -> async (**T) -> V = + async def (*args, **kwargs) => ( + first_async_func(*args, **kwargs) + |> await + |> second_func + ) -In the general case, `flip` is equivalent to a pickleable version of -```coconut -def flip(f, nargs=None) = - (*args, **kwargs) -> ( - f(*args[::-1], **kwargs) if nargs is None - else f(*(args[nargs-1::-1] + args[nargs:]), **kwargs) +def and_then_await[**T, U, V]( + first_async_func: async (**T) -> U, + second_async_func: async U -> V, +) -> async (**T) -> V = + async def (*args, **kwargs) => ( + first_async_func(*args, **kwargs) + |> await + |> second_async_func + |> await ) ``` -#### `const` +Like normal [function composition](#function-composition), `and_then` and `and_then_await` will preserve all metadata attached to the first function in the composition. -**const**(_value_) +##### Example -Coconut's `const` simply constructs a function that, whatever its arguments, just returns the given value. Thus, `const` is equivalent to a pickleable version of +**Coconut:** ```coconut -def const(value) = (*args, **kwargs) -> value +load_and_send_data = ( + load_data_async() + `and_then` proc_data + `and_then_await` send_data +) ``` -`const` is primarily useful when writing in a point-free style (e.g. in combination with [`lift`](#lift)). - -#### `ident` - -**ident**(_x_, *, _side\_effect_=`None`) - -Coconut's `ident` is the identity function, generally equivalent to `x -> x`. - -`ident` also accepts one keyword-only argument, `side_effect`, which specifies a function to call on the argument before it is returned. 
Thus, `ident` is effectively equivalent to: -```coconut -def ident(x, *, side_effect=None): - if side_effect is not None: - side_effect(x) - return x +**Python:** +```coconut_python +async def load_and_send_data(): + return await send_data(proc_data(await load_data_async())) ``` -`ident` is primarily useful when writing in a point-free style (e.g. in combination with [`lift`](#lift)) or for debugging [pipes](#pipes) where `ident$(side_effect=print)` can let you see what is being piped. - ### Built-Ins for Working with Iterators ```{contents} @@ -3468,13 +3523,13 @@ Coconut's `map`, `zip`, `filter`, `reversed`, and `enumerate` objects are enhanc Though Coconut provides random access indexing/slicing to `range`, `map`, `zip`, `reversed`, and `enumerate`, Coconut cannot index into built-ins like `filter`, `takewhile`, or `dropwhile` directly, as there is no efficient way to do so. ```coconut -range(10) |> filter$(i->i>3) |> .[0] # doesn't work +range(10) |> filter$(i => i>3) |> .[0] # doesn't work ``` In order to make this work, you can explicitly use iterator slicing, which is less efficient in the general case: ```coconut -range(10) |> filter$(i->i>3) |> .$[0] # works +range(10) |> filter$(i => i>3) |> .$[0] # works ``` For more information on Coconut's iterator slicing, see [here](#iterator-slicing). @@ -3484,7 +3539,7 @@ For more information on Coconut's iterator slicing, see [here](#iterator-slicing **Coconut:** ```coconut map((+), range(5), range(6)) |> len |> print -range(10) |> filter$((x) -> x < 5) |> reversed |> tuple |> print +range(10) |> filter$((x) => x < 5) |> reversed |> tuple |> print ``` **Python:** @@ -3494,7 +3549,7 @@ _Can't be done without defining a custom `map` type. 
The full definition of `map ```coconut range(0, 12, 2)[4] # 8 -map((i->i*2), range(10))[2] # 4 +map((i => i*2), range(10))[2] # 4 ``` **Python:** @@ -3510,7 +3565,7 @@ Coconut re-introduces Python 2's `reduce` built-in, using the `functools.reduce` **reduce**(_function, iterable_**[**_, initial_**]**) -Apply _function_ of two arguments cumulatively to the items of _sequence_, from left to right, so as to reduce the sequence to a single value. For example, `reduce((x, y) -> x+y, [1, 2, 3, 4, 5])` calculates `((((1+2)+3)+4)+5)`. The left argument, _x_, is the accumulated value and the right argument, _y_, is the update value from the _sequence_. If the optional _initial_ is present, it is placed before the items of the sequence in the calculation, and serves as a default when the sequence is empty. If _initial_ is not given and _sequence_ contains only one item, the first item is returned. +Apply _function_ of two arguments cumulatively to the items of _sequence_, from left to right, so as to reduce the sequence to a single value. For example, `reduce((x, y) => x+y, [1, 2, 3, 4, 5])` calculates `((((1+2)+3)+4)+5)`. The left argument, _x_, is the accumulated value and the right argument, _y_, is the update value from the _sequence_. If the optional _initial_ is present, it is placed before the items of the sequence in the calculation, and serves as a default when the sequence is empty. If _initial_ is not given and _sequence_ contains only one item, the first item is returned. 
##### Example @@ -3657,7 +3712,7 @@ def takewhile(predicate, iterable): **Coconut:** ```coconut -negatives = numiter |> takewhile$(x -> x < 0) +negatives = numiter |> takewhile$(x => x < 0) ``` **Python:** @@ -3693,7 +3748,7 @@ def dropwhile(predicate, iterable): **Coconut:** ```coconut -positives = numiter |> dropwhile$(x -> x < 0) +positives = numiter |> dropwhile$(x => x < 0) ``` **Python:** @@ -4197,7 +4252,7 @@ In the process of lazily applying operations to iterators, eventually a point is **Coconut:** ```coconut -range(10) |> map$((x) -> x**2) |> map$(print) |> consume +range(10) |> map$((x) => x**2) |> map$(print) |> consume ``` **Python:** @@ -4325,9 +4380,15 @@ Recommended usage is as a debugging tool, where the code `from coconut import em ### Automatic Compilation -If you don't care about the exact compilation parameters you want to use, automatic compilation lets Coconut take care of everything for you. Automatic compilation can be enabled either by importing [`coconut.api`](#coconut-api) before you import anything else, or by running `coconut --site-install`. Once automatic compilation is enabled, Coconut will check each of your imports to see if you are attempting to import a `.coco` file and, if so, automatically compile it for you. Note that, for Coconut to know what file you are trying to import, it will need to be accessible via `sys.path`, just like a normal import. +Automatic compilation lets you simply import Coconut files directly without having to go through a compilation step first. Automatic compilation can be enabled either by importing [`coconut.api`](#coconut-api) before you import anything else, or by running `coconut --site-install`. + +Once automatic compilation is enabled, Coconut will check each of your imports to see if you are attempting to import a `.coco` file and, if so, automatically compile it for you. 
Note that, for Coconut to know what file you are trying to import, it will need to be accessible via `sys.path`, just like a normal import. -Automatic compilation always compiles modules and packages in-place, and always uses `--target sys`. Automatic compilation is always available in the Coconut interpreter, and, if using the Coconut interpreter, a `reload` built-in is provided to easily reload imported modules. Additionally, the interpreter always allows importing from the current working directory, letting you easily compile and play around with a `.coco` file simply by running the Coconut interpreter and importing it. +Automatic compilation always compiles with `--target sys --line-numbers --keep-lines` by default. On Python 3.4+, automatic compilation will use a `__coconut_cache__` directory to cache the compiled Python. Note that `__coconut_cache__` will always be removed from `__file__`. + +Automatic compilation is always available in the Coconut interpreter or when using [`coconut-run`](#coconut-scripts). When using auto compilation through the Coconut interpreter, any compilation options passed in will also be used for auto compilation. Additionally, the interpreter always allows importing from the current working directory, letting you easily compile and play around with a `.coco` file simply by running the Coconut interpreter and importing it. + +If using the Coconut interpreter, a `reload` built-in is always provided to easily reload (and thus recompile) imported modules. ### Coconut Encoding @@ -4335,19 +4396,19 @@ While automatic compilation is the preferred method for dynamically compiling Co ```coconut # coding: coconut ``` -declaration which can be added to `.py` files to have them treated as Coconut files instead. To use such a coding declaration, you'll need to either run `coconut --site-install` or `import coconut.api` at some point before you first attempt to import a file with a `# coding: coconut` declaration. 
Like automatic compilation, compilation is always done with `--target sys` and is always available from the Coconut interpreter. +declaration which can be added to `.py` files to have them treated as Coconut files instead. To use such a coding declaration, you'll need to either run `coconut --site-install` or `import coconut.api` at some point before you first attempt to import a file with a `# coding: coconut` declaration. Like automatic compilation, the Coconut encoding is always available from the Coconut interpreter. Compilation always uses the same parameters as in the [Coconut Jupyter kernel](#kernel). ### `coconut.api` In addition to enabling automatic compilation, `coconut.api` can also be used to call the Coconut compiler from code instead of from the command line. See below for specifications of the different api functions. -_DEPRECATED: `coconut.convenience` is a deprecated alias for `coconut.api`._ +_Deprecated: `coconut.convenience` is a deprecated alias for `coconut.api`._ #### `get_state` **coconut.api.get\_state**(_state_=`None`) -Gets a state object which stores the current compilation parameters. State objects can be configured with [**setup**](#setup) or [**cmd**](#cmd) and then used in [**parse**](#parse) or [**coconut\_eval**](#coconut_eval). +Gets a state object which stores the current compilation parameters. State objects can be configured with [**setup**](#setup) or [**cmd**](#cmd) and then used in [**parse**](#parse) or other endpoints. If _state_ is `None`, gets a new state object, whereas if _state_ is `False`, the global state object is returned. 

@@ -4410,19 +4471,17 @@

#### `setup`

-**coconut.api.setup**(_target_=`None`, _strict_=`False`, _minify_=`False`, _line\_numbers_=`False`, _keep\_lines_=`False`, _no\_tco_=`False`, _no\_wrap_=`False`, *, _state_=`False`)
+**coconut.api.setup**(_target_=`None`, _strict_=`False`, _minify_=`False`, _line\_numbers_=`True`, _keep\_lines_=`False`, _no\_tco_=`False`, _no\_wrap_=`False`, *, _state_=`False`)
+
+`setup` can be used to set up the given state object with the given compilation parameters, each corresponding to the command-line flag of the same name. _target_ should be either `None` for the default target or a string of any [allowable target](#allowable-targets).

-`setup` can be used to set up the given state object with the given command-line flags. If _state_ is `False`, the global state object is used.
+If _state_ is `False`, the global state object is used.

-The possible values for each flag argument are:
+#### `warm_up`

-- _target_: `None` (default), or any [allowable target](#allowable-targets)
-- _strict_: `False` (default) or `True`
-- _minify_: `False` (default) or `True`
-- _line\_numbers_: `False` (default) or `True`
-- _keep\_lines_: `False` (default) or `True`
-- _no\_tco_: `False` (default) or `True`
-- _no\_wrap_: `False` (default) or `True`
+**coconut.api.warm_up**(_force_=`True`, _enable\_incremental\_mode_=`False`, *, _state_=`False`)
+
+Can optionally be called to warm up the compiler and get it ready for parsing. Passing _force_ will cause the warm up to take longer but will substantially reduce parsing times (by default, this level of warm up is only done when the compiler encounters a large file). Passing _enable\_incremental\_mode_ will enable the compiler's incremental mode, where parsing some string, then later parsing a continuation of that string, will yield substantial performance improvements. 
#### `cmd` @@ -4432,6 +4491,12 @@ Executes the given _args_ as if they were fed to `coconut` on the command-line, Has the same effect of setting the command-line flags on the given _state_ object as `setup` (with the global `state` object used when _state_ is `False`). +#### `coconut_exec` + +**coconut.api.coconut_exec**(_expression_, _globals_=`None`, _locals_=`None`, _state_=`False`, _keep\_internal\_state_=`None`) + +Version of [`exec`](https://docs.python.org/3/library/functions.html#exec) which can execute Coconut code. + #### `coconut_eval` **coconut.api.coconut_eval**(_expression_, _globals_=`None`, _locals_=`None`, _state_=`False`, _keep\_internal\_state_=`None`) @@ -4452,10 +4517,14 @@ Retrieves a string containing information about the Coconut version. The optiona #### `auto_compilation` -**coconut.api.auto_compilation**(_on_=`True`) +**coconut.api.auto_compilation**(_on_=`True`, _args_=`None`, _use\_cache\_dir_=`None`) Turns [automatic compilation](#automatic-compilation) on or off. This function is called automatically when `coconut.api` is imported. +If _args_ is passed, it will set the Coconut command-line arguments to use for automatic compilation. Arguments will be processed the same way as with [`coconut-run`](#coconut-scripts) such that `--quiet --target sys --keep-lines` will all be set by default. + +If _use\_cache\_dir_ is passed, it will turn on or off the usage of a `__coconut_cache__` directory to put compile files in rather than compiling them in-place. Note that `__coconut_cache__` will always be removed from `__file__`. + #### `use_coconut_breakpoint` **coconut.api.use_coconut_breakpoint**(_on_=`True`) diff --git a/HELP.md b/HELP.md index 99b1a5c4b..8c78644af 100644 --- a/HELP.md +++ b/HELP.md @@ -349,11 +349,11 @@ return acc Now let's take a look at what we do to `reduce` to make it multiply all the numbers we feed into it together. The Coconut code that we saw for that was `reduce$(*)`. 
There are two different Coconut constructs being used here: the operator function for multiplication in the form of `(*)`, and partial application in the form of `$`. -First, the operator function. In Coconut, a function form of any operator can be retrieved by surrounding that operator in parentheses. In this case, `(*)` is roughly equivalent to `lambda x, y: x*y`, but much cleaner and neater. In Coconut's lambda syntax, `(*)` is also equivalent to `(x, y) -> x*y`, which we will use from now on for all lambdas, even though both are legal Coconut, because Python's `lambda` statement is too ugly and bulky to use regularly. +First, the operator function. In Coconut, a function form of any operator can be retrieved by surrounding that operator in parentheses. In this case, `(*)` is roughly equivalent to `lambda x, y: x*y`, but much cleaner and neater. In Coconut's lambda syntax, `(*)` is also equivalent to `(x, y) => x*y`, which we will use from now on for all lambdas, even though both are legal Coconut, because Python's `lambda` statement is too ugly and bulky to use regularly. _Note: If Coconut's `--strict` mode is enabled, which will force your code to obey certain cleanliness standards, it will raise an error whenever Python `lambda` statements are used._ -Second, the partial application. Think of partial application as _lazy function calling_, and `$` as the _lazy-ify_ operator, where lazy just means "don't evaluate this until you need to." In Coconut, if a function call is prefixed by a `$`, like in this example, instead of actually performing the function call, a new function is returned with the given arguments already provided to it, so that when it is then called, it will be called with both the partially-applied arguments and the new arguments, in that order. In this case, `reduce$(*)` is roughly equivalent to `(*args, **kwargs) -> reduce((*), *args, **kwargs)`. +Second, the partial application. 
Think of partial application as _lazy function calling_, and `$` as the _lazy-ify_ operator, where lazy just means "don't evaluate this until you need to." In Coconut, if a function call is prefixed by a `$`, like in this example, instead of actually performing the function call, a new function is returned with the given arguments already provided to it, so that when it is then called, it will be called with both the partially-applied arguments and the new arguments, in that order. In this case, `reduce$(*)` is roughly equivalent to `(*args, **kwargs) => reduce((*), *args, **kwargs)`. _You can partially apply arguments in any order using `?` in place of missing arguments, as in `to_binary = int$(?, 2)`._ @@ -531,7 +531,7 @@ data vector2(x, y): # Test cases: vector2(1, 2) |> print # vector2(x=1, y=2) vector2(3, 4) |> abs |> print # 5 -vector2(1, 2) |> fmap$(x -> x*2) |> print # vector2(x=2, y=4) +vector2(1, 2) |> fmap$(x => x*2) |> print # vector2(x=2, y=4) v = vector2(2, 3) v.x = 7 # AttributeError ``` @@ -579,7 +579,7 @@ Now that we have a constructor for our n-vector, it's time to write its methods. """Return the magnitude of the vector.""" self.pts |> map$(.**2) |> sum |> (.**0.5) ``` -The basic algorithm here is map square over each element, sum them all, then square root the result. The one new construct here is the `(.**2)` and `(.**0.5)` syntax, which are effectively equivalent to `(x -> x**2)` and `(x -> x**0.5)`, respectively (though the `(.**2)` syntax produces a pickleable object). This syntax works for all [operator functions](./DOCS.md#operator-functions), so you can do things like `(1-.)` or `(cond() or .)`. +The basic algorithm here is map square over each element, sum them all, then square root the result. The one new construct here is the `(.**2)` and `(.**0.5)` syntax, which are effectively equivalent to `(x => x**2)` and `(x => x**0.5)`, respectively (though the `(.**2)` syntax produces a pickleable object). 
This syntax works for all [operator functions](./DOCS.md#operator-functions), so you can do things like `(1-.)` or `(cond() or .)`. Next up is vector addition. The goal here is to add two vectors of equal length by adding their components. To do this, we're going to make use of Coconut's ability to perform pattern-matching, or in this case destructuring assignment, to data types, like so: ```coconut @@ -733,7 +733,7 @@ _Hint: the `n`th diagonal should contain `n+1` elements, so try starting with `r That wasn't so bad, now was it? Now, let's take a look at my solution: ```coconut -def diagonal_line(n) = range(n+1) |> map$(i -> (i, n-i)) +def diagonal_line(n) = range(n+1) |> map$(i => (i, n-i)) ``` Pretty simple, huh? We take `range(n+1)`, and use `map` to transform it into the right sequence of tuples. @@ -856,7 +856,7 @@ data vector(*pts): """Necessary to make scalar multiplication commutative.""" self * other -def diagonal_line(n) = range(n+1) |> map$(i -> (i, n-i)) +def diagonal_line(n) = range(n+1) |> map$(i => (i, n-i)) def linearized_plane(n=0) = diagonal_line(n) :: linearized_plane(n+1) def vector_field() = linearized_plane() |> starmap$(vector) @@ -919,7 +919,7 @@ _Hint: Look back at how we implemented scalar multiplication._ Here's my solution for you to check against: ```coconut - def __truediv__(self, other) = self.pts |> map$(x -> x/other) |*> vector + def __truediv__(self, other) = self.pts |> map$(x => x/other) |*> vector ``` ### `.unit` @@ -1036,7 +1036,7 @@ data vector(*pts): """Necessary to make scalar multiplication commutative.""" self * other # New one-line functions necessary for finding the angle between vectors: - def __truediv__(self, other) = self.pts |> map$(x -> x/other) |*> vector + def __truediv__(self, other) = self.pts |> map$(x => x/other) |*> vector def unit(self) = self / abs(self) def angle(self, other `isinstance` vector) = math.acos(self.unit() * other.unit()) @@ -1082,7 +1082,7 @@ abcd$[2] ### Function Composition -Next is 
function composition. In Coconut, this is primarily accomplished through the `f1 ..> f2` operator, which takes two functions and composes them, creating a new function equivalent to `(*args, **kwargs) -> f2(f1(*args, **kwargs))`. This can be useful in combination with partial application for piecing together multiple higher-order functions, like so: +Next is function composition. In Coconut, this is primarily accomplished through the `f1 ..> f2` operator, which takes two functions and composes them, creating a new function equivalent to `(*args, **kwargs) => f2(f1(*args, **kwargs))`. This can be useful in combination with partial application for piecing together multiple higher-order functions, like so: ```coconut zipsum = zip ..> map$(sum) ``` @@ -1111,9 +1111,9 @@ Another useful trick with function composition involves composing a function wit def inc_or_dec(t): # Our higher-order function, which returns another function if t: - return x -> x+1 + return x => x+1 else: - return x -> x-1 + return x => x-1 def square(n) = n * n diff --git a/Makefile b/Makefile index 2e7c73d2d..99ddd3752 100644 --- a/Makefile +++ b/Makefile @@ -2,6 +2,10 @@ .PHONY: test test: test-mypy +# same as test, but for testing only changes to the tests +.PHONY: test-tests +test-tests: test-mypy-tests + .PHONY: dev dev: clean setup python -m pip install --upgrade -e .[dev] @@ -22,17 +26,17 @@ dev-py3: clean setup-py3 .PHONY: setup setup: python -m ensurepip - python -m pip install --upgrade setuptools wheel pip pytest_remotedata + python -m pip install --upgrade setuptools wheel pip pytest_remotedata cython .PHONY: setup-py2 setup-py2: python2 -m ensurepip - python2 -m pip install --upgrade "setuptools<58" wheel pip pytest_remotedata + python2 -m pip install --upgrade "setuptools<58" wheel pip pytest_remotedata cython .PHONY: setup-py3 setup-py3: python3 -m ensurepip - python3 -m pip install --upgrade setuptools wheel pip pytest_remotedata + python3 -m pip install --upgrade setuptools wheel 
pip pytest_remotedata cython .PHONY: setup-pypy setup-pypy: @@ -78,16 +82,16 @@ test-all: clean .PHONY: test-univ test-univ: export COCONUT_USE_COLOR=TRUE test-univ: clean - python ./coconut/tests --strict --line-numbers --keep-lines --force + python ./coconut/tests --strict --keep-lines --force python ./coconut/tests/dest/runner.py python ./coconut/tests/dest/extras.py # same as test-univ, but doesn't recompile unchanged test files; # should only be used when testing the tests not the compiler -.PHONY: test-tests -test-tests: export COCONUT_USE_COLOR=TRUE -test-tests: clean - python ./coconut/tests --strict --line-numbers --keep-lines +.PHONY: test-univ-tests +test-univ-tests: export COCONUT_USE_COLOR=TRUE +test-univ-tests: clean-no-tests + python ./coconut/tests --strict --keep-lines python ./coconut/tests/dest/runner.py python ./coconut/tests/dest/extras.py @@ -95,7 +99,7 @@ test-tests: clean .PHONY: test-py2 test-py2: export COCONUT_USE_COLOR=TRUE test-py2: clean - python2 ./coconut/tests --strict --line-numbers --keep-lines --force + python2 ./coconut/tests --strict --keep-lines --force python2 ./coconut/tests/dest/runner.py python2 ./coconut/tests/dest/extras.py @@ -103,7 +107,7 @@ test-py2: clean .PHONY: test-py3 test-py3: export COCONUT_USE_COLOR=TRUE test-py3: clean - python3 ./coconut/tests --strict --line-numbers --keep-lines --force --target 3 + python3 ./coconut/tests --strict --keep-lines --force --target 3 python3 ./coconut/tests/dest/runner.py python3 ./coconut/tests/dest/extras.py @@ -111,7 +115,7 @@ test-py3: clean .PHONY: test-pypy test-pypy: export COCONUT_USE_COLOR=TRUE test-pypy: clean - pypy ./coconut/tests --strict --line-numbers --keep-lines --force + pypy ./coconut/tests --strict --keep-lines --force pypy ./coconut/tests/dest/runner.py pypy ./coconut/tests/dest/extras.py @@ -119,19 +123,19 @@ test-pypy: clean .PHONY: test-pypy3 test-pypy3: export COCONUT_USE_COLOR=TRUE test-pypy3: clean - pypy3 ./coconut/tests --strict --line-numbers 
--keep-lines --force - pypy3 ./coconut/tests/dest/runner.py - pypy3 ./coconut/tests/dest/extras.py - -# same as test-pypy3 but includes verbose output for better debugging -.PHONY: test-pypy3-verbose -test-pypy3-verbose: export COCONUT_USE_COLOR=TRUE -test-pypy3-verbose: clean - pypy3 ./coconut/tests --strict --line-numbers --keep-lines --force --verbose --jobs 0 + pypy3 ./coconut/tests --strict --keep-lines --force pypy3 ./coconut/tests/dest/runner.py pypy3 ./coconut/tests/dest/extras.py # same as test-univ but also runs mypy +.PHONY: test-mypy-univ +test-mypy-univ: export COCONUT_USE_COLOR=TRUE +test-mypy-univ: clean + python ./coconut/tests --strict --force --keep-lines --mypy --follow-imports silent --ignore-missing-imports --allow-redefinition + python ./coconut/tests/dest/runner.py + python ./coconut/tests/dest/extras.py + +# same as test-mypy-univ but uses --target sys .PHONY: test-mypy test-mypy: export COCONUT_USE_COLOR=TRUE test-mypy: clean @@ -139,11 +143,11 @@ test-mypy: clean python ./coconut/tests/dest/runner.py python ./coconut/tests/dest/extras.py -# same as test-mypy but uses the universal target -.PHONY: test-mypy-univ -test-mypy-univ: export COCONUT_USE_COLOR=TRUE -test-mypy-univ: clean - python ./coconut/tests --strict --force --keep-lines --mypy --follow-imports silent --ignore-missing-imports --allow-redefinition +# same as test-mypy but doesn't use --force +.PHONY: test-mypy-tests +test-mypy-tests: export COCONUT_USE_COLOR=TRUE +test-mypy-tests: clean-no-tests + python ./coconut/tests --strict --target sys --keep-lines --mypy --follow-imports silent --ignore-missing-imports --allow-redefinition python ./coconut/tests/dest/runner.py python ./coconut/tests/dest/extras.py @@ -151,7 +155,15 @@ test-mypy-univ: clean .PHONY: test-verbose test-verbose: export COCONUT_USE_COLOR=TRUE test-verbose: clean - python ./coconut/tests --strict --line-numbers --keep-lines --force --verbose --jobs 0 + python ./coconut/tests --strict --keep-lines --force 
--verbose + python ./coconut/tests/dest/runner.py + python ./coconut/tests/dest/extras.py + +# same as test-univ but includes verbose output for better debugging and is fully synchronous +.PHONY: test-verbose-sync +test-verbose-sync: export COCONUT_USE_COLOR=TRUE +test-verbose-sync: clean + python ./coconut/tests --strict --keep-lines --force --verbose --jobs 0 python ./coconut/tests/dest/runner.py python ./coconut/tests/dest/extras.py @@ -159,7 +171,7 @@ test-verbose: clean .PHONY: test-mypy-verbose test-mypy-verbose: export COCONUT_USE_COLOR=TRUE test-mypy-verbose: clean - python ./coconut/tests --strict --force --target sys --verbose --jobs 0 --keep-lines --mypy --follow-imports silent --ignore-missing-imports --allow-redefinition + python ./coconut/tests --strict --force --target sys --verbose --keep-lines --mypy --follow-imports silent --ignore-missing-imports --allow-redefinition python ./coconut/tests/dest/runner.py python ./coconut/tests/dest/extras.py @@ -175,7 +187,7 @@ test-mypy-all: clean .PHONY: test-easter-eggs test-easter-eggs: export COCONUT_USE_COLOR=TRUE test-easter-eggs: clean - python ./coconut/tests --strict --line-numbers --keep-lines --force + python ./coconut/tests --strict --keep-lines --force python ./coconut/tests/dest/runner.py --test-easter-eggs python ./coconut/tests/dest/extras.py @@ -188,7 +200,15 @@ test-pyparsing: test-univ .PHONY: test-minify test-minify: export COCONUT_USE_COLOR=TRUE test-minify: clean - python ./coconut/tests --strict --line-numbers --keep-lines --force --minify + python ./coconut/tests --strict --keep-lines --force --minify + python ./coconut/tests/dest/runner.py + python ./coconut/tests/dest/extras.py + +# same as test-univ but uses --no-wrap +.PHONY: test-no-wrap +test-no-wrap: export COCONUT_USE_COLOR=TRUE +test-no-wrap: clean + python ./coconut/tests --strict --keep-lines --force --no-wrap python ./coconut/tests/dest/runner.py python ./coconut/tests/dest/extras.py @@ -196,21 +216,31 @@ test-minify: clean 
.PHONY: test-watch test-watch: export COCONUT_USE_COLOR=TRUE test-watch: clean - python ./coconut/tests --strict --line-numbers --keep-lines --force - coconut ./coconut/tests/src/cocotest/agnostic ./coconut/tests/dest/cocotest --watch --strict --line-numbers --keep-lines + python ./coconut/tests --strict --keep-lines --force + coconut ./coconut/tests/src/cocotest/agnostic ./coconut/tests/dest/cocotest --watch --strict --keep-lines --stack-size 4096 --recursion-limit 4096 python ./coconut/tests/dest/runner.py python ./coconut/tests/dest/extras.py # mini test that just compiles agnostic tests with fully synchronous output .PHONY: test-mini test-mini: - coconut ./coconut/tests/src/cocotest/agnostic ./coconut/tests/dest/cocotest --force --jobs 0 + coconut ./coconut/tests/src/cocotest/agnostic ./coconut/tests/dest/cocotest --force --jobs 0 --stack-size 4096 --recursion-limit 4096 -.PHONY: debug-comp-crash -debug-comp-crash: export COCONUT_USE_COLOR=TRUE -debug-comp-crash: export COCONUT_PURE_PYTHON=TRUE -debug-comp-crash: - python -X dev -m coconut ./coconut/tests/src/cocotest/agnostic ./coconut/tests/dest/cocotest --strict --line-numbers --keep-lines --force --jobs 0 +# same as test-univ but debugs crashes +.PHONY: test-univ-debug +test-univ-debug: export COCONUT_TEST_DEBUG_PYTHON=TRUE +test-univ-debug: test-univ + +# same as test-mini but debugs crashes +.PHONY: test-mini-debug +test-mini-debug: export COCONUT_USE_COLOR=TRUE +test-mini-debug: + python -X dev -m coconut ./coconut/tests/src/cocotest/agnostic ./coconut/tests/dest/cocotest --strict --keep-lines --force --jobs 0 --stack-size 4096 --recursion-limit 4096 + +# same as test-mini-debug but uses vanilla pyparsing +.PHONY: test-mini-debug-pyparsing +test-mini-debug-pyparsing: export COCONUT_PURE_PYTHON=TRUE +test-mini-debug-pyparsing: test-mini-debug .PHONY: debug-test-crash debug-test-crash: @@ -229,15 +259,21 @@ docs: clean sphinx-build -b html . 
./docs rm -f index.rst +.PHONY: clean-no-tests +clean-no-tests: + rm -rf ./docs ./dist ./build ./bbopt ./pyprover ./pyston ./coconut-prelude index.rst ./.mypy_cache + .PHONY: clean -clean: - rm -rf ./docs ./dist ./build ./coconut/tests/dest ./bbopt ./pyprover ./pyston ./coconut-prelude index.rst ./.mypy_cache +clean: clean-no-tests + rm -rf ./coconut/tests/dest .PHONY: wipe wipe: clean - rm -rf vprof.json profile.log *.egg-info + rm -rf ./coconut/tests/dest vprof.json profile.log *.egg-info -find . -name "__pycache__" -delete -C:/GnuWin32/bin/find.exe . -name "__pycache__" -delete + -find . -name "__coconut_cache__" -delete + -C:/GnuWin32/bin/find.exe . -name "__coconut_cache__" -delete -find . -name "*.pyc" -delete -C:/GnuWin32/bin/find.exe . -name "*.pyc" -delete -python -m coconut --site-uninstall @@ -257,7 +293,7 @@ build: .PHONY: just-upload just-upload: build pip install --upgrade --ignore-installed twine - twine upload dist/* + twine upload dist/* -u __token__ .PHONY: upload upload: wipe dev just-upload @@ -270,15 +306,15 @@ check-reqs: profile-parser: export COCONUT_USE_COLOR=TRUE profile-parser: export COCONUT_PURE_PYTHON=TRUE profile-parser: - coconut ./coconut/tests/src/cocotest/agnostic ./coconut/tests/dest/cocotest --force --profile --verbose --recursion-limit 4096 2>&1 | tee ./profile.log + coconut ./coconut/tests/src/cocotest/agnostic ./coconut/tests/dest/cocotest --force --profile --verbose --stack-size 4096 --recursion-limit 4096 2>&1 | tee ./profile.log .PHONY: profile-time profile-time: - vprof -c h "./coconut ./coconut/tests/src/cocotest/agnostic ./coconut/tests/dest/cocotest --force" --output-file ./vprof.json + vprof -c h "./coconut ./coconut/tests/src/cocotest/agnostic ./coconut/tests/dest/cocotest --force --stack-size 4096 --recursion-limit 4096" --output-file ./vprof.json .PHONY: profile-memory profile-memory: - vprof -c m "./coconut ./coconut/tests/src/cocotest/agnostic ./coconut/tests/dest/cocotest --force" --output-file ./vprof.json + 
vprof -c m "./coconut ./coconut/tests/src/cocotest/agnostic ./coconut/tests/dest/cocotest --force --stack-size 4096 --recursion-limit 4096" --output-file ./vprof.json .PHONY: view-profile view-profile: diff --git a/__coconut__/__init__.pyi b/__coconut__/__init__.pyi index b85237ebc..d4b4ff4a6 100644 --- a/__coconut__/__init__.pyi +++ b/__coconut__/__init__.pyi @@ -107,6 +107,16 @@ if sys.version_info < (3,): py_xrange = xrange class range(_t.Iterable[int]): + """ + range(stop) -> range object + range(start, stop[, step]) -> range object + + Return an object that produces a sequence of integers from start (inclusive) + to stop (exclusive) by step. range(i, j) produces i, i+1, i+2, ..., j-1. + start defaults to 0, and stop is omitted! range(4) produces 0, 1, 2, 3. + These are exactly the valid indices for a list of 4 elements. + When step is given, it specifies the increment (or decrement). + """ def __init__(self, start: _t.Optional[int] = ..., stop: _t.Optional[int] = ..., @@ -133,7 +143,16 @@ else: _coconut_exec = exec if sys.version_info < (3, 7): - def breakpoint(*args: _t.Any, **kwargs: _t.Any) -> _t.Any: ... + def breakpoint(*args: _t.Any, **kwargs: _t.Any) -> _t.Any: + """ + breakpoint(*args, **kws) + + Call sys.breakpointhook(*args, **kws). sys.breakpointhook() must accept + whatever arguments are passed. + + By default, this drops you into the pdb debugger. + """ + ... py_chr = chr @@ -209,11 +228,15 @@ def scan( func: _t.Callable[[_T, _U], _T], iterable: _t.Iterable[_U], initial: _T = ..., -) -> _t.Iterable[_T]: ... +) -> _t.Iterable[_T]: + """Reduce func over iterable, yielding intermediate results, + optionally starting from initial.""" + ... _coconut_scan = scan class MatchError(Exception): + """Pattern-matching error. Has attributes .pattern, .value, and .message.""" pattern: _t.Optional[_t.Text] value: _t.Any def __init__(self, pattern: _t.Optional[_t.Text] = None, value: _t.Any = None) -> None: ... 
@@ -277,9 +300,16 @@ def call( _func: _t.Callable[..., _T], *args: _t.Any, **kwargs: _t.Any, -) -> _T: ... +) -> _T: + """Function application operator function. + + Equivalent to: + def call(f, /, *args, **kwargs) = f(*args, **kwargs). + """ + ... -_coconut_tail_call = of = call +_coconut_tail_call = call +of = _deprecated("use call instead")(call) @_dataclass(frozen=True, slots=True) @@ -287,6 +317,44 @@ class _BaseExpected(_t.Generic[_T], _t.Tuple): result: _t.Optional[_T] error: _t.Optional[BaseException] class Expected(_BaseExpected[_T]): + '''Coconut's Expected built-in is a Coconut data that represents a value + that may or may not be an error, similar to Haskell's Either. + + Effectively equivalent to: + data Expected[T](result: T? = None, error: BaseException? = None): + def __bool__(self) -> bool: + return self.error is None + def __fmap__[U](self, func: T -> U) -> Expected[U]: + return self.__class__(func(self.result)) if self else self + def and_then[U](self, func: T -> Expected[U]) -> Expected[U]: + """Maps a T -> Expected[U] over an Expected[T] to produce an Expected[U]. + Implements a monadic bind. Equivalent to fmap ..> .join().""" + return self |> fmap$(func) |> .join() + def join(self: Expected[Expected[T]]) -> Expected[T]: + """Monadic join. 
Converts Expected[Expected[T]] to Expected[T].""" + if not self: + return self + if not self.result `isinstance` Expected: + raise TypeError("Expected.join() requires an Expected[Expected[_]]") + return self.result + def map_error(self, func: BaseException -> BaseException) -> Expected[T]: + """Maps func over the error if it exists.""" + return self if self else self.__class__(error=func(self.error)) + def or_else[U](self, func: BaseException -> Expected[U]) -> Expected[T | U]: + """Return self if no error, otherwise return the result of evaluating func on the error.""" + return self if self else func(self.error) + def result_or[U](self, default: U) -> T | U: + """Return the result if it exists, otherwise return the default.""" + return self.result if self else default + def result_or_else[U](self, func: BaseException -> U) -> T | U: + """Return the result if it exists, otherwise return the result of evaluating func on the error.""" + return self.result if self else func(self.error) + def unwrap(self) -> T: + """Unwrap the result or raise the error.""" + if not self: + raise self.error + return self.result + ''' __slots__ = () _coconut_is_data = True __match_args__ = ("result", "error") @@ -315,19 +383,39 @@ class Expected(_BaseExpected[_T]): result: _t.Optional[_T] = None, error: _t.Optional[BaseException] = None, ): ... - def __fmap__(self, func: _t.Callable[[_T], _U]) -> Expected[_U]: ... + def __fmap__(self, func: _t.Callable[[_T], _U]) -> Expected[_U]: + """Maps func over the result if it exists. + + __fmap__ should be used directly only when fmap is not available (e.g. when consuming an Expected in vanilla Python). + """ + ... def __iter__(self) -> _t.Iterator[_T | BaseException | None]: ... @_t.overload def __getitem__(self, index: _SupportsIndex) -> _T | BaseException | None: ... @_t.overload def __getitem__(self, index: slice) -> _t.Tuple[_T | BaseException | None, ...]: ... - def and_then(self, func: _t.Callable[[_T], Expected[_U]]) -> Expected[_U]: ... 
- def join(self: Expected[Expected[_T]]) -> Expected[_T]: ... - def map_error(self, func: _t.Callable[[BaseException], BaseException]) -> Expected[_T]: ... - def or_else(self, func: _t.Callable[[BaseException], Expected[_U]]) -> Expected[_T | _U]: ... - def result_or(self, default: _U) -> _T | _U: ... - def result_or_else(self, func: _t.Callable[[BaseException], _U]) -> _T | _U: ... - def unwrap(self) -> _T: ... + def and_then(self, func: _t.Callable[[_T], Expected[_U]]) -> Expected[_U]: + """Maps a T -> Expected[U] over an Expected[T] to produce an Expected[U]. + Implements a monadic bind. Equivalent to fmap ..> .join().""" + ... + def join(self: Expected[Expected[_T]]) -> Expected[_T]: + """Monadic join. Converts Expected[Expected[T]] to Expected[T].""" + ... + def map_error(self, func: _t.Callable[[BaseException], BaseException]) -> Expected[_T]: + """Maps func over the error if it exists.""" + ... + def or_else(self, func: _t.Callable[[BaseException], Expected[_U]]) -> Expected[_T | _U]: + """Return self if no error, otherwise return the result of evaluating func on the error.""" + ... + def result_or(self, default: _U) -> _T | _U: + """Return the result if it exists, otherwise return the default.""" + ... + def result_or_else(self, func: _t.Callable[[BaseException], _U]) -> _T | _U: + """Return the result if it exists, otherwise return the result of evaluating func on the error.""" + ... + def unwrap(self) -> _T: + """Unwrap the result or raise the error.""" + ... _coconut_Expected = Expected @@ -380,7 +468,18 @@ def safe_call( _func: _t.Callable[..., _T], *args: _t.Any, **kwargs: _t.Any, -) -> Expected[_T]: ... +) -> Expected[_T]: + """safe_call is a version of call that catches any Exceptions and + returns an Expected containing either the result or the error. + + Equivalent to: + def safe_call(f, /, *args, **kwargs): + try: + return Expected(f(*args, **kwargs)) + except Exception as err: + return Expected(error=err) + """ + ... 
# based on call above @@ -436,6 +535,7 @@ def _coconut_call_or_coefficient( def recursive_iterator(func: _T_iter_func) -> _T_iter_func: + """Decorator that memoizes a recursive function that returns an iterator (e.g. a recursive generator).""" return func @@ -447,6 +547,8 @@ try: override = _override except ImportError: def override(func: _Tfunc) -> _Tfunc: + """Declare a method in a subclass as an override of a parent class method. + Enforces at runtime that the parent class has such a method to be overwritten.""" return func @@ -487,9 +589,16 @@ def addpattern( base_func: _Callable, *add_funcs: _Callable, allow_any_func: bool=False, -) -> _t.Callable[..., _t.Any]: ... +) -> _t.Callable[..., _t.Any]: + """Decorator to add a new case to a pattern-matching function (where the new case is checked last). + + Pass allow_any_func=True to allow any object as the base_func rather than just pattern-matching functions. + If add_funcs are passed, addpattern(base_func, add_func) is equivalent to addpattern(base_func)(add_func). + """ + ... -_coconut_addpattern = prepattern = addpattern +_coconut_addpattern = addpattern +prepattern = _deprecated("use addpattern instead")(addpattern) def _coconut_mark_as_match(func: _Tfunc) -> _Tfunc: @@ -521,7 +630,14 @@ def _coconut_iter_getitem( def _coconut_iter_getitem( iterable: _t.Iterable[_T], index: slice, - ) -> _t.Iterable[_T]: ... + ) -> _t.Iterable[_T]: + """Iterator slicing works just like sequence slicing, including support for negative indices and slices, and support for `slice` objects in the same way as can be done with normal slicing. + + Coconut's iterator slicing is very similar to Python's `itertools.islice`, but unlike `itertools.islice`, Coconut's iterator slicing supports negative indices, and will preferentially call an object's `__iter_getitem__` (always used if available) or `__getitem__` (only used if the object is a collections.abc.Sequence). 
Coconut's iterator slicing is also optimized to work well with all of Coconut's built-in objects, only computing the elements of each that are actually necessary to extract the desired slice. + + Some code taken from more_itertools under the terms of its MIT license. + """ + ... def _coconut_base_compose( @@ -530,6 +646,46 @@ def _coconut_base_compose( ) -> _t.Callable[[_T], _t.Any]: ... +def and_then( + first_async_func: _t.Callable[_P, _t.Awaitable[_U]], + second_func: _t.Callable[[_U], _V], +) -> _t.Callable[_P, _t.Awaitable[_V]]: + """Compose an async function with a normal function. + + Effectively equivalent to: + def and_then[**T, U, V]( + first_async_func: async (**T) -> U, + second_func: U -> V, + ) -> async (**T) -> V = + async def (*args, **kwargs) -> ( + first_async_func(*args, **kwargs) + |> await + |> second_func + ) + """ + ... + +def and_then_await( + first_async_func: _t.Callable[_P, _t.Awaitable[_U]], + second_async_func: _t.Callable[[_U], _t.Awaitable[_V]], +) -> _t.Callable[_P, _t.Awaitable[_V]]: + """Compose two async functions. + + Effectively equivalent to: + def and_then_await[**T, U, V]( + first_async_func: async (**T) -> U, + second_async_func: async U -> V, + ) -> async (**T) -> V = + async def (*args, **kwargs) -> ( + first_async_func(*args, **kwargs) + |> await + |> second_async_func + |> await + ) + """ + ... + + # all forward/backward/none composition functions MUST be kept in sync: # @_t.overload @@ -585,7 +741,11 @@ def _coconut_forward_compose( _f: _t.Callable[[_T], _U], ) -> _t.Callable[..., _U]: ... @_t.overload -def _coconut_forward_compose(*funcs: _Callable) -> _Callable: ... +def _coconut_forward_compose(*funcs: _Callable) -> _Callable: + """Forward composition operator (..>). + + (..>)(f, g) is effectively equivalent to (*args, **kwargs) -> g(f(*args, **kwargs)).""" + ... @_t.overload def _coconut_back_compose( @@ -598,7 +758,11 @@ def _coconut_back_compose( _g: _t.Callable[..., _T], ) -> _t.Callable[..., _U]: ... 
@_t.overload -def _coconut_back_compose(*funcs: _Callable) -> _Callable: ... +def _coconut_back_compose(*funcs: _Callable) -> _Callable: + """Backward composition operator (<..). + + (<..)(f, g) is effectively equivalent to (*args, **kwargs) -> f(g(*args, **kwargs)).""" + ... @_t.overload @@ -612,7 +776,11 @@ def _coconut_forward_none_compose( _f: _t.Callable[[_T], _U], ) -> _t.Callable[..., _t.Optional[_U]]: ... @_t.overload -def _coconut_forward_none_compose(*funcs: _Callable) -> _Callable: ... +def _coconut_forward_none_compose(*funcs: _Callable) -> _Callable: + """Forward none-aware composition operator (..?>). + + (..?>)(f, g) is effectively equivalent to (*args, **kwargs) -> g?(f(*args, **kwargs)).""" + ... @_t.overload def _coconut_back_none_compose( @@ -625,7 +793,11 @@ def _coconut_back_none_compose( _g: _t.Callable[..., _t.Optional[_T]], ) -> _t.Callable[..., _t.Optional[_U]]: ... @_t.overload -def _coconut_back_none_compose(*funcs: _Callable) -> _Callable: ... +def _coconut_back_none_compose(*funcs: _Callable) -> _Callable: + """Backward none-aware composition operator (<..?). + + (<..?)(f, g) is effectively equivalent to (*args, **kwargs) -> f?(g(*args, **kwargs)).""" + ... @_t.overload @@ -659,7 +831,11 @@ def _coconut_forward_star_compose( _f: _t.Callable[[_T, _U, _V], _W], ) -> _t.Callable[..., _W]: ... @_t.overload -def _coconut_forward_star_compose(*funcs: _Callable) -> _Callable: ... +def _coconut_forward_star_compose(*funcs: _Callable) -> _Callable: + """Forward star composition operator (..*>). + + (..*>)(f, g) is effectively equivalent to (*args, **kwargs) -> g(*f(*args, **kwargs)).""" + ... @_t.overload def _coconut_back_star_compose( @@ -692,7 +868,11 @@ def _coconut_back_star_compose( _g: _t.Callable[..., _t.Tuple[_T, _U, _V]], ) -> _t.Callable[..., _W]: ... @_t.overload -def _coconut_back_star_compose(*funcs: _Callable) -> _Callable: ... 
+def _coconut_back_star_compose(*funcs: _Callable) -> _Callable: + """Backward star composition operator (<*..). + + (<*..)(f, g) is effectively equivalent to (*args, **kwargs) -> f(*g(*args, **kwargs)).""" + ... @_t.overload @@ -726,7 +906,11 @@ def _coconut_forward_none_star_compose( _f: _t.Callable[[_T, _U, _V], _W], ) -> _t.Callable[..., _t.Optional[_W]]: ... @_t.overload -def _coconut_forward_none_star_compose(*funcs: _Callable) -> _Callable: ... +def _coconut_forward_none_star_compose(*funcs: _Callable) -> _Callable: + """Forward none-aware star composition operator (..?*>). + + (..?*>)(f, g) is effectively equivalent to (*args, **kwargs) -> g?(*f(*args, **kwargs)).""" + ... @_t.overload def _coconut_back_none_star_compose( @@ -759,7 +943,11 @@ def _coconut_back_none_star_compose( _g: _t.Callable[..., _t.Optional[_t.Tuple[_T, _U, _V]]], ) -> _t.Callable[..., _t.Optional[_W]]: ... @_t.overload -def _coconut_back_none_star_compose(*funcs: _Callable) -> _Callable: ... +def _coconut_back_none_star_compose(*funcs: _Callable) -> _Callable: + """Backward none-aware star composition operator (<*?..). + + (<*?..)(f, g) is effectively equivalent to (*args, **kwargs) -> f?(*g(*args, **kwargs)).""" + ... @_t.overload @@ -773,7 +961,11 @@ def _coconut_forward_dubstar_compose( # _f: _t.Callable[..., _T], # ) -> _t.Callable[..., _T]: ... @_t.overload -def _coconut_forward_dubstar_compose(*funcs: _Callable) -> _Callable: ... +def _coconut_forward_dubstar_compose(*funcs: _Callable) -> _Callable: + """Forward double star composition operator (..**>). + + (..**>)(f, g) is effectively equivalent to (*args, **kwargs) -> g(**f(*args, **kwargs)).""" + ... @_t.overload def _coconut_back_dubstar_compose( @@ -786,7 +978,11 @@ def _coconut_back_dubstar_compose( # _g: _t.Callable[..., _t.Dict[_t.Text, _t.Any]], # ) -> _t.Callable[..., _T]: ... @_t.overload -def _coconut_back_dubstar_compose(*funcs: _Callable) -> _Callable: ... 
+def _coconut_back_dubstar_compose(*funcs: _Callable) -> _Callable: + """Backward double star composition operator (<**..). + + (<**..)(f, g) is effectively equivalent to (*args, **kwargs) -> f(**g(*args, **kwargs)).""" + ... @_t.overload @@ -800,7 +996,11 @@ def _coconut_forward_none_dubstar_compose( # _f: _t.Callable[..., _T], # ) -> _t.Callable[..., _t.Optional[_T]]: ... @_t.overload -def _coconut_forward_none_dubstar_compose(*funcs: _Callable) -> _Callable: ... +def _coconut_forward_none_dubstar_compose(*funcs: _Callable) -> _Callable: + """Forward none-aware double star composition operator (..?**>). + + (..?**>)(f, g) is effectively equivalent to (*args, **kwargs) -> g?(**f(*args, **kwargs)).""" + ... @_t.overload def _coconut_back_none_dubstar_compose( @@ -813,84 +1013,123 @@ def _coconut_back_none_dubstar_compose( # _g: _t.Callable[..., _t.Optional[_t.Dict[_t.Text, _t.Any]]], # ) -> _t.Callable[..., _t.Optional[_T]]: ... @_t.overload -def _coconut_back_none_dubstar_compose(*funcs: _Callable) -> _Callable: ... +def _coconut_back_none_dubstar_compose(*funcs: _Callable) -> _Callable: + """Backward none-aware double star composition operator (<**?..). + + (<**?..)(f, g) is effectively equivalent to (*args, **kwargs) -> f?(**g(*args, **kwargs)).""" + ... def _coconut_pipe( x: _T, f: _t.Callable[[_T], _U], -) -> _U: ... +) -> _U: + """Pipe operator (|>). Equivalent to (x, f) -> f(x).""" + ... def _coconut_star_pipe( xs: _Iterable, f: _t.Callable[..., _T], -) -> _T: ... +) -> _T: + """Star pipe operator (*|>). Equivalent to (xs, f) -> f(*xs).""" + ... def _coconut_dubstar_pipe( kws: _t.Dict[_t.Text, _t.Any], f: _t.Callable[..., _T], -) -> _T: ... +) -> _T: + """Double star pipe operator (**|>). Equivalent to (kws, f) -> f(**kws).""" + ... def _coconut_back_pipe( f: _t.Callable[[_T], _U], x: _T, -) -> _U: ... +) -> _U: + """Backward pipe operator (<|). Equivalent to (f, x) -> f(x).""" + ... 
def _coconut_back_star_pipe( f: _t.Callable[..., _T], xs: _Iterable, -) -> _T: ... +) -> _T: + """Backward star pipe operator (<*|). Equivalent to (f, xs) -> f(*xs).""" + ... def _coconut_back_dubstar_pipe( f: _t.Callable[..., _T], kws: _t.Dict[_t.Text, _t.Any], -) -> _T: ... +) -> _T: + """Backward double star pipe operator (<**|). Equivalent to (f, kws) -> f(**kws).""" + ... def _coconut_none_pipe( x: _t.Optional[_T], f: _t.Callable[[_T], _U], -) -> _t.Optional[_U]: ... +) -> _t.Optional[_U]: + """Nullable pipe operator (|?>). Equivalent to (x, f) -> f(x) if x is not None else None.""" + ... def _coconut_none_star_pipe( xs: _t.Optional[_Iterable], f: _t.Callable[..., _T], -) -> _t.Optional[_T]: ... +) -> _t.Optional[_T]: + """Nullable star pipe operator (|?*>). Equivalent to (xs, f) -> f(*xs) if xs is not None else None.""" + ... def _coconut_none_dubstar_pipe( kws: _t.Optional[_t.Dict[_t.Text, _t.Any]], f: _t.Callable[..., _T], -) -> _t.Optional[_T]: ... +) -> _t.Optional[_T]: + """Nullable double star pipe operator (|?**>). Equivalent to (kws, f) -> f(**kws) if kws is not None else None.""" + ... def _coconut_back_none_pipe( f: _t.Callable[[_T], _U], x: _t.Optional[_T], -) -> _t.Optional[_U]: ... +) -> _t.Optional[_U]: + """Nullable backward pipe operator (<?|). Equivalent to (f, x) -> f(x) if x is not None else None.""" + ... def _coconut_back_none_star_pipe( f: _t.Callable[..., _T], xs: _t.Optional[_Iterable], -) -> _t.Optional[_T]: ... +) -> _t.Optional[_T]: + """Nullable backward star pipe operator (<*?|). Equivalent to (f, xs) -> f(*xs) if xs is not None else None.""" + ... def _coconut_back_none_dubstar_pipe( f: _t.Callable[..., _T], kws: _t.Optional[_t.Dict[_t.Text, _t.Any]], -) -> _t.Optional[_T]: ... +) -> _t.Optional[_T]: + """Nullable backward double star pipe operator (<**?|). Equivalent to (f, kws) -> f(**kws) if kws is not None else None.""" + ... def _coconut_assert(cond: _t.Any, msg: _t.Optional[_t.Text] = None) -> None: + """Assert operator (assert).
Asserts condition with optional message.""" assert cond, msg -def _coconut_raise(exc: _t.Optional[Exception] = None, from_exc: _t.Optional[Exception] = None) -> None: ... +def _coconut_raise(exc: _t.Optional[Exception] = None, from_exc: _t.Optional[Exception] = None) -> None: + """Raise operator (raise). Raises exception with optional cause.""" + ... @_t.overload def _coconut_bool_and(a: _t.Literal[True], b: _T) -> _T: ... @_t.overload -def _coconut_bool_and(a: _T, b: _U) -> _t.Union[_T, _U]: ... +def _coconut_bool_and(a: _T, b: _U) -> _t.Union[_T, _U]: + """Boolean and operator (and). Equivalent to (a, b) -> a and b.""" + ... @_t.overload def _coconut_bool_or(a: None, b: _T) -> _T: ... @_t.overload def _coconut_bool_or(a: _t.Literal[False], b: _T) -> _T: ... @_t.overload -def _coconut_bool_or(a: _T, b: _U) -> _t.Union[_T, _U]: ... +def _coconut_bool_or(a: _T, b: _U) -> _t.Union[_T, _U]: + """Boolean or operator (or). Equivalent to (a, b) -> a or b.""" + ... -def _coconut_in(a: _T, b: _t.Sequence[_T]) -> bool: ... -_coconut_not_in = _coconut_in +def _coconut_in(a: _T, b: _t.Sequence[_T]) -> bool: + """Containment operator (in). Equivalent to (a, b) -> a in b.""" + ... +def _coconut_not_in(a: _T, b: _t.Sequence[_T]) -> bool: + """Negative containment operator (not in). Equivalent to (a, b) -> a not in b.""" + ... @_t.overload @@ -898,7 +1137,9 @@ def _coconut_none_coalesce(a: _T, b: None) -> _T: ... @_t.overload def _coconut_none_coalesce(a: None, b: _T) -> _T: ... @_t.overload -def _coconut_none_coalesce(a: _T, b: _U) -> _t.Union[_T, _U]: ... +def _coconut_none_coalesce(a: _T, b: _U) -> _t.Union[_T, _U]: + """None coalescing operator (??). Equivalent to (a, b) -> a if a is not None else b.""" + ... @_t.overload @@ -908,7 +1149,9 @@ def _coconut_minus(a: int, b: float) -> float: ... @_t.overload def _coconut_minus(a: float, b: int) -> float: ... @_t.overload -def _coconut_minus(a: _T, _b: _T) -> _T: ... 
+def _coconut_minus(a: _T, _b: _T) -> _T: + """Minus operator (-). Effectively equivalent to (a, b=None) -> a - b if b is not None else -a.""" + ... @_t.overload @@ -920,26 +1163,45 @@ def _coconut_comma_op(_x: _T, _y: _U, _z: _V) -> _t.Tuple[_T, _U, _V]: ... @_t.overload def _coconut_comma_op(*args: _T) -> _t.Tuple[_T, ...]: ... @_t.overload -def _coconut_comma_op(*args: _t.Any) -> _Tuple: ... +def _coconut_comma_op(*args: _t.Any) -> _Tuple: + """Comma operator (,). Equivalent to (*args) -> args.""" + ... if sys.version_info < (3, 5): @_t.overload def _coconut_matmul(a: _T, b: _T) -> _T: ... @_t.overload - def _coconut_matmul(a: _t.Any, b: _t.Any) -> _t.Any: ... + def _coconut_matmul(a: _t.Any, b: _t.Any) -> _t.Any: + """Matrix multiplication operator (@). Implements operator.matmul on any Python version.""" + ... else: _coconut_matmul = _coconut.operator.matmul -def reiterable(iterable: _t.Iterable[_T]) -> _t.Iterable[_T]: ... +def reiterable(iterable: _t.Iterable[_T]) -> _t.Iterable[_T]: + """Allow an iterator to be iterated over multiple times with the same results.""" + ... _coconut_reiterable = reiterable -def multi_enumerate(iterable: _Iterable) -> _t.Iterable[_t.Tuple[_t.Tuple[int, ...], _t.Any]]: ... +def multi_enumerate(iterable: _Iterable) -> _t.Iterable[_t.Tuple[_t.Tuple[int, ...], _t.Any]]: + """Enumerate an iterable of iterables. Works like enumerate, but indexes + through inner iterables and produces a tuple index representing the index + in each inner iterable. Supports indexing. + + For numpy arrays, effectively equivalent to: + it = np.nditer(iterable, flags=["multi_index", "refs_ok"]) + for x in it: + yield it.multi_index, x + + Also supports len for numpy arrays. + """ + ... class _count(_t.Iterable[_T]): + """count(start, step) returns an infinite iterator starting at start and increasing by step.""" @_t.overload def __new__(cls) -> _count[int]: ... 
@_t.overload @@ -956,14 +1218,21 @@ class _count(_t.Iterable[_T]): def __getitem__(self, index: slice) -> _t.Iterable[_T]: ... def __hash__(self) -> int: ... - def count(self, elem: _T) -> int | float: ... - def index(self, elem: _T) -> int: ... + def count(self, elem: _T) -> int | float: + """Count the number of times elem appears in the count.""" + ... + def index(self, elem: _T) -> int: + """Find the index of elem in the count.""" + ... def __fmap__(self, func: _t.Callable[[_T], _U]) -> _count[_U]: ... def __copy__(self) -> _count[_T]: ... count = _coconut_count = _count # necessary since we define .count() class cycle(_t.Iterable[_T]): + """cycle is a modified version of itertools.cycle with a times parameter + that controls the number of times to cycle through the given iterable + before stopping.""" def __new__( cls, iterable: _t.Iterable[_T], @@ -978,8 +1247,12 @@ class cycle(_t.Iterable[_T]): def __getitem__(self, index: slice) -> _t.Iterable[_T]: ... def __hash__(self) -> int: ... - def count(self, elem: _T) -> int | float: ... - def index(self, elem: _T) -> int: ... + def count(self, elem: _T) -> int | float: + """Count the number of times elem appears in the cycle.""" + ... + def index(self, elem: _T) -> int: + """Find the index of elem in the cycle.""" + ... def __fmap__(self, func: _t.Callable[[_T], _U]) -> _t.Iterable[_U]: ... def __copy__(self) -> cycle[_T]: ... def __len__(self) -> int: ... @@ -987,6 +1260,10 @@ _coconut_cycle = cycle class groupsof(_t.Generic[_T]): + """groupsof(n, iterable) splits iterable into groups of size n. + + If the length of the iterable is not divisible by n, the last group will be of size < n. + """ def __new__( cls, n: _SupportsIndex, @@ -1001,6 +1278,11 @@ _coconut_groupsof = groupsof class windowsof(_t.Generic[_T]): + """Produces an iterable that effectively mimics a sliding window over iterable of the given size. + The step determines the spacing between windowsof. 
+ + If the size is larger than the iterable, windowsof will produce an empty iterable. + If that is not the desired behavior, fillvalue can be passed and will be used in place of missing values.""" def __new__( cls, size: _SupportsIndex, @@ -1017,6 +1299,8 @@ _coconut_windowsof = windowsof class flatten(_t.Iterable[_T]): + """Flatten an iterable of iterables into a single iterable. + Only flattens the top level of the iterable.""" def __new__( cls, iterable: _t.Iterable[_t.Iterable[_T]], @@ -1034,22 +1318,31 @@ class flatten(_t.Iterable[_T]): @_t.overload def __getitem__(self, index: slice) -> _t.Iterable[_T]: ... - def count(self, elem: _T) -> int: ... - def index(self, elem: _T) -> int: ... + def count(self, elem: _T) -> int: + """Count the number of times elem appears in the flattened iterable.""" + ... + def index(self, elem: _T) -> int: + """Find the index of elem in the flattened iterable.""" + ... def __fmap__(self, func: _t.Callable[[_T], _U]) -> flatten[_U]: ... _coconut_flatten = flatten -def makedata(data_type: _t.Type[_T], *args: _t.Any) -> _T: ... +def makedata(data_type: _t.Type[_T], *args: _t.Any) -> _T: + """Construct an object of the given data_type containing the given arguments.""" + ... @_deprecated("use makedata instead") def datamaker(data_type: _t.Type[_T]) -> _t.Callable[..., _T]: + """DEPRECATED: use makedata instead.""" return _coconut.functools.partial(makedata, data_type) def consume( iterable: _t.Iterable[_T], keep_last: _t.Optional[int] = ..., - ) -> _t.Sequence[_T]: ... + ) -> _t.Sequence[_T]: + """consume(iterable, keep_last) fully exhausts iterable and returns the last keep_last elements.""" + ... class _FMappable(_t.Protocol[_Tfunc_contra, _Tco]): @@ -1077,7 +1370,20 @@ def fmap(func: _t.Callable[[_T], _U], obj: _t.AsyncIterable[_T]) -> _t.AsyncIter @_t.overload def fmap(func: _t.Callable[[_T, _U], _t.Tuple[_V, _W]], obj: _t.Dict[_T, _U], starmap_over_mappings: _t.Literal[True]) -> _t.Dict[_V, _W]: ... 
@_t.overload -def fmap(func: _t.Callable[[_T, _U], _t.Tuple[_V, _W]], obj: _t.Mapping[_T, _U], starmap_over_mappings: _t.Literal[True]) -> _t.Mapping[_V, _W]: ... +def fmap(func: _t.Callable[[_T, _U], _t.Tuple[_V, _W]], obj: _t.Mapping[_T, _U], starmap_over_mappings: _t.Literal[True]) -> _t.Mapping[_V, _W]: + """fmap(func, obj) creates a copy of obj with func applied to its contents. + + Supports: + * Coconut data types + * `str`, `dict`, `list`, `tuple`, `set`, `frozenset` + * `dict` (maps over .items()) + * asynchronous iterables + * numpy arrays (uses np.vectorize) + * pandas objects (uses .apply) + + Override by defining obj.__fmap__(func). + """ + ... def _coconut_handle_cls_kwargs(**kwargs: _t.Dict[_t.Text, _t.Any]) -> _t.Callable[[_T], _T]: ... @@ -1110,14 +1416,22 @@ def flip(func: _t.Callable[[_T, _U, _V], _W], nargs: _t.Literal[3]) -> _t.Callab @_t.overload def flip(func: _t.Callable[[_T, _U, _V], _W], nargs: _t.Literal[2]) -> _t.Callable[[_U, _T, _V], _W]: ... @_t.overload -def flip(func: _t.Callable[..., _T], nargs: _t.Optional[_SupportsIndex]) -> _t.Callable[..., _T]: ... +def flip(func: _t.Callable[..., _T], nargs: _t.Optional[_SupportsIndex]) -> _t.Callable[..., _T]: + """Given a function, return a new function with inverse argument order. + If nargs is passed, only the first nargs arguments are reversed.""" + ... -def ident(x: _T, *, side_effect: _t.Optional[_t.Callable[[_T], _t.Any]] = None) -> _T: ... +def ident(x: _T, *, side_effect: _t.Optional[_t.Callable[[_T], _t.Any]] = None) -> _T: + """The identity function. Generally equivalent to x -> x. Useful in point-free programming. + Accepts one keyword-only argument, side_effect, which specifies a function to call on the argument before it is returned.""" + ... _coconut_ident = ident -def const(value: _T) -> _t.Callable[..., _T]: ... +def const(value: _T) -> _t.Callable[..., _T]: + """Create a function that, whatever its arguments, just returns the given value.""" + ... 
# lift(_T -> _W) @@ -1245,11 +1559,28 @@ def lift(func: _t.Callable[[_T, _U], _W]) -> _coconut_lifted_2[_T, _U, _W]: ... @_t.overload def lift(func: _t.Callable[[_T, _U, _V], _W]) -> _coconut_lifted_3[_T, _U, _V, _W]: ... @_t.overload -def lift(func: _t.Callable[..., _W]) -> _t.Callable[..., _t.Callable[..., _W]]: ... +def lift(func: _t.Callable[..., _W]) -> _t.Callable[..., _t.Callable[..., _W]]: + """Lifts a function up so that all of its arguments are functions. + + For a binary function f(x, y) and two unary functions g(z) and h(z), lift works as the S' combinator: + lift(f)(g, h)(z) == f(g(z), h(z)) + + In general, lift is equivalent to: + def lift(f) = ((*func_args, **func_kwargs) -> (*args, **kwargs) -> + f(*(g(*args, **kwargs) for g in func_args), **{lbrace}k: h(*args, **kwargs) for k, h in func_kwargs.items(){rbrace})) + + lift also supports a shortcut form such that lift(f, *func_args, **func_kwargs) is equivalent to lift(f)(*func_args, **func_kwargs). + """ + ... _coconut_lift = lift -def all_equal(iterable: _Iterable) -> bool: ... +def all_equal(iterable: _Iterable) -> bool: + """For a given iterable, check whether all elements in that iterable are equal to each other. + + Supports numpy arrays. Assumes transitivity and 'x != y' being equivalent to 'not (x == y)'. + """ + ... @_t.overload @@ -1262,13 +1593,23 @@ def collectby( key_func: _t.Callable[[_T], _U], iterable: _t.Iterable[_T], reduce_func: _t.Callable[[_T, _T], _V], -) -> _t.DefaultDict[_U, _V]: ... +) -> _t.DefaultDict[_U, _V]: + """Collect the items in iterable into a dictionary of lists keyed by key_func(item). + + If value_func is passed, collect value_func(item) into each list instead of item. + + If reduce_func is passed, instead of collecting the items into lists, reduce over + the items of each key with reduce_func, effectively implementing a MapReduce operation. + """ + ... @_t.overload def _namedtuple_of(**kwargs: _t.Dict[_t.Text, _T]) -> _t.Tuple[_T, ...]: ...
@_t.overload -def _namedtuple_of(**kwargs: _t.Dict[_t.Text, _t.Any]) -> _Tuple: ... +def _namedtuple_of(**kwargs: _t.Dict[_t.Text, _t.Any]) -> _Tuple: + """Construct an anonymous namedtuple of the given keyword arguments.""" + ... @_t.overload @@ -1356,59 +1697,145 @@ def _coconut_multi_dim_arr(arrs: _Tuple, dim: int) -> _Sequence: ... class _coconut_SupportsAdd(_t.Protocol, _t.Generic[_Tco, _Ucontra, _Vco]): + """Coconut (+) Protocol. Equivalent to: + + class SupportsAdd[T, U, V](Protocol): + def __add__(self: T, other: U) -> V: + raise NotImplementedError(...) + """ def __add__(self: _Tco, other: _Ucontra) -> _Vco: raise NotImplementedError class _coconut_SupportsMinus(_t.Protocol, _t.Generic[_Tco, _Ucontra, _Vco]): + """Coconut (-) Protocol. Equivalent to: + + class SupportsMinus[T, U, V](Protocol): + def __sub__(self: T, other: U) -> V: + raise NotImplementedError + def __neg__(self: T) -> V: + raise NotImplementedError + """ def __sub__(self: _Tco, other: _Ucontra) -> _Vco: raise NotImplementedError def __neg__(self: _Tco) -> _Vco: raise NotImplementedError class _coconut_SupportsMul(_t.Protocol, _t.Generic[_Tco, _Ucontra, _Vco]): + """Coconut (*) Protocol. Equivalent to: + + class SupportsMul[T, U, V](Protocol): + def __mul__(self: T, other: U) -> V: + raise NotImplementedError(...) + """ def __mul__(self: _Tco, other: _Ucontra) -> _Vco: raise NotImplementedError class _coconut_SupportsPow(_t.Protocol, _t.Generic[_Tco, _Ucontra, _Vco]): + """Coconut (**) Protocol. Equivalent to: + + class SupportsPow[T, U, V](Protocol): + def __pow__(self: T, other: U) -> V: + raise NotImplementedError(...) + """ def __pow__(self: _Tco, other: _Ucontra) -> _Vco: raise NotImplementedError class _coconut_SupportsTruediv(_t.Protocol, _t.Generic[_Tco, _Ucontra, _Vco]): + """Coconut (/) Protocol. Equivalent to: + + class SupportsTruediv[T, U, V](Protocol): + def __truediv__(self: T, other: U) -> V: + raise NotImplementedError(...) 
+ """ def __truediv__(self: _Tco, other: _Ucontra) -> _Vco: raise NotImplementedError class _coconut_SupportsFloordiv(_t.Protocol, _t.Generic[_Tco, _Ucontra, _Vco]): + """Coconut (//) Protocol. Equivalent to: + + class SupportsFloordiv[T, U, V](Protocol): + def __floordiv__(self: T, other: U) -> V: + raise NotImplementedError(...) + """ def __floordiv__(self: _Tco, other: _Ucontra) -> _Vco: raise NotImplementedError class _coconut_SupportsMod(_t.Protocol, _t.Generic[_Tco, _Ucontra, _Vco]): + """Coconut (%) Protocol. Equivalent to: + + class SupportsMod[T, U, V](Protocol): + def __mod__(self: T, other: U) -> V: + raise NotImplementedError(...) + """ def __mod__(self: _Tco, other: _Ucontra) -> _Vco: raise NotImplementedError class _coconut_SupportsAnd(_t.Protocol, _t.Generic[_Tco, _Ucontra, _Vco]): + """Coconut (&) Protocol. Equivalent to: + + class SupportsAnd[T, U, V](Protocol): + def __and__(self: T, other: U) -> V: + raise NotImplementedError(...) + """ def __and__(self: _Tco, other: _Ucontra) -> _Vco: raise NotImplementedError class _coconut_SupportsXor(_t.Protocol, _t.Generic[_Tco, _Ucontra, _Vco]): + """Coconut (^) Protocol. Equivalent to: + + class SupportsXor[T, U, V](Protocol): + def __xor__(self: T, other: U) -> V: + raise NotImplementedError(...) + """ def __xor__(self: _Tco, other: _Ucontra) -> _Vco: raise NotImplementedError class _coconut_SupportsOr(_t.Protocol, _t.Generic[_Tco, _Ucontra, _Vco]): + """Coconut (|) Protocol. Equivalent to: + + class SupportsOr[T, U, V](Protocol): + def __or__(self: T, other: U) -> V: + raise NotImplementedError(...) + """ def __or__(self: _Tco, other: _Ucontra) -> _Vco: raise NotImplementedError class _coconut_SupportsLshift(_t.Protocol, _t.Generic[_Tco, _Ucontra, _Vco]): + """Coconut (<<) Protocol. Equivalent to: + + class SupportsLshift[T, U, V](Protocol): + def __lshift__(self: T, other: U) -> V: + raise NotImplementedError(...) 
+ """ def __lshift__(self: _Tco, other: _Ucontra) -> _Vco: raise NotImplementedError class _coconut_SupportsRshift(_t.Protocol, _t.Generic[_Tco, _Ucontra, _Vco]): + """Coconut (>>) Protocol. Equivalent to: + + class SupportsRshift[T, U, V](Protocol): + def __rshift__(self: T, other: U) -> V: + raise NotImplementedError(...) + """ def __rshift__(self: _Tco, other: _Ucontra) -> _Vco: raise NotImplementedError class _coconut_SupportsMatmul(_t.Protocol, _t.Generic[_Tco, _Ucontra, _Vco]): + """Coconut (@) Protocol. Equivalent to: + + class SupportsMatmul[T, U, V](Protocol): + def __matmul__(self: T, other: U) -> V: + raise NotImplementedError(...) + """ def __matmul__(self: _Tco, other: _Ucontra) -> _Vco: raise NotImplementedError class _coconut_SupportsInv(_t.Protocol, _t.Generic[_Tco, _Vco]): + """Coconut (~) Protocol. Equivalent to: + + class SupportsInv[T, V](Protocol): + def __invert__(self: T) -> V: + raise NotImplementedError(...) + """ def __invert__(self: _Tco) -> _Vco: raise NotImplementedError diff --git a/_coconut/__init__.pyi b/_coconut/__init__.pyi index 38433b7ac..c00dfdcb1 100644 --- a/_coconut/__init__.pyi +++ b/_coconut/__init__.pyi @@ -90,6 +90,7 @@ multiprocessing_dummy = _multiprocessing_dummy copyreg = _copyreg asyncio = _asyncio +asyncio_Return = StopIteration async_generator = _async_generator pickle = _pickle if sys.version_info >= (2, 7): diff --git a/coconut/_pyparsing.py b/coconut/_pyparsing.py index d975a6d14..936b8b6a7 100644 --- a/coconut/_pyparsing.py +++ b/coconut/_pyparsing.py @@ -41,6 +41,10 @@ use_left_recursion_if_available, get_bool_env_var, use_computation_graph_env_var, + use_incremental_if_available, + incremental_cache_size, + never_clear_incremental_cache, + warn_on_multiline_regex, ) from coconut.util import get_clock_time # NOQA from coconut.util import ( @@ -80,7 +84,7 @@ # ----------------------------------------------------------------------------------------------------------------------- -# VERSION CHECKING: +# 
VERSIONING: # ----------------------------------------------------------------------------------------------------------------------- min_ver = min(min_versions["pyparsing"], min_versions["cPyparsing"][:3]) # inclusive @@ -103,26 +107,82 @@ + " (run '{python} -m pip install {package}<{max_ver}' to fix)".format(python=sys.executable, package=PYPARSING_PACKAGE, max_ver=max_ver_str), ) +MODERN_PYPARSING = cur_ver >= (3,) + +if MODERN_PYPARSING: + warn( + "This version of Coconut is not built for pyparsing v3; some syntax features WILL NOT WORK" + + " (run either '{python} -m pip install cPyparsing<{max_ver}' or '{python} -m pip install pyparsing<{max_ver}' to fix)".format(python=sys.executable, max_ver=max_ver_str), + ) + + +# ----------------------------------------------------------------------------------------------------------------------- +# OVERRIDES: +# ----------------------------------------------------------------------------------------------------------------------- + +if PYPARSING_PACKAGE != "cPyparsing": + if not MODERN_PYPARSING: + HIT, MISS = 0, 1 + + def _parseCache(self, instring, loc, doActions=True, callPreParse=True): + # [CPYPARSING] include packrat_context + lookup = (self, instring, loc, callPreParse, doActions, tuple(self.packrat_context)) + with ParserElement.packrat_cache_lock: + cache = ParserElement.packrat_cache + value = cache.get(lookup) + if value is cache.not_in_cache: + ParserElement.packrat_cache_stats[MISS] += 1 + try: + value = self._parseNoCache(instring, loc, doActions, callPreParse) + except ParseBaseException as pe: + # cache a copy of the exception, without the traceback + cache.set(lookup, pe.__class__(*pe.args)) + raise + else: + cache.set(lookup, (value[0], value[1].copy())) + return value + else: + ParserElement.packrat_cache_stats[HIT] += 1 + if isinstance(value, Exception): + raise value + return value[0], value[1].copy() + ParserElement.packrat_context = [] + ParserElement._parseCache = _parseCache + +elif not 
hasattr(ParserElement, "packrat_context"): + raise ImportError( + "This version of Coconut requires cPyparsing>=" + ver_tuple_to_str(min_versions["cPyparsing"]) + + "; got cPyparsing==" + __version__ + + " (run '{python} -m pip install --upgrade cPyparsing' to fix)".format(python=sys.executable), + ) + +if hasattr(ParserElement, "enableIncremental"): + SUPPORTS_INCREMENTAL = sys.version_info >= (3, 8) # avoids stack overflows on py<=37 +else: + SUPPORTS_INCREMENTAL = False + ParserElement._incrementalEnabled = False + ParserElement._incrementalWithResets = False + + def enableIncremental(*args, **kwargs): + """Dummy version of enableIncremental that just raises an error.""" + raise ImportError( + "incremental parsing only supported on cPyparsing>=" + + ver_tuple_to_str(min_versions["cPyparsing"]) + + " (run '{python} -m pip install --upgrade cPyparsing' to fix)".format(python=sys.executable) + ) + # ----------------------------------------------------------------------------------------------------------------------- # SETUP: # ----------------------------------------------------------------------------------------------------------------------- -if cur_ver >= (3,): - MODERN_PYPARSING = True +if MODERN_PYPARSING: _trim_arity = _pyparsing.core._trim_arity _ParseResultsWithOffset = _pyparsing.core._ParseResultsWithOffset else: - MODERN_PYPARSING = False _trim_arity = _pyparsing._trim_arity _ParseResultsWithOffset = _pyparsing._ParseResultsWithOffset -if MODERN_PYPARSING: - warn( - "This version of Coconut is not built for pyparsing v3; some syntax features WILL NOT WORK" - + " (run either '{python} -m pip install cPyparsing<{max_ver}' or '{python} -m pip install pyparsing<{max_ver}' to fix)".format(python=sys.executable, max_ver=max_ver_str), - ) - USE_COMPUTATION_GRAPH = get_bool_env_var( use_computation_graph_env_var, default=( @@ -137,9 +197,12 @@ else: _pyparsing._enable_all_warnings() _pyparsing.__diag__.warn_name_set_on_empty_Forward = False + 
_pyparsing.__diag__.warn_on_incremental_multiline_regex = warn_on_multiline_regex if MODERN_PYPARSING and use_left_recursion_if_available: ParserElement.enable_left_recursion() +elif SUPPORTS_INCREMENTAL and use_incremental_if_available: + ParserElement.enableIncremental(incremental_cache_size, still_reset_cache=not never_clear_incremental_cache) elif use_packrat_parser: ParserElement.enablePackrat(packrat_cache_size) @@ -149,42 +212,18 @@ # ----------------------------------------------------------------------------------------------------------------------- -# PACKRAT CONTEXT: +# MISSING OBJECTS: # ----------------------------------------------------------------------------------------------------------------------- -if PYPARSING_PACKAGE == "cPyparsing": - if not hasattr(ParserElement, "packrat_context"): - raise ImportError( - "This version of Coconut requires cPyparsing>=" + ver_tuple_to_str(min_versions["cPyparsing"]) - + "; got cPyparsing==" + __version__ - + " (run '{python} -m pip install --upgrade cPyparsing' to fix)".format(python=sys.executable), - ) -elif not MODERN_PYPARSING: - def _parseCache(self, instring, loc, doActions=True, callPreParse=True): - HIT, MISS = 0, 1 - # [CPYPARSING] include packrat_context - lookup = (self, instring, loc, callPreParse, doActions, tuple(self.packrat_context)) - with ParserElement.packrat_cache_lock: - cache = ParserElement.packrat_cache - value = cache.get(lookup) - if value is cache.not_in_cache: - ParserElement.packrat_cache_stats[MISS] += 1 - try: - value = self._parseNoCache(instring, loc, doActions, callPreParse) - except ParseBaseException as pe: - # cache a copy of the exception, without the traceback - cache.set(lookup, pe.__class__(*pe.args)) - raise - else: - cache.set(lookup, (value[0], value[1].copy())) - return value - else: - ParserElement.packrat_cache_stats[HIT] += 1 - if isinstance(value, Exception): - raise value - return value[0], value[1].copy() - ParserElement.packrat_context = [] - 
ParserElement._parseCache = _parseCache +if not hasattr(_pyparsing, "python_quoted_string"): + import re as _re + python_quoted_string = _pyparsing.Combine( + (_pyparsing.Regex(r'"""(?:[^"\\]|""(?!")|"(?!"")|\\.)*', flags=_re.MULTILINE) + '"""').setName("multiline double quoted string") + ^ (_pyparsing.Regex(r"'''(?:[^'\\]|''(?!')|'(?!'')|\\.)*", flags=_re.MULTILINE) + "'''").setName("multiline single quoted string") + ^ (_pyparsing.Regex(r'"(?:[^"\n\r\\]|(?:\\")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*') + '"').setName("double quoted string") + ^ (_pyparsing.Regex(r"'(?:[^'\n\r\\]|(?:\\')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*") + "'").setName("single quoted string") + ).setName("Python quoted string") + _pyparsing.python_quoted_string = python_quoted_string # ----------------------------------------------------------------------------------------------------------------------- @@ -335,6 +374,7 @@ def collect_timing_info(): "_ErrorStop", "_UnboundedCache", "enablePackrat", + "enableIncremental", "inlineLiteralsUsing", "setDefaultWhitespaceChars", "setDefaultKeywordChars", diff --git a/coconut/api.py b/coconut/api.py index 0e1d42d6e..c8a8bb995 100644 --- a/coconut/api.py +++ b/coconut/api.py @@ -22,21 +22,27 @@ import sys import os.path import codecs +from functools import partial try: from encodings import utf_8 except ImportError: utf_8 = None +from coconut.root import _coconut_exec from coconut.integrations import embed from coconut.exceptions import CoconutException from coconut.command import Command from coconut.command.cli import cli_version +from coconut.command.util import proc_run_args from coconut.compiler import Compiler from coconut.constants import ( + PY34, version_tag, code_exts, - coconut_import_hook_args, coconut_kernel_kwargs, + default_use_cache_dir, + coconut_cache_dir, + coconut_run_kwargs, ) # ----------------------------------------------------------------------------------------------------------------------- @@ -59,11 +65,12 @@ def get_state(state=None): 
return state -def cmd(cmd_args, interact=False, state=False, **kwargs): +def cmd(cmd_args, **kwargs): """Process command-line arguments.""" + state = kwargs.pop("state", False) if isinstance(cmd_args, (str, bytes)): cmd_args = cmd_args.split() - return get_state(state).cmd(cmd_args, interact=interact, **kwargs) + return get_state(state).cmd(cmd_args, **kwargs) VERSIONS = { @@ -96,6 +103,12 @@ def setup(*args, **kwargs): return get_state(state).setup(*args, **kwargs) +def warm_up(*args, **kwargs): + """Warm up the given state object.""" + state = kwargs.pop("state", False) + return get_state(state).comp.warm_up(*args, **kwargs) + + PARSERS = { "sys": lambda comp: comp.parse_sys, "exec": lambda comp: comp.parse_exec, @@ -127,7 +140,7 @@ def parse(code="", mode="sys", state=False, keep_internal_state=None): return PARSERS[mode](command.comp)(code, keep_state=keep_internal_state) -def coconut_eval(expression, globals=None, locals=None, state=False, **kwargs): +def coconut_base_exec(exec_func, mode, expression, globals=None, locals=None, state=False, **kwargs): """Compile and evaluate Coconut code.""" command = get_state(state) if command.comp is None: @@ -136,8 +149,12 @@ def coconut_eval(expression, globals=None, locals=None, state=False, **kwargs): if globals is None: globals = {} command.runner.update_vars(globals) - compiled_python = parse(expression, "eval", state, **kwargs) - return eval(compiled_python, globals, locals) + compiled_python = parse(expression, mode, state, **kwargs) + return exec_func(compiled_python, globals, locals) + + +coconut_exec = partial(coconut_base_exec, _coconut_exec, "sys") +coconut_eval = partial(coconut_base_exec, eval, "eval") # ----------------------------------------------------------------------------------------------------------------------- @@ -176,41 +193,98 @@ class CoconutImporter(object): ext = code_exts[0] command = None - def run_compiler(self, path): - """Run the Coconut compiler on the given path.""" + def 
__init__(self, *args): + self.use_cache_dir(default_use_cache_dir) + self.set_args(args) + + def use_cache_dir(self, use_cache_dir): + """Set the cache directory if any to use for compiled Coconut files.""" + if use_cache_dir: + if not PY34: + raise CoconutException("coconut.api.auto_compilation only supports the usage of a cache directory on Python 3.4+") + self.cache_dir = coconut_cache_dir + else: + self.cache_dir = None + + def set_args(self, args): + """Set the Coconut command line args to use for auto compilation.""" + self.args = proc_run_args(args) + + def cmd(self, *args): + """Run the Coconut compiler with the given args.""" if self.command is None: self.command = Command() - self.command.cmd([path] + list(coconut_import_hook_args)) - - def find_module(self, fullname, path=None): + return self.command.cmd(list(args) + self.args, interact=False, **coconut_run_kwargs) + + def compile(self, path, package): + """Compile a path to a file or package.""" + extra_args = [] + if self.cache_dir: + if package: + cache_dir = os.path.join(path, self.cache_dir) + else: + cache_dir = os.path.join(os.path.dirname(path), self.cache_dir) + extra_args.append(cache_dir) + else: + cache_dir = None + + if package: + self.cmd(path, *extra_args) + return cache_dir or path + else: + destpath, = self.cmd(path, *extra_args) + return destpath + + def find_coconut(self, fullname, path=None): """Searches for a Coconut file of the given name and compiles it.""" - basepaths = [""] + list(sys.path) + basepaths = list(sys.path) + [""] if fullname.startswith("."): if path is None: # we can't do a relative import if there's no package path - return + return None fullname = fullname[1:] basepaths.insert(0, path) - fullpath = os.path.join(*fullname.split(".")) - for head in basepaths: - path = os.path.join(head, fullpath) + + path_tail = os.path.join(*fullname.split(".")) + for path_head in basepaths: + path = os.path.join(path_head, path_tail) filepath = path + self.ext - dirpath = 
os.path.join(path, "__init__" + self.ext) + initpath = os.path.join(path, "__init__" + self.ext) if os.path.exists(filepath): - self.run_compiler(filepath) - # Coconut file was found and compiled, now let Python import it - return - if os.path.exists(dirpath): - self.run_compiler(path) - # Coconut package was found and compiled, now let Python import it - return + return self.compile(filepath, package=False) + if os.path.exists(initpath): + return self.compile(path, package=True) + return None + + def find_module(self, fullname, path=None): + """Get a loader for a Coconut module if it exists.""" + destpath = self.find_coconut(fullname, path) + # return None to let Python do the import when nothing was found or compiling in-place + if destpath is None or not self.cache_dir: + return None + else: + from importlib.machinery import SourceFileLoader + return SourceFileLoader(fullname, destpath) + + def find_spec(self, fullname, path=None, target=None): + """Get a modulespec for a Coconut module if it exists.""" + loader = self.find_module(fullname, path) + if loader is None: + return None + else: + from importlib.util import spec_from_loader + return spec_from_loader(fullname, loader) coconut_importer = CoconutImporter() -def auto_compilation(on=True): +def auto_compilation(on=True, args=None, use_cache_dir=None): """Turn automatic compilation of Coconut files on or off.""" + if args is not None: + coconut_importer.set_args(args) + if use_cache_dir is not None: + coconut_importer.use_cache_dir(use_cache_dir) if on: if coconut_importer not in sys.meta_path: sys.meta_path.insert(0, coconut_importer) diff --git a/coconut/api.pyi b/coconut/api.pyi index b2845d394..97f6fbf80 100644 --- a/coconut/api.pyi +++ b/coconut/api.pyi @@ -19,12 +19,12 @@ from typing import ( Iterable, Optional, Text, - Union, ) from coconut.command.command import Command class CoconutException(Exception): + """Coconut Exception.""" ... 
#----------------------------------------------------------------------------------------------------------------------- @@ -34,16 +34,29 @@ class CoconutException(Exception): GLOBAL_STATE: Optional[Command] = None -def get_state(state: Optional[Command]=None) -> Command: ... +def get_state(state: Optional[Command] = None) -> Command: + """Get a Coconut state object; None gets a new state, False gets the global state.""" + ... -def cmd(args: Union[Text, bytes, Iterable], interact: bool=False) -> None: ... +def cmd( + args: Text | bytes | Iterable, + *, + state: Command | None = ..., + argv: Iterable[Text] | None = None, + interact: bool = False, + default_target: Text | None = None, +) -> None: + """Process command-line arguments.""" + ... VERSIONS: Dict[Text, Text] = ... -def version(which: Optional[Text]=None) -> Text: ... +def version(which: Optional[Text] = None) -> Text: + """Get the Coconut version.""" + ... #----------------------------------------------------------------------------------------------------------------------- @@ -52,14 +65,28 @@ def version(which: Optional[Text]=None) -> Text: ... def setup( - target: Optional[str]=None, - strict: bool=False, - minify: bool=False, - line_numbers: bool=False, - keep_lines: bool=False, - no_tco: bool=False, - no_wrap: bool=False, -) -> None: ... + target: Optional[str] = None, + strict: bool = False, + minify: bool = False, + line_numbers: bool = True, + keep_lines: bool = False, + no_tco: bool = False, + no_wrap: bool = False, + *, + state: Optional[Command] = ..., +) -> None: + """Set up the given state object.""" + ... + + +def warm_up( + force: bool = False, + enable_incremental_mode: bool = False, + *, + state: Optional[Command] = ..., +) -> None: + """Warm up the given state object.""" + ... PARSERS: Dict[Text, Callable] = ... @@ -67,19 +94,34 @@ PARSERS: Dict[Text, Callable] = ... 
def parse( code: Text, - mode: Text=..., - state: Optional[Command]=..., - keep_internal_state: Optional[bool]=None, -) -> Text: ... + mode: Text = ..., + state: Optional[Command] = ..., + keep_internal_state: Optional[bool] = None, +) -> Text: + """Compile Coconut code.""" + ... + + +def coconut_exec( + expression: Text, + globals: Optional[Dict[Text, Any]] = None, + locals: Optional[Dict[Text, Any]] = None, + state: Optional[Command] = ..., + keep_internal_state: Optional[bool] = None, +) -> None: + """Compile and evaluate Coconut code.""" + ... def coconut_eval( expression: Text, - globals: Optional[Dict[Text, Any]]=None, - locals: Optional[Dict[Text, Any]]=None, - state: Optional[Command]=..., - keep_internal_state: Optional[bool]=None, -) -> Any: ... + globals: Optional[Dict[Text, Any]] = None, + locals: Optional[Dict[Text, Any]] = None, + state: Optional[Command] = ..., + keep_internal_state: Optional[bool] = None, +) -> Any: + """Compile and evaluate Coconut code.""" + ... # ----------------------------------------------------------------------------------------------------------------------- @@ -87,22 +129,24 @@ def coconut_eval( # ----------------------------------------------------------------------------------------------------------------------- -def use_coconut_breakpoint(on: bool=True) -> None: ... - - -class CoconutImporter: - ext: str - - @staticmethod - def run_compiler(path: str) -> None: ... - - def find_module(self, fullname: str, path: Optional[str]=None) -> None: ... +def use_coconut_breakpoint(on: bool = True) -> None: + """Switches the breakpoint() built-in (universally accessible via + coconut.__coconut__.breakpoint) to use coconut.embed.""" + ... -coconut_importer = CoconutImporter() +coconut_importer: Any = ... -def auto_compilation(on: bool=True) -> None: ... 
+def auto_compilation( + on: bool = True, + args: Iterable[Text] | None = None, + use_cache_dir: bool | None = None, +) -> None: + """Turn automatic compilation of Coconut files on or off.""" + ... -def get_coconut_encoding(encoding: str=...) -> Any: ... +def get_coconut_encoding(encoding: Text = ...) -> Any: + """Get a CodecInfo for the given Coconut encoding.""" + ... diff --git a/coconut/command/cli.py b/coconut/command/cli.py index 62e9b8050..5087e52d0 100644 --- a/coconut/command/cli.py +++ b/coconut/command/cli.py @@ -115,7 +115,13 @@ arguments.add_argument( "-l", "--line-numbers", "--linenumbers", action="store_true", - help="add line number comments for ease of debugging", + help="force enable line number comments (--line-numbers are enabled by default unless --minify is passed)", +) + +arguments.add_argument( + "--no-line-numbers", "--nolinenumbers", + action="store_true", + help="disable line number comments (opposite of --line-numbers)", ) arguments.add_argument( @@ -209,7 +215,7 @@ "--mypy", type=str, nargs=argparse.REMAINDER, - help="run MyPy on compiled Python (remaining args passed to MyPy) (implies --package)", + help="run MyPy on compiled Python (remaining args passed to MyPy) (implies --package --line-numbers)", ) arguments.add_argument( @@ -256,7 +262,7 @@ "--recursion-limit", "--recursionlimit", metavar="limit", type=int, - help="set maximum recursion depth in compiler (defaults to " + ascii(default_recursion_limit) + ") (when increasing --recursion-limit, you may also need to increase --stack-size)", + help="set maximum recursion depth in compiler (defaults to " + ascii(default_recursion_limit) + ") (when increasing --recursion-limit, you may also need to increase --stack-size; setting them to approximately equal values is recommended)", ) arguments.add_argument( diff --git a/coconut/command/command.py b/coconut/command/command.py index 3bcd5fd7d..fee072a41 100644 --- a/coconut/command/command.py +++ b/coconut/command/command.py @@ -55,8 +55,6 
@@ icoconut_custom_kernel_name, icoconut_old_kernel_names, exit_chars, - coconut_run_args, - coconut_run_verbose_args, verbose_mypy_args, default_mypy_args, report_this_text, @@ -64,12 +62,17 @@ mypy_silent_err_prefixes, mypy_err_infixes, mypy_install_arg, + jupyter_install_arg, mypy_builtin_regex, coconut_pth_file, error_color_code, jupyter_console_commands, default_jobs, create_package_retries, + default_use_cache_dir, + coconut_cache_dir, + coconut_run_kwargs, + interpreter_uses_incremental, ) from coconut.util import ( univ_open, @@ -98,8 +101,7 @@ can_parse, invert_mypy_arg, run_with_stack_size, - memoized_isdir, - memoized_isfile, + proc_run_args, ) from coconut.compiler.util import ( should_indent, @@ -122,7 +124,7 @@ class Command(object): exit_code = 0 # exit status to return errmsg = None # error message to display - show = False # corresponds to --display flag + display = False # corresponds to --display flag jobs = 0 # corresponds to --jobs flag mypy_args = None # corresponds to --mypy flag argv_args = None # corresponds to --argv flag @@ -142,22 +144,38 @@ def start(self, run=False): if run: args, argv = [], [] # for coconut-run, all args beyond the source file should be wrapped in an --argv + source = None for i in range(1, len(sys.argv)): arg = sys.argv[i] - args.append(arg) # if arg is source file, put everything else in argv - if not arg.startswith("-") and can_parse(arguments, args[:-1]): + if not arg.startswith("-") and can_parse(arguments, args): + source = arg argv = sys.argv[i + 1:] break - for run_arg in (coconut_run_verbose_args if "--verbose" in args else coconut_run_args): - if run_arg not in args: - args.append(run_arg) - self.cmd(args, argv=argv) + else: + args.append(arg) + args = proc_run_args(args) + if "--run" in args: + logger.warn("extraneous --run argument passed; coconut-run implies --run") + else: + args.append("--run") + dest = None + if source is not None: + source = fixpath(source) + args.append(source) + if 
default_use_cache_dir: + if os.path.isfile(source): + dest = os.path.join(os.path.dirname(source), coconut_cache_dir) + else: + dest = os.path.join(source, coconut_cache_dir) + self.cmd(args, argv=argv, use_dest=dest, **coconut_run_kwargs) else: self.cmd() - def cmd(self, args=None, argv=None, interact=True, default_target=None): + # new external parameters should be updated in api.pyi and DOCS + def cmd(self, args=None, argv=None, interact=True, default_target=None, use_dest=None): """Process command-line arguments.""" + result = None with self.handling_exceptions(): if args is None: parsed_args = arguments.parse_args() @@ -169,10 +187,14 @@ def cmd(self, args=None, argv=None, interact=True, default_target=None): parsed_args.argv = argv if parsed_args.target is None: parsed_args.target = default_target + if use_dest is not None and not parsed_args.no_write: + internal_assert(parsed_args.dest is None, "coconut-run got passed a dest", parsed_args) + parsed_args.dest = use_dest self.exit_code = 0 self.stack_size = parsed_args.stack_size - self.run_with_stack_size(self.execute_args, parsed_args, interact, original_args=args) + result = self.run_with_stack_size(self.execute_args, parsed_args, interact, original_args=args) self.exit_on_error() + return result def run_with_stack_size(self, func, *args, **kwargs): """Execute func with the correct stack size.""" @@ -212,7 +234,11 @@ def execute_args(self, args, interact=True, original_args=None): args.trace = args.profile = False # set up logger - logger.quiet, logger.verbose, logger.tracing = args.quiet, args.verbose, args.trace + logger.setup( + quiet=args.quiet, + verbose=args.verbose, + tracing=args.trace, + ) if args.verbose or args.trace or args.profile: set_grammar_names() if args.trace or args.profile: @@ -229,8 +255,10 @@ def execute_args(self, args, interact=True, original_args=None): # validate general command args if args.stack_size and args.stack_size % 4 != 0: logger.warn("--stack-size should generally be a 
multiple of 4, not {stack_size} (to support 4 KB pages)".format(stack_size=args.stack_size)) - if args.mypy is not None and args.line_numbers: - logger.warn("extraneous --line-numbers argument passed; --mypy implies --line-numbers") + if args.mypy is not None and args.no_line_numbers: + logger.warn("using --mypy running with --no-line-numbers is not recommended; mypy error messages won't include Coconut line numbers") + if args.line_numbers and args.no_line_numbers: + raise CoconutException("cannot compile with both --line-numbers and --no-line-numbers") if args.site_install and args.site_uninstall: raise CoconutException("cannot --site-install and --site-uninstall simultaneously") for and_args in getattr(args, "and") or []: @@ -246,14 +274,16 @@ def execute_args(self, args, interact=True, original_args=None): self.set_jobs(args.jobs, args.profile) if args.recursion_limit is not None: set_recursion_limit(args.recursion_limit) - if args.display: - self.show = True + self.display = args.display + self.prompt.vi_mode = args.vi_mode if args.style is not None: self.prompt.set_style(args.style) if args.history_file is not None: self.prompt.set_history_file(args.history_file) - if args.vi_mode: - self.prompt.vi_mode = True + if args.argv is not None: + self.argv_args = list(args.argv) + + # execute non-compilation tasks if args.docs: launch_documentation() if args.tutorial: @@ -262,25 +292,36 @@ def execute_args(self, args, interact=True, original_args=None): self.site_uninstall() if args.site_install: self.site_install() - if args.argv is not None: - self.argv_args = list(args.argv) # process general compiler args + if args.line_numbers: + line_numbers = True + elif args.no_line_numbers: + line_numbers = False + else: + line_numbers = ( + not args.minify + or args.mypy is not None + ) self.setup( target=args.target, strict=args.strict, minify=args.minify, - line_numbers=args.line_numbers or args.mypy is not None, + line_numbers=line_numbers, keep_lines=args.keep_lines, 
no_tco=args.no_tco, no_wrap=args.no_wrap_types, ) + if args.watch: + self.comp.warm_up(enable_incremental_mode=True) # process mypy args and print timing info (must come after compiler setup) if args.mypy is not None: self.set_mypy_args(args.mypy) logger.log("Grammar init time: " + str(self.comp.grammar_init_time) + " secs / Total init time: " + str(get_clock_time() - first_import_time) + " secs") + # do compilation, keeping track of compiled filepaths + filepaths = [] if args.source is not None: # warnings if source is given if args.interact and args.run: @@ -307,12 +348,11 @@ def execute_args(self, args, interact=True, original_args=None): src_dest_package_triples.append(self.process_source_dest(src, dest, args)) # disable jobs if we know we're only compiling one file - if len(src_dest_package_triples) <= 1 and not any(memoized_isdir(source) for source, dest, package in src_dest_package_triples): + if len(src_dest_package_triples) <= 1 and not any(os.path.isdir(source) for source, dest, package in src_dest_package_triples): self.disable_jobs() # do compilation with self.running_jobs(exit_on_error=not args.watch): - filepaths = [] for source, dest, package in src_dest_package_triples: filepaths += self.compile_path(source, dest, package, run=args.run or args.interact, force=args.force) self.run_mypy(filepaths) @@ -362,18 +402,21 @@ def execute_args(self, args, interact=True, original_args=None): if args.profile: print_timing_info() + # make sure to return inside handling_exceptions to ensure filepaths is available + return filepaths + def process_source_dest(self, source, dest, args): """Determine the correct source, dest, package mode to use for the given source, dest, and args.""" # determine source processed_source = fixpath(source) # validate args - if (args.run or args.interact) and memoized_isdir(processed_source): + if (args.run or args.interact) and os.path.isdir(processed_source): if args.run: raise CoconutException("source path %r must point to file not 
directory when --run is enabled" % (source,)) if args.interact: raise CoconutException("source path %r must point to file not directory when --run (implied by --interact) is enabled" % (source,)) - if args.watch and memoized_isfile(processed_source): + if args.watch and os.path.isfile(processed_source): raise CoconutException("source path %r must point to directory not file when --watch is enabled" % (source,)) # determine dest @@ -394,9 +437,9 @@ def process_source_dest(self, source, dest, args): package = False else: # auto-decide package - if memoized_isfile(processed_source): + if os.path.isfile(processed_source): package = False - elif memoized_isdir(processed_source): + elif os.path.isdir(processed_source): package = True else: raise CoconutException("could not find source path", source) @@ -444,20 +487,20 @@ def handling_exceptions(self): self.register_exit_code(err=err) def compile_path(self, path, write=True, package=True, **kwargs): - """Compile a path and returns paths to compiled files.""" + """Compile a path and return paths to compiled files.""" if not isinstance(write, bool): write = fixpath(write) - if memoized_isfile(path): + if os.path.isfile(path): destpath = self.compile_file(path, write, package, **kwargs) return [destpath] if destpath is not None else [] - elif memoized_isdir(path): + elif os.path.isdir(path): return self.compile_folder(path, write, package, **kwargs) else: raise CoconutException("could not find source path", path) def compile_folder(self, directory, write=True, package=True, **kwargs): - """Compile a directory and returns paths to compiled files.""" - if not isinstance(write, bool) and memoized_isfile(write): + """Compile a directory and return paths to compiled files.""" + if not isinstance(write, bool) and os.path.isfile(write): raise CoconutException("destination path cannot point to a file when compiling a directory") filepaths = [] for dirpath, dirnames, filenames in os.walk(directory): @@ -479,7 +522,7 @@ def 
compile_folder(self, directory, write=True, package=True, **kwargs): return filepaths def compile_file(self, filepath, write=True, package=False, force=False, **kwargs): - """Compile a file and returns the compiled file's path.""" + """Compile a file and return the compiled file's path.""" set_ext = False if write is False: destpath = None @@ -532,8 +575,8 @@ def compile(self, codepath, destpath=None, package=False, run=False, force=False foundhash = None if force else self.has_hash_of(destpath, code, package_level) if foundhash: if show_unchanged: - logger.show_tabulated("Left unchanged", showpath(destpath), "(pass --force to override).") - if self.show: + logger.show_tabulated("Left unchanged", showpath(destpath), "(pass --force to overwrite).") + if self.display: logger.print(foundhash) if run: self.execute_file(destpath, argv_source_path=codepath) @@ -548,7 +591,7 @@ def callback(compiled): with univ_open(destpath, "w") as opened: writefile(opened, compiled) logger.show_tabulated("Compiled to", showpath(destpath), ".") - if self.show: + if self.display: logger.print(compiled) if run: if destpath is None: @@ -675,12 +718,15 @@ def running_jobs(self, exit_on_error=True): def has_hash_of(self, destpath, code, package_level): """Determine if a file has the hash of the code.""" - if destpath is not None and memoized_isfile(destpath): + if destpath is not None and os.path.isfile(destpath): with univ_open(destpath, "r") as opened: compiled = readfile(opened) hashash = gethash(compiled) - if hashash is not None and hashash == self.comp.genhash(code, package_level): - return True + if hashash is not None: + newhash = self.comp.genhash(code, package_level) + if hashash == newhash: + return True + logger.log("old __coconut_hash__", hashash, "!= new __coconut_hash__", newhash) return False def get_input(self, more=False): @@ -701,7 +747,7 @@ def get_input(self, more=False): def start_running(self): """Start running the Runner.""" - self.comp.warm_up() + 
self.comp.warm_up(enable_incremental_mode=interpreter_uses_incremental) self.check_runner() self.running = True logger.log("Time till prompt: " + str(get_clock_time() - first_import_time) + " secs") @@ -709,8 +755,9 @@ def start_running(self): def start_prompt(self): """Start the interpreter.""" logger.show( - "Coconut Interpreter v{co_ver}:".format( + "Coconut Interpreter v{co_ver} (Python {py_ver}):".format( co_ver=VERSION, + py_ver=".".join(str(v) for v in sys.version_info[:2]), ), ) logger.show("(enter 'exit()' or press Ctrl-D to end)") @@ -757,7 +804,7 @@ def execute(self, compiled=None, path=None, use_eval=False, allow_show=True): self.check_runner() if compiled is not None: - if allow_show and self.show: + if allow_show and self.display: logger.print(compiled) if path is None: # header is not included @@ -872,9 +919,9 @@ def run_silent_cmd(self, *args): """Same as run_cmd$(show_output=logger.verbose).""" return run_cmd(*args, show_output=logger.verbose) - def install_jupyter_kernel(self, jupyter, kernel_dir): + def install_jupyter_kernel(self, jupyter, kernel_dir, install_args=[]): """Install the given kernel via the command line and return whether successful.""" - install_args = jupyter + ["kernelspec", "install", kernel_dir, "--replace"] + install_args = jupyter + ["kernelspec", "install", kernel_dir, "--replace"] + install_args try: self.run_silent_cmd(install_args) except CalledProcessError: @@ -898,7 +945,7 @@ def remove_jupyter_kernel(self, jupyter, kernel_name): return False return True - def install_default_jupyter_kernels(self, jupyter, kernel_list): + def install_default_jupyter_kernels(self, jupyter, kernel_list, install_args=[]): """Install icoconut default kernels.""" logger.show_sig("Installing Jupyter kernels '" + "', '".join(icoconut_default_kernel_names) + "'...") overall_success = True @@ -909,7 +956,7 @@ def install_default_jupyter_kernels(self, jupyter, kernel_list): overall_success = overall_success and success for kernel_dir in 
icoconut_default_kernel_dirs: - success = self.install_jupyter_kernel(jupyter, kernel_dir) + success = self.install_jupyter_kernel(jupyter, kernel_dir, install_args) overall_success = overall_success and success if overall_success: @@ -952,15 +999,27 @@ def start_jupyter(self, args): kernel_list = self.get_jupyter_kernels(jupyter) newly_installed_kernels = [] - # always update the custom kernel, but only reinstall it if it isn't already there or given no args + # determine if we're just installing + if not args: + just_install = True + elif args[0].startswith("-"): + just_install = True + elif args[0] == jupyter_install_arg: + just_install = True + args = args[1:] + else: + just_install = False + install_args = args if just_install else [] + + # always update the custom kernel, but only reinstall it if it isn't already there or just installing custom_kernel_dir = install_custom_kernel(logger=logger) - if custom_kernel_dir is not None and (icoconut_custom_kernel_name not in kernel_list or not args): + if custom_kernel_dir is not None and (icoconut_custom_kernel_name not in kernel_list or just_install): logger.show_sig("Installing Jupyter kernel {name!r}...".format(name=icoconut_custom_kernel_name)) - if self.install_jupyter_kernel(jupyter, custom_kernel_dir): + if self.install_jupyter_kernel(jupyter, custom_kernel_dir, install_args): newly_installed_kernels.append(icoconut_custom_kernel_name) - if not args: - # install default kernels if given no args + if just_install: + # install default kernels if just installing newly_installed_kernels += self.install_default_jupyter_kernels(jupyter, kernel_list) run_args = None @@ -979,7 +1038,7 @@ def start_jupyter(self, args): else: kernel = "coconut_py" + ver if kernel not in kernel_list: - newly_installed_kernels += self.install_default_jupyter_kernels(jupyter, kernel_list) + newly_installed_kernels += self.install_default_jupyter_kernels(jupyter, kernel_list, install_args) logger.warn("could not find {name!r} kernel; using 
{kernel!r} kernel instead".format(name=icoconut_custom_kernel_name, kernel=kernel)) # pass the kernel to the console or otherwise just launch Jupyter now that we know our kernel is available @@ -1007,7 +1066,7 @@ def watch(self, src_dest_package_triples, run=False, force=False): def recompile(path, src, dest, package): path = fixpath(path) - if memoized_isfile(path) and os.path.splitext(path)[1] in code_exts: + if os.path.isfile(path) and os.path.splitext(path)[1] in code_exts: with self.handling_exceptions(): if dest is True or dest is None: writedir = dest @@ -1061,7 +1120,7 @@ def site_uninstall(self): python_lib = self.get_python_lib() pth_file = os.path.join(python_lib, os.path.basename(coconut_pth_file)) - if memoized_isfile(pth_file): + if os.path.isfile(pth_file): os.remove(pth_file) logger.show_sig("Removed %s from %s" % (os.path.basename(coconut_pth_file), python_lib)) else: diff --git a/coconut/command/command.pyi b/coconut/command/command.pyi index 7f47447f8..3f1d4ba40 100644 --- a/coconut/command/command.pyi +++ b/coconut/command/command.pyi @@ -17,4 +17,5 @@ Description: MyPy stub file for command.py. class Command: + """Coconut command-line interface.""" ... 
diff --git a/coconut/command/util.py b/coconut/command/util.py index 11ebce971..57d2872d6 100644 --- a/coconut/command/util.py +++ b/coconut/command/util.py @@ -32,6 +32,7 @@ else: import builtins +from coconut.root import _coconut_exec from coconut.terminal import ( logger, complain, @@ -46,7 +47,6 @@ pickleable_obj, get_encoding, get_clock_time, - memoize, assert_remove_prefix, ) from coconut.constants import ( @@ -72,6 +72,7 @@ minimum_recursion_limit, oserror_retcode, base_stub_dir, + stub_dir_names, installed_stub_dir, interpreter_uses_auto_compilation, interpreter_uses_coconut_breakpoint, @@ -79,6 +80,7 @@ must_use_specific_target_builtins, kilobyte, min_stack_size_kbs, + coconut_base_run_args, ) if PY26: @@ -134,10 +136,6 @@ # ----------------------------------------------------------------------------------------------------------------------- -memoized_isdir = memoize(64)(os.path.isdir) -memoized_isfile = memoize(64)(os.path.isfile) - - def writefile(openedfile, newcontents): """Set the contents of a file.""" openedfile.seek(0) @@ -195,13 +193,6 @@ def rem_encoding(code): return "\n".join(new_lines) -def exec_func(code, glob_vars, loc_vars=None): - """Wrapper around exec.""" - if loc_vars is None: - loc_vars = glob_vars - exec(code, glob_vars, loc_vars) - - def interpret(code, in_vars): """Try to evaluate the given code, otherwise execute it.""" try: @@ -212,7 +203,7 @@ def interpret(code, in_vars): if result is not None: logger.print(ascii(result)) return result # don't also exec code - exec_func(code, in_vars) + _coconut_exec(code, in_vars) @contextmanager @@ -325,19 +316,30 @@ def run_cmd(cmd, show_output=True, raise_errs=True, **kwargs): return "" -def symlink(link_to, link_from): - """Link link_from to the directory link_to universally.""" - if os.path.islink(link_from): - os.unlink(link_from) - elif os.path.exists(link_from): +def unlink(link_path): + """Remove a symbolic link if one exists. 
Return whether anything was done.""" + if os.path.islink(link_path): + os.unlink(link_path) + return True + return False + + +def rm_dir_or_link(dir_to_rm): + """Safely delete a directory without deleting the contents of symlinks.""" + if not unlink(dir_to_rm) and os.path.exists(dir_to_rm): if WINDOWS: try: - os.rmdir(link_from) + os.rmdir(dir_to_rm) except OSError: logger.log_exc() - shutil.rmtree(link_from) + shutil.rmtree(dir_to_rm) else: - shutil.rmtree(link_from) + shutil.rmtree(dir_to_rm) + + +def symlink(link_to, link_from): + """Link link_from to the directory link_to universally.""" + rm_dir_or_link(link_from) try: if PY32: os.symlink(link_to, link_from, target_is_directory=True) @@ -351,7 +353,23 @@ def symlink(link_to, link_from): def install_mypy_stubs(): """Properly symlink mypy stub files.""" - symlink(base_stub_dir, installed_stub_dir) + # unlink stub_dirs so we know rm_dir_or_link won't clear them + for stub_name in stub_dir_names: + unlink(os.path.join(base_stub_dir, stub_name)) + + # clean out the installed_stub_dir (which shouldn't follow symlinks, + # but we still do the previous unlinking just to be sure) + rm_dir_or_link(installed_stub_dir) + + # recreate installed_stub_dir + os.makedirs(installed_stub_dir) + + # link stub dirs into the installed_stub_dir + for stub_name in stub_dir_names: + current_stub = os.path.join(base_stub_dir, stub_name) + install_stub = os.path.join(installed_stub_dir, stub_name) + symlink(current_stub, install_stub) + return installed_stub_dir @@ -449,6 +467,18 @@ def run_with_stack_size(stack_kbs, func, *args, **kwargs): return out[0] +def proc_run_args(args=()): + """Process args to use for coconut-run or the import hook.""" + args = list(args) + if "--verbose" not in args and "--quiet" not in args: + args.append("--quiet") + for run_arg in coconut_base_run_args: + run_arg_name = run_arg.split("=", 1)[0] + if not any(arg.startswith(run_arg_name) for arg in args): + args.append(run_arg) + return args + + # 
----------------------------------------------------------------------------------------------------------------------- # CLASSES: # ----------------------------------------------------------------------------------------------------------------------- @@ -479,7 +509,7 @@ def set_style(self, style): if style == "none": self.style = None elif prompt_toolkit is None: - raise CoconutException("syntax highlighting is not supported on this Python version") + raise CoconutException("syntax highlighting requires prompt_toolkit (run 'pip install -U prompt_toolkit' to fix)") elif style == "list": logger.print("Coconut Styles: none, " + ", ".join(pygments.styles.get_all_styles())) sys.exit(0) @@ -554,7 +584,7 @@ class Runner(object): def __init__(self, comp=None, exit=sys.exit, store=False, path=None): """Create the executor.""" from coconut.api import auto_compilation, use_coconut_breakpoint - auto_compilation(on=interpreter_uses_auto_compilation) + auto_compilation(on=interpreter_uses_auto_compilation, args=comp.get_cli_args() if comp else None) use_coconut_breakpoint(on=interpreter_uses_coconut_breakpoint) self.exit = exit self.vars = self.build_vars(path, init=True) @@ -572,9 +602,8 @@ def build_vars(path=None, init=False): "__name__": "__main__", "__package__": None, "reload": reload, + "__file__": None if path is None else fixpath(path) } - if path is not None: - init_vars["__file__"] = fixpath(path) if init: # put reserved_vars in for auto-completion purposes only at the very beginning for var in reserved_vars: @@ -632,7 +661,7 @@ def run(self, code, use_eval=False, path=None, all_errors_exit=False, store=True elif use_eval: run_func = eval else: - run_func = exec_func + run_func = _coconut_exec logger.log("Running {func}()...".format(func=getattr(run_func, "__name__", run_func))) start_time = get_clock_time() result = None @@ -671,7 +700,7 @@ def was_run_code(self, get_all=True): class multiprocess_wrapper(pickleable_obj): """Wrapper for a method that needs to be 
multiprocessed.""" - __slots__ = ("base", "method", "rec_limit", "logger", "argv") + __slots__ = ("base", "method", "stack_size", "rec_limit", "logger", "argv") def __init__(self, base, method, stack_size=None, _rec_limit=None, _logger=None, _argv=None): """Create new multiprocessable method.""" diff --git a/coconut/compiler/compiler.py b/coconut/compiler/compiler.py index bc5a800b8..9abf5dd15 100644 --- a/coconut/compiler/compiler.py +++ b/coconut/compiler/compiler.py @@ -60,7 +60,7 @@ unwrapper, open_chars, close_chars, - hold_chars, + str_chars, tabideal, match_to_args_var, match_to_kwargs_var, @@ -74,8 +74,6 @@ data_defaults_var, funcwrapper, non_syntactic_newline, - indchars, - default_whitespace_chars, early_passthrough_wrapper, super_names, custom_op_var, @@ -99,6 +97,8 @@ get_clock_time, get_name, assert_remove_prefix, + dictset, + noop_ctx, ) from coconut.exceptions import ( CoconutException, @@ -137,7 +137,6 @@ rem_comment, split_comment, attach, - trace_attach, split_leading_indent, split_trailing_indent, split_leading_trailing_indent, @@ -167,6 +166,11 @@ tuple_str_of_str, dict_to_str, close_char_for, + base_keyword, + enable_incremental_parsing, + get_psf_target, + move_loc_to_non_whitespace, + move_endpt_to_non_whitespace, ) from coconut.compiler.header import ( minify_header, @@ -199,7 +203,7 @@ def set_to_tuple(tokens): def import_stmt(imp_from, imp, imp_as, raw=False): """Generate an import statement.""" - if not raw: + if not raw and imp != "*": module_path = (imp if imp_from is None else imp_from).split(".", 1) existing_imp = import_existing.get(module_path[0]) if existing_imp is not None: @@ -473,8 +477,8 @@ def __init__(self, *args, **kwargs): """Creates a new compiler with the given parsing parameters.""" self.setup(*args, **kwargs) - # changes here should be reflected in __reduce__ and in the stub for coconut.api.setup - def setup(self, target=None, strict=False, minify=False, line_numbers=False, keep_lines=False, no_tco=False, 
no_wrap=False): + # changes here should be reflected in __reduce__, get_cli_args, and in the stub for coconut.api.setup + def setup(self, target=None, strict=False, minify=False, line_numbers=True, keep_lines=False, no_tco=False, no_wrap=False): """Initializes parsing parameters.""" if target is None: target = "" @@ -486,12 +490,14 @@ def setup(self, target=None, strict=False, minify=False, line_numbers=False, kee raise CoconutException("target Python version must be major.minor, not major.minor.micro") if target == "sys": target = sys_target + elif target == "psf": + target = get_psf_target() if target in pseudo_targets: target = pseudo_targets[target] if target not in targets: raise CoconutException( "unsupported target Python version " + repr(target), - extra="supported targets are: " + ", ".join(repr(t) for t in specific_targets + tuple(pseudo_targets)) + ", and 'sys'", + extra="supported targets are: " + ", ".join(repr(t) for t in specific_targets + tuple(pseudo_targets)) + ", 'sys', 'psf'", ) logger.log_vars("Compiler args:", locals()) self.target = target @@ -506,6 +512,23 @@ def __reduce__(self): """Return pickling information.""" return (self.__class__, (self.target, self.strict, self.minify, self.line_numbers, self.keep_lines, self.no_tco, self.no_wrap)) + def get_cli_args(self): + """Get the Coconut CLI args that can be used to set up an equivalent compiler.""" + args = ["--target=" + self.target] + if self.strict: + args.append("--strict") + if self.minify: + args.append("--minify") + if not self.line_numbers: + args.append("--no-line-numbers") + if self.keep_lines: + args.append("--keep-lines") + if self.no_tco: + args.append("--no-tco") + if self.no_wrap: + args.append("--no-wrap-types") + return args + def __copy__(self): """Create a new, blank copy of the compiler.""" cls, args = self.__reduce__() @@ -543,8 +566,9 @@ def reset(self, keep_state=False, filename=None): IMPORTANT: When adding anything here, consider whether it should also be added to 
inner_environment. """ self.filename = filename + self.outer_ln = None self.indchar = None - self.comments = {} + self.comments = defaultdict(set) self.wrapped_type_ignore = None self.refs = [] self.skips = [] @@ -552,6 +576,8 @@ def reset(self, keep_state=False, filename=None): # need to keep temp_var_counts in interpreter to avoid overwriting typevars if self.temp_var_counts is None or not keep_state: self.temp_var_counts = defaultdict(int) + # but always overwrite temp_vars_by_key since they store locs that will be invalidated + self.temp_vars_by_key = {} self.parsing_context = defaultdict(list) self.unused_imports = defaultdict(list) self.kept_lines = [] @@ -566,11 +592,12 @@ def reset(self, keep_state=False, filename=None): self.add_code_before_ignore_names = {} @contextmanager - def inner_environment(self): + def inner_environment(self, ln=None): """Set up compiler to evaluate inner expressions.""" + outer_ln, self.outer_ln = self.outer_ln, ln line_numbers, self.line_numbers = self.line_numbers, False keep_lines, self.keep_lines = self.keep_lines, False - comments, self.comments = self.comments, {} + comments, self.comments = self.comments, defaultdict(dictset) wrapped_type_ignore, self.wrapped_type_ignore = self.wrapped_type_ignore, None skips, self.skips = self.skips, [] docstring, self.docstring = self.docstring, "" @@ -580,6 +607,7 @@ def inner_environment(self): try: yield finally: + self.outer_ln = outer_ln self.line_numbers = line_numbers self.keep_lines = keep_lines self.comments = comments @@ -616,12 +644,22 @@ def post_transform(self, grammar, text): return transform(grammar, text) return None - def get_temp_var(self, base_name="temp"): + def get_temp_var(self, base_name="temp", loc=None): """Get a unique temporary variable name.""" + if loc is None: + key = None + else: + key = (base_name, loc) + if key is not None: + got_name = self.temp_vars_by_key.get(key) + if got_name is not None: + return got_name if self.minify: base_name = "" var_name = 
reserved_prefix + "_" + base_name + "_" + str(self.temp_var_counts[base_name]) self.temp_var_counts[base_name] += 1 + if key is not None: + self.temp_vars_by_key[key] = var_name return var_name @classmethod @@ -654,125 +692,125 @@ def method(original, loc, tokens): def bind(cls): """Binds reference objects to the proper parse actions.""" # handle parsing_context for class definitions - new_classdef = trace_attach(cls.classdef_ref, cls.method("classdef_handle")) + new_classdef = attach(cls.classdef_ref, cls.method("classdef_handle")) cls.classdef <<= Wrap(new_classdef, cls.method("class_manage"), greedy=True) - new_datadef = trace_attach(cls.datadef_ref, cls.method("datadef_handle")) + new_datadef = attach(cls.datadef_ref, cls.method("datadef_handle")) cls.datadef <<= Wrap(new_datadef, cls.method("class_manage"), greedy=True) - new_match_datadef = trace_attach(cls.match_datadef_ref, cls.method("match_datadef_handle")) + new_match_datadef = attach(cls.match_datadef_ref, cls.method("match_datadef_handle")) cls.match_datadef <<= Wrap(new_match_datadef, cls.method("class_manage"), greedy=True) # handle parsing_context for function definitions - new_stmt_lambdef = trace_attach(cls.stmt_lambdef_ref, cls.method("stmt_lambdef_handle")) + new_stmt_lambdef = attach(cls.stmt_lambdef_ref, cls.method("stmt_lambdef_handle")) cls.stmt_lambdef <<= Wrap(new_stmt_lambdef, cls.method("func_manage"), greedy=True) - new_decoratable_normal_funcdef_stmt = trace_attach( + new_decoratable_normal_funcdef_stmt = attach( cls.decoratable_normal_funcdef_stmt_ref, cls.method("decoratable_funcdef_stmt_handle"), ) cls.decoratable_normal_funcdef_stmt <<= Wrap(new_decoratable_normal_funcdef_stmt, cls.method("func_manage"), greedy=True) - new_decoratable_async_funcdef_stmt = trace_attach( + new_decoratable_async_funcdef_stmt = attach( cls.decoratable_async_funcdef_stmt_ref, cls.method("decoratable_funcdef_stmt_handle", is_async=True), ) cls.decoratable_async_funcdef_stmt <<= 
Wrap(new_decoratable_async_funcdef_stmt, cls.method("func_manage"), greedy=True) # handle parsing_context for type aliases - new_type_alias_stmt = trace_attach(cls.type_alias_stmt_ref, cls.method("type_alias_stmt_handle")) + new_type_alias_stmt = attach(cls.type_alias_stmt_ref, cls.method("type_alias_stmt_handle")) cls.type_alias_stmt <<= Wrap(new_type_alias_stmt, cls.method("type_alias_stmt_manage"), greedy=True) # greedy handlers (we need to know about them even if suppressed and/or they use the parsing_context) - cls.comment <<= trace_attach(cls.comment_tokens, cls.method("comment_handle"), greedy=True) - cls.type_param <<= trace_attach(cls.type_param_ref, cls.method("type_param_handle"), greedy=True) + cls.comment <<= attach(cls.comment_tokens, cls.method("comment_handle"), greedy=True) + cls.type_param <<= attach(cls.type_param_ref, cls.method("type_param_handle"), greedy=True) # name handlers cls.refname <<= attach(cls.name_ref, cls.method("name_handle")) cls.setname <<= attach(cls.name_ref, cls.method("name_handle", assign=True)) - cls.classname <<= trace_attach(cls.name_ref, cls.method("name_handle", assign=True, classname=True), greedy=True) + cls.classname <<= attach(cls.name_ref, cls.method("name_handle", assign=True, classname=True), greedy=True) # abnormally named handlers - cls.moduledoc_item <<= trace_attach(cls.moduledoc, cls.method("set_moduledoc")) + cls.moduledoc_item <<= attach(cls.moduledoc, cls.method("set_moduledoc")) cls.endline <<= attach(cls.endline_ref, cls.method("endline_handle")) - cls.normal_pipe_expr <<= trace_attach(cls.normal_pipe_expr_tokens, cls.method("pipe_handle")) - cls.return_typedef <<= trace_attach(cls.return_typedef_ref, cls.method("typedef_handle")) - cls.power_in_impl_call <<= trace_attach(cls.power, cls.method("power_in_impl_call_check")) + cls.normal_pipe_expr <<= attach(cls.normal_pipe_expr_tokens, cls.method("pipe_handle")) + cls.return_typedef <<= attach(cls.return_typedef_ref, cls.method("typedef_handle")) + 
cls.power_in_impl_call <<= attach(cls.power, cls.method("power_in_impl_call_check")) # handle all atom + trailers constructs with item_handle - cls.trailer_atom <<= trace_attach(cls.trailer_atom_ref, cls.method("item_handle")) - cls.no_partial_trailer_atom <<= trace_attach(cls.no_partial_trailer_atom_ref, cls.method("item_handle")) - cls.simple_assign <<= trace_attach(cls.simple_assign_ref, cls.method("item_handle")) + cls.trailer_atom <<= attach(cls.trailer_atom_ref, cls.method("item_handle")) + cls.no_partial_trailer_atom <<= attach(cls.no_partial_trailer_atom_ref, cls.method("item_handle")) + cls.simple_assign <<= attach(cls.simple_assign_ref, cls.method("item_handle")) # handle all string atoms with string_atom_handle - cls.string_atom <<= trace_attach(cls.string_atom_ref, cls.method("string_atom_handle")) - cls.f_string_atom <<= trace_attach(cls.f_string_atom_ref, cls.method("string_atom_handle")) + cls.string_atom <<= attach(cls.string_atom_ref, cls.method("string_atom_handle")) + cls.f_string_atom <<= attach(cls.f_string_atom_ref, cls.method("string_atom_handle")) # handle all keyword funcdefs with keyword_funcdef_handle - cls.keyword_funcdef <<= trace_attach(cls.keyword_funcdef_ref, cls.method("keyword_funcdef_handle")) - cls.async_keyword_funcdef <<= trace_attach(cls.async_keyword_funcdef_ref, cls.method("keyword_funcdef_handle")) - - # standard handlers of the form name <<= trace_attach(name_tokens, method("name_handle")) (implies name_tokens is reused) - cls.function_call <<= trace_attach(cls.function_call_tokens, cls.method("function_call_handle")) - cls.testlist_star_namedexpr <<= trace_attach(cls.testlist_star_namedexpr_tokens, cls.method("testlist_star_expr_handle")) - cls.ellipsis <<= trace_attach(cls.ellipsis_tokens, cls.method("ellipsis_handle")) - cls.f_string <<= trace_attach(cls.f_string_tokens, cls.method("f_string_handle")) - - # standard handlers of the form name <<= trace_attach(name_ref, method("name_handle")) - cls.term <<= 
trace_attach(cls.term_ref, cls.method("term_handle")) - cls.set_literal <<= trace_attach(cls.set_literal_ref, cls.method("set_literal_handle")) - cls.set_letter_literal <<= trace_attach(cls.set_letter_literal_ref, cls.method("set_letter_literal_handle")) - cls.import_stmt <<= trace_attach(cls.import_stmt_ref, cls.method("import_handle")) - cls.complex_raise_stmt <<= trace_attach(cls.complex_raise_stmt_ref, cls.method("complex_raise_stmt_handle")) - cls.augassign_stmt <<= trace_attach(cls.augassign_stmt_ref, cls.method("augassign_stmt_handle")) - cls.kwd_augassign <<= trace_attach(cls.kwd_augassign_ref, cls.method("kwd_augassign_handle")) - cls.dict_comp <<= trace_attach(cls.dict_comp_ref, cls.method("dict_comp_handle")) - cls.destructuring_stmt <<= trace_attach(cls.destructuring_stmt_ref, cls.method("destructuring_stmt_handle")) - cls.full_match <<= trace_attach(cls.full_match_ref, cls.method("full_match_handle")) - cls.name_match_funcdef <<= trace_attach(cls.name_match_funcdef_ref, cls.method("name_match_funcdef_handle")) - cls.op_match_funcdef <<= trace_attach(cls.op_match_funcdef_ref, cls.method("op_match_funcdef_handle")) - cls.yield_from <<= trace_attach(cls.yield_from_ref, cls.method("yield_from_handle")) - cls.typedef <<= trace_attach(cls.typedef_ref, cls.method("typedef_handle")) - cls.typedef_default <<= trace_attach(cls.typedef_default_ref, cls.method("typedef_handle")) - cls.unsafe_typedef_default <<= trace_attach(cls.unsafe_typedef_default_ref, cls.method("unsafe_typedef_handle")) - cls.typed_assign_stmt <<= trace_attach(cls.typed_assign_stmt_ref, cls.method("typed_assign_stmt_handle")) - cls.with_stmt <<= trace_attach(cls.with_stmt_ref, cls.method("with_stmt_handle")) - cls.await_expr <<= trace_attach(cls.await_expr_ref, cls.method("await_expr_handle")) - cls.cases_stmt <<= trace_attach(cls.cases_stmt_ref, cls.method("cases_stmt_handle")) - cls.decorators <<= trace_attach(cls.decorators_ref, cls.method("decorators_handle")) - cls.unsafe_typedef_or_expr 
<<= trace_attach(cls.unsafe_typedef_or_expr_ref, cls.method("unsafe_typedef_or_expr_handle")) - cls.testlist_star_expr <<= trace_attach(cls.testlist_star_expr_ref, cls.method("testlist_star_expr_handle")) - cls.list_expr <<= trace_attach(cls.list_expr_ref, cls.method("list_expr_handle")) - cls.dict_literal <<= trace_attach(cls.dict_literal_ref, cls.method("dict_literal_handle")) - cls.new_testlist_star_expr <<= trace_attach(cls.new_testlist_star_expr_ref, cls.method("new_testlist_star_expr_handle")) - cls.anon_namedtuple <<= trace_attach(cls.anon_namedtuple_ref, cls.method("anon_namedtuple_handle")) - cls.base_match_for_stmt <<= trace_attach(cls.base_match_for_stmt_ref, cls.method("base_match_for_stmt_handle")) - cls.async_with_for_stmt <<= trace_attach(cls.async_with_for_stmt_ref, cls.method("async_with_for_stmt_handle")) - cls.unsafe_typedef_tuple <<= trace_attach(cls.unsafe_typedef_tuple_ref, cls.method("unsafe_typedef_tuple_handle")) - cls.funcname_typeparams <<= trace_attach(cls.funcname_typeparams_ref, cls.method("funcname_typeparams_handle")) - cls.impl_call <<= trace_attach(cls.impl_call_ref, cls.method("impl_call_handle")) - cls.protocol_intersect_expr <<= trace_attach(cls.protocol_intersect_expr_ref, cls.method("protocol_intersect_expr_handle")) + cls.keyword_funcdef <<= attach(cls.keyword_funcdef_ref, cls.method("keyword_funcdef_handle")) + cls.async_keyword_funcdef <<= attach(cls.async_keyword_funcdef_ref, cls.method("keyword_funcdef_handle")) + + # standard handlers of the form name <<= attach(name_tokens, method("name_handle")) (implies name_tokens is reused) + cls.function_call <<= attach(cls.function_call_tokens, cls.method("function_call_handle")) + cls.testlist_star_namedexpr <<= attach(cls.testlist_star_namedexpr_tokens, cls.method("testlist_star_expr_handle")) + cls.ellipsis <<= attach(cls.ellipsis_tokens, cls.method("ellipsis_handle")) + cls.f_string <<= attach(cls.f_string_tokens, cls.method("f_string_handle")) + + # standard handlers of the 
form name <<= attach(name_ref, method("name_handle")) + cls.term <<= attach(cls.term_ref, cls.method("term_handle")) + cls.set_literal <<= attach(cls.set_literal_ref, cls.method("set_literal_handle")) + cls.set_letter_literal <<= attach(cls.set_letter_literal_ref, cls.method("set_letter_literal_handle")) + cls.import_stmt <<= attach(cls.import_stmt_ref, cls.method("import_handle")) + cls.complex_raise_stmt <<= attach(cls.complex_raise_stmt_ref, cls.method("complex_raise_stmt_handle")) + cls.augassign_stmt <<= attach(cls.augassign_stmt_ref, cls.method("augassign_stmt_handle")) + cls.kwd_augassign <<= attach(cls.kwd_augassign_ref, cls.method("kwd_augassign_handle")) + cls.dict_comp <<= attach(cls.dict_comp_ref, cls.method("dict_comp_handle")) + cls.destructuring_stmt <<= attach(cls.destructuring_stmt_ref, cls.method("destructuring_stmt_handle")) + cls.full_match <<= attach(cls.full_match_ref, cls.method("full_match_handle")) + cls.name_match_funcdef <<= attach(cls.name_match_funcdef_ref, cls.method("name_match_funcdef_handle")) + cls.op_match_funcdef <<= attach(cls.op_match_funcdef_ref, cls.method("op_match_funcdef_handle")) + cls.yield_from <<= attach(cls.yield_from_ref, cls.method("yield_from_handle")) + cls.typedef <<= attach(cls.typedef_ref, cls.method("typedef_handle")) + cls.typedef_default <<= attach(cls.typedef_default_ref, cls.method("typedef_handle")) + cls.unsafe_typedef_default <<= attach(cls.unsafe_typedef_default_ref, cls.method("unsafe_typedef_handle")) + cls.typed_assign_stmt <<= attach(cls.typed_assign_stmt_ref, cls.method("typed_assign_stmt_handle")) + cls.with_stmt <<= attach(cls.with_stmt_ref, cls.method("with_stmt_handle")) + cls.await_expr <<= attach(cls.await_expr_ref, cls.method("await_expr_handle")) + cls.cases_stmt <<= attach(cls.cases_stmt_ref, cls.method("cases_stmt_handle")) + cls.decorators <<= attach(cls.decorators_ref, cls.method("decorators_handle")) + cls.unsafe_typedef_or_expr <<= attach(cls.unsafe_typedef_or_expr_ref, 
cls.method("unsafe_typedef_or_expr_handle")) + cls.testlist_star_expr <<= attach(cls.testlist_star_expr_ref, cls.method("testlist_star_expr_handle")) + cls.list_expr <<= attach(cls.list_expr_ref, cls.method("list_expr_handle")) + cls.dict_literal <<= attach(cls.dict_literal_ref, cls.method("dict_literal_handle")) + cls.new_testlist_star_expr <<= attach(cls.new_testlist_star_expr_ref, cls.method("new_testlist_star_expr_handle")) + cls.anon_namedtuple <<= attach(cls.anon_namedtuple_ref, cls.method("anon_namedtuple_handle")) + cls.base_match_for_stmt <<= attach(cls.base_match_for_stmt_ref, cls.method("base_match_for_stmt_handle")) + cls.async_with_for_stmt <<= attach(cls.async_with_for_stmt_ref, cls.method("async_with_for_stmt_handle")) + cls.unsafe_typedef_tuple <<= attach(cls.unsafe_typedef_tuple_ref, cls.method("unsafe_typedef_tuple_handle")) + cls.funcname_typeparams <<= attach(cls.funcname_typeparams_ref, cls.method("funcname_typeparams_handle")) + cls.impl_call <<= attach(cls.impl_call_ref, cls.method("impl_call_handle")) + cls.protocol_intersect_expr <<= attach(cls.protocol_intersect_expr_ref, cls.method("protocol_intersect_expr_handle")) # these handlers just do strict/target checking - cls.u_string <<= trace_attach(cls.u_string_ref, cls.method("u_string_check")) - cls.nonlocal_stmt <<= trace_attach(cls.nonlocal_stmt_ref, cls.method("nonlocal_check")) - cls.star_assign_item <<= trace_attach(cls.star_assign_item_ref, cls.method("star_assign_item_check")) - cls.classic_lambdef <<= trace_attach(cls.classic_lambdef_ref, cls.method("lambdef_check")) - cls.star_sep_arg <<= trace_attach(cls.star_sep_arg_ref, cls.method("star_sep_check")) - cls.star_sep_setarg <<= trace_attach(cls.star_sep_setarg_ref, cls.method("star_sep_check")) - cls.slash_sep_arg <<= trace_attach(cls.slash_sep_arg_ref, cls.method("slash_sep_check")) - cls.slash_sep_setarg <<= trace_attach(cls.slash_sep_setarg_ref, cls.method("slash_sep_check")) - cls.endline_semicolon <<= 
trace_attach(cls.endline_semicolon_ref, cls.method("endline_semicolon_check")) - cls.async_stmt <<= trace_attach(cls.async_stmt_ref, cls.method("async_stmt_check")) - cls.async_comp_for <<= trace_attach(cls.async_comp_for_ref, cls.method("async_comp_check")) - cls.namedexpr <<= trace_attach(cls.namedexpr_ref, cls.method("namedexpr_check")) - cls.new_namedexpr <<= trace_attach(cls.new_namedexpr_ref, cls.method("new_namedexpr_check")) - cls.match_dotted_name_const <<= trace_attach(cls.match_dotted_name_const_ref, cls.method("match_dotted_name_const_check")) - cls.except_star_clause <<= trace_attach(cls.except_star_clause_ref, cls.method("except_star_clause_check")) - cls.subscript_star <<= trace_attach(cls.subscript_star_ref, cls.method("subscript_star_check")) + cls.u_string <<= attach(cls.u_string_ref, cls.method("u_string_check")) + cls.nonlocal_stmt <<= attach(cls.nonlocal_stmt_ref, cls.method("nonlocal_check")) + cls.star_assign_item <<= attach(cls.star_assign_item_ref, cls.method("star_assign_item_check")) + cls.keyword_lambdef <<= attach(cls.keyword_lambdef_ref, cls.method("lambdef_check")) + cls.star_sep_arg <<= attach(cls.star_sep_arg_ref, cls.method("star_sep_check")) + cls.star_sep_setarg <<= attach(cls.star_sep_setarg_ref, cls.method("star_sep_check")) + cls.slash_sep_arg <<= attach(cls.slash_sep_arg_ref, cls.method("slash_sep_check")) + cls.slash_sep_setarg <<= attach(cls.slash_sep_setarg_ref, cls.method("slash_sep_check")) + cls.endline_semicolon <<= attach(cls.endline_semicolon_ref, cls.method("endline_semicolon_check")) + cls.async_stmt <<= attach(cls.async_stmt_ref, cls.method("async_stmt_check")) + cls.async_comp_for <<= attach(cls.async_comp_for_ref, cls.method("async_comp_check")) + cls.namedexpr <<= attach(cls.namedexpr_ref, cls.method("namedexpr_check")) + cls.new_namedexpr <<= attach(cls.new_namedexpr_ref, cls.method("new_namedexpr_check")) + cls.match_dotted_name_const <<= attach(cls.match_dotted_name_const_ref, 
cls.method("match_dotted_name_const_check")) + cls.except_star_clause <<= attach(cls.except_star_clause_ref, cls.method("except_star_clause_check")) + cls.subscript_star <<= attach(cls.subscript_star_ref, cls.method("subscript_star_check")) # these checking handlers need to be greedy since they can be suppressed - cls.match_check_equals <<= trace_attach(cls.match_check_equals_ref, cls.method("match_check_equals_check"), greedy=True) + cls.match_check_equals <<= attach(cls.match_check_equals_ref, cls.method("match_check_equals_check"), greedy=True) def copy_skips(self): """Copy the line skips.""" @@ -804,19 +842,31 @@ def reformat_post_deferred_code_proc(self, snip): """Do post-processing that comes after deferred_code_proc.""" return self.apply_procs(self.reformatprocs[1:], snip, reformatting=True, log=False) - def reformat(self, snip, *indices, **kwargs): + def reformat(self, snip, **kwargs): """Post process a preprocessed snippet.""" internal_assert("ignore_errors" in kwargs, "reformat() missing required keyword argument: 'ignore_errors'") - if not indices: - with self.complain_on_err(): - return self.apply_procs(self.reformatprocs, snip, reformatting=True, log=False, **kwargs) - return snip - else: - internal_assert(kwargs.get("ignore_errors", False), "cannot reformat with indices and ignore_errors=False") - return ( - (self.reformat(snip, **kwargs),) - + tuple(len(self.reformat(snip[:index], **kwargs)) for index in indices) - ) + with self.complain_on_err(): + return self.apply_procs(self.reformatprocs, snip, reformatting=True, log=False, **kwargs) + return snip + + def reformat_locs(self, snip, loc, endpt=None, **kwargs): + """Reformats a snippet and adjusts the locations in it.""" + internal_assert("ignore_errors" not in kwargs, "cannot pass ignore_errors to reformat_locs") + kwargs["ignore_errors"] = True + + new_snip = self.reformat(snip, **kwargs) + new_loc = move_loc_to_non_whitespace( + new_snip, + len(self.reformat(snip[:loc], **kwargs)), + ) + if endpt 
is None: + return new_snip, new_loc + + new_endpt = move_endpt_to_non_whitespace( + new_snip, + len(self.reformat(snip[:endpt], **kwargs)), + ) + return new_snip, new_loc, new_endpt def reformat_without_adding_code_before(self, code, **kwargs): """Reformats without adding code before and instead returns what would have been added.""" @@ -850,14 +900,22 @@ def strict_err(self, *args, **kwargs): if self.strict: raise self.make_err(CoconutStyleError, *args, **kwargs) + def syntax_warning(self, *args, **kwargs): + """Show a CoconutSyntaxWarning. Usage: + self.syntax_warning(message, original, loc) + """ + logger.warn_err(self.make_err(CoconutSyntaxWarning, *args, **kwargs)) + def strict_err_or_warn(self, *args, **kwargs): - """Raises an error if in strict mode, otherwise raises a warning.""" + """Raises an error if in strict mode, otherwise raises a warning. Usage: + self.strict_err_or_warn(message, original, loc) + """ internal_assert("extra" not in kwargs, "cannot pass extra=... to strict_err_or_warn") if self.strict: kwargs["extra"] = "remove --strict to downgrade to a warning" raise self.make_err(CoconutStyleError, *args, **kwargs) else: - logger.warn_err(self.make_err(CoconutSyntaxWarning, *args, **kwargs)) + self.syntax_warning(*args, **kwargs) @contextmanager def complain_on_err(self): @@ -866,14 +924,15 @@ def complain_on_err(self): yield except ParseBaseException as err: # don't reformat, since we might have gotten here because reformat failed - complain(self.make_parse_err(err, reformat=False, include_ln=False)) + complain(self.make_parse_err(err, include_ln=False, reformat=False, endpoint=True)) except CoconutException as err: complain(err) - def remove_strs(self, inputstring): + def remove_strs(self, inputstring, inner_environment=True): """Remove strings/comments from the given input.""" with self.complain_on_err(): - return self.str_proc(inputstring) + with (self.inner_environment() if inner_environment else noop_ctx()): + return 
self.str_proc(inputstring) return inputstring def get_matcher(self, original, loc, check_var, name_list=None): @@ -896,12 +955,30 @@ def get_ref(self, reftype, index): index, extra="max index: {max_index}; wanted reftype: {reftype}".format(max_index=len(self.refs) - 1, reftype=reftype), ) - internal_assert( - got_reftype == reftype, - "wanted {reftype} reference; got {got_reftype} reference".format(reftype=reftype, got_reftype=got_reftype), - extra="index: {index}; data: {data!r}".format(index=index, data=data), - ) - return data + if reftype is None: + return got_reftype, data + else: + internal_assert( + got_reftype == reftype, + "wanted {reftype} reference; got {got_reftype} reference".format(reftype=reftype, got_reftype=got_reftype), + extra="index: {index}; data: {data!r}".format(index=index, data=data), + ) + return data + + def get_str_ref(self, index, reformatting): + """Get a reference to a string.""" + if reformatting: + reftype, data = self.get_ref(None, index) + if reftype == "str": + return data + elif reftype == "f_str": + strchar, string_parts, exprs = data + text = interleaved_join(string_parts, exprs) + return text, strchar + else: + raise CoconutInternalException("unknown str ref type", reftype) + else: + return self.get_ref("str", index) def wrap_str(self, text, strchar, multiline=False): """Wrap a string.""" @@ -909,6 +986,10 @@ def wrap_str(self, text, strchar, multiline=False): strchar *= 3 return strwrapper + self.add_ref("str", (text, strchar)) + unwrapper + def wrap_f_str(self, strchar, string_parts, exprs): + """Wrap a format string.""" + return strwrapper + self.add_ref("f_str", (strchar, string_parts, exprs)) + unwrapper + def wrap_str_of(self, text, expect_bytes=False): """Wrap a string of a string.""" text_repr = ascii(text) @@ -1007,21 +1088,33 @@ def target_info(self): """Return information on the current target as a version tuple.""" return get_target_info(self.target) - def make_err(self, errtype, message, original, loc=0, ln=None, 
extra=None, reformat=True, endpoint=False, include_causes=False, **kwargs): + def make_err(self, errtype, message, original, loc=0, ln=None, extra=None, reformat=True, endpoint=None, include_causes=False, **kwargs): """Generate an error of the specified type.""" + logger.log_loc("raw_loc", original, loc) + logger.log_loc("raw_endpoint", original, endpoint) + # move loc back to end of most recent actual text - while loc >= 2 and original[loc - 1:loc + 1].rstrip("".join(indchars) + default_whitespace_chars) == "": - loc -= 1 + loc = move_loc_to_non_whitespace(original, loc) + logger.log_loc("loc", original, loc) - # get endpoint and line number + # get endpoint + if endpoint is None: + endpoint = reformat if endpoint is False: endpoint = loc - elif endpoint is True: - endpoint = clip(get_highest_parse_loc() + 1, min=loc) else: - endpoint = clip(endpoint, min=loc) + if endpoint is True: + endpoint = get_highest_parse_loc(original) + logger.log_loc("highest_parse_loc", original, endpoint) + endpoint = clip( + move_endpt_to_non_whitespace(original, endpoint, backwards=True), + min=loc, + ) + logger.log_loc("endpoint", original, endpoint) + + # get line number if ln is None: - ln = self.adjust(lineno(loc, original)) + ln = self.outer_ln or self.adjust(lineno(loc, original)) # get line indices for the error locs original_lines = tuple(logical_lines(original, True)) @@ -1034,11 +1127,13 @@ def make_err(self, errtype, message, original, loc=0, ln=None, extra=None, refor # fix error locations to correspond to the snippet loc_in_snip = getcol(loc, original) - 1 endpt_in_snip = endpoint - sum(len(line) for line in original_lines[:loc_line_ind]) + logger.log_loc("loc_in_snip", snippet, loc_in_snip) + logger.log_loc("endpt_in_snip", snippet, endpt_in_snip) # determine possible causes if include_causes: self.internal_assert(extra is None, original, loc, "make_err cannot include causes with extra") - causes = set() + causes = dictset() for cause, _, _ in 
all_matches(self.parse_err_msg, snippet[loc_in_snip:]): causes.add(cause) for cause, _, _ in all_matches(self.parse_err_msg, snippet[endpt_in_snip:]): @@ -1053,7 +1148,9 @@ def make_err(self, errtype, message, original, loc=0, ln=None, extra=None, refor # reformat the snippet and fix error locations to match if reformat: - snippet, loc_in_snip, endpt_in_snip = self.reformat(snippet, loc_in_snip, endpt_in_snip, ignore_errors=True) + snippet, loc_in_snip, endpt_in_snip = self.reformat_locs(snippet, loc_in_snip, endpt_in_snip) + logger.log_loc("reformatted_loc", snippet, loc_in_snip) + logger.log_loc("reformatted_endpt", snippet, endpt_in_snip) if extra is not None: kwargs["extra"] = extra @@ -1062,7 +1159,7 @@ def make_err(self, errtype, message, original, loc=0, ln=None, extra=None, refor def make_syntax_err(self, err, original): """Make a CoconutSyntaxError from a CoconutDeferredSyntaxError.""" msg, loc = err.args - return self.make_err(CoconutSyntaxError, msg, original, loc, endpoint=True) + return self.make_err(CoconutSyntaxError, msg, original, loc) def make_parse_err(self, err, msg=None, include_ln=True, **kwargs): """Make a CoconutParseError from a ParseBaseException.""" @@ -1070,12 +1167,12 @@ def make_parse_err(self, err, msg=None, include_ln=True, **kwargs): loc = err.loc ln = self.adjust(err.lineno) if include_ln else None - return self.make_err(CoconutParseError, msg, original, loc, ln, endpoint=True, include_causes=True, **kwargs) + return self.make_err(CoconutParseError, msg, original, loc, ln, include_causes=True, **kwargs) def make_internal_syntax_err(self, original, loc, msg, item, extra): """Make a CoconutInternalSyntaxError.""" message = msg + ": " + repr(item) - return self.make_err(CoconutInternalSyntaxError, message, original, loc, extra=extra, endpoint=True) + return self.make_err(CoconutInternalSyntaxError, message, original, loc, extra=extra) def internal_assert(self, cond, original, loc, msg=None, item=None): """Version of internal_assert 
that raises CoconutInternalSyntaxErrors.""" @@ -1084,6 +1181,8 @@ def internal_assert(self, cond, original, loc, msg=None, item=None): def inner_parse_eval( self, + original, + loc, inputstring, parser=None, preargs={"strip": True}, @@ -1092,7 +1191,7 @@ def inner_parse_eval( """Parse eval code in an inner environment.""" if parser is None: parser = self.eval_parser - with self.inner_environment(): + with self.inner_environment(ln=self.adjust(lineno(loc, original))): self.streamline(parser, inputstring) pre_procd = self.pre(inputstring, **preargs) parsed = parse(parser, pre_procd) @@ -1106,9 +1205,9 @@ def parsing(self, keep_state=False, filename=None): self.current_compiler[0] = self yield - def streamline(self, grammar, inputstring=""): + def streamline(self, grammar, inputstring="", force=False): """Streamline the given grammar for the given inputstring.""" - if streamline_grammar_for_len is not None and len(inputstring) >= streamline_grammar_for_len: + if force or (streamline_grammar_for_len is not None and len(inputstring) >= streamline_grammar_for_len): start_time = get_clock_time() prep_grammar(grammar, streamline=True) logger.log_lambda( @@ -1128,7 +1227,7 @@ def run_final_checks(self, original, keep_state=False): for name, locs in self.unused_imports.items(): for loc in locs: ln = self.adjust(lineno(loc, original)) - comment = self.reformat(self.comments.get(ln, ""), ignore_errors=True) + comment = self.reformat(" ".join(self.comments[ln]), ignore_errors=True) if not self.noqa_regex.search(comment): self.strict_err_or_warn( "found unused import " + repr(self.reformat(name, ignore_errors=True)) + " (add '# NOQA' to suppress)", @@ -1180,94 +1279,219 @@ def prepare(self, inputstring, strip=False, nl_at_eof_check=False, **kwargs): inputstring = inputstring.strip() return inputstring + def wrap_str_hold(self, hold): + """Wrap a string hold from str_proc.""" + if hold["type"] == "string": + return self.wrap_str(hold["contents"], hold["start"]) + elif 
hold["type"] == "f string": + return self.wrap_f_str(hold["start"], hold["str_parts"], hold["exprs"]) + else: + raise CoconutInternalException("invalid str_proc hold type", hold["type"]) + + def str_hold_contents(self, hold, append=None): + """Get the contents of a string hold from str_proc.""" + if hold["type"] == "string": + if append is not None: + hold["contents"] += append + return hold["contents"] + elif hold["type"] == "f string": + if append is not None: + hold["str_parts"][-1] += append + return hold["str_parts"][-1] + else: + raise CoconutInternalException("invalid str_proc hold type", hold["type"]) + def str_proc(self, inputstring, **kwargs): """Process strings and comments.""" out = [] found = None # store of characters that might be the start of a string - hold = None - # hold = [_comment]: - _comment = 0 # the contents of the comment so far - # hold = [_contents, _start, _stop]: - _contents = 0 # the contents of the string so far - _start = 1 # the string of characters that started the string - _stop = 2 # store of characters that might be the end of the string + hold = None # dictionary of information on the string/comment we're currently in skips = self.copy_skips() - x = 0 - while x <= len(inputstring): + i = 0 + while i <= len(inputstring): try: - c = inputstring[x] + c = inputstring[i] except IndexError: - internal_assert(x == len(inputstring), "invalid index in str_proc", (inputstring, x)) + internal_assert(i == len(inputstring), "invalid index in str_proc", (inputstring, i)) c = "\n" if hold is not None: - if len(hold) == 1: # hold == [_comment] + internal_assert(found is None, "str_proc error, got both hold and found", (hold, found)) + if hold["type"] == "comment": if c == "\n": - out += [self.wrap_comment(hold[_comment]), c] + out += [self.wrap_comment(hold["comment"]), c] hold = None else: - hold[_comment] += c - elif hold[_stop] is not None: - if c == "\\": - hold[_contents] += hold[_stop] + c - hold[_stop] = None - elif c == 
hold[_start][0]: - hold[_stop] += c - elif len(hold[_stop]) > len(hold[_start]): - raise self.make_err(CoconutSyntaxError, "invalid number of closing " + repr(hold[_start][0]) + "s", inputstring, x, reformat=False) - elif hold[_stop] == hold[_start]: - out.append(self.wrap_str(hold[_contents], hold[_start][0], True)) - hold = None - x -= 1 - else: - if c == "\n": - if len(hold[_start]) == 1: - raise self.make_err(CoconutSyntaxError, "linebreak in non-multiline string", inputstring, x, reformat=False) - skips = addskip(skips, self.adjust(lineno(x, inputstring))) - hold[_contents] += hold[_stop] + c - hold[_stop] = None - elif count_end(hold[_contents], "\\") % 2 == 1: - if c == "\n": - skips = addskip(skips, self.adjust(lineno(x, inputstring))) - hold[_contents] += c - elif c == hold[_start]: - out.append(self.wrap_str(hold[_contents], hold[_start], False)) - hold = None - elif c == hold[_start][0]: - hold[_stop] = c + hold["comment"] += c + else: - if c == "\n": - if len(hold[_start]) == 1: - raise self.make_err(CoconutSyntaxError, "linebreak in non-multiline string", inputstring, x, reformat=False) - skips = addskip(skips, self.adjust(lineno(x, inputstring))) - hold[_contents] += c + if hold["type"] == "string": + is_f = False + elif hold["type"] == "f string": + is_f = True + else: + raise CoconutInternalException("invalid str_proc string hold type", hold["type"]) + done = False # whether the string is finished + rerun = False # whether we want to rerun the loop with the same i next iteration + + # if we're inside an f string expr + if hold.get("in_expr", False): + internal_assert(is_f, "in_expr should only be for f string holds, not", hold) + remaining_text = inputstring[i:] + str_start, str_stop = parse_where(self.string_start, remaining_text) + if str_start is not None: # str_start >= 0; if > 0 means there is whitespace before the string + hold["exprs"][-1] += remaining_text[:str_stop] + # add any skips from where we're fast-forwarding (except don't include c 
since we handle that below) + for j in range(1, str_stop): + if inputstring[i + j] == "\n": + skips = addskip(skips, self.adjust(lineno(i + j, inputstring))) + i += str_stop - 1 + elif hold["paren_level"] < 0: + hold["paren_level"] += paren_change(c) + hold["exprs"][-1] += c + elif hold["paren_level"] > 0: + raise self.make_err(CoconutSyntaxError, "imbalanced parentheses in format string expression", inputstring, i, reformat=False) + elif match_in(self.end_f_str_expr, remaining_text): + hold["in_expr"] = False + hold["str_parts"].append(c) + else: + hold["paren_level"] += paren_change(c) + hold["exprs"][-1] += c + + # if we might be at the end of the string + elif hold["stop"] is not None: + if c == "\\": + self.str_hold_contents(hold, append=hold["stop"] + c) + hold["stop"] = None + elif c == hold["start"][0]: + hold["stop"] += c + elif len(hold["stop"]) > len(hold["start"]): + raise self.make_err(CoconutSyntaxError, "invalid number of closing " + repr(hold["start"][0]) + "s", inputstring, i, reformat=False) + elif hold["stop"] == hold["start"]: + done = True + rerun = True + else: + self.str_hold_contents(hold, append=hold["stop"] + c) + hold["stop"] = None + + # if we might be at the start of an f string expr + elif hold.get("saw_brace", False): + internal_assert(is_f, "saw_brace should only be for f string holds, not", hold) + hold["saw_brace"] = False + if c == "{": + self.str_hold_contents(hold, append=c) + elif c == "}": + raise self.make_err(CoconutSyntaxError, "empty expression in format string", inputstring, i, reformat=False) + else: + hold["in_expr"] = True + hold["exprs"].append("") + rerun = True + + elif count_end(self.str_hold_contents(hold), "\\") % 2 == 1: + self.str_hold_contents(hold, append=c) + elif c == hold["start"]: + done = True + elif c == hold["start"][0]: + hold["stop"] = c + elif is_f and c == "{": + hold["saw_brace"] = True + self.str_hold_contents(hold, append=c) + else: + self.str_hold_contents(hold, append=c) + + if rerun: + i -= 1 
+ + # wrap the string if it's complete + if done: + if is_f: + # handle dangling detections + if hold["saw_brace"]: + raise self.make_err(CoconutSyntaxError, "format string ends with unescaped brace (escape by doubling to '{{')", inputstring, i, reformat=False) + if hold["in_expr"]: + raise self.make_err(CoconutSyntaxError, "imbalanced braces in format string (escape braces by doubling to '{{' and '}}')", inputstring, i, reformat=False) + out.append(self.wrap_str_hold(hold)) + hold = None + # add a line skip if c is inside the string (not done) and we wont be seeing this c again (not rerun) + elif not rerun and c == "\n": + if not hold.get("in_expr", False) and len(hold["start"]) == 1: + raise self.make_err(CoconutSyntaxError, "linebreak in non-multi-line string", inputstring, i, reformat=False) + skips = addskip(skips, self.adjust(lineno(i, inputstring))) + elif found is not None: + + # determine if we're at the start of a string if c == found[0] and len(found) < 3: found += c elif len(found) == 1: # found == "_" - hold = ["", found, None] # [_contents, _start, _stop] + hold = { + "start": found, + "stop": None, + "contents": "" + } found = None - x -= 1 + i -= 1 elif len(found) == 2: # found == "__" - out.append(self.wrap_str("", found[0], False)) + # empty string; will be wrapped immediately below + hold = { + "start": found[0], + "stop": found[-1], + } found = None - x -= 1 + i -= 1 else: # found == "___" internal_assert(len(found) == 3, "invalid number of string starts", found) - hold = ["", found, None] # [_contents, _start, _stop] + hold = { + "start": found, + "stop": None, + } found = None - x -= 1 + i -= 1 + + # start the string hold if we're at the start of a string + if hold is not None: + is_f = False + j = i - len(hold["start"]) + while j >= 0: + prev_c = inputstring[j] + if prev_c == "f": + is_f = True + break + elif prev_c != "r": + break + j -= 1 + if is_f: + hold.update({ + "type": "f string", + "str_parts": [""], + "exprs": [], + "saw_brace": 
False, + "in_expr": False, + "paren_level": 0, + }) + else: + hold.update({ + "type": "string", + "contents": "", + }) + if hold["stop"]: # empty string; wrap immediately + out.append(self.wrap_str_hold(hold)) + hold = None + elif c == "#": - hold = [""] # [_comment] - elif c in hold_chars: + hold = { + "type": "comment", + "comment": "", + } + elif c in str_chars: found = c else: out.append(c) - x += 1 + i += 1 if hold is not None or found is not None: - raise self.make_err(CoconutSyntaxError, "unclosed string", inputstring, x, reformat=False) + raise self.make_err(CoconutSyntaxError, "unclosed string", inputstring, i, reformat=False) self.set_skips(skips) return "".join(out) @@ -1473,7 +1697,7 @@ def ind_proc(self, inputstring, **kwargs): open_char, _, open_col_ind, _, open_line_id = opens.pop() if c != close_char_for(open_char): if open_line_id is line_id: - err_kwargs = {"loc": open_col_ind, "endpoint": i + 1} + err_kwargs = {"loc": open_col_ind, "endpoint": i} else: err_kwargs = {"loc": i} raise self.make_err( @@ -1603,7 +1827,7 @@ def endline_repl(self, inputstring, reformatting=False, ignore_errors=False, **k # add comments based on source line number src_ln = self.adjust(ln) if not reformatting or has_wrapped_ln: - line += self.comments.get(src_ln, "") + line += " ".join(self.comments[src_ln]) if not reformatting and line.rstrip() and not line.lstrip().startswith("#"): line += self.ln_comment(src_ln) @@ -1656,7 +1880,7 @@ def base_passthrough_repl(self, inputstring, wrap_char, ignore_errors=False, **k return "".join(out) - def str_repl(self, inputstring, ignore_errors=False, **kwargs): + def str_repl(self, inputstring, reformatting=False, ignore_errors=False, **kwargs): """Add back strings and comments.""" out = [] comment = None @@ -1682,7 +1906,7 @@ def str_repl(self, inputstring, ignore_errors=False, **kwargs): if c is not None and c in nums: string += c elif c == unwrapper and string: - text, strchar = self.get_ref("str", string) + text, strchar = 
self.get_str_ref(string, reformatting) out += [strchar, text, strchar] string = None else: @@ -1722,7 +1946,7 @@ def split_docstring(self, block): return first_line, rest_of_lines return None, block - def tre_return(self, func_name, func_args, func_store, mock_var=None): + def tre_return_grammar(self, func_name, func_args, func_store, mock_var=None): """Generate grammar element that matches a string which is just a TRE return statement.""" def tre_return_handle(loc, tokens): args = ", ".join(tokens) @@ -1741,7 +1965,7 @@ def tre_return_handle(loc, tokens): else: tre_recurse = tuple_str_of_str(func_args) + " = " + mock_var + "(" + args + ")" + "\ncontinue" - tre_check_var = self.get_temp_var("tre_check") + tre_check_var = self.get_temp_var("tre_check", loc) return handle_indentation( """ try: @@ -1762,8 +1986,9 @@ def tre_return_handle(loc, tokens): tco_recurse=tco_recurse, type_ignore=self.type_ignore_comment(), ) + self.tre_func_name <<= base_keyword(func_name).suppress() return attach( - self.get_tre_return_grammar(func_name), + self.tre_return, tre_return_handle, greedy=True, ) @@ -1872,8 +2097,9 @@ def transform_returns(self, original, loc, raw_lines, tre_return_grammar=None, i to_return = "(" + to_return + ")" # only use trollius Return when trollius is imported if is_async and self.target_info < (3, 4): - ret_err = "_coconut.asyncio.Return" - else: + ret_err = "_coconut.asyncio_Return" + # for both coroutines and generators, use StopIteration if return isn't supported + elif self.target_info < (3, 3): ret_err = "_coconut.StopIteration" # warn about Python 3.7 incompatibility on any target with Python 3 support if not self.target.startswith("2"): @@ -1884,7 +2110,10 @@ def transform_returns(self, original, loc, raw_lines, tre_return_grammar=None, i original, loc, ), ) - line = indent + "raise " + ret_err + "(" + to_return + ")" + comment + dedent + else: + ret_err = None + if ret_err is not None: + line = indent + "raise " + ret_err + "(" + to_return + ")" + 
comment + dedent # handle async generator yields if is_async and is_gen and self.target_info < (3, 6): @@ -2004,7 +2233,7 @@ def proc_funcdef(self, original, loc, decorators, funcdef, is_async, in_method, undotted_name = None if func_name is not None and "." in func_name: undotted_name = func_name.rsplit(".", 1)[-1] - def_name = self.get_temp_var(undotted_name) + def_name = self.get_temp_var("dotted_" + undotted_name, loc) # detect pattern-matching functions is_match_func = func_paramdef == match_func_paramdef @@ -2014,7 +2243,7 @@ def proc_funcdef(self, original, loc, decorators, funcdef, is_async, in_method, if func_name is None: raise CoconutInternalException("could not find name in addpattern function definition", def_stmt) # binds most tightly, except for TCO - addpattern_decorator = self.get_temp_var("addpattern") + addpattern_decorator = self.get_temp_var("addpattern", loc) out.append( handle_indentation( """ @@ -2022,7 +2251,7 @@ def proc_funcdef(self, original, loc, decorators, funcdef, is_async, in_method, {addpattern_decorator} = _coconut_addpattern({func_name}) {type_ignore} except _coconut.NameError: {addpattern_decorator} = lambda f: f - """, + """, add_newline=True, ).format( func_name=func_name, @@ -2044,6 +2273,7 @@ def proc_funcdef(self, original, loc, decorators, funcdef, is_async, in_method, # handle async functions if is_async: + force_gen = False if not self.target: raise self.make_err( CoconutTargetError, @@ -2064,8 +2294,18 @@ def proc_funcdef(self, original, loc, decorators, funcdef, is_async, in_method, ) else: decorators += "@_coconut.asyncio.coroutine\n" + # raise StopIteration/Return will only work if we ensure it's a generator + force_gen = True func_code, _, _ = self.transform_returns(original, loc, raw_lines, is_async=True, is_gen=is_gen) + if force_gen: + func_code += "\n" + handle_indentation( + """ +if False: + yield + """, + extra_indent=1, + ) # handle normal functions else: @@ -2078,11 +2318,11 @@ def proc_funcdef(self, 
original, loc, decorators, funcdef, is_async, in_method, ) if attempt_tre: if func_args and func_args != func_paramdef: - mock_var = self.get_temp_var("mock") + mock_var = self.get_temp_var("mock", loc) else: mock_var = None - func_store = self.get_temp_var("recursive_func") - tre_return_grammar = self.tre_return(func_name, func_args, func_store, mock_var) + func_store = self.get_temp_var("recursive_func", loc) + tre_return_grammar = self.tre_return_grammar(func_name, func_args, func_store, mock_var) else: mock_var = func_store = tre_return_grammar = None @@ -2194,7 +2434,7 @@ def {mock_var}({mock_paramdef}): func_code=func_code, func_name=func_name, undotted_name=undotted_name, - temp_var=self.get_temp_var("qualname"), + temp_var=self.get_temp_var("qualname", loc), ), ) # decorating the function must come after __name__ has been set, @@ -2208,7 +2448,7 @@ def {mock_var}({mock_paramdef}): # handle copyclosure functions if copyclosure: - vars_var = self.get_temp_var("func_vars") + vars_var = self.get_temp_var("func_vars", loc) func_from_vars = vars_var + '["' + def_name + '"]' # for dotted copyclosure function definition, decoration was deferred until now if decorators: @@ -2645,11 +2885,11 @@ def set_moduledoc(self, tokens): self.docstring = self.reformat(moduledoc, ignore_errors=False) + "\n\n" return endline - def yield_from_handle(self, tokens): + def yield_from_handle(self, loc, tokens): """Process Python 3.3 yield from.""" expr, = tokens if self.target_info < (3, 3): - ret_val_name = self.get_temp_var("yield_from") + ret_val_name = self.get_temp_var("yield_from_return", loc) self.add_code_before[ret_val_name] = handle_indentation( ''' {yield_from_var} = _coconut.iter({expr}) @@ -2662,8 +2902,8 @@ def yield_from_handle(self, tokens): ''', ).format( expr=expr, - yield_from_var=self.get_temp_var("yield_from"), - yield_err_var=self.get_temp_var("yield_err"), + yield_from_var=self.get_temp_var("yield_from", loc), + yield_err_var=self.get_temp_var("yield_err", loc), 
ret_val_name=ret_val_name, ) return ret_val_name @@ -2687,10 +2927,7 @@ def comment_handle(self, original, loc, tokens): """Store comment in comments.""" comment_marker, = tokens ln = self.adjust(lineno(loc, original)) - if ln in self.comments: - self.comments[ln] += " " + comment_marker - else: - self.comments[ln] = comment_marker + self.comments[ln].add(comment_marker) return "" def kwd_augassign_handle(self, original, loc, tokens): @@ -2727,7 +2964,7 @@ def augassign_stmt_handle(self, original, loc, tokens): elif op == "??=": return name + " = " + item + " if " + name + " is None else " + name elif op == "::=": - ichain_var = self.get_temp_var("lazy_chain") + ichain_var = self.get_temp_var("lazy_chain", loc) # this is necessary to prevent a segfault caused by self-reference return ( ichain_var + " = " + name + "\n" @@ -2802,7 +3039,7 @@ def match_datadef_handle(self, original, loc, tokens): else: raise CoconutInternalException("invalid pattern-matching tokens in data", match_tokens) - check_var = self.get_temp_var("match_check") + check_var = self.get_temp_var("match_check", loc) matcher = self.get_matcher(original, loc, check_var, name_list=[]) pos_only_args, req_args, default_args, star_arg, kwd_only_args, dubstar_arg = split_args_list(matches, loc) @@ -3103,7 +3340,7 @@ def anon_namedtuple_handle(self, tokens): namedtuple_call = self.make_namedtuple_call(None, names, types) return namedtuple_call + "(" + ", ".join(items) + ")" - def single_import(self, path, imp_as, type_ignore=False): + def single_import(self, loc, path, imp_as, type_ignore=False): """Generate import statements from a fully qualified import and the name to bind it to.""" out = [] @@ -3122,7 +3359,7 @@ def single_import(self, path, imp_as, type_ignore=False): imp, imp_as = imp_as, None if imp_as is not None and "." 
in imp_as: - import_as_var = self.get_temp_var("import") + import_as_var = self.get_temp_var("import", loc) out.append(import_stmt(imp_from, imp, import_as_var)) fake_mods = imp_as.split(".") for i in range(1, len(fake_mods)): @@ -3146,7 +3383,7 @@ def single_import(self, path, imp_as, type_ignore=False): return out - def universal_import(self, imports, imp_from=None): + def universal_import(self, loc, imports, imp_from=None): """Generate code for a universal import of imports from imp_from. imports = [[imp1], [imp2, as], ...]""" importmap = [] # [((imp | old_imp, imp, version_check), imp_as), ...] @@ -3193,7 +3430,7 @@ def universal_import(self, imports, imp_from=None): stmts = [] for paths, imp_as, type_ignore in importmap: if len(paths) == 1: - more_stmts = self.single_import(paths[0], imp_as) + more_stmts = self.single_import(loc, paths[0], imp_as) stmts.extend(more_stmts) else: old_imp, new_imp, version_check = paths @@ -3212,11 +3449,11 @@ def universal_import(self, imports, imp_from=None): if {store_var} is not _coconut_sentinel: sys = {store_var} """).format( - store_var=self.get_temp_var("sys"), + store_var=self.get_temp_var("sys", loc), version_check=version_check, - new_imp="\n".join(self.single_import(new_imp, imp_as)), + new_imp="\n".join(self.single_import(loc, new_imp, imp_as)), # should only type: ignore the old import - old_imp="\n".join(self.single_import(old_imp, imp_as, type_ignore=type_ignore)), + old_imp="\n".join(self.single_import(loc, old_imp, imp_as, type_ignore=type_ignore)), type_ignore=self.type_ignore_comment(), ), ) @@ -3237,13 +3474,13 @@ def import_handle(self, original, loc, tokens): if imp_from == "*" or imp_from is None and "*" in imports: if not (len(imports) == 1 and imports[0] == "*"): raise self.make_err(CoconutSyntaxError, "only [from *] import * allowed, not from * import name", original, loc) - logger.warn_err(self.make_err(CoconutSyntaxWarning, "[from *] import * is a Coconut Easter egg and should not be used in 
production code", original, loc)) + self.syntax_warning("[from *] import * is a Coconut Easter egg and should not be used in production code", original, loc) return special_starred_import_handle(imp_all=bool(imp_from)) for imp_name in imported_names(imports): self.unused_imports[imp_name].append(loc) - return self.universal_import(imports, imp_from=imp_from) + return self.universal_import(loc, imports, imp_from=imp_from) - def complex_raise_stmt_handle(self, tokens): + def complex_raise_stmt_handle(self, loc, tokens): """Process Python 3 raise from statement.""" raise_expr, from_expr = tokens if self.target.startswith("3"): @@ -3256,7 +3493,7 @@ def complex_raise_stmt_handle(self, tokens): raise {raise_from_var} ''', ).format( - raise_from_var=self.get_temp_var("raise_from"), + raise_from_var=self.get_temp_var("raise_from", loc), raise_expr=raise_expr, from_expr=from_expr, ) @@ -3305,9 +3542,9 @@ def full_match_handle(self, original, loc, tokens, match_to_var=None, match_chec raise CoconutInternalException("invalid match type", match_type) if match_to_var is None: - match_to_var = self.get_temp_var("match_to") + match_to_var = self.get_temp_var("match_to", loc) if match_check_var is None: - match_check_var = self.get_temp_var("match_check") + match_check_var = self.get_temp_var("match_check", loc) matching = self.get_matcher(original, loc, match_check_var) matching.match(matches, match_to_var) @@ -3327,8 +3564,8 @@ def full_match_handle(self, original, loc, tokens, match_to_var=None, match_chec def destructuring_stmt_handle(self, original, loc, tokens): """Process match assign blocks.""" matches, item = tokens - match_to_var = self.get_temp_var("match_to") - match_check_var = self.get_temp_var("match_check") + match_to_var = self.get_temp_var("match_to", loc) + match_check_var = self.get_temp_var("match_check", loc) out = self.full_match_handle(original, loc, [matches, "in", item, None], match_to_var, match_check_var) out += self.pattern_error(original, loc, 
match_to_var, match_check_var) return out @@ -3343,7 +3580,7 @@ def name_match_funcdef_handle(self, original, loc, tokens): else: raise CoconutInternalException("invalid match function definition tokens", tokens) - check_var = self.get_temp_var("match_check") + check_var = self.get_temp_var("match_check", loc) matcher = self.get_matcher(original, loc, check_var) pos_only_args, req_args, default_args, star_arg, kwd_only_args, dubstar_arg = split_args_list(matches, loc) @@ -3422,7 +3659,11 @@ def set_letter_literal_handle(self, tokens): def stmt_lambdef_handle(self, original, loc, tokens): """Process multi-line lambdef statements.""" - got_kwds, params, stmts_toks, followed_by = tokens + if len(tokens) == 4: + got_kwds, params, stmts_toks, followed_by = tokens + typedef = None + else: + got_kwds, params, typedef, stmts_toks, followed_by = tokens if followed_by == ",": self.strict_err_or_warn("found statement lambda followed by comma; this isn't recommended as it can be unclear whether the comma is inside or outside the lambda (just wrap the lambda in parentheses)", original, loc) @@ -3451,19 +3692,24 @@ def stmt_lambdef_handle(self, original, loc, tokens): else: raise CoconutInternalException("invalid statement lambda body tokens", stmts_toks) - name = self.get_temp_var("lambda") + name = self.get_temp_var("lambda", loc) body = openindent + "\n".join(stmts) + closeindent + if typedef is None: + colon = ":" + else: + colon = self.typedef_handle([typedef]) if isinstance(params, str): decorators = "" - funcdef = "def " + name + params + ":\n" + body + funcdef = "def " + name + params + colon + "\n" + body else: match_tokens = [name] + list(params) before_colon, after_docstring = self.name_match_funcdef_handle(original, loc, match_tokens) decorators = "@_coconut_mark_as_match\n" funcdef = ( before_colon - + ":\n" + + colon + + "\n" + after_docstring + body ) @@ -3499,7 +3745,7 @@ def await_expr_handle(self, original, loc, tokens): return "await " + await_expr elif 
self.target_info >= (3, 3): # we have to wrap the yield here so it doesn't cause the function to be detected as an async generator - return self.wrap_passthrough("(yield from " + await_expr + ")", early=True) + return "(" + self.wrap_passthrough("yield from") + " " + await_expr + ")" else: # this yield is fine because we can detect the _coconut.asyncio.From return "(yield _coconut.asyncio.From(" + await_expr + "))" @@ -3666,7 +3912,7 @@ def type_param_handle(self, original, loc, tokens): else: if name in typevar_info["all_typevars"]: raise CoconutDeferredSyntaxError("type variable {name!r} already defined".format(name=name), loc) - temp_name = self.get_temp_var("typevar_" + name) + temp_name = self.get_temp_var("typevar_" + name, name_loc) typevar_info["all_typevars"][name] = temp_name typevar_info["new_typevars"].append((TypeVarFunc, temp_name)) typevar_info["typevar_locs"][name] = name_loc @@ -3777,8 +4023,8 @@ def cases_stmt_handle(self, original, loc, tokens): if block_kwd == "case": self.strict_err_or_warn("deprecated case keyword at top level in case ...: match ...: block (use Python 3.10 match ...: case ...: syntax instead)", original, loc) - check_var = self.get_temp_var("case_match_check") - match_var = self.get_temp_var("case_match_to") + check_var = self.get_temp_var("case_match_check", loc) + match_var = self.get_temp_var("case_match_to", loc) out = ( match_var + " = " + item + "\n" @@ -3793,7 +4039,7 @@ def cases_stmt_handle(self, original, loc, tokens): out += "if not " + check_var + default return out - def f_string_handle(self, loc, tokens): + def f_string_handle(self, original, loc, tokens): """Process Python 3.6 format strings.""" string, = tokens @@ -3806,57 +4052,12 @@ def f_string_handle(self, loc, tokens): internal_assert(string.startswith(strwrapper) and string.endswith(unwrapper), "invalid f string item", string) string = string[1:-1] - # get text - old_text, strchar = self.get_ref("str", string) + # get f string parts + strchar, 
string_parts, exprs = self.get_ref("f_str", string) - # separate expressions - string_parts = [""] - exprs = [] - saw_brace = False - in_expr = False - paren_level = 0 - i = 0 - while i < len(old_text): - c = old_text[i] - if saw_brace: - saw_brace = False - if c == "{": - string_parts[-1] += c - elif c == "}": - raise CoconutDeferredSyntaxError("empty expression in format string", loc) - else: - in_expr = True - exprs.append("") - i -= 1 - elif in_expr: - remaining_text = old_text[i:] - str_start, str_stop = parse_where(self.string_start, remaining_text) - if str_start is not None: # str_start >= 0; if > 0 means there is whitespace before the string - exprs[-1] += remaining_text[:str_stop] - i += str_stop - 1 - elif paren_level < 0: - paren_level += paren_change(c) - exprs[-1] += c - elif paren_level > 0: - raise CoconutDeferredSyntaxError("imbalanced parentheses in format string expression", loc) - elif match_in(self.end_f_str_expr, remaining_text): - in_expr = False - string_parts.append(c) - else: - paren_level += paren_change(c) - exprs[-1] += c - elif c == "{": - saw_brace = True - string_parts[-1] += c - else: - string_parts[-1] += c - i += 1 - - # handle dangling detections - if saw_brace: - raise CoconutDeferredSyntaxError("format string ends with unescaped brace (escape by doubling to '{{')", loc) - if in_expr: - raise CoconutDeferredSyntaxError("imbalanced braces in format string (escape braces by doubling to '{{' and '}}')", loc) + # warn if there are no exprs + if not exprs: + self.strict_err_or_warn("f-string with no expressions", original, loc) # handle Python 3.8 f string = specifier for i, expr in enumerate(exprs): @@ -3870,29 +4071,31 @@ def f_string_handle(self, loc, tokens): compiled_exprs = [] for co_expr in exprs: try: - py_expr = self.inner_parse_eval(co_expr) + py_expr = self.inner_parse_eval(original, loc, co_expr) except ParseBaseException: raise CoconutDeferredSyntaxError("parsing failed for format string expression: " + co_expr, loc) - 
if "\n" in py_expr: - raise CoconutDeferredSyntaxError("invalid expression in format string: " + co_expr, loc) + if not does_parse(self.no_unquoted_newlines, py_expr): + raise CoconutDeferredSyntaxError("illegal complex expression in format string: " + co_expr, loc) compiled_exprs.append(py_expr) # reconstitute string - if self.target_info >= (3, 6): + # (though f strings are supported on 3.6+, nested strings with the same strchars are only + # supported on 3.12+, so we should only use the literal syntax there) + if self.target_info >= (3, 12): new_text = interleaved_join(string_parts, compiled_exprs) - return "f" + ("r" if raw else "") + self.wrap_str(new_text, strchar) + else: names = [format_var + "_" + str(i) for i in range(len(compiled_exprs))] new_text = interleaved_join(string_parts, names) # generate format call return ("r" if raw else "") + self.wrap_str(new_text, strchar) + ".format(" + ", ".join( - name + "=(" + expr + ")" + name + "=(" + self.wrap_passthrough(expr) + ")" for name, expr in zip(names, compiled_exprs) ) + ")" - def decorators_handle(self, tokens): + def decorators_handle(self, loc, tokens): """Process decorators.""" defs = [] decorators = [] @@ -3903,7 +4106,7 @@ def decorators_handle(self, tokens): if self.target_info >= (3, 9): decorators.append("@" + tok[0]) else: - varname = self.get_temp_var("decorator") + varname = self.get_temp_var("decorator", loc) defs.append(varname + " = " + tok[0]) decorators.append("@" + varname + "\n") else: @@ -4020,8 +4223,8 @@ def base_match_for_stmt_handle(self, original, loc, tokens): """Handle match for loops.""" matches, item, body = tokens - match_to_var = self.get_temp_var("match_to") - match_check_var = self.get_temp_var("match_check") + match_to_var = self.get_temp_var("match_to", loc) + match_check_var = self.get_temp_var("match_check", loc) matcher = self.get_matcher(original, loc, match_check_var) matcher.match(matches, match_to_var) @@ -4059,7 +4262,7 @@ def async_with_for_stmt_handle(self, 
original, loc, tokens): is_match = False loop_vars, iter_item, body = inner_toks - temp_var = self.get_temp_var("async_with_for") + temp_var = self.get_temp_var("async_with_for", loc) if is_match: loop = "async " + self.base_match_for_stmt_handle( @@ -4148,11 +4351,11 @@ def keyword_funcdef_handle(self, tokens): funcdef = kwd + " " + funcdef return funcdef - def protocol_intersect_expr_handle(self, tokens): + def protocol_intersect_expr_handle(self, loc, tokens): if len(tokens) == 1: return tokens[0] internal_assert(len(tokens) >= 2, "invalid protocol intersection tokens", tokens) - protocol_var = self.get_temp_var("protocol_intersection") + protocol_var = self.get_temp_var("protocol_intersection", loc) self.add_code_before[protocol_var] = handle_indentation( ''' class {protocol_var}({tokens}, _coconut.typing.Protocol): pass @@ -4170,7 +4373,7 @@ class {protocol_var}({tokens}, _coconut.typing.Protocol): pass # CHECKING HANDLERS: # ----------------------------------------------------------------------------------------------------------------------- - def check_strict(self, name, original, loc, tokens, only_warn=False, always_warn=False): + def check_strict(self, name, original, loc, tokens=(None,), only_warn=False, always_warn=False): """Check that syntax meets --strict requirements.""" self.internal_assert(len(tokens) == 1, original, loc, "invalid " + name + " tokens", tokens) message = "found " + name @@ -4179,13 +4382,13 @@ def check_strict(self, name, original, loc, tokens, only_warn=False, always_warn if only_warn: if not always_warn: kwargs["extra"] = "remove --strict to dismiss" - logger.warn_err(self.make_err(CoconutSyntaxWarning, message, original, loc, **kwargs)) + self.syntax_warning(message, original, loc, **kwargs) else: if always_warn: kwargs["extra"] = "remove --strict to downgrade to a warning" raise self.make_err(CoconutStyleError, message, original, loc, **kwargs) elif always_warn: - logger.warn_err(self.make_err(CoconutSyntaxWarning, message, 
original, loc)) + self.syntax_warning(message, original, loc) return tokens[0] def lambdef_check(self, original, loc, tokens): @@ -4459,10 +4662,12 @@ def parse_xonsh(self, inputstring, **kwargs): """Parse xonsh code.""" return self.parse(inputstring, self.xonsh_parser, {"strip": True}, {"header": "none", "initial": "none"}, streamline=False, **kwargs) - def warm_up(self): + def warm_up(self, force=False, enable_incremental_mode=False): """Warm up the compiler by streamlining the file_parser.""" - self.streamline(self.file_parser) - self.streamline(self.eval_parser) + self.streamline(self.file_parser, force=force) + self.streamline(self.eval_parser, force=force) + if enable_incremental_mode: + enable_incremental_parsing() # end: ENDPOINTS diff --git a/coconut/compiler/grammar.py b/coconut/compiler/grammar.py index e5943da66..bbd3bd902 100644 --- a/coconut/compiler/grammar.py +++ b/coconut/compiler/grammar.py @@ -47,7 +47,7 @@ originalTextFor, nestedExpr, FollowedBy, - quotedString, + python_quoted_string, restOfLine, ) @@ -62,7 +62,7 @@ CoconutDeferredSyntaxError, ) from coconut.terminal import ( - trace, + trace, # NOQA internal_assert, ) from coconut.constants import ( @@ -624,6 +624,7 @@ class Grammar(object): star = ~dubstar + Literal("*") at = Literal("@") arrow = Literal("->") | fixto(Literal("\u2192"), "->") + unsafe_fat_arrow = Literal("=>") | fixto(Literal("\u21d2"), "=>") colon_eq = Literal(":=") unsafe_dubcolon = Literal("::") unsafe_colon = Literal(":") @@ -632,7 +633,7 @@ class Grammar(object): semicolon = Literal(";") | invalid_syntax("\u037e", "invalid Greek question mark instead of semicolon", greedy=True) multisemicolon = combine(OneOrMore(semicolon)) eq = Literal("==") - equals = ~eq + Literal("=") + equals = ~eq + ~Literal("=>") + Literal("=") lbrack = Literal("[") rbrack = Literal("]") lbrace = Literal("{") @@ -804,7 +805,7 @@ class Grammar(object): integer + dot + Optional(integer) | Optional(integer) + dot + integer ) | integer - sci_e = 
combine(caseless_literal("e") + Optional(plus | neg_minus)) + sci_e = combine((caseless_literal("e") | fixto(Literal("\u23e8"), "e")) + Optional(plus | neg_minus)) numitem = ~(Literal("0") + Word(nums + "_", exact=1)) + combine(basenum + Optional(sci_e + integer)) imag_num = combine(numitem + imag_j) bin_num = combine(caseless_literal("0b") + Optional(underscore.suppress()) + binint) @@ -942,9 +943,9 @@ class Grammar(object): negable_atom_item = condense(Optional(neg_minus) + atom_item) - testlist = trace(itemlist(test, comma, suppress_trailing=False)) - testlist_has_comma = trace(addspace(OneOrMore(condense(test + comma)) + Optional(test))) - new_namedexpr_testlist_has_comma = trace(addspace(OneOrMore(condense(new_namedexpr_test + comma)) + Optional(test))) + testlist = itemlist(test, comma, suppress_trailing=False) + testlist_has_comma = addspace(OneOrMore(condense(test + comma)) + Optional(test)) + new_namedexpr_testlist_has_comma = addspace(OneOrMore(condense(new_namedexpr_test + comma)) + Optional(test)) testlist_star_expr = Forward() testlist_star_expr_ref = tokenlist(Group(test) | star_expr, comma, suppress=False) @@ -1050,7 +1051,7 @@ class Grammar(object): | labeled_group(test_no_infix + partialable_op + dot.suppress(), "left partial") ) partial_op_item = attach(partial_op_item_tokens, partial_op_item_handle) - op_item = trace( + op_item = ( typedef_op_item | partial_op_item | base_op_item @@ -1083,7 +1084,7 @@ class Grammar(object): just_op = just_star | just_slash match = Forward() - args_list = trace( + args_list = ( ~just_op + addspace( ZeroOrMore( @@ -1098,7 +1099,7 @@ class Grammar(object): ) ) parameters = condense(lparen + args_list + rparen) - set_args_list = trace( + set_args_list = ( ~just_op + addspace( ZeroOrMore( @@ -1112,21 +1113,17 @@ class Grammar(object): ) ) ) - match_args_list = trace( - Group( - Optional( - tokenlist( - Group( - (star | dubstar) + match - | star # not star_sep because pattern-matching can handle star separators on any 
Python version - | slash # not slash_sep as above - | match + Optional(equals.suppress() + test) - ), - comma, - ) - ) + match_args_list = Group(Optional( + tokenlist( + Group( + (star | dubstar) + match + | star # not star_sep because pattern-matching can handle star separators on any Python version + | slash # not slash_sep as above + | match + Optional(equals.suppress() + test) + ), + comma, ) - ) + )) call_item = ( dubstar + test @@ -1231,7 +1228,7 @@ class Grammar(object): f_string_atom_ref = ZeroOrMore(nonbf_string) + f_string + ZeroOrMore(nonb_string) keyword_atom = any_keyword_in(const_vars) - passthrough_atom = trace(addspace(OneOrMore(passthrough_item))) + passthrough_atom = addspace(OneOrMore(passthrough_item)) set_literal = Forward() set_letter_literal = Forward() @@ -1251,7 +1248,7 @@ class Grammar(object): lazy_items = Optional(tokenlist(test, comma)) lazy_list = attach(lbanana.suppress() + lazy_items + rbanana.suppress(), lazy_list_handle) - known_atom = trace( + known_atom = ( keyword_atom | string_atom | num_atom @@ -1351,7 +1348,7 @@ class Grammar(object): typed_assign_stmt = Forward() typed_assign_stmt_ref = simple_assign + colon.suppress() + typedef_test + Optional(equals.suppress() + test_expr) - basic_stmt = trace(addspace(ZeroOrMore(assignlist + equals) + test_expr)) + basic_stmt = addspace(ZeroOrMore(assignlist + equals) + test_expr) type_param = Forward() type_param_bound_op = lt_colon | colon | le @@ -1390,7 +1387,9 @@ class Grammar(object): + Optional(power_in_impl_call) ) impl_call = Forward() - impl_call_ref = ( + # we need to disable this inside the xonsh parser + impl_call_ref = Forward() + unsafe_impl_call_ref = ( impl_call_item + OneOrMore(impl_call_arg) ) @@ -1499,13 +1498,13 @@ class Grammar(object): # expr must come at end | labeled_group(comp_pipe_expr, "expr") + pipe_op ) - pipe_augassign_item = trace( + pipe_augassign_item = ( # should match pipe_item but with pipe_op -> end_simple_stmt_item and no expr 
labeled_group(keyword("await"), "await") + end_simple_stmt_item | labeled_group(attrgetter_atom_tokens, "attrgetter") + end_simple_stmt_item | labeled_group(itemgetter_atom_tokens, "itemgetter") + end_simple_stmt_item | labeled_group(partial_atom_tokens, "partial") + end_simple_stmt_item - | labeled_group(partial_op_atom_tokens, "op partial") + end_simple_stmt_item, + | labeled_group(partial_op_atom_tokens, "op partial") + end_simple_stmt_item ) last_pipe_item = Group( lambdef("expr") @@ -1537,8 +1536,8 @@ class Grammar(object): not_test = addspace(ZeroOrMore(keyword("not")) + comparison) # we condense "and" and "or" into one, since Python handles the precedence, not Coconut # and_test = exprlist(not_test, keyword("and")) - # test_item = trace(exprlist(and_test, keyword("or"))) - test_item = trace(exprlist(not_test, keyword("and") | keyword("or"))) + # test_item = exprlist(and_test, keyword("or")) + test_item = exprlist(not_test, keyword("and") | keyword("or")) simple_stmt_item = Forward() unsafe_simple_stmt_item = Forward() @@ -1548,13 +1547,18 @@ class Grammar(object): nocolon_suite = Forward() base_suite = Forward() - classic_lambdef = Forward() - classic_lambdef_params = maybeparens(lparen, set_args_list, rparen) - new_lambdef_params = lparen.suppress() + set_args_list + rparen.suppress() | setname - classic_lambdef_ref = addspace(keyword("lambda") + condense(classic_lambdef_params + colon)) - new_lambdef = attach(new_lambdef_params + arrow.suppress(), lambdef_handle) - implicit_lambdef = fixto(arrow, "lambda _=None:") - lambdef_base = classic_lambdef | new_lambdef | implicit_lambdef + fat_arrow = Forward() + lambda_arrow = Forward() + unsafe_lambda_arrow = fat_arrow | arrow + + keyword_lambdef_params = maybeparens(lparen, set_args_list, rparen) + arrow_lambdef_params = lparen.suppress() + set_args_list + rparen.suppress() | setname + + keyword_lambdef = Forward() + keyword_lambdef_ref = addspace(keyword("lambda") + condense(keyword_lambdef_params + colon)) + 
arrow_lambdef = attach(arrow_lambdef_params + lambda_arrow.suppress(), lambdef_handle) + implicit_lambdef = fixto(lambda_arrow, "lambda _=None:") + lambdef_base = keyword_lambdef | arrow_lambdef | implicit_lambdef stmt_lambdef = Forward() match_guard = Optional(keyword("if").suppress() + namedexpr_test) @@ -1570,14 +1574,21 @@ class Grammar(object): Group(OneOrMore(simple_stmt_item + semicolon.suppress())) + Optional(closing_stmt) | Group(ZeroOrMore(simple_stmt_item + semicolon.suppress())) + closing_stmt, ) + + no_fat_arrow_stmt_lambdef_body, _fat_arrow = disable_inside(stmt_lambdef_body, unsafe_fat_arrow) + fat_arrow <<= _fat_arrow + stmt_lambdef_suite = ( + arrow.suppress() + no_fat_arrow_stmt_lambdef_body + ~fat_arrow + | Optional(arrow.suppress() + typedef_test) + fat_arrow.suppress() + stmt_lambdef_body + ) + general_stmt_lambdef = ( Group(any_len_perm( keyword("async"), keyword("copyclosure"), )) + keyword("def").suppress() + stmt_lambdef_params - + arrow.suppress() - + stmt_lambdef_body + + stmt_lambdef_suite ) match_stmt_lambdef = ( Group(any_len_perm( @@ -1586,10 +1597,9 @@ class Grammar(object): keyword("copyclosure"), )) + keyword("def").suppress() + stmt_lambdef_match_params - + arrow.suppress() - + stmt_lambdef_body + + stmt_lambdef_suite ) - stmt_lambdef_ref = ( + stmt_lambdef_ref = trace( general_stmt_lambdef | match_stmt_lambdef ) + ( @@ -1598,7 +1608,7 @@ class Grammar(object): ) lambdef <<= addspace(lambdef_base + test) | stmt_lambdef - lambdef_no_cond = trace(addspace(lambdef_base + test_no_cond)) + lambdef_no_cond = addspace(lambdef_base + test_no_cond) typedef_callable_arg = Group( test("arg") @@ -1634,7 +1644,7 @@ class Grammar(object): unsafe_typedef_op_item = attach(base_op_item, typedef_op_item_handle) - _typedef_test, typedef_callable, _typedef_trailer, _typedef_or_expr, _typedef_tuple, _typedef_ellipsis, _typedef_op_item = disable_outside( + unsafe_typedef_test, typedef_callable, _typedef_trailer, _typedef_or_expr, _typedef_tuple, 
_typedef_ellipsis, _typedef_op_item = disable_outside( test, unsafe_typedef_callable, unsafe_typedef_trailer, @@ -1643,13 +1653,19 @@ class Grammar(object): unsafe_typedef_ellipsis, unsafe_typedef_op_item, ) - typedef_test <<= _typedef_test typedef_trailer <<= _typedef_trailer typedef_or_expr <<= _typedef_or_expr typedef_tuple <<= _typedef_tuple typedef_ellipsis <<= _typedef_ellipsis typedef_op_item <<= _typedef_op_item + _typedef_test, _lambda_arrow = disable_inside( + unsafe_typedef_test, + unsafe_lambda_arrow, + ) + typedef_test <<= _typedef_test + lambda_arrow <<= _lambda_arrow + alt_ternary_expr = attach(keyword("if").suppress() + test_item + keyword("then").suppress() + test_item + keyword("else").suppress() + test, alt_ternary_handle) test <<= ( typedef_callable @@ -1863,7 +1879,7 @@ class Grammar(object): | lparen.suppress() + matchlist_star + rparen.suppress() )("star") - base_match = trace(Group( + base_match = Group( (negable_atom_item + arrow.suppress() + match)("view") | match_string | match_const("const") @@ -1888,7 +1904,7 @@ class Grammar(object): | (keyword("class").suppress() + dotted_refname + lparen.suppress() + matchlist_data + rparen.suppress())("class") | (dotted_refname + lparen.suppress() + matchlist_data + rparen.suppress())("data_or_class") | Optional(keyword("as").suppress()) + setname("var") - )) + ) matchlist_isinstance = base_match + OneOrMore(keyword("is").suppress() + negable_atom_item) isinstance_match = labeled_group(matchlist_isinstance, "isinstance_is") | base_match @@ -1908,7 +1924,7 @@ class Grammar(object): matchlist_kwd_or = and_match + OneOrMore(keyword("or").suppress() + and_match) kwd_or_match = labeled_group(matchlist_kwd_or, "or") | and_match - match <<= trace(kwd_or_match) + match <<= kwd_or_match many_match = ( labeled_group(matchlist_star, "star") @@ -1929,32 +1945,32 @@ class Grammar(object): + ~FollowedBy(colon + newline + indent + keyword("case")) - full_suite ) - match_stmt = trace(condense(full_match - 
Optional(else_stmt))) + match_stmt = condense(full_match - Optional(else_stmt)) destructuring_stmt = Forward() base_destructuring_stmt = Optional(keyword("match").suppress()) + many_match + equals.suppress() + test_expr destructuring_stmt_ref, match_dotted_name_const_ref = disable_inside(base_destructuring_stmt, must_be_dotted_name + ~lparen) # both syntaxes here must be kept the same except for the keywords - case_match_co_syntax = trace(Group( + case_match_co_syntax = Group( (keyword("match") | keyword("case")).suppress() + stores_loc_item + many_match + Optional(keyword("if").suppress() + namedexpr_test) - full_suite - )) + ) cases_stmt_co_syntax = ( (keyword("cases") | keyword("case")) + testlist_star_namedexpr + colon.suppress() + newline.suppress() + indent.suppress() + Group(OneOrMore(case_match_co_syntax)) + dedent.suppress() + Optional(keyword("else").suppress() + suite) ) - case_match_py_syntax = trace(Group( + case_match_py_syntax = Group( keyword("case").suppress() + stores_loc_item + many_match + Optional(keyword("if").suppress() + namedexpr_test) - full_suite - )) + ) cases_stmt_py_syntax = ( keyword("match") + testlist_star_namedexpr + colon.suppress() + newline.suppress() + indent.suppress() + Group(OneOrMore(case_match_py_syntax)) @@ -2018,22 +2034,22 @@ class Grammar(object): funcname_typeparams = Forward() funcname_typeparams_ref = dotted_setname + Optional(type_params) - name_funcdef = trace(condense(funcname_typeparams + parameters)) + name_funcdef = condense(funcname_typeparams + parameters) op_tfpdef = unsafe_typedef_default | condense(setname + Optional(default)) op_funcdef_arg = setname | condense(lparen.suppress() + op_tfpdef + rparen.suppress()) op_funcdef_name = unsafe_backtick.suppress() + funcname_typeparams + unsafe_backtick.suppress() - op_funcdef = trace(attach( + op_funcdef = attach( Group(Optional(op_funcdef_arg)) + op_funcdef_name + Group(Optional(op_funcdef_arg)), op_funcdef_handle, - )) + ) return_typedef = Forward() 
return_typedef_ref = arrow.suppress() + typedef_test end_func_colon = return_typedef + colon.suppress() | colon base_funcdef = op_funcdef | name_funcdef - funcdef = trace(addspace(keyword("def") + condense(base_funcdef + end_func_colon + nocolon_suite))) + funcdef = addspace(keyword("def") + condense(base_funcdef + end_func_colon + nocolon_suite)) name_match_funcdef = Forward() op_match_funcdef = Forward() @@ -2049,7 +2065,7 @@ class Grammar(object): )) name_match_funcdef_ref = keyword("def").suppress() + funcname_typeparams + lparen.suppress() + match_args_list + match_guard + rparen.suppress() op_match_funcdef_ref = keyword("def").suppress() + op_match_funcdef_arg + op_funcdef_name + op_match_funcdef_arg + match_guard - base_match_funcdef = trace(op_match_funcdef | name_match_funcdef) + base_match_funcdef = op_match_funcdef | name_match_funcdef func_suite = ( attach(simple_stmt, make_suite_handle) | ( @@ -2060,17 +2076,17 @@ class Grammar(object): - dedent.suppress() ) ) - def_match_funcdef = trace(attach( + def_match_funcdef = attach( base_match_funcdef + end_func_colon - func_suite, join_match_funcdef, - )) - match_def_modifiers = trace(any_len_perm( + ) + match_def_modifiers = any_len_perm( keyword("match").suppress(), # addpattern is detected later keyword("addpattern"), - )) + ) match_funcdef = addspace(match_def_modifiers + def_match_funcdef) where_stmt = attach( @@ -2100,11 +2116,11 @@ class Grammar(object): | condense(newline - indent - math_funcdef_body - dedent) ) end_func_equals = return_typedef + equals.suppress() | fixto(equals, ":") - math_funcdef = trace(attach( + math_funcdef = attach( condense(addspace(keyword("def") + base_funcdef) + end_func_equals) - math_funcdef_suite, math_funcdef_handle, - )) - math_match_funcdef = trace(addspace( + ) + math_match_funcdef = addspace( match_def_modifiers + attach( base_match_funcdef @@ -2120,7 +2136,7 @@ class Grammar(object): ), join_match_funcdef, ) - )) + ) async_stmt = Forward() async_with_for_stmt = 
Forward() @@ -2150,14 +2166,14 @@ class Grammar(object): ) async_funcdef = keyword("async").suppress() + (funcdef | math_funcdef) - async_match_funcdef = trace(addspace( + async_match_funcdef = addspace( any_len_perm( keyword("match").suppress(), # addpattern is detected later keyword("addpattern"), required=(keyword("async").suppress(),), ) + (def_match_funcdef | math_match_funcdef), - )) + ) async_keyword_normal_funcdef = Group( any_len_perm_at_least_one( @@ -2278,13 +2294,13 @@ class Grammar(object): passthrough_stmt = condense(passthrough_block - (base_suite | newline)) - simple_compound_stmt = trace( + simple_compound_stmt = ( if_stmt | try_stmt | match_stmt | passthrough_stmt ) - compound_stmt = trace( + compound_stmt = ( decoratable_class_stmt | decoratable_func_stmt | for_stmt @@ -2297,7 +2313,7 @@ class Grammar(object): ) endline_semicolon = Forward() endline_semicolon_ref = semicolon.suppress() + newline - keyword_stmt = trace( + keyword_stmt = ( flow_stmt | import_stmt | assert_stmt @@ -2336,11 +2352,11 @@ class Grammar(object): simple_suite = attach(stmt, make_suite_handle) nocolon_suite <<= base_suite | simple_suite suite <<= condense(colon + nocolon_suite) - line = trace(newline | stmt) + line = newline | stmt - single_input = trace(condense(Optional(line) - ZeroOrMore(newline))) - file_input = trace(condense(moduledoc_marker - ZeroOrMore(line))) - eval_input = trace(condense(testlist - ZeroOrMore(newline))) + single_input = condense(Optional(line) - ZeroOrMore(newline)) + file_input = condense(moduledoc_marker - ZeroOrMore(line)) + eval_input = condense(testlist - ZeroOrMore(newline)) single_parser = start_marker - single_input - end_marker file_parser = start_marker - file_input - end_marker @@ -2357,8 +2373,13 @@ class Grammar(object): + ~(lparen + rparen | lbrack + rbrack | lbrace + rbrace) + (parens | brackets | braces | unsafe_name) ) - xonsh_parser, _anything_stmt, _xonsh_command = disable_outside( + unsafe_xonsh_parser, _impl_call_ref = 
disable_inside( single_parser, + unsafe_impl_call_ref, + ) + impl_call_ref <<= _impl_call_ref + xonsh_parser, _anything_stmt, _xonsh_command = disable_outside( + unsafe_xonsh_parser, unsafe_anything_stmt, unsafe_xonsh_command, ) @@ -2393,25 +2414,23 @@ class Grammar(object): | attach(parens, strip_parens_handle) ) - def get_tre_return_grammar(self, func_name): - """The TRE return grammar is parameterized by the name of the function being optimized.""" - return ( - self.start_marker - + self.keyword("return").suppress() - + maybeparens( - self.lparen, - base_keyword(func_name).suppress() - + self.original_function_call_tokens, - self.rparen, - ) + self.end_marker - ) + tre_func_name = Forward() + tre_return = ( + start_marker + + keyword("return").suppress() + + maybeparens( + lparen, + tre_func_name + original_function_call_tokens, + rparen, + ) + end_marker + ) tco_return = attach( start_marker + keyword("return").suppress() + maybeparens( lparen, - disallow_keywords(untcoable_funcs, with_suffix=lparen) + disallow_keywords(untcoable_funcs, with_suffix="(") + condense( (unsafe_name | parens | brackets | braces | string_atom) + ZeroOrMore( @@ -2503,9 +2522,11 @@ def get_tre_return_grammar(self, func_name): ) ) - end_f_str_expr = start_marker + (bang | colon | rbrace) + end_f_str_expr = combine(start_marker + (bang | colon | rbrace)) + + string_start = start_marker + python_quoted_string - string_start = start_marker + quotedString + no_unquoted_newlines = start_marker + ZeroOrMore(python_quoted_string | ~Literal("\n") + any_char) + end_marker operator_stmt = ( start_marker @@ -2546,8 +2567,6 @@ def set_grammar_names(): for varname, val in vars(Grammar).items(): if isinstance(val, ParserElement): val.setName(varname) - if isinstance(val, Forward): - trace(val) # end: TRACING diff --git a/coconut/compiler/header.py b/coconut/compiler/header.py index 39ff27fca..3be75c8fd 100644 --- a/coconut/compiler/header.py +++ b/coconut/compiler/header.py @@ -38,6 +38,7 @@ 
self_match_types, is_data_var, data_defaults_var, + coconut_cache_dir, ) from coconut.util import ( univ_open, @@ -224,6 +225,7 @@ def process_header_args(which, use_hash, target, no_tco, strict, no_wrap): module_docstring='"""Built-in Coconut utilities."""\n\n' if which == "__coconut__" else "", __coconut__=make_py_str("__coconut__", target), _coconut_cached__coconut__=make_py_str("_coconut_cached__coconut__", target), + coconut_cache_dir=make_py_str(coconut_cache_dir, target), object="" if target.startswith("3") else "(object)", comma_object="" if target.startswith("3") else ", object", comma_slash=", /" if target_info >= (3, 8) else "", @@ -476,9 +478,11 @@ def __lt__(self, other): indent=1, newline=True, ), - assign_multiset_views=pycondition( + def_py2_multiset_methods=pycondition( (3,), if_lt=''' +def __bool__(self): + return _coconut.bool(_coconut.len(self)) keys = _coconut.collections.Counter.viewkeys values = _coconut.collections.Counter.viewvalues items = _coconut.collections.Counter.viewitems @@ -486,6 +490,56 @@ def __lt__(self, other): indent=1, newline=True, ), + def_async_compose_call=prepare( + r''' +async def __call__(self, *args, **kwargs): + arg = await self._coconut_func(*args, **kwargs) + for f, await_f in self._coconut_func_infos: + arg = f(arg) + if await_f: + arg = await arg + return arg + ''' if target_info >= (3, 5) else + pycondition( + (3, 5), + if_ge=r''' +_coconut_call_ns = {"_coconut": _coconut} +_coconut_exec("""async def __call__(self, *args, **kwargs): + arg = await self._coconut_func(*args, **kwargs) + for f, await_f in self._coconut_func_infos: + arg = f(arg) + if await_f: + arg = await arg + return arg""", _coconut_call_ns) +__call__ = _coconut_call_ns["__call__"] + ''', + if_lt=pycondition( + (3, 4), + if_ge=r''' +_coconut_call_ns = {"_coconut": _coconut} +_coconut_exec("""def __call__(self, *args, **kwargs): + arg = yield from self._coconut_func(*args, **kwargs) + for f, await_f in self._coconut_func_infos: + arg = f(arg) + if 
await_f: + arg = yield from arg + raise _coconut.StopIteration(arg)""", _coconut_call_ns) +__call__ = _coconut.asyncio.coroutine(_coconut_call_ns["__call__"]) + ''', + if_lt=''' +@_coconut.asyncio.coroutine +def __call__(self, *args, **kwargs): + arg = yield _coconut.asyncio.From(self._coconut_func(*args, **kwargs)) + for f, await_f in self._coconut_func_infos: + arg = f(arg) + if await_f: + arg = yield _coconut.asyncio.From(arg) + raise _coconut.asyncio.Return(arg) + ''', + ), + ), + indent=1 + ), # used in the second round tco_comma="_coconut_tail_call, _coconut_tco, " if not no_tco else "", @@ -499,13 +553,13 @@ async def __anext__(self): pycondition( (3, 5), if_ge=r''' -_coconut_anext_ns = {} +_coconut_anext_ns = {"_coconut": _coconut} _coconut_exec("""async def __anext__(self): return self.func(await self.aiter.__anext__())""", _coconut_anext_ns) __anext__ = _coconut_anext_ns["__anext__"] ''', if_lt=r''' -_coconut_anext_ns = {} +_coconut_anext_ns = {"_coconut": _coconut} _coconut_exec("""def __anext__(self): result = yield from self.aiter.__anext__() return self.func(result)""", _coconut_anext_ns) @@ -631,13 +685,21 @@ class you_need_to_install_typing_extensions{object}: if_lt=''' try: import trollius as asyncio -except ImportError: - class you_need_to_install_trollius{object}: +except ImportError as trollius_import_err: + class you_need_to_install_trollius(_coconut_missing_module): __slots__ = () - asyncio = you_need_to_install_trollius() + def coroutine(self, func): + def raise_import_error(*args, **kwargs): + raise self._import_err + return raise_import_error + def Return(self, obj): + raise self._import_err + asyncio = you_need_to_install_trollius(trollius_import_err) +asyncio_Return = asyncio.Return '''.format(**format_dict), if_ge=''' import asyncio +asyncio_Return = StopIteration ''', indent=1, ), @@ -667,15 +729,53 @@ def __aiter__(self): try: from backports.functools_lru_cache import lru_cache functools.lru_cache = lru_cache -except ImportError: - 
class you_need_to_install_backports_functools_lru_cache{object}: - __slots__ = () - functools.lru_cache = you_need_to_install_backports_functools_lru_cache() +except ImportError as lru_cache_import_err: + functools.lru_cache = _coconut_missing_module(lru_cache_import_err) '''.format(**format_dict), if_ge=None, indent=1, newline=True, ), + def_multiset_ops=pycondition( + (3,), + if_ge=''' +def __add__(self, other): + out = self.copy() + out += other + return out +def __and__(self, other): + out = self.copy() + out &= other + return out +def __or__(self, other): + out = self.copy() + out |= other + return out +def __sub__(self, other): + out = self.copy() + out -= other + return out +def __pos__(self): + return self.__class__(_coconut.super({_coconut_}multiset, self).__pos__()) +def __neg__(self): + return self.__class__(_coconut.super({_coconut_}multiset, self).__neg__()) + '''.format(**format_dict), + if_lt=''' +def __add__(self, other): + return self.__class__(_coconut.super({_coconut_}multiset, self).__add__(other)) +def __and__(self, other): + return self.__class__(_coconut.super({_coconut_}multiset, self).__and__(other)) +def __or__(self, other): + return self.__class__(_coconut.super({_coconut_}multiset, self).__or__(other)) +def __sub__(self, other): + return self.__class__(_coconut.super({_coconut_}multiset, self).__sub__(other)) +def __pos__(self): + return self + {_coconut_}multiset() +def __neg__(self): + return {_coconut_}multiset() - self + '''.format(**format_dict), + indent=1, + ), ) format_dict.update(extra_format_dict) @@ -732,9 +832,11 @@ def getheader(which, use_hash, target, no_tco, strict, no_wrap): if not target.startswith("3"): header += "from __future__ import print_function, absolute_import, unicode_literals, division\n" - # including generator_stop here is fine, even though to universalize - # generator returns we raise StopIteration errors, since we only do so - # when target_info < (3, 3) + # including generator_stop here is fine, even 
though to universalize generator returns + # we raise StopIteration errors, since we only do so when target_info < (3, 3) + elif target_info >= (3, 13): + # 3.13 supports lazy annotations, so we should just use that instead of from __future__ import annotations + header += "from __future__ import generator_stop\n" elif target_info >= (3, 7): if no_wrap: header += "from __future__ import generator_stop\n" @@ -743,19 +845,21 @@ def getheader(which, use_hash, target, no_tco, strict, no_wrap): elif target_info >= (3, 5): header += "from __future__ import generator_stop\n" - header += "import sys as _coconut_sys\n" + header += '''import sys as _coconut_sys +import os as _coconut_os +''' if which.startswith("package") or which == "__coconut__": header += "_coconut_header_info = " + header_info + "\n" + levels_up = None if which.startswith("package"): levels_up = int(assert_remove_prefix(which, "package:")) coconut_file_dir = "_coconut_os.path.dirname(_coconut_os.path.abspath(__file__))" for _ in range(levels_up): coconut_file_dir = "_coconut_os.path.dirname(" + coconut_file_dir + ")" - return header + prepare( + header += prepare( ''' -import os as _coconut_os _coconut_cached__coconut__ = _coconut_sys.modules.get({__coconut__}) _coconut_file_dir = {coconut_file_dir} _coconut_pop_path = False @@ -788,14 +892,40 @@ def getheader(which, use_hash, target, no_tco, strict, no_wrap): ).format( coconut_file_dir=coconut_file_dir, **format_dict - ) + section("Compiled Coconut") + ) if which == "sys": - return header + '''from coconut.__coconut__ import * + header += '''from coconut.__coconut__ import * from coconut.__coconut__ import {underscore_imports} -'''.format(**format_dict) + section("Compiled Coconut") +'''.format(**format_dict) + + # remove coconut_cache_dir from __file__ if it was put there by auto compilation + header += prepare( + ''' +try: + __file__ = _coconut_os.path.abspath(__file__) if __file__ else __file__ +except NameError: + pass +else: + if __file__ and 
{coconut_cache_dir} in __file__: + _coconut_file_comps = [] + while __file__: + __file__, _coconut_file_comp = _coconut_os.path.split(__file__) + if not _coconut_file_comp: + _coconut_file_comps.append(__file__) + break + if _coconut_file_comp != {coconut_cache_dir}: + _coconut_file_comps.append(_coconut_file_comp) + __file__ = _coconut_os.path.join(*reversed(_coconut_file_comps)) + ''', + newline=True, + ).format(**format_dict) + + if which == "sys" or which.startswith("package"): + return header + section("Compiled Coconut") # __coconut__, code, file + internal_assert(which in ("__coconut__", "code", "file"), "wrong header type", which) header += prepare( ''' diff --git a/coconut/compiler/templates/header.py_template b/coconut/compiler/templates/header.py_template index 33f3b8503..845f6265b 100644 --- a/coconut/compiler/templates/header.py_template +++ b/coconut/compiler/templates/header.py_template @@ -1,3 +1,9 @@ +class _coconut_missing_module{object}: + __slots__ = ("_import_err",) + def __init__(self, error): + self._import_err = error + def __getattr__(self, name): + raise self._import_err @_coconut_wraps(_coconut_py_super) def _coconut_super(type=None, object_or_type=None): if type is None: @@ -19,10 +25,8 @@ class _coconut{object}:{COMMENT.EVERYTHING_HERE_MUST_BE_COPIED_TO_STUB_FILE} {import_asyncio} try: import async_generator - except ImportError: - class you_need_to_install_async_generator{object}: - __slots__ = () - async_generator = you_need_to_install_async_generator() + except ImportError as async_generator_import_err: + async_generator = _coconut_missing_module(async_generator_import_err) {import_pickle} {import_OrderedDict} {import_collections_abc} @@ -45,10 +49,8 @@ class _coconut{object}:{COMMENT.EVERYTHING_HERE_MUST_BE_COPIED_TO_STUB_FILE} {set_zip_longest} try: import numpy - except ImportError: - class you_need_to_install_numpy{object}: - __slots__ = () - numpy = you_need_to_install_numpy() + except ImportError as numpy_import_err: + numpy = 
_coconut_missing_module(numpy_import_err) else: abc.Sequence.register(numpy.ndarray) numpy_modules = {numpy_modules} @@ -352,25 +354,34 @@ def _coconut_iter_getitem(iterable, index): return () iterable = _coconut.itertools.islice(iterable, 0, n) return _coconut.tuple(iterable)[i::step] -class _coconut_base_compose(_coconut_baseclass):{COMMENT.no_slots_to_allow_update_wrapper}{COMMENT.must_use_coconut_attrs_to_avoid_interacting_with_update_wrapper} +class _coconut_compostion_baseclass(_coconut_baseclass):{COMMENT.no_slots_to_allow_update_wrapper}{COMMENT.must_use_coconut_attrs_to_avoid_interacting_with_update_wrapper} def __init__(self, func, *func_infos): try: _coconut.functools.update_wrapper(self, func) except _coconut.AttributeError: pass - if _coconut.isinstance(func, _coconut_base_compose): + if _coconut.isinstance(func, self.__class__): self._coconut_func = func._coconut_func func_infos = func._coconut_func_infos + func_infos else: self._coconut_func = func self._coconut_func_infos = [] - for f, stars, none_aware in func_infos: - if _coconut.isinstance(f, _coconut_base_compose): - self._coconut_func_infos.append((f._coconut_func, stars, none_aware)) + for f_info in func_infos: + f = f_info[0] + if _coconut.isinstance(f, self.__class__): + self._coconut_func_infos.append((f._coconut_func,) + f_info[1:]) self._coconut_func_infos += f._coconut_func_infos else: - self._coconut_func_infos.append((f, stars, none_aware)) + self._coconut_func_infos.append(f_info) self._coconut_func_infos = _coconut.tuple(self._coconut_func_infos) + def __reduce__(self): + return (self.__class__, (self._coconut_func,) + self._coconut_func_infos) + def __get__(self, obj, objtype=None): + if obj is None: + return self +{return_method_of_self} +class _coconut_base_compose(_coconut_compostion_baseclass): + __slots__ = () def __call__(self, *args, **kwargs): arg = self._coconut_func(*args, **kwargs) for f, stars, none_aware in self._coconut_func_infos: @@ -387,12 +398,42 @@ class 
_coconut_base_compose(_coconut_baseclass):{COMMENT.no_slots_to_allow_updat return arg def __repr__(self): return _coconut.repr(self._coconut_func) + " " + " ".join(".." + "?"*none_aware + "*"*stars + "> " + _coconut.repr(f) for f, stars, none_aware in self._coconut_func_infos) - def __reduce__(self): - return (self.__class__, (self._coconut_func,) + self._coconut_func_infos) - def __get__(self, obj, objtype=None): - if obj is None: - return self -{return_method_of_self} +class _coconut_async_compose(_coconut_compostion_baseclass): + __slots__ = () +{def_async_compose_call} + def __repr__(self): + return _coconut.repr(self._coconut_func) + " " + " ".join("`and_then" + "_await"*await_f + "` " + _coconut.repr(f) for f, await_f in self._coconut_func_infos) +def and_then(first_async_func, second_func): + """Compose an async function with a normal function. + + Effectively equivalent to: + def and_then[**T, U, V]( + first_async_func: async (**T) -> U, + second_func: U -> V, + ) -> async (**T) -> V = + async def (*args, **kwargs) -> ( + first_async_func(*args, **kwargs) + |> await + |> second_func + ) + """ + return _coconut_async_compose(first_async_func, (second_func, False)) +def and_then_await(first_async_func, second_async_func): + """Compose two async functions. + + Effectively equivalent to: + def and_then_await[**T, U, V]( + first_async_func: async (**T) -> U, + second_async_func: async U -> V, + ) -> async (**T) -> V = + async def (*args, **kwargs) -> ( + first_async_func(*args, **kwargs) + |> await + |> second_async_func + |> await + ) + """ + return _coconut_async_compose(first_async_func, (second_async_func, True)) def _coconut_forward_compose(func, *funcs): """Forward composition operator (..>). 
@@ -403,26 +444,6 @@ def _coconut_back_compose(*funcs): (<..)(f, g) is effectively equivalent to (*args, **kwargs) -> f(g(*args, **kwargs)).""" return _coconut_forward_compose(*_coconut.reversed(funcs)) -def _coconut_forward_star_compose(func, *funcs): - """Forward star composition operator (..*>). - - (..*>)(f, g) is effectively equivalent to (*args, **kwargs) -> g(*f(*args, **kwargs)).""" - return _coconut_base_compose(func, *((f, 1, False) for f in funcs)) -def _coconut_back_star_compose(*funcs): - """Backward star composition operator (<*..). - - (<*..)(f, g) is effectively equivalent to (*args, **kwargs) -> f(*g(*args, **kwargs)).""" - return _coconut_forward_star_compose(*_coconut.reversed(funcs)) -def _coconut_forward_dubstar_compose(func, *funcs): - """Forward double star composition operator (..**>). - - (..**>)(f, g) is effectively equivalent to (*args, **kwargs) -> g(**f(*args, **kwargs)).""" - return _coconut_base_compose(func, *((f, 2, False) for f in funcs)) -def _coconut_back_dubstar_compose(*funcs): - """Backward double star composition operator (<**..). - - (<**..)(f, g) is effectively equivalent to (*args, **kwargs) -> f(**g(*args, **kwargs)).""" - return _coconut_forward_dubstar_compose(*_coconut.reversed(funcs)) def _coconut_forward_none_compose(func, *funcs): """Forward none-aware composition operator (..?>). @@ -433,6 +454,16 @@ def _coconut_back_none_compose(*funcs): (<..?)(f, g) is effectively equivalent to (*args, **kwargs) -> f?(g(*args, **kwargs)).""" return _coconut_forward_none_compose(*_coconut.reversed(funcs)) +def _coconut_forward_star_compose(func, *funcs): + """Forward star composition operator (..*>). + + (..*>)(f, g) is effectively equivalent to (*args, **kwargs) -> g(*f(*args, **kwargs)).""" + return _coconut_base_compose(func, *((f, 1, False) for f in funcs)) +def _coconut_back_star_compose(*funcs): + """Backward star composition operator (<*..). 
+ + (<*..)(f, g) is effectively equivalent to (*args, **kwargs) -> f(*g(*args, **kwargs)).""" + return _coconut_forward_star_compose(*_coconut.reversed(funcs)) def _coconut_forward_none_star_compose(func, *funcs): """Forward none-aware star composition operator (..?*>). @@ -443,6 +474,16 @@ def _coconut_back_none_star_compose(*funcs): (<*?..)(f, g) is effectively equivalent to (*args, **kwargs) -> f?(*g(*args, **kwargs)).""" return _coconut_forward_none_star_compose(*_coconut.reversed(funcs)) +def _coconut_forward_dubstar_compose(func, *funcs): + """Forward double star composition operator (..**>). + + (..**>)(f, g) is effectively equivalent to (*args, **kwargs) -> g(**f(*args, **kwargs)).""" + return _coconut_base_compose(func, *((f, 2, False) for f in funcs)) +def _coconut_back_dubstar_compose(*funcs): + """Backward double star composition operator (<**..). + + (<**..)(f, g) is effectively equivalent to (*args, **kwargs) -> f(**g(*args, **kwargs)).""" + return _coconut_forward_dubstar_compose(*_coconut.reversed(funcs)) def _coconut_forward_none_dubstar_compose(func, *funcs): """Forward none-aware double star composition operator (..?**>). @@ -1474,6 +1515,11 @@ class multiset(_coconut.collections.Counter{comma_object}): return not self & other def __xor__(self, other): return self - other | other - self + def __ixor__(self, other): + right = other - self + self -= other + self |= right + return self def count(self, item): """Return the number of times an element occurs in a multiset. 
Equivalent to multiset[item], but additionally verifies the count is non-negative.""" @@ -1483,7 +1529,8 @@ class multiset(_coconut.collections.Counter{comma_object}): return result def __fmap__(self, func): return self.__class__(_coconut.dict((func(obj), num) for obj, num in self.items())) -{def_total_and_comparisons}{assign_multiset_views}_coconut.abc.MutableSet.register(multiset) +{def_multiset_ops} +{def_total_and_comparisons}{def_py2_multiset_methods}_coconut.abc.MutableSet.register(multiset) def _coconut_base_makedata(data_type, args, from_fmap=False, fallback_to_init=False): if _coconut.hasattr(data_type, "_make") and _coconut.issubclass(data_type, _coconut.tuple): return data_type._make(args) @@ -1566,6 +1613,8 @@ def memoize(*args, **kwargs): return _coconut.functools.lru_cache(maxsize, typed) {def_call_set_names} class override(_coconut_baseclass): + """Declare a method in a subclass as an override of a parent class method. + Enforces at runtime that the parent class has such a method to be overwritten.""" __slots__ = ("func",) def __init__(self, func): self.func = func diff --git a/coconut/compiler/util.py b/coconut/compiler/util.py index bebec4a09..7605cecae 100644 --- a/coconut/compiler/util.py +++ b/coconut/compiler/util.py @@ -14,6 +14,7 @@ # Table of Contents: # - Imports # - Computation Graph +# - Parsing Introspection # - Targets # - Parse Elements # - Utilities @@ -32,6 +33,7 @@ import inspect import __future__ import itertools +import datetime as dt from functools import partial, reduce from collections import defaultdict from contextlib import contextmanager @@ -39,6 +41,7 @@ from coconut._pyparsing import ( USE_COMPUTATION_GRAPH, + SUPPORTS_INCREMENTAL, replaceWith, ZeroOrMore, OneOrMore, @@ -96,6 +99,9 @@ non_syntactic_newline, allow_explicit_keyword_vars, reserved_prefix, + incremental_cache_size, + repeatedly_clear_incremental_cache, + py_vers_with_eols, ) from coconut.exceptions import ( CoconutException, @@ -341,14 +347,24 @@ def 
attach(item, action, ignore_no_tokens=None, ignore_one_token=None, ignore_to return add_action(item, action, make_copy) -def trace_attach(*args, **kwargs): - """trace_attach = trace .. attach""" - return trace(attach(*args, **kwargs)) +def should_clear_cache(): + """Determine if we should be clearing the packrat cache.""" + return ( + use_packrat_parser + and ( + not ParserElement._incrementalEnabled + or ( + ParserElement._incrementalWithResets + and repeatedly_clear_incremental_cache + ) + ) + ) def final_evaluate_tokens(tokens): """Same as evaluate_tokens but should only be used once a parse is assured.""" - if use_packrat_parser: + # don't clear the cache in incremental mode + if should_clear_cache(): # clear cache without resetting stats ParserElement.packrat_cache.clear() return evaluate_tokens(tokens) @@ -357,7 +373,7 @@ def final_evaluate_tokens(tokens): def final(item): """Collapse the computation graph upon parsing the given item.""" # evaluate_tokens expects a computation graph, so we just call add_action directly - return add_action(item, final_evaluate_tokens) + return add_action(trace(item), final_evaluate_tokens) def defer(item): @@ -375,29 +391,46 @@ def unpack(tokens): return tokens +def force_reset_packrat_cache(): + """Forcibly reset the packrat cache and all packrat stats.""" + if ParserElement._incrementalEnabled: + ParserElement._incrementalEnabled = False + ParserElement.enableIncremental(incremental_cache_size, still_reset_cache=ParserElement._incrementalWithResets) + else: + ParserElement._packratEnabled = False + ParserElement.enablePackrat(packrat_cache_size) + + @contextmanager def parsing_context(inner_parse=True): """Context to manage the packrat cache across parse calls.""" - if inner_parse and use_packrat_parser: + if inner_parse and should_clear_cache(): # store old packrat cache old_cache = ParserElement.packrat_cache old_cache_stats = ParserElement.packrat_cache_stats[:] # give inner parser a new packrat cache - 
ParserElement._packratEnabled = False - ParserElement.enablePackrat(packrat_cache_size) - try: - yield - finally: - if inner_parse and use_packrat_parser: + force_reset_packrat_cache() + try: + yield + finally: ParserElement.packrat_cache = old_cache if logger.verbose: ParserElement.packrat_cache_stats[0] += old_cache_stats[0] ParserElement.packrat_cache_stats[1] += old_cache_stats[1] + elif inner_parse and ParserElement._incrementalWithResets: + incrementalWithResets, ParserElement._incrementalWithResets = ParserElement._incrementalWithResets, False + try: + yield + finally: + ParserElement._incrementalWithResets = incrementalWithResets + else: + yield def prep_grammar(grammar, streamline=False): """Prepare a grammar item to be used as the root of a parse.""" + grammar = trace(grammar) if streamline: grammar.streamlined = False grammar.streamline() @@ -454,16 +487,85 @@ def match_in(grammar, text, inner=True): def transform(grammar, text, inner=True): """Transform text by replacing matches to grammar.""" with parsing_context(inner): - result = add_action(grammar, unpack).parseWithTabs().transformString(text) + result = prep_grammar(add_action(grammar, unpack)).transformString(text) if result == text: result = None return result # ----------------------------------------------------------------------------------------------------------------------- -# TARGETS: +# PARSING INTROSPECTION: # ----------------------------------------------------------------------------------------------------------------------- + +def get_func_closure(func): + """Get variables in func's closure.""" + if PY2: + varnames = func.func_code.co_freevars + cells = func.func_closure + else: + varnames = func.__code__.co_freevars + cells = func.__closure__ + return {v: c.cell_contents for v, c in zip(varnames, cells)} + + +def get_pyparsing_cache(): + """Extract the underlying pyparsing packrat cache.""" + packrat_cache = ParserElement.packrat_cache + if isinstance(packrat_cache, dict): # if 
enablePackrat is never called + return packrat_cache + elif hasattr(packrat_cache, "cache"): # cPyparsing adds this + return packrat_cache.cache + else: # on pyparsing we have to do this + try: + # this is sketchy, so errors should only be complained + return get_func_closure(packrat_cache.get.__func__)["cache"] + except Exception as err: + complain(err) + return {} + + +def add_to_cache(new_cache_items): + """Add the given items directly to the pyparsing packrat cache.""" + packrat_cache = ParserElement.packrat_cache + for lookup, value in new_cache_items: + packrat_cache.set(lookup, value) + + +def get_cache_items_for(original): + """Get items from the pyparsing cache filtered to only from parsing original.""" + cache = get_pyparsing_cache() + for lookup, value in cache.items(): + got_orig = lookup[1] + if got_orig == original: + yield lookup, value + + +def get_highest_parse_loc(original): + """Get the highest observed parse location.""" + # find the highest observed parse location + highest_loc = 0 + for item, _ in get_cache_items_for(original): + loc = item[2] + if loc > highest_loc: + highest_loc = loc + return highest_loc + + +def enable_incremental_parsing(force=False): + """Enable incremental parsing mode where prefix parses are reused.""" + if SUPPORTS_INCREMENTAL or force: + try: + ParserElement.enableIncremental(incremental_cache_size, still_reset_cache=False) + except ImportError as err: + raise CoconutException(str(err)) + else: + logger.log("Incremental parsing mode enabled.") + + +# ----------------------------------------------------------------------------------------------------------------------- +# TARGETS: +# ----------------------------------------------------------------------------------------------------------------------- on_new_python = False raw_sys_target = str(sys.version_info[0]) + str(sys.version_info[1]) @@ -482,6 +584,15 @@ def transform(grammar, text, inner=True): sys_target = "".join(str(i) for i in supported_py3_vers[0]) +def 
get_psf_target(): + """Get the oldest PSF-supported Python version target.""" + now = dt.datetime.now() + for ver, eol in py_vers_with_eols: + if now < eol: + break + return pseudo_targets.get(ver, ver) + + def get_vers_for_target(target): """Gets a list of the versions supported by the given target.""" target_info = get_target_info(target) @@ -535,31 +646,33 @@ def get_target_info_smart(target, mode="lowest"): class Wrap(ParseElementEnhance): """PyParsing token that wraps the given item in the given context manager.""" + inside = False def __init__(self, item, wrapper, greedy=False, include_in_packrat_context=False): super(Wrap, self).__init__(item) self.wrapper = wrapper self.greedy = greedy - self.include_in_packrat_context = include_in_packrat_context + self.include_in_packrat_context = include_in_packrat_context and hasattr(ParserElement, "packrat_context") @property def wrapped_name(self): return get_name(self.expr) + " (Wrapped)" @contextmanager - def wrapped_packrat_context(self): + def wrapped_context(self): """Context manager that edits the packrat_context. Required to allow the packrat cache to distinguish between wrapped and unwrapped parses. 
Only supported natively on cPyparsing.""" - if self.include_in_packrat_context and hasattr(self, "packrat_context"): - self.packrat_context.append(self.wrapper) - try: - yield - finally: - self.packrat_context.pop() - else: + was_inside, self.inside = self.inside, True + if self.include_in_packrat_context: + ParserElement.packrat_context.append(self.wrapper) + try: yield + finally: + if self.include_in_packrat_context: + ParserElement.packrat_context.pop() + self.inside = was_inside @override def parseImpl(self, original, loc, *args, **kwargs): @@ -568,7 +681,7 @@ def parseImpl(self, original, loc, *args, **kwargs): logger.log_trace(self.wrapped_name, original, loc) with logger.indent_tracing(): with self.wrapper(self, original, loc): - with self.wrapped_packrat_context(): + with self.wrapped_context(): parse_loc, tokens = super(Wrap, self).parseImpl(original, loc, *args, **kwargs) if self.greedy: tokens = evaluate_tokens(tokens) @@ -586,7 +699,7 @@ def __repr__(self): def disable_inside(item, *elems, **kwargs): """Prevent elems from matching inside of item. - Returns (item with elem disabled, *new versions of elems). + Returns (item with elems disabled, *new versions of elems). """ _invert = kwargs.pop("_invert", False) internal_assert(not kwargs, "excess keyword arguments passed to disable_inside", kwargs) @@ -617,9 +730,9 @@ def manage_elem(self, original, loc): def disable_outside(item, *elems): """Prevent elems from matching outside of item. - Returns (item with elem enabled, *new versions of elems). + Returns (item with elems enabled, *new versions of elems). 
""" - for wrapped in disable_inside(item, *elems, **{"_invert": True}): + for wrapped in disable_inside(item, *elems, _invert=True): yield wrapped @@ -667,6 +780,9 @@ def compile_regex(regex, options=None): return re.compile(regex, options) +memoized_compile_regex = memoize(64)(compile_regex) + + def regex_item(regex, options=None): """pyparsing.Regex except it always uses unicode.""" if options is None: @@ -799,20 +915,13 @@ def stores_loc_action(loc, tokens): stores_loc_item = attach(always_match, stores_loc_action) -def disallow_keywords(kwds, with_suffix=None): +def disallow_keywords(kwds, with_suffix=""): """Prevent the given kwds from matching.""" - item = ~( - base_keyword(kwds[0]) - if with_suffix is None else - base_keyword(kwds[0]) + with_suffix + to_disallow = ( + k + r"\b" + re.escape(with_suffix) + for k in kwds ) - for k in kwds[1:]: - item += ~( - base_keyword(k) - if with_suffix is None else - base_keyword(k) + with_suffix - ) - return item + return regex_item(r"(?!" + "|".join(to_disallow) + r")").suppress() def any_keyword_in(kwds): @@ -904,6 +1013,7 @@ def caseless_literal(literalstr, suppress=False): # UTILITIES: # ----------------------------------------------------------------------------------------------------------------------- + def ordered(items): """Return the items in a deterministic order.""" if PY2: @@ -1249,43 +1359,6 @@ def handle_indentation(inputstr, add_newline=False, extra_indent=0): return out -def get_func_closure(func): - """Get variables in func's closure.""" - if PY2: - varnames = func.func_code.co_freevars - cells = func.func_closure - else: - varnames = func.__code__.co_freevars - cells = func.__closure__ - return {v: c.cell_contents for v, c in zip(varnames, cells)} - - -def get_highest_parse_loc(): - """Get the highest observed parse location.""" - try: - # extract the actual cache object (pyparsing does not make this easy) - packrat_cache = ParserElement.packrat_cache - if isinstance(packrat_cache, dict): # if 
enablePackrat is never called - cache = packrat_cache - elif hasattr(packrat_cache, "cache"): # cPyparsing adds this - cache = packrat_cache.cache - else: # on pyparsing we have to do this - cache = get_func_closure(packrat_cache.get.__func__)["cache"] - - # find the highest observed parse location - highest_loc = 0 - for item in cache: - loc = item[2] - if loc > highest_loc: - highest_loc = loc - return highest_loc - - # everything here is sketchy, so errors should only be complained - except Exception as err: - complain(err) - return 0 - - def literal_eval(py_code): """Version of ast.literal_eval that attempts to be version-independent.""" try: @@ -1370,6 +1443,58 @@ def add_int_and_strs(int_part=0, str_parts=(), parens=False): return out +def base_move_loc(original, loc, chars_to_move_forwards): + """Move loc in original in accordance with chars_to_move_forwards.""" + visited_locs = set() + while 0 <= loc <= len(original) - 1: + c = original[loc] + for charset, forwards in chars_to_move_forwards.items(): + if c in charset: + break + else: # no break + break + if forwards: + if loc >= len(original) - 1: + break + next_loc = loc + 1 + else: + if loc <= 1: + break + next_loc = loc - 1 + if next_loc in visited_locs: + loc = next_loc + break + visited_locs.add(next_loc) + loc = next_loc + return loc + + +def move_loc_to_non_whitespace(original, loc, backwards=False): + """Move the given loc in original to the closest non-whitespace in the given direction. 
+ Won't ever move far enough to set loc to 0 or len(original).""" + return base_move_loc( + original, + loc, + chars_to_move_forwards={ + default_whitespace_chars: not backwards, + # for loc, move backwards on newlines/indents, which we can do safely without removing anything from the error + indchars: False, + }, + ) + + +def move_endpt_to_non_whitespace(original, loc, backwards=False): + """Same as base_move_loc but for endpoints specifically.""" + return base_move_loc( + original, + loc, + chars_to_move_forwards={ + default_whitespace_chars: not backwards, + # for endpt, ignore newlines/indents to avoid introducing unnecessary lines into the error + }, + ) + + # ----------------------------------------------------------------------------------------------------------------------- # PYTEST: # ----------------------------------------------------------------------------------------------------------------------- diff --git a/coconut/constants.py b/coconut/constants.py index c6bc04fdd..38f1c671d 100644 --- a/coconut/constants.py +++ b/coconut/constants.py @@ -40,9 +40,9 @@ def fixpath(path): def get_bool_env_var(env_var, default=False): """Get a boolean from an environment variable.""" boolstr = os.getenv(env_var, "").lower() - if boolstr in ("true", "yes", "on", "1"): + if boolstr in ("true", "yes", "on", "1", "t"): return True - elif boolstr in ("false", "no", "off", "0"): + elif boolstr in ("false", "no", "off", "0", "f"): return False else: if boolstr not in ("", "none", "default"): @@ -78,8 +78,9 @@ def get_bool_env_var(env_var, default=False): PY311 = sys.version_info >= (3, 11) IPY = ( ((PY2 and not PY26) or PY35) - and not (PYPY and WINDOWS) and (PY37 or not PYPY) + and not (PYPY and WINDOWS) + and not (PY2 and WINDOWS) and sys.version_info[:2] != (3, 7) ) MYPY = ( @@ -104,12 +105,7 @@ def get_bool_env_var(env_var, default=False): assert use_fast_pyparsing_reprs or DEVELOP, "use_fast_pyparsing_reprs should never be disabled on non-develop build" 
enable_pyparsing_warnings = DEVELOP - -# experimentally determined to maximize performance -use_packrat_parser = True # True also gives us better error messages -use_left_recursion_if_available = False -packrat_cache_size = None # only works because final() clears the cache -streamline_grammar_for_len = 4000 +warn_on_multiline_regex = False default_whitespace_chars = " \t\f" # the only non-newline whitespace Python allows @@ -117,6 +113,22 @@ def get_bool_env_var(env_var, default=False): use_computation_graph_env_var = "COCONUT_USE_COMPUTATION_GRAPH" +# below constants are experimentally determined to maximize performance + +streamline_grammar_for_len = 4000 + +use_packrat_parser = True # True also gives us better error messages +packrat_cache_size = None # only works because final() clears the cache + +# note that _parseIncremental produces much smaller caches +use_incremental_if_available = True +incremental_cache_size = None +# these only apply to use_incremental_if_available, not compiler.util.enable_incremental_parsing() +repeatedly_clear_incremental_cache = True +never_clear_incremental_cache = False + +use_left_recursion_if_available = False + # ----------------------------------------------------------------------------------------------------------------------- # COMPILER CONSTANTS: # ----------------------------------------------------------------------------------------------------------------------- @@ -171,6 +183,18 @@ def get_bool_env_var(env_var, default=False): (3, 10), (3, 11), (3, 12), + (3, 13), +) + +# must be in ascending order and kept up-to-date with https://devguide.python.org/versions +py_vers_with_eols = ( + # (target, eol date) + ("38", dt.datetime(2024, 11, 1)), + ("39", dt.datetime(2025, 11, 1)), + ("310", dt.datetime(2026, 11, 1)), + ("311", dt.datetime(2027, 11, 1)), + ("312", dt.datetime(2028, 11, 1)), + ("313", dt.datetime(2029, 11, 1)), ) # must match supported vers above and must be replicated in DOCS @@ -188,13 +212,14 @@ def 
get_bool_env_var(env_var, default=False): "310", "311", "312", + "313", ) pseudo_targets = { "universal": "", + "univ": "", "26": "2", "32": "3", } -assert all(v in specific_targets or v in pseudo_targets for v in ROOT_HEADER_VERSIONS) targets = ("",) + specific_targets @@ -237,14 +262,16 @@ def get_bool_env_var(env_var, default=False): indchars = (openindent, closeindent, "\n") comment_chars = ("#", lnwrapper) +all_whitespace = default_whitespace_chars + "".join(indchars) + # open_chars and close_chars MUST BE IN THE SAME ORDER open_chars = "([{" # opens parenthetical close_chars = ")]}" # closes parenthetical -hold_chars = "'\"" # string open/close chars +str_chars = "'\"" # string open/close chars # together should include all the constants defined above -delimiter_symbols = tuple(open_chars + close_chars + hold_chars) + ( +delimiter_symbols = tuple(open_chars + close_chars + str_chars) + ( strwrapper, errwrapper, early_passthrough_wrapper, @@ -255,11 +282,14 @@ def get_bool_env_var(env_var, default=False): funcwrapper, ) -taberrfmt = 2 # spaces to indent exceptions tabideal = 4 # spaces to indent code for displaying +taberrfmt = 2 # spaces to indent exceptions + justify_len = 79 # ideal line length +min_squiggles_in_err_msg = 1 + # for pattern-matching default_matcher_style = "python warn" wildcard = "_" @@ -563,23 +593,27 @@ def get_bool_env_var(env_var, default=False): main_prompt = ">>> " more_prompt = " " +default_use_cache_dir = PY34 +coconut_cache_dir = "__coconut_cache__" + mypy_path_env_var = "MYPYPATH" style_env_var = "COCONUT_STYLE" vi_mode_env_var = "COCONUT_VI_MODE" home_env_var = "COCONUT_HOME" -use_color_env_var = "COCONUT_USE_COLOR" + +force_verbose_logger = get_bool_env_var("COCONUT_FORCE_VERBOSE", False) coconut_home = fixpath(os.getenv(home_env_var, "~")) -use_color = get_bool_env_var(use_color_env_var, default=None) +use_color = get_bool_env_var("COCONUT_USE_COLOR", None) error_color_code = "31" log_color_code = "93" default_style = "default" 
prompt_histfile = os.path.join(coconut_home, ".coconut_history") prompt_multiline = False -prompt_vi_mode = get_bool_env_var(vi_mode_env_var) +prompt_vi_mode = get_bool_env_var(vi_mode_env_var, False) prompt_wrap_lines = True prompt_history_search = True prompt_use_suggester = False @@ -587,6 +621,11 @@ def get_bool_env_var(env_var, default=False): base_dir = os.path.dirname(os.path.abspath(fixpath(__file__))) base_stub_dir = os.path.dirname(base_dir) +stub_dir_names = ( + "__coconut__", + "_coconut", + "coconut", +) installed_stub_dir = os.path.join(coconut_home, ".coconut_stubs") watch_interval = .1 # seconds @@ -612,9 +651,9 @@ def get_bool_env_var(env_var, default=False): ) # always use atomic --xxx=yyy rather than --xxx yyy -coconut_run_verbose_args = ("--run", "--target=sys", "--line-numbers", "--keep-lines") -coconut_run_args = coconut_run_verbose_args + ("--quiet",) -coconut_import_hook_args = ("--target=sys", "--line-numbers", "--keep-lines", "--quiet") +# and don't include --run, --quiet, or --target as they're added separately +coconut_base_run_args = ("--keep-lines",) +coconut_run_kwargs = dict(default_target="sys") # passed to Command.cmd default_mypy_args = ( "--pretty", @@ -649,11 +688,13 @@ def get_bool_env_var(env_var, default=False): default_jobs = "sys" if not PY26 else 0 mypy_install_arg = "install" +jupyter_install_arg = "install" mypy_builtin_regex = re.compile(r"\b(reveal_type|reveal_locals)\b") interpreter_uses_auto_compilation = True interpreter_uses_coconut_breakpoint = True +interpreter_uses_incremental = get_bool_env_var("COCONUT_INTERPRETER_USE_INCREMENTAL_PARSING", True) command_resources_dir = os.path.join(base_dir, "command", "resources") coconut_pth_file = os.path.join(command_resources_dir, "zcoconut.pth") @@ -714,6 +755,8 @@ def get_bool_env_var(env_var, default=False): "multiset", "cycle", "windowsof", + "and_then", + "and_then_await", "py_chr", "py_dict", "py_hex", @@ -766,10 +809,13 @@ def get_bool_env_var(env_var, 
default=False): r"\|\??\*?\*?>", r"<\*?\*?\??\|", r"->", + r"=>", r"\?\??", r"<:", r"&:", + # not raw strings since we want the actual unicode chars "\u2192", # -> + "\u21d2", # => "\\??\\*?\\*?\u21a6", # |> "\u21a4\\*?\\*?\\??", # <| "?", # .. @@ -790,6 +836,7 @@ def get_bool_env_var(env_var, default=False): "\u2287", # ^reversed "\u228a", # C!= "\u228b", # ^reversed + "\u23e8", # 10 ) # ----------------------------------------------------------------------------------------------------------------------- @@ -807,7 +854,7 @@ def get_bool_env_var(env_var, default=False): license_name = "Apache 2.0" pure_python_env_var = "COCONUT_PURE_PYTHON" -PURE_PYTHON = get_bool_env_var(pure_python_env_var) +PURE_PYTHON = get_bool_env_var(pure_python_env_var, False) # the different categories here are defined in requirements.py, # tuples denote the use of environment markers @@ -821,9 +868,9 @@ def get_bool_env_var(env_var, default=False): ("prompt_toolkit", "py>=3"), ("pygments", "py<39"), ("pygments", "py>=39"), - ("typing_extensions", "py==35"), + ("typing_extensions", "py<36"), ("typing_extensions", "py==36"), - ("typing_extensions", "py37"), + ("typing_extensions", "py>=37"), ), "cpython": ( "cPyparsing", @@ -873,7 +920,7 @@ def get_bool_env_var(env_var, default=False): ("aenum", "py<34"), ("dataclasses", "py==36"), ("typing", "py<35"), - ("async_generator", "py3"), + ("async_generator", "py35"), ), "dev": ( ("pre-commit", "py3"), @@ -899,7 +946,7 @@ def get_bool_env_var(env_var, default=False): # min versions are inclusive min_versions = { - "cPyparsing": (2, 4, 7, 1, 2, 1), + "cPyparsing": (2, 4, 7, 2, 2, 1), ("pre-commit", "py3"): (3,), ("psutil", "py>=27"): (5,), "jupyter": (1, 0), @@ -913,25 +960,24 @@ def get_bool_env_var(env_var, default=False): ("numpy", "py34"): (1,), ("numpy", "py<3;cpy"): (1,), ("dataclasses", "py==36"): (0, 8), - ("aenum", "py<34"): (3,), + ("aenum", "py<34"): (3, 1, 15), "pydata-sphinx-theme": (0, 13), - "myst-parser": (1,), - "mypy[python2]": 
(1, 3), + "myst-parser": (2,), + "sphinx": (7,), + "mypy[python2]": (1, 4), ("jupyter-console", "py37"): (6, 6), ("typing", "py<35"): (3, 10), - ("typing_extensions", "py37"): (4, 6), + ("typing_extensions", "py>=37"): (4, 7), ("ipython", "py38"): (8,), ("ipykernel", "py38"): (6,), - ("jedi", "py39"): (0, 18), + ("jedi", "py39"): (0, 19), ("pygments", "py>=39"): (2, 15), ("xonsh", "py38"): (0, 14), ("pytest", "py36"): (7,), - ("async_generator", "py3"): (1, 10), + ("async_generator", "py35"): (1, 10), # pinned reqs: (must be added to pinned_reqs below) - # don't upgrade until myst-parser supports the new version - "sphinx": (6,), # don't upgrade these; they breaks on Python 3.7 ("ipython", "py==37"): (7, 34), # don't upgrade these; they breaks on Python 3.6 @@ -947,7 +993,7 @@ def get_bool_env_var(env_var, default=False): ("jupytext", "py3"): (1, 8), ("jupyterlab", "py35"): (2, 2), ("xonsh", "py<36"): (0, 9), - ("typing_extensions", "py==35"): (3, 10), + ("typing_extensions", "py<36"): (3, 10), # don't upgrade this to allow all versions ("prompt_toolkit", "py>=3"): (1,), # don't upgrade this; it breaks on Python 2.6 @@ -973,7 +1019,6 @@ def get_bool_env_var(env_var, default=False): # should match the reqs with comments above pinned_reqs = ( - "sphinx", ("ipython", "py==37"), ("xonsh", "py>=36;py<38"), ("pandas", "py36"), @@ -987,7 +1032,7 @@ def get_bool_env_var(env_var, default=False): ("jupytext", "py3"), ("jupyterlab", "py35"), ("xonsh", "py<36"), - ("typing_extensions", "py==35"), + ("typing_extensions", "py<36"), ("prompt_toolkit", "py>=3"), ("pytest", "py<36"), "vprof", @@ -1151,7 +1196,7 @@ def get_bool_env_var(env_var, default=False): # ----------------------------------------------------------------------------------------------------------------------- # must be replicated in DOCS; must include --line-numbers for xonsh line number extraction -coconut_kernel_kwargs = dict(target="sys", line_numbers=True, keep_lines=True, no_wrap=True) 
+coconut_kernel_kwargs = dict(target="sys", line_numbers=True, keep_lines=True, no_wrap=True) # passed to Compiler.setup icoconut_dir = os.path.join(base_dir, "icoconut") diff --git a/coconut/exceptions.py b/coconut/exceptions.py index 33e0c40b4..61319e4ed 100644 --- a/coconut/exceptions.py +++ b/coconut/exceptions.py @@ -29,6 +29,7 @@ from coconut.constants import ( taberrfmt, report_this_text, + min_squiggles_in_err_msg, ) from coconut.util import ( pickleable_obj, @@ -116,6 +117,7 @@ def message(self, message, source, point, ln, extra=None, endpoint=None, filenam message += "\n" + " " * taberrfmt + clean(line) else: source = normalize_newlines(source) + point = clip(point, 0, len(source)) if endpoint is None: endpoint = 0 @@ -129,7 +131,7 @@ def message(self, message, source, point, ln, extra=None, endpoint=None, filenam source_lines = tuple(logical_lines(source, keep_newlines=True)) - # walk the endpoint back until it points to real text + # walk the endpoint line back until it points to real text while endpoint_ln > point_ln and not "".join(source_lines[endpoint_ln - 1:endpoint_ln]).strip(): endpoint_ln -= 1 endpoint_ind = len(source_lines[endpoint_ln - 1]) @@ -154,15 +156,20 @@ def message(self, message, source, point, ln, extra=None, endpoint=None, filenam message += "\n" + " " * taberrfmt + part if point_ind > 0 or endpoint_ind > 0: + err_len = endpoint_ind - point_ind message += "\n" + " " * (taberrfmt + point_ind) - if endpoint_ind - point_ind > 1: + if err_len <= min_squiggles_in_err_msg: if not self.point_to_endpoint: message += "^" - message += "~" * (endpoint_ind - point_ind - 1) + message += "~" * err_len # err_len ~'s when there's only an extra char in one spot if self.point_to_endpoint: message += "^" else: - message += "^" + message += ( + ("^" if not self.point_to_endpoint else "\\") + + "~" * (err_len - 1) # err_len-1 ~'s when there's an extra char at the start and end + + ("^" if self.point_to_endpoint else "/" if endpoint_ind < len(part) else 
"|") + ) # multi-line error message else: @@ -170,14 +177,20 @@ def message(self, message, source, point, ln, extra=None, endpoint=None, filenam for line in source_lines[point_ln - 1:endpoint_ln]: lines.append(clean(line)) - # adjust cols that are too large based on clean/rstrip point_ind = clip(point_ind, 0, len(lines[0])) endpoint_ind = clip(endpoint_ind, 0, len(lines[-1])) - message += "\n" + " " * (taberrfmt + point_ind) + "|" + "~" * (len(lines[0]) - point_ind - 1) + "\n" + message += "\n" + " " * (taberrfmt + point_ind) + if point_ind >= len(lines[0]): + message += "|\n" + else: + message += "/" + "~" * (len(lines[0]) - point_ind - 1) + "\n" for line in lines: message += "\n" + " " * taberrfmt + line - message += "\n\n" + " " * taberrfmt + "~" * (endpoint_ind) + "^" + message += ( + "\n\n" + " " * taberrfmt + "~" * endpoint_ind + + ("^" if self.point_to_endpoint else "/" if 0 < endpoint_ind < len(lines[-1]) else "|") + ) return message diff --git a/coconut/icoconut/root.py b/coconut/icoconut/root.py index 326a2dd62..babd03616 100644 --- a/coconut/icoconut/root.py +++ b/coconut/icoconut/root.py @@ -100,12 +100,12 @@ def memoized_parse_block(code): def syntaxerr_memoized_parse_block(code): """Version of memoized_parse_block that raises SyntaxError without any __cause__.""" - to_raise = None + syntax_err = None try: return memoized_parse_block(code) except CoconutException as err: - to_raise = err.syntax_err() - raise to_raise + syntax_err = err.syntax_err() + raise syntax_err # ----------------------------------------------------------------------------------------------------------------------- @@ -114,7 +114,7 @@ def syntaxerr_memoized_parse_block(code): if LOAD_MODULE: - COMPILER.warm_up() + COMPILER.warm_up(enable_incremental_mode=True) class CoconutCompiler(CachingCompiler, object): """IPython compiler for Coconut.""" @@ -158,15 +158,14 @@ class CoconutSplitter(IPythonInputSplitter, object): def __init__(self, *args, **kwargs): """Version of __init__ that 
sets up Coconut code compilation.""" super(CoconutSplitter, self).__init__(*args, **kwargs) + self._original_compile = self._compile self._compile = self._coconut_compile def _coconut_compile(self, source, *args, **kwargs): """Version of _compile that checks Coconut code. None means that the code should not be run as is. Any other value means that it can.""" - if source.endswith("\n\n"): - return True - elif should_indent(source): + if not source.endswith("\n\n") and should_indent(source): return None else: return True diff --git a/coconut/integrations.py b/coconut/integrations.py index f13375c65..8d2fec811 100644 --- a/coconut/integrations.py +++ b/coconut/integrations.py @@ -23,7 +23,9 @@ from coconut.constants import ( coconut_kernel_kwargs, + coconut_run_kwargs, enabled_xonsh_modes, + interpreter_uses_incremental, ) from coconut.util import memoize_with_exceptions @@ -63,6 +65,7 @@ def load_ipython_extension(ipython): magic_state = api.get_state() api.setup(state=magic_state, **coconut_kernel_kwargs) + api.warm_up(enable_incremental_mode=True, state=magic_state) # add magic function def magic(line, cell=None): @@ -74,7 +77,7 @@ def magic(line, cell=None): # first line in block is cmd, rest is code line = line.strip() if line: - api.cmd(line, default_target="sys", state=magic_state) + api.cmd(line, state=magic_state, **coconut_run_kwargs) code = cell compiled = api.parse(code, state=magic_state) except CoconutException: @@ -99,7 +102,7 @@ class CoconutXontribLoader(object): def memoized_parse_xonsh(self, code): return self.compiler.parse_xonsh(code, keep_state=True) - def compile_code(self, code): + def compile_code(self, code, log_name="parse"): """Memoized self.compiler.parse_xonsh.""" # hide imports to avoid circular dependencies from coconut.exceptions import CoconutException @@ -120,7 +123,7 @@ def compile_code(self, code): success = True finally: logger.quiet = quiet - self.timing_info.append(("parse", get_clock_time() - parse_start_time)) + 
self.timing_info.append((log_name, get_clock_time() - parse_start_time)) return compiled, success @@ -151,11 +154,11 @@ def new_ctxvisit(self, ctxtransformer, node, inp, ctx, mode="exec", *args, **kwa from coconut.terminal import logger from coconut.compiler.util import extract_line_num_from_comment - compiled, success = self.compile_code(inp) + compiled, success = self.compile_code(inp, log_name="ctxvisit") if success: original_lines = tuple(inp.splitlines()) - used_lines = set() + remaining_ln_pieces = {} new_inp_lines = [] last_ln = 1 for compiled_line in compiled.splitlines(): @@ -165,11 +168,24 @@ def new_ctxvisit(self, ctxtransformer, node, inp, ctx, mode="exec", *args, **kwa except IndexError: logger.log_exc() line = original_lines[-1] - if line in used_lines: - line = "" + remaining_pieces = remaining_ln_pieces.get(ln) + if remaining_pieces is None: + # we handle our own inner_environment rather than have remove_strs do it so that we can reformat + with self.compiler.inner_environment(): + line_no_strs = self.compiler.remove_strs(line, inner_environment=False) + if ";" in line_no_strs: + remaining_pieces = [ + self.compiler.reformat(piece, ignore_errors=True) + for piece in line_no_strs.split(";") + ] + else: + remaining_pieces = [line] + if remaining_pieces: + new_line = remaining_pieces.pop(0) else: - used_lines.add(line) - new_inp_lines.append(line) + new_line = "" + remaining_ln_pieces[ln] = remaining_pieces + new_inp_lines.append(new_line) last_ln = ln inp = "\n".join(new_inp_lines) @@ -186,7 +202,7 @@ def __call__(self, xsh, **kwargs): if self.compiler is None: from coconut.compiler import Compiler self.compiler = Compiler(**coconut_kernel_kwargs) - self.compiler.warm_up() + self.compiler.warm_up(enable_incremental_mode=interpreter_uses_incremental) if self.runner is None: from coconut.command.util import Runner @@ -213,7 +229,7 @@ def unload(self, xsh): if not self.loaded: # hide imports to avoid circular dependencies from coconut.terminal import 
logger - logger.warn("attempting to unload Coconut xontrib but it was never loaded") + logger.warn("attempting to unload Coconut xontrib but it was already unloaded") self.loaded = False diff --git a/coconut/root.py b/coconut/root.py index 4b0454312..32cd33428 100644 --- a/coconut/root.py +++ b/coconut/root.py @@ -23,7 +23,7 @@ # VERSION: # ----------------------------------------------------------------------------------------------------------------------- -VERSION = "3.0.2" +VERSION = "3.0.3" VERSION_NAME = None # False for release, int >= 1 for develop DEVELOP = False diff --git a/coconut/terminal.py b/coconut/terminal.py index bdb92196e..30db0ecf4 100644 --- a/coconut/terminal.py +++ b/coconut/terminal.py @@ -50,6 +50,7 @@ error_color_code, log_color_code, ansii_escape, + force_verbose_logger, ) from coconut.util import ( get_clock_time, @@ -178,7 +179,8 @@ def logging(self): class Logger(object): """Container object for various logger functions and variables.""" - verbose = False + force_verbose = force_verbose_logger + verbose = force_verbose quiet = False path = None name = None @@ -215,6 +217,15 @@ def copy(self): """Make a copy of the logger.""" return Logger(self) + def setup(self, quiet=None, verbose=None, tracing=None): + """Set up the logger with the given parameters.""" + if quiet is not None: + self.quiet = quiet + if not self.force_verbose and verbose is not None: + self.verbose = verbose + if tracing is not None: + self.tracing = tracing + def display( self, messages, @@ -337,6 +348,14 @@ def log_vars(self, message, variables, rem_vars=("self",)): del new_vars[v] self.printlog(message, new_vars) + def log_loc(self, name, original, loc): + """Log a location in source code.""" + if self.verbose: + if isinstance(loc, int): + self.printlog("in error construction:", str(name), "=", repr(original[:loc]), "|", repr(original[loc:])) + else: + self.printlog("in error construction:", str(name), "=", repr(loc)) + def get_error(self, err=None, show_tb=None): 
"""Properly formats the current error.""" if err is None: @@ -430,9 +449,10 @@ def print_trace(self, *args): trace = " ".join(str(arg) for arg in args) self.printlog(_indent(trace, self.trace_ind)) - def log_tag(self, tag, code, multiline=False): + def log_tag(self, tag, code, multiline=False, force=False): """Logs a tagged message if tracing.""" - if self.tracing: + if self.tracing or force: + assert not (not DEVELOP and force), tag if callable(code): code = code() tagstr = "[" + str(tag) + "]" @@ -471,16 +491,17 @@ def log_trace(self, expr, original, loc, item=None, extra=None): self.print_trace(*out) def _trace_success_action(self, original, start_loc, end_loc, expr, tokens): - if self.tracing and self.verbose: # avoid the overhead of an extra function call + if self.tracing: # avoid the overhead of an extra function call self.log_trace(expr, original, start_loc, tokens) def _trace_exc_action(self, original, loc, expr, exc): - if self.tracing: # avoid the overhead of an extra function call + if self.tracing and self.verbose: # avoid the overhead of an extra function call self.log_trace(expr, original, loc, exc) def trace(self, item): """Traces a parse element (only enabled in develop).""" if DEVELOP and not MODERN_PYPARSING: + # setDebugActions doesn't work as it won't let us set any actions to None item.debugActions = ( None, # no start action self._trace_success_action, @@ -498,10 +519,13 @@ def gather_parsing_stats(self): yield finally: elapsed_time = get_clock_time() - start_time - self.printlog("Time while parsing:", elapsed_time, "secs") + self.printlog("Time while parsing" + (" " + self.path if self.path else "") + ":", elapsed_time, "secs") if use_packrat_parser: hits, misses = ParserElement.packrat_cache_stats self.printlog("\tPackrat parsing stats:", hits, "hits;", misses, "misses") + # reset stats after printing if in incremental mode + if ParserElement._incrementalEnabled: + ParserElement.packrat_cache_stats[:] = [0] * 
len(ParserElement.packrat_cache_stats) else: yield @@ -528,21 +552,24 @@ def timed_func(*args, **kwargs): return func(*args, **kwargs) return timed_func - def debug_func(self, func): + def debug_func(self, func, func_name=None): """Decorates a function to print the input/output behavior.""" + if func_name is None: + func_name = func + @wraps(func) def printing_func(*args, **kwargs): """Function decorated by logger.debug_func.""" if not DEVELOP or self.quiet: return func(*args, **kwargs) if not kwargs: - self.printerr(func, "<*|", args) + self.printerr(func_name, "<*|", args) elif not args: - self.printerr(func, "<**|", kwargs) + self.printerr(func_name, "<**|", kwargs) else: - self.printerr(func, "<<|", args, kwargs) + self.printerr(func_name, "<<|", args, kwargs) out = func(*args, **kwargs) - self.printerr(func, "=>", repr(out)) + self.printerr(func_name, "=>", repr(out)) return out return printing_func diff --git a/coconut/tests/constants_test.py b/coconut/tests/constants_test.py index 7c5186781..2df0da3ba 100644 --- a/coconut/tests/constants_test.py +++ b/coconut/tests/constants_test.py @@ -96,10 +96,10 @@ def test_imports(self): or PY26 and old_imp == "ttk" # don't test tkinter on PyPy or PYPY and new_imp.startswith("tkinter") - # don't test trollius on PyPy - or PYPY and old_imp == "trollius" - # don't test typing_extensions, async_generator on Python 2 - or PY2 and old_imp.startswith(("typing_extensions", "async_generator")) + # don't test trollius, aenum on PyPy + or PYPY and old_imp in ("trollius", "aenum") + # don't test typing_extensions, async_generator + or old_imp.startswith(("typing_extensions", "async_generator")) ): pass elif sys.version_info >= ver_cutoff: @@ -113,6 +113,18 @@ def test_reqs(self): for maxed_ver in constants.max_versions: assert isinstance(maxed_ver, tuple) or maxed_ver in ("pyparsing", "cPyparsing"), "maxed versions must be tagged to a specific Python version" + def test_run_args(self): + assert "--run" not in 
constants.coconut_base_run_args + assert "--quiet" not in constants.coconut_base_run_args + assert not any(arg.startswith("--target") for arg in constants.coconut_base_run_args) + + def test_targets(self): + assert all(v in constants.specific_targets or v in constants.pseudo_targets for v in ROOT_HEADER_VERSIONS) + + def test_tuples(self): + assert isinstance(constants.indchars, tuple) + assert isinstance(constants.comment_chars, tuple) + # ----------------------------------------------------------------------------------------------------------------------- # MAIN: diff --git a/coconut/tests/main_test.py b/coconut/tests/main_test.py index b5183d6fb..d30e9b793 100644 --- a/coconut/tests/main_test.py +++ b/coconut/tests/main_test.py @@ -33,7 +33,7 @@ import pytest import pexpect -from coconut.util import noop_ctx +from coconut.util import noop_ctx, get_target_info from coconut.terminal import ( logger, LoggingStringIO, @@ -42,6 +42,9 @@ call_output, reload, ) +from coconut.compiler.util import ( + get_psf_target, +) from coconut.constants import ( WINDOWS, PYPY, @@ -59,6 +62,8 @@ icoconut_custom_kernel_name, mypy_err_infixes, get_bool_env_var, + coconut_cache_dir, + default_use_cache_dir, ) from coconut.api import ( @@ -84,16 +89,26 @@ # ----------------------------------------------------------------------------------------------------------------------- -default_recursion_limit = "4096" -default_stack_size = "4096" +default_recursion_limit = "6144" +default_stack_size = "6144" + +jupyter_timeout = 120 base = os.path.dirname(os.path.relpath(__file__)) src = os.path.join(base, "src") dest = os.path.join(base, "dest") additional_dest = os.path.join(base, "dest", "additional_dest") +src_cache_dir = os.path.join(src, coconut_cache_dir) + runnable_coco = os.path.join(src, "runnable.coco") runnable_py = os.path.join(src, "runnable.py") +runnable_compiled_loc = src_cache_dir if default_use_cache_dir else runnable_py + +importable_coco = os.path.join(src, 
"importable.coco") +importable_py = os.path.join(src, "importable.py") +importable_compiled_loc = src_cache_dir if default_use_cache_dir else importable_py + pyston = os.path.join(os.curdir, "pyston") pyprover = os.path.join(os.curdir, "pyprover") prelude = os.path.join(os.curdir, "coconut-prelude") @@ -198,7 +213,17 @@ def call_with_import(module_name, extra_argv=[], assert_result=True): return stdout, stderr, retcode -def call(raw_cmd, assert_output=False, check_mypy=False, check_errors=True, stderr_first=False, expect_retcode=0, convert_to_import=False, **kwargs): +def call( + raw_cmd, + assert_output=False, + check_mypy=False, + check_errors=True, + stderr_first=False, + expect_retcode=0, + convert_to_import=False, + assert_output_only_at_end=None, + **kwargs +): """Execute a shell command and assert that no errors were encountered.""" if isinstance(raw_cmd, str): cmd = raw_cmd.split() @@ -213,10 +238,13 @@ def call(raw_cmd, assert_output=False, check_mypy=False, check_errors=True, stde elif assert_output is True: assert_output = ("",) elif isinstance(assert_output, str): - if "\n" not in assert_output: - assert_output = (assert_output,) + if assert_output_only_at_end is None and "\n" in assert_output: + assert_output_only_at_end = False + assert_output = (assert_output,) else: assert_output = tuple(x if x is not True else "" for x in assert_output) + if assert_output_only_at_end is None: + assert_output_only_at_end = True if convert_to_import is None: convert_to_import = ( @@ -311,10 +339,7 @@ def call(raw_cmd, assert_output=False, check_mypy=False, check_errors=True, stde if check_mypy and all(test not in line for test in ignore_mypy_errs_with): assert "error:" not in line, "MyPy error in " + repr(line) - if isinstance(assert_output, str): - got_output = "\n".join(raw_lines) + "\n" - assert assert_output in got_output, "Expected " + repr(assert_output) + "; got " + repr(got_output) - else: + if assert_output_only_at_end: last_line = "" for line in 
reversed(lines): if not any(ignore in line for ignore in ignore_last_lines_with): @@ -328,10 +353,15 @@ def call(raw_cmd, assert_output=False, check_mypy=False, check_errors=True, stde + " in " + repr(last_line) + "; got:\n" + "\n".join(repr(li) for li in raw_lines) ) + else: + got_output = "\n".join(raw_lines) + "\n" + assert any(x in got_output for x in assert_output), "Expected " + repr(assert_output) + "; got " + repr(got_output) def call_python(args, **kwargs): """Calls the current Python.""" + if get_bool_env_var("COCONUT_TEST_DEBUG_PYTHON"): + args = ["-X", "dev"] + args call([sys.executable] + args, **kwargs) @@ -381,27 +411,30 @@ def rm_path(path, allow_keep=False): @contextmanager -def using_path(path): - """Removes a path at the beginning and end.""" - if os.path.exists(path): - rm_path(path) +def using_paths(*paths): + """Removes paths at the beginning and end.""" + for path in paths: + if os.path.exists(path): + rm_path(path) try: yield finally: - try: - rm_path(path, allow_keep=True) - except OSError: - logger.print_exc() + for path in paths: + try: + rm_path(path, allow_keep=True) + except OSError: + logger.print_exc() @contextmanager -def using_dest(dest=dest): +def using_dest(dest=dest, allow_existing=False): """Makes and removes the dest folder.""" try: os.mkdir(dest) except Exception: - rm_path(dest) - os.mkdir(dest) + if not allow_existing: + rm_path(dest) + os.mkdir(dest) try: yield finally: @@ -671,7 +704,19 @@ def install_bbopt(): def run_runnable(args=[]): """Call coconut-run on runnable_coco.""" - call(["coconut-run"] + args + [runnable_coco, "--arg"], assert_output=True) + paths_being_used = [importable_compiled_loc] + if "--no-write" not in args and "-n" not in args: + paths_being_used.append(runnable_compiled_loc) + with using_paths(*paths_being_used): + call(["coconut-run"] + args + [runnable_coco, "--arg"], assert_output=True) + + +def comp_runnable(args=[]): + """Just compile runnable.""" + if "--target" not in args: + args += 
["--target", "sys"] + call_coconut([runnable_coco, "--and", importable_coco] + args) + call_coconut([runnable_coco, "--and", importable_coco] + args, assert_output="Left unchanged", assert_output_only_at_end=False) # ----------------------------------------------------------------------------------------------------------------------- @@ -681,6 +726,9 @@ def run_runnable(args=[]): @add_test_func_names class TestShell(unittest.TestCase): + def test_version(self): + call(["coconut", "--version"]) + def test_code(self): call(["coconut", "-s", "-c", coconut_snip], assert_output=True) @@ -721,7 +769,7 @@ def test_api(self): def test_import_hook(self): with using_sys_path(src): - with using_path(runnable_py): + with using_paths(runnable_compiled_loc, importable_compiled_loc): with using_coconut(): auto_compilation(True) import runnable @@ -729,20 +777,19 @@ def test_import_hook(self): assert runnable.success == "" def test_runnable(self): - with using_path(runnable_py): - run_runnable() + run_runnable() def test_runnable_nowrite(self): run_runnable(["-n"]) def test_compile_runnable(self): - with using_path(runnable_py): - call_coconut([runnable_coco, runnable_py]) + with using_paths(runnable_py, importable_py): + comp_runnable() call_python([runnable_py, "--arg"], assert_output=True) def test_import_runnable(self): - with using_path(runnable_py): - call_coconut([runnable_coco, runnable_py]) + with using_paths(runnable_py, importable_py): + comp_runnable() for _ in range(2): # make sure we can import it twice call_python([runnable_py, "--arg"], assert_output=True, convert_to_import=True) @@ -754,6 +801,8 @@ def test_xontrib(self): p.expect("$") p.sendline("!(ls -la) |> bool") p.expect("True") + p.sendline("'1; 2' |> print") + p.expect("1; 2") p.sendline('$ENV_VAR = "ABC"') p.expect("$") p.sendline('echo f"{$ENV_VAR}"; echo f"{$ENV_VAR}"') @@ -763,6 +812,12 @@ def test_xontrib(self): if PY36: p.sendline("echo 123;; 123") p.expect("123;; 123") + p.sendline("echo abc; echo 
abc") + p.expect("abc") + p.expect("abc") + p.sendline("echo abc; print(1 |> (.+1))") + p.expect("abc") + p.expect("2") p.sendline('execx("10 |> print")') p.expect("subprocess mode") p.sendline("xontrib unload coconut") @@ -797,11 +852,12 @@ def test_kernel_installation(self): if not WINDOWS and not PYPY: def test_jupyter_console(self): p = spawn_cmd("coconut --jupyter console") - p.expect("In", timeout=120) + p.expect("In", timeout=jupyter_timeout) p.sendline("%load_ext coconut") - p.expect("In", timeout=120) + p.expect("In", timeout=jupyter_timeout) p.sendline("`exit`") - p.expect("Shutting down kernel|shutting down") + if sys.version_info[:2] != (3, 6): + p.expect("Shutting down kernel|shutting down", timeout=jupyter_timeout) if p.isalive(): p.terminate() @@ -809,6 +865,22 @@ def test_jupyter_console(self): @add_test_func_names class TestCompilation(unittest.TestCase): + def test_simple_no_line_numbers(self): + run_runnable(["-n", "--no-line-numbers"]) + + def test_simple_keep_lines(self): + run_runnable(["-n", "--keep-lines"]) + + def test_simple_no_line_numbers_keep_lines(self): + run_runnable(["-n", "--no-line-numbers", "--keep-lines"]) + + def test_simple_minify(self): + run_runnable(["-n", "--minify"]) + + if sys.version_info >= get_target_info(get_psf_target()): + def test_simple_psf(self): + run_runnable(["-n", "--target", "psf"]) + def test_normal(self): run() @@ -818,18 +890,7 @@ def test_mypy_sys(self): if sys.version_info[:2] in always_sys_versions: def test_always_sys(self): - run(["--line-numbers"], agnostic_target="sys", always_sys=True) - - # run fewer tests on Windows so appveyor doesn't time out - if not WINDOWS: - def test_line_numbers_keep_lines(self): - run(["--line-numbers", "--keep-lines"]) - - def test_strict(self): - run(["--strict"]) - - def test_and(self): - run(["--and"]) # src and dest built by comp + run(agnostic_target="sys", always_sys=True) def test_target(self): run(agnostic_target=(2 if PY2 else 3)) @@ -843,6 +904,17 @@ def 
test_package(self): def test_no_tco(self): run(["--no-tco"]) + # run fewer tests on Windows so appveyor doesn't time out + if not WINDOWS: + def test_keep_lines(self): + run(["--keep-lines"]) + + def test_strict(self): + run(["--strict"]) + + def test_and(self): + run(["--and"]) # src and dest built by comp + if PY35: def test_no_wrap(self): run(["--no-wrap"]) @@ -857,25 +929,14 @@ def test_trace(self): # avoids a strange, unreproducable failure on appveyor if not (WINDOWS and sys.version_info[:2] == (3, 8)): - def test_run(self): + def test_run_arg(self): run(use_run_arg=True) - if not PYPY and not PY26: + # not WINDOWS is for appveyor timeout prevention + if not WINDOWS and not PYPY and not PY26: def test_jobs_zero(self): run(["--jobs", "0"]) - def test_simple_line_numbers(self): - run_runnable(["-n", "--line-numbers"]) - - def test_simple_keep_lines(self): - run_runnable(["-n", "--keep-lines"]) - - def test_simple_line_numbers_keep_lines(self): - run_runnable(["-n", "--line-numbers", "--keep-lines"]) - - def test_simple_minify(self): - run_runnable(["-n", "--minify"]) - # more appveyor timeout prevention if not WINDOWS: @@ -884,25 +945,25 @@ class TestExternal(unittest.TestCase): if not PYPY or PY2: def test_prelude(self): - with using_path(prelude): + with using_paths(prelude): comp_prelude() if MYPY and PY38: run_prelude() def test_bbopt(self): - with using_path(bbopt): + with using_paths(bbopt): comp_bbopt() if not PYPY and PY38 and not PY310: install_bbopt() def test_pyprover(self): - with using_path(pyprover): + with using_paths(pyprover): comp_pyprover() if PY38: run_pyprover() def test_pyston(self): - with using_path(pyston): + with using_paths(pyston): comp_pyston(["--no-tco"]) if PYPY and PY2: run_pyston() diff --git a/coconut/tests/src/cocotest/agnostic/primary.coco b/coconut/tests/src/cocotest/agnostic/primary.coco index 7b7d3ef5b..b4db19453 100644 --- a/coconut/tests/src/cocotest/agnostic/primary.coco +++ 
b/coconut/tests/src/cocotest/agnostic/primary.coco @@ -1,23 +1,24 @@ -import sys import itertools import collections import collections.abc import weakref +import platform from copy import copy operator log10 from math import \log10 as (log10) -# need to be at top level to avoid binding sys as a local in primary_test from importlib import reload # NOQA -from enum import Enum # noqa +if platform.python_implementation() == "CPython": # fixes weird aenum issue on pypy + from enum import Enum # noqa -from .util import assert_raises +from .util import assert_raises, typed_eq def primary_test() -> bool: """Basic no-dependency tests.""" # must come at start so that local sys binding is correct + import sys import queue as q, builtins, email.mime.base assert q.Queue # type: ignore assert builtins.len([1, 1]) == 2 @@ -30,7 +31,16 @@ def primary_test() -> bool: bio = BytesIO(b"herp") assert bio.read() == b"herp" if TYPE_CHECKING or sys.version_info >= (3, 5): - from typing import Iterable, Any + from typing import ( + Iterable, + Any, + List, + Dict, + cast, + Protocol, + TypeVar, + Generic, + ) # NOQA assert 1 | 2 == 3 assert "\n" == ( @@ -522,8 +532,6 @@ def primary_test() -> bool: assert f"{x} == {y}" == "1 == 2" assert f"{x!r} == {y!r}" == "1 == " + py_repr("2") assert f"{({})}" == "{}" == f"{({})!r}" - assert f"{{" == "{" - assert f"}}" == "}" assert f"{1, 2}" == "(1, 2)" assert f"{[] |> len}" == "0" match {"a": {"b": x }} or {"a": {"b": {"c": x}}} = {"a": {"b": {"c": "x"}}} @@ -807,7 +815,6 @@ def primary_test() -> bool: else: assert False x = 1 - assert f"a" f"b" == "ab" == f"a" "b" == "a" f"b" assert f"{x}" f"{x}" == "11" assert f"{x}" "{x}" == "1{x}" assert "{x}" f"{x}" == "{x}1" @@ -1060,8 +1067,6 @@ def primary_test() -> bool: assert xs == [2, 3] assert xs `isinstance` list (1, *(2, 3), 4) = (|1, 2, 3, 4|) - assert f"a" r"b" fr"c" rf"d" == "abcd" - assert "a" fr"b" == "ab" == "a" rf"b" int(1) = 1 [1] + [2] + m + [3] + [4] = [1,2,"?",3,4] assert m == ["?"] @@ 
-1321,8 +1326,11 @@ def primary_test() -> bool: assert 2 not in m assert m{1, 2}.isdisjoint(m{3, 4}) assert not m{1, 2}.isdisjoint(m{2, 3}) - assert m{1, 2} ^ m{2, 3} == m{1, 3} - assert m{1, 1} ^ m{1} == m{1} + assert m{1, 2} ^ m{2, 3} `typed_eq` m{1, 3} + m = m{1, 2} + m ^= m{2, 3} + assert m `typed_eq` m{1, 3} + assert m{1, 1} ^ m{1} `typed_eq` m{1} assert multiset((1, 2)) == m{1, 2} == multiset(m{1, 2}) assert multiset({1: 2, 2: 1}) == m{1, 1, 2} assert m{} `isinstance` multiset @@ -1603,4 +1611,37 @@ def primary_test() -> bool: assert n[0] == 0 assert_raises(-> m{{1:2,2:3}}, TypeError) assert_raises((def -> from typing import blah), ImportError) # NOQA + assert type(m{1, 2}) is multiset + assert multiset(collections.Counter({1: 1, 2: 1})) `typed_eq` m{1, 2} + assert +m{-1, 1} `typed_eq` m{-1, 1} + assert -m{-1, 1} `typed_eq` m{} + assert m{1, 1, 2} + m{1, 3} `typed_eq` m{1, 1, 1, 2, 3} + assert m{1, 1, 2} | m{1, 3} `typed_eq` m{1, 1, 2, 3} + assert m{1, 1, 2} & m{1, 3} `typed_eq` m{1} + assert m{1, 1, 2} - m{1, 3} `typed_eq` m{1, 2} + assert (.+1) `and_then` (.*2) `and_then_await` (./3) |> repr == "$(?, 1) `and_then` $(?, 2) `and_then_await` $(?, 3)" + assert 5.5⏨3 == 5.5 * 10**3 + assert (x => x)(5) == 5 == (def x => x)(5) + assert (=> _)(5) == 5 == (def => _)(5) + assert ((x, y) => (x, y))(1, 2) == (1, 2) == (def (x, y) => (x, y))(1, 2) + assert (def (int(x)) => x)(5) == 5 == (def (int -> x) => x)("5") + assert (def (x: int) -> int => x)(5) == 5 == (def (int(x)) -> int => x)(5) + assert (x ⇒ x)(5) == 5 == (def x ⇒ x)(5) + assert f"a: { "abc" }" == "a: abc" == f'a: { 'abc' }' + assert f"1 + {"a" + "b"} + 2 + {"c" + "d"}" == "1 + ab + 2 + cd" == f'1 + {'a' + 'b'} + 2 + {'c' + 'd'}' + assert f"{"a" + "b"} + c + {"d" + "e"}" == "ab + c + de" == f'{'a' + 'b'} + c + {'d' + 'e'}' + assert f"""{""" +"""}""" == """ +""" == f"""{''' +'''}""" + assert f"""{( + )}""" == "()" == f'''{( + )}''' + assert f"{'\n'.join(["", ""])}" == "\n" + assert 
f"""{f'''{f'{f"{1+1}"}'}'''}""" == "2" == f"{f"{f"{f"{f"{f"{1+1}"}"}"}"}"}" + assert f"___{ + 1 +}___" == '___1___' == f"___{( + 1 +)}___" return True diff --git a/coconut/tests/src/cocotest/agnostic/specific.coco b/coconut/tests/src/cocotest/agnostic/specific.coco index 2cd9d3858..1a3b8ba6f 100644 --- a/coconut/tests/src/cocotest/agnostic/specific.coco +++ b/coconut/tests/src/cocotest/agnostic/specific.coco @@ -146,9 +146,9 @@ def py36_spec_test(tco: bool) -> bool: hello: Literal["hello"] = "hello" hello = HasStr(hello).get() - def and_then[**P, T, U](f: (**P) -> T, g: T -> U) -> (**P) -> U = + def forward_compose[**P, T, U](f: (**P) -> T, g: T -> U) -> (**P) -> U = (*args, **kwargs) -> g(f(*args, **kwargs)) - assert (.+5) `and_then` (.*2) <| 3 == 16 + assert (.+5) `forward_compose` (.*2) <| 3 == 16 def mk_repeat[T, **P](f: (T, **P) -> T) -> (int, T, **P) -> T: def newf(n: int, x: T, *args, **kwargs) -> T: diff --git a/coconut/tests/src/cocotest/agnostic/suite.coco b/coconut/tests/src/cocotest/agnostic/suite.coco index 666fb773f..7e0440630 100644 --- a/coconut/tests/src/cocotest/agnostic/suite.coco +++ b/coconut/tests/src/cocotest/agnostic/suite.coco @@ -633,8 +633,8 @@ def suite_test() -> bool: assert map(Ad().ef, range(5)) |> list == range(1, 6) |> list assert Ad().ef 1 == 2 assert store.plus1 store.one == store.two - assert ret_locals()["abc"] == 1 - assert ret_globals()["abc"] == 1 + assert ret_locals()["my_loc"] == 1 + assert ret_globals()["my_glob"] == 1 assert pos_only(1, 2) == (1, 2) try: pos_only(a=1, b=2) diff --git a/coconut/tests/src/cocotest/agnostic/tutorial.coco b/coconut/tests/src/cocotest/agnostic/tutorial.coco index 8023ed71e..3eeabae34 100644 --- a/coconut/tests/src/cocotest/agnostic/tutorial.coco +++ b/coconut/tests/src/cocotest/agnostic/tutorial.coco @@ -1,6 +1,6 @@ # WEBSITE: -plus1 = x -> x + 1 +plus1 = x => x + 1 assert plus1(5) == 6 assert range(10) |> map$(.**2) |> list == [0, 1, 4, 9, 16, 25, 36, 49, 64, 81] @@ -328,7 +328,7 @@ data 
vector2(x, y): # Test cases: assert vector2(1, 2) |> str == "vector2(x=1, y=2)" assert vector2(3, 4) |> abs == 5 -assert vector2(1, 2) |> fmap$(x -> x*2) |> str == "vector2(x=2, y=4)" +assert vector2(1, 2) |> fmap$(x => x*2) |> str == "vector2(x=2, y=4)" v = vector2(2, 3) try: v.x = 7 @@ -396,7 +396,7 @@ assert (vector(2, 4) == vector(2, 4)) is True assert 2*vector(1, 2) |> str == "vector(*pts=(2, 4))" assert vector(1, 2) * vector(1, 3) == 7 -def diagonal_line(n) = range(n+1) |> map$(i -> (i, n-i)) +def diagonal_line(n) = range(n+1) |> map$(i => (i, n-i)) assert diagonal_line(0) `isinstance` (list, tuple) is False assert diagonal_line(0) |> list == [(0, 0)] @@ -449,7 +449,7 @@ data vector(*pts): """Necessary to make scalar multiplication commutative.""" self * other -def diagonal_line(n) = range(n+1) |> map$(i -> (i, n-i)) +def diagonal_line(n) = range(n+1) |> map$(i => (i, n-i)) def linearized_plane(n=0) = diagonal_line(n) :: linearized_plane(n+1) def vector_field() = linearized_plane() |> starmap$(vector) @@ -497,7 +497,7 @@ data vector(*pts): """Necessary to make scalar multiplication commutative.""" self * other # New one-line functions necessary for finding the angle between vectors: - def __truediv__(self, other) = self.pts |> map$(x -> x/other) |*> vector + def __truediv__(self, other) = self.pts |> map$(x => x/other) |*> vector def unit(self) = self / abs(self) def angle(self, other `isinstance` vector) = math.acos(self.unit() * other.unit()) diff --git a/coconut/tests/src/cocotest/agnostic/util.coco b/coconut/tests/src/cocotest/agnostic/util.coco index 38cbadc26..427245454 100644 --- a/coconut/tests/src/cocotest/agnostic/util.coco +++ b/coconut/tests/src/cocotest/agnostic/util.coco @@ -43,6 +43,9 @@ except NameError, TypeError: return addpattern(func, base_func, **kwargs) return pattern_prepender +def x `typed_eq` y = (type(x), x) == (type(y), y) + + # Old functions: old_fmap = fmap$(starmap_over_mappings=True) @@ -1056,15 +1059,10 @@ class 
unrepresentable: # Typing if TYPE_CHECKING or sys.version_info >= (3, 5): - from typing import ( - List, - Dict, - Any, - cast, - Protocol, - TypeVar, - Generic, - ) + # test from typing import *, but that doesn't actually get us + # the typing_extensions stuff we need, so also then import those + from typing import * + from typing import Protocol T = TypeVar("T", covariant=True) U = TypeVar("U", contravariant=True) @@ -1361,11 +1359,12 @@ class store: # Locals and globals def ret_locals() = - abc = 1 + my_loc = 1 locals() + +my_glob = 1 def ret_globals() = - abc = 1 - locals() + globals() global glob = 0 copyclosure def wrong_get_set_glob(x): diff --git a/coconut/tests/src/cocotest/non_strict/non_strict_test.coco b/coconut/tests/src/cocotest/non_strict/non_strict_test.coco index 33bea2e47..5550ee1f5 100644 --- a/coconut/tests/src/cocotest/non_strict/non_strict_test.coco +++ b/coconut/tests/src/cocotest/non_strict/non_strict_test.coco @@ -82,6 +82,13 @@ def non_strict_test() -> bool: assert a_dict["a"] == 1 assert "". <| "join" <| ["1","2","3"] == "123" assert "a b c" == (" ". ?? "not gonna happen")("join")("abc") + assert f'{ (lambda x: x*2)(2) }' == "4" + assert f"{{" == "{" + assert f"}}" == "}" + assert f"a" f"b" == "ab" == f"a" "b" == "a" f"b" + assert f"a" r"b" fr"c" rf"d" == "abcd" + assert "a" fr"b" == "ab" == "a" rf"b" + assert f"{f"{f"infinite"}"}" + " " + f"{f"nesting!!!"}" == "infinite nesting!!!" 
return True if __name__ == "__main__": diff --git a/coconut/tests/src/cocotest/target_sys/target_sys_test.coco b/coconut/tests/src/cocotest/target_sys/target_sys_test.coco index 10cf50399..012c4a6eb 100644 --- a/coconut/tests/src/cocotest/target_sys/target_sys_test.coco +++ b/coconut/tests/src/cocotest/target_sys/target_sys_test.coco @@ -44,6 +44,8 @@ def it_ret_tuple(x, y): def asyncio_test() -> bool: import asyncio + def toa(f) = async def (*args, **kwargs) -> f(*args, **kwargs) + async def async_map_0(args): return parallel_map(args[0], *args[1:]) async def async_map_1(args) = parallel_map(args[0], *args[1:]) @@ -54,12 +56,35 @@ def asyncio_test() -> bool: for async_map in (async_map_0, async_map_1, async_map_2, async_map_3, async_map_4): assert (await ((pow$(2), range(5)) |> async_map)) |> tuple == (1, 2, 4, 8, 16) True + async def aplus(x) = y -> x + y aplus_: async int -> int -> int = async def x -> y -> x + y + if sys.version_info >= (3, 5) or TYPE_CHECKING: type AsyncNumFunc[T <: int | float] = async T -> T aplus1: AsyncNumFunc[int] = async def x -> x + 1 - async def main(): + + def and_then_[**T, U, V]( + first_async_func: async (**T) -> U, + second_func: U -> V, + ) -> async (**T) -> V = + async def (*args, **kwargs) -> ( + first_async_func(*args, **kwargs) + |> await + |> second_func + ) + def and_then_await_[**T, U, V]( + first_async_func: async (**T) -> U, + second_async_func: async U -> V, + ) -> async (**T) -> V = + async def (*args, **kwargs) -> ( + first_async_func(*args, **kwargs) + |> await + |> second_async_func + |> await + ) + + async def main() -> None: assert await async_map_test() assert `(+)$(1) .. await (aplus 1)` 1 == 3 assert `(.+1) .. 
await (aplus_ 1)` 1 == 3 @@ -68,6 +93,52 @@ def asyncio_test() -> bool: assert await (async match def (int(x), int(y)) -> x + y)(1, 2) == 3 assert await (match async def (int(x), int(y)) -> x + y)(1, 2) == 3 assert await (aplus1 2) == 3 + assert ( + 10 + |> aplus1 `and_then` (.*2) + |> await + ) == 22 == ( + 10 + |> aplus1 `and_then_` (.*2) + |> await + ) + assert ( + 10 + |> aplus1 `and_then_await` aplus1 + |> await + ) == 12 == ( + 10 + |> aplus1 `and_then_await_` aplus1 + |> await + ) + assert ( + 10 + |> aplus1 + `and_then` ((.*2) ..> (.*3)) + `and_then_await` aplus1 + `and_then_await` ((.+4) ..> aplus1) + `and_then` (./6) + |> await + ) == 12 == ( + 10 + |> aplus1 + `and_then_` ((.*2) ..> (.*3)) + `and_then_await_` aplus1 + `and_then_await_` ((.+4) ..> aplus1) + `and_then_` (./6) + |> await + ) + assert ( + 4 + |> toa(x -> (1, 2, 3, x)) + `and_then` (ident ..*> (,)) + |> await + ) == (1, 2, 3, 4) == ( + 4 + |> toa(x -> (1, 2, 3, x)) + `and_then_` (ident ..*> (,)) + |> await + ) loop = asyncio.new_event_loop() loop.run_until_complete(main()) @@ -129,3 +200,7 @@ def target_sys_test() -> bool: assert l == [10] return True + + +if __name__ == "__main__": + target_sys_test() |> print diff --git a/coconut/tests/src/extras.coco b/coconut/tests/src/extras.coco index fb46c2e99..854903912 100644 --- a/coconut/tests/src/extras.coco +++ b/coconut/tests/src/extras.coco @@ -23,6 +23,8 @@ from coconut.convenience import ( setup, parse, coconut_eval, + coconut_exec, + warm_up, ) if IPY: @@ -61,7 +63,7 @@ def assert_raises(c, Exc, not_Exc=None, err_has=None): assert "unprintable" not in syntax_err_str, syntax_err_str assert " bool: + setup(line_numbers=False) + assert_raises((def -> import \(_coconut)), ImportError, err_has="should never be done at runtime") # NOQA assert_raises((def -> import \_coconut), ImportError, err_has="should never be done at runtime") # NOQA @@ -103,6 +107,9 @@ def test_setup_none() -> bool: assert_raises((def -> raise 
CoconutException("derp").syntax_err()), SyntaxError) assert coconut_eval("x -> x + 1")(2) == 3 assert coconut_eval("addpattern") + exec_vars: dict = {} + coconut_exec("def f(x) = x", exec_vars) + assert exec_vars["f"](10) == 10 assert parse("abc") == parse("abc", "sys") assert parse("abc", "file") @@ -166,16 +173,25 @@ mismatched open '[' and close ')' (line 1) [([){[} ~^ """.strip()) - assert_raises(-> parse("[())]"), CoconutSyntaxError, err_has=""" + assert_raises(-> parse("[())]"), CoconutSyntaxError, err_has=r""" mismatched open '[' and close ')' (line 1) [())] - ~~~^ + \~~^ """.strip()) assert_raises(-> parse("[[\n])"), CoconutSyntaxError, err_has=""" mismatched open '[' and close ')' (line 1) ]) ^ """.strip()) + assert_raises(-> parse(""" +a = 1 +b = f"{1+}" +c = 3 + """.strip()), CoconutSyntaxError, err_has=""" +parsing failed for format string expression: 1+ (line 2) + b = f"{1+}" + ^ + """.strip()) assert_raises(-> parse("(|*?>)"), CoconutSyntaxError, err_has="'|?*>'") assert_raises(-> parse("(|**?>)"), CoconutSyntaxError, err_has="'|?**>'") @@ -200,10 +216,11 @@ cannot reassign type variable 'T' (use explicit '\T' syntax if intended) (line 1 """.strip(), ) - assert_raises(-> parse("$"), CoconutParseError, err_has=" ^") - assert_raises(-> parse("range(1,10) |> reduce$(*, initializer = 1000) |> print"), CoconutParseError, err_has=" ~~~~~~~~~~~~~~~~~~~~~~~~^") - assert_raises(-> parse("a := b"), CoconutParseError, err_has=" ~~^") - assert_raises(-> parse("1 + return"), CoconutParseError, err_has=" ~~~~~^") + assert_raises(-> parse("$"), CoconutParseError) + assert_raises(-> parse("@"), CoconutParseError, err_has=("\n ~^", "\n ^")) + assert_raises(-> parse("range(1,10) |> reduce$(*, initializer = 1000) |> print"), CoconutParseError, err_has=" \\~~~~~~~~~~~~~~~~~~~~~~~^") + assert_raises(-> parse("a := b"), CoconutParseError, err_has=" \\~^") + assert_raises(-> parse("1 + return"), CoconutParseError, err_has=" \\~~~~^") assert_raises(-> parse(""" def f() = 
assert 1 @@ -216,13 +233,16 @@ def f() = """.strip(), """ assert 2 - ^ + ~^ """.strip() )) - assert_raises(-> parse('b"abc" "def"'), CoconutParseError, err_has=" ~~~~~~~^") - assert_raises(-> parse('"abc" b"def"'), CoconutParseError, err_has=" ~~~~~~^") - assert_raises(-> parse('"a" 10'), CoconutParseError, err_has=" ~~~~^") - assert_raises(-> parse("A. ."), CoconutParseError, err_has=" ~~~^") + assert_raises(-> parse('b"abc" "def"'), CoconutParseError, err_has=" \\~~~~~~^") + assert_raises(-> parse('"abc" b"def"'), CoconutParseError, err_has=" \\~~~~~^") + assert_raises(-> parse('"a" 10'), CoconutParseError, err_has=" \\~~~^") + assert_raises(-> parse("A. ."), CoconutParseError, err_has=" \\~~^") + assert_raises(-> parse('''f"""{ +}"""'''), CoconutSyntaxError, err_has=(" ~~~~|", "\n ^~~/")) + assert_raises(-> parse("f([] {})"), CoconutParseError, err_has=" \\~~~~^") assert_raises(-> parse("return = 1"), CoconutParseError, err_has='invalid use of the keyword "return"') assert_raises(-> parse("if a = b: pass"), CoconutParseError, err_has="misplaced assignment") @@ -250,10 +270,10 @@ def gam_eps_rate(bitarr) = ( if not PYPY: assert """ |> map$(int(?, 2)) - ~~~~~^""" in err_str or """ + \~~~~^""" in err_str or """ |> map$(int(?, 2)) - ~~~~~~~~~~~~~~~~~^""" in err_str, err_str + ~~~~~~~~~~~~~~~~^""" in err_str, err_str else: assert False @@ -273,19 +293,25 @@ def test_convenience() -> bool: assert_raises(-> cmd("-pa ."), SystemExit) assert_raises(-> cmd("-n . 
."), SystemExit) - setup(line_numbers=True) + setup() assert parse("abc", "lenient") == "abc #1 (line in Coconut source)" - setup(keep_lines=True) + setup(line_numbers=False, keep_lines=True) assert parse("abc", "lenient") == "abc # abc" - setup(line_numbers=True, keep_lines=True) + setup(keep_lines=True) assert parse("abc", "lenient") == "abc #1: abc" - - setup() + assert "#6:" in parse('''line 1 +f"""{""" +"""}""" + """ +""" + f"""{\'\'\' +\'\'\'}""" +line 6''') + + setup(line_numbers=False) assert "Deprecated Coconut built-in 'prepattern' disabled by --strict compilation" not in parse("\n", mode="file") assert "Deprecated Coconut built-in 'datamaker' disabled by --strict compilation" not in parse("\n", mode="file") assert "Deprecated Coconut built-in 'of' disabled by --strict compilation" not in parse("\n", mode="file") - setup(strict=True) + setup(line_numbers=False, strict=True) assert "Deprecated Coconut built-in 'prepattern' disabled by --strict compilation" in parse("\n", mode="file") assert "Deprecated Coconut built-in 'datamaker' disabled by --strict compilation" in parse("\n", mode="file") assert "Deprecated Coconut built-in 'of' disabled by --strict compilation" in parse("\n", mode="file") @@ -299,7 +325,7 @@ def test_convenience() -> bool: assert_raises(-> parse("a=1;"), CoconutStyleError, err_has="\n ^") assert_raises(-> parse("class derp(object)"), CoconutStyleError) assert_raises(-> parse("def f(a.b) = True"), CoconutStyleError, err_has="\n ^") - assert_raises(-> parse("match def kwd_only_x_is_int_def_0(*, x is int = 0) = x"), CoconutStyleError, err_has="\n ^") + assert_raises(-> parse("match def kwd_only_x_is_int_def_0(*, x is int = 0) = x"), CoconutStyleError, err_has="\n ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~|") try: parse(""" try: @@ -310,22 +336,23 @@ else: assert False """.strip()) except CoconutStyleError as err: - assert str(err) == """found deprecated isinstance-checking 'x is int is str' pattern; rewrite to use class patterns 
(try 'int(x) and str(x)') or explicit isinstance-checking ('x `isinstance` int and x `isinstance` str' should always work) (remove --strict to downgrade to a warning) (line 2) - x is int is str = x""", err + assert str(err).startswith("""found deprecated isinstance-checking 'x is int is str' pattern; rewrite to use class patterns (try 'int(x) and str(x)') or explicit isinstance-checking ('x `isinstance` int and x `isinstance` str' should always work) (remove --strict to downgrade to a warning) (line 2) + x is int is str = x"""), err assert_raises(-> parse("""case x: match x: pass"""), CoconutStyleError, err_has="case x:") assert_raises(-> parse("obj."), CoconutStyleError, err_has="getattr") assert_raises(-> parse("def x -> pass, 1"), CoconutStyleError, err_has="statement lambda") + assert_raises(-> parse("abc = f'abc'"), CoconutStyleError, err_has="\n ^") - setup(strict=True, target="sys") + setup(line_numbers=False, strict=True, target="sys") assert_raises(-> parse("await f x"), CoconutParseError, err_has='invalid use of the keyword "await"') - setup(target="2.7") + setup(line_numbers=False, target="2.7") assert parse("from io import BytesIO", mode="lenient") == "from io import BytesIO" assert_raises(-> parse("def f(*, x=None) = x"), CoconutTargetError, err_has="\n ^") - setup(target="3") + setup(line_numbers=False, target="3") assert parse(""" async def async_map_test() = for async_map in (async_map_0, async_map_1, async_map_2, async_map_3, async_map_4): @@ -333,7 +360,7 @@ async def async_map_test() = True """.strip()) - setup(target="3.3") + setup(line_numbers=False, target="3.3") gen_func_def = """def f(x): yield x return x""" @@ -345,42 +372,66 @@ async def async_map_test() = ) assert parse(gen_func_def, mode="lenient") in gen_func_def_outs - setup(target="3.2") + setup(line_numbers=False, target="3.2") assert parse(gen_func_def, mode="lenient") not in gen_func_def_outs - setup(target="3.4") + setup(line_numbers=False, target="3.4") assert_raises(-> 
parse("async def f(): yield 1"), CoconutTargetError) - setup(target="3.5") + setup(line_numbers=False, target="3.5") assert parse("async def f(): yield 1") assert_raises(-> parse("""async def agen(): yield from range(5)"""), CoconutSyntaxError, err_has="async generator") - setup(target="3.6") + setup(line_numbers=False, target="3.6") assert parse("def f(*, x=None) = x") assert "@" not in parse("async def f(x): yield x") - setup(target="3.8") + setup(line_numbers=False, target="3.8") assert parse("(a := b)") assert parse("print(a := 1, b := 2)") assert parse("def f(a, /, b) = a, b") assert "(b)(a)" in b"a |> b".decode("coconut") - setup(target="3.11") + setup(line_numbers=False, target="3.11") assert parse("a[x, *y]") - setup(target="3.12") + setup(line_numbers=False, target="3.12") assert parse("type Num = int | float").strip().endswith(""" # Compiled Coconut: ----------------------------------------------------------- type Num = int | float""".strip()) - setup(minify=True) + setup(line_numbers=False, minify=True) assert parse("123 # derp", "lenient") == "123# derp" return True +def test_incremental() -> bool: + setup() + warm_up(enable_incremental_mode=True) + assert parse(""" +def f(x): + x = 1 + y = 2 +""") + assert parse(""" +class F: + x = 1 + y = 2 +""") + assert parse(""" +def f(x): + x = 1 + y = 2 +class F: + x = 1 + y = 2 +""") + return True + + def test_kernel() -> bool: if PY35: loop = asyncio.new_event_loop() @@ -545,12 +596,18 @@ def test_pandas() -> bool: def test_extras() -> bool: if not PYPY and (PY2 or PY34): assert test_numpy() is True + print(".", end="") if not PYPY and PY36: - assert test_pandas() is True + assert test_pandas() is True # . + print(".", end="") if CoconutKernel is not None: - assert test_kernel() is True + assert test_kernel() is True # .. 
+ print(".") # newline bc we print stuff after this assert test_setup_none() is True + print(".") # ditto assert test_convenience() is True + print(".", end="") + assert test_incremental() is True # must come last return True @@ -558,7 +615,7 @@ def main() -> bool: print("Expect Coconut errors below from running extras:") print("(but make sure you get a after them)") assert test_extras() is True - print("") + print("\n") return True diff --git a/coconut/tests/src/importable.coco b/coconut/tests/src/importable.coco new file mode 100644 index 000000000..9c5b0730a --- /dev/null +++ b/coconut/tests/src/importable.coco @@ -0,0 +1,14 @@ +import os + +def imported_main() -> bool: + # do some stuff that requires --target sys + yield def f(x) = x + l = [] + yield def g(x): + result = yield from f(x) + l.append(result) + assert g(10) |> list == [] + assert l == [10] + + assert os.path.basename(os.path.dirname(__file__)) == "src", __file__ + return True diff --git a/coconut/tests/src/runnable.coco b/coconut/tests/src/runnable.coco index 2d2affbca..d16f707c1 100644 --- a/coconut/tests/src/runnable.coco +++ b/coconut/tests/src/runnable.coco @@ -1,10 +1,15 @@ #!/usr/bin/env coconut-run import sys +import os.path + +sys.path.append(os.path.dirname(__file__)) +from importable import imported_main success = "" def main() -> bool: assert sys.argv[1] == "--arg" + assert imported_main() is True success |> print return True diff --git a/coconut/util.py b/coconut/util.py index 1b1b21a62..69e0e2f3c 100644 --- a/coconut/util.py +++ b/coconut/util.py @@ -84,6 +84,7 @@ def get_clock_time(): class pickleable_obj(object): """Version of object that binds __reduce_ex__ to __reduce__.""" + __slots__ = () def __reduce_ex__(self, _): return self.__reduce__() @@ -240,6 +241,13 @@ def __missing__(self, key): return self[key] +class dictset(dict, object): + """A set implemented using a dictionary to get ordering benefits.""" + + def add(self, item): + self[item] = True + + def 
assert_remove_prefix(inputstr, prefix): """Remove prefix asserting that inputstr starts with it.""" assert inputstr.startswith(prefix), inputstr