diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json new file mode 100644 index 0000000000..ae1ccbc47b --- /dev/null +++ b/.devcontainer/devcontainer.json @@ -0,0 +1,31 @@ +{ + "name": "fe-lang-v2", + "image": "mcr.microsoft.com/devcontainers/rust:latest", + "features": { + "ghcr.io/devcontainers/features/node:1": {}, + "ghcr.io/devcontainers/features/github-cli:1": {}, + }, + "extensions": [ + "GitHub.vscode-github-actions", + "ms-vsliveshare.vsliveshare", + "vadimcn.vscode-lldb", + "matklad.rust-analyzer", + "serayuzgur.crates", + "tamasfe.even-better-toml", + "usernamehw.errorlens", + "aaron-bond.better-comments", + "yzhang.markdown-all-in-one" + ], + "settings": { + "explorer.compactFolders": false, + "editor.rulers": [ + 80 + ], + "workbench.colorTheme": "Default Dark+", + "workbench.preferredDarkColorTheme": "Monokai", + "workbench.colorCustomizations": { + "editorRuler.foreground": "#5f5f62" + }, + "workbench.activityBar.location": "top" + } +} \ No newline at end of file diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index aa8f9d7fc0..b0d1df62c5 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -11,11 +11,13 @@ on: env: CARGO_TERM_COLOR: always CARGO_INCREMENTAL: 0 - RUSTFLAGS: "-D warnings" + RUST_BACKTRACE: full jobs: lint: runs-on: ubuntu-latest + env: + RUSTFLAGS: "-D warnings" steps: - uses: actions/checkout@v3 - name: Install system dependencies @@ -102,7 +104,7 @@ jobs: # wasm-pack needs a Cargo.toml with a 'package' field. # (see https://github.com/rustwasm/wasm-pack/issues/642) # This will still run all tests in the workspace. 
- run: wasm-pack test --node crates/fe --workspace + run: wasm-pack test --node crates/fe --workspace --exclude fe-language-server release: # Only run this when we push a tag diff --git a/.gitignore b/.gitignore index d37e5db915..939e49c96d 100644 --- a/.gitignore +++ b/.gitignore @@ -4,4 +4,5 @@ tarpaulin-report.html /output /docs/tmp_snippets -.vscode \ No newline at end of file +.vscode +.DS_Store diff --git a/.vscode/launch.json b/.vscode/launch.json new file mode 100644 index 0000000000..5629d148dc --- /dev/null +++ b/.vscode/launch.json @@ -0,0 +1,23 @@ +{ + "version": "0.2.0", + "configurations": [ + { + "args": [ + "--extensionDevelopmentPath=${workspaceFolder}/crates/language-server/editors/vscode", + "${workspaceFolder}/crates/", + "--disable-extensions" + ], + "name": "Launch Fe VSCode Extension", + "outFiles": [ + "${workspaceFolder}/crates/language-server/editors/vscode/out/**/*.js" + ], + "preLaunchTask": "compile-vscode-extension", + "request": "launch", + "type": "extensionHost", + "env": { + "RUST_BACKTRACE": "full", + "NODE_ENV": "development" + } + } + ] +} diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 0000000000..e1f31fd4b7 --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,6 @@ +{ + "editor.tabSize": 4, + "rust-analyzer.linkedProjects": [ + "./crates/language-server/Cargo.toml" + ], +} \ No newline at end of file diff --git a/.vscode/tasks.json b/.vscode/tasks.json new file mode 100644 index 0000000000..471a8d4df9 --- /dev/null +++ b/.vscode/tasks.json @@ -0,0 +1,14 @@ +{ + "version": "2.0.0", + "tasks": [ + { + "label": "compile-vscode-extension", + "type": "shell", + "command": "npm install && npm run compile", + "options": { + "cwd": "${workspaceFolder}/crates/language-server/editors/vscode" + }, + "problemMatcher": [] + } + ] +} \ No newline at end of file diff --git a/Cargo.lock b/Cargo.lock index 77249859e1..0bfc989199 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1,12 +1,23 @@ # This file is 
automatically @generated by Cargo. # It is not intended for manual editing. -version = 3 +version = 4 + +[[package]] +name = "act-locally" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef9a921eb67a664d9e4d4ec3cc00caac360326f12625edc77ec3f5ce60fa7254" +dependencies = [ + "futures", + "smol", + "tracing", +] [[package]] name = "addr2line" -version = "0.21.0" +version = "0.22.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a30b2e23b9e17a9f90641c7ab1549cd9b44f296d3ccbf309d2863cfe398a0cb" +checksum = "6e4503c46a5c0c7844e948c9a4d6acd9f50cccb4de1c48eb9e291ea17470c678" dependencies = [ "gimli", ] @@ -19,20 +30,21 @@ checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" [[package]] name = "ahash" -version = "0.8.3" +version = "0.8.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c99f64d1e06488f620f932677e24bc6e2897582980441ae90a671415bd7ec2f" +checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011" dependencies = [ "cfg-if 1.0.0", "once_cell", "version_check", + "zerocopy", ] [[package]] name = "aho-corasick" -version = "0.7.20" +version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc936419f96fa211c1b9166887b38e5e40b19958e5b895be7c1f93adec7071ac" +checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" dependencies = [ "memchr", ] @@ -45,9 +57,9 @@ checksum = "5c6cb57a04249c6480766f7f7cef5467412af1490f8d1e243141daddada3264f" [[package]] name = "alloy-primitives" -version = "0.7.2" +version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "525448f6afc1b70dd0f9d0a8145631bf2f5e434678ab23ab18409ca264cae6b3" +checksum = "f783611babedbbe90db3478c120fb5f5daacceffc210b39adc0af4fe0da70bad" dependencies = [ "alloy-rlp", "bytes", @@ -62,18 +74,295 @@ dependencies = [ [[package]] 
name = "alloy-rlp" -version = "0.3.4" +version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d58d9f5da7b40e9bfff0b7e7816700be4019db97d4b6359fe7f94a9e22e42ac" +checksum = "b155716bab55763c95ba212806cf43d05bcc70e5f35b02bad20cf5ec7fe11fed" dependencies = [ "bytes", ] +[[package]] +name = "anstream" +version = "0.6.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "418c75fa768af9c03be99d17643f93f79bbba589895012a80e3452a19ddda15b" +dependencies = [ + "anstyle", + "anstyle-parse", + "anstyle-query", + "anstyle-wincon", + "colorchoice", + "is_terminal_polyfill", + "utf8parse", +] + +[[package]] +name = "anstyle" +version = "1.0.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55cc3b69f167a1ef2e161439aa98aed94e6028e5f9a59be9a6ffb47aef1651f9" + +[[package]] +name = "anstyle-parse" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c03a11a9034d92058ceb6ee011ce58af4a9bf61491aa7e1e59ecd24bd40d22d4" +dependencies = [ + "utf8parse", +] + +[[package]] +name = "anstyle-query" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ad186efb764318d35165f1758e7dcef3b10628e26d41a44bc5550652e6804391" +dependencies = [ + "windows-sys 0.52.0", +] + +[[package]] +name = "anstyle-wincon" +version = "3.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "61a38449feb7068f52bb06c12759005cf459ee52bb4adc1d5a7c4322d716fb19" +dependencies = [ + "anstyle", + "windows-sys 0.52.0", +] + +[[package]] +name = "anyhow" +version = "1.0.93" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c95c10ba0b00a02636238b814946408b1322d5ac4760326e6fb8ec956d85775" + +[[package]] +name = "append-only-vec" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum 
= "7992085ec035cfe96992dd31bfd495a2ebd31969bb95f624471cb6c0b349e571" + +[[package]] +name = "arc-swap" +version = "1.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "69f7f8c3906b62b754cd5326047894316021dcfe5a194c8ea52bdd94934a3457" + [[package]] name = "arrayvec" -version = "0.7.2" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8da52d66c7071e2e3fa2a1e5c6d088fec47b593032b254f5e980de8ea54454d6" +checksum = "96d30a06541fbafbc7f82ed10c06164cfbd2c401138f6addd8404629c4b16711" + +[[package]] +name = "async-channel" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81953c529336010edd6d8e358f886d9581267795c61b19475b71314bffa46d35" +dependencies = [ + "concurrent-queue", + "event-listener 2.5.3", + "futures-core", +] + +[[package]] +name = "async-channel" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "89b47800b0be77592da0afd425cc03468052844aff33b84e33cc696f64e77b6a" +dependencies = [ + "concurrent-queue", + "event-listener-strategy", + "futures-core", + "pin-project-lite", +] + +[[package]] +name = "async-compat" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7bab94bde396a3f7b4962e396fdad640e241ed797d4d8d77fc8c237d14c58fc0" +dependencies = [ + "futures-core", + "futures-io", + "once_cell", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "async-executor" +version = "1.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30ca9a001c1e8ba5149f91a74362376cc6bc5b919d92d988668657bd570bdcec" +dependencies = [ + "async-task", + "concurrent-queue", + "fastrand", + "futures-lite", + "slab", +] + +[[package]] +name = "async-fs" +version = "2.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"ebcd09b382f40fcd159c2d695175b2ae620ffa5f3bd6f664131efff4e8b9e04a" +dependencies = [ + "async-lock", + "blocking", + "futures-lite", +] + +[[package]] +name = "async-global-executor" +version = "2.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05b1b633a2115cd122d73b955eadd9916c18c8f510ec9cd1686404c60ad1c29c" +dependencies = [ + "async-channel 2.3.1", + "async-executor", + "async-io", + "async-lock", + "blocking", + "futures-lite", + "once_cell", +] + +[[package]] +name = "async-io" +version = "2.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0d6baa8f0178795da0e71bc42c9e5d13261aac7ee549853162e66a241ba17964" +dependencies = [ + "async-lock", + "cfg-if 1.0.0", + "concurrent-queue", + "futures-io", + "futures-lite", + "parking", + "polling", + "rustix", + "slab", + "tracing", + "windows-sys 0.52.0", +] + +[[package]] +name = "async-lock" +version = "3.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff6e472cdea888a4bd64f342f09b3f50e1886d32afe8df3d663c01140b811b18" +dependencies = [ + "event-listener 5.3.1", + "event-listener-strategy", + "pin-project-lite", +] + +[[package]] +name = "async-lsp" +version = "0.2.1" +source = "git+https://github.com/micahscopes/async-lsp?branch=pub-inner-type-id#5ce6e1ba89162d90825bafdd4f9d265fa9909d02" +dependencies = [ + "futures", + "lsp-types", + "pin-project-lite", + "rustix", + "serde", + "serde_json", + "thiserror 2.0.3", + "tokio", + "tower-layer", + "tower-service", + "tracing", + "waitpid-any", +] + +[[package]] +name = "async-net" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b948000fad4873c1c9339d60f2623323a0cfd3816e5181033c6a5cb68b2accf7" +dependencies = [ + "async-io", + "blocking", + "futures-lite", +] + +[[package]] +name = "async-process" +version = "2.3.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "63255f1dc2381611000436537bbedfe83183faa303a5a0edaf191edef06526bb" +dependencies = [ + "async-channel 2.3.1", + "async-io", + "async-lock", + "async-signal", + "async-task", + "blocking", + "cfg-if 1.0.0", + "event-listener 5.3.1", + "futures-lite", + "rustix", + "tracing", +] + +[[package]] +name = "async-signal" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dfb3634b73397aa844481f814fad23bbf07fdb0eabec10f2eb95e58944b1ec32" +dependencies = [ + "async-io", + "async-lock", + "atomic-waker", + "cfg-if 1.0.0", + "futures-core", + "futures-io", + "rustix", + "signal-hook-registry", + "slab", + "windows-sys 0.52.0", +] + +[[package]] +name = "async-std" +version = "1.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c634475f29802fde2b8f0b505b1bd00dfe4df7d4a000f0b36f7671197d5c3615" +dependencies = [ + "async-channel 1.9.0", + "async-global-executor", + "async-io", + "async-lock", + "crossbeam-utils", + "futures-channel", + "futures-core", + "futures-io", + "futures-lite", + "gloo-timers", + "kv-log-macro", + "log", + "memchr", + "once_cell", + "pin-project-lite", + "pin-utils", + "slab", + "wasm-bindgen-futures", +] + +[[package]] +name = "async-task" +version = "4.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b75356056920673b02621b35afd0f7dda9306d03c79a30f5c56c44cf256e3de" + +[[package]] +name = "atomic-waker" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" [[package]] name = "atty" @@ -104,20 +393,20 @@ checksum = "3c87f3f15e7794432337fc718554eaa4dc8f04c9677a950ffe366f20a162ae42" dependencies = [ "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.90", ] [[package]] name = "autocfg" -version = "1.1.0" +version = "1.3.0" source 
= "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" +checksum = "0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0" [[package]] name = "backtrace" -version = "0.3.69" +version = "0.3.73" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2089b7e3f35b9dd2d0ed921ead4f6d318c27680d4a5bd167b3ee120edb105837" +checksum = "5cc23269a4f8976d0a4d2e7109211a419fe30e8d88d677cd60b6bc79c5732e0a" dependencies = [ "addr2line", "cc", @@ -145,9 +434,9 @@ dependencies = [ [[package]] name = "base64" -version = "0.21.5" +version = "0.21.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "35636a1494ede3b646cc98f74f8e62c773a38a659ebc777a2cf26b9b74171df9" +checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" [[package]] name = "beef" @@ -163,9 +452,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.5.0" +version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf4b9d6a944f767f8e5e0db018570623c85f3d925ac718db4e06d0187adb21c1" +checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de" [[package]] name = "bitvec" @@ -181,18 +470,31 @@ dependencies = [ [[package]] name = "block-buffer" -version = "0.10.3" +version = "0.10.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "69cce20737498f97b993470a6e536b8523f0af7892a4f928cceb1ac5e52ebe7e" +checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" dependencies = [ "generic-array", ] +[[package]] +name = "blocking" +version = "1.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "703f41c54fc768e63e091340b424302bb1c29ef4aa0c7f10fe849dfb114d29ea" +dependencies = [ + "async-channel 2.3.1", + "async-task", + 
"futures-io", + "futures-lite", + "piper", +] + [[package]] name = "bumpalo" -version = "3.12.0" +version = "3.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d261e256854913907f67ed06efbc3338dfe6179796deefc1ff763fc1aee5535" +checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c" [[package]] name = "byte-slice-cast" @@ -202,21 +504,21 @@ checksum = "c3ac9f8b63eca6fd385229b3675f6cc0dc5c8a5c8a54a59d4f52ffd670d87b0c" [[package]] name = "byteorder" -version = "1.4.3" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" +checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "bytes" -version = "1.4.0" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89b2fd2a0dcf38d7971e2194b6b6eebab45ae01067456a7fd93d5547a61b70be" +checksum = "325918d6fe32f23b19878fe4b34794ae41fc19ddbe53b10571a4874d44ffd39b" [[package]] name = "camino" -version = "1.1.3" +version = "1.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6031a462f977dd38968b6f23378356512feeace69cef817e1a4475108093cec3" +checksum = "e0ec6b951b160caa93cc0c7b209e5a3bff7aae9062213451ac99493cd844c239" [[package]] name = "cast" @@ -226,9 +528,9 @@ checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" [[package]] name = "cc" -version = "1.0.96" +version = "1.0.101" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "065a29261d53ba54260972629f9ca6bffa69bac13cd1fed61420f7fa68b9f8bd" +checksum = "ac367972e516d45567c7eafc73d24e1c193dcf200a8d94e9db7b3d38b349572d" dependencies = [ "jobserver", "libc", @@ -260,26 +562,48 @@ dependencies = [ [[package]] name = "clap" -version = "3.2.23" +version = "3.2.25" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "71655c45cb9845d3270c9d6df84ebe72b4dad3c2ba3f7023ad47c144e4e473a5" +checksum = "4ea181bf566f71cb9a5d17a59e1871af638180a18fb0035c92ae62b705207123" dependencies = [ "atty", "bitflags 1.3.2", - "clap_derive", - "clap_lex", - "indexmap 1.9.2", + "clap_derive 3.2.25", + "clap_lex 0.2.4", + "indexmap 1.9.3", "once_cell", - "strsim", + "strsim 0.10.0", "termcolor", - "textwrap 0.16.0", + "textwrap 0.16.1", +] + +[[package]] +name = "clap" +version = "4.5.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fb3b4b9e5a7c7514dfa52869339ee98b3156b0bfb4e8a77c4ff4babb64b1604f" +dependencies = [ + "clap_builder", + "clap_derive 4.5.18", +] + +[[package]] +name = "clap_builder" +version = "4.5.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b17a95aa67cc7b5ebd32aa5370189aa0d79069ef1c64ce893bd30fb24bff20ec" +dependencies = [ + "anstream", + "anstyle", + "clap_lex 0.7.1", + "strsim 0.11.1", ] [[package]] name = "clap_derive" -version = "3.2.18" +version = "3.2.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea0c8bce528c4be4da13ea6fead8965e95b6073585a2f05204bd8f4119f82a65" +checksum = "ae6371b8bdc8b7d3959e9cf7b22d4435ef3e79e138688421ec654acf8c81b008" dependencies = [ "heck 0.4.1", "proc-macro-error", @@ -288,6 +612,18 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "clap_derive" +version = "4.5.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ac6a0c7b1a9e9a5186361f67dfa1b88213572f427fb9ab038efb2bd8c582dab" +dependencies = [ + "heck 0.5.0", + "proc-macro2", + "quote", + "syn 2.0.90", +] + [[package]] name = "clap_lex" version = "0.2.4" @@ -297,6 +633,12 @@ dependencies = [ "os_str_bytes", ] +[[package]] +name = "clap_lex" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"4b82cf0babdbd58558212896d1a4272303a57bdb245c2bf1147185fb45640e70" + [[package]] name = "cloudabi" version = "0.1.0" @@ -308,9 +650,9 @@ dependencies = [ [[package]] name = "cmake" -version = "0.1.49" +version = "0.1.50" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db34956e100b30725f2eb215f90d4871051239535632f84fea3bc92722c66b7c" +checksum = "a31c789563b815f77f4250caee12365734369f942439b7defd71e18a48197130" dependencies = [ "cc", ] @@ -325,15 +667,29 @@ dependencies = [ "unicode-width", ] +[[package]] +name = "colorchoice" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b6a852b24ab71dffc585bcb46eaf7959d175cb865a7152e35b348d1b2960422" + [[package]] name = "colored" -version = "2.0.0" +version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b3616f750b84d8f0de8a58bda93e08e2a81ad3f523089b05f1dffecab48c6cbd" +checksum = "cbf2150cce219b664a8a70df7a1f933836724b503f8a413af9365b4dcc4d90b8" dependencies = [ - "atty", "lazy_static", - "winapi", + "windows-sys 0.48.0", +] + +[[package]] +name = "concurrent-queue" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ca0197aee26d1ae37445ee532fefce43251d24cc7c166799f4d46817f1d3973" +dependencies = [ + "crossbeam-utils", ] [[package]] @@ -348,9 +704,9 @@ dependencies = [ [[package]] name = "const-hex" -version = "1.11.3" +version = "1.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ba00838774b4ab0233e355d26710fbfc8327a05c017f6dc4873f876d1f79f78" +checksum = "94fb8a24a26d37e1ffd45343323dc9fe6654ceea44c12f2fcb3d7ac29e610bc6" dependencies = [ "cfg-if 1.0.0", "cpufeatures", @@ -361,9 +717,9 @@ dependencies = [ [[package]] name = "const-oid" -version = "0.9.2" +version = "0.9.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"520fbf3c07483f94e3e3ca9d0cfd913d7718ef2483d2cfd91c0d9e91474ab913" +checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8" [[package]] name = "convert_case" @@ -373,9 +729,9 @@ checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e" [[package]] name = "core-foundation" -version = "0.9.3" +version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "194a7a9e6de53fa55116934067c844d9d749312f75c6f6d0980e8c252f8c2146" +checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" dependencies = [ "core-foundation-sys", "libc", @@ -383,19 +739,31 @@ dependencies = [ [[package]] name = "core-foundation-sys" -version = "0.8.4" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e496a50fda8aacccc86d7529e2c1e0892dbd0f898a6b5645b5561b89c3210efa" +checksum = "06ea2b9bc92be3c2baa9334a323ebca2d6f074ff852cd1d7b11064035cd3868f" + +[[package]] +name = "countme" +version = "3.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7704b5fdd17b18ae31c4c1da5a2e0305a2bf17b5249300a9ee9ed7b72114c636" [[package]] name = "cpufeatures" -version = "0.2.5" +version = "0.2.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28d997bd5e24a5928dd43e46dc529867e207907fe0b239c3477d924f7f2ca320" +checksum = "53fe5e26ff1b7aef8bca9c6080520cfb8d9333c7568e1829cef191a9723e5504" dependencies = [ "libc", ] +[[package]] +name = "cranelift-entity" +version = "0.91.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a59bcbca89c3f1b70b93ab3cbba5e5e0cbf3e63dadb23c7525cb142e21a9d4c" + [[package]] name = "criterion" version = "0.3.6" @@ -432,49 +800,62 @@ dependencies = [ "itertools", ] +[[package]] +name = "crossbeam" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"1137cd7e7fc0fb5d3c5a8678be38ec56e819125d8d7907411fe24ccb943faca8" +dependencies = [ + "crossbeam-channel", + "crossbeam-deque", + "crossbeam-epoch", + "crossbeam-queue", + "crossbeam-utils", +] + [[package]] name = "crossbeam-channel" -version = "0.5.7" +version = "0.5.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf2b3e8478797446514c91ef04bafcb59faba183e621ad488df88983cc14128c" +checksum = "33480d6946193aa8033910124896ca395333cae7e2d1113d1fef6c3272217df2" dependencies = [ - "cfg-if 1.0.0", "crossbeam-utils", ] [[package]] name = "crossbeam-deque" -version = "0.8.3" +version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce6fd6f855243022dcecf8702fef0c297d4338e226845fe067f6341ad9fa0cef" +checksum = "613f8cc01fe9cf1a3eb3d7f488fd2fa8388403e97039e2f73692932e291a770d" dependencies = [ - "cfg-if 1.0.0", "crossbeam-epoch", "crossbeam-utils", ] [[package]] name = "crossbeam-epoch" -version = "0.9.14" +version = "0.9.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46bd5f3f85273295a9d14aedfb86f6aadbff6d8f5295c4a9edb08e819dcf5695" +checksum = "5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e" dependencies = [ - "autocfg", - "cfg-if 1.0.0", "crossbeam-utils", - "memoffset", - "scopeguard", ] [[package]] -name = "crossbeam-utils" -version = "0.8.15" +name = "crossbeam-queue" +version = "0.3.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c063cd8cc95f5c377ed0d4b49a4b21f632396ff690e8470c29b3359b346984b" +checksum = "df0346b5d5e76ac2fe4e327c5fd1118d6be7c51dfb18f9b7922923f287471e35" dependencies = [ - "cfg-if 1.0.0", + "crossbeam-utils", ] +[[package]] +name = "crossbeam-utils" +version = "0.8.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22ec99545bb0ed0ea7bb9b8e1e9122ea386ff8a48c0922e43f36d45ab09e0e80" + [[package]] name = "crunchy" version = 
"0.2.2" @@ -505,9 +886,9 @@ dependencies = [ [[package]] name = "csv" -version = "1.2.0" +version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af91f40b7355f82b0a891f50e70399475945bb0b0da4f1700ce60761c9d3e359" +checksum = "ac574ff4d437a7b5ad237ef331c17ccca63c46479e5b5453eb8e10bb99a759fe" dependencies = [ "csv-core", "itoa", @@ -517,21 +898,25 @@ dependencies = [ [[package]] name = "csv-core" -version = "0.1.10" +version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b2466559f260f48ad25fe6317b3c8dac77b5bdb5763ac7d9d6103530663bc90" +checksum = "5efa2b3d7902f4b634a20cae3c9c4e6209dc4779feb6863329607560143efa70" dependencies = [ "memchr", ] [[package]] -name = "ctor" -version = "0.1.26" +name = "dashmap" +version = "6.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d2301688392eb071b0bf1a37be05c469d3cc4dbbd95df672fe28ab021e6a096" +checksum = "5041cc499144891f3790297212f32a74fb938e5136a14943f338ef9e0ae276cf" dependencies = [ - "quote", - "syn 1.0.109", + "cfg-if 1.0.0", + "crossbeam-utils", + "hashbrown 0.14.5", + "lock_api", + "once_cell", + "parking_lot_core 0.9.10", ] [[package]] @@ -546,15 +931,15 @@ dependencies = [ [[package]] name = "derive_more" -version = "0.99.17" +version = "0.99.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321" +checksum = "5f33878137e4dafd7fa914ad4e259e18a4e8e532b9617a2d0150262bf53abfce" dependencies = [ "convert_case", "proc-macro2", "quote", "rustc_version 0.4.0", - "syn 1.0.109", + "syn 2.0.90", ] [[package]] @@ -587,7 +972,16 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "035f15b4ae5cc91ca448fe9668799e3d9b09fd7381a7004e232769ff0efabb79" dependencies = [ - "dir-test-macros", + "dir-test-macros 0.1.1", +] + +[[package]] +name = 
"dir-test" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c44bdf9319ad5223afb7eb15a7110452b0adf0373ea6756561b2c708eef0dd1" +dependencies = [ + "dir-test-macros 0.3.0", ] [[package]] @@ -602,6 +996,18 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "dir-test-macros" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "644f96047137dfaa7a09e34d4623f9e52a1926ecc25ba32ad2ba3fc422536b25" +dependencies = [ + "glob", + "proc-macro2", + "quote", + "syn 1.0.109", +] + [[package]] name = "dirs" version = "5.0.1" @@ -620,15 +1026,38 @@ dependencies = [ "libc", "option-ext", "redox_users", - "windows-sys", + "windows-sys 0.48.0", ] +[[package]] +name = "dogged" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2638df109789fe360f0d9998c5438dd19a36678aaf845e46f285b688b1a1657a" + [[package]] name = "dot2" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4f046ad836ddb46a42ae6219f11208b61ef9f1b96f105a88da4ae0dd5f1b89e6" +[[package]] +name = "dot2" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "855423f2158bcc73798b3b9a666ec4204597a72370dc91dbdb8e7f9519de8cc3" + +[[package]] +name = "driver2" +version = "0.26.0" +dependencies = [ + "dir-test 0.3.0", + "fe-compiler-test-utils", + "fe-driver2", + "fe-hir", + "wasm-bindgen-test", +] + [[package]] name = "dyn-clone" version = "1.0.17" @@ -650,9 +1079,9 @@ dependencies = [ [[package]] name = "either" -version = "1.8.1" +version = "1.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fcaabb2fef8c910e7f4c7ce9f67a1283a1715879a7c230ca9d6d1ae31f16d91" +checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0" [[package]] name = "elliptic-curve" @@ -672,24 +1101,34 @@ dependencies = [ "zeroize", ] 
+[[package]] +name = "ena" +version = "0.14.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d248bdd43ce613d87415282f69b9bb99d947d290b10962dd6c56233312c2ad5" +dependencies = [ + "dogged", + "log", +] + [[package]] name = "encoding_rs" -version = "0.8.33" +version = "0.8.34" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7268b386296a025e474d5140678f75d6de9493ae55a5d709eeb9dd08149945e1" +checksum = "b45de904aa0b010bce2ab45264d0631681847fa7b6f2eaa7dab7619943bc4f59" dependencies = [ "cfg-if 1.0.0", ] [[package]] name = "enumn" -version = "0.1.6" +version = "0.1.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e88bcb3a067a6555d577aba299e75eff9942da276e6506fc6274327daa026132" +checksum = "6fd000fd6988e73bbe993ea3db9b1aa64906ab88766d654973924340c8cddb42" dependencies = [ "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.90", ] [[package]] @@ -700,12 +1139,12 @@ checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" [[package]] name = "errno" -version = "0.3.5" +version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac3e13f66a2f95e32a39eaa81f6b95d42878ca0e1db0c7543723dfe12557e860" +checksum = "534c5cf6194dfab3db3242765c03bbe257cf92f22b38f6bc0c58d59108a820ba" dependencies = [ "libc", - "windows-sys", + "windows-sys 0.52.0", ] [[package]] @@ -721,7 +1160,7 @@ dependencies = [ "serde", "serde_json", "sha3", - "thiserror", + "thiserror 1.0.61", "uint", ] @@ -771,6 +1210,33 @@ dependencies = [ "uint", ] +[[package]] +name = "event-listener" +version = "2.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" + +[[package]] +name = "event-listener" +version = "5.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"6032be9bd27023a771701cc49f9f053c751055f71efb2e0ae5c15809093675ba" +dependencies = [ + "concurrent-queue", + "parking", + "pin-project-lite", +] + +[[package]] +name = "event-listener-strategy" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0f214dc438f977e6d4e3500aaa277f5ad94ca83fbbd9b1a15713ce2344ccc5a1" +dependencies = [ + "event-listener 5.3.1", + "pin-project-lite", +] + [[package]] name = "evm" version = "0.37.0" @@ -822,15 +1288,15 @@ dependencies = [ [[package]] name = "fastrand" -version = "2.0.1" +version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25cbce373ec4653f1a01a31e8a5e5ec0c622dc27ff9c4e6606eefef5cbbed4a5" +checksum = "9fc0510504f03c51ada170672ac806f1f105a88aa97a5281117e1ddc3368e51a" [[package]] name = "fe" version = "0.26.0" dependencies = [ - "clap 3.2.23", + "clap 3.2.25", "colored", "fe-common", "fe-driver", @@ -838,7 +1304,7 @@ dependencies = [ "fe-test-runner", "fs_extra", "include_dir", - "indexmap 1.9.2", + "indexmap 1.9.3", "reqwest", "serde", "url", @@ -866,16 +1332,16 @@ dependencies = [ "fe-test-files", "hex", "if_chain", - "indexmap 1.9.2", + "indexmap 1.9.3", "insta", "num-bigint", "num-traits", - "parking_lot_core", + "parking_lot_core 0.8.0", "petgraph", "pretty_assertions", "rstest", - "salsa", - "semver 1.0.16", + "salsa 0.16.1", + "semver 1.0.23", "smallvec", "smol_str", "strum", @@ -891,9 +1357,9 @@ dependencies = [ "fe-common", "fe-mir", "fxhash", - "indexmap 1.9.2", + "indexmap 1.9.3", "num-bigint", - "salsa", + "salsa 0.16.1", "smol_str", "yultsur", ] @@ -909,20 +1375,33 @@ dependencies = [ "fe-library", "git2", "hex", - "indexmap 1.9.2", + "indexmap 1.9.3", "num-bigint", "num-traits", "once_cell", "path-clean", "ron", - "salsa", + "salsa 0.16.1", "serde", "smol_str", "tiny-keccak", - "toml 0.7.6", + "toml 0.7.8", "walkdir", ] +[[package]] +name = "fe-common2" +version = "0.26.0" +dependencies = [ + "camino", + 
"fe-parser2", + "indexmap 2.2.6", + "paste", + "salsa 0.18.0", + "semver 1.0.23", + "smol_str", +] + [[package]] name = "fe-compiler-test-utils" version = "0.26.0" @@ -937,7 +1416,7 @@ dependencies = [ "fe-yulc", "getrandom", "hex", - "indexmap 1.9.2", + "indexmap 1.9.3", "insta", "primitive-types", "serde_json", @@ -949,7 +1428,7 @@ dependencies = [ name = "fe-compiler-tests" version = "0.26.0" dependencies = [ - "dir-test", + "dir-test 0.3.0", "fe-common", "fe-driver", "fe-test-runner", @@ -959,7 +1438,7 @@ dependencies = [ name = "fe-compiler-tests-legacy" version = "0.26.0" dependencies = [ - "dir-test", + "dir-test 0.3.0", "ethabi", "evm", "evm-runtime", @@ -993,19 +1472,109 @@ dependencies = [ "fe-parser", "fe-test-runner", "fe-yulc", - "indexmap 1.9.2", + "indexmap 1.9.3", "serde_json", "smol_str", "toml 0.5.11", "vfs", ] +[[package]] +name = "fe-driver2" +version = "0.26.0" +dependencies = [ + "camino", + "clap 4.5.21", + "codespan-reporting", + "fe-common2", + "fe-hir", + "fe-hir-analysis", + "salsa 0.18.0", +] + +[[package]] +name = "fe-hir" +version = "0.26.0" +dependencies = [ + "camino", + "cranelift-entity", + "derive_more", + "dot2 1.0.0", + "fe-common2", + "fe-parser2", + "num-bigint", + "num-traits", + "paste", + "rustc-hash 1.1.0", + "salsa 0.18.0", + "smallvec", +] + +[[package]] +name = "fe-hir-analysis" +version = "0.26.0" +dependencies = [ + "bitflags 2.6.0", + "codespan-reporting", + "cranelift-entity", + "derive_more", + "dir-test 0.3.0", + "either", + "ena", + "fe-common2", + "fe-compiler-test-utils", + "fe-driver2", + "fe-hir", + "if_chain", + "itertools", + "num-bigint", + "rustc-hash 1.1.0", + "salsa 0.18.0", + "smallvec", +] + +[[package]] +name = "fe-language-server" +version = "0.26.0" +dependencies = [ + "act-locally", + "anyhow", + "async-compat", + "async-lsp", + "async-std", + "camino", + "clap 4.5.21", + "codespan-reporting", + "dir-test 0.1.1", + "fe-common2", + "fe-compiler-test-utils", + "fe-hir", + "fe-hir-analysis", + 
"futures", + "futures-batch", + "fxhash", + "glob", + "patricia_tree", + "rowan", + "rust-embed", + "salsa 0.18.0", + "serde", + "serde_json", + "tokio", + "tokio-macros", + "tower", + "tracing", + "tracing-subscriber", + "tracing-tree", + "url", +] + [[package]] name = "fe-library" version = "0.26.0" dependencies = [ "include_dir", - "indexmap 1.9.2", + "indexmap 1.9.3", "smol_str", ] @@ -1013,7 +1582,7 @@ dependencies = [ name = "fe-mir" version = "0.26.0" dependencies = [ - "dot2", + "dot2 0.1.0", "fe-analyzer", "fe-common", "fe-library", @@ -1021,11 +1590,11 @@ dependencies = [ "fe-test-files", "fxhash", "id-arena", - "indexmap 1.9.2", + "indexmap 1.9.3", "num-bigint", "num-integer", "num-traits", - "salsa", + "salsa 0.16.1", "smol_str", ] @@ -1041,7 +1610,7 @@ dependencies = [ "insta", "logos", "pretty_assertions", - "semver 1.0.16", + "semver 1.0.23", "serde", "smol_str", "unescape", @@ -1050,13 +1619,30 @@ dependencies = [ "wasm-bindgen-test", ] +[[package]] +name = "fe-parser2" +version = "0.26.0" +dependencies = [ + "derive_more", + "dir-test 0.3.0", + "fe-compiler-test-utils", + "lazy_static", + "logos", + "rowan", + "rustc-hash 1.1.0", + "smallvec", + "unwrap-infallible", + "wasm-bindgen", + "wasm-bindgen-test", +] + [[package]] name = "fe-test-files" version = "0.26.0" dependencies = [ "fe-library", "include_dir", - "indexmap 1.9.2", + "indexmap 1.9.3", ] [[package]] @@ -1068,7 +1654,7 @@ dependencies = [ "ethabi", "getrandom", "hex", - "indexmap 1.9.2", + "indexmap 1.9.3", "revm", ] @@ -1076,7 +1662,7 @@ dependencies = [ name = "fe-yulc" version = "0.26.0" dependencies = [ - "indexmap 1.9.2", + "indexmap 1.9.3", "serde_json", "solc", ] @@ -1151,47 +1737,118 @@ version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c" +[[package]] +name = "futures" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876" +dependencies = [ + "futures-channel", + "futures-core", + "futures-executor", + "futures-io", + "futures-sink", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-batch" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6f444c45a1cb86f2a7e301469fd50a82084a60dadc25d94529a8312276ecb71a" +dependencies = [ + "futures", + "futures-timer", + "pin-utils", +] + [[package]] name = "futures-channel" -version = "0.3.29" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff4dd66668b557604244583e3e1e1eada8c5c2e96a6d0d6653ede395b78bbacb" +checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10" dependencies = [ "futures-core", + "futures-sink", +] + +[[package]] +name = "futures-core" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" + +[[package]] +name = "futures-executor" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e28d1d997f585e54aebc3f97d39e72338912123a67330d723fdbb564d646c9f" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-io" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" + +[[package]] +name = "futures-lite" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cef40d21ae2c515b51041df9ed313ed21e572df340ea58a922a0aefe7e8891a1" +dependencies = [ + "fastrand", + "futures-core", + "futures-io", + "parking", + "pin-project-lite", ] [[package]] -name = "futures-core" -version = "0.3.29" +name = "futures-macro" +version = "0.3.31" 
source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb1d22c66e66d9d72e1758f0bd7d4fd0bee04cad842ee34587d68c07e45d088c" +checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.90", +] [[package]] -name = "futures-io" -version = "0.3.29" +name = "futures-sink" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8bf34a163b5c4c52d0478a4d757da8fb65cabef42ba90515efee0f6f9fa45aaa" +checksum = "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7" [[package]] -name = "futures-sink" -version = "0.3.29" +name = "futures-task" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e36d3378ee38c2a36ad710c5d30c2911d752cb941c00c72dbabfb786a7970817" +checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988" [[package]] -name = "futures-task" -version = "0.3.29" +name = "futures-timer" +version = "3.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "efd193069b0ddadc69c46389b740bbccdd97203899b48d09c5f7969591d6bae2" +checksum = "f288b0a4f20f9a56b5d1da57e2227c661b7b16168e2f72365f57b63326e29b24" [[package]] name = "futures-util" -version = "0.3.29" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a19526d624e703a3179b3d322efec918b6246ea0fa51d41124525f00f1cc8104" +checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" dependencies = [ + "futures-channel", "futures-core", "futures-io", + "futures-macro", + "futures-sink", "futures-task", "memchr", "pin-project-lite", @@ -1210,9 +1867,9 @@ dependencies = [ [[package]] name = "generic-array" -version = "0.14.6" +version = "0.14.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"bff49e947297f3312447abdca79f45f4738097cc82b06e72054d2223f601f1b9" +checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" dependencies = [ "typenum", "version_check", @@ -1221,9 +1878,9 @@ dependencies = [ [[package]] name = "getrandom" -version = "0.2.8" +version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c05aeb6a22b8f62540c194aac980f2115af067bfe15a0734d7277a768d396b31" +checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" dependencies = [ "cfg-if 1.0.0", "js-sys", @@ -1234,9 +1891,9 @@ dependencies = [ [[package]] name = "gimli" -version = "0.28.0" +version = "0.29.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6fb8d784f27acf97159b40fc4db5ecd8aa23b9ad5ef69cdd136d3bc80665f0c0" +checksum = "40ecd4077b5ae9fd2e9e169b102c6c330d0605168eb0e8bf79952b256dbefffd" [[package]] name = "git2" @@ -1244,7 +1901,7 @@ version = "0.18.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "232e6a7bfe35766bf715e55a88b39a700596c0ccfd88cd3680b4cdb40d66ef70" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", "libc", "libgit2-sys", "log", @@ -1259,6 +1916,18 @@ version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" +[[package]] +name = "gloo-timers" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbb143cf96099802033e0d4f4963b19fd2e0b728bcf076cd9cf7f6634f092994" +dependencies = [ + "futures-channel", + "futures-core", + "js-sys", + "wasm-bindgen", +] + [[package]] name = "group" version = "0.13.0" @@ -1272,9 +1941,9 @@ dependencies = [ [[package]] name = "h2" -version = "0.3.21" +version = "0.3.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"91fc23aa11be92976ef4729127f1a74adf36d8436f7816b185d18df956790833" +checksum = "81fe527a889e1532da5c525686d96d4c2e74cdd345badf8dfef9f6b39dd5f5e8" dependencies = [ "bytes", "fnv", @@ -1282,7 +1951,7 @@ dependencies = [ "futures-sink", "futures-util", "http", - "indexmap 1.9.2", + "indexmap 2.2.6", "slab", "tokio", "tokio-util", @@ -1291,9 +1960,9 @@ dependencies = [ [[package]] name = "half" -version = "1.8.2" +version = "1.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eabb4a44450da02c90444cf74558da904edde8fb4e9035a9a6a4e15445af0bd7" +checksum = "1b43ede17f21864e81be2fa654110bf1e793774238d86ef8555c37e6519c0403" [[package]] name = "hash-db" @@ -1318,14 +1987,23 @@ checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" [[package]] name = "hashbrown" -version = "0.14.0" +version = "0.14.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c6201b9ff9fd90a5a3bac2e56a830d0caa509576f0e503818ee82c181b3437a" +checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" dependencies = [ "ahash", "allocator-api2", ] +[[package]] +name = "hashlink" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ba4ff7128dee98c7dc9794b6a411377e1404dba1c97deb8d1a55297bd25d8af" +dependencies = [ + "hashbrown 0.14.5", +] + [[package]] name = "heck" version = "0.3.3" @@ -1341,6 +2019,12 @@ version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" +[[package]] +name = "heck" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" + [[package]] name = "hermit-abi" version = "0.1.19" @@ -1352,12 +2036,9 @@ dependencies = [ [[package]] name = "hermit-abi" -version = "0.2.6" +version = "0.4.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "ee512640fe35acbfb4bb779db6f0d80704c2cacfa2e39b601ef3e3f47d1ae4c7" -dependencies = [ - "libc", -] +checksum = "fbf6a919d6cf397374f7dfeeea91d974c7c0a7221d0d0f4f20d859d329e53fcc" [[package]] name = "hex" @@ -1382,9 +2063,9 @@ dependencies = [ [[package]] name = "http" -version = "0.2.9" +version = "0.2.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd6effc99afb63425aff9b05836f029929e345a6148a14b7ecd5ab67af944482" +checksum = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1" dependencies = [ "bytes", "fnv", @@ -1393,9 +2074,9 @@ dependencies = [ [[package]] name = "http-body" -version = "0.4.5" +version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d5f38f16d184e36f2408a55281cd658ecbd3ca05cce6d6510a176eca393e26d1" +checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" dependencies = [ "bytes", "http", @@ -1404,9 +2085,9 @@ dependencies = [ [[package]] name = "httparse" -version = "1.8.0" +version = "1.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d897f394bad6a705d5f4104762e116a75639e470d80901eed05a860a95cb1904" +checksum = "0fcc0b4a115bf80b728eb8ea024ad5bd707b615bfed49e0665b6e0f86fd082d9" [[package]] name = "httpdate" @@ -1416,9 +2097,9 @@ checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" [[package]] name = "hyper" -version = "0.14.27" +version = "0.14.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ffb1cfd654a8219eaef89881fdb3bb3b1cdc5fa75ded05d6933b2b382e395468" +checksum = "f361cde2f109281a220d4307746cdfd5ee3f410da58a70377762396775634b33" dependencies = [ "bytes", "futures-channel", @@ -1431,7 +2112,7 @@ dependencies = [ "httpdate", "itoa", "pin-project-lite", - "socket2 0.4.10", + "socket2", "tokio", "tower-service", "tracing", @@ -1513,18 
+2194,18 @@ dependencies = [ [[package]] name = "include_dir" -version = "0.7.3" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "18762faeff7122e89e0857b02f7ce6fcc0d101d5e9ad2ad7846cc01d61b7f19e" +checksum = "923d117408f1e49d914f1a379a309cffe4f18c05cf4e3d12e613a15fc81bd0dd" dependencies = [ "include_dir_macros", ] [[package]] name = "include_dir_macros" -version = "0.7.3" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b139284b5cf57ecfa712bcc66950bb635b31aff41c188e8a4cfc758eca374a3f" +checksum = "7cab85a7ed0bd5f0e76d93846e0147172bed2e2d3f859bcc33a8d9699cad1a75" dependencies = [ "proc-macro2", "quote", @@ -1538,9 +2219,9 @@ checksum = "ce23b50ad8242c51a442f3ff322d56b02f08852c77e4c0b4d3fd684abc89c683" [[package]] name = "indexmap" -version = "1.9.2" +version = "1.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1885e79c1fc4b10f0e172c475f458b7f7b93061064d98c3293e98c5ba0c8b399" +checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" dependencies = [ "autocfg", "hashbrown 0.12.3", @@ -1548,31 +2229,30 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.0.0" +version = "2.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d5477fe2230a79769d8dc68e0eabf5437907c0457a5614a9e8dddb67f65eb65d" +checksum = "168fb715dda47215e360912c096649d23d58bf392ac62f73919e831745e40f26" dependencies = [ "equivalent", - "hashbrown 0.14.0", + "hashbrown 0.14.5", ] [[package]] name = "insta" -version = "1.28.0" +version = "1.39.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fea5b3894afe466b4bcf0388630fc15e11938a6074af0cd637c825ba2ec8a099" +checksum = "810ae6042d48e2c9e9215043563a58a80b877bc863228a74cf10c49d4620a6f5" dependencies = [ "lazy_static", "linked-hash-map", "similar", - "yaml-rust", ] [[package]] name = "instant" 
-version = "0.1.12" +version = "0.1.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c" +checksum = "e0242819d153cba4b4b05a5a8f2a7e9bbf97b6055b2a002b395c96b5ff3c0222" dependencies = [ "cfg-if 1.0.0", ] @@ -1583,6 +2263,12 @@ version = "2.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8f518f335dce6725a761382244631d86cf0ccb2863413590b31338feb467f9c3" +[[package]] +name = "is_terminal_polyfill" +version = "1.70.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8478577c03552c21db0e2724ffb8986a5ce7af88107e6be5d2ee6e158c12800" + [[package]] name = "itertools" version = "0.10.5" @@ -1594,9 +2280,9 @@ dependencies = [ [[package]] name = "itoa" -version = "1.0.6" +version = "1.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "453ad9f582a441959e5f0d088b02ce04cfe8d51a8eaf077f12ac6d3e94164ca6" +checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" [[package]] name = "jobserver" @@ -1609,9 +2295,9 @@ dependencies = [ [[package]] name = "js-sys" -version = "0.3.61" +version = "0.3.69" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "445dde2150c55e483f3d8416706b97ec8e8237c307e5b7b4b8dd15e6af2a0730" +checksum = "29c15563dc2726973df627357ce0c9ddddbea194836909d655df6a75d2cf296d" dependencies = [ "wasm-bindgen", ] @@ -1630,27 +2316,36 @@ dependencies = [ [[package]] name = "keccak" -version = "0.1.3" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3afef3b6eff9ce9d8ff9b3601125eec7f0c8cbac7abd14f355d053fa56c98768" +checksum = "ecc2af9a1119c51f12a14607e783cb977bde58bc069ff0c3da1095e635d70654" dependencies = [ "cpufeatures", ] +[[package]] +name = "kv-log-macro" +version = "1.0.7" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "0de8b303297635ad57c9f5059fd9cee7a47f8e8daa09df0fcd07dd39fb22977f" +dependencies = [ + "log", +] + [[package]] name = "lazy_static" -version = "1.4.0" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" +checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" dependencies = [ "spin", ] [[package]] name = "libc" -version = "0.2.153" +version = "0.2.167" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c198f91728a82281a64e1f4f9eeb25d82cb32a5de251c6bd1b5154d63a8e7bd" +checksum = "09d6582e104315a817dff97f75133544b2e094ee22447d2acf4a74e189ba06fc" [[package]] name = "libgit2-sys" @@ -1668,19 +2363,18 @@ dependencies = [ [[package]] name = "libm" -version = "0.2.6" +version = "0.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "348108ab3fba42ec82ff6e9564fc4ca0247bdccdc68dd8af9764bbc79c3c8ffb" +checksum = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058" [[package]] name = "libredox" -version = "0.0.1" +version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85c833ca1e66078851dba29046874e38f08b2c883700aa29a03ddd3b23814ee8" +checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", "libc", - "redox_syscall 0.4.1", ] [[package]] @@ -1699,9 +2393,9 @@ dependencies = [ [[package]] name = "libz-sys" -version = "1.1.15" +version = "1.1.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "037731f5d3aaa87a5675e895b63ddff1a87624bc29f77004ea829809654e48f6" +checksum = "c15da26e5af7e25c90b37a2d75cdbf940cf4a55316de9d84c679c9b8bfabf82e" dependencies = [ "cc", "libc", @@ -1717,15 +2411,15 @@ checksum = 
"0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f" [[package]] name = "linux-raw-sys" -version = "0.4.10" +version = "0.4.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da2479e8c062e40bf0066ffa0bc823de0a9368974af99c9f6df941d2c231e03f" +checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89" [[package]] name = "lock_api" -version = "0.4.9" +version = "0.4.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "435011366fe56583b16cf956f9df0095b405b82d76425bc8981c0e22e60ec4df" +checksum = "07af8b9cdd281b7915f413fa73f29ebd5d55d0d3f0155584dade1ff18cea1b17" dependencies = [ "autocfg", "scopeguard", @@ -1733,11 +2427,11 @@ dependencies = [ [[package]] name = "log" -version = "0.4.17" +version = "0.4.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e" +checksum = "90ed8c1e510134f979dbc4f070f87d4313098b704861a105fe34231c70a3901c" dependencies = [ - "cfg-if 1.0.0", + "value-bag", ] [[package]] @@ -1759,21 +2453,34 @@ dependencies = [ "fnv", "proc-macro2", "quote", - "regex-syntax 0.6.28", + "regex-syntax 0.6.29", "syn 1.0.109", ] +[[package]] +name = "lsp-types" +version = "0.95.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e34d33a8e9b006cd3fc4fe69a921affa097bae4bb65f76271f4644f9a334365" +dependencies = [ + "bitflags 1.3.2", + "serde", + "serde_json", + "serde_repr", + "url", +] + [[package]] name = "memchr" -version = "2.7.1" +version = "2.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "523dc4f511e55ab87b694dc30d0f820d60906ef06413f93d4d7a1385599cc149" +checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" [[package]] name = "memoffset" -version = "0.8.0" +version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "d61c719bcfbcf5d62b3a09efa6088de8c54bc0bfcd3ea7ae39fcc186108b8de1" +checksum = "488016bfae457b036d996092f6cb448677611ce4449e970ceaf42695203f218a" dependencies = [ "autocfg", ] @@ -1786,31 +2493,30 @@ checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" [[package]] name = "miniz_oxide" -version = "0.7.1" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7810e0be55b428ada41041c41f32c9f1a42817901b4ccf45fa3d4b6561e74c7" +checksum = "b8a240ddb74feaf34a79a7add65a741f3167852fba007066dcac1ca548d89c08" dependencies = [ "adler", ] [[package]] name = "mio" -version = "0.8.9" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3dce281c5e46beae905d4de1870d8b1509a9142b62eedf18b443b011ca8343d0" +checksum = "2886843bf800fba2e3377cff24abf6379b4c4d5c6681eaf9ea5b0d15090450bd" dependencies = [ "libc", "wasi", - "windows-sys", + "windows-sys 0.52.0", ] [[package]] name = "native-tls" -version = "0.2.11" +version = "0.2.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07226173c32f2926027b63cce4bcd8076c3552846cbe7925f3aaffeac0a3b92e" +checksum = "a8614eb2c83d59d1c8cc974dd3f920198647674a0a035e1af1fa58707e317466" dependencies = [ - "lazy_static", "libc", "log", "openssl", @@ -1822,11 +2528,30 @@ dependencies = [ "tempfile", ] +[[package]] +name = "nu-ansi-term" +version = "0.46.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84" +dependencies = [ + "overload", + "winapi", +] + +[[package]] +name = "nu-ansi-term" +version = "0.50.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d4a28e057d01f97e61255210fcff094d74ed0466038633e95017f5beb68e4399" +dependencies = [ + "windows-sys 0.52.0", +] + [[package]] name = "num" -version = "0.4.0" +version = "0.4.3" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "43db66d1170d347f9a065114077f7dccb00c1b9478c89384490a3425279a4606" +checksum = "35bd024e8b2ff75562e5f34e7f4905839deb4b22955ef5e73d2fea1b9813cb23" dependencies = [ "num-bigint", "num-complex", @@ -1838,39 +2563,37 @@ dependencies = [ [[package]] name = "num-bigint" -version = "0.4.3" +version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f93ab6289c7b344a8a9f60f88d80aa20032336fe78da341afc91c8a2341fc75f" +checksum = "c165a9ab64cf766f73521c0dd2cfdff64f488b8f0b3e621face3462d3db536d7" dependencies = [ - "autocfg", "num-integer", "num-traits", ] [[package]] name = "num-complex" -version = "0.4.3" +version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "02e0d21255c828d6f128a1e41534206671e8c3ea0c62f32291e808dc82cff17d" +checksum = "73f88a1307638156682bada9d7604135552957b7818057dcef22705b4d509495" dependencies = [ "num-traits", ] [[package]] name = "num-integer" -version = "0.1.45" +version = "0.1.46" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "225d3389fb3509a24c93f5c29eb6bde2586b98d9f016636dff58d7c6f7569cd9" +checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f" dependencies = [ - "autocfg", "num-traits", ] [[package]] name = "num-iter" -version = "0.1.43" +version = "0.1.45" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d03e6c028c5dc5cac6e2dec0efda81fc887605bb3d884578bb6d6bf7514e252" +checksum = "1429034a0490724d0075ebb2bc9e875d6503c3cf69e235a8941aa757d83ef5bf" dependencies = [ "autocfg", "num-integer", @@ -1879,11 +2602,10 @@ dependencies = [ [[package]] name = "num-rational" -version = "0.4.1" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0638a1c9d0a3c0914158145bc76cff373a75a627e6ecbfb71cbe6f453a5a19b0" +checksum = 
"f83d14da390562dca69fc84082e73e548e1ad308d24accdedd2720017cb37824" dependencies = [ - "autocfg", "num-bigint", "num-integer", "num-traits", @@ -1891,29 +2613,19 @@ dependencies = [ [[package]] name = "num-traits" -version = "0.2.18" +version = "0.2.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da0df0e5185db44f69b44f26786fe401b6c293d1907744beaa7fa62b2e5a517a" +checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" dependencies = [ "autocfg", "libm", ] -[[package]] -name = "num_cpus" -version = "1.15.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fac9e2da13b5eb447a6ce3d392f23a29d8694bff781bf03a16cd9ac8697593b" -dependencies = [ - "hermit-abi 0.2.6", - "libc", -] - [[package]] name = "object" -version = "0.32.1" +version = "0.36.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9cf5f9dd3933bd50a9e1f149ec995f39ae2c496d31fd772c1fd45ebc27e902b0" +checksum = "576dfe1fc8f9df304abb159d767a29d0476f7750fbf8aa7ad07816004a207434" dependencies = [ "memchr", ] @@ -1932,11 +2644,11 @@ checksum = "0ab1bc2a289d34bd04a330323ac98a1b4bc82c9d9fcb1e66b63caa84da26b575" [[package]] name = "openssl" -version = "0.10.57" +version = "0.10.68" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bac25ee399abb46215765b1cb35bc0212377e58a061560d8b29b024fd0430e7c" +checksum = "6174bc48f102d208783c2c84bf931bb75927a617866870de8a4ea85597f871f5" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", "cfg-if 1.0.0", "foreign-types", "libc", @@ -1947,13 +2659,13 @@ dependencies = [ [[package]] name = "openssl-macros" -version = "0.1.0" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b501e44f11665960c7e7fcf062c7d96a14ade4aa98116c004b2e37b5be7d736c" +checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ "proc-macro2", "quote", 
- "syn 1.0.109", + "syn 2.0.90", ] [[package]] @@ -1964,9 +2676,9 @@ checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" [[package]] name = "openssl-sys" -version = "0.9.93" +version = "0.9.104" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db4d56a4c0478783083cfafcc42493dd4a981d41669da64b4572a2a089b51b1d" +checksum = "45abf306cbf99debc8195b66b7346498d7b10c210de50418b5ccd7ceba08c741" dependencies = [ "cc", "libc", @@ -1982,24 +2694,21 @@ checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d" [[package]] name = "os_str_bytes" -version = "6.4.1" +version = "6.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b7820b9daea5457c9f21c69448905d723fbd21136ccf521748f23fd49e723ee" +checksum = "e2355d85b9a3786f481747ced0e0ff2ba35213a1f9bd406ed906554d7af805a1" [[package]] -name = "output_vt100" -version = "0.1.3" +name = "overload" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "628223faebab4e3e40667ee0b2336d34a5b960ff60ea743ddfdbcf7770bcfb66" -dependencies = [ - "winapi", -] +checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" [[package]] name = "parity-scale-codec" -version = "3.4.0" +version = "3.6.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "637935964ff85a605d114591d4d2c13c5d1ba2806dae97cea6bf180238a749ac" +checksum = "306800abfa29c7f16596b5970a588435e3d5b3149683d00c12b699cc19f895ee" dependencies = [ "arrayvec", "bitvec", @@ -2011,9 +2720,9 @@ dependencies = [ [[package]] name = "parity-scale-codec-derive" -version = "3.1.4" +version = "3.6.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86b26a931f824dd4eca30b3e43bb4f31cd5f0d3a403c5f5ff27106b805bfde7b" +checksum = "d830939c76d294956402033aee57a6da7b438f2294eb94864c37b0569053a42c" dependencies = [ "proc-macro-crate", 
"proc-macro2", @@ -2021,6 +2730,12 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "parking" +version = "2.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f38d5652c16fde515bb1ecef450ab0f6a219d619a7274976324d5e377f7dceba" + [[package]] name = "parking_lot" version = "0.11.1" @@ -2029,7 +2744,17 @@ checksum = "6d7744ac029df22dca6284efe4e898991d28e3085c706c972bcd7da4a27a15eb" dependencies = [ "instant", "lock_api", - "parking_lot_core", + "parking_lot_core 0.8.0", +] + +[[package]] +name = "parking_lot" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27" +dependencies = [ + "lock_api", + "parking_lot_core 0.9.10", ] [[package]] @@ -2047,12 +2772,40 @@ dependencies = [ "winapi", ] +[[package]] +name = "parking_lot_core" +version = "0.9.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" +dependencies = [ + "cfg-if 1.0.0", + "libc", + "redox_syscall 0.5.2", + "smallvec", + "windows-targets 0.52.5", +] + +[[package]] +name = "paste" +version = "1.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" + [[package]] name = "path-clean" version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "17359afc20d7ab31fdb42bb844c8b3bb1dabd7dcf7e68428492da7f16966fcef" +[[package]] +name = "patricia_tree" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1c8e0b346244f1606d39ec7c47046286cbdcc6553dbdf25b3b9550d5e026f2ed" +dependencies = [ + "bitflags 1.3.2", +] + [[package]] name = "percent-encoding" version = "2.3.1" @@ -2061,19 +2814,19 @@ checksum = 
"e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" [[package]] name = "petgraph" -version = "0.6.3" +version = "0.6.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4dd7d28ee937e54fe3080c91faa1c3a46c06de6252988a7f4592ba2310ef22a4" +checksum = "b4c5cc86750666a3ed20bdaf5ca2a0344f9c67674cae0515bec2da16fbaa47db" dependencies = [ "fixedbitset", - "indexmap 1.9.2", + "indexmap 2.2.6", ] [[package]] name = "pin-project-lite" -version = "0.2.13" +version = "0.2.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8afb450f006bf6385ca15ef45d71d2288452bc3683ce2e2cacc0d18e4be60b58" +checksum = "bda66fc9667c18cb2758a2ac84d1167245054bcf85d5d1aaa6923f45801bdd02" [[package]] name = "pin-utils" @@ -2081,17 +2834,28 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" +[[package]] +name = "piper" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96c8c490f422ef9a4efd2cb5b42b76c8613d7e7dfc1caf667b8a3350a5acc066" +dependencies = [ + "atomic-waker", + "fastrand", + "futures-io", +] + [[package]] name = "pkg-config" -version = "0.3.27" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26072860ba924cbfa98ea39c8c19b4dd6a4a25423dbdf219c1eca91aa0cf6964" +checksum = "d231b230927b5e4ad203db57bbcbee2802f6bce620b1e4a9024a07d94e2907ec" [[package]] name = "plotters" -version = "0.3.4" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2538b639e642295546c50fcd545198c9d64ee2a38620a628724a3b266d5fbf97" +checksum = "a15b6eccb8484002195a3e44fe65a4ce8e93a625797a063735536fd59cb01cf3" dependencies = [ "num-traits", "plotters-backend", @@ -2102,19 +2866,34 @@ dependencies = [ [[package]] name = "plotters-backend" -version = "0.3.4" 
+version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "193228616381fecdc1224c62e96946dfbc73ff4384fba576e052ff8c1bea8142" +checksum = "414cec62c6634ae900ea1c56128dfe87cf63e7caece0852ec76aba307cebadb7" [[package]] name = "plotters-svg" -version = "0.3.3" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f9a81d2759aae1dae668f783c308bc5c8ebd191ff4184aaa1b37f65a6ae5a56f" +checksum = "81b30686a7d9c3e010b84284bdd26a29f2138574f52f5eb6f794fc0ad924e705" dependencies = [ "plotters-backend", ] +[[package]] +name = "polling" +version = "3.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a3ed00ed3fbf728b5816498ecd316d1716eecaced9c0c8d2c5a6740ca214985b" +dependencies = [ + "cfg-if 1.0.0", + "concurrent-queue", + "hermit-abi 0.4.0", + "pin-project-lite", + "rustix", + "tracing", + "windows-sys 0.52.0", +] + [[package]] name = "ppv-lite86" version = "0.2.17" @@ -2123,21 +2902,19 @@ checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" [[package]] name = "pretty_assertions" -version = "1.3.0" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a25e9bcb20aa780fd0bb16b72403a9064d6b3f22f026946029acb941a50af755" +checksum = "af7cee1a6c8a5b9208b3cb1061f10c0cb689087b3d8ce85fb9d2dd7a29b6ba66" dependencies = [ - "ctor", "diff", - "output_vt100", "yansi", ] [[package]] name = "primitive-types" -version = "0.12.1" +version = "0.12.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f3486ccba82358b11a77516035647c34ba167dfa53312630de83b12bd4f3d66" +checksum = "0b34d9fd68ae0b74a41b21c03c2f62847aa0ffea044eee893b4c140b37e244e2" dependencies = [ "fixed-hash", "impl-codec", @@ -2149,12 +2926,11 @@ dependencies = [ [[package]] name = "proc-macro-crate" -version = "1.3.1" +version = "3.1.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f4c021e1093a56626774e81216a4ce732a735e5bad4868a03f3ed65ca0c3919" +checksum = "6d37c51ca738a55da99dc0c4a34860fd675453b8b36209178c2249bb13651284" dependencies = [ - "once_cell", - "toml_edit", + "toml_edit 0.21.1", ] [[package]] @@ -2183,34 +2959,34 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.78" +version = "1.0.92" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2422ad645d89c99f8f3e6b88a9fdeca7fabeac836b1002371c4367c8f984aae" +checksum = "37d3544b3f2748c54e147655edb5025752e2303145b5aefb3c3ea2c78b973bb0" dependencies = [ "unicode-ident", ] [[package]] name = "proptest" -version = "1.4.0" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31b476131c3c86cb68032fdc5cb6d5a1045e3e42d96b69fa599fd77701e1f5bf" +checksum = "b4c2511913b88df1637da85cc8d96ec8e43a3f8bb8ccb71ee1ac240d6f3df58d" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", "lazy_static", "num-traits", "rand", "rand_chacha", "rand_xorshift", - "regex-syntax 0.8.3", + "regex-syntax 0.8.4", "unarray", ] [[package]] name = "quote" -version = "1.0.35" +version = "1.0.36" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "291ec9ab5efd934aaf503a6466c5d5251535d108ee747472c3977cc5acc868ef" +checksum = "0fa76aaf39101c457836aec0ce2316dbdc3ab723cdda1c6bd4e6ad4208acaca7" dependencies = [ "proc-macro2", ] @@ -2262,9 +3038,9 @@ dependencies = [ [[package]] name = "rayon" -version = "1.7.0" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d2df5196e37bcc87abebc0053e20787d73847bb33134a69841207dd0a47f03b" +checksum = "b418a60154510ca1a002a752ca9714984e21e4241e804d32555251faf8b78ffa" dependencies = [ "either", "rayon-core", @@ -2272,14 +3048,12 @@ dependencies = [ [[package]] name = "rayon-core" -version = "1.11.0" +version = "1.12.1" 
source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b8f95bd6966f5c87776639160a66bd8ab9895d9d4ab01ddba9fc60661aebe8d" +checksum = "1465873a3dfdaa8ae7cb14b4383657caab0b3e8a0aa9ae8e04b044854c8dfce2" dependencies = [ - "crossbeam-channel", "crossbeam-deque", "crossbeam-utils", - "num_cpus", ] [[package]] @@ -2290,54 +3064,66 @@ checksum = "41cc0f7e4d5d4544e8861606a285bb08d3e70712ccc7d2b84d7c0ccfaf4b05ce" [[package]] name = "redox_syscall" -version = "0.4.1" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa" +checksum = "c82cf8cff14456045f55ec4241383baeff27af886adb72ffb2162f99911de0fd" dependencies = [ - "bitflags 1.3.2", + "bitflags 2.6.0", ] [[package]] name = "redox_users" -version = "0.4.4" +version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a18479200779601e498ada4e8c1e1f50e3ee19deb0259c25825a98b5603b2cb4" +checksum = "bd283d9651eeda4b2a83a43c1c91b266c40fd76ecd39a50a8c630ae69dc72891" dependencies = [ "getrandom", "libredox", - "thiserror", + "thiserror 1.0.61", ] [[package]] name = "regex" -version = "1.7.1" +version = "1.10.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b91213439dad192326a0d7c6ee3955910425f441d7038e0d6933b0aec5c4517f" +dependencies = [ + "aho-corasick", + "memchr", + "regex-automata", + "regex-syntax 0.8.4", +] + +[[package]] +name = "regex-automata" +version = "0.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "48aaa5748ba571fb95cd2c85c09f629215d3a6ece942baa100950af03a34f733" +checksum = "38caf58cc5ef2fed281f89292ef23f6365465ed9a41b7a7754eb4e26496c92df" dependencies = [ "aho-corasick", "memchr", - "regex-syntax 0.6.28", + "regex-syntax 0.8.4", ] [[package]] name = "regex-syntax" -version = "0.6.28" +version = "0.6.29" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "456c603be3e8d448b072f410900c09faf164fbce2d480456f50eea6e25f9c848" +checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" [[package]] name = "regex-syntax" -version = "0.8.3" +version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "adad44e29e4c806119491a7f06f03de4d1af22c3a680dd47f1e6e179439d1f56" +checksum = "7a66a03ae7c801facd77a29370b4faec201768915ac14a721ba36f20bc9c209b" [[package]] name = "reqwest" -version = "0.11.22" +version = "0.11.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "046cd98826c46c2ac8ddecae268eb5c2e58628688a5fc7a2643704a73faba95b" +checksum = "dd67538700a17451e7cba03ac727fb961abb7607553461627b97de0b89cf4a62" dependencies = [ - "base64 0.21.5", + "base64 0.21.7", "bytes", "encoding_rs", "futures-core", @@ -2355,9 +3141,11 @@ dependencies = [ "once_cell", "percent-encoding", "pin-project-lite", + "rustls-pemfile", "serde", "serde_json", "serde_urlencoded", + "sync_wrapper", "system-configuration", "tokio", "tokio-native-tls", @@ -2414,12 +3202,12 @@ checksum = "cbbc9640790cebcb731289afb7a7d96d16ad94afeb64b5d0b66443bd151e79d6" dependencies = [ "alloy-primitives", "auto_impl", - "bitflags 2.5.0", + "bitflags 2.6.0", "bitvec", "cfg-if 1.0.0", "dyn-clone", "enumn", - "hashbrown 0.14.0", + "hashbrown 0.14.5", "hex", ] @@ -2475,6 +3263,19 @@ dependencies = [ "serde", ] +[[package]] +name = "rowan" +version = "0.15.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a58fa8a7ccff2aec4f39cc45bf5f985cec7125ab271cf681c279fd00192b49" +dependencies = [ + "countme", + "hashbrown 0.14.5", + "memoffset", + "rustc-hash 1.1.0", + "text-size", +] + [[package]] name = "rstest" version = "0.6.4" @@ -2490,9 +3291,9 @@ dependencies = [ [[package]] name = "ruint" -version = "1.12.1" +version = "1.12.3" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f308135fef9fc398342da5472ce7c484529df23743fb7c734e0f3d472971e62" +checksum = "2c3cc4c2511671f327125da14133d0c5c5d137f006a1017a16f557bc85b16286" dependencies = [ "alloy-rlp", "proptest", @@ -2505,15 +3306,49 @@ dependencies = [ [[package]] name = "ruint-macro" -version = "1.2.0" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "48fd7bd8a6377e15ad9d42a8ec25371b94ddc67abe7c8b9127bec79bebaaae18" + +[[package]] +name = "rust-embed" +version = "8.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa66af4a4fdd5e7ebc276f115e895611a34739a9c1c01028383d612d550953c0" +dependencies = [ + "rust-embed-impl", + "rust-embed-utils", + "walkdir", +] + +[[package]] +name = "rust-embed-impl" +version = "8.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6125dbc8867951125eec87294137f4e9c2c96566e61bf72c45095a7c77761478" +dependencies = [ + "proc-macro2", + "quote", + "rust-embed-utils", + "syn 2.0.90", + "walkdir", +] + +[[package]] +name = "rust-embed-utils" +version = "8.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f86854cf50259291520509879a5c294c3c9a4c334e9ff65071c51e42ef1e2343" +checksum = "2e5347777e9aacb56039b0e1f28785929a8a3b709e87482e7442c72e7c12529d" +dependencies = [ + "sha2", + "walkdir", +] [[package]] name = "rustc-demangle" -version = "0.1.23" +version = "0.1.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d626bb9dae77e28219937af045c257c28bfd3f69333c512553507f5f9798cb76" +checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" [[package]] name = "rustc-hash" @@ -2521,6 +3356,12 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" 
+[[package]] +name = "rustc-hash" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7fb8039b3032c191086b10f11f319a6e99e1e82889c5cc6046f515c9db1d497" + [[package]] name = "rustc-hex" version = "2.1.0" @@ -2542,33 +3383,42 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" dependencies = [ - "semver 1.0.16", + "semver 1.0.23", ] [[package]] name = "rustix" -version = "0.38.21" +version = "0.38.34" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b426b0506e5d50a7d8dafcf2e81471400deb602392c7dd110815afb4eaf02a3" +checksum = "70dc5ec042f7a43c4a73241207cecc9873a06d45debb38b329f8541d85c2730f" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", "errno", "libc", "linux-raw-sys", - "windows-sys", + "windows-sys 0.52.0", +] + +[[package]] +name = "rustls-pemfile" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1c74cae0a4cf6ccbbf5f359f08efdf8ee7e1dc532573bf0db71968cb56b1448c" +dependencies = [ + "base64 0.21.7", ] [[package]] name = "rustversion" -version = "1.0.11" +version = "1.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5583e89e108996506031660fe09baa5011b9dd0341b89029313006d1fb508d70" +checksum = "955d28af4278de8121b7ebeb796b6a45735dc01436d898801014aced2773a3d6" [[package]] name = "ryu" -version = "1.0.13" +version = "1.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f91339c0467de62360649f8d3e185ca8de4224ff281f66000de5eb2a77a79041" +checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f" [[package]] name = "salsa" @@ -2577,26 +3427,63 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4b84d9f96071f3f3be0dc818eae3327625d8ebc95b58da37d6850724f31d3403" 
dependencies = [ "crossbeam-utils", - "indexmap 1.9.2", + "indexmap 1.9.3", "lock_api", "log", "oorandom", - "parking_lot", - "rustc-hash", - "salsa-macros", + "parking_lot 0.11.1", + "rustc-hash 1.1.0", + "salsa-macros 0.16.0", + "smallvec", +] + +[[package]] +name = "salsa" +version = "0.18.0" +source = "git+https://github.com/salsa-rs/salsa?rev=e4d65a656fc68d0fb759b292ceae2aff2c785c5d#e4d65a656fc68d0fb759b292ceae2aff2c785c5d" +dependencies = [ + "append-only-vec", + "arc-swap", + "crossbeam", + "dashmap", + "hashlink", + "indexmap 2.2.6", + "parking_lot 0.12.3", + "rayon", + "rustc-hash 2.1.0", + "salsa-macro-rules", + "salsa-macros 0.18.0", "smallvec", + "tracing", ] +[[package]] +name = "salsa-macro-rules" +version = "0.1.0" +source = "git+https://github.com/salsa-rs/salsa?rev=e4d65a656fc68d0fb759b292ceae2aff2c785c5d#e4d65a656fc68d0fb759b292ceae2aff2c785c5d" + [[package]] name = "salsa-macros" version = "0.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cd3904a4ba0a9d0211816177fd34b04c7095443f8cdacd11175064fe541c8fe2" dependencies = [ - "heck 0.3.3", + "heck 0.3.3", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "salsa-macros" +version = "0.18.0" +source = "git+https://github.com/salsa-rs/salsa?rev=e4d65a656fc68d0fb759b292ceae2aff2c785c5d#e4d65a656fc68d0fb759b292ceae2aff2c785c5d" +dependencies = [ + "heck 0.5.0", "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.90", + "synstructure", ] [[package]] @@ -2610,9 +3497,9 @@ dependencies = [ [[package]] name = "scale-info" -version = "2.3.1" +version = "2.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "001cf62ece89779fd16105b5f515ad0e5cedcd5440d3dd806bb067978e7c3608" +checksum = "eca070c12893629e2cc820a9761bedf6ce1dcddc9852984d1dc734b8bd9bd024" dependencies = [ "cfg-if 1.0.0", "derive_more", @@ -2622,9 +3509,9 @@ dependencies = [ [[package]] name = "scale-info-derive" -version = 
"2.3.1" +version = "2.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "303959cf613a6f6efd19ed4b4ad5bf79966a13352716299ad532cfb115f4205c" +checksum = "2d35494501194174bda522a32605929eefc9ecf7e0a326c26db1fdd85881eb62" dependencies = [ "proc-macro-crate", "proc-macro2", @@ -2634,11 +3521,11 @@ dependencies = [ [[package]] name = "schannel" -version = "0.1.22" +version = "0.1.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c3733bf4cf7ea0880754e19cb5a462007c4a8c1914bff372ccc95b464f1df88" +checksum = "fbc91545643bcf3a0bbb6569265615222618bdf33ce4ffbbd13c4bbd4c093534" dependencies = [ - "windows-sys", + "windows-sys 0.52.0", ] [[package]] @@ -2649,15 +3536,15 @@ checksum = "e1cf6437eb19a8f4a6cc0f7dca544973b0b78843adbfeb3683d1a94a0024a294" [[package]] name = "scopeguard" -version = "1.1.0" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" +checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" [[package]] name = "sec1" -version = "0.7.1" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "48518a2b5775ba8ca5b46596aae011caa431e6ce7e4a67ead66d92f08884220e" +checksum = "d3e97a565f76233a6003f9f5c54be1d9c5bdfa3eccfb189469f11ec4901c47dc" dependencies = [ "base16ct", "der", @@ -2668,11 +3555,11 @@ dependencies = [ [[package]] name = "security-framework" -version = "2.9.2" +version = "2.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05b64fb303737d99b81884b2c63433e9ae28abebe5eb5045dcdd175dc2ecf4de" +checksum = "c627723fd09706bacdb5cf41499e95098555af3c3c29d014dc3c458ef6be11c0" dependencies = [ - "bitflags 1.3.2", + "bitflags 2.6.0", "core-foundation", "core-foundation-sys", "libc", @@ -2681,9 +3568,9 @@ dependencies = [ [[package]] name = "security-framework-sys" 
-version = "2.9.1" +version = "2.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e932934257d3b408ed8f30db49d85ea163bfe74961f017f405b025af298f0c7a" +checksum = "317936bbbd05227752583946b9e66d7ce3b489f84e11a94a510b4437fef407d7" dependencies = [ "core-foundation-sys", "libc", @@ -2700,9 +3587,9 @@ dependencies = [ [[package]] name = "semver" -version = "1.0.16" +version = "1.0.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "58bc9567378fc7690d6b2addae4e60ac2eeea07becb2c64b9f218b53865cba2a" +checksum = "61697e0a1c7e512e84a621326239844a24d8207b4669b41bc18b32ea5cbf988b" [[package]] name = "semver-parser" @@ -2712,9 +3599,9 @@ checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" [[package]] name = "serde" -version = "1.0.196" +version = "1.0.215" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "870026e60fa08c69f064aa766c10f10b1d62db9ccd4d0abb206472bee0ce3b32" +checksum = "6513c1ad0b11a9376da888e3e0baa0077f1aed55c17f50e7b2397136129fb88f" dependencies = [ "serde_derive", ] @@ -2731,40 +3618,51 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.196" +version = "1.0.215" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33c85360c95e7d137454dc81d9a4ed2b8efd8fbe19cee57357b32b9771fccb67" +checksum = "ad1e866f866923f252f05c889987993144fb74e722403468a4ebd70c3cd756c0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.90", ] [[package]] name = "serde_json" -version = "1.0.93" +version = "1.0.118" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cad406b69c91885b5107daf2c29572f6c8cdb3c66826821e286c533490c0bc76" +checksum = "d947f6b3163d8857ea16c4fa0dd4840d52f3041039a85decd46867eb1abef2e4" dependencies = [ "itoa", "ryu", "serde", ] +[[package]] +name = "serde_repr" +version = "0.1.19" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c64451ba24fc7a6a2d60fc75dd9c83c90903b19028d4eff35e88fc1e86564e9" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.90", +] + [[package]] name = "serde_spanned" -version = "0.6.3" +version = "0.6.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96426c9936fd7a0124915f9185ea1d20aa9445cc9821142f0a73bc9207a2e186" +checksum = "79e674e01f999af37c49f70a6ede167a8a60b2503e56c5599532a65baa5969a0" dependencies = [ "serde", ] [[package]] name = "serde_test" -version = "1.0.152" +version = "1.0.176" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3611210d2d67e3513204742004d6ac6f589e521861dabb0f649b070eea8bed9e" +checksum = "5a2f49ace1498612d14f7e0b8245519584db8299541dfe31a06374a828d620ab" dependencies = [ "serde", ] @@ -2783,9 +3681,9 @@ dependencies = [ [[package]] name = "sha2" -version = "0.10.6" +version = "0.10.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "82e6b795fe2e3b1e845bafcb27aa35405c4d47cdfc92af5fc8d3002f76cebdc0" +checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" dependencies = [ "cfg-if 1.0.0", "cpufeatures", @@ -2794,14 +3692,32 @@ dependencies = [ [[package]] name = "sha3" -version = "0.10.6" +version = "0.10.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bdf0c33fae925bdc080598b84bc15c55e7b9a4a43b3c704da051f977469691c9" +checksum = "75872d278a8f37ef87fa0ddbda7802605cb18344497949862c0d4dcb291eba60" dependencies = [ "digest", "keccak", ] +[[package]] +name = "sharded-slab" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6" +dependencies = [ + "lazy_static", +] + +[[package]] +name = "signal-hook-registry" +version = "1.4.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9e9e0b4211b72e7b8b6e85c807d36c212bdb33ea8587f7569562a84df5465b1" +dependencies = [ + "libc", +] + [[package]] name = "signature" version = "2.2.0" @@ -2814,9 +3730,9 @@ dependencies = [ [[package]] name = "similar" -version = "2.2.1" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "420acb44afdae038210c99e69aae24109f32f15500aa708e81d46c9f29d55fcf" +checksum = "fa42c91313f1d05da9b26f267f931cf178d4aba455b4c4622dd7355eb80c6640" [[package]] name = "slab" @@ -2829,37 +3745,44 @@ dependencies = [ [[package]] name = "smallvec" -version = "1.10.0" +version = "1.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0" +checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" [[package]] -name = "smol_str" -version = "0.1.24" +name = "smol" +version = "2.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fad6c857cbab2627dcf01ec85a623ca4e7dcb5691cbaa3d7fb7653671f0d09c9" +checksum = "a33bd3e260892199c3ccfc487c88b2da2265080acb316cd920da72fdfd7c599f" dependencies = [ - "serde", + "async-channel 2.3.1", + "async-executor", + "async-fs", + "async-io", + "async-lock", + "async-net", + "async-process", + "blocking", + "futures-lite", ] [[package]] -name = "socket2" -version = "0.4.10" +name = "smol_str" +version = "0.1.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f7916fc008ca5542385b89a3d3ce689953c143e9304a9bf8beec1de48994c0d" +checksum = "fad6c857cbab2627dcf01ec85a623ca4e7dcb5691cbaa3d7fb7653671f0d09c9" dependencies = [ - "libc", - "winapi", + "serde", ] [[package]] name = "socket2" -version = "0.5.5" +version = "0.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"7b5fac59a5cb5dd637972e5fca70daf0523c9067fcdc4842f053dae04a18f8e9" +checksum = "ce305eb0b4296696835b71df73eb912e0f1ffd2556a501fcede6e0c50349191c" dependencies = [ "libc", - "windows-sys", + "windows-sys 0.52.0", ] [[package]] @@ -2873,9 +3796,9 @@ dependencies = [ [[package]] name = "spin" -version = "0.5.2" +version = "0.9.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" +checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" [[package]] name = "static_assertions" @@ -2889,6 +3812,12 @@ version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" +[[package]] +name = "strsim" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" + [[package]] name = "strum" version = "0.23.0" @@ -2926,9 +3855,9 @@ dependencies = [ [[package]] name = "subtle" -version = "2.4.1" +version = "2.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6bdef32e8150c2a081110b42772ffe7d7c9032b606bc226c8260fd97e0976601" +checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" [[package]] name = "syn" @@ -2943,15 +3872,32 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.48" +version = "2.0.90" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f3531638e407dfc0814761abb7c00a5b54992b849452a0646b7f65c9f770f3f" +checksum = "919d3b74a5dd0ccd15aeb8f93e7006bd9e14c295087c9896a110f490752bcf31" dependencies = [ "proc-macro2", "quote", "unicode-ident", ] +[[package]] +name = "sync_wrapper" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" + +[[package]] +name = "synstructure" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.90", +] + [[package]] name = "system-configuration" version = "0.5.1" @@ -2981,26 +3927,31 @@ checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" [[package]] name = "tempfile" -version = "3.8.1" +version = "3.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ef1adac450ad7f4b3c28589471ade84f25f731a7a0fe30d71dfa9f60fd808e5" +checksum = "85b77fafb263dd9d05cbeac119526425676db3784113aa9295c88498cbf8bff1" dependencies = [ "cfg-if 1.0.0", "fastrand", - "redox_syscall 0.4.1", "rustix", - "windows-sys", + "windows-sys 0.52.0", ] [[package]] name = "termcolor" -version = "1.2.0" +version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be55cf8942feac5c765c2c993422806843c9a9a45d4d5c407ad6dd2ea95eb9b6" +checksum = "06794f8f6c5c898b3275aebefa6b8a1cb24cd2c6c79397ab15774837a0bc5755" dependencies = [ "winapi-util", ] +[[package]] +name = "text-size" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f18aa187839b2bdb1ad2fa35ead8c4c2976b64e4363c386d45ac0f7ee85c9233" + [[package]] name = "textwrap" version = "0.11.0" @@ -3012,28 +3963,58 @@ dependencies = [ [[package]] name = "textwrap" -version = "0.16.0" +version = "0.16.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23d434d3f8967a09480fb04132ebe0a3e088c173e6d0ee7897abbdf4eab0f8b9" + +[[package]] +name = "thiserror" +version = "1.0.61" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "222a222a5bfe1bba4a77b45ec488a741b3cb8872e5e499451fd7d0129c9c7c3d" 
+checksum = "c546c80d6be4bc6a00c0f01730c08df82eaa7a7a61f11d656526506112cc1709" +dependencies = [ + "thiserror-impl 1.0.61", +] [[package]] name = "thiserror" -version = "1.0.56" +version = "2.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d54378c645627613241d077a3a79db965db602882668f9136ac42af9ecb730ad" +checksum = "c006c85c7651b3cf2ada4584faa36773bd07bac24acfb39f3c431b36d7e667aa" dependencies = [ - "thiserror-impl", + "thiserror-impl 2.0.3", ] [[package]] name = "thiserror-impl" -version = "1.0.56" +version = "1.0.61" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa0faa943b50f3db30a20aa7e265dbc66076993efed8463e8de414e5d06d3471" +checksum = "46c3384250002a6d5af4d114f2845d37b57521033f30d5c3f46c4d70e1197533" dependencies = [ "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.90", +] + +[[package]] +name = "thiserror-impl" +version = "2.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f077553d607adc1caf65430528a576c757a71ed73944b66ebb58ef2bbd243568" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.90", +] + +[[package]] +name = "thread_local" +version = "1.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b9ef9bad013ada3808854ceac7b46812a6465ba368859a37e2100283d2d719c" +dependencies = [ + "cfg-if 1.0.0", + "once_cell", ] [[package]] @@ -3057,9 +4038,9 @@ dependencies = [ [[package]] name = "tinyvec" -version = "1.6.0" +version = "1.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50" +checksum = "c55115c6fbe2d2bef26eb09ad74bde02d8255476fc0c7b515ef09fbb35742d82" dependencies = [ "tinyvec_macros", ] @@ -3072,18 +4053,32 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.33.0" +version = "1.41.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f38200e3ef7995e5ef13baec2f432a6da0aa9ac495b2c0e8f3b7eec2c92d653" +checksum = "22cfb5bee7a6a52939ca9224d6ac897bb669134078daa8735560897f69de4d33" dependencies = [ "backtrace", "bytes", "libc", "mio", - "num_cpus", + "parking_lot 0.12.3", "pin-project-lite", - "socket2 0.5.5", - "windows-sys", + "signal-hook-registry", + "socket2", + "tokio-macros", + "tracing", + "windows-sys 0.52.0", +] + +[[package]] +name = "tokio-macros" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.90", ] [[package]] @@ -3098,16 +4093,15 @@ dependencies = [ [[package]] name = "tokio-util" -version = "0.7.10" +version = "0.7.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5419f34732d9eb6ee4c3578b7989078579b7f039cbbb9ca2c4da015749371e15" +checksum = "9cf6b47b3771c49ac75ad09a6162f53ad4b8088b76ac60e8ec1455b31a189fe1" dependencies = [ "bytes", "futures-core", "futures-sink", "pin-project-lite", "tokio", - "tracing", ] [[package]] @@ -3121,38 +4115,66 @@ dependencies = [ [[package]] name = "toml" -version = "0.7.6" +version = "0.7.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c17e963a819c331dcacd7ab957d80bc2b9a9c1e71c804826d2f283dd65306542" +checksum = "dd79e69d3b627db300ff956027cc6c3798cef26d22526befdfcd12feeb6d2257" dependencies = [ "serde", "serde_spanned", "toml_datetime", - "toml_edit", + "toml_edit 0.19.15", ] [[package]] name = "toml_datetime" -version = "0.6.3" +version = "0.6.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7cda73e2f1397b1262d6dfdcef8aafae14d1de7748d66822d3bfeeb6d03e5e4b" +checksum = "4badfd56924ae69bcc9039335b2e017639ce3f9b001c393c1b2d1ef846ce2cbf" dependencies = [ "serde", ] [[package]] name = 
"toml_edit" -version = "0.19.14" +version = "0.19.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8123f27e969974a3dfba720fdb560be359f57b44302d280ba72e76a74480e8a" +checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421" dependencies = [ - "indexmap 2.0.0", + "indexmap 2.2.6", "serde", "serde_spanned", "toml_datetime", "winnow", ] +[[package]] +name = "toml_edit" +version = "0.21.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a8534fd7f78b5405e860340ad6575217ce99f38d4d5c8f2442cb5ecb50090e1" +dependencies = [ + "indexmap 2.2.6", + "toml_datetime", + "winnow", +] + +[[package]] +name = "tower" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c" +dependencies = [ + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "tower-layer" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e" + [[package]] name = "tower-service" version = "0.3.2" @@ -3165,10 +4187,23 @@ version = "0.1.40" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" dependencies = [ + "log", "pin-project-lite", + "tracing-attributes", "tracing-core", ] +[[package]] +name = "tracing-attributes" +version = "0.1.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "395ae124c09f9e6918a2310af6038fba074bcf474ac352496d5910dd59a2226d" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.90", +] + [[package]] name = "tracing-core" version = "0.1.32" @@ -3176,6 +4211,44 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" dependencies = [ "once_cell", + "valuable", +] + +[[package]] +name = "tracing-log" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3" +dependencies = [ + "log", + "once_cell", + "tracing-core", +] + +[[package]] +name = "tracing-subscriber" +version = "0.3.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b" +dependencies = [ + "nu-ansi-term 0.46.0", + "sharded-slab", + "smallvec", + "thread_local", + "tracing-core", + "tracing-log", +] + +[[package]] +name = "tracing-tree" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f459ca79f1b0d5f71c54ddfde6debfc59c8b6eeb46808ae492077f739dc7b49c" +dependencies = [ + "nu-ansi-term 0.50.1", + "tracing-core", + "tracing-log", + "tracing-subscriber", ] [[package]] @@ -3190,15 +4263,15 @@ dependencies = [ [[package]] name = "try-lock" -version = "0.2.4" +version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3528ecfd12c466c6f163363caf2d02a71161dd5e1cc6ae7b34207ea2d42d81ed" +checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" [[package]] name = "typenum" -version = "1.16.0" +version = "1.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "497961ef93d974e23eb6f433eb5fe1b7930b659f06d12dec6fc44a8f554c0bba" +checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" [[package]] name = "uint" @@ -3226,54 +4299,73 @@ checksum = "ccb97dac3243214f8d8507998906ca3e2e0b900bf9bf4870477f125b82e68f6e" [[package]] name = "unicode-bidi" -version = "0.3.13" +version = "0.3.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"92888ba5573ff080736b3648696b70cafad7d250551175acbaa4e0385b3e1460" +checksum = "08f95100a766bf4f8f28f90d77e0a5461bbdb219042e7679bebe79004fed8d75" [[package]] name = "unicode-ident" -version = "1.0.7" +version = "1.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "775c11906edafc97bc378816b94585fbd9a054eabaf86fdd0ced94af449efab7" +checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" [[package]] name = "unicode-normalization" -version = "0.1.22" +version = "0.1.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c5713f0fc4b5db668a2ac63cdb7bb4469d8c9fed047b1d0292cc7b0ce2ba921" +checksum = "a56d1686db2308d901306f92a263857ef59ea39678a5458e7cb17f01415101f5" dependencies = [ "tinyvec", ] [[package]] name = "unicode-segmentation" -version = "1.10.1" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1dd624098567895118886609431a7c3b8f516e41d30e0643f03d94592a147e36" +checksum = "d4c87d22b6e3f4a18d4d40ef354e97c90fcb14dd91d7dc0aa9d8a1172ebf7202" [[package]] name = "unicode-width" -version = "0.1.10" +version = "0.1.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0336d538f7abc86d282a4189614dfaa90810dfc2c6f6427eaf88e16311dd225d" + +[[package]] +name = "unwrap-infallible" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b" +checksum = "151ac09978d3c2862c4e39b557f4eceee2cc72150bc4cb4f16abf061b6e381fb" [[package]] name = "url" -version = "2.5.0" +version = "2.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31e6302e3bb753d46e83516cae55ae196fc0c309407cf11ab35cc51a4c2a4633" +checksum = "22784dbdf76fdde8af1aeda5622b546b422b6fc585325248a2bf9f5e41e94d6c" dependencies = [ "form_urlencoded", "idna", "percent-encoding", + "serde", ] 
+[[package]] +name = "utf8parse" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" + [[package]] name = "valuable" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" +[[package]] +name = "value-bag" +version = "1.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3ef4c4aa54d5d05a279399bfa921ec387b7aba77caf7a682ae8d86785b8fdad2" + [[package]] name = "vcpkg" version = "0.2.15" @@ -3282,9 +4374,9 @@ checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" [[package]] name = "vec1" -version = "1.10.1" +version = "1.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2bda7c41ca331fe9a1c278a9e7ee055f4be7f5eb1c2b72f079b4ff8b5fce9d5c" +checksum = "eab68b56840f69efb0fefbe3ab6661499217ffdc58e2eef7c3f6f69835386322" dependencies = [ "serde", ] @@ -3301,17 +4393,26 @@ version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "67fe15bc81afdceeaedd710d237f9043e895fd619db42cb503d9bfbb27a9f3a1" dependencies = [ - "thiserror", + "thiserror 1.0.61", +] + +[[package]] +name = "waitpid-any" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0189157c93c54d86e5c61ddf0c1223baa25e5bfb2f6f9983c678985b028d7c12" +dependencies = [ + "rustix", + "windows-sys 0.52.0", ] [[package]] name = "walkdir" -version = "2.3.2" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "808cf2735cd4b6866113f648b791c6adc5714537bc222d9347bb203386ffda56" +checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b" dependencies = [ "same-file", - "winapi", "winapi-util", ] @@ -3332,9 +4433,9 @@ checksum = 
"9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] name = "wasm-bindgen" -version = "0.2.84" +version = "0.2.92" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31f8dcbc21f30d9b8f2ea926ecb58f6b91192c17e9d33594b3df58b2007ca53b" +checksum = "4be2531df63900aeb2bca0daaaddec08491ee64ceecbee5076636a3b026795a8" dependencies = [ "cfg-if 1.0.0", "wasm-bindgen-macro", @@ -3342,24 +4443,24 @@ dependencies = [ [[package]] name = "wasm-bindgen-backend" -version = "0.2.84" +version = "0.2.92" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95ce90fd5bcc06af55a641a86428ee4229e44e07033963a2290a8e241607ccb9" +checksum = "614d787b966d3989fa7bb98a654e369c762374fd3213d212cfc0251257e747da" dependencies = [ "bumpalo", "log", "once_cell", "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.90", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-futures" -version = "0.4.34" +version = "0.4.42" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f219e0d211ba40266969f6dbdd90636da12f75bee4fc9d6c23d1260dadb51454" +checksum = "76bc14366121efc8dbb487ab05bcc9d346b3b5ec0eaa76e46594cabbe51762c0" dependencies = [ "cfg-if 1.0.0", "js-sys", @@ -3369,9 +4470,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.84" +version = "0.2.92" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c21f77c0bedc37fd5dc21f897894a5ca01e7bb159884559461862ae90c0b4c5" +checksum = "a1f8823de937b71b9460c0c34e25f3da88250760bec0ebac694b49997550d726" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -3379,28 +4480,28 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.84" +version = "0.2.92" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2aff81306fcac3c7515ad4e177f521b5c9a15f2b08f4e32d823066102f35a5f6" +checksum = 
"e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7" dependencies = [ "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.90", "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.84" +version = "0.2.92" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0046fef7e28c3804e5e38bfa31ea2a0f73905319b677e57ebe37e49358989b5d" +checksum = "af190c94f2773fdb3729c55b007a722abb5384da03bc0986df4c289bf5567e96" [[package]] name = "wasm-bindgen-test" -version = "0.3.34" +version = "0.3.42" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6db36fc0f9fb209e88fb3642590ae0205bb5a56216dabd963ba15879fe53a30b" +checksum = "d9bf62a58e0780af3e852044583deee40983e5886da43a271dd772379987667b" dependencies = [ "console_error_panic_hook", "js-sys", @@ -3412,19 +4513,20 @@ dependencies = [ [[package]] name = "wasm-bindgen-test-macro" -version = "0.3.34" +version = "0.3.42" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0734759ae6b3b1717d661fe4f016efcfb9828f5edb4520c18eaee05af3b43be9" +checksum = "b7f89739351a2e03cb94beb799d47fb2cac01759b40ec441f7de39b00cbf7ef0" dependencies = [ "proc-macro2", "quote", + "syn 2.0.90", ] [[package]] name = "web-sys" -version = "0.3.61" +version = "0.3.69" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e33b99f4b23ba3eec1a53ac264e35a755f00e966e0065077d6027c0f575b0b97" +checksum = "77afa9a11836342370f4817622a2f0f418b134426d91a82dfb48f532d2ec13ef" dependencies = [ "js-sys", "wasm-bindgen", @@ -3448,11 +4550,11 @@ checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" [[package]] name = "winapi-util" -version = "0.1.5" +version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178" +checksum = 
"4d4cc384e1e73b93bafa6fb4f1df8c41695c8a91cf9c4c64358067d15a7b6c6b" dependencies = [ - "winapi", + "windows-sys 0.52.0", ] [[package]] @@ -3467,7 +4569,16 @@ version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" dependencies = [ - "windows-targets", + "windows-targets 0.48.5", +] + +[[package]] +name = "windows-sys" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +dependencies = [ + "windows-targets 0.52.5", ] [[package]] @@ -3476,13 +4587,29 @@ version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" dependencies = [ - "windows_aarch64_gnullvm", - "windows_aarch64_msvc", - "windows_i686_gnu", - "windows_i686_msvc", - "windows_x86_64_gnu", - "windows_x86_64_gnullvm", - "windows_x86_64_msvc", + "windows_aarch64_gnullvm 0.48.5", + "windows_aarch64_msvc 0.48.5", + "windows_i686_gnu 0.48.5", + "windows_i686_msvc 0.48.5", + "windows_x86_64_gnu 0.48.5", + "windows_x86_64_gnullvm 0.48.5", + "windows_x86_64_msvc 0.48.5", +] + +[[package]] +name = "windows-targets" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6f0713a46559409d202e70e28227288446bf7841d3211583a4b53e3f6d96e7eb" +dependencies = [ + "windows_aarch64_gnullvm 0.52.5", + "windows_aarch64_msvc 0.52.5", + "windows_i686_gnu 0.52.5", + "windows_i686_gnullvm", + "windows_i686_msvc 0.52.5", + "windows_x86_64_gnu 0.52.5", + "windows_x86_64_gnullvm 0.52.5", + "windows_x86_64_msvc 0.52.5", ] [[package]] @@ -3491,47 +4618,95 @@ version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" +[[package]] +name = 
"windows_aarch64_gnullvm" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7088eed71e8b8dda258ecc8bac5fb1153c5cffaf2578fc8ff5d61e23578d3263" + [[package]] name = "windows_aarch64_msvc" version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9985fd1504e250c615ca5f281c3f7a6da76213ebd5ccc9561496568a2752afb6" + [[package]] name = "windows_i686_gnu" version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" +[[package]] +name = "windows_i686_gnu" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "88ba073cf16d5372720ec942a8ccbf61626074c6d4dd2e745299726ce8b89670" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87f4261229030a858f36b459e748ae97545d6f1ec60e5e0d6a3d32e0dc232ee9" + [[package]] name = "windows_i686_msvc" version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" +[[package]] +name = "windows_i686_msvc" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db3c2bf3d13d5b658be73463284eaf12830ac9a26a90c717b7f771dfe97487bf" + [[package]] name = "windows_x86_64_gnu" version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" +[[package]] +name = "windows_x86_64_gnu" +version = "0.52.5" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "4e4246f76bdeff09eb48875a0fd3e2af6aada79d409d33011886d3e1581517d9" + [[package]] name = "windows_x86_64_gnullvm" version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "852298e482cd67c356ddd9570386e2862b5673c85bd5f88df9ab6802b334c596" + [[package]] name = "windows_x86_64_msvc" version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bec47e5bfd1bff0eeaf6d8b485cc1074891a197ab4225d504cb7a1ab88b02bf0" + [[package]] name = "winnow" -version = "0.5.36" +version = "0.5.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "818ce546a11a9986bc24f93d0cdf38a8a1a400f1473ea8c82e59f6e0ffab9249" +checksum = "f593a95398737aeed53e489c785df13f3618e41dbcd6718c6addbf1395aa6876" dependencies = [ "memchr", ] @@ -3543,7 +4718,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "524e57b2c537c0f9b1e69f1965311ec12182b4122e45035b1508cd24d2adadb1" dependencies = [ "cfg-if 1.0.0", - "windows-sys", + "windows-sys 0.48.0", ] [[package]] @@ -3555,15 +4730,6 @@ dependencies = [ "tap", ] -[[package]] -name = "yaml-rust" -version = "0.4.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56c1936c4cc7a1c9ab21a1ebb602eb942ba868cbd44a99cb7cdc5892335e1c85" -dependencies = [ - "linked-hash-map", -] - [[package]] name = "yansi" version = "0.5.1" @@ -3578,8 +4744,28 @@ dependencies = [ "indenter", ] 
+[[package]] +name = "zerocopy" +version = "0.7.34" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae87e3fcd617500e5d106f0380cf7b77f3c6092aae37191433159dda23cfb087" +dependencies = [ + "zerocopy-derive", +] + +[[package]] +name = "zerocopy-derive" +version = "0.7.34" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "15e934569e47891f7d9411f1a451d947a60e000ab3bd24fbb970f000387d1b3b" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.90", +] + [[package]] name = "zeroize" -version = "1.7.0" +version = "1.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "525b4ec142c6b68a2d10f01f7bbf6755599ca3f81ea53b8431b7dd348f5fdb2d" +checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde" diff --git a/Cargo.toml b/Cargo.toml index 7f3d0639ca..fffc467acf 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -5,5 +5,9 @@ resolver = "2" [profile.dev.package.solc] opt-level = 3 +[workspace.dependencies] +salsa = { git = "https://github.com/salsa-rs/salsa", rev = "e4d65a656fc68d0fb759b292ceae2aff2c785c5d" } + [profile.dev] +# Set to 0 to make the build faster and debugging more difficult. 
debug = 1 diff --git a/Makefile b/Makefile index f0849e0991..976cff8b66 100644 --- a/Makefile +++ b/Makefile @@ -71,7 +71,7 @@ docker-wasm-test: --volume "$(shell pwd):/mnt" \ --workdir '/mnt' \ davesque/rust-wasm \ - wasm-pack test --node -- --workspace + wasm-pack test --node -- --workspace --exclude fe-language-server .PHONY: coverage coverage: diff --git a/crates/analyzer/Cargo.toml b/crates/analyzer/Cargo.toml index 12ab3eea8f..31f52d5ff2 100644 --- a/crates/analyzer/Cargo.toml +++ b/crates/analyzer/Cargo.toml @@ -25,7 +25,7 @@ petgraph = "0.6.0" smol_str = "0.1.21" [dev-dependencies] -insta = { default-features = false, version = "1.7.1" } +insta = { default-features = false, version = "1.26.0" } rstest = "0.6.4" test-files = {path = "../test-files", package = "fe-test-files" } tests = {path = "../tests", package = "fe-compiler-tests" } diff --git a/crates/analyzer/benches/bench.rs b/crates/analyzer/benches/bench.rs index b9403a89a9..09f5e734d7 100644 --- a/crates/analyzer/benches/bench.rs +++ b/crates/analyzer/benches/bench.rs @@ -1,6 +1,5 @@ use criterion::{criterion_group, criterion_main, BatchSize, Criterion}; -use fe_analyzer::namespace::items::ModuleId; -use fe_analyzer::TestDb; +use fe_analyzer::{namespace::items::ModuleId, TestDb}; fn criterion_benchmark(c: &mut Criterion) { let path = "demos/uniswap.fe"; diff --git a/crates/analyzer/src/context.rs b/crates/analyzer/src/context.rs index 180b741cce..7cb202d0bf 100644 --- a/crates/analyzer/src/context.rs +++ b/crates/analyzer/src/context.rs @@ -5,33 +5,34 @@ use crate::{ pattern_analysis::PatternMatrix, }; -use crate::namespace::items::{ - ContractId, DiagnosticSink, FunctionId, FunctionSigId, Item, TraitId, -}; -use crate::namespace::types::{Generic, SelfDecl, Type, TypeId}; -use crate::AnalyzerDb; use crate::{ builtins::{ContractTypeMethod, GlobalFunction, Intrinsic, ValueMethod}, - namespace::scopes::BlockScopeType, -}; -use crate::{ errors::{self, IncompleteItem, TypeError}, - 
namespace::items::ModuleId, + namespace::{ + items::{ContractId, DiagnosticSink, FunctionId, FunctionSigId, Item, ModuleId, TraitId}, + scopes::BlockScopeType, + types::{Generic, SelfDecl, Type, TypeId}, + }, + AnalyzerDb, }; -use fe_common::diagnostics::Diagnostic; pub use fe_common::diagnostics::Label; -use fe_common::Span; -use fe_parser::ast; -use fe_parser::node::{Node, NodeId}; +use fe_common::{diagnostics::Diagnostic, Span}; +use fe_parser::{ + ast, + node::{Node, NodeId}, +}; use indexmap::IndexMap; use num_bigint::BigInt; use smol_str::SmolStr; -use std::fmt::{self, Debug}; -use std::hash::Hash; -use std::marker::PhantomData; -use std::rc::Rc; -use std::{cell::RefCell, collections::HashMap}; +use std::{ + cell::RefCell, + collections::HashMap, + fmt::{self, Debug}, + hash::Hash, + marker::PhantomData, + rc::Rc, +}; #[derive(Debug, PartialEq, Eq, Hash, Clone)] pub struct Analysis { @@ -54,7 +55,8 @@ pub trait AnalyzerContext { fn resolve_name(&self, name: &str, span: Span) -> Result, IncompleteItem>; /// Resolves the given path and registers all errors fn resolve_path(&self, path: &ast::Path, span: Span) -> Result; - /// Resolves the given path only if it is visible. Does not register any errors + /// Resolves the given path only if it is visible. Does not register any + /// errors fn resolve_visible_path(&self, path: &ast::Path) -> Option; /// Resolves the given path. 
Does not register any errors fn resolve_any_path(&self, path: &ast::Path) -> Option; diff --git a/crates/analyzer/src/db.rs b/crates/analyzer/src/db.rs index bfe6f08ebf..7ecf6078d1 100644 --- a/crates/analyzer/src/db.rs +++ b/crates/analyzer/src/db.rs @@ -1,20 +1,20 @@ #![allow(clippy::arc_with_non_send_sync)] -use crate::namespace::items::{ - self, AttributeId, ContractFieldId, ContractId, DepGraphWrapper, EnumVariantKind, FunctionId, - FunctionSigId, ImplId, IngotId, Item, ModuleConstantId, ModuleId, StructFieldId, StructId, - TraitId, TypeAliasId, -}; -use crate::namespace::types::{self, Type, TypeId}; use crate::{ context::{Analysis, Constant, FunctionBody}, - namespace::items::EnumId, -}; -use crate::{ errors::{ConstEvalError, TypeError}, - namespace::items::EnumVariantId, + namespace::{ + items::{ + self, AttributeId, ContractFieldId, ContractId, DepGraphWrapper, EnumId, EnumVariantId, + EnumVariantKind, FunctionId, FunctionSigId, ImplId, IngotId, Item, ModuleConstantId, + ModuleId, StructFieldId, StructId, TraitId, TypeAliasId, + }, + types::{self, Type, TypeId}, + }, +}; +use fe_common::{ + db::{SourceDb, SourceDbStorage, Upcast, UpcastMut}, + SourceFileId, Span, }; -use fe_common::db::{SourceDb, SourceDbStorage, Upcast, UpcastMut}; -use fe_common::{SourceFileId, Span}; use fe_parser::ast; use indexmap::map::IndexMap; use smol_str::SmolStr; diff --git a/crates/analyzer/src/db/queries/contracts.rs b/crates/analyzer/src/db/queries/contracts.rs index 1e00b9f3fa..44ac44bf80 100644 --- a/crates/analyzer/src/db/queries/contracts.rs +++ b/crates/analyzer/src/db/queries/contracts.rs @@ -1,13 +1,17 @@ -use crate::context::AnalyzerContext; -use crate::db::{Analysis, AnalyzerDb}; -use crate::errors; -use crate::namespace::items::{ - self, ContractFieldId, ContractId, DepGraph, DepGraphWrapper, DepLocality, FunctionId, Item, - TypeDef, +use crate::{ + context::AnalyzerContext, + db::{Analysis, AnalyzerDb}, + errors, + namespace::{ + items::{ + self, ContractFieldId, 
ContractId, DepGraph, DepGraphWrapper, DepLocality, FunctionId, + Item, TypeDef, + }, + scopes::ItemScope, + types::{self, Type}, + }, + traversal::types::type_desc, }; -use crate::namespace::scopes::ItemScope; -use crate::namespace::types::{self, Type}; -use crate::traversal::types::type_desc; use fe_common::diagnostics::Label; use fe_parser::ast; use indexmap::map::{Entry, IndexMap}; diff --git a/crates/analyzer/src/db/queries/functions.rs b/crates/analyzer/src/db/queries/functions.rs index c0e3835d55..9876a28a6e 100644 --- a/crates/analyzer/src/db/queries/functions.rs +++ b/crates/analyzer/src/db/queries/functions.rs @@ -1,21 +1,26 @@ -use crate::context::{AnalyzerContext, CallType, FunctionBody}; -use crate::db::{Analysis, AnalyzerDb}; -use crate::display::Displayable; -use crate::errors::TypeError; -use crate::namespace::items::{ - DepGraph, DepGraphWrapper, DepLocality, FunctionId, FunctionSigId, Item, TypeDef, +use crate::{ + context::{AnalyzerContext, CallType, FunctionBody}, + db::{Analysis, AnalyzerDb}, + display::Displayable, + errors::TypeError, + namespace::{ + items::{DepGraph, DepGraphWrapper, DepLocality, FunctionId, FunctionSigId, Item, TypeDef}, + scopes::{BlockScope, BlockScopeType, FunctionScope, ItemScope}, + types::{self, CtxDecl, Generic, SelfDecl, Type, TypeId}, + }, + traversal::{ + functions::traverse_statements, + types::{type_desc, type_desc_to_trait}, + }, }; -use crate::namespace::scopes::{BlockScope, BlockScopeType, FunctionScope, ItemScope}; -use crate::namespace::types::{self, CtxDecl, Generic, SelfDecl, Type, TypeId}; -use crate::traversal::functions::traverse_statements; -use crate::traversal::types::{type_desc, type_desc_to_trait}; use fe_common::diagnostics::Label; -use fe_parser::ast::{self, GenericParameter}; -use fe_parser::node::Node; +use fe_parser::{ + ast::{self, GenericParameter}, + node::Node, +}; use if_chain::if_chain; use smol_str::SmolStr; -use std::collections::HashMap; -use std::rc::Rc; +use 
std::{collections::HashMap, rc::Rc}; /// Gather context information for a function definition and check for type /// errors. Does not inspect the function body. diff --git a/crates/analyzer/src/db/queries/impls.rs b/crates/analyzer/src/db/queries/impls.rs index 86b39ac7eb..17336e8bf0 100644 --- a/crates/analyzer/src/db/queries/impls.rs +++ b/crates/analyzer/src/db/queries/impls.rs @@ -1,11 +1,14 @@ -use indexmap::map::Entry; -use indexmap::IndexMap; +use indexmap::{map::Entry, IndexMap}; use smol_str::SmolStr; -use crate::context::{Analysis, AnalyzerContext}; -use crate::namespace::items::{Function, FunctionId, ImplId, Item}; -use crate::namespace::scopes::ItemScope; -use crate::AnalyzerDb; +use crate::{ + context::{Analysis, AnalyzerContext}, + namespace::{ + items::{Function, FunctionId, ImplId, Item}, + scopes::ItemScope, + }, + AnalyzerDb, +}; use std::rc::Rc; pub fn impl_all_functions(db: &dyn AnalyzerDb, impl_: ImplId) -> Rc<[FunctionId]> { diff --git a/crates/analyzer/src/db/queries/ingots.rs b/crates/analyzer/src/db/queries/ingots.rs index 231f9673fe..fb3673a9ac 100644 --- a/crates/analyzer/src/db/queries/ingots.rs +++ b/crates/analyzer/src/db/queries/ingots.rs @@ -1,5 +1,7 @@ -use crate::namespace::items::{IngotId, IngotMode, ModuleId, ModuleSource}; -use crate::AnalyzerDb; +use crate::{ + namespace::items::{IngotId, IngotMode, ModuleId, ModuleSource}, + AnalyzerDb, +}; use fe_common::files::{SourceFileId, Utf8Path, Utf8PathBuf}; use indexmap::IndexSet; use std::rc::Rc; diff --git a/crates/analyzer/src/db/queries/module.rs b/crates/analyzer/src/db/queries/module.rs index 6874ffbae4..057e6a7267 100644 --- a/crates/analyzer/src/db/queries/module.rs +++ b/crates/analyzer/src/db/queries/module.rs @@ -1,21 +1,25 @@ -use crate::context::{Analysis, AnalyzerContext, Constant, NamedThing}; -use crate::display::Displayable; -use crate::errors::{self, ConstEvalError, TypeError}; -use crate::namespace::items::{ - Attribute, Contract, ContractId, Enum, Function, 
FunctionId, Impl, ImplId, Item, - ModuleConstant, ModuleConstantId, ModuleId, ModuleSource, Struct, StructId, Trait, TraitId, - TypeAlias, TypeDef, +use crate::{ + context::{Analysis, AnalyzerContext, Constant, NamedThing}, + display::Displayable, + errors::{self, ConstEvalError, TypeError}, + namespace::{ + items::{ + Attribute, Contract, ContractId, Enum, Function, FunctionId, Impl, ImplId, Item, + ModuleConstant, ModuleConstantId, ModuleId, ModuleSource, Struct, StructId, Trait, + TraitId, TypeAlias, TypeDef, + }, + scopes::ItemScope, + types::{self, TypeId}, + }, + traversal::{const_expr, expressions, types::type_desc}, + AnalyzerDb, }; -use crate::namespace::scopes::ItemScope; -use crate::namespace::types::{self, TypeId}; -use crate::traversal::{const_expr, expressions, types::type_desc}; -use crate::AnalyzerDb; -use fe_common::diagnostics::Label; -use fe_common::files::Utf8Path; -use fe_common::Span; +use fe_common::{diagnostics::Label, files::Utf8Path, Span}; use fe_parser::{ast, node::Node}; -use indexmap::indexmap; -use indexmap::map::{Entry, IndexMap}; +use indexmap::{ + indexmap, + map::{Entry, IndexMap}, +}; use smol_str::SmolStr; use std::rc::Rc; diff --git a/crates/analyzer/src/db/queries/structs.rs b/crates/analyzer/src/db/queries/structs.rs index 1c7686f757..8e41ff9fb8 100644 --- a/crates/analyzer/src/db/queries/structs.rs +++ b/crates/analyzer/src/db/queries/structs.rs @@ -1,22 +1,25 @@ -use crate::builtins; -use crate::constants::MAX_INDEXED_EVENT_FIELDS; -use crate::context::AnalyzerContext; -use crate::db::Analysis; -use crate::errors::TypeError; -use crate::namespace::items::{ - self, DepGraph, DepGraphWrapper, DepLocality, FunctionId, Item, StructField, StructFieldId, - StructId, TypeDef, +use crate::{ + builtins, + constants::MAX_INDEXED_EVENT_FIELDS, + context::AnalyzerContext, + db::Analysis, + errors::TypeError, + namespace::{ + items::{ + self, DepGraph, DepGraphWrapper, DepLocality, FunctionId, Item, StructField, + StructFieldId, 
StructId, TypeDef, + }, + scopes::ItemScope, + types::{Type, TypeId}, + }, + traversal::types::type_desc, + AnalyzerDb, }; -use crate::namespace::scopes::ItemScope; -use crate::namespace::types::{Type, TypeId}; -use crate::traversal::types::type_desc; -use crate::AnalyzerDb; use fe_common::utils::humanize::pluralize_conditionally; use fe_parser::{ast, Label}; use indexmap::map::{Entry, IndexMap}; use smol_str::SmolStr; -use std::rc::Rc; -use std::str::FromStr; +use std::{rc::Rc, str::FromStr}; pub fn struct_all_fields(db: &dyn AnalyzerDb, struct_: StructId) -> Rc<[StructFieldId]> { struct_ diff --git a/crates/analyzer/src/db/queries/traits.rs b/crates/analyzer/src/db/queries/traits.rs index 435dc69653..1dcff8c68d 100644 --- a/crates/analyzer/src/db/queries/traits.rs +++ b/crates/analyzer/src/db/queries/traits.rs @@ -1,12 +1,15 @@ -use indexmap::map::Entry; -use indexmap::IndexMap; +use indexmap::{map::Entry, IndexMap}; use smol_str::SmolStr; -use crate::context::{Analysis, AnalyzerContext}; -use crate::namespace::items::{FunctionSig, FunctionSigId, Item, TraitId}; -use crate::namespace::scopes::ItemScope; -use crate::namespace::types::TypeId; -use crate::AnalyzerDb; +use crate::{ + context::{Analysis, AnalyzerContext}, + namespace::{ + items::{FunctionSig, FunctionSigId, Item, TraitId}, + scopes::ItemScope, + types::TypeId, + }, + AnalyzerDb, +}; use std::rc::Rc; pub fn trait_all_functions(db: &dyn AnalyzerDb, trait_: TraitId) -> Rc<[FunctionSigId]> { diff --git a/crates/analyzer/src/db/queries/types.rs b/crates/analyzer/src/db/queries/types.rs index eec14d4a45..218f53e91e 100644 --- a/crates/analyzer/src/db/queries/types.rs +++ b/crates/analyzer/src/db/queries/types.rs @@ -2,14 +2,18 @@ use std::rc::Rc; use smol_str::SmolStr; -use crate::context::{AnalyzerContext, TempContext}; -use crate::db::Analysis; -use crate::errors::TypeError; -use crate::namespace::items::{FunctionSigId, ImplId, TraitId, TypeAliasId}; -use crate::namespace::scopes::ItemScope; -use 
crate::namespace::types::{self, TypeId}; -use crate::traversal::types::type_desc; -use crate::AnalyzerDb; +use crate::{ + context::{AnalyzerContext, TempContext}, + db::Analysis, + errors::TypeError, + namespace::{ + items::{FunctionSigId, ImplId, TraitId, TypeAliasId}, + scopes::ItemScope, + types::{self, TypeId}, + }, + traversal::types::type_desc, + AnalyzerDb, +}; /// Returns all `impl` for the given type from the current ingot as well as /// dependency ingots diff --git a/crates/analyzer/src/errors.rs b/crates/analyzer/src/errors.rs index 480b509da3..f104521d51 100644 --- a/crates/analyzer/src/errors.rs +++ b/crates/analyzer/src/errors.rs @@ -1,8 +1,10 @@ //! Semantic errors. use crate::context::{DiagnosticVoucher, NamedThing}; -use fe_common::diagnostics::{Diagnostic, Label, Severity}; -use fe_common::Span; +use fe_common::{ + diagnostics::{Diagnostic, Label, Severity}, + Span, +}; use std::fmt::Display; /// Error indicating that a type is invalid. @@ -11,18 +13,21 @@ use std::fmt::Display; /// in [`crate::namespace::types`] is sometimes represented as a /// `Result`. /// -/// If, for example, a function parameter has an undefined type, we emit a [`Diagnostic`] message, -/// give that parameter a "type" of `Err(TypeError)`, and carry on. If/when that parameter is -/// used in the function body, we assume that a diagnostic message about the undefined type -/// has already been emitted, and halt the analysis of the function body. +/// If, for example, a function parameter has an undefined type, we emit a +/// [`Diagnostic`] message, give that parameter a "type" of `Err(TypeError)`, +/// and carry on. If/when that parameter is used in the function body, we assume +/// that a diagnostic message about the undefined type has already been emitted, +/// and halt the analysis of the function body. /// -/// To ensure that that assumption is sound, a diagnostic *must* be emitted before creating -/// a `TypeError`. 
So that the rust compiler can help us enforce this rule, a `TypeError` -/// cannot be constructed without providing a [`DiagnosticVoucher`]. A voucher can be obtained -/// by calling an error function on an [`AnalyzerContext`](crate::context::AnalyzerContext). +/// To ensure that that assumption is sound, a diagnostic *must* be emitted +/// before creating a `TypeError`. So that the rust compiler can help us enforce +/// this rule, a `TypeError` cannot be constructed without providing a +/// [`DiagnosticVoucher`]. A voucher can be obtained by calling an error +/// function on an [`AnalyzerContext`](crate::context::AnalyzerContext). /// Please don't try to work around this restriction. /// -/// Example: `TypeError::new(context.error("something is wrong", some_span, "this thing"))` +/// Example: `TypeError::new(context.error("something is wrong", some_span, +/// "this thing"))` #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct TypeError(DiagnosticVoucher); impl TypeError { @@ -44,15 +49,16 @@ impl From for TypeError { } } -/// Error to be returned when otherwise no meaningful information can be returned. -/// Can't be created unless a diagnostic has been emitted, and thus a [`DiagnosticVoucher`] -/// has been obtained. (See comment on [`TypeError`]) +/// Error to be returned when otherwise no meaningful information can be +/// returned. Can't be created unless a diagnostic has been emitted, and thus a +/// [`DiagnosticVoucher`] has been obtained. (See comment on [`TypeError`]) #[derive(Debug)] pub struct FatalError(DiagnosticVoucher); impl FatalError { /// Create a `FatalError` instance, given a "voucher" - /// obtained by emitting an error via an [`AnalyzerContext`](crate::context::AnalyzerContext). + /// obtained by emitting an error via an + /// [`AnalyzerContext`](crate::context::AnalyzerContext). pub fn new(voucher: DiagnosticVoucher) -> Self { Self(voucher) } @@ -77,8 +83,8 @@ impl From for FatalError { /// 2. 
arithmetic overflow occurred during evaluation /// 3. zero division is detected during evaluation /// -/// Can't be created unless a diagnostic has been emitted, and thus a [`DiagnosticVoucher`] -/// has been obtained. (See comment on [`TypeError`]) +/// Can't be created unless a diagnostic has been emitted, and thus a +/// [`DiagnosticVoucher`] has been obtained. (See comment on [`TypeError`]) /// /// NOTE: `Clone` is required because these are stored in a salsa db. /// Please don't clone these manually. @@ -109,10 +115,11 @@ impl From for ConstEvalError { } } -/// Error returned by `ModuleId::resolve_name` if the name is not found, and parsing of the module -/// failed. In this case, emitting an error message about failure to resolve the name might be misleading, -/// because the file may in fact contain an item with the given name, somewhere after the syntax error that caused -/// parsing to fail. +/// Error returned by `ModuleId::resolve_name` if the name is not found, and +/// parsing of the module failed. In this case, emitting an error message about +/// failure to resolve the name might be misleading, because the file may in +/// fact contain an item with the given name, somewhere after the syntax error +/// that caused parsing to fail. 
#[derive(Debug)] pub struct IncompleteItem(DiagnosticVoucher); impl IncompleteItem { diff --git a/crates/analyzer/src/namespace/items.rs b/crates/analyzer/src/namespace/items.rs index 2ff5a3c625..d1dccf37eb 100644 --- a/crates/analyzer/src/namespace/items.rs +++ b/crates/analyzer/src/namespace/items.rs @@ -1,24 +1,29 @@ -use crate::constants::{EMITTABLE_TRAIT_NAME, INDEXED}; -use crate::context::{self, Analysis, Constant, NamedThing}; -use crate::display::{DisplayWithDb, Displayable}; -use crate::errors::{self, IncompleteItem, TypeError}; -use crate::namespace::types::{self, GenericType, Type, TypeId}; -use crate::traversal::pragma::check_pragma_version; -use crate::AnalyzerDb; -use crate::{builtins, errors::ConstEvalError}; -use fe_common::diagnostics::Diagnostic; -use fe_common::diagnostics::Label; -use fe_common::files::{common_prefix, Utf8Path}; -use fe_common::utils::files::{BuildFiles, ProjectMode}; -use fe_common::{impl_intern_key, FileKind, SourceFileId}; -use fe_parser::ast::GenericParameter; -use fe_parser::node::{Node, Span}; -use fe_parser::{ast, node::NodeId}; +use crate::{ + builtins, + constants::{EMITTABLE_TRAIT_NAME, INDEXED}, + context::{self, Analysis, Constant, NamedThing}, + display::{DisplayWithDb, Displayable}, + errors::{self, ConstEvalError, IncompleteItem, TypeError}, + namespace::types::{self, GenericType, Type, TypeId}, + traversal::pragma::check_pragma_version, + AnalyzerDb, +}; +use fe_common::{ + diagnostics::{Diagnostic, Label}, + files::{common_prefix, Utf8Path}, + impl_intern_key, + utils::files::{BuildFiles, ProjectMode}, + FileKind, SourceFileId, +}; +use fe_parser::{ + ast, + ast::GenericParameter, + node::{Node, NodeId, Span}, +}; use indexmap::{indexmap, IndexMap}; use smallvec::SmallVec; use smol_str::SmolStr; -use std::rc::Rc; -use std::{fmt, ops::Deref}; +use std::{fmt, ops::Deref, rc::Rc}; use strum::IntoEnumIterator; use super::types::TraitOrType; diff --git a/crates/analyzer/src/namespace/scopes.rs 
b/crates/analyzer/src/namespace/scopes.rs index 6fc7eb0aba..48e83ff88a 100644 --- a/crates/analyzer/src/namespace/scopes.rs +++ b/crates/analyzer/src/namespace/scopes.rs @@ -1,21 +1,26 @@ #![allow(unstable_name_collisions)] // expect_none, which ain't gonna be stabilized -use crate::context::{ - AnalyzerContext, CallType, Constant, ExpressionAttributes, FunctionBody, NamedThing, +use crate::{ + context::{ + AnalyzerContext, CallType, Constant, ExpressionAttributes, FunctionBody, NamedThing, + }, + errors::{AlreadyDefined, FatalError, IncompleteItem, TypeError}, + namespace::{ + items::{FunctionId, Item, ModuleId, TypeDef}, + types::{Type, TypeId}, + }, + pattern_analysis::PatternMatrix, + AnalyzerDb, +}; +use fe_common::{diagnostics::Diagnostic, Span}; +use fe_parser::{ + ast, + ast::Expr, + node::{Node, NodeId}, + Label, }; -use crate::errors::{AlreadyDefined, FatalError, IncompleteItem, TypeError}; -use crate::namespace::items::{FunctionId, ModuleId}; -use crate::namespace::items::{Item, TypeDef}; -use crate::namespace::types::{Type, TypeId}; -use crate::pattern_analysis::PatternMatrix; -use crate::AnalyzerDb; -use fe_common::diagnostics::Diagnostic; -use fe_common::Span; -use fe_parser::{ast, node::NodeId, Label}; -use fe_parser::{ast::Expr, node::Node}; use indexmap::IndexMap; -use std::cell::RefCell; -use std::collections::BTreeMap; +use std::{cell::RefCell, collections::BTreeMap}; pub struct ItemScope<'a> { db: &'a dyn AnalyzerDb, @@ -34,7 +39,7 @@ impl<'a> ItemScope<'a> { } } -impl<'a> AnalyzerContext for ItemScope<'a> { +impl AnalyzerContext for ItemScope<'_> { fn db(&self) -> &dyn AnalyzerDb { self.db } @@ -230,7 +235,7 @@ impl<'a> FunctionScope<'a> { } } -impl<'a> AnalyzerContext for FunctionScope<'a> { +impl AnalyzerContext for FunctionScope<'_> { fn db(&self) -> &dyn AnalyzerDb { self.db } @@ -546,7 +551,7 @@ impl AnalyzerContext for BlockScope<'_, '_> { } fn inherits_type(&self, typ: BlockScopeType) -> bool { - self.typ == typ || 
self.parent.map_or(false, |scope| scope.inherits_type(typ)) + self.typ == typ || self.parent.is_some_and(|scope| scope.inherits_type(typ)) } fn resolve_path(&self, path: &ast::Path, span: Span) -> Result { diff --git a/crates/analyzer/src/namespace/types.rs b/crates/analyzer/src/namespace/types.rs index 50f02d7023..74dc89cc57 100644 --- a/crates/analyzer/src/namespace/types.rs +++ b/crates/analyzer/src/namespace/types.rs @@ -1,20 +1,18 @@ -use crate::context::AnalyzerContext; -use crate::display::DisplayWithDb; -use crate::display::Displayable; -use crate::errors::TypeError; -use crate::namespace::items::{ - ContractId, EnumId, FunctionId, FunctionSigId, ImplId, Item, StructId, TraitId, +use crate::{ + context::AnalyzerContext, + display::{DisplayWithDb, Displayable}, + errors::TypeError, + namespace::items::{ + ContractId, EnumId, FunctionId, FunctionSigId, ImplId, Item, StructId, TraitId, + }, + AnalyzerDb, }; -use crate::AnalyzerDb; -use fe_common::impl_intern_key; -use fe_common::Span; +use fe_common::{impl_intern_key, Span}; use num_bigint::BigInt; use num_traits::ToPrimitive; use smol_str::SmolStr; -use std::fmt; -use std::rc::Rc; -use std::str::FromStr; +use std::{fmt, rc::Rc, str::FromStr}; use strum::{AsRefStr, EnumIter, EnumString}; pub fn u256_min() -> BigInt { @@ -189,9 +187,10 @@ impl TypeId { db.impl_for(*self, trait_) } - /// Looks up all possible candidates of the given function name that are implemented via traits. - /// Groups results in two lists, the first contains all theoretical possible candidates and - /// the second contains only those that are actually callable because the trait is in scope. + /// Looks up all possible candidates of the given function name that are + /// implemented via traits. Groups results in two lists, the first + /// contains all theoretical possible candidates and the second contains + /// only those that are actually callable because the trait is in scope. 
pub fn trait_function_candidates( &self, context: &mut dyn AnalyzerContext, @@ -222,8 +221,8 @@ impl TypeId { (candidates, in_scope_candidates) } - /// Signature for the function with the given name defined directly on the type. - /// Does not consider trait impls. + /// Signature for the function with the given name defined directly on the + /// type. Does not consider trait impls. pub fn function_sig(&self, db: &dyn AnalyzerDb, name: &str) -> Option { match self.typ(db) { Type::SPtr(inner) => inner.function_sig(db, name), diff --git a/crates/analyzer/src/operations.rs b/crates/analyzer/src/operations.rs index 31305ad854..59fe01d214 100644 --- a/crates/analyzer/src/operations.rs +++ b/crates/analyzer/src/operations.rs @@ -1,6 +1,8 @@ -use crate::context::AnalyzerContext; -use crate::errors::{BinaryOperationError, IndexingError}; -use crate::namespace::types::{Array, Integer, Map, TraitOrType, Type, TypeDowncast, TypeId}; +use crate::{ + context::AnalyzerContext, + errors::{BinaryOperationError, IndexingError}, + namespace::types::{Array, Integer, Map, TraitOrType, Type, TypeDowncast, TypeId}, +}; use crate::traversal::types::{deref_type, try_coerce_type}; use fe_parser::{ast as fe, node::Node}; diff --git a/crates/analyzer/src/traversal/assignments.rs b/crates/analyzer/src/traversal/assignments.rs index 973fa5649a..3ef9ce205e 100644 --- a/crates/analyzer/src/traversal/assignments.rs +++ b/crates/analyzer/src/traversal/assignments.rs @@ -1,13 +1,18 @@ -use crate::context::{AnalyzerContext, DiagnosticVoucher, NamedThing}; -use crate::errors::FatalError; -use crate::namespace::scopes::BlockScope; -use crate::namespace::types::{Type, TypeId}; -use crate::operations; -use crate::traversal::expressions; -use crate::traversal::utils::add_bin_operations_errors; +use crate::{ + context::{AnalyzerContext, DiagnosticVoucher, NamedThing}, + errors::FatalError, + namespace::{ + scopes::BlockScope, + types::{Type, TypeId}, + }, + operations, + traversal::{expressions, 
utils::add_bin_operations_errors}, +}; use fe_common::diagnostics::Label; -use fe_parser::ast as fe; -use fe_parser::node::{Node, Span}; +use fe_parser::{ + ast as fe, + node::{Node, Span}, +}; use smol_str::SmolStr; /// Gather context information for assignments and check for type errors. diff --git a/crates/analyzer/src/traversal/borrowck.rs b/crates/analyzer/src/traversal/borrowck.rs index 4209398642..3c03c53fa1 100644 --- a/crates/analyzer/src/traversal/borrowck.rs +++ b/crates/analyzer/src/traversal/borrowck.rs @@ -1,9 +1,13 @@ use super::call_args::LabeledParameter; -use crate::context::{AnalyzerContext, NamedThing}; -use crate::namespace::types::{Type, TypeId}; +use crate::{ + context::{AnalyzerContext, NamedThing}, + namespace::types::{Type, TypeId}, +}; use fe_common::diagnostics::Label; -use fe_parser::ast; -use fe_parser::node::{Node, Span}; +use fe_parser::{ + ast, + node::{Node, Span}, +}; use smallvec::{smallvec, SmallVec}; // NOTE: This is a temporary solution to the only borrowing bug that's possible diff --git a/crates/analyzer/src/traversal/call_args.rs b/crates/analyzer/src/traversal/call_args.rs index 9df034fdff..76bf7c27cf 100644 --- a/crates/analyzer/src/traversal/call_args.rs +++ b/crates/analyzer/src/traversal/call_args.rs @@ -1,13 +1,15 @@ -use super::expressions::{expr, expr_type}; -use super::types::try_coerce_type; -use crate::context::{AnalyzerContext, DiagnosticVoucher}; -use crate::display::Displayable; -use crate::errors::{self, FatalError, TypeCoercionError, TypeError}; -use crate::namespace::types::{FunctionParam, Generic, Type, TypeId}; -use fe_common::{diagnostics::Label, utils::humanize::pluralize_conditionally}; -use fe_common::{Span, Spanned}; -use fe_parser::ast as fe; -use fe_parser::node::Node; +use super::{ + expressions::{expr, expr_type}, + types::try_coerce_type, +}; +use crate::{ + context::{AnalyzerContext, DiagnosticVoucher}, + display::Displayable, + errors::{self, FatalError, TypeCoercionError, TypeError}, + 
namespace::types::{FunctionParam, Generic, Type, TypeId}, +}; +use fe_common::{diagnostics::Label, utils::humanize::pluralize_conditionally, Span, Spanned}; +use fe_parser::{ast as fe, node::Node}; use smol_str::SmolStr; pub trait LabeledParameter { diff --git a/crates/analyzer/src/traversal/declarations.rs b/crates/analyzer/src/traversal/declarations.rs index 1a57338411..395f8cb1a5 100644 --- a/crates/analyzer/src/traversal/declarations.rs +++ b/crates/analyzer/src/traversal/declarations.rs @@ -1,12 +1,15 @@ -use crate::context::AnalyzerContext; -use crate::display::Displayable; -use crate::errors::{self, FatalError, TypeCoercionError}; -use crate::namespace::scopes::BlockScope; -use crate::namespace::types::{Type, TypeId}; -use crate::traversal::{const_expr, expressions, types}; +use crate::{ + context::AnalyzerContext, + display::Displayable, + errors::{self, FatalError, TypeCoercionError}, + namespace::{ + scopes::BlockScope, + types::{Type, TypeId}, + }, + traversal::{const_expr, expressions, types}, +}; use fe_common::{diagnostics::Label, utils::humanize::pluralize_conditionally}; -use fe_parser::ast as fe; -use fe_parser::node::Node; +use fe_parser::{ast as fe, node::Node}; /// Gather context information for var declarations and check for type errors. 
pub fn var_decl(scope: &mut BlockScope, stmt: &Node) -> Result<(), FatalError> { diff --git a/crates/analyzer/src/traversal/expressions.rs b/crates/analyzer/src/traversal/expressions.rs index 97ee3a7446..64d1aa6c72 100644 --- a/crates/analyzer/src/traversal/expressions.rs +++ b/crates/analyzer/src/traversal/expressions.rs @@ -1,33 +1,35 @@ -use super::borrowck; -use crate::builtins::{ContractTypeMethod, GlobalFunction, Intrinsic, ValueMethod}; -use crate::context::{AnalyzerContext, CallType, Constant, ExpressionAttributes, NamedThing}; -use crate::display::Displayable; -use crate::errors::{self, FatalError, IndexingError, TypeCoercionError}; -use crate::namespace::items::{ - EnumVariantId, EnumVariantKind, FunctionId, FunctionSigId, ImplId, Item, StructId, TypeDef, -}; -use crate::namespace::scopes::{check_visibility, BlockScopeType}; -use crate::namespace::types::{ - self, Array, Base, FeString, Integer, TraitOrType, Tuple, Type, TypeDowncast, TypeId, -}; -use crate::operations; -use crate::traversal::call_args::{validate_arg_count, validate_named_args}; -use crate::traversal::const_expr::eval_expr; -use crate::traversal::types::{ - apply_generic_type_args, deref_type, try_cast_type, try_coerce_type, -}; -use crate::traversal::utils::add_bin_operations_errors; +use std::{ops::RangeInclusive, str::FromStr}; -use fe_common::diagnostics::Label; -use fe_common::{numeric, Span}; -use fe_parser::ast as fe; -use fe_parser::ast::GenericArg; -use fe_parser::node::Node; +use fe_common::{diagnostics::Label, numeric, Span}; +use fe_parser::{ast as fe, ast::GenericArg, node::Node}; use num_bigint::BigInt; use num_traits::{ToPrimitive, Zero}; use smol_str::SmolStr; -use std::ops::RangeInclusive; -use std::str::FromStr; + +use super::borrowck; +use crate::{ + builtins::{ContractTypeMethod, GlobalFunction, Intrinsic, ValueMethod}, + context::{AnalyzerContext, CallType, Constant, ExpressionAttributes, NamedThing}, + display::Displayable, + errors::{self, FatalError, IndexingError, 
TypeCoercionError}, + namespace::{ + items::{ + EnumVariantId, EnumVariantKind, FunctionId, FunctionSigId, ImplId, Item, StructId, + TypeDef, + }, + scopes::{check_visibility, BlockScopeType}, + types::{ + self, Array, Base, FeString, Integer, TraitOrType, Tuple, Type, TypeDowncast, TypeId, + }, + }, + operations, + traversal::{ + call_args::{validate_arg_count, validate_named_args}, + const_expr::eval_expr, + types::{apply_generic_type_args, deref_type, try_cast_type, try_coerce_type}, + utils::add_bin_operations_errors, + }, +}; // TODO: don't fail fatally if expected type is provided @@ -573,7 +575,7 @@ fn expr_num( .and_then(|id| id.deref(context.db()).as_int(context.db())) .unwrap_or(Integer::U256); validate_numeric_literal_fits_type(context, num, exp.span, int_typ); - return ExpressionAttributes::new(TypeId::int(context.db(), int_typ)); + ExpressionAttributes::new(TypeId::int(context.db(), int_typ)) } fn expr_subscript( @@ -823,7 +825,7 @@ fn expr_unary_operation( ); }; - return match op.kind { + match op.kind { fe::UnaryOperator::USub => { let expected_int_type = expected_type .and_then(|id| id.as_int(context.db())) @@ -864,7 +866,7 @@ fn expr_unary_operation( Ok(ExpressionAttributes::new(operand_ty)) } - }; + } } fn expr_call( @@ -1002,8 +1004,8 @@ fn expr_call_path( validate_has_no_conflicting_trait_in_scope(context, &named_thing, path, func)?; expr_call_named_thing(context, named_thing, func, generic_args, args) } - // If we we can't resolve a call to a path e.g. `foo::Bar::do_thing()` there is a chance that `do_thing` - // still exists as as a trait associated function for `foo::Bar`. + // If we we can't resolve a call to a path e.g. `foo::Bar::do_thing()` there is a chance + // that `do_thing` still exists as as a trait associated function for `foo::Bar`. 
None => expr_call_trait_associated_function(context, path, func, generic_args, args), } } @@ -1100,7 +1102,8 @@ fn expr_call_trait_associated_function( .into(), ], ); - // We arbitrarily carry on with the first candidate since the error doesn't need to be fatal + // We arbitrarily carry on with the first candidate since the error doesn't need + // to be fatal let (fun, _) = in_scope_candidates[0]; return expr_call_pure(context, fun, func.span, generic_args, args); } else if in_scope_candidates.is_empty() && !candidates.is_empty() { @@ -1115,7 +1118,8 @@ fn expr_call_trait_associated_function( }).collect(), vec!["Hint: Bring one of these candidates in scope via `use module_name::trait_name`".into()], ); - // We arbitrarily carry on with an applicable candidate since the error doesn't need to be fatal + // We arbitrarily carry on with an applicable candidate since the error doesn't + // need to be fatal let (fun, _) = candidates[0]; return expr_call_pure(context, fun, func.span, generic_args, args); } else if in_scope_candidates.len() == 1 { @@ -1124,8 +1128,8 @@ fn expr_call_trait_associated_function( } } - // At this point, we will have an error so we run `resolve_path` to register any errors that we - // did not report yet + // At this point, we will have an error so we run `resolve_path` to register any + // errors that we did not report yet context.resolve_path(path, func.span)?; Err(FatalError::new(context.error( @@ -1712,13 +1716,13 @@ fn expr_call_method( vec!["Hint: rename one of the methods to disambiguate".into()], ); let return_type = first.signature(context.db()).return_type.clone()?; - return Ok(( + Ok(( ExpressionAttributes::new(return_type), CallType::ValueMethod { typ: obj_type, method: first.function(context.db()).unwrap(), }, - )); + )) } } } diff --git a/crates/analyzer/src/traversal/functions.rs b/crates/analyzer/src/traversal/functions.rs index 2a4781d31d..9e5fbe2b0e 100644 --- a/crates/analyzer/src/traversal/functions.rs +++ 
b/crates/analyzer/src/traversal/functions.rs @@ -1,16 +1,21 @@ -use crate::context::{AnalyzerContext, ExpressionAttributes, NamedThing}; -use crate::display::Displayable; -use crate::errors::{self, FatalError, TypeCoercionError}; -use crate::namespace::items::{EnumVariantId, EnumVariantKind, Item, StructId, TypeDef}; -use crate::namespace::scopes::{BlockScope, BlockScopeType}; -use crate::namespace::types::{Type, TypeId}; -use crate::pattern_analysis::PatternMatrix; -use crate::traversal::{assignments, declarations, expressions, types}; +use crate::{ + context::{AnalyzerContext, ExpressionAttributes, NamedThing}, + display::Displayable, + errors::{self, FatalError, TypeCoercionError}, + namespace::{ + items::{EnumVariantId, EnumVariantKind, Item, StructId, TypeDef}, + scopes::{BlockScope, BlockScopeType}, + types::{Type, TypeId}, + }, + pattern_analysis::PatternMatrix, + traversal::{assignments, declarations, expressions, types}, +}; use fe_common::diagnostics::Label; -use fe_parser::ast::{self as fe, LiteralPattern, Pattern}; -use fe_parser::node::{Node, Span}; -use indexmap::map::Entry; -use indexmap::{IndexMap, IndexSet}; +use fe_parser::{ + ast::{self as fe, LiteralPattern, Pattern}, + node::{Node, Span}, +}; +use indexmap::{map::Entry, IndexMap, IndexSet}; use smol_str::SmolStr; use super::matching_anomaly; diff --git a/crates/analyzer/src/traversal/matching_anomaly.rs b/crates/analyzer/src/traversal/matching_anomaly.rs index c751d66a68..85e716cdf6 100644 --- a/crates/analyzer/src/traversal/matching_anomaly.rs +++ b/crates/analyzer/src/traversal/matching_anomaly.rs @@ -3,6 +3,7 @@ use std::fmt::Write; use fe_common::Span; use fe_parser::{ast::MatchArm, node::Node, Label}; +use super::pattern_analysis::{PatternMatrix, SimplifiedPattern}; use crate::{ context::AnalyzerContext, display::Displayable, @@ -11,8 +12,6 @@ use crate::{ AnalyzerDb, }; -use super::pattern_analysis::{PatternMatrix, SimplifiedPattern}; - pub(super) fn check_match_exhaustiveness( scope: 
&mut BlockScope, arms: &[Node], diff --git a/crates/analyzer/src/traversal/pragma.rs b/crates/analyzer/src/traversal/pragma.rs index b074784f89..cb0842f824 100644 --- a/crates/analyzer/src/traversal/pragma.rs +++ b/crates/analyzer/src/traversal/pragma.rs @@ -1,7 +1,6 @@ use crate::errors; use fe_common::diagnostics::{Diagnostic, Label}; -use fe_parser::ast; -use fe_parser::node::Node; +use fe_parser::{ast, node::Node}; use semver::{Version, VersionReq}; pub fn check_pragma_version(stmt: &Node) -> Option { diff --git a/crates/analyzer/src/traversal/types.rs b/crates/analyzer/src/traversal/types.rs index 08d33a14a8..4db1316629 100644 --- a/crates/analyzer/src/traversal/types.rs +++ b/crates/analyzer/src/traversal/types.rs @@ -1,25 +1,30 @@ -use crate::builtins::ValueMethod; -use crate::context::{ - Adjustment, AdjustmentKind, AnalyzerContext, CallType, Constant, ExpressionAttributes, - NamedThing, +use crate::{ + builtins::ValueMethod, + context::{ + Adjustment, AdjustmentKind, AnalyzerContext, CallType, Constant, ExpressionAttributes, + NamedThing, + }, + display::Displayable, + errors::{TypeCoercionError, TypeError}, + namespace::{ + items::{Item, TraitId}, + types::{ + Base, FeString, GenericArg, GenericParamKind, GenericType, Integer, TraitOrType, Tuple, + Type, TypeId, + }, + }, + traversal::call_args::validate_arg_count, }; -use crate::display::Displayable; -use crate::errors::{TypeCoercionError, TypeError}; -use crate::namespace::items::{Item, TraitId}; -use crate::namespace::types::{ - Base, FeString, GenericArg, GenericParamKind, GenericType, Integer, TraitOrType, Tuple, Type, - TypeId, +use fe_common::{diagnostics::Label, utils::humanize::pluralize_conditionally, Spanned}; +use fe_parser::{ + ast, + node::{Node, Span}, }; -use crate::traversal::call_args::validate_arg_count; -use fe_common::diagnostics::Label; -use fe_common::utils::humanize::pluralize_conditionally; -use fe_common::Spanned; -use fe_parser::ast; -use fe_parser::node::{Node, Span}; use 
std::cmp::Ordering; -/// Try to perform an explicit type cast, eg `u256(my_address)` or `address(my_contract)`. -/// Returns nothing. Emits an error if the cast fails; explicit cast failures are not fatal. +/// Try to perform an explicit type cast, eg `u256(my_address)` or +/// `address(my_contract)`. Returns nothing. Emits an error if the cast fails; +/// explicit cast failures are not fatal. pub fn try_cast_type( context: &mut dyn AnalyzerContext, from: TypeId, @@ -259,10 +264,9 @@ fn coerce( && elts .iter() .zip(ftup.items.iter().zip(itup.items.iter())) - .map(|(elt, (from, into))| { + .all(|(elt, (from, into))| { try_coerce_type(context, Some(elt), *from, *into, should_copy).is_ok() }) - .all(|x| x) { // Update the type of the rhs tuple, because its elements // have been coerced into the lhs element types. @@ -570,7 +574,6 @@ pub fn type_desc( if let Some(val) = self_type { Ok(Type::SelfType(val).id(context.db())) } else { - dbg!("Reporting error"); Err(TypeError::new(context.error( "`Self` can not be used here", desc.span, diff --git a/crates/analyzer/src/traversal/utils.rs b/crates/analyzer/src/traversal/utils.rs index cd5af63d91..1b415767c2 100644 --- a/crates/analyzer/src/traversal/utils.rs +++ b/crates/analyzer/src/traversal/utils.rs @@ -1,11 +1,12 @@ -use fe_common::diagnostics::Label; -use fe_common::Span; +use fe_common::{diagnostics::Label, Span}; -use crate::context::{AnalyzerContext, DiagnosticVoucher}; -use crate::display::Displayable; -use crate::errors::BinaryOperationError; -use crate::namespace::types::TypeId; -use crate::AnalyzerDb; +use crate::{ + context::{AnalyzerContext, DiagnosticVoucher}, + display::Displayable, + errors::BinaryOperationError, + namespace::types::TypeId, + AnalyzerDb, +}; use std::fmt::Display; fn type_label(db: &dyn AnalyzerDb, span: Span, typ: TypeId) -> Label { diff --git a/crates/analyzer/tests/analysis.rs b/crates/analyzer/tests/analysis.rs index 2b416ce83a..df04897f6f 100644 --- a/crates/analyzer/tests/analysis.rs 
+++ b/crates/analyzer/tests/analysis.rs @@ -1,15 +1,21 @@ -use fe_analyzer::display::Displayable; -use fe_analyzer::namespace::items::{self, IngotId, IngotMode, Item, ModuleId, TypeDef}; -use fe_analyzer::{AnalyzerDb, TestDb}; -use fe_common::diagnostics::{diagnostics_string, print_diagnostics, Diagnostic, Label, Severity}; -use fe_common::files::{FileKind, Utf8Path}; -use fe_common::utils::files::BuildFiles; +use fe_analyzer::{ + display::Displayable, + namespace::items::{self, IngotId, IngotMode, Item, ModuleId, TypeDef}, + AnalyzerDb, TestDb, +}; +use fe_common::{ + diagnostics::{diagnostics_string, print_diagnostics, Diagnostic, Label, Severity}, + files::{FileKind, Utf8Path}, + utils::files::BuildFiles, +}; use fe_parser::node::{NodeId, Span}; use indexmap::IndexMap; use insta::assert_snapshot; use smallvec::SmallVec; -use std::collections::{HashMap, VecDeque}; -use std::fmt::Display; +use std::{ + collections::{HashMap, VecDeque}, + fmt::Display, +}; use wasm_bindgen_test::wasm_bindgen_test; #[test] @@ -161,7 +167,7 @@ impl<'a> ModuleIter<'a> { } } -impl<'a> Iterator for ModuleIter<'a> { +impl Iterator for ModuleIter<'_> { type Item = ModuleId; fn next(&mut self) -> Option { if let Some(modid) = self.emit.pop_front() { diff --git a/crates/analyzer/tests/errors.rs b/crates/analyzer/tests/errors.rs index 502bcc36d0..b2d7ab5393 100644 --- a/crates/analyzer/tests/errors.rs +++ b/crates/analyzer/tests/errors.rs @@ -1,9 +1,10 @@ //! 
Tests for contracts that should cause compile errors -use fe_analyzer::namespace::items::{IngotId, ModuleId}; -use fe_analyzer::TestDb; -use fe_common::diagnostics::diagnostics_string; -use fe_common::utils::files::BuildFiles; +use fe_analyzer::{ + namespace::items::{IngotId, ModuleId}, + TestDb, +}; +use fe_common::{diagnostics::diagnostics_string, utils::files::BuildFiles}; use insta::assert_snapshot; use wasm_bindgen_test::wasm_bindgen_test; diff --git a/crates/codegen/src/yul/isel/function.rs b/crates/codegen/src/yul/isel/function.rs index 9170ce2904..78eaecce2a 100644 --- a/crates/codegen/src/yul/isel/function.rs +++ b/crates/codegen/src/yul/isel/function.rs @@ -26,10 +26,10 @@ use yultsur::{ use crate::{ db::CodegenDb, - yul::isel::inst_order::StructuralInst, - yul::slot_size::{function_hash_type, yul_primitive_type, SLOT_SIZE}, yul::{ + isel::inst_order::StructuralInst, runtime::{self, RuntimeProvider}, + slot_size::{function_hash_type, yul_primitive_type, SLOT_SIZE}, YulVariable, }, }; diff --git a/crates/codegen/src/yul/isel/inst_order.rs b/crates/codegen/src/yul/isel/inst_order.rs index afc82f0016..b4bdf32a0e 100644 --- a/crates/codegen/src/yul/isel/inst_order.rs +++ b/crates/codegen/src/yul/isel/inst_order.rs @@ -464,13 +464,13 @@ impl<'a> InstSerializer<'a> { /// /// The scoring function `F` is defined as follows: /// 1. The initial score of each candidate('cand_bb`) is number of - /// predecessors of the candidate. + /// predecessors of the candidate. /// /// 2. Find the `top_cand` of each `cand_bb`. `top_cand` can be found by - /// [`Self::try_find_top_cand`] method, see the method for details. + /// [`Self::try_find_top_cand`] method, see the method for details. /// /// 3. If `top_cand` is found, then add the `cand_bb` score to the - /// `top_cand` score, then set 0 to the `cand_bb` score. + /// `top_cand` score, then set 0 to the `cand_bb` score. /// /// After the scoring, the candidates with the highest score will be /// selected. 
@@ -516,16 +516,16 @@ impl<'a> InstSerializer<'a> { /// A `top_cand` can be found by the following rules: /// /// 1. Find the block which is contained in DF of `cand_bb` and in - /// `cands_with_score`. + /// `cands_with_score`. /// /// 2. If a block is found in 1., and the score of the block is positive, - /// then the block is `top_cand`. + /// then the block is `top_cand`. /// /// 2'. If a block is found in 1., and the score of the block is 0, then the - /// `top_cand` of the block is `top_cand` of `cand_bb`. + /// `top_cand` of the block is `top_cand` of `cand_bb`. /// /// 2''. If a block is NOT found in 1., then there is no `top_cand` for - /// `cand_bb`. + /// `cand_bb`. fn try_find_top_cand( &self, cands_with_score: &IndexMap, diff --git a/crates/codegen/src/yul/runtime/revert.rs b/crates/codegen/src/yul/runtime/revert.rs index e884755b8e..1f8fa9eb7b 100644 --- a/crates/codegen/src/yul/runtime/revert.rs +++ b/crates/codegen/src/yul/runtime/revert.rs @@ -76,7 +76,8 @@ fn type_signature_for_revert(db: &dyn CodegenDb, name: &str, ty: TypeId) -> yul: } }; - // selector and state mutability is independent we can set has_self and has_ctx any value. + // selector and state mutability is independent we can set has_self and has_ctx + // any value. 
let selector = AbiFunction::new( AbiFunctionType::Function, name.to_string(), diff --git a/crates/common/src/db.rs b/crates/common/src/db.rs index 047aadc11e..51a9afceb0 100644 --- a/crates/common/src/db.rs +++ b/crates/common/src/db.rs @@ -18,7 +18,8 @@ pub trait SourceDb { #[salsa::interned] fn intern_file(&self, file: File) -> SourceFileId; - /// Set with `fn set_file_content(&mut self, file: SourceFileId, content: Rc) + /// Set with `fn set_file_content(&mut self, file: SourceFileId, content: + /// Rc) #[salsa::input] fn file_content(&self, file: SourceFileId) -> Rc; diff --git a/crates/common/src/diagnostics.rs b/crates/common/src/diagnostics.rs index f893a2a22e..55396a31fe 100644 --- a/crates/common/src/diagnostics.rs +++ b/crates/common/src/diagnostics.rs @@ -1,12 +1,12 @@ -use crate::db::SourceDb; -use crate::files::{SourceFileId, Utf8PathBuf}; -use crate::Span; +use crate::{ + db::SourceDb, + files::{SourceFileId, Utf8PathBuf}, + Span, +}; pub use codespan_reporting::diagnostic as cs; -use codespan_reporting::files::Error as CsError; -use codespan_reporting::term; +use codespan_reporting::{files::Error as CsError, term}; pub use cs::Severity; -use std::ops::Range; -use std::rc::Rc; +use std::{ops::Range, rc::Rc}; use term::termcolor::{BufferWriter, ColorChoice}; #[derive(Debug, PartialEq, Eq, Hash, Clone)] @@ -118,7 +118,7 @@ pub fn diagnostics_string(db: &dyn SourceDb, diagnostics: &[Diagnostic]) -> Stri struct SourceDbWrapper<'a>(pub &'a dyn SourceDb); -impl<'a> codespan_reporting::files::Files<'_> for SourceDbWrapper<'a> { +impl codespan_reporting::files::Files<'_> for SourceDbWrapper<'_> { type FileId = SourceFileId; type Name = Rc; type Source = Rc; diff --git a/crates/common/src/files.rs b/crates/common/src/files.rs index 5cb18269da..c56657fa92 100644 --- a/crates/common/src/files.rs +++ b/crates/common/src/files.rs @@ -1,8 +1,7 @@ use crate::db::SourceDb; pub use camino::{Utf8Component, Utf8Path, Utf8PathBuf}; pub use fe_library::include_dir; -use 
std::ops::Range; -use std::rc::Rc; +use std::{ops::Range, rc::Rc}; // NOTE: all file paths are stored as utf8 strings. // Non-utf8 paths (for user code) should be reported diff --git a/crates/common/src/numeric.rs b/crates/common/src/numeric.rs index 77e83a4801..9cded623fb 100644 --- a/crates/common/src/numeric.rs +++ b/crates/common/src/numeric.rs @@ -62,7 +62,8 @@ impl<'a> Literal<'a> { } } -// Converts any positive or negative `BigInt` into a hex str using 2s complement representation for negative values. +// Converts any positive or negative `BigInt` into a hex str using 2s complement +// representation for negative values. pub fn to_hex_str(val: &BigInt) -> String { format!( "0x{}", diff --git a/crates/common/src/panic.rs b/crates/common/src/panic.rs index 49848c6f56..dd645cfce9 100644 --- a/crates/common/src/panic.rs +++ b/crates/common/src/panic.rs @@ -2,7 +2,7 @@ use once_cell::sync::Lazy; use std::panic; const BUG_REPORT_URL: &str = "https://github.com/ethereum/fe/issues/new"; -type PanicCallback = dyn Fn(&panic::PanicInfo<'_>) + Sync + Send + 'static; +type PanicCallback = dyn Fn(&panic::PanicHookInfo<'_>) + Sync + Send + 'static; static DEFAULT_PANIC_HOOK: Lazy> = Lazy::new(|| { let hook = panic::take_hook(); panic::set_hook(Box::new(report_ice)); @@ -12,7 +12,7 @@ static DEFAULT_PANIC_HOOK: Lazy> = Lazy::new(|| { pub fn install_panic_hook() { Lazy::force(&DEFAULT_PANIC_HOOK); } -fn report_ice(info: &panic::PanicInfo) { +fn report_ice(info: &panic::PanicHookInfo) { (*DEFAULT_PANIC_HOOK)(info); eprintln!(); diff --git a/crates/common/src/span.rs b/crates/common/src/span.rs index 5d1c34be97..6092b388e7 100644 --- a/crates/common/src/span.rs +++ b/crates/common/src/span.rs @@ -1,8 +1,10 @@ use crate::files::SourceFileId; use serde::{Deserialize, Serialize}; -use std::cmp; -use std::fmt::{Debug, Formatter}; -use std::ops::{Add, AddAssign, Range}; +use std::{ + cmp, + fmt::{Debug, Formatter}, + ops::{Add, AddAssign, Range}, +}; /// An exclusive span 
of byte offsets in a source file. #[derive(Serialize, Deserialize, PartialEq, Copy, Clone, Hash, Eq)] diff --git a/crates/common2/Cargo.toml b/crates/common2/Cargo.toml new file mode 100644 index 0000000000..57ba0ba484 --- /dev/null +++ b/crates/common2/Cargo.toml @@ -0,0 +1,19 @@ +[package] +name = "fe-common2" +version = "0.26.0" +authors = ["The Fe Developers "] +edition = "2021" +license = "Apache-2.0" +repository = "https://github.com/ethereum/fe" +description = "Provides HIR definition and lowering for Fe lang." + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +semver = "1.0.17" +camino = "1.1.4" +smol_str = "0.1.24" +salsa.workspace = true +indexmap = "2.2" +parser = { path = "../parser2", package = "fe-parser2" } +paste = "1.0.15" diff --git a/crates/common2/src/diagnostics.rs b/crates/common2/src/diagnostics.rs new file mode 100644 index 0000000000..7e6239c641 --- /dev/null +++ b/crates/common2/src/diagnostics.rs @@ -0,0 +1,178 @@ +use std::fmt; + +use parser::TextRange; + +use crate::InputFile; + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct CompleteDiagnostic { + pub severity: Severity, + pub message: String, + pub sub_diagnostics: Vec, + pub notes: Vec, + pub error_code: GlobalErrorCode, +} + +impl CompleteDiagnostic { + pub fn new( + severity: Severity, + message: String, + sub_diagnostics: Vec, + notes: Vec, + error_code: GlobalErrorCode, + ) -> Self { + Self { + severity, + message, + sub_diagnostics, + notes, + error_code, + } + } + + pub fn primary_span(&self) -> Span { + self.sub_diagnostics + .iter() + .find_map(|sub| sub.is_primary().then(|| sub.span.clone().unwrap())) + .unwrap() + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] +pub struct GlobalErrorCode { + pub pass: DiagnosticPass, + pub local_code: u16, +} + +impl GlobalErrorCode { + pub fn new(pass: DiagnosticPass, local_code: u16) -> Self { + Self { pass, local_code } + } +} + 
+impl fmt::Display for GlobalErrorCode { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}-{:04}", self.pass.code(), self.local_code) + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct SubDiagnostic { + pub style: LabelStyle, + pub message: String, + pub span: Option, +} + +impl SubDiagnostic { + pub fn new(style: LabelStyle, message: String, span: Option) -> Self { + Self { + style, + message, + span, + } + } + + pub fn is_primary(&self) -> bool { + matches!(self.style, LabelStyle::Primary) + } +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] +pub enum LabelStyle { + Primary, + Secondary, +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct Span { + pub file: InputFile, + pub range: TextRange, + pub kind: SpanKind, +} + +impl PartialOrd for Span { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} + +impl Ord for Span { + fn cmp(&self, other: &Self) -> std::cmp::Ordering { + match self.file.cmp(&other.file) { + std::cmp::Ordering::Equal => self.range.start().cmp(&other.range.start()), + ord => ord, + } + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum SpanKind { + /// A node corresponding is originally written in the source code. + Original, + + /// A node corresponding to the span is generated by macro expansion. + Expanded, + + /// No span information was found. + /// This happens if analysis code tries to get a span for a node that is + /// generated in lowering phase. + /// + /// If span has this kind, it means there is a bug in the analysis code. + /// The reason not to panic is that LSP should continue working even if + /// there are bugs in the span generation(This also makes easier to identify + /// the cause of the bug) + /// + /// Range is always the first character of the file in this case. 
+ NotFound, +} + +impl Span { + pub fn new(file: InputFile, range: TextRange, kind: SpanKind) -> Self { + Self { file, range, kind } + } +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub enum Severity { + Error, + Warning, + Note, +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] +pub enum DiagnosticPass { + Parse, + + NameResolution, + + TypeDefinition, + TraitDefinition, + ImplTraitDefinition, + TraitSatisfaction, + MethodDefinition, + TyCheck, + + ExternalAnalysis(ExternalAnalysisKey), +} + +impl DiagnosticPass { + pub fn code(&self) -> u16 { + match self { + Self::Parse => 1, + Self::NameResolution => 2, + Self::TypeDefinition => 3, + Self::TraitDefinition => 4, + Self::ImplTraitDefinition => 5, + Self::TraitSatisfaction => 6, + Self::MethodDefinition => 7, + Self::TyCheck => 8, + + Self::ExternalAnalysis(_) => u16::MAX, + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] +pub struct ExternalAnalysisKey { + name: String, +} diff --git a/crates/common2/src/indexmap.rs b/crates/common2/src/indexmap.rs new file mode 100644 index 0000000000..5cf9df906d --- /dev/null +++ b/crates/common2/src/indexmap.rs @@ -0,0 +1,229 @@ +use std::{ + hash::{BuildHasher, Hash, RandomState}, + ops::{Deref, DerefMut}, +}; + +use salsa::Update; + +#[derive(Debug, Clone)] +pub struct IndexMap(indexmap::IndexMap); + +impl IndexMap { + pub fn new() -> Self { + Self(indexmap::IndexMap::new()) + } + + pub fn with_capacity(n: usize) -> Self { + Self(indexmap::IndexMap::with_capacity(n)) + } +} + +impl Default for IndexMap { + fn default() -> Self { + Self::new() + } +} + +impl IntoIterator for IndexMap { + type Item = as IntoIterator>::Item; + type IntoIter = as IntoIterator>::IntoIter; + fn into_iter(self) -> Self::IntoIter { + self.0.into_iter() + } +} + +impl<'a, K, V, S> IntoIterator for &'a IndexMap { + type Item = <&'a indexmap::IndexMap as IntoIterator>::Item; + type IntoIter = <&'a indexmap::IndexMap as IntoIterator>::IntoIter; + 
fn into_iter(self) -> Self::IntoIter { + (&self.0).into_iter() + } +} + +impl<'a, K, V, S> IntoIterator for &'a mut IndexMap { + type Item = <&'a mut indexmap::IndexMap as IntoIterator>::Item; + type IntoIter = <&'a mut indexmap::IndexMap as IntoIterator>::IntoIter; + fn into_iter(self) -> Self::IntoIter { + (&mut self.0).into_iter() + } +} + +impl PartialEq for IndexMap +where + K: Eq + Hash, + V: PartialEq, + S: BuildHasher, +{ + fn eq(&self, other: &Self) -> bool { + self.0.eq(&other.0) + } +} + +impl Eq for IndexMap +where + K: Eq + Hash, + V: Eq, + S: BuildHasher, +{ +} + +impl FromIterator<(K, V)> for IndexMap +where + K: Hash + Eq, + S: BuildHasher + Default, +{ + fn from_iter>(iter: T) -> Self { + Self(indexmap::IndexMap::from_iter(iter)) + } +} + +impl Deref for IndexMap +where + S: BuildHasher, +{ + type Target = indexmap::IndexMap; + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl DerefMut for IndexMap +where + S: BuildHasher, +{ + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} + +unsafe impl Update for IndexMap +where + K: Update + Eq + Hash, + V: Update, + S: BuildHasher, +{ + unsafe fn maybe_update(old_pointer: *mut Self, new_map: Self) -> bool { + let old_map = unsafe { &mut *old_pointer }; + + // Check if the keys in both maps are the same w.r.t the key order. + let is_key_same = old_map.len() == new_map.len() + && old_map + .keys() + .zip(new_map.keys()) + .all(|(old, new)| old == new); + + // If the keys are different, update entire map. + if !is_key_same { + old_map.clear(); + old_map.0.extend(new_map.0); + return true; + } + + // Update values if it's different. 
+ let mut changed = false; + for (i, new_value) in new_map.0.into_values().enumerate() { + let old_value = &mut old_map[i]; + changed |= V::maybe_update(old_value, new_value); + } + + changed + } +} + +#[derive(Debug, Clone)] +pub struct IndexSet(indexmap::IndexSet); + +impl IndexSet { + pub fn new() -> Self { + Self(indexmap::IndexSet::new()) + } + + pub fn with_capacity(n: usize) -> Self { + Self(indexmap::IndexSet::with_capacity(n)) + } +} + +impl Default for IndexSet { + fn default() -> Self { + Self::new() + } +} + +impl IntoIterator for IndexSet { + type Item = as IntoIterator>::Item; + type IntoIter = as IntoIterator>::IntoIter; + fn into_iter(self) -> Self::IntoIter { + self.0.into_iter() + } +} + +impl<'a, V, S> IntoIterator for &'a IndexSet { + type Item = <&'a indexmap::IndexSet as IntoIterator>::Item; + type IntoIter = <&'a indexmap::IndexSet as IntoIterator>::IntoIter; + fn into_iter(self) -> Self::IntoIter { + (&self.0).into_iter() + } +} + +impl PartialEq for IndexSet +where + V: Hash + Eq, + S: BuildHasher, +{ + fn eq(&self, other: &Self) -> bool { + self.0.eq(&other.0) + } +} + +impl Eq for IndexSet +where + V: Eq + Hash, + S: BuildHasher, +{ +} + +impl FromIterator for IndexSet +where + V: Hash + Eq, + S: BuildHasher + Default, +{ + fn from_iter>(iter: T) -> Self { + Self(indexmap::IndexSet::from_iter(iter)) + } +} + +impl Deref for IndexSet +where + S: BuildHasher, +{ + type Target = indexmap::IndexSet; + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl DerefMut for IndexSet +where + S: BuildHasher, +{ + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} + +unsafe impl Update for IndexSet +where + V: Update + Eq + Hash, + S: BuildHasher, +{ + unsafe fn maybe_update(old_pointer: *mut Self, new_set: Self) -> bool { + let old_set = unsafe { &mut *old_pointer }; + if old_set == &new_set { + false + } else { + old_set.clear(); + old_set.0.extend(new_set.0); + true + } + } +} diff --git a/crates/common2/src/input.rs 
b/crates/common2/src/input.rs new file mode 100644 index 0000000000..da387998f1 --- /dev/null +++ b/crates/common2/src/input.rs @@ -0,0 +1,135 @@ +use camino::Utf8PathBuf; +use salsa::Setter; +use smol_str::SmolStr; + +use crate::{indexmap::IndexSet, InputDb}; + +/// An ingot is a collection of files which are compiled together. +/// Ingot can depend on other ingots. +#[salsa::input(constructor = __new_impl)] +pub struct InputIngot { + /// An absolute path to the ingot root directory. + /// The all files in the ingot should be located under this directory. + #[return_ref] + pub path: Utf8PathBuf, + + /// Specifies the kind of the ingot. + pub kind: IngotKind, + + /// A version of the ingot. + #[return_ref] + pub version: Version, + + /// A list of ingots which the current ingot depends on. + #[return_ref] + pub external_ingots: IndexSet, + + /// A list of files which the current ingot contains. + #[return_ref] + #[set(__set_files_impl)] + pub files: IndexSet, + + #[set(__set_root_file_impl)] + #[get(__get_root_file_impl)] + root_file: Option, +} +impl InputIngot { + pub fn new( + db: &dyn InputDb, + path: &str, + kind: IngotKind, + version: Version, + external_ingots: IndexSet, + ) -> InputIngot { + let path = Utf8PathBuf::from(path); + let root_file = None; + Self::__new_impl( + db, + path, + kind, + version, + external_ingots, + IndexSet::default(), + root_file, + ) + } + + /// Set the root file of the ingot. + /// The root file must be set before the ingot is used. + pub fn set_root_file(self, db: &mut dyn InputDb, file: InputFile) { + self.__set_root_file_impl(db).to(Some(file)); + } + + /// Set the list of files which the ingot contains. + /// All files must bee set before the ingot is used. + pub fn set_files(self, db: &mut dyn InputDb, files: IndexSet) { + self.__set_files_impl(db).to(files); + } + + /// Returns the root file of the ingot. + /// Panics if the root file is not set. 
+ pub fn root_file(&self, db: &dyn InputDb) -> InputFile { + self.__get_root_file_impl(db).unwrap() + } +} + +#[salsa::input] +pub struct InputFile { + /// A path to the file from the ingot root directory. + #[return_ref] + pub path: Utf8PathBuf, + + #[return_ref] + pub text: String, +} + +impl InputFile { + pub fn abs_path(&self, db: &dyn InputDb, ingot: InputIngot) -> Utf8PathBuf { + ingot.path(db).join(self.path(db)) + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum IngotKind { + /// A standalone ingot is a dummy ingot when the compiler is invoked + /// directly on a file. + StandAlone, + + /// A local ingot which is the current ingot being compiled. + Local, + + /// An external ingot which is depended on by the current ingot. + External, + + /// Standard library ingot. + Std, +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct IngotDependency { + /// The ingot may have a alias name from the original ingot name. + pub name: SmolStr, + /// An ingot which the current ingot depends on. + pub ingot: InputIngot, +} +impl IngotDependency { + pub fn new(name: &str, ingot: InputIngot) -> Self { + Self { + name: SmolStr::new(name), + ingot, + } + } +} + +impl PartialOrd for IngotDependency { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.ingot.cmp(&other.ingot)) + } +} +impl Ord for IngotDependency { + fn cmp(&self, other: &Self) -> std::cmp::Ordering { + self.ingot.cmp(&other.ingot) + } +} + +pub type Version = semver::Version; diff --git a/crates/common2/src/lib.rs b/crates/common2/src/lib.rs new file mode 100644 index 0000000000..b275ea94ea --- /dev/null +++ b/crates/common2/src/lib.rs @@ -0,0 +1,33 @@ +pub mod diagnostics; +pub mod indexmap; +pub mod input; +pub use input::{InputFile, InputIngot}; + +#[salsa::db] +pub trait InputDb: salsa::Database { + fn as_input_db(&self) -> &dyn InputDb; +} + +#[doc(hidden)] +pub use paste::paste; + +#[macro_export] +macro_rules! 
impl_db_traits { + ($db_type:ty, $($trait_name:ident),+ $(,)?) => { + #[salsa::db] + impl salsa::Database for $db_type { + fn salsa_event(&self, _event: &dyn Fn() -> salsa::Event) {} + } + + $( + $crate::paste! { + #[salsa::db] + impl $trait_name for $db_type { + fn [](&self) -> &dyn $trait_name { + self + } + } + } + )+ + }; +} diff --git a/crates/driver2/Cargo.toml b/crates/driver2/Cargo.toml new file mode 100644 index 0000000000..bd3f0c2648 --- /dev/null +++ b/crates/driver2/Cargo.toml @@ -0,0 +1,20 @@ +[package] +name = "fe-driver2" +version = "0.26.0" +authors = ["The Fe Developers "] +edition = "2021" +license = "Apache-2.0" +repository = "https://github.com/ethereum/fe" +description = "Provides Fe driver" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +salsa.workspace = true +codespan-reporting = "0.11" + +hir = { path = "../hir", package = "fe-hir" } +common = { path = "../common2", package = "fe-common2" } +hir-analysis = { path = "../hir-analysis", package = "fe-hir-analysis" } +camino = "1.1.4" +clap = { version = "4.3", features = ["derive"] } diff --git a/crates/driver2/src/diagnostics.rs b/crates/driver2/src/diagnostics.rs new file mode 100644 index 0000000000..b0b633749a --- /dev/null +++ b/crates/driver2/src/diagnostics.rs @@ -0,0 +1,126 @@ +use std::ops::Range; + +use camino::Utf8Path; +use codespan_reporting as cs; +use common::{ + diagnostics::{LabelStyle, Severity}, + InputDb, InputFile, +}; +use cs::{diagnostic as cs_diag, files as cs_files}; +use hir::{diagnostics::DiagnosticVoucher, SpannedHirDb}; + +use crate::DriverDb; + +pub trait ToCsDiag { + fn to_cs(&self, db: &dyn SpannedInputDb) -> cs_diag::Diagnostic; +} + +pub trait SpannedInputDb: SpannedHirDb + InputDb {} +impl SpannedInputDb for T where T: SpannedHirDb + InputDb {} + +impl ToCsDiag for T +where + T: for<'db> DiagnosticVoucher<'db>, +{ + fn to_cs(&self, db: &dyn SpannedInputDb) -> cs_diag::Diagnostic { 
+ let complete = self.to_complete(db.as_spanned_hir_db()); + + let severity = convert_severity(complete.severity); + let code = Some(complete.error_code.to_string()); + let message = complete.message; + + let labels = complete + .sub_diagnostics + .into_iter() + .filter_map(|sub_diag| { + let span = sub_diag.span?; + match sub_diag.style { + LabelStyle::Primary => { + cs_diag::Label::new(cs_diag::LabelStyle::Primary, span.file, span.range) + } + LabelStyle::Secondary => { + cs_diag::Label::new(cs_diag::LabelStyle::Secondary, span.file, span.range) + } + } + .with_message(sub_diag.message) + .into() + }) + .collect(); + + cs_diag::Diagnostic { + severity, + code, + message, + labels, + notes: vec![], + } + } +} + +fn convert_severity(severity: Severity) -> cs_diag::Severity { + match severity { + Severity::Error => cs_diag::Severity::Error, + Severity::Warning => cs_diag::Severity::Warning, + Severity::Note => cs_diag::Severity::Note, + } +} + +#[salsa::tracked(return_ref)] +pub fn file_line_starts(db: &dyn DriverDb, file: InputFile) -> Vec { + cs::files::line_starts(file.text(db.as_input_db())).collect() +} + +pub struct CsDbWrapper<'a>(pub &'a dyn DriverDb); + +impl<'db> cs_files::Files<'db> for CsDbWrapper<'db> { + type FileId = InputFile; + type Name = &'db Utf8Path; + type Source = &'db str; + + fn name(&'db self, file_id: Self::FileId) -> Result { + Ok(file_id.path(self.0.as_input_db()).as_path()) + } + + fn source(&'db self, file_id: Self::FileId) -> Result { + Ok(file_id.text(self.0.as_input_db())) + } + + fn line_index( + &'db self, + file_id: Self::FileId, + byte_index: usize, + ) -> Result { + let starts = file_line_starts(self.0, file_id); + Ok(starts + .binary_search(&byte_index) + .unwrap_or_else(|next_line| next_line - 1)) + } + + fn line_range( + &'db self, + file_id: Self::FileId, + line_index: usize, + ) -> Result, cs_files::Error> { + let line_starts = file_line_starts(self.0, file_id); + + let start = *line_starts + .get(line_index) + 
.ok_or(cs_files::Error::LineTooLarge { + given: line_index, + max: line_starts.len() - 1, + })?; + + let end = if line_index == line_starts.len() - 1 { + file_id.text(self.0.as_input_db()).len() + } else { + *line_starts + .get(line_index + 1) + .ok_or(cs_files::Error::LineTooLarge { + given: line_index, + max: line_starts.len() - 1, + })? + }; + + Ok(Range { start, end }) + } +} diff --git a/crates/driver2/src/lib.rs b/crates/driver2/src/lib.rs new file mode 100644 index 0000000000..57173281a8 --- /dev/null +++ b/crates/driver2/src/lib.rs @@ -0,0 +1,149 @@ +pub mod diagnostics; + +use std::path; + +use codespan_reporting::term::{ + self, + termcolor::{BufferWriter, ColorChoice}, +}; +use common::{ + diagnostics::CompleteDiagnostic, + impl_db_traits, + indexmap::IndexSet, + input::{IngotKind, Version}, + InputDb, InputFile, InputIngot, +}; +pub use diagnostics::CsDbWrapper; +use hir::{ + analysis_pass::AnalysisPassManager, diagnostics::DiagnosticVoucher, hir_def::TopLevelMod, + lower::map_file_to_mod, HirDb, LowerHirDb, ParsingPass, SpannedHirDb, +}; +use hir_analysis::{ + name_resolution::{DefConflictAnalysisPass, ImportAnalysisPass, PathAnalysisPass}, + ty::{ + AdtDefAnalysisPass, BodyAnalysisPass, FuncAnalysisPass, ImplAnalysisPass, + ImplTraitAnalysisPass, TraitAnalysisPass, TypeAliasAnalysisPass, + }, + HirAnalysisDb, +}; + +use crate::diagnostics::ToCsDiag; + +#[salsa::db] +pub trait DriverDb: + salsa::Database + HirAnalysisDb + HirDb + LowerHirDb + SpannedHirDb + InputDb +{ + fn as_driver_db(&self) -> &dyn DriverDb; +} + +#[derive(Default, Clone)] +#[salsa::db] +pub struct DriverDataBase { + storage: salsa::Storage, +} +impl_db_traits!( + DriverDataBase, + InputDb, + HirDb, + LowerHirDb, + SpannedHirDb, + HirAnalysisDb, + DriverDb +); + +impl DriverDataBase { + // TODO: An temporary implementation for ui testing. 
+ pub fn run_on_top_mod<'db>(&'db self, top_mod: TopLevelMod<'db>) -> DiagnosticsCollection<'db> { + self.run_on_file_with_pass_manager(top_mod, initialize_analysis_pass) + } + + pub fn run_on_file_with_pass_manager<'db, F>( + &'db self, + top_mod: TopLevelMod<'db>, + pm_builder: F, + ) -> DiagnosticsCollection<'db> + where + F: FnOnce(&'db DriverDataBase) -> AnalysisPassManager<'db>, + { + let mut pass_manager = pm_builder(self); + DiagnosticsCollection(pass_manager.run_on_module(top_mod)) + } + + pub fn standalone(&mut self, file_path: &path::Path, source: &str) -> (InputIngot, InputFile) { + let kind = IngotKind::StandAlone; + + // We set the ingot version to 0.0.0 for stand-alone file. + let version = Version::new(0, 0, 0); + let root_file = file_path; + let ingot = InputIngot::new( + self, + file_path.parent().unwrap().as_os_str().to_str().unwrap(), + kind, + version, + IndexSet::new(), + ); + + let file_name = root_file.file_name().unwrap().to_str().unwrap(); + let input_file = InputFile::new(self, file_name.into(), source.to_string()); + ingot.set_root_file(self, input_file); + ingot.set_files(self, [input_file].into_iter().collect()); + (ingot, input_file) + } + + pub fn top_mod(&self, ingot: InputIngot, input: InputFile) -> TopLevelMod { + map_file_to_mod(self, ingot, input) + } +} + +pub struct DiagnosticsCollection<'db>(Vec + 'db>>); +impl<'db> DiagnosticsCollection<'db> { + pub fn emit(&self, db: &'db DriverDataBase) { + let writer = BufferWriter::stderr(ColorChoice::Auto); + let mut buffer = writer.buffer(); + let config = term::Config::default(); + + for diag in self.finalize(db) { + term::emit(&mut buffer, &config, &CsDbWrapper(db), &diag.to_cs(db)).unwrap(); + } + + eprintln!("{}", std::str::from_utf8(buffer.as_slice()).unwrap()); + } + + /// Format the accumulated diagnostics to a string. 
+ pub fn format_diags(&self, db: &'db DriverDataBase) -> String { + let writer = BufferWriter::stderr(ColorChoice::Never); + let mut buffer = writer.buffer(); + let config = term::Config::default(); + + for diag in self.finalize(db) { + term::emit(&mut buffer, &config, &CsDbWrapper(db), &diag.to_cs(db)).unwrap(); + } + + std::str::from_utf8(buffer.as_slice()).unwrap().to_string() + } + + fn finalize(&self, db: &'db DriverDataBase) -> Vec { + let mut diags: Vec<_> = self.0.iter().map(|d| d.to_complete(db)).collect(); + diags.sort_by(|lhs, rhs| match lhs.error_code.cmp(&rhs.error_code) { + std::cmp::Ordering::Equal => lhs.primary_span().cmp(&rhs.primary_span()), + ord => ord, + }); + diags + } +} + +fn initialize_analysis_pass(db: &DriverDataBase) -> AnalysisPassManager<'_> { + let mut pass_manager = AnalysisPassManager::new(); + pass_manager.add_module_pass(Box::new(ParsingPass::new(db))); + pass_manager.add_module_pass(Box::new(DefConflictAnalysisPass::new(db))); + pass_manager.add_module_pass(Box::new(ImportAnalysisPass::new(db))); + pass_manager.add_module_pass(Box::new(PathAnalysisPass::new(db))); + pass_manager.add_module_pass(Box::new(AdtDefAnalysisPass::new(db))); + pass_manager.add_module_pass(Box::new(TypeAliasAnalysisPass::new(db))); + pass_manager.add_module_pass(Box::new(TraitAnalysisPass::new(db))); + pass_manager.add_module_pass(Box::new(ImplAnalysisPass::new(db))); + pass_manager.add_module_pass(Box::new(ImplTraitAnalysisPass::new(db))); + pass_manager.add_module_pass(Box::new(FuncAnalysisPass::new(db))); + pass_manager.add_module_pass(Box::new(BodyAnalysisPass::new(db))); + pass_manager +} diff --git a/crates/driver2/src/main.rs b/crates/driver2/src/main.rs new file mode 100644 index 0000000000..7f8c727d0f --- /dev/null +++ b/crates/driver2/src/main.rs @@ -0,0 +1,41 @@ +use clap::Parser; +use fe_driver2::DriverDataBase; +use hir::hir_def::TopLevelMod; + +#[derive(Parser, Debug)] +#[command(author, version, about, long_about = None)] +struct Args { + 
/// The file to compile. + #[arg()] + file_path: String, + + /// Dump a graphviz dot file of the scope graph for the given file. + #[arg(long = "dump-scope-graph", default_value_t = false)] + dump_scope_graph: bool, +} + +pub fn main() { + let args = Args::parse(); + let path = std::path::Path::new(&args.file_path); + if !path.exists() { + eprintln!("file '{}' does not exist", args.file_path); + std::process::exit(2); + } + let source = std::fs::read_to_string(&args.file_path).unwrap(); + + let mut db = DriverDataBase::default(); + let (ingot, file) = db.standalone(path, &source); + let top_mod = db.top_mod(ingot, file); + let diags = db.run_on_top_mod(top_mod); + diags.emit(&db); + + if args.dump_scope_graph { + println!("{}", dump_scope_graph(&db, top_mod)); + } +} + +fn dump_scope_graph(db: &DriverDataBase, top_mod: TopLevelMod) -> String { + let mut s = vec![]; + top_mod.scope_graph(db).write_as_dot(db, &mut s).unwrap(); + String::from_utf8(s).unwrap() +} diff --git a/crates/fe/src/task/build.rs b/crates/fe/src/task/build.rs index bbeb5cc991..ed7ce1a4fb 100644 --- a/crates/fe/src/task/build.rs +++ b/crates/fe/src/task/build.rs @@ -1,11 +1,15 @@ -use std::fs; -use std::io::{Error, Write}; -use std::path::Path; +use std::{ + fs, + io::{Error, Write}, + path::Path, +}; use clap::{ArgEnum, Args}; -use fe_common::diagnostics::print_diagnostics; -use fe_common::files::SourceFileId; -use fe_common::utils::files::{get_project_root, BuildFiles, ProjectMode}; +use fe_common::{ + diagnostics::print_diagnostics, + files::SourceFileId, + utils::files::{get_project_root, BuildFiles, ProjectMode}, +}; use fe_driver::CompiledModule; const DEFAULT_OUTPUT_DIR_NAME: &str = "output"; diff --git a/crates/fe/src/task/test.rs b/crates/fe/src/task/test.rs index c3ca5b9ba2..5ad68b5876 100644 --- a/crates/fe/src/task/test.rs +++ b/crates/fe/src/task/test.rs @@ -1,10 +1,11 @@ -#![cfg(feature = "solc-backend")] use std::path::Path; use clap::Args; use colored::Colorize; -use 
fe_common::diagnostics::print_diagnostics; -use fe_common::utils::files::{get_project_root, BuildFiles}; +use fe_common::{ + diagnostics::print_diagnostics, + utils::files::{get_project_root, BuildFiles}, +}; use fe_driver::CompiledTest; use fe_test_runner::TestSink; diff --git a/crates/hir-analysis/Cargo.toml b/crates/hir-analysis/Cargo.toml new file mode 100644 index 0000000000..f409a65e08 --- /dev/null +++ b/crates/hir-analysis/Cargo.toml @@ -0,0 +1,30 @@ +[package] +name = "fe-hir-analysis" +version = "0.26.0" +authors = ["The Fe Developers "] +edition = "2021" +license = "Apache-2.0" +repository = "https://github.com/ethereum/fe" +description = "Provides HIR semantic analysis for Fe lang" + +[dependencies] +salsa.workspace = true +smallvec = "1.10" +rustc-hash = "1.1.0" +either = "1.8" +derive_more = "0.99" +itertools = "0.10" +ena = { version = "0.14", features = ["persistent"] } +fe-compiler-test-utils = { path = "../test-utils" } +num-bigint = "0.4" +if_chain = "1.0" +bitflags = "2.5" +cranelift-entity = "0.91" +hir = { path = "../hir", package = "fe-hir" } +common = { path = "../common2", package = "fe-common2" } + +[dev-dependencies] +codespan-reporting = "0.11" +dir-test = "0.3" +# TODO move cs diagnostics utils +driver = { path = "../driver2", package = "fe-driver2" } diff --git a/crates/hir-analysis/build.rs b/crates/hir-analysis/build.rs new file mode 100644 index 0000000000..8e048f9218 --- /dev/null +++ b/crates/hir-analysis/build.rs @@ -0,0 +1,4 @@ +fn main() { + #[cfg(test)] + println!("cargo:rerun-if-changed=./test_files"); +} diff --git a/crates/hir-analysis/src/lib.rs b/crates/hir-analysis/src/lib.rs new file mode 100644 index 0000000000..da7b83215b --- /dev/null +++ b/crates/hir-analysis/src/lib.rs @@ -0,0 +1,15 @@ +use hir::{span::DynLazySpan, HirDb}; + +#[salsa::db] +pub trait HirAnalysisDb: salsa::Database + HirDb { + fn as_hir_analysis_db(&self) -> &dyn HirAnalysisDb; +} + +pub mod name_resolution; +pub mod ty; + +#[derive(Debug, 
Clone, PartialEq, Eq, Hash)] +pub struct Spanned<'db, T> { + pub data: T, + pub span: DynLazySpan<'db>, +} diff --git a/crates/hir-analysis/src/name_resolution/diagnostics.rs b/crates/hir-analysis/src/name_resolution/diagnostics.rs new file mode 100644 index 0000000000..e14409f268 --- /dev/null +++ b/crates/hir-analysis/src/name_resolution/diagnostics.rs @@ -0,0 +1,279 @@ +use common::diagnostics::{ + CompleteDiagnostic, DiagnosticPass, GlobalErrorCode, LabelStyle, Severity, SubDiagnostic, +}; +use hir::{ + diagnostics::DiagnosticVoucher, + hir_def::{IdentId, TopLevelMod}, + span::{DynLazySpan, LazySpan}, + HirDb, +}; + +use super::NameRes; +use crate::HirAnalysisDb; + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum NameResDiag<'db> { + /// The definition conflicts with other definitions. + Conflict(IdentId<'db>, Vec>), + + /// The name is not found. + NotFound(DynLazySpan<'db>, IdentId<'db>), + + /// The resolved name is not visible. + Invisible(DynLazySpan<'db>, IdentId<'db>, Option>), + + /// The resolved name is ambiguous. + Ambiguous(DynLazySpan<'db>, IdentId<'db>, Vec>), + + /// The name is found, but it can't be used as a middle segment of a path. + InvalidPathSegment(DynLazySpan<'db>, IdentId<'db>, Option>), + + TooManyGenericArgs { + span: DynLazySpan<'db>, + expected: usize, + given: usize, + }, + + /// The name is found but belongs to a different name domain other than the + /// Type. + ExpectedType(DynLazySpan<'db>, IdentId<'db>, &'static str), + + /// The name is found but belongs to a different name domain other than the + /// trait. + ExpectedTrait(DynLazySpan<'db>, IdentId<'db>, &'static str), + + /// The name is found but belongs to a different name domain other than the + /// value. + ExpectedValue(DynLazySpan<'db>, IdentId<'db>, &'static str), +} + +impl<'db> NameResDiag<'db> { + /// Returns the top-level module where the diagnostic is located. 
+ pub fn top_mod(&self, db: &'db dyn HirAnalysisDb) -> TopLevelMod<'db> { + match self { + Self::Conflict(_, conflicts) => conflicts + .iter() + .filter_map(|span| span.top_mod(db.as_hir_db())) + .min() + .unwrap(), + Self::NotFound(span, _) => span.top_mod(db.as_hir_db()).unwrap(), + Self::Invisible(span, _, _) => span.top_mod(db.as_hir_db()).unwrap(), + Self::Ambiguous(span, _, _) => span.top_mod(db.as_hir_db()).unwrap(), + Self::InvalidPathSegment(span, _, _) => span.top_mod(db.as_hir_db()).unwrap(), + Self::ExpectedType(span, _, _) => span.top_mod(db.as_hir_db()).unwrap(), + Self::ExpectedTrait(span, _, _) => span.top_mod(db.as_hir_db()).unwrap(), + Self::ExpectedValue(span, _, _) => span.top_mod(db.as_hir_db()).unwrap(), + Self::TooManyGenericArgs { span, .. } => span.top_mod(db.as_hir_db()).unwrap(), + } + } + + pub(super) fn ambiguous( + db: &'db dyn HirAnalysisDb, + span: DynLazySpan<'db>, + ident: IdentId<'db>, + cands: Vec>, + ) -> Self { + let cands = cands + .into_iter() + .filter_map(|name| name.kind.name_span(db)) + .collect(); + Self::Ambiguous(span, ident, cands) + } + + fn local_code(&self) -> u16 { + match self { + Self::Conflict(..) => 1, + Self::NotFound(..) => 2, + Self::Invisible(..) => 3, + Self::Ambiguous(..) => 4, + Self::InvalidPathSegment(..) => 5, + Self::ExpectedType(..) => 6, + Self::ExpectedTrait(..) => 7, + Self::ExpectedValue(..) => 8, + Self::TooManyGenericArgs { .. 
} => 9, + } + } + + fn severity(&self) -> Severity { + Severity::Error + } + + fn message(&self, db: &dyn HirDb) -> String { + match self { + Self::Conflict(name, _) => { + format!("`{}` conflicts with other definitions", name.data(db)) + } + Self::NotFound(_, name) => format!("`{}` is not found", name.data(db)), + Self::Invisible(_, name, _) => { + format!("`{}` is not visible", name.data(db),) + } + Self::Ambiguous(_, name, _) => format!("`{}` is ambiguous", name.data(db)), + Self::InvalidPathSegment(_, name, _) => { + format!( + "`{}` can't be used as a middle segment of a path", + name.data(db) + ) + } + Self::ExpectedType(_, _, _) => "expected type item here".to_string(), + Self::ExpectedTrait(_, _, _) => "expected trait item here".to_string(), + Self::ExpectedValue(_, _, _) => "expected value here".to_string(), + Self::TooManyGenericArgs { + span: _, + expected, + given, + } => format!("too many generic args; expected {expected}, given {given}"), + } + } + + fn sub_diagnostics(&self, db: &dyn hir::SpannedHirDb) -> Vec { + match self { + Self::Conflict(ident, conflicts) => { + let ident = ident.data(db.as_hir_db()); + let mut diags = Vec::with_capacity(conflicts.len()); + let mut spans: Vec<_> = conflicts + .iter() + .filter_map(|span| span.resolve(db)) + .collect(); + spans.sort_unstable(); + let mut spans = spans.into_iter(); + + diags.push(SubDiagnostic::new( + LabelStyle::Primary, + format!("`{ident}` is defined here"), + spans.next(), + )); + for sub_span in spans { + diags.push(SubDiagnostic::new( + LabelStyle::Secondary, + format! 
{"`{ident}` is redefined here"}, + Some(sub_span), + )); + } + + diags + } + + Self::NotFound(prim_span, ident) => { + let ident = ident.data(db.as_hir_db()); + vec![SubDiagnostic::new( + LabelStyle::Primary, + format!("`{ident}` is not found"), + prim_span.resolve(db), + )] + } + + Self::Invisible(prim_span, ident, span) => { + let ident = ident.data(db.as_hir_db()); + + let mut diags = vec![SubDiagnostic::new( + LabelStyle::Primary, + format!("`{ident}` is not visible"), + prim_span.resolve(db), + )]; + if let Some(span) = span { + diags.push(SubDiagnostic::new( + LabelStyle::Secondary, + format!("`{ident}` is defined here"), + span.resolve(db), + )) + } + diags + } + + Self::Ambiguous(prim_span, ident, candidates) => { + let ident = ident.data(db.as_hir_db()); + let mut diags = vec![SubDiagnostic::new( + LabelStyle::Primary, + format!("`{ident}` is ambiguous"), + prim_span.resolve(db), + )]; + let mut cand_spans: Vec<_> = candidates + .iter() + .filter_map(|span| span.resolve(db)) + .collect(); + cand_spans.sort_unstable(); + diags.extend(cand_spans.into_iter().enumerate().map(|(i, span)| { + SubDiagnostic::new( + LabelStyle::Secondary, + format!("candidate `#{i}`"), + Some(span), + ) + })); + + diags + } + + Self::InvalidPathSegment(prim_span, name, res_span) => { + let name = name.data(db.as_hir_db()); + let mut diag = vec![SubDiagnostic::new( + LabelStyle::Primary, + format!("`{}` can't be used as a middle segment of a path", name,), + prim_span.resolve(db), + )]; + + if let Some(span) = res_span { + diag.push(SubDiagnostic::new( + LabelStyle::Secondary, + format!("`{name}` is defined here"), + span.resolve(db), + )); + } + + diag + } + + Self::ExpectedType(prim_span, name, given_kind) => { + let name = name.data(db.as_hir_db()); + vec![SubDiagnostic::new( + LabelStyle::Primary, + format!("expected type here, but found {given_kind} `{name}`"), + prim_span.resolve(db), + )] + } + + Self::ExpectedTrait(prim_span, name, given_kind) => { + let name = 
name.data(db.as_hir_db()); + vec![SubDiagnostic::new( + LabelStyle::Primary, + format!("expected trait here, but found {given_kind} `{name}`",), + prim_span.resolve(db), + )] + } + + Self::ExpectedValue(prim_span, name, given_kind) => { + let name = name.data(db.as_hir_db()); + vec![SubDiagnostic::new( + LabelStyle::Primary, + format!("expected value here, but found {given_kind} `{name}`",), + prim_span.resolve(db), + )] + } + + Self::TooManyGenericArgs { + span, + expected, + given, + } => { + vec![SubDiagnostic::new( + LabelStyle::Primary, + format!("Too many generic args; expected {expected}, given {given}",), + span.resolve(db), + )] + } + } + } +} + +impl<'db> DiagnosticVoucher<'db> for NameResDiag<'db> { + fn error_code(&self) -> GlobalErrorCode { + GlobalErrorCode::new(DiagnosticPass::NameResolution, self.local_code()) + } + + fn to_complete(&self, db: &'db dyn hir::SpannedHirDb) -> CompleteDiagnostic { + let error_code = self.error_code(); + let message = self.message(db.as_hir_db()); + let sub_diags = self.sub_diagnostics(db); + + CompleteDiagnostic::new(self.severity(), message, sub_diags, vec![], error_code) + } +} diff --git a/crates/hir-analysis/src/name_resolution/import_resolver.rs b/crates/hir-analysis/src/name_resolution/import_resolver.rs new file mode 100644 index 0000000000..207e8ba566 --- /dev/null +++ b/crates/hir-analysis/src/name_resolution/import_resolver.rs @@ -0,0 +1,1050 @@ +//! This module implements import and export resolution for HIR. 
+use std::{ + collections::{hash_map::Entry, VecDeque}, + mem, +}; + +use hir::{ + hir_def::{prim_ty::PrimTy, scope_graph::ScopeId, IdentId, IngotId, Use}, + span::DynLazySpan, +}; +use itertools::Itertools; +use rustc_hash::{FxHashMap, FxHashSet}; + +use super::{ + diagnostics::NameResDiag, + name_resolver::{ + NameDerivation, NameDomain, NameRes, NameResBucket, NameResKind, NameResolutionError, + NameResolutionResult, NameResolver, QueryDirective, + }, + EarlyNameQueryId, +}; +use crate::{name_resolution::visibility_checker::is_use_visible, HirAnalysisDb}; + +pub(crate) struct ImportResolver<'db> { + db: &'db dyn HirAnalysisDb, + + /// The ingot that is being resolved. + ingot: IngotId<'db>, + + /// The set of imports that have been resolved. + resolved_imports: IntermediateResolvedImports<'db>, + + /// The uses that have resolution is work in progress. + intermediate_uses: FxHashMap, VecDeque>>, + + /// The errors that have been accumulated during the import resolution. + accumulated_errors: Vec>, + + /// The number of imported resolutions. + /// This is used to judge if a import resolution doesn't change in each + /// iteration of fixed point calculation. + /// This check rely on the fact that the number of resolutions is + /// monotonically increasing. + num_imported_res: FxHashMap, usize>, + + /// The set of imports that are suspicious to be ambiguous. + /// In this case, the use will turns out to be ambiguous after the import + /// resolution reaches the fixed point. 
+ suspicious_imports: FxHashSet>, +} +impl<'db> ImportResolver<'db> { + pub(crate) fn new(db: &'db dyn HirAnalysisDb, ingot: IngotId<'db>) -> Self { + Self { + db, + ingot, + resolved_imports: IntermediateResolvedImports::new(ingot), + intermediate_uses: FxHashMap::default(), + accumulated_errors: Vec::default(), + num_imported_res: FxHashMap::default(), + suspicious_imports: FxHashSet::default(), + } + } + + pub(crate) fn resolve_imports(mut self) -> (ResolvedImports<'db>, Vec>) { + self.initialize_i_uses(); + + let mut changed = true; + let mut unresolved_scope: VecDeque<_> = + self.intermediate_uses.keys().copied().dedup().collect(); + while changed { + changed = false; + let n_unresolved_scope = unresolved_scope.len(); + let mut scope_counter = 0; + + while scope_counter < n_unresolved_scope { + scope_counter += 1; + let scope = unresolved_scope.pop_front().unwrap(); + + let n_i_uses = self.intermediate_uses[&scope].len(); + let mut i_use_counter = 0; + while i_use_counter < n_i_uses { + i_use_counter += 1; + let i_use = self + .intermediate_uses + .get_mut(&scope) + .unwrap() + .pop_front() + .unwrap(); + + match self.resolve_i_use(i_use) { + (Some(updated_i_use), i_use_changed) => { + changed |= i_use_changed; + self.intermediate_uses + .get_mut(&scope) + .unwrap() + .push_back(updated_i_use); + } + + (None, i_use_changed) => { + changed |= i_use_changed; + } + } + } + + if !self.scope_state(scope).is_closed() { + unresolved_scope.push_back(scope); + } + } + } + + for i_use in std::mem::take(&mut self.intermediate_uses) + .into_values() + .flatten() + { + // If the unresolved use is a glob and the base path is fully resolved, then we + // can regard the resolution for the glob as completed. + // This happens if the scope that glob is referring to is not closed, e.g., if + // there is a cycle in the import graph. 
+ if i_use.is_glob(self.db) && i_use.is_base_resolved(self.db) { + continue; + } + + // If the unresolved use is not a glob and the number of imported bucket is + // not 0, then we can regard the resolution for the use as completed. + // This happens if the scope that the use is referring to is not closed. + if !i_use.is_glob(self.db) && *self.num_imported_res.entry(i_use.use_).or_default() != 0 + { + continue; + } + + self.register_error(&i_use, NameResolutionError::NotFound); + } + + for suspicious in mem::take(&mut self.suspicious_imports) { + self.verify_ambiguity(suspicious); + } + + ( + self.resolved_imports.resolved_imports, + self.accumulated_errors, + ) + } + + /// Try to resolve the given `IntermediateUse`. + /// + /// The first value of the returned tuple is the updated `IntermediateUse` + /// if the resolution is not fully completed. + /// + /// The second value of the returned tuple indicates whether the resolution + /// is progressed from the passed `IntermediateUse`. + fn resolve_i_use( + &mut self, + i_use: IntermediateUse<'db>, + ) -> (Option>, bool) { + if i_use.is_glob(self.db) { + self.resolve_glob(i_use) + } else { + self.resolve_named(i_use) + } + } + + /// Try to resolve the given named `IntermediateUse`. + fn resolve_named( + &mut self, + i_use: IntermediateUse<'db>, + ) -> (Option>, bool) { + let Some(i_use_res) = self.resolve_base_path(i_use.clone()) else { + return (None, true); + }; + + match i_use_res { + IUseResolution::Full(_) => unreachable!(), + + IUseResolution::BasePath(base_path_resolved) => { + if self.try_finalize_named_use(base_path_resolved.clone()) { + (None, true) + } else { + let changed = !i_use.is_base_resolved(self.db); + (Some(base_path_resolved), changed) + } + } + + IUseResolution::Partial(i_use) => (Some(i_use), true), + + IUseResolution::Unchanged(i_use) => (Some(i_use), false), + } + } + + /// Try to resolve the given glob `IntermediateUse`. 
+ /// + /// The first value of the returned tuple is the updated `IntermediateUse` + /// if the resolution is not fully completed. + /// + /// The second value of the returned tuple indicates whether the resolution + /// is progressed from the passed `IntermediateUse`. + fn resolve_glob( + &mut self, + i_use: IntermediateUse<'db>, + ) -> (Option>, bool) { + let (base_path_resolved, changed) = { + if i_use.is_base_resolved(self.db) { + (i_use, false) + } else { + let Some(i_use_res) = self.resolve_base_path(i_use) else { + return (None, true); + }; + match i_use_res { + IUseResolution::Full(_) => unreachable!(), + + IUseResolution::BasePath(resolved) => (resolved, true), + + IUseResolution::Partial(i_use) => { + return (Some(i_use), true); + } + + IUseResolution::Unchanged(i_use) => { + return (Some(i_use), false); + } + } + } + }; + + let Some(target_scope) = base_path_resolved.current_scope() else { + return (None, true); + }; + + let original_scope = base_path_resolved.original_scope; + let use_ = base_path_resolved.use_; + + // Collect all unresolved named imports in the target scope to avoid binding a + // name to a wrong resolution being brought by a glob. + let unresolved_named_imports = match self.intermediate_uses.get(&target_scope) { + Some(i_uses) => i_uses + .iter() + .filter_map(|i_use_in_target| { + if !i_use_in_target.is_glob(self.db) + && is_use_visible(self.db, original_scope, use_) + { + i_use_in_target.imported_name(self.db) + } else { + None + } + }) + .collect(), + + None => FxHashSet::default(), + }; + + // Collect all bucket in the target scope. 
+ let mut resolver = NameResolver::new(self.db, &self.resolved_imports); + let resolutions = resolver.collect_all_resolutions_for_glob( + target_scope, + original_scope, + unresolved_named_imports, + ); + + let is_decidable = self.is_decidable(&base_path_resolved); + let n_res = resolutions.iter().fold(0, |acc, res| acc + res.1.len()); + if *self.num_imported_res.entry(use_).or_default() == n_res { + if is_decidable { + return (None, true); + } else { + return (Some(base_path_resolved), changed); + } + } + + self.num_imported_res.insert(base_path_resolved.use_, n_res); + self.resolved_imports + .set_glob_resolutions(&base_path_resolved, resolutions); + + if is_decidable { + (None, true) + } else { + (Some(base_path_resolved), true) + } + } + + /// Resolves all segments of the given `IntermediateUse` except for the last + /// segment. + /// NOTE: `IUseResolution::Full` is never returned from this function. + /// + /// # Returns + /// - `Some(IUseResolution::BasePath(_))` if the base path is fully + /// resolved. + /// - `Some(IUseResolution::Partial(_))` if the base path is partially + /// resolved and the `IntermediateUse` is updated. + /// - `Some(IUseResolution::Unchanged(_))` if the resulted `IntermediateUse` + /// is unchanged. + /// - `None` if the error happens during the resolution, the error is + /// accumulated in the function. + fn resolve_base_path( + &mut self, + mut i_use: IntermediateUse<'db>, + ) -> Option> { + let mut changed = false; + if i_use.is_base_resolved(self.db) { + return Some(IUseResolution::BasePath(i_use)); + } + + loop { + match self.resolve_segment(&i_use)? 
{ + IUseResolution::Full(_) => unreachable!(), + + IUseResolution::BasePath(resolved) => { + return Some(IUseResolution::BasePath(resolved)); + } + + IUseResolution::Partial(updated_i_use) => { + changed = true; + i_use = updated_i_use; + } + + IUseResolution::Unchanged(i_use) => { + return if changed { + Some(IUseResolution::Partial(i_use)) + } else { + Some(IUseResolution::Unchanged(i_use)) + }; + } + } + } + } + + /// Resolves the segments of the given `IntermediateUse` one by one. + /// + /// # Returns + /// - `Some(IUseResolution::Full(_))` if the given use is fully resolved. + /// - `Some(IUseResolution::BasePath(_))` if the base path is fully + /// resolved. + /// - `Some(IUseResolution::Partial(_))` if the base path is partially + /// resolved and the `IntermediateUse` is updated. + /// - `Some(IUseResolution::Unchanged(_))` if the resulted `IntermediateUse` + /// is unchanged. + /// - `None` if the error happens during the resolution, the error is + /// accumulated in the function. + fn resolve_segment(&mut self, i_use: &IntermediateUse<'db>) -> Option> { + // The segment is syntactically invalid. We can't perform name resolution + // anymore. + // We don't need to report the error here because the parser should have already + // reported it. + let query = match self.make_query(i_use) { + Ok(query) => query, + Err(err) => { + self.register_error(i_use, err); + return None; + } + }; + + let mut resolver = NameResolver::new(self.db, &self.resolved_imports); + let mut bucket = resolver.resolve_query(query); + // Filter out invisible resolutions. 
+ let mut invisible_span = None; + bucket.bucket.retain(|_, res| { + let Ok(res) = res else { + return true; + }; + + if !res.is_importable() { + return false; + } + if res.is_visible(self.db, i_use.original_scope) { + true + } else { + if let Some(span) = res.derived_from(self.db) { + invisible_span.get_or_insert(span); + } + false + } + }); + + for (_, err) in bucket.errors() { + if !matches!( + err, + NameResolutionError::NotFound | NameResolutionError::Invalid + ) { + self.register_error(i_use, err.clone()); + return None; + } + } + if bucket.is_empty() { + if self.is_decidable(i_use) { + let err = if let Some(invisible_span) = invisible_span { + NameResolutionError::Invisible(invisible_span.into()) + } else { + NameResolutionError::NotFound + }; + self.register_error(i_use, err); + return None; + } else { + return Some(IUseResolution::Unchanged(i_use.clone())); + }; + } + + // If the resolution is derived from glob import or external crate, we have to + // insert the use into the `suspicious_imports` set to verify the ambiguity + // after the algorithm reaches the fixed point. 
+ if i_use.is_first_segment() { + for res in bucket.iter_ok() { + if res.is_builtin() + || res.is_external(self.db, self.ingot) + || res.is_derived_from_glob() + { + self.suspicious_imports.insert(i_use.use_); + break; + } + } + } + + if i_use.is_base_resolved(self.db) { + Some(IUseResolution::Full(bucket)) + } else { + let next_i_use = match i_use.proceed(self.db, bucket) { + Ok(next_i_use) => next_i_use, + Err(err) => { + self.register_error(i_use, err); + return None; + } + }; + + if next_i_use.is_base_resolved(self.db) { + Some(IUseResolution::BasePath(next_i_use)) + } else { + Some(IUseResolution::Partial(next_i_use)) + } + } + } + + fn initialize_i_uses(&mut self) { + let m_tree = self.ingot.module_tree(self.db.as_hir_db()); + + for top_mod in m_tree.all_modules() { + let s_graph = top_mod.scope_graph(self.db.as_hir_db()); + for &use_ in &s_graph.unresolved_uses { + let i_use = IntermediateUse::new(self.db, use_); + self.intermediate_uses + .entry(i_use.original_scope) + .or_default() + .push_back(i_use); + } + } + } + + /// Returns `true` if the given `IntermediateUse` reaches the fixed point. + fn try_finalize_named_use(&mut self, i_use: IntermediateUse<'db>) -> bool { + debug_assert!(i_use.is_base_resolved(self.db)); + + let bucket = match self.resolve_segment(&i_use) { + Some(IUseResolution::Full(bucket)) => bucket, + Some(IUseResolution::Unchanged(_)) => { + return false; + } + + Some(_) => unreachable!(), + + None => { + return true; + } + }; + + let n_res = bucket.len(); + let is_decidable = self.is_decidable(&i_use); + if *self.num_imported_res.entry(i_use.use_).or_default() == n_res { + return is_decidable; + } + + self.num_imported_res.insert(i_use.use_, n_res); + if let Err(err) = self + .resolved_imports + .set_named_bucket(self.db, &i_use, bucket) + { + self.register_error(&i_use, err); + return true; + } + + is_decidable + } + + /// Check the ambiguity of the given suspicious `IntermediateUse` and report + /// an error if it is ambiguous. 
+ /// An additional ambiguity check should be performed after the import + /// resolution reaches a fixed point. + // + // The ambiguity in the first segment possibly occurs when the segment is + // resolved to either a glob imported derived resolution or an external ingot in + // the `i_use` resolution. + // + // This is because: + // 1. the resolution of the first segment changes depending on whether the const + // glob is resolved or not at the time of `i_use` resolution, + // 2. the order in which uses are resolved is nondeterministic. + // + // In normal name resolution rules, the name brought in by a glob always shadows + // the external ingot, so this ambiguity is inherent in import resolution. + // As a result, we need to add additional verification to check this kind of + // ambiguity. + fn verify_ambiguity(&mut self, use_: Use<'db>) { + let i_use = IntermediateUse::new(self.db, use_); + let first_segment_ident = i_use.current_segment_ident(self.db).unwrap(); + + let res = match self.resolve_segment(&i_use) { + Some(IUseResolution::Full(bucket)) => match bucket.pick(NameDomain::TYPE) { + Ok(res) => res.clone(), + _ => { + return; + } + }, + + Some(IUseResolution::BasePath(i_use) | IUseResolution::Partial(i_use)) => { + i_use.current_res.unwrap() + } + + Some(IUseResolution::Unchanged(_)) | None => return, + }; + + // The resolved scope is shadowed by an glob imports while originally + // the use might be resolved to an external ingot or builtin. This means there + // is an ambiguity between the external ingot and the name + // imported by the glob import. 
+ if !res.is_external(self.db, self.ingot) + && (self + .ingot + .external_ingots(self.db.as_hir_db()) + .iter() + .any(|(ingot_name, _)| *ingot_name == first_segment_ident) + || PrimTy::all_types() + .iter() + .any(|ty| ty.name(self.db.as_hir_db()) == first_segment_ident)) + { + self.register_error(&i_use, NameResolutionError::Ambiguous(vec![])); + } + } + + fn register_error(&mut self, i_use: &IntermediateUse<'db>, err: NameResolutionError<'db>) { + self.suspicious_imports.remove(&i_use.use_); + + match err { + NameResolutionError::NotFound => { + self.accumulated_errors.push(NameResDiag::NotFound( + i_use.current_segment_span(), + i_use.current_segment_ident(self.db).unwrap(), + )); + } + + NameResolutionError::Invalid => { + // Do nothing because the error is already reported in the + // parsing phase. + } + + NameResolutionError::Ambiguous(cands) => { + self.accumulated_errors.push(NameResDiag::ambiguous( + self.db, + i_use.current_segment_span(), + i_use.current_segment_ident(self.db).unwrap(), + cands, + )); + } + + NameResolutionError::InvalidPathSegment(res) => { + self.accumulated_errors + .push(NameResDiag::InvalidPathSegment( + i_use.current_segment_span(), + i_use.current_segment_ident(self.db).unwrap(), + res.kind.name_span(self.db), + )) + } + + NameResolutionError::Invisible(invisible_span) => { + self.accumulated_errors.push(NameResDiag::Invisible( + i_use.current_segment_span(), + i_use.current_segment_ident(self.db).unwrap(), + invisible_span, + )); + } + + NameResolutionError::Conflict(ident, spans) => { + self.accumulated_errors + .push(NameResDiag::Conflict(ident, spans)); + } + } + } + + /// Makes a query for the current segment of the intermediate use to be + /// resolved. 
+ fn make_query( + &self, + i_use: &IntermediateUse<'db>, + ) -> NameResolutionResult<'db, EarlyNameQueryId<'db>> { + let Some(seg_name) = i_use.current_segment_ident(self.db) else { + return Err(NameResolutionError::Invalid); + }; + + let Some(current_scope) = i_use.current_scope() else { + return Err(NameResolutionError::NotFound); + }; + + // In the middle of the use path, disallow lexically scoped names and + // external names. + let directive = if !i_use.is_first_segment() { + QueryDirective::new().disallow_lex().disallow_external() + } else if self.contains_unresolved_named_use( + seg_name, + current_scope, + i_use.is_first_segment(), + ) { + QueryDirective::new().disallow_glob().disallow_external() + } else { + QueryDirective::new() + }; + + Ok(EarlyNameQueryId::new( + self.db, + seg_name, + current_scope, + directive, + )) + } + + /// Returns `true` if there is an unresolved named import for the given name + /// in the given scope or its lexical parents(if `allow_lex` is `true`). + fn contains_unresolved_named_use( + &self, + name: IdentId, + scope: ScopeId, + allow_lex: bool, + ) -> bool { + let mut current_scope = Some(scope); + + while let Some(scope) = current_scope { + for i_use in self.intermediate_uses.get(&scope).into_iter().flatten() { + if i_use.imported_name(self.db) == Some(name) { + return true; + } + } + if !allow_lex { + break; + } + current_scope = scope.lex_parent(self.db.as_hir_db()); + } + + false + } + + /// Returns the current state of the scope. + fn scope_state(&self, scope: ScopeId) -> ScopeState { + if scope.ingot(self.db.as_hir_db()) != self.ingot { + return ScopeState::Closed; + } + + let Some(i_uses) = self.intermediate_uses.get(&scope) else { + return ScopeState::Closed; + }; + + if i_uses.is_empty() { + ScopeState::Closed + } else { + ScopeState::Open + } + } + + /// Returns `true` if the next segment of the intermediate use is + /// deterministically resolvable. 
+ fn is_decidable(&self, i_use: &IntermediateUse) -> bool { + let Some(target_scope) = i_use.current_scope() else { + return true; + }; + + if i_use.is_first_segment() { + let mut target_scope = Some(target_scope); + while let Some(scope) = target_scope { + if self.scope_state(scope) != ScopeState::Closed { + return false; + } + target_scope = scope.lex_parent(self.db.as_hir_db()); + } + true + } else { + self.scope_state(target_scope) != ScopeState::Open + } + } +} + +#[derive(Clone, Debug, Default, PartialEq, Eq)] +pub struct ResolvedImports<'db> { + pub named_resolved: FxHashMap, NamedImportSet<'db>>, + pub glob_resolved: FxHashMap, GlobImportSet<'db>>, + pub unnamed_resolved: FxHashMap, Vec>>, +} + +pub(super) trait Importer<'db> { + fn named_imports<'a>( + &'a self, + db: &'db dyn HirAnalysisDb, + scope: ScopeId<'db>, + ) -> Option<&'a NamedImportSet<'db>>; + + fn glob_imports<'a>( + &'a self, + db: &'db dyn HirAnalysisDb, + scope: ScopeId<'db>, + ) -> Option<&'a GlobImportSet<'db>>; +} + +#[derive(Debug, Clone, Copy, Default)] +pub(super) struct DefaultImporter; + +impl<'db> Importer<'db> for DefaultImporter { + fn named_imports<'a>( + &'a self, + db: &'db dyn HirAnalysisDb, + scope: ScopeId<'db>, + ) -> Option<&'a NamedImportSet<'db>> { + resolved_imports_for_scope(db, scope) + .named_resolved + .get(&scope) + } + + fn glob_imports<'a>( + &'a self, + db: &'db dyn HirAnalysisDb, + scope: ScopeId<'db>, + ) -> Option<&'a GlobImportSet<'db>> { + resolved_imports_for_scope(db, scope) + .glob_resolved + .get(&scope) + } +} + +pub type NamedImportSet<'db> = FxHashMap, NameResBucket<'db>>; + +#[derive(Clone, Debug, Default, PartialEq, Eq)] +pub struct GlobImportSet<'db> { + imported: FxHashMap, FxHashMap, Vec>>>, +} +impl<'db> GlobImportSet<'db> { + /// Returns imported resolutions for the given `name`. 
+ pub fn name_res_for(&self, name: IdentId<'db>) -> impl Iterator> { + self.imported + .values() + .flat_map(move |v| v.get(&name).into_iter().flatten()) + } + + pub fn iter( + &self, + ) -> impl Iterator, &FxHashMap, Vec>>)> { + self.imported.iter() + } +} + +/// This is the state of import resolution for a given scope. +#[derive(Clone, Debug, Copy, PartialEq, Eq)] +enum ScopeState { + // The scope is open, meaning that the scope needs further resolution. + Open, + + /// The scope is closed, meaning that the all imports in the scope is fully + /// resolved. + Closed, +} + +impl ScopeState { + fn is_closed(self) -> bool { + matches!(self, ScopeState::Closed) + } +} + +#[derive(Clone, Debug, PartialEq, Eq, Hash)] +struct IntermediateUse<'db> { + use_: Use<'db>, + current_res: Option>, + original_scope: ScopeId<'db>, + unresolved_from: usize, +} + +impl<'db> IntermediateUse<'db> { + fn new(db: &'db dyn HirAnalysisDb, use_: Use<'db>) -> Self { + let scope = ScopeId::from_item(use_.into()) + .lex_parent(db.as_hir_db()) + .unwrap(); + Self { + use_, + current_res: None, + original_scope: scope, + unresolved_from: 0, + } + } + + /// Returns the scope that the current resolution is pointed to. + fn current_scope(&self) -> Option> { + if let Some(current_res) = self.current_res.as_ref() { + match current_res.kind { + NameResKind::Scope(scope) => Some(scope), + NameResKind::Prim(_) => None, + } + } else { + self.original_scope.into() + } + } + + fn is_glob(&self, db: &dyn HirAnalysisDb) -> bool { + self.use_.is_glob(db.as_hir_db()) + } + + /// Proceed the resolution of the use path to the next segment. + /// Returns an error if the bucket doesn't contain appropriate resolution + /// for use path segment. # Panics + /// - Panics if the the base path is already resolved. + /// - Panics if the bucket is empty. 
+ fn proceed( + &self, + db: &'db dyn HirAnalysisDb, + bucket: NameResBucket<'db>, + ) -> NameResolutionResult<'db, Self> { + debug_assert!(!bucket.is_empty()); + debug_assert!(!self.is_base_resolved(db)); + + let next_res = match bucket.pick(NameDomain::TYPE) { + Ok(res) => res.clone(), + Err(_) => { + let res = bucket.iter_ok().next().unwrap(); + return Err(NameResolutionError::InvalidPathSegment(res.clone())); + } + }; + + if next_res.is_mod(db) || next_res.is_enum(db) { + Ok(Self { + use_: self.use_, + current_res: next_res.into(), + original_scope: self.original_scope, + unresolved_from: self.unresolved_from + 1, + }) + } else { + Err(NameResolutionError::InvalidPathSegment(next_res)) + } + } + + /// Returns the span of the current segment of the use. + fn current_segment_span(&self) -> DynLazySpan<'db> { + self.use_ + .lazy_span() + .path() + .segment(self.unresolved_from) + .into() + } + + fn current_segment_ident(&self, db: &'db dyn HirAnalysisDb) -> Option> { + let segments = self + .use_ + .path(db.as_hir_db()) + .to_opt()? + .data(db.as_hir_db()); + + let seg_idx = self.unresolved_from; + let segment = segments[seg_idx].to_opt()?; + segment.ident() + } + + fn imported_name(&self, db: &'db dyn HirAnalysisDb) -> Option> { + self.use_.imported_name(db.as_hir_db()) + } + + fn segment_len(&self, db: &dyn HirAnalysisDb) -> Option { + self.use_ + .path(db.as_hir_db()) + .to_opt() + .map(|p| p.segment_len(db.as_hir_db())) + } + + /// Returns `true` if the segment that should be resolved next is the first + /// segment. + fn is_first_segment(&self) -> bool { + self.unresolved_from == 0 + } + + /// Returns `true` if the use path except the last segment is fully + /// resolved. 
+ fn is_base_resolved(&self, db: &dyn HirAnalysisDb) -> bool { + let Some(segment_len) = self.segment_len(db) else { + return false; + }; + + self.unresolved_from + 1 == segment_len + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +enum IUseResolution<'db> { + /// The all segments are resolved. + Full(NameResBucket<'db>), + + /// The all path segments except the last one are resolved. + BasePath(IntermediateUse<'db>), + + /// The intermediate use was partially resolved, but still needs further + /// resolution. + Partial(IntermediateUse<'db>), + + /// There was no change to the intermediate use. + Unchanged(IntermediateUse<'db>), +} + +struct IntermediateResolvedImports<'db> { + resolved_imports: ResolvedImports<'db>, + ingot: IngotId<'db>, +} + +impl<'db> IntermediateResolvedImports<'db> { + fn new(ingot: IngotId<'db>) -> Self { + Self { + resolved_imports: ResolvedImports::default(), + ingot, + } + } + + fn set_named_bucket( + &mut self, + db: &'db dyn HirAnalysisDb, + i_use: &IntermediateUse<'db>, + mut bucket: NameResBucket<'db>, + ) -> NameResolutionResult<'db, ()> { + let scope = i_use.original_scope; + bucket.set_derivation(NameDerivation::NamedImported(i_use.use_)); + + let imported_name = match i_use.imported_name(db) { + Some(name) => name, + None => { + self.resolved_imports + .unnamed_resolved + .entry(scope) + .or_default() + .push(bucket); + return Ok(()); + } + }; + + let imported_set = self + .resolved_imports + .named_resolved + .entry(scope) + .or_default(); + + match imported_set.entry(imported_name) { + Entry::Occupied(mut e) => { + let old_bucket = e.get_mut(); + old_bucket.merge(&bucket); + for (_, err) in old_bucket.errors() { + let NameResolutionError::Ambiguous(cands) = err else { + continue; + }; + for cand in cands { + let NameDerivation::NamedImported(use_) = cand.derivation else { + continue; + }; + + if i_use.use_ != use_ { + return Err(NameResolutionError::Conflict( + imported_name, + vec![ + 
i_use.use_.imported_name_span(db.as_hir_db()).unwrap(), + cand.derived_from(db).unwrap(), + ], + )); + } + } + } + Ok(()) + } + + Entry::Vacant(e) => { + e.insert(bucket); + Ok(()) + } + } + } + + fn set_glob_resolutions( + &mut self, + i_use: &IntermediateUse<'db>, + mut resolutions: FxHashMap, Vec>>, + ) { + let scope = i_use.original_scope; + for res in resolutions.values_mut().flatten() { + res.derivation = NameDerivation::GlobImported(i_use.use_); + } + + self.resolved_imports + .glob_resolved + .entry(scope) + .or_default() + .imported + .insert(i_use.use_, resolutions); + } +} + +impl<'db> Importer<'db> for IntermediateResolvedImports<'db> { + fn named_imports<'a>( + &'a self, + db: &'db dyn HirAnalysisDb, + scope: ScopeId<'db>, + ) -> Option<&'a NamedImportSet<'db>> { + if scope.top_mod(db.as_hir_db()).ingot(db.as_hir_db()) != self.ingot { + resolved_imports_for_scope(db, scope) + .named_resolved + .get(&scope) + } else { + self.resolved_imports.named_resolved.get(&scope) + } + } + + fn glob_imports<'a>( + &'a self, + db: &'db dyn HirAnalysisDb, + scope: ScopeId<'db>, + ) -> Option<&'a GlobImportSet<'db>> { + if scope.top_mod(db.as_hir_db()).ingot(db.as_hir_db()) != self.ingot { + resolved_imports_for_scope(db, scope) + .glob_resolved + .get(&scope) + } else { + self.resolved_imports.glob_resolved.get(&scope) + } + } +} + +fn resolved_imports_for_scope<'db>( + db: &'db dyn HirAnalysisDb, + scope: ScopeId<'db>, +) -> &'db ResolvedImports<'db> { + let ingot = scope.ingot(db.as_hir_db()); + &super::resolve_imports(db, ingot).1 +} + +impl NameRes<'_> { + /// Returns true if the bucket contains an resolution that is not in the + /// same ingot as the current resolution of the `i_use`. 
+ fn is_external(&self, db: &dyn HirAnalysisDb, ingot: IngotId) -> bool { + match self.kind { + NameResKind::Scope(scope) => scope.ingot(db.as_hir_db()) != ingot, + NameResKind::Prim(_) => true, + } + } + + fn is_builtin(&self) -> bool { + matches!(self.kind, NameResKind::Prim(_)) + } + + /// Returns true if the bucket contains a glob import. + fn is_derived_from_glob(&self) -> bool { + matches!(self.derivation, NameDerivation::GlobImported(_)) + } +} diff --git a/crates/hir-analysis/src/name_resolution/mod.rs b/crates/hir-analysis/src/name_resolution/mod.rs new file mode 100644 index 0000000000..87f627436d --- /dev/null +++ b/crates/hir-analysis/src/name_resolution/mod.rs @@ -0,0 +1,534 @@ +pub mod diagnostics; + +mod import_resolver; +mod name_resolver; +mod path_resolver; +pub(crate) mod traits_in_scope; +mod visibility_checker; + +use hir::{ + analysis_pass::ModuleAnalysisPass, + diagnostics::DiagnosticVoucher, + hir_def::{ + scope_graph::ScopeId, Expr, ExprId, GenericArgListId, IngotId, ItemKind, Pat, PatId, + PathId, TopLevelMod, TraitRefId, TypeId, + }, + visitor::prelude::*, +}; +pub use import_resolver::ResolvedImports; +pub use name_resolver::{ + EarlyNameQueryId, NameDerivation, NameDomain, NameRes, NameResBucket, NameResKind, + NameResolutionError, QueryDirective, +}; +use path_resolver::resolve_path_with_observer; +pub use path_resolver::{ + resolve_ident_to_bucket, resolve_name_res, resolve_path, PathRes, PathResError, + PathResErrorKind, ResolvedVariant, +}; +use rustc_hash::FxHashSet; +pub use traits_in_scope::available_traits_in_scope; +pub(crate) use visibility_checker::is_scope_visible_from; + +use self::{diagnostics::NameResDiag, import_resolver::DefaultImporter}; +use crate::HirAnalysisDb; + +#[salsa::tracked(return_ref)] +pub fn resolve_query<'db>( + db: &'db dyn HirAnalysisDb, + query: EarlyNameQueryId<'db>, +) -> NameResBucket<'db> { + let importer = DefaultImporter; + let mut name_resolver = name_resolver::NameResolver::new(db, &importer); 
+ name_resolver.resolve_query(query) +} + +/// Performs import resolution analysis. This pass only checks correctness of +/// the imports and doesn't emit other name resolutions errors. +pub struct ImportAnalysisPass<'db> { + db: &'db dyn HirAnalysisDb, +} + +impl<'db> ImportAnalysisPass<'db> { + pub fn new(db: &'db dyn HirAnalysisDb) -> Self { + Self { db } + } + + pub fn resolve_imports(&self, ingot: IngotId<'db>) -> &'db ResolvedImports<'db> { + &resolve_imports(self.db, ingot).1 + } +} + +impl<'db> ModuleAnalysisPass<'db> for ImportAnalysisPass<'db> { + fn run_on_module( + &mut self, + top_mod: TopLevelMod<'db>, + ) -> Vec + 'db>> { + let ingot = top_mod.ingot(self.db.as_hir_db()); + resolve_imports(self.db, ingot) + .0 + .iter() + .filter(|diag| diag.top_mod(self.db) == top_mod) + .map(|diag| Box::new(diag.clone()) as _) + .collect() + } +} + +/// Performs path resolution analysis. This pass checks all paths appeared in a +/// module for +/// - Existence +/// - Visibility +/// - Domain correctness +/// - Ambiguity +/// +/// NOTE: This pass doesn't check the conflict of item definitions or import +/// errors. If you need to check them, please consider using +/// [`ImportAnalysisPass`] or [`DefConflictAnalysisPass`]. +pub struct PathAnalysisPass<'db> { + db: &'db dyn HirAnalysisDb, +} + +impl<'db> PathAnalysisPass<'db> { + pub fn new(db: &'db dyn HirAnalysisDb) -> Self { + Self { db } + } +} + +/// TODO: Remove this!!!! 
+impl<'db> ModuleAnalysisPass<'db> for PathAnalysisPass<'db> { + fn run_on_module( + &mut self, + top_mod: TopLevelMod<'db>, + ) -> Vec + 'db>> { + let importer = DefaultImporter; + let mut visitor = EarlyPathVisitor::new(self.db, &importer); + let mut ctxt = VisitorCtxt::with_item(self.db.as_hir_db(), top_mod.into()); + visitor.visit_item(&mut ctxt, top_mod.into()); + + visitor + .diags + .iter() + .filter(|diag| !matches!(diag, NameResDiag::Conflict(..))) + .map(|diag| Box::new(diag.clone()) as _) + .collect() + } +} + +/// Performs conflict analysis. This pass checks the conflict of item +/// definitions. +pub struct DefConflictAnalysisPass<'db> { + db: &'db dyn HirAnalysisDb, +} + +impl<'db> DefConflictAnalysisPass<'db> { + pub fn new(db: &'db dyn HirAnalysisDb) -> Self { + Self { db } + } +} + +/// TODO: Remove this!!!! +impl<'db> ModuleAnalysisPass<'db> for DefConflictAnalysisPass<'db> { + fn run_on_module( + &mut self, + top_mod: TopLevelMod<'db>, + ) -> Vec + 'db>> { + let importer = DefaultImporter; + let mut visitor = EarlyPathVisitor::new(self.db, &importer); + let mut ctxt = VisitorCtxt::with_item(self.db.as_hir_db(), top_mod.into()); + visitor.visit_item(&mut ctxt, top_mod.into()); + + visitor + .diags + .iter() + .filter(|diag| matches!(diag, NameResDiag::Conflict(..))) + .map(|diag| Box::new(diag.clone()) as _) + .collect() + } +} + +#[salsa::tracked(return_ref)] +pub fn resolve_imports<'db>( + db: &'db dyn HirAnalysisDb, + ingot: IngotId<'db>, +) -> (Vec>, ResolvedImports<'db>) { + let resolver = import_resolver::ImportResolver::new(db, ingot); + let (imports, diags) = resolver.resolve_imports(); + (diags, imports) +} + +struct EarlyPathVisitor<'db, 'a> { + db: &'db dyn HirAnalysisDb, + inner: name_resolver::NameResolver<'db, 'a>, + diags: Vec>, + item_stack: Vec>, + path_ctxt: Vec, + + /// The set of scopes that have already been conflicted to avoid duplicate + /// diagnostics. 
+ already_conflicted: FxHashSet>, +} + +impl<'db, 'a> EarlyPathVisitor<'db, 'a> { + fn new(db: &'db dyn HirAnalysisDb, importer: &'a DefaultImporter) -> Self { + let resolver = name_resolver::NameResolver::new(db, importer); + Self { + db: db.as_hir_analysis_db(), + inner: resolver, + diags: Vec::new(), + item_stack: Vec::new(), + path_ctxt: Vec::new(), + already_conflicted: FxHashSet::default(), + } + } + + fn check_conflict(&mut self, scope: ScopeId<'db>) { + if !self.already_conflicted.insert(scope) { + return; + } + + let Some(query) = self.make_query_for_conflict_check(scope) else { + return; + }; + + let domain = NameDomain::from_scope(self.db, scope); + let binding = self.inner.resolve_query(query); + match binding.pick(domain) { + Ok(_) => {} + + Err(NameResolutionError::Ambiguous(cands)) => { + let conflicted_span = cands + .iter() + .filter_map(|res| { + let conflicted_scope = res.scope()?; + self.already_conflicted.insert(conflicted_scope); + conflicted_scope.name_span(self.db.as_hir_db()) + }) + .collect(); + + let diag = diagnostics::NameResDiag::Conflict( + scope.name(self.db.as_hir_db()).unwrap(), + conflicted_span, + ); + self.diags.push(diag); + } + + Err(_) => unreachable!(), + }; + } + + fn make_query_for_conflict_check(&self, scope: ScopeId<'db>) -> Option> { + let name = scope.name(self.db.as_hir_db())?; + let directive = QueryDirective::new() + .disallow_lex() + .disallow_glob() + .disallow_external(); + + let parent_scope = scope.parent(self.db.as_hir_db())?; + Some(EarlyNameQueryId::new( + self.db, + name, + parent_scope, + directive, + )) + } +} + +impl<'db> Visitor<'db> for EarlyPathVisitor<'db, '_> { + fn visit_item(&mut self, ctxt: &mut VisitorCtxt<'db, LazyItemSpan<'db>>, item: ItemKind<'db>) { + // We don't need to check use statements for conflicts because they are + // already checked in import resolution. 
+ if matches!(item, ItemKind::Use(_)) { + return; + } + + // We don't need to check impl blocks for conflicts because they + // needs ingot granularity analysis, the conflict checks for them is done by the + // `ImplCollector`. + if !matches!(item, ItemKind::Impl(_)) { + let scope = ScopeId::from_item(item); + self.check_conflict(scope); + } + + self.item_stack.push(item); + if matches!(item, ItemKind::Body(_)) { + self.path_ctxt.push(ExpectedPathKind::Value); + } else { + self.path_ctxt.push(ExpectedPathKind::Type); + } + + walk_item(self, ctxt, item); + + self.item_stack.pop(); + self.path_ctxt.pop(); + } + + fn visit_trait_ref( + &mut self, + ctxt: &mut VisitorCtxt<'db, LazyTraitRefSpan<'db>>, + trait_ref: TraitRefId<'db>, + ) { + self.path_ctxt.push(ExpectedPathKind::Trait); + walk_trait_ref(self, ctxt, trait_ref); + self.path_ctxt.pop(); + } + + fn visit_field_def( + &mut self, + ctxt: &mut VisitorCtxt<'db, LazyFieldDefSpan<'db>>, + field: &hir::hir_def::FieldDef<'db>, + ) { + let scope = ctxt.scope(); + self.check_conflict(scope); + walk_field_def(self, ctxt, field); + } + + fn visit_variant_def( + &mut self, + ctxt: &mut VisitorCtxt<'db, LazyVariantDefSpan<'db>>, + variant: &hir::hir_def::VariantDef<'db>, + ) { + let scope = ctxt.scope(); + self.check_conflict(scope); + walk_variant_def(self, ctxt, variant); + } + + fn visit_generic_param( + &mut self, + ctxt: &mut VisitorCtxt<'db, LazyGenericParamSpan<'db>>, + param: &hir::hir_def::GenericParam<'db>, + ) { + let scope = ctxt.scope(); + self.check_conflict(scope); + walk_generic_param(self, ctxt, param); + } + + fn visit_generic_arg_list( + &mut self, + ctxt: &mut VisitorCtxt<'db, LazyGenericArgListSpan<'db>>, + args: GenericArgListId<'db>, + ) { + self.path_ctxt.push(ExpectedPathKind::Type); + walk_generic_arg_list(self, ctxt, args); + self.path_ctxt.pop(); + } + + fn visit_ty(&mut self, ctxt: &mut VisitorCtxt<'db, LazyTySpan<'db>>, ty: TypeId<'db>) { + self.path_ctxt.push(ExpectedPathKind::Type); + 
walk_ty(self, ctxt, ty); + self.path_ctxt.pop(); + } + + // We don't need to run path analysis on patterns, statements and expressions in + // early path resolution. + fn visit_pat( + &mut self, + ctxt: &mut VisitorCtxt<'db, LazyPatSpan<'db>>, + pat: PatId, + pat_data: &Pat<'db>, + ) { + match pat_data { + Pat::PathTuple { .. } | Pat::Record { .. } => { + self.path_ctxt.push(ExpectedPathKind::Record) + } + _ => self.path_ctxt.push(ExpectedPathKind::Pat), + } + walk_pat(self, ctxt, pat); + self.path_ctxt.pop(); + } + + fn visit_expr( + &mut self, + ctxt: &mut VisitorCtxt<'db, LazyExprSpan<'db>>, + expr: ExprId, + expr_data: &Expr<'db>, + ) { + match expr_data { + Expr::RecordInit(..) => { + self.path_ctxt.push(ExpectedPathKind::Record); + } + + _ => { + self.path_ctxt.push(ExpectedPathKind::Expr); + } + } + walk_expr(self, ctxt, expr); + self.path_ctxt.pop(); + } + + fn visit_path(&mut self, ctxt: &mut VisitorCtxt<'db, LazyPathSpan<'db>>, path: PathId<'db>) { + let scope = ctxt.scope(); + + let mut invisible = None; + + let mut check_visibility = |path: PathId<'db>, reso: &PathRes<'db>| { + if invisible.is_some() { + return; + } + if !reso.is_visible_from(self.db, scope) { + invisible = Some((path, reso.name_span(self.db))); + } + }; + + let expected_path_kind = *self.path_ctxt.last().unwrap(); + let resolve_tail_as_value = expected_path_kind.domain().contains(NameDomain::VALUE); + + let res = match resolve_path_with_observer( + self.db, + path, + scope, + resolve_tail_as_value, + &mut check_visibility, + ) { + Ok(res) => res, + + Err(err) => { + let hir_db = self.db.as_hir_db(); + let failed_at = err.failed_at; + let span = ctxt + .span() + .unwrap() + .segment(failed_at.segment_index(hir_db)) + .ident(); + + let Some(ident) = failed_at.ident(hir_db).to_opt() else { + return; + }; + + let diag = match err.kind { + PathResErrorKind::ParseError => unreachable!(), + PathResErrorKind::NotFound(bucket) => { + if path.len(hir_db) == 1 + && matches!( + 
self.path_ctxt.last().unwrap(), + ExpectedPathKind::Expr | ExpectedPathKind::Pat + ) + { + return; + } else if let Some(nr) = bucket.iter_ok().next() { + if path != err.failed_at { + NameResDiag::InvalidPathSegment( + span.into(), + ident, + nr.kind.name_span(self.db), + ) + } else { + match expected_path_kind { + ExpectedPathKind::Record | ExpectedPathKind::Type => { + NameResDiag::ExpectedType( + span.into(), + ident, + nr.kind_name(), + ) + } + ExpectedPathKind::Trait => NameResDiag::ExpectedTrait( + span.into(), + ident, + nr.kind_name(), + ), + ExpectedPathKind::Value => NameResDiag::ExpectedValue( + span.into(), + ident, + nr.kind_name(), + ), + _ => NameResDiag::NotFound(span.into(), ident), + } + } + } else { + NameResDiag::NotFound(span.into(), ident) + } + } + + PathResErrorKind::Ambiguous(cands) => { + NameResDiag::ambiguous(self.db, span.into(), ident, cands) + } + + PathResErrorKind::AssocTy(_) => todo!(), + PathResErrorKind::TraitMethodNotFound(_) => todo!(), + PathResErrorKind::TooManyGenericArgs { expected, given } => { + NameResDiag::TooManyGenericArgs { + span: span.into(), + expected, + given, + } + } + + PathResErrorKind::InvalidPathSegment(res) => { + // res.name_span(db) + NameResDiag::InvalidPathSegment(span.into(), ident, res.name_span(self.db)) + } + + PathResErrorKind::Conflict(spans) => NameResDiag::Conflict(ident, spans), + }; + + self.diags.push(diag); + return; + } + }; + + if let Some((path, deriv_span)) = invisible { + let hir_db = self.db.as_hir_db(); + let span = ctxt + .span() + .unwrap() + .segment(path.segment_index(hir_db)) + .ident(); + + let ident = path.ident(hir_db); + let diag = NameResDiag::Invisible(span.into(), *ident.unwrap(), deriv_span); + self.diags.push(diag); + } + + let is_type = matches!(res, PathRes::Ty(_)); + let is_trait = matches!(res, PathRes::Trait(_)); + + let span = ctxt + .span() + .unwrap() + .segment(path.segment_index(self.db.as_hir_db())) + .into(); + + let ident = 
path.ident(self.db.as_hir_db()).to_opt().unwrap(); + + match expected_path_kind { + ExpectedPathKind::Type if !is_type => { + self.diags + .push(NameResDiag::ExpectedType(span, ident, res.kind_name())) + } + + ExpectedPathKind::Trait if !is_trait => { + self.diags + .push(NameResDiag::ExpectedTrait(span, ident, res.kind_name())) + } + + ExpectedPathKind::Value if is_type || is_trait => self + .diags + .push(NameResDiag::ExpectedValue(span, ident, res.kind_name())), + + _ => {} + } + + walk_path(self, ctxt, path); + } +} + +#[derive(Debug, Clone, Copy, PartialEq)] +enum ExpectedPathKind { + Type, + Trait, + Value, + Record, + Pat, + Expr, +} + +impl ExpectedPathKind { + fn domain(self) -> NameDomain { + match self { + ExpectedPathKind::Type => NameDomain::TYPE, + ExpectedPathKind::Trait => NameDomain::TYPE, + ExpectedPathKind::Value => NameDomain::VALUE, + ExpectedPathKind::Pat | ExpectedPathKind::Record | ExpectedPathKind::Expr => { + NameDomain::VALUE | NameDomain::TYPE + } + } + } +} diff --git a/crates/hir-analysis/src/name_resolution/name_resolver.rs b/crates/hir-analysis/src/name_resolution/name_resolver.rs new file mode 100644 index 0000000000..1c30b1d031 --- /dev/null +++ b/crates/hir-analysis/src/name_resolution/name_resolver.rs @@ -0,0 +1,1122 @@ +use std::{ + cmp, + collections::hash_map::{Entry, IntoValues}, + fmt, mem, +}; + +use bitflags::bitflags; +use hir::{ + hir_def::{ + prim_ty::PrimTy, + scope_graph::{ + AnonEdge, EdgeKind, FieldEdge, GenericParamEdge, IngotEdge, LexEdge, ModEdge, ScopeId, + SelfEdge, SelfTyEdge, SuperEdge, TraitEdge, TypeEdge, ValueEdge, VariantEdge, + }, + Enum, GenericParam, GenericParamOwner, IdentId, ItemKind, Mod, TopLevelMod, Trait, Use, + }, + span::DynLazySpan, +}; +use rustc_hash::{FxHashMap, FxHashSet}; + +use super::{ + import_resolver::Importer, + visibility_checker::{is_scope_visible_from, is_use_visible}, +}; +use crate::HirAnalysisDb; + +#[salsa::interned] +pub struct EarlyNameQueryId<'db> { + /// The name to be 
resolved. + name: IdentId<'db>, + /// The scope where the name is resolved. + scope: ScopeId<'db>, + directive: QueryDirective, +} + +/// The query directive is used to control the name resolution behavior, such as +/// whether to lookup the name in the lexical scope or not. +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] +pub struct QueryDirective { + /// If `allow_lex` is `true`, then the query will be propagated to the + /// lexical scope if the name is not found in the current scope. + allow_lex: bool, + + /// If `allow_external` is `true`, then the query will be propagated to the + /// external ingot and builtin types as well. + allow_external: bool, + + /// If `allow_glob` is `true`, then the resolver uses the glob import to + /// resolve the name. + allow_glob: bool, +} + +impl QueryDirective { + /// Make a new query directive with the default settings. + /// The default setting is to lookup the name in the lexical scope and all + /// imports and external ingots. + pub fn new() -> Self { + Self { + allow_lex: true, + allow_external: true, + allow_glob: true, + } + } + + /// Disallow lexical scope lookup. + pub fn disallow_lex(mut self) -> Self { + self.allow_lex = false; + self + } + + pub(super) fn disallow_external(mut self) -> Self { + self.allow_external = false; + self + } + + pub(super) fn disallow_glob(mut self) -> Self { + self.allow_glob = false; + self + } +} + +impl Default for QueryDirective { + fn default() -> Self { + Self::new() + } +} + +/// The struct contains the lookup result of a name query. +/// The results can contain more than one name resolutions which belong to +/// different name domains. +#[derive(Clone, Debug, Default, PartialEq, Eq)] +pub struct NameResBucket<'db> { + pub(super) bucket: FxHashMap>>, +} + +impl<'db> NameResBucket<'db> { + /// Returns the number of resolutions in the bucket. 
+ pub fn len(&self) -> usize { + self.iter_ok().count() + } + + pub fn is_empty(&self) -> bool { + self.len() == 0 + } + + pub fn iter(&self) -> impl Iterator>> { + self.bucket.values() + } + + pub fn iter_ok(&self) -> impl Iterator> { + self.bucket.values().filter_map(|res| res.as_ref().ok()) + } + + pub fn iter_ok_mut(&mut self) -> impl Iterator> { + self.bucket.values_mut().filter_map(|res| res.as_mut().ok()) + } + + pub fn errors(&self) -> impl Iterator)> { + self.bucket + .iter() + .filter_map(|(domain, res)| res.as_ref().err().map(|err| (*domain, err))) + } + + /// Returns the resolution of the given `domain`. + pub fn pick(&self, domain: NameDomain) -> &NameResolutionResult<'db, NameRes<'db>> { + for domain in domain.iter() { + if let Some(res) = self.bucket.get(&domain) { + return res; + } + } + + &Err(NameResolutionError::NotFound) + } + + pub fn pick_any(&self, from: &[NameDomain]) -> &NameResolutionResult<'db, NameRes<'db>> { + let mut res = &Err(NameResolutionError::NotFound); + for domain in from { + res = self.pick(*domain); + if res.is_ok() { + return res; + } + } + res + } + + pub fn filter_by_domain(&mut self, domain: NameDomain) { + for domain in domain.iter() { + self.bucket.retain(|d, _| *d == domain); + } + } + + pub(super) fn merge(&mut self, bucket: &NameResBucket<'db>) { + for (domain, err) in bucket.errors() { + if let Err(NameResolutionError::NotFound) = self.pick(domain) { + self.bucket.insert(domain, Err(err.clone())); + } + } + for res in bucket.iter_ok() { + self.push(res); + } + } + + pub(super) fn set_derivation(&mut self, derivation: NameDerivation<'db>) { + for res in self.iter_ok_mut() { + res.derivation = derivation.clone(); + } + } + + /// Push the `res` into the set. 
+ fn push(&mut self, res: &NameRes<'db>) { + for domain in res.domain.iter() { + match self.bucket.entry(domain) { + Entry::Occupied(mut e) => { + let old_res = match e.get_mut() { + Ok(res) => res, + Err(NameResolutionError::NotFound) => { + e.insert(Ok(res.clone())).ok(); + return; + } + Err(NameResolutionError::Ambiguous(ambiguous_set)) => { + if ambiguous_set[0].derivation == res.derivation { + ambiguous_set.push(res.clone()); + } + return; + } + Err(_) => { + return; + } + }; + + let old_derivation = old_res.derivation.clone(); + match res.derivation.cmp(&old_derivation) { + cmp::Ordering::Less => {} + cmp::Ordering::Equal => { + if old_res.kind != res.kind { + let old_res_cloned = old_res.clone(); + let res = res.clone(); + e.insert(Err(NameResolutionError::Ambiguous(vec![ + old_res_cloned, + res, + ]))) + .ok(); + } + } + cmp::Ordering::Greater => { + e.insert(Ok(res.clone())).ok(); + } + } + } + + Entry::Vacant(e) => { + e.insert(Ok(res.clone())); + } + } + } + } + + fn set_lexed_derivation(&mut self) { + for res in self.iter_ok_mut() { + res.derivation.lexed() + } + } +} + +impl<'db> IntoIterator for NameResBucket<'db> { + type Item = NameResolutionResult<'db, NameRes<'db>>; + type IntoIter = IntoValues>>; + + fn into_iter(self) -> Self::IntoIter { + self.bucket.into_values() + } +} + +impl<'db> From> for NameResBucket<'db> { + fn from(res: NameRes<'db>) -> Self { + let mut names = FxHashMap::default(); + names.insert(res.domain, Ok(res)); + Self { bucket: names } + } +} + +/// The struct contains the lookup result of a name query. +#[derive(Clone, Debug, PartialEq, Eq, Hash)] +pub struct NameRes<'db> { + /// The kind of the resolution. + pub kind: NameResKind<'db>, + /// The domain of the name resolution. + pub domain: NameDomain, + /// Where the resolution is derived from. (e.g, via `use` or item definition + /// in the same scope). 
+ pub derivation: NameDerivation<'db>, +} + +impl<'db> NameRes<'db> { + /// Returns `true` if the name is visible from the given `scope`. + pub fn is_visible(&self, db: &'db dyn HirAnalysisDb, from: ScopeId<'db>) -> bool { + match self.derivation { + NameDerivation::Def | NameDerivation::Prim | NameDerivation::External => { + match self.kind { + NameResKind::Scope(scope) => is_scope_visible_from(db, scope, from), + NameResKind::Prim(_) => true, + } + } + NameDerivation::NamedImported(use_) | NameDerivation::GlobImported(use_) => { + is_use_visible(db, from, use_) + } + NameDerivation::Lex(ref inner) => { + let mut inner = inner; + while let NameDerivation::Lex(parent) = inner.as_ref() { + inner = parent; + } + + Self { + derivation: inner.as_ref().clone(), + ..self.clone() + } + .is_visible(db, from) + } + } + } + + /// Returns `true` if the resolution is a type. + pub fn is_type(&self) -> bool { + match self.kind { + NameResKind::Prim(_) => true, + NameResKind::Scope(scope) => scope.is_type(), + } + } + + pub fn trait_(&self) -> Option> { + match self.kind { + NameResKind::Scope(ScopeId::Item(ItemKind::Trait(trait_))) => Some(trait_), + _ => None, + } + } + + pub fn enum_variant(&self) -> Option<(ItemKind<'db>, usize)> { + match self.kind { + NameResKind::Scope(ScopeId::Variant(item, idx)) => Some((item, idx)), + _ => None, + } + } + + /// Returns `true` if the resolution is a trait. 
+ pub fn is_trait(&self) -> bool { + self.trait_().is_some() + } + + pub fn is_enum(&self, db: &dyn HirAnalysisDb) -> bool { + match self.kind { + NameResKind::Prim(_) => false, + NameResKind::Scope(scope) => scope.resolve_to::(db.as_hir_db()).is_some(), + } + } + + pub fn is_mod(&self, db: &dyn HirAnalysisDb) -> bool { + match self.kind { + NameResKind::Prim(_) => false, + NameResKind::Scope(scope) => { + scope.resolve_to::(db.as_hir_db()).is_some() + || scope.resolve_to::(db.as_hir_db()).is_some() + } + } + } + + pub fn is_value(&self) -> bool { + !self.is_type() && !self.is_trait() + } + + /// Returns the scope of the name resolution if the name is not a builtin + /// type. + pub fn scope(&self) -> Option> { + match self.kind { + NameResKind::Scope(scope) => Some(scope), + NameResKind::Prim(_) => None, + } + } + + pub fn pretty_path(&self, db: &dyn HirAnalysisDb) -> Option { + match self.kind { + NameResKind::Scope(scope) => scope.pretty_path(db.as_hir_db()), + NameResKind::Prim(prim) => prim + .name(db.as_hir_db()) + .data(db.as_hir_db()) + .clone() + .into(), + } + } + + pub(super) fn derived_from(&self, db: &'db dyn HirAnalysisDb) -> Option> { + match self.derivation { + NameDerivation::Def | NameDerivation::Prim | NameDerivation::External => { + self.kind.name_span(db) + } + NameDerivation::NamedImported(use_) => use_.imported_name_span(db.as_hir_db()), + NameDerivation::GlobImported(use_) => use_.glob_span(db.as_hir_db()), + NameDerivation::Lex(ref inner) => { + let mut inner = inner; + while let NameDerivation::Lex(parent) = inner.as_ref() { + inner = parent; + } + Self { + derivation: inner.as_ref().clone(), + ..self.clone() + } + .derived_from(db) + } + } + } + + pub(super) fn new_from_scope( + scope: ScopeId<'db>, + domain: NameDomain, + derivation: NameDerivation<'db>, + ) -> Self { + Self { + kind: scope.into(), + derivation, + domain, + } + } + + pub(super) fn kind_name(&self) -> &'static str { + match self.kind { + NameResKind::Scope(scope) => 
scope.kind_name(), + NameResKind::Prim(_) => "type", + } + } + + pub(super) fn is_importable(&self) -> bool { + matches!(self.domain, NameDomain::TYPE | NameDomain::VALUE) + } + + fn new_prim(prim: PrimTy) -> Self { + Self { + kind: prim.into(), + derivation: NameDerivation::Prim, + domain: NameDomain::TYPE, + } + } +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, derive_more::From)] +pub enum NameResKind<'db> { + /// The name is resolved to a scope. + Scope(ScopeId<'db>), + /// The name is resolved to a primitive type. + Prim(PrimTy), +} + +impl<'db> NameResKind<'db> { + pub fn name_span(self, db: &'db dyn HirAnalysisDb) -> Option> { + match self { + NameResKind::Scope(scope) => scope.name_span(db.as_hir_db()), + NameResKind::Prim(_) => None, + } + } + + pub fn name(self, db: &'db dyn HirAnalysisDb) -> IdentId<'db> { + match self { + NameResKind::Scope(scope) => scope.name(db.as_hir_db()).unwrap(), + NameResKind::Prim(prim) => prim.name(db.as_hir_db()), + } + } +} + +/// The name derivation indicates where a name resolution comes from. +/// Name derivation is used to track the origin of a resolution, and to +/// determine the shadowing rules. +#[derive(Clone, Debug, PartialEq, Eq, Hash)] +pub enum NameDerivation<'db> { + /// Derived from a definition in the current scope. + Def, + /// Derived from a named import in the current scope. + NamedImported(Use<'db>), + /// Derived from a glob import in the current scope. + GlobImported(Use<'db>), + /// Derived from lexical parent scope. + Lex(Box>), + /// Derived from an external ingot. + External, + /// Derived from a builtin primitive. 
+ Prim, +} + +impl NameDerivation<'_> { + fn lexed(&mut self) { + let inner = mem::replace(self, NameDerivation::Def); + *self = NameDerivation::Lex(Box::new(inner)); + } + + pub fn use_stmt(&self) -> Option> { + match self { + NameDerivation::NamedImported(u) | NameDerivation::GlobImported(u) => Some(*u), + NameDerivation::Lex(deriv) => deriv.use_stmt(), + _ => None, + } + } +} + +impl PartialOrd for NameDerivation<'_> { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} + +impl Ord for NameDerivation<'_> { + fn cmp(&self, other: &Self) -> cmp::Ordering { + match (self, other) { + (NameDerivation::Def, NameDerivation::Def) => cmp::Ordering::Equal, + (NameDerivation::Def, _) => cmp::Ordering::Greater, + (_, NameDerivation::Def) => cmp::Ordering::Less, + + (NameDerivation::NamedImported(_), NameDerivation::NamedImported(_)) => { + cmp::Ordering::Equal + } + (NameDerivation::NamedImported(_), _) => cmp::Ordering::Greater, + (_, NameDerivation::NamedImported(_)) => cmp::Ordering::Less, + + (NameDerivation::GlobImported(_), NameDerivation::GlobImported(_)) => { + cmp::Ordering::Equal + } + (NameDerivation::GlobImported(_), _) => cmp::Ordering::Greater, + (_, NameDerivation::GlobImported(_)) => cmp::Ordering::Less, + + (NameDerivation::Lex(lhs), NameDerivation::Lex(rhs)) => lhs.cmp(rhs), + (NameDerivation::Lex(_), _) => cmp::Ordering::Greater, + (_, NameDerivation::Lex(_)) => cmp::Ordering::Less, + + (NameDerivation::External, NameDerivation::External) => cmp::Ordering::Equal, + (NameDerivation::External, _) => cmp::Ordering::Greater, + (_, NameDerivation::External) => cmp::Ordering::Less, + + (NameDerivation::Prim, NameDerivation::Prim) => cmp::Ordering::Equal, + } + } +} + +pub(crate) struct NameResolver<'db, 'a> { + db: &'db dyn HirAnalysisDb, + importer: &'a dyn Importer<'db>, +} + +impl<'db, 'a> NameResolver<'db, 'a> { + pub(super) fn new(db: &'db dyn HirAnalysisDb, importer: &'a dyn Importer<'db>) -> Self { + Self { db, importer } + } 
+ + pub(crate) fn resolve_query(&mut self, query: EarlyNameQueryId<'db>) -> NameResBucket<'db> { + let hir_db = self.db.as_hir_db(); + + let mut bucket = NameResBucket::default(); + + // The shadowing rule is + // `$ > NamedImports > GlobImports > Lex > external ingot > builtin types`, + // where `$` means current scope. + // This ordering means that greater one shadows lower ones in the same domain. + let mut parent = None; + + // 1. Look for the name in the current scope. + let mut found_scopes = FxHashSet::default(); + for edge in query.scope(self.db).edges(hir_db) { + match edge.kind.propagate(self.db, query) { + PropagationResult::Terminated => { + if found_scopes.insert(edge.dest) { + let res = NameRes::new_from_scope( + edge.dest, + NameDomain::from_scope(self.db, edge.dest), + NameDerivation::Def, + ); + bucket.push(&res); + } + } + + PropagationResult::Continuation => { + debug_assert!(parent.is_none()); + parent = Some(edge.dest); + } + + PropagationResult::UnPropagated => {} + } + } + + // 2. Look for the name in the named imports of the current scope. + if let Some(imported) = self + .importer + .named_imports(self.db, query.scope(self.db)) + .and_then(|imports| imports.get(&query.name(self.db))) + { + bucket.merge(imported); + } + + // 3. Look for the name in the glob imports. + if query.directive(self.db).allow_glob { + if let Some(imported) = self.importer.glob_imports(self.db, query.scope(self.db)) { + for res in imported.name_res_for(query.name(self.db)) { + bucket.push(res); + } + } + } + + // 4. Look for the name in the lexical scope if it exists. + if let Some(parent) = parent { + let directive = query.directive(self.db).disallow_external(); + let query_for_parent = + EarlyNameQueryId::new(self.db, query.name(self.db), parent, directive); + + let mut resolved = self.resolve_query(query_for_parent); + resolved.set_lexed_derivation(); + bucket.merge(&resolved); + } + + if !query.directive(self.db).allow_external { + return bucket; + } + + // 5. 
Look for the name in the external ingots. + query + .scope(self.db) + .top_mod(hir_db) + .ingot(hir_db) + .external_ingots(hir_db) + .iter() + .for_each(|(name, ingot)| { + if *name == query.name(self.db) { + // We don't care about the result of `push` because we assume ingots are + // guaranteed to be unique. + bucket.push(&NameRes::new_from_scope( + ScopeId::from_item((ingot.root_mod(hir_db)).into()), + NameDomain::TYPE, + NameDerivation::External, + )) + } + }); + + // 6. Look for the name in the builtin types. + for &prim in PrimTy::all_types() { + // We don't care about the result of `push` because we assume builtin types are + // guaranteed to be unique. + if query.name(self.db) == prim.name(self.db.as_hir_db()) { + bucket.push(&NameRes::new_prim(prim)); + } + } + + bucket + } + + /// Collect all visible resolutions in the given `target` scope. + /// + /// The function follows the shadowing rule, meaning the same name in the + /// same domain is properly shadowed. Also, this function guarantees that + /// the collected resolutions are unique in terms of its name and resolved + /// scope. + /// + /// On the other hand, the function doesn't cause any error and collect all + /// resolutions even if they are in the same domain. The reason + /// for this is + /// - Ambiguous error should be reported lazily, meaning it should be + /// reported when the resolution is actually used. + /// - The function is used for glob imports, so it's necessary to return + /// monotonously increasing results. Also, we can't arbitrarily choose the + /// possible resolution from multiple candidates to avoid hiding + /// ambiguity. That's also the reason why we can't use [`ResBucket`] and + /// [`ResBucket::merge`] in this function. + /// + /// The below examples demonstrates the second point. 
+ /// We need to report ambiguous error at `const C: S = S` because `S` is + /// ambiguous, on the other hand, we need NOT to report ambiguous error in + /// `foo` modules because `S` is not referred to in the module. + /// + /// ```fe + /// use foo::* + /// const C: S = S + /// + /// mod foo { + /// pub use inner1::* + /// pub use inner2::* + /// + /// mod inner1 { + /// pub struct S {} + /// } + /// mod inner2 { + /// pub struct S {} + /// } + /// } + /// ``` + pub(super) fn collect_all_resolutions_for_glob( + &mut self, + target: ScopeId<'db>, + use_scope: ScopeId<'db>, + unresolved_named_imports: FxHashSet>, + ) -> FxHashMap, Vec>> { + let mut res_collection: FxHashMap> = FxHashMap::default(); + let mut found_domains: FxHashMap = FxHashMap::default(); + let mut found_kinds: FxHashSet<(IdentId, NameResKind)> = FxHashSet::default(); + + for edge in target.edges(self.db.as_hir_db()) { + let scope = match edge.kind.propagate_glob() { + PropagationResult::Terminated => edge.dest, + _ => { + continue; + } + }; + + let name = scope.name(self.db.as_hir_db()).unwrap(); + if !found_kinds.insert((name, scope.into())) { + continue; + } + let res = NameRes::new_from_scope( + scope, + NameDomain::from_scope(self.db, scope), + NameDerivation::Def, + ); + + if res.is_visible(self.db, use_scope) { + *found_domains.entry(name).or_default() |= res.domain; + res_collection.entry(name).or_default().push(res); + } + } + + let mut found_domains_after_named = found_domains.clone(); + if let Some(named_imports) = self.importer.named_imports(self.db, target) { + for (&name, import) in named_imports { + let found_domain = found_domains.get(&name).copied().unwrap_or_default(); + for res in import + .iter_ok() + .filter(|res| res.is_visible(self.db, use_scope)) + { + if (found_domain & res.domain != NameDomain::Invalid) + || !found_kinds.insert((name, res.kind)) + { + continue; + } + + *found_domains_after_named.entry(name).or_default() |= res.domain; + 
res_collection.entry(name).or_default().push(res.clone()); + } + } + } + + if let Some(glob_imports) = self.importer.glob_imports(self.db, target) { + for (_, resolutions) in glob_imports.iter() { + // if !is_use_visible(self.db, ref_scope, use_) { + // continue; + // } + for (&name, res_for_name) in resolutions.iter() { + if unresolved_named_imports.contains(&name) { + continue; + } + + for res in res_for_name + .iter() + .filter(|res| res.is_visible(self.db, use_scope)) + { + let seen_domain = found_domains_after_named + .get(&name) + .copied() + .unwrap_or_default(); + + if (seen_domain & res.domain != NameDomain::Invalid) + || !found_kinds.insert((name, res.kind)) + { + continue; + } + res_collection.entry(name).or_default().push(res.clone()); + } + } + } + } + + res_collection + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum NameResolutionError<'db> { + /// The name is not found. + NotFound, + + /// The name is invalid in parsing. Basically, no need to report it because + /// the error is already emitted from parsing phase. + Invalid, + + /// The name is found, but it's not visible from the reference site. + Invisible(Option>), + + /// The name is found, but it's ambiguous. + Ambiguous(Vec>), + + /// The name is found, but it can't be used in the middle of a use path. + InvalidPathSegment(NameRes<'db>), + + /// The definition conflicts with other definitions. 
+ Conflict(IdentId<'db>, Vec>), +} + +pub type NameResolutionResult<'db, T> = Result>; + +impl fmt::Display for NameResolutionError<'_> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + NameResolutionError::NotFound => write!(f, "name not found"), + NameResolutionError::Invalid => write!(f, "invalid name"), + NameResolutionError::Invisible(_) => write!(f, "name is not visible"), + NameResolutionError::Ambiguous(_) => write!(f, "name is ambiguous"), + NameResolutionError::InvalidPathSegment(_) => write!( + f, + "the found resolution can't be used in the middle of a path" + ), + NameResolutionError::Conflict(_, _) => write!(f, "name conflicts with other names"), + } + } +} + +impl std::error::Error for NameResolutionError<'_> {} + +bitflags! { + #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] + /// Each resolved name is associated with a domain that indicates which domain + /// the name belongs to. + /// The multiple same names can be introduced in a same scope as long as they + /// are in different domains. + /// + /// E.g., A `Foo` in the below example can be introduced in the same scope as a + /// type and variant at the same time. + /// ```fe + /// struct Foo {} + /// enum MyEnum { + /// Foo + /// } + /// use MyEnum::Foo + /// ``` + pub struct NameDomain: u8 + { + const TYPE = 0b00000001; + const VALUE = 0b00000010; + const FIELD = 0b100; + const Invalid = 0b0; + } +} + +#[allow(non_upper_case_globals)] +impl NameDomain { + pub(super) fn from_scope(db: &dyn HirAnalysisDb, scope: ScopeId) -> Self { + match scope { + ScopeId::Item(ItemKind::Func(_) | ItemKind::Const(_)) + | ScopeId::FuncParam(..) + | ScopeId::Block(..) 
=> Self::VALUE, + ScopeId::Item(_) => Self::TYPE, + ScopeId::GenericParam(parent, idx) => { + let parent = GenericParamOwner::from_item_opt(parent).unwrap(); + + let param = &parent.params(db.as_hir_db()).data(db.as_hir_db())[idx]; + match param { + GenericParam::Type(_) => NameDomain::TYPE, + GenericParam::Const(_) => NameDomain::TYPE | NameDomain::VALUE, + } + } + ScopeId::Field(..) => Self::FIELD, + ScopeId::Variant(..) => Self::VALUE, + } + } +} + +impl Default for NameDomain { + fn default() -> Self { + Self::Invalid + } +} + +/// The propagator controls how the name query is propagated to the next scope. +trait QueryPropagator<'db> { + fn propagate( + self, + db: &'db dyn HirAnalysisDb, + query: EarlyNameQueryId<'db>, + ) -> PropagationResult; + fn propagate_glob(self) -> PropagationResult; +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +enum PropagationResult { + /// The query is resolved to the next scope(edge's destination). + Terminated, + /// The query resolution should be continued, i.e., the query is propagated + /// to the next scope and the next scope should be searched for the query. + Continuation, + /// The query can't be propagated to the next scope. 
+ UnPropagated, +} + +impl<'db> QueryPropagator<'db> for LexEdge { + fn propagate( + self, + db: &'db dyn HirAnalysisDb, + query: EarlyNameQueryId<'db>, + ) -> PropagationResult { + if query.directive(db).allow_lex { + PropagationResult::Continuation + } else { + PropagationResult::UnPropagated + } + } + + fn propagate_glob(self) -> PropagationResult { + PropagationResult::UnPropagated + } +} + +impl<'db> QueryPropagator<'db> for ModEdge<'db> { + fn propagate( + self, + db: &'db dyn HirAnalysisDb, + query: EarlyNameQueryId<'db>, + ) -> PropagationResult { + if self.0 == query.name(db) { + PropagationResult::Terminated + } else { + PropagationResult::UnPropagated + } + } + + fn propagate_glob(self) -> PropagationResult { + PropagationResult::Terminated + } +} + +impl<'db> QueryPropagator<'db> for TypeEdge<'db> { + fn propagate( + self, + db: &'db dyn HirAnalysisDb, + query: EarlyNameQueryId<'db>, + ) -> PropagationResult { + if self.0 == query.name(db) { + PropagationResult::Terminated + } else { + PropagationResult::UnPropagated + } + } + + fn propagate_glob(self) -> PropagationResult { + PropagationResult::Terminated + } +} + +impl<'db> QueryPropagator<'db> for TraitEdge<'db> { + fn propagate( + self, + db: &'db dyn HirAnalysisDb, + query: EarlyNameQueryId<'db>, + ) -> PropagationResult { + if self.0 == query.name(db) { + PropagationResult::Terminated + } else { + PropagationResult::UnPropagated + } + } + + fn propagate_glob(self) -> PropagationResult { + PropagationResult::Terminated + } +} + +impl<'db> QueryPropagator<'db> for ValueEdge<'db> { + fn propagate( + self, + db: &'db dyn HirAnalysisDb, + query: EarlyNameQueryId<'db>, + ) -> PropagationResult { + if self.0 == query.name(db) { + PropagationResult::Terminated + } else { + PropagationResult::UnPropagated + } + } + + fn propagate_glob(self) -> PropagationResult { + PropagationResult::Terminated + } +} + +impl<'db> QueryPropagator<'db> for GenericParamEdge<'db> { + fn propagate( + self, + db: &'db dyn 
HirAnalysisDb, + query: EarlyNameQueryId<'db>, + ) -> PropagationResult { + if self.0 == query.name(db) { + PropagationResult::Terminated + } else { + PropagationResult::UnPropagated + } + } + + fn propagate_glob(self) -> PropagationResult { + PropagationResult::UnPropagated + } +} + +impl<'db> QueryPropagator<'db> for FieldEdge<'db> { + fn propagate( + self, + db: &'db dyn HirAnalysisDb, + query: EarlyNameQueryId<'db>, + ) -> PropagationResult { + if self.0 == query.name(db) { + PropagationResult::Terminated + } else { + PropagationResult::UnPropagated + } + } + + fn propagate_glob(self) -> PropagationResult { + PropagationResult::UnPropagated + } +} + +impl<'db> QueryPropagator<'db> for VariantEdge<'db> { + fn propagate( + self, + db: &'db dyn HirAnalysisDb, + query: EarlyNameQueryId<'db>, + ) -> PropagationResult { + if self.0 == query.name(db) { + PropagationResult::Terminated + } else { + PropagationResult::UnPropagated + } + } + + fn propagate_glob(self) -> PropagationResult { + PropagationResult::Terminated + } +} + +impl<'db> QueryPropagator<'db> for SuperEdge { + fn propagate( + self, + db: &'db dyn HirAnalysisDb, + query: EarlyNameQueryId<'db>, + ) -> PropagationResult { + if query.name(db).is_super(db.as_hir_db()) { + PropagationResult::Terminated + } else { + PropagationResult::UnPropagated + } + } + + fn propagate_glob(self) -> PropagationResult { + PropagationResult::UnPropagated + } +} + +impl<'db> QueryPropagator<'db> for IngotEdge { + fn propagate( + self, + db: &'db dyn HirAnalysisDb, + query: EarlyNameQueryId<'db>, + ) -> PropagationResult { + if query.name(db).is_ingot(db.as_hir_db()) { + PropagationResult::Terminated + } else { + PropagationResult::UnPropagated + } + } + + fn propagate_glob(self) -> PropagationResult { + PropagationResult::UnPropagated + } +} + +impl<'db> QueryPropagator<'db> for SelfTyEdge { + fn propagate( + self, + db: &'db dyn HirAnalysisDb, + query: EarlyNameQueryId<'db>, + ) -> PropagationResult { + if 
query.name(db).is_self_ty(db.as_hir_db()) { + PropagationResult::Terminated + } else { + PropagationResult::UnPropagated + } + } + + fn propagate_glob(self) -> PropagationResult { + PropagationResult::UnPropagated + } +} + +impl<'db> QueryPropagator<'db> for SelfEdge { + fn propagate( + self, + db: &'db dyn HirAnalysisDb, + query: EarlyNameQueryId<'db>, + ) -> PropagationResult { + if query.name(db).is_self(db.as_hir_db()) { + PropagationResult::Terminated + } else { + PropagationResult::UnPropagated + } + } + + fn propagate_glob(self) -> PropagationResult { + PropagationResult::UnPropagated + } +} + +impl<'db> QueryPropagator<'db> for AnonEdge { + fn propagate( + self, + _db: &'db dyn HirAnalysisDb, + _query: EarlyNameQueryId<'db>, + ) -> PropagationResult { + PropagationResult::UnPropagated + } + + fn propagate_glob(self) -> PropagationResult { + PropagationResult::UnPropagated + } +} + +impl<'db> QueryPropagator<'db> for EdgeKind<'db> { + fn propagate( + self, + db: &'db dyn HirAnalysisDb, + query: EarlyNameQueryId<'db>, + ) -> PropagationResult { + match self { + EdgeKind::Lex(edge) => edge.propagate(db, query), + EdgeKind::Mod(edge) => edge.propagate(db, query), + EdgeKind::Type(edge) => edge.propagate(db, query), + EdgeKind::Trait(edge) => edge.propagate(db, query), + EdgeKind::GenericParam(edge) => edge.propagate(db, query), + EdgeKind::Value(edge) => edge.propagate(db, query), + EdgeKind::Field(edge) => edge.propagate(db, query), + EdgeKind::Variant(edge) => edge.propagate(db, query), + EdgeKind::Super(edge) => edge.propagate(db, query), + EdgeKind::Ingot(edge) => edge.propagate(db, query), + EdgeKind::Self_(edge) => edge.propagate(db, query), + EdgeKind::SelfTy(edge) => edge.propagate(db, query), + EdgeKind::Anon(edge) => edge.propagate(db, query), + } + } + + fn propagate_glob(self) -> PropagationResult { + match self { + EdgeKind::Lex(edge) => edge.propagate_glob(), + EdgeKind::Mod(edge) => edge.propagate_glob(), + EdgeKind::Type(edge) => 
edge.propagate_glob(), + EdgeKind::Trait(edge) => edge.propagate_glob(), + EdgeKind::GenericParam(edge) => edge.propagate_glob(), + EdgeKind::Value(edge) => edge.propagate_glob(), + EdgeKind::Field(edge) => edge.propagate_glob(), + EdgeKind::Variant(edge) => edge.propagate_glob(), + EdgeKind::Super(edge) => edge.propagate_glob(), + EdgeKind::Ingot(edge) => edge.propagate_glob(), + EdgeKind::Self_(edge) => edge.propagate_glob(), + EdgeKind::SelfTy(edge) => edge.propagate_glob(), + EdgeKind::Anon(edge) => edge.propagate_glob(), + } + } +} diff --git a/crates/hir-analysis/src/name_resolution/path_resolver.rs b/crates/hir-analysis/src/name_resolution/path_resolver.rs new file mode 100644 index 0000000000..aa00ca786a --- /dev/null +++ b/crates/hir-analysis/src/name_resolution/path_resolver.rs @@ -0,0 +1,518 @@ +use hir::{ + hir_def::{ + scope_graph::ScopeId, Enum, ItemKind, Partial, PathId, TypeId, VariantDef, VariantKind, + }, + span::DynLazySpan, +}; + +use super::{ + is_scope_visible_from, + name_resolver::{NameRes, NameResBucket, NameResolutionError}, + resolve_query, + visibility_checker::is_ty_visible_from, + EarlyNameQueryId, NameDomain, +}; +use crate::{ + name_resolution::{NameResKind, QueryDirective}, + ty::{ + adt_def::{lower_adt, AdtRef, AdtRefId}, + func_def::lower_func, + trait_def::TraitDef, + trait_lower::lower_trait, + ty_def::{InvalidCause, TyId}, + ty_lower::{ + collect_generic_params, lower_generic_arg_list, lower_hir_ty, lower_type_alias, + GenericParamOwnerId, + }, + }, + HirAnalysisDb, +}; + +pub type PathResolutionResult<'db, T> = Result>; + +#[derive(Debug)] +pub struct PathResError<'db> { + pub kind: PathResErrorKind<'db>, + pub failed_at: PathId<'db>, +} + +#[derive(Debug)] +pub enum PathResErrorKind<'db> { + /// The name is not found. + NotFound(NameResBucket<'db>), + + /// The name is invalid in parsing. Basically, no need to report it because + /// the error is already emitted from parsing phase. 
+ ParseError, + + /// The name is found, but it's ambiguous. + Ambiguous(Vec>), + + /// The name is found, but it can't be used in the middle of a use path. + InvalidPathSegment(PathRes<'db>), + + /// The definition conflicts with other definitions. + Conflict(Vec>), + + TooManyGenericArgs { + expected: usize, + given: usize, + }, + + TraitMethodNotFound(TraitDef<'db>), + + AssocTy(TyId<'db>), // TyId is parent type. +} + +impl<'db> PathResError<'db> { + pub fn new(kind: PathResErrorKind<'db>, failed_at: PathId<'db>) -> Self { + Self { kind, failed_at } + } + + pub fn not_found(path: PathId<'db>, bucket: NameResBucket<'db>) -> Self { + Self::new(PathResErrorKind::NotFound(bucket), path) + } + + pub fn parse_err(path: PathId<'db>) -> Self { + Self::new(PathResErrorKind::ParseError, path) + } + + pub fn from_name_res_error(err: NameResolutionError<'db>, path: PathId<'db>) -> Self { + let kind = match err { + NameResolutionError::NotFound => PathResErrorKind::NotFound(NameResBucket::default()), + NameResolutionError::Invalid => PathResErrorKind::ParseError, + NameResolutionError::Ambiguous(vec) => PathResErrorKind::Ambiguous(vec), + NameResolutionError::Conflict(_ident, vec) => PathResErrorKind::Conflict(vec), + NameResolutionError::Invisible(_) => unreachable!(), + NameResolutionError::InvalidPathSegment(_) => unreachable!(), + }; + Self::new(kind, path) + } + + pub fn print(&self) -> String { + match &self.kind { + PathResErrorKind::NotFound(_) => "Not found".to_string(), + PathResErrorKind::ParseError => "Parse error".to_string(), + PathResErrorKind::Ambiguous(v) => format!("Ambiguous; {} options.", v.len()), + PathResErrorKind::InvalidPathSegment(_) => "Invalid path segment".to_string(), + PathResErrorKind::Conflict(..) 
=> "Conflicting definitions".to_string(), + PathResErrorKind::TooManyGenericArgs { + expected, + given: actual, + } => { + format!("Incorrect number of generic args; expected {expected}, given {actual}.") + } + PathResErrorKind::TraitMethodNotFound(_) => "Trait method not found".to_string(), + PathResErrorKind::AssocTy(_) => "Types cannot be nested inside other types".to_string(), + } + } +} + +/// Panics if `path` has more than one segment. +pub fn resolve_ident_to_bucket<'db>( + db: &'db dyn HirAnalysisDb, + path: PathId<'db>, + scope: ScopeId<'db>, +) -> &'db NameResBucket<'db> { + assert!(path.parent(db.as_hir_db()).is_none()); + let query = make_query(db, path, scope); + resolve_query(db, query) +} + +/// Panics if path.ident is `Absent` +fn make_query<'db>( + db: &'db dyn HirAnalysisDb, + path: PathId<'db>, + scope: ScopeId<'db>, +) -> EarlyNameQueryId<'db> { + let mut directive = QueryDirective::new(); + + if path.segment_index(db.as_hir_db()) != 0 { + directive = directive.disallow_external(); + directive = directive.disallow_lex(); + } + + let name = *path.ident(db.as_hir_db()).unwrap(); + EarlyNameQueryId::new(db, name, scope, directive) +} + +#[derive(Debug, Clone)] +pub enum PathRes<'db> { + Ty(TyId<'db>), + Func(TyId<'db>), + FuncParam(ItemKind<'db>, usize), + Trait(TraitDef<'db>), + EnumVariant(ResolvedVariant<'db>), + Const(TyId<'db>), + Mod(ScopeId<'db>), + TypeMemberTbd(TyId<'db>), +} + +impl<'db> PathRes<'db> { + pub fn map_over_ty(self, mut f: F) -> Self + where + F: FnMut(TyId<'db>) -> TyId<'db>, + { + match self { + PathRes::Ty(ty) => PathRes::Ty(f(ty)), + PathRes::Func(ty) => PathRes::Func(f(ty)), + PathRes::Const(ty) => PathRes::Const(f(ty)), + PathRes::EnumVariant(v) => { + PathRes::EnumVariant(ResolvedVariant::new(f(v.ty), v.idx, v.path)) + } + PathRes::TypeMemberTbd(parent_ty) => PathRes::TypeMemberTbd(f(parent_ty)), + r @ (PathRes::Trait(_) | PathRes::Mod(_) | PathRes::FuncParam(..)) => r, + } + } + + pub fn as_scope(&self, db: &'db dyn 
HirAnalysisDb) -> Option> { + match self { + PathRes::Ty(ty) + | PathRes::Func(ty) + | PathRes::Const(ty) + | PathRes::TypeMemberTbd(ty) => ty.as_scope(db), + PathRes::Trait(trait_) => Some(trait_.trait_(db).scope()), + PathRes::EnumVariant(variant) => Some(variant.enum_(db).scope()), + PathRes::FuncParam(item, idx) => Some(ScopeId::FuncParam(*item, *idx)), + PathRes::Mod(scope) => Some(*scope), + } + } + + pub fn is_visible_from(&self, db: &'db dyn HirAnalysisDb, from_scope: ScopeId<'db>) -> bool { + match self { + PathRes::Ty(ty) + | PathRes::Func(ty) + | PathRes::Const(ty) + | PathRes::TypeMemberTbd(ty) => is_ty_visible_from(db, *ty, from_scope), + r => is_scope_visible_from(db, r.as_scope(db).unwrap(), from_scope), + } + } + + pub fn name_span(&self, db: &'db dyn HirAnalysisDb) -> Option> { + self.as_scope(db)?.name_span(db.as_hir_db()) + } + + pub fn pretty_path(&self, db: &'db dyn HirAnalysisDb) -> Option { + let hir_db = db.as_hir_db(); + + let ty_path = |ty: TyId<'db>| { + if let Some(scope) = ty.as_scope(db) { + scope.pretty_path(hir_db) + } else { + Some(ty.pretty_print(db).to_string()) + } + }; + + match self { + PathRes::Ty(ty) | PathRes::Func(ty) | PathRes::Const(ty) => ty_path(*ty), + + PathRes::EnumVariant(v) => { + let variant_idx = v.idx; + Some(format!( + "{}::{}", + ty_path(v.ty).unwrap_or_else(|| "".into()), + v.enum_(db).variants(db.as_hir_db()).data(db.as_hir_db())[variant_idx] + .name + .to_opt()? + .data(db.as_hir_db()) + )) + } + r @ (PathRes::Trait(..) | PathRes::Mod(..) | PathRes::FuncParam(..)) => { + r.as_scope(db).unwrap().pretty_path(db.as_hir_db()) + } + PathRes::TypeMemberTbd(parent_ty) => Some(format!( + "", + ty_path(*parent_ty).unwrap_or_else(|| "".into()) + )), + } + } + + pub fn kind_name(&self) -> &'static str { + match self { + PathRes::Ty(_) => "type", + PathRes::Func(_) => "function", + PathRes::FuncParam(..) 
=> "function parameter", + PathRes::Trait(_) => "trait", + PathRes::EnumVariant(_) => "enum variant", + PathRes::Const(_) => "constant", + PathRes::Mod(_) => "module", + PathRes::TypeMemberTbd(_) => "method", + } + } +} + +#[derive(Clone, Debug)] +pub struct ResolvedVariant<'db> { + pub ty: TyId<'db>, + pub idx: usize, + pub path: PathId<'db>, +} + +impl<'db> ResolvedVariant<'db> { + pub fn variant_def(&self, db: &'db dyn HirAnalysisDb) -> &'db VariantDef<'db> { + &self.enum_(db).variants(db.as_hir_db()).data(db.as_hir_db())[self.idx] + } + + pub fn variant_kind(&self, db: &'db dyn HirAnalysisDb) -> VariantKind<'db> { + self.variant_def(db).kind + } + + pub fn enum_(&self, db: &'db dyn HirAnalysisDb) -> Enum<'db> { + let AdtRef::Enum(enum_) = self.ty.adt_ref(db).unwrap().data(db) else { + unreachable!() + }; + enum_ + } + + pub fn new(ty: TyId<'db>, idx: usize, path: PathId<'db>) -> Self { + Self { ty, idx, path } + } +} + +pub fn resolve_path<'db>( + db: &'db dyn HirAnalysisDb, + path: PathId<'db>, + scope: ScopeId<'db>, + resolve_tail_as_value: bool, +) -> PathResolutionResult<'db, PathRes<'db>> { + resolve_path_impl(db, path, scope, resolve_tail_as_value, true, &mut |_, _| {}) +} + +pub fn resolve_path_with_observer<'db, F>( + db: &'db dyn HirAnalysisDb, + path: PathId<'db>, + scope: ScopeId<'db>, + resolve_tail_as_value: bool, + observer: &mut F, +) -> PathResolutionResult<'db, PathRes<'db>> +where + F: FnMut(PathId<'db>, &PathRes<'db>), +{ + resolve_path_impl(db, path, scope, resolve_tail_as_value, true, observer) +} + +fn resolve_path_impl<'db, F>( + db: &'db dyn HirAnalysisDb, + path: PathId<'db>, + scope: ScopeId<'db>, + resolve_tail_as_value: bool, + is_tail: bool, + observer: &mut F, +) -> PathResolutionResult<'db, PathRes<'db>> +where + F: FnMut(PathId<'db>, &PathRes<'db>), +{ + let hir_db = db.as_hir_db(); + + let parent_res = path + .parent(hir_db) + .map(|path| resolve_path_impl(db, path, scope, resolve_tail_as_value, false, observer)) + 
.transpose()?; + + if !path.ident(hir_db).is_present() { + return Err(PathResError::parse_err(path)); + } + + let parent_scope = parent_res + .as_ref() + .and_then(|r| r.as_scope(db)) + .unwrap_or(scope); + + match parent_res { + Some(PathRes::Ty(ty)) => { + // Try to resolve as an enum variant + if let Some(enum_) = ty.as_enum(db) { + // We need to use the concrete enum scope instead of + // parent_scope to resolve the variants in all cases, + // eg when parent is `Self`. I'm not really sure why this is. + let query = make_query(db, path, enum_.scope()); + let bucket = resolve_query(db, query); + + if let Ok(res) = bucket.pick(NameDomain::VALUE) { + if let Some((_, idx)) = res.enum_variant() { + let reso = PathRes::EnumVariant(ResolvedVariant::new(ty, idx, path)); + observer(path, &reso); + return Ok(reso); + } + } + } + if is_tail { + let r = PathRes::TypeMemberTbd(ty); + observer(path, &r); + return Ok(r); + } else { + todo!() // assoc type error + } + } + + Some(PathRes::Func(_) | PathRes::EnumVariant(..)) => { + return Err(PathResError::new( + PathResErrorKind::InvalidPathSegment(parent_res.unwrap()), + path, + )); + } + Some(PathRes::TypeMemberTbd(_) | PathRes::FuncParam(..)) => unreachable!(), + Some(PathRes::Const(_) | PathRes::Mod(_) | PathRes::Trait(_)) | None => {} + }; + + let query = make_query(db, path, parent_scope); + let bucket = resolve_query(db, query); + + let res = if is_tail && resolve_tail_as_value { + match bucket.pick(NameDomain::VALUE) { + Ok(res) => res.clone(), + Err(_) => pick_type_domain_from_bucket(bucket, path)?, + } + } else { + pick_type_domain_from_bucket(bucket, path)? 
+ }; + let reso = resolve_name_res(db, &res, parent_res, path, scope)?; + + observer(path, &reso); + Ok(reso) +} + +pub fn resolve_name_res<'db>( + db: &'db dyn HirAnalysisDb, + nameres: &NameRes<'db>, + parent_ty: Option>, + path: PathId<'db>, + scope: ScopeId<'db>, +) -> PathResolutionResult<'db, PathRes<'db>> { + let hir_db = db.as_hir_db(); + + let args = &lower_generic_arg_list(db, path.generic_args(hir_db), scope); + let res = match nameres.kind { + NameResKind::Prim(prim) => { + let ty = TyId::from_hir_prim_ty(db, prim); + PathRes::Ty(TyId::foldl(db, ty, args)) + } + NameResKind::Scope(scope_id) => match scope_id { + ScopeId::Item(item) => match item { + ItemKind::Struct(_) | ItemKind::Contract(_) | ItemKind::Enum(_) => { + let adt_ref = AdtRefId::try_from_item(db, item).unwrap(); + PathRes::Ty(ty_from_adtref(db, adt_ref, args)?) + } + + ItemKind::TopMod(_) | ItemKind::Mod(_) => PathRes::Mod(scope_id), + + ItemKind::Func(func) => { + let func_def = lower_func(db, func).unwrap(); + let ty = TyId::func(db, func_def); + PathRes::Func(TyId::foldl(db, ty, args)) + } + ItemKind::Const(const_) => { + // TODO err if any args + let ty = if let Some(ty) = const_.ty(hir_db).to_opt() { + lower_hir_ty(db, ty, scope) + } else { + TyId::invalid(db, InvalidCause::Other) + }; + PathRes::Const(ty) + } + + ItemKind::TypeAlias(type_alias) => { + let Ok(alias) = lower_type_alias(db, type_alias) else { + // Type alias cycle error reported in `def_analysis.rs` + return Ok(PathRes::Ty(TyId::invalid(db, InvalidCause::Other))); + }; + + if args.len() < alias.params(db).len() { + PathRes::Ty(TyId::invalid( + db, + InvalidCause::UnboundTypeAliasParam { + alias: type_alias, + n_given_args: args.len(), + }, + )) + } else { + PathRes::Ty(alias.alias_to.instantiate(db, args)) + } + } + + ItemKind::Impl(impl_) => { + PathRes::Ty(impl_typeid_to_ty(db, path, impl_.ty(hir_db), scope, args)?) 
+ } + ItemKind::ImplTrait(impl_) => { + PathRes::Ty(impl_typeid_to_ty(db, path, impl_.ty(hir_db), scope, args)?) + } + + ItemKind::Trait(t) => { + if path.is_self_ty(hir_db) { + let params = + collect_generic_params(db, GenericParamOwnerId::new(db, t.into())); + let ty = params.trait_self(db).unwrap(); + let ty = TyId::foldl(db, ty, args); + PathRes::Ty(ty) + } else { + PathRes::Trait(lower_trait(db, t)) + } + } + + ItemKind::Use(_) | ItemKind::Body(_) => unreachable!(), + }, + ScopeId::GenericParam(parent, idx) => { + let owner = GenericParamOwnerId::from_item_opt(db, parent).unwrap(); + let param_set = collect_generic_params(db, owner); + let ty = param_set.param_by_original_idx(db, idx).unwrap(); + let ty = TyId::foldl(db, ty, args); + PathRes::Ty(ty) + } + + ScopeId::Variant(enum_, idx) => { + let enum_ty = if let Some(PathRes::Ty(ty)) = parent_ty { + ty + } else { + // The variant was imported via `use`. + debug_assert!(path.parent(hir_db).is_none()); + let enum_: Enum = enum_.try_into().unwrap(); + ty_from_adtref(db, AdtRefId::from_enum(db, enum_), &[])? + }; + // TODO report error if args isn't empty + PathRes::EnumVariant(ResolvedVariant::new(enum_ty, idx, path)) + } + ScopeId::FuncParam(item, idx) => PathRes::FuncParam(item, idx), + ScopeId::Field(..) => unreachable!(), + ScopeId::Block(..) => unreachable!(), + }, + }; + Ok(res) +} + +fn impl_typeid_to_ty<'db>( + db: &'db dyn HirAnalysisDb, + path: PathId<'db>, + hir_ty: Partial>, + scope: ScopeId<'db>, + args: &[TyId<'db>], +) -> PathResolutionResult<'db, TyId<'db>> { + if let Some(hir_ty) = hir_ty.to_opt() { + let ty = lower_hir_ty(db, hir_ty, scope); // root scope! 
+ Ok(TyId::foldl(db, ty, args)) + } else { + Err(PathResError::parse_err(path)) + } +} + +fn ty_from_adtref<'db>( + db: &'db dyn HirAnalysisDb, + adt_ref: AdtRefId<'db>, + args: &[TyId<'db>], +) -> PathResolutionResult<'db, TyId<'db>> { + let adt = lower_adt(db, adt_ref); + let ty = TyId::adt(db, adt); + Ok(TyId::foldl(db, ty, args)) +} + +fn pick_type_domain_from_bucket<'db>( + bucket: &NameResBucket<'db>, + path: PathId<'db>, +) -> PathResolutionResult<'db, NameRes<'db>> { + bucket + .pick(NameDomain::TYPE) + .clone() + .map_err(|err| match err { + NameResolutionError::NotFound => PathResError::not_found(path, bucket.clone()), + err => PathResError::from_name_res_error(err, path), + }) +} diff --git a/crates/hir-analysis/src/name_resolution/traits_in_scope.rs b/crates/hir-analysis/src/name_resolution/traits_in_scope.rs new file mode 100644 index 0000000000..f62f6575d0 --- /dev/null +++ b/crates/hir-analysis/src/name_resolution/traits_in_scope.rs @@ -0,0 +1,114 @@ +use hir::hir_def::{scope_graph::ScopeId, Body, ExprId, ItemKind, Mod, TopLevelMod, Trait}; +use rustc_hash::FxHashSet; + +use crate::{name_resolution::resolve_imports, HirAnalysisDb}; + +/// Returns the all traits that are available in the given scope. 
+pub fn available_traits_in_scope<'db>( + db: &'db dyn HirAnalysisDb, + scope: ScopeId<'db>, +) -> &'db FxHashSet> { + let scope_kind = TraitScopeKind::from_scope(db, scope); + let trait_scope = TraitScope::new(db, scope_kind); + available_traits_in_scope_impl(db, trait_scope) +} + +#[salsa::tracked(return_ref)] +pub(crate) fn available_traits_in_scope_impl<'db>( + db: &'db dyn HirAnalysisDb, + t_scope: TraitScope<'db>, +) -> FxHashSet> { + let mut traits = FxHashSet::default(); + let scope = t_scope.inner(db).to_scope(); + + let imports = &resolve_imports(db, scope.ingot(db.as_hir_db())).1; + if let Some(named) = imports.named_resolved.get(&scope) { + named + .values() + .flat_map(|bucket| bucket.iter_ok()) + .for_each(|name_res| { + if let Some(trait_) = name_res.trait_() { + traits.insert(trait_); + } + }) + } + + if let Some(glob) = imports.glob_resolved.get(&scope) { + glob.iter() + .flat_map(|(_, map)| map.values().flat_map(|v| v.iter())) + .for_each(|name_res| { + if let Some(trait_) = name_res.trait_() { + traits.insert(trait_); + } + }) + } + + if let Some(unnamed) = imports.unnamed_resolved.get(&scope) { + unnamed + .iter() + .flat_map(|bucket| bucket.iter_ok()) + .for_each(|name_res| { + if let Some(trait_) = name_res.trait_() { + traits.insert(trait_); + } + }) + } + + for child in scope.child_items(db.as_hir_db()) { + if let ItemKind::Trait(trait_) = child { + traits.insert(trait_); + } + } + + if let Some(parent) = scope.parent(db.as_hir_db()) { + let parent_traits = available_traits_in_scope(db, parent); + traits.extend(parent_traits.iter().copied()); + } + + traits +} + +#[salsa::interned] +pub struct TraitScope<'db> { + inner: TraitScopeKind<'db>, +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum TraitScopeKind<'db> { + TopLevelMod(TopLevelMod<'db>), + Module(Mod<'db>), + Block(Body<'db>, ExprId), +} + +impl<'db> TraitScopeKind<'db> { + fn from_scope(db: &'db dyn HirAnalysisDb, mut scope: ScopeId<'db>) -> Self { + loop { + match scope { 
+ ScopeId::Item(item) => match item { + ItemKind::TopMod(top_level_mod) => { + return TraitScopeKind::TopLevelMod(top_level_mod); + } + ItemKind::Mod(mod_) => { + return TraitScopeKind::Module(mod_); + } + _ => {} + }, + ScopeId::Block(body, expr_id) => { + return TraitScopeKind::Block(body, expr_id); + } + _ => {} + } + scope = scope.parent(db.as_hir_db()).unwrap(); + } + } + + fn to_scope(&self) -> ScopeId<'db> { + match self { + TraitScopeKind::TopLevelMod(top_level_mod) => { + ScopeId::Item(ItemKind::TopMod(*top_level_mod)) + } + TraitScopeKind::Module(mod_) => ScopeId::Item(ItemKind::Mod(*mod_)), + TraitScopeKind::Block(body, expr_id) => ScopeId::Block(*body, *expr_id), + } + } +} diff --git a/crates/hir-analysis/src/name_resolution/visibility_checker.rs b/crates/hir-analysis/src/name_resolution/visibility_checker.rs new file mode 100644 index 0000000000..df4ebad7a2 --- /dev/null +++ b/crates/hir-analysis/src/name_resolution/visibility_checker.rs @@ -0,0 +1,84 @@ +use hir::hir_def::{scope_graph::ScopeId, ItemKind, Use}; + +use crate::{ + ty::{ + const_ty::ConstTyData, + ty_def::{TyBase, TyData, TyId}, + }, + HirAnalysisDb, +}; + +/// Return `true` if the given `scope` is visible from `from_scope`. +pub(crate) fn is_scope_visible_from( + db: &dyn HirAnalysisDb, + scope: ScopeId, + from_scope: ScopeId, +) -> bool { + let hir_db = db.as_hir_db(); + // If resolved is public, then it is visible. + if scope.data(hir_db).vis.is_pub() { + return true; + } + + let Some(def_scope) = (match scope { + ScopeId::Item(ItemKind::Func(func)) => { + let parent_item = scope.parent_item(hir_db); + if matches!(parent_item, Some(ItemKind::Trait(..))) { + return true; + } + + if func.is_associated_func(hir_db) { + scope + .parent_item(hir_db) + .and_then(|item| ScopeId::Item(item).parent(hir_db)) + } else { + scope.parent(hir_db) + } + } + ScopeId::Item(_) => scope.parent(hir_db), + ScopeId::Field(..) | ScopeId::Variant(..) 
=> { + let parent_item = scope.item(); + ScopeId::Item(parent_item).parent(hir_db) + } + + _ => scope.parent(hir_db), + }) else { + return false; + }; + + from_scope.is_transitive_child_of(db.as_hir_db(), def_scope) +} + +pub(crate) fn is_ty_visible_from(db: &dyn HirAnalysisDb, ty: TyId, from_scope: ScopeId) -> bool { + match ty.base_ty(db).data(db) { + TyData::TyBase(base) => match base { + TyBase::Prim(_) => true, + TyBase::Adt(adt) => is_scope_visible_from(db, adt.scope(db), from_scope), + TyBase::Func(func) => is_scope_visible_from(db, func.scope(db), from_scope), + }, + TyData::TyParam(param) => is_scope_visible_from(db, param.scope(db), from_scope), + + TyData::ConstTy(const_ty) => match const_ty.data(db) { + ConstTyData::TyVar(_, _) => true, + ConstTyData::TyParam(param, _) => { + is_scope_visible_from(db, param.scope(db), from_scope) + } + ConstTyData::Evaluated(_, _) => true, + ConstTyData::UnEvaluated(body) => is_scope_visible_from(db, body.scope(), from_scope), + }, + TyData::TyVar(_) | TyData::Never | TyData::Invalid(_) => true, + TyData::TyApp(_, _) => unreachable!(), + } +} + +/// Return `true` if the given `use_` is visible from the `ref_scope`. 
+pub(super) fn is_use_visible(db: &dyn HirAnalysisDb, ref_scope: ScopeId, use_: Use) -> bool { + let use_scope = ScopeId::from_item(use_.into()); + + if use_scope.data(db.as_hir_db()).vis.is_pub() { + return true; + } + + let use_def_scope = use_scope.parent(db.as_hir_db()).unwrap(); + ref_scope.is_transitive_child_of(db.as_hir_db(), use_def_scope) +} diff --git a/crates/hir-analysis/src/ty/adt_def.rs b/crates/hir-analysis/src/ty/adt_def.rs new file mode 100644 index 0000000000..994e0945c5 --- /dev/null +++ b/crates/hir-analysis/src/ty/adt_def.rs @@ -0,0 +1,326 @@ +use hir::{ + hir_def::{ + scope_graph::ScopeId, Contract, Enum, FieldDefListId, IdentId, IngotId, ItemKind, Partial, + Struct, TypeId as HirTyId, VariantDefListId, VariantKind, + }, + span::DynLazySpan, +}; + +use super::{ + binder::Binder, + ty_def::{InvalidCause, TyId}, + ty_lower::{collect_generic_params, lower_hir_ty, GenericParamOwnerId, GenericParamTypeSet}, +}; +use crate::HirAnalysisDb; + +/// Lower HIR ADT definition(`struct/enum/contract`) to [`AdtDef`]. +#[salsa::tracked] +pub fn lower_adt<'db>(db: &'db dyn HirAnalysisDb, adt: AdtRefId<'db>) -> AdtDef<'db> { + AdtTyBuilder::new(db, adt).build() +} + +/// Represents a ADT type definition. +#[salsa::tracked] +pub struct AdtDef<'db> { + pub adt_ref: AdtRefId<'db>, + + /// Type parameters of the ADT. + #[return_ref] + pub param_set: GenericParamTypeSet<'db>, + + /// Fields of the ADT, if the ADT is an enum, this represents variants. + /// Otherwise, `fields[0]` represents all fields of the struct. 
+ #[return_ref] + pub fields: Vec>, +} + +impl<'db> AdtDef<'db> { + pub(crate) fn name(self, db: &'db dyn HirAnalysisDb) -> IdentId<'db> { + self.adt_ref(db).name(db) + } + + pub(crate) fn params(self, db: &'db dyn HirAnalysisDb) -> &'db [TyId<'db>] { + self.param_set(db).params(db) + } + + pub(crate) fn original_params(self, db: &'db dyn HirAnalysisDb) -> &'db [TyId<'db>] { + self.param_set(db).explicit_params(db) + } + + pub(crate) fn is_struct(self, db: &dyn HirAnalysisDb) -> bool { + matches!(self.adt_ref(db).data(db), AdtRef::Struct(_)) + } + + pub fn scope(self, db: &'db dyn HirAnalysisDb) -> ScopeId<'db> { + self.adt_ref(db).scope(db) + } + + pub(crate) fn variant_ty_span( + self, + db: &'db dyn HirAnalysisDb, + field_idx: usize, + ty_idx: usize, + ) -> DynLazySpan<'db> { + match self.adt_ref(db).data(db) { + AdtRef::Enum(e) => { + let span = e.lazy_span().variants_moved().variant_moved(field_idx); + match e.variants(db.as_hir_db()).data(db.as_hir_db())[field_idx].kind { + VariantKind::Tuple(_) => span.tuple_type_moved().elem_ty_moved(ty_idx).into(), + VariantKind::Record(_) => { + span.fields_moved().field_moved(ty_idx).ty_moved().into() + } + VariantKind::Unit => unreachable!(), + } + } + + AdtRef::Struct(s) => s + .lazy_span() + .fields_moved() + .field_moved(field_idx) + .ty_moved() + .into(), + + AdtRef::Contract(c) => c + .lazy_span() + .fields_moved() + .field_moved(field_idx) + .ty_moved() + .into(), + } + } + + pub(crate) fn ingot(self, db: &'db dyn HirAnalysisDb) -> IngotId<'db> { + let hir_db = db.as_hir_db(); + match self.adt_ref(db).data(db) { + AdtRef::Enum(e) => e.top_mod(hir_db).ingot(hir_db), + AdtRef::Struct(s) => s.top_mod(hir_db).ingot(hir_db), + AdtRef::Contract(c) => c.top_mod(hir_db).ingot(hir_db), + } + } + + pub(crate) fn as_generic_param_owner( + self, + db: &'db dyn HirAnalysisDb, + ) -> Option> { + self.adt_ref(db).generic_owner_id(db) + } +} + +/// This struct represents a field of an ADT. 
If the ADT is an enum, this +/// represents a variant. +#[derive(Debug, Clone, PartialEq, Eq, Hash, salsa::Update)] +pub struct AdtField<'db> { + /// Fields of the variant. + /// If the adt is an struct or contract, + /// the length of the vector is always 1. + /// + /// To allow recursive types, the type of the field is represented as a HIR + /// type and. + tys: Vec>>, + + scope: ScopeId<'db>, +} +impl<'db> AdtField<'db> { + pub fn ty(&self, db: &'db dyn HirAnalysisDb, i: usize) -> Binder> { + let ty = if let Some(ty) = self.tys[i].to_opt() { + lower_hir_ty(db, ty, self.scope) + } else { + TyId::invalid(db, InvalidCause::Other) + }; + + Binder::bind(ty) + } + + /// Iterates all fields types of the `field`. + pub fn iter_types<'a>( + &'a self, + db: &'db dyn HirAnalysisDb, + ) -> impl Iterator>> + 'a { + (0..self.num_types()).map(|i| self.ty(db, i)) + } + + pub fn num_types(&self) -> usize { + self.tys.len() + } + + pub(super) fn new(tys: Vec>>, scope: ScopeId<'db>) -> Self { + Self { tys, scope } + } +} + +#[salsa::interned] +pub struct AdtRefId<'db> { + pub data: AdtRef<'db>, +} + +impl<'db> AdtRefId<'db> { + pub fn scope(self, db: &'db dyn HirAnalysisDb) -> ScopeId<'db> { + self.data(db).scope() + } + + pub fn as_item(self, db: &'db dyn HirAnalysisDb) -> ItemKind<'db> { + match self.data(db) { + AdtRef::Enum(e) => e.into(), + AdtRef::Struct(s) => s.into(), + AdtRef::Contract(c) => c.into(), + } + } + + pub fn name(self, db: &'db dyn HirAnalysisDb) -> IdentId<'db> { + let hir_db = db.as_hir_db(); + match self.data(db) { + AdtRef::Enum(e) => e.name(hir_db), + AdtRef::Struct(s) => s.name(hir_db), + AdtRef::Contract(c) => c.name(hir_db), + } + .to_opt() + .unwrap_or_else(|| IdentId::new(hir_db, "".to_string())) + } + + pub fn kind_name(self, db: &dyn HirAnalysisDb) -> &'static str { + self.as_item(db).kind_name() + } + + pub fn name_span(self, db: &'db dyn HirAnalysisDb) -> DynLazySpan<'db> { + self.scope(db) + .name_span(db.as_hir_db()) + 
.unwrap_or_else(DynLazySpan::invalid) + } + + pub fn from_enum(db: &'db dyn HirAnalysisDb, enum_: Enum<'db>) -> Self { + Self::new(db, AdtRef::Enum(enum_)) + } + + pub fn from_struct(db: &'db dyn HirAnalysisDb, struct_: Struct<'db>) -> Self { + Self::new(db, AdtRef::Struct(struct_)) + } + + pub fn from_contract(db: &'db dyn HirAnalysisDb, contract: Contract<'db>) -> Self { + Self::new(db, AdtRef::Contract(contract)) + } + + pub fn try_from_item(db: &'db dyn HirAnalysisDb, item: ItemKind<'db>) -> Option { + match item { + ItemKind::Enum(e) => Some(Self::from_enum(db, e)), + ItemKind::Struct(s) => Some(Self::from_struct(db, s)), + ItemKind::Contract(c) => Some(Self::from_contract(db, c)), + _ => None, + } + } + + pub(crate) fn generic_owner_id( + self, + db: &'db dyn HirAnalysisDb, + ) -> Option> { + match self.data(db) { + AdtRef::Enum(e) => Some(GenericParamOwnerId::new(db, e.into())), + AdtRef::Struct(s) => Some(GenericParamOwnerId::new(db, s.into())), + AdtRef::Contract(_) => None, + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash, derive_more::From)] +pub enum AdtRef<'db> { + Enum(Enum<'db>), + Struct(Struct<'db>), + Contract(Contract<'db>), +} + +impl<'db> AdtRef<'db> { + pub fn scope(self) -> ScopeId<'db> { + match self { + Self::Enum(e) => e.scope(), + Self::Struct(s) => s.scope(), + Self::Contract(c) => c.scope(), + } + } +} + +struct AdtTyBuilder<'db> { + db: &'db dyn HirAnalysisDb, + adt: AdtRefId<'db>, + params: GenericParamTypeSet<'db>, + variants: Vec>, +} + +impl<'db> AdtTyBuilder<'db> { + fn new(db: &'db dyn HirAnalysisDb, adt: AdtRefId<'db>) -> Self { + Self { + db, + adt, + params: GenericParamTypeSet::empty(db, adt.scope(db)), + variants: Vec::new(), + } + } + + fn build(mut self) -> AdtDef<'db> { + self.collect_generic_params(); + self.collect_variants(); + AdtDef::new(self.db, self.adt, self.params, self.variants) + } + + fn collect_generic_params(&mut self) { + let owner = match self.adt.data(self.db) { + AdtRef::Contract(_) => return, + 
AdtRef::Enum(enum_) => enum_.into(), + AdtRef::Struct(struct_) => struct_.into(), + }; + let owner_id = GenericParamOwnerId::new(self.db, owner); + + self.params = collect_generic_params(self.db, owner_id); + } + + fn collect_variants(&mut self) { + match self.adt.data(self.db) { + AdtRef::Struct(struct_) => { + self.collect_field_types(struct_.fields(self.db.as_hir_db())); + } + + AdtRef::Contract(contract) => { + self.collect_field_types(contract.fields(self.db.as_hir_db())) + } + + AdtRef::Enum(enum_) => { + self.collect_enum_variant_types(enum_.variants(self.db.as_hir_db())) + } + }; + } + + fn collect_field_types(&mut self, fields: FieldDefListId<'db>) { + let scope = self.adt.scope(self.db); + + let fields = fields + .data(self.db.as_hir_db()) + .iter() + .map(|field| field.ty) + .collect(); + + self.variants.push(AdtField::new(fields, scope)); + } + + fn collect_enum_variant_types(&mut self, variants: VariantDefListId<'db>) { + let scope = self.adt.scope(self.db); + + variants + .data(self.db.as_hir_db()) + .iter() + .for_each(|variant| { + // TODO: FIX here when record variant is introduced. + let tys = match variant.kind { + VariantKind::Tuple(tuple_id) => tuple_id.data(self.db.as_hir_db()).clone(), + + VariantKind::Record(fields) => fields + .data(self.db.as_hir_db()) + .iter() + .map(|field| field.ty) + .collect(), + + VariantKind::Unit => vec![], + }; + + let variant = AdtField::new(tys, scope); + self.variants.push(variant) + }) + } +} diff --git a/crates/hir-analysis/src/ty/binder.rs b/crates/hir-analysis/src/ty/binder.rs new file mode 100644 index 0000000000..ef0a198611 --- /dev/null +++ b/crates/hir-analysis/src/ty/binder.rs @@ -0,0 +1,181 @@ +use std::collections::hash_map::Entry; + +use rustc_hash::FxHashMap; + +use super::{ + const_ty::ConstTyData, + fold::{TyFoldable, TyFolder}, + ty_def::{TyData, TyId}, +}; +use crate::HirAnalysisDb; + +/// A `Binder` is a type constructor that binds a type variable within its +/// scope. 
+/// +/// # Type Parameters +/// - `T`: The type being bound within the `Binder`. +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct Binder { + value: T, +} +unsafe impl salsa::Update for Binder +where + T: salsa::Update, +{ + unsafe fn maybe_update(old_pointer: *mut Self, new_value: Self) -> bool { + let old_value = unsafe { &mut *old_pointer }; + T::maybe_update(&mut old_value.value, new_value.value) + } +} + +impl Binder { + pub const fn bind(value: T) -> Self { + Binder { value } + } +} + +impl<'db, T> Binder +where + T: TyFoldable<'db>, +{ + /// Instantiates the binder with an identity function. + /// + /// This method essentially returns the value within the binder without any + /// modifications. + /// + /// # Returns + /// The value contained within the `Binder`. + /// + /// # Note + /// This function is useful when you want to retrieve the value inside the + /// binder without applying any transformations. + pub fn instantiate_identity(self) -> T { + self.value + } + + /// Retrieves a reference to the value within the binder. + /// + /// This function is useful when you want to access some data that you know + /// doesn't depend on bounded variables in the binder. + pub fn skip_binder(&self) -> &T { + &self.value + } + + /// Instantiates the binder with the provided arguments. + /// + /// This method takes a reference to a `HirAnalysisDb` and a slice of `TyId` + /// arguments, and returns a new instance of the type contained within + /// the binder with the arguments applied. + /// + /// # Parameters + /// - `db`: A reference to the `HirAnalysisDb`. + /// - `args`: A slice of `TyId` that will be used to instantiate the type. + /// + /// # Returns + /// A new instance of the type contained within the binder with the + /// arguments applied. 
+ pub fn instantiate(self, db: &'db dyn HirAnalysisDb, args: &[TyId<'db>]) -> T { + let mut folder = InstantiateFolder { db, args }; + self.value.fold_with(&mut folder) + } + + /// Instantiates the binder with a custom function. + /// + /// This method takes a reference to a `HirAnalysisDb` and a closure that + /// maps a bound variable to `TyId`, and returns a new instance of the + /// type contained within the binder with the custom function applied. + /// + /// # Parameters + /// - `db`: A reference to the `HirAnalysisDb`. + /// - `f`: A function that map a bouded variable to a type. + /// + /// # Returns + /// A new instance of the type contained within the binder with the custom + /// function applied. + pub fn instantiate_with(self, db: &'db dyn HirAnalysisDb, f: F) -> T + where + F: FnMut(TyId<'db>) -> TyId<'db>, + { + let mut folder = InstantiateWithFolder { + db, + f, + params: FxHashMap::default(), + }; + self.value.fold_with(&mut folder) + } +} + +struct InstantiateFolder<'db, 'a> { + db: &'db dyn HirAnalysisDb, + args: &'a [TyId<'db>], +} + +impl<'db> TyFolder<'db> for InstantiateFolder<'db, '_> { + fn db(&self) -> &'db dyn HirAnalysisDb { + self.db + } + + fn fold_ty(&mut self, ty: TyId<'db>) -> TyId<'db> { + match ty.data(self.db) { + TyData::TyParam(param) => return self.args[param.idx], + TyData::ConstTy(const_ty) => { + if let ConstTyData::TyParam(param, _) = const_ty.data(self.db) { + return self.args[param.idx]; + } + } + + _ => {} + } + + ty.super_fold_with(self) + } +} + +struct InstantiateWithFolder<'db, F> +where + F: FnMut(TyId<'db>) -> TyId<'db>, +{ + db: &'db dyn HirAnalysisDb, + f: F, + params: FxHashMap>, +} + +impl<'db, F> TyFolder<'db> for InstantiateWithFolder<'db, F> +where + F: FnMut(TyId<'db>) -> TyId<'db>, +{ + fn db(&self) -> &'db dyn HirAnalysisDb { + self.db + } + + fn fold_ty(&mut self, ty: TyId<'db>) -> TyId<'db> { + match ty.data(self.db) { + TyData::TyParam(param) => { + match self.params.entry(param.idx) { + 
Entry::Occupied(entry) => return *entry.get(), + Entry::Vacant(entry) => { + let ty = (self.f)(ty); + entry.insert(ty); + return ty; + } + }; + } + TyData::ConstTy(const_ty) => { + if let ConstTyData::TyParam(param, _) = const_ty.data(self.db) { + match self.params.entry(param.idx) { + Entry::Occupied(entry) => return *entry.get(), + Entry::Vacant(entry) => { + let ty = (self.f)(ty); + entry.insert(ty); + return ty; + } + }; + } + } + + _ => {} + } + + ty.super_fold_with(self) + } +} diff --git a/crates/hir-analysis/src/ty/canonical.rs b/crates/hir-analysis/src/ty/canonical.rs new file mode 100644 index 0000000000..86f61d7bf5 --- /dev/null +++ b/crates/hir-analysis/src/ty/canonical.rs @@ -0,0 +1,294 @@ +use rustc_hash::FxHashMap; + +use super::{ + const_ty::{ConstTyData, ConstTyId}, + fold::{TyFoldable, TyFolder}, + ty_def::{TyData, TyId, TyVar}, + unify::{InferenceKey, UnificationStore, UnificationTableBase}, +}; +use crate::{ty::ty_def::collect_variables, HirAnalysisDb}; + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct Canonical { + pub value: T, +} + +impl<'db, T> Canonical +where + T: TyFoldable<'db>, +{ + pub fn new(db: &'db dyn HirAnalysisDb, value: T) -> Self { + let mut c = Canonicalizer::new(db); + let value = value.fold_with(&mut c); + Canonical { value } + } + + /// Extracts the identity from the canonical value. + /// + /// This method initializes the unification table with new variables + /// based on the canonical value and then returns the canonical value + /// itself. + /// + /// # Parameters + /// - `table`: The unification table to be initialized with new variables. + /// + /// # Returns + /// The canonical value after initializing the unification table. + /// + /// # Panics + /// This function will panic if the `table` is not empty. 
+ pub(super) fn extract_identity(self, table: &mut UnificationTableBase<'db, S>) -> T + where + S: UnificationStore<'db>, + { + assert!(table.is_empty()); + + for var in collect_variables(table.db, &self.value).iter() { + table.new_var(var.sort, &var.kind); + } + + self.value + } + + /// Canonicalize a new solution that corresponds to the canonical query. + /// This function creates a new solution for a canonical query by folding + /// the provided solution with the unification table. It then constructs + /// a substitution map from probed type variables to canonical type + /// variables, and uses this map to canonicalize the solution. + /// + /// # Parameters + /// - `db`: The database reference. + /// - `table`: The unification table must be from the same environment as + /// the solution. + /// - `solution`: The solution to be canonicalized. + /// + /// # Returns + /// A `Solution` where `U` is the type of the provided solution, + /// canonicalized to the context of the canonical query. + pub(super) fn canonicalize_solution( + &self, + db: &'db dyn HirAnalysisDb, + table: &mut UnificationTableBase<'db, S>, + solution: U, + ) -> Solution + where + T: Copy, + S: UnificationStore<'db>, + U: TyFoldable<'db> + Clone, + { + let solution = solution.fold_with(table); + + // Make the substitution so that it maps back from probed type variable to + // canonical type variables. + // `Probed type variable -> Canonical type variable`. 
+ let canonical_vars = collect_variables(db, &self.value) + .into_iter() + .filter_map(|var| { + let ty = TyId::ty_var(db, var.sort, var.kind, var.key); + let probed = ty.fold_with(table); + if probed.is_ty_var(db) { + Some((probed, ty)) + } else { + None + } + }); + let mut canonicalizer = Canonicalizer { + db, + subst: canonical_vars.collect(), + }; + + Solution { + value: solution.fold_with(&mut canonicalizer), + } + } +} + +/// This type contains [`Canonical`] type and auxiliary information to map back +/// [`Solution`] that corresponds to [`Canonical`] query. +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct Canonicalized<'db, T> { + pub value: Canonical, + // A substitution from canonical type variables to original type variables. + subst: FxHashMap, TyId<'db>>, +} + +impl<'db, T> Canonicalized<'db, T> +where + T: TyFoldable<'db>, +{ + pub fn new(db: &'db dyn HirAnalysisDb, value: T) -> Self { + let mut canonicalizer = Canonicalizer::new(db); + let value = value.fold_with(&mut canonicalizer); + let map = canonicalizer + .subst + .into_iter() + .map(|(orig_var, canonical_var)| (canonical_var, orig_var)) + .collect(); + Canonicalized { + value: Canonical { value }, + subst: map, + } + } + + /// Extracts the solution from the canonicalized query. + /// + /// This method takes a unification table and a solution, and returns the + /// solution in the context of the original query environment. + /// + /// # Parameters + /// - `table`: The unification table in the original query environement. + /// - `solution`: The solution to extract. + /// + /// # Returns + /// The extracted solution in the context of the original query environment. 
+ pub fn extract_solution( + &self, + table: &mut UnificationTableBase<'db, S>, + solution: Solution, + ) -> U + where + U: TyFoldable<'db>, + S: UnificationStore<'db>, + { + let map = self.subst.clone(); + let mut extractor = SolutionExtractor::new(table, map); + solution.value.fold_with(&mut extractor) + } +} + +/// Represents a solution to a [`Canonical`] query. +/// +/// This type guarantees: +/// 1. Any type variable in the solution that is unifiable with a type variable +/// from the [`Canonical`] query will be canonicalized to that variable. +/// 2. All other type variables are canonicalized in a consistent manner with +/// the [`Canonical`] type. +/// +/// To extract the internal value into the environment where the query was +/// created, use [`Canonicalized::extract_solution`]. +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct Solution { + pub(super) value: T, +} + +/// A struct that helps in converting types to their canonical form. +/// It maintains a mapping from original type variables to canonical variables. +struct Canonicalizer<'db> { + db: &'db dyn HirAnalysisDb, + // A substitution from original type variables to canonical variables. 
+ subst: FxHashMap, TyId<'db>>, +} + +impl<'db> Canonicalizer<'db> { + fn new(db: &'db dyn HirAnalysisDb) -> Self { + Canonicalizer { + db, + subst: FxHashMap::default(), + } + } + + fn canonical_var(&mut self, var: &TyVar<'db>) -> TyVar<'db> { + let key = self.subst.len() as u32; + TyVar { + sort: var.sort, + kind: var.kind.clone(), + key: InferenceKey(key, Default::default()), + } + } +} + +impl<'db> TyFolder<'db> for Canonicalizer<'db> { + fn db(&self) -> &'db dyn HirAnalysisDb { + self.db + } + + fn fold_ty(&mut self, ty: TyId<'db>) -> TyId<'db> { + if let Some(&canonical) = self.subst.get(&ty) { + return canonical; + } + + match ty.data(self.db) { + TyData::TyVar(var) => { + let canonical_var = self.canonical_var(var); + let canonical_ty = TyId::new(self.db, TyData::TyVar(canonical_var)); + + self.subst.insert(ty, canonical_ty); + canonical_ty + } + + TyData::ConstTy(const_ty) => { + if let ConstTyData::TyVar(var, const_ty_ty) = const_ty.data(self.db) { + let canonical_var = self.canonical_var(var); + let const_ty = + ConstTyId::new(self.db, ConstTyData::TyVar(canonical_var, *const_ty_ty)); + let canonical_ty = TyId::const_ty(self.db, const_ty); + + self.subst.insert(ty, canonical_ty); + canonical_ty + } else { + ty.super_fold_with(self) + } + } + + _ => ty.super_fold_with(self), + } + } +} + +struct SolutionExtractor<'a, 'db, S> +where + S: UnificationStore<'db>, +{ + table: &'a mut UnificationTableBase<'db, S>, + /// A subst from canonical type variables to the variables in the current + /// env. 
+ subst: FxHashMap, TyId<'db>>, +} + +impl<'a, 'db, S> SolutionExtractor<'a, 'db, S> +where + S: UnificationStore<'db>, +{ + fn new( + table: &'a mut UnificationTableBase<'db, S>, + subst: FxHashMap, TyId<'db>>, + ) -> Self { + SolutionExtractor { table, subst } + } +} + +impl<'db, S> TyFolder<'db> for SolutionExtractor<'_, 'db, S> +where + S: UnificationStore<'db>, +{ + fn db(&self) -> &'db dyn HirAnalysisDb { + self.table.db() + } + + fn fold_ty(&mut self, ty: TyId<'db>) -> TyId<'db> { + if let Some(&ty) = self.subst.get(&ty) { + return ty; + } + + match ty.data(self.db()) { + TyData::TyVar(var) => { + let new_ty = self.table.new_var(var.sort, &var.kind); + self.subst.insert(ty, new_ty); + new_ty + } + + TyData::ConstTy(const_ty) => { + if let ConstTyData::TyVar(var, const_ty_ty) = const_ty.data(self.db()) { + let new_key = self.table.new_key(&var.kind, var.sort); + let new_ty = TyId::const_ty_var(self.db(), *const_ty_ty, new_key); + self.subst.insert(ty, new_ty); + new_ty + } else { + ty.super_fold_with(self) + } + } + + _ => ty.super_fold_with(self), + } + } +} diff --git a/crates/hir-analysis/src/ty/const_ty.rs b/crates/hir-analysis/src/ty/const_ty.rs new file mode 100644 index 0000000000..bad0ae6742 --- /dev/null +++ b/crates/hir-analysis/src/ty/const_ty.rs @@ -0,0 +1,188 @@ +use hir::hir_def::{Body, Expr, IntegerId, LitKind, Partial}; + +use super::{ + ty_def::{InvalidCause, TyId, TyParam, TyVar}, + unify::UnificationTable, +}; +use crate::{ + ty::ty_def::{Kind, TyBase, TyData, TyVarSort}, + HirAnalysisDb, +}; + +#[salsa::interned] +pub struct ConstTyId<'db> { + #[return_ref] + pub(crate) data: ConstTyData<'db>, +} + +#[salsa::tracked] +pub(crate) fn evaluate_const_ty<'db>( + db: &'db dyn HirAnalysisDb, + const_ty: ConstTyId<'db>, + expected_ty: Option>, +) -> ConstTyId<'db> { + let ConstTyData::UnEvaluated(body) = const_ty.data(db) else { + let const_ty_ty = const_ty.ty(db); + return match check_const_ty(db, const_ty_ty, expected_ty, &mut 
UnificationTable::new(db)) { + Ok(_) => const_ty, + Err(cause) => { + let ty = TyId::invalid(db, cause); + return const_ty.swap_ty(db, ty); + } + }; + }; + + let Partial::Present(expr) = body.expr(db.as_hir_db()).data(db.as_hir_db(), *body) else { + let data = ConstTyData::Evaluated( + EvaluatedConstTy::Invalid, + TyId::invalid(db, InvalidCause::Other), + ); + return ConstTyId::new(db, data); + }; + + let mut table = UnificationTable::new(db); + let (resolved, ty) = match expr { + Expr::Lit(LitKind::Bool(b)) => ( + EvaluatedConstTy::LitBool(*b), + TyId::new(db, TyData::TyBase(TyBase::bool())), + ), + + Expr::Lit(LitKind::Int(i)) => ( + EvaluatedConstTy::LitInt(*i), + table.new_var(TyVarSort::Integral, &Kind::Star), + ), + + _ => { + return ConstTyId::new( + db, + ConstTyData::Evaluated( + EvaluatedConstTy::Invalid, + TyId::invalid(db, InvalidCause::InvalidConstTyExpr { body: *body }), + ), + ); + } + }; + + let data = match check_const_ty(db, ty, expected_ty, &mut table) { + Ok(ty) => ConstTyData::Evaluated(resolved, ty), + Err(err) => ConstTyData::Evaluated(resolved, TyId::invalid(db, err)), + }; + + ConstTyId::new(db, data) +} + +// FIXME: When we add type inference, we need to use the inference engine to +// check the type of the expression instead of this function. 
+fn check_const_ty<'db>( + db: &'db dyn HirAnalysisDb, + const_ty_ty: TyId<'db>, + expected_ty: Option>, + table: &mut UnificationTable<'db>, +) -> Result, InvalidCause<'db>> { + if const_ty_ty.has_invalid(db) { + return Err(InvalidCause::Other); + } + + let Some(expected_ty) = expected_ty else { + return Ok(const_ty_ty); + }; + + if table.unify(expected_ty, const_ty_ty).is_ok() { + Ok(expected_ty) + } else { + let invalid = InvalidCause::ConstTyMismatch { + expected: expected_ty, + given: const_ty_ty, + }; + Err(invalid) + } +} + +impl<'db> ConstTyId<'db> { + pub fn ty(self, db: &'db dyn HirAnalysisDb) -> TyId<'db> { + match self.data(db) { + ConstTyData::TyVar(_, ty) => *ty, + ConstTyData::TyParam(_, ty) => *ty, + ConstTyData::Evaluated(_, ty) => *ty, + ConstTyData::UnEvaluated(_) => TyId::invalid(db, InvalidCause::Other), + } + } + + pub(super) fn pretty_print(self, db: &dyn HirAnalysisDb) -> String { + match &self.data(db) { + ConstTyData::TyVar(var, _) => var.pretty_print(), + ConstTyData::TyParam(param, ty) => { + format!("const {}: {}", param.pretty_print(db), ty.pretty_print(db)) + } + ConstTyData::Evaluated(resolved, _) => resolved.pretty_print(db), + ConstTyData::UnEvaluated(_) => "".to_string(), + } + } + + pub(super) fn evaluate( + self, + db: &'db dyn HirAnalysisDb, + expected_ty: Option>, + ) -> Self { + evaluate_const_ty(db, self, expected_ty) + } + + pub(super) fn from_body(db: &'db dyn HirAnalysisDb, body: Body<'db>) -> Self { + let data = ConstTyData::UnEvaluated(body); + Self::new(db, data) + } + + pub(super) fn from_opt_body(db: &'db dyn HirAnalysisDb, body: Partial>) -> Self { + match body { + Partial::Present(body) => Self::from_body(db, body), + Partial::Absent => Self::invalid(db, InvalidCause::Other), + } + } + + pub(super) fn invalid(db: &'db dyn HirAnalysisDb, cause: InvalidCause<'db>) -> Self { + let resolved = EvaluatedConstTy::Invalid; + let ty = TyId::invalid(db, cause); + let data = ConstTyData::Evaluated(resolved, ty); + 
Self::new(db, data) + } + + fn swap_ty(self, db: &'db dyn HirAnalysisDb, ty: TyId<'db>) -> Self { + let data = match self.data(db) { + ConstTyData::TyVar(var, _) => ConstTyData::TyVar(var.clone(), ty), + ConstTyData::TyParam(param, _) => ConstTyData::TyParam(param.clone(), ty), + ConstTyData::Evaluated(evaluated, _) => ConstTyData::Evaluated(evaluated.clone(), ty), + ConstTyData::UnEvaluated(_) => { + return self; + } + }; + + Self::new(db, data) + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum ConstTyData<'db> { + TyVar(TyVar<'db>, TyId<'db>), + TyParam(TyParam<'db>, TyId<'db>), + Evaluated(EvaluatedConstTy<'db>, TyId<'db>), + UnEvaluated(Body<'db>), +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum EvaluatedConstTy<'db> { + LitInt(IntegerId<'db>), + LitBool(bool), + Invalid, +} + +impl EvaluatedConstTy<'_> { + pub fn pretty_print(&self, db: &dyn HirAnalysisDb) -> String { + match self { + EvaluatedConstTy::LitInt(val) => { + format!("{}", val.data(db.as_hir_db())) + } + EvaluatedConstTy::LitBool(val) => format!("{}", val), + EvaluatedConstTy::Invalid => "".to_string(), + } + } +} diff --git a/crates/hir-analysis/src/ty/def_analysis.rs b/crates/hir-analysis/src/ty/def_analysis.rs new file mode 100644 index 0000000000..e9e0f4ee5b --- /dev/null +++ b/crates/hir-analysis/src/ty/def_analysis.rs @@ -0,0 +1,1272 @@ +//! This module contains analysis for the definition of the type/trait. +//! This module is the only module in `ty` module which is allowed to emit +//! diagnostics. 
+ +use std::collections::hash_map::Entry; + +use common::indexmap::IndexSet; +use hir::{ + hir_def::{ + scope_graph::ScopeId, FieldDef, Func, FuncParamListId, GenericParam, GenericParamListId, + IdentId, Impl as HirImpl, ImplTrait, ItemKind, PathId, Trait, TraitRefId, TypeAlias, + TypeId as HirTyId, VariantKind, + }, + visitor::prelude::*, +}; +use rustc_hash::{FxHashMap, FxHashSet}; +use salsa::plumbing::FromId; + +use super::{ + adt_def::{lower_adt, AdtRef, AdtRefId}, + canonical::Canonical, + const_ty::ConstTyId, + diagnostics::{ImplDiag, TraitConstraintDiag, TraitLowerDiag, TyDiagCollection, TyLowerDiag}, + func_def::FuncDef, + method_cmp::compare_impl_method, + method_table::probe_method, + trait_def::{ingot_trait_env, Implementor, TraitDef}, + trait_lower::{lower_trait, lower_trait_ref, TraitRefLowerError}, + trait_resolution::{ + constraint::{ + collect_adt_constraints, collect_func_def_constraints, collect_impl_block_constraints, + collect_super_traits, SuperTraitCycle, + }, + PredicateListId, + }, + ty_def::{InvalidCause, TyData, TyId}, + ty_lower::{collect_generic_params, lower_kind, GenericParamOwnerId}, + visitor::{walk_ty, TyVisitor}, +}; +use crate::{ + name_resolution::{resolve_path, PathRes}, + ty::{ + adt_def::AdtDef, + binder::Binder, + canonical::Canonicalized, + func_def::lower_func, + trait_def::{does_impl_trait_conflict, TraitInstId}, + trait_lower::lower_impl_trait, + trait_resolution::{ + constraint::collect_trait_constraints, is_goal_satisfiable, GoalSatisfiability, + }, + ty_lower::{lower_hir_ty, lower_type_alias}, + visitor::TyVisitable, + }, + HirAnalysisDb, +}; + +/// This function implements analysis for the ADT definition. +/// The analysis includes the following: +/// - Check if the types in the ADT is well-formed. +/// - Check if the trait instantiation appears in the ADT is well-formed. +/// - Check if the field types are fully applied(i.e., these types should have +/// `*` kind). 
+/// - Check if the types in the ADT satisfies the constraints which is required +/// in type application. +/// - Check if the trait instantiations in the ADT satisfies the constraints. +/// - Check if the recursive types has indirect type wrapper like pointer. +#[salsa::tracked(return_ref)] +pub fn analyze_adt<'db>( + db: &'db dyn HirAnalysisDb, + adt_ref: AdtRefId<'db>, +) -> Vec> { + let analyzer = DefAnalyzer::for_adt(db, adt_ref); + let mut diags = analyzer.analyze(); + + if let Some(diag) = check_recursive_adt(db, adt_ref) { + diags.push(diag); + } + diags +} + +/// This function implements analysis for the trait definition. +/// The analysis includes the following: +/// - Check if the types appear in the trait is well-formed. +/// - Check if the trait instantiation appears in the trait is well-formed. +/// - Check if the types in the trait satisfy the constraints which is required +/// in type application. +/// - Check if the trait instantiations in the trait satisfies the constraints. +#[salsa::tracked(return_ref)] +pub fn analyze_trait<'db>( + db: &'db dyn HirAnalysisDb, + trait_: Trait<'db>, +) -> Vec> { + let analyzer = DefAnalyzer::for_trait(db, trait_); + analyzer.analyze() +} + +/// This function implements analysis for the trait implementation definition. +/// The analysis include the following: +/// - Check if the types appear in the trait impl is well-formed. +/// - Check if the trait instantiation appears in the trait impl is well-formed. +/// - Check if the types in the trait impl satisfy the constraints which is +/// required in type application. +/// - Check if the trait instantiations in the trait impl satisfies the +/// constraints. +/// - Check if the conflict doesn't occur. +/// - Check if the trait or type is included in the ingot which contains the +/// impl trait. 
+#[salsa::tracked(return_ref)] +pub fn analyze_impl_trait<'db>( + db: &'db dyn HirAnalysisDb, + impl_trait: ImplTrait<'db>, +) -> Vec> { + let implementor = match analyze_impl_trait_specific_error(db, impl_trait) { + Ok(implementor) => implementor, + Err(diags) => { + return diags; + } + }; + + let mut diags = ImplTraitMethodAnalyzer::new(db, implementor.instantiate_identity()).analyze(); + + let analyzer = DefAnalyzer::for_trait_impl(db, implementor.instantiate_identity()); + let def_diags = analyzer.analyze(); + + diags.extend(def_diags); + diags +} + +#[salsa::tracked(return_ref)] +pub fn analyze_impl<'db>( + db: &'db dyn HirAnalysisDb, + impl_: HirImpl<'db>, +) -> Vec> { + let Some(hir_ty) = impl_.ty(db.as_hir_db()).to_opt() else { + return Vec::new(); + }; + let ty = lower_hir_ty(db, hir_ty, impl_.scope()); + + let analyzer = DefAnalyzer::for_impl(db, impl_, ty); + analyzer.analyze() +} + +#[salsa::tracked(return_ref)] +pub fn analyze_func<'db>( + db: &'db dyn HirAnalysisDb, + func: Func<'db>, +) -> Vec> { + let Some(func_def) = lower_func(db, func) else { + return Vec::new(); + }; + + let analyzer = DefAnalyzer::for_func(db, func_def); + analyzer.analyze() +} + +/// This function implements analysis for the type alias definition. +/// The analysis includes the following: +/// - Check if the type alias is not recursive. +/// - Check if the type in the type alias is well-formed. +/// +/// NOTE: This function doesn't check the satisfiability of the type since our +/// type system treats the alias as kind of macro, meaning type alias doesn't +/// included in the type system. Satisfiability is checked where the type alias +/// is used. 
+#[salsa::tracked(return_ref)] +pub fn analyze_type_alias<'db>( + db: &'db dyn HirAnalysisDb, + alias: TypeAlias<'db>, +) -> Option> { + let hir_ty = alias.ty(db.as_hir_db()).to_opt()?; + let ty = lower_hir_ty(db, hir_ty, alias.scope()); + + if let Err(cycle) = lower_type_alias(db, alias) { + if cycle.representative() == alias { + let diag = TyLowerDiag::TypeAliasCycle { + primary: alias.lazy_span().ty().into(), + cycle: cycle.participants().collect(), + }; + return Some(diag.into()); + } + } + + // We don't need to check for bound satisfiability here because type alias + // doesn't have trait bound, it will be checked where the type alias is used. + ty.emit_diag(db, alias.lazy_span().ty().into()) +} + +pub struct DefAnalyzer<'db> { + db: &'db dyn HirAnalysisDb, + def: DefKind<'db>, + self_ty: Option>, + diags: Vec>, + assumptions: PredicateListId<'db>, + current_ty: Option<(TyId<'db>, DynLazySpan<'db>)>, +} + +impl<'db> DefAnalyzer<'db> { + fn for_adt(db: &'db dyn HirAnalysisDb, adt: AdtRefId<'db>) -> Self { + let def = lower_adt(db, adt); + let assumptions = collect_adt_constraints(db, def).instantiate_identity(); + Self { + db, + def: def.into(), + self_ty: None, + diags: vec![], + assumptions, + current_ty: None, + } + } + + fn for_trait(db: &'db dyn HirAnalysisDb, trait_: Trait<'db>) -> Self { + let def = lower_trait(db, trait_); + let assumptions = collect_trait_constraints(db, def).instantiate_identity(); + Self { + db, + def: def.into(), + self_ty: def.self_param(db).into(), + diags: vec![], + assumptions, + current_ty: None, + } + } + + fn for_impl(db: &'db dyn HirAnalysisDb, impl_: HirImpl<'db>, ty: TyId<'db>) -> Self { + let assumptions = collect_impl_block_constraints(db, impl_).instantiate_identity(); + let def = DefKind::Impl(impl_); + Self { + db, + def, + self_ty: ty.into(), + diags: vec![], + assumptions, + current_ty: None, + } + } + + fn for_trait_impl(db: &'db dyn HirAnalysisDb, implementor: Implementor<'db>) -> Self { + let assumptions = 
implementor.constraints(db); + Self { + db, + def: implementor.into(), + self_ty: implementor.self_ty(db).into(), + diags: vec![], + assumptions, + current_ty: None, + } + } + + fn for_func(db: &'db dyn HirAnalysisDb, func: FuncDef<'db>) -> Self { + let hir_db = db.as_hir_db(); + let assumptions = collect_func_def_constraints(db, func, true).instantiate_identity(); + let self_ty = match func + .hir_func_def(db) + .unwrap() + .scope() + .parent(hir_db) + .unwrap() + { + ScopeId::Item(ItemKind::Trait(trait_)) => lower_trait(db, trait_).self_param(db).into(), + ScopeId::Item(ItemKind::ImplTrait(impl_trait)) => { + match impl_trait.ty(hir_db).to_opt() { + Some(hir_ty) => lower_hir_ty(db, hir_ty, impl_trait.scope()).into(), + _ => TyId::invalid(db, InvalidCause::Other).into(), + } + } + ScopeId::Item(ItemKind::Impl(impl_)) => match impl_.ty(hir_db).to_opt() { + Some(hir_ty) => lower_hir_ty(db, hir_ty, impl_.scope()).into(), + None => TyId::invalid(db, InvalidCause::Other).into(), + }, + _ => None, + }; + + Self { + db, + def: func.into(), + self_ty, + diags: vec![], + assumptions, + current_ty: None, + } + } + + /// This method verifies if + /// 1. the given `ty` has `*` kind. + /// 2. the given `ty` is not const type + /// + /// TODO: This method is a stop-gap implementation until we design a true + /// const type system. + fn verify_term_type_kind(&mut self, ty: HirTyId<'db>, span: DynLazySpan<'db>) -> bool { + let ty = lower_hir_ty(self.db, ty, self.scope()); + if !ty.has_star_kind(self.db) { + self.diags + .push(TyLowerDiag::expected_star_kind_ty(span).into()); + false + } else if ty.is_const_ty(self.db) { + self.diags + .push(TyLowerDiag::normal_type_expected(self.db, span, ty).into()); + false + } else { + true + } + } + + // Check if the same generic parameter is already defined in the parent item. + // Other name conflict check is done in the name resolution. 
+ // + // This check is necessary because the conflict rule + // for the generic parameter is the exceptional case where shadowing shouldn't + // occur. + fn verify_method_generic_param_conflict( + &mut self, + params: GenericParamListId<'db>, + span: LazyGenericParamListSpan<'db>, + ) -> bool { + let mut is_conflict = false; + for (i, param) in params.data(self.db.as_hir_db()).iter().enumerate() { + if let Some(name) = param.name().to_opt() { + let scope = self.scope(); + let parent_scope = scope.parent_item(self.db.as_hir_db()).unwrap().scope(); + let path = PathId::from_ident(self.db.as_hir_db(), name); + + match resolve_path(self.db, path, parent_scope, false) { + Ok(r @ PathRes::Ty(ty)) if ty.is_param(self.db) => { + self.diags.push( + TyLowerDiag::generic_param_conflict( + span.param(i).into(), + r.name_span(self.db).unwrap(), + name, + ) + .into(), + ); + is_conflict = true; + } + _ => {} + } + } + } + + !is_conflict + } + + fn verify_self_type(&mut self, self_ty: HirTyId<'db>, span: DynLazySpan<'db>) -> bool { + let Some(expected_ty) = self.self_ty else { + return false; + }; + + let param_ty = lower_hir_ty(self.db, self_ty, self.def.scope(self.db)); + if !param_ty.has_invalid(self.db) && !expected_ty.has_invalid(self.db) { + let (expected_base_ty, expected_param_ty_args) = expected_ty.decompose_ty_app(self.db); + let (param_base_ty, param_ty_args) = param_ty.decompose_ty_app(self.db); + + if param_base_ty != expected_base_ty { + self.diags.push( + ImplDiag::invalid_self_ty(self.db, span.clone(), expected_ty, param_ty).into(), + ); + return false; + } + + for (expected_arg, param_arg) in expected_param_ty_args.iter().zip(param_ty_args.iter()) + { + if expected_arg != param_arg { + self.diags.push( + ImplDiag::invalid_self_ty(self.db, span, expected_ty, param_ty).into(), + ); + return false; + } + } + } + + true + } + + fn check_method_conflict(&mut self, func: FuncDef<'db>) -> bool { + let self_ty = func + .receiver_ty(self.db) + .map_or_else(|| 
self.self_ty.unwrap(), |ty| ty.instantiate_identity()); + + if self_ty.has_invalid(self.db) { + return true; + } + + for &cand in probe_method( + self.db, + self.scope().ingot(self.db.as_hir_db()), + Canonical::new(self.db, self_ty), + func.name(self.db), + ) { + if cand != func { + self.diags.push( + ImplDiag::conflict_method_impl( + func.name_span(self.db), + cand.name_span(self.db), + ) + .into(), + ); + return false; + } + } + + true + } + + fn scope(&self) -> ScopeId<'db> { + self.def.scope(self.db) + } + + fn analyze(mut self) -> Vec> { + match self.def { + DefKind::Adt(def) => match def.adt_ref(self.db).data(self.db) { + AdtRef::Struct(struct_) => { + let mut ctxt = VisitorCtxt::with_struct(self.db.as_hir_db(), struct_); + self.visit_struct(&mut ctxt, struct_); + } + + AdtRef::Enum(enum_) => { + let mut ctxt = VisitorCtxt::with_enum(self.db.as_hir_db(), enum_); + self.visit_enum(&mut ctxt, enum_); + } + + AdtRef::Contract(contract) => { + let mut ctxt = VisitorCtxt::with_contract(self.db.as_hir_db(), contract); + self.visit_contract(&mut ctxt, contract); + } + }, + + DefKind::Trait(trait_) => { + let trait_ = trait_.trait_(self.db); + let mut ctxt = VisitorCtxt::with_trait(self.db.as_hir_db(), trait_); + self.visit_trait(&mut ctxt, trait_); + } + + DefKind::ImplTrait(implementor) => { + let impl_trait = implementor.hir_impl_trait(self.db); + let mut ctxt = VisitorCtxt::with_impl_trait(self.db.as_hir_db(), impl_trait); + self.visit_impl_trait(&mut ctxt, impl_trait); + } + + DefKind::Impl(hir_impl) => { + let mut ctxt = VisitorCtxt::with_impl(self.db.as_hir_db(), hir_impl); + self.visit_impl(&mut ctxt, hir_impl) + } + + DefKind::Func(func) => { + let hir_func = func.hir_func_def(self.db).unwrap(); + let mut ctxt = VisitorCtxt::with_func(self.db.as_hir_db(), hir_func); + self.visit_func(&mut ctxt, hir_func); + } + } + + self.diags + } +} + +impl<'db> Visitor<'db> for DefAnalyzer<'db> { + // We don't need to traverse the nested item, each item kinds are 
explicitly + // handled(e.g, `visit_trait` or `visit_enum`). + fn visit_item(&mut self, _ctxt: &mut VisitorCtxt<'db, LazyItemSpan>, _item: ItemKind<'db>) {} + + fn visit_ty(&mut self, ctxt: &mut VisitorCtxt<'db, LazyTySpan<'db>>, hir_ty: HirTyId<'db>) { + let ty = lower_hir_ty(self.db, hir_ty, self.scope()); + let span = ctxt.span().unwrap(); + if let Some(diag) = ty.emit_diag(self.db, span.clone().into()) { + self.diags.push(diag) + } else if let Some(diag) = + ty.emit_wf_diag(self.db, ctxt.ingot(), self.assumptions, span.into()) + { + self.diags.push(diag) + } + } + + fn visit_where_predicate( + &mut self, + ctxt: &mut VisitorCtxt<'db, LazyWherePredicateSpan<'db>>, + pred: &hir::hir_def::WherePredicate<'db>, + ) { + let Some(hir_ty) = pred.ty.to_opt() else { + return; + }; + + let ty = lower_hir_ty(self.db, hir_ty, self.scope()); + + if ty.is_const_ty(self.db) { + let diag = + TraitConstraintDiag::const_ty_bound(self.db, ty, ctxt.span().unwrap().ty().into()) + .into(); + self.diags.push(diag); + return; + } + + if !ty.has_invalid(self.db) && !ty.has_param(self.db) { + let diag = TraitConstraintDiag::concrete_type_bound( + self.db, + ctxt.span().unwrap().ty().into(), + ty, + ) + .into(); + self.diags.push(diag); + return; + } + + self.current_ty = Some((ty, ctxt.span().unwrap().ty().into())); + walk_where_predicate(self, ctxt, pred); + } + + fn visit_field_def( + &mut self, + ctxt: &mut VisitorCtxt<'db, LazyFieldDefSpan<'db>>, + field: &FieldDef<'db>, + ) { + let Some(ty) = field.ty.to_opt() else { + return; + }; + + if !self.verify_term_type_kind(ty, ctxt.span().unwrap().ty().into()) { + return; + } + + let Some(name) = field.name.to_opt() else { + return; + }; + + // Checks if the field type is the same as the type of const type parameter. 
+ if let Some(const_ty) = find_const_ty_param(self.db, name, ctxt.scope()) { + let const_ty_ty = const_ty.ty(self.db); + let field_ty = lower_hir_ty(self.db, ty, ctxt.scope()); + if !const_ty_ty.has_invalid(self.db) + && !field_ty.has_invalid(self.db) + && field_ty != const_ty_ty + { + self.diags.push( + TyLowerDiag::const_ty_mismatch( + self.db, + ctxt.span().unwrap().ty().into(), + const_ty_ty, + field_ty, + ) + .into(), + ); + return; + } + } + + walk_field_def(self, ctxt, field); + } + + fn visit_variant_def( + &mut self, + ctxt: &mut VisitorCtxt<'db, LazyVariantDefSpan<'db>>, + variant: &hir::hir_def::VariantDef<'db>, + ) { + if let VariantKind::Tuple(tuple_id) = variant.kind { + let span = ctxt.span().unwrap().tuple_type_moved(); + for (i, elem_ty) in tuple_id.data(self.db.as_hir_db()).iter().enumerate() { + let Some(elem_ty) = elem_ty.to_opt() else { + continue; + }; + + self.verify_term_type_kind(elem_ty, span.elem_ty(i).into()); + } + } + walk_variant_def(self, ctxt, variant); + } + + fn visit_generic_param( + &mut self, + ctxt: &mut VisitorCtxt<'db, LazyGenericParamSpan<'db>>, + param: &hir::hir_def::GenericParam<'db>, + ) { + let ScopeId::GenericParam(_, idx) = ctxt.scope() else { + unreachable!() + }; + + if let Some(name) = param.name().to_opt() { + let scope = self.scope(); + let parent_scope = scope.parent_item(self.db.as_hir_db()).unwrap().scope(); + let path = PathId::from_ident(self.db.as_hir_db(), name); + match resolve_path(self.db, path, parent_scope, false) { + Ok(r @ PathRes::Ty(ty)) if ty.is_param(self.db) => { + self.diags.push( + TyLowerDiag::generic_param_conflict( + ctxt.span().unwrap().into(), + r.name_span(self.db).unwrap(), + name, + ) + .into(), + ); + return; + } + _ => {} + } + } + + match param { + GenericParam::Type(_) => { + self.current_ty = Some(( + self.def.original_params(self.db)[idx], + ctxt.span().unwrap().into_type_param().name().into(), + )); + walk_generic_param(self, ctxt, param) + } + GenericParam::Const(_) => { + 
let ty = self.def.original_params(self.db)[idx]; + let Some(const_ty_param) = ty.const_ty_param(self.db) else { + return; + }; + + if let Some(diag) = const_ty_param + .emit_diag(self.db, ctxt.span().unwrap().into_const_param().ty().into()) + { + self.diags.push(diag) + } + } + } + } + + fn visit_kind_bound( + &mut self, + ctxt: &mut VisitorCtxt<'db, LazyKindBoundSpan<'db>>, + bound: &hir::hir_def::KindBound, + ) { + let Some((ty, _)) = self.current_ty else { + return; + }; + + let kind = lower_kind(bound); + let former_kind = ty.kind(self.db); + if !former_kind.does_match(&kind) { + self.diags.push( + TyLowerDiag::inconsistent_kind_bound( + self.db, + ctxt.span().unwrap().into(), + ty, + former_kind, + &kind, + ) + .into(), + ); + } + } + + fn visit_trait_ref( + &mut self, + ctxt: &mut VisitorCtxt<'db, LazyTraitRefSpan<'db>>, + trait_ref: TraitRefId<'db>, + ) { + let current_ty = self + .current_ty + .as_ref() + .map(|(ty, _)| *ty) + .unwrap_or(TyId::invalid(self.db, InvalidCause::Other)); + + if current_ty.is_trait_self(self.db) { + if let Some(cycle) = self.def.collect_super_trait_cycle(self.db) { + if let Ok(trait_inst) = + lower_trait_ref(self.db, current_ty, trait_ref, self.scope()) + { + if cycle.contains(trait_inst.def(self.db)) { + self.diags.push( + TraitLowerDiag::CyclicSuperTraits(ctxt.span().unwrap().path().into()) + .into(), + ); + return; + } + } + } + } + + if let (Some((ty, span)), Ok(trait_inst)) = ( + &self.current_ty, + lower_trait_ref(self.db, current_ty, trait_ref, self.scope()), + ) { + let expected_kind = trait_inst.def(self.db).expected_implementor_kind(self.db); + if !expected_kind.does_match(ty.kind(self.db)) { + self.diags.push( + TraitConstraintDiag::kind_mismatch(self.db, span.clone(), expected_kind, *ty) + .into(), + ); + } + } + + if let Some(diag) = analyze_trait_ref( + self.db, + current_ty, + trait_ref, + self.scope(), + Some(self.assumptions), + ctxt.span().unwrap().into(), + ) { + self.diags.push(diag); + } else { + 
walk_trait_ref(self, ctxt, trait_ref); + } + } + + fn visit_super_trait_list( + &mut self, + ctxt: &mut VisitorCtxt<'db, hir::span::item::LazySuperTraitListSpan<'db>>, + super_traits: &[TraitRefId<'db>], + ) { + let DefKind::Trait(def) = self.def else { + unreachable!() + }; + let name_span = def.trait_(self.db).lazy_span().name().into(); + self.current_ty = Some((self.def.trait_self_param(self.db), name_span)); + walk_super_trait_list(self, ctxt, super_traits); + } + + fn visit_impl(&mut self, ctxt: &mut VisitorCtxt<'db, LazyImplSpan<'db>>, impl_: HirImpl<'db>) { + let Some(impl_ty) = impl_.ty(self.db.as_hir_db()).to_opt() else { + return; + }; + + let impl_ty = lower_hir_ty(self.db, impl_ty, impl_.scope()); + if !impl_ty.is_inherent_impl_allowed(self.db, self.scope().ingot(self.db.as_hir_db())) { + let base = impl_ty.base_ty(self.db); + let diag = ImplDiag::InherentImplIsNotAllowed { + primary: ctxt.span().unwrap().target_ty().into(), + ty: base.pretty_print(self.db).to_string(), + is_nominal: !base.is_param(self.db), + }; + + self.diags.push(diag.into()); + } + + if let Some(ty) = impl_ty.emit_diag(self.db, ctxt.span().unwrap().target_ty().into()) { + self.diags.push(ty); + } else { + walk_impl(self, ctxt, impl_); + } + } + + fn visit_func( + &mut self, + ctxt: &mut VisitorCtxt<'db, LazyFuncSpan<'db>>, + hir_func: hir::hir_def::Func<'db>, + ) { + let Some(func) = lower_func(self.db, hir_func) else { + return; + }; + + // We need to check the conflict only when the function is defined in the `impl` + // block since this check requires the ingot-wide method table(i.e., which is + // not performed in name resolution phase). 
+ if matches!( + ctxt.scope().parent_item(self.db.as_hir_db()).unwrap(), + ItemKind::Impl(_) + ) && !self.check_method_conflict(func) + { + return; + } + + if !self.verify_method_generic_param_conflict( + hir_func.generic_params(self.db.as_hir_db()), + hir_func.lazy_span().generic_params_moved(), + ) { + return; + } + + let def = std::mem::replace(&mut self.def, func.into()); + let constraints = std::mem::replace( + &mut self.assumptions, + collect_func_def_constraints(self.db, func, true).instantiate_identity(), + ); + + walk_func(self, ctxt, hir_func); + + if let Some(ret_ty) = hir_func.ret_ty(self.db.as_hir_db()) { + self.verify_term_type_kind(ret_ty, hir_func.lazy_span().ret_ty().into()); + } + + self.assumptions = constraints; + self.def = def; + } + + fn visit_body(&mut self, _ctxt: &mut VisitorCtxt<'_, LazyBodySpan>, _body: hir::hir_def::Body) { + } + + fn visit_func_param_list( + &mut self, + ctxt: &mut VisitorCtxt<'db, LazyFuncParamListSpan<'db>>, + params: FuncParamListId<'db>, + ) { + // Checks if the argument names are not duplicated. 
+ let mut already_seen: FxHashMap = FxHashMap::default(); + + for (i, param) in params.data(self.db.as_hir_db()).iter().enumerate() { + let Some(name) = param.name.to_opt().and_then(|name| name.ident()) else { + continue; + }; + + match already_seen.entry(name) { + Entry::Occupied(entry) => { + let diag = TyLowerDiag::duplicated_arg_name( + ctxt.span().unwrap().param(i).name().into(), + ctxt.span().unwrap().param(*entry.get()).name().into(), + name, + ) + .into(); + self.diags.push(diag); + } + + Entry::Vacant(entry) => { + entry.insert(i); + } + } + } + + walk_func_param_list(self, ctxt, params) + } + + fn visit_func_param( + &mut self, + ctxt: &mut VisitorCtxt<'db, LazyFuncParamSpan<'db>>, + param: &hir::hir_def::FuncParam<'db>, + ) { + let Some(hir_ty) = param.ty.to_opt() else { + return; + }; + + let ty_span: DynLazySpan = if param.is_self_param(ctxt.db()) && param.self_ty_fallback { + ctxt.span().unwrap().name().into() + } else { + ctxt.span().unwrap().ty().into() + }; + + if param.is_self_param(ctxt.db()) { + self.verify_self_type(hir_ty, ty_span.clone()); + } + + if !self.verify_term_type_kind(hir_ty, ty_span) { + return; + } + + walk_func_param(self, ctxt, param); + } +} + +#[salsa::tracked(recovery_fn = check_recursive_adt_impl)] +pub(crate) fn check_recursive_adt<'db>( + db: &'db dyn HirAnalysisDb, + adt: AdtRefId<'db>, +) -> Option> { + let adt_def = lower_adt(db, adt); + for field in adt_def.fields(db) { + for ty in field.iter_types(db) { + for adt_ref in ty.instantiate_identity().collect_direct_adts(db) { + check_recursive_adt(db, adt_ref); + } + } + } + + None +} + +fn check_recursive_adt_impl<'db>( + db: &'db dyn HirAnalysisDb, + cycle: &salsa::Cycle, + adt: AdtRefId<'db>, +) -> Option> { + let participants: FxHashSet<_> = cycle + .participant_keys() + .map(|key| { + let id = key.key_index(); + AdtRefId::from_id(id) + }) + .collect(); + + let adt_def = lower_adt(db, adt); + for (field_idx, field) in adt_def.fields(db).iter().enumerate() { + for 
(ty_idx, ty) in field.iter_types(db).enumerate() { + for field_adt_ref in ty.instantiate_identity().collect_direct_adts(db) { + if participants.contains(&field_adt_ref) && participants.contains(&adt) { + let diag = TyLowerDiag::recursive_type( + adt.name_span(db), + adt_def.variant_ty_span(db, field_idx, ty_idx), + ); + return Some(diag.into()); + } + } + } + } + + None +} + +impl<'db> TyId<'db> { + /// Collect all adts inside types which are not wrapped by indirect type + /// wrapper like pointer or reference. + fn collect_direct_adts(self, db: &'db dyn HirAnalysisDb) -> FxHashSet> { + struct AdtCollector<'db> { + db: &'db dyn HirAnalysisDb, + adts: FxHashSet>, + } + + impl<'db> TyVisitor<'db> for AdtCollector<'db> { + fn db(&self) -> &'db dyn HirAnalysisDb { + self.db + } + + fn visit_app(&mut self, abs: TyId<'db>, arg: TyId<'db>) { + if !abs.is_indirect(self.db) { + walk_ty(self, arg) + } + } + + fn visit_adt(&mut self, adt: AdtDef<'db>) { + self.adts.insert(adt.adt_ref(self.db)); + } + } + + let mut collector = AdtCollector { + db, + adts: FxHashSet::default(), + }; + + self.visit_with(&mut collector); + collector.adts + } +} + +fn analyze_trait_ref<'db>( + db: &'db dyn HirAnalysisDb, + self_ty: TyId<'db>, + trait_ref: TraitRefId<'db>, + scope: ScopeId<'db>, + assumptions: Option>, + span: DynLazySpan<'db>, +) -> Option> { + let trait_inst = match lower_trait_ref(db, self_ty, trait_ref, scope) { + Ok(trait_ref) => trait_ref, + + Err(TraitRefLowerError::ArgNumMismatch { expected, given }) => { + return Some(TraitConstraintDiag::trait_arg_num_mismatch(span, expected, given).into()); + } + + Err(TraitRefLowerError::ArgKindMisMatch { expected, given }) => { + return Some(TraitConstraintDiag::kind_mismatch(db, span, &expected, given).into()); + } + + Err(TraitRefLowerError::ArgTypeMismatch { expected, given }) => match (expected, given) { + (Some(expected), Some(given)) => { + return Some(TyLowerDiag::const_ty_mismatch(db, span, expected, given).into()) + } + + 
(Some(expected), None) => { + return Some(TyLowerDiag::const_ty_expected(db, span, expected).into()) + } + + (None, Some(given)) => { + return Some(TyLowerDiag::normal_type_expected(db, span, given).into()) + } + + (None, None) => unreachable!(), + }, + + Err(TraitRefLowerError::Other) => { + return None; + } + }; + + if let Some(assumptions) = assumptions { + trait_inst.emit_sat_diag(db, scope.ingot(db.as_hir_db()), assumptions, span) + } else { + None + } +} + +#[derive(Clone, Copy, Debug, derive_more::From)] +enum DefKind<'db> { + Adt(AdtDef<'db>), + Trait(TraitDef<'db>), + ImplTrait(Implementor<'db>), + Impl(HirImpl<'db>), + Func(FuncDef<'db>), +} + +impl<'db> DefKind<'db> { + fn original_params(self, db: &'db dyn HirAnalysisDb) -> &'db [TyId<'db>] { + match self { + Self::Adt(def) => def.original_params(db), + Self::Trait(def) => def.original_params(db), + Self::ImplTrait(def) => def.original_params(db), + Self::Impl(hir_impl) => { + collect_generic_params(db, GenericParamOwnerId::new(db, hir_impl.into())).params(db) + } + Self::Func(def) => def.explicit_params(db), + } + } + + fn trait_self_param(self, db: &'db dyn HirAnalysisDb) -> TyId<'db> { + if let Self::Trait(def) = self { + def.self_param(db) + } else { + panic!() + } + } + + fn collect_super_trait_cycle( + self, + db: &'db dyn HirAnalysisDb, + ) -> Option<&'db SuperTraitCycle<'db>> { + if let Self::Trait(def) = self { + collect_super_traits(db, def).as_ref().err() + } else { + None + } + } + + fn scope(self, db: &'db dyn HirAnalysisDb) -> ScopeId<'db> { + match self { + Self::Adt(def) => def.adt_ref(db).scope(db), + Self::Trait(def) => def.trait_(db).scope(), + Self::ImplTrait(def) => def.hir_impl_trait(db).scope(), + Self::Impl(hir_impl) => hir_impl.scope(), + Self::Func(def) => def.scope(db), + } + } +} + +/// This function analyzes the trait impl specific error. +/// 1. If the trait ref is well-formed except for the satisfiability. +/// 2. 
If implementor type is well-formed except for the satisfiability. +/// 3. If the ingot contains impl trait is the same as the ingot which contains +/// either the type or trait. +/// 4. If conflict occurs. +/// 5. If implementor type satisfies the required kind bound. +/// 6. If implementor type satisfies the required trait bound. +fn analyze_impl_trait_specific_error<'db>( + db: &'db dyn HirAnalysisDb, + impl_trait: ImplTrait<'db>, +) -> Result>, Vec>> { + let mut diags = vec![]; + let hir_db = db.as_hir_db(); + // We don't need to report error because it should be reported from the parser. + let (Some(trait_ref), Some(ty)) = ( + impl_trait.trait_ref(hir_db).to_opt(), + impl_trait.ty(hir_db).to_opt(), + ) else { + return Err(diags); + }; + + // 1. Checks if implementor type is well-formed except for the satisfiability. + let ty = lower_hir_ty(db, ty, impl_trait.scope()); + if let Some(diag) = ty.emit_diag(db, impl_trait.lazy_span().ty().into()) { + diags.push(diag); + } + + // 2. Checks if the trait ref is well-formed except for the satisfiability. + if let Some(diag) = analyze_trait_ref( + db, + ty, + trait_ref, + impl_trait.scope(), + None, + impl_trait.lazy_span().trait_ref().into(), + ) { + diags.push(diag); + } + + // If there is any error at the point, it means that `Implementor` is not + // well-formed and no more analysis is needed to reduce the amount of error + // messages. + if !diags.is_empty() || ty.has_invalid(db) { + return Err(diags); + } + + let trait_inst = match lower_trait_ref(db, ty, trait_ref, impl_trait.scope()) { + Ok(trait_inst) => trait_inst, + Err(_) => return Err(vec![]), + }; + + // 3. Check if the ingot containing impl trait is the same as the ingot which + // contains either the type or trait. 
+ let impl_trait_ingot = impl_trait.top_mod(hir_db).ingot(hir_db); + if Some(impl_trait_ingot) != ty.ingot(db) && impl_trait_ingot != trait_inst.def(db).ingot(db) { + diags.push(TraitLowerDiag::external_trait_for_external_type(impl_trait).into()); + return Err(diags); + } + + let trait_env = ingot_trait_env(db, impl_trait_ingot); + let Some(implementor) = trait_env.map_impl_trait(impl_trait) else { + // Lower impl trait never fails if the trait ref and implementor type is + // well-formed. + let current_impl = lower_impl_trait(db, impl_trait).unwrap(); + + // 4. Checks if conflict occurs. + // If there is no implementor type even if the trait ref and implementor type is + // well-formed, it means that the conflict does occur. + analyze_conflict_impl(db, current_impl, &mut diags); + return Err(diags); + }; + + fn analyze_conflict_impl<'db>( + db: &'db dyn HirAnalysisDb, + implementor: Binder>, + diags: &mut Vec>, + ) { + let trait_ = implementor.skip_binder().trait_(db); + let env = ingot_trait_env(db, trait_.ingot(db)); + let Some(impls) = env.impls.get(&trait_.def(db)) else { + return; + }; + + for cand in impls { + if does_impl_trait_conflict(db, *cand, implementor) { + diags.push( + TraitLowerDiag::conflict_impl( + cand.skip_binder().hir_impl_trait(db), + implementor.skip_binder().hir_impl_trait(db), + ) + .into(), + ); + + return; + } + } + } + + // 5. Checks if implementor type satisfies the kind bound which is required by + // the trait. 
+ let expected_kind = implementor + .instantiate_identity() + .trait_def(db) + .expected_implementor_kind(db); + if ty.kind(db) != expected_kind { + diags.push( + TraitConstraintDiag::kind_mismatch( + db, + impl_trait.lazy_span().ty().into(), + expected_kind, + implementor.instantiate_identity().self_ty(db), + ) + .into(), + ); + return Err(diags); + } + + let trait_def = trait_inst.def(db); + let trait_constraints = + collect_trait_constraints(db, trait_def).instantiate(db, trait_inst.args(db)); + let assumptions = implementor.instantiate_identity().constraints(db); + + let mut is_satisfied = |goal: TraitInstId<'db>, span: DynLazySpan<'db>| { + let canonical_goal = Canonicalized::new(db, goal); + match is_goal_satisfiable(db, impl_trait_ingot, canonical_goal.value, assumptions) { + GoalSatisfiability::Satisfied(_) | GoalSatisfiability::ContainsInvalid => {} + GoalSatisfiability::NeedsConfirmation(_) => unreachable!(), + GoalSatisfiability::UnSat(subgoal) => { + diags.push( + TraitConstraintDiag::trait_bound_not_satisfied( + db, + span, + goal, + subgoal.map(|subgoal| subgoal.value), + ) + .into(), + ); + } + } + }; + + // 6. Checks if the trait inst is WF. + let trait_ref_span: DynLazySpan = impl_trait.lazy_span().trait_ref_moved().into(); + for &goal in trait_constraints.list(db) { + is_satisfied(goal, trait_ref_span.clone()); + } + + // 7. Checks if the implementor ty satisfies the super trait constraints. 
+ let target_ty_span: DynLazySpan = impl_trait.lazy_span().ty().into(); + for &super_trait in trait_def.super_traits(db) { + let super_trait = super_trait.instantiate(db, trait_inst.args(db)); + is_satisfied(super_trait, target_ty_span.clone()) + } + + if diags.is_empty() { + Ok(implementor) + } else { + Err(diags) + } +} + +struct ImplTraitMethodAnalyzer<'db> { + db: &'db dyn HirAnalysisDb, + diags: Vec>, + implementor: Implementor<'db>, +} + +impl<'db> ImplTraitMethodAnalyzer<'db> { + fn new(db: &'db dyn HirAnalysisDb, implementor: Implementor<'db>) -> Self { + Self { + db, + diags: vec![], + implementor, + } + } + + fn analyze(mut self) -> Vec> { + let impl_methods = self.implementor.methods(self.db); + let hir_trait = self.implementor.trait_def(self.db).trait_(self.db); + let trait_methods = self.implementor.trait_def(self.db).methods(self.db); + let mut required_methods: IndexSet<_> = trait_methods + .iter() + .filter_map(|(name, &trait_method)| { + if !trait_method.has_default_impl(self.db) { + Some(*name) + } else { + None + } + }) + .collect(); + + for (name, impl_m) in impl_methods { + let Some(trait_m) = trait_methods.get(name) else { + self.diags.push( + ImplDiag::method_not_defined_in_trait( + self.implementor + .hir_impl_trait(self.db) + .lazy_span() + .trait_ref() + .into(), + hir_trait, + *name, + ) + .into(), + ); + continue; + }; + + compare_impl_method( + self.db, + *impl_m, + *trait_m, + self.implementor.trait_(self.db), + &mut self.diags, + ); + + required_methods.shift_remove(name); + } + + if !required_methods.is_empty() { + self.diags.push( + ImplDiag::not_all_trait_items_implemented( + self.implementor + .hir_impl_trait(self.db) + .lazy_span() + .ty_moved() + .into(), + required_methods.into_iter().collect(), + ) + .into(), + ); + } + + self.diags + } +} + +fn find_const_ty_param<'db>( + db: &'db dyn HirAnalysisDb, + ident: IdentId<'db>, + scope: ScopeId<'db>, +) -> Option> { + let path = PathId::from_ident(db.as_hir_db(), ident); + let 
Ok(PathRes::Ty(ty)) = resolve_path(db, path, scope, true) else { + return None; + }; + match ty.data(db) { + TyData::ConstTy(const_ty) => Some(*const_ty), + _ => None, + } +} diff --git a/crates/hir-analysis/src/ty/diagnostics.rs b/crates/hir-analysis/src/ty/diagnostics.rs new file mode 100644 index 0000000000..e663c892cf --- /dev/null +++ b/crates/hir-analysis/src/ty/diagnostics.rs @@ -0,0 +1,2384 @@ +use common::diagnostics::{ + CompleteDiagnostic, DiagnosticPass, GlobalErrorCode, LabelStyle, Severity, SubDiagnostic, +}; +use either::Either; +use hir::{ + diagnostics::DiagnosticVoucher, + hir_def::{ + FieldIndex, Func, FuncParamName, IdentId, ImplTrait, ItemKind, PathId, Trait, + TypeAlias as HirTypeAlias, + }, + span::{DynLazySpan, LazySpan}, + HirDb, SpannedHirDb, +}; +use itertools::Itertools; + +use super::{ + trait_def::{TraitDef, TraitInstId}, + ty_check::{RecordLike, TraitOps}, + ty_def::{Kind, TyData, TyId, TyVarSort}, +}; +use crate::{name_resolution::diagnostics::NameResDiag, HirAnalysisDb}; + +#[derive(Debug, PartialEq, Eq, Hash, Clone, derive_more::From)] +pub enum FuncBodyDiag<'db> { + Ty(TyDiagCollection<'db>), + Body(BodyDiag<'db>), + NameRes(NameResDiag<'db>), +} + +impl<'db> FuncBodyDiag<'db> { + pub(super) fn to_voucher(&self) -> Box + 'db> { + match self { + Self::Ty(diag) => diag.to_voucher(), + Self::Body(diag) => Box::new(diag.clone()) as _, + Self::NameRes(diag) => Box::new(diag.clone()) as _, + } + } +} + +#[derive(Debug, PartialEq, Eq, Hash, Clone, derive_more::From)] +pub enum TyDiagCollection<'db> { + Ty(TyLowerDiag<'db>), + Satisfiability(TraitConstraintDiag<'db>), + TraitLower(TraitLowerDiag<'db>), + Impl(ImplDiag<'db>), +} + +impl<'db> TyDiagCollection<'db> { + pub(super) fn to_voucher(&self) -> Box + 'db> { + match self.clone() { + TyDiagCollection::Ty(diag) => Box::new(diag) as _, + TyDiagCollection::Satisfiability(diag) => Box::new(diag) as _, + TyDiagCollection::TraitLower(diag) => Box::new(diag) as _, + 
TyDiagCollection::Impl(diag) => Box::new(diag) as _, + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum TyLowerDiag<'db> { + ExpectedStarKind(DynLazySpan<'db>), + InvalidTypeArgKind(DynLazySpan<'db>, String), + TooManyGenericArgs { + span: DynLazySpan<'db>, + expected: usize, + given: usize, + }, + + RecursiveType { + primary_span: DynLazySpan<'db>, + field_span: DynLazySpan<'db>, + }, + + UnboundTypeAliasParam { + span: DynLazySpan<'db>, + type_alias: HirTypeAlias<'db>, + n_given_arg: usize, + }, + TypeAliasCycle { + primary: DynLazySpan<'db>, + cycle: Vec>, + }, + + InconsistentKindBound(DynLazySpan<'db>, String), + + KindBoundNotAllowed(DynLazySpan<'db>), + + GenericParamAlreadyDefinedInParent { + primary: DynLazySpan<'db>, + conflict_with: DynLazySpan<'db>, + name: IdentId<'db>, + }, + + DuplicatedArgName { + primary: DynLazySpan<'db>, + conflict_with: DynLazySpan<'db>, + name: IdentId<'db>, + }, + + InvalidConstParamTy { + primary: DynLazySpan<'db>, + }, + + RecursiveConstParamTy(DynLazySpan<'db>), + + ConstTyMismatch { + primary: DynLazySpan<'db>, + expected: String, + actual: String, + }, + + ConstTyExpected { + primary: DynLazySpan<'db>, + expected: String, + }, + + NormalTypeExpected { + primary: DynLazySpan<'db>, + given: String, + }, + + AssocTy(DynLazySpan<'db>), + + InvalidConstTyExpr(DynLazySpan<'db>), +} + +impl<'db> TyLowerDiag<'db> { + pub fn expected_star_kind_ty(span: DynLazySpan<'db>) -> Self { + Self::ExpectedStarKind(span) + } + + pub fn invalid_type_arg_kind( + db: &'db dyn HirAnalysisDb, + span: DynLazySpan<'db>, + expected: Option, + arg: TyId<'db>, + ) -> Self { + let msg = if let Some(expected) = expected { + let arg_kind = arg.kind(db); + debug_assert!(!expected.does_match(arg_kind)); + + format!( + "expected `{}` kind, but `{}` has `{}` kind", + expected, + arg.pretty_print(db), + arg_kind + ) + } else { + "too many generic arguments".to_string() + }; + + Self::InvalidTypeArgKind(span, msg) + } + + pub(super) fn 
recursive_type( + primary_span: DynLazySpan<'db>, + field_span: DynLazySpan<'db>, + ) -> Self { + Self::RecursiveType { + primary_span, + field_span, + } + } + + pub(super) fn unbound_type_alias_param( + span: DynLazySpan<'db>, + type_alias: HirTypeAlias<'db>, + n_given_arg: usize, + ) -> Self { + Self::UnboundTypeAliasParam { + span, + type_alias, + n_given_arg, + } + } + + pub(super) fn invalid_const_param_ty(primary: DynLazySpan<'db>) -> Self { + Self::InvalidConstParamTy { primary } + } + + pub(super) fn inconsistent_kind_bound( + db: &'db dyn HirAnalysisDb, + span: DynLazySpan<'db>, + ty: TyId<'db>, + former_bound: &Kind, + new_kind: &Kind, + ) -> Self { + let msg = format!( + "`{}` is already declared with `{}` kind, but found `{}` kind here", + ty.pretty_print(db), + former_bound, + new_kind + ); + Self::InconsistentKindBound(span, msg) + } + + pub(super) fn generic_param_conflict( + primary: DynLazySpan<'db>, + conflict_with: DynLazySpan<'db>, + name: IdentId<'db>, + ) -> Self { + Self::GenericParamAlreadyDefinedInParent { + primary, + conflict_with, + name, + } + } + + pub(super) fn const_ty_mismatch( + db: &'db dyn HirAnalysisDb, + primary: DynLazySpan<'db>, + expected: TyId<'db>, + actual: TyId<'db>, + ) -> Self { + let expected = expected.pretty_print(db).to_string(); + let actual = actual.pretty_print(db).to_string(); + Self::ConstTyMismatch { + primary, + expected, + actual, + } + } + + pub(super) fn const_ty_expected( + db: &dyn HirAnalysisDb, + primary: DynLazySpan<'db>, + expected: TyId<'db>, + ) -> Self { + let expected = expected.pretty_print(db).to_string(); + Self::ConstTyExpected { primary, expected } + } + + pub(super) fn normal_type_expected( + db: &'db dyn HirAnalysisDb, + primary: DynLazySpan<'db>, + given: TyId<'db>, + ) -> Self { + let given = given.pretty_print(db).to_string(); + Self::NormalTypeExpected { primary, given } + } + + pub(super) fn duplicated_arg_name( + primary: DynLazySpan<'db>, + conflict_with: DynLazySpan<'db>, + name: 
IdentId<'db>, + ) -> Self { + Self::DuplicatedArgName { + primary, + conflict_with, + name, + } + } + + pub(super) fn assoc_ty(span: DynLazySpan<'db>) -> Self { + Self::AssocTy(span) + } + + fn local_code(&self) -> u16 { + match self { + Self::ExpectedStarKind(_) => 0, + Self::InvalidTypeArgKind(_, _) => 1, + Self::RecursiveType { .. } => 2, + Self::UnboundTypeAliasParam { .. } => 3, + Self::TypeAliasCycle { .. } => 4, + Self::InconsistentKindBound(_, _) => 5, + Self::KindBoundNotAllowed(_) => 6, + Self::GenericParamAlreadyDefinedInParent { .. } => 7, + Self::DuplicatedArgName { .. } => 8, + Self::InvalidConstParamTy { .. } => 9, + Self::RecursiveConstParamTy { .. } => 10, + Self::ConstTyMismatch { .. } => 11, + Self::ConstTyExpected { .. } => 12, + Self::NormalTypeExpected { .. } => 13, + Self::AssocTy(_) => 14, + Self::InvalidConstTyExpr(_) => 15, + Self::TooManyGenericArgs { .. } => 16, + } + } + + fn message(&self) -> String { + match self { + Self::ExpectedStarKind(_) => "expected `*` kind in this context".to_string(), + Self::InvalidTypeArgKind(_, _) => "invalid type argument kind".to_string(), + Self::TooManyGenericArgs { + span: _, + expected, + given, + } => format!("too many generic args; expected {expected}, given {given}"), + Self::RecursiveType { .. } => "recursive type is not allowed".to_string(), + + Self::UnboundTypeAliasParam { .. } => { + "all type parameters of type alias must be given".to_string() + } + Self::TypeAliasCycle { .. } => "recursive type alias cycle is detected".to_string(), + + Self::InconsistentKindBound(_, _) => "duplicate type bound is not allowed.".to_string(), + Self::KindBoundNotAllowed(_) => "kind bound is not allowed".to_string(), + + Self::GenericParamAlreadyDefinedInParent { .. } => { + "generic parameter is already defined in the parent item".to_string() + } + + Self::DuplicatedArgName { .. } => { + "duplicated argument name in function definition is not allowed".to_string() + } + + Self::InvalidConstParamTy { .. 
} => "invalid const parameter type".to_string(), + + Self::ConstTyMismatch { .. } => { + "given type doesn't match the expected const type".to_string() + } + + Self::ConstTyExpected { .. } => "expected const type".to_string(), + + Self::NormalTypeExpected { .. } => "expected a normal type".to_string(), + + Self::RecursiveConstParamTy(_) => { + "recursive const parameter type is not allowed".to_string() + } + + Self::AssocTy(_) => "associated type is not supported ".to_string(), + + Self::InvalidConstTyExpr(_) => { + "the expression is not supported yet in a const type context".to_string() + } + } + } + + fn sub_diags(&self, db: &dyn SpannedHirDb) -> Vec { + match self { + Self::ExpectedStarKind(span) => vec![SubDiagnostic::new( + LabelStyle::Primary, + "expected `*` kind here".to_string(), + span.resolve(db), + )], + + Self::InvalidTypeArgKind(span, msg) => vec![SubDiagnostic::new( + LabelStyle::Primary, + msg.clone(), + span.resolve(db), + )], + + Self::TooManyGenericArgs { + span, + expected, + given, + } => vec![SubDiagnostic::new( + LabelStyle::Primary, + format!("too many generic args; expected {expected}, given {given}"), + span.resolve(db), + )], + + Self::RecursiveType { + primary_span, + field_span, + } => { + vec![ + SubDiagnostic::new( + LabelStyle::Primary, + "recursive type definition".to_string(), + primary_span.resolve(db), + ), + SubDiagnostic::new( + LabelStyle::Secondary, + "recursion occurs here".to_string(), + field_span.resolve(db), + ), + ] + } + + Self::UnboundTypeAliasParam { + span: primary_span, + type_alias, + .. 
+ } => { + vec![ + SubDiagnostic::new( + LabelStyle::Primary, + format!( + "expected at least {} arguments here", + type_alias + .generic_params(db.as_hir_db()) + .len(db.as_hir_db()) + ), + primary_span.resolve(db), + ), + SubDiagnostic::new( + LabelStyle::Secondary, + "type alias defined here".to_string(), + type_alias.lazy_span().resolve(db), + ), + ] + } + + Self::TypeAliasCycle { primary, cycle } => { + let mut diags = vec![SubDiagnostic::new( + LabelStyle::Primary, + "cycle happens here".to_string(), + primary.resolve(db), + )]; + diags.extend(cycle.iter().map(|type_alias| { + SubDiagnostic::new( + LabelStyle::Secondary, + "type alias defined here".to_string(), + type_alias.lazy_span().alias_moved().resolve(db), + ) + })); + diags + } + + Self::InconsistentKindBound(primary, msg) => { + vec![SubDiagnostic::new( + LabelStyle::Primary, + msg.clone(), + primary.resolve(db), + )] + } + + Self::KindBoundNotAllowed(span) => vec![SubDiagnostic::new( + LabelStyle::Primary, + "kind bound is not allowed here".to_string(), + span.resolve(db), + )], + + Self::GenericParamAlreadyDefinedInParent { + primary, + conflict_with, + name, + } => { + vec![ + SubDiagnostic::new( + LabelStyle::Primary, + format!("`{}` is already defined", name.data(db.as_hir_db())), + primary.resolve(db), + ), + SubDiagnostic::new( + LabelStyle::Secondary, + "conflict with this generic parameter".to_string(), + conflict_with.resolve(db), + ), + ] + } + + Self::DuplicatedArgName { + primary, + conflict_with, + name, + } => { + vec![ + SubDiagnostic::new( + LabelStyle::Primary, + format!("duplicated argument name `{}`", name.data(db.as_hir_db())), + primary.resolve(db), + ), + SubDiagnostic::new( + LabelStyle::Secondary, + "conflict with this argument name".to_string(), + conflict_with.resolve(db), + ), + ] + } + + Self::InvalidConstParamTy { primary, .. 
} => { + vec![SubDiagnostic::new( + LabelStyle::Primary, + "only integer or bool types are allowed as a const parameter type".to_string(), + primary.resolve(db), + )] + } + + Self::ConstTyMismatch { + primary, + expected, + actual, + } => { + vec![SubDiagnostic::new( + LabelStyle::Primary, + format!( + "expected `{}` type here, but `{}` is given", + expected, actual + ), + primary.resolve(db), + )] + } + + Self::ConstTyExpected { primary, expected } => { + vec![SubDiagnostic::new( + LabelStyle::Primary, + format!("expected const type of `{}` here", expected), + primary.resolve(db), + )] + } + + Self::NormalTypeExpected { primary, given } => { + vec![SubDiagnostic::new( + LabelStyle::Primary, + format!("expected a normal type here, but `{}` is given", given,), + primary.resolve(db), + )] + } + + Self::RecursiveConstParamTy(span) => vec![SubDiagnostic::new( + LabelStyle::Primary, + "recursive const parameter type is detected here".to_string(), + span.resolve(db), + )], + + Self::AssocTy(span) => vec![SubDiagnostic::new( + LabelStyle::Primary, + "associated type is not implemented".to_string(), + span.resolve(db), + )], + + Self::InvalidConstTyExpr(span) => vec![SubDiagnostic::new( + LabelStyle::Primary, + "only literal expression is supported".to_string(), + span.resolve(db), + )], + } + } + + fn severity(&self) -> Severity { + Severity::Error + } +} + +impl<'db> DiagnosticVoucher<'db> for TyLowerDiag<'db> { + fn error_code(&self) -> GlobalErrorCode { + GlobalErrorCode::new(DiagnosticPass::TypeDefinition, self.local_code()) + } + + fn to_complete(&self, db: &'db dyn SpannedHirDb) -> CompleteDiagnostic { + let severity = self.severity(); + let error_code = self.error_code(); + let message = self.message(); + let sub_diags = self.sub_diags(db); + + CompleteDiagnostic::new(severity, message, sub_diags, vec![], error_code) + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum BodyDiag<'db> { + TypeMismatch(DynLazySpan<'db>, String, String), + 
InfiniteOccurrence(DynLazySpan<'db>), + + DuplicatedRestPat(DynLazySpan<'db>), + + InvalidPathDomainInPat { + primary: DynLazySpan<'db>, + resolved: Option>, + }, + + UnitVariantExpected { + primary: DynLazySpan<'db>, + kind_name: String, + hint: Option, + }, + + TupleVariantExpected { + primary: DynLazySpan<'db>, + kind_name: Option, + hint: Option, + }, + + RecordExpected { + primary: DynLazySpan<'db>, + kind_name: Option, + hint: Option, + }, + + MismatchedFieldCount { + primary: DynLazySpan<'db>, + expected: usize, + given: usize, + }, + + DuplicatedRecordFieldBind { + primary: DynLazySpan<'db>, + first_use: DynLazySpan<'db>, + name: IdentId<'db>, + }, + + RecordFieldNotFound { + primary: DynLazySpan<'db>, + label: IdentId<'db>, + }, + + ExplicitLabelExpectedInRecord { + primary: DynLazySpan<'db>, + hint: Option, + }, + + MissingRecordFields { + primary: DynLazySpan<'db>, + missing_fields: Vec>, + hint: Option, + }, + + UndefinedVariable(DynLazySpan<'db>, IdentId<'db>), + + ReturnedTypeMismatch { + primary: DynLazySpan<'db>, + actual: String, + expected: String, + func: Option>, + }, + + TypeMustBeKnown(DynLazySpan<'db>), + + AccessedFieldNotFound { + primary: DynLazySpan<'db>, + given_ty: String, + index: FieldIndex<'db>, + }, + + OpsTraitNotImplemented { + span: DynLazySpan<'db>, + ty: String, + op: IdentId<'db>, + trait_path: PathId<'db>, + }, + + NonAssignableExpr(DynLazySpan<'db>), + + ImmutableAssignment { + primary: DynLazySpan<'db>, + binding: Option<(IdentId<'db>, DynLazySpan<'db>)>, + }, + + LoopControlOutsideOfLoop { + primary: DynLazySpan<'db>, + is_break: bool, + }, + + TraitNotImplemented { + primary: DynLazySpan<'db>, + ty: String, + trait_name: IdentId<'db>, + }, + + NotCallable(DynLazySpan<'db>, String), + + CallGenericArgNumMismatch { + primary: DynLazySpan<'db>, + def_span: DynLazySpan<'db>, + given: usize, + expected: usize, + }, + + CallArgNumMismatch { + primary: DynLazySpan<'db>, + def_span: DynLazySpan<'db>, + given: usize, + expected: 
usize, + }, + + CallArgLabelMismatch { + primary: DynLazySpan<'db>, + def_span: DynLazySpan<'db>, + given: Option>, + expected: IdentId<'db>, + }, + + AmbiguousInherentMethodCall { + primary: DynLazySpan<'db>, + method_name: IdentId<'db>, + cand_spans: Vec>, + }, + + AmbiguousTrait { + primary: DynLazySpan<'db>, + method_name: IdentId<'db>, + traits: Vec>, + }, + + AmbiguousTraitInst { + primary: DynLazySpan<'db>, + cands: Vec, + }, + + InvisibleAmbiguousTrait { + primary: DynLazySpan<'db>, + traits: Vec>, + }, + + MethodNotFound { + primary: DynLazySpan<'db>, + method_name: IdentId<'db>, + receiver: String, + }, + + NotValue { + primary: DynLazySpan<'db>, + given: Either, TyId<'db>>, + }, + + TypeAnnotationNeeded { + primary: DynLazySpan<'db>, + ty: Option, + is_integral: bool, + }, +} + +impl<'db> BodyDiag<'db> { + pub(super) fn type_mismatch( + db: &'db dyn HirAnalysisDb, + span: DynLazySpan<'db>, + expected: TyId<'db>, + actual: TyId<'db>, + ) -> Self { + let expected = expected.pretty_print(db).to_string(); + let actual = actual.pretty_print(db).to_string(); + Self::TypeMismatch(span, expected, actual) + } + + pub(super) fn unit_variant_expected( + db: &'db dyn HirAnalysisDb, + primary: DynLazySpan<'db>, + record_like: T, + ) -> Self + where + T: RecordLike<'db>, + { + let kind_name = record_like.kind_name(db); + let hint = record_like.initializer_hint(db); + Self::UnitVariantExpected { + primary, + kind_name, + hint, + } + } + + pub(super) fn tuple_variant_expected( + db: &'db dyn HirAnalysisDb, + primary: DynLazySpan<'db>, + record_like: Option, + ) -> Self + where + T: RecordLike<'db>, + { + let (kind_name, hint) = if let Some(record_like) = record_like { + ( + Some(record_like.kind_name(db)), + record_like.initializer_hint(db), + ) + } else { + (None, None) + }; + + Self::TupleVariantExpected { + primary, + kind_name, + hint, + } + } + + pub(super) fn record_expected( + db: &'db dyn HirAnalysisDb, + primary: DynLazySpan<'db>, + record_like: Option, + ) -> 
Self + where + T: RecordLike<'db>, + { + let (kind_name, hint) = if let Some(record_like) = record_like { + ( + Some(record_like.kind_name(db)), + record_like.initializer_hint(db), + ) + } else { + (None, None) + }; + + Self::RecordExpected { + primary, + kind_name, + hint, + } + } + + pub(super) fn record_field_not_found(primary: DynLazySpan<'db>, label: IdentId<'db>) -> Self { + Self::RecordFieldNotFound { primary, label } + } + + pub(super) fn returned_type_mismatch( + db: &dyn HirAnalysisDb, + primary: DynLazySpan<'db>, + actual: TyId<'db>, + expected: TyId<'db>, + func: Option>, + ) -> Self { + let actual = actual.pretty_print(db).to_string(); + let expected = expected.pretty_print(db).to_string(); + Self::ReturnedTypeMismatch { + primary, + actual, + expected, + func, + } + } + + pub(super) fn accessed_field_not_found( + db: &dyn HirAnalysisDb, + primary: DynLazySpan<'db>, + given_ty: TyId<'db>, + index: FieldIndex<'db>, + ) -> Self { + let given_ty = given_ty.pretty_print(db).to_string(); + Self::AccessedFieldNotFound { + primary, + given_ty, + index, + } + } + + pub(super) fn ops_trait_not_implemented( + db: &'db dyn HirAnalysisDb, + span: DynLazySpan<'db>, + ty: TyId<'db>, + ops: T, + ) -> Self + where + T: TraitOps, + { + let ty = ty.pretty_print(db).to_string(); + let op = ops.op_symbol(db); + let trait_path = ops.trait_path(db); + Self::OpsTraitNotImplemented { + span, + ty, + op, + trait_path, + } + } + pub(super) fn not_callable( + db: &'db dyn HirAnalysisDb, + span: DynLazySpan<'db>, + ty: TyId<'db>, + ) -> Self { + let ty = ty.pretty_print(db).to_string(); + Self::NotCallable(span, ty) + } + + pub(super) fn method_not_found( + db: &'db dyn HirAnalysisDb, + primary: DynLazySpan<'db>, + method_name: IdentId<'db>, + receiver: Either, TraitDef<'db>>, + ) -> Self { + let receiver = match receiver { + Either::Left(ty) => ty.pretty_print(db), + Either::Right(trait_) => trait_ + .trait_(db) + .name(db.as_hir_db()) + .unwrap() + .data(db.as_hir_db()), + }; + 
+ Self::MethodNotFound { + primary, + method_name, + receiver: receiver.to_string(), + } + } + + pub(super) fn type_annotation_needed( + db: &'db dyn HirAnalysisDb, + primary: DynLazySpan<'db>, + ty: TyId<'db>, + ) -> Self { + let (ty, is_integral) = match ty.base_ty(db).data(db) { + TyData::TyVar(var) => (None, matches!(var.sort, TyVarSort::Integral)), + _ => (ty.pretty_print(db).to_string().into(), false), + }; + + Self::TypeAnnotationNeeded { + primary, + ty, + is_integral, + } + } + + pub(super) fn ambiguous_trait_inst( + db: &'db dyn HirAnalysisDb, + primary: DynLazySpan<'db>, + cands: Vec>, + ) -> Self { + let cands = cands + .into_iter() + .map(|cand| cand.pretty_print(db, false)) + .collect_vec(); + Self::AmbiguousTraitInst { primary, cands } + } + + fn local_code(&self) -> u16 { + match self { + Self::TypeMismatch(..) => 0, + Self::InfiniteOccurrence(..) => 1, + Self::DuplicatedRestPat(..) => 2, + Self::InvalidPathDomainInPat { .. } => 3, + Self::UnitVariantExpected { .. } => 4, + Self::TupleVariantExpected { .. } => 5, + Self::RecordExpected { .. } => 6, + Self::MismatchedFieldCount { .. } => 7, + Self::DuplicatedRecordFieldBind { .. } => 8, + Self::RecordFieldNotFound { .. } => 9, + Self::ExplicitLabelExpectedInRecord { .. } => 10, + Self::MissingRecordFields { .. } => 11, + Self::UndefinedVariable(..) => 12, + Self::ReturnedTypeMismatch { .. } => 13, + Self::TypeMustBeKnown(..) => 14, + Self::AccessedFieldNotFound { .. } => 15, + Self::OpsTraitNotImplemented { .. } => 16, + Self::NonAssignableExpr(..) => 17, + Self::ImmutableAssignment { .. } => 18, + Self::LoopControlOutsideOfLoop { .. } => 19, + Self::TraitNotImplemented { .. } => 20, + Self::NotCallable(..) => 21, + Self::CallGenericArgNumMismatch { .. } => 22, + Self::CallArgNumMismatch { .. } => 23, + Self::CallArgLabelMismatch { .. } => 24, + Self::AmbiguousInherentMethodCall { .. } => 25, + Self::AmbiguousTrait { .. } => 26, + Self::AmbiguousTraitInst { .. 
} => 27, + Self::InvisibleAmbiguousTrait { .. } => 28, + Self::MethodNotFound { .. } => 29, + Self::NotValue { .. } => 30, + Self::TypeAnnotationNeeded { .. } => 31, + } + } + + fn message(&self, db: &dyn HirDb) -> String { + match self { + Self::TypeMismatch(_, _, _) => "type mismatch".to_string(), + Self::InfiniteOccurrence(_) => "infinite sized type found".to_string(), + Self::DuplicatedRestPat(_) => "duplicated `..` found".to_string(), + Self::InvalidPathDomainInPat { .. } => "invalid item is given here".to_string(), + Self::UnitVariantExpected { .. } => "expected unit variant".to_string(), + Self::TupleVariantExpected { .. } => "expected tuple variant".to_string(), + Self::RecordExpected { .. } => "expected record variant or struct".to_string(), + Self::MismatchedFieldCount { .. } => "field count mismatch".to_string(), + Self::DuplicatedRecordFieldBind { .. } => "duplicated record field binding".to_string(), + Self::RecordFieldNotFound { .. } => "specified field not found".to_string(), + Self::ExplicitLabelExpectedInRecord { .. } => "explicit label is required".to_string(), + Self::MissingRecordFields { .. } => "all fields are not given".to_string(), + Self::UndefinedVariable(..) => "undefined variable".to_string(), + Self::ReturnedTypeMismatch { .. } => "returned type mismatch".to_string(), + Self::TypeMustBeKnown(..) => "type must be known here".to_string(), + Self::AccessedFieldNotFound { .. } => "invalid field index".to_string(), + Self::OpsTraitNotImplemented { trait_path, .. } => { + format!("`{}` trait is not implemented", trait_path.pretty_print(db)) + } + Self::NonAssignableExpr { .. } => { + "not assignable left-hand side of assignment".to_string() + } + Self::ImmutableAssignment { .. } => { + "left-hand side of assignment is immutable".to_string() + } + + Self::LoopControlOutsideOfLoop { is_break, .. 
} => { + format!( + "`{}` is not allowed outside of a loop", + if *is_break { "break" } else { "continue" } + ) + } + + Self::TraitNotImplemented { trait_name, ty, .. } => { + format!("`{}` needs to be implemented for {ty}", trait_name.data(db)) + } + + Self::NotCallable(..) => "not callable type is given in call expression".to_string(), + + Self::CallGenericArgNumMismatch { .. } => { + "given generic argument number mismatch".to_string() + } + + Self::CallArgNumMismatch { .. } => "given argument number mismatch".to_string(), + Self::CallArgLabelMismatch { .. } => "given argument label mismatch".to_string(), + + Self::AmbiguousInherentMethodCall { .. } => "ambiguous method call".to_string(), + + Self::AmbiguousTrait { .. } => "multiple trait candidates found".to_string(), + + Self::AmbiguousTraitInst { .. } => "ambiguous trait implementation".to_string(), + + Self::InvisibleAmbiguousTrait { .. } => "trait is not in the scope".to_string(), + + Self::MethodNotFound { method_name, .. } => { + format!("`{}` is not found", method_name.data(db)) + } + + Self::NotValue { .. } => "value is expected".to_string(), + Self::TypeAnnotationNeeded { .. 
} => "type annotation is needed".to_string(), + } + } + + fn sub_diags(&self, db: &dyn SpannedHirDb) -> Vec { + match self { + Self::TypeMismatch(span, expected, actual) => vec![SubDiagnostic::new( + LabelStyle::Primary, + format!("expected `{}`, but `{}` is given", expected, actual), + span.resolve(db), + )], + + Self::InfiniteOccurrence(span) => vec![SubDiagnostic::new( + LabelStyle::Primary, + "infinite sized type found".to_string(), + span.resolve(db), + )], + + Self::DuplicatedRestPat(span) => vec![SubDiagnostic::new( + LabelStyle::Primary, + "`..` can be used only once".to_string(), + span.resolve(db), + )], + + Self::InvalidPathDomainInPat { primary, resolved } => { + let mut diag = vec![SubDiagnostic::new( + LabelStyle::Primary, + "expected type or enum variant here".to_string(), + primary.resolve(db), + )]; + if let Some(resolved) = resolved { + diag.push(SubDiagnostic::new( + LabelStyle::Secondary, + "this item given".to_string(), + resolved.resolve(db), + )) + } + diag + } + + Self::UnitVariantExpected { + primary, + kind_name: pat_kind, + hint, + } => { + let mut diag = vec![SubDiagnostic::new( + LabelStyle::Primary, + format!("expected unit variant here, but found {}", pat_kind,), + primary.resolve(db), + )]; + if let Some(hint) = hint { + diag.push(SubDiagnostic::new( + LabelStyle::Secondary, + format!("Consider using `{}` instead", hint), + primary.resolve(db), + )) + } + diag + } + + Self::TupleVariantExpected { + primary, + kind_name: pat_kind, + hint, + } => { + let mut diag = if let Some(pat_kind) = pat_kind { + vec![SubDiagnostic::new( + LabelStyle::Primary, + format!("expected tuple variant here, but found {}", pat_kind,), + primary.resolve(db), + )] + } else { + vec![SubDiagnostic::new( + LabelStyle::Primary, + "expected tuple variant here".to_string(), + primary.resolve(db), + )] + }; + + if let Some(hint) = hint { + diag.push(SubDiagnostic::new( + LabelStyle::Secondary, + format!("Consider using `{}` instead", hint), + primary.resolve(db), + 
)) + } + + diag + } + + Self::RecordExpected { + primary, + kind_name: pat_kind, + hint, + } => { + let mut diag = if let Some(pat_kind) = pat_kind { + vec![SubDiagnostic::new( + LabelStyle::Primary, + format!( + "expected record variant or struct here, but found {}", + pat_kind, + ), + primary.resolve(db), + )] + } else { + vec![SubDiagnostic::new( + LabelStyle::Primary, + "expected record variant or struct here".to_string(), + primary.resolve(db), + )] + }; + + if let Some(hint) = hint { + diag.push(SubDiagnostic::new( + LabelStyle::Secondary, + format!("Consider using `{}` instead", hint), + primary.resolve(db), + )) + } + + diag + } + + Self::MismatchedFieldCount { + primary, + expected, + given, + } => { + vec![SubDiagnostic::new( + LabelStyle::Primary, + format!("expected {} fields here, but {} given", expected, given,), + primary.resolve(db), + )] + } + + Self::DuplicatedRecordFieldBind { + primary, + first_use, + name, + } => { + let name = name.data(db.as_hir_db()); + vec![ + SubDiagnostic::new( + LabelStyle::Primary, + format!("duplicated field binding `{}`", name), + primary.resolve(db), + ), + SubDiagnostic::new( + LabelStyle::Secondary, + format!("first use of `{}`", name), + first_use.resolve(db), + ), + ] + } + + Self::RecordFieldNotFound { primary, label } => { + let label = label.data(db.as_hir_db()); + vec![SubDiagnostic::new( + LabelStyle::Primary, + format!("field `{}` not found", label), + primary.resolve(db), + )] + } + + Self::ExplicitLabelExpectedInRecord { primary, hint } => { + let mut diag = vec![SubDiagnostic::new( + LabelStyle::Primary, + "explicit label is required".to_string(), + primary.resolve(db), + )]; + if let Some(hint) = hint { + diag.push(SubDiagnostic::new( + LabelStyle::Secondary, + format!("Consider using `{}` instead", hint), + primary.resolve(db), + )) + } + + diag + } + + Self::MissingRecordFields { + primary, + missing_fields, + hint, + } => { + let missing = missing_fields + .iter() + .map(|id| id.data(db.as_hir_db())) 
+ .join(", "); + + let mut diag = vec![SubDiagnostic::new( + LabelStyle::Primary, + format! {"missing `{}`", missing}, + primary.resolve(db), + )]; + if let Some(hint) = hint { + diag.push(SubDiagnostic::new( + LabelStyle::Secondary, + format!("Consider using `{}` instead", hint), + primary.resolve(db), + )) + } + diag + } + + Self::UndefinedVariable(primary, ident) => { + let ident = ident.data(db.as_hir_db()); + + vec![SubDiagnostic::new( + LabelStyle::Primary, + format!("undefined variable `{}`", ident), + primary.resolve(db), + )] + } + + Self::ReturnedTypeMismatch { + primary, + actual, + expected, + func, + } => { + let mut diag = vec![SubDiagnostic::new( + LabelStyle::Primary, + format!("expected `{}`, but `{}` is returned", expected, actual), + primary.resolve(db), + )]; + + if let Some(func) = func { + if func.ret_ty(db.as_hir_db()).is_some() { + diag.push(SubDiagnostic::new( + LabelStyle::Secondary, + format!("this function expects `{}` to be returned", expected), + func.lazy_span().ret_ty_moved().resolve(db), + )) + } else { + diag.push(SubDiagnostic::new( + LabelStyle::Secondary, + format!("try adding `-> {}`", actual), + func.lazy_span().name_moved().resolve(db), + )) + } + } + + diag + } + + Self::TypeMustBeKnown(span) => vec![SubDiagnostic::new( + LabelStyle::Primary, + "type must be known here".to_string(), + span.resolve(db), + )], + + Self::AccessedFieldNotFound { + primary, + given_ty, + index, + } => { + let message = match index { + FieldIndex::Ident(ident) => { + format!( + "field `{}` is not found in `{}`", + ident.data(db.as_hir_db()), + &given_ty, + ) + } + FieldIndex::Index(index) => { + format!( + "field `{}` is not found in `{}`", + index.data(db.as_hir_db()), + &given_ty + ) + } + }; + + vec![SubDiagnostic::new( + LabelStyle::Primary, + message, + primary.resolve(db), + )] + } + + Self::OpsTraitNotImplemented { + span, + ty, + op, + trait_path, + } => { + let op = op.data(db.as_hir_db()); + let trait_path = 
trait_path.pretty_print(db.as_hir_db()); + + vec![ + SubDiagnostic::new( + LabelStyle::Primary, + format!("`{}` cant be applied to `{}`", op, ty), + span.resolve(db), + ), + SubDiagnostic::new( + LabelStyle::Secondary, + format! {"Try implementing `{}` for `{}`", trait_path, ty}, + span.resolve(db), + ), + ] + } + + Self::NonAssignableExpr(primary) => { + vec![SubDiagnostic::new( + LabelStyle::Primary, + "cant assign to this expression".to_string(), + primary.resolve(db), + )] + } + + Self::ImmutableAssignment { primary, binding } => { + let mut diag = vec![SubDiagnostic::new( + LabelStyle::Primary, + "immutable assignment".to_string(), + primary.resolve(db), + )]; + if let Some((name, span)) = binding { + diag.push(SubDiagnostic::new( + LabelStyle::Secondary, + format!("try changing to `mut {}`", name.data(db.as_hir_db())), + span.resolve(db), + )); + } + diag + } + + Self::LoopControlOutsideOfLoop { primary, is_break } => { + let stmt = if *is_break { "break" } else { "continue" }; + vec![SubDiagnostic::new( + LabelStyle::Primary, + format!("`{}` is not allowed here", stmt), + primary.resolve(db), + )] + } + + Self::TraitNotImplemented { + primary, + ty, + trait_name, + } => { + let trait_name = trait_name.data(db.as_hir_db()); + vec![ + SubDiagnostic::new( + LabelStyle::Primary, + format!("`{trait_name}` needs to be implemented for `{ty}`"), + primary.resolve(db), + ), + SubDiagnostic::new( + LabelStyle::Secondary, + format!("consider implementing `{trait_name}` for `{ty}`"), + primary.resolve(db), + ), + ] + } + + Self::NotCallable(primary, ty) => { + vec![SubDiagnostic::new( + LabelStyle::Primary, + format!("`{ty}` is not callable"), + primary.resolve(db), + )] + } + + Self::CallGenericArgNumMismatch { + primary, + def_span, + given, + expected, + } => { + vec![ + SubDiagnostic::new( + LabelStyle::Primary, + format!( + "expected {} generic arguments, but {} given", + expected, given + ), + primary.resolve(db), + ), + SubDiagnostic::new( + 
LabelStyle::Secondary, + "function defined here".to_string(), + def_span.resolve(db), + ), + ] + } + + Self::CallArgNumMismatch { + primary, + def_span, + given, + expected, + } => { + vec![ + SubDiagnostic::new( + LabelStyle::Primary, + format!("expected {} arguments, but {} given", expected, given), + primary.resolve(db), + ), + SubDiagnostic::new( + LabelStyle::Secondary, + "function defined here".to_string(), + def_span.resolve(db), + ), + ] + } + + Self::CallArgLabelMismatch { + primary, + def_span, + given, + expected, + } => { + let mut diags = if let Some(given) = given { + vec![SubDiagnostic::new( + LabelStyle::Primary, + format!( + "expected `{}` label, but `{}` given", + expected.data(db.as_hir_db()), + given.data(db.as_hir_db()) + ), + primary.resolve(db), + )] + } else { + vec![SubDiagnostic::new( + LabelStyle::Primary, + format!("expected `{}` label", expected.data(db.as_hir_db())), + primary.resolve(db), + )] + }; + + diags.push(SubDiagnostic::new( + LabelStyle::Secondary, + "function defined here".to_string(), + def_span.resolve(db), + )); + + diags + } + + Self::AmbiguousInherentMethodCall { + primary, + method_name, + cand_spans: candidates, + } => { + let method_name = method_name.data(db.as_hir_db()); + let mut diags = vec![SubDiagnostic::new( + LabelStyle::Primary, + format!("`{}` is ambiguous", method_name), + primary.resolve(db), + )]; + + for candidate in candidates.iter() { + diags.push(SubDiagnostic::new( + LabelStyle::Secondary, + format!("`{method_name}` is defined here"), + candidate.resolve(db), + )); + } + + diags + } + + Self::AmbiguousTrait { + primary, + method_name, + traits, + } => { + let method_name = method_name.data(db.as_hir_db()); + let mut diags = vec![SubDiagnostic::new( + LabelStyle::Primary, + format!("`{method_name}` is ambiguous"), + primary.resolve(db), + )]; + + for trait_ in traits.iter() { + let trait_name = trait_.name(db.as_hir_db()).unwrap().data(db.as_hir_db()); + diags.push(SubDiagnostic::new( + 
LabelStyle::Secondary, + format!("candidate: `{trait_name}::{method_name}`"), + primary.resolve(db), + )); + } + + diags + } + + Self::AmbiguousTraitInst { primary, cands } => { + let mut diags = vec![SubDiagnostic::new( + LabelStyle::Primary, + "multiple implementations are found".to_string(), + primary.resolve(db), + )]; + + for cand in cands { + diags.push(SubDiagnostic::new( + LabelStyle::Secondary, + format!("candidate: {cand}"), + primary.resolve(db), + )) + } + + diags + } + + Self::InvisibleAmbiguousTrait { primary, traits } => { + let mut diags = vec![SubDiagnostic::new( + LabelStyle::Primary, + "consider importing one of the following traits into the scope to resolve the ambiguity" + .to_string(), + primary.resolve(db), + )]; + + for trait_ in traits { + if let Some(path) = trait_.scope().pretty_path(db.as_hir_db()) { + let diag = SubDiagnostic::new( + LabelStyle::Secondary, + format!("`use {path}`"), + primary.resolve(db), + ); + diags.push(diag); + }; + } + + diags + } + + Self::MethodNotFound { + primary, + method_name, + receiver: ty, + } => { + let method_name = method_name.data(db.as_hir_db()); + vec![SubDiagnostic::new( + LabelStyle::Primary, + format!("`{}` is not found in `{}`", method_name, ty), + primary.resolve(db), + )] + } + + Self::NotValue { primary, given } => { + vec![SubDiagnostic::new( + LabelStyle::Primary, + format!( + "`{}` cannot be used as a value", + match given { + Either::Left(item) => item.kind_name(), + Either::Right(_) => "type", + } + ), + primary.resolve(db), + )] + } + + Self::TypeAnnotationNeeded { + primary, + ty, + is_integral, + } => { + let mut diags = vec![SubDiagnostic::new( + LabelStyle::Primary, + "type annotation is needed".to_string(), + primary.resolve(db), + )]; + + let sub_diag_msg = match ty { + Some(ty) => format!("consider giving `: {ty}` here"), + None if *is_integral => "no default type is provided for an integer type. 
consider giving integer type".to_string(), + None => "consider giving `: Type` here".to_string(), + }; + + diags.push(SubDiagnostic::new( + LabelStyle::Secondary, + sub_diag_msg, + primary.resolve(db), + )); + + diags + } + } + } + + fn severity(&self) -> Severity { + Severity::Error + } +} + +impl<'db> DiagnosticVoucher<'db> for BodyDiag<'db> { + fn error_code(&self) -> GlobalErrorCode { + GlobalErrorCode::new(DiagnosticPass::TyCheck, self.local_code()) + } + + fn to_complete(&self, db: &'db dyn SpannedHirDb) -> CompleteDiagnostic { + let severity = self.severity(); + let error_code = self.error_code(); + let message = self.message(db.as_hir_db()); + let sub_diags = self.sub_diags(db); + + CompleteDiagnostic::new(severity, message, sub_diags, vec![], error_code) + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum TraitLowerDiag<'db> { + ExternalTraitForExternalType(DynLazySpan<'db>), + + ConflictTraitImpl { + primary: ImplTrait<'db>, + conflict_with: ImplTrait<'db>, + }, + + CyclicSuperTraits(DynLazySpan<'db>), +} + +impl<'db> TraitLowerDiag<'db> { + pub(super) fn external_trait_for_external_type(impl_trait: ImplTrait<'db>) -> Self { + Self::ExternalTraitForExternalType(impl_trait.lazy_span().trait_ref().into()) + } + + pub(super) fn conflict_impl(primary: ImplTrait<'db>, conflict_with: ImplTrait<'db>) -> Self { + Self::ConflictTraitImpl { + primary, + conflict_with, + } + } + + fn local_code(&self) -> u16 { + match self { + Self::ExternalTraitForExternalType(_) => 0, + Self::ConflictTraitImpl { .. } => 1, + Self::CyclicSuperTraits { .. } => 2, + } + } + + fn message(&self) -> String { + match self { + Self::ExternalTraitForExternalType(_) => { + "external trait cannot be implemented for external type".to_string() + } + + Self::ConflictTraitImpl { .. } => "conflict trait implementation".to_string(), + + Self::CyclicSuperTraits { .. 
} => "cyclic super traits are not allowed".to_string(), + } + } + + fn sub_diags(&self, db: &dyn hir::SpannedHirDb) -> Vec { + match self { + Self::ExternalTraitForExternalType(span) => vec![SubDiagnostic::new( + LabelStyle::Primary, + "external trait cannot be implemented for external type".to_string(), + span.resolve(db), + )], + + Self::ConflictTraitImpl { + primary, + conflict_with, + } => vec![ + SubDiagnostic::new( + LabelStyle::Primary, + "conflict trait implementation".to_string(), + primary.lazy_span().ty().resolve(db), + ), + SubDiagnostic::new( + LabelStyle::Secondary, + "conflict with this trait implementation".to_string(), + conflict_with.lazy_span().ty().resolve(db), + ), + ], + + Self::CyclicSuperTraits(span) => { + vec![SubDiagnostic::new( + LabelStyle::Primary, + "super traits cycle is detected here".to_string(), + span.resolve(db), + )] + } + } + } + + fn severity(&self) -> Severity { + Severity::Error + } +} + +impl<'db> DiagnosticVoucher<'db> for TraitLowerDiag<'db> { + fn error_code(&self) -> GlobalErrorCode { + GlobalErrorCode::new(DiagnosticPass::ImplTraitDefinition, self.local_code()) + } + + fn to_complete(&self, db: &'db dyn hir::SpannedHirDb) -> CompleteDiagnostic { + let severity = self.severity(); + let error_code = self.error_code(); + let message = self.message(); + let sub_diags = self.sub_diags(db); + + CompleteDiagnostic::new(severity, message, sub_diags, vec![], error_code) + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum TraitConstraintDiag<'db> { + KindMismatch { + primary: DynLazySpan<'db>, + trait_def: Trait<'db>, + }, + + TraitArgNumMismatch { + span: DynLazySpan<'db>, + expected: usize, + given: usize, + }, + + TraitArgKindMismatch(DynLazySpan<'db>, String), + + TraitBoundNotSat(DynLazySpan<'db>, String, Option), + + InfiniteBoundRecursion(DynLazySpan<'db>, String), + + ConcreteTypeBound(DynLazySpan<'db>, String), + + ConstTyBound(DynLazySpan<'db>, String), +} + +impl<'db> TraitConstraintDiag<'db> { + 
pub(super) fn kind_mismatch( + db: &'db dyn HirAnalysisDb, + span: DynLazySpan<'db>, + expected: &Kind, + actual: TyId<'db>, + ) -> Self { + let actual_kind = actual.kind(db); + let ty_display = actual.pretty_print(db); + let msg = format!( + "expected `{}` kind, but `{}` has `{}` kind", + expected, ty_display, actual_kind, + ); + Self::TraitArgKindMismatch(span, msg) + } + + pub(super) fn trait_arg_num_mismatch( + span: DynLazySpan<'db>, + expected: usize, + given: usize, + ) -> Self { + Self::TraitArgNumMismatch { + span, + expected, + given, + } + } + + pub(super) fn trait_bound_not_satisfied( + db: &'db dyn HirAnalysisDb, + span: DynLazySpan<'db>, + primary_goal: TraitInstId<'db>, + unsat_subgoal: Option>, + ) -> Self { + let msg = format!( + "`{}` doesn't implement `{}`", + primary_goal.self_ty(db).pretty_print(db), + primary_goal.pretty_print(db, false) + ); + + let unsat_subgoal = unsat_subgoal.map(|unsat| { + format!( + "trait bound `{}` is not satisfied", + unsat.pretty_print(db, true) + ) + }); + Self::TraitBoundNotSat(span, msg, unsat_subgoal) + } + + pub(super) fn const_ty_bound( + db: &'db dyn HirAnalysisDb, + ty: TyId<'db>, + span: DynLazySpan<'db>, + ) -> Self { + let msg = format!("`{}` is a const type", ty.pretty_print(db)); + Self::ConstTyBound(span, msg) + } + + pub(super) fn concrete_type_bound( + db: &'db dyn HirAnalysisDb, + span: DynLazySpan<'db>, + ty: TyId<'db>, + ) -> Self { + let msg = format!("`{}` is a concrete type", ty.pretty_print(db)); + Self::ConcreteTypeBound(span, msg) + } + + fn local_code(&self) -> u16 { + match self { + Self::KindMismatch { .. } => 0, + Self::TraitArgNumMismatch { .. } => 1, + Self::TraitArgKindMismatch(_, _) => 2, + Self::TraitBoundNotSat(..) => 3, + Self::InfiniteBoundRecursion(..) => 4, + Self::ConcreteTypeBound(..) => 5, + Self::ConstTyBound(..) => 6, + } + } + + fn message(&self) -> String { + match self { + Self::KindMismatch { .. 
} => "type doesn't satisfy required kind bound".to_string(), + + Self::TraitArgNumMismatch { .. } => "given trait argument number mismatch".to_string(), + + Self::TraitArgKindMismatch(..) => "given trait argument kind mismatch".to_string(), + + Self::TraitBoundNotSat(..) => "trait bound is not satisfied".to_string(), + + Self::InfiniteBoundRecursion(..) => "infinite trait bound recursion".to_string(), + + Self::ConcreteTypeBound(..) => { + "trait bound for concrete type is not allowed".to_string() + } + + Self::ConstTyBound(..) => "trait bound for const type is not allowed".to_string(), + } + } + + fn sub_diags(&self, db: &dyn SpannedHirDb) -> Vec { + match self { + Self::KindMismatch { primary, trait_def } => vec![ + SubDiagnostic::new( + LabelStyle::Primary, + "type doesn't satisfy required kind bound here".to_string(), + primary.resolve(db), + ), + SubDiagnostic::new( + LabelStyle::Secondary, + "trait is defined here".to_string(), + trait_def.lazy_span().name().resolve(db), + ), + ], + + Self::TraitArgNumMismatch { + span, + expected, + given, + } => { + vec![SubDiagnostic::new( + LabelStyle::Primary, + format!("expected {} arguments here, but {} given", expected, given,), + span.resolve(db), + )] + } + + Self::TraitArgKindMismatch(span, msg) => vec![SubDiagnostic::new( + LabelStyle::Primary, + msg.clone(), + span.resolve(db), + )], + + Self::TraitBoundNotSat(span, msg, subgoal) => { + let mut diags = vec![SubDiagnostic::new( + LabelStyle::Primary, + msg.clone(), + span.resolve(db), + )]; + + if let Some(subgoal) = subgoal { + diags.push(SubDiagnostic::new( + LabelStyle::Secondary, + subgoal.clone(), + span.resolve(db), + )) + } + + diags + } + + Self::InfiniteBoundRecursion(span, msg) => vec![SubDiagnostic::new( + LabelStyle::Primary, + msg.clone(), + span.resolve(db), + )], + + Self::ConcreteTypeBound(span, msg) => vec![SubDiagnostic::new( + LabelStyle::Primary, + msg.clone(), + span.resolve(db), + )], + + Self::ConstTyBound(span, msg) => 
vec![SubDiagnostic::new( + LabelStyle::Primary, + msg.clone(), + span.resolve(db), + )], + } + } + + fn severity(&self) -> Severity { + Severity::Error + } +} + +impl<'db> DiagnosticVoucher<'db> for TraitConstraintDiag<'db> { + fn error_code(&self) -> GlobalErrorCode { + GlobalErrorCode::new(DiagnosticPass::TraitSatisfaction, self.local_code()) + } + + fn to_complete(&self, db: &'db dyn SpannedHirDb) -> CompleteDiagnostic { + let severity = self.severity(); + let error_code = self.error_code(); + let message = self.message(); + let sub_diags = self.sub_diags(db); + + CompleteDiagnostic::new(severity, message, sub_diags, vec![], error_code) + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum ImplDiag<'db> { + ConflictMethodImpl { + primary: DynLazySpan<'db>, + conflict_with: DynLazySpan<'db>, + }, + + MethodNotDefinedInTrait { + primary: DynLazySpan<'db>, + trait_: Trait<'db>, + method_name: IdentId<'db>, + }, + + NotAllTraitItemsImplemented { + primary: DynLazySpan<'db>, + not_implemented: Vec>, + }, + + MethodTypeParamNumMismatch { + primary: DynLazySpan<'db>, + expected: usize, + given: usize, + }, + + MethodTypeParamKindMismatch { + primary: DynLazySpan<'db>, + message: String, + }, + + MethodArgNumMismatch { + primary: DynLazySpan<'db>, + expected: usize, + given: usize, + }, + + MethodArgLabelMismatch { + primary: DynLazySpan<'db>, + definition: DynLazySpan<'db>, + message: String, + }, + + MethodArgTyMismatch { + primary: DynLazySpan<'db>, + message: String, + }, + + MethodRetTyMismatch { + primary: DynLazySpan<'db>, + message: String, + }, + + MethodStricterBound { + primary: DynLazySpan<'db>, + message: String, + }, + + InvalidSelfType { + primary: DynLazySpan<'db>, + message: String, + }, + + InherentImplIsNotAllowed { + primary: DynLazySpan<'db>, + ty: String, + is_nominal: bool, + }, +} + +impl<'db> ImplDiag<'db> { + pub(super) fn conflict_method_impl( + primary: DynLazySpan<'db>, + conflict_with: DynLazySpan<'db>, + ) -> Self { + 
Self::ConflictMethodImpl { + primary, + conflict_with, + } + } + + pub(super) fn method_not_defined_in_trait( + primary: DynLazySpan<'db>, + trait_: Trait<'db>, + method_name: IdentId<'db>, + ) -> Self { + Self::MethodNotDefinedInTrait { + primary, + trait_, + method_name, + } + } + + pub(super) fn not_all_trait_items_implemented( + primary: DynLazySpan<'db>, + not_implemented: Vec>, + ) -> Self { + Self::NotAllTraitItemsImplemented { + primary, + not_implemented, + } + } + + pub(super) fn method_param_num_mismatch( + primary: DynLazySpan<'db>, + expected: usize, + given: usize, + ) -> Self { + Self::MethodTypeParamNumMismatch { + primary, + expected, + given, + } + } + + pub(super) fn method_arg_num_mismatch( + primary: DynLazySpan<'db>, + expected: usize, + given: usize, + ) -> Self { + Self::MethodArgNumMismatch { + primary, + expected, + given, + } + } + + pub fn method_arg_ty_mismatch( + db: &'db dyn HirAnalysisDb, + primary: DynLazySpan<'db>, + expected: TyId<'db>, + given: TyId<'db>, + ) -> Self { + let message = format!( + "expected `{}` type, but the given type is `{}`", + expected.pretty_print(db), + given.pretty_print(db), + ); + + Self::MethodArgTyMismatch { primary, message } + } + + pub fn method_arg_label_mismatch( + db: &'db dyn HirAnalysisDb, + primary: DynLazySpan<'db>, + definition: DynLazySpan<'db>, + expected: FuncParamName<'db>, + given: FuncParamName<'db>, + ) -> Self { + let message = format!( + "expected `{}` label, but the given label is `{}`", + expected.pretty_print(db.as_hir_db()), + given.pretty_print(db.as_hir_db()) + ); + + Self::MethodArgLabelMismatch { + primary, + definition, + message, + } + } + + pub fn method_ret_type_mismatch( + db: &'db dyn HirAnalysisDb, + primary: DynLazySpan<'db>, + expected: TyId<'db>, + given: TyId<'db>, + ) -> Self { + let message = format!( + "expected `{}` type, but the given type is `{}`", + expected.pretty_print(db), + given.pretty_print(db), + ); + + Self::MethodRetTyMismatch { primary, message } + 
} + + pub(super) fn method_param_kind_mismatch( + primary: DynLazySpan<'db>, + expected: &Kind, + given: &Kind, + ) -> Self { + let message = format!( + "expected `{}` kind, but the given type has `{}` kind", + expected, given, + ); + + Self::MethodTypeParamKindMismatch { primary, message } + } + + pub(super) fn method_stricter_bound( + db: &'db dyn HirAnalysisDb, + primary: DynLazySpan<'db>, + stricter_bounds: &[TraitInstId<'db>], + ) -> Self { + let message = format!( + "method has stricter bounds than the declared method in the trait: {}", + stricter_bounds + .iter() + .map(|pred| format!("`{}`", pred.pretty_print(db, true))) + .join(", ") + ); + Self::MethodStricterBound { primary, message } + } + + pub(super) fn invalid_self_ty( + db: &dyn HirAnalysisDb, + primary: DynLazySpan<'db>, + expected: TyId<'db>, + actual: TyId<'db>, + ) -> Self { + let message = if !expected.is_trait_self(db) { + format!( + "type of `self` must starts with `Self` or `{}`, but the given type is `{}`", + expected.pretty_print(db), + actual.pretty_print(db), + ) + } else { + format!( + "type of `self` must starts with `Self`, but the given type is `{}`", + actual.pretty_print(db), + ) + }; + + Self::InvalidSelfType { primary, message } + } + + pub fn local_code(&self) -> u16 { + match self { + Self::ConflictMethodImpl { .. } => 0, + Self::MethodNotDefinedInTrait { .. } => 1, + Self::NotAllTraitItemsImplemented { .. } => 2, + Self::MethodTypeParamNumMismatch { .. } => 3, + Self::MethodTypeParamKindMismatch { .. } => 4, + Self::MethodArgNumMismatch { .. } => 5, + Self::MethodArgLabelMismatch { .. } => 6, + Self::MethodArgTyMismatch { .. } => 7, + Self::MethodRetTyMismatch { .. } => 8, + Self::MethodStricterBound { .. } => 9, + Self::InvalidSelfType { .. } => 10, + Self::InherentImplIsNotAllowed { .. } => 11, + } + } + + fn message(&self, db: &dyn HirDb) -> String { + match self { + Self::ConflictMethodImpl { .. 
} => "conflict method implementation".to_string(), + Self::MethodNotDefinedInTrait { + trait_, + method_name, + .. + } => format!( + "method `{}` is not defined in trait `{}`", + method_name.data(db), + trait_.name(db).unwrap().data(db), + ), + + Self::NotAllTraitItemsImplemented { .. } => { + "not all trait methods are implemented".to_string() + } + + Self::MethodTypeParamNumMismatch { .. } => { + "trait method type parameter number mismatch".to_string() + } + + Self::MethodTypeParamKindMismatch { .. } => { + "trait method type parameter kind mismatch".to_string() + } + + Self::MethodArgNumMismatch { .. } => { + "trait method argument number mismatch".to_string() + } + + Self::MethodArgLabelMismatch { .. } => { + "given argument label doesn't match the expected label required by trait" + .to_string() + } + + Self::MethodArgTyMismatch { .. } => { + "given argument type doesn't match the expected type required by trait".to_string() + } + + Self::MethodRetTyMismatch { .. } => { + "given return type doesn't match the expected type required by trait".to_string() + } + + Self::MethodStricterBound { .. } => { + "impl method has stricter bound than the declared method in the trait".to_string() + } + + Self::InvalidSelfType { .. } => "invalid type for `self` argument".to_string(), + + Self::InherentImplIsNotAllowed { .. 
} => "inherent impl is not allowed".to_string(), + } + } + + fn sub_diags(&self, db: &dyn SpannedHirDb) -> Vec { + match self { + Self::ConflictMethodImpl { + primary, + conflict_with, + } => vec![ + SubDiagnostic::new( + LabelStyle::Primary, + "conflict method implementation".to_string(), + primary.resolve(db), + ), + SubDiagnostic::new( + LabelStyle::Secondary, + "conflict with this method implementation".to_string(), + conflict_with.resolve(db), + ), + ], + + Self::MethodNotDefinedInTrait { + primary, + trait_, + method_name, + } => { + vec![ + SubDiagnostic::new( + LabelStyle::Primary, + format!( + "method `{}` is not defined in trait `{}`", + method_name.data(db.as_hir_db()), + trait_.name(db.as_hir_db()).unwrap().data(db.as_hir_db()), + ), + primary.resolve(db), + ), + SubDiagnostic::new( + LabelStyle::Secondary, + "trait is defined here".to_string(), + trait_.lazy_span().name().resolve(db), + ), + ] + } + + Self::NotAllTraitItemsImplemented { + primary, + not_implemented, + } => { + let not_implemented: String = not_implemented + .iter() + .map(|name| name.data(db.as_hir_db())) + .join(", "); + + vec![SubDiagnostic::new( + LabelStyle::Primary, + format!( + "all required trait items must be implemented, missing: `{}`", + not_implemented + ), + primary.resolve(db), + )] + } + + Self::MethodTypeParamNumMismatch { + primary, + expected, + given, + } => { + vec![SubDiagnostic::new( + LabelStyle::Primary, + format!( + "expected {} type parameters here, but {} given", + expected, given + ), + primary.resolve(db), + )] + } + + Self::MethodTypeParamKindMismatch { primary, message } => { + vec![SubDiagnostic::new( + LabelStyle::Primary, + message.clone(), + primary.resolve(db), + )] + } + + Self::MethodArgNumMismatch { + primary, + expected, + given, + } => { + vec![SubDiagnostic::new( + LabelStyle::Primary, + format!("expected {} arguments here, but {} given", expected, given), + primary.resolve(db), + )] + } + + Self::MethodArgLabelMismatch { + primary, + 
definition, + message, + } => { + vec![ + SubDiagnostic::new(LabelStyle::Primary, message.clone(), primary.resolve(db)), + SubDiagnostic::new( + LabelStyle::Secondary, + "argument label is defined here".to_string(), + definition.resolve(db), + ), + ] + } + + Self::MethodArgTyMismatch { primary, message } => { + vec![SubDiagnostic::new( + LabelStyle::Primary, + message.clone(), + primary.resolve(db), + )] + } + + Self::MethodRetTyMismatch { primary, message } => { + vec![SubDiagnostic::new( + LabelStyle::Primary, + message.clone(), + primary.resolve(db), + )] + } + + Self::MethodStricterBound { primary, message } => { + vec![SubDiagnostic::new( + LabelStyle::Primary, + message.clone(), + primary.resolve(db), + )] + } + + Self::InvalidSelfType { primary, message } => { + vec![SubDiagnostic::new( + LabelStyle::Primary, + message.clone(), + primary.resolve(db), + )] + } + + Self::InherentImplIsNotAllowed { + primary, + ty, + is_nominal, + } => { + let msg = if *is_nominal { + format!("inherent impl is not allowed for foreign type `{}`", ty) + } else { + "inherent impl is not allowed for non nominal type".to_string() + }; + + vec![SubDiagnostic::new( + LabelStyle::Primary, + msg, + primary.resolve(db), + )] + } + } + } + + fn severity(&self) -> Severity { + Severity::Error + } +} + +impl<'db> DiagnosticVoucher<'db> for ImplDiag<'db> { + fn error_code(&self) -> GlobalErrorCode { + GlobalErrorCode::new(DiagnosticPass::TraitSatisfaction, self.local_code()) + } + + fn to_complete(&self, db: &'db dyn SpannedHirDb) -> CompleteDiagnostic { + let severity = self.severity(); + let error_code = self.error_code(); + let message = self.message(db.as_hir_db()); + let sub_diags = self.sub_diags(db); + + CompleteDiagnostic::new(severity, message, sub_diags, vec![], error_code) + } +} diff --git a/crates/hir-analysis/src/ty/fold.rs b/crates/hir-analysis/src/ty/fold.rs new file mode 100644 index 0000000000..89995e3c8b --- /dev/null +++ b/crates/hir-analysis/src/ty/fold.rs @@ -0,0 +1,172 
@@ +use std::hash::Hash; + +use common::indexmap::IndexSet; + +use super::{ + trait_def::{Implementor, TraitInstId}, + trait_resolution::PredicateListId, + ty_check::ExprProp, + ty_def::{TyData, TyId}, + visitor::TyVisitable, +}; +use crate::{ + ty::const_ty::{ConstTyData, ConstTyId}, + HirAnalysisDb, +}; + +pub trait TyFoldable<'db> +where + Self: Sized + TyVisitable<'db>, +{ + fn super_fold_with(self, folder: &mut F) -> Self + where + F: TyFolder<'db>; + + fn fold_with(self, folder: &mut F) -> Self + where + F: TyFolder<'db>, + { + self.super_fold_with(folder) + } +} + +pub trait TyFolder<'db> { + fn db(&self) -> &'db dyn HirAnalysisDb; + fn fold_ty(&mut self, ty: TyId<'db>) -> TyId<'db>; +} + +impl<'db> TyFoldable<'db> for TyId<'db> { + fn super_fold_with(self, folder: &mut F) -> Self + where + F: TyFolder<'db>, + { + use TyData::*; + + let db = folder.db(); + match self.data(db) { + TyApp(abs, arg) => { + let abs = folder.fold_ty(*abs); + let arg = folder.fold_ty(*arg); + + TyId::app(db, abs, arg) + } + + ConstTy(cty) => { + use ConstTyData::*; + let cty_data = match cty.data(db) { + TyVar(var, ty) => { + let ty = folder.fold_ty(*ty); + TyVar(var.clone(), ty) + } + TyParam(param, ty) => { + let ty = folder.fold_ty(*ty); + TyParam(param.clone(), ty) + } + Evaluated(val, ty) => { + let ty = folder.fold_ty(*ty); + Evaluated(val.clone(), ty) + } + UnEvaluated(body) => UnEvaluated(*body), + }; + + let const_ty = ConstTyId::new(db, cty_data); + TyId::const_ty(db, const_ty) + } + + TyVar(_) | TyParam(_) | TyBase(_) | Never | Invalid(_) => self, + } + } + + fn fold_with(self, folder: &mut F) -> Self + where + F: TyFolder<'db>, + { + folder.fold_ty(self) + } +} + +impl<'db, T> TyFoldable<'db> for Vec +where + T: TyFoldable<'db>, +{ + fn super_fold_with(self, folder: &mut F) -> Self + where + F: TyFolder<'db>, + { + self.into_iter() + .map(|inner| inner.fold_with(folder)) + .collect() + } +} + +impl<'db, T> TyFoldable<'db> for IndexSet +where + T: TyFoldable<'db> + Hash 
+ Eq, +{ + fn super_fold_with(self, folder: &mut F) -> Self + where + F: TyFolder<'db>, + { + self.into_iter().map(|ty| ty.fold_with(folder)).collect() + } +} + +impl<'db> TyFoldable<'db> for TraitInstId<'db> { + fn super_fold_with(self, folder: &mut F) -> Self + where + F: TyFolder<'db>, + { + let db = folder.db(); + let def = self.def(db); + let args = self + .args(db) + .iter() + .map(|ty| ty.fold_with(folder)) + .collect::>(); + + TraitInstId::new(db, def, args) + } +} + +impl<'db> TyFoldable<'db> for Implementor<'db> { + fn super_fold_with(self, folder: &mut F) -> Self + where + F: TyFolder<'db>, + { + let db = folder.db(); + let trait_inst = self.trait_(db).fold_with(folder); + let params = self + .params(db) + .iter() + .map(|ty| ty.fold_with(folder)) + .collect::>(); + let hir_impl_trait = self.hir_impl_trait(db); + + Implementor::new(db, trait_inst, params, hir_impl_trait) + } +} + +impl<'db> TyFoldable<'db> for PredicateListId<'db> { + fn super_fold_with(self, folder: &mut F) -> Self + where + F: TyFolder<'db>, + { + let predicates = self + .list(folder.db()) + .iter() + .map(|pred| pred.fold_with(folder)) + .collect::>(); + + Self::new(folder.db(), predicates) + } +} + +impl<'db> TyFoldable<'db> for ExprProp<'db> { + fn super_fold_with(self, folder: &mut F) -> Self + where + F: TyFolder<'db>, + { + let ty = self.ty.fold_with(folder); + Self { ty, ..self } + } +} diff --git a/crates/hir-analysis/src/ty/func_def.rs b/crates/hir-analysis/src/ty/func_def.rs new file mode 100644 index 0000000000..11bbf48845 --- /dev/null +++ b/crates/hir-analysis/src/ty/func_def.rs @@ -0,0 +1,202 @@ +use hir::{ + hir_def::{scope_graph::ScopeId, Enum, Func, IdentId, IngotId, Partial}, + span::DynLazySpan, +}; + +use super::{binder::Binder, ty_def::TyId, ty_lower::GenericParamTypeSet}; +use crate::{ + ty::{ + ty_def::InvalidCause, + ty_lower::{collect_generic_params, lower_hir_ty, GenericParamOwnerId}, + }, + HirAnalysisDb, +}; + +/// Lower func to [`FuncDef`]. 
This function returns `None` iff the function +/// name is `Partial::Absent`. +#[salsa::tracked] +pub fn lower_func<'db>(db: &'db dyn HirAnalysisDb, func: Func<'db>) -> Option> { + let name = func.name(db.as_hir_db()).to_opt()?; + let params_set = collect_generic_params(db, GenericParamOwnerId::new(db, func.into())); + + let args = match func.params(db.as_hir_db()) { + Partial::Present(args) => args + .data(db.as_hir_db()) + .iter() + .map(|arg| { + let ty = arg + .ty + .to_opt() + .map(|ty| lower_hir_ty(db, ty, func.scope())) + .unwrap_or_else(|| TyId::invalid(db, InvalidCause::Other)); + Binder::bind(ty) + }) + .collect(), + Partial::Absent => vec![], + }; + + let ret_ty = func + .ret_ty(db.as_hir_db()) + .map(|ty| lower_hir_ty(db, ty, func.scope())) + .unwrap_or_else(|| TyId::unit(db)); + + Some(FuncDef::new( + db, + func.into(), + name, + params_set, + args, + Binder::bind(ret_ty), + )) +} + +#[salsa::tracked] +pub struct FuncDef<'db> { + pub hir_def: HirFuncDefKind<'db>, + + pub name: IdentId<'db>, + + pub params_set: GenericParamTypeSet<'db>, + + /// Argument types of the function. + #[return_ref] + pub arg_tys: Vec>>, + + /// Return types of the function. 
+ pub ret_ty: Binder>, +} + +impl<'db> FuncDef<'db> { + pub fn ingot(self, db: &'db dyn HirAnalysisDb) -> IngotId<'db> { + self.hir_def(db).ingot(db) + } + + pub fn name_span(self, db: &'db dyn HirAnalysisDb) -> DynLazySpan<'db> { + self.hir_def(db).name_span() + } + + pub fn param_list_span(self, db: &'db dyn HirAnalysisDb) -> DynLazySpan<'db> { + self.hir_def(db).param_list_span() + } + + pub fn scope(self, db: &'db dyn HirAnalysisDb) -> ScopeId<'db> { + self.hir_def(db).scope() + } + + pub fn params(self, db: &'db dyn HirAnalysisDb) -> &'db [TyId<'db>] { + self.params_set(db).params(db) + } + + pub fn explicit_params(self, db: &'db dyn HirAnalysisDb) -> &'db [TyId<'db>] { + self.params_set(db).explicit_params(db) + } + + pub fn receiver_ty(self, db: &'db dyn HirAnalysisDb) -> Option>> { + self.is_method(db) + .then(|| self.arg_tys(db).first().copied().unwrap()) + } + + pub fn is_method(self, db: &dyn HirAnalysisDb) -> bool { + self.hir_def(db).is_method(db) + } + + pub fn offset_to_explicit_params_position(self, db: &dyn HirAnalysisDb) -> usize { + self.params_set(db).offset_to_explicit_params_position(db) + } + + pub fn hir_func_def(self, db: &'db dyn HirAnalysisDb) -> Option> { + if let HirFuncDefKind::Func(func) = self.hir_def(db) { + Some(func) + } else { + None + } + } + + pub fn param_span(self, db: &'db dyn HirAnalysisDb, idx: usize) -> DynLazySpan<'db> { + self.hir_def(db).param_span(idx) + } + + pub fn param_label(self, db: &'db dyn HirAnalysisDb, idx: usize) -> Option> { + self.hir_def(db).param_label(db, idx) + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, derive_more::From, salsa::Update)] +pub enum HirFuncDefKind<'db> { + Func(Func<'db>), + VariantCtor(Enum<'db>, usize), +} + +impl<'db> HirFuncDefKind<'db> { + pub fn name_span(self) -> DynLazySpan<'db> { + match self { + Self::Func(func) => func.lazy_span().name_moved().into(), + Self::VariantCtor(enum_, idx) => enum_ + .lazy_span() + .variants_moved() + .variant_moved(idx) + 
.name_moved() + .into(), + } + } + + pub fn is_method(self, db: &dyn HirAnalysisDb) -> bool { + match self { + Self::Func(func) => func.is_method(db.as_hir_db()), + Self::VariantCtor(..) => false, + } + } + + pub fn ingot(self, db: &'db dyn HirAnalysisDb) -> IngotId<'db> { + let top_mod = match self { + Self::Func(func) => func.top_mod(db.as_hir_db()), + Self::VariantCtor(enum_, ..) => enum_.top_mod(db.as_hir_db()), + }; + + top_mod.ingot(db.as_hir_db()) + } + + pub fn scope(self) -> ScopeId<'db> { + match self { + Self::Func(func) => func.scope(), + Self::VariantCtor(enum_, idx) => ScopeId::Variant(enum_.into(), idx), + } + } + + pub fn param_list_span(self) -> DynLazySpan<'db> { + match self { + Self::Func(func) => func.lazy_span().params_moved().into(), + Self::VariantCtor(enum_, idx) => enum_ + .lazy_span() + .variants_moved() + .variant(idx) + .tuple_type() + .into(), + } + } + + pub fn param_label(self, db: &'db dyn HirAnalysisDb, idx: usize) -> Option> { + let Self::Func(func) = self else { + return None; + }; + + func.params(db.as_hir_db()) + .to_opt()? + .data(db.as_hir_db()) + .get(idx)? 
+ .label_eagerly() + } + + pub fn param_span(self, idx: usize) -> DynLazySpan<'db> { + match self { + Self::Func(func) => func.lazy_span().params_moved().param(idx).into(), + Self::VariantCtor(enum_, variant_idx) => enum_ + .lazy_span() + .variants_moved() + .variant_moved(variant_idx) + .tuple_type_moved() + .elem_ty_moved(idx) + .into(), + } + } +} diff --git a/crates/hir-analysis/src/ty/method_cmp.rs b/crates/hir-analysis/src/ty/method_cmp.rs new file mode 100644 index 0000000000..b17bb4457d --- /dev/null +++ b/crates/hir-analysis/src/ty/method_cmp.rs @@ -0,0 +1,299 @@ +use super::{ + canonical::Canonical, + diagnostics::{ImplDiag, TyDiagCollection}, + func_def::FuncDef, + trait_def::{TraitInstId, TraitMethod}, + trait_resolution::{ + constraint::collect_func_def_constraints, is_goal_satisfiable, GoalSatisfiability, + }, + ty_def::TyId, +}; +use crate::HirAnalysisDb; + +/// Compares the implementation method with the trait method to ensure they +/// match. +/// +/// This function performs the following checks: +/// +/// 1. Number of generic parameters. +/// 2. Kinds of generic parameters. +/// 3. Arity (number of arguments). +/// 4. Argument labels. +/// 5. Argument types and return type. +/// 6. Method constraints. +/// +/// If any of these checks fail, the function will record the appropriate +/// diagnostics. +/// +/// # Arguments +/// +/// * `db` - Reference to the database implementing the `HirAnalysisDb` trait. +/// * `impl_m` - The implementation method to compare. +/// * `trait_m` - The trait method to compare against. +/// * `trait_inst` - The instance of the trait being checked. +/// * `sink` - A mutable reference to a vector where diagnostic messages will be +/// collected. 
+pub(super) fn compare_impl_method<'db>( + db: &'db dyn HirAnalysisDb, + impl_m: FuncDef<'db>, + trait_m: TraitMethod<'db>, + trait_inst: TraitInstId<'db>, + sink: &mut Vec>, +) { + if !compare_generic_param_num(db, impl_m, trait_m.0, sink) { + return; + } + + if !compare_generic_param_kind(db, impl_m, trait_m.0, sink) { + return; + } + + if !compare_arity(db, impl_m, trait_m.0, sink) { + return; + } + + // Compare the argument labels, argument types, and return type of the impl + // method with the trait method. + let mut err = !compare_arg_label(db, impl_m, trait_m.0, sink); + + let map_to_impl: Vec<_> = trait_inst + .args(db) + .iter() + .chain(impl_m.explicit_params(db).iter()) + .copied() + .collect(); + err |= !compare_ty(db, impl_m, trait_m.0, &map_to_impl, sink); + if err { + return; + } + + compare_constraints(db, impl_m, trait_m.0, &map_to_impl, sink); +} + +/// Checks if the number of generic parameters of the implemented method is the +/// same as the number of generic parameters of the trait method. +/// Returns `false` if the comparison fails. +fn compare_generic_param_num<'db>( + db: &'db dyn HirAnalysisDb, + impl_m: FuncDef<'db>, + trait_m: FuncDef<'db>, + sink: &mut Vec>, +) -> bool { + let impl_params = impl_m.explicit_params(db); + let trait_params = trait_m.explicit_params(db); + + if impl_params.len() == trait_params.len() { + true + } else { + sink.push( + ImplDiag::method_param_num_mismatch( + impl_m.name_span(db), + trait_params.len(), + impl_params.len(), + ) + .into(), + ); + false + } +} + +/// Checks if the generic parameter kinds are the same. +/// Returns `false` if the comparison fails. 
+fn compare_generic_param_kind<'db>( + db: &'db dyn HirAnalysisDb, + impl_m: FuncDef<'db>, + trait_m: FuncDef<'db>, + sink: &mut Vec>, +) -> bool { + let mut err = false; + for (idx, (&trait_m_param, &impl_m_param)) in trait_m + .explicit_params(db) + .iter() + .zip(impl_m.explicit_params(db)) + .enumerate() + { + let trait_m_kind = trait_m_param.kind(db); + let impl_m_kind = impl_m_param.kind(db); + + if !trait_m_kind.does_match(impl_m_kind) { + let span = impl_m + .hir_func_def(db) + .unwrap() + .lazy_span() + .generic_params_moved() + .param_moved(idx) + .into(); + sink.push(ImplDiag::method_param_kind_mismatch(span, trait_m_kind, impl_m_kind).into()); + err = true; + } + } + + !err +} + +/// Checks if the arity of the implemented method is the same as the arity of +/// the trait method. +/// Returns `false` if the comparison fails. +fn compare_arity<'db>( + db: &'db dyn HirAnalysisDb, + impl_m: FuncDef<'db>, + trait_m: FuncDef<'db>, + sink: &mut Vec>, +) -> bool { + let impl_m_arity = impl_m.arg_tys(db).len(); + let trait_m_arity = trait_m.arg_tys(db).len(); + + // Checks if the arity are the same. + if impl_m_arity == trait_m_arity { + true + } else { + sink.push( + ImplDiag::method_arg_num_mismatch( + impl_m.param_list_span(db), + trait_m_arity, + impl_m_arity, + ) + .into(), + ); + false + } +} + +/// Checks if the argument labels of the implemented method are the same as the +/// argument labels of the trait method. +/// Returns `false` if the comparison fails. 
+fn compare_arg_label<'db>( + db: &'db dyn HirAnalysisDb, + impl_m: FuncDef<'db>, + trait_m: FuncDef<'db>, + sink: &mut Vec>, +) -> bool { + let hir_db = db.as_hir_db(); + + let mut err = false; + let hir_impl_m = impl_m.hir_func_def(db).unwrap(); + let hir_trait_m = trait_m.hir_func_def(db).unwrap(); + + let (Some(impl_m_params), Some(trait_m_params)) = ( + hir_impl_m.params(hir_db).to_opt(), + hir_trait_m.params(hir_db).to_opt(), + ) else { + return true; + }; + + for (idx, (expected_param, method_param)) in trait_m_params + .data(hir_db) + .iter() + .zip(impl_m_params.data(hir_db)) + .enumerate() + { + let Some(expected_label) = expected_param + .label + .or_else(|| expected_param.name.to_opt()) + else { + continue; + }; + + let Some(method_label) = method_param.label.or_else(|| method_param.name.to_opt()) else { + continue; + }; + + if expected_label != method_label { + let primary = hir_impl_m.lazy_span().params_moved().param(idx).into(); + let sub = hir_trait_m.lazy_span().params_moved().param(idx).into(); + + sink.push( + ImplDiag::method_arg_label_mismatch(db, primary, sub, expected_label, method_label) + .into(), + ); + err = true; + } + } + + !err +} + +/// Checks if the argument types and return type of the implemented method are +/// the same as the argument types and return type of the trait method. +/// Returns `false` if the comparison fails. 
+fn compare_ty<'db>( + db: &'db dyn HirAnalysisDb, + impl_m: FuncDef<'db>, + trait_m: FuncDef<'db>, + map_to_impl: &[TyId<'db>], + sink: &mut Vec>, +) -> bool { + let mut err = false; + let impl_m_arg_tys = impl_m.arg_tys(db); + let trait_m_arg_tys = trait_m.arg_tys(db); + + for (idx, (&trait_m_ty, &impl_m_ty)) in trait_m_arg_tys.iter().zip(impl_m_arg_tys).enumerate() { + let trait_m_ty = trait_m_ty.instantiate(db, map_to_impl); + if trait_m_ty.has_invalid(db) { + continue; + } + let impl_m_ty = impl_m_ty.instantiate_identity(); + if !impl_m_ty.has_invalid(db) && trait_m_ty != impl_m_ty { + let span = impl_m.param_span(db, idx); + sink.push(ImplDiag::method_arg_ty_mismatch(db, span, trait_m_ty, impl_m_ty).into()); + err = true; + } + } + + let impl_m_ret_ty = impl_m.ret_ty(db).instantiate_identity(); + let trait_m_ret_ty = trait_m.ret_ty(db).instantiate(db, map_to_impl); + if !impl_m_ret_ty.has_invalid(db) && trait_m_ret_ty != impl_m_ret_ty { + sink.push( + ImplDiag::method_ret_type_mismatch( + db, + impl_m.hir_func_def(db).unwrap().lazy_span().ret_ty().into(), + trait_m_ret_ty, + impl_m_ret_ty, + ) + .into(), + ); + + err = true; + } + + !err +} + +/// Checks if the method constraints are stricter than the trait constraints. +/// This check is performed by checking if the `impl_method` constraints are +/// satisfied under the assumptions that is obtained from the `expected_method` +/// constraints. +/// Returns `false` if the comparison fails. 
+fn compare_constraints<'db>( + db: &'db dyn HirAnalysisDb, + impl_m: FuncDef<'db>, + trait_m: FuncDef<'db>, + map_to_impl: &[TyId<'db>], + sink: &mut Vec>, +) -> bool { + let impl_m_constraints = collect_func_def_constraints(db, impl_m, false).instantiate_identity(); + let trait_m_constraints = + collect_func_def_constraints(db, trait_m, false).instantiate(db, map_to_impl); + let mut unsatisfied_goals = vec![]; + for &goal in impl_m_constraints.list(db) { + let canonical_goal = Canonical::new(db, goal); + let ingot = trait_m.ingot(db); + match is_goal_satisfiable(db, ingot, canonical_goal, trait_m_constraints) { + GoalSatisfiability::Satisfied(_) | GoalSatisfiability::ContainsInvalid => {} + GoalSatisfiability::NeedsConfirmation(_) => unreachable!(), + GoalSatisfiability::UnSat(_) => { + unsatisfied_goals.push(goal); + } + } + } + + if unsatisfied_goals.is_empty() { + true + } else { + unsatisfied_goals.sort_by_key(|goal| goal.self_ty(db).pretty_print(db)); + sink.push( + ImplDiag::method_stricter_bound(db, impl_m.name_span(db), &unsatisfied_goals).into(), + ); + false + } +} diff --git a/crates/hir-analysis/src/ty/method_table.rs b/crates/hir-analysis/src/ty/method_table.rs new file mode 100644 index 0000000000..96e34a7b49 --- /dev/null +++ b/crates/hir-analysis/src/ty/method_table.rs @@ -0,0 +1,233 @@ +use hir::hir_def::{Enum, IdentId, Impl, IngotId, VariantKind}; +use rustc_hash::FxHashMap; + +use super::{ + adt_def::{lower_adt, AdtRefId}, + binder::Binder, + canonical::Canonical, + func_def::{lower_func, FuncDef, HirFuncDefKind}, + ty_def::{InvalidCause, TyBase, TyId}, + ty_lower::lower_hir_ty, + unify::UnificationTable, +}; +use crate::{ + ty::{ty_def::TyData, ty_lower::GenericParamTypeSet}, + HirAnalysisDb, +}; + +#[salsa::tracked(return_ref)] +pub(crate) fn collect_methods<'db>( + db: &'db dyn HirAnalysisDb, + ingot: IngotId<'db>, +) -> MethodTable<'db> { + let mut collector = MethodCollector::new(db, ingot); + + let enums = ingot.all_enums(db.as_hir_db()); 
+ collector.collect_variant_ctors(enums); + + let impls = ingot.all_impls(db.as_hir_db()); + + collector.collect_impls(impls); + collector.finalize() +} + +#[salsa::tracked(return_ref)] +pub(crate) fn probe_method<'db>( + db: &'db dyn HirAnalysisDb, + ingot: IngotId<'db>, + ty: Canonical>, + name: IdentId<'db>, +) -> Vec> { + let table = collect_methods(db, ingot); + table.probe(db, ty, name) +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct MethodTable<'db> { + buckets: FxHashMap, MethodBucket<'db>>, +} + +impl<'db> MethodTable<'db> { + fn probe( + &self, + db: &'db dyn HirAnalysisDb, + ty: Canonical>, + name: IdentId<'db>, + ) -> Vec> { + let mut table = UnificationTable::new(db); + let ty = ty.extract_identity(&mut table); + let Some(base) = Self::extract_ty_base(ty, db) else { + return vec![]; + }; + + if let Some(bucket) = self.buckets.get(base) { + bucket.probe(&mut table, ty, name) + } else { + vec![] + } + } + + fn new() -> Self { + Self { + buckets: FxHashMap::default(), + } + } + + fn finalize(self) -> Self { + self + } + + fn insert(&mut self, db: &'db dyn HirAnalysisDb, ty: TyId<'db>, func: FuncDef<'db>) { + let Some(base) = Self::extract_ty_base(ty, db) else { + return; + }; + + let name = func.name(db); + let bucket = self.buckets.entry(*base).or_insert_with(MethodBucket::new); + let methods = bucket.methods.entry(Binder::bind(ty)).or_default(); + methods.insert(name, func); + } + + fn extract_ty_base(ty: TyId<'db>, db: &'db dyn HirAnalysisDb) -> Option<&'db TyBase<'db>> { + let base = ty.base_ty(db); + match base.data(db) { + TyData::TyBase(base) => Some(base), + _ => None, + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +struct MethodBucket<'db> { + methods: FxHashMap>, FxHashMap, FuncDef<'db>>>, +} + +impl<'db> MethodBucket<'db> { + fn new() -> Self { + Self { + methods: FxHashMap::default(), + } + } + + fn probe( + &self, + table: &mut UnificationTable<'db>, + ty: TyId<'db>, + name: IdentId<'db>, + ) -> Vec> { + let mut methods = 
vec![]; + for (&cand_ty, funcs) in self.methods.iter() { + let snapshot = table.snapshot(); + + let ty = table.instantiate_to_term(ty); + let cand_ty = table.instantiate_with_fresh_vars(cand_ty); + let cand_ty = table.instantiate_to_term(cand_ty); + + if table.unify(cand_ty, ty).is_ok() { + if let Some(func) = funcs.get(&name) { + methods.push(*func) + } + } + table.rollback_to(snapshot); + } + + methods + } +} + +struct MethodCollector<'db> { + db: &'db dyn HirAnalysisDb, + ingot: IngotId<'db>, + method_table: MethodTable<'db>, +} + +impl<'db> MethodCollector<'db> { + fn new(db: &'db dyn HirAnalysisDb, ingot: IngotId<'db>) -> Self { + Self { + db, + ingot, + method_table: MethodTable::new(), + } + } + + fn collect_variant_ctors(&mut self, enums: &[Enum<'db>]) { + let hir_db = self.db.as_hir_db(); + for &enum_ in enums { + let adt_ref = AdtRefId::new(self.db, enum_.into()); + let adt = lower_adt(self.db, adt_ref); + for (i, variant) in enum_.variants(hir_db).data(hir_db).iter().enumerate() { + if !matches!(variant.kind, VariantKind::Tuple(_)) { + continue; + }; + let (Some(name), Some(variant)) = + (variant.name.to_opt(), adt.fields(self.db).get(i)) + else { + continue; + }; + + let arg_tys = variant.iter_types(self.db).collect(); + let mut ret_ty = TyId::adt(self.db, adt); + let adt_param_set = adt.param_set(self.db); + ret_ty = TyId::foldl(self.db, ret_ty, adt.param_set(self.db).params(self.db)); + + let param_set = GenericParamTypeSet::new( + self.db, + adt_param_set.params_precursor(self.db).to_vec(), + adt_param_set.scope(self.db), + adt_param_set.len(self.db), + ); + + let func = FuncDef::new( + self.db, + HirFuncDefKind::VariantCtor(enum_, i), + name, + param_set, + arg_tys, + Binder::bind(ret_ty), + ); + + self.insert(ret_ty, func) + } + } + } + + fn collect_impls(&mut self, impls: &[Impl<'db>]) { + for impl_ in impls { + let ty = match impl_.ty(self.db.as_hir_db()).to_opt() { + Some(ty) => lower_hir_ty(self.db, ty, impl_.scope()), + None => 
TyId::invalid(self.db, InvalidCause::Other), + }; + + if ty.has_invalid(self.db) | !ty.is_inherent_impl_allowed(self.db, self.ingot) { + continue; + } + + for func in impl_.funcs(self.db.as_hir_db()) { + let Some(func) = lower_func(self.db, func) else { + continue; + }; + + self.insert(ty, func) + } + } + } + + fn finalize(self) -> MethodTable<'db> { + self.method_table.finalize() + } + + fn insert(&mut self, ty: TyId<'db>, func: FuncDef<'db>) { + let ty = match func.receiver_ty(self.db) { + Some(ty) => ty.instantiate_identity(), + None => ty, + }; + + if self + .method_table + .probe(self.db, Canonical::new(self.db, ty), func.name(self.db)) + .is_empty() + { + self.method_table.insert(self.db, ty, func) + } + } +} diff --git a/crates/hir-analysis/src/ty/mod.rs b/crates/hir-analysis/src/ty/mod.rs new file mode 100644 index 0000000000..b7d4a4a5bb --- /dev/null +++ b/crates/hir-analysis/src/ty/mod.rs @@ -0,0 +1,214 @@ +use hir::{analysis_pass::ModuleAnalysisPass, hir_def::TopLevelMod}; + +use self::{ + adt_def::AdtRefId, + def_analysis::{ + analyze_adt, analyze_func, analyze_impl, analyze_impl_trait, analyze_trait, + analyze_type_alias, + }, +}; +use crate::HirAnalysisDb; + +pub mod adt_def; +pub mod binder; +pub mod const_ty; +pub mod def_analysis; +pub mod diagnostics; +pub mod fold; +pub mod func_def; +pub mod method_table; +pub mod trait_def; +pub mod trait_lower; +pub mod trait_resolution; +pub mod ty_check; +pub mod ty_def; +pub mod ty_lower; +pub mod visitor; + +mod canonical; +mod method_cmp; +mod unify; + +/// An analysis pass for type definitions. 
+pub struct AdtDefAnalysisPass<'db> { + db: &'db dyn HirAnalysisDb, +} +impl<'db> AdtDefAnalysisPass<'db> { + pub fn new(db: &'db dyn HirAnalysisDb) -> Self { + Self { db } + } +} + +impl<'db> ModuleAnalysisPass<'db> for AdtDefAnalysisPass<'db> { + fn run_on_module( + &mut self, + top_mod: TopLevelMod<'db>, + ) -> Vec + 'db>> { + let hir_db = self.db.as_hir_db(); + let adts = top_mod + .all_structs(hir_db) + .iter() + .map(|s| AdtRefId::from_struct(self.db, *s)) + .chain( + top_mod + .all_enums(hir_db) + .iter() + .map(|e| AdtRefId::from_enum(self.db, *e)), + ) + .chain( + top_mod + .all_contracts(hir_db) + .iter() + .map(|c| AdtRefId::from_contract(self.db, *c)), + ); + + adts.flat_map(|adt| analyze_adt(self.db, adt).iter()) + .map(|diag| diag.to_voucher()) + .collect() + } +} + +pub struct BodyAnalysisPass<'db> { + db: &'db dyn HirAnalysisDb, +} +impl<'db> BodyAnalysisPass<'db> { + pub fn new(db: &'db dyn HirAnalysisDb) -> Self { + Self { db } + } +} +impl<'db> ModuleAnalysisPass<'db> for BodyAnalysisPass<'db> { + fn run_on_module( + &mut self, + top_mod: TopLevelMod<'db>, + ) -> Vec + 'db>> { + top_mod + .all_funcs(self.db.as_hir_db()) + .iter() + .flat_map(|func| &ty_check::check_func_body(self.db, *func).0) + .map(|diag| diag.to_voucher()) + .collect() + } +} + +/// An analysis pass for trait definitions. 
+pub struct TraitAnalysisPass<'db> { + db: &'db dyn HirAnalysisDb, +} +impl<'db> TraitAnalysisPass<'db> { + pub fn new(db: &'db dyn HirAnalysisDb) -> Self { + Self { db } + } +} + +impl<'db> ModuleAnalysisPass<'db> for TraitAnalysisPass<'db> { + fn run_on_module( + &mut self, + top_mod: TopLevelMod<'db>, + ) -> Vec + 'db>> { + top_mod + .all_traits(self.db.as_hir_db()) + .iter() + .flat_map(|trait_| analyze_trait(self.db, *trait_)) + .map(|diag| diag.to_voucher()) + .collect() + } +} + +pub struct ImplAnalysisPass<'db> { + db: &'db dyn HirAnalysisDb, +} + +impl<'db> ImplAnalysisPass<'db> { + pub fn new(db: &'db dyn HirAnalysisDb) -> Self { + Self { db } + } +} + +impl<'db> ModuleAnalysisPass<'db> for ImplAnalysisPass<'db> { + fn run_on_module( + &mut self, + top_mod: TopLevelMod<'db>, + ) -> Vec + 'db>> { + top_mod + .all_impls(self.db.as_hir_db()) + .iter() + .flat_map(|impl_| analyze_impl(self.db, *impl_)) + .map(|diag| diag.to_voucher()) + .collect() + } +} + +/// An analysis pass for `ImplTrait'. +pub struct ImplTraitAnalysisPass<'db> { + db: &'db dyn HirAnalysisDb, +} + +impl<'db> ImplTraitAnalysisPass<'db> { + pub fn new(db: &'db dyn HirAnalysisDb) -> Self { + Self { db } + } +} + +impl<'db> ModuleAnalysisPass<'db> for ImplTraitAnalysisPass<'db> { + fn run_on_module( + &mut self, + top_mod: TopLevelMod<'db>, + ) -> Vec + 'db>> { + top_mod + .all_impl_traits(self.db.as_hir_db()) + .iter() + .flat_map(|trait_| analyze_impl_trait(self.db, *trait_)) + .map(|diag| diag.to_voucher()) + .collect() + } +} + +/// An analysis pass for `ImplTrait'. 
+pub struct FuncAnalysisPass<'db> { + db: &'db dyn HirAnalysisDb, +} + +impl<'db> FuncAnalysisPass<'db> { + pub fn new(db: &'db dyn HirAnalysisDb) -> Self { + Self { db } + } +} + +impl<'db> ModuleAnalysisPass<'db> for FuncAnalysisPass<'db> { + fn run_on_module( + &mut self, + top_mod: TopLevelMod<'db>, + ) -> Vec + 'db>> { + top_mod + .all_funcs(self.db.as_hir_db()) + .iter() + .flat_map(|func| analyze_func(self.db, *func)) + .map(|diag| diag.to_voucher()) + .collect() + } +} + +/// An analysis pass for type aliases. +pub struct TypeAliasAnalysisPass<'db> { + db: &'db dyn HirAnalysisDb, +} + +impl<'db> TypeAliasAnalysisPass<'db> { + pub fn new(db: &'db dyn HirAnalysisDb) -> Self { + Self { db } + } +} + +impl<'db> ModuleAnalysisPass<'db> for TypeAliasAnalysisPass<'db> { + fn run_on_module( + &mut self, + top_mod: TopLevelMod<'db>, + ) -> Vec + 'db>> { + top_mod + .all_type_aliases(self.db.as_hir_db()) + .iter() + .flat_map(|alias| analyze_type_alias(self.db, *alias).iter()) + .map(|diag| diag.to_voucher()) + .collect() + } +} diff --git a/crates/hir-analysis/src/ty/trait_def.rs b/crates/hir-analysis/src/ty/trait_def.rs new file mode 100644 index 0000000000..c8cf2c27ff --- /dev/null +++ b/crates/hir-analysis/src/ty/trait_def.rs @@ -0,0 +1,358 @@ +//! This module contains all trait related types definitions. 
+ +use common::indexmap::{IndexMap, IndexSet}; +use hir::{ + hir_def::{IdentId, ImplTrait, IngotId, Trait}, + span::DynLazySpan, +}; +use rustc_hash::FxHashMap; + +use super::{ + binder::Binder, + canonical::Canonical, + diagnostics::{TraitConstraintDiag, TyDiagCollection}, + func_def::FuncDef, + trait_lower::collect_implementor_methods, + trait_resolution::{ + check_trait_inst_wf, + constraint::{collect_implementor_constraints, collect_super_traits}, + PredicateListId, WellFormedness, + }, + ty_def::{Kind, TyId}, + ty_lower::GenericParamTypeSet, + unify::UnificationTable, +}; +use crate::{ty::trait_lower::collect_trait_impls, HirAnalysisDb}; + +/// Returns [`TraitEnv`] for the given ingot. +#[salsa::tracked(return_ref)] +pub(crate) fn ingot_trait_env<'db>( + db: &'db dyn HirAnalysisDb, + ingot: IngotId<'db>, +) -> TraitEnv<'db> { + TraitEnv::collect(db, ingot) +} + +/// Returns all [`Implementor`] for the given trait inst. +#[salsa::tracked(return_ref)] +pub(crate) fn impls_for_trait<'db>( + db: &'db dyn HirAnalysisDb, + ingot: IngotId<'db>, + trait_: Canonical>, +) -> Vec>> { + let mut table = UnificationTable::new(db); + let trait_ = trait_.extract_identity(&mut table); + + let env = ingot_trait_env(db, ingot); + let Some(impls) = env.impls.get(&trait_.def(db)) else { + return vec![]; + }; + + impls + .iter() + .filter(|impl_| { + let snapshot = table.snapshot(); + let impl_ = table.instantiate_with_fresh_vars(**impl_); + let is_ok = table.unify(impl_.trait_(db), trait_).is_ok(); + table.rollback_to(snapshot); + is_ok + }) + .cloned() + .collect() +} + +/// Returns all [`Implementor`] for the given `ty`. 
+#[salsa::tracked(return_ref)] +pub(crate) fn impls_for_ty<'db>( + db: &'db dyn HirAnalysisDb, + ingot: IngotId<'db>, + ty: Canonical>, +) -> Vec>> { + let mut table = UnificationTable::new(db); + let ty = ty.extract_identity(&mut table); + + let env = ingot_trait_env(db, ingot); + if ty.has_invalid(db) { + return vec![]; + } + + let mut cands = vec![]; + for (key, insts) in env.ty_to_implementors.iter() { + let snapshot = table.snapshot(); + let key = table.instantiate_with_fresh_vars(*key); + if table.unify(key, ty.base_ty(db)).is_ok() { + cands.push(insts); + } + + table.rollback_to(snapshot); + } + + cands + .into_iter() + .flatten() + .copied() + .filter(|impl_| { + let snapshot = table.snapshot(); + + let impl_ = table.instantiate_with_fresh_vars(*impl_); + let impl_ty = table.instantiate_to_term(impl_.self_ty(db)); + let ty = table.instantiate_to_term(ty); + let is_ok = table.unify(impl_ty, ty).is_ok(); + + table.rollback_to(snapshot); + + is_ok + }) + .collect() +} + +/// Represents the trait environment of an ingot, which maintain all trait +/// implementors which can be used in the ingot. +#[derive(Debug, PartialEq, Eq, Clone)] +pub(crate) struct TraitEnv<'db> { + pub(super) impls: FxHashMap, Vec>>>, + hir_to_implementor: FxHashMap, Binder>>, + + /// This maintains a mapping from the base type to the implementors. 
+ ty_to_implementors: FxHashMap>, Vec>>>, + + ingot: IngotId<'db>, +} + +impl<'db> TraitEnv<'db> { + fn collect(db: &'db dyn HirAnalysisDb, ingot: IngotId<'db>) -> Self { + let mut impls: FxHashMap<_, Vec>> = FxHashMap::default(); + let mut hir_to_implementor: FxHashMap> = + FxHashMap::default(); + let mut ty_to_implementors: FxHashMap, Vec>> = + FxHashMap::default(); + + for impl_map in ingot + .external_ingots(db.as_hir_db()) + .iter() + .map(|(_, external)| collect_trait_impls(db, *external)) + .chain(std::iter::once(collect_trait_impls(db, ingot))) + { + // `collect_trait_impls` ensures that there are no conflicting impls, so we can + // just extend the map. + for (trait_def, implementors) in impl_map.iter() { + impls + .entry(*trait_def) + .or_default() + .extend(implementors.iter().copied()); + + hir_to_implementor.extend(implementors.iter().map(|implementor| { + (implementor.skip_binder().hir_impl_trait(db), *implementor) + })); + + for implementor in implementors { + ty_to_implementors + .entry(Binder::bind( + implementor.instantiate_identity().self_ty(db).base_ty(db), + )) + .or_default() + .push(*implementor); + } + } + } + + Self { + impls, + hir_to_implementor, + ty_to_implementors, + ingot, + } + } + + /// Returns the corresponding implementor of the given `impl Trait` type. + pub(crate) fn map_impl_trait(&self, trait_ref: ImplTrait) -> Option> { + self.hir_to_implementor.get(&trait_ref).copied() + } +} + +/// Represents an implementor of a trait, which can be thought of as a lowered +/// `impl Trait`. +#[salsa::interned] +pub(crate) struct Implementor<'db> { + /// The trait that this implementor implements. + pub(crate) trait_: TraitInstId<'db>, + + /// The type parameters of this implementor. + #[return_ref] + pub(crate) params: Vec>, + + /// The original hir. + pub(crate) hir_impl_trait: ImplTrait<'db>, +} + +impl<'db> Implementor<'db> { + /// Returns the trait definition that this implementor implements. 
+ pub(crate) fn trait_def(self, db: &'db dyn HirAnalysisDb) -> TraitDef<'db> { + self.trait_(db).def(db) + } + + pub(crate) fn original_params(self, db: &'db dyn HirAnalysisDb) -> &'db [TyId<'db>] { + self.params(db) + } + + /// The self type of the impl trait. + pub(crate) fn self_ty(self, db: &'db dyn HirAnalysisDb) -> TyId<'db> { + self.trait_(db).self_ty(db) + } + + /// Returns the constraints that the implementor requires when the + /// implementation is selected. + pub(super) fn constraints(self, db: &'db dyn HirAnalysisDb) -> PredicateListId<'db> { + collect_implementor_constraints(db, self).instantiate(db, self.params(db)) + } + + pub(super) fn methods( + self, + db: &'db dyn HirAnalysisDb, + ) -> &'db IndexMap, FuncDef<'db>> { + collect_implementor_methods(db, self) + } +} + +/// Returns `true` if the given two implementor conflicts. +pub(super) fn does_impl_trait_conflict( + db: &dyn HirAnalysisDb, + a: Binder, + b: Binder, +) -> bool { + let mut table = UnificationTable::new(db); + let a = table.instantiate_with_fresh_vars(a); + let b = table.instantiate_with_fresh_vars(b); + + table.unify(a, b).is_ok() +} + +/// Represents an instantiated trait, which can be thought of as a trait +/// reference from a HIR perspective. +#[salsa::interned] +pub struct TraitInstId<'db> { + pub def: TraitDef<'db>, + #[return_ref] + pub args: Vec>, +} + +impl<'db> TraitInstId<'db> { + pub fn pretty_print(self, db: &dyn HirAnalysisDb, as_pred: bool) -> String { + if as_pred { + let inst = self.pretty_print(db, false); + let self_ty = self.self_ty(db); + format! {"{}: {}", self_ty.pretty_print(db), inst} + } else { + let mut s = self.def(db).name(db).unwrap_or("").to_string(); + + let mut args = self.args(db).iter().map(|ty| ty.pretty_print(db)); + // Skip the first type parameter since it's the implementor type. 
+ args.next(); + + if let Some(first) = args.next() { + s.push('<'); + s.push_str(first); + for arg in args { + s.push_str(", "); + s.push_str(arg); + } + s.push('>'); + } + + s + } + } + + pub fn self_ty(self, db: &'db dyn HirAnalysisDb) -> TyId<'db> { + self.args(db)[0] + } + + pub(super) fn ingot(self, db: &'db dyn HirAnalysisDb) -> IngotId<'db> { + self.def(db).ingot(db) + } + + pub(super) fn emit_sat_diag( + self, + db: &'db dyn HirAnalysisDb, + ingot: IngotId<'db>, + assumptions: PredicateListId<'db>, + span: DynLazySpan<'db>, + ) -> Option> { + if let WellFormedness::IllFormed { goal, subgoal } = + check_trait_inst_wf(db, ingot, self, assumptions) + { + Some(TraitConstraintDiag::trait_bound_not_satisfied(db, span, goal, subgoal).into()) + } else { + None + } + } +} + +/// Represents a trait definition. +#[salsa::tracked] +pub struct TraitDef<'db> { + pub trait_: Trait<'db>, + #[return_ref] + pub(crate) param_set: GenericParamTypeSet<'db>, + #[return_ref] + pub methods: IndexMap, TraitMethod<'db>>, +} + +impl<'db> TraitDef<'db> { + pub fn params(self, db: &'db dyn HirAnalysisDb) -> &'db [TyId<'db>] { + self.param_set(db).params(db) + } + + pub fn self_param(self, db: &'db dyn HirAnalysisDb) -> TyId<'db> { + self.param_set(db).trait_self(db).unwrap() + } + + pub fn original_params(self, db: &'db dyn HirAnalysisDb) -> &'db [TyId<'db>] { + self.param_set(db).explicit_params(db) + } +} + +#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash, salsa::Update)] +pub struct TraitMethod<'db>(pub FuncDef<'db>); + +impl TraitMethod<'_> { + pub fn has_default_impl(self, db: &dyn HirAnalysisDb) -> bool { + self.0 + .hir_func_def(db) + .unwrap() + .body(db.as_hir_db()) + .is_some() + } +} + +impl<'db> TraitDef<'db> { + /// Returns the type kind that implementor type must have. + pub(crate) fn expected_implementor_kind(self, db: &'db dyn HirAnalysisDb) -> &'db Kind { + self.self_param(db).kind(db) + } + + /// Returns `ingot` in which this trait is defined. 
+ pub(crate) fn ingot(self, db: &'db dyn HirAnalysisDb) -> IngotId<'db> { + let hir_db = db.as_hir_db(); + self.trait_(db).top_mod(hir_db).ingot(hir_db) + } + + pub(super) fn super_traits( + self, + db: &'db dyn HirAnalysisDb, + ) -> &'db IndexSet>> { + use std::sync::OnceLock; + static EMPTY: OnceLock>> = OnceLock::new(); + + collect_super_traits(db, self) + .as_ref() + .unwrap_or_else(|_| EMPTY.get_or_init(IndexSet::new)) + } + + fn name(self, db: &'db dyn HirAnalysisDb) -> Option<&'db str> { + self.trait_(db) + .name(db.as_hir_db()) + .to_opt() + .map(|name| name.data(db.as_hir_db()).as_str()) + } +} diff --git a/crates/hir-analysis/src/ty/trait_lower.rs b/crates/hir-analysis/src/ty/trait_lower.rs new file mode 100644 index 0000000000..9f495af15b --- /dev/null +++ b/crates/hir-analysis/src/ty/trait_lower.rs @@ -0,0 +1,306 @@ +//! This module implements the trait and impl trait lowering process. + +use common::indexmap::IndexMap; +use hir::hir_def::{scope_graph::ScopeId, IdentId, ImplTrait, IngotId, Partial, Trait, TraitRefId}; +use rustc_hash::FxHashMap; + +use super::{ + binder::Binder, + func_def::FuncDef, + trait_def::{does_impl_trait_conflict, Implementor, TraitDef, TraitInstId, TraitMethod}, + ty_def::{InvalidCause, Kind, TyId}, + ty_lower::{ + collect_generic_params, lower_generic_arg_list, GenericParamOwnerId, GenericParamTypeSet, + }, +}; +use crate::{ + name_resolution::{resolve_path, PathRes}, + ty::{func_def::lower_func, ty_def::TyData, ty_lower::lower_hir_ty}, + HirAnalysisDb, +}; + +type TraitImplTable<'db> = FxHashMap, Vec>>>; + +#[salsa::tracked] +pub(crate) fn lower_trait<'db>(db: &'db dyn HirAnalysisDb, trait_: Trait<'db>) -> TraitDef<'db> { + TraitBuilder::new(db, trait_).build() +} + +/// Collect all trait implementors in the ingot. +/// The returned table doesn't contain the const(external) ingot +/// implementors. 
If you need to obtain the environment that contains all +/// available implementors in the ingot, please use +/// [`TraitEnv`](super::trait_def::TraitEnv). +#[salsa::tracked(return_ref)] +pub(crate) fn collect_trait_impls<'db>( + db: &'db dyn HirAnalysisDb, + ingot: IngotId<'db>, +) -> TraitImplTable<'db> { + let const_impls = ingot + .external_ingots(db.as_hir_db()) + .iter() + .map(|(_, external)| collect_trait_impls(db, *external)) + .collect(); + + let impl_traits = ingot.all_impl_traits(db.as_hir_db()); + ImplementorCollector::new(db, const_impls).collect(impl_traits) +} + +/// Returns the corresponding implementors for the given [`ImplTrait`]. +/// If the implementor type or the trait reference is ill-formed, returns +/// `None`. +#[salsa::tracked] +pub(crate) fn lower_impl_trait<'db>( + db: &'db dyn HirAnalysisDb, + impl_trait: ImplTrait<'db>, +) -> Option>> { + let hir_db = db.as_hir_db(); + let scope = impl_trait.scope(); + + let hir_ty = impl_trait.ty(hir_db).to_opt()?; + let ty = lower_hir_ty(db, hir_ty, scope); + if ty.has_invalid(db) { + return None; + } + + let trait_ = lower_trait_ref( + db, + ty, + impl_trait.trait_ref(hir_db).to_opt()?, + impl_trait.scope(), + ) + .ok()?; + + let impl_trait_ingot = impl_trait.top_mod(hir_db).ingot(hir_db); + + if Some(impl_trait_ingot) != ty.ingot(db) && impl_trait_ingot != trait_.def(db).ingot(db) { + return None; + } + + let param_owner = GenericParamOwnerId::new(db, impl_trait.into()); + let params = collect_generic_params(db, param_owner).params(db).to_vec(); + + let implementor = Implementor::new(db, trait_, params, impl_trait); + + Some(Binder::bind(implementor)) +} + +/// Lower a trait reference to a trait instance. 
+#[salsa::tracked] +pub(crate) fn lower_trait_ref<'db>( + db: &'db dyn HirAnalysisDb, + self_ty: TyId<'db>, + trait_ref: TraitRefId<'db>, + scope: ScopeId<'db>, +) -> Result, TraitRefLowerError<'db>> { + let hir_db = db.as_hir_db(); + + let mut args = vec![self_ty]; + if let Some(generic_args) = trait_ref.generic_args(hir_db) { + args.extend(lower_generic_arg_list(db, generic_args, scope)); + }; + + let Partial::Present(path) = trait_ref.path(hir_db) else { + return Err(TraitRefLowerError::Other); + }; + + let trait_def = match resolve_path(db, path, scope, false) { + Ok(PathRes::Trait(t)) => t, + _ => return Err(TraitRefLowerError::Other), + }; + + // The first parameter of the trait is the self type, so we need to skip it. + if trait_def.params(db).len() != args.len() { + return Err(TraitRefLowerError::ArgNumMismatch { + expected: trait_def.params(db).len() - 1, + given: args.len() - 1, + }); + } + + for (param, arg) in trait_def + .params(db) + .iter() + .skip(1) + .zip(args.iter_mut().skip(1)) + { + if !param.kind(db).does_match(arg.kind(db)) { + return Err(TraitRefLowerError::ArgKindMisMatch { + expected: param.kind(db).clone(), + given: *arg, + }); + } + + let expected_const_ty = match param.data(db) { + TyData::ConstTy(expected_ty) => expected_ty.ty(db).into(), + _ => None, + }; + + match arg.evaluate_const_ty(db, expected_const_ty) { + Ok(ty) => *arg = ty, + + Err(InvalidCause::ConstTyMismatch { expected, given }) => { + return Err(TraitRefLowerError::ArgTypeMismatch { + expected: Some(expected), + given: Some(given), + }); + } + + Err(InvalidCause::ConstTyExpected { expected }) => { + return Err(TraitRefLowerError::ArgTypeMismatch { + expected: Some(expected), + given: None, + }); + } + + Err(InvalidCause::NormalTypeExpected { given }) => { + return Err(TraitRefLowerError::ArgTypeMismatch { + expected: None, + given: Some(given), + }) + } + + _ => return Err(TraitRefLowerError::Other), + } + } + + Ok(TraitInstId::new(db, trait_def, args)) +} + 
+#[salsa::tracked(return_ref)] +pub(crate) fn collect_implementor_methods<'db>( + db: &'db dyn HirAnalysisDb, + implementor: Implementor<'db>, +) -> IndexMap, FuncDef<'db>> { + let mut methods = IndexMap::default(); + + for method in implementor.hir_impl_trait(db).methods(db.as_hir_db()) { + if let Some(func) = lower_func(db, method) { + methods.insert(func.name(db), func); + } + } + + methods +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub(crate) enum TraitRefLowerError<'db> { + /// The number of arguments doesn't match the number of parameters. + ArgNumMismatch { expected: usize, given: usize }, + + /// The kind of the argument doesn't match the kind of the parameter of the + /// trait. + ArgKindMisMatch { expected: Kind, given: TyId<'db> }, + + /// The argument type doesn't match the const parameter type. + ArgTypeMismatch { + expected: Option>, + given: Option>, + }, + + /// Other errors, which is reported by another pass. So we don't need to + /// report this error kind. + Other, +} + +struct TraitBuilder<'db> { + db: &'db dyn HirAnalysisDb, + trait_: Trait<'db>, + param_set: GenericParamTypeSet<'db>, + methods: IndexMap, TraitMethod<'db>>, +} + +impl<'db> TraitBuilder<'db> { + fn new(db: &'db dyn HirAnalysisDb, trait_: Trait<'db>) -> Self { + let params_owner_id = GenericParamOwnerId::new(db, trait_.into()); + let param_set = collect_generic_params(db, params_owner_id); + + Self { + db, + trait_, + param_set, + methods: IndexMap::default(), + } + } + + fn build(mut self) -> TraitDef<'db> { + self.collect_params(); + self.collect_methods(); + + TraitDef::new(self.db, self.trait_, self.param_set, self.methods) + } + + fn collect_params(&mut self) { + let params_owner_id = GenericParamOwnerId::new(self.db, self.trait_.into()); + self.param_set = collect_generic_params(self.db, params_owner_id); + } + + fn collect_methods(&mut self) { + let hir_db = self.db.as_hir_db(); + for method in self.trait_.methods(hir_db) { + let Some(func) = lower_func(self.db, 
method) else { + continue; + }; + + let name = func.name(self.db); + let trait_method = TraitMethod(func); + // We can simply ignore the conflict here because it's already handled by the + // name resolution. + self.methods.entry(name).or_insert(trait_method); + } + } +} + +/// Collect all implementors in an ingot. +struct ImplementorCollector<'db> { + db: &'db dyn HirAnalysisDb, + impl_table: TraitImplTable<'db>, + const_impl_maps: Vec<&'db TraitImplTable<'db>>, +} + +impl<'db> ImplementorCollector<'db> { + fn new(db: &'db dyn HirAnalysisDb, const_impl_maps: Vec<&'db TraitImplTable>) -> Self { + Self { + db, + impl_table: TraitImplTable::default(), + const_impl_maps, + } + } + + fn collect(mut self, impl_traits: &[ImplTrait<'db>]) -> TraitImplTable<'db> { + for &impl_ in impl_traits { + let Some(implementor) = lower_impl_trait(self.db, impl_) else { + continue; + }; + + if !self.does_conflict(implementor) { + self.impl_table + .entry(implementor.instantiate_identity().trait_def(self.db)) + .or_default() + .push(implementor); + } + } + + self.impl_table + } + + /// Returns `true` if `implementor` conflicts with any existing implementor. 
+ fn does_conflict(&mut self, implementor: Binder) -> bool { + let def = implementor.instantiate_identity().trait_def(self.db); + for impl_map in self + .const_impl_maps + .iter() + .chain(std::iter::once(&&self.impl_table)) + { + let Some(impls) = impl_map.get(&def) else { + continue; + }; + for already_implemented in impls { + if does_impl_trait_conflict(self.db, *already_implemented, implementor) { + return true; + } + } + } + + false + } +} diff --git a/crates/hir-analysis/src/ty/trait_resolution/constraint.rs b/crates/hir-analysis/src/ty/trait_resolution/constraint.rs new file mode 100644 index 0000000000..54dadf4bba --- /dev/null +++ b/crates/hir-analysis/src/ty/trait_resolution/constraint.rs @@ -0,0 +1,356 @@ +use common::indexmap::IndexSet; +use hir::hir_def::{ + scope_graph::ScopeId, GenericParam, GenericParamOwner, Impl, ItemKind, TypeBound, +}; +use salsa::plumbing::FromId; + +use crate::{ + ty::{ + adt_def::{lower_adt, AdtDef, AdtRefId}, + binder::Binder, + func_def::{FuncDef, HirFuncDefKind}, + trait_def::{Implementor, TraitDef, TraitInstId}, + trait_lower::{lower_impl_trait, lower_trait, lower_trait_ref}, + trait_resolution::PredicateListId, + ty_def::{TyBase, TyData, TyId, TyVarSort}, + ty_lower::{collect_generic_params, lower_hir_ty, GenericParamOwnerId}, + unify::InferenceKey, + }, + HirAnalysisDb, +}; + +/// Returns a constraints list which is derived from the given type. 
+#[salsa::tracked] +pub(crate) fn ty_constraints<'db>( + db: &'db dyn HirAnalysisDb, + ty: TyId<'db>, +) -> PredicateListId<'db> { + let (base, args) = ty.decompose_ty_app(db); + let (params, base_constraints) = match base.data(db) { + TyData::TyBase(TyBase::Adt(adt)) => (adt.params(db), collect_adt_constraints(db, *adt)), + TyData::TyBase(TyBase::Func(func_def)) => ( + func_def.params(db), + collect_func_def_constraints(db, *func_def, true), + ), + _ => { + return PredicateListId::empty_list(db); + } + }; + + let mut args = args.to_vec(); + + // Generalize unbound type parameters. + for &arg in params.iter().skip(args.len()) { + let key = InferenceKey(args.len() as u32, Default::default()); + let ty_var = TyId::ty_var(db, TyVarSort::General, arg.kind(db).clone(), key); + args.push(ty_var); + } + + base_constraints.instantiate(db, &args) +} + +/// Collect super traits of the given trait. +/// The returned trait ref is bound by the given trait's generic parameters. +#[salsa::tracked(return_ref, recovery_fn = recover_collect_super_traits)] +pub(crate) fn collect_super_traits<'db>( + db: &'db dyn HirAnalysisDb, + trait_: TraitDef<'db>, +) -> Result>>, SuperTraitCycle<'db>> { + let collector = SuperTraitCollector::new(db, trait_); + let insts = collector.collect(); + + let mut cycles = IndexSet::new(); + // Check for cycles. + for &inst in &insts { + if let Err(err) = collect_super_traits(db, inst.skip_binder().def(db)) { + cycles.extend(err.0.iter().copied()); + } + } + + if cycles.is_empty() { + Ok(insts) + } else { + Err(SuperTraitCycle(cycles)) + } +} + +/// Collect trait constraints that are specified by the given trait definition. +/// These constraints describe 1. the constraints about the self type (i.e., +/// implementor type), and 2. the generic parameter constraints.
+#[salsa::tracked] +pub(crate) fn collect_trait_constraints<'db>( + db: &'db dyn HirAnalysisDb, + trait_: TraitDef<'db>, +) -> Binder> { + let hir_trait = trait_.trait_(db); + let collector = ConstraintCollector::new(db, GenericParamOwnerId::new(db, hir_trait.into())); + + Binder::bind(collector.collect()) +} + +/// Collect constraints that are specified by the given ADT definition. +#[salsa::tracked] +pub(crate) fn collect_adt_constraints<'db>( + db: &'db dyn HirAnalysisDb, + adt: AdtDef<'db>, +) -> Binder> { + let Some(owner) = adt.as_generic_param_owner(db) else { + return Binder::bind(PredicateListId::empty_list(db)); + }; + let collector = ConstraintCollector::new(db, owner); + + Binder::bind(collector.collect()) +} + +#[salsa::tracked] +pub(crate) fn collect_impl_block_constraints<'db>( + db: &'db dyn HirAnalysisDb, + impl_: Impl<'db>, +) -> Binder> { + let owner = GenericParamOwnerId::new(db, impl_.into()); + Binder::bind(ConstraintCollector::new(db, owner).collect()) +} + +/// Collect constraints that are specified by the given implementor(i.e., impl +/// trait). 
+#[salsa::tracked] +pub(crate) fn collect_implementor_constraints<'db>( + db: &'db dyn HirAnalysisDb, + implementor: Implementor<'db>, +) -> Binder> { + let impl_trait = implementor.hir_impl_trait(db); + let collector = ConstraintCollector::new(db, GenericParamOwnerId::new(db, impl_trait.into())); + + Binder::bind(collector.collect()) +} + +#[salsa::tracked] +pub(crate) fn collect_func_def_constraints<'db>( + db: &'db dyn HirAnalysisDb, + func: FuncDef<'db>, + include_parent: bool, +) -> Binder> { + let hir_func = match func.hir_def(db) { + HirFuncDefKind::Func(func) => func, + HirFuncDefKind::VariantCtor(enum_, _) => { + let adt_ref = AdtRefId::new(db, enum_.into()); + let adt = lower_adt(db, adt_ref); + if include_parent { + return collect_adt_constraints(db, adt); + } else { + return Binder::bind(PredicateListId::empty_list(db)); + } + } + }; + + let func_constraints = collect_func_def_constraints_impl(db, func); + if !include_parent { + return func_constraints; + } + + let parent_constraints = match hir_func.scope().parent_item(db.as_hir_db()) { + Some(ItemKind::Trait(trait_)) => collect_trait_constraints(db, lower_trait(db, trait_)), + + Some(ItemKind::Impl(impl_)) => collect_impl_block_constraints(db, impl_), + + Some(ItemKind::ImplTrait(impl_trait)) => { + let Some(implementor) = lower_impl_trait(db, impl_trait) else { + return func_constraints; + }; + collect_implementor_constraints(db, implementor.instantiate_identity()) + } + + _ => return func_constraints, + }; + + Binder::bind( + func_constraints + .instantiate_identity() + .merge(db, parent_constraints.instantiate_identity()), + ) +} + +#[salsa::tracked] +pub(crate) fn collect_func_def_constraints_impl<'db>( + db: &'db dyn HirAnalysisDb, + func: FuncDef<'db>, +) -> Binder> { + let hir_func = match func.hir_def(db) { + HirFuncDefKind::Func(func) => func, + HirFuncDefKind::VariantCtor(enum_, _) => { + let adt_ref = AdtRefId::new(db, enum_.into()); + let adt = lower_adt(db, adt_ref); + return 
collect_adt_constraints(db, adt); + } + }; + + Binder::bind( + ConstraintCollector::new(db, GenericParamOwnerId::new(db, hir_func.into())).collect(), + ) +} + +pub(crate) fn recover_collect_super_traits<'db>( + _db: &'db dyn HirAnalysisDb, + cycle: &salsa::Cycle, + _trait_: TraitDef<'db>, +) -> Result>>, SuperTraitCycle<'db>> { + let mut trait_cycle = IndexSet::new(); + for key in cycle.participant_keys() { + let id = key.key_index(); + let inst = TraitDef::from_id(id); + trait_cycle.insert(inst); + } + + Err(SuperTraitCycle(trait_cycle)) +} + +struct SuperTraitCollector<'db> { + db: &'db dyn HirAnalysisDb, + trait_: TraitDef<'db>, + super_traits: IndexSet>>, + scope: ScopeId<'db>, +} + +impl<'db> SuperTraitCollector<'db> { + fn new(db: &'db dyn HirAnalysisDb, trait_: TraitDef<'db>) -> Self { + Self { + db, + trait_, + super_traits: IndexSet::default(), + scope: trait_.trait_(db).scope(), + } + } + + fn collect(mut self) -> IndexSet>> { + let hir_trait = self.trait_.trait_(self.db); + let hir_db = self.db.as_hir_db(); + let self_param = self.trait_.self_param(self.db); + + for &super_ in hir_trait.super_traits(hir_db).iter() { + if let Ok(inst) = lower_trait_ref(self.db, self_param, super_, self.scope) { + self.super_traits.insert(Binder::bind(inst)); + } + } + + for pred in hir_trait.where_clause(hir_db).data(hir_db) { + if pred + .ty + .to_opt() + .map(|ty| ty.is_self_ty(hir_db)) + .unwrap_or_default() + { + for bound in &pred.bounds { + if let TypeBound::Trait(bound) = bound { + if let Ok(inst) = lower_trait_ref(self.db, self_param, *bound, self.scope) { + self.super_traits.insert(Binder::bind(inst)); + } + } + } + } + } + + self.super_traits + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Default)] +pub(crate) struct SuperTraitCycle<'db>(IndexSet>); +impl<'db> SuperTraitCycle<'db> { + pub fn contains(&self, def: TraitDef<'db>) -> bool { + self.0.contains(&def) + } +} + +struct ConstraintCollector<'db> { + db: &'db dyn HirAnalysisDb, + owner: 
GenericParamOwnerId<'db>, + predicates: IndexSet>, +} + +impl<'db> ConstraintCollector<'db> { + fn new(db: &'db dyn HirAnalysisDb, owner: GenericParamOwnerId<'db>) -> Self { + Self { + db, + owner, + + predicates: IndexSet::new(), + } + } + + fn collect(mut self) -> PredicateListId<'db> { + self.collect_constraints_from_generic_params(); + self.collect_constraints_from_where_clause(); + + // Collect super traits from the trait definition and add them to the predicate + // list. + if let GenericParamOwner::Trait(trait_) = self.owner.data(self.db) { + let trait_def = lower_trait(self.db, trait_); + self.push_predicate(TraitInstId::new( + self.db, + trait_def, + collect_generic_params(self.db, self.owner) + .params(self.db) + .to_vec(), + )); + } + + PredicateListId::new(self.db, self.predicates.into_iter().collect::>()) + } + + fn push_predicate(&mut self, pred: TraitInstId<'db>) { + self.predicates.insert(pred); + } + + fn collect_constraints_from_where_clause(&mut self) { + let Some(where_clause) = self.owner.where_clause(self.db) else { + return; + }; + + for hir_pred in where_clause.data(self.db.as_hir_db()) { + let Some(hir_ty) = hir_pred.ty.to_opt() else { + continue; + }; + + let ty = lower_hir_ty(self.db, hir_ty, self.owner.scope(self.db)); + + // We don't need to collect super traits, please refer to + // [`collect_super_traits`] function for details. 
+ if ty.has_invalid(self.db) || ty.is_trait_self(self.db) { + continue; + } + + self.add_bounds(ty, &hir_pred.bounds); + } + } + + fn collect_constraints_from_generic_params(&mut self) { + let param_set = collect_generic_params(self.db, self.owner); + let param_list = self.owner.params(self.db); + + for (i, hir_param) in param_list.data(self.db.as_hir_db()).iter().enumerate() { + let GenericParam::Type(hir_param) = hir_param else { + continue; + }; + + let ty = param_set.param_by_original_idx(self.db, i).unwrap(); + let bounds = &hir_param.bounds; + self.add_bounds(ty, bounds) + } + } + + fn add_bounds(&mut self, bound_ty: TyId<'db>, bounds: &[TypeBound<'db>]) { + for bound in bounds { + let TypeBound::Trait(trait_ref) = bound else { + continue; + }; + + let Ok(trait_inst) = + lower_trait_ref(self.db, bound_ty, *trait_ref, self.owner.scope(self.db)) + else { + continue; + }; + + self.push_predicate(trait_inst); + } + } +} diff --git a/crates/hir-analysis/src/ty/trait_resolution/mod.rs b/crates/hir-analysis/src/ty/trait_resolution/mod.rs new file mode 100644 index 0000000000..c911f034e1 --- /dev/null +++ b/crates/hir-analysis/src/ty/trait_resolution/mod.rs @@ -0,0 +1,170 @@ +use common::indexmap::IndexSet; +use hir::hir_def::IngotId; + +use super::{ + canonical::{Canonical, Canonicalized, Solution}, + trait_def::TraitInstId, + ty_def::{TyFlags, TyId}, +}; +use crate::{ + ty::{ + trait_resolution::{ + constraint::{collect_trait_constraints, ty_constraints}, + proof_forest::ProofForest, + }, + unify::UnificationTable, + visitor::collect_flags, + }, + HirAnalysisDb, +}; + +pub(crate) mod constraint; +mod proof_forest; + +#[salsa::tracked(return_ref)] +pub fn is_goal_satisfiable<'db>( + db: &'db dyn HirAnalysisDb, + ingot: IngotId<'db>, + goal: Canonical>, + assumptions: PredicateListId<'db>, +) -> GoalSatisfiability<'db> { + let flags = collect_flags(db, goal.value); + if flags.contains(TyFlags::HAS_INVALID) { + return GoalSatisfiability::ContainsInvalid; + }; + + 
ProofForest::new(db, ingot, goal, assumptions).solve() +} + +/// Checks if the given type is well-formed, i.e., the arguments of the given +/// type applications satisfies the constraints under the given assumptions. +#[salsa::tracked] +pub(crate) fn check_ty_wf<'db>( + db: &'db dyn HirAnalysisDb, + ingot: IngotId<'db>, + ty: TyId<'db>, + assumptions: PredicateListId<'db>, +) -> WellFormedness<'db> { + let (_, args) = ty.decompose_ty_app(db); + + for &arg in args { + let wf = check_ty_wf(db, ingot, arg, assumptions); + if !wf.is_wf() { + return wf; + } + } + + let constraints = ty_constraints(db, ty); + + for &goal in constraints.list(db) { + let mut table = UnificationTable::new(db); + let canonical_goal = Canonicalized::new(db, goal); + + if let GoalSatisfiability::UnSat(subgoal) = + is_goal_satisfiable(db, ingot, canonical_goal.value, assumptions) + { + let subgoal = + subgoal.map(|subgoal| canonical_goal.extract_solution(&mut table, subgoal)); + return WellFormedness::IllFormed { goal, subgoal }; + } + } + + WellFormedness::WellFormed +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub(crate) enum WellFormedness<'db> { + WellFormed, + IllFormed { + goal: TraitInstId<'db>, + subgoal: Option>, + }, +} + +impl WellFormedness<'_> { + fn is_wf(self) -> bool { + matches!(self, WellFormedness::WellFormed) + } +} + +/// Checks if the given trait instance are well-formed, i.e., the arguments of +/// the trait satisfies all constraints under the given assumptions. 
+#[salsa::tracked] +pub(crate) fn check_trait_inst_wf<'db>( + db: &'db dyn HirAnalysisDb, + ingot: IngotId<'db>, + trait_inst: TraitInstId<'db>, + assumptions: PredicateListId<'db>, +) -> WellFormedness<'db> { + let constraints = + collect_trait_constraints(db, trait_inst.def(db)).instantiate(db, trait_inst.args(db)); + + for &goal in constraints.list(db) { + let mut table = UnificationTable::new(db); + let canonical_goal = Canonicalized::new(db, goal); + if let GoalSatisfiability::UnSat(subgoal) = + is_goal_satisfiable(db, ingot, canonical_goal.value, assumptions) + { + let subgoal = + subgoal.map(|subgoal| canonical_goal.extract_solution(&mut table, subgoal)); + return WellFormedness::IllFormed { goal, subgoal }; + } + } + + WellFormedness::WellFormed +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum GoalSatisfiability<'db> { + /// Goal is satisfied with the unique solution. + Satisfied(Solution>), + /// Goal might be satisfied, but needs more type information to determine + /// satisfiability and uniqueness. + NeedsConfirmation(IndexSet>>), + + /// Goal contains invalid. + ContainsInvalid, + /// The goal is not satisfied. + /// It contains an unsatisfied subgoal if we can determine the exact subgoal + /// that makes the proof step stuck.
+ UnSat(Option>>), +} + +impl GoalSatisfiability<'_> { + pub fn is_satisfied(&self) -> bool { + matches!( + self, + Self::Satisfied(_) | Self::NeedsConfirmation(_) | Self::ContainsInvalid + ) + } +} + +#[salsa::interned] +pub struct PredicateListId<'db> { + #[return_ref] + pub list: Vec>, +} + +impl<'db> PredicateListId<'db> { + pub(super) fn merge(self, db: &'db dyn HirAnalysisDb, other: Self) -> Self { + let mut predicates = self.list(db).clone(); + predicates.extend(other.list(db)); + PredicateListId::new(db, predicates) + } + + pub(super) fn empty_list(db: &'db dyn HirAnalysisDb) -> Self { + Self::new(db, Vec::new()) + } + + fn extend_by_super(self, db: &'db dyn HirAnalysisDb) -> Self { + let mut super_traits: IndexSet<_> = self.list(db).iter().copied().collect(); + for &pred in self.list(db) { + for &super_trait in pred.def(db).super_traits(db).iter() { + let super_trait = super_trait.instantiate(db, pred.args(db)); + super_traits.insert(super_trait); + } + } + + Self::new(db, super_traits.into_iter().collect::>()) + } +} diff --git a/crates/hir-analysis/src/ty/trait_resolution/proof_forest.rs b/crates/hir-analysis/src/ty/trait_resolution/proof_forest.rs new file mode 100644 index 0000000000..f4cc301a0d --- /dev/null +++ b/crates/hir-analysis/src/ty/trait_resolution/proof_forest.rs @@ -0,0 +1,587 @@ +//! The algorithm for the trait resolution here is based on [`Tabled Typeclass Resolution`](https://arxiv.org/abs/2001.04301). +//! Also, [`XSB: Extending Prolog with Tabled Logic Programming`](https://arxiv.org/pdf/1012.5123) is a nice entry point for more detailed discussions about tabled logic solver. 
+ +use std::collections::BinaryHeap; + +use common::indexmap::IndexSet; +use cranelift_entity::{entity_impl, PrimaryMap}; +use hir::hir_def::IngotId; +use rustc_hash::{FxHashMap, FxHashSet}; + +use super::{GoalSatisfiability, PredicateListId}; +use crate::{ + ty::{ + binder::Binder, + canonical::{Canonical, Canonicalized}, + fold::{TyFoldable, TyFolder}, + trait_def::{impls_for_trait, Implementor, TraitInstId}, + ty_def::{TyData, TyId}, + unify::PersistentUnificationTable, + visitor::{TyVisitable, TyVisitor}, + }, + HirAnalysisDb, +}; +const MAXIMUM_SOLUTION_NUM: usize = 2; +/// The maximum depth of any type that the solver will consider. +/// +/// This constant defines the upper limit on the depth of types that the solver +/// will handle. It is used as a termination condition to prevent the solver +/// from entering infinite loops when encountering coinductive cycles. If a +/// solution for a subgoal or goal exceeds this limit, the solver stops the search and +/// gives up. +const MAXIMUM_TYPE_DEPTH: usize = 256; + +/// The query goal. +/// Since `TraitInstId` contains `Self` type as its first argument, +/// the query for `Implements>` is represented as +/// `Trait`. +type Goal<'db> = Canonical>; +type Solution<'db> = crate::ty::canonical::Solution>; + +/// A structure representing a proof forest used for solving trait goals. +/// +/// The `ProofForest` contains generator and consumer nodes which work together +/// to find solutions to trait goals. It maintains stacks for generator and +/// consumer nodes to keep track of the solving process, and a mapping from +/// goals to generator nodes to avoid redundant computations. +pub(super) struct ProofForest<'db> { + ingot: IngotId<'db>, + + /// The root generator node. + root: GeneratorNode, + + /// An arena of generator nodes. + g_nodes: PrimaryMap>, + /// An arena of consumer nodes. + c_nodes: PrimaryMap>, + /// A stack of generator nodes to be processed.
+ g_stack: Vec, + /// A binary heap used for managing consumer nodes and their solutions. + /// + /// This heap stores tuples of [`OrderedConsumerNode`] and [`Solution`], + /// allowing the solver to efficiently retrieve and prioritize + /// consumer nodes that are closer to the original goal. + c_heap: BinaryHeap<(OrderedConsumerNode, Solution<'db>)>, + + /// A mapping from goals to generator nodes. + goal_to_node: FxHashMap, GeneratorNode>, + + /// The list of assumptions. + assumptions: PredicateListId<'db>, + + /// The maximum number of solutions. + maximum_solution_num: usize, + /// The database for HIR analysis. + db: &'db dyn HirAnalysisDb, +} + +/// A structure representing an ordered consumer node in the proof forest. +/// +/// The `OrderedConsumerNode` contains a consumer node and its root generator +/// node. It is used to prioritize consumer nodes based on their proximity to +/// the original goal. This allows the solver to efficiently retrieve and +/// process consumer nodes that are closer to the original goal, improving the +/// overall solving process. +#[derive(Debug, PartialEq, Eq)] +struct OrderedConsumerNode { + node: ConsumerNode, + root: GeneratorNode, +} +impl PartialOrd for OrderedConsumerNode { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} +impl Ord for OrderedConsumerNode { + fn cmp(&self, other: &Self) -> std::cmp::Ordering { + other.root.cmp(&self.root) + } +} + +impl<'db> ProofForest<'db> { + /// Creates a new `ProofForest` with the given initial goal and assumptions. + /// + /// This function initializes the proof forest with a root generator node + /// for the given goal and sets up the necessary data structures for + /// solving trait goals. + /// + /// # Parameters + /// - `db`: A reference to the HIR analysis database. + /// - `goal`: The initial trait goal to be solved. + /// - `assumptions`: The list of assumptions to be used during the solving + /// process. 
+ /// + /// # Returns + /// A new instance of `ProofForest` initialized with the given goal and + /// assumptions. + pub(super) fn new( + db: &'db dyn HirAnalysisDb, + ingot: IngotId<'db>, + goal: Goal<'db>, + assumptions: PredicateListId<'db>, + ) -> Self { + let assumptions = assumptions.extend_by_super(db); + + let mut forest = Self { + ingot, + root: GeneratorNode(0), // Set temporary root. + g_nodes: PrimaryMap::new(), + c_nodes: PrimaryMap::new(), + g_stack: Vec::new(), + c_heap: BinaryHeap::new(), + goal_to_node: FxHashMap::default(), + assumptions, + maximum_solution_num: MAXIMUM_SOLUTION_NUM, + db, + }; + + let root = forest.new_generator_node(goal); + forest.root = root; + forest + } + + /// Solves the trait goal using a proof forest approach. + /// + /// This function iteratively processes generator and consumer nodes until + /// either the maximum number of solutions is found or no more nodes can + /// be processed. The solving process involves: + /// - Popping solutions from the consumer stack and applying them. + /// - Stepping through generator nodes to find new solutions or sub-goals. + /// - Registering solutions and propagating them to dependent consumer + /// nodes. + /// + /// The function returns `GoalSatisfiability` indicating the status of the + /// goal: + /// - `Satisfied` if exactly one solution is found. + /// - `UnSat` if no solutions are found and an unresolved subgoal is + /// identified. + /// - `NeedsConfirmation` if multiple solutions are found. 
+ pub(super) fn solve(mut self) -> GoalSatisfiability<'db> { + loop { + if self.g_nodes[self.root].solutions.len() >= self.maximum_solution_num { + break; + } + + if let Some((c_node, solution)) = self.c_heap.pop() { + if !c_node.node.apply_solution(&mut self, solution) { + return GoalSatisfiability::NeedsConfirmation(IndexSet::default()); + } + continue; + } + + if let Some(&g_node) = self.g_stack.last() { + if !g_node.step(&mut self) { + self.g_stack.pop(); + } + continue; + } + + break; + } + + let solutions = std::mem::take(&mut self.g_nodes[self.root].solutions); + match solutions.len() { + 1 => GoalSatisfiability::Satisfied(solutions.into_iter().next().unwrap()), + 0 => { + let unresolved_subgoal = self.root.unresolved_subgoal(&mut self); + GoalSatisfiability::UnSat(unresolved_subgoal) + } + _ => GoalSatisfiability::NeedsConfirmation(solutions), + } + } + + fn new_generator_node(&mut self, goal: Goal<'db>) -> GeneratorNode { + let ingot = self.ingot; + let g_node_data = GeneratorNodeData::new(self.db, ingot, goal, self.assumptions); + let g_node = self.g_nodes.push(g_node_data); + self.goal_to_node.insert(goal, g_node); + self.g_stack.push(g_node); + g_node + } + + /// Creates a new consumer node and registers it with the proof forest. + /// + /// This function takes a root generator node, a list of remaining goals, + /// and a persistent unification table. It creates a consumer node that + /// represents a sub-goal that needs to be solved and remaining + /// subgoals. If the goal is not already associated with a generator + /// node, a new generator node is created for it. + /// + /// The consumer node is then registered as a dependent of the corresponding + /// generator node, ensuring that solutions found for the generator node are + /// propagated to the consumer node. + /// + /// # Parameters + /// - `root`: The root generator node of the consumer node. 
+ /// - `remaining_goals`: A list of trait instances that represent the + /// remaining goals to be solved. + /// - `table`: A persistent unification table used for managing unification + /// operations. + /// + /// # Returns + /// A new `ConsumerNode` that is registered with the proof forest. + fn new_consumer_node( + &mut self, + root: GeneratorNode, + mut remaining_goals: Vec>, + table: PersistentUnificationTable<'db>, + ) -> ConsumerNode { + let query = remaining_goals.pop().unwrap(); + let canonicalized_query = Canonicalized::new(self.db, query); + let goal = canonicalized_query.value; + + let c_node_data = ConsumerNodeData { + applied_solutions: FxHashSet::default(), + remaining_goals, + root, + query: (query, canonicalized_query), + table, + children: Vec::new(), + }; + + let c_node = self.c_nodes.push(c_node_data); + if !self.goal_to_node.contains_key(&goal) { + self.new_generator_node(goal); + } + + self.goal_to_node[&goal].add_dependent(self, c_node); + c_node + } +} + +/// A structure representing the data associated with a generator node in the +/// proof forest. +/// +/// The `GeneratorNodeData` contains information about the goal, the unification +/// table, the candidate implementors, the solutions found, and the dependents +/// of the generator node. It also keeps track of the assumptions, the next +/// candidate to be processed, and the child consumer nodes. +struct GeneratorNodeData<'db> { + table: PersistentUnificationTable<'db>, + /// The canonical goal associated with the generator node. + goal: Goal<'db>, + /// The trait instance extracted from the goal. + extracted_goal: TraitInstId<'db>, + /// A set of solutions found for the goal. + solutions: IndexSet>, + /// A list of consumer nodes that depend on this generator node. + dependents: Vec, + /// A list of candidate implementors for the trait. + cands: &'db [Binder>], + /// The list of assumptions for the goal. 
+ assumptions: PredicateListId<'db>, + /// The index of the next candidate to be tried. + next_cand: usize, + /// A list of child consumer nodes created for sub-goals. + children: Vec, +} +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +struct GeneratorNode(u32); +entity_impl!(GeneratorNode); + +impl<'db> GeneratorNodeData<'db> { + fn new( + db: &'db dyn HirAnalysisDb, + ingot: IngotId<'db>, + goal: Goal<'db>, + assumptions: PredicateListId<'db>, + ) -> Self { + let mut table = PersistentUnificationTable::new(db); + let extracted_goal = goal.extract_identity(&mut table); + let cands = impls_for_trait(db, ingot, goal); + + Self { + table, + goal, + extracted_goal, + solutions: IndexSet::default(), + dependents: Vec::new(), + cands, + assumptions, + next_cand: 0, + children: Vec::new(), + } + } +} + +impl GeneratorNode { + /// Registers the given solution with the proof forest and propagates it to + /// dependent consumer nodes. + /// + /// This function canonicalizes the solution and inserts it into the set of + /// solutions for the generator node. If the solution is new, it + /// propagates the solution to all dependent consumer nodes. + /// + /// # Parameters + /// - `pf`: A mutable reference to the `ProofForest`. + /// - `table`: A mutable reference to the `PersistentUnificationTable` used + /// for managing unification operations. + fn register_solution_with<'db>( + self, + pf: &mut ProofForest<'db>, + table: &mut PersistentUnificationTable<'db>, + ) { + let g_node = &mut pf.g_nodes[self]; + let solution = g_node + .goal + .canonicalize_solution(table.db(), table, g_node.extracted_goal); + if g_node.solutions.insert(solution) { + for &c_node in g_node.dependents.iter() { + let ordred_c_node = OrderedConsumerNode { + node: c_node, + root: pf.c_nodes[c_node].root, + }; + pf.c_heap.push((ordred_c_node, solution)); + } + } + } + + /// Advances the solving process for the generator node. 
+ /// + /// This function attempts to find a new solution or sub-goal for the + /// generator node. It iterates through the candidate implementors and + /// assumptions, unifying them with the goal. If a solution is found, it + /// is registered. If a sub-goal is found, a new consumer node is + /// created to handle it. + /// + /// # Parameters + /// - `pf`: A mutable reference to the `ProofForest`. + /// + /// # Returns + /// `true` if a new solution or sub-goal was found and processed; `false` + /// otherwise. + fn step(self, pf: &mut ProofForest) -> bool { + let g_node = &mut pf.g_nodes[self]; + let db = g_node.table.db(); + + while let Some(&cand) = g_node.cands.get(g_node.next_cand) { + g_node.next_cand += 1; + + let mut table = g_node.table.clone(); + let gen_cand = table.instantiate_with_fresh_vars(cand); + if table + .unify(gen_cand.trait_(db), g_node.extracted_goal) + .is_err() + { + continue; + } + + let constraints = gen_cand.constraints(db); + + if constraints.list(db).is_empty() { + self.register_solution_with(pf, &mut table); + } else { + let sub_goals = constraints + .list(db) + .iter() + .map(|c| c.fold_with(&mut table)) + .collect(); + let child = pf.new_consumer_node(self, sub_goals, table); + pf.g_nodes[self].children.push(child); + } + + return true; + } + + let mut next_cand = g_node.next_cand - g_node.cands.len(); + while let Some(&assumption) = g_node.assumptions.list(db).get(next_cand) { + g_node.next_cand += 1; + next_cand += 1; + let mut table = g_node.table.clone(); + if table.unify(assumption, g_node.extracted_goal).is_ok() { + self.register_solution_with(pf, &mut table); + return true; + } + } + + false + } + + fn add_dependent(self, pf: &mut ProofForest, dependent: ConsumerNode) { + let g_node = &mut pf.g_nodes[self]; + g_node.dependents.push(dependent); + for &solution in g_node.solutions.iter() { + let ordered_c_node = OrderedConsumerNode { + node: dependent, + root: pf.c_nodes[dependent].root, + }; + pf.c_heap.push((ordered_c_node, 
solution)) + } + } + + fn unresolved_subgoal<'db>(self, pf: &mut ProofForest<'db>) -> Option> { + let g_node = &pf.g_nodes[self]; + // If the child nodes branch out more than one, we give up identifying the + // unresolved subgoal to avoid generating a large number of uncertain unresolved + // subgoals. + if g_node.children.len() != 1 { + return None; + } + + let child = g_node.children[0]; + child.unresolved_subgoal(pf) + } +} + +struct ConsumerNodeData<'db> { + /// Holds solutions that are already applied. + applied_solutions: FxHashSet>, + remaining_goals: Vec>, + /// The root generator node of the consumer node. + root: GeneratorNode, + + /// The current pending query that is resolved by another [`GeneratorNode`]. + query: (TraitInstId<'db>, Canonicalized<'db, TraitInstId<'db>>), + table: PersistentUnificationTable<'db>, + children: Vec, +} +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +struct ConsumerNode(u32); +entity_impl!(ConsumerNode); + +impl ConsumerNode { + /// Applies a given solution to the consumer node. + /// + /// This function checks if the solution has already been applied. If not, + /// it attempts to unify the solution with the pending query of the + /// consumer node. If the unification is successful and there are no + /// remaining goals, the solution is registered with the root generator + /// node. If there are remaining goals, a new consumer node is created + /// to handle them. + /// + /// # Parameters + /// - `pf`: A mutable reference to the `ProofForest`. + /// - `solution`: The solution to be applied. + fn apply_solution<'db>(self, pf: &mut ProofForest<'db>, solution: Solution<'db>) -> bool { + let c_node = &mut pf.c_nodes[self]; + + // If the solutions is already applied, do nothing. + if !c_node.applied_solutions.insert(solution) { + return true; + } + + let mut table = c_node.table.clone(); + + // Extract solution to the current env. 
+ let (pending_inst, canonicalized_pending_inst) = &c_node.query; + let solution = canonicalized_pending_inst.extract_solution(&mut table, solution); + + // Try to unifies pending inst and solution. + if table.unify(*pending_inst, solution).is_err() { + return true; + } + + let tree_root = c_node.root; + + if c_node.remaining_goals.is_empty() { + // If no remaining goals in the consumer node, it's the solution for the root + // goal. + tree_root.register_solution_with(pf, &mut table); + } else { + // Create a child consumer node for the subgoals. + let remaining_goals = c_node.remaining_goals.clone(); + let child = pf.new_consumer_node(tree_root, remaining_goals, table); + pf.c_nodes[self].children.push(child); + } + + maximum_ty_depth(pf.db, solution) <= MAXIMUM_TYPE_DEPTH + } + + fn unresolved_subgoal<'db>(self, pf: &mut ProofForest<'db>) -> Option> { + let c_node = &mut pf.c_nodes[self]; + if c_node.children.len() != 1 { + let unsat = c_node.query.0; + let unsat = pf.g_nodes[c_node.root].goal.canonicalize_solution( + c_node.table.db(), + &mut c_node.table, + unsat, + ); + return Some(unsat); + } + + c_node.children[0].unresolved_subgoal(pf) + } +} + +/// Computes the depth of a given type. +/// +/// The depth of a type is defined as the maximum depth of its subcomponents +/// plus one. For example, a simple type like `i32` has a depth of 1, while a +/// compound type like `Option>` would have a depth +/// reflecting the nesting of its components. +/// +/// # Parameters +/// - `db`: A reference to the HIR analysis database. +/// - `ty`: The type for which the depth is to be computed. +/// +/// # Returns +/// The depth of the type as a `usize`. +/// +/// # Note +/// This function is a stop gap solution to ensure termination when the solver +/// encounters coinductive cycles. It serves as a temporary solution until the +/// solver can properly handle coinductive cycles. 
+#[salsa::tracked] +pub(crate) fn ty_depth_impl<'db>(db: &'db dyn HirAnalysisDb, ty: TyId<'db>) -> usize { + match ty.data(db) { + TyData::ConstTy(cty) => ty_depth_impl(db, cty.ty(db)), + TyData::Invalid(_) + | TyData::Never + | TyData::TyBase(_) + | TyData::TyParam(_) + | TyData::TyVar(_) => 1, + TyData::TyApp(lhs, rhs) => { + let lhs_depth = ty_depth_impl(db, *lhs); + let rhs_depth = ty_depth_impl(db, *rhs); + std::cmp::max(lhs_depth, rhs_depth) + 1 + } + } +} + +/// Computes the maximum depth of any type within a visitable structure. +/// +/// This function traverses the given visitable structure and computes the +/// maximum depth of any type it encounters. The depth of a type is defined +/// as the maximum depth of its subcomponents plus one. For example, a simple +/// type like `i32` has a depth of 1, while a compound type like +/// `Option>` would have a depth reflecting the nesting +/// of its components. +/// +/// # Parameters +/// - `db`: A reference to the HIR analysis database. +/// - `v`: The visitable structure for which the maximum type depth is to be +/// computed. +/// +/// # Returns +/// The maximum depth of any type within the visitable structure as a `usize`. +/// +/// # Note +/// This function is a stop gap solution to ensure termination when the solver +/// encounters coinductive cycles. It serves as a temporary solution until the +/// solver can properly handle coinductive cycles. 
+fn maximum_ty_depth<'db, V>(db: &'db dyn HirAnalysisDb, v: V) -> usize +where + V: TyVisitable<'db>, +{ + struct DepthVisitor<'db> { + db: &'db dyn HirAnalysisDb, + max_depth: usize, + } + + impl<'db> TyVisitor<'db> for DepthVisitor<'db> { + fn db(&self) -> &'db dyn HirAnalysisDb { + self.db + } + + fn visit_ty(&mut self, ty: TyId) { + let depth = ty_depth_impl(self.db, ty); + if depth > self.max_depth { + self.max_depth = depth; + } + } + } + + let mut visitor = DepthVisitor { db, max_depth: 0 }; + v.visit_with(&mut visitor); + visitor.max_depth +} diff --git a/crates/hir-analysis/src/ty/ty_check/callable.rs b/crates/hir-analysis/src/ty/ty_check/callable.rs new file mode 100644 index 0000000000..5765305c15 --- /dev/null +++ b/crates/hir-analysis/src/ty/ty_check/callable.rs @@ -0,0 +1,228 @@ +use hir::{ + hir_def::{CallArg as HirCallArg, ExprId, GenericArgListId, IdentId}, + span::{ + expr::{LazyCallArgListSpan, LazyCallArgSpan}, + params::LazyGenericArgListSpan, + DynLazySpan, + }, +}; +use if_chain::if_chain; + +use super::{ExprProp, TyChecker}; +use crate::{ + ty::{ + diagnostics::{BodyDiag, FuncBodyDiag}, + fold::{TyFoldable, TyFolder}, + func_def::FuncDef, + ty_def::{TyBase, TyData, TyId}, + ty_lower::lower_generic_arg_list, + visitor::{TyVisitable, TyVisitor}, + }, + HirAnalysisDb, +}; + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct Callable<'db> { + func_def: FuncDef<'db>, + generic_args: Vec>, +} + +impl<'db> TyVisitable<'db> for Callable<'db> { + fn visit_with(&self, visitor: &mut V) + where + V: TyVisitor<'db>, + { + self.generic_args.visit_with(visitor) + } +} + +impl<'db> TyFoldable<'db> for Callable<'db> { + fn super_fold_with(self, folder: &mut F) -> Self + where + F: TyFolder<'db>, + { + Self { + func_def: self.func_def, + generic_args: self.generic_args.fold_with(folder), + } + } +} + +impl<'db> Callable<'db> { + pub(super) fn new( + db: &'db dyn HirAnalysisDb, + ty: TyId<'db>, + span: DynLazySpan<'db>, + ) -> Result> { + let (base, 
args) = ty.decompose_ty_app(db); + + if base.is_ty_var(db) { + return Err(BodyDiag::TypeMustBeKnown(span).into()); + } + + let TyData::TyBase(TyBase::Func(func_def)) = base.data(db) else { + return Err(BodyDiag::not_callable(db, span, ty).into()); + }; + + let params = ty.generic_args(db); + assert_eq!(params.len(), args.len()); + + Ok(Self { + func_def: *func_def, + generic_args: args.to_vec(), + }) + } + + pub fn ret_ty(&self, db: &'db dyn HirAnalysisDb) -> TyId<'db> { + self.func_def.ret_ty(db).instantiate(db, &self.generic_args) + } + + pub fn ty(&self, db: &'db dyn HirAnalysisDb) -> TyId<'db> { + let ty = TyId::func(db, self.func_def); + TyId::foldl(db, ty, &self.generic_args) + } + + pub(super) fn unify_generic_args( + &mut self, + tc: &mut TyChecker<'db>, + args: GenericArgListId<'db>, + span: LazyGenericArgListSpan<'db>, + ) -> bool { + let db = tc.db; + let hir_db = db.as_hir_db(); + + if !args.is_given(hir_db) { + return true; + } + + let given_args = lower_generic_arg_list(db, args, tc.env.scope()); + let offset = self.func_def.offset_to_explicit_params_position(db); + let current_args = &mut self.generic_args[offset..]; + + if current_args.len() != given_args.len() { + let diag = BodyDiag::CallGenericArgNumMismatch { + primary: span.into(), + def_span: self.func_def.name_span(db), + given: given_args.len(), + expected: current_args.len(), + }; + tc.push_diag(diag); + + return false; + } + + for (i, (&given, arg)) in given_args.iter().zip(current_args.iter_mut()).enumerate() { + *arg = tc.equate_ty(given, *arg, span.arg(i).into()); + } + + true + } + + pub(super) fn check_args( + &self, + tc: &mut TyChecker<'db>, + call_args: &[HirCallArg<'db>], + span: LazyCallArgListSpan<'db>, + receiver: Option<(ExprId, ExprProp<'db>)>, + ) { + let db = tc.db; + + let expected_arity = self.func_def.arg_tys(db).len(); + let given_arity = if receiver.is_some() { + call_args.len() + 1 + } else { + call_args.len() + }; + if given_arity != expected_arity { + let diag = 
BodyDiag::CallArgNumMismatch { + primary: span.into(), + def_span: self.func_def.name_span(db), + given: given_arity, + expected: expected_arity, + }; + tc.push_diag(diag); + return; + } + + let mut args = if let Some((receiver_expr, receiver_prop)) = receiver { + let mut args = Vec::with_capacity(call_args.len() + 1); + let arg = CallArg::new( + IdentId::make_self(db.as_hir_db()).into(), + receiver_prop, + None, + receiver_expr.lazy_span(tc.body()).into(), + ); + args.push(arg); + args + } else { + Vec::with_capacity(call_args.len()) + }; + + for (i, hir_arg) in call_args.iter().enumerate() { + let arg = CallArg::from_hir_arg(tc, hir_arg, span.arg(i)); + args.push(arg); + } + + for (i, (given, expected)) in args + .into_iter() + .zip(self.func_def.arg_tys(db).iter()) + .enumerate() + { + if_chain! { + if let Some(expected_label) = self.func_def.param_label(db, i); + if !expected_label.is_self(db.as_hir_db()); + if Some(expected_label) != given.label; + then { + let diag = BodyDiag::CallArgLabelMismatch { + primary: given.label_span.unwrap_or(given.expr_span.clone()), + def_span: self.func_def.name_span(db), + given: given.label, + expected: expected_label, + }; + tc.push_diag(diag); + } + } + + let expected = expected.instantiate(db, &self.generic_args); + tc.equate_ty(given.expr_prop.ty, expected, given.expr_span); + } + } +} + +/// The lowered representation of [`HirCallArg`] +struct CallArg<'db> { + label: Option>, + expr_prop: ExprProp<'db>, + label_span: Option>, + expr_span: DynLazySpan<'db>, +} + +impl<'db> CallArg<'db> { + fn from_hir_arg( + tc: &mut TyChecker<'db>, + arg: &HirCallArg<'db>, + span: LazyCallArgSpan<'db>, + ) -> Self { + let ty = tc.fresh_ty(); + let expr_prop = tc.check_expr(arg.expr, ty); + let label = arg.label_eagerly(tc.db.as_hir_db(), tc.body()); + let label_span = arg.label.is_some().then(|| span.label().into()); + let expr_span = span.expr().into(); + + Self::new(label, expr_prop, label_span, expr_span) + } + + fn new( + label: 
Option>, + expr_prop: ExprProp<'db>, + label_span: Option>, + expr_span: DynLazySpan<'db>, + ) -> Self { + Self { + label, + expr_prop, + label_span, + expr_span, + } + } +} diff --git a/crates/hir-analysis/src/ty/ty_check/env.rs b/crates/hir-analysis/src/ty/ty_check/env.rs new file mode 100644 index 0000000000..0827ff2bd9 --- /dev/null +++ b/crates/hir-analysis/src/ty/ty_check/env.rs @@ -0,0 +1,466 @@ +use hir::{ + hir_def::{ + prim_ty::PrimTy, scope_graph::ScopeId, Body, BodyKind, Expr, ExprId, Func, IdentId, + IntegerId, Partial, Pat, PatId, Stmt, StmtId, + }, + span::DynLazySpan, +}; +use rustc_hash::FxHashMap; + +use super::{Callable, TypedBody}; +use crate::{ + ty::{ + canonical::{Canonical, Canonicalized}, + const_ty::{ConstTyData, ConstTyId, EvaluatedConstTy}, + diagnostics::{BodyDiag, FuncBodyDiag}, + fold::{TyFoldable, TyFolder}, + func_def::{lower_func, FuncDef}, + trait_def::TraitInstId, + trait_resolution::{ + constraint::collect_func_def_constraints, is_goal_satisfiable, GoalSatisfiability, + PredicateListId, + }, + ty_def::{InvalidCause, TyData, TyId, TyVarSort}, + ty_lower::lower_hir_ty, + unify::UnificationTable, + }, + HirAnalysisDb, +}; + +pub(super) struct TyCheckEnv<'db> { + db: &'db dyn HirAnalysisDb, + body: Body<'db>, + + pat_ty: FxHashMap>, + expr_ty: FxHashMap>, + callables: FxHashMap>, + + pending_confirmations: Vec<(TraitInstId<'db>, DynLazySpan<'db>)>, + + var_env: Vec>, + pending_vars: FxHashMap, LocalBinding<'db>>, + loop_stack: Vec, +} + +impl<'db> TyCheckEnv<'db> { + pub(super) fn new_with_func(db: &'db dyn HirAnalysisDb, func: Func<'db>) -> Result { + let hir_db = db.as_hir_db(); + let Some(body) = func.body(hir_db) else { + return Err(()); + }; + + let mut env = Self { + db, + body, + pat_ty: FxHashMap::default(), + expr_ty: FxHashMap::default(), + callables: FxHashMap::default(), + pending_confirmations: Vec::new(), + var_env: vec![BlockEnv::new(func.scope(), 0)], + pending_vars: FxHashMap::default(), + loop_stack: Vec::new(), + 
}; + + env.enter_scope(body.expr(hir_db)); + + let Some(params) = func.params(hir_db).to_opt() else { + return Err(()); + }; + + for (idx, param) in params.data(hir_db).iter().enumerate() { + let Some(name) = param.name() else { + continue; + }; + + let mut ty = match param.ty { + Partial::Present(hir_ty) => lower_hir_ty(db, hir_ty, func.scope()), + Partial::Absent => TyId::invalid(db, InvalidCause::Other), + }; + + if !ty.is_star_kind(db) { + ty = TyId::invalid(db, InvalidCause::Other); + } + let var = LocalBinding::Param { + idx, + ty, + is_mut: param.is_mut, + }; + + env.var_env.last_mut().unwrap().register_var(name, var); + } + + Ok(env) + } + + pub(super) fn typed_expr(&self, expr: ExprId) -> Option> { + self.expr_ty.get(&expr).copied() + } + + pub(super) fn binding_def_span(&self, binding: LocalBinding<'db>) -> DynLazySpan<'db> { + binding.def_span(self) + } + + pub(super) fn register_callable(&mut self, expr: ExprId, callable: Callable<'db>) { + if self.callables.insert(expr, callable).is_some() { + panic!("callable is already registered for the given expr") + } + } + pub(super) fn binding_name(&self, binding: LocalBinding<'db>) -> IdentId<'db> { + binding.binding_name(self) + } + + /// Returns a function if the `body` being checked has `BodyKind::FuncBody`. 
+ /// If the `body` has `BodyKind::Anonymous`, returns None + pub(super) fn func(&self) -> Option> { + let func = match self.body.body_kind(self.db.as_hir_db()) { + BodyKind::FuncBody => self.var_env.first()?.scope.item().try_into().ok(), + BodyKind::Anonymous => None, + }?; + + lower_func(self.db, func) + } + + pub(super) fn assumptions(&self) -> PredicateListId<'db> { + match self.func() { + Some(func) => collect_func_def_constraints(self.db, func, true).instantiate_identity(), + None => PredicateListId::empty_list(self.db), + } + } + + pub(super) fn body(&self) -> Body<'db> { + self.body + } + + pub(super) fn lookup_binding_ty(&self, binding: LocalBinding<'db>) -> TyId<'db> { + match binding { + LocalBinding::Local { pat, .. } => self + .pat_ty + .get(&pat) + .copied() + .unwrap_or_else(|| TyId::invalid(self.db, InvalidCause::Other)), + + LocalBinding::Param { ty, .. } => ty, + } + } + + pub(super) fn enter_scope(&mut self, block: ExprId) { + let new_scope = match block.data(self.db.as_hir_db(), self.body) { + Partial::Present(Expr::Block(_)) => ScopeId::Block(self.body, block), + _ => self.scope(), + }; + + let var_env = BlockEnv::new(new_scope, self.var_env.len()); + self.var_env.push(var_env); + } + + pub(super) fn leave_scope(&mut self) { + self.var_env.pop().unwrap(); + } + + pub(super) fn enter_loop(&mut self, stmt: StmtId) { + self.loop_stack.push(stmt); + } + + pub(super) fn leave_loop(&mut self) { + self.loop_stack.pop(); + } + + pub(super) fn current_loop(&self) -> Option { + self.loop_stack.last().copied() + } + + pub(super) fn type_expr(&mut self, expr: ExprId, typed: ExprProp<'db>) { + self.expr_ty.insert(expr, typed); + } + + pub(super) fn type_pat(&mut self, pat: PatId, ty: TyId<'db>) { + self.pat_ty.insert(pat, ty); + } + + /// Register a pending binding which will be added when `flush_pending_vars` + /// is called. 
+ pub(super) fn register_pending_binding( + &mut self, + name: IdentId<'db>, + binding: LocalBinding<'db>, + ) { + self.pending_vars.insert(name, binding); + } + + /// Flush pending bindings to the current scope environment. + pub(super) fn flush_pending_bindings(&mut self) { + let var_env = self.var_env.last_mut().unwrap(); + for (name, binding) in self.pending_vars.drain() { + var_env.register_var(name, binding); + } + } + + pub(super) fn register_confirmation(&mut self, inst: TraitInstId<'db>, span: DynLazySpan<'db>) { + self.pending_confirmations.push((inst, span)) + } + + pub(super) fn finish( + mut self, + table: &mut UnificationTable<'db>, + sink: &mut Vec>, + ) -> TypedBody<'db> { + let mut prober = Prober { table }; + self.perform_pending_confirmation(&mut prober, sink); + + self.expr_ty + .values_mut() + .for_each(|ty| *ty = ty.fold_with(&mut prober)); + + self.pat_ty + .values_mut() + .for_each(|ty| *ty = ty.fold_with(&mut prober)); + + let callables = self + .callables + .into_iter() + .map(|(expr, callable)| (expr, callable.fold_with(&mut prober))) + .collect(); + + TypedBody { + body: Some(self.body), + pat_ty: self.pat_ty, + expr_ty: self.expr_ty, + callables, + } + } + + pub(super) fn expr_data(&self, expr: ExprId) -> &'db Partial> { + expr.data(self.db.as_hir_db(), self.body) + } + + pub(super) fn stmt_data(&self, stmt: StmtId) -> &'db Partial> { + stmt.data(self.db.as_hir_db(), self.body) + } + + pub(super) fn scope(&self) -> ScopeId<'db> { + self.var_env.last().unwrap().scope + } + + pub(super) fn current_block_idx(&self) -> usize { + self.var_env.last().unwrap().idx + } + + pub(super) fn get_block(&self, idx: usize) -> &BlockEnv<'db> { + &self.var_env[idx] + } + + fn perform_pending_confirmation( + &self, + prober: &mut Prober<'db, '_>, + sink: &mut Vec>, + ) { + let assumptions = self.assumptions(); + let mut changed = true; + let hir_db = self.db.as_hir_db(); + let ingot = self.body().top_mod(hir_db).ingot(hir_db); + // Try to perform 
confirmation until all pending confirmations reaches to + // the fixed point. + while changed { + changed = false; + for (inst, _) in &self.pending_confirmations { + let inst = inst.fold_with(prober); + let canonical_inst = Canonicalized::new(self.db, inst); + if let GoalSatisfiability::Satisfied(solution) = + is_goal_satisfiable(self.db, ingot, canonical_inst.value, assumptions) + { + let solution = canonical_inst.extract_solution(prober.table, *solution); + prober.table.unify(inst, solution).unwrap(); + + // We need compare old and new inst in a canonical form since a new inst might + // introduce new type variable in some cases. + // In other word, we need to check ⍺-equivalence to know whether the + // confirmation step move forward. + let new_canonical_inst = Canonical::new(self.db, inst.fold_with(prober.table)); + changed |= new_canonical_inst != canonical_inst.value; + } + } + } + + // Finds ambiguous trait inst and emits diags. + for (inst, span) in &self.pending_confirmations { + let inst = inst.fold_with(prober); + let canonical_inst = Canonicalized::new(self.db, inst); + match is_goal_satisfiable(self.db, ingot, canonical_inst.value, assumptions) { + GoalSatisfiability::NeedsConfirmation(ambiguous) => { + let insts = ambiguous + .iter() + .map(|solution| canonical_inst.extract_solution(prober.table, *solution)) + .collect(); + + if !inst.self_ty(self.db).has_var(self.db) { + let diag = BodyDiag::ambiguous_trait_inst(self.db, span.clone(), insts); + sink.push(diag.into()) + } + } + + _ => { + // WF is checked by `TyCheckerFinalizer` + } + } + } + } +} + +pub(super) struct BlockEnv<'db> { + pub(super) scope: ScopeId<'db>, + pub(super) vars: FxHashMap, LocalBinding<'db>>, + idx: usize, +} + +impl<'db> BlockEnv<'db> { + pub(super) fn lookup_var(&self, var: IdentId<'db>) -> Option> { + self.vars.get(&var).copied() + } + + fn new(scope: ScopeId<'db>, idx: usize) -> Self { + Self { + scope, + vars: FxHashMap::default(), + idx, + } + } + + fn register_var(&mut 
self, name: IdentId<'db>, var: LocalBinding<'db>) { + self.vars.insert(name, var); + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct ExprProp<'db> { + pub ty: TyId<'db>, + pub is_mut: bool, + pub(crate) binding: Option>, +} + +impl<'db> ExprProp<'db> { + pub(super) fn new(ty: TyId<'db>, is_mut: bool) -> Self { + Self { + ty, + is_mut, + binding: None, + } + } + + pub(super) fn new_binding_ref(ty: TyId<'db>, is_mut: bool, binding: LocalBinding<'db>) -> Self { + Self { + ty, + is_mut, + binding: Some(binding), + } + } + + pub(super) fn binding(&self) -> Option> { + self.binding + } + + pub(super) fn swap_ty(&mut self, ty: TyId<'db>) -> TyId<'db> { + std::mem::replace(&mut self.ty, ty) + } + + pub(super) fn invalid(db: &'db dyn HirAnalysisDb) -> Self { + Self { + ty: TyId::invalid(db, InvalidCause::Other), + is_mut: true, + binding: None, + } + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub(crate) enum LocalBinding<'db> { + Local { + pat: PatId, + is_mut: bool, + }, + Param { + idx: usize, + ty: TyId<'db>, + is_mut: bool, + }, +} + +impl<'db> LocalBinding<'db> { + pub(super) fn local(pat: PatId, is_mut: bool) -> Self { + Self::Local { pat, is_mut } + } + + pub(super) fn is_mut(&self) -> bool { + match self { + LocalBinding::Local { is_mut, .. } | LocalBinding::Param { is_mut, .. } => *is_mut, + } + } + + pub(super) fn binding_name(&self, env: &TyCheckEnv<'db>) -> IdentId<'db> { + let hir_db = env.db.as_hir_db(); + match self { + Self::Local { pat, .. } => { + let Partial::Present(Pat::Path(Partial::Present(path), ..)) = + pat.data(hir_db, env.body()) + else { + unreachable!(); + }; + path.ident(hir_db).to_opt().unwrap() + } + + Self::Param { idx, .. 
} => { + let func = env.func().unwrap(); + let Partial::Present(func_params) = + func.hir_func_def(env.db).unwrap().params(hir_db) + else { + unreachable!(); + }; + + func_params.data(hir_db)[*idx].name().unwrap() + } + } + } + + fn def_span(&self, env: &TyCheckEnv<'db>) -> DynLazySpan<'db> { + match self { + LocalBinding::Local { pat, .. } => pat.lazy_span(env.body).into(), + LocalBinding::Param { idx, .. } => { + let hir_func = env.func().unwrap().hir_func_def(env.db).unwrap(); + hir_func + .lazy_span() + .params_moved() + .param(*idx) + .name_moved() + .into() + } + } + } +} + +struct Prober<'db, 'a> { + table: &'a mut UnificationTable<'db>, +} + +impl<'db> TyFolder<'db> for Prober<'db, '_> { + fn db(&self) -> &'db dyn HirAnalysisDb { + self.table.db() + } + + fn fold_ty(&mut self, ty: TyId<'db>) -> TyId<'db> { + let ty = self.table.fold_ty(ty); + let TyData::TyVar(var) = ty.data(self.db()) else { + return ty.super_fold_with(self); + }; + + // String type variable fallback. + if let TyVarSort::String(len) = var.sort { + let ty = TyId::new(self.db(), TyData::TyBase(PrimTy::String.into())); + let len = EvaluatedConstTy::LitInt(IntegerId::new(self.db().as_hir_db(), len.into())); + let len = + ConstTyData::Evaluated(len, ty.applicable_ty(self.db()).unwrap().const_ty.unwrap()); + let len = TyId::const_ty(self.db(), ConstTyId::new(self.db(), len)); + TyId::app(self.db(), ty, len) + } else { + ty.super_fold_with(self) + } + } +} diff --git a/crates/hir-analysis/src/ty/ty_check/expr.rs b/crates/hir-analysis/src/ty/ty_check/expr.rs new file mode 100644 index 0000000000..d83645022c --- /dev/null +++ b/crates/hir-analysis/src/ty/ty_check/expr.rs @@ -0,0 +1,1208 @@ +use either::Either; +use hir::{ + hir_def::{ + ArithBinOp, BinOp, Expr, ExprId, FieldIndex, GenericArgListId, IdentId, Partial, PathId, + UnOp, VariantKind, + }, + span::path::LazyPathSpan, +}; + +use super::{ + env::{ExprProp, LocalBinding, TyCheckEnv}, + path::ResolvedPathInBody, + RecordLike, Typeable, +}; 
+use crate::{ + name_resolution::{ + diagnostics::NameResDiag, is_scope_visible_from, resolve_name_res, resolve_path, + resolve_query, EarlyNameQueryId, NameDomain, NameResBucket, PathRes, QueryDirective, + }, + ty::{ + canonical::Canonicalized, + const_ty::ConstTyId, + diagnostics::BodyDiag, + ty_check::{ + callable::Callable, + method_selection::{select_method_candidate, Candidate}, + path::RecordInitChecker, + TyChecker, + }, + ty_def::{InvalidCause, TyId}, + }, + HirAnalysisDb, +}; + +impl<'db> TyChecker<'db> { + pub(super) fn check_expr(&mut self, expr: ExprId, expected: TyId<'db>) -> ExprProp<'db> { + let Partial::Present(expr_data) = self.env.expr_data(expr) else { + let typed = ExprProp::invalid(self.db); + self.env.type_expr(expr, typed); + return typed; + }; + + let mut actual = match expr_data { + Expr::Lit(lit) => { + let ty = self.lit_ty(lit); + ExprProp::new(ty, true) + } + Expr::Block(..) => self.check_block(expr, expr_data, expected), + Expr::Un(..) => self.check_unary(expr, expr_data), + Expr::Bin(..) => self.check_binary(expr, expr_data), + Expr::Call(..) => self.check_call(expr, expr_data), + Expr::MethodCall(..) => self.check_method_call(expr, expr_data), + Expr::Path(..) => self.check_path(expr, expr_data), + Expr::RecordInit(..) => self.check_record_init(expr, expr_data), + Expr::Field(..) => self.check_field(expr, expr_data), + Expr::Tuple(..) => self.check_tuple(expr, expr_data, expected), + Expr::Index(..) => self.check_index(expr, expr_data), + Expr::Array(..) => self.check_array(expr, expr_data, expected), + Expr::ArrayRep(..) => self.check_array_rep(expr, expr_data, expected), + Expr::If(..) => self.check_if(expr, expr_data), + Expr::Match(..) => self.check_match(expr, expr_data), + Expr::Assign(..) => self.check_assign(expr, expr_data), + Expr::AugAssign(..) 
=> self.check_aug_assign(expr, expr_data), + }; + + let typeable = Typeable::Expr(expr, actual); + let ty = self.unify_ty(typeable, actual.ty, expected); + actual.swap_ty(ty); + actual + } + + fn check_block( + &mut self, + expr: ExprId, + expr_data: &Expr<'db>, + expected: TyId<'db>, + ) -> ExprProp<'db> { + let Expr::Block(stmts) = expr_data else { + unreachable!() + }; + + if stmts.is_empty() { + ExprProp::new(TyId::unit(self.db), true) + } else { + self.env.enter_scope(expr); + for &stmt in stmts[..stmts.len() - 1].iter() { + let ty = self.fresh_ty(); + self.check_stmt(stmt, ty); + } + + let last_stmt = stmts[stmts.len() - 1]; + let res = self.check_stmt(last_stmt, expected); + self.env.leave_scope(); + ExprProp::new(res, true) + } + } + + fn check_unary(&mut self, expr: ExprId, expr_data: &Expr<'db>) -> ExprProp<'db> { + let Expr::Un(lhs, op) = expr_data else { + unreachable!() + }; + let Partial::Present(op) = op else { + return ExprProp::invalid(self.db); + }; + + let expr_ty = self.fresh_ty(); + let typed_expr = self.check_expr(*lhs, expr_ty); + let expr_ty = typed_expr.ty; + + if expr_ty.has_invalid(self.db) { + return ExprProp::invalid(self.db); + } + + match op { + UnOp::Plus | UnOp::Minus => { + if expr_ty.is_integral(self.db) { + return typed_expr; + } + } + + UnOp::Not => { + if expr_ty.is_bool(self.db) { + return typed_expr; + } + } + + UnOp::BitNot => { + if expr_ty.is_integral(self.db) { + return typed_expr; + } + } + } + + let base_ty = expr_ty.base_ty(self.db); + if base_ty.is_ty_var(self.db) { + let diag = BodyDiag::TypeMustBeKnown(lhs.lazy_span(self.body()).into()); + self.push_diag(diag); + return ExprProp::invalid(self.db); + } + + // TODO: We need to check if the type implements a trait corresponding to the + // operator when these traits are defined in `std`. 
+ let diag = BodyDiag::ops_trait_not_implemented( + self.db, + expr.lazy_span(self.body()).into(), + expr_ty, + *op, + ); + self.push_diag(diag); + + ExprProp::invalid(self.db) + } + + fn check_binary(&mut self, expr: ExprId, expr_data: &Expr<'db>) -> ExprProp<'db> { + let Expr::Bin(lhs, rhs, op) = expr_data else { + unreachable!() + }; + let Partial::Present(op) = op else { + return ExprProp::invalid(self.db); + }; + + let lhs_ty = self.fresh_ty(); + let typed_lhs = self.check_expr(*lhs, lhs_ty); + let lhs_ty = typed_lhs.ty; + if lhs_ty.has_invalid(self.db) { + return ExprProp::invalid(self.db); + } + + match op { + BinOp::Arith(arith_op) => { + use hir::hir_def::ArithBinOp::*; + + let typed_rhs = self.check_expr(*rhs, lhs_ty); + let rhs_ty = typed_rhs.ty; + if rhs_ty.has_invalid(self.db) { + return ExprProp::invalid(self.db); + } + + match arith_op { + Add | Sub | Mul | Div | Rem | Pow | LShift | RShift => { + if lhs_ty.is_integral(self.db) { + return typed_rhs; + } + } + + BitAnd | BitOr | BitXor => { + if lhs_ty.is_integral(self.db) | lhs_ty.is_bool(self.db) { + return typed_rhs; + } + } + } + } + + BinOp::Comp(comp_op) => { + use hir::hir_def::CompBinOp::*; + + let typed_rhs = self.check_expr(*rhs, lhs_ty); + let rhs_ty = typed_rhs.ty; + if rhs_ty.has_invalid(self.db) { + return ExprProp::invalid(self.db); + } + + match comp_op { + Eq | NotEq => { + if lhs_ty.is_integral(self.db) | lhs_ty.is_bool(self.db) { + let ty = TyId::bool(self.db); + return ExprProp::new(ty, true); + } + } + + Lt | LtEq | Gt | GtEq => { + if lhs_ty.is_integral(self.db) { + let ty = TyId::bool(self.db); + return ExprProp::new(ty, true); + } + } + } + } + + BinOp::Logical(logical_op) => { + use hir::hir_def::LogicalBinOp::*; + + let typed_rhs = self.check_expr(*rhs, lhs_ty); + let rhs_ty = typed_rhs.ty; + if rhs_ty.has_invalid(self.db) { + return ExprProp::invalid(self.db); + } + + match logical_op { + And | Or => { + if lhs_ty.is_bool(self.db) & rhs_ty.is_bool(self.db) { + let ty = 
TyId::bool(self.db); + return ExprProp::new(ty, true); + } + } + } + } + } + + let lhs_base_ty = lhs_ty.base_ty(self.db); + if lhs_base_ty.is_ty_var(self.db) { + let diag = BodyDiag::TypeMustBeKnown(lhs.lazy_span(self.body()).into()); + self.push_diag(diag); + return ExprProp::invalid(self.db); + } + + // TODO: We need to check if the type implements a trait corresponding to the + // operator when these traits are defined in `std`. + let diag = BodyDiag::ops_trait_not_implemented( + self.db, + expr.lazy_span(self.body()).into(), + lhs_ty, + *op, + ); + self.push_diag(diag); + + ExprProp::invalid(self.db) + } + + fn check_call(&mut self, expr: ExprId, expr_data: &Expr<'db>) -> ExprProp<'db> { + let Expr::Call(callee, args) = expr_data else { + unreachable!() + }; + let callee_ty = self.fresh_ty(); + let callee_ty = self.check_expr(*callee, callee_ty).ty; + + if callee_ty.has_invalid(self.db) { + return ExprProp::invalid(self.db); + } + + let mut callable = + match Callable::new(self.db, callee_ty, callee.lazy_span(self.body()).into()) { + Ok(callable) => callable, + Err(diag) => { + self.push_diag(diag); + return ExprProp::invalid(self.db); + } + }; + + let call_span = expr.lazy_span(self.body()).into_call_expr(); + + if let Partial::Present(Expr::Path(Partial::Present(path))) = + callee.data(self.db.as_hir_db(), self.body()) + { + let idx = path.segment_index(self.db.as_hir_db()); + + if !callable.unify_generic_args( + self, + path.generic_args(self.db.as_hir_db()), + expr.lazy_span(self.body()) + .into_path_expr() + .path() + .segment(idx) + .generic_args(), + ) { + return ExprProp::invalid(self.db); + } + }; + + callable.check_args(self, args, call_span.args_moved(), None); + + let ret_ty = callable.ret_ty(self.db); + self.env.register_callable(expr, callable); + ExprProp::new(ret_ty, true) + } + + fn check_method_call(&mut self, expr: ExprId, expr_data: &Expr<'db>) -> ExprProp<'db> { + let Expr::MethodCall(receiver, method_name, generic_args, args) = expr_data 
else { + unreachable!() + }; + let call_span = expr.lazy_span(self.body()).into_method_call_expr(); + let Some(method_name) = method_name.to_opt() else { + return ExprProp::invalid(self.db); + }; + + let receiver_prop = self.fresh_ty(); + let receiver_prop = self.check_expr(*receiver, receiver_prop); + if receiver_prop.ty.has_invalid(self.db) { + return ExprProp::invalid(self.db); + } + + let assumptions = self.env.assumptions(); + + let canonical_r_ty = Canonicalized::new(self.db, receiver_prop.ty); + let candidate = match select_method_candidate( + self.db, + (canonical_r_ty.value, receiver.lazy_span(self.body()).into()), + (method_name, call_span.method_name().into()), + self.env.scope(), + assumptions, + ) { + Ok(candidate) => candidate, + Err(diag) => { + self.push_diag(diag); + return ExprProp::invalid(self.db); + } + }; + + let func_ty = match candidate { + Candidate::InherentMethod(func_def) => { + let func_ty = TyId::func(self.db, func_def); + self.table.instantiate_to_term(func_ty) + } + + Candidate::TraitMethod(cand) => { + let inst = canonical_r_ty.extract_solution(&mut self.table, cand.inst); + let trait_method = cand.method; + trait_method.instantiate_with_inst(&mut self.table, receiver_prop.ty, inst) + } + + Candidate::NeedsConfirmation(cand) => { + let inst = canonical_r_ty.extract_solution(&mut self.table, cand.inst); + self.env + .register_confirmation(inst, call_span.clone().into()); + let trait_method = cand.method; + trait_method.instantiate_with_inst(&mut self.table, receiver_prop.ty, inst) + } + }; + + let mut callable = + match Callable::new(self.db, func_ty, receiver.lazy_span(self.body()).into()) { + Ok(callable) => callable, + Err(diag) => { + self.push_diag(diag); + return ExprProp::invalid(self.db); + } + }; + + if !callable.unify_generic_args(self, *generic_args, call_span.generic_args()) { + return ExprProp::invalid(self.db); + } + + callable.check_args( + self, + args, + call_span.args_moved(), + Some((*receiver, receiver_prop)), + 
); + let ret_ty = callable.ret_ty(self.db); + self.env.register_callable(expr, callable); + ExprProp::new(ret_ty, true) + } + + fn check_path(&mut self, expr: ExprId, expr_data: &Expr<'db>) -> ExprProp<'db> { + let Expr::Path(path) = expr_data else { + unreachable!() + }; + + let Partial::Present(path) = path else { + return ExprProp::invalid(self.db); + }; + + let span = expr.lazy_span(self.body()).into_path_expr(); + + let res = if path.is_bare_ident(self.db.as_hir_db()) { + resolve_ident_expr(self.db, &self.env, *path) + } else { + self.resolve_path(*path, true) + .map_or_else(|_| ResolvedPathInBody::Invalid, ResolvedPathInBody::Reso) + }; + + match res { + ResolvedPathInBody::Binding(binding) => { + let ty = self.env.lookup_binding_ty(binding); + let is_mut = binding.is_mut(); + ExprProp::new_binding_ref(ty, is_mut, binding) + } + ResolvedPathInBody::NewBinding(ident) => { + let diag = BodyDiag::UndefinedVariable(span.into(), ident); + self.push_diag(diag); + + ExprProp::invalid(self.db) + } + ResolvedPathInBody::Diag(diag) => { + self.push_diag(diag); + ExprProp::invalid(self.db) + } + ResolvedPathInBody::Invalid => ExprProp::invalid(self.db), + + ResolvedPathInBody::Reso(reso) => match reso { + PathRes::Ty(ty) => { + if let Some(const_ty_ty) = ty.const_ty_ty(self.db) { + ExprProp::new(self.table.instantiate_to_term(const_ty_ty), true) + } else { + let diag = if ty.is_struct(self.db) { + BodyDiag::unit_variant_expected(self.db, span.into(), ty) + } else { + BodyDiag::NotValue { + primary: span.into(), + given: Either::Right(ty), + } + }; + self.push_diag(diag); + + ExprProp::invalid(self.db) + } + } + PathRes::Func(ty) => ExprProp::new(self.table.instantiate_to_term(ty), true), + PathRes::Trait(trait_) => { + let diag = BodyDiag::NotValue { + primary: span.into(), + given: Either::Left(trait_.trait_(self.db).into()), + }; + self.push_diag(diag); + ExprProp::invalid(self.db) + } + PathRes::EnumVariant(variant) => { + let ty = match variant.variant_kind(self.db) 
{ + VariantKind::Unit => variant.ty, + VariantKind::Tuple(_) => self + .select_method_candidate_for_path(variant.ty, *path, span.path()) + .unwrap_or_else(|| TyId::invalid(self.db, InvalidCause::Other)), + VariantKind::Record(_) => { + let diag = BodyDiag::unit_variant_expected( + self.db, + expr.lazy_span(self.body()).into(), + variant, + ); + self.push_diag(diag); + + TyId::invalid(self.db, InvalidCause::Other) + } + }; + + ExprProp::new(self.table.instantiate_to_term(ty), true) + } + PathRes::Const(ty) => ExprProp::new(ty, true), + PathRes::TypeMemberTbd(parent_ty) => { + let ty = self + .select_method_candidate_for_path(parent_ty, *path, span.path()) + .unwrap_or_else(|| TyId::invalid(self.db, InvalidCause::Other)); + ExprProp::new(self.table.instantiate_to_term(ty), true) + } + PathRes::Mod(_) | PathRes::FuncParam(..) => todo!(), + }, + } + } + + fn check_record_init(&mut self, expr: ExprId, expr_data: &Expr<'db>) -> ExprProp<'db> { + let Expr::RecordInit(path, ..) = expr_data else { + unreachable!() + }; + let span = expr.lazy_span(self.body()).into_record_init_expr(); + + let Partial::Present(path) = path else { + return ExprProp::invalid(self.db); + }; + + let Ok(reso) = resolve_path(self.db, *path, self.env.scope(), true) else { + return ExprProp::invalid(self.db); + }; + + match reso { + PathRes::Ty(ty) if ty.is_record(self.db) => { + let ty = self.table.instantiate_to_term(ty); + self.check_record_init_fields(ty, expr); + ExprProp::new(ty, true) + } + + PathRes::Ty(ty) | PathRes::Func(ty) | PathRes::Const(ty) => { + let diag = BodyDiag::record_expected(self.db, span.path().into(), Some(ty)); + self.push_diag(diag); + ExprProp::invalid(self.db) + } + PathRes::TypeMemberTbd(_) | PathRes::FuncParam(..) 
=> { + let diag = BodyDiag::record_expected::(self.db, span.path().into(), None); + self.push_diag(diag); + ExprProp::invalid(self.db) + } + + PathRes::EnumVariant(variant) => { + if variant.is_record(self.db) { + let ty = self.table.instantiate_to_term(variant.ty); + self.check_record_init_fields(variant, expr); + ExprProp::new(ty, true) + } else { + let diag = + BodyDiag::record_expected::>(self.db, span.path().into(), None); + self.push_diag(diag); + + ExprProp::invalid(self.db) + } + } + PathRes::Mod(scope) => { + let diag = BodyDiag::NotValue { + primary: span.into(), + given: Either::Left(scope.item()), + }; + self.push_diag(diag); + ExprProp::invalid(self.db) + } + PathRes::Trait(trait_) => { + let diag = BodyDiag::NotValue { + primary: span.into(), + given: Either::Left(trait_.trait_(self.db).into()), + }; + self.push_diag(diag); + ExprProp::invalid(self.db) + } + } + } + + fn check_record_init_fields>(&mut self, mut record_like: T, expr: ExprId) { + let hir_db = self.db.as_hir_db(); + + let Partial::Present(Expr::RecordInit(_, fields)) = expr.data(hir_db, self.body()) else { + unreachable!() + }; + let span = expr.lazy_span(self.body()).into_record_init_expr(); + + let mut rec_checker = RecordInitChecker::new(self, &mut record_like); + + for (i, field) in fields.iter().enumerate() { + let label = field.label_eagerly(rec_checker.tc.db.as_hir_db(), rec_checker.tc.body()); + let field_span = span.fields().field(i).into(); + + let expected = match rec_checker.feed_label(label, field_span) { + Ok(ty) => ty, + Err(diag) => { + rec_checker.tc.push_diag(diag); + TyId::invalid(rec_checker.tc.db, InvalidCause::Other) + } + }; + + rec_checker.tc.check_expr(field.expr, expected); + } + + if let Err(diag) = rec_checker.finalize(span.fields().into(), false) { + self.push_diag(diag); + } + } + + fn check_field(&mut self, expr: ExprId, expr_data: &Expr<'db>) -> ExprProp<'db> { + let Expr::Field(lhs, index) = expr_data else { + unreachable!() + }; + let 
Partial::Present(field) = index else { + return ExprProp::invalid(self.db); + }; + + let lhs_ty = self.fresh_ty(); + let typed_lhs = self.check_expr(*lhs, lhs_ty); + let lhs_ty = typed_lhs.ty; + let (ty_base, ty_args) = lhs_ty.decompose_ty_app(self.db); + + if ty_base.has_invalid(self.db) { + return ExprProp::invalid(self.db); + } + + if ty_base.is_ty_var(self.db) { + let diag = BodyDiag::TypeMustBeKnown(lhs.lazy_span(self.body()).into()); + self.push_diag(diag); + return ExprProp::invalid(self.db); + } + + match field { + FieldIndex::Ident(label) => { + if let Some(field_ty) = lhs_ty.record_field_ty(self.db, *label) { + if let Some(scope) = lhs_ty.record_field_scope(self.db, *label) { + if !is_scope_visible_from(self.db, scope, self.env.scope()) { + // Check the visibility of the field. + let diag = NameResDiag::Invisible( + expr.lazy_span(self.body()) + .into_field_expr() + .accessor() + .into(), + *label, + scope.name_span(self.db.as_hir_db()), + ); + + self.push_diag(diag); + return ExprProp::invalid(self.db); + } + } + return ExprProp::new(field_ty, typed_lhs.is_mut); + } + } + + FieldIndex::Index(i) => { + let arg_len = ty_args.len().into(); + if ty_base.is_tuple(self.db) && i.data(self.db.as_hir_db()) < &arg_len { + let i: usize = i.data(self.db.as_hir_db()).try_into().unwrap(); + let ty = ty_args[i]; + return ExprProp::new(ty, typed_lhs.is_mut); + } + } + }; + + let diag = BodyDiag::accessed_field_not_found( + self.db, + expr.lazy_span(self.body()).into(), + lhs_ty, + *field, + ); + self.push_diag(diag); + + ExprProp::invalid(self.db) + } + + fn check_tuple( + &mut self, + _expr: ExprId, + expr_data: &Expr<'db>, + expected: TyId<'db>, + ) -> ExprProp<'db> { + let Expr::Tuple(elems) = expr_data else { + unreachable!() + }; + + let elem_tys = match expected.decompose_ty_app(self.db) { + (base, args) if base.is_tuple(self.db) && args.len() == elems.len() => args.to_vec(), + _ => self.fresh_tys_n(elems.len()), + }; + + for (elem, elem_ty) in 
elems.iter().zip(elem_tys.iter()) { + self.check_expr(*elem, *elem_ty); + } + + let ty = TyId::tuple_with_elems(self.db, &elem_tys); + ExprProp::new(ty, true) + } + + fn check_index(&mut self, expr: ExprId, expr_data: &Expr<'db>) -> ExprProp<'db> { + let Expr::Index(lhs, index) = expr_data else { + unreachable!() + }; + + let lhs_ty = self.fresh_ty(); + let typed_lhs = self.check_expr(*lhs, lhs_ty); + let lhs_ty = typed_lhs.ty; + let (lhs_base, args) = lhs_ty.decompose_ty_app(self.db); + + if lhs_base.is_ty_var(self.db) { + let diag = BodyDiag::TypeMustBeKnown(lhs.lazy_span(self.body()).into()); + self.push_diag(diag); + return ExprProp::invalid(self.db); + } + + if lhs_base.has_invalid(self.db) { + return ExprProp::invalid(self.db); + } + + if lhs_base.is_array(self.db) { + let elem_ty = args[0]; + let index_ty = args[1].const_ty_ty(self.db).unwrap(); + self.check_expr(*index, index_ty); + return ExprProp::new(elem_ty, typed_lhs.is_mut); + } + + // TODO: We need to check if the type implements the `Index` trait when `Index` + // is defined in `std`. 
+ let diag = BodyDiag::ops_trait_not_implemented( + self.db, + expr.lazy_span(self.body()).into(), + lhs_ty, + IndexingOp {}, + ); + self.push_diag(diag); + ExprProp::invalid(self.db) + } + + fn check_array( + &mut self, + _expr: ExprId, + expr_data: &Expr<'db>, + expected: TyId<'db>, + ) -> ExprProp<'db> { + let Expr::Array(elems) = expr_data else { + unreachable!() + }; + + let mut expected_elem_ty = match expected.decompose_ty_app(self.db) { + (base, args) if base.is_array(self.db) => args[0], + _ => self.fresh_ty(), + }; + + for elem in elems { + expected_elem_ty = self.check_expr(*elem, expected_elem_ty).ty; + } + + let ty = TyId::array_with_len(self.db, expected_elem_ty, elems.len()); + ExprProp::new(ty, true) + } + + fn check_array_rep( + &mut self, + _expr: ExprId, + expr_data: &Expr<'db>, + expected: TyId<'db>, + ) -> ExprProp<'db> { + let Expr::ArrayRep(elem, len) = expr_data else { + unreachable!() + }; + + let mut expected_elem_ty = match expected.decompose_ty_app(self.db) { + (base, args) if base.is_array(self.db) => args[0], + _ => self.fresh_ty(), + }; + + expected_elem_ty = self.check_expr(*elem, expected_elem_ty).ty; + + let array = TyId::array(self.db, expected_elem_ty); + let ty = if let Some(len_body) = len.to_opt() { + let len_ty = ConstTyId::from_body(self.db, len_body); + let len_ty = TyId::const_ty(self.db, len_ty); + let array_ty = TyId::app(self.db, array, len_ty); + + if let Some(diag) = array_ty.emit_diag(self.db, len_body.lazy_span().into()) { + self.push_diag(diag); + } + + array_ty + } else { + let len_ty = ConstTyId::invalid(self.db, InvalidCause::Other); + let len_ty = TyId::const_ty(self.db, len_ty); + TyId::app(self.db, array, len_ty) + }; + + ExprProp::new(ty, true) + } + + fn check_if(&mut self, _expr: ExprId, expr_data: &Expr<'db>) -> ExprProp<'db> { + let Expr::If(cond, then, else_) = expr_data else { + unreachable!() + }; + + self.check_expr(*cond, TyId::bool(self.db)); + + let if_ty = self.fresh_ty(); + let ty = match else_ 
{ + Some(else_) => { + self.check_expr_in_new_scope(*then, if_ty); + self.check_expr_in_new_scope(*else_, if_ty).ty + } + + None => { + // If there is no else branch, the if expression itself typed as `()` + self.check_expr_in_new_scope(*then, if_ty); + TyId::unit(self.db) + } + }; + + ExprProp::new(ty, true) + } + + fn check_match(&mut self, _expr: ExprId, expr_data: &Expr<'db>) -> ExprProp<'db> { + let Expr::Match(scrutinee, arms) = expr_data else { + unreachable!() + }; + + let scrutinee_ty = self.fresh_ty(); + let scrutinee_ty = self.check_expr(*scrutinee, scrutinee_ty).ty; + + let Partial::Present(arms) = arms else { + return ExprProp::invalid(self.db); + }; + + let mut match_ty = self.fresh_ty(); + for arm in arms { + self.check_pat(arm.pat, scrutinee_ty); + + self.env.enter_scope(arm.body); + self.env.flush_pending_bindings(); + + match_ty = self.check_expr(arm.body, match_ty).ty; + + self.env.leave_scope(); + } + + ExprProp::new(match_ty, true) + } + + fn check_assign(&mut self, _expr: ExprId, expr_data: &Expr<'db>) -> ExprProp<'db> { + let Expr::Assign(lhs, rhs) = expr_data else { + unreachable!() + }; + + let lhs_ty = self.fresh_ty(); + let typed_lhs = self.check_expr(*lhs, lhs_ty); + self.check_expr(*rhs, lhs_ty); + + let result_ty = TyId::unit(self.db); + + self.check_assign_lhs(*lhs, &typed_lhs); + + ExprProp::new(result_ty, true) + } + + fn check_aug_assign(&mut self, expr: ExprId, expr_data: &Expr<'db>) -> ExprProp<'db> { + use ArithBinOp::*; + + let Expr::AugAssign(lhs, rhs, op) = expr_data else { + unreachable!() + }; + + let unit_ty = TyId::unit(self.db); + + let lhs_ty = self.fresh_ty(); + let typed_lhs = self.check_expr(*lhs, lhs_ty); + let lhs_ty = typed_lhs.ty; + if lhs_ty.has_invalid(self.db) { + return ExprProp::new(unit_ty, true); + } + + match op { + Add | Sub | Mul | Div | Rem | Pow | LShift | RShift => { + self.check_expr(*rhs, lhs_ty); + if lhs_ty.is_integral(self.db) { + self.check_assign_lhs(*lhs, &typed_lhs); + return 
ExprProp::new(unit_ty, true); + } + } + + BitAnd | BitOr | BitXor => { + self.check_expr(*rhs, lhs_ty); + if lhs_ty.is_integral(self.db) | lhs_ty.is_bool(self.db) { + self.check_assign_lhs(*lhs, &typed_lhs); + return ExprProp::new(unit_ty, true); + } + } + } + + let lhs_base_ty = lhs_ty.base_ty(self.db); + if lhs_base_ty.is_ty_var(self.db) { + let diag = BodyDiag::TypeMustBeKnown(lhs.lazy_span(self.body()).into()); + self.push_diag(diag); + return ExprProp::invalid(self.db); + } + + // TODO: We need to check if the type implements a trait corresponding to the + // operator when these traits are defined in `std`. + let diag = BodyDiag::ops_trait_not_implemented( + self.db, + expr.lazy_span(self.body()).into(), + lhs_ty, + AugAssignOp(*op), + ); + self.push_diag(diag); + + ExprProp::invalid(self.db) + } + + fn check_assign_lhs(&mut self, lhs: ExprId, typed_lhs: &ExprProp<'db>) { + if !self.is_assignable_expr(lhs) { + let diag = BodyDiag::NonAssignableExpr(lhs.lazy_span(self.body()).into()); + self.push_diag(diag); + + return; + } + + if !typed_lhs.is_mut { + let binding = self.find_base_binding(lhs); + let diag = match binding { + Some(binding) => { + let (ident, def_span) = ( + self.env.binding_name(binding), + self.env.binding_def_span(binding), + ); + + BodyDiag::ImmutableAssignment { + primary: lhs.lazy_span(self.body()).into(), + binding: Some((ident, def_span)), + } + } + + None => BodyDiag::ImmutableAssignment { + primary: lhs.lazy_span(self.body()).into(), + binding: None, + }, + }; + + self.push_diag(diag); + } + } + + fn check_expr_in_new_scope(&mut self, expr: ExprId, expected: TyId<'db>) -> ExprProp<'db> { + self.env.enter_scope(expr); + let ty = self.check_expr(expr, expected); + self.env.leave_scope(); + + ty + } + + fn select_method_candidate_for_path( + &mut self, + receiver_ty: TyId<'db>, + path: PathId<'db>, + span: LazyPathSpan<'db>, + ) -> Option> { + let db = self.db; + let hir_db = self.db.as_hir_db(); + + let name = 
*path.ident(hir_db).unwrap(); + let canonical_r_ty = Canonicalized::new(db, receiver_ty); + let candidate = match select_method_candidate( + db, + (canonical_r_ty.value, span.clone().into()), + (name, span.segment(path.segment_index(hir_db)).into()), + self.env.scope(), + self.env.assumptions(), + ) { + Ok(candidate) => candidate, + Err(diag) => { + self.diags.push(diag); + return None; + } + }; + + let trait_cand = match candidate { + Candidate::InherentMethod(func_def) => { + let mut method_ty = TyId::func(db, func_def); + + for &arg in receiver_ty.generic_args(db) { + // If the method is defined in "specialized" impl block + // of a generic type (eg `impl Option`), then + // calling `TyId::app(db, method_ty, ..)` will result in + // `TyId::invalid`. + if method_ty.applicable_ty(db).is_some() { + method_ty = TyId::app(db, method_ty, arg); + } else { + break; + } + } + + return Some(self.table.instantiate_to_term(method_ty)); + } + + Candidate::TraitMethod(cand) | Candidate::NeedsConfirmation(cand) => cand, + }; + + let method = trait_cand.method; + let inst = canonical_r_ty.extract_solution(&mut self.table, trait_cand.inst); + + if matches!(candidate, Candidate::NeedsConfirmation(_)) { + self.env.register_confirmation(inst, span.clone().into()); + } + + let method_ty = method.instantiate_with_inst(&mut self.table, receiver_ty, inst); + Some(self.table.instantiate_to_term(method_ty)) + } + + /// Returns the base binding for a given expression if it exists. + /// + /// This function traverses the expression tree to find the base binding, + /// which is the original variable or binding that the expression refers to. + /// + /// # Parameters + /// + /// - `expr`: The expression ID for which to find the base binding. + /// + /// # Returns + /// + /// An `Option` containing the `LocalBinding` if a base binding is found, + /// or `None` if there is no base binding. 
+ fn find_base_binding(&self, expr: ExprId) -> Option> { + let Partial::Present(expr_data) = self.env.expr_data(expr) else { + return None; + }; + + match expr_data { + Expr::Field(lhs, ..) | Expr::Index(lhs, ..) => self.find_base_binding(*lhs), + Expr::Path(..) => self.env.typed_expr(expr)?.binding(), + _ => None, + } + } + + /// Returns `true`` if the expression can be used as an left hand side of an + /// assignment. + /// This method doesn't take mutability into account. + fn is_assignable_expr(&self, expr: ExprId) -> bool { + let Partial::Present(expr_data) = expr.data(self.db.as_hir_db(), self.body()) else { + return false; + }; + + matches!( + expr_data, + Expr::Path(..) | Expr::Field(..) | Expr::Index(..) + ) + } +} + +fn resolve_ident_expr<'db>( + db: &'db dyn HirAnalysisDb, + env: &TyCheckEnv<'db>, + path: PathId<'db>, +) -> ResolvedPathInBody<'db> { + let ident = *path.ident(db.as_hir_db()).unwrap(); + + let resolve_bucket = |bucket: &NameResBucket<'db>, scope| { + let Ok(res) = bucket.pick_any(&[NameDomain::VALUE, NameDomain::TYPE]) else { + return ResolvedPathInBody::Invalid; + }; + let Ok(reso) = resolve_name_res(db, res, None, path, scope) else { + return ResolvedPathInBody::Invalid; + }; + ResolvedPathInBody::Reso(reso) + }; + + let mut current_idx = env.current_block_idx(); + + loop { + let block = env.get_block(current_idx); + if let Some(binding) = block.lookup_var(ident) { + return ResolvedPathInBody::Binding(binding); + } + + let scope = block.scope; + let directive = QueryDirective::new().disallow_lex(); + let query = EarlyNameQueryId::new(db, ident, scope, directive); + let bucket = resolve_query(db, query); + + let resolved = resolve_bucket(bucket, scope); + match resolved { + ResolvedPathInBody::Invalid => { + if current_idx == 0 { + break; + } else { + current_idx -= 1; + } + } + _ => return resolved, + } + } + + let query = EarlyNameQueryId::new(db, ident, env.body().scope(), QueryDirective::default()); + let bucket = resolve_query(db, 
query); + match resolve_bucket(bucket, env.scope()) { + ResolvedPathInBody::Invalid => ResolvedPathInBody::NewBinding(ident), + r => r, + } +} + +/// This traits are intended to be implemented by the operators that can work as +/// a syntax sugar for a trait method. For example, binary `+` operator +/// implements this trait to be able to work as a syntax sugar for +/// `std::ops::Add` trait method. +/// +/// TODO: We need to refine this trait definition to connect std library traits +/// smoothly. +pub(crate) trait TraitOps { + fn trait_path<'db>(&self, db: &'db dyn HirAnalysisDb) -> PathId<'db> { + let hir_db = db.as_hir_db(); + let path = std_ops_path(db); + path.push( + hir_db, + Partial::Present(self.trait_name(db)), + GenericArgListId::none(hir_db), + ) + } + + fn trait_name<'db>(&self, db: &'db dyn HirAnalysisDb) -> IdentId<'db> { + self.triple(db)[0] + } + + fn op_symbol<'db>(&self, db: &'db dyn HirAnalysisDb) -> IdentId<'db> { + self.triple(db)[2] + } + + fn triple<'db>(&self, db: &'db dyn HirAnalysisDb) -> [IdentId<'db>; 3]; +} + +impl TraitOps for UnOp { + fn triple<'db>(&self, db: &'db dyn HirAnalysisDb) -> [IdentId<'db>; 3] { + let triple = match self { + UnOp::Plus => ["UnaryPlus", "add", "+"], + UnOp::Minus => ["Neg", "neg", "-"], + UnOp::Not => ["Not", "not", "!"], + UnOp::BitNot => ["BitNot", "bit_not", "~"], + }; + + triple.map(|s| IdentId::new(db.as_hir_db(), s.to_string())) + } +} + +impl TraitOps for BinOp { + fn triple<'db>(&self, db: &'db dyn HirAnalysisDb) -> [IdentId<'db>; 3] { + let triple = match self { + BinOp::Arith(arith_op) => { + use ArithBinOp::*; + + match arith_op { + Add => ["Add", "add", "+"], + Sub => ["Sub", "sub", "-"], + Mul => ["Mul", "mul", "*"], + Div => ["Div", "div", "/"], + Rem => ["Rem", "rem", "%"], + Pow => ["Pow", "pow", "**"], + LShift => ["Shl", "shl", "<<"], + RShift => ["Shr", "shr", ">>"], + BitAnd => ["BitAnd", "bitand", "&"], + BitOr => ["BitOr", "bitor", "|"], + BitXor => ["BitXor", "bitxor", "^"], + } + } 
+ + BinOp::Comp(comp_op) => { + use hir::hir_def::CompBinOp::*; + + // Comp + match comp_op { + Eq => ["Eq", "eq", "=="], + NotEq => ["Eq", "ne", "!="], + Lt => ["Ord", "lt", "<"], + LtEq => ["Ord", "le", "<="], + Gt => ["Ord", "gt", ">"], + GtEq => ["Ord", "ge", ">="], + } + } + + BinOp::Logical(logical_op) => { + use hir::hir_def::LogicalBinOp::*; + + match logical_op { + And => ["And", "and", "&&"], + Or => ["Or", "or", "||"], + } + } + }; + + triple.map(|s| IdentId::new(db.as_hir_db(), s.to_string())) + } +} + +struct IndexingOp {} + +impl TraitOps for IndexingOp { + fn triple<'db>(&self, db: &'db dyn HirAnalysisDb) -> [IdentId<'db>; 3] { + let name = "Index"; + let method_name = "index"; + let symbol = "[]"; + + [ + IdentId::new(db.as_hir_db(), name.to_string()), + IdentId::new(db.as_hir_db(), method_name.to_string()), + IdentId::new(db.as_hir_db(), symbol.to_string()), + ] + } +} + +struct AugAssignOp(ArithBinOp); + +impl TraitOps for AugAssignOp { + fn triple<'db>(&self, db: &'db dyn HirAnalysisDb) -> [IdentId<'db>; 3] { + use ArithBinOp::*; + let triple = match self.0 { + Add => ["AddAssign", "add_assign", "+="], + Sub => ["SubAssign", "sub_assign", "-="], + Mul => ["MulAssign", "mul_assign", "*="], + Div => ["DivAssign", "div_assign", "/="], + Rem => ["RemAssign", "rem_assign", "%="], + Pow => ["PowAssign", "pow_assign", "**="], + LShift => ["ShlAssign", "shl_assign", "<<="], + RShift => ["ShrAssign", "shr_assign", ">>="], + BitAnd => ["BitAndAssign", "bitand_assign", "&="], + BitOr => ["BitOrAssign", "bitor_assign", "|="], + BitXor => ["BitXorAssign", "bitxor_assign", "^="], + }; + + triple.map(|s| IdentId::new(db.as_hir_db(), s.to_string())) + } +} + +fn std_ops_path(db: &dyn HirAnalysisDb) -> PathId { + let db = db.as_hir_db(); + let std_ = IdentId::new(db, "std".to_string()); + let ops_ = IdentId::new(db, "ops".to_string()); + PathId::from_ident(db, std_).push_ident(db, ops_) +} diff --git a/crates/hir-analysis/src/ty/ty_check/method_selection.rs 
b/crates/hir-analysis/src/ty/ty_check/method_selection.rs new file mode 100644 index 0000000000..2971b560e3 --- /dev/null +++ b/crates/hir-analysis/src/ty/ty_check/method_selection.rs @@ -0,0 +1,406 @@ +use common::indexmap::IndexSet; +use either::Either; +use hir::{ + hir_def::{scope_graph::ScopeId, IdentId, Trait}, + span::DynLazySpan, +}; +use itertools::Itertools; +use rustc_hash::FxHashSet; + +use crate::{ + name_resolution::{available_traits_in_scope, diagnostics::NameResDiag, is_scope_visible_from}, + ty::{ + canonical::{Canonical, Canonicalized, Solution}, + diagnostics::{BodyDiag, FuncBodyDiag}, + fold::TyFoldable, + func_def::FuncDef, + method_table::probe_method, + trait_def::{impls_for_ty, TraitDef, TraitInstId, TraitMethod}, + trait_lower::lower_trait, + trait_resolution::{is_goal_satisfiable, GoalSatisfiability, PredicateListId}, + ty_def::TyId, + unify::UnificationTable, + }, + HirAnalysisDb, +}; + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub(super) enum Candidate<'db> { + InherentMethod(FuncDef<'db>), + TraitMethod(TraitMethodCand<'db>), + NeedsConfirmation(TraitMethodCand<'db>), +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub(super) struct TraitMethodCand<'db> { + pub(super) inst: Solution>, + pub(super) method: TraitMethod<'db>, +} + +impl<'db> TraitMethodCand<'db> { + fn new(inst: Solution>, method: TraitMethod<'db>) -> Self { + Self { inst, method } + } +} + +pub(super) fn select_method_candidate<'db>( + db: &'db dyn HirAnalysisDb, + receiver: (Canonical>, DynLazySpan<'db>), + method_name: (IdentId<'db>, DynLazySpan<'db>), + scope: ScopeId<'db>, + assumptions: PredicateListId<'db>, +) -> Result, FuncBodyDiag<'db>> { + if receiver.0.value.is_ty_var(db) { + return Err(BodyDiag::TypeMustBeKnown(method_name.1).into()); + } + + let candidates = assemble_method_candidates(db, receiver.0, method_name.0, scope, assumptions); + + let selector = MethodSelector { + db, + receiver: receiver.0, + scope, + candidates, + assumptions, + }; + 
+ match selector.select() { + Ok(candidate) => Ok(candidate), + + Err(MethodSelectionError::AmbiguousInherentMethod(cands)) => { + let cand_spans = cands.into_iter().map(|cand| cand.name_span(db)).collect(); + let diag = BodyDiag::AmbiguousInherentMethodCall { + primary: method_name.1, + method_name: method_name.0, + cand_spans, + }; + + Err(diag.into()) + } + + Err(MethodSelectionError::AmbiguousTraitMethod(traits)) => { + let traits = traits.into_iter().map(|def| def.trait_(db)).collect(); + + let diag = BodyDiag::AmbiguousTrait { + primary: method_name.1, + method_name: method_name.0, + traits, + }; + + Err(diag.into()) + } + + Err(MethodSelectionError::NotFound) => { + let base_ty = receiver.0.value.base_ty(db); + let diag = + BodyDiag::method_not_found(db, method_name.1, method_name.0, Either::Left(base_ty)); + Err(diag.into()) + } + + Err(MethodSelectionError::InvisibleInherentMethod(func)) => { + let diag = + NameResDiag::Invisible(method_name.1, method_name.0, func.name_span(db).into()); + Err(diag.into()) + } + + Err(MethodSelectionError::InvisibleTraitMethod(traits)) => { + let diag = BodyDiag::InvisibleAmbiguousTrait { + primary: method_name.1, + traits, + }; + Err(diag.into()) + } + } +} + +fn assemble_method_candidates<'db>( + db: &'db dyn HirAnalysisDb, + receiver_ty: Canonical>, + method_name: IdentId<'db>, + scope: ScopeId<'db>, + assumptions: PredicateListId<'db>, +) -> AssembledCandidates<'db> { + CandidateAssembler { + db, + receiver_ty, + method_name, + scope, + assumptions, + candidates: AssembledCandidates::default(), + } + .assemble() +} + +struct CandidateAssembler<'db> { + db: &'db dyn HirAnalysisDb, + /// The type that method is being called on. + receiver_ty: Canonical>, + /// The name of the method being called. + method_name: IdentId<'db>, + /// The scope that candidates are being assembled in. + scope: ScopeId<'db>, + /// The assumptions for the type bound in the current scope. 
+ assumptions: PredicateListId<'db>, + candidates: AssembledCandidates<'db>, +} + +impl<'db> CandidateAssembler<'db> { + fn assemble(mut self) -> AssembledCandidates<'db> { + self.assemble_inherent_method_candidates(); + self.assemble_trait_method_candidates(); + self.candidates + } + + fn assemble_inherent_method_candidates(&mut self) { + let ingot = self.scope.ingot(self.db.as_hir_db()); + for &method in probe_method(self.db, ingot, self.receiver_ty, self.method_name) { + self.candidates.insert_inherent_method(method); + } + } + + fn assemble_trait_method_candidates(&mut self) { + let ingot = self.scope.ingot(self.db.as_hir_db()); + let mut table = UnificationTable::new(self.db); + let extracted_receiver_ty = self.receiver_ty.extract_identity(&mut table); + + for &implementor in impls_for_ty(self.db, ingot, self.receiver_ty) { + let trait_def = implementor.skip_binder().trait_def(self.db); + self.insert_trait_method_cand(trait_def) + } + + for &pred in self.assumptions.list(self.db) { + let snapshot = table.snapshot(); + let self_ty = pred.self_ty(self.db); + let self_ty = table.instantiate_to_term(self_ty); + + if table.unify(extracted_receiver_ty, self_ty).is_ok() { + self.insert_trait_method_cand(pred.def(self.db)); + for super_trait in pred.def(self.db).super_traits(self.db) { + let super_trait = super_trait.instantiate(self.db, pred.args(self.db)); + self.insert_trait_method_cand(super_trait.def(self.db)); + } + } + + table.rollback_to(snapshot); + } + } + + fn insert_trait_method_cand(&mut self, trait_def: TraitDef<'db>) { + if let Some(&trait_method) = trait_def.methods(self.db).get(&self.method_name) { + self.candidates.insert_trait(trait_def, trait_method); + } + } +} + +struct MethodSelector<'db> { + db: &'db dyn HirAnalysisDb, + receiver: Canonical>, + scope: ScopeId<'db>, + candidates: AssembledCandidates<'db>, + assumptions: PredicateListId<'db>, +} + +impl<'db> MethodSelector<'db> { + fn select(self) -> Result, MethodSelectionError<'db>> { + if let 
Some(res) = self.select_inherent_method() { + return res; + } + + self.select_trait_methods() + } + + fn select_inherent_method(&self) -> Option, MethodSelectionError<'db>>> { + let inherent_methods = &self.candidates.inherent_methods; + let visible_inherent_methods: Vec<_> = inherent_methods + .iter() + .copied() + .filter(|cand| self.is_inherent_method_visible(*cand)) + .collect(); + + match visible_inherent_methods.len() { + 0 => { + if inherent_methods.is_empty() { + None + } else { + Some(Err(MethodSelectionError::InvisibleInherentMethod( + *inherent_methods.iter().next().unwrap(), + ))) + } + } + 1 => Some(Ok(Candidate::InherentMethod(visible_inherent_methods[0]))), + + _ => Some(Err(MethodSelectionError::AmbiguousInherentMethod( + inherent_methods.iter().copied().collect(), + ))), + } + } + + /// Selects the most appropriate trait method candidate. + /// + /// This function checks the available trait method candidates and attempts + /// to find the best match. If there is only one candidate, it is returned. + /// If there are multiple candidates, it checks for visibility and + /// ambiguity. + /// + /// **NOTE**: If there is no ambiguity, the trait does not need to be + /// visible. + /// + /// # Returns + /// + /// * `Ok(Candidate)` - The selected method candidate. + /// * `Err(MethodSelectionError)` - An error indicating the reason for + /// failure. + fn select_trait_methods(&self) -> Result, MethodSelectionError<'db>> { + let traits = &self.candidates.traits; + + if traits.len() == 1 { + let (def, method) = traits.iter().next().unwrap(); + return Ok(self.find_inst(*def, *method)); + } + + let available_traits = self.available_traits(); + let visible_traits: Vec<_> = traits + .iter() + .copied() + .filter(|cand| available_traits.contains(&cand.0)) + .collect(); + + match visible_traits.len() { + 0 => { + if traits.is_empty() { + Err(MethodSelectionError::NotFound) + } else { + // Suggests trait imports. 
+ let traits = traits.iter().map(|(def, _)| def.trait_(self.db)).collect(); + Err(MethodSelectionError::InvisibleTraitMethod(traits)) + } + } + + 1 => { + let (def, method) = visible_traits[0]; + Ok(self.find_inst(def, method)) + } + + _ => Err(MethodSelectionError::AmbiguousTraitMethod( + visible_traits.into_iter().map(|cand| cand.0).collect(), + )), + } + } + + /// Finds an instance of a trait method for the given trait definition and + /// method. + /// + /// This function attempts to unify the receiver type with the method's self + /// type, and assigns type variables to the trait parameters. It then + /// checks if the goal is satisfiable given the current assumptions. + /// Depending on the result, it either returns a confirmed trait method + /// candidate or one that needs further confirmation. + /// + /// # Arguments + /// + /// * `def` - The trait definition. + /// * `method` - The trait method. + /// + /// # Returns + /// + /// A `Candidate` representing the found trait method instance. + fn find_inst(&self, def: TraitDef<'db>, method: TraitMethod<'db>) -> Candidate<'db> { + let mut table = UnificationTable::new(self.db); + let receiver = self.receiver.extract_identity(&mut table); + + // Assign type variables to trait parameters. + let inst_args = def + .params(self.db) + .iter() + .map(|ty| table.new_var_from_param(*ty)) + .collect_vec(); + + let cand = TraitInstId::new(self.db, def, inst_args); + // Unify receiver and method self. + method.instantiate_with_inst(&mut table, receiver, cand); + + let cand = cand.fold_with(&mut table); + let canonical_cand = Canonicalized::new(self.db, cand); + + match is_goal_satisfiable( + self.db, + self.scope.ingot(self.db.as_hir_db()), + canonical_cand.value, + self.assumptions, + ) { + GoalSatisfiability::Satisfied(solution) => { + // Map back the solution to the current context. + let solution = canonical_cand.extract_solution(&mut table, *solution); + + // Unify candidate to solution. 
+ table.unify(cand, solution).unwrap(); + + Candidate::TraitMethod(TraitMethodCand::new( + self.receiver + .canonicalize_solution(self.db, &mut table, cand), + method, + )) + } + + &GoalSatisfiability::NeedsConfirmation(_) + | GoalSatisfiability::ContainsInvalid + | GoalSatisfiability::UnSat(_) => Candidate::NeedsConfirmation(TraitMethodCand::new( + self.receiver + .canonicalize_solution(self.db, &mut table, cand), + method, + )), + } + } + + fn is_inherent_method_visible(&self, def: FuncDef) -> bool { + is_scope_visible_from(self.db, def.scope(self.db), self.scope) + } + + fn available_traits(&self) -> FxHashSet> { + let mut traits = FxHashSet::default(); + + let mut insert_trait = |trait_def: TraitDef<'db>| { + traits.insert(trait_def); + + for trait_ in trait_def.super_traits(self.db) { + traits.insert(trait_.skip_binder().def(self.db)); + } + }; + + for &trait_ in available_traits_in_scope(self.db, self.scope) { + let trait_def = lower_trait(self.db, trait_); + insert_trait(trait_def); + } + + for pred in self.assumptions.list(self.db) { + let trait_def = pred.def(self.db); + insert_trait(trait_def) + } + + traits + } +} + +pub enum MethodSelectionError<'db> { + AmbiguousInherentMethod(Vec>), + AmbiguousTraitMethod(Vec>), + NotFound, + InvisibleInherentMethod(FuncDef<'db>), + InvisibleTraitMethod(Vec>), +} + +#[derive(Default)] +struct AssembledCandidates<'db> { + inherent_methods: FxHashSet>, + traits: IndexSet<(TraitDef<'db>, TraitMethod<'db>)>, +} + +impl<'db> AssembledCandidates<'db> { + fn insert_inherent_method(&mut self, method: FuncDef<'db>) { + self.inherent_methods.insert(method); + } + + fn insert_trait(&mut self, def: TraitDef<'db>, method: TraitMethod<'db>) { + self.traits.insert((def, method)); + } +} diff --git a/crates/hir-analysis/src/ty/ty_check/mod.rs b/crates/hir-analysis/src/ty/ty_check/mod.rs new file mode 100644 index 0000000000..b8b52588a8 --- /dev/null +++ b/crates/hir-analysis/src/ty/ty_check/mod.rs @@ -0,0 +1,407 @@ +mod callable; 
+mod env; +mod expr; +mod method_selection; +mod pat; +mod path; +mod stmt; + +pub use callable::Callable; +pub use env::ExprProp; +use env::TyCheckEnv; +pub(super) use expr::TraitOps; +use hir::{ + hir_def::{Body, Expr, ExprId, Func, LitKind, Pat, PatId, PathId, TypeId as HirTyId}, + span::{expr::LazyExprSpan, pat::LazyPatSpan, DynLazySpan}, + visitor::{walk_expr, walk_pat, Visitor, VisitorCtxt}, +}; +pub(super) use path::RecordLike; + +use rustc_hash::{FxHashMap, FxHashSet}; + +use super::{ + diagnostics::{BodyDiag, FuncBodyDiag, TyDiagCollection, TyLowerDiag}, + fold::{TyFoldable, TyFolder}, + trait_def::{TraitInstId, TraitMethod}, + trait_resolution::PredicateListId, + ty_def::{InvalidCause, Kind, TyId, TyVarSort}, + ty_lower::lower_hir_ty, + unify::{InferenceKey, UnificationError, UnificationTable}, +}; +use crate::{ + name_resolution::{resolve_path, PathRes, PathResError}, + ty::ty_def::{inference_keys, TyFlags}, + HirAnalysisDb, +}; + +#[salsa::tracked(return_ref)] +pub fn check_func_body<'db>( + db: &'db dyn HirAnalysisDb, + func: Func<'db>, +) -> (Vec>, TypedBody<'db>) { + let Ok(mut checker) = TyChecker::new_with_func(db, func) else { + return (Vec::new(), TypedBody::empty()); + }; + + checker.run(); + checker.finish() +} + +pub struct TyChecker<'db> { + db: &'db dyn HirAnalysisDb, + env: TyCheckEnv<'db>, + table: UnificationTable<'db>, + expected: TyId<'db>, + diags: Vec>, +} + +impl<'db> TyChecker<'db> { + fn new_with_func(db: &'db dyn HirAnalysisDb, func: Func<'db>) -> Result { + let env = TyCheckEnv::new_with_func(db, func)?; + let expected_ty = match func.ret_ty(db.as_hir_db()) { + Some(hir_ty) => { + let ty = lower_hir_ty(db, hir_ty, func.scope()); + if ty.is_star_kind(db) { + ty + } else { + TyId::invalid(db, InvalidCause::Other) + } + } + None => TyId::unit(db), + }; + + Ok(Self::new(db, env, expected_ty)) + } + + fn run(&mut self) { + let root_expr = self.env.body().expr(self.db.as_hir_db()); + self.check_expr(root_expr, self.expected); + } + + 
fn finish(self) -> (Vec>, TypedBody<'db>) { + TyCheckerFinalizer::new(self).finish() + } + + fn new(db: &'db dyn HirAnalysisDb, env: TyCheckEnv<'db>, expected: TyId<'db>) -> Self { + let table = UnificationTable::new(db); + Self { + db, + env, + table, + expected, + diags: Vec::new(), + } + } + + fn push_diag(&mut self, diag: impl Into>) { + self.diags.push(diag.into()) + } + + fn body(&self) -> Body<'db> { + self.env.body() + } + + fn lit_ty(&mut self, lit: &LitKind<'db>) -> TyId<'db> { + match lit { + LitKind::Bool(_) => TyId::bool(self.db), + LitKind::Int(_) => self.table.new_var(TyVarSort::Integral, &Kind::Star), + LitKind::String(s) => { + let len_bytes = s.len_bytes(self.db.as_hir_db()); + self.table + .new_var(TyVarSort::String(len_bytes), &Kind::Star) + } + } + } + + fn lower_ty( + &mut self, + hir_ty: HirTyId<'db>, + span: DynLazySpan<'db>, + star_kind_required: bool, + ) -> TyId<'db> { + let ty = lower_hir_ty(self.db, hir_ty, self.env.scope()); + if let Some(diag) = ty.emit_diag(self.db, span.clone()) { + self.push_diag(diag) + } + + if star_kind_required && ty.is_star_kind(self.db) { + ty + } else { + let diag: TyDiagCollection = TyLowerDiag::expected_star_kind_ty(span).into(); + self.push_diag(diag); + TyId::invalid(self.db, InvalidCause::Other) + } + } + + /// Returns the fresh type variable for pattern and expr type checking. The + /// kind of the type variable is `*`, and the sort is `General`. 
+ fn fresh_ty(&mut self) -> TyId<'db> { + self.table.new_var(TyVarSort::General, &Kind::Star) + } + + fn fresh_tys_n(&mut self, n: usize) -> Vec> { + (0..n).map(|_| self.fresh_ty()).collect() + } + + fn unify_ty(&mut self, t: T, actual: TyId<'db>, expected: TyId<'db>) -> TyId<'db> + where + T: Into>, + { + let t = t.into(); + let actual = self.equate_ty(actual, expected, t.lazy_span(self.env.body())); + + match t { + Typeable::Expr(expr, mut typed_expr) => { + typed_expr.swap_ty(actual); + self.env.type_expr(expr, typed_expr) + } + Typeable::Pat(pat) => self.env.type_pat(pat, actual), + } + + actual + } + + fn equate_ty( + &mut self, + actual: TyId<'db>, + expected: TyId<'db>, + span: DynLazySpan<'db>, + ) -> TyId<'db> { + // FIXME: This is a temporary workaround, this should be removed when we + // implement subtyping. + if expected.is_never(self.db) && !actual.is_never(self.db) { + let diag = BodyDiag::type_mismatch(self.db, span, expected, actual); + self.push_diag(diag); + return TyId::invalid(self.db, InvalidCause::Other); + }; + + match self.table.unify(actual, expected) { + Ok(()) => { + // FIXME: This is a temporary workaround, this should be removed when we + // implement subtyping. 
+ let actual = actual.fold_with(&mut self.table); + if actual.is_never(self.db) { + expected + } else { + actual + } + } + + Err(UnificationError::TypeMismatch) => { + let actual = actual.fold_with(&mut self.table); + let expected = expected.fold_with(&mut self.table); + self.push_diag(BodyDiag::type_mismatch(self.db, span, expected, actual)); + TyId::invalid(self.db, InvalidCause::Other) + } + + Err(UnificationError::OccursCheckFailed) => { + self.push_diag(BodyDiag::InfiniteOccurrence(span)); + + TyId::invalid(self.db, InvalidCause::Other) + } + } + } + + fn resolve_path( + &mut self, + path: PathId<'db>, + resolve_tail_as_value: bool, + ) -> Result, PathResError<'db>> { + match resolve_path(self.db, path, self.env.scope(), resolve_tail_as_value) { + Ok(r) => Ok(r.map_over_ty(|ty| self.table.instantiate_to_term(ty))), + Err(err) => Err(err), + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct TypedBody<'db> { + body: Option>, + pat_ty: FxHashMap>, + expr_ty: FxHashMap>, + callables: FxHashMap>, +} + +impl<'db> TypedBody<'db> { + pub fn expr_ty(&self, db: &'db dyn HirAnalysisDb, expr: ExprId) -> TyId<'db> { + self.expr_prop(db, expr).ty + } + + pub fn expr_prop(&self, db: &'db dyn HirAnalysisDb, expr: ExprId) -> ExprProp<'db> { + self.expr_ty + .get(&expr) + .copied() + .unwrap_or_else(|| ExprProp::invalid(db)) + } + + pub fn pat_ty(&self, db: &'db dyn HirAnalysisDb, pat: PatId) -> TyId<'db> { + self.pat_ty + .get(&pat) + .copied() + .unwrap_or_else(|| TyId::invalid(db, InvalidCause::Other)) + } + + pub fn callable_expr(&self, expr: ExprId) -> Option<&Callable<'db>> { + self.callables.get(&expr) + } + + fn empty() -> Self { + Self { + body: None, + pat_ty: FxHashMap::default(), + expr_ty: FxHashMap::default(), + callables: FxHashMap::default(), + } + } +} + +#[derive(Clone, Copy, PartialEq, Eq, derive_more::From)] +enum Typeable<'db> { + Expr(ExprId, ExprProp<'db>), + Pat(PatId), +} + +impl Typeable<'_> { + fn lazy_span(self, body: Body) -> 
DynLazySpan { + match self { + Self::Expr(expr, ..) => expr.lazy_span(body).into(), + Self::Pat(pat) => pat.lazy_span(body).into(), + } + } +} + +impl<'db> TraitMethod<'db> { + fn instantiate_with_inst( + self, + table: &mut UnificationTable<'db>, + receiver_ty: TyId<'db>, + inst: TraitInstId<'db>, + ) -> TyId<'db> { + let db = table.db(); + let ty = TyId::foldl(db, TyId::func(db, self.0), inst.args(db)); + + let inst_self = table.instantiate_to_term(inst.self_ty(db)); + table.unify(inst_self, receiver_ty).unwrap(); + + table.instantiate_to_term(ty) + } +} + +struct TyCheckerFinalizer<'db> { + db: &'db dyn HirAnalysisDb, + body: TypedBody<'db>, + assumptions: PredicateListId<'db>, + ty_vars: FxHashSet>, + diags: Vec>, +} + +impl<'db> Visitor<'db> for TyCheckerFinalizer<'db> { + fn visit_pat( + &mut self, + ctxt: &mut VisitorCtxt<'db, LazyPatSpan<'db>>, + pat: PatId, + _: &Pat<'db>, + ) { + let ty = self.body.pat_ty(self.db, pat); + let span = ctxt.span().unwrap(); + self.check_unknown(ty, span.clone().into()); + + walk_pat(self, ctxt, pat) + } + + fn visit_expr( + &mut self, + ctxt: &mut VisitorCtxt<'db, LazyExprSpan<'db>>, + expr: ExprId, + expr_data: &Expr<'db>, + ) { + // Skip the check if the expr is block. + if !matches!(expr_data, Expr::Block(..)) { + let prop = self.body.expr_prop(self.db, expr); + let span = ctxt.span().unwrap(); + self.check_unknown(prop.ty, span.clone().into()); + if prop.binding.is_none() { + self.check_wf(prop.ty, span.into()); + } + } + + // We need this additional check for method call because the callable type is + // not tied to the expression type. + if let Expr::MethodCall(..) 
= expr_data { + if let Some(callable) = self.body.callable_expr(expr) { + let callable_ty = callable.ty(self.db); + let span = ctxt.span().unwrap().into_method_call_expr().method_name(); + self.check_unknown(callable_ty, span.clone().into()); + self.check_wf(callable_ty, span.into()) + } + } + + walk_expr(self, ctxt, expr); + } + + fn visit_item( + &mut self, + _: &mut VisitorCtxt<'db, hir::visitor::prelude::LazyItemSpan<'db>>, + _: hir::hir_def::ItemKind<'db>, + ) { + } +} + +impl<'db> TyCheckerFinalizer<'db> { + fn new(mut checker: TyChecker<'db>) -> Self { + let assumptions = checker.env.assumptions(); + let body = checker.env.finish(&mut checker.table, &mut checker.diags); + + Self { + db: checker.db, + body, + assumptions, + ty_vars: FxHashSet::default(), + diags: checker.diags, + } + } + + fn finish(mut self) -> (Vec>, TypedBody<'db>) { + self.check_unknown_types(); + (self.diags, self.body) + } + + fn check_unknown_types(&mut self) { + if let Some(body) = self.body.body { + let mut ctxt = VisitorCtxt::with_body(self.db.as_hir_db(), body); + self.visit_body(&mut ctxt, body); + } + } + + fn check_unknown(&mut self, ty: TyId<'db>, span: DynLazySpan<'db>) { + let flags = ty.flags(self.db); + if flags.contains(TyFlags::HAS_INVALID) || !flags.contains(TyFlags::HAS_VAR) { + return; + } + + let mut skip_diag = false; + for key in inference_keys(self.db, &ty) { + // If at least one of the inference keys are already seen, we will skip emitting + // diagnostics. 
+ skip_diag |= !self.ty_vars.insert(key); + } + + if !skip_diag { + let diag = BodyDiag::type_annotation_needed(self.db, span, ty); + self.diags.push(diag.into()) + } + } + + fn check_wf(&mut self, ty: TyId<'db>, span: DynLazySpan<'db>) { + let flags = ty.flags(self.db); + if flags.contains(TyFlags::HAS_INVALID) || flags.contains(TyFlags::HAS_VAR) { + return; + } + + let hir_db = self.db.as_hir_db(); + let ingot = self.body.body.unwrap().top_mod(hir_db).ingot(hir_db); + if let Some(diag) = ty.emit_wf_diag(self.db, ingot, self.assumptions, span) { + self.diags.push(diag.into()); + } + } +} diff --git a/crates/hir-analysis/src/ty/ty_check/pat.rs b/crates/hir-analysis/src/ty/ty_check/pat.rs new file mode 100644 index 0000000000..d328928b57 --- /dev/null +++ b/crates/hir-analysis/src/ty/ty_check/pat.rs @@ -0,0 +1,458 @@ +use std::ops::Range; + +use either::Either; +use hir::hir_def::{Partial, Pat, PatId, VariantKind}; + +use super::{env::LocalBinding, path::RecordInitChecker, RecordLike, TyChecker}; +use crate::{ + name_resolution::PathRes, + ty::{ + binder::Binder, + diagnostics::BodyDiag, + ty_def::{InvalidCause, Kind, TyId, TyVarSort}, + ty_lower::lower_hir_ty, + }, +}; + +impl<'db> TyChecker<'db> { + pub(super) fn check_pat(&mut self, pat: PatId, expected: TyId<'db>) -> TyId<'db> { + let Partial::Present(pat_data) = pat.data(self.db.as_hir_db(), self.body()) else { + let actual = TyId::invalid(self.db, InvalidCause::Other); + return self.unify_ty(pat, actual, expected); + }; + + let ty = match pat_data { + Pat::WildCard => { + let ty_var = self.table.new_var(TyVarSort::General, &Kind::Star); + self.unify_ty(pat, ty_var, expected) + } + + Pat::Rest => unreachable!(), + Pat::Lit(..) => self.check_lit_pat(pat, pat_data), + Pat::Tuple(..) => self.check_tuple_pat(pat, pat_data, expected), + Pat::Path(..) => self.check_path_pat(pat, pat_data), + Pat::PathTuple(..) => self.check_path_tuple_pat(pat, pat_data), + Pat::Record(..) 
=> self.check_record_pat(pat, pat_data), + + Pat::Or(lhs, rhs) => { + self.check_pat(*lhs, expected); + self.check_pat(*rhs, expected) + } + }; + + self.unify_ty(pat, ty, expected) + } + + fn check_lit_pat(&mut self, _pat: PatId, pat_data: &Pat<'db>) -> TyId<'db> { + let Pat::Lit(lit) = pat_data else { + unreachable!() + }; + + match lit { + Partial::Present(lit) => self.lit_ty(lit), + Partial::Absent => TyId::invalid(self.db, InvalidCause::Other), + } + } + + fn check_tuple_pat( + &mut self, + pat: PatId, + pat_data: &Pat<'db>, + expected: TyId<'db>, + ) -> TyId<'db> { + let Pat::Tuple(pat_tup) = pat_data else { + unreachable!() + }; + + let expected_len = match expected.decompose_ty_app(self.db) { + (base, args) if base.is_tuple(self.db) => Some(args.len()), + _ => None, + }; + let (actual, rest_range) = self.unpack_rest_pat(pat_tup, expected_len); + let actual = TyId::tuple_with_elems(self.db, &actual); + + let unified = self.unify_ty(pat, actual, expected); + if unified.has_invalid(self.db) { + pat_tup.iter().for_each(|&pat| { + self.env + .type_pat(pat, TyId::invalid(self.db, InvalidCause::Other)); + }); + return unified; + } + + let mut pat_idx = 0; + for (i, &pat_ty) in unified.decompose_ty_app(self.db).1.iter().enumerate() { + if pat_idx >= pat_tup.len() { + break; + }; + + if pat_tup[pat_idx].is_rest(self.db.as_hir_db(), self.body()) { + pat_idx += 1; + continue; + } + + if rest_range.contains(&i) { + continue; + } + + self.check_pat(pat_tup[pat_idx], pat_ty); + pat_idx += 1; + } + + unified + } + + fn check_path_pat(&mut self, pat: PatId, pat_data: &Pat<'db>) -> TyId<'db> { + let Pat::Path(path, is_mut) = pat_data else { + unreachable!() + }; + + let Partial::Present(path) = path else { + return TyId::invalid(self.db, InvalidCause::Other); + }; + + let span = pat.lazy_span(self.body()).into_path_pat(); + let res = self.resolve_path(*path, true); + + if path.is_bare_ident(self.db.as_hir_db()) { + match res { + Ok(PathRes::Ty(ty)) if ty.is_record(self.db) 
=> { + let diag = BodyDiag::unit_variant_expected( + self.db, + pat.lazy_span(self.body()).into(), + ty, + ); + self.push_diag(diag); + TyId::invalid(self.db, InvalidCause::Other) + } + Ok(PathRes::EnumVariant(variant)) => { + if matches!(variant.variant_kind(self.db), VariantKind::Unit) { + self.table.instantiate_to_term(variant.ty) + } else { + let diag = BodyDiag::unit_variant_expected( + self.db, + pat.lazy_span(self.body()).into(), + variant, + ); + + self.push_diag(diag); + TyId::invalid(self.db, InvalidCause::Other) + } + } + _ => { + let binding = LocalBinding::local(pat, *is_mut); + self.env.register_pending_binding( + *path.ident(self.db.as_hir_db()).unwrap(), + binding, + ); + self.fresh_ty() + } + } + } else { + match res { + Ok(PathRes::Ty(ty) | PathRes::Func(ty) | PathRes::Const(ty)) => { + let diag = BodyDiag::unit_variant_expected( + self.db, + pat.lazy_span(self.body()).into(), + ty, + ); + self.push_diag(diag); + TyId::invalid(self.db, InvalidCause::Other) + } + Ok(PathRes::Trait(trait_)) => { + let diag = BodyDiag::NotValue { + primary: span.into(), + given: Either::Left(trait_.trait_(self.db).into()), + }; + self.push_diag(diag); + TyId::invalid(self.db, InvalidCause::Other) + } + Ok(PathRes::EnumVariant(variant)) => { + if matches!(variant.variant_kind(self.db), VariantKind::Unit) { + self.table.instantiate_to_term(variant.ty) + } else { + let diag = BodyDiag::unit_variant_expected( + self.db, + pat.lazy_span(self.body()).into(), + variant, + ); + + self.push_diag(diag); + TyId::invalid(self.db, InvalidCause::Other) + } + } + Ok(PathRes::Mod(scope_id)) => { + let diag = BodyDiag::NotValue { + primary: span.into(), + given: Either::Left(scope_id.item()), + }; + self.push_diag(diag); + TyId::invalid(self.db, InvalidCause::Other) + } + Ok(PathRes::TypeMemberTbd(_) | PathRes::FuncParam(..)) => { + // TODO: diagnostic? 
+ TyId::invalid(self.db, InvalidCause::Other) + } + + Err(_) => TyId::invalid(self.db, InvalidCause::Other), + } + } + } + + fn check_path_tuple_pat(&mut self, pat: PatId, pat_data: &Pat<'db>) -> TyId<'db> { + let Pat::PathTuple(Partial::Present(path), elems) = pat_data else { + return TyId::invalid(self.db, InvalidCause::Other); + }; + + let span = pat.lazy_span(self.body()).into_path_tuple_pat(); + + let (variant, expected_elems) = match self.resolve_path(*path, true) { + Ok(res) => match res { + PathRes::Ty(ty) | PathRes::Func(ty) | PathRes::Const(ty) => { + let diag = BodyDiag::tuple_variant_expected( + self.db, + pat.lazy_span(self.body()).into(), + Some(ty), + ); + self.push_diag(diag); + return TyId::invalid(self.db, InvalidCause::Other); + } + + PathRes::Trait(trait_) => { + let diag = BodyDiag::NotValue { + primary: span.into(), + given: Either::Left(trait_.trait_(self.db).into()), + }; + + self.push_diag(diag); + return TyId::invalid(self.db, InvalidCause::Other); + } + PathRes::EnumVariant(variant) => match variant.variant_kind(self.db) { + VariantKind::Tuple(elems) => (variant, elems), + _ => { + let diag = BodyDiag::tuple_variant_expected( + self.db, + pat.lazy_span(self.body()).into(), + Some(variant), + ); + self.push_diag(diag); + return TyId::invalid(self.db, InvalidCause::Other); + } + }, + + PathRes::Mod(scope) => { + let diag = BodyDiag::NotValue { + primary: span.into(), + given: Either::Left(scope.item()), + }; + self.push_diag(diag); + return TyId::invalid(self.db, InvalidCause::Other); + } + + PathRes::TypeMemberTbd(_) | PathRes::FuncParam(..) 
=> { + let diag = BodyDiag::tuple_variant_expected::(self.db, span.into(), None); + self.push_diag(diag); + return TyId::invalid(self.db, InvalidCause::Other); + } + }, + Err(_) => return TyId::invalid(self.db, InvalidCause::Other), + }; + + let expected_len = expected_elems.len(self.db.as_hir_db()); + + let (actual_elems, rest_range) = self.unpack_rest_pat(elems, Some(expected_len)); + if actual_elems.len() != expected_len { + let diag = BodyDiag::MismatchedFieldCount { + primary: pat.lazy_span(self.body()).into(), + expected: expected_len, + given: actual_elems.len(), + }; + + self.push_diag(diag); + return variant.ty; + }; + + let mut arg_idx = 0; + for (i, &hir_ty) in expected_elems.data(self.db.as_hir_db()).iter().enumerate() { + if arg_idx >= elems.len() { + break; + } + + if elems[arg_idx].is_rest(self.db.as_hir_db(), self.body()) { + arg_idx += 1; + continue; + } + + if rest_range.contains(&i) { + continue; + } + let elem_ty = match hir_ty.to_opt() { + Some(ty) => { + let ty = lower_hir_ty(self.db, ty, variant.enum_(self.db).scope()); + Binder::bind(ty).instantiate(self.db, variant.ty.generic_args(self.db)) + } + _ => TyId::invalid(self.db, InvalidCause::Other), + }; + + self.check_pat(elems[arg_idx], elem_ty); + arg_idx += 1; + } + + variant.ty + } + + fn check_record_pat(&mut self, pat: PatId, pat_data: &Pat<'db>) -> TyId<'db> { + let Pat::Record(Partial::Present(path), _) = pat_data else { + return TyId::invalid(self.db, InvalidCause::Other); + }; + + let span = pat.lazy_span(self.body()).into_record_pat(); + + match self.resolve_path(*path, true) { + Ok(reso) => match reso { + PathRes::Ty(ty) if ty.is_record(self.db) => { + self.check_record_pat_fields(ty, pat); + ty + } + + PathRes::Ty(ty) | PathRes::Func(ty) | PathRes::Const(ty) => { + let diag = BodyDiag::record_expected( + self.db, + pat.lazy_span(self.body()).into(), + Some(ty), + ); + self.push_diag(diag); + TyId::invalid(self.db, InvalidCause::Other) + } + + PathRes::Trait(trait_) => { + let diag 
= BodyDiag::NotValue { + primary: span.into(), + given: Either::Left(trait_.trait_(self.db).into()), + }; + self.push_diag(diag); + TyId::invalid(self.db, InvalidCause::Other) + } + + PathRes::EnumVariant(variant) if variant.is_record(self.db) => { + let ty = variant.ty; + self.check_record_pat_fields(variant, pat); + ty + } + + PathRes::EnumVariant(variant) => { + let diag = BodyDiag::record_expected( + self.db, + pat.lazy_span(self.body()).into(), + Some(variant), + ); + self.push_diag(diag); + TyId::invalid(self.db, InvalidCause::Other) + } + + PathRes::Mod(scope) => { + let diag = BodyDiag::NotValue { + primary: span.into(), + given: Either::Left(scope.item()), + }; + self.push_diag(diag); + TyId::invalid(self.db, InvalidCause::Other) + } + + PathRes::TypeMemberTbd(_) | PathRes::FuncParam(..) => { + let diag = BodyDiag::record_expected::( + self.db, + pat.lazy_span(self.body()).into(), + None, + ); + self.push_diag(diag); + TyId::invalid(self.db, InvalidCause::Other) + } + }, + Err(_) => TyId::invalid(self.db, InvalidCause::Other), + } + } + + fn check_record_pat_fields(&mut self, mut record_like: T, pat: PatId) + where + T: RecordLike<'db>, + { + let Partial::Present(Pat::Record(_, fields)) = pat.data(self.db.as_hir_db(), self.body()) + else { + unreachable!() + }; + + let hir_db = self.db.as_hir_db(); + let mut contains_rest = false; + + let pat_span = pat.lazy_span(self.body()).into_record_pat(); + let mut rec_checker = RecordInitChecker::new(self, &mut record_like); + + for (i, field_pat) in fields.iter().enumerate() { + let field_pat_span = pat_span.fields().field(i); + + if field_pat.pat.is_rest(hir_db, rec_checker.tc.body()) { + if contains_rest { + let diag = BodyDiag::DuplicatedRestPat( + field_pat.pat.lazy_span(rec_checker.tc.body()).into(), + ); + rec_checker.tc.push_diag(diag); + continue; + } + + contains_rest = true; + continue; + } + + let label = field_pat.label(hir_db, rec_checker.tc.body()); + let expected = match rec_checker.feed_label(label, 
field_pat_span.into()) { + Ok(ty) => ty, + Err(diag) => { + rec_checker.tc.push_diag(diag); + TyId::invalid(rec_checker.tc.db, InvalidCause::Other) + } + }; + + rec_checker.tc.check_pat(field_pat.pat, expected); + } + + if let Err(diag) = rec_checker.finalize(pat_span.fields().into(), contains_rest) { + self.push_diag(diag); + } + } + + fn unpack_rest_pat( + &mut self, + pat_tup: &[PatId], + expected_len: Option, + ) -> (Vec>, std::ops::Range) { + let mut rest_start = None; + for (i, &pat) in pat_tup.iter().enumerate() { + if pat.is_rest(self.db.as_hir_db(), self.body()) && rest_start.replace(i).is_some() { + let span = pat.lazy_span(self.body()); + self.push_diag(BodyDiag::DuplicatedRestPat(span.into())); + return ( + self.fresh_tys_n(expected_len.unwrap_or(0)), + Range::default(), + ); + } + } + + match rest_start { + Some(rest_start) => { + let expected_len = expected_len.unwrap_or(0); + let minimum_len = pat_tup.len() - 1; + + if minimum_len <= expected_len { + let diff = expected_len - minimum_len; + let range = rest_start..rest_start + diff; + (self.fresh_tys_n(expected_len), range) + } else { + (self.fresh_tys_n(minimum_len), Range::default()) + } + } + + None => (self.fresh_tys_n(pat_tup.len()), Range::default()), + } + } +} diff --git a/crates/hir-analysis/src/ty/ty_check/path.rs b/crates/hir-analysis/src/ty/ty_check/path.rs new file mode 100644 index 0000000000..92566908c5 --- /dev/null +++ b/crates/hir-analysis/src/ty/ty_check/path.rs @@ -0,0 +1,360 @@ +use std::collections::hash_map::Entry; + +use hir::{ + hir_def::{ + scope_graph::{FieldParent, ScopeId}, + FieldDefListId as HirFieldDefListId, IdentId, VariantKind as HirVariantKind, + }, + span::DynLazySpan, +}; +use rustc_hash::FxHashMap; + +use super::{env::LocalBinding, TyChecker}; +use crate::{ + name_resolution::{diagnostics::NameResDiag, is_scope_visible_from, PathRes, ResolvedVariant}, + ty::{ + adt_def::{AdtDef, AdtField, AdtRef, AdtRefId}, + diagnostics::{BodyDiag, FuncBodyDiag}, + 
ty_def::{InvalidCause, TyData, TyId}, + }, + HirAnalysisDb, +}; + +impl<'db> TyId<'db> { + pub(crate) fn adt_ref(&self, db: &'db dyn HirAnalysisDb) -> Option> { + self.adt_def(db).map(|def| def.adt_ref(db)) + } + + pub(crate) fn adt_def(&self, db: &'db dyn HirAnalysisDb) -> Option> { + let base = self.decompose_ty_app(db).0; + match base.data(db) { + TyData::TyBase(base) => base.adt(), + _ => None, + } + } +} + +#[derive(Clone, Debug)] +pub(super) enum ResolvedPathInBody<'db> { + Reso(PathRes<'db>), + Binding(LocalBinding<'db>), + NewBinding(IdentId<'db>), + #[allow(dead_code)] // TODO: we might be failing to report some errors + Diag(FuncBodyDiag<'db>), + Invalid, +} + +pub(super) struct RecordInitChecker<'tc, 'db, 'a, T> { + pub(super) tc: &'tc mut TyChecker<'db>, + data: &'a mut T, + already_given: FxHashMap, DynLazySpan<'db>>, + invalid_field_given: bool, +} + +impl<'tc, 'db, 'a, T> RecordInitChecker<'tc, 'db, 'a, T> +where + T: RecordLike<'db>, +{ + /// Create a new `RecordInitChecker` for the given record path. + /// + /// ## Panics + /// Panics if the given `data` is not a record. + pub(super) fn new(tc: &'tc mut TyChecker<'db>, data: &'a mut T) -> Self { + assert!(data.is_record(tc.db)); + + Self { + tc, + data, + already_given: FxHashMap::default(), + invalid_field_given: false, + } + } + + /// Feed a label to the checker. + /// Returns the type of the field if the label is valid, otherwise returns + /// an error. 
+ pub(super) fn feed_label( + &mut self, + label: Option>, + field_span: DynLazySpan<'db>, + ) -> Result, FuncBodyDiag<'db>> { + let label = match label { + Some(label) => match self.already_given.entry(label) { + Entry::Occupied(first_use) => { + let diag = BodyDiag::DuplicatedRecordFieldBind { + primary: field_span.clone(), + first_use: first_use.get().clone(), + name: label, + }; + + self.invalid_field_given = true; + return Err(diag.into()); + } + + Entry::Vacant(entry) => { + entry.insert(field_span.clone()); + label + } + }, + + None => { + let diag = BodyDiag::ExplicitLabelExpectedInRecord { + primary: field_span, + hint: self.data.initializer_hint(self.tc.db), + }; + + self.invalid_field_given = true; + return Err(diag.into()); + } + }; + + let Some(ty) = self.data.record_field_ty(self.tc.db, label) else { + let diag = BodyDiag::record_field_not_found(field_span, label); + + self.invalid_field_given = true; + return Err(diag.into()); + }; + + let field_scope = self.data.record_field_scope(self.tc.db, label).unwrap(); + if is_scope_visible_from(self.tc.db, field_scope, self.tc.env.scope()) { + Ok(ty) + } else { + let diag = NameResDiag::Invisible( + field_span, + label, + field_scope.name_span(self.tc.db.as_hir_db()), + ); + + self.invalid_field_given = true; + Err(diag.into()) + } + } + + /// Finalize the checker and return an error if there are missing fields. 
+ pub(super) fn finalize( + self, + initializer_span: DynLazySpan<'db>, + allow_missing_field: bool, + ) -> Result<(), FuncBodyDiag<'db>> { + if !self.invalid_field_given && !allow_missing_field { + let expected_labels = self.data.record_labels(self.tc.db); + let missing_fields: Vec = expected_labels + .iter() + .filter(|f| !self.already_given.contains_key(f)) + .cloned() + .collect(); + + if !missing_fields.is_empty() { + let diag = BodyDiag::MissingRecordFields { + primary: initializer_span, + missing_fields, + hint: self.data.initializer_hint(self.tc.db), + }; + + return Err(diag.into()); + } + } + + Ok(()) + } +} + +pub(crate) trait RecordLike<'db> { + fn is_record(&self, db: &'db dyn HirAnalysisDb) -> bool; + + fn record_field_ty(&self, db: &'db dyn HirAnalysisDb, name: IdentId<'db>) -> Option>; + + fn record_field_list( + &self, + db: &'db dyn HirAnalysisDb, + ) -> Option<(HirFieldDefListId<'db>, &'db AdtField<'db>)>; + + fn record_field_idx(&self, db: &'db dyn HirAnalysisDb, name: IdentId<'db>) -> Option { + let (hir_field_list, _) = self.record_field_list(db)?; + hir_field_list.field_idx(db.as_hir_db(), name) + } + + fn record_field_scope( + &self, + db: &'db dyn HirAnalysisDb, + name: IdentId<'db>, + ) -> Option>; + + fn record_labels(&self, db: &'db dyn HirAnalysisDb) -> Vec>; + + fn initializer_hint(&self, db: &'db dyn HirAnalysisDb) -> Option; + + fn kind_name(&self, db: &'db dyn HirAnalysisDb) -> String; +} + +impl<'db> RecordLike<'db> for TyId<'db> { + fn is_record(&self, db: &'db dyn HirAnalysisDb) -> bool { + let Some(adt_ref) = self.adt_ref(db) else { + return false; + }; + + matches!(adt_ref.data(db), AdtRef::Struct(..)) + } + + fn record_field_ty(&self, db: &'db dyn HirAnalysisDb, name: IdentId<'db>) -> Option> { + let args = self.generic_args(db); + let hir_db = db.as_hir_db(); + + let (hir_field_list, field_list) = self.record_field_list(db)?; + + let field_idx = hir_field_list.field_idx(hir_db, name)?; + let field_ty = field_list.ty(db, 
field_idx).instantiate(db, args); + + if field_ty.is_star_kind(db) { + field_ty + } else { + TyId::invalid(db, InvalidCause::Other) + } + .into() + } + + fn record_field_list( + &self, + db: &'db dyn HirAnalysisDb, + ) -> Option<(HirFieldDefListId<'db>, &'db AdtField<'db>)> { + let hir_db = db.as_hir_db(); + + let adt_def = self.adt_def(db)?; + match adt_def.adt_ref(db).data(db) { + AdtRef::Struct(s) => (s.fields(hir_db), &adt_def.fields(db)[0]).into(), + AdtRef::Contract(c) => (c.fields(hir_db), &adt_def.fields(db)[0]).into(), + + _ => None, + } + } + + fn record_field_scope( + &self, + db: &'db dyn HirAnalysisDb, + name: IdentId<'db>, + ) -> Option> { + let field_idx = self.record_field_idx(db, name)?; + let adt_ref = self.adt_ref(db)?; + + let parent = FieldParent::Item(adt_ref.as_item(db)); + Some(ScopeId::Field(parent, field_idx)) + } + + fn record_labels(&self, db: &'db dyn HirAnalysisDb) -> Vec> { + let hir_db = db.as_hir_db(); + let Some(adt_ref) = self.adt_ref(db) else { + return Vec::default(); + }; + let fields = match adt_ref.data(db) { + AdtRef::Struct(s) => s.fields(hir_db), + AdtRef::Contract(c) => c.fields(hir_db), + + _ => return Vec::default(), + }; + + fields + .data(hir_db) + .iter() + .filter_map(|field| field.name.to_opt()) + .collect() + } + + fn kind_name(&self, db: &'db dyn HirAnalysisDb) -> String { + if let Some(adt_ref) = self.adt_ref(db) { + adt_ref.kind_name(db).to_string() + } else if self.is_func(db) { + "fn".to_string() + } else { + self.pretty_print(db).to_string() + } + } + + fn initializer_hint(&self, db: &'db dyn HirAnalysisDb) -> Option { + let hir_db = db.as_hir_db(); + + if self.adt_ref(db).is_some() { + let AdtRef::Struct(s) = self.adt_ref(db)?.data(db) else { + return None; + }; + + let name = s.name(hir_db).unwrap().data(hir_db); + let init_args = s.format_initializer_args(db.as_hir_db()); + Some(format!("{}{}", name, init_args)) + } else { + None + } + } +} + +impl<'db> RecordLike<'db> for ResolvedVariant<'db> { + fn 
is_record(&self, db: &dyn HirAnalysisDb) -> bool { + matches!(self.variant_kind(db), HirVariantKind::Record(..)) + } + + fn record_field_ty(&self, db: &'db dyn HirAnalysisDb, name: IdentId<'db>) -> Option> { + let args = self.ty.generic_args(db); + let hir_db = db.as_hir_db(); + + let (hir_field_list, field_list) = self.record_field_list(db)?; + let field_idx = hir_field_list.field_idx(hir_db, name)?; + + Some(field_list.ty(db, field_idx).instantiate(db, args)) + } + + fn record_field_list( + &self, + db: &'db dyn HirAnalysisDb, + ) -> Option<(HirFieldDefListId<'db>, &'db AdtField<'db>)> { + match self.variant_kind(db) { + hir::hir_def::VariantKind::Record(fields) => { + (fields, &self.ty.adt_def(db).unwrap().fields(db)[self.idx]).into() + } + + _ => None, + } + } + + fn record_field_scope( + &self, + db: &'db dyn HirAnalysisDb, + name: IdentId<'db>, + ) -> Option> { + let field_idx = self.record_field_idx(db, name)?; + let parent = FieldParent::Variant(self.enum_(db).into(), self.idx); + Some(ScopeId::Field(parent, field_idx)) + } + + fn record_labels(&self, db: &'db dyn HirAnalysisDb) -> Vec> { + let hir_db = db.as_hir_db(); + + let fields = match self.variant_kind(db) { + hir::hir_def::VariantKind::Record(fields) => fields, + _ => return Vec::default(), + }; + + fields + .data(hir_db) + .iter() + .filter_map(|field| field.name.to_opt()) + .collect() + } + + fn kind_name(&self, db: &'db dyn HirAnalysisDb) -> String { + let hir_db = db.as_hir_db(); + match self.enum_(db).variants(hir_db).data(hir_db)[self.idx].kind { + HirVariantKind::Unit => "unit variant", + HirVariantKind::Tuple(_) => "tuple variant", + HirVariantKind::Record(_) => "record variant", + } + .to_string() + } + + fn initializer_hint(&self, db: &'db dyn HirAnalysisDb) -> Option { + let hir_db = db.as_hir_db(); + let expected_sub_pat = + self.enum_(db).variants(hir_db).data(hir_db)[self.idx].format_initializer_args(hir_db); + + let path = self.path.pretty_print(hir_db); + Some(format!("{}{}", path, 
expected_sub_pat)) + } +} diff --git a/crates/hir-analysis/src/ty/ty_check/stmt.rs b/crates/hir-analysis/src/ty/ty_check/stmt.rs new file mode 100644 index 0000000000..79e6731015 --- /dev/null +++ b/crates/hir-analysis/src/ty/ty_check/stmt.rs @@ -0,0 +1,167 @@ +use hir::hir_def::{IdentId, Partial, Stmt, StmtId}; + +use super::TyChecker; +use crate::ty::{ + diagnostics::BodyDiag, + fold::TyFoldable, + ty_def::{InvalidCause, TyId}, +}; + +impl<'db> TyChecker<'db> { + pub(super) fn check_stmt(&mut self, stmt: StmtId, expected: TyId<'db>) -> TyId<'db> { + let Partial::Present(stmt_data) = self.env.stmt_data(stmt) else { + return TyId::invalid(self.db, InvalidCause::Other); + }; + + match stmt_data { + Stmt::Let(..) => self.check_let(stmt, stmt_data), + Stmt::For(..) => self.check_for(stmt, stmt_data), + Stmt::While(..) => self.check_while(stmt, stmt_data), + Stmt::Continue => self.check_continue(stmt, stmt_data), + Stmt::Break => self.check_break(stmt, stmt_data), + Stmt::Return(..) => self.check_return(stmt, stmt_data), + Stmt::Expr(expr) => self.check_expr(*expr, expected).ty, + } + } + + fn check_let(&mut self, stmt: StmtId, stmt_data: &Stmt<'db>) -> TyId<'db> { + let Stmt::Let(pat, ascription, expr) = stmt_data else { + unreachable!() + }; + + let span = stmt.lazy_span(self.env.body()).into_let_stmt(); + + let ascription = match ascription { + Some(ty) => self.lower_ty(*ty, span.ty_moved().into(), true), + None => self.fresh_ty(), + }; + + if let Some(expr) = expr { + self.check_expr(*expr, ascription); + } + + self.check_pat(*pat, ascription); + self.env.flush_pending_bindings(); + TyId::unit(self.db) + } + + fn check_for(&mut self, stmt: StmtId, stmt_data: &Stmt<'db>) -> TyId<'db> { + let Stmt::For(pat, expr, body) = stmt_data else { + unreachable!() + }; + + let expr_ty = self.fresh_ty(); + let typed_expr = self.check_expr(*expr, expr_ty).fold_with(&mut self.table); + let expr_ty = typed_expr.ty; + + let (base, arg) = expr_ty.decompose_ty_app(self.db); + // 
TODO: We can generalize this by just checking the `expr_ty` implements + // `Iterator` trait when `std::iter::Iterator` is implemented. + let elem_ty = if base.is_array(self.db) { + arg[0] + } else if base.has_invalid(self.db) { + TyId::invalid(self.db, InvalidCause::Other) + } else if base.is_ty_var(self.db) { + let diag = BodyDiag::TypeMustBeKnown(expr.lazy_span(self.body()).into()); + self.push_diag(diag); + TyId::invalid(self.db, InvalidCause::Other) + } else { + let diag = BodyDiag::TraitNotImplemented { + primary: expr.lazy_span(self.body()).into(), + ty: expr_ty.pretty_print(self.db).to_string(), + trait_name: IdentId::new(self.db.as_hir_db(), "Iterator".to_string()), + }; + self.push_diag(diag); + + TyId::invalid(self.db, InvalidCause::Other) + }; + + self.check_pat(*pat, elem_ty); + + self.env.enter_loop(stmt); + self.env.enter_scope(*body); + self.env.flush_pending_bindings(); + + let body_ty = self.fresh_ty(); + self.check_expr(*body, body_ty); + + self.env.leave_scope(); + self.env.leave_loop(); + + TyId::unit(self.db) + } + + fn check_while(&mut self, stmt: StmtId, stmt_data: &Stmt<'db>) -> TyId<'db> { + let Stmt::While(cond, body) = stmt_data else { + unreachable!() + }; + + self.check_expr(*cond, TyId::bool(self.db)); + + self.env.enter_loop(stmt); + self.check_expr(*body, TyId::unit(self.db)); + self.env.leave_loop(); + + TyId::unit(self.db) + } + + fn check_continue(&mut self, stmt: StmtId, stmt_data: &Stmt<'db>) -> TyId<'db> { + assert!(matches!(stmt_data, Stmt::Continue)); + + if self.env.current_loop().is_none() { + let span = stmt.lazy_span(self.env.body()); + let diag = BodyDiag::LoopControlOutsideOfLoop { + primary: span.into(), + is_break: false, + }; + self.push_diag(diag); + } + + TyId::never(self.db) + } + + fn check_break(&mut self, stmt: StmtId, stmt_data: &Stmt<'db>) -> TyId<'db> { + assert!(matches!(stmt_data, Stmt::Break)); + + if self.env.current_loop().is_none() { + let span = stmt.lazy_span(self.env.body()); + let diag = 
BodyDiag::LoopControlOutsideOfLoop { + primary: span.into(), + is_break: true, + }; + self.push_diag(diag); + } + + TyId::never(self.db) + } + + fn check_return(&mut self, stmt: StmtId, stmt_data: &Stmt<'db>) -> TyId<'db> { + let Stmt::Return(expr) = stmt_data else { + unreachable!() + }; + + let returned_ty = if let Some(expr) = expr { + let returned_ty = self.fresh_ty(); + self.check_expr(*expr, returned_ty); + returned_ty.fold_with(&mut self.table) + } else { + TyId::unit(self.db) + }; + + if self.table.unify(returned_ty, self.expected).is_err() { + let func = self.env.func(); + let span = stmt.lazy_span(self.env.body()); + let diag = BodyDiag::returned_type_mismatch( + self.db, + span.into(), + returned_ty, + self.expected, + func.map(|f| f.hir_func_def(self.db).unwrap()), + ); + + self.push_diag(diag); + } + + TyId::never(self.db) + } +} diff --git a/crates/hir-analysis/src/ty/ty_def.rs b/crates/hir-analysis/src/ty/ty_def.rs new file mode 100644 index 0000000000..2c7e892225 --- /dev/null +++ b/crates/hir-analysis/src/ty/ty_def.rs @@ -0,0 +1,1305 @@ +//! This module contains the type definitions for the Fe type system. 
+ +use std::fmt; + +use bitflags::bitflags; +use common::{indexmap::IndexSet, input::IngotKind}; +use hir::{ + hir_def::{ + prim_ty::{IntTy as HirIntTy, PrimTy as HirPrimTy, UintTy as HirUintTy}, + scope_graph::ScopeId, + Body, Enum, IdentId, IngotId, IntegerId, TypeAlias as HirTypeAlias, + }, + span::DynLazySpan, +}; +use if_chain::if_chain; +use rustc_hash::FxHashSet; + +use super::{ + adt_def::AdtDef, + const_ty::{ConstTyData, ConstTyId, EvaluatedConstTy}, + diagnostics::{TraitConstraintDiag, TyDiagCollection, TyLowerDiag}, + func_def::FuncDef, + trait_resolution::{PredicateListId, WellFormedness}, + ty_lower::{collect_generic_params, GenericParamOwnerId}, + unify::InferenceKey, + visitor::{TyVisitable, TyVisitor}, +}; +use crate::{ + ty::{adt_def::AdtRef, trait_resolution::check_ty_wf}, + HirAnalysisDb, +}; + +#[salsa::interned] +pub struct TyId<'db> { + #[return_ref] + pub data: TyData<'db>, +} + +impl<'db> TyId<'db> { + /// Returns the kind of the type. + pub fn kind(self, db: &'db dyn HirAnalysisDb) -> &'db Kind { + ty_kind(db, self) + } + + /// Returns the current arguments of the type. + /// ## Example + /// Calling this method for `TyApp, U>` returns `[T, U]`. + pub fn generic_args(self, db: &'db dyn HirAnalysisDb) -> &'db [Self] { + let (_, args) = self.decompose_ty_app(db); + args + } + + /// Returns teh base type of this type. + /// ## Example + /// `TyApp` returns `Adt`. + /// `TyApp, i32>` returns `TyParam`. + pub fn base_ty(self, db: &'db dyn HirAnalysisDb) -> Self { + self.decompose_ty_app(db).0 + } + + /// Returns the type of const type if the type is a const type. + pub fn const_ty_ty(self, db: &'db dyn HirAnalysisDb) -> Option { + match self.data(db) { + TyData::ConstTy(const_ty) => Some(const_ty.ty(db)), + _ => None, + } + } + + /// Returns `true` is the type has `*` kind. 
+ pub fn is_star_kind(self, db: &dyn HirAnalysisDb) -> bool { + matches!(self.kind(db), Kind::Star | Kind::Any) + } + + /// Returns `true` if the type is an integral type(like `u32`, `i32` etc.) + pub fn is_integral(self, db: &dyn HirAnalysisDb) -> bool { + match self.data(db) { + TyData::TyBase(ty_base) => ty_base.is_integral(), + TyData::TyVar(var) => { + matches!(var.sort, TyVarSort::Integral) + } + _ => false, + } + } + + /// Returns `true` if the type is a bool type. + pub fn is_bool(self, db: &dyn HirAnalysisDb) -> bool { + match self.data(db) { + TyData::TyBase(ty_base) => ty_base.is_bool(), + _ => false, + } + } + + /// Returns `true` if the type is a never type. + pub fn is_never(self, db: &dyn HirAnalysisDb) -> bool { + matches!(self.data(db), TyData::Never) + } + + /// Returns `IngotId` that declares the type. + pub fn ingot(self, db: &'db dyn HirAnalysisDb) -> Option> { + match self.data(db) { + TyData::TyBase(TyBase::Adt(adt)) => adt.ingot(db).into(), + TyData::TyBase(TyBase::Func(def)) => def.ingot(db).into(), + TyData::TyApp(lhs, _) => lhs.ingot(db), + _ => None, + } + } + + pub fn invalid_cause(self, db: &'db dyn HirAnalysisDb) -> Option> { + match self.data(db) { + TyData::Invalid(cause) => Some(cause.clone()), + _ => None, + } + } + + pub fn flags(self, db: &dyn HirAnalysisDb) -> TyFlags { + ty_flags(db, self) + } + + pub fn has_invalid(self, db: &dyn HirAnalysisDb) -> bool { + self.flags(db).contains(TyFlags::HAS_INVALID) + } + + pub fn has_param(self, db: &dyn HirAnalysisDb) -> bool { + self.flags(db).contains(TyFlags::HAS_PARAM) + } + + pub fn has_var(self, db: &dyn HirAnalysisDb) -> bool { + self.flags(db).contains(TyFlags::HAS_VAR) + } + + /// Returns `true` if the type has a `*` kind. 
+ pub fn has_star_kind(self, db: &dyn HirAnalysisDb) -> bool { + !matches!(self.kind(db), Kind::Abs(_, _)) + } + + pub fn pretty_print(self, db: &'db dyn HirAnalysisDb) -> &'db str { + pretty_print_ty(db, self) + } + + pub fn is_inherent_impl_allowed(self, db: &dyn HirAnalysisDb, ingot: IngotId) -> bool { + if self.is_param(db) { + return false; + }; + + let ty_ingot = self.ingot(db); + match ingot.kind(db.as_hir_db()) { + IngotKind::Std => ty_ingot.is_none() || ty_ingot == Some(ingot), + _ => ty_ingot == Some(ingot), + } + } + + /// Decompose type application into the base type and type arguments, this + /// doesn't perform deconstruction recursively. e.g., + /// `App(App(T, U), App(V, W))` -> `(T, [U, App(V, W)])` + pub(super) fn decompose_ty_app( + self, + db: &'db dyn HirAnalysisDb, + ) -> (TyId<'db>, &'db [TyId<'db>]) { + let (base, args) = decompose_ty_app(db, self); + (*base, args) + } + + pub(super) fn ptr(db: &'db dyn HirAnalysisDb) -> TyId<'db> { + Self::new(db, TyData::TyBase(TyBase::Prim(PrimTy::Ptr))) + } + + pub(super) fn tuple(db: &'db dyn HirAnalysisDb, n: usize) -> Self { + Self::new(db, TyData::TyBase(TyBase::tuple(n))) + } + + pub(super) fn tuple_with_elems(db: &'db dyn HirAnalysisDb, elems: &[TyId<'db>]) -> Self { + let base = TyBase::tuple(elems.len()); + let mut ty = Self::new(db, TyData::TyBase(base)); + for &elem in elems { + ty = Self::app(db, ty, elem); + } + ty + } + + pub(super) fn bool(db: &'db dyn HirAnalysisDb) -> Self { + Self::new(db, TyData::TyBase(TyBase::Prim(PrimTy::Bool))) + } + + pub(super) fn array(db: &'db dyn HirAnalysisDb, elem: TyId<'db>) -> Self { + let base = TyBase::Prim(PrimTy::Array); + let array = Self::new(db, TyData::TyBase(base)); + Self::app(db, array, elem) + } + + pub(super) fn array_with_len(db: &'db dyn HirAnalysisDb, elem: TyId<'db>, len: usize) -> Self { + let array = Self::array(db, elem); + + let len = EvaluatedConstTy::LitInt(IntegerId::new(db.as_hir_db(), len.into())); + let len = 
ConstTyData::Evaluated(len, array.applicable_ty(db).unwrap().const_ty.unwrap()); + let len = TyId::const_ty(db, ConstTyId::new(db, len)); + + TyId::app(db, array, len) + } + + pub(super) fn unit(db: &'db dyn HirAnalysisDb) -> Self { + Self::tuple(db, 0) + } + + pub(super) fn never(db: &'db dyn HirAnalysisDb) -> Self { + Self::new(db, TyData::Never) + } + + pub(super) fn const_ty(db: &'db dyn HirAnalysisDb, const_ty: ConstTyId<'db>) -> Self { + Self::new(db, TyData::ConstTy(const_ty)) + } + + pub(crate) fn adt(db: &'db dyn HirAnalysisDb, adt: AdtDef<'db>) -> Self { + Self::new(db, TyData::TyBase(TyBase::Adt(adt))) + } + + pub(crate) fn func(db: &'db dyn HirAnalysisDb, func: FuncDef<'db>) -> Self { + Self::new(db, TyData::TyBase(TyBase::Func(func))) + } + + pub(crate) fn is_func(self, db: &dyn HirAnalysisDb) -> bool { + matches!(self.base_ty(db).data(db), TyData::TyBase(TyBase::Func(_))) + } + + pub(crate) fn is_trait_self(self, db: &dyn HirAnalysisDb) -> bool { + matches!(self.base_ty(db).data(db), TyData::TyParam(ty_param) if ty_param.is_trait_self) + } + + pub(crate) fn is_ty_var(self, db: &dyn HirAnalysisDb) -> bool { + matches!(self.base_ty(db).data(db), TyData::TyVar(_)) + } + + pub(crate) fn is_const_ty(self, db: &dyn HirAnalysisDb) -> bool { + matches!(self.base_ty(db).data(db), TyData::ConstTy(_)) + } + + pub(crate) fn is_tuple(self, db: &dyn HirAnalysisDb) -> bool { + matches!( + self.base_ty(db).data(db), + TyData::TyBase(TyBase::Prim(PrimTy::Tuple(_))) + ) + } + + pub(crate) fn is_array(self, db: &dyn HirAnalysisDb) -> bool { + matches!( + self.base_ty(db).data(db), + TyData::TyBase(TyBase::Prim(PrimTy::Array)) + ) + } + + pub(crate) fn is_string(self, db: &dyn HirAnalysisDb) -> bool { + matches!( + self.base_ty(db).data(db), + TyData::TyBase(TyBase::Prim(PrimTy::String)) + ) + } + + pub(crate) fn is_param(self, db: &dyn HirAnalysisDb) -> bool { + matches!(self.base_ty(db).data(db), TyData::TyParam(_)) + } + + /// Returns `true` if the base type is a user 
defined `struct` type. + pub(crate) fn is_struct(self, db: &dyn HirAnalysisDb) -> bool { + let base_ty = self.base_ty(db); + match base_ty.data(db) { + TyData::TyBase(TyBase::Adt(adt)) => adt.is_struct(db), + _ => false, + } + } + + pub fn is_prim(self, db: &dyn HirAnalysisDb) -> bool { + matches!(self.base_ty(db).data(db), TyData::TyBase(TyBase::Prim(_))) + } + + /// Returns `true` if the base type is a user defined `enum` type. + pub(crate) fn as_enum(self, db: &'db dyn HirAnalysisDb) -> Option> { + let base_ty = self.base_ty(db); + if_chain! { + if let Some(adt_ref) = base_ty.adt_ref(db); + if let AdtRef::Enum(enum_) = adt_ref.data(db); + then { + Some(enum_) + } else { + None + } + } + } + + pub(crate) fn as_scope(self, db: &'db dyn HirAnalysisDb) -> Option> { + match self.base_ty(db).data(db) { + TyData::TyParam(param) => Some(param.scope(db)), + TyData::TyBase(TyBase::Adt(adt)) => Some(adt.scope(db)), + TyData::TyBase(TyBase::Func(func)) => Some(func.scope(db)), + TyData::TyBase(TyBase::Prim(..)) => None, + TyData::ConstTy(const_ty) => match const_ty.data(db) { + ConstTyData::TyVar(..) => None, + ConstTyData::TyParam(ty_param, _) => Some(ty_param.scope(db)), + ConstTyData::Evaluated(..) => None, + ConstTyData::UnEvaluated(body) => Some(body.scope()), + }, + + TyData::Never | TyData::Invalid(_) | TyData::TyVar(_) => None, + TyData::TyApp(..) => unreachable!(), + } + } + + /// Emit diagnostics for the type if the type contains invalid types. 
+ pub(super) fn emit_diag( + self, + db: &'db dyn HirAnalysisDb, + span: DynLazySpan<'db>, + ) -> Option> { + struct EmitDiagVisitor<'db> { + db: &'db dyn HirAnalysisDb, + diag: Option>, + span: DynLazySpan<'db>, + } + + impl<'db> TyVisitor<'db> for EmitDiagVisitor<'db> { + fn db(&self) -> &'db dyn HirAnalysisDb { + self.db + } + + fn visit_invalid(&mut self, cause: &InvalidCause<'db>) { + let db = self.db; + + let span = self.span.clone(); + let diag = match cause { + InvalidCause::NotFullyApplied => { + TyLowerDiag::expected_star_kind_ty(span).into() + } + + InvalidCause::KindMismatch { expected, given } => { + TyLowerDiag::invalid_type_arg_kind(db, span, expected.clone(), *given) + .into() + } + + InvalidCause::TooManyGenericArgs { expected, given } => { + TyLowerDiag::TooManyGenericArgs { + span, + expected: *expected, + given: *given, + } + .into() + } + + InvalidCause::InvalidConstParamTy => { + TyLowerDiag::invalid_const_param_ty(span).into() + } + + InvalidCause::RecursiveConstParamTy => { + TyLowerDiag::RecursiveConstParamTy(span).into() + } + + InvalidCause::ConstTyMismatch { expected, given } => { + TyLowerDiag::const_ty_mismatch(db, span, *expected, *given).into() + } + + InvalidCause::ConstTyExpected { expected } => { + TyLowerDiag::const_ty_expected(db, span, *expected).into() + } + + InvalidCause::NormalTypeExpected { given } => { + TyLowerDiag::normal_type_expected(db, span, *given).into() + } + + InvalidCause::UnboundTypeAliasParam { + alias, + n_given_args: n_given_arg, + } => TyLowerDiag::unbound_type_alias_param(span, *alias, *n_given_arg).into(), + + InvalidCause::AssocTy => TyLowerDiag::assoc_ty(span).into(), + + InvalidCause::InvalidConstTyExpr { body } => { + TyLowerDiag::InvalidConstTyExpr(body.lazy_span().into()).into() + } + + InvalidCause::Other => return, + }; + + self.diag.get_or_insert(diag); + } + } + + if !self.has_invalid(db) { + return None; + } + + let mut visitor = EmitDiagVisitor { + db, + diag: None, + span, + }; + + 
visitor.visit_ty(self); + visitor.diag + } + + pub(super) fn emit_wf_diag( + self, + db: &'db dyn HirAnalysisDb, + ingot: IngotId<'db>, + assumptions: PredicateListId<'db>, + span: DynLazySpan<'db>, + ) -> Option> { + if let WellFormedness::IllFormed { goal, subgoal } = + check_ty_wf(db, ingot, self, assumptions) + { + Some(TraitConstraintDiag::trait_bound_not_satisfied(db, span, goal, subgoal).into()) + } else { + None + } + } + + pub(super) fn ty_var( + db: &'db dyn HirAnalysisDb, + sort: TyVarSort, + kind: Kind, + key: InferenceKey<'db>, + ) -> Self { + Self::new(db, TyData::TyVar(TyVar { sort, kind, key })) + } + + pub(super) fn const_ty_var( + db: &'db dyn HirAnalysisDb, + ty: TyId<'db>, + key: InferenceKey<'db>, + ) -> Self { + let ty_var = TyVar { + sort: TyVarSort::General, + kind: ty.kind(db).clone(), + key, + }; + + let data = ConstTyData::TyVar(ty_var, ty); + Self::new(db, TyData::ConstTy(ConstTyId::new(db, data))) + } + + /// Perform type level application. + pub(crate) fn app(db: &'db dyn HirAnalysisDb, lhs: Self, rhs: Self) -> TyId<'db> { + let Some(applicable_ty) = lhs.applicable_ty(db) else { + return Self::invalid(db, InvalidCause::kind_mismatch(None, rhs)); + }; + + let rhs = rhs + .evaluate_const_ty(db, applicable_ty.const_ty) + .unwrap_or_else(|cause| Self::invalid(db, cause)); + + let applicable_kind = applicable_ty.kind; + if !applicable_kind.does_match(rhs.kind(db)) { + return Self::invalid(db, InvalidCause::kind_mismatch(Some(&applicable_kind), rhs)); + }; + + Self::new(db, TyData::TyApp(lhs, rhs)) + } + + /// Folds over a series of type applications from left to right. + /// + /// For example, given base type B and arg types [A1, A2, A3], + /// foldl would produce ((B A1) A2) A3). 
+ pub fn foldl(db: &'db dyn HirAnalysisDb, mut base: Self, args: &[Self]) -> Self { + for (i, arg) in args.iter().enumerate() { + if base.applicable_ty(db).is_some() { + base = Self::app(db, base, *arg); + } else { + return Self::invalid( + db, + InvalidCause::TooManyGenericArgs { + expected: i, + given: args.len(), + }, + ); + } + } + base + } + + /// Returns `true` if the type is a pointer or a pointer application. + pub(super) fn is_ptr(self, db: &dyn HirAnalysisDb) -> bool { + match self.data(db) { + TyData::TyBase(TyBase::Prim(PrimTy::Ptr)) => true, + TyData::TyApp(abs, _) => abs.is_ptr(db), + _ => false, + } + } + + /// Returns `true` if the type is an indirect wrapper type like a pointer or + /// reference(when we introduce it). + pub(super) fn is_indirect(self, db: &dyn HirAnalysisDb) -> bool { + // TODO: FiX here when reference type is introduced. + self.is_ptr(db) + } + + pub fn invalid(db: &'db dyn HirAnalysisDb, cause: InvalidCause<'db>) -> Self { + Self::new(db, TyData::Invalid(cause)) + } + + pub(crate) fn from_hir_prim_ty(db: &'db dyn HirAnalysisDb, hir_prim: HirPrimTy) -> Self { + Self::new(db, TyData::TyBase(hir_prim.into())) + } + + pub(super) fn const_ty_param(self, db: &'db dyn HirAnalysisDb) -> Option> { + if let TyData::ConstTy(const_ty) = self.data(db) { + Some(const_ty.ty(db)) + } else { + None + } + } + + pub(super) fn evaluate_const_ty( + self, + db: &'db dyn HirAnalysisDb, + expected_ty: Option>, + ) -> Result, InvalidCause<'db>> { + match (expected_ty, self.data(db)) { + (Some(expected_const_ty), TyData::ConstTy(const_ty)) => { + if expected_const_ty.has_invalid(db) { + Err(InvalidCause::Other) + } else { + let evaluated_const_ty = const_ty.evaluate(db, expected_const_ty.into()); + let evaluated_const_ty_ty = evaluated_const_ty.ty(db); + if let Some(cause) = evaluated_const_ty_ty.invalid_cause(db) { + Err(cause) + } else { + Ok(TyId::const_ty(db, evaluated_const_ty)) + } + } + } + + (Some(expected_const_ty), _) => { + if 
expected_const_ty.has_invalid(db) { + Err(InvalidCause::Other) + } else { + Err(InvalidCause::ConstTyExpected { + expected: expected_const_ty, + }) + } + } + + (None, TyData::ConstTy(const_ty)) => { + let evaluated_const_ty = const_ty.evaluate(db, None); + Err(InvalidCause::NormalTypeExpected { + given: TyId::const_ty(db, evaluated_const_ty), + }) + } + + (None, _) => Ok(self), + } + } + + /// Returns the property of the type that can be applied to the `self`. + pub fn applicable_ty(self, db: &'db dyn HirAnalysisDb) -> Option> { + let applicable_kind = match self.kind(db) { + Kind::Star => return None, + Kind::Abs(arg, _) => *arg.clone(), + Kind::Any => Kind::Any, + }; + + let (base, args) = self.decompose_ty_app(db); + let TyData::TyBase(base) = base.data(db) else { + return Some(ApplicableTyProp { + kind: applicable_kind.clone(), + const_ty: None, + }); + }; + + let const_ty = match base { + TyBase::Adt(adt_def) => { + let params = adt_def.params(db); + let param = params.get(args.len()).copied(); + param.and_then(|ty| ty.const_ty_ty(db)) + } + + TyBase::Func(func_def) => { + let params = func_def.params(db); + let param = params.get(args.len()).copied(); + param.and_then(|ty| ty.const_ty_ty(db)) + } + + TyBase::Prim(PrimTy::Array) => { + if args.len() == 1 { + Some(TyId::new(db, TyData::TyBase(TyBase::Prim(PrimTy::U256)))) + } else { + None + } + } + + TyBase::Prim(PrimTy::String) => { + if args.is_empty() { + Some(TyId::new(db, TyData::TyBase(TyBase::Prim(PrimTy::U256)))) + } else { + None + } + } + + _ => None, + }; + + Some(ApplicableTyProp { + kind: applicable_kind.clone(), + const_ty, + }) + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct ApplicableTyProp<'db> { + /// A kind of the applicable type. + pub kind: Kind, + /// An expected type of const type if the applicable type is a const type. 
+ pub const_ty: Option>, +} + +#[salsa::tracked(return_ref)] +pub fn ty_kind<'db>(db: &'db dyn HirAnalysisDb, ty: TyId<'db>) -> Kind { + ty.data(db).kind(db) +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum TyData<'db> { + /// Type variable. + TyVar(TyVar<'db>), + + /// Type Parameter. + TyParam(TyParam<'db>), + + // Type application, + // e.g., `Option` is represented as `TApp(TyConst(Option), TyConst(i32))`. + TyApp(TyId<'db>, TyId<'db>), + + /// A concrete type, e.g., `i32`, `u32`, `bool`, `String`, `Result` etc. + TyBase(TyBase<'db>), + + ConstTy(ConstTyId<'db>), + + /// A never(bottom) type. + Never, + + // Invalid type which means the type is ill-formed. + // This type can be unified with any other types. + // NOTE: For type soundness check in this level, we don't consider trait satisfiability. + Invalid(InvalidCause<'db>), +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum InvalidCause<'db> { + /// Type is not fully applied where it is required. + NotFullyApplied, + + /// Kind mismatch between two types. + KindMismatch { + expected: Option, + given: TyId<'db>, + }, + + TooManyGenericArgs { + expected: usize, + given: usize, + }, + + InvalidConstParamTy, + + RecursiveConstParamTy, + + /// The given type doesn't match the expected const type. + ConstTyMismatch { + expected: TyId<'db>, + given: TyId<'db>, + }, + + /// The given type is not a const type where it is required. + ConstTyExpected { + expected: TyId<'db>, + }, + + /// The given type is const type where it is *NOT* required. + NormalTypeExpected { + given: TyId<'db>, + }, + + /// Type alias parameter is not bound. + /// NOTE: In our type system, type alias is a macro, so we can't perform + /// partial application to type alias. + UnboundTypeAliasParam { + alias: HirTypeAlias<'db>, + n_given_args: usize, + }, + + /// Associated Type is not allowed at the moment. + AssocTy, + + // The given expression is not supported yet in the const type context. 
+ // TODO: Remove this error kind and introduce a new error kind for more specific cause when + // type inference is implemented. + InvalidConstTyExpr { + body: Body<'db>, + }, + + // TraitConstraintNotSat(PredicateId), + /// `Other` indicates the cause is already reported in other analysis + /// passes, e.g., parser or name resolution. + Other, +} + +impl<'db> InvalidCause<'db> { + pub(super) fn kind_mismatch(expected: Option<&Kind>, ty: TyId<'db>) -> Self { + Self::KindMismatch { + expected: expected.cloned(), + given: ty, + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum Kind { + /// Represents star kind, i.e., `*` kind. + Star, + + /// Represents higher kinded types. + /// e.g., + /// `* -> *`, `(* -> *) -> *` or `* -> (* -> *) -> *` + Abs(Box, Box), + + /// `Any` kind is set to the type iff the type is `Invalid`. + Any, +} + +impl Kind { + fn abs(lhs: Kind, rhs: Kind) -> Self { + Kind::Abs(Box::new(lhs), Box::new(rhs)) + } + + pub(super) fn does_match(&self, other: &Self) -> bool { + match (self, other) { + (Self::Star, Self::Star) => true, + (Self::Abs(lhs1, rhs1), Self::Abs(lhs2, rhs2)) => { + lhs1.does_match(lhs2) && rhs1.does_match(rhs2) + } + (Self::Any, _) => true, + (_, Self::Any) => true, + _ => false, + } + } +} + +impl fmt::Display for Kind { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::Star => write!(f, "*"), + Self::Abs(lhs, rhs) => write!(f, "({} -> {})", lhs, rhs), + Self::Any => write!(f, "Any"), + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct TyVar<'db> { + pub sort: TyVarSort, + pub kind: Kind, + pub(super) key: InferenceKey<'db>, +} + +impl std::cmp::PartialOrd for TyVar<'_> { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} +impl std::cmp::Ord for TyVar<'_> { + fn cmp(&self, other: &Self) -> std::cmp::Ordering { + if self == other { + return std::cmp::Ordering::Equal; + } + self.key.cmp(&other.key) + } +} + +/// Represents the 
sort of a type variable that indicates what type domain +/// can be unified with the type variable. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum TyVarSort { + /// Type variable that can be unified with any other types. + General, + + /// Type variable that can be unified with only string types that has at + /// least the given length. + String(usize), + + /// Type variable that can be unified with only integral types. + Integral, +} + +impl PartialOrd for TyVarSort { + fn partial_cmp(&self, other: &Self) -> Option { + match (self, other) { + (Self::General, Self::General) => Some(std::cmp::Ordering::Equal), + (Self::General, _) => Some(std::cmp::Ordering::Less), + (_, Self::General) => Some(std::cmp::Ordering::Greater), + (Self::String(n1), Self::String(n2)) => n1.partial_cmp(n2), + (Self::String(_), _) | (_, Self::String(_)) => None, + (Self::Integral, Self::Integral) => Some(std::cmp::Ordering::Equal), + } + } +} + +impl TyVar<'_> { + pub(super) fn pretty_print(&self) -> String { + match self.sort { + TyVarSort::General => ("_").to_string(), + TyVarSort::Integral => "{integer}".to_string(), + TyVarSort::String(n) => format!("String<{}>", n).to_string(), + } + } +} + +/// Type generics parameter. We also treat `Self` type in a trait definition as +/// a special type parameter. +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct TyParam<'db> { + pub name: IdentId<'db>, + // The index points to the lowered type parameter list, which means that the idx doesn't + // correspond to the index of the type parameter in the original source code. + // E.g., + // ```fe + // impl Foo { + // fn foo(v: V) {} + // ``` + // The `foo`'s type parameter list is lowered to [`T`, `U`, `V`], so the index of `V` is 2. 
+ pub idx: usize, + pub kind: Kind, + pub is_trait_self: bool, + pub owner: ScopeId<'db>, +} + +impl<'db> TyParam<'db> { + pub(super) fn pretty_print(&self, db: &dyn HirAnalysisDb) -> String { + self.name.data(db.as_hir_db()).to_string() + } + + pub(super) fn normal_param( + name: IdentId<'db>, + idx: usize, + kind: Kind, + scope: ScopeId<'db>, + ) -> Self { + Self { + name, + idx, + kind, + is_trait_self: false, + owner: scope, + } + } + + pub(super) fn trait_self(db: &'db dyn HirAnalysisDb, kind: Kind, scope: ScopeId<'db>) -> Self { + Self { + name: IdentId::make_self_ty(db.as_hir_db()), + idx: 0, + kind, + is_trait_self: true, + owner: scope, + } + } + + pub fn original_idx(&self, db: &'db dyn HirAnalysisDb) -> usize { + let owner = GenericParamOwnerId::from_item_opt(db, self.owner.item()).unwrap(); + let param_set = collect_generic_params(db, owner); + let offset = param_set.offset_to_explicit_params_position(db); + + // TyParam.idx includes implicit params, subtract offset to get original idx + self.idx - offset + } + + pub fn scope(&self, db: &'db dyn HirAnalysisDb) -> ScopeId<'db> { + if self.is_trait_self { + self.owner + } else { + ScopeId::GenericParam(self.owner.item(), self.original_idx(db)) + } + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, derive_more::From)] +pub enum TyBase<'db> { + Prim(PrimTy), + Adt(AdtDef<'db>), + Func(FuncDef<'db>), +} + +impl<'db> TyBase<'db> { + pub fn is_integral(self) -> bool { + match self { + Self::Prim(prim) => prim.is_integral(), + _ => false, + } + } + + pub fn is_bool(self) -> bool { + match self { + Self::Prim(prim) => prim.is_bool(), + _ => false, + } + } + + pub(super) fn tuple(n: usize) -> Self { + Self::Prim(PrimTy::Tuple(n)) + } + + pub(super) fn bool() -> Self { + Self::Prim(PrimTy::Bool) + } + + fn pretty_print(&self, db: &dyn HirAnalysisDb) -> String { + match self { + Self::Prim(prim) => match prim { + PrimTy::Bool => "bool", + PrimTy::U8 => "u8", + PrimTy::U16 => "u16", + PrimTy::U32 => "u32", 
+ PrimTy::U64 => "u64", + PrimTy::U128 => "u128", + PrimTy::U256 => "u256", + PrimTy::Usize => "usize", + PrimTy::I8 => "i8", + PrimTy::I16 => "i16", + PrimTy::I32 => "i32", + PrimTy::I64 => "i64", + PrimTy::I128 => "i128", + PrimTy::I256 => "i256", + PrimTy::Isize => "isize", + PrimTy::String => "String", + PrimTy::Array => "[]", + PrimTy::Tuple(_) => "()", + PrimTy::Ptr => "*", + } + .to_string(), + + Self::Adt(adt) => adt.name(db).data(db.as_hir_db()).to_string(), + + Self::Func(func) => format!("fn {}", func.name(db).data(db.as_hir_db())), + } + } + + pub(super) fn adt(self) -> Option> { + match self { + Self::Adt(adt) => Some(adt), + _ => None, + } + } +} + +impl From for TyBase<'_> { + fn from(hir_prim: HirPrimTy) -> Self { + match hir_prim { + HirPrimTy::Bool => Self::Prim(PrimTy::Bool), + + HirPrimTy::Int(int_ty) => match int_ty { + HirIntTy::I8 => Self::Prim(PrimTy::I8), + HirIntTy::I16 => Self::Prim(PrimTy::I16), + HirIntTy::I32 => Self::Prim(PrimTy::I32), + HirIntTy::I64 => Self::Prim(PrimTy::I64), + HirIntTy::I128 => Self::Prim(PrimTy::I128), + HirIntTy::I256 => Self::Prim(PrimTy::I256), + HirIntTy::Isize => Self::Prim(PrimTy::Isize), + }, + + HirPrimTy::Uint(uint_ty) => match uint_ty { + HirUintTy::U8 => Self::Prim(PrimTy::U8), + HirUintTy::U16 => Self::Prim(PrimTy::U16), + HirUintTy::U32 => Self::Prim(PrimTy::U32), + HirUintTy::U64 => Self::Prim(PrimTy::U64), + HirUintTy::U128 => Self::Prim(PrimTy::U128), + HirUintTy::U256 => Self::Prim(PrimTy::U256), + HirUintTy::Usize => Self::Prim(PrimTy::Usize), + }, + + HirPrimTy::String => Self::Prim(PrimTy::String), + } + } +} + +#[derive(Debug, Clone, PartialEq, Copy, Eq, Hash)] +pub enum PrimTy { + Bool, + U8, + U16, + U32, + U64, + U128, + U256, + Usize, + I8, + I16, + I32, + I64, + I128, + I256, + Isize, + String, + Array, + Tuple(usize), + Ptr, +} + +impl PrimTy { + pub fn is_integral(self) -> bool { + matches!( + self, + Self::U8 + | Self::U16 + | Self::U32 + | Self::U64 + | Self::U128 + | Self::U256 + | 
Self::Usize + | Self::I8 + | Self::I16 + | Self::I32 + | Self::I64 + | Self::I128 + | Self::I256 + | Self::Isize + ) + } + + pub fn is_bool(self) -> bool { + matches!(self, Self::Bool) + } +} + +pub(super) trait HasKind { + fn kind(&self, db: &dyn HirAnalysisDb) -> Kind; +} + +impl HasKind for TyData<'_> { + fn kind(&self, db: &dyn HirAnalysisDb) -> Kind { + match self { + TyData::TyVar(ty_var) => ty_var.kind(db), + TyData::TyParam(ty_param) => ty_param.kind.clone(), + TyData::TyBase(ty_const) => ty_const.kind(db), + TyData::TyApp(abs, _) => match abs.kind(db) { + // `TyId::app` method handles the kind mismatch, so we don't need to verify it again + // here. + Kind::Abs(_, ret) => ret.as_ref().clone(), + _ => Kind::Any, + }, + + TyData::ConstTy(const_ty) => const_ty.ty(db).kind(db).clone(), + + TyData::Never => Kind::Any, + + TyData::Invalid(_) => Kind::Any, + } + } +} + +impl HasKind for TyVar<'_> { + fn kind(&self, _db: &dyn HirAnalysisDb) -> Kind { + self.kind.clone() + } +} + +impl HasKind for TyBase<'_> { + fn kind(&self, db: &dyn HirAnalysisDb) -> Kind { + match self { + TyBase::Prim(prim) => prim.kind(db), + TyBase::Adt(adt) => adt.kind(db), + TyBase::Func(func) => func.kind(db), + } + } +} + +impl HasKind for PrimTy { + fn kind(&self, _: &dyn HirAnalysisDb) -> Kind { + match self { + Self::Array => (0..2).fold(Kind::Star, |acc, _| Kind::abs(Kind::Star, acc)), + Self::Tuple(n) => (0..*n).fold(Kind::Star, |acc, _| Kind::abs(Kind::Star, acc)), + Self::Ptr => Kind::abs(Kind::Star, Kind::Star), + Self::String => Kind::abs(Kind::Star, Kind::Star), + _ => Kind::Star, + } + } +} + +impl HasKind for AdtDef<'_> { + fn kind(&self, db: &dyn HirAnalysisDb) -> Kind { + let mut kind = Kind::Star; + for param in self.params(db).iter().rev() { + kind = Kind::abs(ty_kind(db, *param).clone(), kind); + } + + kind + } +} + +impl HasKind for FuncDef<'_> { + fn kind(&self, db: &dyn HirAnalysisDb) -> Kind { + let mut kind = Kind::Star; + for param in self.params(db).iter().rev() { 
+ kind = Kind::abs(ty_kind(db, *param).clone(), kind); + } + + kind + } +} + +pub(crate) fn collect_variables<'db, V>( + db: &'db dyn HirAnalysisDb, + visitable: &V, +) -> IndexSet> +where + V: TyVisitable<'db>, +{ + struct TyVarCollector<'db> { + db: &'db dyn HirAnalysisDb, + vars: IndexSet>, + } + + impl<'db> TyVisitor<'db> for TyVarCollector<'db> { + fn db(&self) -> &'db dyn HirAnalysisDb { + self.db + } + + fn visit_var(&mut self, var: &TyVar<'db>) { + self.vars.insert(var.clone()); + } + } + let mut collector = TyVarCollector { + db, + vars: IndexSet::default(), + }; + + visitable.visit_with(&mut collector); + + collector.vars +} + +pub(crate) fn inference_keys<'db, V>( + db: &'db dyn HirAnalysisDb, + visitable: &V, +) -> FxHashSet> +where + V: TyVisitable<'db>, +{ + struct FreeInferenceKeyCollector<'db> { + db: &'db dyn HirAnalysisDb, + keys: FxHashSet>, + } + + impl<'db> TyVisitor<'db> for FreeInferenceKeyCollector<'db> { + fn db(&self) -> &'db dyn HirAnalysisDb { + self.db + } + + fn visit_var(&mut self, var: &TyVar<'db>) { + self.keys.insert(var.key); + } + } + + let mut collector = FreeInferenceKeyCollector { + db, + keys: FxHashSet::default(), + }; + + visitable.visit_with(&mut collector); + collector.keys +} + +#[salsa::tracked(return_ref)] +pub(crate) fn pretty_print_ty<'db>(db: &'db dyn HirAnalysisDb, ty: TyId<'db>) -> String { + match ty.data(db) { + TyData::TyVar(var) => var.pretty_print(), + TyData::TyParam(param) => param.pretty_print(db), + TyData::TyApp(_, _) => pretty_print_ty_app(db, ty), + TyData::TyBase(ty_con) => ty_con.pretty_print(db), + TyData::ConstTy(const_ty) => const_ty.pretty_print(db), + TyData::Never => "!".to_string(), + TyData::Invalid(..) 
=> "".to_string(), + } +} + +fn pretty_print_ty_app<'db>(db: &'db dyn HirAnalysisDb, ty: TyId<'db>) -> String { + use PrimTy::*; + use TyBase::*; + + let (base, args) = decompose_ty_app(db, ty); + match base.data(db) { + TyData::TyBase(Prim(Array)) => { + let elem_ty = args[0].pretty_print(db); + let len = args[1].pretty_print(db); + format!("[{}; {}]", elem_ty, len) + } + + TyData::TyBase(Prim(Tuple(_))) => { + let mut args = args.iter(); + let mut s = ("(").to_string(); + if let Some(first) = args.next() { + s.push_str(first.pretty_print(db)); + for arg in args { + s.push_str(", "); + s.push_str(arg.pretty_print(db)); + } + } + s.push(')'); + s + } + + _ => { + let mut args = args.iter(); + let mut s = (base.pretty_print(db)).to_string(); + if let Some(first) = args.next() { + s.push('<'); + s.push_str(first.pretty_print(db)); + for arg in args { + s.push_str(", "); + s.push_str(arg.pretty_print(db)); + } + s.push('>'); + } + s + } + } +} + +/// Decompose type application into the base type and type arguments. +/// e.g., `App(App(T, U), App(V, W))` -> `(T, [U, App(V, W)])` +#[salsa::tracked(return_ref)] +pub(crate) fn decompose_ty_app<'db>( + db: &'db dyn HirAnalysisDb, + ty: TyId<'db>, +) -> (TyId<'db>, Vec>) { + struct TyAppDecomposer<'db> { + db: &'db dyn HirAnalysisDb, + base: Option>, + args: Vec>, + } + + impl<'db> TyVisitor<'db> for TyAppDecomposer<'db> { + fn db(&self) -> &'db dyn HirAnalysisDb { + self.db + } + + fn visit_ty(&mut self, ty: TyId<'db>) { + let db = self.db; + + match ty.data(db) { + TyData::TyApp(lhs, rhs) => { + self.visit_ty(*lhs); + self.args.push(*rhs); + } + _ => self.base = Some(ty), + } + } + } + + let mut decomposer = TyAppDecomposer { + db, + base: None, + args: Vec::new(), + }; + + ty.visit_with(&mut decomposer); + (decomposer.base.unwrap(), decomposer.args) +} + +bitflags! 
{ + #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] + pub struct TyFlags: u32 { + const HAS_INVALID = 0b0000_0001; + const HAS_VAR = 0b0000_0010; + const HAS_PARAM = 0b0000_0100; + } +} + +#[salsa::tracked] +pub(crate) fn ty_flags<'db>(db: &'db dyn HirAnalysisDb, ty: TyId<'db>) -> TyFlags { + struct Collector<'db> { + db: &'db dyn HirAnalysisDb, + flags: TyFlags, + } + + impl<'db> TyVisitor<'db> for Collector<'db> { + fn db(&self) -> &'db dyn HirAnalysisDb { + self.db + } + + fn visit_var(&mut self, _: &TyVar) { + self.flags.insert(TyFlags::HAS_VAR); + } + + fn visit_param(&mut self, _: &TyParam) { + self.flags.insert(TyFlags::HAS_PARAM) + } + + fn visit_invalid(&mut self, _: &InvalidCause) { + self.flags.insert(TyFlags::HAS_INVALID) + } + } + + let mut collector = Collector { + db, + flags: TyFlags::empty(), + }; + + ty.visit_with(&mut collector); + collector.flags +} diff --git a/crates/hir-analysis/src/ty/ty_lower.rs b/crates/hir-analysis/src/ty/ty_lower.rs new file mode 100644 index 0000000000..7cad904275 --- /dev/null +++ b/crates/hir-analysis/src/ty/ty_lower.rs @@ -0,0 +1,610 @@ +use hir::hir_def::{ + scope_graph::ScopeId, GenericArg, GenericArgListId, GenericParam, GenericParamListId, + GenericParamOwner, IdentId, ItemKind, KindBound as HirKindBound, Partial, PathId, TupleTypeId, + TypeAlias as HirTypeAlias, TypeBound, TypeId as HirTyId, TypeKind as HirTyKind, WhereClauseId, +}; +use salsa::plumbing::FromId; + +use super::{ + const_ty::{ConstTyData, ConstTyId}, + ty_def::{InvalidCause, Kind, TyData, TyId, TyParam}, +}; +use crate::{ + name_resolution::{resolve_ident_to_bucket, resolve_path, NameDomain, NameResKind, PathRes}, + ty::binder::Binder, + HirAnalysisDb, +}; + +/// Lowers the given HirTy to `TyId`. 
+#[salsa::tracked] +pub fn lower_hir_ty<'db>( + db: &'db dyn HirAnalysisDb, + ty: HirTyId<'db>, + scope: ScopeId<'db>, +) -> TyId<'db> { + TyBuilder::new(db, scope).lower_ty(ty) +} + +/// Collects the generic parameters of the given generic parameter owner. +#[salsa::tracked] +pub(crate) fn collect_generic_params<'db>( + db: &'db dyn HirAnalysisDb, + owner: GenericParamOwnerId<'db>, +) -> GenericParamTypeSet<'db> { + GenericParamCollector::new(db, owner.data(db)).finalize() +} + +/// Lowers the given type alias to [`TyAlias`]. +#[salsa::tracked(return_ref, recovery_fn = recover_lower_type_alias_cycle)] +pub(crate) fn lower_type_alias<'db>( + db: &'db dyn HirAnalysisDb, + alias: HirTypeAlias<'db>, +) -> Result, AliasCycle<'db>> { + let param_set = collect_generic_params(db, GenericParamOwnerId::new(db, alias.into())); + + let Some(hir_ty) = alias.ty(db.as_hir_db()).to_opt() else { + return Ok(TyAlias { + alias, + alias_to: Binder::bind(TyId::invalid(db, InvalidCause::Other)), + param_set, + }); + }; + + let alias_to = lower_hir_ty(db, hir_ty, alias.scope()); + let alias_to = Binder::bind(if alias_to.has_invalid(db) { + TyId::invalid(db, InvalidCause::Other) + } else { + alias_to + }); + Ok(TyAlias { + alias, + alias_to, + param_set, + }) +} + +fn recover_lower_type_alias_cycle<'db>( + db: &'db dyn HirAnalysisDb, + cycle: &salsa::Cycle, + _alias: HirTypeAlias<'db>, +) -> Result, AliasCycle<'db>> { + let alias_cycle = cycle + .participant_keys() + .filter_map(|key| { + // TODO Salsa 3.0: add method to lookup IngredientIndex for type + if db.ingredient_debug_name(key.ingredient_index()) == "lower_type_alias" { + let id = key.key_index(); + Some(HirTypeAlias::from_id(id)) + } else { + None + } + }) + .collect(); + + Err(AliasCycle(alias_cycle)) +} + +#[doc(hidden)] +#[salsa::tracked(return_ref)] +pub(crate) fn evaluate_params_precursor<'db>( + db: &'db dyn HirAnalysisDb, + set: GenericParamTypeSet<'db>, +) -> Vec> { + set.params_precursor(db) + .iter() + .enumerate() + 
.map(|(i, p)| p.evaluate(db, set.scope(db), i)) + .collect() +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub(crate) struct AliasCycle<'db>(Vec>); + +impl<'db> AliasCycle<'db> { + pub(super) fn representative(&self) -> HirTypeAlias<'db> { + *self.0.first().unwrap() + } + + pub(super) fn participants(&self) -> impl Iterator> + '_ { + self.0.iter().skip(1).copied() + } +} + +/// Represents a lowered type alias. `TyAlias` itself isn't a type, but +/// can be instantiated to a `TyId` by substituting its type +/// parameters with actual types. +/// +/// NOTE: `TyAlias` can't become an alias to partial applied types, i.e., the +/// right hand side of the alias declaration must be a fully applied type. +#[derive(Debug, Clone, PartialEq, Eq)] +pub(crate) struct TyAlias<'db> { + alias: HirTypeAlias<'db>, + pub alias_to: Binder>, + param_set: GenericParamTypeSet<'db>, +} + +impl<'db> TyAlias<'db> { + pub fn params(&self, db: &'db dyn HirAnalysisDb) -> &'db [TyId<'db>] { + self.param_set.params(db) + } +} + +struct TyBuilder<'db> { + db: &'db dyn HirAnalysisDb, + scope: ScopeId<'db>, +} + +impl<'db> TyBuilder<'db> { + pub(super) fn new(db: &'db dyn HirAnalysisDb, scope: ScopeId<'db>) -> Self { + Self { db, scope } + } + + pub(super) fn lower_ty(&mut self, ty: HirTyId<'db>) -> TyId<'db> { + match ty.data(self.db.as_hir_db()) { + HirTyKind::Ptr(pointee) => self.lower_ptr(*pointee), + + HirTyKind::Path(path) => self.lower_path(*path), + + HirTyKind::SelfType(args) => self.lower_self_ty(*args), + + HirTyKind::Tuple(tuple_id) => self.lower_tuple(*tuple_id), + + HirTyKind::Array(hir_elem_ty, len) => { + let elem_ty = self.lower_opt_hir_ty(*hir_elem_ty); + let len_ty = ConstTyId::from_opt_body(self.db, *len); + let len_ty = TyId::const_ty(self.db, len_ty); + let array = TyId::array(self.db, elem_ty); + TyId::app(self.db, array, len_ty) + } + + HirTyKind::Never => TyId::never(self.db), + } + } + + pub(super) fn lower_path(&mut self, path: Partial>) -> TyId<'db> { + let 
Some(path) = path.to_opt() else { + return TyId::invalid(self.db, InvalidCause::Other); + }; + + match resolve_path(self.db, path, self.scope, false) { + Ok(PathRes::Ty(ty) | PathRes::Func(ty)) => ty, + // Other cases should be reported as errors by nameres + _ => TyId::invalid(self.db, InvalidCause::Other), + } + } + + pub(super) fn lower_const_ty_ty(&mut self, ty: HirTyId<'db>) -> TyId<'db> { + let hir_db = self.db.as_hir_db(); + let HirTyKind::Path(path) = ty.data(hir_db) else { + return TyId::invalid(self.db, InvalidCause::InvalidConstParamTy); + }; + + if !path + .to_opt() + .map(|p| p.generic_args(hir_db).is_empty(hir_db)) + .unwrap_or(true) + { + return TyId::invalid(self.db, InvalidCause::InvalidConstParamTy); + } + let ty = self.lower_path(*path); + + if ty.has_invalid(self.db) || ty.is_integral(self.db) || ty.is_bool(self.db) { + ty + } else { + TyId::invalid(self.db, InvalidCause::InvalidConstParamTy) + } + } + + pub(super) fn lower_self_ty(&mut self, args: GenericArgListId<'db>) -> TyId<'db> { + let path = PathId::self_ty(self.db.as_hir_db(), args); + match resolve_path(self.db, path, self.scope, false) { + Ok(PathRes::Ty(ty)) => ty, + Ok(_) => unreachable!(), + Err(_) => TyId::invalid(self.db, InvalidCause::Other), + } + } + + fn lower_ptr(&mut self, pointee: Partial>) -> TyId<'db> { + let pointee = self.lower_opt_hir_ty(pointee); + + let ptr = TyId::ptr(self.db); + TyId::app(self.db, ptr, pointee) + } + + fn lower_tuple(&mut self, tuple_id: TupleTypeId<'db>) -> TyId<'db> { + let elems = tuple_id.data(self.db.as_hir_db()); + let len = elems.len(); + let tuple = TyId::tuple(self.db, len); + elems.iter().fold(tuple, |acc, &elem| { + let elem_ty = self.lower_opt_hir_ty(elem); + if !elem_ty.has_star_kind(self.db) { + return TyId::invalid(self.db, InvalidCause::NotFullyApplied); + } + + TyId::app(self.db, acc, elem_ty) + }) + } + + fn lower_opt_hir_ty(&self, hir_ty: Partial>) -> TyId<'db> { + hir_ty + .to_opt() + .map(|hir_ty| lower_hir_ty(self.db, hir_ty, 
self.scope)) + .unwrap_or_else(|| TyId::invalid(self.db, InvalidCause::Other)) + } +} + +pub(super) fn lower_generic_arg<'db>( + db: &'db dyn HirAnalysisDb, + arg: &GenericArg<'db>, + scope: ScopeId<'db>, +) -> TyId<'db> { + match arg { + GenericArg::Type(ty_arg) => ty_arg + .ty + .to_opt() + .map(|ty| lower_hir_ty(db, ty, scope)) + .unwrap_or_else(|| TyId::invalid(db, InvalidCause::Other)), + + GenericArg::Const(const_arg) => { + let const_ty = ConstTyId::from_opt_body(db, const_arg.body); + TyId::const_ty(db, const_ty) + } + } +} + +pub(crate) fn lower_generic_arg_list<'db>( + db: &'db dyn HirAnalysisDb, + args: GenericArgListId<'db>, + scope: ScopeId<'db>, +) -> Vec> { + args.data(db.as_hir_db()) + .iter() + .map(|arg| lower_generic_arg(db, arg, scope)) + .collect() +} + +#[salsa::interned] +pub struct GenericParamTypeSet<'db> { + #[return_ref] + pub(crate) params_precursor: Vec>, + pub(crate) scope: ScopeId<'db>, + offset_to_explicit: usize, +} + +impl<'db> GenericParamTypeSet<'db> { + pub(crate) fn params(self, db: &'db dyn HirAnalysisDb) -> &'db [TyId<'db>] { + evaluate_params_precursor(db, self) + } + + pub(crate) fn explicit_params(self, db: &'db dyn HirAnalysisDb) -> &'db [TyId<'db>] { + let offset = self.offset_to_explicit(db); + &self.params(db)[offset..] 
+ } + + pub(crate) fn empty(db: &'db dyn HirAnalysisDb, scope: ScopeId<'db>) -> Self { + Self::new(db, Vec::new(), scope, 0) + } + + pub(crate) fn len(self, db: &dyn HirAnalysisDb) -> usize { + self.params_precursor(db).len() + } + + pub(crate) fn trait_self(&self, db: &'db dyn HirAnalysisDb) -> Option> { + let params = self.params_precursor(db); + let cand = params.first()?; + + if cand.is_trait_self() { + Some(cand.evaluate(db, self.scope(db), 0)) + } else { + None + } + } + + pub(super) fn offset_to_explicit_params_position(&self, db: &dyn HirAnalysisDb) -> usize { + self.offset_to_explicit(db) + } + + pub(crate) fn param_by_original_idx( + &self, + db: &'db dyn HirAnalysisDb, + original_idx: usize, + ) -> Option> { + let idx = self.offset_to_explicit(db) + original_idx; + self.params_precursor(db) + .get(idx) + .map(|p| p.evaluate(db, self.scope(db), idx)) + } +} + +struct GenericParamCollector<'db> { + db: &'db dyn HirAnalysisDb, + owner: GenericParamOwner<'db>, + params: Vec>, + offset_to_original: usize, +} + +impl<'db> GenericParamCollector<'db> { + fn new(db: &'db dyn HirAnalysisDb, owner: GenericParamOwner<'db>) -> Self { + let params = match owner { + GenericParamOwner::Trait(_) => { + vec![TyParamPrecursor::trait_self(db, None)] + } + + GenericParamOwner::Func(func) if func.is_associated_func(db.as_hir_db()) => { + let parent = owner.parent(db.as_hir_db()).unwrap(); + collect_generic_params(db, GenericParamOwnerId::new(db, parent)) + .params_precursor(db) + .to_vec() + } + + _ => vec![], + }; + + let offset_to_original = params.len(); + Self { + db, + owner, + params, + offset_to_original, + } + } + + fn collect_generic_params(&mut self) { + let hir_db = self.db.as_hir_db(); + let param_list = self.owner.params(hir_db); + for (idx, param) in param_list.data(hir_db).iter().enumerate() { + let idx = idx + self.offset_to_original; + + match param { + GenericParam::Type(param) => { + let name = param.name; + + let kind = 
self.extract_kind(param.bounds.as_slice()); + self.params + .push(TyParamPrecursor::ty_param(name, idx, kind)); + } + + GenericParam::Const(param) => { + let name = param.name; + let hir_ty = param.ty.to_opt(); + + self.params + .push(TyParamPrecursor::const_ty_param(name, idx, hir_ty)) + } + } + } + } + + fn collect_kind_in_where_clause(&mut self) { + let Some(where_clause_owner) = self.owner.where_clause_owner() else { + return; + }; + + let hir_db = self.db.as_hir_db(); + let where_clause = where_clause_owner.where_clause(hir_db); + for pred in where_clause.data(hir_db) { + match self.param_idx_from_ty(pred.ty.to_opt()) { + ParamLoc::Idx(idx) => { + if self.params[idx].kind.is_none() && !self.params[idx].is_const_ty { + self.params[idx].kind = self.extract_kind(pred.bounds.as_slice()); + } + } + + ParamLoc::TraitSelf => { + let kind = self.extract_kind(pred.bounds.as_slice()); + let trait_self = self.trait_self_ty_mut().unwrap(); + + if trait_self.kind.is_none() { + trait_self.kind = kind; + } + } + + ParamLoc::NonParam => {} + }; + } + } + + fn finalize(mut self) -> GenericParamTypeSet<'db> { + self.collect_generic_params(); + self.collect_kind_in_where_clause(); + + GenericParamTypeSet::new( + self.db, + self.params, + self.owner.scope(), + self.offset_to_original, + ) + } + + fn extract_kind(&self, bounds: &[TypeBound]) -> Option { + for bound in bounds { + if let TypeBound::Kind(Partial::Present(k)) = bound { + return Some(lower_kind(k)); + } + } + + None + } + + fn param_idx_from_ty(&self, ty: Option) -> ParamLoc { + let Some(ty) = ty else { + return ParamLoc::NonParam; + }; + + let hir_db = self.db.as_hir_db(); + + let path = match ty.data(self.db.as_hir_db()) { + HirTyKind::Path(Partial::Present(path)) => { + if path.is_bare_ident(hir_db) { + *path + } else { + return ParamLoc::NonParam; + } + } + + HirTyKind::SelfType(args) => { + return if matches!(self.owner.into(), ItemKind::Trait(_)) && args.is_empty(hir_db) { + ParamLoc::TraitSelf + } else { + 
ParamLoc::NonParam + }; + } + + _ => return ParamLoc::NonParam, + }; + + let bucket = resolve_ident_to_bucket(self.db, path, self.owner.scope()); + match bucket.pick(NameDomain::TYPE) { + Ok(res) => match res.kind { + NameResKind::Scope(ScopeId::GenericParam(scope, idx)) + if scope == self.owner.scope().item() => + { + ParamLoc::Idx(idx + self.offset_to_original) + } + _ => ParamLoc::NonParam, + }, + _ => ParamLoc::NonParam, + } + } + + fn trait_self_ty_mut(&mut self) -> Option<&mut TyParamPrecursor<'db>> { + let cand = self.params.get_mut(0)?; + cand.is_trait_self().then_some(cand) + } +} + +enum ParamLoc { + TraitSelf, + Idx(usize), + NonParam, +} + +#[doc(hidden)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct TyParamPrecursor<'db> { + name: Partial>, + original_idx: Option, + kind: Option, + const_ty_ty: Option>, + is_const_ty: bool, +} + +impl<'db> TyParamPrecursor<'db> { + fn evaluate( + &self, + db: &'db dyn HirAnalysisDb, + scope: ScopeId<'db>, + lowered_idx: usize, + ) -> TyId<'db> { + let Partial::Present(name) = self.name else { + return TyId::invalid(db, InvalidCause::Other); + }; + + let kind = self.kind.clone().unwrap_or(Kind::Star); + + if self.original_idx.is_none() { + let param = TyParam::trait_self(db, kind, scope); + return TyId::new(db, TyData::TyParam(param)); + } + + let param = TyParam::normal_param(name, lowered_idx, kind, scope); + + if !self.is_const_ty { + return TyId::new(db, TyData::TyParam(param)); + } + + let const_ty_ty = match self.const_ty_ty { + Some(ty) => { + let mut ty_builder = TyBuilder::new(db, scope); + ty_builder.lower_const_ty_ty(ty) + } + + None => TyId::invalid(db, InvalidCause::Other), + }; + + let const_ty = ConstTyId::new(db, ConstTyData::TyParam(param, const_ty_ty)); + + TyId::new(db, TyData::ConstTy(const_ty)) + } + + fn ty_param(name: Partial>, idx: usize, kind: Option) -> Self { + Self { + name, + original_idx: idx.into(), + kind, + const_ty_ty: None, + is_const_ty: false, + } + } + + fn 
const_ty_param(name: Partial>, idx: usize, ty: Option>) -> Self { + Self { + name, + original_idx: idx.into(), + kind: None, + const_ty_ty: ty, + is_const_ty: true, + } + } + + fn trait_self(db: &'db dyn HirAnalysisDb, kind: Option) -> Self { + let name = Partial::Present(IdentId::make_self_ty(db.as_hir_db())); + Self { + name, + original_idx: None, + kind, + const_ty_ty: None, + is_const_ty: false, + } + } + + fn is_trait_self(&self) -> bool { + self.original_idx.is_none() + } +} + +pub(super) fn lower_kind(kind: &HirKindBound) -> Kind { + match kind { + HirKindBound::Mono => Kind::Star, + HirKindBound::Abs(lhs, rhs) => match (lhs, rhs) { + (Partial::Present(lhs), Partial::Present(rhs)) => { + Kind::Abs(Box::new(lower_kind(lhs)), Box::new(lower_kind(rhs))) + } + (Partial::Present(lhs), Partial::Absent) => { + Kind::Abs(Box::new(lower_kind(lhs)), Box::new(Kind::Any)) + } + (Partial::Absent, Partial::Present(rhs)) => { + Kind::Abs(Box::new(Kind::Any), Box::new(lower_kind(rhs))) + } + (Partial::Absent, Partial::Absent) => { + Kind::Abs(Box::new(Kind::Any), Box::new(Kind::Any)) + } + }, + } +} + +#[salsa::interned] +pub(crate) struct GenericParamOwnerId<'db> { + pub(crate) data: GenericParamOwner<'db>, +} + +impl<'db> GenericParamOwnerId<'db> { + pub(crate) fn scope(self, db: &'db dyn HirAnalysisDb) -> ScopeId<'db> { + self.data(db).scope() + } + + pub(crate) fn where_clause(self, db: &'db dyn HirAnalysisDb) -> Option> { + self.data(db) + .where_clause_owner() + .map(|owner| owner.where_clause(db.as_hir_db())) + } + + pub(crate) fn params(self, db: &'db dyn HirAnalysisDb) -> GenericParamListId<'db> { + self.data(db).params(db.as_hir_db()) + } + + pub(crate) fn from_item_opt(db: &'db dyn HirAnalysisDb, item: ItemKind<'db>) -> Option { + let owner = GenericParamOwner::from_item_opt(item)?; + Self::new(db, owner).into() + } +} diff --git a/crates/hir-analysis/src/ty/unify.rs b/crates/hir-analysis/src/ty/unify.rs new file mode 100644 index 0000000000..07098427a6 --- 
/dev/null +++ b/crates/hir-analysis/src/ty/unify.rs @@ -0,0 +1,538 @@ +//! This module contains the unification table for type inference and trait +//! satisfiability checking. + +use std::marker::PhantomData; + +use either::Either; +use ena::unify::{InPlace, UnifyKey, UnifyValue}; +use num_bigint::BigUint; + +use super::{ + binder::Binder, + fold::{TyFoldable, TyFolder}, + trait_def::{Implementor, TraitInstId}, + ty_def::{inference_keys, ApplicableTyProp, Kind, TyData, TyId, TyVar, TyVarSort}, +}; +use crate::{ + ty::const_ty::{ConstTyData, EvaluatedConstTy}, + HirAnalysisDb, +}; + +pub(crate) type UnificationTable<'db> = UnificationTableBase<'db, InPlace>>; + +/// This table should only be used in the trait resolution where the performance +/// of `clone` is the critical. This table provides the very cheap clone +/// operation at the cost of update operations. +/// +/// [`UnificationTable`] is probably the one that you need to use for other +/// components. +pub(crate) type PersistentUnificationTable<'db> = + UnificationTableBase<'db, ena::unify::Persistent>>; + +pub type Snapshot = ena::unify::Snapshot; +pub type UnificationResult = Result<(), UnificationError>; + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum UnificationError { + OccursCheckFailed, + TypeMismatch, +} + +pub(crate) trait UnificationStore<'db>: + Default + + ena::unify::UnificationStoreBase, Value = InferenceValue<'db>> + + ena::unify::UnificationStore + + ena::unify::UnificationStoreMut +{ +} + +impl<'db, U> UnificationStore<'db> for U where + U: Default + + ena::unify::UnificationStoreBase, Value = InferenceValue<'db>> + + ena::unify::UnificationStoreBase + + ena::unify::UnificationStore + + ena::unify::UnificationStoreMut +{ +} + +#[derive(Clone)] +pub(crate) struct UnificationTableBase<'db, U> +where + U: ena::unify::UnificationStoreBase, +{ + pub db: &'db dyn HirAnalysisDb, + table: ena::unify::UnificationTable, +} + +impl<'db, U> UnificationTableBase<'db, U> +where + U: 
UnificationStore<'db>, +{ + pub fn new(db: &'db dyn HirAnalysisDb) -> Self { + Self { + db, + table: ena::unify::UnificationTable::new(), + } + } + + /// Returns the number of the created keys. + pub fn len(&self) -> usize { + self.table.len() + } + + pub fn is_empty(&self) -> bool { + self.len() == 0 + } + + pub fn rollback_to(&mut self, snapshot: Snapshot) { + self.table.rollback_to(snapshot); + } + + pub fn snapshot(&mut self) -> Snapshot { + self.table.snapshot() + } + + pub fn unify(&mut self, lhs: T, rhs: T) -> UnificationResult + where + T: Unifiable<'db>, + { + let snapshot = self.snapshot(); + match lhs.unify(self, rhs) { + Ok(()) => { + self.table.commit(snapshot); + Ok(()) + } + Err(err) => { + self.rollback_to(snapshot); + Err(err) + } + } + } + + /// Returns `Ok()` if the two types were unified, otherwise returns an + /// error. This method doesn't roll back the unification table. Please + /// refer to `unify`[Self::unify] if you need to roll back the table + /// automatically when unification fails. 
+ fn unify_ty(&mut self, ty1: TyId<'db>, ty2: TyId<'db>) -> UnificationResult { + if !ty1.kind(self.db).does_match(ty2.kind(self.db)) { + return Err(UnificationError::TypeMismatch); + } + + let ty1 = ty1.fold_with(self); + let ty2 = ty2.fold_with(self); + + match (ty1.data(self.db), ty2.data(self.db)) { + (TyData::TyVar(_), TyData::TyVar(_)) => self.unify_var_var(ty1, ty2), + + (TyData::TyVar(var), _) => self.unify_var_value(var, ty2), + + (_, TyData::TyVar(var)) => self.unify_var_value(var, ty1), + + (TyData::TyApp(ty1_1, ty1_2), TyData::TyApp(ty2_1, ty2_2)) => { + self.unify_ty(*ty1_1, *ty2_1)?; + self.unify_ty(*ty1_2, *ty2_2) + } + + (TyData::TyParam(_), TyData::TyParam(_)) | (TyData::TyBase(_), TyData::TyBase(_)) => { + if ty1 == ty2 { + Ok(()) + } else { + Err(UnificationError::TypeMismatch) + } + } + + (TyData::Invalid(_), _) + | (_, TyData::Invalid(_)) + | (TyData::Never, _) + | (_, TyData::Never) => Ok(()), + + (TyData::ConstTy(const_ty1), TyData::ConstTy(const_ty2)) => { + self.unify_ty(const_ty1.ty(self.db), const_ty2.ty(self.db))?; + + match (const_ty1.data(self.db), const_ty2.data(self.db)) { + (ConstTyData::TyVar(..), ConstTyData::TyVar(..)) => { + self.unify_var_var(ty1, ty2) + } + + (ConstTyData::TyVar(var, _), _) => self.unify_var_value(var, ty2), + + (_, ConstTyData::TyVar(var, _)) => self.unify_var_value(var, ty1), + + (ConstTyData::TyParam(..), ConstTyData::TyParam(..)) + | (ConstTyData::Evaluated(..), ConstTyData::Evaluated(..)) => { + if const_ty1 == const_ty2 { + Ok(()) + } else { + Err(UnificationError::TypeMismatch) + } + } + + _ => Err(UnificationError::TypeMismatch), + } + } + + _ => Err(UnificationError::TypeMismatch), + } + } + + pub fn new_var(&mut self, sort: TyVarSort, kind: &Kind) -> TyId<'db> { + let key = self.new_key(kind, sort); + TyId::ty_var(self.db, sort, kind.clone(), key) + } + + pub(super) fn new_var_from_param(&mut self, ty: TyId<'db>) -> TyId<'db> { + match ty.data(self.db) { + TyData::TyParam(param) => { + let sort = 
TyVarSort::General; + let key = self.new_key(¶m.kind, sort); + TyId::ty_var(self.db, sort, param.kind.clone(), key) + } + + TyData::ConstTy(const_ty) => { + if let ConstTyData::TyParam(_, ty) = const_ty.data(self.db) { + let key = self.new_key(ty.kind(self.db), TyVarSort::General); + TyId::const_ty_var(self.db, *ty, key) + } else { + panic!() + } + } + _ => panic!(), + } + } + + pub(super) fn new_var_for(&mut self, ty_prop: ApplicableTyProp<'db>) -> TyId<'db> { + let kind = ty_prop.kind; + let sort = TyVarSort::General; + let key = self.new_key(&kind, sort); + + match ty_prop.const_ty { + Some(const_ty) => TyId::const_ty_var(self.db, const_ty, key), + None => TyId::ty_var(self.db, TyVarSort::General, kind, key), + } + } + + pub fn instantiate_with_fresh_vars(&mut self, value: Binder) -> T + where + T: TyFoldable<'db>, + { + value.instantiate_with(self.db, |ty| self.new_var_from_param(ty)) + } + + pub fn instantiate_to_term(&mut self, mut ty: TyId<'db>) -> TyId<'db> { + if ty.has_invalid(self.db) { + return ty; + }; + + while let Some(prop) = ty.applicable_ty(self.db) { + let arg = self.new_var_for(prop); + ty = TyId::app(self.db, ty, arg); + } + + ty + } + + pub fn new_key(&mut self, kind: &Kind, sort: TyVarSort) -> InferenceKey<'db> { + self.table + .new_key(InferenceValue::Unbound(kind.clone(), sort)) + } + + fn probe_impl(&mut self, key: InferenceKey<'db>) -> Either, TyVar<'db>> { + let root_key = self.table.find(key); + match self.table.probe_value(key) { + InferenceValue::Bound(ty) => Either::Left(ty), + InferenceValue::Unbound(kind, sort) => Either::Right(TyVar { + key: root_key, + kind, + sort, + }), + } + } + + /// Try to unify two type variables. + /// + /// When the two variables are in the same sort, we can just unify them. + /// + /// When the two variables are *NOT* in the same sort, a type variable + /// that has a broader sort are narrowed down to the narrower one. + /// + /// NOTE: This method assumes that we have only two sorts: General and Int. 
+ fn unify_var_var(&mut self, ty_var1: TyId<'db>, ty_var2: TyId<'db>) -> UnificationResult { + let (var1, var2) = match (ty_var1.data(self.db), ty_var2.data(self.db)) { + (TyData::TyVar(var1), TyData::TyVar(var2)) => (var1, var2), + (TyData::ConstTy(const_ty1), TyData::ConstTy(const_ty2)) => { + match (const_ty1.data(self.db), const_ty2.data(self.db)) { + (ConstTyData::TyVar(var1, _), ConstTyData::TyVar(var2, _)) => (var1, var2), + _ => panic!(), + } + } + _ => panic!(), + }; + + match (var1.sort, var2.sort) { + (sort1, sort2) if sort1 == sort2 => self.table.unify_var_var(var1.key, var2.key), + + (TyVarSort::General, _) | (_, TyVarSort::General) => { + self.table.unify_var_var(var1.key, var2.key) + } + + (TyVarSort::String(_), TyVarSort::String(_)) => { + self.table.unify_var_var(var1.key, var2.key) + } + + (_, _) => Err(UnificationError::TypeMismatch), + } + } + + /// Try to unify a type variable to a type. + /// We perform the following checks: + /// 1. Occurrence check: The same type variable must not occur in the type. + /// 2. Universe check: The sort of the type variable must match the sort of + /// the type. 
    fn unify_var_value(&mut self, var: &TyVar<'db>, value: TyId<'db>) -> UnificationResult {
        // 1. Occurs check: binding `var` to a type that contains `var`
        //    itself would create an infinite type.
        if inference_keys(self.db, &value).contains(&var.key) {
            return Err(UnificationError::OccursCheckFailed);
        }

        // Invalid (already-errored) types are bound unconditionally,
        // bypassing the sort checks below -- presumably to avoid cascading
        // diagnostics; TODO confirm.
        if value.has_invalid(self.db) {
            return self
                .table
                .unify_var_value(var.key, InferenceValue::Bound(value));
        }

        // Work with the representative of `var`'s equivalence class.
        let root_key = self.table.find(var.key);
        let root_value = self.table.probe_value(root_key);
        let root_var = match root_value {
            InferenceValue::Unbound(kind, sort) => TyVar {
                key: root_key,
                sort,
                kind,
            },

            // Already bound: succeed only on an exact match.
            InferenceValue::Bound(ty) => {
                if ty == value {
                    return Ok(());
                } else {
                    return Err(UnificationError::TypeMismatch);
                }
            }
        };

        // 2. Universe (sort) check against the candidate type.
        match root_var.sort {
            // A general variable accepts any type.
            TyVarSort::General => self
                .table
                .unify_var_value(root_var.key, InferenceValue::Bound(value)),

            TyVarSort::Integral => {
                if value.is_integral(self.db) {
                    self.table
                        .unify_var_value(root_var.key, InferenceValue::Bound(value))
                } else if value.is_never(self.db) {
                    // `never` unifies with an integral var without binding it.
                    Ok(())
                } else {
                    Err(UnificationError::TypeMismatch)
                }
            }

            // A string-sort variable carries a minimum length `n_var`; the
            // candidate must be a string type applied to one const length
            // argument that is at least that long.
            TyVarSort::String(n_var) => {
                let (base, args) = value.decompose_ty_app(self.db);

                if base.is_never(self.db) {
                    return Ok(());
                }

                if !base.is_string(self.db) || args.len() != 1 {
                    return Err(UnificationError::TypeMismatch);
                }

                // A length argument that is not yet a concrete const value
                // is accepted without binding the variable.
                let TyData::ConstTy(const_ty) = args[0].data(self.db) else {
                    return Ok(());
                };

                let ConstTyData::Evaluated(EvaluatedConstTy::LitInt(n_value), _) =
                    const_ty.data(self.db)
                else {
                    return Ok(());
                };

                // Bind only when the concrete length satisfies the minimum.
                if &BigUint::from(n_var) <= n_value.data(self.db.as_hir_db()) {
                    self.table
                        .unify_var_value(root_var.key, InferenceValue::Bound(value))
                } else {
                    Err(UnificationError::TypeMismatch)
                }
            }
        }
    }
}

/// Key into the unification table: a plain index plus a lifetime marker
/// (`PhantomData`) tying keys to the database lifetime.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct InferenceKey<'db>(pub(super) u32, pub(super) PhantomData<&'db ()>);

/// Value stored for each equivalence class in the table.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum InferenceValue<'db> {
    /// The class has been resolved to a concrete type.
    Bound(TyId<'db>),
    /// Still a free variable with the given kind and sort.
    Unbound(Kind, TyVarSort),
}

// NOTE(review): `UnifyKey`/`UnifyValue` look like the `ena` crate's
// unification traits -- confirm against this file's imports (outside this
// chunk).
impl<'db> UnifyKey for InferenceKey<'db> {
    type Value = InferenceValue<'db>;

    fn index(&self) -> u32 {
        self.0
    }

    fn from_index(idx: u32) -> Self {
        Self(idx, Default::default())
    }

    fn tag() -> &'static str {
        "InferenceKey"
    }
}

impl UnifyValue for InferenceValue<'_> {
    type Error = UnificationError;

    // NOTE(review): the return type reads bare `Result` here; the original
    // presumably spells `Result<Self, Self::Error>` -- the angle-bracketed
    // generics appear to have been stripped in this copy of the patch.
    fn unify_values(v1: &Self, v2: &Self) -> Result {
        match (v1, v2) {
            // Two free variables: kinds must already match (asserted by the
            // caller's invariant), and the merged class keeps the more
            // specific (greater) sort.
            (InferenceValue::Unbound(k1, sort1), InferenceValue::Unbound(k2, sort2)) => {
                assert!(k1.does_match(k2));
                if sort1 < sort2 {
                    Ok(InferenceValue::Unbound(k2.clone(), *sort2))
                } else {
                    Ok(InferenceValue::Unbound(k1.clone(), *sort1))
                }
            }

            // Free + bound: the binding wins.
            (InferenceValue::Unbound(_, _), InferenceValue::Bound(ty))
            | (InferenceValue::Bound(ty), InferenceValue::Unbound(_, _)) => {
                Ok(InferenceValue::Bound(*ty))
            }

            // Bound + bound: only identical types merge.
            (InferenceValue::Bound(ty1), InferenceValue::Bound(ty2)) => {
                if ty1 == ty2 {
                    Ok(InferenceValue::Bound(*ty1))
                } else {
                    Err(UnificationError::TypeMismatch)
                }
            }
        }
    }
}

/// Entities that can be structurally unified against each other.
// NOTE(review): every `fn unify>(` below is missing its generic parameter
// list (likely `<U: UnificationStore<'db>>`, given the `table` parameter's
// type) -- stripped in this copy of the patch.
pub(crate) trait Unifiable<'db> {
    fn unify>(
        self,
        table: &mut UnificationTableBase<'db, U>,
        other: Self,
    ) -> UnificationResult;
}

impl<'db> Unifiable<'db> for TyId<'db> {
    fn unify>(
        self,
        table: &mut UnificationTableBase<'db, U>,
        other: Self,
    ) -> UnificationResult {
        table.unify_ty(self, other)
    }
}

impl<'db> Unifiable<'db> for TraitInstId<'db> {
    fn unify>(
        self,
        table: &mut UnificationTableBase<'db, U>,
        other: Self,
    ) -> UnificationResult {
        let db = table.db;
        // Trait instances unify only when they refer to the same trait
        // definition; their generic arguments are then unified pairwise.
        if self.def(db) != other.def(db) {
            return Err(UnificationError::TypeMismatch);
        }

        for (&self_arg, &other_arg) in self.args(db).iter().zip(other.args(db)) {
            table.unify_ty(self_arg, other_arg)?;
        }

        Ok(())
    }
}

impl<'db> Unifiable<'db> for Implementor<'db> {
    fn unify>(
        self,
        table: &mut UnificationTableBase<'db, U>,
        other: Self,
    ) -> UnificationResult {
        let db = table.db;
        // Implementors unify through the trait instances they implement.
        table.unify(self.trait_(db), other.trait_(db))
    }
}
impl<'db, U> TyFolder<'db> for UnificationTableBase<'db, U>
where
    U: UnificationStore<'db>,
{
    fn db(&self) -> &'db dyn HirAnalysisDb {
        self.db
    }

    /// Folding a type through the table substitutes every resolvable
    /// inference variable with its current binding.
    fn fold_ty(&mut self, ty: TyId<'db>) -> TyId<'db> {
        let mut resolver = TyVarResolver {
            table: self,
            var_stack: vec![],
        };

        ty.fold_with(&mut resolver)
    }
}

/// Folder that replaces inference variables with their bindings.
///
/// `var_stack` records the variables currently being expanded so a cyclic
/// binding cannot recurse forever: a variable already on the stack is left
/// untouched (see the `!self.var_stack.contains(..)` guards below).
struct TyVarResolver<'a, 'db, U>
where
    U: UnificationStore<'db>,
{
    table: &'a mut UnificationTableBase<'db, U>,
    // NOTE(review): the element type is missing here (`Vec>`); presumably
    // `Vec<InferenceKey<'db>>` given the `var.key` values pushed below --
    // the generics appear stripped in this copy of the patch.
    var_stack: Vec>,
}

impl<'db, U> TyFolder<'db> for TyVarResolver<'_, 'db, U>
where
    U: UnificationStore<'db>,
{
    fn db(&self) -> &'db dyn HirAnalysisDb {
        self.table.db
    }

    fn fold_ty(&mut self, ty: TyId<'db>) -> TyId<'db> {
        let db = self.table.db;
        // Probe the table for the variable's state: `Either::Left` is a
        // bound type that still needs recursive resolution; `Either::Right`
        // is a still-free root variable, re-interned with its root key.
        let (shallow_resolved, key) = match ty.data(db) {
            TyData::TyVar(var) if !self.var_stack.contains(&var.key) => {
                match self.table.probe_impl(var.key) {
                    Either::Left(ty) => (ty, var.key),
                    Either::Right(var) => return TyId::ty_var(db, var.sort, var.kind, var.key),
                }
            }

            TyData::ConstTy(cty) => match cty.data(db) {
                ConstTyData::TyVar(var, ty) if !self.var_stack.contains(&var.key) => {
                    match self.table.probe_impl(var.key) {
                        Either::Left(ty) => (ty, var.key),
                        Either::Right(var) => {
                            return TyId::const_ty_var(db, *ty, var.key);
                        }
                    }
                }
                _ => {
                    return ty.super_fold_with(self);
                }
            },
            // Non-variable types: fold their children instead.
            _ => {
                return ty.super_fold_with(self);
            }
        };

        // Recursively resolve the binding while `key` is masked against
        // re-expansion (cycle guard).
        self.var_stack.push(key);
        let resolved = shallow_resolved.fold_with(self);
        self.var_stack.pop();
        resolved
    }
}
diff --git a/crates/hir-analysis/src/ty/visitor.rs b/crates/hir-analysis/src/ty/visitor.rs
new file mode 100644
index 0000000000..6b2c935c0c
--- /dev/null
+++ b/crates/hir-analysis/src/ty/visitor.rs
@@ -0,0 +1,215 @@
use common::indexmap::IndexSet;

use super::{
    adt_def::AdtDef,
    const_ty::{ConstTyData, ConstTyId},
    func_def::FuncDef,
    trait_def::{Implementor, TraitInstId},
    trait_resolution::PredicateListId,
    ty_check::ExprProp,
    ty_def::{InvalidCause, PrimTy, TyBase,
        TyData, TyFlags, TyId, TyParam, TyVar},
};
use crate::HirAnalysisDb;

/// Something that can be traversed by a [`TyVisitor`].
// NOTE(review): `fn visit_with(&self, visitor: &mut V)` is missing its `<V>`
// generic parameter list throughout this file (the `where` clauses still
// name `V`) -- stripped in this copy of the patch.
pub trait TyVisitable<'db> {
    fn visit_with(&self, visitor: &mut V)
    where
        V: TyVisitor<'db>;
}

/// Read-only traversal over the structure of a type.
///
/// Every `visit_*` hook has a default implementation; override only the
/// cases you care about. The default `visit_ty` dispatches via [`walk_ty`].
pub trait TyVisitor<'db> {
    fn db(&self) -> &'db dyn HirAnalysisDb;

    fn visit_ty(&mut self, ty: TyId<'db>) {
        walk_ty(self, ty)
    }

    #[allow(unused_variables)]
    fn visit_var(&mut self, var: &TyVar<'db>) {}

    #[allow(unused_variables)]
    fn visit_param(&mut self, ty_param: &TyParam<'db>) {}

    #[allow(unused_variables)]
    fn visit_const_param(&mut self, ty_param: &TyParam<'db>, const_ty_ty: TyId<'db>) {}

    /// Type application: visits the applied type then the argument.
    fn visit_app(&mut self, abs: TyId<'db>, arg: TyId<'db>) {
        self.visit_ty(abs);
        self.visit_ty(arg);
    }

    #[allow(unused_variables)]
    fn visit_ty_base(&mut self, ty_base: &TyBase<'db>) {
        walk_ty_base(self, ty_base);
    }

    #[allow(unused_variables)]
    fn visit_invalid(&mut self, cause: &InvalidCause<'db>) {}

    #[allow(unused_variables)]
    fn visit_prim(&mut self, prim: &PrimTy) {}

    #[allow(unused_variables)]
    fn visit_adt(&mut self, adt: AdtDef<'db>) {}

    #[allow(unused_variables)]
    fn visit_func(&mut self, func: FuncDef<'db>) {}

    #[allow(unused_variables)]
    fn visit_const_ty(&mut self, const_ty: &ConstTyId<'db>) {
        walk_const_ty(self, const_ty)
    }
}

/// Dispatch one level of `ty`'s structure to the matching visitor hook.
/// `TyData::Never` has no dedicated hook and is skipped.
pub fn walk_ty<'db, V>(visitor: &mut V, ty: TyId<'db>)
where
    V: TyVisitor<'db> + ?Sized,
{
    match ty.data(visitor.db()) {
        TyData::TyVar(var) => visitor.visit_var(var),

        TyData::TyParam(param) => visitor.visit_param(param),

        TyData::TyApp(abs, arg) => visitor.visit_app(*abs, *arg),

        TyData::TyBase(ty_con) => visitor.visit_ty_base(ty_con),

        TyData::ConstTy(const_ty) => visitor.visit_const_ty(const_ty),

        TyData::Never => {}

        TyData::Invalid(cause) => visitor.visit_invalid(cause),
    }
}

/// Dispatch a base type (primitive, ADT, or function) to its hook.
pub fn walk_ty_base<'db, V>(visitor: &mut V, ty_con: &TyBase<'db>)
where
    V: TyVisitor<'db> + ?Sized,
{
    match ty_con {
        TyBase::Prim(prim) => visitor.visit_prim(prim),
        TyBase::Adt(adt) => visitor.visit_adt(*adt),
        TyBase::Func(func) => visitor.visit_func(*func),
    }
}

/// Walk a const type: first its type annotation, then its payload.
/// Evaluated/unevaluated const values have no hook and are skipped.
pub fn walk_const_ty<'db, V>(visitor: &mut V, const_ty: &ConstTyId<'db>)
where
    V: TyVisitor<'db> + ?Sized,
{
    let db = visitor.db();
    visitor.visit_ty(const_ty.ty(db));
    match &const_ty.data(db) {
        ConstTyData::TyVar(var, _) => visitor.visit_var(var),
        ConstTyData::TyParam(param, ty) => visitor.visit_const_param(param, *ty),
        ConstTyData::Evaluated(..) | ConstTyData::UnEvaluated(..) => {}
    }
}

impl<'db> TyVisitable<'db> for TyId<'db> {
    fn visit_with(&self, visitor: &mut V)
    where
        V: TyVisitor<'db>,
    {
        visitor.visit_ty(*self)
    }
}

// NOTE(review): the container impls below read `for Vec` / `for IndexSet`
// with no generic arguments (presumably `Vec<T>` / `IndexSet<T>`) --
// stripped in this copy of the patch.
impl<'db, T> TyVisitable<'db> for Vec
where
    T: TyVisitable<'db>,
{
    fn visit_with(&self, visitor: &mut V)
    where
        V: TyVisitor<'db>,
    {
        self.iter().for_each(|ty| ty.visit_with(visitor))
    }
}

impl<'db, T> TyVisitable<'db> for &[T]
where
    T: TyVisitable<'db>,
{
    fn visit_with(&self, visitor: &mut V)
    where
        V: TyVisitor<'db>,
    {
        self.iter().for_each(|ty| ty.visit_with(visitor))
    }
}

impl<'db, T> TyVisitable<'db> for IndexSet
where
    T: TyVisitable<'db>,
{
    fn visit_with(&self, visitor: &mut V)
    where
        V: TyVisitor<'db>,
    {
        self.iter().for_each(|ty| ty.visit_with(visitor))
    }
}

impl<'db> TyVisitable<'db> for TraitInstId<'db> {
    fn visit_with(&self, visitor: &mut V)
    where
        V: TyVisitor<'db>,
    {
        let db = visitor.db();
        // A trait instance is visited through its generic arguments.
        self.args(db).visit_with(visitor);
    }
}

impl<'db> TyVisitable<'db> for Implementor<'db> {
    fn visit_with(&self, visitor: &mut V)
    where
        V: TyVisitor<'db>,
    {
        let db = visitor.db();
        self.params(db).visit_with(visitor);
    }
}

impl<'db> TyVisitable<'db> for PredicateListId<'db> {
    fn visit_with(&self, visitor: &mut V)
    where
        V: TyVisitor<'db>,
    {
        self.list(visitor.db()).visit_with(visitor)
    }
}

impl<'db> TyVisitable<'db> for ExprProp<'db> {
    fn visit_with(&self, visitor: &mut V)
    where
        V: TyVisitor<'db>,
    {
        self.ty.visit_with(visitor)
    }
}

/// Union the [`TyFlags`] of every type reachable from `v`.
///
/// The local collector overrides `visit_ty` and does NOT call `walk_ty`, so
/// it reads each visited type's `flags` without descending into its
/// structure -- presumably `TyId::flags` is already aggregated over the
/// whole type; TODO confirm against `ty_def`.
pub fn collect_flags<'db, V: TyVisitable<'db>>(db: &'db dyn HirAnalysisDb, v: V) -> TyFlags {
    struct Collector<'db> {
        db: &'db dyn HirAnalysisDb,
        flags: TyFlags,
    }
    impl<'db> TyVisitor<'db> for Collector<'db> {
        fn db(&self) -> &'db dyn HirAnalysisDb {
            self.db
        }

        fn visit_ty(&mut self, ty: TyId) {
            let ty_flags = ty.flags(self.db);
            self.flags = self.flags.union(ty_flags);
        }
    }

    let mut collector = Collector {
        db,
        flags: TyFlags::empty(),
    };
    v.visit_with(&mut collector);

    collector.flags
}
diff --git a/crates/hir-analysis/test_files/constraints/specialized.fe b/crates/hir-analysis/test_files/constraints/specialized.fe
new file mode 100644
index 0000000000..fcf6fb3759
--- /dev/null
+++ b/crates/hir-analysis/test_files/constraints/specialized.fe
@@ -0,0 +1,31 @@
trait Trait {
    fn f(self) -> i32
}

struct S {
    t: T
}

struct S2 {
    s: S
}

impl Trait for S {
    fn f(self) -> i32 {
        self.t
    }
}

impl Trait for S2
where S: Trait {
    fn f(self) -> i32 {
        self.s.f()
    }
}

fn bar() {
    let t: i32 = 1
    let s = S { t }
    let s2 = S2 { s }
    let _ = s2.f()
}
diff --git a/crates/hir-analysis/test_files/def_analysis/self_constraints.fe b/crates/hir-analysis/test_files/def_analysis/self_constraints.fe
new file mode 100644
index 0000000000..ee8d73de1a
--- /dev/null
+++ b/crates/hir-analysis/test_files/def_analysis/self_constraints.fe
@@ -0,0 +1,14 @@
trait Ring {}
trait CommRing: Ring {}
trait AddCommGroup {}

trait Module
where
    Self: Ring,
    M: AddCommGroup
{}

impl Module for K
where K: CommRing,
    M: AddCommGroup
{}
\ No newline at end of file
diff --git a/crates/hir-analysis/test_files/early_path_resolution/alias_res.fe b/crates/hir-analysis/test_files/early_path_resolution/alias_res.fe
new file mode 100644
index 0000000000..22b44d9c17
--- /dev/null
+++ b/crates/hir-analysis/test_files/early_path_resolution/alias_res.fe
@@ -0,0 +1,10 @@
use foo::Bar as FooBar

struct Foo {
    x: FooBar,
    y:
foo::Bar +} + +mod foo { + pub struct Bar {} +} diff --git a/crates/hir-analysis/test_files/early_path_resolution/alias_res.snap b/crates/hir-analysis/test_files/early_path_resolution/alias_res.snap new file mode 100644 index 0000000000..ed5545735d --- /dev/null +++ b/crates/hir-analysis/test_files/early_path_resolution/alias_res.snap @@ -0,0 +1,16 @@ +--- +source: crates/hir-analysis/tests/early_path_resolution.rs +expression: res +input_file: crates/hir-analysis/test_files/early_path_resolution/alias_res.fe +--- +note: + ┌─ alias_res.fe:4:8 + │ +4 │ x: FooBar, + │ ^^^^^^ alias_res::foo::Bar + +note: + ┌─ alias_res.fe:5:13 + │ +5 │ y: foo::Bar + │ ^^^ alias_res::foo::Bar diff --git a/crates/hir-analysis/test_files/early_path_resolution/generic_param.fe b/crates/hir-analysis/test_files/early_path_resolution/generic_param.fe new file mode 100644 index 0000000000..b3c66bff2d --- /dev/null +++ b/crates/hir-analysis/test_files/early_path_resolution/generic_param.fe @@ -0,0 +1,15 @@ +trait InnerTrait { } + +trait TraitWithGenerics +where U: InnerTrait +{ +} + +pub struct MyS +where T: TraitWithGenerics, + U: InnerTrait +{ + x: T, + y: U, + +} \ No newline at end of file diff --git a/crates/hir-analysis/test_files/early_path_resolution/generic_param.snap b/crates/hir-analysis/test_files/early_path_resolution/generic_param.snap new file mode 100644 index 0000000000..db18858451 --- /dev/null +++ b/crates/hir-analysis/test_files/early_path_resolution/generic_param.snap @@ -0,0 +1,58 @@ +--- +source: crates/hir-analysis/tests/early_path_resolution.rs +expression: res +input_file: crates/hir-analysis/test_files/early_path_resolution/generic_param.fe +--- +note: + ┌─ generic_param.fe:4:7 + │ +4 │ where U: InnerTrait + │ ^ generic_param::TraitWithGenerics::U + +note: + ┌─ generic_param.fe:4:10 + │ +4 │ where U: InnerTrait + │ ^^^^^^^^^^ generic_param::InnerTrait + +note: + ┌─ generic_param.fe:9:7 + │ +9 │ where T: TraitWithGenerics, + │ ^ generic_param::MyS::T + +note: + ┌─ 
generic_param.fe:9:10 + │ +9 │ where T: TraitWithGenerics, + │ ^^^^^^^^^^^^^^^^^ generic_param::TraitWithGenerics + +note: + ┌─ generic_param.fe:9:28 + │ +9 │ where T: TraitWithGenerics, + │ ^ generic_param::MyS::U + +note: + ┌─ generic_param.fe:10:7 + │ +10 │ U: InnerTrait + │ ^ generic_param::MyS::U + +note: + ┌─ generic_param.fe:10:10 + │ +10 │ U: InnerTrait + │ ^^^^^^^^^^ generic_param::InnerTrait + +note: + ┌─ generic_param.fe:12:8 + │ +12 │ x: T, + │ ^ generic_param::MyS::T + +note: + ┌─ generic_param.fe:13:8 + │ +13 │ y: U, + │ ^ generic_param::MyS::U diff --git a/crates/hir-analysis/test_files/early_path_resolution/nested_block.fe b/crates/hir-analysis/test_files/early_path_resolution/nested_block.fe new file mode 100644 index 0000000000..399a2b6ef8 --- /dev/null +++ b/crates/hir-analysis/test_files/early_path_resolution/nested_block.fe @@ -0,0 +1,33 @@ +fn foo() { + struct Foo {} + + { + struct Foo {} + let f: Foo + } + + let f: Foo +} + +fn bar() { + struct Bar {} + + let x: i32 = { + { + struct Bar {} + + impl Bar { + fn len() -> u256 { + 1 + } + } + let bar: Bar = Bar {} + } + + struct Bar {} + let bar: Bar = Bar {} + 1 + } + + let bar: Bar = Bar {} +} diff --git a/crates/hir-analysis/test_files/early_path_resolution/nested_block.snap b/crates/hir-analysis/test_files/early_path_resolution/nested_block.snap new file mode 100644 index 0000000000..7123d950ea --- /dev/null +++ b/crates/hir-analysis/test_files/early_path_resolution/nested_block.snap @@ -0,0 +1,52 @@ +--- +source: crates/hir-analysis/tests/early_path_resolution.rs +expression: res +input_file: crates/hir-analysis/test_files/early_path_resolution/nested_block.fe +--- +note: + ┌─ nested_block.fe:6:16 + │ +6 │ let f: Foo + │ ^^^ nested_block::foo::{fn_body}::{block0}::{block1}::Foo + +note: + ┌─ nested_block.fe:9:12 + │ +9 │ let f: Foo + │ ^^^ nested_block::foo::{fn_body}::{block0}::Foo + +note: + ┌─ nested_block.fe:15:12 + │ +15 │ let x: i32 = { + │ ^^^ i32 + +note: + ┌─ nested_block.fe:19:18 + 
│ +19 │ impl Bar { + │ ^^^ nested_block::bar::{fn_body}::{block0}::{block1}::{block2}::Bar + +note: + ┌─ nested_block.fe:20:29 + │ +20 │ fn len() -> u256 { + │ ^^^^ u256 + +note: + ┌─ nested_block.fe:24:22 + │ +24 │ let bar: Bar = Bar {} + │ ^^^ nested_block::bar::{fn_body}::{block0}::{block1}::{block2}::Bar + +note: + ┌─ nested_block.fe:28:18 + │ +28 │ let bar: Bar = Bar {} + │ ^^^ nested_block::bar::{fn_body}::{block0}::{block1}::Bar + +note: + ┌─ nested_block.fe:32:14 + │ +32 │ let bar: Bar = Bar {} + │ ^^^ nested_block::bar::{fn_body}::{block0}::Bar diff --git a/crates/hir-analysis/test_files/early_path_resolution/scoped_import.fe b/crates/hir-analysis/test_files/early_path_resolution/scoped_import.fe new file mode 100644 index 0000000000..b87b559a3e --- /dev/null +++ b/crates/hir-analysis/test_files/early_path_resolution/scoped_import.fe @@ -0,0 +1,14 @@ +pub fn foo() { + { + use mod1::Foo + let v: Foo + } + + let v: Foo +} + +struct Foo {} + +mod mod1 { + pub struct Foo {} +} \ No newline at end of file diff --git a/crates/hir-analysis/test_files/early_path_resolution/scoped_import.snap b/crates/hir-analysis/test_files/early_path_resolution/scoped_import.snap new file mode 100644 index 0000000000..4242c2f0ec --- /dev/null +++ b/crates/hir-analysis/test_files/early_path_resolution/scoped_import.snap @@ -0,0 +1,16 @@ +--- +source: crates/hir-analysis/tests/early_path_resolution.rs +expression: res +input_file: crates/hir-analysis/test_files/early_path_resolution/scoped_import.fe +--- +note: + ┌─ scoped_import.fe:4:16 + │ +4 │ let v: Foo + │ ^^^ scoped_import::mod1::Foo + +note: + ┌─ scoped_import.fe:7:12 + │ +7 │ let v: Foo + │ ^^^ scoped_import::Foo diff --git a/crates/hir-analysis/test_files/imports/cycle_glob.fe b/crates/hir-analysis/test_files/imports/cycle_glob.fe new file mode 100644 index 0000000000..6f2c26e358 --- /dev/null +++ b/crates/hir-analysis/test_files/imports/cycle_glob.fe @@ -0,0 +1,17 @@ +pub mod mod1 { + // `Foo`, `Bar`, and `BarImported` 
are visible in this scope. + pub use super::mod2::Bar as BarImported + pub use super::mod2::* + + pub struct Foo {} + +} + +pub mod mod2 { + // `Foo`, `Bar`, `BarImported`, and `BarPriv` are visible in this scope. + pub use super::mod1::* + + pub struct Bar {} + + struct BarPriv {} +} \ No newline at end of file diff --git a/crates/hir-analysis/test_files/imports/cycle_glob.snap b/crates/hir-analysis/test_files/imports/cycle_glob.snap new file mode 100644 index 0000000000..4bbc9da721 --- /dev/null +++ b/crates/hir-analysis/test_files/imports/cycle_glob.snap @@ -0,0 +1,22 @@ +--- +source: crates/hir-analysis/tests/import.rs +expression: res +input_file: crates/hir-analysis/test_files/imports/cycle_glob.fe +--- +note: + ┌─ cycle_glob.fe:3:5 + │ +3 │ pub use super::mod2::Bar as BarImported + │ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ cycle_glob::mod2::Bar + +note: + ┌─ cycle_glob.fe:4:5 + │ +4 │ pub use super::mod2::* + │ ^^^^^^^^^^^^^^^^^^^^^^ cycle_glob::mod1::Foo as Foo | cycle_glob::mod2::Bar as Bar | cycle_glob::mod2::Bar as BarImported + +note: + ┌─ cycle_glob.fe:12:5 + │ +12 │ pub use super::mod1::* + │ ^^^^^^^^^^^^^^^^^^^^^^ cycle_glob::mod1::Foo as Foo | cycle_glob::mod2::Bar as Bar | cycle_glob::mod2::Bar as BarImported diff --git a/crates/hir-analysis/test_files/imports/glob_chain.fe b/crates/hir-analysis/test_files/imports/glob_chain.fe new file mode 100644 index 0000000000..f90e5b23b8 --- /dev/null +++ b/crates/hir-analysis/test_files/imports/glob_chain.fe @@ -0,0 +1,12 @@ +use foo::* + +mod foo { + pub use MyEnum::* + + pub struct Variant {} + + pub enum MyEnum { + Variant, + Variant2, + } +} \ No newline at end of file diff --git a/crates/hir-analysis/test_files/imports/glob_chain.snap b/crates/hir-analysis/test_files/imports/glob_chain.snap new file mode 100644 index 0000000000..32203ebd5a --- /dev/null +++ b/crates/hir-analysis/test_files/imports/glob_chain.snap @@ -0,0 +1,16 @@ +--- +source: crates/hir-analysis/tests/import.rs +expression: res 
+input_file: crates/hir-analysis/test_files/imports/glob_chain.fe +--- +note: + ┌─ glob_chain.fe:1:1 + │ +1 │ use foo::* + │ ^^^^^^^^^^ glob_chain::foo::MyEnum as MyEnum | glob_chain::foo::MyEnum::Variant as Variant | glob_chain::foo::MyEnum::Variant2 as Variant2 | glob_chain::foo::Variant as Variant + +note: + ┌─ glob_chain.fe:4:5 + │ +4 │ pub use MyEnum::* + │ ^^^^^^^^^^^^^^^^^ glob_chain::foo::MyEnum::Variant as Variant | glob_chain::foo::MyEnum::Variant2 as Variant2 diff --git a/crates/hir-analysis/test_files/imports/glob_mutual_dep.fe b/crates/hir-analysis/test_files/imports/glob_mutual_dep.fe new file mode 100644 index 0000000000..6909683f98 --- /dev/null +++ b/crates/hir-analysis/test_files/imports/glob_mutual_dep.fe @@ -0,0 +1,13 @@ +use foo::* + +pub mod foo { + pub use super::bar::* + + pub struct Foo {} +} + +pub mod bar { + pub use super::foo::* + + pub struct Bar {} +} \ No newline at end of file diff --git a/crates/hir-analysis/test_files/imports/glob_mutual_dep.snap b/crates/hir-analysis/test_files/imports/glob_mutual_dep.snap new file mode 100644 index 0000000000..36ce438dd0 --- /dev/null +++ b/crates/hir-analysis/test_files/imports/glob_mutual_dep.snap @@ -0,0 +1,22 @@ +--- +source: crates/hir-analysis/tests/import.rs +expression: res +input_file: crates/hir-analysis/test_files/imports/glob_mutual_dep.fe +--- +note: + ┌─ glob_mutual_dep.fe:1:1 + │ +1 │ use foo::* + │ ^^^^^^^^^^ glob_mutual_dep::bar::Bar as Bar | glob_mutual_dep::foo::Foo as Foo + +note: + ┌─ glob_mutual_dep.fe:4:5 + │ +4 │ pub use super::bar::* + │ ^^^^^^^^^^^^^^^^^^^^^ glob_mutual_dep::bar::Bar as Bar | glob_mutual_dep::foo::Foo as Foo + +note: + ┌─ glob_mutual_dep.fe:10:5 + │ +10 │ pub use super::foo::* + │ ^^^^^^^^^^^^^^^^^^^^^ glob_mutual_dep::bar::Bar as Bar | glob_mutual_dep::foo::Foo as Foo diff --git a/crates/hir-analysis/test_files/imports/glob_shadow.fe b/crates/hir-analysis/test_files/imports/glob_shadow.fe new file mode 100644 index 0000000000..b7829adbde --- /dev/null 
+++ b/crates/hir-analysis/test_files/imports/glob_shadow.fe @@ -0,0 +1,12 @@ +use foo::* + +mod foo { + pub use MyEnum::* + + pub const Variant: i32 = 0 + + pub enum MyEnum { + Variant, + Variant2, + } +} \ No newline at end of file diff --git a/crates/hir-analysis/test_files/imports/glob_shadow.snap b/crates/hir-analysis/test_files/imports/glob_shadow.snap new file mode 100644 index 0000000000..ed6213a968 --- /dev/null +++ b/crates/hir-analysis/test_files/imports/glob_shadow.snap @@ -0,0 +1,16 @@ +--- +source: crates/hir-analysis/tests/import.rs +expression: res +input_file: crates/hir-analysis/test_files/imports/glob_shadow.fe +--- +note: + ┌─ glob_shadow.fe:1:1 + │ +1 │ use foo::* + │ ^^^^^^^^^^ glob_shadow::foo::MyEnum as MyEnum | glob_shadow::foo::MyEnum::Variant2 as Variant2 | glob_shadow::foo::Variant as Variant + +note: + ┌─ glob_shadow.fe:4:5 + │ +4 │ pub use MyEnum::* + │ ^^^^^^^^^^^^^^^^^ glob_shadow::foo::MyEnum::Variant as Variant | glob_shadow::foo::MyEnum::Variant2 as Variant2 diff --git a/crates/hir-analysis/test_files/imports/multiple_domains.fe b/crates/hir-analysis/test_files/imports/multiple_domains.fe new file mode 100644 index 0000000000..903dcd068e --- /dev/null +++ b/crates/hir-analysis/test_files/imports/multiple_domains.fe @@ -0,0 +1,6 @@ +use foo::S + +mod foo { + pub struct S {} + pub fn S() {} +} diff --git a/crates/hir-analysis/test_files/imports/multiple_domains.snap b/crates/hir-analysis/test_files/imports/multiple_domains.snap new file mode 100644 index 0000000000..64837f85a6 --- /dev/null +++ b/crates/hir-analysis/test_files/imports/multiple_domains.snap @@ -0,0 +1,10 @@ +--- +source: crates/hir-analysis/tests/import.rs +expression: res +input_file: crates/hir-analysis/test_files/imports/multiple_domains.fe +--- +note: + ┌─ multiple_domains.fe:1:1 + │ +1 │ use foo::S + │ ^^^^^^^^^^ multiple_domains::foo::S | multiple_domains::foo::S diff --git a/crates/hir-analysis/test_files/imports/use_depends_glob.fe 
b/crates/hir-analysis/test_files/imports/use_depends_glob.fe new file mode 100644 index 0000000000..219fefc84f --- /dev/null +++ b/crates/hir-analysis/test_files/imports/use_depends_glob.fe @@ -0,0 +1,9 @@ +use bar::Bar +use foo::* + +mod foo { + pub mod bar { + pub struct Bar {} + } + +} diff --git a/crates/hir-analysis/test_files/imports/use_depends_glob.snap b/crates/hir-analysis/test_files/imports/use_depends_glob.snap new file mode 100644 index 0000000000..19a87b590f --- /dev/null +++ b/crates/hir-analysis/test_files/imports/use_depends_glob.snap @@ -0,0 +1,16 @@ +--- +source: crates/hir-analysis/tests/import.rs +expression: res +input_file: crates/hir-analysis/test_files/imports/use_depends_glob.fe +--- +note: + ┌─ use_depends_glob.fe:1:1 + │ +1 │ use bar::Bar + │ ^^^^^^^^^^^^ use_depends_glob::foo::bar::Bar + +note: + ┌─ use_depends_glob.fe:2:1 + │ +2 │ use foo::* + │ ^^^^^^^^^^ use_depends_glob::foo::bar as bar diff --git a/crates/hir-analysis/test_files/ty_check/array.fe b/crates/hir-analysis/test_files/ty_check/array.fe new file mode 100644 index 0000000000..4c7f1315ad --- /dev/null +++ b/crates/hir-analysis/test_files/ty_check/array.fe @@ -0,0 +1,10 @@ +struct Foo {} + +fn foo() { + let i_array: [i32; 2] = [1, 2] + let b_array = [true, false] + + let array_rep = [true; 10] + let array_rep2 = [Foo {}; 5] + let array_rep3: [i32; 5] = [1; 5] +} \ No newline at end of file diff --git a/crates/hir-analysis/test_files/ty_check/array.snap b/crates/hir-analysis/test_files/ty_check/array.snap new file mode 100644 index 0000000000..edec143e83 --- /dev/null +++ b/crates/hir-analysis/test_files/ty_check/array.snap @@ -0,0 +1,119 @@ +--- +source: crates/hir-analysis/tests/ty_check.rs +expression: res +input_file: crates/hir-analysis/test_files/ty_check/array.fe +--- +note: + ┌─ array.fe:3:10 + │ + 3 │ fn foo() { + │ ╭──────────^ + 4 │ │ let i_array: [i32; 2] = [1, 2] + 5 │ │ let b_array = [true, false] + 6 │ │ + · │ + 9 │ │ let array_rep3: [i32; 5] = [1; 5] +10 │ │ } 
+ │ ╰─^ () + +note: + ┌─ array.fe:4:9 + │ +4 │ let i_array: [i32; 2] = [1, 2] + │ ^^^^^^^ [i32; 2] + +note: + ┌─ array.fe:4:29 + │ +4 │ let i_array: [i32; 2] = [1, 2] + │ ^^^^^^ [i32; 2] + +note: + ┌─ array.fe:4:30 + │ +4 │ let i_array: [i32; 2] = [1, 2] + │ ^ i32 + +note: + ┌─ array.fe:4:33 + │ +4 │ let i_array: [i32; 2] = [1, 2] + │ ^ i32 + +note: + ┌─ array.fe:5:9 + │ +5 │ let b_array = [true, false] + │ ^^^^^^^ [bool; 2] + +note: + ┌─ array.fe:5:19 + │ +5 │ let b_array = [true, false] + │ ^^^^^^^^^^^^^ [bool; 2] + +note: + ┌─ array.fe:5:20 + │ +5 │ let b_array = [true, false] + │ ^^^^ bool + +note: + ┌─ array.fe:5:26 + │ +5 │ let b_array = [true, false] + │ ^^^^^ bool + +note: + ┌─ array.fe:7:9 + │ +7 │ let array_rep = [true; 10] + │ ^^^^^^^^^ [bool; 10] + +note: + ┌─ array.fe:7:21 + │ +7 │ let array_rep = [true; 10] + │ ^^^^^^^^^^ [bool; 10] + +note: + ┌─ array.fe:7:22 + │ +7 │ let array_rep = [true; 10] + │ ^^^^ bool + +note: + ┌─ array.fe:8:9 + │ +8 │ let array_rep2 = [Foo {}; 5] + │ ^^^^^^^^^^ [Foo; 5] + +note: + ┌─ array.fe:8:22 + │ +8 │ let array_rep2 = [Foo {}; 5] + │ ^^^^^^^^^^^ [Foo; 5] + +note: + ┌─ array.fe:8:23 + │ +8 │ let array_rep2 = [Foo {}; 5] + │ ^^^^^^ Foo + +note: + ┌─ array.fe:9:9 + │ +9 │ let array_rep3: [i32; 5] = [1; 5] + │ ^^^^^^^^^^ [i32; 5] + +note: + ┌─ array.fe:9:32 + │ +9 │ let array_rep3: [i32; 5] = [1; 5] + │ ^^^^^^ [i32; 5] + +note: + ┌─ array.fe:9:33 + │ +9 │ let array_rep3: [i32; 5] = [1; 5] + │ ^ i32 diff --git a/crates/hir-analysis/test_files/ty_check/assign.fe b/crates/hir-analysis/test_files/ty_check/assign.fe new file mode 100644 index 0000000000..4584c9c7d8 --- /dev/null +++ b/crates/hir-analysis/test_files/ty_check/assign.fe @@ -0,0 +1,48 @@ +pub struct Inner { + x: i32, + y: u32, +} + +pub struct Outer { + inner: Inner, +} + +pub enum Option { + Some(T), + None +} + +pub struct Gen { + t: T +} + +impl Outer { + fn set_inner(mut self, x: i32, y: u32) { + self.inner = Inner { x, y } + } +} + +pub fn foo(opt: Option) { + 
let mut x = 1 + let y = 2 + + let z = x = 2 + + let mut arr = [false; 10] + arr[1] = true + + let mut tuple = (true, false, Inner { x, y }) + tuple.2.x = 1 + + let mut outer = Outer { inner: Inner { x, y } } + outer.inner.x = 2 + + match opt { + Option::Some(mut x) => { + x = 2 + } + Option::None => {} + } + + Gen { t: false }.t = true +} \ No newline at end of file diff --git a/crates/hir-analysis/test_files/ty_check/assign.snap b/crates/hir-analysis/test_files/ty_check/assign.snap new file mode 100644 index 0000000000..56bf98670f --- /dev/null +++ b/crates/hir-analysis/test_files/ty_check/assign.snap @@ -0,0 +1,388 @@ +--- +source: crates/hir-analysis/tests/ty_check.rs +expression: res +input_file: crates/hir-analysis/test_files/ty_check/assign.fe +--- +note: + ┌─ assign.fe:20:44 + │ +20 │ fn set_inner(mut self, x: i32, y: u32) { + │ ╭────────────────────────────────────────────^ +21 │ │ self.inner = Inner { x, y } +22 │ │ } + │ ╰─────^ () + +note: + ┌─ assign.fe:21:9 + │ +21 │ self.inner = Inner { x, y } + │ ^^^^ Outer + +note: + ┌─ assign.fe:21:9 + │ +21 │ self.inner = Inner { x, y } + │ ^^^^^^^^^^ Inner + +note: + ┌─ assign.fe:21:9 + │ +21 │ self.inner = Inner { x, y } + │ ^^^^^^^^^^^^^^^^^^^^^^^^^^^ () + +note: + ┌─ assign.fe:21:22 + │ +21 │ self.inner = Inner { x, y } + │ ^^^^^^^^^^^^^^ Inner + +note: + ┌─ assign.fe:21:30 + │ +21 │ self.inner = Inner { x, y } + │ ^ i32 + +note: + ┌─ assign.fe:21:33 + │ +21 │ self.inner = Inner { x, y } + │ ^ u32 + +note: + ┌─ assign.fe:25:30 + │ +25 │ pub fn foo(opt: Option) { + │ ╭──────────────────────────────^ +26 │ │ let mut x = 1 +27 │ │ let y = 2 +28 │ │ + · │ +47 │ │ Gen { t: false }.t = true +48 │ │ } + │ ╰─^ () + +note: + ┌─ assign.fe:26:9 + │ +26 │ let mut x = 1 + │ ^^^^^ i32 + +note: + ┌─ assign.fe:26:17 + │ +26 │ let mut x = 1 + │ ^ i32 + +note: + ┌─ assign.fe:27:9 + │ +27 │ let y = 2 + │ ^ u32 + +note: + ┌─ assign.fe:27:13 + │ +27 │ let y = 2 + │ ^ u32 + +note: + ┌─ assign.fe:29:9 + │ +29 │ let z = x = 2 + │ ^ 
() + +note: + ┌─ assign.fe:29:13 + │ +29 │ let z = x = 2 + │ ^ i32 + +note: + ┌─ assign.fe:29:13 + │ +29 │ let z = x = 2 + │ ^^^^^ () + +note: + ┌─ assign.fe:29:17 + │ +29 │ let z = x = 2 + │ ^ i32 + +note: + ┌─ assign.fe:31:9 + │ +31 │ let mut arr = [false; 10] + │ ^^^^^^^ [bool; 10] + +note: + ┌─ assign.fe:31:19 + │ +31 │ let mut arr = [false; 10] + │ ^^^^^^^^^^^ [bool; 10] + +note: + ┌─ assign.fe:31:20 + │ +31 │ let mut arr = [false; 10] + │ ^^^^^ bool + +note: + ┌─ assign.fe:32:5 + │ +32 │ arr[1] = true + │ ^^^ [bool; 10] + +note: + ┌─ assign.fe:32:5 + │ +32 │ arr[1] = true + │ ^^^^^^ bool + +note: + ┌─ assign.fe:32:5 + │ +32 │ arr[1] = true + │ ^^^^^^^^^^^^^ () + +note: + ┌─ assign.fe:32:9 + │ +32 │ arr[1] = true + │ ^ u256 + +note: + ┌─ assign.fe:32:14 + │ +32 │ arr[1] = true + │ ^^^^ bool + +note: + ┌─ assign.fe:34:9 + │ +34 │ let mut tuple = (true, false, Inner { x, y }) + │ ^^^^^^^^^ (bool, bool, Inner) + +note: + ┌─ assign.fe:34:21 + │ +34 │ let mut tuple = (true, false, Inner { x, y }) + │ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ (bool, bool, Inner) + +note: + ┌─ assign.fe:34:22 + │ +34 │ let mut tuple = (true, false, Inner { x, y }) + │ ^^^^ bool + +note: + ┌─ assign.fe:34:28 + │ +34 │ let mut tuple = (true, false, Inner { x, y }) + │ ^^^^^ bool + +note: + ┌─ assign.fe:34:35 + │ +34 │ let mut tuple = (true, false, Inner { x, y }) + │ ^^^^^^^^^^^^^^ Inner + +note: + ┌─ assign.fe:34:43 + │ +34 │ let mut tuple = (true, false, Inner { x, y }) + │ ^ i32 + +note: + ┌─ assign.fe:34:46 + │ +34 │ let mut tuple = (true, false, Inner { x, y }) + │ ^ u32 + +note: + ┌─ assign.fe:35:5 + │ +35 │ tuple.2.x = 1 + │ ^^^^^ (bool, bool, Inner) + +note: + ┌─ assign.fe:35:5 + │ +35 │ tuple.2.x = 1 + │ ^^^^^^^ Inner + +note: + ┌─ assign.fe:35:5 + │ +35 │ tuple.2.x = 1 + │ ^^^^^^^^^ i32 + +note: + ┌─ assign.fe:35:5 + │ +35 │ tuple.2.x = 1 + │ ^^^^^^^^^^^^^ () + +note: + ┌─ assign.fe:35:17 + │ +35 │ tuple.2.x = 1 + │ ^ i32 + +note: + ┌─ assign.fe:37:9 + │ +37 │ let mut outer = Outer { 
inner: Inner { x, y } } + │ ^^^^^^^^^ Outer + +note: + ┌─ assign.fe:37:21 + │ +37 │ let mut outer = Outer { inner: Inner { x, y } } + │ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Outer + +note: + ┌─ assign.fe:37:36 + │ +37 │ let mut outer = Outer { inner: Inner { x, y } } + │ ^^^^^^^^^^^^^^ Inner + +note: + ┌─ assign.fe:37:44 + │ +37 │ let mut outer = Outer { inner: Inner { x, y } } + │ ^ i32 + +note: + ┌─ assign.fe:37:47 + │ +37 │ let mut outer = Outer { inner: Inner { x, y } } + │ ^ u32 + +note: + ┌─ assign.fe:38:5 + │ +38 │ outer.inner.x = 2 + │ ^^^^^ Outer + +note: + ┌─ assign.fe:38:5 + │ +38 │ outer.inner.x = 2 + │ ^^^^^^^^^^^ Inner + +note: + ┌─ assign.fe:38:5 + │ +38 │ outer.inner.x = 2 + │ ^^^^^^^^^^^^^ i32 + +note: + ┌─ assign.fe:38:5 + │ +38 │ outer.inner.x = 2 + │ ^^^^^^^^^^^^^^^^^ () + +note: + ┌─ assign.fe:38:21 + │ +38 │ outer.inner.x = 2 + │ ^ i32 + +note: + ┌─ assign.fe:40:5 + │ +40 │ ╭ match opt { +41 │ │ Option::Some(mut x) => { +42 │ │ x = 2 +43 │ │ } +44 │ │ Option::None => {} +45 │ │ } + │ ╰─────^ () + +note: + ┌─ assign.fe:40:11 + │ +40 │ match opt { + │ ^^^ Option + +note: + ┌─ assign.fe:41:9 + │ +41 │ Option::Some(mut x) => { + │ ^^^^^^^^^^^^^^^^^^^ Option + +note: + ┌─ assign.fe:41:22 + │ +41 │ Option::Some(mut x) => { + │ ^^^^^ i32 + +note: + ┌─ assign.fe:41:32 + │ +41 │ Option::Some(mut x) => { + │ ╭────────────────────────────────^ +42 │ │ x = 2 +43 │ │ } + │ ╰─────────^ () + +note: + ┌─ assign.fe:42:13 + │ +42 │ x = 2 + │ ^ i32 + +note: + ┌─ assign.fe:42:13 + │ +42 │ x = 2 + │ ^^^^^ () + +note: + ┌─ assign.fe:42:17 + │ +42 │ x = 2 + │ ^ i32 + +note: + ┌─ assign.fe:44:9 + │ +44 │ Option::None => {} + │ ^^^^^^^^^^^^ Option + +note: + ┌─ assign.fe:44:25 + │ +44 │ Option::None => {} + │ ^^ () + +note: + ┌─ assign.fe:47:5 + │ +47 │ Gen { t: false }.t = true + │ ^^^^^^^^^^^^^^^^ Gen + +note: + ┌─ assign.fe:47:5 + │ +47 │ Gen { t: false }.t = true + │ ^^^^^^^^^^^^^^^^^^ bool + +note: + ┌─ assign.fe:47:5 + │ +47 │ Gen { t: false }.t = true + │ 
^^^^^^^^^^^^^^^^^^^^^^^^^ () + +note: + ┌─ assign.fe:47:14 + │ +47 │ Gen { t: false }.t = true + │ ^^^^^ bool + +note: + ┌─ assign.fe:47:26 + │ +47 │ Gen { t: false }.t = true + │ ^^^^ bool diff --git a/crates/hir-analysis/test_files/ty_check/aug_assign.fe b/crates/hir-analysis/test_files/ty_check/aug_assign.fe new file mode 100644 index 0000000000..191d4ab1e6 --- /dev/null +++ b/crates/hir-analysis/test_files/ty_check/aug_assign.fe @@ -0,0 +1,19 @@ +struct Foo { + x: i32, +} + +pub fn foo(mut foo: Foo, mut b: bool) -> i32 { + foo.x *= 2 + b |= false + + let mut x = 1 + x += 1 + x *= 1 + x <<= 1 + x **= 2 + x + + let mut arr = [x, 1, 2] + arr[0] -= arr[1] + 1 + arr[0] +} \ No newline at end of file diff --git a/crates/hir-analysis/test_files/ty_check/aug_assign.snap b/crates/hir-analysis/test_files/ty_check/aug_assign.snap new file mode 100644 index 0000000000..0a87b8b605 --- /dev/null +++ b/crates/hir-analysis/test_files/ty_check/aug_assign.snap @@ -0,0 +1,251 @@ +--- +source: crates/hir-analysis/tests/ty_check.rs +expression: res +input_file: crates/hir-analysis/test_files/ty_check/aug_assign.fe +--- +note: + ┌─ aug_assign.fe:5:46 + │ + 5 │ pub fn foo(mut foo: Foo, mut b: bool) -> i32 { + │ ╭──────────────────────────────────────────────^ + 6 │ │ foo.x *= 2 + 7 │ │ b |= false + 8 │ │ + · │ +18 │ │ arr[0] +19 │ │ } + │ ╰─^ i32 + +note: + ┌─ aug_assign.fe:6:5 + │ +6 │ foo.x *= 2 + │ ^^^ Foo + +note: + ┌─ aug_assign.fe:6:5 + │ +6 │ foo.x *= 2 + │ ^^^^^ i32 + +note: + ┌─ aug_assign.fe:6:5 + │ +6 │ foo.x *= 2 + │ ^^^^^^^^^^ () + +note: + ┌─ aug_assign.fe:6:14 + │ +6 │ foo.x *= 2 + │ ^ i32 + +note: + ┌─ aug_assign.fe:7:5 + │ +7 │ b |= false + │ ^ bool + +note: + ┌─ aug_assign.fe:7:5 + │ +7 │ b |= false + │ ^^^^^^^^^^ () + +note: + ┌─ aug_assign.fe:7:10 + │ +7 │ b |= false + │ ^^^^^ bool + +note: + ┌─ aug_assign.fe:9:9 + │ +9 │ let mut x = 1 + │ ^^^^^ i32 + +note: + ┌─ aug_assign.fe:9:17 + │ +9 │ let mut x = 1 + │ ^ i32 + +note: + ┌─ aug_assign.fe:10:5 + │ +10 │ x += 1 
+ │ ^ i32 + +note: + ┌─ aug_assign.fe:10:5 + │ +10 │ x += 1 + │ ^^^^^^ () + +note: + ┌─ aug_assign.fe:10:10 + │ +10 │ x += 1 + │ ^ i32 + +note: + ┌─ aug_assign.fe:11:5 + │ +11 │ x *= 1 + │ ^ i32 + +note: + ┌─ aug_assign.fe:11:5 + │ +11 │ x *= 1 + │ ^^^^^^ () + +note: + ┌─ aug_assign.fe:11:10 + │ +11 │ x *= 1 + │ ^ i32 + +note: + ┌─ aug_assign.fe:12:5 + │ +12 │ x <<= 1 + │ ^ i32 + +note: + ┌─ aug_assign.fe:12:5 + │ +12 │ x <<= 1 + │ ^^^^^^^ () + +note: + ┌─ aug_assign.fe:12:11 + │ +12 │ x <<= 1 + │ ^ i32 + +note: + ┌─ aug_assign.fe:13:5 + │ +13 │ x **= 2 + │ ^ i32 + +note: + ┌─ aug_assign.fe:13:5 + │ +13 │ x **= 2 + │ ^^^^^^^ () + +note: + ┌─ aug_assign.fe:13:11 + │ +13 │ x **= 2 + │ ^ i32 + +note: + ┌─ aug_assign.fe:14:5 + │ +14 │ x + │ ^ i32 + +note: + ┌─ aug_assign.fe:16:9 + │ +16 │ let mut arr = [x, 1, 2] + │ ^^^^^^^ [i32; 3] + +note: + ┌─ aug_assign.fe:16:19 + │ +16 │ let mut arr = [x, 1, 2] + │ ^^^^^^^^^ [i32; 3] + +note: + ┌─ aug_assign.fe:16:20 + │ +16 │ let mut arr = [x, 1, 2] + │ ^ i32 + +note: + ┌─ aug_assign.fe:16:23 + │ +16 │ let mut arr = [x, 1, 2] + │ ^ i32 + +note: + ┌─ aug_assign.fe:16:26 + │ +16 │ let mut arr = [x, 1, 2] + │ ^ i32 + +note: + ┌─ aug_assign.fe:17:5 + │ +17 │ arr[0] -= arr[1] + 1 + │ ^^^ [i32; 3] + +note: + ┌─ aug_assign.fe:17:5 + │ +17 │ arr[0] -= arr[1] + 1 + │ ^^^^^^ i32 + +note: + ┌─ aug_assign.fe:17:5 + │ +17 │ arr[0] -= arr[1] + 1 + │ ^^^^^^^^^^^^^^^^^^^^ () + +note: + ┌─ aug_assign.fe:17:9 + │ +17 │ arr[0] -= arr[1] + 1 + │ ^ u256 + +note: + ┌─ aug_assign.fe:17:15 + │ +17 │ arr[0] -= arr[1] + 1 + │ ^^^ [i32; 3] + +note: + ┌─ aug_assign.fe:17:15 + │ +17 │ arr[0] -= arr[1] + 1 + │ ^^^^^^ i32 + +note: + ┌─ aug_assign.fe:17:15 + │ +17 │ arr[0] -= arr[1] + 1 + │ ^^^^^^^^^^ i32 + +note: + ┌─ aug_assign.fe:17:19 + │ +17 │ arr[0] -= arr[1] + 1 + │ ^ u256 + +note: + ┌─ aug_assign.fe:17:24 + │ +17 │ arr[0] -= arr[1] + 1 + │ ^ i32 + +note: + ┌─ aug_assign.fe:18:5 + │ +18 │ arr[0] + │ ^^^ [i32; 3] + +note: + ┌─ aug_assign.fe:18:5 + │ +18 │ 
arr[0] + │ ^^^^^^ i32 + +note: + ┌─ aug_assign.fe:18:9 + │ +18 │ arr[0] + │ ^ u256 diff --git a/crates/hir-analysis/test_files/ty_check/binary.fe b/crates/hir-analysis/test_files/ty_check/binary.fe new file mode 100644 index 0000000000..8c0fd54d3c --- /dev/null +++ b/crates/hir-analysis/test_files/ty_check/binary.fe @@ -0,0 +1,6 @@ +fn foo(x: u32, y: u32) { + x + x + true && false + (x < 1) && (y > 10) || (x == y) + let z = 1 + x +} \ No newline at end of file diff --git a/crates/hir-analysis/test_files/ty_check/binary.snap b/crates/hir-analysis/test_files/ty_check/binary.snap new file mode 100644 index 0000000000..7cd76e1163 --- /dev/null +++ b/crates/hir-analysis/test_files/ty_check/binary.snap @@ -0,0 +1,142 @@ +--- +source: crates/hir-analysis/tests/ty_check.rs +expression: res +input_file: crates/hir-analysis/test_files/ty_check/binary.fe +--- +note: + ┌─ binary.fe:1:24 + │ +1 │ fn foo(x: u32, y: u32) { + │ ╭────────────────────────^ +2 │ │ x + x +3 │ │ true && false +4 │ │ (x < 1) && (y > 10) || (x == y) +5 │ │ let z = 1 + x +6 │ │ } + │ ╰─^ () + +note: + ┌─ binary.fe:2:5 + │ +2 │ x + x + │ ^ u32 + +note: + ┌─ binary.fe:2:5 + │ +2 │ x + x + │ ^^^^^ u32 + +note: + ┌─ binary.fe:2:9 + │ +2 │ x + x + │ ^ u32 + +note: + ┌─ binary.fe:3:5 + │ +3 │ true && false + │ ^^^^ bool + +note: + ┌─ binary.fe:3:5 + │ +3 │ true && false + │ ^^^^^^^^^^^^^ bool + +note: + ┌─ binary.fe:3:13 + │ +3 │ true && false + │ ^^^^^ bool + +note: + ┌─ binary.fe:4:5 + │ +4 │ (x < 1) && (y > 10) || (x == y) + │ ^^^^^^^^^^^^^^^^^^^ bool + +note: + ┌─ binary.fe:4:5 + │ +4 │ (x < 1) && (y > 10) || (x == y) + │ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ bool + +note: + ┌─ binary.fe:4:6 + │ +4 │ (x < 1) && (y > 10) || (x == y) + │ ^ u32 + +note: + ┌─ binary.fe:4:6 + │ +4 │ (x < 1) && (y > 10) || (x == y) + │ ^^^^^ bool + +note: + ┌─ binary.fe:4:10 + │ +4 │ (x < 1) && (y > 10) || (x == y) + │ ^ u32 + +note: + ┌─ binary.fe:4:17 + │ +4 │ (x < 1) && (y > 10) || (x == y) + │ ^ u32 + +note: + ┌─ binary.fe:4:17 + │ 
+4 │ (x < 1) && (y > 10) || (x == y) + │ ^^^^^^ bool + +note: + ┌─ binary.fe:4:21 + │ +4 │ (x < 1) && (y > 10) || (x == y) + │ ^^ u32 + +note: + ┌─ binary.fe:4:29 + │ +4 │ (x < 1) && (y > 10) || (x == y) + │ ^ u32 + +note: + ┌─ binary.fe:4:29 + │ +4 │ (x < 1) && (y > 10) || (x == y) + │ ^^^^^^ bool + +note: + ┌─ binary.fe:4:34 + │ +4 │ (x < 1) && (y > 10) || (x == y) + │ ^ u32 + +note: + ┌─ binary.fe:5:9 + │ +5 │ let z = 1 + x + │ ^ u32 + +note: + ┌─ binary.fe:5:13 + │ +5 │ let z = 1 + x + │ ^ u32 + +note: + ┌─ binary.fe:5:13 + │ +5 │ let z = 1 + x + │ ^^^^^ u32 + +note: + ┌─ binary.fe:5:17 + │ +5 │ let z = 1 + x + │ ^ u32 diff --git a/crates/hir-analysis/test_files/ty_check/call.fe b/crates/hir-analysis/test_files/ty_check/call.fe new file mode 100644 index 0000000000..0729bb3d86 --- /dev/null +++ b/crates/hir-analysis/test_files/ty_check/call.fe @@ -0,0 +1,35 @@ +pub fn add(x: i32, y: i32) -> i32 { + x + y +} + +pub fn use_add() -> i32 { + add(x: 1, y: 2) +} + +pub fn make_pair(first: T, second: U) -> (T, U) { + (first, second) +} + +pub fn use_make_pair() -> (i32, bool) { + make_pair(first: 1, second: false) +} + +pub fn make_pair_i32_U(first: i32, second: U) -> (i32, U) { + make_pair(first, second) +} + +pub fn make_pair_explicit(first: i32, second: u32) -> (i32, u32) { + make_pair(first, second) +} + +pub fn hkt_func * -> *>(t: T) { } + +pub struct Foo { + t: T, + u: U, +} + +pub fn use_hkt_func() { + let foo = Foo { t: 1, u: 2 } + hkt_func(t: foo) +} diff --git a/crates/hir-analysis/test_files/ty_check/call.snap b/crates/hir-analysis/test_files/ty_check/call.snap new file mode 100644 index 0000000000..31f320b9f8 --- /dev/null +++ b/crates/hir-analysis/test_files/ty_check/call.snap @@ -0,0 +1,248 @@ +--- +source: crates/hir-analysis/tests/ty_check.rs +expression: res +input_file: crates/hir-analysis/test_files/ty_check/call.fe +--- +note: + ┌─ call.fe:1:35 + │ +1 │ pub fn add(x: i32, y: i32) -> i32 { + │ ╭───────────────────────────────────^ +2 │ │ x + y +3 │ 
│ } + │ ╰─^ i32 + +note: + ┌─ call.fe:2:5 + │ +2 │ x + y + │ ^ i32 + +note: + ┌─ call.fe:2:5 + │ +2 │ x + y + │ ^^^^^ i32 + +note: + ┌─ call.fe:2:9 + │ +2 │ x + y + │ ^ i32 + +note: + ┌─ call.fe:5:25 + │ +5 │ pub fn use_add() -> i32 { + │ ╭─────────────────────────^ +6 │ │ add(x: 1, y: 2) +7 │ │ } + │ ╰─^ i32 + +note: + ┌─ call.fe:6:5 + │ +6 │ add(x: 1, y: 2) + │ ^^^ fn add + +note: + ┌─ call.fe:6:5 + │ +6 │ add(x: 1, y: 2) + │ ^^^^^^^^^^^^^^^ i32 + +note: + ┌─ call.fe:6:12 + │ +6 │ add(x: 1, y: 2) + │ ^ i32 + +note: + ┌─ call.fe:6:18 + │ +6 │ add(x: 1, y: 2) + │ ^ i32 + +note: + ┌─ call.fe:9:55 + │ + 9 │ pub fn make_pair(first: T, second: U) -> (T, U) { + │ ╭───────────────────────────────────────────────────────^ +10 │ │ (first, second) +11 │ │ } + │ ╰─^ (T, U) + +note: + ┌─ call.fe:10:5 + │ +10 │ (first, second) + │ ^^^^^^^^^^^^^^^ (T, U) + +note: + ┌─ call.fe:10:6 + │ +10 │ (first, second) + │ ^^^^^ T + +note: + ┌─ call.fe:10:13 + │ +10 │ (first, second) + │ ^^^^^^ U + +note: + ┌─ call.fe:13:39 + │ +13 │ pub fn use_make_pair() -> (i32, bool) { + │ ╭───────────────────────────────────────^ +14 │ │ make_pair(first: 1, second: false) +15 │ │ } + │ ╰─^ (i32, bool) + +note: + ┌─ call.fe:14:5 + │ +14 │ make_pair(first: 1, second: false) + │ ^^^^^^^^^ fn make_pair + +note: + ┌─ call.fe:14:5 + │ +14 │ make_pair(first: 1, second: false) + │ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ (i32, bool) + +note: + ┌─ call.fe:14:22 + │ +14 │ make_pair(first: 1, second: false) + │ ^ i32 + +note: + ┌─ call.fe:14:33 + │ +14 │ make_pair(first: 1, second: false) + │ ^^^^^ bool + +note: + ┌─ call.fe:17:62 + │ +17 │ pub fn make_pair_i32_U(first: i32, second: U) -> (i32, U) { + │ ╭──────────────────────────────────────────────────────────────^ +18 │ │ make_pair(first, second) +19 │ │ } + │ ╰─^ (i32, U) + +note: + ┌─ call.fe:18:5 + │ +18 │ make_pair(first, second) + │ ^^^^^^^^^ fn make_pair + +note: + ┌─ call.fe:18:5 + │ +18 │ make_pair(first, second) + │ ^^^^^^^^^^^^^^^^^^^^^^^^ (i32, U) + 
+note: + ┌─ call.fe:18:15 + │ +18 │ make_pair(first, second) + │ ^^^^^ i32 + +note: + ┌─ call.fe:18:22 + │ +18 │ make_pair(first, second) + │ ^^^^^^ U + +note: + ┌─ call.fe:21:66 + │ +21 │ pub fn make_pair_explicit(first: i32, second: u32) -> (i32, u32) { + │ ╭──────────────────────────────────────────────────────────────────^ +22 │ │ make_pair(first, second) +23 │ │ } + │ ╰─^ (i32, u32) + +note: + ┌─ call.fe:22:5 + │ +22 │ make_pair(first, second) + │ ^^^^^^^^^^^^^^^^^^^ fn make_pair + +note: + ┌─ call.fe:22:5 + │ +22 │ make_pair(first, second) + │ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ (i32, u32) + +note: + ┌─ call.fe:22:25 + │ +22 │ make_pair(first, second) + │ ^^^^^ i32 + +note: + ┌─ call.fe:22:32 + │ +22 │ make_pair(first, second) + │ ^^^^^^ u32 + +note: + ┌─ call.fe:25:49 + │ +25 │ pub fn hkt_func * -> *>(t: T) { } + │ ^^^ () + +note: + ┌─ call.fe:32:23 + │ +32 │ pub fn use_hkt_func() { + │ ╭───────────────────────^ +33 │ │ let foo = Foo { t: 1, u: 2 } +34 │ │ hkt_func(t: foo) +35 │ │ } + │ ╰─^ () + +note: + ┌─ call.fe:33:9 + │ +33 │ let foo = Foo { t: 1, u: 2 } + │ ^^^ Foo + +note: + ┌─ call.fe:33:15 + │ +33 │ let foo = Foo { t: 1, u: 2 } + │ ^^^^^^^^^^^^^^^^^^ Foo + +note: + ┌─ call.fe:33:24 + │ +33 │ let foo = Foo { t: 1, u: 2 } + │ ^ i32 + +note: + ┌─ call.fe:33:30 + │ +33 │ let foo = Foo { t: 1, u: 2 } + │ ^ u32 + +note: + ┌─ call.fe:34:5 + │ +34 │ hkt_func(t: foo) + │ ^^^^^^^^ fn hkt_func + +note: + ┌─ call.fe:34:5 + │ +34 │ hkt_func(t: foo) + │ ^^^^^^^^^^^^^^^^ () + +note: + ┌─ call.fe:34:17 + │ +34 │ hkt_func(t: foo) + │ ^^^ Foo diff --git a/crates/hir-analysis/test_files/ty_check/field_access.fe b/crates/hir-analysis/test_files/ty_check/field_access.fe new file mode 100644 index 0000000000..8b38bc63b3 --- /dev/null +++ b/crates/hir-analysis/test_files/ty_check/field_access.fe @@ -0,0 +1,21 @@ +fn foo1(x: (i32, u32)) -> i32 { + let x: (i32, u32) + x.0 +} + +fn swap(x: (T, U)) -> (U, T) { + let elem0 = x.0 + let elem1 = x.1 + (elem1, elem0) +} + +struct 
Bar { + t: T, + u: U, +} + +fn swap2(bar: Bar) -> Bar { + let u = bar.t + let t = bar.u + Bar {t, u} +} \ No newline at end of file diff --git a/crates/hir-analysis/test_files/ty_check/field_access.snap b/crates/hir-analysis/test_files/ty_check/field_access.snap new file mode 100644 index 0000000000..8c090d73bd --- /dev/null +++ b/crates/hir-analysis/test_files/ty_check/field_access.snap @@ -0,0 +1,162 @@ +--- +source: crates/hir-analysis/tests/ty_check.rs +expression: res +input_file: crates/hir-analysis/test_files/ty_check/field_access.fe +--- +note: + ┌─ field_access.fe:1:31 + │ +1 │ fn foo1(x: (i32, u32)) -> i32 { + │ ╭───────────────────────────────^ +2 │ │ let x: (i32, u32) +3 │ │ x.0 +4 │ │ } + │ ╰─^ i32 + +note: + ┌─ field_access.fe:2:9 + │ +2 │ let x: (i32, u32) + │ ^ (i32, u32) + +note: + ┌─ field_access.fe:3:5 + │ +3 │ x.0 + │ ^ (i32, u32) + +note: + ┌─ field_access.fe:3:5 + │ +3 │ x.0 + │ ^^^ i32 + +note: + ┌─ field_access.fe:6:36 + │ + 6 │ fn swap(x: (T, U)) -> (U, T) { + │ ╭────────────────────────────────────^ + 7 │ │ let elem0 = x.0 + 8 │ │ let elem1 = x.1 + 9 │ │ (elem1, elem0) +10 │ │ } + │ ╰─^ (U, T) + +note: + ┌─ field_access.fe:7:9 + │ +7 │ let elem0 = x.0 + │ ^^^^^ T + +note: + ┌─ field_access.fe:7:17 + │ +7 │ let elem0 = x.0 + │ ^ (T, U) + +note: + ┌─ field_access.fe:7:17 + │ +7 │ let elem0 = x.0 + │ ^^^ T + +note: + ┌─ field_access.fe:8:9 + │ +8 │ let elem1 = x.1 + │ ^^^^^ U + +note: + ┌─ field_access.fe:8:17 + │ +8 │ let elem1 = x.1 + │ ^ (T, U) + +note: + ┌─ field_access.fe:8:17 + │ +8 │ let elem1 = x.1 + │ ^^^ U + +note: + ┌─ field_access.fe:9:5 + │ +9 │ (elem1, elem0) + │ ^^^^^^^^^^^^^^ (U, T) + +note: + ┌─ field_access.fe:9:6 + │ +9 │ (elem1, elem0) + │ ^^^^^ U + +note: + ┌─ field_access.fe:9:13 + │ +9 │ (elem1, elem0) + │ ^^^^^ T + +note: + ┌─ field_access.fe:17:46 + │ +17 │ fn swap2(bar: Bar) -> Bar { + │ ╭──────────────────────────────────────────────^ +18 │ │ let u = bar.t +19 │ │ let t = bar.u +20 │ │ Bar {t, u} +21 │ │ } + │ ╰─^ 
Bar + +note: + ┌─ field_access.fe:18:9 + │ +18 │ let u = bar.t + │ ^ T + +note: + ┌─ field_access.fe:18:13 + │ +18 │ let u = bar.t + │ ^^^ Bar + +note: + ┌─ field_access.fe:18:13 + │ +18 │ let u = bar.t + │ ^^^^^ T + +note: + ┌─ field_access.fe:19:9 + │ +19 │ let t = bar.u + │ ^ i32 + +note: + ┌─ field_access.fe:19:13 + │ +19 │ let t = bar.u + │ ^^^ Bar + +note: + ┌─ field_access.fe:19:13 + │ +19 │ let t = bar.u + │ ^^^^^ i32 + +note: + ┌─ field_access.fe:20:5 + │ +20 │ Bar {t, u} + │ ^^^^^^^^^^ Bar + +note: + ┌─ field_access.fe:20:10 + │ +20 │ Bar {t, u} + │ ^ i32 + +note: + ┌─ field_access.fe:20:13 + │ +20 │ Bar {t, u} + │ ^ T diff --git a/crates/hir-analysis/test_files/ty_check/for_.fe b/crates/hir-analysis/test_files/ty_check/for_.fe new file mode 100644 index 0000000000..7d0823d182 --- /dev/null +++ b/crates/hir-analysis/test_files/ty_check/for_.fe @@ -0,0 +1,23 @@ +struct Foo { + x: i32, + y: i32, +} + +fn foo() -> i256 { + let arr = [1, 2, 3, 4, 5] + let mut res = 0 + for i in arr { + res += i + } + + res +} + +fn bar(foo_arr: [Foo; 10]) -> i32 { + let mut res = 0 + for Foo {x, y} in foo_arr { + res += x + y + } + + res +} \ No newline at end of file diff --git a/crates/hir-analysis/test_files/ty_check/for_.snap b/crates/hir-analysis/test_files/ty_check/for_.snap new file mode 100644 index 0000000000..091dbbd182 --- /dev/null +++ b/crates/hir-analysis/test_files/ty_check/for_.snap @@ -0,0 +1,210 @@ +--- +source: crates/hir-analysis/tests/ty_check.rs +expression: res +input_file: crates/hir-analysis/test_files/ty_check/for_.fe +--- +note: + ┌─ for_.fe:6:18 + │ + 6 │ fn foo() -> i256 { + │ ╭──────────────────^ + 7 │ │ let arr = [1, 2, 3, 4, 5] + 8 │ │ let mut res = 0 + 9 │ │ for i in arr { + · │ +13 │ │ res +14 │ │ } + │ ╰─^ i256 + +note: + ┌─ for_.fe:7:9 + │ +7 │ let arr = [1, 2, 3, 4, 5] + │ ^^^ [i256; 5] + +note: + ┌─ for_.fe:7:15 + │ +7 │ let arr = [1, 2, 3, 4, 5] + │ ^^^^^^^^^^^^^^^ [i256; 5] + +note: + ┌─ for_.fe:7:16 + │ +7 │ let arr = [1, 2, 3, 4, 5] + 
│ ^ i256 + +note: + ┌─ for_.fe:7:19 + │ +7 │ let arr = [1, 2, 3, 4, 5] + │ ^ i256 + +note: + ┌─ for_.fe:7:22 + │ +7 │ let arr = [1, 2, 3, 4, 5] + │ ^ i256 + +note: + ┌─ for_.fe:7:25 + │ +7 │ let arr = [1, 2, 3, 4, 5] + │ ^ i256 + +note: + ┌─ for_.fe:7:28 + │ +7 │ let arr = [1, 2, 3, 4, 5] + │ ^ i256 + +note: + ┌─ for_.fe:8:9 + │ +8 │ let mut res = 0 + │ ^^^^^^^ i256 + +note: + ┌─ for_.fe:8:19 + │ +8 │ let mut res = 0 + │ ^ i256 + +note: + ┌─ for_.fe:9:9 + │ +9 │ for i in arr { + │ ^ i256 + +note: + ┌─ for_.fe:9:14 + │ +9 │ for i in arr { + │ ^^^ [i256; 5] + +note: + ┌─ for_.fe:9:18 + │ + 9 │ for i in arr { + │ ╭──────────────────^ +10 │ │ res += i +11 │ │ } + │ ╰─────^ () + +note: + ┌─ for_.fe:10:9 + │ +10 │ res += i + │ ^^^ i256 + +note: + ┌─ for_.fe:10:9 + │ +10 │ res += i + │ ^^^^^^^^ () + +note: + ┌─ for_.fe:10:16 + │ +10 │ res += i + │ ^ i256 + +note: + ┌─ for_.fe:13:5 + │ +13 │ res + │ ^^^ i256 + +note: + ┌─ for_.fe:16:35 + │ +16 │ fn bar(foo_arr: [Foo; 10]) -> i32 { + │ ╭───────────────────────────────────^ +17 │ │ let mut res = 0 +18 │ │ for Foo {x, y} in foo_arr { +19 │ │ res += x + y + · │ +22 │ │ res +23 │ │ } + │ ╰─^ i32 + +note: + ┌─ for_.fe:17:9 + │ +17 │ let mut res = 0 + │ ^^^^^^^ i32 + +note: + ┌─ for_.fe:17:19 + │ +17 │ let mut res = 0 + │ ^ i32 + +note: + ┌─ for_.fe:18:9 + │ +18 │ for Foo {x, y} in foo_arr { + │ ^^^^^^^^^^ Foo + +note: + ┌─ for_.fe:18:14 + │ +18 │ for Foo {x, y} in foo_arr { + │ ^ i32 + +note: + ┌─ for_.fe:18:17 + │ +18 │ for Foo {x, y} in foo_arr { + │ ^ i32 + +note: + ┌─ for_.fe:18:23 + │ +18 │ for Foo {x, y} in foo_arr { + │ ^^^^^^^ [Foo; 10] + +note: + ┌─ for_.fe:18:31 + │ +18 │ for Foo {x, y} in foo_arr { + │ ╭───────────────────────────────^ +19 │ │ res += x + y +20 │ │ } + │ ╰─────^ () + +note: + ┌─ for_.fe:19:9 + │ +19 │ res += x + y + │ ^^^ i32 + +note: + ┌─ for_.fe:19:9 + │ +19 │ res += x + y + │ ^^^^^^^^^^^^ () + +note: + ┌─ for_.fe:19:16 + │ +19 │ res += x + y + │ ^ i32 + +note: + ┌─ for_.fe:19:16 + │ +19 │ res += x + 
y + │ ^^^^^ i32 + +note: + ┌─ for_.fe:19:20 + │ +19 │ res += x + y + │ ^ i32 + +note: + ┌─ for_.fe:22:5 + │ +22 │ res + │ ^^^ i32 diff --git a/crates/hir-analysis/test_files/ty_check/if_.fe b/crates/hir-analysis/test_files/ty_check/if_.fe new file mode 100644 index 0000000000..dfcddff4af --- /dev/null +++ b/crates/hir-analysis/test_files/ty_check/if_.fe @@ -0,0 +1,44 @@ +fn lit_if() -> i32 { + if true { + 1 + } else { + 2 + } +} + +fn string_if() -> String<5> { + if true { + "1" + } else { + "Foo" + } +} + +// If expression should be typed as `()` when else block doesn't exist. +fn no_else() -> () { + let x = if true { + false + } +} + +fn else_if(b1: bool, b2: bool) -> i32 { + if b1 { + 1 + } else if b2 { + 2 + } else { + 3 + } +} + +fn nested(b1: bool, b2: bool) -> String<10> { + if b1 { + "Foo" + } else { + if b2 { + "Bar" + } else { + "Baz" + } + } +} \ No newline at end of file diff --git a/crates/hir-analysis/test_files/ty_check/if_.snap b/crates/hir-analysis/test_files/ty_check/if_.snap new file mode 100644 index 0000000000..8e55918f4a --- /dev/null +++ b/crates/hir-analysis/test_files/ty_check/if_.snap @@ -0,0 +1,367 @@ +--- +source: crates/hir-analysis/tests/ty_check.rs +expression: res +input_file: crates/hir-analysis/test_files/ty_check/if_.fe +--- +note: + ┌─ if_.fe:1:20 + │ +1 │ fn lit_if() -> i32 { + │ ╭────────────────────^ +2 │ │ if true { +3 │ │ 1 +4 │ │ } else { +5 │ │ 2 +6 │ │ } +7 │ │ } + │ ╰─^ i32 + +note: + ┌─ if_.fe:2:5 + │ +2 │ ╭ if true { +3 │ │ 1 +4 │ │ } else { +5 │ │ 2 +6 │ │ } + │ ╰─────^ i32 + +note: + ┌─ if_.fe:2:8 + │ +2 │ if true { + │ ^^^^ bool + +note: + ┌─ if_.fe:2:13 + │ +2 │ if true { + │ ╭─────────────^ +3 │ │ 1 +4 │ │ } else { + │ ╰─────^ i32 + +note: + ┌─ if_.fe:3:9 + │ +3 │ 1 + │ ^ i32 + +note: + ┌─ if_.fe:4:12 + │ +4 │ } else { + │ ╭────────────^ +5 │ │ 2 +6 │ │ } + │ ╰─────^ i32 + +note: + ┌─ if_.fe:5:9 + │ +5 │ 2 + │ ^ i32 + +note: + ┌─ if_.fe:9:29 + │ + 9 │ fn string_if() -> String<5> { + │ 
╭─────────────────────────────^ +10 │ │ if true { +11 │ │ "1" +12 │ │ } else { +13 │ │ "Foo" +14 │ │ } +15 │ │ } + │ ╰─^ String<5> + +note: + ┌─ if_.fe:10:5 + │ +10 │ ╭ if true { +11 │ │ "1" +12 │ │ } else { +13 │ │ "Foo" +14 │ │ } + │ ╰─────^ String<5> + +note: + ┌─ if_.fe:10:8 + │ +10 │ if true { + │ ^^^^ bool + +note: + ┌─ if_.fe:10:13 + │ +10 │ if true { + │ ╭─────────────^ +11 │ │ "1" +12 │ │ } else { + │ ╰─────^ String<5> + +note: + ┌─ if_.fe:11:9 + │ +11 │ "1" + │ ^^^ String<5> + +note: + ┌─ if_.fe:12:12 + │ +12 │ } else { + │ ╭────────────^ +13 │ │ "Foo" +14 │ │ } + │ ╰─────^ String<5> + +note: + ┌─ if_.fe:13:9 + │ +13 │ "Foo" + │ ^^^^^ String<5> + +note: + ┌─ if_.fe:18:20 + │ +18 │ fn no_else() -> () { + │ ╭────────────────────^ +19 │ │ let x = if true { +20 │ │ false +21 │ │ } +22 │ │ } + │ ╰─^ () + +note: + ┌─ if_.fe:19:9 + │ +19 │ let x = if true { + │ ^ () + +note: + ┌─ if_.fe:19:13 + │ +19 │ let x = if true { + │ ╭─────────────^ +20 │ │ false +21 │ │ } + │ ╰─────^ () + +note: + ┌─ if_.fe:19:16 + │ +19 │ let x = if true { + │ ^^^^ bool + +note: + ┌─ if_.fe:19:21 + │ +19 │ let x = if true { + │ ╭─────────────────────^ +20 │ │ false +21 │ │ } + │ ╰─────^ bool + +note: + ┌─ if_.fe:20:9 + │ +20 │ false + │ ^^^^^ bool + +note: + ┌─ if_.fe:24:39 + │ +24 │ fn else_if(b1: bool, b2: bool) -> i32 { + │ ╭───────────────────────────────────────^ +25 │ │ if b1 { +26 │ │ 1 +27 │ │ } else if b2 { + · │ +31 │ │ } +32 │ │ } + │ ╰─^ i32 + +note: + ┌─ if_.fe:25:5 + │ +25 │ ╭ if b1 { +26 │ │ 1 +27 │ │ } else if b2 { +28 │ │ 2 +29 │ │ } else { +30 │ │ 3 +31 │ │ } + │ ╰─────^ i32 + +note: + ┌─ if_.fe:25:8 + │ +25 │ if b1 { + │ ^^ bool + +note: + ┌─ if_.fe:25:11 + │ +25 │ if b1 { + │ ╭───────────^ +26 │ │ 1 +27 │ │ } else if b2 { + │ ╰─────^ i32 + +note: + ┌─ if_.fe:26:9 + │ +26 │ 1 + │ ^ i32 + +note: + ┌─ if_.fe:27:12 + │ +27 │ } else if b2 { + │ ╭────────────^ +28 │ │ 2 +29 │ │ } else { +30 │ │ 3 +31 │ │ } + │ ╰─────^ i32 + +note: + ┌─ if_.fe:27:15 + │ +27 │ } else if b2 { 
+ │ ^^ bool + +note: + ┌─ if_.fe:27:18 + │ +27 │ } else if b2 { + │ ╭──────────────────^ +28 │ │ 2 +29 │ │ } else { + │ ╰─────^ i32 + +note: + ┌─ if_.fe:28:9 + │ +28 │ 2 + │ ^ i32 + +note: + ┌─ if_.fe:29:12 + │ +29 │ } else { + │ ╭────────────^ +30 │ │ 3 +31 │ │ } + │ ╰─────^ i32 + +note: + ┌─ if_.fe:30:9 + │ +30 │ 3 + │ ^ i32 + +note: + ┌─ if_.fe:34:45 + │ +34 │ fn nested(b1: bool, b2: bool) -> String<10> { + │ ╭─────────────────────────────────────────────^ +35 │ │ if b1 { +36 │ │ "Foo" +37 │ │ } else { + · │ +43 │ │ } +44 │ │ } + │ ╰─^ String<10> + +note: + ┌─ if_.fe:35:5 + │ +35 │ ╭ if b1 { +36 │ │ "Foo" +37 │ │ } else { +38 │ │ if b2 { + · │ +42 │ │ } +43 │ │ } + │ ╰─────^ String<10> + +note: + ┌─ if_.fe:35:8 + │ +35 │ if b1 { + │ ^^ bool + +note: + ┌─ if_.fe:35:11 + │ +35 │ if b1 { + │ ╭───────────^ +36 │ │ "Foo" +37 │ │ } else { + │ ╰─────^ String<10> + +note: + ┌─ if_.fe:36:9 + │ +36 │ "Foo" + │ ^^^^^ String<10> + +note: + ┌─ if_.fe:37:12 + │ +37 │ } else { + │ ╭────────────^ +38 │ │ if b2 { +39 │ │ "Bar" +40 │ │ } else { +41 │ │ "Baz" +42 │ │ } +43 │ │ } + │ ╰─────^ String<10> + +note: + ┌─ if_.fe:38:9 + │ +38 │ ╭ if b2 { +39 │ │ "Bar" +40 │ │ } else { +41 │ │ "Baz" +42 │ │ } + │ ╰─────────^ String<10> + +note: + ┌─ if_.fe:38:12 + │ +38 │ if b2 { + │ ^^ bool + +note: + ┌─ if_.fe:38:15 + │ +38 │ if b2 { + │ ╭───────────────^ +39 │ │ "Bar" +40 │ │ } else { + │ ╰─────────^ String<10> + +note: + ┌─ if_.fe:39:13 + │ +39 │ "Bar" + │ ^^^^^ String<10> + +note: + ┌─ if_.fe:40:16 + │ +40 │ } else { + │ ╭────────────────^ +41 │ │ "Baz" +42 │ │ } + │ ╰─────────^ String<10> + +note: + ┌─ if_.fe:41:13 + │ +41 │ "Baz" + │ ^^^^^ String<10> diff --git a/crates/hir-analysis/test_files/ty_check/index.fe b/crates/hir-analysis/test_files/ty_check/index.fe new file mode 100644 index 0000000000..d602050c05 --- /dev/null +++ b/crates/hir-analysis/test_files/ty_check/index.fe @@ -0,0 +1,4 @@ +pub fn foo() -> u32 { + let x = [10; 3] + x[1] +} diff --git 
a/crates/hir-analysis/test_files/ty_check/index.snap b/crates/hir-analysis/test_files/ty_check/index.snap new file mode 100644 index 0000000000..145be7dd16 --- /dev/null +++ b/crates/hir-analysis/test_files/ty_check/index.snap @@ -0,0 +1,50 @@ +--- +source: crates/hir-analysis/tests/ty_check.rs +expression: res +input_file: crates/hir-analysis/test_files/ty_check/index.fe +--- +note: + ┌─ index.fe:1:21 + │ +1 │ pub fn foo() -> u32 { + │ ╭─────────────────────^ +2 │ │ let x = [10; 3] +3 │ │ x[1] +4 │ │ } + │ ╰─^ u32 + +note: + ┌─ index.fe:2:9 + │ +2 │ let x = [10; 3] + │ ^ [u32; 3] + +note: + ┌─ index.fe:2:13 + │ +2 │ let x = [10; 3] + │ ^^^^^^^ [u32; 3] + +note: + ┌─ index.fe:2:14 + │ +2 │ let x = [10; 3] + │ ^^ u32 + +note: + ┌─ index.fe:3:5 + │ +3 │ x[1] + │ ^ [u32; 3] + +note: + ┌─ index.fe:3:5 + │ +3 │ x[1] + │ ^^^^ u32 + +note: + ┌─ index.fe:3:7 + │ +3 │ x[1] + │ ^ u256 diff --git a/crates/hir-analysis/test_files/ty_check/let_binding.fe b/crates/hir-analysis/test_files/ty_check/let_binding.fe new file mode 100644 index 0000000000..0855d1d69e --- /dev/null +++ b/crates/hir-analysis/test_files/ty_check/let_binding.fe @@ -0,0 +1,4 @@ +pub fn foo() { + let x: i32 = 1 + let s = "Foo" +} diff --git a/crates/hir-analysis/test_files/ty_check/let_binding.snap b/crates/hir-analysis/test_files/ty_check/let_binding.snap new file mode 100644 index 0000000000..7a04e1565f --- /dev/null +++ b/crates/hir-analysis/test_files/ty_check/let_binding.snap @@ -0,0 +1,38 @@ +--- +source: crates/hir-analysis/tests/ty_check.rs +expression: res +input_file: crates/hir-analysis/test_files/ty_check/let_binding.fe +--- +note: + ┌─ let_binding.fe:1:14 + │ +1 │ pub fn foo() { + │ ╭──────────────^ +2 │ │ let x: i32 = 1 +3 │ │ let s = "Foo" +4 │ │ } + │ ╰─^ () + +note: + ┌─ let_binding.fe:2:9 + │ +2 │ let x: i32 = 1 + │ ^ i32 + +note: + ┌─ let_binding.fe:2:18 + │ +2 │ let x: i32 = 1 + │ ^ i32 + +note: + ┌─ let_binding.fe:3:9 + │ +3 │ let s = "Foo" + │ ^ String<3> + +note: + ┌─ 
let_binding.fe:3:13 + │ +3 │ let s = "Foo" + │ ^^^^^ String<3> diff --git a/crates/hir-analysis/test_files/ty_check/lit_int.fe b/crates/hir-analysis/test_files/ty_check/lit_int.fe new file mode 100644 index 0000000000..2d6f1ca251 --- /dev/null +++ b/crates/hir-analysis/test_files/ty_check/lit_int.fe @@ -0,0 +1,7 @@ +fn lit_i32() -> i32 { + 1 +} + +fn lit_i64() -> i64 { + 1 +} \ No newline at end of file diff --git a/crates/hir-analysis/test_files/ty_check/lit_int.snap b/crates/hir-analysis/test_files/ty_check/lit_int.snap new file mode 100644 index 0000000000..996293cf67 --- /dev/null +++ b/crates/hir-analysis/test_files/ty_check/lit_int.snap @@ -0,0 +1,34 @@ +--- +source: crates/hir-analysis/tests/ty_check.rs +expression: res +input_file: crates/hir-analysis/test_files/ty_check/lit_int.fe +--- +note: + ┌─ lit_int.fe:1:21 + │ +1 │ fn lit_i32() -> i32 { + │ ╭─────────────────────^ +2 │ │ 1 +3 │ │ } + │ ╰─^ i32 + +note: + ┌─ lit_int.fe:2:5 + │ +2 │ 1 + │ ^ i32 + +note: + ┌─ lit_int.fe:5:21 + │ +5 │ fn lit_i64() -> i64 { + │ ╭─────────────────────^ +6 │ │ 1 +7 │ │ } + │ ╰─^ i64 + +note: + ┌─ lit_int.fe:6:5 + │ +6 │ 1 + │ ^ i64 diff --git a/crates/hir-analysis/test_files/ty_check/lit_str.fe b/crates/hir-analysis/test_files/ty_check/lit_str.fe new file mode 100644 index 0000000000..9f8aad9586 --- /dev/null +++ b/crates/hir-analysis/test_files/ty_check/lit_str.fe @@ -0,0 +1,7 @@ +pub fn lit_str1() -> String<1> { + "a" +} + +pub fn lit_str2() -> String<10> { + "abc" +} \ No newline at end of file diff --git a/crates/hir-analysis/test_files/ty_check/lit_str.snap b/crates/hir-analysis/test_files/ty_check/lit_str.snap new file mode 100644 index 0000000000..bcfdbcfef2 --- /dev/null +++ b/crates/hir-analysis/test_files/ty_check/lit_str.snap @@ -0,0 +1,34 @@ +--- +source: crates/hir-analysis/tests/ty_check.rs +expression: res +input_file: crates/hir-analysis/test_files/ty_check/lit_str.fe +--- +note: + ┌─ lit_str.fe:1:32 + │ +1 │ pub fn lit_str1() -> String<1> { + │ 
╭────────────────────────────────^ +2 │ │ "a" +3 │ │ } + │ ╰─^ String<1> + +note: + ┌─ lit_str.fe:2:5 + │ +2 │ "a" + │ ^^^ String<1> + +note: + ┌─ lit_str.fe:5:33 + │ +5 │ pub fn lit_str2() -> String<10> { + │ ╭─────────────────────────────────^ +6 │ │ "abc" +7 │ │ } + │ ╰─^ String<10> + +note: + ┌─ lit_str.fe:6:5 + │ +6 │ "abc" + │ ^^^^^ String<10> diff --git a/crates/hir-analysis/test_files/ty_check/match_.fe b/crates/hir-analysis/test_files/ty_check/match_.fe new file mode 100644 index 0000000000..b2bf5b412a --- /dev/null +++ b/crates/hir-analysis/test_files/ty_check/match_.fe @@ -0,0 +1,28 @@ +pub enum E { + Var { x: i32, u: i32 }, + Var2(E2) +} + +pub enum E2 { + Var(i32) +} + +impl E { + fn extract_num(self) -> i32 { + match self { + Self::Var { x, .. } => x + Self::Var2(E2::Var(x)) => x + } + } +} + +pub fn foo(e: E) -> i32 { + match e { + E::Var { x, .. } => { + x + } + E::Var2(E2::Var(x)) => { + x + } + } +} \ No newline at end of file diff --git a/crates/hir-analysis/test_files/ty_check/match_.snap b/crates/hir-analysis/test_files/ty_check/match_.snap new file mode 100644 index 0000000000..986df7e65c --- /dev/null +++ b/crates/hir-analysis/test_files/ty_check/match_.snap @@ -0,0 +1,176 @@ +--- +source: crates/hir-analysis/tests/ty_check.rs +expression: res +input_file: crates/hir-analysis/test_files/ty_check/match_.fe +--- +note: + ┌─ match_.fe:11:33 + │ +11 │ fn extract_num(self) -> i32 { + │ ╭─────────────────────────────────^ +12 │ │ match self { +13 │ │ Self::Var { x, .. } => x +14 │ │ Self::Var2(E2::Var(x)) => x +15 │ │ } +16 │ │ } + │ ╰─────^ i32 + +note: + ┌─ match_.fe:12:9 + │ +12 │ ╭ match self { +13 │ │ Self::Var { x, .. } => x +14 │ │ Self::Var2(E2::Var(x)) => x +15 │ │ } + │ ╰─────────^ i32 + +note: + ┌─ match_.fe:12:15 + │ +12 │ match self { + │ ^^^^ E + +note: + ┌─ match_.fe:13:13 + │ +13 │ Self::Var { x, .. } => x + │ ^^^^^^^^^^^^^^^^^^^ E + +note: + ┌─ match_.fe:13:25 + │ +13 │ Self::Var { x, .. 
} => x + │ ^ i32 + +note: + ┌─ match_.fe:13:28 + │ +13 │ Self::Var { x, .. } => x + │ ^^ + +note: + ┌─ match_.fe:13:36 + │ +13 │ Self::Var { x, .. } => x + │ ^ i32 + +note: + ┌─ match_.fe:14:13 + │ +14 │ Self::Var2(E2::Var(x)) => x + │ ^^^^^^^^^^^^^^^^^^^^^^ E + +note: + ┌─ match_.fe:14:24 + │ +14 │ Self::Var2(E2::Var(x)) => x + │ ^^^^^^^^^^ E2 + +note: + ┌─ match_.fe:14:32 + │ +14 │ Self::Var2(E2::Var(x)) => x + │ ^ i32 + +note: + ┌─ match_.fe:14:39 + │ +14 │ Self::Var2(E2::Var(x)) => x + │ ^ i32 + +note: + ┌─ match_.fe:19:25 + │ +19 │ pub fn foo(e: E) -> i32 { + │ ╭─────────────────────────^ +20 │ │ match e { +21 │ │ E::Var { x, .. } => { +22 │ │ x + · │ +27 │ │ } +28 │ │ } + │ ╰─^ i32 + +note: + ┌─ match_.fe:20:5 + │ +20 │ ╭ match e { +21 │ │ E::Var { x, .. } => { +22 │ │ x +23 │ │ } + · │ +26 │ │ } +27 │ │ } + │ ╰─────^ i32 + +note: + ┌─ match_.fe:20:11 + │ +20 │ match e { + │ ^ E + +note: + ┌─ match_.fe:21:9 + │ +21 │ E::Var { x, .. } => { + │ ^^^^^^^^^^^^^^^^ E + +note: + ┌─ match_.fe:21:18 + │ +21 │ E::Var { x, .. } => { + │ ^ i32 + +note: + ┌─ match_.fe:21:21 + │ +21 │ E::Var { x, .. } => { + │ ^^ + +note: + ┌─ match_.fe:21:29 + │ +21 │ E::Var { x, .. 
} => { + │ ╭─────────────────────────────^ +22 │ │ x +23 │ │ } + │ ╰─────────^ i32 + +note: + ┌─ match_.fe:22:13 + │ +22 │ x + │ ^ i32 + +note: + ┌─ match_.fe:24:9 + │ +24 │ E::Var2(E2::Var(x)) => { + │ ^^^^^^^^^^^^^^^^^^^ E + +note: + ┌─ match_.fe:24:17 + │ +24 │ E::Var2(E2::Var(x)) => { + │ ^^^^^^^^^^ E2 + +note: + ┌─ match_.fe:24:25 + │ +24 │ E::Var2(E2::Var(x)) => { + │ ^ i32 + +note: + ┌─ match_.fe:24:32 + │ +24 │ E::Var2(E2::Var(x)) => { + │ ╭────────────────────────────────^ +25 │ │ x +26 │ │ } + │ ╰─────────^ i32 + +note: + ┌─ match_.fe:25:13 + │ +25 │ x + │ ^ i32 diff --git a/crates/hir-analysis/test_files/ty_check/method.fe b/crates/hir-analysis/test_files/ty_check/method.fe new file mode 100644 index 0000000000..74463ba042 --- /dev/null +++ b/crates/hir-analysis/test_files/ty_check/method.fe @@ -0,0 +1,54 @@ +mod evm { + extern { + pub fn abort() -> ! + } +} + +pub fn todo() -> ! { + evm::abort() +} + +pub enum Option { + None, + Some(T) +} + +impl Option { + pub fn and(self, rhs: Option) -> Option { + match self { + Self::Some(t) => rhs + Self::None => Option::None + } + } + + pub fn get_or_insert(mut self, inner t: T) -> T { + match self { + Self::Some(t) => t + Self::None => { + self = Self::Some(t) + t + } + } + } +} + +impl Option { + pub fn unwrap(self: Self) -> T { + match self { + Self::None => evm::abort() + Self::Some(t) => t + } + } +} + +fn get_t() -> T { + todo() +} + +fn foo() -> Option { + let mut x: Option = Option::Some(1) + let func_obj = get_t + x.get_or_insert(inner: func_obj()) + + x.and(rhs: Option::Some(false)) +} diff --git a/crates/hir-analysis/test_files/ty_check/method.snap b/crates/hir-analysis/test_files/ty_check/method.snap new file mode 100644 index 0000000000..801570dcf0 --- /dev/null +++ b/crates/hir-analysis/test_files/ty_check/method.snap @@ -0,0 +1,370 @@ +--- +source: crates/hir-analysis/tests/ty_check.rs +expression: res +input_file: crates/hir-analysis/test_files/ty_check/method.fe +--- +note: + ┌─ method.fe:7:20 + 
│ +7 │ pub fn todo() -> ! { + │ ╭────────────────────^ +8 │ │ evm::abort() +9 │ │ } + │ ╰─^ ! + +note: + ┌─ method.fe:8:5 + │ +8 │ evm::abort() + │ ^^^^^^^^^^ fn abort + +note: + ┌─ method.fe:8:5 + │ +8 │ evm::abort() + │ ^^^^^^^^^^^^ ! + +note: + ┌─ method.fe:17:54 + │ +17 │ pub fn and(self, rhs: Option) -> Option { + │ ╭──────────────────────────────────────────────────────^ +18 │ │ match self { +19 │ │ Self::Some(t) => rhs +20 │ │ Self::None => Option::None +21 │ │ } +22 │ │ } + │ ╰─────^ Option + +note: + ┌─ method.fe:18:9 + │ +18 │ ╭ match self { +19 │ │ Self::Some(t) => rhs +20 │ │ Self::None => Option::None +21 │ │ } + │ ╰─────────^ Option + +note: + ┌─ method.fe:18:15 + │ +18 │ match self { + │ ^^^^ Option + +note: + ┌─ method.fe:19:13 + │ +19 │ Self::Some(t) => rhs + │ ^^^^^^^^^^^^^ Option + +note: + ┌─ method.fe:19:24 + │ +19 │ Self::Some(t) => rhs + │ ^ T + +note: + ┌─ method.fe:19:30 + │ +19 │ Self::Some(t) => rhs + │ ^^^ Option + +note: + ┌─ method.fe:20:13 + │ +20 │ Self::None => Option::None + │ ^^^^^^^^^^ Option + +note: + ┌─ method.fe:20:27 + │ +20 │ Self::None => Option::None + │ ^^^^^^^^^^^^ Option + +note: + ┌─ method.fe:24:53 + │ +24 │ pub fn get_or_insert(mut self, inner t: T) -> T { + │ ╭─────────────────────────────────────────────────────^ +25 │ │ match self { +26 │ │ Self::Some(t) => t +27 │ │ Self::None => { + · │ +31 │ │ } +32 │ │ } + │ ╰─────^ T + +note: + ┌─ method.fe:25:9 + │ +25 │ ╭ match self { +26 │ │ Self::Some(t) => t +27 │ │ Self::None => { +28 │ │ self = Self::Some(t) +29 │ │ t +30 │ │ } +31 │ │ } + │ ╰─────────^ T + +note: + ┌─ method.fe:25:15 + │ +25 │ match self { + │ ^^^^ Option + +note: + ┌─ method.fe:26:13 + │ +26 │ Self::Some(t) => t + │ ^^^^^^^^^^^^^ Option + +note: + ┌─ method.fe:26:24 + │ +26 │ Self::Some(t) => t + │ ^ T + +note: + ┌─ method.fe:26:30 + │ +26 │ Self::Some(t) => t + │ ^ T + +note: + ┌─ method.fe:27:13 + │ +27 │ Self::None => { + │ ^^^^^^^^^^ Option + +note: + ┌─ method.fe:27:27 + │ +27 │ Self::None => { 
+ │ ╭───────────────────────────^ +28 │ │ self = Self::Some(t) +29 │ │ t +30 │ │ } + │ ╰─────────────^ T + +note: + ┌─ method.fe:28:17 + │ +28 │ self = Self::Some(t) + │ ^^^^ Option + +note: + ┌─ method.fe:28:17 + │ +28 │ self = Self::Some(t) + │ ^^^^^^^^^^^^^^^^^^^^ () + +note: + ┌─ method.fe:28:24 + │ +28 │ self = Self::Some(t) + │ ^^^^^^^^^^ fn Some + +note: + ┌─ method.fe:28:24 + │ +28 │ self = Self::Some(t) + │ ^^^^^^^^^^^^^ Option + +note: + ┌─ method.fe:28:35 + │ +28 │ self = Self::Some(t) + │ ^ T + +note: + ┌─ method.fe:29:17 + │ +29 │ t + │ ^ T + +note: + ┌─ method.fe:36:42 + │ +36 │ pub fn unwrap(self: Self) -> T { + │ ╭──────────────────────────────────────────^ +37 │ │ match self { +38 │ │ Self::None => evm::abort() +39 │ │ Self::Some(t) => t +40 │ │ } +41 │ │ } + │ ╰─────^ T + +note: + ┌─ method.fe:37:9 + │ +37 │ ╭ match self { +38 │ │ Self::None => evm::abort() +39 │ │ Self::Some(t) => t +40 │ │ } + │ ╰─────────^ T + +note: + ┌─ method.fe:37:15 + │ +37 │ match self { + │ ^^^^ Option + +note: + ┌─ method.fe:38:13 + │ +38 │ Self::None => evm::abort() + │ ^^^^^^^^^^ Option + +note: + ┌─ method.fe:38:27 + │ +38 │ Self::None => evm::abort() + │ ^^^^^^^^^^ fn abort + +note: + ┌─ method.fe:38:27 + │ +38 │ Self::None => evm::abort() + │ ^^^^^^^^^^^^ ! 
+ +note: + ┌─ method.fe:39:13 + │ +39 │ Self::Some(t) => t + │ ^^^^^^^^^^^^^ Option + +note: + ┌─ method.fe:39:24 + │ +39 │ Self::Some(t) => t + │ ^ T + +note: + ┌─ method.fe:39:30 + │ +39 │ Self::Some(t) => t + │ ^ T + +note: + ┌─ method.fe:44:20 + │ +44 │ fn get_t() -> T { + │ ╭────────────────────^ +45 │ │ todo() +46 │ │ } + │ ╰─^ T + +note: + ┌─ method.fe:45:5 + │ +45 │ todo() + │ ^^^^ fn todo + +note: + ┌─ method.fe:45:5 + │ +45 │ todo() + │ ^^^^^^ T + +note: + ┌─ method.fe:48:26 + │ +48 │ fn foo() -> Option { + │ ╭──────────────────────────^ +49 │ │ let mut x: Option = Option::Some(1) +50 │ │ let func_obj = get_t +51 │ │ x.get_or_insert(inner: func_obj()) +52 │ │ +53 │ │ x.and(rhs: Option::Some(false)) +54 │ │ } + │ ╰─^ Option + +note: + ┌─ method.fe:49:9 + │ +49 │ let mut x: Option = Option::Some(1) + │ ^^^^^ Option + +note: + ┌─ method.fe:49:30 + │ +49 │ let mut x: Option = Option::Some(1) + │ ^^^^^^^^^^^^ fn Some + +note: + ┌─ method.fe:49:30 + │ +49 │ let mut x: Option = Option::Some(1) + │ ^^^^^^^^^^^^^^^ Option + +note: + ┌─ method.fe:49:43 + │ +49 │ let mut x: Option = Option::Some(1) + │ ^ i32 + +note: + ┌─ method.fe:50:9 + │ +50 │ let func_obj = get_t + │ ^^^^^^^^ fn get_t + +note: + ┌─ method.fe:50:20 + │ +50 │ let func_obj = get_t + │ ^^^^^ fn get_t + +note: + ┌─ method.fe:51:5 + │ +51 │ x.get_or_insert(inner: func_obj()) + │ ^ Option + +note: + ┌─ method.fe:51:5 + │ +51 │ x.get_or_insert(inner: func_obj()) + │ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ i32 + +note: + ┌─ method.fe:51:28 + │ +51 │ x.get_or_insert(inner: func_obj()) + │ ^^^^^^^^ fn get_t + +note: + ┌─ method.fe:51:28 + │ +51 │ x.get_or_insert(inner: func_obj()) + │ ^^^^^^^^^^ i32 + +note: + ┌─ method.fe:53:5 + │ +53 │ x.and(rhs: Option::Some(false)) + │ ^ Option + +note: + ┌─ method.fe:53:5 + │ +53 │ x.and(rhs: Option::Some(false)) + │ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Option + +note: + ┌─ method.fe:53:16 + │ +53 │ x.and(rhs: Option::Some(false)) + │ ^^^^^^^^^^^^ fn Some + +note: + ┌─ 
method.fe:53:16 + │ +53 │ x.and(rhs: Option::Some(false)) + │ ^^^^^^^^^^^^^^^^^^^ Option + +note: + ┌─ method.fe:53:29 + │ +53 │ x.and(rhs: Option::Some(false)) + │ ^^^^^ bool diff --git a/crates/hir-analysis/test_files/ty_check/method/generics.fe b/crates/hir-analysis/test_files/ty_check/method/generics.fe new file mode 100644 index 0000000000..ba9fca2b19 --- /dev/null +++ b/crates/hir-analysis/test_files/ty_check/method/generics.fe @@ -0,0 +1,41 @@ +trait Clamp { + fn clamp_into(self) -> Out +} + +trait Add { + fn add(self, _ rhs: Self) -> Self +} + +impl Add for i32 { + fn add(self, _ rhs: Self) -> Self { + self + rhs + } +} + +extern { + fn clamp_i64_i32(_ x: i64) -> i32 +} + +impl Clamp for i64 { + fn clamp_into(self) -> i32 { + clamp_i64_i32(self) + } +} + +trait ClampedAdd { + fn clamped_add(self, _ rhs: Rhs) -> Self +} + +impl ClampedAdd for T +where RHS: Clamp, + T: Add +{ + fn clamped_add(self, _ rhs: RHS) -> Self { + let rhs = rhs.clamp_into() + self.add(rhs) + } +} + +fn foo(x: i32, y: i64) -> i32 { + x.clamped_add(y) +} \ No newline at end of file diff --git a/crates/hir-analysis/test_files/ty_check/method/generics.snap b/crates/hir-analysis/test_files/ty_check/method/generics.snap new file mode 100644 index 0000000000..183e8631eb --- /dev/null +++ b/crates/hir-analysis/test_files/ty_check/method/generics.snap @@ -0,0 +1,131 @@ +--- +source: crates/hir-analysis/tests/ty_check.rs +expression: res +input_file: crates/hir-analysis/test_files/ty_check/method/generics.fe +--- +note: + ┌─ generics.fe:10:39 + │ +10 │ fn add(self, _ rhs: Self) -> Self { + │ ╭───────────────────────────────────────^ +11 │ │ self + rhs +12 │ │ } + │ ╰─────^ i32 + +note: + ┌─ generics.fe:11:9 + │ +11 │ self + rhs + │ ^^^^ i32 + +note: + ┌─ generics.fe:11:9 + │ +11 │ self + rhs + │ ^^^^^^^^^^ i32 + +note: + ┌─ generics.fe:11:16 + │ +11 │ self + rhs + │ ^^^ i32 + +note: + ┌─ generics.fe:20:32 + │ +20 │ fn clamp_into(self) -> i32 { + │ ╭────────────────────────────────^ +21 │ │ 
clamp_i64_i32(self) +22 │ │ } + │ ╰─────^ i32 + +note: + ┌─ generics.fe:21:9 + │ +21 │ clamp_i64_i32(self) + │ ^^^^^^^^^^^^^ fn clamp_i64_i32 + +note: + ┌─ generics.fe:21:9 + │ +21 │ clamp_i64_i32(self) + │ ^^^^^^^^^^^^^^^^^^^ i32 + +note: + ┌─ generics.fe:21:23 + │ +21 │ clamp_i64_i32(self) + │ ^^^^ i64 + +note: + ┌─ generics.fe:33:46 + │ +33 │ fn clamped_add(self, _ rhs: RHS) -> Self { + │ ╭──────────────────────────────────────────────^ +34 │ │ let rhs = rhs.clamp_into() +35 │ │ self.add(rhs) +36 │ │ } + │ ╰─────^ T + +note: + ┌─ generics.fe:34:13 + │ +34 │ let rhs = rhs.clamp_into() + │ ^^^ T + +note: + ┌─ generics.fe:34:19 + │ +34 │ let rhs = rhs.clamp_into() + │ ^^^ RHS + +note: + ┌─ generics.fe:34:19 + │ +34 │ let rhs = rhs.clamp_into() + │ ^^^^^^^^^^^^^^^^ T + +note: + ┌─ generics.fe:35:9 + │ +35 │ self.add(rhs) + │ ^^^^ T + +note: + ┌─ generics.fe:35:9 + │ +35 │ self.add(rhs) + │ ^^^^^^^^^^^^^ T + +note: + ┌─ generics.fe:35:18 + │ +35 │ self.add(rhs) + │ ^^^ T + +note: + ┌─ generics.fe:39:31 + │ +39 │ fn foo(x: i32, y: i64) -> i32 { + │ ╭───────────────────────────────^ +40 │ │ x.clamped_add(y) +41 │ │ } + │ ╰─^ i32 + +note: + ┌─ generics.fe:40:5 + │ +40 │ x.clamped_add(y) + │ ^ i32 + +note: + ┌─ generics.fe:40:5 + │ +40 │ x.clamped_add(y) + │ ^^^^^^^^^^^^^^^^ i32 + +note: + ┌─ generics.fe:40:19 + │ +40 │ x.clamped_add(y) + │ ^ i64 diff --git a/crates/hir-analysis/test_files/ty_check/method/infer_by_constraints.fe b/crates/hir-analysis/test_files/ty_check/method/infer_by_constraints.fe new file mode 100644 index 0000000000..ee4cb9acbf --- /dev/null +++ b/crates/hir-analysis/test_files/ty_check/method/infer_by_constraints.fe @@ -0,0 +1,37 @@ +struct S { + t: T, +} + +impl S { + fn new() -> Self { + todo() + } +} + +trait Foo { + fn foo(self) -> (T, U) +} + +impl Foo for S { + fn foo(self) -> (T, i32) { + (self.t, 1) + } +} + +impl Foo for S { + fn foo(self) -> (u32, u32) { + (1, 1) + } +} + +extern { + fn todo() -> ! 
+} + + +fn bar() -> (u64, i32) { + let s = S::new() + + let (x, y) = s.foo() + (x, y) +} diff --git a/crates/hir-analysis/test_files/ty_check/method/infer_by_constraints.snap b/crates/hir-analysis/test_files/ty_check/method/infer_by_constraints.snap new file mode 100644 index 0000000000..12476bc1e8 --- /dev/null +++ b/crates/hir-analysis/test_files/ty_check/method/infer_by_constraints.snap @@ -0,0 +1,163 @@ +--- +source: crates/hir-analysis/tests/ty_check.rs +expression: res +input_file: crates/hir-analysis/test_files/ty_check/method/infer_by_constraints.fe +--- +note: + ┌─ infer_by_constraints.fe:6:22 + │ +6 │ fn new() -> Self { + │ ╭──────────────────────^ +7 │ │ todo() +8 │ │ } + │ ╰─────^ S + +note: + ┌─ infer_by_constraints.fe:7:9 + │ +7 │ todo() + │ ^^^^ fn todo + +note: + ┌─ infer_by_constraints.fe:7:9 + │ +7 │ todo() + │ ^^^^^^ S + +note: + ┌─ infer_by_constraints.fe:16:30 + │ +16 │ fn foo(self) -> (T, i32) { + │ ╭──────────────────────────────^ +17 │ │ (self.t, 1) +18 │ │ } + │ ╰─────^ (T, i32) + +note: + ┌─ infer_by_constraints.fe:17:9 + │ +17 │ (self.t, 1) + │ ^^^^^^^^^^^ (T, i32) + +note: + ┌─ infer_by_constraints.fe:17:10 + │ +17 │ (self.t, 1) + │ ^^^^ S + +note: + ┌─ infer_by_constraints.fe:17:10 + │ +17 │ (self.t, 1) + │ ^^^^^^ T + +note: + ┌─ infer_by_constraints.fe:17:18 + │ +17 │ (self.t, 1) + │ ^ i32 + +note: + ┌─ infer_by_constraints.fe:22:32 + │ +22 │ fn foo(self) -> (u32, u32) { + │ ╭────────────────────────────────^ +23 │ │ (1, 1) +24 │ │ } + │ ╰─────^ (u32, u32) + +note: + ┌─ infer_by_constraints.fe:23:9 + │ +23 │ (1, 1) + │ ^^^^^^ (u32, u32) + +note: + ┌─ infer_by_constraints.fe:23:10 + │ +23 │ (1, 1) + │ ^ u32 + +note: + ┌─ infer_by_constraints.fe:23:13 + │ +23 │ (1, 1) + │ ^ u32 + +note: + ┌─ infer_by_constraints.fe:32:24 + │ +32 │ fn bar() -> (u64, i32) { + │ ╭────────────────────────^ +33 │ │ let s = S::new() +34 │ │ +35 │ │ let (x, y) = s.foo() +36 │ │ (x, y) +37 │ │ } + │ ╰─^ (u64, i32) + +note: + ┌─ infer_by_constraints.fe:33:9 + │ 
+33 │ let s = S::new() + │ ^ S + +note: + ┌─ infer_by_constraints.fe:33:13 + │ +33 │ let s = S::new() + │ ^^^^^^ fn new + +note: + ┌─ infer_by_constraints.fe:33:13 + │ +33 │ let s = S::new() + │ ^^^^^^^^ S + +note: + ┌─ infer_by_constraints.fe:35:9 + │ +35 │ let (x, y) = s.foo() + │ ^^^^^^ (u64, i32) + +note: + ┌─ infer_by_constraints.fe:35:10 + │ +35 │ let (x, y) = s.foo() + │ ^ u64 + +note: + ┌─ infer_by_constraints.fe:35:13 + │ +35 │ let (x, y) = s.foo() + │ ^ i32 + +note: + ┌─ infer_by_constraints.fe:35:18 + │ +35 │ let (x, y) = s.foo() + │ ^ S + +note: + ┌─ infer_by_constraints.fe:35:18 + │ +35 │ let (x, y) = s.foo() + │ ^^^^^^^ (u64, i32) + +note: + ┌─ infer_by_constraints.fe:36:5 + │ +36 │ (x, y) + │ ^^^^^^ (u64, i32) + +note: + ┌─ infer_by_constraints.fe:36:6 + │ +36 │ (x, y) + │ ^ u64 + +note: + ┌─ infer_by_constraints.fe:36:9 + │ +36 │ (x, y) + │ ^ i32 diff --git a/crates/hir-analysis/test_files/ty_check/method/infer_by_method.fe b/crates/hir-analysis/test_files/ty_check/method/infer_by_method.fe new file mode 100644 index 0000000000..015aec79e7 --- /dev/null +++ b/crates/hir-analysis/test_files/ty_check/method/infer_by_method.fe @@ -0,0 +1,24 @@ +enum Option { + Some(T), + None +} + +impl Option { + fn foo(self) {} +} + +impl Option<()> { + fn bool_true() -> Self { + Self::Some(()) + } + fn bool_false() -> Self { + Self::None + } +} + +fn foo() { + let x = Option::None + x.foo() + + let b = Option::bool_true() +} diff --git a/crates/hir-analysis/test_files/ty_check/method/infer_by_method.snap b/crates/hir-analysis/test_files/ty_check/method/infer_by_method.snap new file mode 100644 index 0000000000..fd375c701e --- /dev/null +++ b/crates/hir-analysis/test_files/ty_check/method/infer_by_method.snap @@ -0,0 +1,106 @@ +--- +source: crates/hir-analysis/tests/ty_check.rs +expression: res +input_file: crates/hir-analysis/test_files/ty_check/method/infer_by_method.fe +--- +note: + ┌─ infer_by_method.fe:7:18 + │ +7 │ fn foo(self) {} + │ ^^ () + +note: + ┌─ 
infer_by_method.fe:11:28 + │ +11 │ fn bool_true() -> Self { + │ ╭────────────────────────────^ +12 │ │ Self::Some(()) +13 │ │ } + │ ╰─────^ Option<()> + +note: + ┌─ infer_by_method.fe:12:9 + │ +12 │ Self::Some(()) + │ ^^^^^^^^^^ fn Some<()> + +note: + ┌─ infer_by_method.fe:12:9 + │ +12 │ Self::Some(()) + │ ^^^^^^^^^^^^^^ Option<()> + +note: + ┌─ infer_by_method.fe:12:20 + │ +12 │ Self::Some(()) + │ ^^ () + +note: + ┌─ infer_by_method.fe:14:29 + │ +14 │ fn bool_false() -> Self { + │ ╭─────────────────────────────^ +15 │ │ Self::None +16 │ │ } + │ ╰─────^ Option<()> + +note: + ┌─ infer_by_method.fe:15:9 + │ +15 │ Self::None + │ ^^^^^^^^^^ Option<()> + +note: + ┌─ infer_by_method.fe:19:10 + │ +19 │ fn foo() { + │ ╭──────────^ +20 │ │ let x = Option::None +21 │ │ x.foo() +22 │ │ +23 │ │ let b = Option::bool_true() +24 │ │ } + │ ╰─^ () + +note: + ┌─ infer_by_method.fe:20:9 + │ +20 │ let x = Option::None + │ ^ Option + +note: + ┌─ infer_by_method.fe:20:13 + │ +20 │ let x = Option::None + │ ^^^^^^^^^^^^ Option + +note: + ┌─ infer_by_method.fe:21:5 + │ +21 │ x.foo() + │ ^ Option + +note: + ┌─ infer_by_method.fe:21:5 + │ +21 │ x.foo() + │ ^^^^^^^ () + +note: + ┌─ infer_by_method.fe:23:9 + │ +23 │ let b = Option::bool_true() + │ ^ Option<()> + +note: + ┌─ infer_by_method.fe:23:13 + │ +23 │ let b = Option::bool_true() + │ ^^^^^^^^^^^^^^^^^ fn bool_true + +note: + ┌─ infer_by_method.fe:23:13 + │ +23 │ let b = Option::bool_true() + │ ^^^^^^^^^^^^^^^^^^^ Option<()> diff --git a/crates/hir-analysis/test_files/ty_check/method/unique_trait.fe b/crates/hir-analysis/test_files/ty_check/method/unique_trait.fe new file mode 100644 index 0000000000..7a66ccf3bc --- /dev/null +++ b/crates/hir-analysis/test_files/ty_check/method/unique_trait.fe @@ -0,0 +1,16 @@ +// This test ensures that user doesn't need to import trait if its' unique. 
+fn foo(x: i32) -> i32 { + x.foo() +} + +mod inner { + trait Foo { + fn foo(self) -> Self + } + + impl Foo for i32 { + fn foo(self) -> i32 { + self + } + } +} diff --git a/crates/hir-analysis/test_files/ty_check/method/unique_trait.snap b/crates/hir-analysis/test_files/ty_check/method/unique_trait.snap new file mode 100644 index 0000000000..6f89cab8e6 --- /dev/null +++ b/crates/hir-analysis/test_files/ty_check/method/unique_trait.snap @@ -0,0 +1,40 @@ +--- +source: crates/hir-analysis/tests/ty_check.rs +expression: res +input_file: crates/hir-analysis/test_files/ty_check/method/unique_trait.fe +--- +note: + ┌─ unique_trait.fe:2:23 + │ +2 │ fn foo(x: i32) -> i32 { + │ ╭───────────────────────^ +3 │ │ x.foo() +4 │ │ } + │ ╰─^ i32 + +note: + ┌─ unique_trait.fe:3:5 + │ +3 │ x.foo() + │ ^ i32 + +note: + ┌─ unique_trait.fe:3:5 + │ +3 │ x.foo() + │ ^^^^^^^ i32 + +note: + ┌─ unique_trait.fe:12:29 + │ +12 │ fn foo(self) -> i32 { + │ ╭─────────────────────────────^ +13 │ │ self +14 │ │ } + │ ╰─────────^ i32 + +note: + ┌─ unique_trait.fe:13:13 + │ +13 │ self + │ ^^^^ i32 diff --git a/crates/hir-analysis/test_files/ty_check/pat/path_tuple.fe b/crates/hir-analysis/test_files/ty_check/pat/path_tuple.fe new file mode 100644 index 0000000000..3668420bd9 --- /dev/null +++ b/crates/hir-analysis/test_files/ty_check/pat/path_tuple.fe @@ -0,0 +1,10 @@ +pub enum Foo { + Variant(u8, u16, u32, u64) +} + +pub fn foo() { + let Foo::Variant(a, b, c, d) = Foo::Variant(1, 2, 3, 4) + let Foo::Variant(.., a, b) = Foo::Variant(1, 2, 3, 4) + let Foo::Variant(a, .., b, c) = Foo::Variant(1, 2, 3, 4) + let Foo::Variant(a, b, c, ..) 
= Foo::Variant(1, 2, 3, 4) +} \ No newline at end of file diff --git a/crates/hir-analysis/test_files/ty_check/pat/path_tuple.snap b/crates/hir-analysis/test_files/ty_check/pat/path_tuple.snap new file mode 100644 index 0000000000..5300ff7210 --- /dev/null +++ b/crates/hir-analysis/test_files/ty_check/pat/path_tuple.snap @@ -0,0 +1,274 @@ +--- +source: crates/hir-analysis/tests/ty_check.rs +expression: res +input_file: crates/hir-analysis/test_files/ty_check/pat/path_tuple.fe +--- +note: + ┌─ path_tuple.fe:5:14 + │ + 5 │ pub fn foo() { + │ ╭──────────────^ + 6 │ │ let Foo::Variant(a, b, c, d) = Foo::Variant(1, 2, 3, 4) + 7 │ │ let Foo::Variant(.., a, b) = Foo::Variant(1, 2, 3, 4) + 8 │ │ let Foo::Variant(a, .., b, c) = Foo::Variant(1, 2, 3, 4) + 9 │ │ let Foo::Variant(a, b, c, ..) = Foo::Variant(1, 2, 3, 4) +10 │ │ } + │ ╰─^ () + +note: + ┌─ path_tuple.fe:6:9 + │ +6 │ let Foo::Variant(a, b, c, d) = Foo::Variant(1, 2, 3, 4) + │ ^^^^^^^^^^^^^^^^^^^^^^^^ Foo + +note: + ┌─ path_tuple.fe:6:22 + │ +6 │ let Foo::Variant(a, b, c, d) = Foo::Variant(1, 2, 3, 4) + │ ^ u8 + +note: + ┌─ path_tuple.fe:6:25 + │ +6 │ let Foo::Variant(a, b, c, d) = Foo::Variant(1, 2, 3, 4) + │ ^ u16 + +note: + ┌─ path_tuple.fe:6:28 + │ +6 │ let Foo::Variant(a, b, c, d) = Foo::Variant(1, 2, 3, 4) + │ ^ u32 + +note: + ┌─ path_tuple.fe:6:31 + │ +6 │ let Foo::Variant(a, b, c, d) = Foo::Variant(1, 2, 3, 4) + │ ^ u64 + +note: + ┌─ path_tuple.fe:6:36 + │ +6 │ let Foo::Variant(a, b, c, d) = Foo::Variant(1, 2, 3, 4) + │ ^^^^^^^^^^^^ fn Variant + +note: + ┌─ path_tuple.fe:6:36 + │ +6 │ let Foo::Variant(a, b, c, d) = Foo::Variant(1, 2, 3, 4) + │ ^^^^^^^^^^^^^^^^^^^^^^^^ Foo + +note: + ┌─ path_tuple.fe:6:49 + │ +6 │ let Foo::Variant(a, b, c, d) = Foo::Variant(1, 2, 3, 4) + │ ^ u8 + +note: + ┌─ path_tuple.fe:6:52 + │ +6 │ let Foo::Variant(a, b, c, d) = Foo::Variant(1, 2, 3, 4) + │ ^ u16 + +note: + ┌─ path_tuple.fe:6:55 + │ +6 │ let Foo::Variant(a, b, c, d) = Foo::Variant(1, 2, 3, 4) + │ ^ u32 + +note: + ┌─ 
path_tuple.fe:6:58 + │ +6 │ let Foo::Variant(a, b, c, d) = Foo::Variant(1, 2, 3, 4) + │ ^ u64 + +note: + ┌─ path_tuple.fe:7:9 + │ +7 │ let Foo::Variant(.., a, b) = Foo::Variant(1, 2, 3, 4) + │ ^^^^^^^^^^^^^^^^^^^^^^ Foo + +note: + ┌─ path_tuple.fe:7:22 + │ +7 │ let Foo::Variant(.., a, b) = Foo::Variant(1, 2, 3, 4) + │ ^^ + +note: + ┌─ path_tuple.fe:7:26 + │ +7 │ let Foo::Variant(.., a, b) = Foo::Variant(1, 2, 3, 4) + │ ^ u32 + +note: + ┌─ path_tuple.fe:7:29 + │ +7 │ let Foo::Variant(.., a, b) = Foo::Variant(1, 2, 3, 4) + │ ^ u64 + +note: + ┌─ path_tuple.fe:7:34 + │ +7 │ let Foo::Variant(.., a, b) = Foo::Variant(1, 2, 3, 4) + │ ^^^^^^^^^^^^ fn Variant + +note: + ┌─ path_tuple.fe:7:34 + │ +7 │ let Foo::Variant(.., a, b) = Foo::Variant(1, 2, 3, 4) + │ ^^^^^^^^^^^^^^^^^^^^^^^^ Foo + +note: + ┌─ path_tuple.fe:7:47 + │ +7 │ let Foo::Variant(.., a, b) = Foo::Variant(1, 2, 3, 4) + │ ^ u8 + +note: + ┌─ path_tuple.fe:7:50 + │ +7 │ let Foo::Variant(.., a, b) = Foo::Variant(1, 2, 3, 4) + │ ^ u16 + +note: + ┌─ path_tuple.fe:7:53 + │ +7 │ let Foo::Variant(.., a, b) = Foo::Variant(1, 2, 3, 4) + │ ^ u32 + +note: + ┌─ path_tuple.fe:7:56 + │ +7 │ let Foo::Variant(.., a, b) = Foo::Variant(1, 2, 3, 4) + │ ^ u64 + +note: + ┌─ path_tuple.fe:8:9 + │ +8 │ let Foo::Variant(a, .., b, c) = Foo::Variant(1, 2, 3, 4) + │ ^^^^^^^^^^^^^^^^^^^^^^^^^ Foo + +note: + ┌─ path_tuple.fe:8:22 + │ +8 │ let Foo::Variant(a, .., b, c) = Foo::Variant(1, 2, 3, 4) + │ ^ u8 + +note: + ┌─ path_tuple.fe:8:25 + │ +8 │ let Foo::Variant(a, .., b, c) = Foo::Variant(1, 2, 3, 4) + │ ^^ + +note: + ┌─ path_tuple.fe:8:29 + │ +8 │ let Foo::Variant(a, .., b, c) = Foo::Variant(1, 2, 3, 4) + │ ^ u32 + +note: + ┌─ path_tuple.fe:8:32 + │ +8 │ let Foo::Variant(a, .., b, c) = Foo::Variant(1, 2, 3, 4) + │ ^ u64 + +note: + ┌─ path_tuple.fe:8:37 + │ +8 │ let Foo::Variant(a, .., b, c) = Foo::Variant(1, 2, 3, 4) + │ ^^^^^^^^^^^^ fn Variant + +note: + ┌─ path_tuple.fe:8:37 + │ +8 │ let Foo::Variant(a, .., b, c) = Foo::Variant(1, 2, 3, 
4) + │ ^^^^^^^^^^^^^^^^^^^^^^^^ Foo + +note: + ┌─ path_tuple.fe:8:50 + │ +8 │ let Foo::Variant(a, .., b, c) = Foo::Variant(1, 2, 3, 4) + │ ^ u8 + +note: + ┌─ path_tuple.fe:8:53 + │ +8 │ let Foo::Variant(a, .., b, c) = Foo::Variant(1, 2, 3, 4) + │ ^ u16 + +note: + ┌─ path_tuple.fe:8:56 + │ +8 │ let Foo::Variant(a, .., b, c) = Foo::Variant(1, 2, 3, 4) + │ ^ u32 + +note: + ┌─ path_tuple.fe:8:59 + │ +8 │ let Foo::Variant(a, .., b, c) = Foo::Variant(1, 2, 3, 4) + │ ^ u64 + +note: + ┌─ path_tuple.fe:9:9 + │ +9 │ let Foo::Variant(a, b, c, ..) = Foo::Variant(1, 2, 3, 4) + │ ^^^^^^^^^^^^^^^^^^^^^^^^^ Foo + +note: + ┌─ path_tuple.fe:9:22 + │ +9 │ let Foo::Variant(a, b, c, ..) = Foo::Variant(1, 2, 3, 4) + │ ^ u8 + +note: + ┌─ path_tuple.fe:9:25 + │ +9 │ let Foo::Variant(a, b, c, ..) = Foo::Variant(1, 2, 3, 4) + │ ^ u16 + +note: + ┌─ path_tuple.fe:9:28 + │ +9 │ let Foo::Variant(a, b, c, ..) = Foo::Variant(1, 2, 3, 4) + │ ^ u32 + +note: + ┌─ path_tuple.fe:9:31 + │ +9 │ let Foo::Variant(a, b, c, ..) = Foo::Variant(1, 2, 3, 4) + │ ^^ + +note: + ┌─ path_tuple.fe:9:37 + │ +9 │ let Foo::Variant(a, b, c, ..) = Foo::Variant(1, 2, 3, 4) + │ ^^^^^^^^^^^^ fn Variant + +note: + ┌─ path_tuple.fe:9:37 + │ +9 │ let Foo::Variant(a, b, c, ..) = Foo::Variant(1, 2, 3, 4) + │ ^^^^^^^^^^^^^^^^^^^^^^^^ Foo + +note: + ┌─ path_tuple.fe:9:50 + │ +9 │ let Foo::Variant(a, b, c, ..) = Foo::Variant(1, 2, 3, 4) + │ ^ u8 + +note: + ┌─ path_tuple.fe:9:53 + │ +9 │ let Foo::Variant(a, b, c, ..) = Foo::Variant(1, 2, 3, 4) + │ ^ u16 + +note: + ┌─ path_tuple.fe:9:56 + │ +9 │ let Foo::Variant(a, b, c, ..) = Foo::Variant(1, 2, 3, 4) + │ ^ u32 + +note: + ┌─ path_tuple.fe:9:59 + │ +9 │ let Foo::Variant(a, b, c, ..) 
= Foo::Variant(1, 2, 3, 4) + │ ^ u64 diff --git a/crates/hir-analysis/test_files/ty_check/pat/record.fe b/crates/hir-analysis/test_files/ty_check/pat/record.fe new file mode 100644 index 0000000000..d2f8e8f3e8 --- /dev/null +++ b/crates/hir-analysis/test_files/ty_check/pat/record.fe @@ -0,0 +1,21 @@ +pub struct S { + x: i32, + y: T, +} + +pub enum E { + Variant{x: i32, y: T}, +} + +pub enum Unit { + U +} + +pub fn foo() { + let S {x, y}: S + let S {x, y: Unit::U} + let S {y: Unit::U, x} + let E::Variant {x, y}: E + let E::Variant {x, y: Unit::U} + let E::Variant {y: Unit::U, x} +} diff --git a/crates/hir-analysis/test_files/ty_check/pat/record.snap b/crates/hir-analysis/test_files/ty_check/pat/record.snap new file mode 100644 index 0000000000..eabd063369 --- /dev/null +++ b/crates/hir-analysis/test_files/ty_check/pat/record.snap @@ -0,0 +1,125 @@ +--- +source: crates/hir-analysis/tests/ty_check.rs +expression: res +input_file: crates/hir-analysis/test_files/ty_check/pat/record.fe +--- +note: + ┌─ record.fe:14:14 + │ +14 │ pub fn foo() { + │ ╭──────────────^ +15 │ │ let S {x, y}: S +16 │ │ let S {x, y: Unit::U} +17 │ │ let S {y: Unit::U, x} + · │ +20 │ │ let E::Variant {y: Unit::U, x} +21 │ │ } + │ ╰─^ () + +note: + ┌─ record.fe:15:9 + │ +15 │ let S {x, y}: S + │ ^^^^^^^^ S + +note: + ┌─ record.fe:15:12 + │ +15 │ let S {x, y}: S + │ ^ i32 + +note: + ┌─ record.fe:15:15 + │ +15 │ let S {x, y}: S + │ ^ u32 + +note: + ┌─ record.fe:16:9 + │ +16 │ let S {x, y: Unit::U} + │ ^^^^^^^^^^^^^^^^^ S + +note: + ┌─ record.fe:16:12 + │ +16 │ let S {x, y: Unit::U} + │ ^ i32 + +note: + ┌─ record.fe:16:18 + │ +16 │ let S {x, y: Unit::U} + │ ^^^^^^^ Unit + +note: + ┌─ record.fe:17:9 + │ +17 │ let S {y: Unit::U, x} + │ ^^^^^^^^^^^^^^^^^ S + +note: + ┌─ record.fe:17:15 + │ +17 │ let S {y: Unit::U, x} + │ ^^^^^^^ Unit + +note: + ┌─ record.fe:17:24 + │ +17 │ let S {y: Unit::U, x} + │ ^ i32 + +note: + ┌─ record.fe:18:9 + │ +18 │ let E::Variant {x, y}: E + │ ^^^^^^^^^^^^^^^^^ E + +note: + ┌─ 
record.fe:18:21 + │ +18 │ let E::Variant {x, y}: E + │ ^ i32 + +note: + ┌─ record.fe:18:24 + │ +18 │ let E::Variant {x, y}: E + │ ^ u32 + +note: + ┌─ record.fe:19:9 + │ +19 │ let E::Variant {x, y: Unit::U} + │ ^^^^^^^^^^^^^^^^^^^^^^^^^^ E + +note: + ┌─ record.fe:19:21 + │ +19 │ let E::Variant {x, y: Unit::U} + │ ^ i32 + +note: + ┌─ record.fe:19:27 + │ +19 │ let E::Variant {x, y: Unit::U} + │ ^^^^^^^ Unit + +note: + ┌─ record.fe:20:9 + │ +20 │ let E::Variant {y: Unit::U, x} + │ ^^^^^^^^^^^^^^^^^^^^^^^^^^ E + +note: + ┌─ record.fe:20:24 + │ +20 │ let E::Variant {y: Unit::U, x} + │ ^^^^^^^ Unit + +note: + ┌─ record.fe:20:33 + │ +20 │ let E::Variant {y: Unit::U, x} + │ ^ i32 diff --git a/crates/hir-analysis/test_files/ty_check/pat/tuple_pat.fe b/crates/hir-analysis/test_files/ty_check/pat/tuple_pat.fe new file mode 100644 index 0000000000..34ebd2b03a --- /dev/null +++ b/crates/hir-analysis/test_files/ty_check/pat/tuple_pat.fe @@ -0,0 +1,6 @@ +fn foo() { + let (x, y, z): (i8, u16, u32) + let (.., x, y): (u8, u16, u32, u64, u128) + let (x, y, z, ..): (u8, u16, u32, u64, u128) + let (x, .., y, z): (u8, u16, u32, u64, u128) +} \ No newline at end of file diff --git a/crates/hir-analysis/test_files/ty_check/pat/tuple_pat.snap b/crates/hir-analysis/test_files/ty_check/pat/tuple_pat.snap new file mode 100644 index 0000000000..a6533a52b1 --- /dev/null +++ b/crates/hir-analysis/test_files/ty_check/pat/tuple_pat.snap @@ -0,0 +1,124 @@ +--- +source: crates/hir-analysis/tests/ty_check.rs +expression: res +input_file: crates/hir-analysis/test_files/ty_check/pat/tuple_pat.fe +--- +note: + ┌─ tuple_pat.fe:1:10 + │ +1 │ fn foo() { + │ ╭──────────^ +2 │ │ let (x, y, z): (i8, u16, u32) +3 │ │ let (.., x, y): (u8, u16, u32, u64, u128) +4 │ │ let (x, y, z, ..): (u8, u16, u32, u64, u128) +5 │ │ let (x, .., y, z): (u8, u16, u32, u64, u128) +6 │ │ } + │ ╰─^ () + +note: + ┌─ tuple_pat.fe:2:9 + │ +2 │ let (x, y, z): (i8, u16, u32) + │ ^^^^^^^^^ (i8, u16, u32) + +note: + ┌─ tuple_pat.fe:2:10 + 
│ +2 │ let (x, y, z): (i8, u16, u32) + │ ^ i8 + +note: + ┌─ tuple_pat.fe:2:13 + │ +2 │ let (x, y, z): (i8, u16, u32) + │ ^ u16 + +note: + ┌─ tuple_pat.fe:2:16 + │ +2 │ let (x, y, z): (i8, u16, u32) + │ ^ u32 + +note: + ┌─ tuple_pat.fe:3:9 + │ +3 │ let (.., x, y): (u8, u16, u32, u64, u128) + │ ^^^^^^^^^^ (u8, u16, u32, u64, u128) + +note: + ┌─ tuple_pat.fe:3:10 + │ +3 │ let (.., x, y): (u8, u16, u32, u64, u128) + │ ^^ + +note: + ┌─ tuple_pat.fe:3:14 + │ +3 │ let (.., x, y): (u8, u16, u32, u64, u128) + │ ^ u64 + +note: + ┌─ tuple_pat.fe:3:17 + │ +3 │ let (.., x, y): (u8, u16, u32, u64, u128) + │ ^ u128 + +note: + ┌─ tuple_pat.fe:4:9 + │ +4 │ let (x, y, z, ..): (u8, u16, u32, u64, u128) + │ ^^^^^^^^^^^^^ (u8, u16, u32, u64, u128) + +note: + ┌─ tuple_pat.fe:4:10 + │ +4 │ let (x, y, z, ..): (u8, u16, u32, u64, u128) + │ ^ u8 + +note: + ┌─ tuple_pat.fe:4:13 + │ +4 │ let (x, y, z, ..): (u8, u16, u32, u64, u128) + │ ^ u16 + +note: + ┌─ tuple_pat.fe:4:16 + │ +4 │ let (x, y, z, ..): (u8, u16, u32, u64, u128) + │ ^ u32 + +note: + ┌─ tuple_pat.fe:4:19 + │ +4 │ let (x, y, z, ..): (u8, u16, u32, u64, u128) + │ ^^ + +note: + ┌─ tuple_pat.fe:5:9 + │ +5 │ let (x, .., y, z): (u8, u16, u32, u64, u128) + │ ^^^^^^^^^^^^^ (u8, u16, u32, u64, u128) + +note: + ┌─ tuple_pat.fe:5:10 + │ +5 │ let (x, .., y, z): (u8, u16, u32, u64, u128) + │ ^ u8 + +note: + ┌─ tuple_pat.fe:5:13 + │ +5 │ let (x, .., y, z): (u8, u16, u32, u64, u128) + │ ^^ + +note: + ┌─ tuple_pat.fe:5:17 + │ +5 │ let (x, .., y, z): (u8, u16, u32, u64, u128) + │ ^ u64 + +note: + ┌─ tuple_pat.fe:5:20 + │ +5 │ let (x, .., y, z): (u8, u16, u32, u64, u128) + │ ^ u128 diff --git a/crates/hir-analysis/test_files/ty_check/path_generic.fe b/crates/hir-analysis/test_files/ty_check/path_generic.fe new file mode 100644 index 0000000000..d3b1529e3f --- /dev/null +++ b/crates/hir-analysis/test_files/ty_check/path_generic.fe @@ -0,0 +1,57 @@ +trait Default { + fn default() -> Self +} + +struct Foo { + t: T +} + +impl Foo { + fn method() -> T 
{ + T::default() + } +} + +impl Default for i32 { + fn default() -> Self { + 0 + } +} + +enum E { + A, + B(T), + C(i32), + D { x: i32 }, +} + +impl E { + fn gimme(_ val: U) -> E { + E::B(val) + } +} + +trait F where Self: * -> * { + fn replace(self: Self, with: U) -> Self +} + +impl F for E { + fn replace(self: Self, with: U) -> Self { + Self::B(with) + } +} + +fn foo() { + // Deciding the `Foo` type is not possible without a type argument for `Foo`. + // let x = Foo::method() + + let x = Foo::method() + + let e0 = E::B(x) + let e1 = E::C(10) + let e2 = E::A + let e3 = E::D { x: 10 } + + let e4 = E<()>::gimme(10) + let e5 = e1.replace(with: "hi") +} diff --git a/crates/hir-analysis/test_files/ty_check/path_generic.snap b/crates/hir-analysis/test_files/ty_check/path_generic.snap new file mode 100644 index 0000000000..2b403ee56f --- /dev/null +++ b/crates/hir-analysis/test_files/ty_check/path_generic.snap @@ -0,0 +1,251 @@ +--- +source: crates/hir-analysis/tests/ty_check.rs +expression: res +input_file: crates/hir-analysis/test_files/ty_check/path_generic.fe +--- +note: + ┌─ path_generic.fe:10:22 + │ +10 │ fn method() -> T { + │ ╭──────────────────────^ +11 │ │ T::default() +12 │ │ } + │ ╰─────^ T + +note: + ┌─ path_generic.fe:11:9 + │ +11 │ T::default() + │ ^^^^^^^^^^ fn default + +note: + ┌─ path_generic.fe:11:9 + │ +11 │ T::default() + │ ^^^^^^^^^^^^ T + +note: + ┌─ path_generic.fe:16:26 + │ +16 │ fn default() -> Self { + │ ╭──────────────────────────^ +17 │ │ 0 +18 │ │ } + │ ╰─────^ i32 + +note: + ┌─ path_generic.fe:17:9 + │ +17 │ 0 + │ ^ i32 + +note: + ┌─ path_generic.fe:29:35 + │ +29 │ fn gimme(_ val: U) -> E { + │ ╭───────────────────────────────────^ +30 │ │ E::B(val) +31 │ │ } + │ ╰─────^ E + +note: + ┌─ path_generic.fe:30:9 + │ +30 │ E::B(val) + │ ^^^^ fn B + +note: + ┌─ path_generic.fe:30:9 + │ +30 │ E::B(val) + │ ^^^^^^^^^ E + +note: + ┌─ path_generic.fe:30:14 + │ +30 │ E::B(val) + │ ^^^ U + +note: + ┌─ path_generic.fe:39:57 + │ +39 │ fn replace(self: 
Self, with: U) -> Self { + │ ╭─────────────────────────────────────────────────────────^ +40 │ │ Self::B(with) +41 │ │ } + │ ╰─────^ E + +note: + ┌─ path_generic.fe:40:9 + │ +40 │ Self::B(with) + │ ^^^^^^^ fn B + +note: + ┌─ path_generic.fe:40:9 + │ +40 │ Self::B(with) + │ ^^^^^^^^^^^^^ E + +note: + ┌─ path_generic.fe:40:17 + │ +40 │ Self::B(with) + │ ^^^^ U + +note: + ┌─ path_generic.fe:44:10 + │ +44 │ fn foo() { + │ ╭──────────^ +45 │ │ // Deciding the `Foo` type is not possible without a type argument for `Foo`. +46 │ │ // let x = Foo::method() +47 │ │ + · │ +56 │ │ let e5 = e1.replace(with: "hi") +57 │ │ } + │ ╰─^ () + +note: + ┌─ path_generic.fe:48:9 + │ +48 │ let x = Foo::method() + │ ^ i32 + +note: + ┌─ path_generic.fe:48:13 + │ +48 │ let x = Foo::method() + │ ^^^^^^^^^^^^^^^^ fn method + +note: + ┌─ path_generic.fe:48:13 + │ +48 │ let x = Foo::method() + │ ^^^^^^^^^^^^^^^^^^ i32 + +note: + ┌─ path_generic.fe:50:9 + │ +50 │ let e0 = E::B(x) + │ ^^ E + +note: + ┌─ path_generic.fe:50:14 + │ +50 │ let e0 = E::B(x) + │ ^^^^ fn B + +note: + ┌─ path_generic.fe:50:14 + │ +50 │ let e0 = E::B(x) + │ ^^^^^^^ E + +note: + ┌─ path_generic.fe:50:19 + │ +50 │ let e0 = E::B(x) + │ ^ i32 + +note: + ┌─ path_generic.fe:51:9 + │ +51 │ let e1 = E::C(10) + │ ^^ E + +note: + ┌─ path_generic.fe:51:14 + │ +51 │ let e1 = E::C(10) + │ ^^^^^^^^^ fn C + +note: + ┌─ path_generic.fe:51:14 + │ +51 │ let e1 = E::C(10) + │ ^^^^^^^^^^^^^ E + +note: + ┌─ path_generic.fe:51:24 + │ +51 │ let e1 = E::C(10) + │ ^^ i32 + +note: + ┌─ path_generic.fe:52:9 + │ +52 │ let e2 = E::A + │ ^^ E + +note: + ┌─ path_generic.fe:52:14 + │ +52 │ let e2 = E::A + │ ^^^^^^^^^ E + +note: + ┌─ path_generic.fe:53:9 + │ +53 │ let e3 = E::D { x: 10 } + │ ^^ E + +note: + ┌─ path_generic.fe:53:14 + │ +53 │ let e3 = E::D { x: 10 } + │ ^^^^^^^^^^^^^^^^^^^ E + +note: + ┌─ path_generic.fe:53:29 + │ +53 │ let e3 = E::D { x: 10 } + │ ^^ i32 + +note: + ┌─ path_generic.fe:55:9 + │ +55 │ let e4 = E<()>::gimme(10) + │ ^^ E + +note: 
+ ┌─ path_generic.fe:55:14 + │ +55 │ let e4 = E<()>::gimme(10) + │ ^^^^^^^^^^^^^^^^^ fn gimme<(), u64> + +note: + ┌─ path_generic.fe:55:14 + │ +55 │ let e4 = E<()>::gimme(10) + │ ^^^^^^^^^^^^^^^^^^^^^ E + +note: + ┌─ path_generic.fe:55:32 + │ +55 │ let e4 = E<()>::gimme(10) + │ ^^ u64 + +note: + ┌─ path_generic.fe:56:9 + │ +56 │ let e5 = e1.replace(with: "hi") + │ ^^ E> + +note: + ┌─ path_generic.fe:56:14 + │ +56 │ let e5 = e1.replace(with: "hi") + │ ^^ E + +note: + ┌─ path_generic.fe:56:14 + │ +56 │ let e5 = e1.replace(with: "hi") + │ ^^^^^^^^^^^^^^^^^^^^^^ E> + +note: + ┌─ path_generic.fe:56:31 + │ +56 │ let e5 = e1.replace(with: "hi") + │ ^^^^ String<2> diff --git a/crates/hir-analysis/test_files/ty_check/record_init.fe b/crates/hir-analysis/test_files/ty_check/record_init.fe new file mode 100644 index 0000000000..f6f0a0a33c --- /dev/null +++ b/crates/hir-analysis/test_files/ty_check/record_init.fe @@ -0,0 +1,39 @@ +struct Foo { + x: i32, + y: String<10> +} + +fn foo() { + let x = 1 + let y = "FOO" + + let f = Foo {x, y} + + let f2 = Foo {x: 1, y: "FOO"} + + let f3 = Foo {y: "FOO", x: 1} +} + +struct Bar { + t: T, + u: U +} + +fn foo2(b: bool, z: Z) { + let t = false + let u = "Bar" + let f = Bar {t, u} + + let f2 = Bar {t: z, u: f} +} + +struct Wrapper +where T: * -> * -> * +{ + t: T +} + +fn foo3() { + let bar = Bar { t: 1, u: false } + let x = Wrapper { t: bar } +} \ No newline at end of file diff --git a/crates/hir-analysis/test_files/ty_check/record_init.snap b/crates/hir-analysis/test_files/ty_check/record_init.snap new file mode 100644 index 0000000000..ac1c49b89a --- /dev/null +++ b/crates/hir-analysis/test_files/ty_check/record_init.snap @@ -0,0 +1,250 @@ +--- +source: crates/hir-analysis/tests/ty_check.rs +expression: res +input_file: crates/hir-analysis/test_files/ty_check/record_init.fe +--- +note: + ┌─ record_init.fe:6:10 + │ + 6 │ fn foo() { + │ ╭──────────^ + 7 │ │ let x = 1 + 8 │ │ let y = "FOO" + 9 │ │ + · │ +14 │ │ let f3 = Foo {y: "FOO", x: 1} 
+15 │ │ } + │ ╰─^ () + +note: + ┌─ record_init.fe:7:9 + │ +7 │ let x = 1 + │ ^ i32 + +note: + ┌─ record_init.fe:7:13 + │ +7 │ let x = 1 + │ ^ i32 + +note: + ┌─ record_init.fe:8:9 + │ +8 │ let y = "FOO" + │ ^ String<10> + +note: + ┌─ record_init.fe:8:13 + │ +8 │ let y = "FOO" + │ ^^^^^ String<10> + +note: + ┌─ record_init.fe:10:9 + │ +10 │ let f = Foo {x, y} + │ ^ Foo + +note: + ┌─ record_init.fe:10:13 + │ +10 │ let f = Foo {x, y} + │ ^^^^^^^^^^ Foo + +note: + ┌─ record_init.fe:10:18 + │ +10 │ let f = Foo {x, y} + │ ^ i32 + +note: + ┌─ record_init.fe:10:21 + │ +10 │ let f = Foo {x, y} + │ ^ String<10> + +note: + ┌─ record_init.fe:12:9 + │ +12 │ let f2 = Foo {x: 1, y: "FOO"} + │ ^^ Foo + +note: + ┌─ record_init.fe:12:14 + │ +12 │ let f2 = Foo {x: 1, y: "FOO"} + │ ^^^^^^^^^^^^^^^^^^^^ Foo + +note: + ┌─ record_init.fe:12:22 + │ +12 │ let f2 = Foo {x: 1, y: "FOO"} + │ ^ i32 + +note: + ┌─ record_init.fe:12:28 + │ +12 │ let f2 = Foo {x: 1, y: "FOO"} + │ ^^^^^ String<10> + +note: + ┌─ record_init.fe:14:9 + │ +14 │ let f3 = Foo {y: "FOO", x: 1} + │ ^^ Foo + +note: + ┌─ record_init.fe:14:14 + │ +14 │ let f3 = Foo {y: "FOO", x: 1} + │ ^^^^^^^^^^^^^^^^^^^^ Foo + +note: + ┌─ record_init.fe:14:22 + │ +14 │ let f3 = Foo {y: "FOO", x: 1} + │ ^^^^^ String<10> + +note: + ┌─ record_init.fe:14:32 + │ +14 │ let f3 = Foo {y: "FOO", x: 1} + │ ^ i32 + +note: + ┌─ record_init.fe:22:27 + │ +22 │ fn foo2(b: bool, z: Z) { + │ ╭───────────────────────────^ +23 │ │ let t = false +24 │ │ let u = "Bar" +25 │ │ let f = Bar {t, u} +26 │ │ +27 │ │ let f2 = Bar {t: z, u: f} +28 │ │ } + │ ╰─^ () + +note: + ┌─ record_init.fe:23:9 + │ +23 │ let t = false + │ ^ bool + +note: + ┌─ record_init.fe:23:13 + │ +23 │ let t = false + │ ^^^^^ bool + +note: + ┌─ record_init.fe:24:9 + │ +24 │ let u = "Bar" + │ ^ String<3> + +note: + ┌─ record_init.fe:24:13 + │ +24 │ let u = "Bar" + │ ^^^^^ String<3> + +note: + ┌─ record_init.fe:25:9 + │ +25 │ let f = Bar {t, u} + │ ^ Bar> + +note: + ┌─ record_init.fe:25:13 + │ +25 
│ let f = Bar {t, u} + │ ^^^^^^^^^^ Bar> + +note: + ┌─ record_init.fe:25:18 + │ +25 │ let f = Bar {t, u} + │ ^ bool + +note: + ┌─ record_init.fe:25:21 + │ +25 │ let f = Bar {t, u} + │ ^ String<3> + +note: + ┌─ record_init.fe:27:9 + │ +27 │ let f2 = Bar {t: z, u: f} + │ ^^ Bar>> + +note: + ┌─ record_init.fe:27:14 + │ +27 │ let f2 = Bar {t: z, u: f} + │ ^^^^^^^^^^^^^^^^ Bar>> + +note: + ┌─ record_init.fe:27:22 + │ +27 │ let f2 = Bar {t: z, u: f} + │ ^ Z + +note: + ┌─ record_init.fe:27:28 + │ +27 │ let f2 = Bar {t: z, u: f} + │ ^ Bar> + +note: + ┌─ record_init.fe:36:11 + │ +36 │ fn foo3() { + │ ╭───────────^ +37 │ │ let bar = Bar { t: 1, u: false } +38 │ │ let x = Wrapper { t: bar } +39 │ │ } + │ ╰─^ () + +note: + ┌─ record_init.fe:37:9 + │ +37 │ let bar = Bar { t: 1, u: false } + │ ^^^ Bar + +note: + ┌─ record_init.fe:37:15 + │ +37 │ let bar = Bar { t: 1, u: false } + │ ^^^^^^^^^^^^^^^^^^^^^^ Bar + +note: + ┌─ record_init.fe:37:24 + │ +37 │ let bar = Bar { t: 1, u: false } + │ ^ i32 + +note: + ┌─ record_init.fe:37:30 + │ +37 │ let bar = Bar { t: 1, u: false } + │ ^^^^^ bool + +note: + ┌─ record_init.fe:38:9 + │ +38 │ let x = Wrapper { t: bar } + │ ^ Wrapper + +note: + ┌─ record_init.fe:38:13 + │ +38 │ let x = Wrapper { t: bar } + │ ^^^^^^^^^^^^^^^^^^ Wrapper + +note: + ┌─ record_init.fe:38:26 + │ +38 │ let x = Wrapper { t: bar } + │ ^^^ Bar diff --git a/crates/hir-analysis/test_files/ty_check/ret.fe b/crates/hir-analysis/test_files/ty_check/ret.fe new file mode 100644 index 0000000000..1e87e46b6b --- /dev/null +++ b/crates/hir-analysis/test_files/ty_check/ret.fe @@ -0,0 +1,37 @@ +enum Tag { + Tag1, + Tag2, + Tag3 +} + +fn foo(b1: bool, b2: bool) -> i32 { + use Tag::* + + let tag = if b1 { + return 0 + } else if b2 { + Tag1 + } else { + Tag3 + } + + let b = match tag { + Tag1 => { + true + } + + Tag2 => { + return 3 + } + + Tag3 => { + false + } + } + + if b { + 1 + } else { + 2 + } +} \ No newline at end of file diff --git 
a/crates/hir-analysis/test_files/ty_check/ret.snap b/crates/hir-analysis/test_files/ty_check/ret.snap new file mode 100644 index 0000000000..8af22f81d6 --- /dev/null +++ b/crates/hir-analysis/test_files/ty_check/ret.snap @@ -0,0 +1,238 @@ +--- +source: crates/hir-analysis/tests/ty_check.rs +expression: res +input_file: crates/hir-analysis/test_files/ty_check/ret.fe +--- +note: + ┌─ ret.fe:7:35 + │ + 7 │ fn foo(b1: bool, b2: bool) -> i32 { + │ ╭───────────────────────────────────^ + 8 │ │ use Tag::* + 9 │ │ +10 │ │ let tag = if b1 { + · │ +36 │ │ } +37 │ │ } + │ ╰─^ i32 + +note: + ┌─ ret.fe:10:9 + │ +10 │ let tag = if b1 { + │ ^^^ Tag + +note: + ┌─ ret.fe:10:15 + │ +10 │ let tag = if b1 { + │ ╭───────────────^ +11 │ │ return 0 +12 │ │ } else if b2 { +13 │ │ Tag1 +14 │ │ } else { +15 │ │ Tag3 +16 │ │ } + │ ╰─────^ Tag + +note: + ┌─ ret.fe:10:18 + │ +10 │ let tag = if b1 { + │ ^^ bool + +note: + ┌─ ret.fe:10:21 + │ +10 │ let tag = if b1 { + │ ╭─────────────────────^ +11 │ │ return 0 +12 │ │ } else if b2 { + │ ╰─────^ ! 
+ +note: + ┌─ ret.fe:11:16 + │ +11 │ return 0 + │ ^ i32 + +note: + ┌─ ret.fe:12:12 + │ +12 │ } else if b2 { + │ ╭────────────^ +13 │ │ Tag1 +14 │ │ } else { +15 │ │ Tag3 +16 │ │ } + │ ╰─────^ Tag + +note: + ┌─ ret.fe:12:15 + │ +12 │ } else if b2 { + │ ^^ bool + +note: + ┌─ ret.fe:12:18 + │ +12 │ } else if b2 { + │ ╭──────────────────^ +13 │ │ Tag1 +14 │ │ } else { + │ ╰─────^ Tag + +note: + ┌─ ret.fe:13:9 + │ +13 │ Tag1 + │ ^^^^ Tag + +note: + ┌─ ret.fe:14:12 + │ +14 │ } else { + │ ╭────────────^ +15 │ │ Tag3 +16 │ │ } + │ ╰─────^ Tag + +note: + ┌─ ret.fe:15:9 + │ +15 │ Tag3 + │ ^^^^ Tag + +note: + ┌─ ret.fe:18:9 + │ +18 │ let b = match tag { + │ ^ bool + +note: + ┌─ ret.fe:18:13 + │ +18 │ let b = match tag { + │ ╭─────────────^ +19 │ │ Tag1 => { +20 │ │ true +21 │ │ } + · │ +29 │ │ } +30 │ │ } + │ ╰─────^ bool + +note: + ┌─ ret.fe:18:19 + │ +18 │ let b = match tag { + │ ^^^ Tag + +note: + ┌─ ret.fe:19:9 + │ +19 │ Tag1 => { + │ ^^^^ Tag + +note: + ┌─ ret.fe:19:17 + │ +19 │ Tag1 => { + │ ╭─────────────────^ +20 │ │ true +21 │ │ } + │ ╰─────────^ bool + +note: + ┌─ ret.fe:20:13 + │ +20 │ true + │ ^^^^ bool + +note: + ┌─ ret.fe:23:9 + │ +23 │ Tag2 => { + │ ^^^^ Tag + +note: + ┌─ ret.fe:23:17 + │ +23 │ Tag2 => { + │ ╭─────────────────^ +24 │ │ return 3 +25 │ │ } + │ ╰─────────^ bool + +note: + ┌─ ret.fe:24:20 + │ +24 │ return 3 + │ ^ i32 + +note: + ┌─ ret.fe:27:9 + │ +27 │ Tag3 => { + │ ^^^^ Tag + +note: + ┌─ ret.fe:27:17 + │ +27 │ Tag3 => { + │ ╭─────────────────^ +28 │ │ false +29 │ │ } + │ ╰─────────^ bool + +note: + ┌─ ret.fe:28:13 + │ +28 │ false + │ ^^^^^ bool + +note: + ┌─ ret.fe:32:5 + │ +32 │ ╭ if b { +33 │ │ 1 +34 │ │ } else { +35 │ │ 2 +36 │ │ } + │ ╰─────^ i32 + +note: + ┌─ ret.fe:32:8 + │ +32 │ if b { + │ ^ bool + +note: + ┌─ ret.fe:32:10 + │ +32 │ if b { + │ ╭──────────^ +33 │ │ 1 +34 │ │ } else { + │ ╰─────^ i32 + +note: + ┌─ ret.fe:33:9 + │ +33 │ 1 + │ ^ i32 + +note: + ┌─ ret.fe:34:12 + │ +34 │ } else { + │ ╭────────────^ +35 │ │ 2 +36 │ │ } + │ ╰─────^ 
i32 + +note: + ┌─ ret.fe:35:9 + │ +35 │ 2 + │ ^ i32 diff --git a/crates/hir-analysis/test_files/ty_check/shadowing.fe b/crates/hir-analysis/test_files/ty_check/shadowing.fe new file mode 100644 index 0000000000..175aee39e3 --- /dev/null +++ b/crates/hir-analysis/test_files/ty_check/shadowing.fe @@ -0,0 +1,22 @@ +fn foo() -> i32 { + let x = 1 + + { + let x = false + if x { + let x = "Hello" + x + } else { + let x = "Hi" + x + } + } + + x +} + + +fn bar() -> i32 { + let bar = 1 + bar +} \ No newline at end of file diff --git a/crates/hir-analysis/test_files/ty_check/shadowing.snap b/crates/hir-analysis/test_files/ty_check/shadowing.snap new file mode 100644 index 0000000000..fdfc0fa161 --- /dev/null +++ b/crates/hir-analysis/test_files/ty_check/shadowing.snap @@ -0,0 +1,161 @@ +--- +source: crates/hir-analysis/tests/ty_check.rs +expression: res +input_file: crates/hir-analysis/test_files/ty_check/shadowing.fe +--- +note: + ┌─ shadowing.fe:1:17 + │ + 1 │ fn foo() -> i32 { + │ ╭─────────────────^ + 2 │ │ let x = 1 + 3 │ │ + 4 │ │ { + · │ +15 │ │ x +16 │ │ } + │ ╰─^ i32 + +note: + ┌─ shadowing.fe:2:9 + │ +2 │ let x = 1 + │ ^ i32 + +note: + ┌─ shadowing.fe:2:13 + │ +2 │ let x = 1 + │ ^ i32 + +note: + ┌─ shadowing.fe:4:5 + │ + 4 │ ╭ { + 5 │ │ let x = false + 6 │ │ if x { + 7 │ │ let x = "Hello" + · │ +12 │ │ } +13 │ │ } + │ ╰─────^ String<5> + +note: + ┌─ shadowing.fe:5:13 + │ +5 │ let x = false + │ ^ bool + +note: + ┌─ shadowing.fe:5:17 + │ +5 │ let x = false + │ ^^^^^ bool + +note: + ┌─ shadowing.fe:6:9 + │ + 6 │ ╭ if x { + 7 │ │ let x = "Hello" + 8 │ │ x + 9 │ │ } else { +10 │ │ let x = "Hi" +11 │ │ x +12 │ │ } + │ ╰─────────^ String<5> + +note: + ┌─ shadowing.fe:6:12 + │ +6 │ if x { + │ ^ bool + +note: + ┌─ shadowing.fe:6:14 + │ +6 │ if x { + │ ╭──────────────^ +7 │ │ let x = "Hello" +8 │ │ x +9 │ │ } else { + │ ╰─────────^ String<5> + +note: + ┌─ shadowing.fe:7:17 + │ +7 │ let x = "Hello" + │ ^ String<5> + +note: + ┌─ shadowing.fe:7:21 + │ +7 │ let x = "Hello" + │ 
^^^^^^^ String<5> + +note: + ┌─ shadowing.fe:8:13 + │ +8 │ x + │ ^ String<5> + +note: + ┌─ shadowing.fe:9:16 + │ + 9 │ } else { + │ ╭────────────────^ +10 │ │ let x = "Hi" +11 │ │ x +12 │ │ } + │ ╰─────────^ String<5> + +note: + ┌─ shadowing.fe:10:17 + │ +10 │ let x = "Hi" + │ ^ String<5> + +note: + ┌─ shadowing.fe:10:21 + │ +10 │ let x = "Hi" + │ ^^^^ String<5> + +note: + ┌─ shadowing.fe:11:13 + │ +11 │ x + │ ^ String<5> + +note: + ┌─ shadowing.fe:15:5 + │ +15 │ x + │ ^ i32 + +note: + ┌─ shadowing.fe:19:17 + │ +19 │ fn bar() -> i32 { + │ ╭─────────────────^ +20 │ │ let bar = 1 +21 │ │ bar +22 │ │ } + │ ╰─^ i32 + +note: + ┌─ shadowing.fe:20:9 + │ +20 │ let bar = 1 + │ ^^^ i32 + +note: + ┌─ shadowing.fe:20:15 + │ +20 │ let bar = 1 + │ ^ i32 + +note: + ┌─ shadowing.fe:21:5 + │ +21 │ bar + │ ^^^ i32 diff --git a/crates/hir-analysis/test_files/ty_check/tuple.fe b/crates/hir-analysis/test_files/ty_check/tuple.fe new file mode 100644 index 0000000000..be727a8f59 --- /dev/null +++ b/crates/hir-analysis/test_files/ty_check/tuple.fe @@ -0,0 +1,10 @@ +struct Foo { + x: i32, + y: u32, +} + +fn foo() { + let f = Foo {x: 1, y: 2} + let tup_1 = (f, true, false) + let tup_2 = (("Foo", true), tup_1, "BAZ") +} \ No newline at end of file diff --git a/crates/hir-analysis/test_files/ty_check/tuple.snap b/crates/hir-analysis/test_files/ty_check/tuple.snap new file mode 100644 index 0000000000..46908d2505 --- /dev/null +++ b/crates/hir-analysis/test_files/ty_check/tuple.snap @@ -0,0 +1,111 @@ +--- +source: crates/hir-analysis/tests/ty_check.rs +expression: res +input_file: crates/hir-analysis/test_files/ty_check/tuple.fe +--- +note: + ┌─ tuple.fe:6:10 + │ + 6 │ fn foo() { + │ ╭──────────^ + 7 │ │ let f = Foo {x: 1, y: 2} + 8 │ │ let tup_1 = (f, true, false) + 9 │ │ let tup_2 = (("Foo", true), tup_1, "BAZ") +10 │ │ } + │ ╰─^ () + +note: + ┌─ tuple.fe:7:9 + │ +7 │ let f = Foo {x: 1, y: 2} + │ ^ Foo + +note: + ┌─ tuple.fe:7:13 + │ +7 │ let f = Foo {x: 1, y: 2} + │ ^^^^^^^^^^^^^^^^ Foo + 
+note: + ┌─ tuple.fe:7:21 + │ +7 │ let f = Foo {x: 1, y: 2} + │ ^ i32 + +note: + ┌─ tuple.fe:7:27 + │ +7 │ let f = Foo {x: 1, y: 2} + │ ^ u32 + +note: + ┌─ tuple.fe:8:9 + │ +8 │ let tup_1 = (f, true, false) + │ ^^^^^ (Foo, bool, bool) + +note: + ┌─ tuple.fe:8:17 + │ +8 │ let tup_1 = (f, true, false) + │ ^^^^^^^^^^^^^^^^ (Foo, bool, bool) + +note: + ┌─ tuple.fe:8:18 + │ +8 │ let tup_1 = (f, true, false) + │ ^ Foo + +note: + ┌─ tuple.fe:8:21 + │ +8 │ let tup_1 = (f, true, false) + │ ^^^^ bool + +note: + ┌─ tuple.fe:8:27 + │ +8 │ let tup_1 = (f, true, false) + │ ^^^^^ bool + +note: + ┌─ tuple.fe:9:9 + │ +9 │ let tup_2 = (("Foo", true), tup_1, "BAZ") + │ ^^^^^ ((String<3>, bool), (Foo, bool, bool), String<3>) + +note: + ┌─ tuple.fe:9:17 + │ +9 │ let tup_2 = (("Foo", true), tup_1, "BAZ") + │ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ ((String<3>, bool), (Foo, bool, bool), String<3>) + +note: + ┌─ tuple.fe:9:18 + │ +9 │ let tup_2 = (("Foo", true), tup_1, "BAZ") + │ ^^^^^^^^^^^^^ (String<3>, bool) + +note: + ┌─ tuple.fe:9:19 + │ +9 │ let tup_2 = (("Foo", true), tup_1, "BAZ") + │ ^^^^^ String<3> + +note: + ┌─ tuple.fe:9:26 + │ +9 │ let tup_2 = (("Foo", true), tup_1, "BAZ") + │ ^^^^ bool + +note: + ┌─ tuple.fe:9:33 + │ +9 │ let tup_2 = (("Foo", true), tup_1, "BAZ") + │ ^^^^^ (Foo, bool, bool) + +note: + ┌─ tuple.fe:9:40 + │ +9 │ let tup_2 = (("Foo", true), tup_1, "BAZ") + │ ^^^^^ String<3> diff --git a/crates/hir-analysis/test_files/ty_check/type_alias.fe b/crates/hir-analysis/test_files/ty_check/type_alias.fe new file mode 100644 index 0000000000..6ed01e73ea --- /dev/null +++ b/crates/hir-analysis/test_files/ty_check/type_alias.fe @@ -0,0 +1,9 @@ +struct Map {} + +mod foo { + pub type Foo = super::Map +} + +fn main() { + let mut set: foo::Foo<()> +} diff --git a/crates/hir-analysis/test_files/ty_check/type_alias.snap b/crates/hir-analysis/test_files/ty_check/type_alias.snap new file mode 100644 index 0000000000..0769c944b4 --- /dev/null +++ 
b/crates/hir-analysis/test_files/ty_check/type_alias.snap @@ -0,0 +1,19 @@ +--- +source: crates/hir-analysis/tests/ty_check.rs +expression: res +input_file: crates/hir-analysis/test_files/ty_check/type_alias.fe +--- +note: + ┌─ type_alias.fe:7:11 + │ +7 │ fn main() { + │ ╭───────────^ +8 │ │ let mut set: foo::Foo<()> +9 │ │ } + │ ╰─^ () + +note: + ┌─ type_alias.fe:8:9 + │ +8 │ let mut set: foo::Foo<()> + │ ^^^^^^^ Map diff --git a/crates/hir-analysis/test_files/ty_check/unary.fe b/crates/hir-analysis/test_files/ty_check/unary.fe new file mode 100644 index 0000000000..439266025b --- /dev/null +++ b/crates/hir-analysis/test_files/ty_check/unary.fe @@ -0,0 +1,7 @@ +pub fn foo(x: i32) { + let i1: i32 = +1 + let i2 = -i1 + let i3 = ~i2 + + let b = !false +} \ No newline at end of file diff --git a/crates/hir-analysis/test_files/ty_check/unary.snap b/crates/hir-analysis/test_files/ty_check/unary.snap new file mode 100644 index 0000000000..b4d847aece --- /dev/null +++ b/crates/hir-analysis/test_files/ty_check/unary.snap @@ -0,0 +1,89 @@ +--- +source: crates/hir-analysis/tests/ty_check.rs +expression: res +input_file: crates/hir-analysis/test_files/ty_check/unary.fe +--- +note: + ┌─ unary.fe:1:20 + │ +1 │ pub fn foo(x: i32) { + │ ╭────────────────────^ +2 │ │ let i1: i32 = +1 +3 │ │ let i2 = -i1 +4 │ │ let i3 = ~i2 +5 │ │ +6 │ │ let b = !false +7 │ │ } + │ ╰─^ () + +note: + ┌─ unary.fe:2:9 + │ +2 │ let i1: i32 = +1 + │ ^^ i32 + +note: + ┌─ unary.fe:2:19 + │ +2 │ let i1: i32 = +1 + │ ^^ i32 + +note: + ┌─ unary.fe:2:20 + │ +2 │ let i1: i32 = +1 + │ ^ i32 + +note: + ┌─ unary.fe:3:9 + │ +3 │ let i2 = -i1 + │ ^^ i32 + +note: + ┌─ unary.fe:3:14 + │ +3 │ let i2 = -i1 + │ ^^^ i32 + +note: + ┌─ unary.fe:3:15 + │ +3 │ let i2 = -i1 + │ ^^ i32 + +note: + ┌─ unary.fe:4:9 + │ +4 │ let i3 = ~i2 + │ ^^ i32 + +note: + ┌─ unary.fe:4:14 + │ +4 │ let i3 = ~i2 + │ ^^^ i32 + +note: + ┌─ unary.fe:4:15 + │ +4 │ let i3 = ~i2 + │ ^^ i32 + +note: + ┌─ unary.fe:6:9 + │ +6 │ let b = !false + │ ^ bool 
+ +note: + ┌─ unary.fe:6:13 + │ +6 │ let b = !false + │ ^^^^^^ bool + +note: + ┌─ unary.fe:6:14 + │ +6 │ let b = !false + │ ^^^^^ bool diff --git a/crates/hir-analysis/test_files/ty_check/while_.fe b/crates/hir-analysis/test_files/ty_check/while_.fe new file mode 100644 index 0000000000..91b35a636d --- /dev/null +++ b/crates/hir-analysis/test_files/ty_check/while_.fe @@ -0,0 +1,38 @@ +pub fn factorial(mut num: u32) -> u32 { + let mut res = 1 + while num > 0 { + res *= num + num -= 1 + } + + res +} + +pub fn factorial2(mut num: u32) -> u32 { + let mut res = 1 + while true { + if num > 1 { + res *= num + num -= 1 + } else { + return res + } + } + + return res +} + + +pub fn factorial3(mut num: u32) -> u32 { + let mut res = 1 + while true { + num = if num > 1 { + res *= num + num - 1 + } else { + break + } + } + + res +} \ No newline at end of file diff --git a/crates/hir-analysis/test_files/ty_check/while_.snap b/crates/hir-analysis/test_files/ty_check/while_.snap new file mode 100644 index 0000000000..3df786c444 --- /dev/null +++ b/crates/hir-analysis/test_files/ty_check/while_.snap @@ -0,0 +1,391 @@ +--- +source: crates/hir-analysis/tests/ty_check.rs +expression: res +input_file: crates/hir-analysis/test_files/ty_check/while_.fe +--- +note: + ┌─ while_.fe:1:39 + │ +1 │ pub fn factorial(mut num: u32) -> u32 { + │ ╭───────────────────────────────────────^ +2 │ │ let mut res = 1 +3 │ │ while num > 0 { +4 │ │ res *= num + · │ +8 │ │ res +9 │ │ } + │ ╰─^ u32 + +note: + ┌─ while_.fe:2:9 + │ +2 │ let mut res = 1 + │ ^^^^^^^ u32 + +note: + ┌─ while_.fe:2:19 + │ +2 │ let mut res = 1 + │ ^ u32 + +note: + ┌─ while_.fe:3:11 + │ +3 │ while num > 0 { + │ ^^^ u32 + +note: + ┌─ while_.fe:3:11 + │ +3 │ while num > 0 { + │ ^^^^^^^ bool + +note: + ┌─ while_.fe:3:17 + │ +3 │ while num > 0 { + │ ^ u32 + +note: + ┌─ while_.fe:3:19 + │ +3 │ while num > 0 { + │ ╭───────────────────^ +4 │ │ res *= num +5 │ │ num -= 1 +6 │ │ } + │ ╰─────^ () + +note: + ┌─ while_.fe:4:9 + │ +4 │ res *= num + 
│ ^^^ u32 + +note: + ┌─ while_.fe:4:9 + │ +4 │ res *= num + │ ^^^^^^^^^^ () + +note: + ┌─ while_.fe:4:16 + │ +4 │ res *= num + │ ^^^ u32 + +note: + ┌─ while_.fe:5:9 + │ +5 │ num -= 1 + │ ^^^ u32 + +note: + ┌─ while_.fe:5:9 + │ +5 │ num -= 1 + │ ^^^^^^^^ () + +note: + ┌─ while_.fe:5:16 + │ +5 │ num -= 1 + │ ^ u32 + +note: + ┌─ while_.fe:8:5 + │ +8 │ res + │ ^^^ u32 + +note: + ┌─ while_.fe:11:40 + │ +11 │ pub fn factorial2(mut num: u32) -> u32 { + │ ╭────────────────────────────────────────^ +12 │ │ let mut res = 1 +13 │ │ while true { +14 │ │ if num > 1 { + · │ +22 │ │ return res +23 │ │ } + │ ╰─^ u32 + +note: + ┌─ while_.fe:12:9 + │ +12 │ let mut res = 1 + │ ^^^^^^^ u32 + +note: + ┌─ while_.fe:12:19 + │ +12 │ let mut res = 1 + │ ^ u32 + +note: + ┌─ while_.fe:13:11 + │ +13 │ while true { + │ ^^^^ bool + +note: + ┌─ while_.fe:13:16 + │ +13 │ while true { + │ ╭────────────────^ +14 │ │ if num > 1 { +15 │ │ res *= num +16 │ │ num -= 1 + · │ +19 │ │ } +20 │ │ } + │ ╰─────^ () + +note: + ┌─ while_.fe:14:9 + │ +14 │ ╭ if num > 1 { +15 │ │ res *= num +16 │ │ num -= 1 +17 │ │ } else { +18 │ │ return res +19 │ │ } + │ ╰─────────^ () + +note: + ┌─ while_.fe:14:12 + │ +14 │ if num > 1 { + │ ^^^ u32 + +note: + ┌─ while_.fe:14:12 + │ +14 │ if num > 1 { + │ ^^^^^^^ bool + +note: + ┌─ while_.fe:14:18 + │ +14 │ if num > 1 { + │ ^ u32 + +note: + ┌─ while_.fe:14:20 + │ +14 │ if num > 1 { + │ ╭────────────────────^ +15 │ │ res *= num +16 │ │ num -= 1 +17 │ │ } else { + │ ╰─────────^ () + +note: + ┌─ while_.fe:15:13 + │ +15 │ res *= num + │ ^^^ u32 + +note: + ┌─ while_.fe:15:13 + │ +15 │ res *= num + │ ^^^^^^^^^^ () + +note: + ┌─ while_.fe:15:20 + │ +15 │ res *= num + │ ^^^ u32 + +note: + ┌─ while_.fe:16:13 + │ +16 │ num -= 1 + │ ^^^ u32 + +note: + ┌─ while_.fe:16:13 + │ +16 │ num -= 1 + │ ^^^^^^^^ () + +note: + ┌─ while_.fe:16:20 + │ +16 │ num -= 1 + │ ^ u32 + +note: + ┌─ while_.fe:17:16 + │ +17 │ } else { + │ ╭────────────────^ +18 │ │ return res +19 │ │ } + │ ╰─────────^ () + +note: 
+ ┌─ while_.fe:18:20 + │ +18 │ return res + │ ^^^ u32 + +note: + ┌─ while_.fe:22:12 + │ +22 │ return res + │ ^^^ u32 + +note: + ┌─ while_.fe:26:40 + │ +26 │ pub fn factorial3(mut num: u32) -> u32 { + │ ╭────────────────────────────────────────^ +27 │ │ let mut res = 1 +28 │ │ while true { +29 │ │ num = if num > 1 { + · │ +37 │ │ res +38 │ │ } + │ ╰─^ u32 + +note: + ┌─ while_.fe:27:9 + │ +27 │ let mut res = 1 + │ ^^^^^^^ u32 + +note: + ┌─ while_.fe:27:19 + │ +27 │ let mut res = 1 + │ ^ u32 + +note: + ┌─ while_.fe:28:11 + │ +28 │ while true { + │ ^^^^ bool + +note: + ┌─ while_.fe:28:16 + │ +28 │ while true { + │ ╭────────────────^ +29 │ │ num = if num > 1 { +30 │ │ res *= num +31 │ │ num - 1 + · │ +34 │ │ } +35 │ │ } + │ ╰─────^ () + +note: + ┌─ while_.fe:29:9 + │ +29 │ num = if num > 1 { + │ ^^^ u32 + +note: + ┌─ while_.fe:29:9 + │ +29 │ ╭ num = if num > 1 { +30 │ │ res *= num +31 │ │ num - 1 +32 │ │ } else { +33 │ │ break +34 │ │ } + │ ╰─────────^ () + +note: + ┌─ while_.fe:29:15 + │ +29 │ num = if num > 1 { + │ ╭───────────────^ +30 │ │ res *= num +31 │ │ num - 1 +32 │ │ } else { +33 │ │ break +34 │ │ } + │ ╰─────────^ u32 + +note: + ┌─ while_.fe:29:18 + │ +29 │ num = if num > 1 { + │ ^^^ u32 + +note: + ┌─ while_.fe:29:18 + │ +29 │ num = if num > 1 { + │ ^^^^^^^ bool + +note: + ┌─ while_.fe:29:24 + │ +29 │ num = if num > 1 { + │ ^ u32 + +note: + ┌─ while_.fe:29:26 + │ +29 │ num = if num > 1 { + │ ╭──────────────────────────^ +30 │ │ res *= num +31 │ │ num - 1 +32 │ │ } else { + │ ╰─────────^ u32 + +note: + ┌─ while_.fe:30:13 + │ +30 │ res *= num + │ ^^^ u32 + +note: + ┌─ while_.fe:30:13 + │ +30 │ res *= num + │ ^^^^^^^^^^ () + +note: + ┌─ while_.fe:30:20 + │ +30 │ res *= num + │ ^^^ u32 + +note: + ┌─ while_.fe:31:13 + │ +31 │ num - 1 + │ ^^^ u32 + +note: + ┌─ while_.fe:31:13 + │ +31 │ num - 1 + │ ^^^^^^^ u32 + +note: + ┌─ while_.fe:31:19 + │ +31 │ num - 1 + │ ^ u32 + +note: + ┌─ while_.fe:32:16 + │ +32 │ } else { + │ ╭────────────────^ +33 │ │ break +34 │ │ } + │ 
╰─────────^ u32 + +note: + ┌─ while_.fe:37:5 + │ +37 │ res + │ ^^^ u32 diff --git a/crates/hir-analysis/tests/constraints.rs b/crates/hir-analysis/tests/constraints.rs new file mode 100644 index 0000000000..428b79a871 --- /dev/null +++ b/crates/hir-analysis/tests/constraints.rs @@ -0,0 +1,18 @@ +mod test_db; +use std::path::Path; + +use dir_test::{dir_test, Fixture}; +use test_db::HirAnalysisTestDb; + +#[dir_test( + dir: "$CARGO_MANIFEST_DIR/test_files/constraints", + glob: "*.fe" +)] +fn constraints_standalone(fixture: Fixture<&str>) { + let mut db = HirAnalysisTestDb::default(); + let path = Path::new(fixture.path()); + let file_name = path.file_name().and_then(|file| file.to_str()).unwrap(); + let (ingot, file) = db.new_stand_alone(file_name, fixture.content()); + let (top_mod, _) = db.top_mod(ingot, file); + db.assert_no_diags(top_mod); +} diff --git a/crates/hir-analysis/tests/def_analysis.rs b/crates/hir-analysis/tests/def_analysis.rs new file mode 100644 index 0000000000..c8496264d5 --- /dev/null +++ b/crates/hir-analysis/tests/def_analysis.rs @@ -0,0 +1,18 @@ +mod test_db; +use std::path::Path; + +use dir_test::{dir_test, Fixture}; +use test_db::HirAnalysisTestDb; + +#[dir_test( + dir: "$CARGO_MANIFEST_DIR/test_files/def_analysis", + glob: "*.fe" +)] +fn def_analysis_standalone(fixture: Fixture<&str>) { + let mut db = HirAnalysisTestDb::default(); + let path = Path::new(fixture.path()); + let file_name = path.file_name().and_then(|file| file.to_str()).unwrap(); + let (ingot, file) = db.new_stand_alone(file_name, fixture.content()); + let (top_mod, _) = db.top_mod(ingot, file); + db.assert_no_diags(top_mod); +} diff --git a/crates/hir-analysis/tests/early_path_resolution.rs b/crates/hir-analysis/tests/early_path_resolution.rs new file mode 100644 index 0000000000..984cab0f08 --- /dev/null +++ b/crates/hir-analysis/tests/early_path_resolution.rs @@ -0,0 +1,92 @@ +mod test_db; +use std::path::Path; + +use dir_test::{dir_test, Fixture}; +use 
fe_compiler_test_utils::snap_test; +use fe_hir_analysis::name_resolution::{resolve_path, NameDomain}; +use hir::{ + hir_def::{Expr, ExprId, ItemKind, Pat, PatId, PathId, TopLevelMod, TypeId}, + visitor::prelude::*, + HirDb, SpannedHirDb, +}; +use test_db::{HirAnalysisTestDb, HirPropertyFormatter}; + +#[dir_test( + dir: "$CARGO_MANIFEST_DIR/test_files/early_path_resolution", + glob: "*.fe" +)] +fn early_path_resolution_standalone(fixture: Fixture<&str>) { + let mut db = HirAnalysisTestDb::default(); + let path = Path::new(fixture.path()); + let file_name = path.file_name().and_then(|file| file.to_str()).unwrap(); + let (ingot, file) = db.new_stand_alone(file_name, fixture.content()); + let (top_mod, mut prop_formatter) = db.top_mod(ingot, file); + db.assert_no_diags(top_mod); + + let mut ctxt = VisitorCtxt::with_top_mod(db.as_hir_db(), top_mod); + PathVisitor { + db: &db, + top_mod, + domain_stack: Vec::new(), + prop_formatter: &mut prop_formatter, + } + .visit_top_mod(&mut ctxt, top_mod); + + let res = prop_formatter.finish(db.as_spanned_hir_db()); + snap_test!(res, fixture.path()); +} + +struct PathVisitor<'db, 'a> { + db: &'db HirAnalysisTestDb, + top_mod: TopLevelMod<'db>, + domain_stack: Vec, + prop_formatter: &'a mut HirPropertyFormatter<'db>, +} + +impl<'db> Visitor<'db> for PathVisitor<'db, '_> { + fn visit_item(&mut self, ctxt: &mut VisitorCtxt<'db, LazyItemSpan<'db>>, item: ItemKind<'db>) { + if matches!(item, ItemKind::Use(_)) { + return; + } + + self.domain_stack.push(NameDomain::TYPE); + walk_item(self, ctxt, item); + self.domain_stack.pop(); + } + + fn visit_ty(&mut self, ctxt: &mut VisitorCtxt<'db, LazyTySpan<'db>>, ty: TypeId<'db>) { + self.domain_stack.push(NameDomain::TYPE); + walk_ty(self, ctxt, ty); + self.domain_stack.pop(); + } + + fn visit_pat(&mut self, _: &mut VisitorCtxt<'db, LazyPatSpan<'db>>, _: PatId, _: &Pat<'db>) {} + + fn visit_expr( + &mut self, + ctxt: &mut VisitorCtxt<'db, LazyExprSpan<'db>>, + expr: ExprId, + expr_data: 
&Expr<'db>, + ) { + if matches!(expr_data, Expr::Block { .. }) { + walk_expr(self, ctxt, expr); + } + } + + fn visit_path(&mut self, ctxt: &mut VisitorCtxt<'db, LazyPathSpan<'db>>, path: PathId<'db>) { + let scope = ctxt.scope(); + let prop = match resolve_path(self.db, path, scope, false) { + Ok(res) => res.pretty_path(self.db).unwrap(), + Err(err) => err.print(), + }; + let span = ctxt + .span() + .unwrap() + .segment(path.segment_index(self.db.as_hir_db())) + .ident() + .into(); + self.prop_formatter.push_prop(self.top_mod, span, prop); + + walk_path(self, ctxt, path); + } +} diff --git a/crates/hir-analysis/tests/import.rs b/crates/hir-analysis/tests/import.rs new file mode 100644 index 0000000000..452ca1c829 --- /dev/null +++ b/crates/hir-analysis/tests/import.rs @@ -0,0 +1,74 @@ +mod test_db; +use std::path::Path; + +use dir_test::{dir_test, Fixture}; +use fe_compiler_test_utils::snap_test; +use fe_hir_analysis::name_resolution::{ImportAnalysisPass, NameDerivation, ResolvedImports}; +use hir::{analysis_pass::ModuleAnalysisPass, hir_def::Use}; +use rustc_hash::FxHashMap; +use test_db::{HirAnalysisTestDb, HirPropertyFormatter}; + +#[dir_test( + dir: "$CARGO_MANIFEST_DIR/test_files/imports", + glob: "*.fe" +)] +fn import_standalone(fixture: Fixture<&str>) { + let mut db = HirAnalysisTestDb::default(); + let path = Path::new(fixture.path()); + let file_name = path.file_name().and_then(|file| file.to_str()).unwrap(); + let (ingot, file) = db.new_stand_alone(file_name, fixture.content()); + let (top_mod, mut prop_formatter) = db.top_mod(ingot, file); + + db.assert_no_diags(top_mod); + + let mut pass = ImportAnalysisPass::new(&db); + let resolved_imports = pass.resolve_imports(top_mod.ingot(&db)); + let diags = pass.run_on_module(top_mod); + if !diags.is_empty() { + panic!("Failed to resolve imports"); + } + + let res = format_imports(&db, &mut prop_formatter, resolved_imports); + snap_test!(res, fixture.path()); +} + +fn format_imports<'db>( + db: &'db 
HirAnalysisTestDb, + prop_formatter: &mut HirPropertyFormatter<'db>, + imports: &ResolvedImports<'db>, +) -> String { + let mut use_res_map: FxHashMap> = FxHashMap::default(); + + for name_resolved in imports.named_resolved.values().flat_map(|r| r.values()) { + for res in name_resolved.iter_ok() { + match res.derivation { + NameDerivation::NamedImported(use_) => use_res_map + .entry(use_) + .or_default() + .push(res.pretty_path(db).unwrap()), + _ => unreachable!(), + } + } + } + + for (_, glob_set) in imports.glob_resolved.iter() { + for (&use_, res_set_with_ident) in glob_set.iter() { + for (ident, res_set) in res_set_with_ident.iter() { + let ident = ident.data(db); + for res in res_set { + let def_path = res.pretty_path(db).unwrap(); + let resolved = format!("{} as {}", def_path, ident); + use_res_map.entry(use_).or_default().push(resolved) + } + } + } + } + for (use_, mut values) in use_res_map.into_iter() { + let use_span = use_.lazy_span().into(); + values.sort_unstable(); + let imported_names = values.join(" | "); + prop_formatter.push_prop(use_.top_mod(db), use_span, imported_names) + } + + prop_formatter.finish(db) +} diff --git a/crates/hir-analysis/tests/repeated_updates.rs b/crates/hir-analysis/tests/repeated_updates.rs new file mode 100644 index 0000000000..b57c5721dc --- /dev/null +++ b/crates/hir-analysis/tests/repeated_updates.rs @@ -0,0 +1,54 @@ +mod test_db; + +use fe_hir_analysis::{name_resolution::PathAnalysisPass, ty::FuncAnalysisPass}; +use hir::{analysis_pass::AnalysisPassManager, lower::map_file_to_mod, LowerHirDb}; +use test_db::HirAnalysisTestDb; + +use salsa::Setter; + +#[test] +fn test_updated() { + let mut db = HirAnalysisTestDb::default(); + let file_name = "file.fe"; + let versions = vec![ + r#"fn foo() {}"#, + r#"use bla + fn foo() {}"#, + r#"use bla::bla + fn foo() {}"#, + r#"use bla::bla::bla + fn foo() {}"#, + r#"use bla::bla::bla::bla + fn foo() {}"#, + ]; + + let (ingot, file) = db.new_stand_alone(file_name, versions[0]); + + 
for version in versions { + { + let top_mod = map_file_to_mod(db.as_lower_hir_db(), ingot, file); + let mut pass_manager = initialize_pass_manager(&db); + let _ = pass_manager.run_on_module(top_mod); + } + + { + file.set_text(&mut db).to(version.into()); + } + } +} + +fn initialize_pass_manager(db: &HirAnalysisTestDb) -> AnalysisPassManager<'_> { + let mut pass_manager = AnalysisPassManager::new(); + // pass_manager.add_module_pass(Box::new(ParsingPass::new(db))); + // pass_manager.add_module_pass(Box::new(DefConflictAnalysisPass::new(db))); + // pass_manager.add_module_pass(Box::new(ImportAnalysisPass::new(db))); + pass_manager.add_module_pass(Box::new(PathAnalysisPass::new(db))); + // pass_manager.add_module_pass(Box::new(AdtDefAnalysisPass::new(db))); + // pass_manager.add_module_pass(Box::new(TypeAliasAnalysisPass::new(db))); + // pass_manager.add_module_pass(Box::new(TraitAnalysisPass::new(db))); + // pass_manager.add_module_pass(Box::new(ImplAnalysisPass::new(db))); + // pass_manager.add_module_pass(Box::new(ImplTraitAnalysisPass::new(db))); + pass_manager.add_module_pass(Box::new(FuncAnalysisPass::new(db))); + // pass_manager.add_module_pass(Box::new(BodyAnalysisPass::new(db))); + pass_manager +} diff --git a/crates/hir-analysis/tests/test_db.rs b/crates/hir-analysis/tests/test_db.rs new file mode 100644 index 0000000000..d10b1b3d3f --- /dev/null +++ b/crates/hir-analysis/tests/test_db.rs @@ -0,0 +1,210 @@ +use std::collections::BTreeMap; + +use codespan_reporting::{ + diagnostic::{Diagnostic, Label}, + files::SimpleFiles, + term::{ + self, + termcolor::{BufferWriter, ColorChoice}, + }, +}; +use common::{ + diagnostics::Span, + impl_db_traits, + indexmap::{IndexMap, IndexSet}, + input::{IngotKind, Version}, + InputDb, InputFile, InputIngot, +}; +use driver::{diagnostics::ToCsDiag, CsDbWrapper, DriverDb}; +use fe_hir_analysis::{ + name_resolution::{DefConflictAnalysisPass, ImportAnalysisPass, PathAnalysisPass}, + ty::{ + AdtDefAnalysisPass, BodyAnalysisPass, 
FuncAnalysisPass, ImplAnalysisPass, + ImplTraitAnalysisPass, TraitAnalysisPass, TypeAliasAnalysisPass, + }, + HirAnalysisDb, +}; +use hir::{ + analysis_pass::AnalysisPassManager, + hir_def::TopLevelMod, + lower, + span::{DynLazySpan, LazySpan}, + HirDb, LowerHirDb, ParsingPass, SpannedHirDb, +}; +use rustc_hash::FxHashMap; + +type CodeSpanFileId = usize; + +#[derive(Default, Clone)] +#[salsa::db] +pub struct HirAnalysisTestDb { + storage: salsa::Storage, +} +impl_db_traits!( + HirAnalysisTestDb, + InputDb, + HirDb, + LowerHirDb, + SpannedHirDb, + HirAnalysisDb, + DriverDb, +); + +// https://github.com/rust-lang/rust/issues/46379 +#[allow(dead_code)] +impl HirAnalysisTestDb { + pub fn new_stand_alone(&mut self, file_name: &str, text: &str) -> (InputIngot, InputFile) { + let kind = IngotKind::StandAlone; + let version = Version::new(0, 0, 1); + let ingot = InputIngot::new(self, file_name, kind, version, IndexSet::default()); + let root = InputFile::new(self, file_name.into(), text.to_string()); + ingot.set_root_file(self, root); + ingot.set_files(self, [root].into_iter().collect()); + (ingot, root) + } + + pub fn top_mod( + &self, + ingot: InputIngot, + input: InputFile, + ) -> (TopLevelMod, HirPropertyFormatter) { + let mut prop_formatter = HirPropertyFormatter::default(); + let top_mod = self.register_file(&mut prop_formatter, ingot, input); + (top_mod, prop_formatter) + } + + pub fn assert_no_diags(&self, top_mod: TopLevelMod) { + let mut manager = initialize_analysis_pass(self); + let diags = manager.run_on_module(top_mod); + + if !diags.is_empty() { + let writer = BufferWriter::stderr(ColorChoice::Auto); + let mut buffer = writer.buffer(); + let config = term::Config::default(); + + // copied from driver + let mut diags: Vec<_> = diags.iter().map(|d| d.to_complete(self)).collect(); + diags.sort_by(|lhs, rhs| match lhs.error_code.cmp(&rhs.error_code) { + std::cmp::Ordering::Equal => lhs.primary_span().cmp(&rhs.primary_span()), + ord => ord, + }); + + for 
diag in diags { + let cs_diag = &diag.to_cs(self); + term::emit( + &mut buffer, + &config, + &CsDbWrapper(self.as_driver_db()), + cs_diag, + ) + .unwrap(); + } + eprintln!("{}", std::str::from_utf8(buffer.as_slice()).unwrap()); + + panic!("this module contains errors"); + } + } + + fn register_file<'db>( + &'db self, + prop_formatter: &mut HirPropertyFormatter<'db>, + ingot: InputIngot, + input_file: InputFile, + ) -> TopLevelMod<'db> { + let top_mod = lower::map_file_to_mod(self, ingot, input_file); + let path = input_file.path(self); + let text = input_file.text(self); + prop_formatter.register_top_mod(path.as_str(), text, top_mod); + top_mod + } +} + +pub struct HirPropertyFormatter<'db> { + // https://github.com/rust-lang/rust/issues/46379 + #[allow(dead_code)] + properties: IndexMap, Vec<(String, DynLazySpan<'db>)>>, + top_mod_to_file: FxHashMap, CodeSpanFileId>, + code_span_files: SimpleFiles, +} + +// https://github.com/rust-lang/rust/issues/46379 +#[allow(dead_code)] +impl<'db> HirPropertyFormatter<'db> { + pub fn push_prop(&mut self, top_mod: TopLevelMod<'db>, span: DynLazySpan<'db>, prop: String) { + self.properties + .entry(top_mod) + .or_default() + .push((prop, span)); + } + + pub fn finish(&mut self, db: &'db dyn SpannedHirDb) -> String { + let writer = BufferWriter::stderr(ColorChoice::Never); + let mut buffer = writer.buffer(); + let config = term::Config::default(); + + for top_mod in self.top_mod_to_file.keys() { + if !self.properties.contains_key(top_mod) { + continue; + } + + let diags = self.properties[top_mod] + .iter() + .map(|(prop, span)| { + let (span, diag) = self.property_to_diag(db, *top_mod, prop, span.clone()); + ((span.file, (span.range.start(), span.range.end())), diag) + }) + .collect::>(); + + for diag in diags.values() { + term::emit(&mut buffer, &config, &self.code_span_files, diag).unwrap(); + } + } + + std::str::from_utf8(buffer.as_slice()).unwrap().to_string() + } + + fn property_to_diag( + &self, + db: &'db 
dyn SpannedHirDb, + top_mod: TopLevelMod<'db>, + prop: &str, + span: DynLazySpan<'db>, + ) -> (Span, Diagnostic) { + let file_id = self.top_mod_to_file[&top_mod]; + let span = span.resolve(db).unwrap(); + let diag = Diagnostic::note() + .with_labels(vec![Label::primary(file_id, span.range).with_message(prop)]); + (span, diag) + } + + fn register_top_mod(&mut self, path: &str, text: &str, top_mod: TopLevelMod<'db>) { + let file_id = self.code_span_files.add(path.to_string(), text.to_string()); + self.top_mod_to_file.insert(top_mod, file_id); + } +} + +impl Default for HirPropertyFormatter<'_> { + fn default() -> Self { + Self { + properties: Default::default(), + top_mod_to_file: Default::default(), + code_span_files: SimpleFiles::new(), + } + } +} + +fn initialize_analysis_pass(db: &HirAnalysisTestDb) -> AnalysisPassManager<'_> { + let mut pass_manager = AnalysisPassManager::new(); + pass_manager.add_module_pass(Box::new(ParsingPass::new(db))); + pass_manager.add_module_pass(Box::new(DefConflictAnalysisPass::new(db))); + pass_manager.add_module_pass(Box::new(ImportAnalysisPass::new(db))); + pass_manager.add_module_pass(Box::new(PathAnalysisPass::new(db))); + pass_manager.add_module_pass(Box::new(AdtDefAnalysisPass::new(db))); + pass_manager.add_module_pass(Box::new(TypeAliasAnalysisPass::new(db))); + pass_manager.add_module_pass(Box::new(TraitAnalysisPass::new(db))); + pass_manager.add_module_pass(Box::new(ImplAnalysisPass::new(db))); + pass_manager.add_module_pass(Box::new(ImplTraitAnalysisPass::new(db))); + pass_manager.add_module_pass(Box::new(FuncAnalysisPass::new(db))); + pass_manager.add_module_pass(Box::new(BodyAnalysisPass::new(db))); + pass_manager +} diff --git a/crates/hir-analysis/tests/ty_check.rs b/crates/hir-analysis/tests/ty_check.rs new file mode 100644 index 0000000000..bb1f3f55a6 --- /dev/null +++ b/crates/hir-analysis/tests/ty_check.rs @@ -0,0 +1,49 @@ +mod test_db; +use std::path::Path; + +use dir_test::{dir_test, Fixture}; +use 
fe_compiler_test_utils::snap_test; +use fe_hir_analysis::ty::ty_check::check_func_body; +use test_db::HirAnalysisTestDb; + +#[dir_test( + dir: "$CARGO_MANIFEST_DIR/test_files/ty_check", + glob: "**/*.fe" +)] +fn ty_check_standalone(fixture: Fixture<&str>) { + let mut db = HirAnalysisTestDb::default(); + let path = Path::new(fixture.path()); + let file_name = path.file_name().and_then(|file| file.to_str()).unwrap(); + let (ingot, file) = db.new_stand_alone(file_name, fixture.content()); + let (top_mod, mut prop_formatter) = db.top_mod(ingot, file); + + db.assert_no_diags(top_mod); + + for &func in top_mod.all_funcs(&db) { + let Some(body) = func.body(&db) else { + continue; + }; + + let typed_body = &check_func_body(&db, func).1; + for expr in body.exprs(&db).keys() { + let ty = typed_body.expr_ty(&db, expr); + prop_formatter.push_prop( + func.top_mod(&db), + expr.lazy_span(body).into(), + ty.pretty_print(&db).to_string(), + ); + } + + for pat in body.pats(&db).keys() { + let ty = typed_body.pat_ty(&db, pat); + prop_formatter.push_prop( + func.top_mod(&db), + pat.lazy_span(body).into(), + ty.pretty_print(&db).to_string(), + ); + } + } + + let res = prop_formatter.finish(&db); + snap_test!(res, fixture.path()); +} diff --git a/crates/hir/Cargo.toml b/crates/hir/Cargo.toml new file mode 100644 index 0000000000..ca364d2e0c --- /dev/null +++ b/crates/hir/Cargo.toml @@ -0,0 +1,23 @@ +[package] +name = "fe-hir" +version = "0.26.0" +authors = ["The Fe Developers "] +edition = "2021" +license = "Apache-2.0" +repository = "https://github.com/ethereum/fe" +description = "Provides HIR definition and lowering for Fe lang" + +[dependencies] +salsa.workspace = true +derive_more = "0.99" +cranelift-entity = "0.91" +num-bigint = "0.4" +num-traits = "0.2.15" +camino = "1.1.4" +rustc-hash = "1.1.0" +smallvec = "1.10.0" +paste = "1.0" +dot2 = "1.0" + +common = { path = "../common2", package = "fe-common2" } +parser = { path = "../parser2", package = "fe-parser2" } diff --git 
a/crates/hir/src/analysis_pass.rs b/crates/hir/src/analysis_pass.rs new file mode 100644 index 0000000000..f162bf3c71 --- /dev/null +++ b/crates/hir/src/analysis_pass.rs @@ -0,0 +1,36 @@ +use crate::{diagnostics::DiagnosticVoucher, hir_def::TopLevelMod}; + +/// All analysis passes that run analysis on the HIR top level module +/// granularity should implement this trait. +pub trait ModuleAnalysisPass<'db> { + fn run_on_module( + &mut self, + top_mod: TopLevelMod<'db>, + ) -> Vec + 'db>>; +} + +#[derive(Default)] +pub struct AnalysisPassManager<'db> { + module_passes: Vec + 'db>>, +} + +impl<'db> AnalysisPassManager<'db> { + pub fn new() -> Self { + Self::default() + } + + pub fn add_module_pass(&mut self, pass: Box + 'db>) { + self.module_passes.push(pass); + } + + pub fn run_on_module( + &mut self, + top_mod: TopLevelMod<'db>, + ) -> Vec + 'db>> { + let mut diags = vec![]; + for pass in self.module_passes.iter_mut() { + diags.extend(pass.run_on_module(top_mod)); + } + diags + } +} diff --git a/crates/hir/src/diagnostics.rs b/crates/hir/src/diagnostics.rs new file mode 100644 index 0000000000..177b56e32d --- /dev/null +++ b/crates/hir/src/diagnostics.rs @@ -0,0 +1,47 @@ +//! This module defines the diagnostics that can be accumulated inside salsa-db +//! with span-agnostic forms. All diagnostics accumulated in salsa-db should +//! implement [`DiagnosticVoucher`] which defines the conversion into +//! [`CompleteDiagnostic`]. + +use common::diagnostics::{CompleteDiagnostic, GlobalErrorCode}; + +use crate::SpannedHirDb; + +/// All diagnostics accumulated in salsa-db should implement +/// [`DiagnosticVoucher`] which defines the conversion into +/// [`CompleteDiagnostic`]. +/// +/// All types that implement `DiagnosticVoucher` must NOT have a span +/// information which invalidates cache in salsa-db. Instead of it, the all +/// information is given by [`SpannedHirDb`] to allow evaluating span lazily. 
+/// +/// The reason why we use `DiagnosticVoucher` is that we want to evaluate span +/// lazily to avoid invalidating cache in salsa-db. +/// +/// To obtain a span from HIR nodes in a lazy manner, it's recommended to use +/// `[LazySpan]`(crate::span::LazySpan) and types that implement `LazySpan`. +pub trait DiagnosticVoucher<'db>: Send { + fn error_code(&self) -> GlobalErrorCode; + /// Makes a [`CompleteDiagnostic`]. + fn to_complete(&self, db: &'db dyn SpannedHirDb) -> CompleteDiagnostic; +} + +impl DiagnosticVoucher<'_> for CompleteDiagnostic { + fn error_code(&self) -> GlobalErrorCode { + self.error_code.clone() + } + + fn to_complete(&self, _db: &dyn SpannedHirDb) -> CompleteDiagnostic { + self.clone() + } +} + +impl<'db> DiagnosticVoucher<'db> for Box + 'db> { + fn error_code(&self) -> GlobalErrorCode { + self.as_ref().error_code() + } + + fn to_complete(&self, db: &'db dyn SpannedHirDb) -> CompleteDiagnostic { + self.as_ref().to_complete(db) + } +} diff --git a/crates/hir/src/hir_def/attr.rs b/crates/hir/src/hir_def/attr.rs new file mode 100644 index 0000000000..92029f470e --- /dev/null +++ b/crates/hir/src/hir_def/attr.rs @@ -0,0 +1,31 @@ +use super::{IdentId, Partial, StringId}; + +#[salsa::interned] +pub struct AttrListId<'db> { + #[return_ref] + pub data: Vec>, +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash, derive_more::From)] +pub enum Attr<'db> { + Normal(NormalAttr<'db>), + DocComment(DocCommentAttr<'db>), +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct NormalAttr<'db> { + pub name: Partial>, + pub args: Vec>, +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct DocCommentAttr<'db> { + /// This is the text of the doc comment, excluding the `///` prefix. 
+ pub text: StringId<'db>, +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct AttrArg<'db> { + pub key: Partial>, + pub value: Partial>, +} diff --git a/crates/hir/src/hir_def/body.rs b/crates/hir/src/hir_def/body.rs new file mode 100644 index 0000000000..3e86ad1807 --- /dev/null +++ b/crates/hir/src/hir_def/body.rs @@ -0,0 +1,278 @@ +// This is necessary because `salsa::tracked` structs generates a +// constructor +// that may take many arguments depending on the number of fields in the struct. +#![allow(clippy::too_many_arguments)] + +use std::hash::Hash; + +use cranelift_entity::{EntityRef, PrimaryMap, SecondaryMap}; +use parser::ast::{self, prelude::*}; +use rustc_hash::FxHashMap; +use salsa::Update; + +use super::{ + scope_graph::ScopeId, Expr, ExprId, Partial, Pat, PatId, Stmt, StmtId, TopLevelMod, + TrackedItemId, +}; +use crate::{ + span::{item::LazyBodySpan, HirOrigin}, + visitor::prelude::*, + HirDb, +}; + +#[salsa::tracked] +pub struct Body<'db> { + #[id] + id: TrackedItemId<'db>, + + /// The expression that evaluates to the value of the body. + /// In case of a function body, this is always be the block expression. + pub expr: ExprId, + + pub body_kind: BodyKind, + + #[return_ref] + pub stmts: NodeStore>>, + #[return_ref] + pub exprs: NodeStore>>, + #[return_ref] + pub pats: NodeStore>>, + pub top_mod: TopLevelMod<'db>, + + #[return_ref] + pub(crate) source_map: BodySourceMap, + #[return_ref] + pub(crate) origin: HirOrigin, +} + +impl<'db> Body<'db> { + pub fn lazy_span(self) -> LazyBodySpan<'db> { + LazyBodySpan::new(self) + } + + pub fn scope(self) -> ScopeId<'db> { + ScopeId::from_item(self.into()) + } + + #[doc(hidden)] + /// Returns the order of the blocks in the body in lexical order. + /// e.g., + /// ```fe + /// fn foo() { // 0 + /// ... + /// { // 1 + /// ... + /// { // 2 + /// ... + /// } + /// } + /// } + /// + /// + /// Currently, this is only used for testing. 
+ /// When it turns out to be generally useful, we need to consider to let + /// salsa track this method. + pub fn iter_block(self, db: &dyn HirDb) -> FxHashMap { + BlockOrderCalculator::new(db, self).calculate() + } +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub enum BodyKind { + FuncBody, + Anonymous, +} + +#[derive(Debug)] +pub struct NodeStore(PrimaryMap) +where + K: EntityRef; + +impl NodeStore +where + K: EntityRef, +{ + pub fn new() -> Self { + Self(PrimaryMap::new()) + } +} +impl Default for NodeStore +where + K: EntityRef, +{ + fn default() -> Self { + Self::new() + } +} + +impl std::ops::Deref for NodeStore +where + K: EntityRef, +{ + type Target = PrimaryMap; + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl std::ops::DerefMut for NodeStore +where + K: EntityRef, +{ + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} + +impl std::ops::Index for NodeStore +where + K: EntityRef, +{ + type Output = V; + + fn index(&self, k: K) -> &V { + &self.0[k] + } +} + +unsafe impl Update for NodeStore +where + K: EntityRef + Update, + V: Update, +{ + unsafe fn maybe_update(old_ptr: *mut Self, new_val: Self) -> bool { + let old_val = unsafe { &mut *old_ptr }; + if old_val.len() != new_val.len() { + *old_val = new_val; + return true; + } + + let mut changed = false; + for (k, v) in new_val.0.into_iter() { + let old_val = &mut old_val[k]; + changed |= Update::maybe_update(old_val, v); + } + + changed + } +} + +/// Mutable indexing into an `PrimaryMap`. 
+impl std::ops::IndexMut for NodeStore +where + K: EntityRef, +{ + fn index_mut(&mut self, k: K) -> &mut V { + &mut self.0[k] + } +} + +pub trait SourceAst: AstNode + Clone + Hash + PartialEq + Eq {} +impl SourceAst for T where T: AstNode + Clone + Hash + PartialEq + Eq {} + +#[derive(Clone, Debug, Default, PartialEq, Eq)] +pub struct BodySourceMap { + pub stmt_map: SourceNodeMap, + pub expr_map: SourceNodeMap, + pub pat_map: SourceNodeMap, +} + +#[derive(Clone, Debug)] +pub struct SourceNodeMap +where + Ast: SourceAst, + Node: EntityRef, +{ + pub node_to_source: SecondaryMap>, + pub source_to_node: FxHashMap, Node>, +} + +impl SourceNodeMap +where + Ast: SourceAst, + Node: EntityRef, +{ + pub(crate) fn insert(&mut self, node: Node, ast: HirOrigin) { + self.node_to_source[node] = ast.clone(); + self.source_to_node.insert(ast, node); + } + + pub(crate) fn node_to_source(&self, node: Node) -> &HirOrigin { + &self.node_to_source[node] + } +} + +impl PartialEq for SourceNodeMap +where + Ast: SourceAst, + Node: EntityRef, +{ + fn eq(&self, other: &Self) -> bool { + self.node_to_source == other.node_to_source + } +} + +impl Eq for SourceNodeMap +where + Ast: SourceAst, + Node: EntityRef, +{ +} + +impl Default for SourceNodeMap +where + Ast: SourceAst, + Node: EntityRef, +{ + fn default() -> Self { + Self { + source_to_node: FxHashMap::default(), + node_to_source: SecondaryMap::new(), + } + } +} + +struct BlockOrderCalculator<'db> { + db: &'db dyn HirDb, + order: FxHashMap, + body: Body<'db>, + fresh_number: usize, +} + +impl<'db> Visitor<'db> for BlockOrderCalculator<'db> { + fn visit_expr( + &mut self, + ctxt: &mut crate::visitor::VisitorCtxt<'db, crate::span::expr::LazyExprSpan<'db>>, + expr: ExprId, + expr_data: &Expr<'db>, + ) { + if ctxt.body() == self.body && matches!(expr_data, Expr::Block(..)) { + self.order.insert(expr, self.fresh_number); + self.fresh_number += 1; + } + + walk_expr(self, ctxt, expr) + } +} + +impl<'db> BlockOrderCalculator<'db> { + fn new(db: 
&'db dyn HirDb, body: Body<'db>) -> Self { + Self { + db, + order: FxHashMap::default(), + body, + fresh_number: 0, + } + } + + fn calculate(mut self) -> FxHashMap { + let expr = self.body.expr(self.db); + let Partial::Present(expr_data) = expr.data(self.db, self.body) else { + return self.order; + }; + + let mut ctxt = VisitorCtxt::with_expr(self.db, self.body.scope(), self.body, expr); + self.visit_expr(&mut ctxt, expr, expr_data); + self.order + } +} diff --git a/crates/hir/src/hir_def/expr.rs b/crates/hir/src/hir_def/expr.rs new file mode 100644 index 0000000000..1bcfd08fba --- /dev/null +++ b/crates/hir/src/hir_def/expr.rs @@ -0,0 +1,198 @@ +use cranelift_entity::entity_impl; + +use super::{Body, GenericArgListId, IdentId, IntegerId, LitKind, Partial, PatId, PathId, StmtId}; +use crate::{span::expr::LazyExprSpan, HirDb}; + +#[derive(Debug, Clone, PartialEq, Eq, Hash, salsa::Update)] +pub enum Expr<'db> { + Lit(LitKind<'db>), + Block(Vec), + /// The first `ExprId` is the lhs, the second is the rhs. + /// + /// and a `BinOp`. + Bin(ExprId, ExprId, Partial), + Un(ExprId, Partial), + /// (callee, call args) + Call(ExprId, Vec>), + /// (receiver, method_name, generic args, call args) + MethodCall( + ExprId, + Partial>, + GenericArgListId<'db>, + Vec>, + ), + Path(Partial>), + /// The record construction expression. + /// The fist `PathId` is the record type, the second is the record fields. + RecordInit(Partial>, Vec>), + Field(ExprId, Partial>), + Tuple(Vec), + /// The first `ExprId` is the indexed expression, the second is the index. + Index(ExprId, ExprId), + Array(Vec), + + /// The size of the rep should be the body instead of expression, because it + /// should be resolved as a constant expression. + ArrayRep(ExprId, Partial>), + + /// The first `ExprId` is the condition, the second is the then branch, the + /// third is the else branch. + /// In case `else if`, the third is the lowered into `If` expression. 
+ If(ExprId, ExprId, Option), + + /// The first `ExprId` is the scrutinee, the second is the arms. + Match(ExprId, Partial>), + + /// The `Assign` Expression. The first `ExprId` is the destination of the + /// assignment, and the second `ExprId` is the rhs value of the binding. + Assign(ExprId, ExprId), + + AugAssign(ExprId, ExprId, ArithBinOp), +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, salsa::Update)] +pub struct ExprId(u32); +entity_impl!(ExprId); + +impl ExprId { + pub fn lazy_span(self, body: Body) -> LazyExprSpan { + LazyExprSpan::new(body, self) + } + + pub fn data<'db>(self, db: &'db dyn HirDb, body: Body<'db>) -> &'db Partial> { + &body.exprs(db)[self] + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, salsa::Update)] +pub enum FieldIndex<'db> { + /// The field is indexed by its name. + /// `field.foo`. + Ident(IdentId<'db>), + /// The field is indexed by its integer. + /// `field.0`. + Index(IntegerId<'db>), +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash, salsa::Update)] +pub struct MatchArm { + pub pat: PatId, + pub body: ExprId, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, derive_more::From, salsa::Update)] +pub enum BinOp { + Arith(ArithBinOp), + Comp(CompBinOp), + Logical(LogicalBinOp), +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum ArithBinOp { + /// `+` + Add, + /// `-` + Sub, + /// `*` + Mul, + /// `/` + Div, + /// `%` + Rem, + /// `**` + Pow, + /// `<<` + LShift, + /// `>>` + RShift, + /// `&` + BitAnd, + /// `|` + BitOr, + /// `^` + BitXor, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum CompBinOp { + /// `==` + Eq, + /// `!=` + NotEq, + /// `<` + Lt, + /// `<=` + LtEq, + /// `>` + Gt, + /// `>=` + GtEq, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum LogicalBinOp { + /// `&&` + And, + /// `||` + Or, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum UnOp { + /// `+` + Plus, + /// `-` + Minus, + /// `!` + Not, + /// `~` + 
BitNot, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, salsa::Update)] +pub struct CallArg<'db> { + pub label: Option>, + pub expr: ExprId, +} + +impl<'db> CallArg<'db> { + /// Returns the label of the argument if + /// 1. the argument has an explicit label. or + /// 2. If 1. is not true, then the argument is labeled when the expression + /// is a path expression and the path is an identifier. + pub fn label_eagerly(&self, db: &'db dyn HirDb, body: Body<'db>) -> Option> { + if let Some(label) = self.label { + return Some(label); + }; + + let Partial::Present(Expr::Path(Partial::Present(path))) = self.expr.data(db, body) else { + return None; + }; + + path.as_ident(db) + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, salsa::Update)] +pub struct Field<'db> { + pub label: Option>, + pub expr: ExprId, +} + +impl<'db> Field<'db> { + /// Returns the label of the field if + /// 1. the filed has an explicit label. or + /// 2. If 1. is not true, then the field is labeled when the expression is a + /// path expression and the path is an identifier. + pub fn label_eagerly(&self, db: &'db dyn HirDb, body: Body<'db>) -> Option> { + if let Some(label) = self.label { + return Some(label); + }; + + let Partial::Present(Expr::Path(Partial::Present(path))) = self.expr.data(db, body) else { + return None; + }; + + path.as_ident(db) + } +} diff --git a/crates/hir/src/hir_def/ident.rs b/crates/hir/src/hir_def/ident.rs new file mode 100644 index 0000000000..e24dfb9031 --- /dev/null +++ b/crates/hir/src/hir_def/ident.rs @@ -0,0 +1,47 @@ +use crate::HirDb; + +#[salsa::interned] +pub struct IdentId<'db> { + #[return_ref] + pub data: String, +} + +macro_rules! define_keywords { + ($(($name: ident, $kw_str:literal),)*) => { + impl<'db> IdentId<'db> { + $( + paste::paste! { + pub fn [](db: &'db dyn HirDb) -> Self { + Self::new(db, $kw_str.to_string()) + } + + pub fn [](self, db: &dyn HirDb) -> bool { + self.data(db) == $kw_str + } + } + )+ + } + }; +} + +define_keywords! 
{ + (ingot, "ingot"), + (super, "super"), + (self, "self"), + (self_ty, "Self"), + (bool, "bool"), + (u8, "u8"), + (u16, "u16"), + (u32, "u32"), + (u64, "u64"), + (u128, "u128"), + (u256, "u256"), + (usize, "usize"), + (i8, "i8"), + (i16, "i16"), + (i32, "i32"), + (i64, "i64"), + (i128, "i128"), + (i256, "i256"), + (isize, "isize"), +} diff --git a/crates/hir/src/hir_def/item.rs b/crates/hir/src/hir_def/item.rs new file mode 100644 index 0000000000..75fffeeb1f --- /dev/null +++ b/crates/hir/src/hir_def/item.rs @@ -0,0 +1,1224 @@ +// This is necessary because `salsa::tracked` structs generates a +// constructor +// that may take many arguments depending on the number of fields in the struct. +#![allow(clippy::too_many_arguments)] + +use common::InputFile; +use parser::ast; + +use super::{ + scope_graph::{ScopeGraph, ScopeId}, + AttrListId, Body, FuncParamListId, GenericParamListId, IdentId, IngotId, Partial, TupleTypeId, + TypeId, UseAlias, WhereClauseId, +}; +use crate::{ + hir_def::TraitRefId, + lower, + span::{ + item::{ + LazyConstSpan, LazyContractSpan, LazyEnumSpan, LazyFuncSpan, LazyImplSpan, + LazyImplTraitSpan, LazyItemSpan, LazyModSpan, LazyStructSpan, LazyTopModSpan, + LazyTraitSpan, LazyTypeAliasSpan, LazyUseSpan, + }, + params::{LazyGenericParamListSpan, LazyWhereClauseSpan}, + DynLazySpan, HirOrigin, + }, + HirDb, +}; + +#[derive( + Debug, + Clone, + Copy, + PartialEq, + Eq, + Hash, + PartialOrd, + Ord, + derive_more::From, + derive_more::TryInto, + salsa::Update, +)] +pub enum ItemKind<'db> { + TopMod(TopLevelMod<'db>), + Mod(Mod<'db>), + Func(Func<'db>), + Struct(Struct<'db>), + Contract(Contract<'db>), + Enum(Enum<'db>), + TypeAlias(TypeAlias<'db>), + Impl(Impl<'db>), + Trait(Trait<'db>), + ImplTrait(ImplTrait<'db>), + Const(Const<'db>), + Use(Use<'db>), + /// Body is not an `Item`, but this makes it easier for analyzers to handle + /// it. 
+ Body(Body<'db>), +} + +impl<'db> ItemKind<'db> { + pub fn lazy_span(self) -> LazyItemSpan<'db> { + LazyItemSpan::new(self) + } + + pub fn scope(self) -> ScopeId<'db> { + ScopeId::from_item(self) + } + + pub fn name(self, db: &'db dyn HirDb) -> Option> { + use ItemKind::*; + match self { + TopMod(top_mod) => Some(top_mod.name(db)), + Mod(mod_) => mod_.name(db).to_opt(), + Func(func_) => func_.name(db).to_opt(), + Struct(struct_) => struct_.name(db).to_opt(), + Contract(contract_) => contract_.name(db).to_opt(), + Enum(enum_) => enum_.name(db).to_opt(), + TypeAlias(alias) => alias.name(db).to_opt(), + Trait(trait_) => trait_.name(db).to_opt(), + Const(const_) => const_.name(db).to_opt(), + Use(_) | Body(_) | Impl(_) | ImplTrait(_) => None, + } + } + + /// Returns attributes being applied to this item. + pub fn attrs(self, db: &'db dyn HirDb) -> Option> { + match self { + Self::Mod(mod_) => mod_.attributes(db), + Self::Func(func) => func.attributes(db), + Self::Struct(struct_) => struct_.attributes(db), + Self::Contract(contract) => contract.attributes(db), + Self::Enum(enum_) => enum_.attributes(db), + Self::TypeAlias(alias) => alias.attributes(db), + Self::Impl(impl_) => impl_.attributes(db), + Self::Trait(trait_) => trait_.attributes(db), + Self::ImplTrait(impl_trait) => impl_trait.attributes(db), + Self::Const(const_) => const_.attributes(db), + _ => return None, + } + .into() + } + + pub fn kind_name(self) -> &'static str { + use ItemKind::*; + match self { + TopMod(_) => "mod", + Mod(_) => "mod", + Func(_) => "fn", + Struct(_) => "struct", + Contract(_) => "contract", + Enum(_) => "enum", + TypeAlias(_) => "type", + Trait(_) => "trait", + Impl(_) => "impl", + ImplTrait(_) => "impl trait", + Const(_) => "const", + Use(_) => "use", + Body(_) => "body", + } + } + + pub fn name_span(self) -> Option> { + use ItemKind::*; + match self { + Mod(mod_) => Some(mod_.lazy_span().name().into()), + Func(func_) => Some(func_.lazy_span().name().into()), + Struct(struct_) => 
Some(struct_.lazy_span().name().into()), + Contract(contract_) => Some(contract_.lazy_span().name().into()), + Enum(enum_) => Some(enum_.lazy_span().name().into()), + TypeAlias(alias) => Some(alias.lazy_span().alias().into()), + Trait(trait_) => Some(trait_.lazy_span().name().into()), + Const(const_) => Some(const_.lazy_span().name().into()), + TopMod(_) | Use(_) | Body(_) | Impl(_) | ImplTrait(_) => None, + } + } + + pub fn vis(self, db: &dyn HirDb) -> Visibility { + use ItemKind::*; + match self { + TopMod(top_mod) => top_mod.vis(db), + Mod(mod_) => mod_.vis(db), + Func(func) => func.vis(db), + Struct(struct_) => struct_.vis(db), + Contract(contract) => contract.vis(db), + Enum(enum_) => enum_.vis(db), + TypeAlias(type_) => type_.vis(db), + Trait(trait_) => trait_.vis(db), + Const(const_) => const_.vis(db), + Use(use_) => use_.vis(db), + Impl(_) | ImplTrait(_) | Body(_) => Visibility::Private, + } + } + + pub fn ingot(self, db: &'db dyn HirDb) -> IngotId<'db> { + let top_mod = self.top_mod(db); + top_mod.ingot(db) + } + + pub fn top_mod(self, db: &'db dyn HirDb) -> TopLevelMod<'db> { + match self { + ItemKind::TopMod(top_mod) => top_mod, + ItemKind::Mod(mod_) => mod_.top_mod(db), + ItemKind::Func(func) => func.top_mod(db), + ItemKind::Struct(struct_) => struct_.top_mod(db), + ItemKind::Contract(contract) => contract.top_mod(db), + ItemKind::Enum(enum_) => enum_.top_mod(db), + ItemKind::TypeAlias(type_) => type_.top_mod(db), + ItemKind::Trait(trait_) => trait_.top_mod(db), + ItemKind::Impl(impl_) => impl_.top_mod(db), + ItemKind::ImplTrait(impl_trait) => impl_trait.top_mod(db), + ItemKind::Const(const_) => const_.top_mod(db), + ItemKind::Use(use_) => use_.top_mod(db), + ItemKind::Body(body) => body.top_mod(db), + } + } + + pub fn is_type(self) -> bool { + matches!( + self, + Self::Struct(_) | Self::Enum(_) | Self::Contract(_) | Self::TypeAlias(_) + ) + } + + pub fn is_trait(self) -> bool { + matches!(self, Self::Trait(_)) + } +} + +impl<'db> From> for 
ItemKind<'db> { + fn from(owner: GenericParamOwner<'db>) -> Self { + match owner { + GenericParamOwner::Func(func) => ItemKind::Func(func), + GenericParamOwner::Struct(struct_) => ItemKind::Struct(struct_), + GenericParamOwner::Enum(enum_) => ItemKind::Enum(enum_), + GenericParamOwner::TypeAlias(type_alias) => ItemKind::TypeAlias(type_alias), + GenericParamOwner::Impl(impl_) => ItemKind::Impl(impl_), + GenericParamOwner::Trait(trait_) => ItemKind::Trait(trait_), + GenericParamOwner::ImplTrait(impl_trait) => ItemKind::ImplTrait(impl_trait), + } + } +} + +impl<'db> From> for ItemKind<'db> { + fn from(owner: WhereClauseOwner<'db>) -> Self { + match owner { + WhereClauseOwner::Func(func) => ItemKind::Func(func), + WhereClauseOwner::Struct(struct_) => ItemKind::Struct(struct_), + WhereClauseOwner::Enum(enum_) => ItemKind::Enum(enum_), + WhereClauseOwner::Impl(impl_) => ItemKind::Impl(impl_), + WhereClauseOwner::Trait(trait_) => ItemKind::Trait(trait_), + WhereClauseOwner::ImplTrait(impl_trait) => ItemKind::ImplTrait(impl_trait), + } + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord, derive_more::From)] +pub enum GenericParamOwner<'db> { + Func(Func<'db>), + Struct(Struct<'db>), + Enum(Enum<'db>), + TypeAlias(TypeAlias<'db>), + Impl(Impl<'db>), + Trait(Trait<'db>), + ImplTrait(ImplTrait<'db>), +} + +impl<'db> GenericParamOwner<'db> { + pub fn top_mod(self, db: &'db dyn HirDb) -> TopLevelMod<'db> { + ItemKind::from(self).top_mod(db) + } + + pub fn params(self, db: &'db dyn HirDb) -> GenericParamListId<'db> { + match self { + GenericParamOwner::Func(func) => func.generic_params(db), + GenericParamOwner::Struct(struct_) => struct_.generic_params(db), + GenericParamOwner::Enum(enum_) => enum_.generic_params(db), + GenericParamOwner::TypeAlias(type_alias) => type_alias.generic_params(db), + GenericParamOwner::Impl(impl_) => impl_.generic_params(db), + GenericParamOwner::Trait(trait_) => trait_.generic_params(db), + 
GenericParamOwner::ImplTrait(impl_trait) => impl_trait.generic_params(db), + } + } + + pub fn params_span(self) -> LazyGenericParamListSpan<'db> { + match self { + GenericParamOwner::Func(func) => func.lazy_span().generic_params_moved(), + GenericParamOwner::Struct(struct_) => struct_.lazy_span().generic_params_moved(), + GenericParamOwner::Enum(enum_) => enum_.lazy_span().generic_params_moved(), + GenericParamOwner::TypeAlias(type_alias) => { + type_alias.lazy_span().generic_params_moved() + } + GenericParamOwner::Impl(impl_) => impl_.lazy_span().generic_params_moved(), + GenericParamOwner::Trait(trait_) => trait_.lazy_span().generic_params_moved(), + GenericParamOwner::ImplTrait(impl_trait) => { + impl_trait.lazy_span().generic_params_moved() + } + } + } + + pub fn scope(self) -> ScopeId<'db> { + ItemKind::from(self).scope() + } + + pub fn from_item_opt(item: ItemKind<'db>) -> Option { + match item { + ItemKind::Func(func) => Some(GenericParamOwner::Func(func)), + ItemKind::Struct(struct_) => Some(GenericParamOwner::Struct(struct_)), + ItemKind::Enum(enum_) => Some(GenericParamOwner::Enum(enum_)), + ItemKind::TypeAlias(type_alias) => Some(GenericParamOwner::TypeAlias(type_alias)), + ItemKind::Impl(impl_) => Some(GenericParamOwner::Impl(impl_)), + ItemKind::Trait(trait_) => Some(GenericParamOwner::Trait(trait_)), + ItemKind::ImplTrait(impl_trait) => Some(GenericParamOwner::ImplTrait(impl_trait)), + _ => None, + } + } + + pub fn parent(self, db: &'db dyn HirDb) -> Option { + let ScopeId::Item(item) = self.scope().parent(db)? 
else { + return None; + }; + + match item { + ItemKind::Func(func) => Some(GenericParamOwner::Func(func)), + ItemKind::Struct(struct_) => Some(GenericParamOwner::Struct(struct_)), + ItemKind::Enum(enum_) => Some(GenericParamOwner::Enum(enum_)), + ItemKind::TypeAlias(type_alias) => Some(GenericParamOwner::TypeAlias(type_alias)), + ItemKind::Impl(impl_) => Some(GenericParamOwner::Impl(impl_)), + ItemKind::Trait(trait_) => Some(GenericParamOwner::Trait(trait_)), + ItemKind::ImplTrait(impl_trait) => Some(GenericParamOwner::ImplTrait(impl_trait)), + _ => None, + } + } + + pub fn where_clause_owner(self) -> Option> { + let item = ItemKind::from(self); + WhereClauseOwner::from_item_opt(item) + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord, derive_more::From)] +pub enum WhereClauseOwner<'db> { + Func(Func<'db>), + Struct(Struct<'db>), + Enum(Enum<'db>), + Impl(Impl<'db>), + Trait(Trait<'db>), + ImplTrait(ImplTrait<'db>), +} + +impl<'db> WhereClauseOwner<'db> { + pub fn top_mod(self, db: &'db dyn HirDb) -> TopLevelMod<'db> { + ItemKind::from(self).top_mod(db) + } + + pub fn where_clause(self, db: &'db dyn HirDb) -> WhereClauseId<'db> { + match self { + Self::Func(func) => func.where_clause(db), + Self::Struct(struct_) => struct_.where_clause(db), + Self::Enum(enum_) => enum_.where_clause(db), + Self::Impl(impl_) => impl_.where_clause(db), + Self::Trait(trait_) => trait_.where_clause(db), + Self::ImplTrait(impl_trait) => impl_trait.where_clause(db), + } + } + + pub fn where_clause_span(self) -> LazyWhereClauseSpan<'db> { + match self { + Self::Func(func) => func.lazy_span().where_clause_moved(), + Self::Struct(struct_) => struct_.lazy_span().where_clause_moved(), + Self::Enum(enum_) => enum_.lazy_span().where_clause_moved(), + Self::Impl(impl_) => impl_.lazy_span().where_clause_moved(), + Self::Trait(trait_) => trait_.lazy_span().where_clause_moved(), + Self::ImplTrait(impl_trait) => impl_trait.lazy_span().where_clause_moved(), + } + } + + pub fn 
scope(self) -> ScopeId<'db> { + ItemKind::from(self).scope() + } + + pub fn from_item_opt(item: ItemKind<'db>) -> Option { + match item { + ItemKind::Func(func) => Some(Self::Func(func)), + ItemKind::Struct(struct_) => Some(Self::Struct(struct_)), + ItemKind::Enum(enum_) => Some(Self::Enum(enum_)), + ItemKind::Impl(impl_) => Some(Self::Impl(impl_)), + ItemKind::Trait(trait_) => Some(Self::Trait(trait_)), + ItemKind::ImplTrait(impl_trait) => Some(Self::ImplTrait(impl_trait)), + _ => None, + } + } +} + +#[salsa::tracked] +pub struct TopLevelMod<'db> { + // No #[id] here, because `TopLevelMod` is always unique to a `InputFile` that is an argument + // of `module_scope_graph`. + pub name: IdentId<'db>, + + pub ingot: IngotId<'db>, + pub(crate) file: InputFile, +} +impl<'db> TopLevelMod<'db> { + pub fn lazy_span(self) -> LazyTopModSpan<'db> { + LazyTopModSpan::new(self) + } + + pub fn scope(self) -> ScopeId<'db> { + ScopeId::from_item(self.into()) + } + + pub fn scope_graph(self, db: &'db dyn HirDb) -> &'db ScopeGraph<'db> { + lower::scope_graph_impl(db, self) + } + + /// Returns the child top level modules of `self`. + pub fn child_top_mods( + self, + db: &'db dyn HirDb, + ) -> impl Iterator> + 'db { + let module_tree = self.ingot(db).module_tree(db); + module_tree.children(self) + } + + /// Returns the top level children of this module. + /// If you need all the children, use + /// [`children_nested`](Self::children_nested) instead. + pub fn children_non_nested( + self, + db: &'db dyn HirDb, + ) -> impl Iterator> + 'db { + let s_graph = self.scope_graph(db); + let scope = ScopeId::from_item(self.into()); + s_graph.child_items(scope) + } + + /// Returns all the children of this module, including nested items. 
+ pub fn children_nested(self, db: &'db dyn HirDb) -> impl Iterator> + 'db { + let s_graph = self.scope_graph(db); + s_graph.items_dfs(db) + } + + pub fn parent(self, db: &'db dyn HirDb) -> Option> { + let module_tree = self.ingot(db).module_tree(db); + module_tree.parent(self) + } + + pub fn vis(self, _db: &dyn HirDb) -> Visibility { + // We don't have a way to specify visibility of a top level module. + // Please change here if we introduce it. + Visibility::Public + } + + /// Returns all items in the top level module including ones in nested + /// modules. + pub fn all_items(self, db: &'db dyn HirDb) -> &'db Vec> { + all_items_in_top_mod(db, self) + } + + /// Returns all structs in the top level module including ones in nested + /// modules. + pub fn all_structs(self, db: &'db dyn HirDb) -> &'db Vec> { + all_structs_in_top_mod(db, self) + } + + /// Returns all enums in the top level module including ones in nested + /// modules. + pub fn all_enums(self, db: &'db dyn HirDb) -> &'db Vec> { + all_enums_in_top_mod(db, self) + } + + /// Returns all contracts in the top level module including ones in nested + /// modules. + pub fn all_contracts(self, db: &'db dyn HirDb) -> &'db Vec> { + all_contracts_in_top_mod(db, self) + } + + /// Returns all type aliases in the top level module including ones in + /// nested modules. + pub fn all_type_aliases(self, db: &'db dyn HirDb) -> &'db Vec> { + all_type_aliases_in_top_mod(db, self) + } + + /// Returns all traits in the top level module including ones in nested + /// modules. + pub fn all_traits(self, db: &'db dyn HirDb) -> &'db Vec> { + all_traits_in_top_mod(db, self) + } + + pub fn all_funcs(self, db: &'db dyn HirDb) -> &'db Vec> { + all_funcs_in_top_mod(db, self) + } + + /// Returns all traits in the top level module including ones in nested + /// modules. 
+ pub fn all_impl_traits(self, db: &'db dyn HirDb) -> &'db Vec> { + all_impl_trait_in_top_mod(db, self) + } + + /// Returns all impls in the top level module including ones in nested + /// modules. + pub fn all_impls(self, db: &'db dyn HirDb) -> &'db Vec> { + all_impl_in_top_mod(db, self) + } +} + +#[salsa::tracked(return_ref)] +pub fn all_top_modules_in_ingot<'db>( + db: &'db dyn HirDb, + ingot: IngotId<'db>, +) -> Vec> { + let tree = ingot.module_tree(db); + tree.all_modules().collect() +} + +#[salsa::tracked(return_ref)] +pub fn all_enums_in_ingot<'db>(db: &'db dyn HirDb, ingot: IngotId<'db>) -> Vec> { + ingot + .all_modules(db) + .iter() + .flat_map(|top_mod| top_mod.all_enums(db).iter().copied()) + .collect() +} + +#[salsa::tracked(return_ref)] +pub fn all_impl_traits_in_ingot<'db>( + db: &'db dyn HirDb, + ingot: IngotId<'db>, +) -> Vec> { + ingot + .all_modules(db) + .iter() + .flat_map(|top_mod| top_mod.all_impl_traits(db).iter().copied()) + .collect() +} + +#[salsa::tracked(return_ref)] +pub fn all_impls_in_ingot<'db>(db: &'db dyn HirDb, ingot: IngotId<'db>) -> Vec> { + ingot + .all_modules(db) + .iter() + .flat_map(|top_mod| top_mod.all_impls(db).iter().copied()) + .collect() +} + +#[salsa::tracked(return_ref)] +pub fn all_items_in_top_mod<'db>( + db: &'db dyn HirDb, + top_mod: TopLevelMod<'db>, +) -> Vec> { + top_mod.children_nested(db).collect() +} + +#[salsa::tracked(return_ref)] +pub fn all_structs_in_top_mod<'db>( + db: &'db dyn HirDb, + top_mod: TopLevelMod<'db>, +) -> Vec> { + all_items_in_top_mod(db, top_mod) + .iter() + .filter_map(|item| match item { + ItemKind::Struct(struct_) => Some(*struct_), + _ => None, + }) + .collect() +} + +#[salsa::tracked(return_ref)] +pub fn all_enums_in_top_mod<'db>(db: &'db dyn HirDb, top_mod: TopLevelMod<'db>) -> Vec> { + all_items_in_top_mod(db, top_mod) + .iter() + .filter_map(|item| match item { + ItemKind::Enum(enum_) => Some(*enum_), + _ => None, + }) + .collect() +} + +#[salsa::tracked(return_ref)] +pub fn 
all_type_aliases_in_top_mod<'db>( + db: &'db dyn HirDb, + top_mod: TopLevelMod<'db>, +) -> Vec> { + all_items_in_top_mod(db, top_mod) + .iter() + .filter_map(|item| match item { + ItemKind::TypeAlias(alias) => Some(*alias), + _ => None, + }) + .collect() +} + +#[salsa::tracked(return_ref)] +pub fn all_contracts_in_top_mod<'db>( + db: &'db dyn HirDb, + top_mod: TopLevelMod<'db>, +) -> Vec> { + all_items_in_top_mod(db, top_mod) + .iter() + .filter_map(|item| match item { + ItemKind::Contract(contract) => Some(*contract), + _ => None, + }) + .collect() +} + +#[salsa::tracked(return_ref)] +pub fn all_traits_in_top_mod<'db>( + db: &'db dyn HirDb, + top_mod: TopLevelMod<'db>, +) -> Vec> { + all_items_in_top_mod(db, top_mod) + .iter() + .filter_map(|item| match item { + ItemKind::Trait(trait_) => Some(*trait_), + _ => None, + }) + .collect() +} + +#[salsa::tracked(return_ref)] +pub fn all_funcs_in_top_mod<'db>(db: &'db dyn HirDb, top_mod: TopLevelMod<'db>) -> Vec> { + all_items_in_top_mod(db, top_mod) + .iter() + .filter_map(|item| match item { + ItemKind::Func(func_) => Some(*func_), + _ => None, + }) + .collect() +} + +#[salsa::tracked(return_ref)] +pub fn all_impl_in_top_mod<'db>(db: &'db dyn HirDb, top_mod: TopLevelMod<'db>) -> Vec> { + all_items_in_top_mod(db, top_mod) + .iter() + .filter_map(|item| match item { + ItemKind::Impl(impl_) => Some(*impl_), + _ => None, + }) + .collect() +} + +#[salsa::tracked(return_ref)] +pub fn all_impl_trait_in_top_mod<'db>( + db: &'db dyn HirDb, + top_mod: TopLevelMod<'db>, +) -> Vec> { + all_items_in_top_mod(db, top_mod) + .iter() + .filter_map(|item| match item { + ItemKind::ImplTrait(impl_trait) => Some(*impl_trait), + _ => None, + }) + .collect() +} + +#[salsa::tracked] +pub struct Mod<'db> { + #[id] + id: TrackedItemId<'db>, + + pub name: Partial>, + pub attributes: AttrListId<'db>, + pub vis: Visibility, + + pub top_mod: TopLevelMod<'db>, + + #[return_ref] + pub(crate) origin: HirOrigin, +} +impl<'db> Mod<'db> { + pub fn 
lazy_span(self) -> LazyModSpan<'db> { + LazyModSpan::new(self) + } + + pub fn scope(self) -> ScopeId<'db> { + ScopeId::from_item(self.into()) + } + + pub fn children_non_nested( + self, + db: &'db dyn HirDb, + ) -> impl Iterator> + 'db { + let s_graph = self.top_mod(db).scope_graph(db); + let scope = ScopeId::from_item(self.into()); + s_graph.child_items(scope) + } +} + +#[salsa::tracked] +pub struct Func<'db> { + #[id] + id: TrackedItemId<'db>, + + pub name: Partial>, + pub attributes: AttrListId<'db>, + pub generic_params: GenericParamListId<'db>, + pub where_clause: WhereClauseId<'db>, + pub params: Partial>, + pub ret_ty: Option>, + pub modifier: ItemModifier, + pub body: Option>, + pub is_extern: bool, + pub top_mod: TopLevelMod<'db>, + + #[return_ref] + pub(crate) origin: HirOrigin, +} +impl<'db> Func<'db> { + pub fn lazy_span(self) -> LazyFuncSpan<'db> { + LazyFuncSpan::new(self) + } + + pub fn scope(self) -> ScopeId<'db> { + ScopeId::from_item(self.into()) + } + + pub fn vis(self, db: &dyn HirDb) -> Visibility { + self.modifier(db).to_visibility() + } + + pub fn is_method(self, db: &dyn HirDb) -> bool { + let Some(params) = self.params(db).to_opt() else { + return false; + }; + + let Some(first_param) = params.data(db).first() else { + return false; + }; + + first_param + .name + .to_opt() + .and_then(|name| name.ident()) + .is_some_and(|ident| ident.is_self(db)) + } + + /// Returns `true` if the function is method or associated functions. 
+ pub fn is_associated_func(self, db: &dyn HirDb) -> bool { + let item = match self.scope().parent(db) { + Some(ScopeId::Item(item)) => item, + _ => return false, + }; + + matches!( + item, + ItemKind::Trait(_) | ItemKind::Impl(_) | ItemKind::ImplTrait(_) + ) + } +} + +#[salsa::tracked] +pub struct Struct<'db> { + #[id] + id: TrackedItemId<'db>, + + pub name: Partial>, + pub attributes: AttrListId<'db>, + pub vis: Visibility, + pub generic_params: GenericParamListId<'db>, + pub where_clause: WhereClauseId<'db>, + pub fields: FieldDefListId<'db>, + pub top_mod: TopLevelMod<'db>, + + #[return_ref] + pub(crate) origin: HirOrigin, +} +impl<'db> Struct<'db> { + pub fn lazy_span(self) -> LazyStructSpan<'db> { + LazyStructSpan::new(self) + } + + pub fn scope(self) -> ScopeId<'db> { + ScopeId::from_item(self.into()) + } + + /// Returns the human readable string of the expected struct initializer. + /// ## Example + /// When `S` is a struct defined as below: + /// ```fe + /// struct S { + /// x: u64, + /// y: i32, + /// } + /// ``` + /// Then this method returns ` { x, y }`. 
+ pub fn format_initializer_args(self, db: &dyn HirDb) -> String { + self.fields(db).format_initializer_args(db) + } +} + +#[salsa::tracked] +pub struct Contract<'db> { + #[id] + id: TrackedItemId<'db>, + + pub name: Partial>, + pub attributes: AttrListId<'db>, + pub vis: Visibility, + pub fields: FieldDefListId<'db>, + pub top_mod: TopLevelMod<'db>, + + #[return_ref] + pub(crate) origin: HirOrigin, +} +impl<'db> Contract<'db> { + pub fn lazy_span(self) -> LazyContractSpan<'db> { + LazyContractSpan::new(self) + } + + pub fn scope(self) -> ScopeId<'db> { + ScopeId::from_item(self.into()) + } +} + +#[salsa::tracked] +pub struct Enum<'db> { + #[id] + id: TrackedItemId<'db>, + + pub name: Partial>, + pub attributes: AttrListId<'db>, + pub vis: Visibility, + pub generic_params: GenericParamListId<'db>, + pub where_clause: WhereClauseId<'db>, + pub variants: VariantDefListId<'db>, + pub top_mod: TopLevelMod<'db>, + + #[return_ref] + pub(crate) origin: HirOrigin, +} +impl<'db> Enum<'db> { + pub fn lazy_span(self) -> LazyEnumSpan<'db> { + LazyEnumSpan::new(self) + } + + pub fn scope(self) -> ScopeId<'db> { + ScopeId::from_item(self.into()) + } +} + +#[salsa::tracked] +pub struct TypeAlias<'db> { + #[id] + id: TrackedItemId<'db>, + + pub name: Partial>, + pub attributes: AttrListId<'db>, + pub vis: Visibility, + pub generic_params: GenericParamListId<'db>, + pub ty: Partial>, + pub top_mod: TopLevelMod<'db>, + + #[return_ref] + pub(crate) origin: HirOrigin, +} +impl<'db> TypeAlias<'db> { + pub fn lazy_span(self) -> LazyTypeAliasSpan<'db> { + LazyTypeAliasSpan::new(self) + } + + pub fn scope(self) -> ScopeId<'db> { + ScopeId::from_item(self.into()) + } +} + +#[salsa::tracked] +pub struct Impl<'db> { + #[id] + id: TrackedItemId<'db>, + + pub ty: super::Partial>, + pub attributes: AttrListId<'db>, + pub generic_params: GenericParamListId<'db>, + pub where_clause: WhereClauseId<'db>, + pub top_mod: TopLevelMod<'db>, + + #[return_ref] + pub(crate) origin: HirOrigin, +} 
+impl<'db> Impl<'db> { + pub fn lazy_span(self) -> LazyImplSpan<'db> { + LazyImplSpan::new(self) + } + + pub fn children_non_nested( + self, + db: &'db dyn HirDb, + ) -> impl Iterator> + 'db { + let s_graph = self.top_mod(db).scope_graph(db); + let scope = ScopeId::from_item(self.into()); + s_graph.child_items(scope) + } + + pub fn funcs(self, db: &'db dyn HirDb) -> impl Iterator> + 'db { + let s_graph = self.top_mod(db).scope_graph(db); + let scope = ScopeId::from_item(self.into()); + s_graph.child_items(scope).filter_map(|item| match item { + ItemKind::Func(func) => Some(func), + _ => None, + }) + } + + pub fn scope(self) -> ScopeId<'db> { + ScopeId::from_item(self.into()) + } +} + +#[salsa::tracked] +pub struct Trait<'db> { + #[id] + id: TrackedItemId<'db>, + + pub name: Partial>, + + pub attributes: AttrListId<'db>, + pub vis: Visibility, + pub generic_params: GenericParamListId<'db>, + #[return_ref] + pub super_traits: Vec>, + pub where_clause: WhereClauseId<'db>, + pub top_mod: TopLevelMod<'db>, + + #[return_ref] + pub(crate) origin: HirOrigin, +} +impl<'db> Trait<'db> { + pub fn lazy_span(self) -> LazyTraitSpan<'db> { + LazyTraitSpan::new(self) + } + + pub fn children_non_nested( + self, + db: &'db dyn HirDb, + ) -> impl Iterator> + 'db { + let s_graph = self.top_mod(db).scope_graph(db); + let scope = ScopeId::from_item(self.into()); + s_graph.child_items(scope) + } + + pub fn scope(self) -> ScopeId<'db> { + ScopeId::from_item(self.into()) + } + + pub fn methods(self, db: &'db dyn HirDb) -> impl Iterator> + 'db { + let s_graph = self.top_mod(db).scope_graph(db); + let scope = ScopeId::from_item(self.into()); + s_graph.child_items(scope).filter_map(|item| match item { + ItemKind::Func(func) => Some(func), + _ => None, + }) + } +} + +#[salsa::tracked] +pub struct ImplTrait<'db> { + #[id] + id: TrackedItemId<'db>, + + pub trait_ref: Partial>, + pub ty: Partial>, + pub attributes: AttrListId<'db>, + pub generic_params: GenericParamListId<'db>, + pub 
where_clause: WhereClauseId<'db>, + pub top_mod: TopLevelMod<'db>, + + #[return_ref] + pub(crate) origin: HirOrigin, +} +impl<'db> ImplTrait<'db> { + pub fn lazy_span(self) -> LazyImplTraitSpan<'db> { + LazyImplTraitSpan::new(self) + } + + pub fn children_non_nested( + self, + db: &'db dyn HirDb, + ) -> impl Iterator> + 'db { + let s_graph = self.top_mod(db).scope_graph(db); + let scope = ScopeId::from_item(self.into()); + s_graph.child_items(scope) + } + + pub fn scope(self) -> ScopeId<'db> { + ScopeId::from_item(self.into()) + } + + pub fn methods(self, db: &'db dyn HirDb) -> impl Iterator> + 'db { + self.children_non_nested(db).filter_map(|item| match item { + ItemKind::Func(func) => Some(func), + _ => None, + }) + } +} + +#[salsa::tracked] +pub struct Const<'db> { + #[id] + id: TrackedItemId<'db>, + + pub name: Partial>, + pub attributes: AttrListId<'db>, + pub ty: Partial>, + pub body: Partial>, + pub vis: Visibility, + pub top_mod: TopLevelMod<'db>, + + #[return_ref] + pub(crate) origin: HirOrigin, +} +impl<'db> Const<'db> { + pub fn lazy_span(self) -> LazyConstSpan<'db> { + LazyConstSpan::new(self) + } + + pub fn scope(self) -> ScopeId<'db> { + ScopeId::from_item(self.into()) + } +} + +#[salsa::tracked] +pub struct Use<'db> { + #[id] + id: TrackedItemId<'db>, + + pub path: Partial>, + pub alias: Option>>, + pub vis: Visibility, + pub top_mod: TopLevelMod<'db>, + + #[return_ref] + pub(crate) origin: HirOrigin, +} +impl<'db> Use<'db> { + pub fn lazy_span(self) -> LazyUseSpan<'db> { + LazyUseSpan::new(self) + } + + pub fn scope(self) -> ScopeId<'db> { + ScopeId::from_item(self.into()) + } + + /// Returns imported name if it is present and not a glob. 
+ pub fn imported_name(&self, db: &'db dyn HirDb) -> Option> { + if let Some(alias) = self.alias(db) { + return match alias { + Partial::Present(UseAlias::Ident(name)) => Some(name), + _ => None, + }; + } + + self.path(db).to_opt()?.last_ident(db) + } + + /// Returns the span of imported name span if the use is not a glob. + /// The returned span is + /// 1. If the use has an alias, the span of the alias. + /// 2. If the use has no alias, the span of the last segment of the path. + pub fn imported_name_span(&self, db: &'db dyn HirDb) -> Option> { + if self.is_glob(db) { + return None; + } + + if self.alias(db).is_some() { + Some(self.lazy_span().alias().into()) + } else { + let segment_len = self.path(db).to_opt()?.segment_len(db); + Some(self.lazy_span().path().segment(segment_len - 1).into()) + } + } + + pub fn glob_span(&self, db: &dyn HirDb) -> Option> { + if !self.is_glob(db) { + return None; + } + + let segment_len = self.path(db).to_opt()?.segment_len(db); + Some(self.lazy_span().path().segment(segment_len - 1).into()) + } + + pub fn is_glob(&self, db: &dyn HirDb) -> bool { + self.path(db).to_opt().is_some_and(|path| path.is_glob(db)) + } + + pub fn is_unnamed(&self, db: &dyn HirDb) -> bool { + if let Some(alias) = self.alias(db) { + !matches!(alias, Partial::Present(UseAlias::Ident(_))) + } else { + false + } + } + + pub(crate) fn pretty_path(&self, db: &dyn HirDb) -> String { + self.path(db) + .to_opt() + .map_or_else(|| "{invalid}".to_string(), |path| path.pretty_path(db)) + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum ItemModifier { + Pub, + Unsafe, + PubAndUnsafe, + None, +} + +impl ItemModifier { + pub fn to_visibility(self) -> Visibility { + match self { + ItemModifier::Pub | ItemModifier::PubAndUnsafe => Visibility::Public, + ItemModifier::Unsafe | ItemModifier::None => Visibility::Private, + } + } +} + +#[salsa::interned] +pub struct FieldDefListId<'db> { + #[return_ref] + pub data: Vec>, +} + +impl<'db> FieldDefListId<'db> { 
+ pub fn get_field(self, db: &'db dyn HirDb, name: IdentId<'db>) -> Option<&'db FieldDef<'db>> { + self.data(db) + .iter() + .find(|field| field.name.to_opt() == Some(name)) + } + + pub fn field_idx(self, db: &dyn HirDb, name: IdentId<'db>) -> Option { + self.data(db) + .iter() + .position(|field| field.name.to_opt() == Some(name)) + } + + fn format_initializer_args(self, db: &dyn HirDb) -> String { + let args = self + .data(db) + .iter() + .map(|field| { + field + .name + .to_opt() + .map_or_else(|| "_".to_string(), |name| name.data(db).to_string()) + }) + .collect::>() + .join(", "); + + format!(" {{ {} }}", args) + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct FieldDef<'db> { + pub attributes: AttrListId<'db>, + pub name: Partial>, + pub ty: Partial>, + pub vis: Visibility, +} + +#[salsa::interned] +pub struct VariantDefListId<'db> { + #[return_ref] + pub data: Vec>, +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct VariantDef<'db> { + pub attributes: AttrListId<'db>, + pub name: Partial>, + pub kind: VariantKind<'db>, +} + +impl VariantDef<'_> { + /// Returns the human readable string of the expected variant initializer. + /// ## Example + /// When enum `E` is an variant defined as below: + /// ```fe + /// enum E { + /// V(u64, i32), + /// S { x: u64, y: i32 }, + /// } + /// ``` + /// + /// Then the method returns `(_, _)` for the first variant and ` { x, y }` + /// for the second variant. 
+ pub fn format_initializer_args(&self, db: &dyn HirDb) -> String { + match self.kind { + VariantKind::Unit => "".to_string(), + VariantKind::Tuple(tup) => { + let args = (0..tup.len(db)).map(|_| "_").collect::>().join(", "); + format!("({})", args) + } + + VariantKind::Record(fields) => fields.format_initializer_args(db), + } + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum VariantKind<'db> { + Unit, + Tuple(TupleTypeId<'db>), + Record(FieldDefListId<'db>), +} + +#[salsa::interned] +pub struct ImplItemListId<'db> { + #[return_ref] + pub items: Vec>, +} + +pub type TraitItemListId<'db> = ImplItemListId<'db>; +pub type ImplTraitItemListId<'db> = ImplItemListId<'db>; +pub type ExternItemListId<'db> = ImplItemListId<'db>; + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum Visibility { + Public, + Private, +} + +impl Visibility { + pub fn is_pub(self) -> bool { + self == Self::Public + } +} + +#[salsa::interned] +pub struct TrackedItemId<'db> { + variant: TrackedItemVariant<'db>, +} + +impl<'db> TrackedItemId<'db> { + pub(crate) fn join(self, db: &'db dyn HirDb, variant: TrackedItemVariant<'db>) -> Self { + let old = self.variant(db); + let joined = old.join(variant); + Self::new(db, joined) + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum TrackedItemVariant<'db> { + TopLevelMod(IdentId<'db>), + Mod(Partial>), + Func(Partial>), + Struct(Partial>), + Contract(Partial>), + Enum(Partial>), + TypeAlias(Partial>), + Impl(Partial>), + Trait(Partial>), + ImplTrait(Partial>, Partial>), + Const(Partial>), + Use(Partial>), + FuncBody, + NamelessBody, + Joined(Box, Box), +} +impl TrackedItemVariant<'_> { + pub(crate) fn join(self, rhs: Self) -> Self { + Self::Joined(self.into(), rhs.into()) + } +} diff --git a/crates/hir/src/hir_def/mod.rs b/crates/hir/src/hir_def/mod.rs new file mode 100644 index 0000000000..572075ef33 --- /dev/null +++ b/crates/hir/src/hir_def/mod.rs @@ -0,0 +1,194 @@ +// TODO: Remove this when 
https://github.com/salsa-rs/salsa/pull/513 is fixed. +#![allow(clippy::unused_unit)] +pub mod attr; +pub mod body; +pub mod expr; +pub mod ident; +pub mod item; +pub mod params; +pub mod pat; +pub mod path; +pub mod prim_ty; +pub mod scope_graph; +pub mod stmt; +pub mod types; +pub mod use_tree; + +mod scope_graph_viz; + +pub(crate) mod module_tree; + +pub use attr::*; +pub use body::*; +use common::{input::IngotKind, InputIngot}; +pub use expr::*; +pub use ident::*; +pub use item::*; +pub use module_tree::*; +use num_bigint::BigUint; +pub use params::*; +pub use pat::*; +pub use path::*; +use salsa::Update; +pub use stmt::*; +pub use types::*; +pub use use_tree::*; + +use crate::{external_ingots_impl, HirDb}; + +#[salsa::tracked] +pub struct IngotId<'db> { + inner: InputIngot, +} +impl<'db> IngotId<'db> { + pub fn module_tree(self, db: &dyn HirDb) -> &ModuleTree { + module_tree_impl(db, self.inner(db)) + } + + pub fn all_modules(self, db: &'db dyn HirDb) -> &'db [TopLevelMod<'db>] { + all_top_modules_in_ingot(db, self) + } + + pub fn root_mod(self, db: &dyn HirDb) -> TopLevelMod { + self.module_tree(db).root_data().top_mod + } + + pub fn external_ingots(self, db: &dyn HirDb) -> &[(IdentId, IngotId)] { + external_ingots_impl(db, self.inner(db)) + } + + pub fn kind(self, db: &dyn HirDb) -> IngotKind { + self.inner(db).kind(db.as_input_db()) + } + + pub fn all_enums(self, db: &'db dyn HirDb) -> &'db [Enum<'db>] { + all_enums_in_ingot(db, self) + } + + pub fn all_impl_traits(self, db: &'db dyn HirDb) -> &'db [ImplTrait<'db>] { + all_impl_traits_in_ingot(db, self) + } + + pub fn all_impls(self, db: &'db dyn HirDb) -> &'db Vec> { + all_impls_in_ingot(db, self) + } + + pub fn is_std(self, db: &'db dyn HirDb) -> bool { + matches!(self.kind(db), IngotKind::Std) + } +} + +#[salsa::interned] +pub struct IntegerId<'db> { + #[return_ref] + pub data: BigUint, +} + +impl<'db> IntegerId<'db> { + pub fn from_usize(db: &'db dyn HirDb, value: usize) -> Self { + let data = 
BigUint::from(value); + Self::new(db, data) + } +} + +#[salsa::interned] +pub struct StringId<'db> { + /// The text of the string literal, without the quotes. + #[return_ref] + pub data: String, +} + +impl<'db> StringId<'db> { + pub fn from_str(db: &'db dyn HirDb, value: &str) -> Self { + let data = value.to_string(); + Self::new(db, data) + } + + pub fn len_bytes(&self, db: &dyn HirDb) -> usize { + self.data(db).len() + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, derive_more::From, salsa::Update)] +pub enum LitKind<'db> { + Int(IntegerId<'db>), + String(StringId<'db>), + Bool(bool), +} + +/// `Partial` is a type that explicitly indicates the possibility that an HIR +/// node cannot be generated due to syntax errors in the source file. +/// +/// If a node is `Partial::Absent`, it means that the corresponding AST either +/// does not exist or is erroneous. When a `Partial::Absent` is generated, the +/// relevant error is always generated by the parser, so in Analysis phases, it +/// can often be ignored. +/// +/// This type is clearly distinguished from `Option`. The +/// `Option` type is used to hold syntactically valid optional nodes, while +/// `Partial` means that a syntactically required element may be missing. 
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum Partial { + Present(T), + Absent, +} +unsafe impl Update for Partial +where + T: Update, +{ + unsafe fn maybe_update(old_ptr: *mut Self, new_val: Self) -> bool { + use Partial::*; + + let old_val = unsafe { &mut *old_ptr }; + match (old_val, new_val) { + (Present(old), Present(new)) => T::maybe_update(old, new), + (Absent, Absent) => false, + (old_value, new_value) => { + *old_value = new_value; + true + } + } + } +} + +impl Partial { + pub fn unwrap(&self) -> &T { + match self { + Self::Present(value) => value, + Self::Absent => panic!("unwrap called on absent value"), + } + } + + pub fn to_opt(self) -> Option { + match self { + Self::Present(value) => Some(value), + Self::Absent => None, + } + } + + pub fn is_present(&self) -> bool { + matches!(self, Self::Present(_)) + } +} + +impl Default for Partial { + fn default() -> Self { + Self::Absent + } +} + +impl From> for Partial { + fn from(value: Option) -> Self { + if let Some(value) = value { + Self::Present(value) + } else { + Self::Absent + } + } +} + +impl From> for Option { + fn from(value: Partial) -> Option { + value.to_opt() + } +} diff --git a/crates/hir/src/hir_def/module_tree.rs b/crates/hir/src/hir_def/module_tree.rs new file mode 100644 index 0000000000..b57702220d --- /dev/null +++ b/crates/hir/src/hir_def/module_tree.rs @@ -0,0 +1,313 @@ +use camino::Utf8Path; +use common::{indexmap::IndexMap, InputFile, InputIngot}; +use cranelift_entity::{entity_impl, PrimaryMap}; + +use super::{IdentId, IngotId, TopLevelMod}; +use crate::{lower::map_file_to_mod_impl, HirDb}; + +/// This tree represents the structure of an ingot. +/// Internal modules are not included in this tree, instead, they are included +/// in [ScopeGraph](crate::hir_def::scope_graph::ScopeGraph). +/// +/// This is used in later name resolution phase. +/// The tree is file contents agnostic, i.e., **only** depends on project +/// structure and crate dependency. 
+/// +/// +/// Example: +/// ```text +/// ingot/ +/// ├─ main.fe +/// ├─ mod1.fe +/// ├─ mod1/ +/// │ ├─ foo.fe +/// ├─ mod2.fe +/// ├─ mod2 +/// │ ├─ bar.fe +/// ├─ mod3 +/// │ ├─ baz.fe +/// ``` +/// +/// The resulting tree would be like below. +/// +/// ```text +/// +------+ +/// *---- | main |----* +/// | +------+ | +------+ +/// | | | baz | +/// | | +------+ +/// v v +/// +------+ +------+ +/// | mod2 | | mod1 | +/// +------+ +------+ +/// | | +/// | | +/// v v +/// +------+ +------+ +/// | bar | | foo | +/// +------+ +------+ +/// ``` +/// +/// **NOTE:** `mod3` is not included in the main tree because it doesn't have a corresponding file. +/// As a result, `baz` is represented as a "floating" node. +/// In this case, the tree is actually a forest. But we don't need to care about it. +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ModuleTree<'db> { + pub(crate) root: ModuleTreeNodeId, + pub(crate) module_tree: PrimaryMap>, + pub(crate) mod_map: IndexMap, ModuleTreeNodeId>, + + pub ingot: IngotId<'db>, +} + +impl ModuleTree<'_> { + /// Returns the tree node data of the given id. + pub fn node_data(&self, id: ModuleTreeNodeId) -> &ModuleTreeNode { + &self.module_tree[id] + } + + /// Returns the tree node id of the given top level module. + pub fn tree_node(&self, top_mod: TopLevelMod) -> ModuleTreeNodeId { + self.mod_map[&top_mod] + } + + /// Returns the tree node data of the given top level module. + pub fn tree_node_data(&self, top_mod: TopLevelMod) -> &ModuleTreeNode { + &self.module_tree[self.tree_node(top_mod)] + } + + /// Returns the root of the tree, which corresponds to the ingot root file. + pub fn root(&self) -> ModuleTreeNodeId { + self.root + } + + pub fn root_data(&self) -> &ModuleTreeNode { + self.node_data(self.root) + } + + /// Returns an iterator of all top level modules in this ingot. 
+ pub fn all_modules(&self) -> impl Iterator + '_ { + self.mod_map.keys().copied() + } + + pub fn parent(&self, top_mod: TopLevelMod) -> Option { + let node = self.tree_node_data(top_mod); + node.parent.map(|id| self.module_tree[id].top_mod) + } + + pub fn children(&self, top_mod: TopLevelMod) -> impl Iterator + '_ { + self.tree_node_data(top_mod) + .children + .iter() + .map(move |&id| { + let node = &self.module_tree[id]; + node.top_mod + }) + } +} + +/// Returns a module tree of the given ingot. The resulted tree only includes +/// top level modules. This function only depends on an ingot structure and +/// external ingot dependency, and not depends on file contents. +#[salsa::tracked(return_ref)] +#[allow(elided_named_lifetimes)] +pub(crate) fn module_tree_impl(db: &dyn HirDb, ingot: InputIngot) -> ModuleTree<'_> { + ModuleTreeBuilder::new(db, ingot).build() +} + +/// A top level module that is one-to-one mapped to a file. +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ModuleTreeNode<'db> { + pub top_mod: TopLevelMod<'db>, + /// A parent of the top level module. + /// This is `None` if + /// 1. the module is a root module or + /// 2. the module is a "floating" module. + pub parent: Option, + /// A list of child top level module. + pub children: Vec, +} + +impl<'db> ModuleTreeNode<'db> { + fn new(top_mod: TopLevelMod<'db>) -> Self { + Self { + top_mod, + parent: None, + children: Vec::new(), + } + } + pub fn name(&self, db: &'db dyn HirDb) -> IdentId<'db> { + self.top_mod.name(db) + } +} + +/// An opaque identifier for a module tree node. 
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct ModuleTreeNodeId(u32); +entity_impl!(ModuleTreeNodeId); + +struct ModuleTreeBuilder<'db> { + db: &'db dyn HirDb, + input_ingot: InputIngot, + ingot: IngotId<'db>, + module_tree: PrimaryMap>, + mod_map: IndexMap, ModuleTreeNodeId>, + path_map: IndexMap<&'db Utf8Path, ModuleTreeNodeId>, +} + +impl<'db> ModuleTreeBuilder<'db> { + fn new(db: &'db dyn HirDb, ingot: InputIngot) -> Self { + Self { + db, + input_ingot: ingot, + ingot: IngotId::new(db, ingot), + module_tree: PrimaryMap::default(), + mod_map: IndexMap::default(), + path_map: IndexMap::default(), + } + } + + fn build(mut self) -> ModuleTree<'db> { + self.set_modules(); + self.build_tree(); + + let root_mod = map_file_to_mod_impl( + self.db, + self.ingot, + self.input_ingot.root_file(self.db.as_input_db()), + ); + let root = self.mod_map[&root_mod]; + ModuleTree { + root, + module_tree: self.module_tree, + mod_map: self.mod_map, + ingot: self.ingot, + } + } + + fn set_modules(&mut self) { + for &file in self.input_ingot.files(self.db.as_input_db()) { + let top_mod = map_file_to_mod_impl(self.db, self.ingot, file); + + let module_id = self.module_tree.push(ModuleTreeNode::new(top_mod)); + self.path_map + .insert(file.path(self.db.as_input_db()), module_id); + self.mod_map.insert(top_mod, module_id); + } + } + + fn build_tree(&mut self) { + let root = self.input_ingot.root_file(self.db.as_input_db()); + + for &child in self.input_ingot.files(self.db.as_input_db()) { + // Ignore the root file because it has no parent. + if child == root { + continue; + } + + let root_path = root.path(self.db.as_input_db()); + let root_mod = map_file_to_mod_impl(self.db, self.ingot, root); + let child_path = child.path(self.db.as_input_db()); + let child_mod = map_file_to_mod_impl(self.db, self.ingot, child); + + // If the file is in the same directory as the root file, the file is a direct + // child of the root. 
+ if child_path.parent() == root_path.parent() { + let root_mod = self.mod_map[&root_mod]; + let cur_mod = self.mod_map[&child_mod]; + self.add_branch(root_mod, cur_mod); + continue; + } + + assert!(child_path + .parent() + .unwrap() + .starts_with(root_path.parent().unwrap())); + + if let Some(parent_mod) = self.parent_module(child) { + let cur_mod = self.mod_map[&child_mod]; + self.add_branch(parent_mod, cur_mod); + } + } + } + + fn parent_module(&self, file: InputFile) -> Option { + let file_path = file.path(self.db.as_input_db()); + let file_dir = file_path.parent()?; + let parent_dir = file_dir.parent()?; + + let parent_mod_stem = file_dir.into_iter().next_back()?; + let parent_mod_path = parent_dir.join(parent_mod_stem).with_extension("fe"); + self.path_map.get(parent_mod_path.as_path()).copied() + } + + fn add_branch(&mut self, parent: ModuleTreeNodeId, child: ModuleTreeNodeId) { + self.module_tree[parent].children.push(child); + + self.module_tree[child].parent = Some(parent); + } +} + +#[cfg(test)] +mod tests { + use common::input::{IngotKind, Version}; + + use super::*; + use crate::{lower, test_db::TestDb}; + + #[test] + fn module_tree() { + let mut db = TestDb::default(); + + let local_ingot = InputIngot::new( + &db, + "/foo/fargo", + IngotKind::Local, + Version::new(0, 0, 1), + Default::default(), + ); + let local_root = InputFile::new(&db, "src/lib.fe".into(), "".into()); + let mod1 = InputFile::new(&db, "src/mod1.fe".into(), "".into()); + let mod2 = InputFile::new(&db, "src/mod2.fe".into(), "".into()); + let foo = InputFile::new(&db, "src/mod1/foo.fe".into(), "".into()); + let bar = InputFile::new(&db, "src/mod2/bar.fe".into(), "".into()); + let baz = InputFile::new(&db, "src/mod2/baz.fe".into(), "".into()); + let floating = InputFile::new(&db, "src/mod3/floating.fe".into(), "".into()); + local_ingot.set_root_file(&mut db, local_root); + local_ingot.set_files( + &mut db, + [local_root, mod1, mod2, foo, bar, baz, floating] + .into_iter() + .collect(), 
+ ); + + let local_root_mod = lower::map_file_to_mod(&db, local_ingot, local_root); + let mod1_mod = lower::map_file_to_mod(&db, local_ingot, mod1); + let mod2_mod = lower::map_file_to_mod(&db, local_ingot, mod2); + let foo_mod = lower::map_file_to_mod(&db, local_ingot, foo); + let bar_mod = lower::map_file_to_mod(&db, local_ingot, bar); + let baz_mod = lower::map_file_to_mod(&db, local_ingot, baz); + + let local_tree = lower::module_tree(&db, local_ingot); + let root_node = local_tree.root_data(); + assert_eq!(root_node.top_mod, local_root_mod); + assert_eq!(root_node.children.len(), 2); + + for &child in &root_node.children { + if child == local_tree.tree_node(mod1_mod) { + let child = local_tree.node_data(child); + assert_eq!(child.parent, Some(local_tree.root())); + assert_eq!(child.children.len(), 1); + assert_eq!(child.children[0], local_tree.tree_node(foo_mod)); + } else if child == local_tree.tree_node(mod2_mod) { + let child = local_tree.node_data(child); + assert_eq!(child.parent, Some(local_tree.root())); + assert_eq!(child.children.len(), 2); + assert_eq!(child.children[0], local_tree.tree_node(bar_mod)); + assert_eq!(child.children[1], local_tree.tree_node(baz_mod)); + } else { + panic!("unexpected child") + } + } + } +} diff --git a/crates/hir/src/hir_def/params.rs b/crates/hir/src/hir_def/params.rs new file mode 100644 index 0000000000..6659824cb5 --- /dev/null +++ b/crates/hir/src/hir_def/params.rs @@ -0,0 +1,188 @@ +use super::{Body, IdentId, Partial, PathId}; +use crate::{hir_def::TypeId, HirDb}; + +#[salsa::interned] +pub struct GenericArgListId<'db> { + #[return_ref] + pub data: Vec>, + pub is_given: bool, +} + +impl<'db> GenericArgListId<'db> { + pub fn none(db: &'db dyn HirDb) -> Self { + Self::new(db, vec![], false) + } + + pub fn len(self, db: &dyn HirDb) -> usize { + self.data(db).len() + } + + pub fn is_empty(self, db: &dyn HirDb) -> bool { + self.data(db).is_empty() + } +} + +#[salsa::interned] +pub struct GenericParamListId<'db> { + 
#[return_ref] + pub data: Vec>, +} + +impl GenericParamListId<'_> { + pub fn len(&self, db: &dyn HirDb) -> usize { + self.data(db).len() + } +} + +#[salsa::interned] +pub struct FuncParamListId<'db> { + #[return_ref] + pub data: Vec>, +} + +#[salsa::interned] +pub struct WhereClauseId<'db> { + #[return_ref] + pub data: Vec>, +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash, derive_more::From)] +pub enum GenericParam<'db> { + Type(TypeGenericParam<'db>), + Const(ConstGenericParam<'db>), +} + +impl<'db> GenericParam<'db> { + pub fn name(&self) -> Partial> { + match self { + Self::Type(ty) => ty.name, + Self::Const(c) => c.name, + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct TypeGenericParam<'db> { + pub name: Partial>, + pub bounds: Vec>, +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct ConstGenericParam<'db> { + pub name: Partial>, + pub ty: Partial>, +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash, derive_more::From)] +pub enum GenericArg<'db> { + Type(TypeGenericArg<'db>), + Const(ConstGenericArg<'db>), +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct TypeGenericArg<'db> { + pub ty: Partial>, +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct ConstGenericArg<'db> { + pub body: Partial>, +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct FuncParam<'db> { + pub is_mut: bool, + pub label: Option>, + pub name: Partial>, + pub ty: Partial>, + + /// `true` if this parameter is `self` and the type is not specified. + /// `ty` should have `Self` type without any type arguments. + pub self_ty_fallback: bool, +} + +impl<'db> FuncParam<'db> { + pub fn label_eagerly(&self) -> Option> { + match self.label { + Some(FuncParamName::Ident(ident)) => return Some(ident), + Some(FuncParamName::Underscore) => return None, + _ => {} + } + + if let FuncParamName::Ident(ident) = self.name.to_opt()? { + Some(ident) + } else { + None + } + } + + pub fn name(&self) -> Option> { + match self.name.to_opt()? 
{ + FuncParamName::Ident(name) => Some(name), + _ => None, + } + } + + pub fn is_self_param(&self, db: &dyn HirDb) -> bool { + self.name.to_opt().is_some_and(|name| name.is_self(db)) + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct WherePredicate<'db> { + pub ty: Partial>, + pub bounds: Vec>, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum FuncParamName<'db> { + Ident(IdentId<'db>), + Underscore, +} + +impl<'db> FuncParamName<'db> { + pub fn ident(&self) -> Option> { + match self { + FuncParamName::Ident(name) => Some(*name), + _ => None, + } + } + + pub fn is_self(&self, db: &dyn HirDb) -> bool { + self.ident().is_some_and(|id| id.is_self(db)) + } + + pub fn pretty_print(&self, db: &dyn HirDb) -> String { + match self { + FuncParamName::Ident(name) => name.data(db).to_string(), + FuncParamName::Underscore => "_".to_string(), + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum TypeBound<'db> { + Trait(TraitRefId<'db>), + Kind(Partial), +} + +#[salsa::interned] +pub struct TraitRefId<'db> { + /// The path to the trait. 
+ pub path: Partial>, +} + +impl<'db> TraitRefId<'db> { + /// Returns the generic arg list of the last segment of the trait ref path + pub fn generic_args(self, db: &'db dyn HirDb) -> Option> { + self.path(db).to_opt().map(|path| path.generic_args(db)) + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum KindBound { + /// `*` + Mono, + /// `* -> *` + Abs(Partial>, Partial>), +} diff --git a/crates/hir/src/hir_def/pat.rs b/crates/hir/src/hir_def/pat.rs new file mode 100644 index 0000000000..bd121a911f --- /dev/null +++ b/crates/hir/src/hir_def/pat.rs @@ -0,0 +1,54 @@ +use cranelift_entity::entity_impl; + +use super::{Body, IdentId, LitKind, Partial, PathId}; +use crate::{span::pat::LazyPatSpan, HirDb}; + +#[derive(Debug, Clone, PartialEq, Eq, Hash, salsa::Update)] +pub enum Pat<'db> { + WildCard, + Rest, + Lit(Partial>), + Tuple(Vec), + /// The second bool is `true` if the pat has `mut` in front of it. + Path(Partial>, bool), + PathTuple(Partial>, Vec), + Record(Partial>, Vec>), + Or(PatId, PatId), +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, salsa::Update)] +pub struct PatId(u32); +entity_impl!(PatId); + +impl PatId { + pub fn lazy_span(self, body: Body) -> LazyPatSpan { + LazyPatSpan::new(body, self) + } + + pub fn data<'db>(self, db: &'db dyn HirDb, body: Body<'db>) -> &'db Partial> { + &body.pats(db)[self] + } + + pub fn is_rest(self, db: &dyn HirDb, body: Body) -> bool { + matches!(self.data(db, body), Partial::Present(Pat::Rest)) + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash, salsa::Update)] +pub struct RecordPatField<'db> { + pub label: Partial>, + pub pat: PatId, +} + +impl<'db> RecordPatField<'db> { + pub fn label(&self, db: &'db dyn HirDb, body: Body<'db>) -> Option> { + if let Partial::Present(label) = self.label { + return Some(label); + } + + match self.pat.data(db, body) { + Partial::Present(Pat::Path(Partial::Present(path), _)) => path.as_ident(db), + _ => None, + } + } +} diff --git a/crates/hir/src/hir_def/path.rs 
b/crates/hir/src/hir_def/path.rs new file mode 100644 index 0000000000..2cfd46090b --- /dev/null +++ b/crates/hir/src/hir_def/path.rs @@ -0,0 +1,101 @@ +use super::{GenericArgListId, IdentId}; +use crate::{hir_def::Partial, HirDb}; + +#[salsa::interned] +pub struct PathId<'db> { + pub ident: Partial>, + pub generic_args: GenericArgListId<'db>, + pub parent: Option>, +} + +impl<'db> PathId<'db> { + pub fn from_ident(db: &'db dyn HirDb, ident: IdentId<'db>) -> Self { + Self::new( + db, + Partial::Present(ident), + GenericArgListId::none(db), + None, + ) + } + + pub fn self_ty(db: &'db dyn HirDb, args: GenericArgListId<'db>) -> Self { + Self::new(db, Partial::Present(IdentId::make_self_ty(db)), args, None) + } + + pub fn len(self, db: &dyn HirDb) -> usize { + if let Some(parent) = self.parent(db) { + parent.len(db) + 1 + } else { + 1 + } + } + + pub fn segment_index(self, db: &dyn HirDb) -> usize { + self.len(db) - 1 + } + + pub fn segment(self, db: &'db dyn HirDb, idx: usize) -> Option> { + if idx == self.segment_index(db) { + Some(self) + } else { + self.parent(db).and_then(|p| p.segment(db, idx)) + } + } + + pub fn root_ident(self, db: &'db dyn HirDb) -> Option> { + if let Some(parent) = self.parent(db) { + parent.root_ident(db) + } else { + self.ident(db).to_opt() + } + } + + pub fn as_ident(self, db: &'db dyn HirDb) -> Option> { + if self.parent(db).is_none() && self.generic_args(db).is_empty(db) { + self.ident(db).to_opt() + } else { + None + } + } + + pub fn is_bare_ident(self, db: &dyn HirDb) -> bool { + self.parent(db).is_none() + && self.ident(db).is_present() + && self.generic_args(db).is_empty(db) + } + + pub fn is_self_ty(self, db: &dyn HirDb) -> bool { + if self.parent(db).is_none() && self.ident(db).is_present() { + self.ident(db).unwrap().is_self_ty(db) + } else { + false + } + } + + pub fn push( + self, + db: &'db dyn HirDb, + ident: Partial>, + generic_args: GenericArgListId<'db>, + ) -> Self { + Self::new(db, ident, generic_args, Some(self)) + } + + 
pub fn push_ident(self, db: &'db dyn HirDb, ident: IdentId<'db>) -> Self { + Self::new( + db, + Partial::Present(ident), + GenericArgListId::none(db), + Some(self), + ) + } + + pub fn pretty_print(self, db: &dyn HirDb) -> String { + let ident = self.ident(db).to_opt().map_or("_", |id| id.data(db)); + if let Some(parent) = self.parent(db) { + parent.pretty_print(db) + "::" + ident + } else { + ident.to_string() + } + } +} diff --git a/crates/hir/src/hir_def/prim_ty.rs b/crates/hir/src/hir_def/prim_ty.rs new file mode 100644 index 0000000000..080e7e70b3 --- /dev/null +++ b/crates/hir/src/hir_def/prim_ty.rs @@ -0,0 +1,92 @@ +use super::IdentId; +use crate::HirDb; + +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] +pub enum PrimTy { + Bool, + Int(IntTy), + Uint(UintTy), + String, +} + +impl PrimTy { + pub fn name(self, db: &dyn HirDb) -> IdentId { + match self { + PrimTy::Bool => IdentId::make_bool(db), + PrimTy::Int(ty) => ty.name(db), + PrimTy::Uint(ty) => ty.name(db), + PrimTy::String => IdentId::new(db, "String".to_string()), + } + } + + pub fn all_types() -> &'static [PrimTy] { + &[ + PrimTy::Bool, + PrimTy::Int(IntTy::I8), + PrimTy::Int(IntTy::I16), + PrimTy::Int(IntTy::I32), + PrimTy::Int(IntTy::I64), + PrimTy::Int(IntTy::I128), + PrimTy::Int(IntTy::I256), + PrimTy::Int(IntTy::Isize), + PrimTy::Uint(UintTy::U8), + PrimTy::Uint(UintTy::U16), + PrimTy::Uint(UintTy::U32), + PrimTy::Uint(UintTy::U64), + PrimTy::Uint(UintTy::U128), + PrimTy::Uint(UintTy::U256), + PrimTy::Uint(UintTy::Usize), + PrimTy::String, + ] + } +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] +pub enum IntTy { + I8, + I16, + I32, + I64, + I128, + I256, + Isize, +} + +impl IntTy { + pub fn name(self, db: &dyn HirDb) -> IdentId { + match self { + IntTy::I8 => IdentId::make_i8(db), + IntTy::I16 => IdentId::make_i16(db), + IntTy::I32 => IdentId::make_i32(db), + IntTy::I64 => IdentId::make_i64(db), + IntTy::I128 => IdentId::make_i128(db), + IntTy::I256 => IdentId::make_i256(db), + 
IntTy::Isize => IdentId::make_isize(db), + } + } +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] +pub enum UintTy { + U8, + U16, + U32, + U64, + U128, + U256, + Usize, +} + +impl UintTy { + pub fn name(self, db: &dyn HirDb) -> IdentId { + match self { + UintTy::U8 => IdentId::make_u8(db), + UintTy::U16 => IdentId::make_u16(db), + UintTy::U32 => IdentId::make_u32(db), + UintTy::U64 => IdentId::make_u64(db), + UintTy::U128 => IdentId::make_u128(db), + UintTy::U256 => IdentId::make_u256(db), + UintTy::Usize => IdentId::make_usize(db), + } + } +} diff --git a/crates/hir/src/hir_def/scope_graph.rs b/crates/hir/src/hir_def/scope_graph.rs new file mode 100644 index 0000000000..484f369be9 --- /dev/null +++ b/crates/hir/src/hir_def/scope_graph.rs @@ -0,0 +1,765 @@ +use std::io; + +use common::indexmap::IndexSet; +use rustc_hash::{FxHashMap, FxHashSet}; + +use super::{ + scope_graph_viz::ScopeGraphFormatter, AttrListId, Body, Const, Contract, Enum, ExprId, + FieldDef, Func, FuncParam, FuncParamName, GenericParam, IdentId, Impl, ImplTrait, IngotId, + ItemKind, Mod, TopLevelMod, Trait, TypeAlias, Use, VariantDef, VariantKind, Visibility, +}; +use crate::{ + hir_def::{BodyKind, GenericParamOwner}, + span::DynLazySpan, + HirDb, +}; + +/// Represents a scope relation graph in a top-level module. +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ScopeGraph<'db> { + /// The top-level module containing the scope graph. + pub top_mod: TopLevelMod<'db>, + /// The scopes in the graph. + pub scopes: FxHashMap, Scope<'db>>, + /// The all unresolved uses in the graph, this is used in name resolution. + pub unresolved_uses: FxHashSet>, +} + +impl<'db> ScopeGraph<'db> { + /// Represents all item scopes in a top-level module in depth-first order. 
+ pub fn items_dfs<'a>(&'a self, db: &'a dyn HirDb) -> impl Iterator> + 'a { + ScopeGraphItemIterDfs { + db, + graph: self, + visited: Default::default(), + stack: vec![self.top_mod.scope()], + } + } + + /// Returns the direct child items of the given `scope`. + pub fn child_items(&self, scope: ScopeId<'db>) -> impl Iterator> + '_ { + self.children(scope).filter_map(|child| child.to_item()) + } + + /// Returns the direct child scopes of the given `scope` + pub fn children(&self, scope: ScopeId<'db>) -> impl Iterator> + '_ { + self.edges(scope).iter().filter_map(|edge| match edge.kind { + EdgeKind::Lex(_) + | EdgeKind::Super(_) + | EdgeKind::Ingot(_) + | EdgeKind::SelfTy(_) + | EdgeKind::Self_(_) => None, + + _ => Some(edge.dest), + }) + } + + /// Returns the all edges outgoing from the given `scope`. + pub fn edges(&self, scope: ScopeId<'db>) -> &IndexSet> { + &self.scopes[&scope].edges + } + + /// Write a scope graph as a dot file format to given `w`. + pub fn write_as_dot(&self, db: &dyn HirDb, w: &mut impl io::Write) -> io::Result<()> { + ScopeGraphFormatter::new(db, self).render(w) + } + + pub fn scope_data(&self, scope: &ScopeId<'db>) -> &Scope { + &self.scopes[scope] + } +} + +/// An reference to a `[ScopeData]` in a `ScopeGraph`. +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, salsa::Update)] +pub enum ScopeId<'db> { + /// An item scope. + Item(ItemKind<'db>), + + /// A generic parameter scope. + GenericParam(ItemKind<'db>, usize), + + /// A function parameter scope. + FuncParam(ItemKind<'db>, usize), + + /// A field scope. + Field(FieldParent<'db>, usize), + + /// A variant scope. + Variant(ItemKind<'db>, usize), + + /// A block scope. + Block(Body<'db>, ExprId), +} +impl<'db> ScopeId<'db> { + /// Returns the top level module containing this scope. 
+ pub fn top_mod(&self, db: &'db dyn HirDb) -> TopLevelMod<'db> { + match self { + ScopeId::Item(item) => item.top_mod(db), + ScopeId::GenericParam(item, _) => item.top_mod(db), + ScopeId::FuncParam(item, _) => item.top_mod(db), + ScopeId::Field(FieldParent::Item(item), _) => item.top_mod(db), + ScopeId::Field(FieldParent::Variant(item, _), _) => item.top_mod(db), + ScopeId::Variant(item, _) => item.top_mod(db), + ScopeId::Block(body, _) => body.top_mod(db), + } + } + + /// Convert an item to a scope id. + pub fn from_item(item: ItemKind<'db>) -> Self { + Self::Item(item) + } + + /// Convert a scope id to an item if the scope is an item. + pub fn to_item(self) -> Option> { + match self { + ScopeId::Item(item) => Some(item), + _ => None, + } + } + + /// Returns the nearest item that contains this scope. + /// If the scope is item itself, returns the item. + pub fn item(self) -> ItemKind<'db> { + match self { + ScopeId::Item(item) => item, + ScopeId::GenericParam(item, _) => item, + ScopeId::FuncParam(item, _) => item, + ScopeId::Field(FieldParent::Item(item), _) => item, + ScopeId::Field(FieldParent::Variant(item, _), _) => item, + ScopeId::Variant(item, _) => item, + ScopeId::Block(body, _) => body.into(), + } + } + + /// Resolves the `ScopeId` to `T`. + /// Returns `None` if the resolution is not defined for this scope. + pub fn resolve_to(self, db: &'db dyn HirDb) -> Option + where + T: FromScope<'db>, + { + T::from_scope(self, db) + } + + /// Returns attributes being applied to the scope. + pub fn attrs(self, db: &'db dyn HirDb) -> Option> { + match self { + ScopeId::Item(item) => item.attrs(db), + ScopeId::Field(..) => { + let def: &FieldDef = self.resolve_to(db).unwrap(); + Some(def.attributes) + } + ScopeId::Variant(..) => { + let def: &VariantDef = self.resolve_to(db).unwrap(); + Some(def.attributes) + } + _ => None, + } + } + + /// Returns the scope graph containing this scope. 
+ pub fn scope_graph(self, db: &'db dyn HirDb) -> &'db ScopeGraph<'db> { + self.top_mod(db).scope_graph(db) + } + + pub fn edges(self, db: &'db dyn HirDb) -> &'db IndexSet> { + self.scope_graph(db).edges(self) + } + + /// Returns `true` if `scope` is reachable from `self` by following only + /// lexical edges. + pub fn is_lex_child(self, db: &dyn HirDb, scope: &ScopeId) -> bool { + if self.top_mod(db) != scope.top_mod(db) { + return false; + } + + match self.lex_parent(db) { + Some(lex_parent) => { + if &lex_parent == scope { + return true; + } + lex_parent.is_lex_child(db, scope) + } + None => false, + } + } + + /// Returns `true` if `self` is a transitive reflexive child of `of`. + pub fn is_transitive_child_of(self, db: &dyn HirDb, of: ScopeId) -> bool { + let mut current = Some(self); + + while let Some(scope) = current { + if scope == of { + return true; + } + current = scope.parent(db); + } + + false + } + + /// Return the `IngotId` containing the scope. + pub fn ingot(self, db: &'db dyn HirDb) -> IngotId<'db> { + self.top_mod(db).ingot(db) + } + + /// Returns the `Scope` data for this scope. + pub fn data(self, db: &'db dyn HirDb) -> &'db Scope<'db> { + self.top_mod(db).scope_graph(db).scope_data(&self) + } + + /// Returns the parent scope of this scope. + /// The parent scope is + /// 1. the lexical parent if it exists + /// 2. the parent module if 1. does not exist + pub fn parent(self, db: &'db dyn HirDb) -> Option { + let mut super_dest = None; + for edge in self.edges(db) { + if let EdgeKind::Lex(_) = edge.kind { + return Some(edge.dest); + } + if let EdgeKind::Super(_) = edge.kind { + super_dest = Some(edge.dest); + } + } + super_dest + } + + /// Returns the lexical parent scope of this scope. + pub fn lex_parent(self, db: &'db dyn HirDb) -> Option { + self.data(db) + .edges + .iter() + .find(|e| matches!(e.kind, EdgeKind::Lex(_))) + .map(|e| e.dest) + } + + /// Returns the parent module of this scope. 
+ pub fn parent_module(self, db: &'db dyn HirDb) -> Option { + let parent_item = self.parent_item(db)?; + match parent_item { + ItemKind::Mod(_) | ItemKind::TopMod(_) => Some(Self::Item(parent_item)), + _ => { + let parent_id = Self::from_item(parent_item); + parent_id.parent_module(db) + } + } + } + + /// Returns the direct child items of the given `scope`. + pub fn child_items(self, db: &'db dyn HirDb) -> impl Iterator> + 'db { + self.scope_graph(db).child_items(self) + } + + /// Returns the direct child scopes of the given `scope` + pub fn children(self, db: &'db dyn HirDb) -> impl Iterator> + 'db { + self.scope_graph(db).children(self) + } + + /// Returns `true` if the scope is a type. + pub fn is_type(self) -> bool { + match self { + ScopeId::Item(item) => item.is_type(), + ScopeId::GenericParam(..) => true, + _ => false, + } + } + + /// Returns the item that contains this scope. + pub fn parent_item(self, db: &'db dyn HirDb) -> Option> { + let mut parent = self.parent(db)?; + loop { + match parent { + ScopeId::Item(item) => return Some(item), + _ => { + parent = parent.parent(db)?; + } + } + } + } + + pub fn name(self, db: &'db dyn HirDb) -> Option> { + match self.data(db).id { + ScopeId::Item(item) => item.name(db), + + ScopeId::Variant(..) => self.resolve_to::<&VariantDef>(db).unwrap().name.to_opt(), + + ScopeId::Field(..) => self.resolve_to::<&FieldDef>(db).unwrap().name.to_opt(), + + ScopeId::FuncParam(..) => { + let param: &FuncParam = self.resolve_to(db).unwrap(); + if let Some(FuncParamName::Ident(ident)) = param.label { + Some(ident) + } else { + param.name() + } + } + + ScopeId::GenericParam(..) => { + let param: &GenericParam = self.resolve_to(db).unwrap(); + param.name().to_opt() + } + + ScopeId::Block(..) 
=> None, + } + } + + pub fn name_span(self, db: &'db dyn HirDb) -> Option> { + match self.data(db).id { + ScopeId::Item(item) => item.name_span(), + + ScopeId::Variant(parent, idx) => { + let enum_: Enum = parent.try_into().unwrap(); + Some(enum_.lazy_span().variants().variant(idx).name().into()) + } + + ScopeId::Field(FieldParent::Item(parent), idx) => match parent { + ItemKind::Struct(s) => Some(s.lazy_span().fields().field(idx).name().into()), + ItemKind::Contract(c) => Some(c.lazy_span().fields().field(idx).name().into()), + _ => unreachable!(), + }, + ScopeId::Field(FieldParent::Variant(parent, vidx), fidx) => { + let enum_: Enum = parent.try_into().unwrap(); + Some( + enum_ + .lazy_span() + .variants() + .variant(vidx) + .fields() + .field(fidx) + .name() + .into(), + ) + } + + ScopeId::FuncParam(parent, idx) => { + let func: Func = parent.try_into().unwrap(); + let param = &func.params(db).to_opt()?.data(db)[idx]; + let param_span = func.lazy_span().params().param(idx); + if let Some(FuncParamName::Ident(_)) = param.label { + Some(param_span.label().into()) + } else { + Some(param_span.name().into()) + } + } + + ScopeId::GenericParam(parent, idx) => { + let parent = GenericParamOwner::from_item_opt(parent).unwrap(); + + Some(parent.params_span().param(idx).into()) + } + + ScopeId::Block(..) 
=> None, + } + } + + pub fn kind_name(&self) -> &'static str { + match self { + ScopeId::Item(item) => item.kind_name(), + ScopeId::GenericParam(_, _) => "type", + ScopeId::FuncParam(_, _) => "value", + ScopeId::Field(_, _) => "field", + ScopeId::Variant(_, _) => "value", + ScopeId::Block(_, _) => "block", + } + } + + pub fn pretty_path(self, db: &dyn HirDb) -> Option { + let name = match self { + ScopeId::Block(body, expr) => format!("{{block{}}}", body.iter_block(db)[&expr]), + ScopeId::Item(ItemKind::Body(body)) => match body.body_kind(db) { + BodyKind::FuncBody => "{fn_body}".to_string(), + BodyKind::Anonymous => "{anonymous_body}".to_string(), + }, + _ => self.name(db)?.data(db).clone(), + }; + + if let Some(parent) = self.parent(db) { + let parent_path = parent.pretty_path(db)?; + Some(format!("{}::{}", parent_path, name)) + } else { + Some(name) + } + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, salsa::Update)] +pub enum FieldParent<'db> { + Item(ItemKind<'db>), + Variant(ItemKind<'db>, usize), +} + +impl<'db> FieldParent<'db> { + pub fn scope(self) -> ScopeId<'db> { + match self { + FieldParent::Item(item) => ScopeId::Item(item), + FieldParent::Variant(variant, idx) => ScopeId::Variant(variant, idx), + } + } +} + +struct ScopeGraphItemIterDfs<'db, 'a> { + db: &'db dyn HirDb, + graph: &'a ScopeGraph<'db>, + visited: FxHashSet>, + stack: Vec>, +} + +impl<'db> std::iter::Iterator for ScopeGraphItemIterDfs<'db, '_> { + type Item = ItemKind<'db>; + + fn next(&mut self) -> Option> { + while let Some(scope) = self.stack.pop() { + self.visited.insert(scope); + for edge in self.graph.edges(scope).iter().rev() { + let dest = edge.dest; + let top_mod = dest.top_mod(self.db); + if top_mod != self.graph.top_mod || self.visited.contains(&dest) { + continue; + } + + match dest { + ScopeId::Item(_) | ScopeId::Block(..) 
=> { + self.stack.push(dest); + } + + _ => {} + } + } + + if let Some(item) = scope.to_item() { + return Some(item); + } + } + + None + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct Scope<'db> { + pub id: ScopeId<'db>, + pub edges: IndexSet>, + pub vis: Visibility, +} + +impl<'db> Scope<'db> { + pub fn new(kind: ScopeId<'db>, vis: Visibility) -> Self { + Self { + id: kind, + edges: Default::default(), + vis, + } + } +} + +/// An edge of the scope graph. +/// The edge contains the destination of the edge and the kind of the edge. +/// [`EdgeKind`] is contains supplementary information about the destination +/// scope, which is used for name resolution. +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct ScopeEdge<'db> { + pub dest: ScopeId<'db>, + pub kind: EdgeKind<'db>, +} + +/// A specific edge property definitions. +/// +/// NOTE: The internal types of each variants contains very small amount of +/// information, the reason why we need to prepare each internal types is to +/// allow us to implement traits to each edges directly. +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, derive_more::From)] +pub enum EdgeKind<'db> { + /// An edge to a lexical parent scope. + Lex(LexEdge), + /// An edge to a module. + Mod(ModEdge<'db>), + /// An edge to a type. + Type(TypeEdge<'db>), + /// An edge to a trait. + Trait(TraitEdge<'db>), + /// An edge from a scope to a generic parameter. + GenericParam(GenericParamEdge<'db>), + /// An edge to a value. The value is either a function or a + /// constant. + Value(ValueEdge<'db>), + /// An edge to a field definition scope. + Field(FieldEdge<'db>), + /// An edge to a enum variant definition scope. + Variant(VariantEdge<'db>), + /// An edge to a module that is referenced by a `super` keyword. + Super(SuperEdge), + /// An edge to an ingot that is referenced by a `ingot` keyword. + Ingot(IngotEdge), + /// An edge to a scope that is referenced by a `self` keyword. 
+ Self_(SelfEdge), + /// An edge to a scope that is referenced by a `Self` keyword. + SelfTy(SelfTyEdge), + /// An edge to an anonymous scope, e.g., `impl` or function body. + Anon(AnonEdge), +} + +impl<'db> EdgeKind<'db> { + pub fn lex() -> Self { + EdgeKind::Lex(LexEdge()) + } + + pub fn mod_(ident: IdentId<'db>) -> Self { + EdgeKind::Mod(ident.into()) + } + + pub fn type_(ident: IdentId<'db>) -> Self { + EdgeKind::Type(ident.into()) + } + + pub fn trait_(ident: IdentId<'db>) -> Self { + EdgeKind::Trait(ident.into()) + } + + pub fn generic_param(ident: IdentId<'db>) -> Self { + EdgeKind::GenericParam(ident.into()) + } + + pub fn value(ident: IdentId<'db>) -> Self { + EdgeKind::Value(ident.into()) + } + + pub fn field(ident: IdentId<'db>) -> Self { + EdgeKind::Field(ident.into()) + } + + pub fn variant(ident: IdentId<'db>) -> Self { + EdgeKind::Variant(ident.into()) + } + + pub fn super_() -> Self { + EdgeKind::Super(SuperEdge()) + } + + pub fn ingot() -> Self { + EdgeKind::Ingot(IngotEdge()) + } + + pub fn self_ty() -> Self { + EdgeKind::SelfTy(SelfTyEdge()) + } + + pub fn self_() -> Self { + EdgeKind::Self_(SelfEdge()) + } + + pub fn anon() -> Self { + EdgeKind::Anon(AnonEdge()) + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct LexEdge(); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, derive_more::From)] +pub struct ModEdge<'db>(pub IdentId<'db>); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, derive_more::From)] +pub struct TypeEdge<'db>(pub IdentId<'db>); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, derive_more::From)] +pub struct TraitEdge<'db>(pub IdentId<'db>); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, derive_more::From)] +pub struct ValueEdge<'db>(pub IdentId<'db>); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, derive_more::From)] +pub struct GenericParamEdge<'db>(pub IdentId<'db>); + 
+#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, derive_more::From)] +pub struct FieldEdge<'db>(pub IdentId<'db>); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, derive_more::From)] +pub struct VariantEdge<'db>(pub IdentId<'db>); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct SuperEdge(); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct IngotEdge(); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, derive_more::From)] +pub struct SelfTyEdge(); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, derive_more::From)] +pub struct SelfEdge(); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct AnonEdge(); + +pub trait FromScope<'db>: Sized { + fn from_scope(scope: ScopeId<'db>, db: &'db dyn HirDb) -> Option; +} + +impl<'db> FromScope<'db> for ItemKind<'db> { + fn from_scope(scope: ScopeId<'db>, _db: &'db dyn HirDb) -> Option { + match scope { + ScopeId::Item(item) => Some(item), + _ => None, + } + } +} + +macro_rules! item_from_scope { + ($($item_ty: ty,)*) => { + $( + impl<'db> FromScope<'db> for $item_ty { + fn from_scope(scope: ScopeId<'db>, db: &'db dyn HirDb) -> Option { + scope.resolve_to::(db).and_then(|item| item.try_into().ok()) + } + } + )* + }; +} + +item_from_scope! 
{ + TopLevelMod<'db>, + Mod<'db>, + Func<'db>, + Contract<'db>, + Enum<'db>, + TypeAlias<'db>, + Impl<'db>, + Trait<'db>, + ImplTrait<'db>, + Const<'db>, + Use<'db>, + Body<'db>, +} + +impl<'db> FromScope<'db> for &'db FieldDef<'db> { + fn from_scope(scope: ScopeId<'db>, db: &'db dyn HirDb) -> Option { + let ScopeId::Field(parent, idx) = scope else { + return None; + }; + + match parent { + FieldParent::Item(item) => match item { + ItemKind::Struct(s) => Some(&s.fields(db).data(db)[idx]), + ItemKind::Contract(c) => Some(&c.fields(db).data(db)[idx]), + _ => unreachable!(), + }, + + FieldParent::Variant(parent, vidx) => { + let enum_: Enum = parent.try_into().unwrap(); + match enum_.variants(db).data(db)[vidx].kind { + VariantKind::Record(fields) => Some(&fields.data(db)[idx]), + _ => unreachable!(), + } + } + } + } +} + +impl<'db> FromScope<'db> for &'db VariantDef<'db> { + fn from_scope(scope: ScopeId<'db>, db: &'db dyn HirDb) -> Option { + let ScopeId::Variant(parent, idx) = scope else { + return None; + }; + let enum_: Enum = parent.try_into().unwrap(); + + Some(&enum_.variants(db).data(db)[idx]) + } +} + +impl<'db> FromScope<'db> for &'db FuncParam<'db> { + fn from_scope(scope: ScopeId<'db>, db: &'db dyn HirDb) -> Option { + let ScopeId::FuncParam(parent, idx) = scope else { + return None; + }; + + let func: Func = parent.try_into().unwrap(); + func.params(db).to_opt().map(|params| ¶ms.data(db)[idx]) + } +} + +impl<'db> FromScope<'db> for &'db GenericParam<'db> { + fn from_scope(scope: ScopeId<'db>, db: &'db dyn HirDb) -> Option { + let ScopeId::GenericParam(parent, idx) = scope else { + return None; + }; + + let parent = GenericParamOwner::from_item_opt(parent).unwrap(); + Some(&parent.params(db).data(db)[idx]) + } +} + +#[cfg(test)] +mod tests { + + use crate::{ + hir_def::{ + scope_graph::{FieldParent, ScopeId}, + ItemKind, + }, + test_db::TestDb, + }; + + #[test] + fn item_tree() { + let mut db = TestDb::default(); + + let text = r#" + mod foo { + fn bar() 
{} + extern { + fn baz() + } + } + + enum MyEnum {} + + mod baz { + struct MyS {} + } + "#; + + let (ingot, file) = db.standalone_file(text); + let scope_graph = db.parse_source(ingot, file); + assert_eq!(scope_graph.items_dfs(&db).count(), 8); + + for (i, item) in scope_graph.items_dfs(&db).enumerate() { + match i { + 0 => assert!(matches!(item, ItemKind::TopMod(_))), + 1 => assert!(matches!(item, ItemKind::Mod(_))), + 2 => assert!(matches!(item, ItemKind::Func(_))), + 3 => assert!(matches!(item, ItemKind::Body(_))), + 4 => assert!(matches!(item, ItemKind::Func(_))), + 5 => assert!(matches!(item, ItemKind::Enum(_))), + 6 => assert!(matches!(item, ItemKind::Mod(_))), + 7 => assert!(matches!(item, ItemKind::Struct(_))), + _ => unreachable!(), + } + } + } + + #[test] + fn enum_record_fields() { + let mut db = TestDb::default(); + + let text = r#" + enum Foo { + X { a: i8, b: i8 }, + } + "#; + + let (ingot, file) = db.standalone_file(text); + let scope_graph = db.parse_source(ingot, file); + let root = scope_graph.top_mod.scope(); + let enum_ = scope_graph.children(root).next().unwrap(); + assert!(matches!(enum_.item(), ItemKind::Enum(_))); + + let variant = scope_graph.children(enum_).next().unwrap(); + assert!(matches!(variant, ScopeId::Variant(_, _))); + + let field = scope_graph.children(variant).next().unwrap(); + assert!(matches!( + field, + ScopeId::Field(FieldParent::Variant(_, _), _) + )); + } +} diff --git a/crates/hir/src/hir_def/scope_graph_viz.rs b/crates/hir/src/hir_def/scope_graph_viz.rs new file mode 100644 index 0000000000..77ffc06d55 --- /dev/null +++ b/crates/hir/src/hir_def/scope_graph_viz.rs @@ -0,0 +1,224 @@ +use std::{ + collections::{hash_map::Entry, VecDeque}, + io, +}; + +use cranelift_entity::{entity_impl, PrimaryMap}; +use dot2::label::Text; +use rustc_hash::{FxHashMap, FxHashSet}; + +use super::scope_graph::{EdgeKind, ScopeGraph, ScopeId}; +use crate::{hir_def::ItemKind, HirDb}; + +type NodeId = usize; + +pub(super) struct 
ScopeGraphFormatter<'db> { + db: &'db dyn HirDb, + edges: PrimaryMap>, + nodes: Vec>, +} + +impl<'db> ScopeGraphFormatter<'db> { + fn build_formatter(&mut self, s_graph: &ScopeGraph<'db>) { + let mut visited = FxHashSet::default(); + let mut nodes_map = FxHashMap::default(); + + let mut worklist = VecDeque::new(); + let root = s_graph.top_mod.scope(); + worklist.push_back(root); + while let Some(scope) = worklist.pop_front() { + if !visited.insert(scope) { + continue; + } + let source = self.node_id(scope, &mut nodes_map); + + for edge in s_graph.edges(scope) { + let target = self.node_id(edge.dest, &mut nodes_map); + + self.edges.push(Edge { + kind: edge.kind, + target, + source, + }); + + if !visited.contains(&edge.dest) { + worklist.push_back(edge.dest); + } + } + } + } + + fn node_id( + &mut self, + scope: ScopeId<'db>, + nodes_map: &mut FxHashMap, usize>, + ) -> NodeId { + match nodes_map.entry(scope) { + Entry::Occupied(entry) => *entry.get(), + Entry::Vacant(entry) => { + let id = self.nodes.len(); + self.nodes.push(scope); + entry.insert(id); + id + } + } + } +} + +impl<'db> ScopeGraphFormatter<'db> { + pub(super) fn new(db: &'db dyn HirDb, s_graph: &ScopeGraph<'db>) -> Self { + let nodes = Vec::new(); + let edges = PrimaryMap::new(); + let mut formatter = Self { db, edges, nodes }; + + formatter.build_formatter(s_graph); + formatter + } + + pub(super) fn render(&self, w: &mut impl io::Write) -> io::Result<()> { + dot2::render(self, w).map_err(|err| match err { + dot2::Error::Io(err) => err, + dot2::Error::InvalidId => unreachable!(), + }) + } +} + +impl<'db, 'a> dot2::Labeller<'a> for ScopeGraphFormatter<'db> { + type Node = NodeId; + type Edge = EdgeId; + type Subgraph = (); + + fn graph_id(&'a self) -> dot2::Result> { + dot2::Id::new("example1") + } + + fn node_id(&'a self, n: &Self::Node) -> dot2::Result> { + dot2::Id::new(format!("N{n}")) + } + + fn node_label<'b>(&'a self, node: &Self::Node) -> dot2::Result> { + let label = match &self.nodes[*node] { 
+ ScopeId::Item(item) => { + let item_name = match item { + ItemKind::Use(use_) => use_.pretty_path(self.db), + _ => item + .name(self.db) + .map_or(" ", |name| name.data(self.db)) + .to_string(), + }; + + format!( + r#" {kind_name} {item_name} "#, + kw_color = "#7B2D80", + kind_name = item.kind_name(), + item_color = "#1B458D", + ) + } + + ScopeId::Block(body, expr) => { + let idx = body.iter_block(self.db)[expr]; + format!( + r#" {{block{block_number}}} "#, + block_color = "#383A42", + block_number = idx + ) + } + + scope => { + format!( + r#" {name} "#, + param_color = "#3A793A", + name = scope + .name(self.db) + .map_or(String::new(), |name| name.data(self.db).to_string()), + ) + } + }; + Ok(Text::HtmlStr(label.into())) + } + + fn edge_label(&'a self, e: &Self::Edge) -> Text<'a> { + let edge = &self.edges[*e]; + + let label = match edge.kind { + EdgeKind::Lex(_) => "lex", + EdgeKind::Mod(_) => "mod", + EdgeKind::Type(_) => "type", + EdgeKind::Trait(_) => "trait", + EdgeKind::GenericParam(_) => "generic_param", + EdgeKind::Value(_) => "value", + EdgeKind::Field(_) => "field", + EdgeKind::Variant(_) => "variant", + EdgeKind::Super(_) => "super", + EdgeKind::Ingot(_) => "ingot", + EdgeKind::Self_(_) => "self", + EdgeKind::SelfTy(_) => "self_ty", + EdgeKind::Anon(_) => "anon", + }; + let color = edge.color(); + let colored_label = format!(r#" {} "#, color, label); + Text::HtmlStr(colored_label.into()) + } + + fn edge_color(&'a self, e: &Self::Edge) -> Option> { + let edge = &self.edges[*e]; + Some(Text::LabelStr(edge.color().into())) + } + + fn node_shape(&self, _n: &Self::Node) -> Option> { + Some(Text::LabelStr("box".into())) + } +} + +impl<'a> dot2::GraphWalk<'a> for ScopeGraphFormatter<'_> { + type Node = NodeId; + type Edge = EdgeId; + type Subgraph = (); + + fn nodes(&'a self) -> dot2::Nodes<'a, Self::Node> { + (0..self.nodes.len()).collect() + } + + fn edges(&'a self) -> dot2::Edges<'a, Self::Edge> { + self.edges.keys().collect() + } + + fn source(&self, e: 
&Self::Edge) -> Self::Node { + self.edges[*e].source + } + + fn target(&self, e: &Self::Edge) -> Self::Node { + self.edges[*e].target + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub(super) struct EdgeId(u32); +entity_impl!(EdgeId); + +#[derive(Debug)] +struct Edge<'db> { + kind: EdgeKind<'db>, + target: NodeId, + source: NodeId, +} + +impl Edge<'_> { + fn color(&self) -> &'static str { + match self.kind { + EdgeKind::Lex(_) => "#F94144", + EdgeKind::Mod(_) => "#F3722C", + EdgeKind::Type(_) => "#F8961E", + EdgeKind::Trait(_) => "#F9C74F", + EdgeKind::GenericParam(_) => "#90BE6D", + EdgeKind::Value(_) => "#43AA8B", + EdgeKind::Field(_) => "#577590", + EdgeKind::Variant(_) => "#6D597A", + EdgeKind::Super(_) => "#B56576", + EdgeKind::Ingot(_) => "#E56B6F", + EdgeKind::Self_(_) => "#FFBA49", + EdgeKind::SelfTy(_) => "#3A6351", + EdgeKind::Anon(_) => "#788475", + } + } +} diff --git a/crates/hir/src/hir_def/stmt.rs b/crates/hir/src/hir_def/stmt.rs new file mode 100644 index 0000000000..e68ed20543 --- /dev/null +++ b/crates/hir/src/hir_def/stmt.rs @@ -0,0 +1,41 @@ +use cranelift_entity::entity_impl; + +use super::{Body, ExprId, Partial, PatId, TypeId}; +use crate::{span::stmt::LazyStmtSpan, HirDb}; + +#[derive(Debug, Clone, PartialEq, Eq, Hash, salsa::Update)] +pub enum Stmt<'db> { + /// The `let` statement. The first `PatId` is the pattern for binding, the + /// second `Option` is the type annotation, and the third + /// `Option` is the expression for initialization. + Let(PatId, Option>, Option), + /// The first `PatId` is the pattern for binding which can be used in the + /// for-loop body. + /// + /// The second `ExprId` is the iterable expression. + /// + /// The third `ExprId` is the for-loop body. + For(PatId, ExprId, ExprId), + + /// The first `ExprId` is the condition of the while-loop. + /// The second `ExprId` is the body of the while-loop. 
+ While(ExprId, ExprId), + Continue, + Break, + Return(Option), + Expr(ExprId), +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, salsa::Update)] +pub struct StmtId(u32); +entity_impl!(StmtId); + +impl StmtId { + pub fn lazy_span(self, body: Body) -> LazyStmtSpan { + LazyStmtSpan::new(body, self) + } + + pub fn data<'db>(self, db: &'db dyn HirDb, body: Body<'db>) -> &'db Partial> { + &body.stmts(db)[self] + } +} diff --git a/crates/hir/src/hir_def/types.rs b/crates/hir/src/hir_def/types.rs new file mode 100644 index 0000000000..65fade0c1c --- /dev/null +++ b/crates/hir/src/hir_def/types.rs @@ -0,0 +1,48 @@ +use super::{Body, GenericArgListId, Partial, PathId}; +use crate::HirDb; + +#[salsa::interned] +pub struct TypeId<'db> { + #[return_ref] + pub data: TypeKind<'db>, +} + +impl<'db> TypeId<'db> { + pub fn is_self_ty(self, db: &dyn HirDb) -> bool { + matches!(self.data(db), TypeKind::SelfType(_)) + } + + pub fn fallback_self_ty(db: &'db dyn HirDb) -> Self { + Self::new( + db, + TypeKind::SelfType(GenericArgListId::new(db, Vec::new(), false)), + ) + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum TypeKind<'db> { + Ptr(Partial>), + Path(Partial>), + SelfType(GenericArgListId<'db>), + Tuple(TupleTypeId<'db>), + /// The first `TypeId` is the element type, the second `Body` is the length. 
+ Array(Partial>, Partial>), + Never, +} + +#[salsa::interned] +pub struct TupleTypeId<'db> { + #[return_ref] + pub data: Vec>>, +} + +impl<'db> TupleTypeId<'db> { + pub fn to_ty(self, db: &'db dyn HirDb) -> TypeId<'db> { + TypeId::new(db, TypeKind::Tuple(self)) + } + + pub fn len(self, db: &dyn HirDb) -> usize { + self.data(db).len() + } +} diff --git a/crates/hir/src/hir_def/use_tree.rs b/crates/hir/src/hir_def/use_tree.rs new file mode 100644 index 0000000000..865ba84410 --- /dev/null +++ b/crates/hir/src/hir_def/use_tree.rs @@ -0,0 +1,77 @@ +use super::IdentId; +use crate::{hir_def::Partial, HirDb}; + +#[salsa::interned] +pub struct UsePathId<'db> { + #[return_ref] + pub data: Vec>>, +} + +impl<'db> UsePathId<'db> { + pub fn is_glob(&self, db: &dyn HirDb) -> bool { + self.data(db) + .last() + .and_then(|seg| seg.to_opt()) + .is_some_and(|seg| seg.is_glob()) + } + + pub fn last_ident(&self, db: &'db dyn HirDb) -> Option> { + self.data(db) + .last() + .and_then(|seg| seg.to_opt()) + .and_then(|seg| seg.ident()) + } + + pub fn segment_len(&self, db: &dyn HirDb) -> usize { + self.data(db).len() + } + + pub fn pretty_path(&self, db: &dyn HirDb) -> String { + let mut path = String::new(); + + for (i, seg) in self.data(db).iter().enumerate() { + if i != 0 { + path.push_str("::"); + } + match seg { + Partial::Absent => path.push_str("{invalid}"), + Partial::Present(seg) => match seg { + UsePathSegment::Ident(ident) => path.push_str(ident.data(db)), + UsePathSegment::Glob => path.push('*'), + }, + } + } + path + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum UsePathSegment<'db> { + Ident(IdentId<'db>), + /// `*`. + Glob, +} + +impl<'db> UsePathSegment<'db> { + /// Returns the ident of the last path segment. + /// If the last segment is a glob, returns `None`. 
+ pub fn ident(self) -> Option> { + match self { + UsePathSegment::Ident(ident) => Some(ident), + UsePathSegment::Glob => None, + } + } + + pub fn is_glob(self) -> bool { + match self { + UsePathSegment::Ident(_) => false, + UsePathSegment::Glob => true, + } + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, salsa::Update)] +pub enum UseAlias<'db> { + Ident(IdentId<'db>), + Underscore, +} diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs new file mode 100644 index 0000000000..5ac188f797 --- /dev/null +++ b/crates/hir/src/lib.rs @@ -0,0 +1,167 @@ +use analysis_pass::ModuleAnalysisPass; +use common::{InputDb, InputIngot}; +use hir_def::{module_tree_impl, IdentId, IngotId, TopLevelMod}; +pub use lower::parse::ParserError; +use lower::parse::{parse_file_impl, ParseErrorAccumulator}; +use parser::GreenNode; + +pub mod analysis_pass; +pub mod diagnostics; +pub mod hir_def; +pub mod lower; +pub mod span; +pub mod visitor; + +#[derive(Clone, Copy)] +pub struct ParsingPass<'db> { + db: &'db dyn HirDb, +} + +impl<'db> ParsingPass<'db> { + pub fn new(db: &'db dyn HirDb) -> Self { + Self { db } + } + + pub fn green_node(self, top_mod: TopLevelMod) -> GreenNode { + parse_file_impl(self.db, top_mod) + } +} + +impl<'db> ModuleAnalysisPass<'db> for ParsingPass<'db> { + fn run_on_module( + &mut self, + top_mod: TopLevelMod<'db>, + ) -> Vec + 'db>> { + parse_file_impl::accumulated::(self.db, top_mod) + .into_iter() + .map(|d| Box::new(d.0) as _) + .collect::>() + } +} + +/// Returns the root modules and names of external ingots that the given `ingot` +/// depends on. +/// From the outside of the crate, this functionality can be accessed via +/// [`TopLevelMod::external_ingots`](crate::TopLevelMod::external_ingots). +// The reason why this function is not a public API is that we want to prohibit users of `HirDb` to +// access `InputIngot` directly. 
+#[salsa::tracked(return_ref)] +#[allow(elided_named_lifetimes)] +pub(crate) fn external_ingots_impl( + db: &dyn HirDb, + ingot: InputIngot, +) -> Vec<(IdentId<'_>, IngotId<'_>)> { + let mut res = Vec::new(); + for dep in ingot.external_ingots(db.as_input_db()) { + let name = IdentId::new(db, dep.name.to_string()); + let ingot = module_tree_impl(db, dep.ingot) + .root_data() + .top_mod + .ingot(db); + res.push((name, ingot)) + } + res +} + +#[salsa::db] +pub trait HirDb: salsa::Database + InputDb { + fn as_hir_db(&self) -> &dyn HirDb; +} + +/// `LowerHirDb` is a marker trait for lowering AST to HIR items. +/// All code that requires [`LowerHirDb`] is considered have a possibility to +/// invalidate the cache in salsa when a revision is updated. Therefore, +/// implementations relying on `LowerHirDb` are prohibited in all +/// Analysis phases. +#[salsa::db] +pub trait LowerHirDb: salsa::Database + HirDb { + fn as_lower_hir_db(&self) -> &dyn LowerHirDb; +} + +/// `SpannedHirDb` is a marker trait for extracting span-dependent information +/// from HIR Items. +/// All code that requires [`SpannedHirDb`] is considered have a possibility to +/// invalidate the cache in salsa when a revision is updated. Therefore, +/// implementations relying on `SpannedHirDb` are prohibited in all +/// Analysis phases. +/// +/// This marker is mainly used to inject [HirOrigin](crate::span::HirOrigin) to +/// generate [CompleteDiagnostic](common::diagnostics::CompleteDiagnostic) from +/// [DiagnosticVoucher](crate::diagnostics::DiagnosticVoucher). +/// See also `[LazySpan]`[`crate::span::LazySpan`] for more details. 
+#[salsa::db] +pub trait SpannedHirDb: salsa::Database + HirDb { + fn as_spanned_hir_db(&self) -> &dyn SpannedHirDb; +} + +#[cfg(test)] +mod test_db { + use common::{ + impl_db_traits, + indexmap::IndexSet, + input::{IngotKind, Version}, + InputDb, InputFile, InputIngot, + }; + + use super::HirDb; + use crate::{ + hir_def::{scope_graph::ScopeGraph, ItemKind, TopLevelMod}, + lower::{map_file_to_mod, scope_graph}, + span::LazySpan, + LowerHirDb, SpannedHirDb, + }; + + #[derive(Clone, Default)] + #[salsa::db] + pub(crate) struct TestDb { + storage: salsa::Storage, + } + impl_db_traits!(TestDb, InputDb, HirDb, LowerHirDb, SpannedHirDb); + + impl TestDb { + pub fn parse_source(&self, ingot: InputIngot, file: InputFile) -> &ScopeGraph { + let top_mod = map_file_to_mod(self, ingot, file); + scope_graph(self, top_mod) + } + + /// Parses the given source text and returns the first inner item in the + /// file. + pub fn expect_item<'db, T>(&'db self, ingot: InputIngot, input: InputFile) -> T + where + ItemKind<'db>: TryInto, + { + let tree = self.parse_source(ingot, input); + tree.items_dfs(self) + .find_map(|it| it.try_into().ok()) + .unwrap() + } + + pub fn expect_items<'db, T>(&'db self, ingot: InputIngot, input: InputFile) -> Vec + where + ItemKind<'db>: TryInto, + { + let tree = self.parse_source(ingot, input); + tree.items_dfs(self) + .filter_map(|it| it.try_into().ok()) + .collect() + } + + pub fn text_at(&self, top_mod: TopLevelMod, span: &impl LazySpan) -> &str { + let range = span.resolve(self).unwrap().range; + let file = top_mod.file(self.as_hir_db()); + let text = file.text(self.as_hir_db().as_input_db()); + &text[range.start().into()..range.end().into()] + } + + pub fn standalone_file(&mut self, text: &str) -> (InputIngot, InputFile) { + let path = "hir_test"; + let kind = IngotKind::StandAlone; + let version = Version::new(0, 0, 1); + let ingot = InputIngot::new(self, path, kind, version, IndexSet::default()); + let file = InputFile::new(self, 
"test_file.fe".into(), text.to_string()); + ingot.set_root_file(self, file); + ingot.set_files(self, [file].into_iter().collect()); + (ingot, file) + } + } +} diff --git a/crates/hir/src/lower/attr.rs b/crates/hir/src/lower/attr.rs new file mode 100644 index 0000000000..39faf982e9 --- /dev/null +++ b/crates/hir/src/lower/attr.rs @@ -0,0 +1,64 @@ +use parser::ast; + +use super::FileLowerCtxt; +use crate::hir_def::{attr::*, IdentId, StringId}; + +impl<'db> AttrListId<'db> { + pub(super) fn lower_ast(ctxt: &mut FileLowerCtxt<'db>, ast: ast::AttrList) -> Self { + let attrs = ast + .into_iter() + .map(|attr| Attr::lower_ast(ctxt, attr)) + .collect::>(); + Self::new(ctxt.db(), attrs) + } + + pub(super) fn lower_ast_opt(ctxt: &mut FileLowerCtxt<'db>, ast: Option) -> Self { + ast.map(|ast| Self::lower_ast(ctxt, ast)) + .unwrap_or_else(|| Self::new(ctxt.db(), vec![])) + } +} + +impl<'db> Attr<'db> { + pub(super) fn lower_ast(ctxt: &mut FileLowerCtxt<'db>, ast: ast::Attr) -> Self { + match ast.kind() { + ast::AttrKind::Normal(attr) => NormalAttr::lower_ast(ctxt, attr).into(), + ast::AttrKind::DocComment(attr) => DocCommentAttr::lower_ast(ctxt, attr).into(), + } + } +} + +impl<'db> NormalAttr<'db> { + pub(super) fn lower_ast(ctxt: &mut FileLowerCtxt<'db>, ast: ast::NormalAttr) -> Self { + let name = IdentId::lower_token_partial(ctxt, ast.name()); + let args = ast + .args() + .map(|args| { + args.into_iter() + .map(|arg| AttrArg::lower_ast(ctxt, arg)) + .collect() + }) + .unwrap_or_default(); + + Self { name, args } + } +} + +impl<'db> DocCommentAttr<'db> { + pub(super) fn lower_ast(ctxt: &mut FileLowerCtxt<'db>, ast: ast::DocCommentAttr) -> Self { + let text = ast + .doc() + .map(|doc| doc.text()[3..].to_string()) + .unwrap_or_default(); + Self { + text: StringId::new(ctxt.db(), text), + } + } +} + +impl<'db> AttrArg<'db> { + pub(super) fn lower_ast(ctxt: &mut FileLowerCtxt<'db>, ast: ast::AttrArg) -> Self { + let key = IdentId::lower_token_partial(ctxt, ast.key()); + let 
value = IdentId::lower_token_partial(ctxt, ast.value()); + Self { key, value } + } +} diff --git a/crates/hir/src/lower/body.rs b/crates/hir/src/lower/body.rs new file mode 100644 index 0000000000..861af33e1d --- /dev/null +++ b/crates/hir/src/lower/body.rs @@ -0,0 +1,108 @@ +use parser::ast; + +use super::FileLowerCtxt; +use crate::{ + hir_def::{ + Body, BodyKind, BodySourceMap, Expr, ExprId, NodeStore, Partial, Pat, PatId, Stmt, StmtId, + TrackedItemId, TrackedItemVariant, + }, + span::HirOrigin, +}; + +impl<'db> Body<'db> { + pub(super) fn lower_ast(f_ctxt: &mut FileLowerCtxt<'db>, ast: ast::Expr) -> Self { + let id = f_ctxt.joined_id(TrackedItemVariant::FuncBody); + let mut ctxt = BodyCtxt::new(f_ctxt, id); + let body_expr = Expr::lower_ast(&mut ctxt, ast.clone()); + ctxt.build(&ast, body_expr, BodyKind::FuncBody) + } + + pub(super) fn lower_ast_nameless(f_ctxt: &mut FileLowerCtxt<'db>, ast: ast::Expr) -> Self { + let id = f_ctxt.joined_id(TrackedItemVariant::NamelessBody); + let mut ctxt = BodyCtxt::new(f_ctxt, id); + let body_expr = Expr::lower_ast(&mut ctxt, ast.clone()); + ctxt.build(&ast, body_expr, BodyKind::Anonymous) + } +} + +pub(super) struct BodyCtxt<'ctxt, 'db> { + pub(super) f_ctxt: &'ctxt mut FileLowerCtxt<'db>, + pub(super) id: TrackedItemId<'db>, + + pub(super) stmts: NodeStore>>, + pub(super) exprs: NodeStore>>, + pub(super) pats: NodeStore>>, + pub(super) source_map: BodySourceMap, +} + +impl<'ctxt, 'db> BodyCtxt<'ctxt, 'db> { + pub(super) fn push_expr(&mut self, expr: Expr<'db>, origin: HirOrigin) -> ExprId { + let expr_id = self.exprs.push(Partial::Present(expr)); + self.source_map.expr_map.insert(expr_id, origin); + + expr_id + } + + pub(super) fn push_invalid_expr(&mut self, origin: HirOrigin) -> ExprId { + let expr_id = self.exprs.push(Partial::Absent); + self.source_map.expr_map.insert(expr_id, origin); + + expr_id + } + + pub(super) fn push_missing_expr(&mut self) -> ExprId { + let expr_id = self.exprs.push(Partial::Absent); + 
self.source_map.expr_map.insert(expr_id, HirOrigin::None); + expr_id + } + + pub(super) fn push_stmt(&mut self, stmt: Stmt<'db>, origin: HirOrigin) -> StmtId { + let stmt_id = self.stmts.push(Partial::Present(stmt)); + self.source_map.stmt_map.insert(stmt_id, origin); + + stmt_id + } + + pub(super) fn push_pat(&mut self, pat: Pat<'db>, origin: HirOrigin) -> PatId { + let pat_id = self.pats.push(Partial::Present(pat)); + self.source_map.pat_map.insert(pat_id, origin); + pat_id + } + + pub(super) fn push_missing_pat(&mut self) -> PatId { + let pat_id = self.pats.push(Partial::Absent); + self.source_map.pat_map.insert(pat_id, HirOrigin::None); + pat_id + } + + fn new(f_ctxt: &'ctxt mut FileLowerCtxt<'db>, id: TrackedItemId<'db>) -> Self { + f_ctxt.enter_body_scope(id); + Self { + f_ctxt, + id, + stmts: NodeStore::new(), + exprs: NodeStore::new(), + pats: NodeStore::new(), + source_map: BodySourceMap::default(), + } + } + + fn build(self, ast: &ast::Expr, body_expr: ExprId, body_kind: BodyKind) -> Body<'db> { + let origin = HirOrigin::raw(ast); + let body = Body::new( + self.f_ctxt.db(), + self.id, + body_expr, + body_kind, + self.stmts, + self.exprs, + self.pats, + self.f_ctxt.top_mod(), + self.source_map, + origin, + ); + + self.f_ctxt.leave_item_scope(body); + body + } +} diff --git a/crates/hir/src/lower/expr.rs b/crates/hir/src/lower/expr.rs new file mode 100644 index 0000000000..8df114f931 --- /dev/null +++ b/crates/hir/src/lower/expr.rs @@ -0,0 +1,290 @@ +use parser::ast::{self, prelude::*}; + +use super::body::BodyCtxt; +use crate::{ + hir_def::{ + expr::*, Body, GenericArgListId, IdentId, IntegerId, ItemKind, LitKind, Pat, PathId, Stmt, + }, + span::HirOrigin, +}; + +impl<'db> Expr<'db> { + pub(super) fn lower_ast(ctxt: &mut BodyCtxt<'_, 'db>, ast: ast::Expr) -> ExprId { + let expr = match ast.kind() { + ast::ExprKind::Lit(lit) => { + if let Some(lit) = lit.lit() { + let lit = LitKind::lower_ast(ctxt.f_ctxt, lit); + Self::Lit(lit) + } else { + return 
ctxt.push_invalid_expr(HirOrigin::raw(&ast)); + } + } + + ast::ExprKind::Block(block) => { + ctxt.f_ctxt.enter_block_scope(); + let mut stmts = vec![]; + + for stmt in block.stmts() { + let stmt = Stmt::push_to_body(ctxt, stmt); + stmts.push(stmt); + } + let expr_id = ctxt.push_expr(Self::Block(stmts), HirOrigin::raw(&ast)); + + for item in block.items() { + ItemKind::lower_ast(ctxt.f_ctxt, item); + } + + ctxt.f_ctxt.leave_block_scope(expr_id); + return expr_id; + } + + ast::ExprKind::Bin(bin) => { + let lhs = Self::push_to_body_opt(ctxt, bin.lhs()); + let rhs = Self::push_to_body_opt(ctxt, bin.rhs()); + let op = bin.op().map(BinOp::lower_ast).into(); + Self::Bin(lhs, rhs, op) + } + + ast::ExprKind::Un(un) => { + let expr = Self::push_to_body_opt(ctxt, un.expr()); + let op = un.op().map(UnOp::lower_ast).into(); + Self::Un(expr, op) + } + + ast::ExprKind::Call(call) => { + let callee = Self::push_to_body_opt(ctxt, call.callee()); + let args = call + .args() + .map(|args| { + args.into_iter() + .map(|arg| CallArg::lower_ast(ctxt, arg)) + .collect() + }) + .unwrap_or_default(); + Self::Call(callee, args) + } + + ast::ExprKind::MethodCall(method_call) => { + let receiver = Self::push_to_body_opt(ctxt, method_call.receiver()); + let method_name = + IdentId::lower_token_partial(ctxt.f_ctxt, method_call.method_name()); + let generic_args = + GenericArgListId::lower_ast_opt(ctxt.f_ctxt, method_call.generic_args()); + let args = method_call + .args() + .map(|args| { + args.into_iter() + .map(|arg| CallArg::lower_ast(ctxt, arg)) + .collect() + }) + .unwrap_or_default(); + Self::MethodCall(receiver, method_name, generic_args, args) + } + + ast::ExprKind::Path(path) => { + let path = PathId::lower_ast_partial(ctxt.f_ctxt, path.path()); + Self::Path(path) + } + + ast::ExprKind::RecordInit(record_init) => { + let path = PathId::lower_ast_partial(ctxt.f_ctxt, record_init.path()); + let fields = record_init + .fields() + .map(|fields| { + fields + .into_iter() + .map(|field| 
Field::lower_ast(ctxt, field)) + .collect() + }) + .unwrap_or_default(); + Self::RecordInit(path, fields) + } + + ast::ExprKind::Field(field) => { + let receiver = Self::push_to_body_opt(ctxt, field.receiver()); + let field = if let Some(name) = field.field_name() { + Some(FieldIndex::Ident(IdentId::lower_token(ctxt.f_ctxt, name))).into() + } else if let Some(num) = field.field_index() { + Some(FieldIndex::Index(IntegerId::lower_ast(ctxt.f_ctxt, num))).into() + } else { + None.into() + }; + Self::Field(receiver, field) + } + + ast::ExprKind::Index(index) => { + let indexed = Self::push_to_body_opt(ctxt, index.expr()); + let index = Self::push_to_body_opt(ctxt, index.index()); + Self::Index(indexed, index) + } + + ast::ExprKind::Tuple(tup) => { + let elems = tup + .elems() + .map(|elem| Self::push_to_body_opt(ctxt, elem)) + .collect(); + + Self::Tuple(elems) + } + + ast::ExprKind::Array(array) => { + let elems = array + .elems() + .map(|elem| Self::push_to_body_opt(ctxt, elem)) + .collect(); + Self::Array(elems) + } + + ast::ExprKind::ArrayRep(array_rep) => { + let val = Self::push_to_body_opt(ctxt, array_rep.val()); + let len = array_rep + .len() + .map(|ast| Body::lower_ast_nameless(ctxt.f_ctxt, ast)) + .into(); + Self::ArrayRep(val, len) + } + + ast::ExprKind::If(if_) => { + let cond = Self::push_to_body_opt(ctxt, if_.cond()); + let then = Expr::push_to_body_opt( + ctxt, + if_.then() + .and_then(|body| ast::Expr::cast(body.syntax().clone())), + ); + let else_ = if_.else_().map(|ast| Self::lower_ast(ctxt, ast)); + Self::If(cond, then, else_) + } + + ast::ExprKind::Match(match_) => { + let scrutinee = Self::push_to_body_opt(ctxt, match_.scrutinee()); + let arm = match_ + .arms() + .map(|arms| { + arms.into_iter() + .map(|arm| MatchArm::lower_ast(ctxt, arm)) + .collect() + }) + .into(); + + Self::Match(scrutinee, arm) + } + + ast::ExprKind::Paren(paren) => { + return Self::push_to_body_opt(ctxt, paren.expr()); + } + + ast::ExprKind::Assign(assign) => { + let lhs = 
Self::push_to_body_opt(ctxt, assign.lhs_expr()); + let rhs = Self::push_to_body_opt(ctxt, assign.rhs_expr()); + Self::Assign(lhs, rhs) + } + + ast::ExprKind::AugAssign(aug_assign) => { + let lhs = Self::push_to_body_opt(ctxt, aug_assign.lhs_expr()); + let rhs = Self::push_to_body_opt(ctxt, aug_assign.rhs_expr()); + let binop = aug_assign.op().map(ArithBinOp::lower_ast).unwrap(); + + Self::AugAssign(lhs, rhs, binop) + } + }; + + ctxt.push_expr(expr, HirOrigin::raw(&ast)) + } + + pub(super) fn push_to_body_opt(ctxt: &mut BodyCtxt<'_, '_>, ast: Option) -> ExprId { + if let Some(ast) = ast { + Expr::lower_ast(ctxt, ast) + } else { + ctxt.push_missing_expr() + } + } +} + +impl BinOp { + pub(super) fn lower_ast(ast: ast::BinOp) -> Self { + match ast { + ast::BinOp::Arith(arith) => ArithBinOp::lower_ast(arith).into(), + ast::BinOp::Comp(arith) => CompBinOp::lower_ast(arith).into(), + ast::BinOp::Logical(arith) => LogicalBinOp::lower_ast(arith).into(), + } + } +} + +impl ArithBinOp { + pub(super) fn lower_ast(ast: ast::ArithBinOp) -> Self { + match ast { + ast::ArithBinOp::Add(_) => Self::Add, + ast::ArithBinOp::Sub(_) => Self::Sub, + ast::ArithBinOp::Mul(_) => Self::Mul, + ast::ArithBinOp::Div(_) => Self::Div, + ast::ArithBinOp::Mod(_) => Self::Rem, + ast::ArithBinOp::Pow(_) => Self::Pow, + ast::ArithBinOp::LShift(_) => Self::LShift, + ast::ArithBinOp::RShift(_) => Self::RShift, + ast::ArithBinOp::BitAnd(_) => Self::BitAnd, + ast::ArithBinOp::BitOr(_) => Self::BitOr, + ast::ArithBinOp::BitXor(_) => Self::BitXor, + } + } +} + +impl CompBinOp { + pub(super) fn lower_ast(ast: ast::CompBinOp) -> Self { + match ast { + ast::CompBinOp::Eq(_) => Self::Eq, + ast::CompBinOp::NotEq(_) => Self::NotEq, + ast::CompBinOp::Lt(_) => Self::Lt, + ast::CompBinOp::LtEq(_) => Self::LtEq, + ast::CompBinOp::Gt(_) => Self::Gt, + ast::CompBinOp::GtEq(_) => Self::GtEq, + } + } +} + +impl LogicalBinOp { + pub(super) fn lower_ast(ast: ast::LogicalBinOp) -> Self { + match ast { + 
ast::LogicalBinOp::And(_) => Self::And, + ast::LogicalBinOp::Or(_) => Self::Or, + } + } +} + +impl UnOp { + fn lower_ast(ast: ast::UnOp) -> Self { + match ast { + ast::UnOp::Plus(_) => Self::Plus, + ast::UnOp::Minus(_) => Self::Minus, + ast::UnOp::Not(_) => Self::Not, + ast::UnOp::BitNot(_) => Self::BitNot, + } + } +} + +impl MatchArm { + fn lower_ast(ctxt: &mut BodyCtxt<'_, '_>, ast: ast::MatchArm) -> Self { + let pat = Pat::lower_ast_opt(ctxt, ast.pat()); + let body = Expr::push_to_body_opt(ctxt, ast.body()); + Self { pat, body } + } +} + +impl<'db> CallArg<'db> { + fn lower_ast(ctxt: &mut BodyCtxt<'_, 'db>, ast: ast::CallArg) -> Self { + let label = ast + .label() + .map(|label| IdentId::lower_token(ctxt.f_ctxt, label)); + let expr = Expr::push_to_body_opt(ctxt, ast.expr()); + Self { label, expr } + } +} + +impl<'db> Field<'db> { + fn lower_ast(ctxt: &mut BodyCtxt<'_, 'db>, ast: ast::RecordField) -> Self { + let label = ast + .label() + .map(|label| IdentId::lower_token(ctxt.f_ctxt, label)); + let expr = Expr::push_to_body_opt(ctxt, ast.expr()); + Self { label, expr } + } +} diff --git a/crates/hir/src/lower/item.rs b/crates/hir/src/lower/item.rs new file mode 100644 index 0000000000..95bb764fcd --- /dev/null +++ b/crates/hir/src/lower/item.rs @@ -0,0 +1,455 @@ +use parser::ast::{self, prelude::*}; + +use super::FileLowerCtxt; +use crate::{ + hir_def::{ + item::*, AttrListId, Body, FuncParamListId, GenericParamListId, IdentId, TraitRefId, + TupleTypeId, TypeId, WhereClauseId, + }, + span::HirOrigin, +}; + +pub(crate) fn lower_module_items(ctxt: &mut FileLowerCtxt<'_>, items: ast::ItemList) { + for item in items { + ItemKind::lower_ast(ctxt, item); + } +} + +impl<'db> ItemKind<'db> { + pub(super) fn lower_ast(ctxt: &mut FileLowerCtxt<'db>, ast: ast::Item) { + let Some(kind) = ast.kind() else { + return; + }; + + match kind { + ast::ItemKind::Mod(mod_) => { + Mod::lower_ast(ctxt, mod_); + } + ast::ItemKind::Func(fn_) => { + Func::lower_ast(ctxt, fn_, false); + } + 
ast::ItemKind::Struct(struct_) => { + Struct::lower_ast(ctxt, struct_); + } + ast::ItemKind::Contract(contract) => { + Contract::lower_ast(ctxt, contract); + } + ast::ItemKind::Enum(enum_) => { + Enum::lower_ast(ctxt, enum_); + } + ast::ItemKind::TypeAlias(alias) => { + TypeAlias::lower_ast(ctxt, alias); + } + ast::ItemKind::Impl(impl_) => { + Impl::lower_ast(ctxt, impl_); + } + ast::ItemKind::Trait(trait_) => { + Trait::lower_ast(ctxt, trait_); + } + ast::ItemKind::ImplTrait(impl_trait) => { + ImplTrait::lower_ast(ctxt, impl_trait); + } + ast::ItemKind::Const(const_) => { + Const::lower_ast(ctxt, const_); + } + ast::ItemKind::Use(use_) => { + Use::lower_ast(ctxt, use_); + } + ast::ItemKind::Extern(extern_) => { + if let Some(extern_block) = extern_.extern_block() { + for fn_ in extern_block { + Func::lower_ast(ctxt, fn_, true); + } + } + } + } + } +} + +impl<'db> Mod<'db> { + pub(super) fn lower_ast(ctxt: &mut FileLowerCtxt<'db>, ast: ast::Mod) -> Self { + let name = IdentId::lower_token_partial(ctxt, ast.name()); + let id = ctxt.joined_id(TrackedItemVariant::Mod(name)); + ctxt.enter_item_scope(id, true); + + let attributes = AttrListId::lower_ast_opt(ctxt, ast.attr_list()); + let vis = ItemModifier::lower_ast(ast.modifier()).to_visibility(); + if let Some(items) = ast.items() { + lower_module_items(ctxt, items); + } + + let origin = HirOrigin::raw(&ast); + let mod_ = Self::new(ctxt.db(), id, name, attributes, vis, ctxt.top_mod(), origin); + ctxt.leave_item_scope(mod_) + } +} + +impl<'db> Func<'db> { + pub(super) fn lower_ast( + ctxt: &mut FileLowerCtxt<'db>, + ast: ast::Func, + is_extern: bool, + ) -> Self { + let name = IdentId::lower_token_partial(ctxt, ast.name()); + let id = ctxt.joined_id(TrackedItemVariant::Func(name)); + ctxt.enter_item_scope(id, false); + + let attributes = AttrListId::lower_ast_opt(ctxt, ast.attr_list()); + let generic_params = GenericParamListId::lower_ast_opt(ctxt, ast.generic_params()); + let where_clause = 
WhereClauseId::lower_ast_opt(ctxt, ast.where_clause()); + let params = ast + .params() + .map(|params| FuncParamListId::lower_ast(ctxt, params)) + .into(); + let ret_ty = ast.ret_ty().map(|ty| TypeId::lower_ast(ctxt, ty)); + let modifier = ItemModifier::lower_ast(ast.modifier()); + let body = ast + .body() + .map(|body| Body::lower_ast(ctxt, ast::Expr::cast(body.syntax().clone()).unwrap())); + let origin = HirOrigin::raw(&ast); + + let fn_ = Self::new( + ctxt.db(), + id, + name, + attributes, + generic_params, + where_clause, + params, + ret_ty, + modifier, + body, + is_extern, + ctxt.top_mod(), + origin, + ); + ctxt.leave_item_scope(fn_) + } +} + +impl<'db> Struct<'db> { + pub(super) fn lower_ast(ctxt: &mut FileLowerCtxt<'db>, ast: ast::Struct) -> Self { + let name = IdentId::lower_token_partial(ctxt, ast.name()); + let id = ctxt.joined_id(TrackedItemVariant::Struct(name)); + ctxt.enter_item_scope(id, false); + + let attributes = AttrListId::lower_ast_opt(ctxt, ast.attr_list()); + let vis = ItemModifier::lower_ast(ast.modifier()).to_visibility(); + let generic_params = GenericParamListId::lower_ast_opt(ctxt, ast.generic_params()); + let where_clause = WhereClauseId::lower_ast_opt(ctxt, ast.where_clause()); + let fields = FieldDefListId::lower_ast_opt(ctxt, ast.fields()); + let origin = HirOrigin::raw(&ast); + + let struct_ = Self::new( + ctxt.db(), + id, + name, + attributes, + vis, + generic_params, + where_clause, + fields, + ctxt.top_mod(), + origin, + ); + ctxt.leave_item_scope(struct_) + } +} + +impl<'db> Contract<'db> { + pub(super) fn lower_ast(ctxt: &mut FileLowerCtxt<'db>, ast: ast::Contract) -> Self { + let name = IdentId::lower_token_partial(ctxt, ast.name()); + let id = ctxt.joined_id(TrackedItemVariant::Contract(name)); + ctxt.enter_item_scope(id, false); + + let attributes = AttrListId::lower_ast_opt(ctxt, ast.attr_list()); + let vis = ItemModifier::lower_ast(ast.modifier()).to_visibility(); + let fields = FieldDefListId::lower_ast_opt(ctxt, 
ast.fields()); + let origin = HirOrigin::raw(&ast); + + let contract = Self::new( + ctxt.db(), + id, + name, + attributes, + vis, + fields, + ctxt.top_mod(), + origin, + ); + ctxt.leave_item_scope(contract) + } +} + +impl<'db> Enum<'db> { + pub(super) fn lower_ast(ctxt: &mut FileLowerCtxt<'db>, ast: ast::Enum) -> Self { + let name = IdentId::lower_token_partial(ctxt, ast.name()); + let id = ctxt.joined_id(TrackedItemVariant::Enum(name)); + ctxt.enter_item_scope(id, false); + + let attributes = AttrListId::lower_ast_opt(ctxt, ast.attr_list()); + let vis = ItemModifier::lower_ast(ast.modifier()).to_visibility(); + let generic_params = GenericParamListId::lower_ast_opt(ctxt, ast.generic_params()); + let where_clause = WhereClauseId::lower_ast_opt(ctxt, ast.where_clause()); + let variants = VariantDefListId::lower_ast_opt(ctxt, ast.variants()); + let origin = HirOrigin::raw(&ast); + + let enum_ = Self::new( + ctxt.db(), + id, + name, + attributes, + vis, + generic_params, + where_clause, + variants, + ctxt.top_mod(), + origin, + ); + ctxt.leave_item_scope(enum_) + } +} + +impl<'db> TypeAlias<'db> { + pub(super) fn lower_ast(ctxt: &mut FileLowerCtxt<'db>, ast: ast::TypeAlias) -> Self { + let name = IdentId::lower_token_partial(ctxt, ast.alias()); + let id = ctxt.joined_id(TrackedItemVariant::TypeAlias(name)); + ctxt.enter_item_scope(id, false); + + let attributes = AttrListId::lower_ast_opt(ctxt, ast.attr_list()); + let vis = ItemModifier::lower_ast(ast.modifier()).to_visibility(); + let generic_params = GenericParamListId::lower_ast_opt(ctxt, ast.generic_params()); + let ty = TypeId::lower_ast_partial(ctxt, ast.ty()); + let origin = HirOrigin::raw(&ast); + + let alias = Self::new( + ctxt.db(), + id, + name, + attributes, + vis, + generic_params, + ty, + ctxt.top_mod(), + origin, + ); + ctxt.leave_item_scope(alias) + } +} + +impl<'db> Impl<'db> { + pub(super) fn lower_ast(ctxt: &mut FileLowerCtxt<'db>, ast: ast::Impl) -> Self { + let ty = TypeId::lower_ast_partial(ctxt, 
ast.ty()); + let id = ctxt.joined_id(TrackedItemVariant::Impl(ty)); + ctxt.enter_item_scope(id, false); + + let attributes = AttrListId::lower_ast_opt(ctxt, ast.attr_list()); + let generic_params = GenericParamListId::lower_ast_opt(ctxt, ast.generic_params()); + let where_clause = WhereClauseId::lower_ast_opt(ctxt, ast.where_clause()); + let origin = HirOrigin::raw(&ast); + + if let Some(item_list) = ast.item_list() { + for impl_item in item_list { + Func::lower_ast(ctxt, impl_item, false); + } + } + + let impl_ = Self::new( + ctxt.db(), + id, + ty, + attributes, + generic_params, + where_clause, + ctxt.top_mod(), + origin, + ); + ctxt.leave_item_scope(impl_) + } +} + +impl<'db> Trait<'db> { + pub(super) fn lower_ast(ctxt: &mut FileLowerCtxt<'db>, ast: ast::Trait) -> Self { + let name = IdentId::lower_token_partial(ctxt, ast.name()); + let id = ctxt.joined_id(TrackedItemVariant::Trait(name)); + ctxt.enter_item_scope(id, false); + + let attributes = AttrListId::lower_ast_opt(ctxt, ast.attr_list()); + let vis = ItemModifier::lower_ast(ast.modifier()).to_visibility(); + let generic_params = GenericParamListId::lower_ast_opt(ctxt, ast.generic_params()); + let where_clause = WhereClauseId::lower_ast_opt(ctxt, ast.where_clause()); + let super_traits = if let Some(super_traits) = ast.super_trait_list() { + super_traits + .into_iter() + .map(|trait_ref| TraitRefId::lower_ast(ctxt, trait_ref)) + .collect() + } else { + vec![] + }; + let origin = HirOrigin::raw(&ast); + + if let Some(item_list) = ast.item_list() { + for impl_item in item_list { + Func::lower_ast(ctxt, impl_item, false); + } + } + + let trait_ = Self::new( + ctxt.db(), + id, + name, + attributes, + vis, + generic_params, + super_traits, + where_clause, + ctxt.top_mod(), + origin, + ); + + ctxt.leave_item_scope(trait_) + } +} + +impl<'db> ImplTrait<'db> { + pub(super) fn lower_ast(ctxt: &mut FileLowerCtxt<'db>, ast: ast::ImplTrait) -> Self { + let trait_ref = TraitRefId::lower_ast_partial(ctxt, 
ast.trait_ref()); + let ty = TypeId::lower_ast_partial(ctxt, ast.ty()); + let id = ctxt.joined_id(TrackedItemVariant::ImplTrait(trait_ref, ty)); + ctxt.enter_item_scope(id, false); + + let attributes = AttrListId::lower_ast_opt(ctxt, ast.attr_list()); + let generic_params = GenericParamListId::lower_ast_opt(ctxt, ast.generic_params()); + let where_clause = WhereClauseId::lower_ast_opt(ctxt, ast.where_clause()); + let origin = HirOrigin::raw(&ast); + + if let Some(item_list) = ast.item_list() { + for impl_item in item_list { + Func::lower_ast(ctxt, impl_item, false); + } + } + + let impl_trait = Self::new( + ctxt.db(), + id, + trait_ref, + ty, + attributes, + generic_params, + where_clause, + ctxt.top_mod(), + origin, + ); + ctxt.leave_item_scope(impl_trait) + } +} + +impl<'db> Const<'db> { + pub(super) fn lower_ast(ctxt: &mut FileLowerCtxt<'db>, ast: ast::Const) -> Self { + let name = IdentId::lower_token_partial(ctxt, ast.name()); + let id = ctxt.joined_id(TrackedItemVariant::Const(name)); + ctxt.enter_item_scope(id, false); + + let attributes = AttrListId::lower_ast_opt(ctxt, ast.attr_list()); + let ty = TypeId::lower_ast_partial(ctxt, ast.ty()); + let body = ast.value().map(|ast| Body::lower_ast(ctxt, ast)).into(); + let vis = ItemModifier::lower_ast(ast.modifier()).to_visibility(); + let origin = HirOrigin::raw(&ast); + + let const_ = Self::new( + ctxt.db(), + id, + name, + attributes, + ty, + body, + vis, + ctxt.top_mod(), + origin, + ); + ctxt.leave_item_scope(const_) + } +} + +impl ItemModifier { + pub(super) fn lower_ast(ast: Option) -> Self { + let Some(ast) = ast else { + return Self::None; + }; + + match (ast.pub_kw().is_some(), ast.unsafe_kw().is_some()) { + (true, true) => Self::PubAndUnsafe, + (true, false) => Self::Pub, + (false, true) => Self::Unsafe, + (false, false) => Self::None, + } + } +} + +impl<'db> FieldDefListId<'db> { + fn lower_ast(ctxt: &mut FileLowerCtxt<'db>, ast: ast::RecordFieldDefList) -> Self { + let fields = ast + .into_iter() + 
.map(|field| FieldDef::lower_ast(ctxt, field)) + .collect::>(); + Self::new(ctxt.db(), fields) + } + + fn lower_ast_opt(ctxt: &mut FileLowerCtxt<'db>, ast: Option) -> Self { + ast.map(|ast| Self::lower_ast(ctxt, ast)) + .unwrap_or(Self::new(ctxt.db(), Vec::new())) + } +} + +impl<'db> FieldDef<'db> { + fn lower_ast(ctxt: &mut FileLowerCtxt<'db>, ast: ast::RecordFieldDef) -> Self { + let attributes = AttrListId::lower_ast_opt(ctxt, ast.attr_list()); + let name = IdentId::lower_token_partial(ctxt, ast.name()); + let ty = TypeId::lower_ast_partial(ctxt, ast.ty()); + let vis = if ast.pub_kw().is_some() { + Visibility::Public + } else { + Visibility::Private + }; + + Self { + attributes, + name, + ty, + vis, + } + } +} + +impl<'db> VariantDefListId<'db> { + fn lower_ast(ctxt: &mut FileLowerCtxt<'db>, ast: ast::VariantDefList) -> Self { + let variants = ast + .into_iter() + .map(|variant| VariantDef::lower_ast(ctxt, variant)) + .collect::>(); + Self::new(ctxt.db(), variants) + } + + fn lower_ast_opt(ctxt: &mut FileLowerCtxt<'db>, ast: Option) -> Self { + ast.map(|ast| Self::lower_ast(ctxt, ast)) + .unwrap_or(Self::new(ctxt.db(), Vec::new())) + } +} + +impl<'db> VariantDef<'db> { + fn lower_ast(ctxt: &mut FileLowerCtxt<'db>, ast: ast::VariantDef) -> Self { + let attributes = AttrListId::lower_ast_opt(ctxt, ast.attr_list()); + let name = IdentId::lower_token_partial(ctxt, ast.name()); + let kind = match ast.kind() { + ast::VariantKind::Unit => VariantKind::Unit, + ast::VariantKind::Tuple(t) => VariantKind::Tuple(TupleTypeId::lower_ast(ctxt, t)), + ast::VariantKind::Record(r) => VariantKind::Record(FieldDefListId::lower_ast(ctxt, r)), + }; + + Self { + attributes, + name, + kind, + } + } +} diff --git a/crates/hir/src/lower/mod.rs b/crates/hir/src/lower/mod.rs new file mode 100644 index 0000000000..cd2b4832f6 --- /dev/null +++ b/crates/hir/src/lower/mod.rs @@ -0,0 +1,193 @@ +use common::{InputFile, InputIngot}; +use num_bigint::BigUint; +use num_traits::Num; +use parser::{ + 
ast::{self, prelude::*}, + SyntaxNode, SyntaxToken, +}; + +use self::{item::lower_module_items, parse::parse_file_impl, scope_builder::ScopeGraphBuilder}; +use crate::{ + hir_def::{ + module_tree_impl, scope_graph::ScopeGraph, ExprId, IdentId, IngotId, IntegerId, ItemKind, + LitKind, ModuleTree, Partial, StringId, TopLevelMod, TrackedItemId, TrackedItemVariant, + }, + HirDb, LowerHirDb, +}; + +pub(crate) mod parse; + +mod attr; +mod body; +mod expr; +mod item; +mod params; +mod pat; +mod path; +mod scope_builder; +mod stmt; +mod types; +mod use_tree; + +/// Maps the given file to a top-level module. +/// This function just maps the file to a top-level module, and doesn't perform +/// any parsing or lowering. +/// To perform the actual lowering, use [`scope_graph`] instead. +pub fn map_file_to_mod(db: &dyn LowerHirDb, ingot: InputIngot, file: InputFile) -> TopLevelMod { + let ingot = module_tree_impl(db.as_hir_db(), ingot).ingot; + map_file_to_mod_impl(db.as_hir_db(), ingot, file) +} + +/// Returns the scope graph of the given top-level module. +pub fn scope_graph<'db>( + db: &'db dyn LowerHirDb, + top_mod: TopLevelMod<'db>, +) -> &'db ScopeGraph<'db> { + scope_graph_impl(db.as_hir_db(), top_mod) +} + +/// Returns the ingot module tree of the given ingot. 
+pub fn module_tree(db: &dyn LowerHirDb, ingot: InputIngot) -> &ModuleTree { + module_tree_impl(db.as_hir_db(), ingot) +} + +#[salsa::tracked] +pub(crate) fn map_file_to_mod_impl<'db>( + db: &'db dyn HirDb, + ingot: IngotId<'db>, + file: InputFile, +) -> TopLevelMod<'db> { + let path = file.path(db.as_input_db()); + let name = path.file_stem().unwrap(); + let mod_name = IdentId::new(db, name.to_string()); + TopLevelMod::new(db, mod_name, ingot, file) +} + +#[salsa::tracked(return_ref)] +pub(crate) fn scope_graph_impl<'db>( + db: &'db dyn HirDb, + top_mod: TopLevelMod<'db>, +) -> ScopeGraph<'db> { + let ast = top_mod_ast(db, top_mod); + let mut ctxt = FileLowerCtxt::enter_top_mod(db, top_mod); + + if let Some(items) = ast.items() { + lower_module_items(&mut ctxt, items); + } + ctxt.leave_item_scope(top_mod); + + ctxt.build() +} + +pub(crate) fn top_mod_ast(db: &dyn HirDb, top_mod: TopLevelMod) -> ast::Root { + let node = SyntaxNode::new_root(parse_file_impl(db, top_mod)); + // This cast never fails even if the file content is empty. + ast::Root::cast(node).unwrap() +} + +pub(super) struct FileLowerCtxt<'db> { + builder: ScopeGraphBuilder<'db>, +} + +impl<'db> FileLowerCtxt<'db> { + pub(super) fn enter_top_mod(db: &'db dyn HirDb, top_mod: TopLevelMod<'db>) -> Self { + Self { + builder: ScopeGraphBuilder::enter_top_mod(db, top_mod), + } + } + + pub(super) fn build(self) -> ScopeGraph<'db> { + self.builder.build() + } + + pub(super) fn db(&self) -> &'db dyn HirDb { + self.builder.db + } + + pub(super) fn top_mod(&self) -> TopLevelMod<'db> { + self.builder.top_mod + } + + pub(super) fn enter_block_scope(&mut self) { + self.builder.enter_block_scope(); + } + + pub(super) fn leave_block_scope(&mut self, block: ExprId) { + self.builder.leave_block_scope(block); + } + + pub(super) fn joined_id(&self, id: TrackedItemVariant<'db>) -> TrackedItemId<'db> { + self.builder.joined_id(id) + } + + /// Creates a new scope for an item. 
+ fn enter_item_scope(&mut self, id: TrackedItemId<'db>, is_mod: bool) { + self.builder.enter_item_scope(id, is_mod); + } + + fn enter_body_scope(&mut self, id: TrackedItemId<'db>) { + self.builder.enter_body_scope(id); + } + + /// Leaves the current scope, `item` should be the generated item which owns + /// the scope. + fn leave_item_scope(&mut self, item: I) -> I + where + I: Into> + Copy, + { + self.builder.leave_item_scope(item.into()); + item + } +} + +impl<'db> IdentId<'db> { + fn lower_token(ctxt: &mut FileLowerCtxt<'db>, token: SyntaxToken) -> Self { + Self::new(ctxt.db(), token.text().to_string()) + } + + fn lower_token_partial( + ctxt: &mut FileLowerCtxt<'db>, + token: Option, + ) -> Partial { + token.map(|token| Self::lower_token(ctxt, token)).into() + } +} + +impl<'db> LitKind<'db> { + fn lower_ast(ctxt: &mut FileLowerCtxt<'db>, ast: ast::Lit) -> Self { + match ast.kind() { + ast::LitKind::Int(int) => Self::Int(IntegerId::lower_ast(ctxt, int)), + ast::LitKind::String(string) => { + let text = string.token().text(); + Self::String(StringId::new( + ctxt.db(), + text[1..text.len() - 1].to_string(), + )) + } + ast::LitKind::Bool(bool) => match bool.token().text() { + "true" => Self::Bool(true), + "false" => Self::Bool(false), + _ => unreachable!(), + }, + } + } +} + +impl<'db> IntegerId<'db> { + fn lower_ast(ctxt: &mut FileLowerCtxt<'db>, ast: ast::LitInt) -> Self { + let text = ast.token().text(); + // Parser ensures that the text is valid pair with a radix and a number. 
+ if text.len() < 2 { + return Self::new(ctxt.db(), BigUint::from_str_radix(text, 10).unwrap()); + } + + let int = match &text[0..2] { + "0x" | "0X" => BigUint::from_str_radix(&text[2..], 16).unwrap(), + "0o" | "0O" => BigUint::from_str_radix(&text[2..], 8).unwrap(), + "0b" | "0B" => BigUint::from_str_radix(&text[2..], 2).unwrap(), + _ => BigUint::from_str_radix(text, 10).unwrap(), + }; + + Self::new(ctxt.db(), int) + } +} diff --git a/crates/hir/src/lower/params.rs b/crates/hir/src/lower/params.rs new file mode 100644 index 0000000000..7751da90d9 --- /dev/null +++ b/crates/hir/src/lower/params.rs @@ -0,0 +1,233 @@ +use parser::ast::{self}; + +use super::FileLowerCtxt; +use crate::hir_def::{params::*, Body, IdentId, Partial, TypeId}; + +impl<'db> GenericArgListId<'db> { + pub(super) fn lower_ast(ctxt: &mut FileLowerCtxt<'db>, ast: ast::GenericArgList) -> Self { + let args = ast + .into_iter() + .map(|arg| GenericArg::lower_ast(ctxt, arg)) + .collect::>(); + Self::new(ctxt.db(), args, true) + } + + pub(super) fn lower_ast_opt( + ctxt: &mut FileLowerCtxt<'db>, + ast: Option, + ) -> Self { + ast.map(|ast| Self::lower_ast(ctxt, ast)) + .unwrap_or_else(|| Self::none(ctxt.db())) + } +} + +impl<'db> GenericParamListId<'db> { + pub(super) fn lower_ast(ctxt: &mut FileLowerCtxt<'db>, ast: ast::GenericParamList) -> Self { + let params = ast + .into_iter() + .map(|param| GenericParam::lower_ast(ctxt, param)) + .collect::>(); + Self::new(ctxt.db(), params) + } + + pub(super) fn lower_ast_opt( + ctxt: &mut FileLowerCtxt<'db>, + ast: Option, + ) -> Self { + ast.map(|ast| Self::lower_ast(ctxt, ast)) + .unwrap_or_else(|| Self::new(ctxt.db(), Vec::new())) + } +} + +impl<'db> FuncParamListId<'db> { + pub(super) fn lower_ast(ctxt: &mut FileLowerCtxt<'db>, ast: ast::FuncParamList) -> Self { + let params = ast + .into_iter() + .map(|param| FuncParam::lower_ast(ctxt, param)) + .collect::>(); + Self::new(ctxt.db(), params) + } +} + +impl<'db> WhereClauseId<'db> { + pub(super) fn 
lower_ast(ctxt: &mut FileLowerCtxt<'db>, ast: ast::WhereClause) -> Self { + let predicates = ast + .into_iter() + .map(|pred| WherePredicate::lower_ast(ctxt, pred)) + .collect::>(); + Self::new(ctxt.db(), predicates) + } + + pub(super) fn lower_ast_opt( + ctxt: &mut FileLowerCtxt<'db>, + ast: Option, + ) -> Self { + ast.map(|ast| Self::lower_ast(ctxt, ast)) + .unwrap_or_else(|| Self::new(ctxt.db(), Vec::new())) + } +} + +impl<'db> TypeGenericParam<'db> { + fn lower_ast(ctxt: &mut FileLowerCtxt<'db>, ast: ast::TypeGenericParam) -> Self { + let name = IdentId::lower_token_partial(ctxt, ast.name()); + let bounds = ast + .bounds() + .map(|bounds| { + bounds + .into_iter() + .map(|bound| TypeBound::lower_ast(ctxt, bound)) + .collect() + }) + .unwrap_or_default(); + + Self { name, bounds } + } +} + +impl<'db> ConstGenericParam<'db> { + fn lower_ast(ctxt: &mut FileLowerCtxt<'db>, ast: ast::ConstGenericParam) -> Self { + let name = IdentId::lower_token_partial(ctxt, ast.name()); + let ty = TypeId::lower_ast_partial(ctxt, ast.ty()); + Self { name, ty } + } +} + +impl<'db> GenericArg<'db> { + fn lower_ast(ctxt: &mut FileLowerCtxt<'db>, ast: ast::GenericArg) -> Self { + match ast.kind() { + ast::GenericArgKind::Type(type_param) => { + TypeGenericArg::lower_ast(ctxt, type_param).into() + } + ast::GenericArgKind::Const(const_param) => { + ConstGenericArg::lower_ast(ctxt, const_param).into() + } + } + } +} + +impl<'db> TypeGenericArg<'db> { + fn lower_ast(ctxt: &mut FileLowerCtxt<'db>, ast: ast::TypeGenericArg) -> Self { + let ty = TypeId::lower_ast_partial(ctxt, ast.ty()); + Self { ty } + } +} + +impl<'db> ConstGenericArg<'db> { + fn lower_ast(ctxt: &mut FileLowerCtxt<'db>, ast: ast::ConstGenericArg) -> Self { + let body = ast + .expr() + .map(|expr| Body::lower_ast_nameless(ctxt, expr)) + .into(); + + Self { body } + } +} + +impl<'db> GenericParam<'db> { + fn lower_ast(ctxt: &mut FileLowerCtxt<'db>, ast: ast::GenericParam) -> Self { + match ast.kind() { + 
ast::GenericParamKind::Type(type_param) => { + TypeGenericParam::lower_ast(ctxt, type_param).into() + } + ast::GenericParamKind::Const(const_param) => { + ConstGenericParam::lower_ast(ctxt, const_param).into() + } + } + } +} + +impl<'db> FuncParam<'db> { + fn lower_ast(ctxt: &mut FileLowerCtxt<'db>, ast: ast::FuncParam) -> Self { + let is_mut = ast.mut_token().is_some(); + let label = ast.label().map(|ast| FuncParamName::lower_label(ctxt, ast)); + let name = ast.name().map(|ast| FuncParamName::lower_ast(ctxt, ast)); + + let self_ty_fallback = + name.is_some_and(|name| name.is_self(ctxt.db())) && ast.ty().is_none(); + + let ty = if self_ty_fallback { + Partial::Present(TypeId::fallback_self_ty(ctxt.db())) + } else { + TypeId::lower_ast_partial(ctxt, ast.ty()) + }; + + Self { + is_mut, + label, + name: name.into(), + ty, + self_ty_fallback, + } + } +} + +impl<'db> WherePredicate<'db> { + fn lower_ast(ctxt: &mut FileLowerCtxt<'db>, ast: ast::WherePredicate) -> Self { + let ty = TypeId::lower_ast_partial(ctxt, ast.ty()); + let bounds = ast + .bounds() + .map(|bounds| { + bounds + .into_iter() + .map(|bound| TypeBound::lower_ast(ctxt, bound)) + .collect() + }) + .unwrap_or_default(); + Self { ty, bounds } + } +} + +impl<'db> TypeBound<'db> { + fn lower_ast(ctxt: &mut FileLowerCtxt<'db>, ast: ast::TypeBound) -> Self { + if let Some(trait_bound) = ast.trait_bound() { + Self::Trait(TraitRefId::lower_ast(ctxt, trait_bound)) + } else { + Self::Kind(KindBound::lower_ast_opt(ctxt, ast.kind_bound())) + } + } +} + +impl KindBound { + fn lower_ast_opt(_ctxt: &mut FileLowerCtxt<'_>, ast: Option) -> Partial { + let Some(ast) = ast else { + return Partial::Absent; + }; + + if let Some(abs) = ast.abs() { + let lhs = KindBound::lower_ast_opt(_ctxt, abs.lhs()) + .to_opt() + .map(Box::new) + .into(); + + let rhs = KindBound::lower_ast_opt(_ctxt, abs.rhs()) + .to_opt() + .map(Box::new) + .into(); + + Partial::Present(KindBound::Abs(lhs, rhs)) + } else if ast.mono().is_some() { + 
Partial::Present(KindBound::Mono) + } else { + Partial::Absent + } + } +} + +impl<'db> FuncParamName<'db> { + fn lower_ast(ctxt: &mut FileLowerCtxt<'db>, ast: ast::FuncParamName) -> Self { + match ast { + ast::FuncParamName::Ident(name) => { + FuncParamName::Ident(IdentId::lower_token(ctxt, name)) + } + ast::FuncParamName::SelfParam(_) => FuncParamName::Ident(IdentId::make_self(ctxt.db())), + ast::FuncParamName::Underscore(_) => FuncParamName::Underscore, + } + } + + fn lower_label(ctxt: &mut FileLowerCtxt<'db>, ast: ast::FuncParamLabel) -> FuncParamName<'db> { + match ast { + ast::FuncParamLabel::Ident(name) => Self::Ident(IdentId::lower_token(ctxt, name)), + ast::FuncParamLabel::Underscore(_) => Self::Underscore, + } + } +} diff --git a/crates/hir/src/lower/parse.rs b/crates/hir/src/lower/parse.rs new file mode 100644 index 0000000000..becfe83316 --- /dev/null +++ b/crates/hir/src/lower/parse.rs @@ -0,0 +1,65 @@ +use common::{ + diagnostics::{ + CompleteDiagnostic, DiagnosticPass, GlobalErrorCode, LabelStyle, Severity, Span, SpanKind, + SubDiagnostic, + }, + InputFile, +}; +use parser::GreenNode; +use salsa::Accumulator; + +use crate::{diagnostics::DiagnosticVoucher, hir_def::TopLevelMod, HirDb, SpannedHirDb}; + +#[salsa::tracked] +pub(crate) fn parse_file_impl<'db>(db: &'db dyn HirDb, top_mod: TopLevelMod<'db>) -> GreenNode { + let file = top_mod.file(db); + let text = file.text(db.as_input_db()); + let (node, parse_errors) = parser::parse_source_file(text); + + for error in parse_errors { + ParseErrorAccumulator::push(db, ParserError { file, error }); + } + node +} + +#[doc(hidden)] +#[salsa::accumulator] +pub struct ParseErrorAccumulator(pub ParserError); + +impl ParseErrorAccumulator { + fn push(db: &dyn HirDb, err: ParserError) { + Self(err).accumulate(db); + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] + +pub struct ParserError { + file: InputFile, + error: parser::ParseError, +} + +// `ParseError` has span information, but this is not a problem 
because the +// parsing procedure itself depends on the file content, and thus span +// information. +impl<'db> DiagnosticVoucher<'db> for ParserError { + fn error_code(&self) -> GlobalErrorCode { + GlobalErrorCode::new(DiagnosticPass::Parse, 1) + } + + fn to_complete(&self, _db: &'db dyn SpannedHirDb) -> CompleteDiagnostic { + let error_code = self.error_code(); + let span = Span::new(self.file, self.error.range(), SpanKind::Original); + CompleteDiagnostic::new( + Severity::Error, + self.error.msg(), + vec![SubDiagnostic::new( + LabelStyle::Primary, + self.error.label(), + Some(span), + )], + vec![], + error_code, + ) + } +} diff --git a/crates/hir/src/lower/pat.rs b/crates/hir/src/lower/pat.rs new file mode 100644 index 0000000000..71e7da794f --- /dev/null +++ b/crates/hir/src/lower/pat.rs @@ -0,0 +1,86 @@ +use parser::ast; + +use super::body::BodyCtxt; +use crate::{ + hir_def::{pat::*, IdentId, LitKind, PathId}, + span::HirOrigin, +}; + +impl<'db> Pat<'db> { + pub(super) fn lower_ast(ctxt: &mut BodyCtxt<'_, 'db>, ast: ast::Pat) -> PatId { + let pat = match &ast.kind() { + ast::PatKind::WildCard(_) => Pat::WildCard, + + ast::PatKind::Rest(_) => Pat::Rest, + + ast::PatKind::Lit(lit_pat) => { + let lit_kind = lit_pat + .lit() + .map(|lit| LitKind::lower_ast(ctxt.f_ctxt, lit)) + .into(); + Pat::Lit(lit_kind) + } + + ast::PatKind::Tuple(tup) => { + let elems = match tup.elems() { + Some(elems) => elems.iter().map(|pat| Pat::lower_ast(ctxt, pat)).collect(), + None => vec![], + }; + Pat::Tuple(elems) + } + + ast::PatKind::Path(path_ast) => { + let path = PathId::lower_ast_partial(ctxt.f_ctxt, path_ast.path()); + Pat::Path(path, path_ast.mut_token().is_some()) + } + + ast::PatKind::PathTuple(path_tup) => { + let path = PathId::lower_ast_partial(ctxt.f_ctxt, path_tup.path()); + let elems = match path_tup.elems() { + Some(elems) => elems.iter().map(|pat| Pat::lower_ast(ctxt, pat)).collect(), + None => vec![], + }; + Pat::PathTuple(path, elems) + } + + 
ast::PatKind::Record(record) => { + let path = PathId::lower_ast_partial(ctxt.f_ctxt, record.path()); + let fields = match record.fields() { + Some(fields) => fields + .iter() + .map(|f| RecordPatField::lower_ast(ctxt, &f)) + .collect(), + None => vec![], + }; + Pat::Record(path, fields) + } + + ast::PatKind::Or(or) => { + let lhs = Self::lower_ast_opt(ctxt, or.lhs()); + let rhs = Self::lower_ast_opt(ctxt, or.rhs()); + Pat::Or(lhs, rhs) + } + }; + + ctxt.push_pat(pat, HirOrigin::raw(&ast)) + } + + pub(super) fn lower_ast_opt(ctxt: &mut BodyCtxt<'_, 'db>, ast: Option) -> PatId { + if let Some(ast) = ast { + Pat::lower_ast(ctxt, ast) + } else { + ctxt.push_missing_pat() + } + } +} + +impl<'db> RecordPatField<'db> { + fn lower_ast(ctxt: &mut BodyCtxt<'_, 'db>, ast: &ast::RecordPatField) -> RecordPatField<'db> { + let label = IdentId::lower_token_partial(ctxt.f_ctxt, ast.name()); + let pat = ast + .pat() + .map(|pat| Pat::lower_ast(ctxt, pat)) + .unwrap_or_else(|| ctxt.push_missing_pat()); + RecordPatField { label, pat } + } +} diff --git a/crates/hir/src/lower/path.rs b/crates/hir/src/lower/path.rs new file mode 100644 index 0000000000..8af54e760f --- /dev/null +++ b/crates/hir/src/lower/path.rs @@ -0,0 +1,38 @@ +use crate::hir_def::{GenericArgListId, IdentId, Partial, PathId}; +use parser::ast::{self, GenericArgsOwner}; + +use super::FileLowerCtxt; + +impl<'db> PathId<'db> { + pub(super) fn lower_ast(ctxt: &mut FileLowerCtxt<'db>, ast: ast::Path) -> Self { + let db = ctxt.db(); + + let mut path: Option = None; + for seg in ast.into_iter() { + let ident = match seg.kind() { + Some(ast::PathSegmentKind::Ingot(_)) => Some(IdentId::make_ingot(db)), + Some(ast::PathSegmentKind::Super(_)) => Some(IdentId::make_super(db)), + Some(ast::PathSegmentKind::SelfTy(_)) => Some(IdentId::make_self_ty(db)), + Some(ast::PathSegmentKind::Self_(_)) => Some(IdentId::make_self(db)), + Some(ast::PathSegmentKind::Ident(ident)) => Some(IdentId::lower_token(ctxt, ident)), + None => None, + } 
+ .into(); + + let generic_args = GenericArgListId::lower_ast_opt(ctxt, seg.generic_args()); + + path = path + .map(|p| p.push(db, ident, generic_args)) + .or_else(|| Some(Self::new(db, ident, generic_args, None))) + } + + path.expect("ast::Path must contain at least 1 segment") + } + + pub(super) fn lower_ast_partial( + ctxt: &mut FileLowerCtxt<'db>, + ast: Option, + ) -> Partial { + ast.map(|ast| Self::lower_ast(ctxt, ast)).into() + } +} diff --git a/crates/hir/src/lower/scope_builder.rs b/crates/hir/src/lower/scope_builder.rs new file mode 100644 index 0000000000..a60e86af7e --- /dev/null +++ b/crates/hir/src/lower/scope_builder.rs @@ -0,0 +1,498 @@ +use cranelift_entity::{entity_impl, PrimaryMap}; +use rustc_hash::{FxHashMap, FxHashSet}; + +use crate::{ + hir_def::{ + scope_graph::{EdgeKind, FieldParent, Scope, ScopeEdge, ScopeGraph, ScopeId}, + Body, ExprId, FieldDefListId, FuncParamListId, FuncParamName, GenericParamListId, ItemKind, + TopLevelMod, TrackedItemId, TrackedItemVariant, Use, VariantDefListId, VariantKind, + Visibility, + }, + HirDb, +}; + +/// An [`ScopeGraph`] builder that is used to construct the scope in the hir +/// lowering phase. +// +// The difficulty in constructing a scope graph lies in that the ScopeId must +// hold the corresponding HIR node to represent the scope. However, because HIR +// nodes tracked by salsa are immutable, it is only possible to create HIR nodes +// once the lowering of the item is completely finished. This means that a +// ScopeId can only be constructed after the completion of lowering, or at the +// end point of the scope. +// +// Therefore, the builder's `enter_*_scope` method group does not take any +// concrete item information as arguments. When the `enter_*_scope` method group +// is called, the builder constructs a dummy scope and sets up the relationship +// between this dummy scope and other scopes. 
Then, when the `leave_*_scope` +// method group is called, the builder substitutes the dummy scope with the real +// scope while keeping the relationship between scopes intact. +pub(super) struct ScopeGraphBuilder<'db> { + pub(super) db: &'db dyn HirDb, + pub(super) top_mod: TopLevelMod<'db>, + graph: IntermediateScopeGraph<'db>, + scope_stack: Vec, + module_stack: Vec, + id_stack: Vec>, + declared_blocks: Vec>>, +} + +impl<'db> ScopeGraphBuilder<'db> { + pub(super) fn enter_top_mod(db: &'db dyn HirDb, top_mod: TopLevelMod<'db>) -> Self { + let mut builder = Self { + db, + top_mod, + graph: IntermediateScopeGraph::default(), + scope_stack: Default::default(), + module_stack: Default::default(), + id_stack: Default::default(), + declared_blocks: vec![], + }; + + let id = TrackedItemId::new(db, TrackedItemVariant::TopLevelMod(top_mod.name(db))); + builder.enter_item_scope(id, true); + builder + } + + pub(super) fn build(self) -> ScopeGraph<'db> { + self.graph.build(self.top_mod) + } + + pub(super) fn enter_item_scope(&mut self, id: TrackedItemId<'db>, is_mod: bool) { + self.id_stack.push(id); + self.enter_scope_impl(is_mod); + } + + pub(super) fn enter_body_scope(&mut self, id: TrackedItemId<'db>) { + self.declared_blocks.push(FxHashMap::default()); + self.enter_item_scope(id, false); + } + + pub(super) fn leave_item_scope(&mut self, item: ItemKind<'db>) { + use ItemKind::*; + + self.id_stack.pop(); + let item_node = self.scope_stack.pop().unwrap(); + self.initialize_item_scope(item_node, item); + + if let ItemKind::TopMod(top_mod) = item { + debug_assert!(self.scope_stack.is_empty()); + self.graph.add_edge(item_node, item_node, EdgeKind::self_()); + + self.graph.add_external_edge( + item_node, + ScopeId::Item(top_mod.ingot(self.db).root_mod(self.db).into()), + EdgeKind::ingot(), + ); + for child in top_mod.child_top_mods(self.db) { + let child_name = child.name(self.db); + let edge = EdgeKind::mod_(child_name); + self.graph + .add_external_edge(item_node, 
ScopeId::Item(child.into()), edge) + } + + if let Some(parent) = top_mod.parent(self.db) { + let edge = EdgeKind::super_(); + self.graph + .add_external_edge(item_node, ScopeId::Item(parent.into()), edge); + } + self.module_stack.pop().unwrap(); + + return; + } + + let parent_node = *self.scope_stack.last().unwrap(); + let parent_to_child_edge = match item { + Mod(inner) => { + self.module_stack.pop().unwrap(); + + self.graph.add_edge( + item_node, + *self.module_stack.last().unwrap(), + EdgeKind::super_(), + ); + self.graph.add_external_edge( + item_node, + ScopeId::Item(self.top_mod.ingot(self.db).root_mod(self.db).into()), + EdgeKind::ingot(), + ); + self.graph.add_edge(item_node, item_node, EdgeKind::self_()); + + inner + .name(self.db) + .to_opt() + .map(EdgeKind::mod_) + .unwrap_or_else(EdgeKind::anon) + } + + Func(inner) => { + self.graph.add_lex_edge(item_node, parent_node); + self.add_generic_param_scope( + item_node, + inner.into(), + inner.generic_params(self.db), + ); + if let Some(params) = inner.params(self.db).to_opt() { + self.add_func_param_scope(item_node, inner.into(), params); + } + inner + .name(self.db) + .to_opt() + .map(EdgeKind::value) + .unwrap_or_else(EdgeKind::anon) + } + + Struct(inner) => { + self.graph.add_lex_edge(item_node, parent_node); + self.add_field_scope( + item_node, + FieldParent::Item(inner.into()), + inner.fields(self.db), + ); + self.add_generic_param_scope( + item_node, + inner.into(), + inner.generic_params(self.db), + ); + self.graph + .add_edge(item_node, item_node, EdgeKind::self_ty()); + inner + .name(self.db) + .to_opt() + .map(EdgeKind::type_) + .unwrap_or_else(EdgeKind::anon) + } + + Contract(inner) => { + self.graph.add_lex_edge(item_node, parent_node); + self.add_field_scope( + item_node, + FieldParent::Item(inner.into()), + inner.fields(self.db), + ); + self.graph + .add_edge(item_node, item_node, EdgeKind::self_ty()); + + inner + .name(self.db) + .to_opt() + .map(EdgeKind::type_) + 
.unwrap_or_else(EdgeKind::anon) + } + + Enum(inner) => { + self.graph.add_lex_edge(item_node, parent_node); + self.add_variant_scope(item_node, inner.into(), inner.variants(self.db)); + self.add_generic_param_scope( + item_node, + inner.into(), + inner.generic_params(self.db), + ); + self.graph + .add_edge(item_node, item_node, EdgeKind::self_ty()); + inner + .name(self.db) + .to_opt() + .map(EdgeKind::type_) + .unwrap_or_else(EdgeKind::anon) + } + + TypeAlias(inner) => { + self.graph.add_lex_edge(item_node, parent_node); + self.add_generic_param_scope( + item_node, + inner.into(), + inner.generic_params(self.db), + ); + inner + .name(self.db) + .to_opt() + .map(EdgeKind::type_) + .unwrap_or_else(EdgeKind::anon) + } + + Impl(inner) => { + self.graph.add_lex_edge(item_node, parent_node); + self.add_generic_param_scope( + item_node, + inner.into(), + inner.generic_params(self.db), + ); + self.graph + .add_edge(item_node, item_node, EdgeKind::self_ty()); + EdgeKind::anon() + } + + Trait(inner) => { + self.graph.add_lex_edge(item_node, parent_node); + self.add_generic_param_scope( + item_node, + inner.into(), + inner.generic_params(self.db), + ); + self.graph + .add_edge(item_node, item_node, EdgeKind::self_ty()); + inner + .name(self.db) + .to_opt() + .map(EdgeKind::trait_) + .unwrap_or_else(EdgeKind::anon) + } + + ImplTrait(inner) => { + self.graph.add_lex_edge(item_node, parent_node); + self.add_generic_param_scope( + item_node, + inner.into(), + inner.generic_params(self.db), + ); + self.graph + .add_edge(item_node, item_node, EdgeKind::self_ty()); + EdgeKind::anon() + } + + Const(inner) => { + self.graph.add_lex_edge(item_node, parent_node); + inner + .name(self.db) + .to_opt() + .map(EdgeKind::value) + .unwrap_or_else(EdgeKind::anon) + } + + Use(use_) => { + self.graph.unresolved_uses.insert(use_); + + self.graph.add_lex_edge(item_node, parent_node); + EdgeKind::anon() + } + + Body(body) => { + self.graph.add_lex_edge(item_node, parent_node); + for (node, block) 
in self.declared_blocks.pop().unwrap() { + let block = block.unwrap(); + self.finalize_block_scope(node, body, block); + } + EdgeKind::anon() + } + + _ => unreachable!(), + }; + + self.graph + .add_edge(parent_node, item_node, parent_to_child_edge); + } + + pub(super) fn joined_id(&self, variant: TrackedItemVariant<'db>) -> TrackedItemId<'db> { + self.id_stack.last().unwrap().join(self.db, variant) + } + + pub(super) fn enter_block_scope(&mut self) { + let node = self.enter_scope_impl(false); + self.declared_blocks.last_mut().unwrap().insert(node, None); + } + + pub(super) fn leave_block_scope(&mut self, block: ExprId) { + let block_node = self.scope_stack.pop().unwrap(); + let parent_node = *self.scope_stack.last().unwrap(); + *self + .declared_blocks + .last_mut() + .unwrap() + .get_mut(&block_node) + .unwrap() = Some(block); + self.graph.add_lex_edge(block_node, parent_node); + self.graph + .add_edge(parent_node, block_node, EdgeKind::anon()); + } + + fn enter_scope_impl(&mut self, is_mod: bool) -> NodeId { + // Create dummy scope, the scope kind is initialized when leaving the scope. 
+ let (dummy_scope_id, dummy_scope) = self.dummy_scope(); + let id = self.graph.push(dummy_scope_id, dummy_scope); + self.scope_stack.push(id); + if is_mod { + self.module_stack.push(id); + } + id + } + + fn initialize_item_scope(&mut self, node: NodeId, item: ItemKind<'db>) { + self.graph.initialize_item_scope(self.db, node, item) + } + + fn finalize_block_scope(&mut self, node: NodeId, body: Body<'db>, block: ExprId) { + self.graph.finalize_block_scope(node, body, block); + } + + fn add_field_scope( + &mut self, + parent_node: NodeId, + parent: FieldParent<'db>, + fields: FieldDefListId<'db>, + ) { + for (i, field) in fields.data(self.db).iter().enumerate() { + let scope_id = ScopeId::Field(parent, i); + let scope_data = Scope::new(scope_id, field.vis); + + let field_node = self.graph.push(scope_id, scope_data); + self.graph.add_lex_edge(field_node, parent_node); + let kind = field + .name + .to_opt() + .map(EdgeKind::field) + .unwrap_or_else(EdgeKind::anon); + self.graph.add_edge(parent_node, field_node, kind) + } + } + + fn add_variant_scope( + &mut self, + parent_node: NodeId, + parent_item: ItemKind<'db>, + variants: VariantDefListId<'db>, + ) { + let parent_vis = parent_item.vis(self.db); + + for (i, variant) in variants.data(self.db).iter().enumerate() { + let scope_id = ScopeId::Variant(parent_item, i); + let scope_data = Scope::new(scope_id, parent_vis); + + let variant_node = self.graph.push(scope_id, scope_data); + self.graph.add_lex_edge(variant_node, parent_node); + let kind = variant + .name + .to_opt() + .map(EdgeKind::variant) + .unwrap_or_else(EdgeKind::anon); + + if let VariantKind::Record(fields) = variant.kind { + self.add_field_scope(variant_node, FieldParent::Variant(parent_item, i), fields) + } + + self.graph.add_edge(parent_node, variant_node, kind) + } + } + + fn add_func_param_scope( + &mut self, + parent_node: NodeId, + parent_item: ItemKind<'db>, + params: FuncParamListId<'db>, + ) { + for (i, param) in 
params.data(self.db).iter().enumerate() { + let scope_id = ScopeId::FuncParam(parent_item, i); + let scope = Scope::new(scope_id, Visibility::Private); + let func_param_node = self.graph.push(scope_id, scope); + + self.graph.add_lex_edge(func_param_node, parent_node); + let kind = param + .name + .to_opt() + .map(|name| match name { + FuncParamName::Ident(ident) => EdgeKind::value(ident), + FuncParamName::Underscore => EdgeKind::anon(), + }) + .unwrap_or_else(EdgeKind::anon); + self.graph.add_edge(parent_node, func_param_node, kind) + } + } + + fn add_generic_param_scope( + &mut self, + parent_node: NodeId, + parent_item: ItemKind<'db>, + params: GenericParamListId<'db>, + ) { + for (i, param) in params.data(self.db).iter().enumerate() { + let scope_id = ScopeId::GenericParam(parent_item, i); + let scope = Scope::new(scope_id, Visibility::Private); + + let generic_param_node = self.graph.push(scope_id, scope); + self.graph.add_lex_edge(generic_param_node, parent_node); + let kind = param + .name() + .to_opt() + .map(EdgeKind::generic_param) + .unwrap_or_else(EdgeKind::anon); + self.graph.add_edge(parent_node, generic_param_node, kind) + } + } + + fn dummy_scope(&self) -> (ScopeId<'db>, Scope<'db>) { + let scope_id = ScopeId::Item(self.top_mod.into()); + (scope_id, Scope::new(scope_id, Visibility::Public)) + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +struct NodeId(u32); +entity_impl!(NodeId); + +#[derive(Default)] +struct IntermediateScopeGraph<'db> { + nodes: PrimaryMap, Scope<'db>)>, + edges: FxHashMap)>>, + unresolved_uses: FxHashSet>, +} + +impl<'db> IntermediateScopeGraph<'db> { + fn build(mut self, top_mod: TopLevelMod<'db>) -> ScopeGraph<'db> { + for (from_node, edges) in self.edges { + for (dest_node, kind) in edges { + let dest = self.nodes[dest_node].0; + let edge = ScopeEdge { dest, kind }; + self.nodes[from_node].1.edges.insert(edge); + } + } + + let scopes = self + .nodes + .into_iter() + .map(|(_, (id, data))| (id, data)) + .collect(); 
+ + ScopeGraph { + top_mod, + scopes, + unresolved_uses: self.unresolved_uses, + } + } + + fn push(&mut self, scope_id: ScopeId<'db>, scope_data: Scope<'db>) -> NodeId { + self.nodes.push((scope_id, scope_data)) + } + + fn initialize_item_scope(&mut self, db: &dyn HirDb, node: NodeId, item: ItemKind<'db>) { + let scope_id = ScopeId::Item(item); + + let scope_data = &mut self.nodes[node]; + scope_data.0 = scope_id; + scope_data.1.id = scope_id; + scope_data.1.vis = item.vis(db); + } + + fn finalize_block_scope(&mut self, node: NodeId, body: Body<'db>, block: ExprId) { + let scope_id = ScopeId::Block(body, block); + let scope_data = &mut self.nodes[node]; + scope_data.0 = scope_id; + scope_data.1.id = scope_id; + scope_data.1.vis = Visibility::Private; + } + + fn add_lex_edge(&mut self, child: NodeId, parent: NodeId) { + self.edges + .entry(child) + .or_default() + .push((parent, EdgeKind::lex())); + } + + fn add_edge(&mut self, from: NodeId, dest: NodeId, kind: EdgeKind<'db>) { + self.edges.entry(from).or_default().push((dest, kind)); + } + + /// Add an edge to the graph that is not part of the current file. 
+ fn add_external_edge(&mut self, from: NodeId, dest: ScopeId<'db>, kind: EdgeKind<'db>) { + self.nodes[from].1.edges.insert(ScopeEdge { dest, kind }); + } +} diff --git a/crates/hir/src/lower/stmt.rs b/crates/hir/src/lower/stmt.rs new file mode 100644 index 0000000000..ca3b84b714 --- /dev/null +++ b/crates/hir/src/lower/stmt.rs @@ -0,0 +1,63 @@ +use parser::ast::{self, prelude::*}; + +use super::body::BodyCtxt; +use crate::{ + hir_def::{stmt::*, Expr, Pat, TypeId}, + span::HirOrigin, +}; + +impl<'db> Stmt<'db> { + pub(super) fn push_to_body(ctxt: &mut BodyCtxt<'_, 'db>, ast: ast::Stmt) -> StmtId { + let (stmt, origin_kind) = match ast.kind() { + ast::StmtKind::Let(let_) => { + let pat = Pat::lower_ast_opt(ctxt, let_.pat()); + let ty = let_ + .type_annotation() + .map(|ty| TypeId::lower_ast(ctxt.f_ctxt, ty)); + let init = let_.initializer().map(|init| Expr::lower_ast(ctxt, init)); + (Stmt::Let(pat, ty, init), HirOrigin::raw(&ast)) + } + ast::StmtKind::For(for_) => { + let bind = Pat::lower_ast_opt(ctxt, for_.pat()); + let iter = Expr::push_to_body_opt(ctxt, for_.iterable()); + let body = Expr::push_to_body_opt( + ctxt, + for_.body() + .and_then(|body| ast::Expr::cast(body.syntax().clone())), + ); + + (Stmt::For(bind, iter, body), HirOrigin::raw(&ast)) + } + + ast::StmtKind::While(while_) => { + let cond = Expr::push_to_body_opt(ctxt, while_.cond()); + let body = Expr::push_to_body_opt( + ctxt, + while_ + .body() + .and_then(|body| ast::Expr::cast(body.syntax().clone())), + ); + + (Stmt::While(cond, body), HirOrigin::raw(&ast)) + } + + ast::StmtKind::Continue(_) => (Stmt::Continue, HirOrigin::raw(&ast)), + + ast::StmtKind::Break(_) => (Stmt::Break, HirOrigin::raw(&ast)), + + ast::StmtKind::Return(ret) => { + let expr = ret + .has_value() + .then(|| Expr::push_to_body_opt(ctxt, ret.expr())); + (Stmt::Return(expr), HirOrigin::raw(&ast)) + } + + ast::StmtKind::Expr(expr) => { + let expr = Expr::push_to_body_opt(ctxt, expr.expr()); + (Stmt::Expr(expr), 
HirOrigin::raw(&ast)) + } + }; + + ctxt.push_stmt(stmt, origin_kind) + } +} diff --git a/crates/hir/src/lower/types.rs b/crates/hir/src/lower/types.rs new file mode 100644 index 0000000000..eb5873d176 --- /dev/null +++ b/crates/hir/src/lower/types.rs @@ -0,0 +1,73 @@ +use parser::ast::{self, prelude::*}; + +use super::FileLowerCtxt; +use crate::hir_def::{ + Body, GenericArgListId, Partial, PathId, TraitRefId, TupleTypeId, TypeId, TypeKind, +}; + +impl<'db> TypeId<'db> { + pub(super) fn lower_ast(ctxt: &mut FileLowerCtxt<'db>, ast: ast::Type) -> Self { + let kind = match ast.kind() { + ast::TypeKind::Ptr(ty) => { + let inner = Self::lower_ast_partial(ctxt, ty.inner()); + TypeKind::Ptr(inner) + } + + ast::TypeKind::Path(ty) => { + let path = PathId::lower_ast_partial(ctxt, ty.path()); + TypeKind::Path(path) + } + + ast::TypeKind::SelfType(ty) => { + let generic_args = GenericArgListId::lower_ast_opt(ctxt, ty.generic_args()); + TypeKind::SelfType(generic_args) + } + + ast::TypeKind::Tuple(ty) => TypeKind::Tuple(TupleTypeId::lower_ast(ctxt, ty)), + + ast::TypeKind::Array(ty) => { + let elem_ty = Self::lower_ast_partial(ctxt, ty.elem_ty()); + let body = ty + .len() + .map(|ast| Body::lower_ast_nameless(ctxt, ast)) + .into(); + TypeKind::Array(elem_ty, body) + } + + ast::TypeKind::Never(_) => TypeKind::Never, + }; + + TypeId::new(ctxt.db(), kind) + } + + pub(super) fn lower_ast_partial( + ctxt: &mut FileLowerCtxt<'db>, + ast: Option, + ) -> Partial { + ast.map(|ast| Self::lower_ast(ctxt, ast)).into() + } +} + +impl<'db> TupleTypeId<'db> { + pub(super) fn lower_ast(ctxt: &mut FileLowerCtxt<'db>, ast: ast::TupleType) -> Self { + let mut elem_tys = Vec::new(); + for elem in ast { + elem_tys.push(Some(TypeId::lower_ast(ctxt, elem)).into()); + } + TupleTypeId::new(ctxt.db(), elem_tys) + } +} + +impl<'db> TraitRefId<'db> { + pub(super) fn lower_ast(ctxt: &mut FileLowerCtxt<'db>, ast: ast::TraitRef) -> Self { + let path = ast.path().map(|ast| PathId::lower_ast(ctxt, 
ast)).into(); + Self::new(ctxt.db(), path) + } + + pub(super) fn lower_ast_partial( + ctxt: &mut FileLowerCtxt<'db>, + ast: Option, + ) -> Partial { + ast.map(|ast| Self::lower_ast(ctxt, ast)).into() + } +} diff --git a/crates/hir/src/lower/use_tree.rs b/crates/hir/src/lower/use_tree.rs new file mode 100644 index 0000000000..e3923340e0 --- /dev/null +++ b/crates/hir/src/lower/use_tree.rs @@ -0,0 +1,174 @@ +use parser::ast::{self, prelude::*}; + +use super::FileLowerCtxt; +use crate::{ + hir_def::{use_tree::*, IdentId, ItemModifier, Partial, TrackedItemVariant, Use}, + span::{HirOrigin, UseDesugared}, +}; + +impl<'db> Use<'db> { + pub(super) fn lower_ast(ctxt: &mut FileLowerCtxt<'db>, ast: ast::Use) -> Vec { + let vis = ItemModifier::lower_ast(ast.modifier()).to_visibility(); + + let Some(use_tree) = ast.use_tree() else { + let id = ctxt.joined_id(TrackedItemVariant::Use(Partial::Absent)); + ctxt.enter_item_scope(id, false); + let path = Partial::Absent; + let alias = None; + let top_mod = ctxt.top_mod(); + let origin = HirOrigin::raw(&ast); + let use_ = Self::new(ctxt.db(), id, path, alias, vis, top_mod, origin); + ctxt.leave_item_scope(use_); + return vec![use_]; + }; + + // If the use tree has no subtree, then there is no need to decompose it. 
+ if !use_tree.has_subtree() { + let path = UsePathId::lower_ast_partial(ctxt, use_tree.path()); + let id = ctxt.joined_id(TrackedItemVariant::Use(path)); + ctxt.enter_item_scope(id, false); + let alias = use_tree + .alias() + .map(|alias| UseAlias::lower_ast_partial(ctxt, alias)); + let top_mod = ctxt.top_mod(); + let origin = HirOrigin::raw(&ast); + let use_ = Self::new(ctxt.db(), id, path, alias, vis, top_mod, origin); + ctxt.leave_item_scope(use_); + return vec![use_]; + } + + let decomposed_paths = decompose_tree(ctxt, ast, use_tree); + decomposed_paths + .into_iter() + .map(|(path, alias, origin)| { + let id = ctxt.joined_id(TrackedItemVariant::Use(path)); + ctxt.enter_item_scope(id, false); + let top_mod = ctxt.top_mod(); + let origin = HirOrigin::desugared(origin); + let use_ = Self::new(ctxt.db(), id, path, alias, vis, top_mod, origin); + ctxt.leave_item_scope(use_) + }) + .collect() + } +} + +impl<'db> UsePathId<'db> { + fn lower_ast_partial( + ctxt: &mut FileLowerCtxt<'db>, + ast: Option, + ) -> Partial { + let Some(ast) = ast else { + return Partial::Absent; + }; + + let segments = ast + .into_iter() + .map(|ast| UsePathSegment::lower_ast_partial(ctxt, ast)) + .collect::>(); + Some(Self::new(ctxt.db(), segments)).into() + } + + fn from_segments( + ctxt: &mut FileLowerCtxt<'db>, + ast_segs: Vec, + ) -> Partial { + if ast_segs.is_empty() { + Partial::Absent + } else { + let segs = ast_segs + .into_iter() + .map(|seg| UsePathSegment::lower_ast_partial(ctxt, seg)) + .collect::>(); + Partial::Present(Self::new(ctxt.db(), segs)) + } + } +} + +impl<'db> UsePathSegment<'db> { + fn lower_ast_partial(ctxt: &mut FileLowerCtxt<'db>, ast: ast::UsePathSegment) -> Partial { + let db = ctxt.db(); + + ast.kind() + .map(|kind| match kind { + ast::UsePathSegmentKind::Ingot(_) => Self::Ident(IdentId::make_ingot(db)), + ast::UsePathSegmentKind::Super(_) => Self::Ident(IdentId::make_super(db)), + ast::UsePathSegmentKind::Ident(ident) => { + 
Self::Ident(IdentId::lower_token(ctxt, ident)) + } + ast::UsePathSegmentKind::Self_(_) => Self::Ident(IdentId::make_self(db)), + ast::UsePathSegmentKind::Glob(_) => Self::Glob, + }) + .into() + } +} + +impl<'db> UseAlias<'db> { + pub(super) fn lower_ast_partial( + ctxt: &mut FileLowerCtxt<'db>, + ast: ast::UseAlias, + ) -> Partial { + if let Some(ident) = ast.ident() { + Some(Self::Ident(IdentId::lower_token(ctxt, ident))) + } else if ast.underscore().is_some() { + Some(Self::Underscore) + } else { + None + } + .into() + } +} + +fn decompose_tree<'db>( + ctxt: &mut FileLowerCtxt<'db>, + ast: ast::Use, + use_tree: ast::UseTree, +) -> Vec<( + Partial>, + Option>>, + UseDesugared, +)> { + let use_desugared = UseDesugared::new(&ast); + decompose_subtree(ctxt, use_tree, (vec![], use_desugared)) + .into_iter() + .map(|(ast_segs, alias, desugared)| { + let path = UsePathId::from_segments(ctxt, ast_segs); + (path, alias, desugared) + }) + .collect() +} + +fn decompose_subtree<'db>( + ctxt: &mut FileLowerCtxt<'db>, + subtree: ast::UseTree, + succ: (Vec, UseDesugared), +) -> Vec<( + Vec, + Option>>, + UseDesugared, +)> { + let (mut succ_path, mut succ_desugared) = succ; + if let Some(path) = subtree.path() { + for seg in path { + succ_desugared.push_seg(&seg); + succ_path.push(seg.clone()); + } + } + + if let Some(alias) = subtree.alias() { + succ_desugared.add_alias(&alias); + let alias = UseAlias::lower_ast_partial(ctxt, alias); + assert!(subtree.children().is_none()); + return vec![(succ_path, Some(alias), succ_desugared)]; + } + + let Some(children) = subtree.children() else { + return vec![(succ_path, None, succ_desugared)]; + }; + + children + .into_iter() + .flat_map(|subtree| { + decompose_subtree(ctxt, subtree, (succ_path.clone(), succ_desugared.clone())) + }) + .collect() +} diff --git a/crates/hir/src/span/attr.rs b/crates/hir/src/span/attr.rs new file mode 100644 index 0000000000..6bda632d4c --- /dev/null +++ b/crates/hir/src/span/attr.rs @@ -0,0 +1,66 @@ +use 
parser::ast; + +use super::define_lazy_span_node; + +define_lazy_span_node!( + LazyAttrListSpan, + ast::AttrList, + @idx { + (attr, LazyAttrSpan), + } +); + +define_lazy_span_node!(LazyAttrSpan); +impl<'db> LazyAttrSpan<'db> { + pub fn into_normal_attr(&self) -> LazyNormalAttrSpan<'db> { + self.clone().into_normal_attr_moved() + } + + pub fn into_normal_attr_moved(self) -> LazyNormalAttrSpan<'db> { + LazyNormalAttrSpan(self.0) + } + + pub fn into_doc_comment_attr(&self) -> LazyDocCommentAttrSpan<'db> { + self.clone().into_doc_comment_attr_moved() + } + + pub fn into_doc_comment_attr_moved(self) -> LazyDocCommentAttrSpan<'db> { + LazyDocCommentAttrSpan(self.0) + } +} + +define_lazy_span_node!( + LazyNormalAttrSpan, + ast::NormalAttr, + @token { + (name, name), + } + @node { + (args, args, LazyAttrArgListSpan), + } +); + +define_lazy_span_node!( + LazyDocCommentAttrSpan, + ast::DocCommentAttr, + @token { + (doc, doc), + } +); + +define_lazy_span_node!( + LazyAttrArgListSpan, + ast::AttrArgList, + @idx { + (arg, LazyAttrArgSpan), + } +); + +define_lazy_span_node!( + LazyAttrArgSpan, + ast::AttrArg, + @token { + (key, key), + (value, value), + } +); diff --git a/crates/hir/src/span/expr.rs b/crates/hir/src/span/expr.rs new file mode 100644 index 0000000000..cbb3a150c8 --- /dev/null +++ b/crates/hir/src/span/expr.rs @@ -0,0 +1,247 @@ +use parser::ast; + +use super::{ + body_source_map, define_lazy_span_node, + transition::{ChainInitiator, ResolvedOrigin, SpanTransitionChain}, +}; +use crate::{ + hir_def::{Body, ExprId}, + span::{params::LazyGenericArgListSpan, path::LazyPathSpan, LazyLitSpan, LazySpanAtom}, + SpannedHirDb, +}; + +define_lazy_span_node!(LazyExprSpan, ast::Expr,); +impl<'db> LazyExprSpan<'db> { + pub fn new(body: Body<'db>, expr: ExprId) -> Self { + let root = ExprRoot { expr, body }; + Self(SpanTransitionChain::new(root)) + } + + pub fn into_lit_expr(self) -> LazyLitExprSpan<'db> { + LazyLitExprSpan(self.0) + } + + pub fn into_bin_expr(self) -> 
LazyBinExprSpan<'db> { + LazyBinExprSpan(self.0) + } + + pub fn into_un_expr(self) -> LazyUnExprSpan<'db> { + LazyUnExprSpan(self.0) + } + + pub fn into_call_expr(self) -> LazyCallExprSpan<'db> { + LazyCallExprSpan(self.0) + } + + pub fn into_method_call_expr(self) -> LazyMethodCallExprSpan<'db> { + LazyMethodCallExprSpan(self.0) + } + + pub fn into_path_expr(self) -> LazyPathExprSpan<'db> { + LazyPathExprSpan(self.0) + } + + pub fn into_record_init_expr(self) -> LazyRecordInitExprSpan<'db> { + LazyRecordInitExprSpan(self.0) + } + + pub fn into_field_expr(self) -> LazyFieldExprSpan<'db> { + LazyFieldExprSpan(self.0) + } + + pub fn into_match_expr(self) -> LazyMatchExprSpan<'db> { + LazyMatchExprSpan(self.0) + } + + pub fn into_aug_assign_expr(self) -> LazyAugAssignExprSpan<'db> { + LazyAugAssignExprSpan(self.0) + } + + pub fn into_assign_expr(self) -> LazyAssignExprSpan<'db> { + LazyAssignExprSpan(self.0) + } +} + +define_lazy_span_node! { + LazyLitExprSpan, + ast::LitExpr, + @node { + (lit, lit, LazyLitSpan), + } +} + +define_lazy_span_node!( + LazyBinExprSpan, + ast::BinExpr, + @node { + (op, op, LazySpanAtom), + } +); + +define_lazy_span_node!( + LazyAssignExprSpan, + ast::AssignExpr, + @token { + (eq, eq), + } +); + +define_lazy_span_node!( + LazyAugAssignExprSpan, + ast::AugAssignExpr, + @node { + (op, op, LazySpanAtom), + } +); + +define_lazy_span_node!( + LazyUnExprSpan, + ast::UnExpr, + @node { + (op, op, LazySpanAtom), + } +); + +define_lazy_span_node!( + LazyCallExprSpan, + ast::CallExpr, + @node { + (args, args, LazyCallArgListSpan), + } +); + +define_lazy_span_node!( + LazyMethodCallExprSpan, + ast::MethodCallExpr, + @token { + (method_name, method_name), + } + @node { + (generic_args, generic_args, LazyGenericArgListSpan), + (args, args, LazyCallArgListSpan), + } +); + +define_lazy_span_node! 
{ + LazyPathExprSpan, + ast::PathExpr, + @node { + (path, path, LazyPathSpan), + } +} + +define_lazy_span_node!( + LazyRecordInitExprSpan, + ast::RecordInitExpr, + @node { + (path, path, LazyPathSpan), + (fields, fields, LazyFieldListSpan), + } +); + +define_lazy_span_node!( + LazyFieldExprSpan, + ast::FieldExpr, + @token { + (accessor, name_or_index), + } +); + +define_lazy_span_node!( + LazyMatchExprSpan, + ast::MatchExpr, + @node { + (arms, arms, LazyMatchArmListSpan), + } +); + +define_lazy_span_node!( + LazyCallArgListSpan, + ast::CallArgList, + @idx { + (arg, LazyCallArgSpan), + } +); + +define_lazy_span_node!( + LazyCallArgSpan, + ast::CallArg, + @token { + (label, label), + } + @node { + (expr, expr, LazyExprSpan), + } +); + +define_lazy_span_node!( + LazyFieldListSpan, + ast::FieldList, + @idx { + (field, LazyFieldSpan), + } +); + +define_lazy_span_node!( + LazyFieldSpan, + ast::RecordField, + @token { + (label, label), + } +); + +define_lazy_span_node!( + LazyMatchArmListSpan, + ast::MatchArmList, + @idx { + (arm, LazyMatchArmSpan), + } +); + +define_lazy_span_node!(LazyMatchArmSpan); + +#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)] +pub(crate) struct ExprRoot<'db> { + expr: ExprId, + pub(crate) body: Body<'db>, +} + +impl ChainInitiator for ExprRoot<'_> { + fn init(&self, db: &dyn SpannedHirDb) -> ResolvedOrigin { + let source_map = body_source_map(db, self.body); + let origin = source_map.expr_map.node_to_source(self.expr); + let top_mod = self.body.top_mod(db.as_hir_db()); + ResolvedOrigin::resolve(db, top_mod, origin) + } +} + +#[cfg(test)] +mod tests { + use crate::{ + hir_def::{ArithBinOp, Body, Expr}, + test_db::TestDb, + HirDb, + }; + + #[test] + fn aug_assign() { + let mut db = TestDb::default(); + + let text = r#" + fn foo(mut x: i32) { + x += 1 + } + }"#; + + let (ingot, file) = db.standalone_file(text); + let body: Body = db.expect_item::(ingot, file); + let bin_expr = match body.exprs(db.as_hir_db()).values().nth(2).unwrap().unwrap() { + 
Expr::AugAssign(lhs, rhs, bin_op) => (*lhs, *rhs, *bin_op), + _ => unreachable!(), + }; + let top_mod = body.top_mod(db.as_hir_db()); + assert_eq!("x", db.text_at(top_mod, &bin_expr.0.lazy_span(body))); + assert_eq!("1", db.text_at(top_mod, &bin_expr.1.lazy_span(body))); + assert_eq!(ArithBinOp::Add, bin_expr.2); + } +} diff --git a/crates/hir/src/span/item.rs b/crates/hir/src/span/item.rs new file mode 100644 index 0000000000..9f9b4ff526 --- /dev/null +++ b/crates/hir/src/span/item.rs @@ -0,0 +1,590 @@ +use parser::ast::{self, prelude::AstNode}; + +use super::{ + attr::LazyAttrListSpan, + define_lazy_span_node, + params::{LazyFuncParamListSpan, LazyGenericParamListSpan, LazyWhereClauseSpan}, + transition::SpanTransitionChain, + types::{LazyTupleTypeSpan, LazyTySpan}, + use_tree::LazyUseAliasSpan, +}; +use crate::{ + hir_def::{ + Body, Const, Contract, Enum, Func, Impl, ImplTrait, ItemKind, Mod, Struct, TopLevelMod, + Trait, TypeAlias, Use, + }, + span::{ + params::LazyTraitRefSpan, + transition::{LazyArg, LazyTransitionFn, ResolvedOrigin, ResolvedOriginKind}, + use_tree::LazyUsePathSpan, + DesugaredOrigin, DesugaredUseFocus, + }, +}; + +define_lazy_span_node!(LazyTopModSpan, ast::Root); +impl<'db> LazyTopModSpan<'db> { + pub fn new(t: TopLevelMod<'db>) -> Self { + Self(SpanTransitionChain::new(t)) + } +} + +define_lazy_span_node!(LazyItemSpan); +impl<'db> LazyItemSpan<'db> { + pub fn new(i: ItemKind<'db>) -> Self { + Self(SpanTransitionChain::new(i)) + } +} + +define_lazy_span_node!( + LazyModSpan, + ast::Mod, + @token + { + (name, name), + } + @node + { + (attributes, attr_list, LazyAttrListSpan), + (modifier, modifier, LazyItemModifierSpan), + } +); +impl<'db> LazyModSpan<'db> { + pub fn new(m: Mod<'db>) -> Self { + Self(crate::span::transition::SpanTransitionChain::new(m)) + } +} + +define_lazy_span_node!( + LazyFuncSpan, + ast::Func, + @token { + (name, name), + } + @node { + (attributes, attr_list, LazyAttrListSpan), + (generic_params, generic_params, 
LazyGenericParamListSpan), + (where_clause, where_clause, LazyWhereClauseSpan), + (modifier, modifier, LazyItemModifierSpan), + (params, params, LazyFuncParamListSpan), + (ret_ty, ret_ty, LazyTySpan), + } +); +impl<'db> LazyFuncSpan<'db> { + pub fn new(f: Func<'db>) -> Self { + Self(crate::span::transition::SpanTransitionChain::new(f)) + } +} + +define_lazy_span_node!( + LazyStructSpan, + ast::Struct, + @token { + (name, name), + } + @node { + (attributes, attr_list, LazyAttrListSpan), + (generic_params, generic_params, LazyGenericParamListSpan), + (where_clause, where_clause, LazyWhereClauseSpan), + (modifier, modifier, LazyItemModifierSpan), + (fields, fields, LazyFieldDefListSpan), + } +); +impl<'db> LazyStructSpan<'db> { + pub fn new(s: Struct<'db>) -> Self { + Self(crate::span::transition::SpanTransitionChain::new(s)) + } +} + +define_lazy_span_node!( + LazyContractSpan, + ast::Contract, + @token { + (name, name), + } + @node { + (attributes, attr_list, LazyAttrListSpan), + (modifier, modifier, LazyItemModifierSpan), + (fields, fields, LazyFieldDefListSpan), + } +); +impl<'db> LazyContractSpan<'db> { + pub fn new(c: Contract<'db>) -> Self { + Self(crate::span::transition::SpanTransitionChain::new(c)) + } +} + +define_lazy_span_node!( + LazyEnumSpan, + ast::Enum, + @token { + (name, name), + } + @node { + (attributes, attr_list, LazyAttrListSpan), + (generic_params, generic_params, LazyGenericParamListSpan), + (where_clause, where_clause, LazyWhereClauseSpan), + (modifier, modifier, LazyItemModifierSpan), + (variants, variants, LazyVariantDefListSpan), + } +); +impl<'db> LazyEnumSpan<'db> { + pub fn new(e: Enum<'db>) -> Self { + Self(crate::span::transition::SpanTransitionChain::new(e)) + } +} + +define_lazy_span_node!( + LazyTypeAliasSpan, + ast::TypeAlias, + @token { + (alias, alias), + } + @node { + (attributes, attr_list, LazyAttrListSpan), + (generic_params, generic_params, LazyGenericParamListSpan), + (modifier, modifier, LazyItemModifierSpan), + (ty, ty, 
LazyTySpan), + } +); +impl<'db> LazyTypeAliasSpan<'db> { + pub fn new(t: TypeAlias<'db>) -> Self { + Self(crate::span::transition::SpanTransitionChain::new(t)) + } +} + +define_lazy_span_node!( + LazyImplSpan, + ast::Impl, + @node { + (attributes, attr_list, LazyAttrListSpan), + (generic_params, generic_params, LazyGenericParamListSpan), + (where_clause, where_clause, LazyWhereClauseSpan), + (target_ty, ty, LazyTySpan), + } +); +impl<'db> LazyImplSpan<'db> { + pub fn new(i: Impl<'db>) -> Self { + Self(crate::span::transition::SpanTransitionChain::new(i)) + } +} + +define_lazy_span_node!( + LazyTraitSpan, + ast::Trait, + @token { + (name, name), + } + @node { + (attributes, attr_list, LazyAttrListSpan), + (generic_params, generic_params, LazyGenericParamListSpan), + (super_traits, super_trait_list, LazySuperTraitListSpan), + (where_clause, where_clause, LazyWhereClauseSpan), + (modifier, modifier, LazyItemModifierSpan), + } +); +impl<'db> LazyTraitSpan<'db> { + pub fn new(t: Trait<'db>) -> Self { + Self(crate::span::transition::SpanTransitionChain::new(t)) + } +} + +define_lazy_span_node!( + LazySuperTraitListSpan, + ast::SuperTraitList, + @idx { + (super_trait, LazyTraitRefSpan), + } +); + +define_lazy_span_node!( + LazyImplTraitSpan, + ast::ImplTrait, + @node { + (attributes, attr_list, LazyAttrListSpan), + (generic_params, generic_params, LazyGenericParamListSpan), + (where_clause, where_clause, LazyWhereClauseSpan), + (trait_ref, trait_ref, LazyTraitRefSpan), + (ty, ty, LazyTySpan), + } +); +impl<'db> LazyImplTraitSpan<'db> { + pub fn new(i: ImplTrait<'db>) -> Self { + Self(crate::span::transition::SpanTransitionChain::new(i)) + } +} + +define_lazy_span_node!( + LazyConstSpan, + ast::Const, + @token { + (name, name), + } + @node { + (attributes, attr_list, LazyAttrListSpan), + (ty, ty, LazyTySpan), + } +); +impl<'db> LazyConstSpan<'db> { + pub fn new(c: Const<'db>) -> Self { + Self(crate::span::transition::SpanTransitionChain::new(c)) + } +} + 
+define_lazy_span_node!(LazyUseSpan, ast::Use); +impl<'db> LazyUseSpan<'db> { + pub fn new(u: Use<'db>) -> Self { + Self(crate::span::transition::SpanTransitionChain::new(u)) + } + + pub fn path(&self) -> LazyUsePathSpan<'db> { + self.clone().path_moved() + } + + pub fn path_moved(mut self) -> LazyUsePathSpan<'db> { + fn f(origin: ResolvedOrigin, _: LazyArg) -> ResolvedOrigin { + origin + .map(|node| { + ast::Use::cast(node) + .and_then(|use_| use_.use_tree()) + .and_then(|tree| tree.path()) + .map(|n| n.syntax().clone().into()) + }) + .map_desugared(|root, desugared| match desugared { + DesugaredOrigin::Use(mut use_) => { + use_.focus = DesugaredUseFocus::Path; + ResolvedOriginKind::Desugared(root, DesugaredOrigin::Use(use_)) + } + }) + } + + let lazy_transition = LazyTransitionFn { + f, + arg: LazyArg::None, + }; + + self.0.push(lazy_transition); + LazyUsePathSpan(self.0) + } + + pub fn alias(&self) -> LazyUseAliasSpan<'db> { + self.clone().alias_moved() + } + + pub fn alias_moved(mut self) -> LazyUseAliasSpan<'db> { + fn f(origin: ResolvedOrigin, _: LazyArg) -> ResolvedOrigin { + origin + .map(|node| { + ast::Use::cast(node) + .and_then(|use_| use_.use_tree()) + .and_then(|tree| tree.alias()) + .map(|n| n.syntax().clone().into()) + }) + .map_desugared(|root, desugared| match desugared { + DesugaredOrigin::Use(mut use_) => { + use_.focus = DesugaredUseFocus::Alias; + ResolvedOriginKind::Desugared(root, DesugaredOrigin::Use(use_)) + } + }) + } + + let lazy_transition = LazyTransitionFn { + f, + arg: LazyArg::None, + }; + + self.0.push(lazy_transition); + LazyUseAliasSpan(self.0) + } +} + +define_lazy_span_node!(LazyBodySpan, ast::Expr); +impl<'db> LazyBodySpan<'db> { + pub fn new(b: Body<'db>) -> Self { + Self(crate::span::transition::SpanTransitionChain::new(b)) + } +} + +define_lazy_span_node!( + LazyFieldDefListSpan, + ast::RecordFieldDefList, + @idx { + (field, LazyFieldDefSpan), + } +); + +define_lazy_span_node!( + LazyFieldDefSpan, + ast::RecordFieldDef, + 
@token { + (pub_span, pub_kw), + (name, name), + } + @node { + (attributes, attr_list, LazyAttrListSpan), + (ty, ty, LazyTySpan), + } +); + +define_lazy_span_node!( + LazyVariantDefListSpan, + ast::VariantDefList, + @idx { + (variant, LazyVariantDefSpan), + } +); + +define_lazy_span_node!( + LazyVariantDefSpan, + ast::VariantDef, + @token { + (name, name), + } + @node { + (fields, fields, LazyFieldDefListSpan), + (attributes, attr_list, LazyAttrListSpan), + (tuple_type, tuple_type, LazyTupleTypeSpan), + } +); + +define_lazy_span_node!( + LazyItemModifierSpan, + ast::ItemModifier, + @token { + (pub_kw, pub_kw), + (unsafe_kw, unsafe_kw), + } +); + +#[cfg(test)] +mod tests { + use crate::{ + hir_def::{Enum, Func, Mod, Struct, TypeAlias, Use}, + test_db::TestDb, + HirDb, + }; + + #[test] + fn top_mod_span() { + let mut db = TestDb::default(); + + let text = r#" + mod foo { + fn bar() {} + } + + mod baz { + fn qux() {} + } + "#; + + let (ingot, file) = db.standalone_file(text); + let item_tree = db.parse_source(ingot, file); + let top_mod = item_tree.top_mod; + assert_eq!(text, db.text_at(top_mod, &top_mod.lazy_span())); + } + + #[test] + fn mod_span() { + let mut db = TestDb::default(); + + let text = r#" + + mod foo { + fn bar() {} + } + "#; + + let (ingot, file) = db.standalone_file(text); + let mod_ = db.expect_item::(ingot, file); + let top_mod = mod_.top_mod(db.as_hir_db()); + let mod_span = mod_.lazy_span(); + assert_eq!( + r#"mod foo { + fn bar() {} + }"#, + db.text_at(top_mod, &mod_span) + ); + assert_eq!("foo", db.text_at(top_mod, &mod_span.name())); + } + + #[test] + fn fn_span() { + let mut db = TestDb::default(); + + let text = r#" + fn my_func(x: u32, label y: foo::Bar<2>) -> FooResult + where U: Add + "#; + + let (ingot, file) = db.standalone_file(text); + + let fn_ = db.expect_item::(ingot, file); + let top_mod = fn_.top_mod(db.as_hir_db()); + let fn_span = fn_.lazy_span(); + assert_eq!("my_func", db.text_at(top_mod, &fn_span.name())); + + let 
generic_params = fn_span.generic_params(); + let type_generic_param_1 = generic_params.param(0).into_type_param(); + let type_generic_param_2 = generic_params.param(1).into_type_param(); + let const_generic_param = generic_params.param(2).into_const_param(); + + assert_eq!("T", db.text_at(top_mod, &type_generic_param_1.name())); + assert_eq!( + "Debug", + db.text_at(top_mod, &type_generic_param_1.bounds().bound(0)) + ); + assert_eq!("U", db.text_at(top_mod, &type_generic_param_2.name())); + assert_eq!( + "const", + db.text_at(top_mod, &const_generic_param.const_token()) + ); + assert_eq!("LEN", db.text_at(top_mod, &const_generic_param.name())); + assert_eq!("usize", db.text_at(top_mod, &const_generic_param.ty())); + + let params = fn_span.params(); + let param_1 = params.param(0); + let param_2 = params.param(1); + + assert_eq!("x", db.text_at(top_mod, ¶m_1.name())); + assert_eq!("u32", db.text_at(top_mod, ¶m_1.ty())); + assert_eq!("label", db.text_at(top_mod, ¶m_2.label())); + assert_eq!("foo::Bar<2>", db.text_at(top_mod, ¶m_2.ty())); + + assert_eq!("FooResult", db.text_at(top_mod, &fn_span.ret_ty())); + + let where_clause = fn_span.where_clause(); + let where_predicate = where_clause.predicate(0); + assert_eq!("where", db.text_at(top_mod, &where_clause.where_token())); + assert_eq!("U", db.text_at(top_mod, &where_predicate.ty())); + assert_eq!(": Add", db.text_at(top_mod, &where_predicate.bounds())); + } + + #[test] + fn struct_span() { + let mut db = TestDb::default(); + + let text = r#" + struct Foo { + x: u32 + pub y: foo::Bar<2> + }"#; + + let (ingot, file) = db.standalone_file(text); + let struct_ = db.expect_item::(ingot, file); + let top_mod = struct_.top_mod(db.as_hir_db()); + let struct_span = struct_.lazy_span(); + assert_eq!("Foo", db.text_at(top_mod, &struct_span.name())); + + let fields = struct_span.fields(); + let field_1 = fields.field(0); + let field_2 = fields.field(1); + + assert_eq!("x", db.text_at(top_mod, &field_1.name())); + 
assert_eq!("u32", db.text_at(top_mod, &field_1.ty())); + + assert_eq!("pub", db.text_at(top_mod, &field_2.pub_span())); + assert_eq!("y", db.text_at(top_mod, &field_2.name())); + assert_eq!("foo::Bar<2>", db.text_at(top_mod, &field_2.ty())); + } + + #[test] + fn enum_span() { + let mut db = TestDb::default(); + + let text = r#" + enum Foo { + Bar + Baz(u32, i32) + Bux { + x: i8 + y: u8 + } + }"#; + + let (ingot, file) = db.standalone_file(text); + let enum_ = db.expect_item::(ingot, file); + let top_mod = enum_.top_mod(db.as_hir_db()); + let enum_span = enum_.lazy_span(); + assert_eq!("Foo", db.text_at(top_mod, &enum_span.name())); + + let variants = enum_span.variants(); + let variant_1 = variants.variant(0); + let variant_2 = variants.variant(1); + let variant_3 = variants.variant(2); + + assert_eq!("Bar", db.text_at(top_mod, &variant_1.name())); + assert_eq!("Baz", db.text_at(top_mod, &variant_2.name())); + assert_eq!("(u32, i32)", db.text_at(top_mod, &variant_2.tuple_type())); + assert_eq!("Bux", db.text_at(top_mod, &variant_3.name())); + assert!(db.text_at(top_mod, &variant_3.fields()).contains("x: i8")); + } + + #[test] + fn type_alias_span() { + let mut db = TestDb::default(); + + let text = r#" + pub type Foo = u32 + "#; + + let (ingot, file) = db.standalone_file(text); + let type_alias = db.expect_item::(ingot, file); + let top_mod = type_alias.top_mod(db.as_hir_db()); + let type_alias_span = type_alias.lazy_span(); + assert_eq!("Foo", db.text_at(top_mod, &type_alias_span.alias())); + assert_eq!("u32", db.text_at(top_mod, &type_alias_span.ty())); + assert_eq!("pub", db.text_at(top_mod, &type_alias_span.modifier())); + } + + #[test] + fn use_span() { + let mut db = TestDb::default(); + + let text = r#" + use foo::bar::baz::Trait as _ + "#; + + let (ingot, file) = db.standalone_file(text); + let use_ = db.expect_item::(ingot, file); + + let top_mod = use_.top_mod(db.as_hir_db()); + let use_span = use_.lazy_span(); + let use_path_span = use_span.path(); + 
assert_eq!("foo", db.text_at(top_mod, &use_path_span.segment(0))); + assert_eq!("bar", db.text_at(top_mod, &use_path_span.segment(1))); + assert_eq!("baz", db.text_at(top_mod, &use_path_span.segment(2))); + assert_eq!("Trait", db.text_at(top_mod, &use_path_span.segment(3))); + assert_eq!("as _", db.text_at(top_mod, &use_span.alias())); + assert_eq!("_", db.text_at(top_mod, &use_span.alias().name())); + } + + #[test] + fn use_span_desugared() { + let mut db = TestDb::default(); + + let text = r#" + use foo::bar::{baz::*, qux as Alias} + "#; + + let (ingot, file) = db.standalone_file(text); + let uses = db.expect_items::(ingot, file); + assert_eq!(uses.len(), 2); + + let top_mod = uses[0].top_mod(db.as_hir_db()); + + let use_span = uses[0].lazy_span(); + let use_path_span = use_span.path(); + assert_eq!("foo", db.text_at(top_mod, &use_path_span.segment(0))); + assert_eq!("bar", db.text_at(top_mod, &use_path_span.segment(1))); + assert_eq!("baz", db.text_at(top_mod, &use_path_span.segment(2))); + assert_eq!("*", db.text_at(top_mod, &use_path_span.segment(3))); + + let use_span = uses[1].lazy_span(); + let use_path_span = use_span.path(); + assert_eq!("foo", db.text_at(top_mod, &use_path_span.segment(0))); + assert_eq!("bar", db.text_at(top_mod, &use_path_span.segment(1))); + assert_eq!("qux", db.text_at(top_mod, &use_path_span.segment(2))); + assert_eq!("as Alias", db.text_at(top_mod, &use_span.alias())); + assert_eq!("Alias", db.text_at(top_mod, &use_span.alias().name())); + } +} diff --git a/crates/hir/src/span/mod.rs b/crates/hir/src/span/mod.rs new file mode 100644 index 0000000000..d3f4c0c0b9 --- /dev/null +++ b/crates/hir/src/span/mod.rs @@ -0,0 +1,275 @@ +use common::diagnostics::Span; +use parser::ast::{self, prelude::*, AstPtr, SyntaxNodePtr}; + +use crate::{ + hir_def::{ + Body, Const, Contract, Enum, Func, Impl, ImplTrait, Mod, Struct, TopLevelMod, Trait, + TypeAlias, Use, + }, + lower::top_mod_ast, + HirDb, SpannedHirDb, +}; + +pub mod attr; +pub mod expr; 
+pub mod item; +pub mod params; +pub mod pat; +pub mod path; +pub mod stmt; +pub mod types; +pub mod use_tree; + +pub(crate) mod transition; + +pub mod lazy_spans { + pub use super::{ + attr::{ + LazyAttrArgListSpan, LazyAttrArgSpan, LazyAttrListSpan, LazyAttrSpan, + LazyDocCommentAttrSpan, LazyNormalAttrSpan, + }, + expr::{ + LazyAssignExprSpan, LazyAugAssignExprSpan, LazyBinExprSpan, LazyCallArgListSpan, + LazyCallArgSpan, LazyCallExprSpan, LazyExprSpan, LazyFieldExprSpan, LazyFieldListSpan, + LazyFieldSpan, LazyLitExprSpan, LazyMatchArmListSpan, LazyMatchArmSpan, + LazyMatchExprSpan, LazyMethodCallExprSpan, LazyPathExprSpan, LazyRecordInitExprSpan, + LazyUnExprSpan, + }, + item::{ + LazyBodySpan, LazyConstSpan, LazyContractSpan, LazyEnumSpan, LazyFieldDefListSpan, + LazyFieldDefSpan, LazyFuncSpan, LazyImplSpan, LazyImplTraitSpan, LazyItemModifierSpan, + LazyItemSpan, LazyModSpan, LazyStructSpan, LazyTopModSpan, LazyTraitSpan, + LazyTypeAliasSpan, LazyUseSpan, LazyVariantDefListSpan, LazyVariantDefSpan, + }, + params::{ + LazyConstGenericParamSpan, LazyFuncParamListSpan, LazyFuncParamSpan, + LazyGenericArgListSpan, LazyGenericArgSpan, LazyGenericParamListSpan, + LazyGenericParamSpan, LazyKindBoundSpan, LazyTraitRefSpan, LazyTypeBoundListSpan, + LazyTypeBoundSpan, LazyTypeGenericArgSpan, LazyWhereClauseSpan, LazyWherePredicateSpan, + }, + pat::{ + LazyLitPatSpan, LazyPatSpan, LazyPathPatSpan, LazyPathTuplePatSpan, + LazyRecordPatFieldListSpan, LazyRecordPatFieldSpan, LazyRecordPatSpan, + }, + path::{LazyPathSegmentSpan, LazyPathSpan}, + stmt::{LazyLetStmtSpan, LazyStmtSpan}, + types::{ + LazyArrayTypeSpan, LazyPathTypeSpan, LazyPtrTypeSpan, LazyTupleTypeSpan, LazyTySpan, + }, + use_tree::{LazyUseAliasSpan, LazyUsePathSegmentSpan, LazyUsePathSpan}, + DynLazySpan, LazyLitSpan, LazySpan, LazySpanAtom, + }; +} + +/// This struct represents a dynamic lazy span, which can be converted from all +/// types that implement [`LazySpan`] in this module. 
We want to avoid `dyn +/// LazySpan` usage because it doesn't implement `Clone` and `Eq` which leads to +/// a lot of difficulties in salsa integration +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct DynLazySpan<'db>(pub(super) Option>); +impl<'db> DynLazySpan<'db> { + pub fn invalid() -> Self { + Self(None) + } + + pub fn top_mod(&self, db: &'db dyn HirDb) -> Option> { + self.0.as_ref().map(|chain| chain.top_mod(db)) + } +} +impl LazySpan for DynLazySpan<'_> { + fn resolve(&self, db: &dyn crate::SpannedHirDb) -> Option { + if let Some(chain) = &self.0 { + chain.resolve(db) + } else { + None + } + } +} + +pub trait SpanDowncast<'db> { + fn downcast(dyn_span: DynLazySpan<'db>) -> Option + where + Self: Sized; +} + +/// The trait provides a way to extract [`Span`](common::diagnostics::Span) from +/// types which don't have a span information directly, but can be resolved into +/// a span lazily. +pub trait LazySpan { + fn resolve(&self, db: &dyn crate::SpannedHirDb) -> Option; +} + +pub fn toplevel_ast(db: &dyn SpannedHirDb, item: TopLevelMod) -> HirOrigin { + HirOrigin::raw(&top_mod_ast(db.as_hir_db(), item)) +} + +pub fn mod_ast<'db>(db: &'db dyn SpannedHirDb, item: Mod<'db>) -> &'db HirOrigin { + item.origin(db.as_hir_db()) +} + +pub fn func_ast<'db>(db: &'db dyn SpannedHirDb, item: Func<'db>) -> &'db HirOrigin { + item.origin(db.as_hir_db()) +} + +pub fn struct_ast<'db>( + db: &'db dyn SpannedHirDb, + item: Struct<'db>, +) -> &'db HirOrigin { + item.origin(db.as_hir_db()) +} + +pub fn contract_ast<'db>( + db: &'db dyn SpannedHirDb, + item: Contract<'db>, +) -> &'db HirOrigin { + item.origin(db.as_hir_db()) +} + +pub fn enum_ast<'db>(db: &'db dyn SpannedHirDb, item: Enum<'db>) -> &'db HirOrigin { + item.origin(db.as_hir_db()) +} + +pub fn type_alias_ast<'db>( + db: &'db dyn SpannedHirDb, + item: TypeAlias<'db>, +) -> &'db HirOrigin { + item.origin(db.as_hir_db()) +} + +pub fn impl_ast<'db>(db: &'db dyn SpannedHirDb, item: Impl<'db>) -> &'db HirOrigin { + 
item.origin(db.as_hir_db()) +} + +pub fn trait_ast<'db>(db: &'db dyn SpannedHirDb, item: Trait<'db>) -> &'db HirOrigin { + item.origin(db.as_hir_db()) +} + +pub fn impl_trait_ast<'db>( + db: &'db dyn SpannedHirDb, + item: ImplTrait<'db>, +) -> &'db HirOrigin { + item.origin(db.as_hir_db()) +} + +pub fn const_ast<'db>(db: &'db dyn SpannedHirDb, item: Const<'db>) -> &'db HirOrigin { + item.origin(db.as_hir_db()) +} + +pub fn use_ast<'db>(db: &'db dyn SpannedHirDb, item: Use<'db>) -> &'db HirOrigin { + item.origin(db.as_hir_db()) +} + +pub fn body_ast<'db>(db: &'db dyn SpannedHirDb, item: Body<'db>) -> &'db HirOrigin { + item.origin(db.as_hir_db()) +} + +pub fn body_source_map<'db>( + db: &'db dyn SpannedHirDb, + item: Body<'db>, +) -> &'db crate::hir_def::BodySourceMap { + item.source_map(db.as_hir_db()) +} + +/// This enum represents the origin of the HIR node in a file. +/// The origin has three possible kinds. +/// 1. `Raw` is used for nodes that are created by the parser and not +/// 2. `Expanded` is used for nodes that are created by the compiler and not +/// 3. `Desugared` is used for nodes that are created by the compiler and not +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum HirOrigin +where + T: AstNode, +{ + /// The HIR node is created by direct lowering from the corresponding AST. + Raw(AstPtr), + /// The HIR node is created by expanding attributes. + /// The `SyntaxNode` points to the callsite of the attribute. + Expanded(SyntaxNodePtr), + /// The HIR node is the result of desugaring in the lower phase from AST to + /// HIR. e.g., `a += b` is desugared into `a = a + b`. + Desugared(DesugaredOrigin), + + /// The HIR node is created by the compiler and not directly from the AST. + /// This is only used with `Invalid` nodes that don't have a corresponding + /// AST node. + /// e.g., the RHS of `a + ` is represented as `Invalid` node but there is no + /// corresponding origin. 
+ None, +} + +impl HirOrigin +where + T: AstNode, +{ + pub(crate) fn raw(ast: &T) -> Self { + Self::Raw(AstPtr::new(ast)) + } + + pub(crate) fn desugared(origin: impl Into) -> Self { + Self::Desugared(origin.into()) + } +} + +impl Default for HirOrigin +where + T: AstNode, +{ + fn default() -> Self { + Self::None + } +} + +/// This enum represents the origin of the HIR node which is desugared into +/// other HIR node kinds. +#[derive(Debug, Clone, PartialEq, Eq, Hash, derive_more::From)] +pub enum DesugaredOrigin { + /// The HIR node is the result of desugaring a AST use. + /// In HIR lowering, nested use tree is flattened into a single use path. + Use(UseDesugared), +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct UseDesugared { + pub root: AstPtr, + pub path: Vec>, + pub alias: Option>, + focus: DesugaredUseFocus, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +enum DesugaredUseFocus { + Root, + Path, + Alias, +} + +impl UseDesugared { + pub(super) fn new(ast: &ast::Use) -> Self { + Self { + root: AstPtr::new(ast), + path: vec![], + alias: None, + focus: DesugaredUseFocus::Root, + } + } + + pub(super) fn add_alias(&mut self, alias: &ast::UseAlias) { + self.alias = Some(AstPtr::new(alias)) + } + + pub(super) fn push_seg(&mut self, seg: &ast::UsePathSegment) { + self.path.push(AstPtr::new(seg)); + } +} + +use transition::define_lazy_span_node; + +use self::transition::SpanTransitionChain; + +define_lazy_span_node!(LazySpanAtom); +impl<'db> LazySpanAtom<'db> { + pub(super) fn into_lit_span(self) -> LazyLitSpan<'db> { + LazyLitSpan(self.0) + } +} +define_lazy_span_node!(LazyLitSpan); diff --git a/crates/hir/src/span/params.rs b/crates/hir/src/span/params.rs new file mode 100644 index 0000000000..34c9bc88d1 --- /dev/null +++ b/crates/hir/src/span/params.rs @@ -0,0 +1,164 @@ +use parser::ast; + +use super::{define_lazy_span_node, types::LazyTySpan}; +use crate::span::{path::LazyPathSpan, LazySpanAtom}; + +define_lazy_span_node!( + 
LazyFuncParamListSpan, + ast::FuncParamList, + @idx { + (param, LazyFuncParamSpan), + } +); + +define_lazy_span_node!( + LazyGenericParamListSpan, + ast::GenericParamList, + @idx { + (param, LazyGenericParamSpan), + } +); + +define_lazy_span_node!( + LazyGenericArgListSpan, + ast::GenericArgList, + @idx { + (arg, LazyGenericArgSpan), + } + +); +define_lazy_span_node!( + LazyWhereClauseSpan, + ast::WhereClause, + @token { + (where_token, where_kw), + } + @idx { + (predicate, LazyWherePredicateSpan), + } +); + +define_lazy_span_node!( + LazyFuncParamSpan, + ast::FuncParam, + @token { + (mut_kw, mut_token), + } + @node { + (label, label, LazySpanAtom), + (name, name, LazySpanAtom), + (ty, ty, LazyTySpan), + } +); + +impl<'db> LazyFuncParamSpan<'db> { + pub fn fallback_self_ty(&self) -> LazyTySpan<'db> { + LazyTySpan(self.name().0) + } +} + +define_lazy_span_node!(LazyGenericParamSpan, ast::GenericParam); +impl<'db> LazyGenericParamSpan<'db> { + pub fn into_type_param(self) -> LazyTypeGenericParamSpan<'db> { + LazyTypeGenericParamSpan(self.0) + } + + pub fn into_const_param(self) -> LazyConstGenericParamSpan<'db> { + LazyConstGenericParamSpan(self.0) + } +} + +define_lazy_span_node!( + LazyTypeGenericParamSpan, + ast::TypeGenericParam, + @token { + (name, name), + } + @node { + (bounds, bounds, LazyTypeBoundListSpan), + } +); + +define_lazy_span_node!( + LazyConstGenericParamSpan, + ast::ConstGenericParam, + @token { + (const_token, const_kw), + (name, name), + } + @node { + (ty, ty, LazyTySpan), + } +); + +define_lazy_span_node!(LazyGenericArgSpan); +impl<'db> LazyGenericArgSpan<'db> { + pub fn into_type_arg(self) -> LazyTypeGenericArgSpan<'db> { + LazyTypeGenericArgSpan(self.0) + } +} + +define_lazy_span_node!( + LazyTypeGenericArgSpan, + ast::TypeGenericArg, + @node { + (ty, ty, LazyTySpan), + } +); + +define_lazy_span_node!( + LazyWherePredicateSpan, + ast::WherePredicate, + @node { + (ty, ty, LazyTySpan), + (bounds, bounds, LazyTypeBoundListSpan), + } +); + 
+define_lazy_span_node! { + LazyTypeBoundListSpan, + ast::TypeBoundList, + @idx { + (bound, LazyTypeBoundSpan), + } +} + +define_lazy_span_node!( + LazyTypeBoundSpan, + ast::TypeBound, + @node { + (trait_bound, trait_bound, LazyTraitRefSpan), + (kind_bound, kind_bound, LazyKindBoundSpan), + } +); + +define_lazy_span_node!( + LazyTraitRefSpan, + ast::TraitRef, + @node { + (path, path, LazyPathSpan), + } +); + +define_lazy_span_node!( + LazyKindBoundSpan, + ast::KindBound, + @node { + (abs, abs, LazyKindBoundAbsSpan), + (mono, mono, LazyKindBoundMonoSpan), + } +); + +define_lazy_span_node!( + LazyKindBoundAbsSpan, + ast::KindBoundAbs, + @token { + (arrow, arrow), + } + @node { + (lhs, lhs, LazyKindBoundSpan), + (rhs, rhs, LazyKindBoundSpan), + } +); + +define_lazy_span_node! {LazyKindBoundMonoSpan, ast::LazyKindBoundMono} diff --git a/crates/hir/src/span/pat.rs b/crates/hir/src/span/pat.rs new file mode 100644 index 0000000000..52cea2e213 --- /dev/null +++ b/crates/hir/src/span/pat.rs @@ -0,0 +1,103 @@ +use parser::ast; + +use super::{ + body_source_map, define_lazy_span_node, + transition::{ChainInitiator, ResolvedOrigin, SpanTransitionChain}, +}; +use crate::{ + hir_def::{Body, PatId}, + span::{path::LazyPathSpan, LazyLitSpan}, + SpannedHirDb, +}; + +define_lazy_span_node!(LazyPatSpan, ast::Pat,); +impl<'db> LazyPatSpan<'db> { + pub fn new(body: Body<'db>, pat: PatId) -> Self { + let root = PatRoot { pat, body }; + Self(SpanTransitionChain::new(root)) + } + + pub fn into_path_pat(self) -> LazyPathPatSpan<'db> { + LazyPathPatSpan(self.0) + } + + pub fn into_lit_pat(self) -> LazyLitPatSpan<'db> { + LazyLitPatSpan(self.0) + } + + pub fn into_path_tuple_pat(self) -> LazyPathTuplePatSpan<'db> { + LazyPathTuplePatSpan(self.0) + } + + pub fn into_record_pat(self) -> LazyRecordPatSpan<'db> { + LazyRecordPatSpan(self.0) + } +} + +define_lazy_span_node!( + LazyLitPatSpan, + ast::LitPat, + @node { + (lit, lit, LazyLitSpan), + } +); + +define_lazy_span_node!( + 
LazyPathPatSpan, + ast::PathPat, + @token { + (mut_token, mut_token), + } + + @node { + (path, path, LazyPathSpan), + } +); + +define_lazy_span_node!( + LazyPathTuplePatSpan, + ast::PathTuplePat, + @node { + (path, path, LazyPathSpan), + } +); + +define_lazy_span_node!( + LazyRecordPatSpan, + ast::RecordPat, + @node { + (path, path, LazyPathSpan), + (fields, fields, LazyRecordPatFieldListSpan), + } +); + +define_lazy_span_node!( + LazyRecordPatFieldListSpan, + ast::RecordPatFieldList, + @idx { + (field, LazyRecordPatFieldSpan), + } +); + +define_lazy_span_node!( + LazyRecordPatFieldSpan, + ast::RecordPatField, + @token { + (name, name), + } +); + +#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)] +pub(crate) struct PatRoot<'db> { + pat: PatId, + pub(crate) body: Body<'db>, +} + +impl ChainInitiator for PatRoot<'_> { + fn init(&self, db: &dyn SpannedHirDb) -> ResolvedOrigin { + let source_map = body_source_map(db, self.body); + let origin = source_map.pat_map.node_to_source(self.pat); + let top_mod = self.body.top_mod(db.as_hir_db()); + ResolvedOrigin::resolve(db, top_mod, origin) + } +} diff --git a/crates/hir/src/span/path.rs b/crates/hir/src/span/path.rs new file mode 100644 index 0000000000..fd9a9730d4 --- /dev/null +++ b/crates/hir/src/span/path.rs @@ -0,0 +1,27 @@ +use parser::ast; + +use super::{define_lazy_span_node, params::LazyGenericArgListSpan, LazySpanAtom}; + +define_lazy_span_node!( + LazyPathSpan, + ast::Path, + @idx { + (segment, LazyPathSegmentSpan), + } +); + +define_lazy_span_node!( + LazyPathSegmentSpan, + ast::PathSegment, + @token { + (ident, ident), + } + @node { + (generic_args, generic_args, LazyGenericArgListSpan), + } +); +impl<'db> LazyPathSegmentSpan<'db> { + pub fn into_atom(self) -> LazySpanAtom<'db> { + LazySpanAtom(self.0) + } +} diff --git a/crates/hir/src/span/stmt.rs b/crates/hir/src/span/stmt.rs new file mode 100644 index 0000000000..5a963b833a --- /dev/null +++ b/crates/hir/src/span/stmt.rs @@ -0,0 +1,46 @@ +use parser::ast; + 
+use super::{ + body_source_map, define_lazy_span_node, + transition::{ChainInitiator, ResolvedOrigin, SpanTransitionChain}, +}; +use crate::{ + hir_def::{Body, StmtId}, + span::types::LazyTySpan, + SpannedHirDb, +}; + +define_lazy_span_node!(LazyStmtSpan, ast::Stmt,); +impl<'db> LazyStmtSpan<'db> { + pub fn new(body: Body<'db>, stmt: StmtId) -> Self { + let root = StmtRoot { stmt, body }; + Self(SpanTransitionChain::new(root)) + } + + pub fn into_let_stmt(self) -> LazyLetStmtSpan<'db> { + LazyLetStmtSpan(self.0) + } +} + +define_lazy_span_node!( + LazyLetStmtSpan, + ast::LetStmt, + @node { + (ty, type_annotation, LazyTySpan), + } +); + +#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)] +pub(crate) struct StmtRoot<'db> { + stmt: StmtId, + pub(crate) body: Body<'db>, +} + +impl ChainInitiator for StmtRoot<'_> { + fn init(&self, db: &dyn SpannedHirDb) -> ResolvedOrigin { + let source_map = body_source_map(db, self.body); + let origin = source_map.stmt_map.node_to_source(self.stmt); + let top_mod = self.body.top_mod(db.as_hir_db()); + ResolvedOrigin::resolve(db, top_mod, origin) + } +} diff --git a/crates/hir/src/span/transition.rs b/crates/hir/src/span/transition.rs new file mode 100644 index 0000000000..5d4f91bca4 --- /dev/null +++ b/crates/hir/src/span/transition.rs @@ -0,0 +1,476 @@ +use common::{ + diagnostics::{Span, SpanKind}, + InputFile, +}; +use parser::{ + ast::prelude::*, syntax_node::NodeOrToken, FeLang, SyntaxNode, SyntaxToken, TextRange, +}; + +use super::{ + body_ast, const_ast, contract_ast, enum_ast, expr::ExprRoot, func_ast, impl_ast, + impl_trait_ast, mod_ast, pat::PatRoot, stmt::StmtRoot, struct_ast, trait_ast, type_alias_ast, + use_ast, DesugaredOrigin, DesugaredUseFocus, HirOrigin, LazySpan, UseDesugared, +}; +use crate::{ + hir_def::{ + Body, Const, Contract, Enum, Func, Impl, ImplTrait, ItemKind, Mod, Struct, TopLevelMod, + Trait, TypeAlias, Use, + }, + lower::top_mod_ast, + HirDb, SpannedHirDb, +}; + +/// This type represents function from 
the hir origin to another hir origin to +/// identify the span of HIR node. `LazyTransitionFn` is regarded as a closure +/// that takes a `HirOrigin` and [`LazyArg`], `LazyArg` is considered as +/// captured variables. +/// The reason why we use `LazyTransitionFn` instead of `dyn +/// Fn` is that we want to make all types that use `LazyTransitionFn` to be +/// `Clone` and `Eq`. +#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)] +pub(crate) struct LazyTransitionFn { + pub(super) f: fn(ResolvedOrigin, LazyArg) -> ResolvedOrigin, + pub(super) arg: LazyArg, +} + +#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)] +pub(crate) enum LazyArg { + Idx(usize), + None, +} + +#[derive(Clone, PartialEq, Eq, Hash, Debug)] +pub(crate) struct SpanTransitionChain<'db> { + pub(crate) root: ChainRoot<'db>, + pub(super) chain: Vec, +} + +impl<'db> SpanTransitionChain<'db> { + pub(crate) fn pop_transition(&mut self) { + self.chain.pop(); + } + + pub(crate) fn len(&self) -> usize { + self.chain.len() + } + + pub(super) fn new(root: impl Into>) -> Self { + Self { + root: root.into(), + chain: Vec::new(), + } + } + + pub(super) fn top_mod(&self, db: &'db dyn HirDb) -> TopLevelMod<'db> { + match self.root { + ChainRoot::ItemKind(item) => item.top_mod(db), + ChainRoot::TopMod(top_mod) => top_mod, + ChainRoot::Mod(m) => m.top_mod(db), + ChainRoot::Func(f) => f.top_mod(db), + ChainRoot::Struct(s) => s.top_mod(db), + ChainRoot::Contract(c) => c.top_mod(db), + ChainRoot::Enum(e) => e.top_mod(db), + ChainRoot::TypeAlias(t) => t.top_mod(db), + ChainRoot::Impl(i) => i.top_mod(db), + ChainRoot::Trait(t) => t.top_mod(db), + ChainRoot::ImplTrait(i) => i.top_mod(db), + ChainRoot::Const(c) => c.top_mod(db), + ChainRoot::Use(u) => u.top_mod(db), + ChainRoot::Body(b) => b.top_mod(db), + ChainRoot::Stmt(s) => s.body.top_mod(db), + ChainRoot::Expr(e) => e.body.top_mod(db), + ChainRoot::Pat(p) => p.body.top_mod(db), + } + } + + pub(super) fn push(&mut self, transition: LazyTransitionFn) { + 
self.chain.push(transition); + } +} + +#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, derive_more::From)] +pub(crate) enum ChainRoot<'db> { + ItemKind(ItemKind<'db>), + TopMod(TopLevelMod<'db>), + Mod(Mod<'db>), + Func(Func<'db>), + Struct(Struct<'db>), + Contract(Contract<'db>), + Enum(Enum<'db>), + TypeAlias(TypeAlias<'db>), + Impl(Impl<'db>), + Trait(Trait<'db>), + ImplTrait(ImplTrait<'db>), + Const(Const<'db>), + Use(Use<'db>), + Body(Body<'db>), + Stmt(StmtRoot<'db>), + Expr(ExprRoot<'db>), + Pat(PatRoot<'db>), +} + +#[derive(Debug, Clone)] +pub(crate) struct ResolvedOrigin { + pub(crate) file: InputFile, + pub(crate) kind: ResolvedOriginKind, +} + +impl ResolvedOrigin { + pub(crate) fn new(file: InputFile, kind: ResolvedOriginKind) -> Self { + Self { file, kind } + } + + pub(crate) fn resolve( + db: &dyn SpannedHirDb, + top_mod: TopLevelMod, + origin: &HirOrigin, + ) -> ResolvedOrigin + where + T: AstNode, + { + let root = top_mod_ast(db.as_hir_db(), top_mod).syntax().clone(); + let kind = match origin { + HirOrigin::Raw(ptr) => ResolvedOriginKind::Node(ptr.syntax_node_ptr().to_node(&root)), + HirOrigin::Expanded(ptr) => ResolvedOriginKind::Expanded(ptr.to_node(&root)), + HirOrigin::Desugared(desugared) => { + ResolvedOriginKind::Desugared(root, desugared.clone()) + } + HirOrigin::None => ResolvedOriginKind::None, + }; + + ResolvedOrigin::new(top_mod.file(db.as_hir_db()), kind) + } + + pub(crate) fn map(self, f: F) -> Self + where + F: FnOnce(SyntaxNode) -> Option, + { + let kind = match self.kind { + ResolvedOriginKind::Node(node) => match f(node) { + Some(NodeOrToken::Node(node)) => ResolvedOriginKind::Node(node), + Some(NodeOrToken::Token(token)) => ResolvedOriginKind::Token(token), + None => ResolvedOriginKind::None, + }, + kind => kind, + }; + + ResolvedOrigin { + file: self.file, + kind, + } + } + + pub(crate) fn map_desugared(self, f: F) -> Self + where + F: FnOnce(SyntaxNode, DesugaredOrigin) -> ResolvedOriginKind, + { + let kind = match self.kind 
{ + ResolvedOriginKind::Desugared(root, desugared) => f(root, desugared), + kind => kind, + }; + + ResolvedOrigin { + file: self.file, + kind, + } + } +} + +#[derive(Debug, Clone)] +pub(crate) enum ResolvedOriginKind { + Node(SyntaxNode), + Token(SyntaxToken), + Expanded(SyntaxNode), + Desugared(SyntaxNode, DesugaredOrigin), + None, +} + +impl ChainInitiator for ChainRoot<'_> { + fn init(&self, db: &dyn crate::SpannedHirDb) -> ResolvedOrigin { + match self { + Self::ItemKind(kind) => match kind { + ItemKind::TopMod(top_mod) => top_mod.init(db), + ItemKind::Mod(mod_) => mod_.init(db), + ItemKind::Func(func) => func.init(db), + ItemKind::Struct(struct_) => struct_.init(db), + ItemKind::Contract(contract) => contract.init(db), + ItemKind::Enum(enum_) => enum_.init(db), + ItemKind::TypeAlias(type_alias) => type_alias.init(db), + ItemKind::Impl(impl_) => impl_.init(db), + ItemKind::Trait(trait_) => trait_.init(db), + ItemKind::ImplTrait(impl_trait) => impl_trait.init(db), + ItemKind::Const(const_) => const_.init(db), + ItemKind::Use(use_) => use_.init(db), + ItemKind::Body(body) => body.init(db), + }, + Self::TopMod(top_mod) => top_mod.init(db), + Self::Mod(mod_) => mod_.init(db), + Self::Func(func) => func.init(db), + Self::Struct(struct_) => struct_.init(db), + Self::Contract(contract) => contract.init(db), + Self::Enum(enum_) => enum_.init(db), + Self::TypeAlias(type_alias) => type_alias.init(db), + Self::Impl(impl_) => impl_.init(db), + Self::Trait(trait_) => trait_.init(db), + Self::ImplTrait(impl_trait) => impl_trait.init(db), + Self::Const(const_) => const_.init(db), + Self::Use(use_) => use_.init(db), + Self::Body(body) => body.init(db), + Self::Stmt(stmt) => stmt.init(db), + Self::Expr(expr) => expr.init(db), + Self::Pat(pat) => pat.init(db), + } + } +} + +impl LazySpan for SpanTransitionChain<'_> { + fn resolve(&self, db: &dyn crate::SpannedHirDb) -> Option { + let mut resolved = self.root.init(db); + + for LazyTransitionFn { f, arg } in &self.chain { + 
resolved = f(resolved, *arg); + } + + Some(match resolved.kind { + ResolvedOriginKind::Node(node) => { + Span::new(resolved.file, node.text_range(), SpanKind::Original) + } + ResolvedOriginKind::Token(token) => { + Span::new(resolved.file, token.text_range(), SpanKind::Original) + } + ResolvedOriginKind::Expanded(node) => { + Span::new(resolved.file, node.text_range(), SpanKind::Expanded) + } + ResolvedOriginKind::Desugared(root, desugared) => { + desugared.resolve(db, root, resolved.file) + } + ResolvedOriginKind::None => return None, + }) + } +} + +/// A trait for types that can be used as the root of a `SpanTransitionChain`. +pub(crate) trait ChainInitiator { + /// Returns the `ResolvedOrigin` for the root of the chain. + fn init(&self, db: &dyn crate::SpannedHirDb) -> ResolvedOrigin; +} + +impl ChainInitiator for TopLevelMod<'_> { + fn init(&self, db: &dyn crate::SpannedHirDb) -> ResolvedOrigin { + let file = self.file(db.as_hir_db()); + let ast = top_mod_ast(db.as_hir_db(), *self); + ResolvedOrigin::new(file, ResolvedOriginKind::Node(ast.syntax().clone())) + } +} + +macro_rules! impl_chain_root { + ($(($ty:ty, $fn:ident),)*) => { + $( + impl<'db> ChainInitiator for $ty { + fn init(&self, db: &dyn crate::SpannedHirDb) -> ResolvedOrigin { + let top_mod = self.top_mod(db.as_hir_db()); + let origin = $fn(db, *self); + ResolvedOrigin::resolve(db, top_mod, origin) + } + })* + }; +} + +impl_chain_root! { + (Mod<'db>, mod_ast), + (Func<'db>, func_ast), + (Struct<'db>, struct_ast), + (Contract<'db>, contract_ast), + (Enum<'db>, enum_ast), + (TypeAlias<'db>, type_alias_ast), + (Impl<'db>, impl_ast), + (Trait<'db>, trait_ast), + (ImplTrait<'db>, impl_trait_ast), + (Const<'db>, const_ast), + (Use<'db>, use_ast), + (Body<'db>, body_ast), +} + +macro_rules! define_lazy_span_node { + ( + $name:ident + $(, + $sk_node: ty + $(, + $(@token {$(($name_token:ident, $getter_token:ident),)*})? + $(@node {$(($name_node:ident, $getter_node:ident, $result:tt),)*})? 
+ $(@idx { $(($name_iter:ident, $result_iter:tt),)*})? + $(,)? + )? + )? + ) => { + #[derive(Clone, PartialEq, Eq, Hash, Debug)] + pub struct $name<'db>(pub(crate) crate::span::transition::SpanTransitionChain<'db>); + $( + $( + impl<'db> $name<'db> { + + pub fn top_mod(&self, db: &'db dyn crate::HirDb) -> Option> { + Some(self.0.top_mod(db)) + } + + $($( + pub fn $name_token(&self) -> crate::span::LazySpanAtom<'db> { + let cloned = self.clone(); + paste::paste! { + cloned.[<$name_token _moved>]() + } + } + + paste::paste! { + pub fn [<$name_token _moved>](mut self) -> crate::span::LazySpanAtom<'db> { + use parser::ast::prelude::*; + fn f(origin: crate::span::transition::ResolvedOrigin, _: crate::span::transition::LazyArg) -> crate::span::transition::ResolvedOrigin { + origin.map(|node| <$sk_node as AstNode>::cast(node) + .and_then(|n| n.$getter_token()) + .map(|n| n.into())) + } + + let lazy_transition = crate::span::transition::LazyTransitionFn { + f, + arg: crate::span::transition::LazyArg::None, + }; + + self.0.push(lazy_transition); + crate::span::LazySpanAtom(self.0) + } + } + )*)? + + $($( + pub fn $name_node(&self) -> $result<'db> { + let cloned = self.clone(); + paste::paste! { + cloned.[<$name_node _moved>]() + } + } + + paste::paste! { + pub fn [<$name_node _moved>](mut self) -> $result<'db> { + use parser::ast::prelude::*; + + fn f(origin: crate::span::transition::ResolvedOrigin, _: crate::span::transition::LazyArg) -> crate::span::transition::ResolvedOrigin { + origin.map(|node| <$sk_node as AstNode>::cast(node) + .and_then(|n| n.$getter_node()) + .map(|n| n.syntax().clone().into())) + } + + let lazy_transition = crate::span::transition::LazyTransitionFn { + f, + arg: crate::span::transition::LazyArg::None, + }; + self.0.push(lazy_transition); + $result(self.0) + } + } + )*)? + + $($( + + pub fn $name_iter(&self, idx: usize) -> $result_iter<'db> { + let cloned = self.clone(); + paste::paste! 
{ + cloned.[<$name_iter _moved>](idx) + } + } + + paste::paste! { + pub fn [<$name_iter _moved>](mut self, idx: usize) -> $result_iter<'db> { + use parser::ast::prelude::*; + fn f(origin: crate::span::transition::ResolvedOrigin, arg: crate::span::transition::LazyArg) -> crate::span::transition::ResolvedOrigin { + let idx = match arg { + crate::span::transition::LazyArg::Idx(idx) => idx, + _ => unreachable!(), + }; + + origin.map(|node| <$sk_node as AstNode>::cast(node) + .and_then(|f| f.into_iter().nth(idx)) + .map(|n| n.syntax().clone().into())) + } + + let lazy_transition = crate::span::transition::LazyTransitionFn { + f, + arg: crate::span::transition::LazyArg::Idx(idx), + }; + + self.0.push(lazy_transition); + $result_iter(self.0) + } + } + )*)? + })?)? + + + impl<'db> crate::span::LazySpan for $name<'db> { + fn resolve(&self, db: &dyn crate::SpannedHirDb) -> Option { + self.0.resolve(db) + } + } + + impl<'db> From<$name<'db>> for crate::span::DynLazySpan<'db> { + fn from(val: $name<'db>) -> Self { + Self(val.0.into()) + } + } + + impl<'db> crate::span::SpanDowncast<'db> for $name<'db> { + fn downcast(val: crate::span::DynLazySpan<'db>) -> Option { + val.0.map(|inner| Self(inner)) + } + } + }; +} + +impl DesugaredOrigin { + fn resolve(self, _db: &dyn SpannedHirDb, root: SyntaxNode, file: InputFile) -> Span { + let range = match self { + Self::Use(UseDesugared { + root: use_root, + path, + alias, + focus, + }) => match focus { + DesugaredUseFocus::Root => use_root.syntax_node_ptr().to_node(&root).text_range(), + DesugaredUseFocus::Path => { + if let Some(first_seg) = path.first() { + let last_seg = path.last().unwrap(); + TextRange::new( + first_seg + .syntax_node_ptr() + .to_node(&root) + .text_range() + .start(), + last_seg.syntax_node_ptr().to_node(&root).text_range().end(), + ) + } else { + return Span::new( + file, + TextRange::new(0.into(), 0.into()), + SpanKind::NotFound, + ); + } + } + DesugaredUseFocus::Alias => { + if let Some(alias) = alias { + 
alias.syntax_node_ptr().to_node(&root).text_range() + } else { + return Span::new( + file, + TextRange::new(0.into(), 0.into()), + SpanKind::NotFound, + ); + } + } + }, + }; + + Span::new(file, range, SpanKind::Original) + } +} + +pub(super) use define_lazy_span_node; diff --git a/crates/hir/src/span/types.rs b/crates/hir/src/span/types.rs new file mode 100644 index 0000000000..0a2749e26e --- /dev/null +++ b/crates/hir/src/span/types.rs @@ -0,0 +1,99 @@ +use parser::ast; + +use super::define_lazy_span_node; +use crate::span::{item::LazyBodySpan, params::LazyGenericArgListSpan, path::LazyPathSpan}; + +define_lazy_span_node!(LazyTySpan); +impl<'db> LazyTySpan<'db> { + /// Convert this [`LazyTySpan`] into a [`LazyPathTypeSpan`]. + /// + /// If the type that is pointed to by this is not a path type, the result + /// span will point to the same span of the original type. + pub fn into_path_type(self) -> LazyPathTypeSpan<'db> { + LazyPathTypeSpan(self.0) + } + + /// Convert this [`LazyTySpan`] into a [`LazyPtrTypeSpan`]. + /// + /// If the type that is pointed to by this is not a pointer type, the result + /// span will point to the same span of the original type. + pub fn into_ptr_type(self) -> LazyPtrTypeSpan<'db> { + LazyPtrTypeSpan(self.0) + } + + /// Convert this [`LazyTySpan`] into a [`LazyTupleTypeSpan`]. + /// + /// If the type that is pointed to by this is not a tuple type, the result + /// span will point to the same span of the original type. + pub fn into_tuple_type(self) -> LazyTupleTypeSpan<'db> { + LazyTupleTypeSpan(self.0) + } + + /// convert this [`LazyTySpan`] into a [`LazyArrayTypeSpan`]. + /// + /// If the type that is pointed to by this is not an array type, the result + /// span will point to the same span of the original type. 
+ pub fn into_array_type(self) -> LazyArrayTypeSpan<'db> { + LazyArrayTypeSpan(self.0) + } + + pub fn into_self_type(self) -> LazySelfTypeSpan<'db> { + LazySelfTypeSpan(self.0) + } +} + +define_lazy_span_node!( + LazyPtrTypeSpan, + ast::PtrType, + @token { + (star, star), + } + @node { + (pointee, inner, LazyTySpan), + } +); + +define_lazy_span_node! +( + LazyPathTypeSpan, + ast::PathType, + @node { + (path, path, LazyPathSpan), + } +); + +define_lazy_span_node!( + LazyTupleTypeSpan, + ast::TupleType, + @token { + (l_paren, l_paren), + (r_paren, r_paren), + } + @idx { + (elem_ty, LazyTySpan), + } +); + +define_lazy_span_node!( + LazyArrayTypeSpan, + ast::ArrayType, + @token { + (l_bracket, l_bracket), + (r_bracket, r_bracket), + } + @node { + (elem, elem_ty, LazyTySpan), + (len, len, LazyBodySpan), + } +); + +define_lazy_span_node!( + LazySelfTypeSpan, + ast::SelfType, + @token { + (self_kw, self_kw), + } + @node { + (generic_args, generic_args, LazyGenericArgListSpan), + } +); diff --git a/crates/hir/src/span/use_tree.rs b/crates/hir/src/span/use_tree.rs new file mode 100644 index 0000000000..599b01e44e --- /dev/null +++ b/crates/hir/src/span/use_tree.rs @@ -0,0 +1,84 @@ +use parser::ast::{self, prelude::*}; + +use super::{define_lazy_span_node, LazySpanAtom}; +use crate::span::{ + transition::{LazyArg, LazyTransitionFn, ResolvedOrigin, ResolvedOriginKind}, + DesugaredOrigin, +}; + +define_lazy_span_node!(LazyUsePathSpan); +impl<'db> LazyUsePathSpan<'db> { + pub fn segment(&self, idx: usize) -> LazyUsePathSegmentSpan<'db> { + self.clone().segment_moved(idx) + } + + pub fn segment_moved(mut self, idx: usize) -> LazyUsePathSegmentSpan<'db> { + fn f(origin: ResolvedOrigin, arg: LazyArg) -> ResolvedOrigin { + let LazyArg::Idx(idx) = arg else { + unreachable!() + }; + + origin + .map(|node| { + ast::UsePath::cast(node) + .and_then(|f| f.into_iter().nth(idx)) + .map(|n| n.syntax().clone().into()) + }) + .map_desugared(|root, desugared| match desugared { + 
DesugaredOrigin::Use(use_) => use_ + .path + .get(idx) + .map(|ptr| ResolvedOriginKind::Node(ptr.syntax_node_ptr().to_node(&root))) + .unwrap_or_else(|| ResolvedOriginKind::None), + }) + } + + let lazy_transition = LazyTransitionFn { + f, + arg: LazyArg::Idx(idx), + }; + + self.0.push(lazy_transition); + LazyUsePathSegmentSpan(self.0) + } +} + +define_lazy_span_node!(LazyUsePathSegmentSpan); +impl<'db> LazyUsePathSegmentSpan<'db> { + pub fn into_atom(self) -> LazySpanAtom<'db> { + LazySpanAtom(self.0) + } +} + +define_lazy_span_node!(LazyUseAliasSpan, ast::UseAlias,); + +impl<'db> LazyUseAliasSpan<'db> { + pub fn name(&self) -> LazySpanAtom { + self.clone().name_moved() + } + + pub fn name_moved(mut self) -> LazySpanAtom<'db> { + fn f(origin: ResolvedOrigin, _: LazyArg) -> ResolvedOrigin { + origin + .map(|node| { + ast::UseAlias::cast(node) + .and_then(|a| a.alias()) + .map(|n| n.into()) + }) + .map_desugared(|root, desugared| match desugared { + DesugaredOrigin::Use(use_) => use_ + .alias + .and_then(|ptr| ptr.to_node(&root).alias().map(ResolvedOriginKind::Token)) + .unwrap_or_else(|| ResolvedOriginKind::None), + }) + } + + let lazy_transition = LazyTransitionFn { + f, + arg: LazyArg::None, + }; + self.0.push(lazy_transition); + + LazySpanAtom(self.0) + } +} diff --git a/crates/hir/src/visitor.rs b/crates/hir/src/visitor.rs new file mode 100644 index 0000000000..9383c00414 --- /dev/null +++ b/crates/hir/src/visitor.rs @@ -0,0 +1,2321 @@ +use std::{marker::PhantomData, mem}; + +use crate::{ + hir_def::{ + attr, + scope_graph::{FieldParent, ScopeId}, + Body, CallArg, Const, Contract, Enum, Expr, ExprId, Field, FieldDef, FieldDefListId, + FieldIndex, Func, FuncParam, FuncParamListId, FuncParamName, GenericArg, GenericArgListId, + GenericParam, GenericParamListId, IdentId, Impl, ImplTrait, IngotId, ItemKind, KindBound, + LitKind, MatchArm, Mod, Partial, Pat, PatId, PathId, Stmt, StmtId, Struct, TopLevelMod, + Trait, TraitRefId, TupleTypeId, TypeAlias, TypeBound, 
TypeId, TypeKind, Use, UseAlias, + UsePathId, UsePathSegment, VariantDef, VariantDefListId, VariantKind, WhereClauseId, + WherePredicate, + }, + span::{ + item::LazySuperTraitListSpan, lazy_spans::*, params::LazyTraitRefSpan, + transition::ChainRoot, SpanDowncast, + }, + HirDb, +}; + +pub mod prelude { + pub use super::{ + walk_arm, walk_attribute, walk_attribute_list, walk_body, walk_call_arg, + walk_call_arg_list, walk_const, walk_contract, walk_enum, walk_expr, walk_field, + walk_field_def, walk_field_def_list, walk_field_list, walk_func, walk_func_param, + walk_func_param_list, walk_generic_arg, walk_generic_arg_list, walk_generic_param, + walk_generic_param_list, walk_impl, walk_impl_trait, walk_item, walk_kind_bound, walk_mod, + walk_pat, walk_path, walk_stmt, walk_struct, walk_super_trait_list, walk_top_mod, + walk_trait, walk_trait_ref, walk_ty, walk_type_alias, walk_type_bound, + walk_type_bound_list, walk_use, walk_use_path, walk_variant_def, walk_variant_def_list, + walk_where_clause, walk_where_predicate, Visitor, VisitorCtxt, + }; + pub use crate::span::lazy_spans::*; +} + +/// A visitor for traversing the HIR. 
+pub trait Visitor<'db> { + fn visit_item(&mut self, ctxt: &mut VisitorCtxt<'db, LazyItemSpan<'db>>, item: ItemKind<'db>) { + walk_item(self, ctxt, item) + } + + fn visit_top_mod( + &mut self, + ctxt: &mut VisitorCtxt<'db, LazyTopModSpan<'db>>, + top_mod: TopLevelMod<'db>, + ) { + walk_top_mod(self, ctxt, top_mod) + } + + fn visit_mod(&mut self, ctxt: &mut VisitorCtxt<'db, LazyModSpan<'db>>, module: Mod<'db>) { + walk_mod(self, ctxt, module) + } + + fn visit_func(&mut self, ctxt: &mut VisitorCtxt<'db, LazyFuncSpan<'db>>, func: Func<'db>) { + walk_func(self, ctxt, func) + } + + fn visit_struct( + &mut self, + ctxt: &mut VisitorCtxt<'db, LazyStructSpan<'db>>, + struct_: Struct<'db>, + ) { + walk_struct(self, ctxt, struct_) + } + + fn visit_contract( + &mut self, + ctxt: &mut VisitorCtxt<'db, LazyContractSpan<'db>>, + contract: Contract<'db>, + ) { + walk_contract(self, ctxt, contract) + } + + fn visit_enum(&mut self, ctxt: &mut VisitorCtxt<'db, LazyEnumSpan<'db>>, enum_: Enum<'db>) { + walk_enum(self, ctxt, enum_) + } + + fn visit_type_alias( + &mut self, + ctxt: &mut VisitorCtxt<'db, LazyTypeAliasSpan<'db>>, + alias: TypeAlias<'db>, + ) { + walk_type_alias(self, ctxt, alias) + } + + fn visit_impl(&mut self, ctxt: &mut VisitorCtxt<'db, LazyImplSpan<'db>>, impl_: Impl<'db>) { + walk_impl(self, ctxt, impl_) + } + + fn visit_trait(&mut self, ctxt: &mut VisitorCtxt<'db, LazyTraitSpan<'db>>, trait_: Trait<'db>) { + walk_trait(self, ctxt, trait_) + } + + fn visit_impl_trait( + &mut self, + ctxt: &mut VisitorCtxt<'db, LazyImplTraitSpan<'db>>, + impl_trait: ImplTrait<'db>, + ) { + walk_impl_trait(self, ctxt, impl_trait) + } + + fn visit_const( + &mut self, + ctxt: &mut VisitorCtxt<'db, LazyConstSpan<'db>>, + constant: Const<'db>, + ) { + walk_const(self, ctxt, constant) + } + + fn visit_use(&mut self, ctxt: &mut VisitorCtxt<'db, LazyUseSpan<'db>>, use_: Use<'db>) { + walk_use(self, ctxt, use_) + } + + fn visit_body(&mut self, ctxt: &mut VisitorCtxt<'db, LazyBodySpan<'db>>, 
body: Body<'db>) { + walk_body(self, ctxt, body) + } + + fn visit_attribute_list( + &mut self, + ctxt: &mut VisitorCtxt<'db, LazyAttrListSpan<'db>>, + attrs: AttrListId<'db>, + ) { + walk_attribute_list(self, ctxt, attrs); + } + + fn visit_attribute( + &mut self, + ctxt: &mut VisitorCtxt<'db, LazyAttrSpan<'db>>, + attr: &Attr<'db>, + ) { + walk_attribute(self, ctxt, attr); + } + + fn visit_generic_param_list( + &mut self, + ctxt: &mut VisitorCtxt<'db, LazyGenericParamListSpan<'db>>, + params: GenericParamListId<'db>, + ) { + walk_generic_param_list(self, ctxt, params); + } + + fn visit_generic_param( + &mut self, + ctxt: &mut VisitorCtxt<'db, LazyGenericParamSpan<'db>>, + param: &GenericParam<'db>, + ) { + walk_generic_param(self, ctxt, param); + } + + fn visit_generic_arg_list( + &mut self, + ctxt: &mut VisitorCtxt<'db, LazyGenericArgListSpan<'db>>, + args: GenericArgListId<'db>, + ) { + walk_generic_arg_list(self, ctxt, args); + } + + fn visit_generic_arg( + &mut self, + ctxt: &mut VisitorCtxt<'db, LazyGenericArgSpan<'db>>, + arg: &GenericArg<'db>, + ) { + walk_generic_arg(self, ctxt, arg); + } + + fn visit_call_arg_list( + &mut self, + ctxt: &mut VisitorCtxt<'db, LazyCallArgListSpan<'db>>, + args: &[CallArg<'db>], + ) { + walk_call_arg_list(self, ctxt, args); + } + + fn visit_call_arg( + &mut self, + ctxt: &mut VisitorCtxt<'db, LazyCallArgSpan<'db>>, + arg: CallArg<'db>, + ) { + walk_call_arg(self, ctxt, arg); + } + + fn visit_type_bound_list( + &mut self, + ctxt: &mut VisitorCtxt<'db, LazyTypeBoundListSpan<'db>>, + bounds: &[TypeBound<'db>], + ) { + walk_type_bound_list(self, ctxt, bounds); + } + + fn visit_type_bound( + &mut self, + ctxt: &mut VisitorCtxt<'db, LazyTypeBoundSpan<'db>>, + bound: &TypeBound<'db>, + ) { + walk_type_bound(self, ctxt, bound); + } + + fn visit_trait_ref( + &mut self, + ctxt: &mut VisitorCtxt<'db, LazyTraitRefSpan<'db>>, + trait_ref: TraitRefId<'db>, + ) { + walk_trait_ref(self, ctxt, trait_ref); + } + + fn visit_super_trait_list( + 
&mut self, + ctxt: &mut VisitorCtxt<'db, LazySuperTraitListSpan<'db>>, + super_traits: &[TraitRefId<'db>], + ) { + walk_super_trait_list(self, ctxt, super_traits); + } + + fn visit_kind_bound( + &mut self, + ctxt: &mut VisitorCtxt<'db, LazyKindBoundSpan<'db>>, + bound: &KindBound, + ) { + walk_kind_bound(self, ctxt, bound); + } + + fn visit_where_clause( + &mut self, + ctxt: &mut VisitorCtxt<'db, LazyWhereClauseSpan<'db>>, + where_clause: WhereClauseId<'db>, + ) { + walk_where_clause(self, ctxt, where_clause); + } + + fn visit_where_predicate( + &mut self, + ctxt: &mut VisitorCtxt<'db, LazyWherePredicateSpan<'db>>, + where_predicate: &WherePredicate<'db>, + ) { + walk_where_predicate(self, ctxt, where_predicate); + } + + fn visit_func_param_list( + &mut self, + ctxt: &mut VisitorCtxt<'db, LazyFuncParamListSpan<'db>>, + params: FuncParamListId<'db>, + ) { + walk_func_param_list(self, ctxt, params); + } + + fn visit_func_param( + &mut self, + ctxt: &mut VisitorCtxt<'db, LazyFuncParamSpan<'db>>, + param: &FuncParam<'db>, + ) { + walk_func_param(self, ctxt, param); + } + + fn visit_field_list( + &mut self, + ctxt: &mut VisitorCtxt<'db, LazyFieldListSpan<'db>>, + fields: &[Field<'db>], + ) { + walk_field_list(self, ctxt, fields); + } + + fn visit_field(&mut self, ctxt: &mut VisitorCtxt<'db, LazyFieldSpan<'db>>, field: Field<'db>) { + walk_field(self, ctxt, field); + } + + fn visit_field_def_list( + &mut self, + ctxt: &mut VisitorCtxt<'db, LazyFieldDefListSpan<'db>>, + fields: FieldDefListId<'db>, + ) { + walk_field_def_list(self, ctxt, fields); + } + + fn visit_field_def( + &mut self, + ctxt: &mut VisitorCtxt<'db, LazyFieldDefSpan<'db>>, + field: &FieldDef<'db>, + ) { + walk_field_def(self, ctxt, field); + } + + fn visit_variant_def_list( + &mut self, + ctxt: &mut VisitorCtxt<'db, LazyVariantDefListSpan<'db>>, + variants: VariantDefListId<'db>, + ) { + walk_variant_def_list(self, ctxt, variants); + } + + fn visit_variant_def( + &mut self, + ctxt: &mut VisitorCtxt<'db, 
LazyVariantDefSpan<'db>>, + variant: &VariantDef<'db>, + ) { + walk_variant_def(self, ctxt, variant) + } + + fn visit_stmt( + &mut self, + ctxt: &mut VisitorCtxt<'db, LazyStmtSpan<'db>>, + stmt: StmtId, + #[allow(unused_variables)] stmt_data: &Stmt<'db>, + ) { + walk_stmt(self, ctxt, stmt) + } + + fn visit_expr( + &mut self, + ctxt: &mut VisitorCtxt<'db, LazyExprSpan<'db>>, + expr: ExprId, + #[allow(unused_variables)] expr_data: &Expr<'db>, + ) { + walk_expr(self, ctxt, expr) + } + + fn visit_pat( + &mut self, + ctxt: &mut VisitorCtxt<'db, LazyPatSpan<'db>>, + pat: PatId, + #[allow(unused_variables)] pat_data: &Pat<'db>, + ) { + walk_pat(self, ctxt, pat) + } + + fn visit_arm(&mut self, ctxt: &mut VisitorCtxt<'db, LazyMatchArmSpan<'db>>, arm: &MatchArm) { + walk_arm(self, ctxt, arm) + } + + fn visit_path(&mut self, ctxt: &mut VisitorCtxt<'db, LazyPathSpan<'db>>, path: PathId<'db>) { + walk_path(self, ctxt, path) + } + + fn visit_use_path( + &mut self, + ctxt: &mut VisitorCtxt<'db, LazyUsePathSpan<'db>>, + use_path: UsePathId<'db>, + ) { + walk_use_path(self, ctxt, use_path) + } + + fn visit_ty(&mut self, ctxt: &mut VisitorCtxt<'db, LazyTySpan<'db>>, ty: TypeId<'db>) { + walk_ty(self, ctxt, ty) + } + + fn visit_tuple_type( + &mut self, + ctxt: &mut VisitorCtxt<'db, LazyTupleTypeSpan<'db>>, + ty: TupleTypeId<'db>, + ) { + walk_tuple_type(self, ctxt, ty) + } + + #[allow(unused_variables)] + fn visit_lit(&mut self, ctxt: &mut VisitorCtxt<'db, LazyLitSpan<'db>>, lit: LitKind<'db>) {} + + #[allow(unused_variables)] + fn visit_ident(&mut self, ctxt: &mut VisitorCtxt<'db, LazySpanAtom<'db>>, ident: IdentId<'db>) { + } +} + +pub fn walk_item<'db, V>( + visitor: &mut V, + ctxt: &mut VisitorCtxt<'db, LazyItemSpan<'db>>, + item: ItemKind<'db>, +) where + V: Visitor<'db> + ?Sized, +{ + match item { + ItemKind::TopMod(top_mod) => { + let mut new_ctxt = VisitorCtxt::with_top_mod(ctxt.db, top_mod); + visitor.visit_top_mod(&mut new_ctxt, top_mod); + } + ItemKind::Mod(mod_) => { + 
let mut new_ctxt = VisitorCtxt::with_mod(ctxt.db, mod_); + visitor.visit_mod(&mut new_ctxt, mod_) + } + ItemKind::Func(func) => { + let mut new_ctxt = VisitorCtxt::with_func(ctxt.db, func); + visitor.visit_func(&mut new_ctxt, func) + } + ItemKind::Struct(struct_) => { + let mut new_ctxt = VisitorCtxt::with_struct(ctxt.db, struct_); + visitor.visit_struct(&mut new_ctxt, struct_) + } + ItemKind::Contract(contract) => { + let mut new_ctxt = VisitorCtxt::with_contract(ctxt.db, contract); + visitor.visit_contract(&mut new_ctxt, contract) + } + ItemKind::Enum(enum_) => { + let mut new_ctxt = VisitorCtxt::with_enum(ctxt.db, enum_); + visitor.visit_enum(&mut new_ctxt, enum_) + } + ItemKind::TypeAlias(alias) => { + let mut new_ctxt = VisitorCtxt::with_type_alias(ctxt.db, alias); + visitor.visit_type_alias(&mut new_ctxt, alias) + } + ItemKind::Impl(impl_) => { + let mut new_ctxt = VisitorCtxt::with_impl(ctxt.db, impl_); + visitor.visit_impl(&mut new_ctxt, impl_) + } + ItemKind::Trait(trait_) => { + let mut new_ctxt = VisitorCtxt::with_trait(ctxt.db, trait_); + visitor.visit_trait(&mut new_ctxt, trait_) + } + ItemKind::ImplTrait(impl_trait) => { + let mut new_ctxt = VisitorCtxt::with_impl_trait(ctxt.db, impl_trait); + visitor.visit_impl_trait(&mut new_ctxt, impl_trait) + } + ItemKind::Const(const_) => { + let mut new_ctxt = VisitorCtxt::with_const(ctxt.db, const_); + visitor.visit_const(&mut new_ctxt, const_) + } + ItemKind::Use(use_) => { + let mut new_ctxt = VisitorCtxt::with_use(ctxt.db, use_); + visitor.visit_use(&mut new_ctxt, use_) + } + ItemKind::Body(body) => { + let mut new_ctxt = VisitorCtxt::with_body(ctxt.db, body); + visitor.visit_body(&mut new_ctxt, body) + } + }; +} + +pub fn walk_top_mod<'db, V>( + visitor: &mut V, + ctxt: &mut VisitorCtxt<'db, LazyTopModSpan<'db>>, + top_mod: TopLevelMod<'db>, +) where + V: Visitor<'db> + ?Sized, +{ + for child in top_mod.children_non_nested(ctxt.db) { + visitor.visit_item(&mut VisitorCtxt::with_item(ctxt.db, child), child); 
+ } +} + +pub fn walk_mod<'db, V>( + visitor: &mut V, + ctxt: &mut VisitorCtxt<'db, LazyModSpan<'db>>, + mod_: Mod<'db>, +) where + V: Visitor<'db> + ?Sized, +{ + if let Some(name) = mod_.name(ctxt.db).to_opt() { + ctxt.with_new_ctxt( + |span| span.name_moved(), + |ctxt| { + visitor.visit_ident(ctxt, name); + }, + ) + }; + + ctxt.with_new_ctxt( + |span| span.attributes_moved(), + |ctxt| { + let id = mod_.attributes(ctxt.db); + visitor.visit_attribute_list(ctxt, id); + }, + ); + + for child in mod_.children_non_nested(ctxt.db) { + visitor.visit_item(&mut VisitorCtxt::with_item(ctxt.db, child), child); + } +} + +pub fn walk_func<'db, V>( + visitor: &mut V, + ctxt: &mut VisitorCtxt<'db, LazyFuncSpan<'db>>, + func: Func<'db>, +) where + V: Visitor<'db> + ?Sized, +{ + if let Some(name) = func.name(ctxt.db).to_opt() { + ctxt.with_new_ctxt( + |span| span.name_moved(), + |ctxt| { + visitor.visit_ident(ctxt, name); + }, + ) + }; + + ctxt.with_new_ctxt( + |span| span.attributes_moved(), + |ctxt| { + let id = func.attributes(ctxt.db); + visitor.visit_attribute_list(ctxt, id); + }, + ); + + ctxt.with_new_ctxt( + |span| span.generic_params_moved(), + |ctxt| { + let id = func.generic_params(ctxt.db); + visitor.visit_generic_param_list(ctxt, id); + }, + ); + + ctxt.with_new_ctxt( + |span| span.where_clause_moved(), + |ctxt| { + let id = func.where_clause(ctxt.db); + visitor.visit_where_clause(ctxt, id); + }, + ); + + if let Some(id) = func.params(ctxt.db).to_opt() { + ctxt.with_new_ctxt( + |span| span.params_moved(), + |ctxt| { + visitor.visit_func_param_list(ctxt, id); + }, + ) + } + + if let Some(ty) = func.ret_ty(ctxt.db) { + ctxt.with_new_ctxt( + |span| span.ret_ty_moved(), + |ctxt| { + visitor.visit_ty(ctxt, ty); + }, + ) + } + + if let Some(body) = func.body(ctxt.db) { + visitor.visit_body(&mut VisitorCtxt::with_body(ctxt.db, body), body); + } +} + +pub fn walk_struct<'db, V>( + visitor: &mut V, + ctxt: &mut VisitorCtxt<'db, LazyStructSpan<'db>>, + struct_: Struct<'db>, +) 
where + V: Visitor<'db> + ?Sized, +{ + if let Some(id) = struct_.name(ctxt.db).to_opt() { + ctxt.with_new_ctxt( + |span| span.name_moved(), + |ctxt| { + visitor.visit_ident(ctxt, id); + }, + ) + } + + ctxt.with_new_ctxt( + |span| span.attributes_moved(), + |ctxt| { + let id = struct_.attributes(ctxt.db); + visitor.visit_attribute_list(ctxt, id); + }, + ); + + ctxt.with_new_ctxt( + |span| span.generic_params_moved(), + |ctxt| { + let id = struct_.generic_params(ctxt.db); + visitor.visit_generic_param_list(ctxt, id); + }, + ); + + ctxt.with_new_ctxt( + |span| span.where_clause_moved(), + |ctxt| { + let id = struct_.where_clause(ctxt.db); + visitor.visit_where_clause(ctxt, id); + }, + ); + + ctxt.with_new_ctxt( + |span| span.fields_moved(), + |ctxt| { + let id = struct_.fields(ctxt.db); + visitor.visit_field_def_list(ctxt, id); + }, + ); +} + +pub fn walk_contract<'db, V>( + visitor: &mut V, + ctxt: &mut VisitorCtxt<'db, LazyContractSpan<'db>>, + contract: Contract<'db>, +) where + V: Visitor<'db> + ?Sized, +{ + if let Some(id) = contract.name(ctxt.db).to_opt() { + ctxt.with_new_ctxt( + |span| span.name_moved(), + |ctxt| { + visitor.visit_ident(ctxt, id); + }, + ) + } + + ctxt.with_new_ctxt( + |span| span.attributes_moved(), + |ctxt| { + let id = contract.attributes(ctxt.db); + visitor.visit_attribute_list(ctxt, id); + }, + ); + + ctxt.with_new_ctxt( + |span| span.fields_moved(), + |ctxt| { + let id = contract.fields(ctxt.db); + visitor.visit_field_def_list(ctxt, id); + }, + ); +} + +pub fn walk_enum<'db, V>( + visitor: &mut V, + ctxt: &mut VisitorCtxt<'db, LazyEnumSpan<'db>>, + enum_: Enum<'db>, +) where + V: Visitor<'db> + ?Sized, +{ + if let Some(id) = enum_.name(ctxt.db).to_opt() { + ctxt.with_new_ctxt( + |span| span.name_moved(), + |ctxt| { + visitor.visit_ident(ctxt, id); + }, + ) + } + + ctxt.with_new_ctxt( + |span| span.attributes_moved(), + |ctxt| { + let id = enum_.attributes(ctxt.db); + visitor.visit_attribute_list(ctxt, id); + }, + ); + + 
ctxt.with_new_ctxt( + |span| span.generic_params_moved(), + |ctxt| { + let id = enum_.generic_params(ctxt.db); + visitor.visit_generic_param_list(ctxt, id); + }, + ); + + ctxt.with_new_ctxt( + |span| span.where_clause_moved(), + |ctxt| { + let id = enum_.where_clause(ctxt.db); + visitor.visit_where_clause(ctxt, id); + }, + ); + + ctxt.with_new_ctxt( + |span| span.variants_moved(), + |ctxt| { + let id = enum_.variants(ctxt.db); + visitor.visit_variant_def_list(ctxt, id); + }, + ); +} + +pub fn walk_type_alias<'db, V>( + visitor: &mut V, + ctxt: &mut VisitorCtxt<'db, LazyTypeAliasSpan<'db>>, + alias: TypeAlias<'db>, +) where + V: Visitor<'db> + ?Sized, +{ + if let Some(id) = alias.name(ctxt.db).to_opt() { + ctxt.with_new_ctxt( + |span| span.alias_moved(), + |ctxt| { + visitor.visit_ident(ctxt, id); + }, + ) + } + + ctxt.with_new_ctxt( + |span| span.attributes_moved(), + |ctxt| { + let id = alias.attributes(ctxt.db); + visitor.visit_attribute_list(ctxt, id); + }, + ); + + ctxt.with_new_ctxt( + |span| span.generic_params_moved(), + |ctxt| { + let id = alias.generic_params(ctxt.db); + visitor.visit_generic_param_list(ctxt, id); + }, + ); + + if let Some(ty) = alias.ty(ctxt.db).to_opt() { + ctxt.with_new_ctxt( + |span| span.ty_moved(), + |ctxt| { + visitor.visit_ty(ctxt, ty); + }, + ) + } +} + +pub fn walk_impl<'db, V>( + visitor: &mut V, + ctxt: &mut VisitorCtxt<'db, LazyImplSpan<'db>>, + impl_: Impl<'db>, +) where + V: Visitor<'db> + ?Sized, +{ + if let Some(ty) = impl_.ty(ctxt.db).to_opt() { + ctxt.with_new_ctxt( + |span| span.target_ty_moved(), + |ctxt| { + visitor.visit_ty(ctxt, ty); + }, + ) + } + + ctxt.with_new_ctxt( + |span| span.attributes_moved(), + |ctxt| { + let id = impl_.attributes(ctxt.db); + visitor.visit_attribute_list(ctxt, id); + }, + ); + + ctxt.with_new_ctxt( + |span| span.generic_params_moved(), + |ctxt| { + let id = impl_.generic_params(ctxt.db); + visitor.visit_generic_param_list(ctxt, id); + }, + ); + + ctxt.with_new_ctxt( + |span| 
span.where_clause_moved(), + |ctxt| { + let id = impl_.where_clause(ctxt.db); + visitor.visit_where_clause(ctxt, id); + }, + ); + + for item in impl_.children_non_nested(ctxt.db) { + visitor.visit_item(&mut VisitorCtxt::with_item(ctxt.db, item), item); + } +} + +pub fn walk_trait<'db, V>( + visitor: &mut V, + ctxt: &mut VisitorCtxt<'db, LazyTraitSpan<'db>>, + trait_: Trait<'db>, +) where + V: Visitor<'db> + ?Sized, +{ + if let Some(name) = trait_.name(ctxt.db).to_opt() { + ctxt.with_new_ctxt( + |span| span.name_moved(), + |ctxt| { + visitor.visit_ident(ctxt, name); + }, + ) + } + + ctxt.with_new_ctxt( + |span| span.attributes_moved(), + |ctxt| { + let id = trait_.attributes(ctxt.db); + visitor.visit_attribute_list(ctxt, id); + }, + ); + + ctxt.with_new_ctxt( + |span| span.generic_params_moved(), + |ctxt| { + let id = trait_.generic_params(ctxt.db); + visitor.visit_generic_param_list(ctxt, id); + }, + ); + + ctxt.with_new_ctxt( + |span| span.super_traits(), + |ctxt| visitor.visit_super_trait_list(ctxt, trait_.super_traits(ctxt.db)), + ); + + ctxt.with_new_ctxt( + |span| span.where_clause_moved(), + |ctxt| { + let id = trait_.where_clause(ctxt.db); + visitor.visit_where_clause(ctxt, id); + }, + ); + + for item in trait_.children_non_nested(ctxt.db) { + visitor.visit_item(&mut VisitorCtxt::with_item(ctxt.db, item), item); + } +} + +pub fn walk_impl_trait<'db, V>( + visitor: &mut V, + ctxt: &mut VisitorCtxt<'db, LazyImplTraitSpan<'db>>, + impl_trait: ImplTrait<'db>, +) where + V: Visitor<'db> + ?Sized, +{ + if let Some(trait_ref) = impl_trait.trait_ref(ctxt.db).to_opt() { + ctxt.with_new_ctxt( + |span| span.trait_ref_moved(), + |ctxt| { + visitor.visit_trait_ref(ctxt, trait_ref); + }, + ) + } + + if let Some(ty) = impl_trait.ty(ctxt.db).to_opt() { + ctxt.with_new_ctxt( + |span| span.ty_moved(), + |ctxt| { + visitor.visit_ty(ctxt, ty); + }, + ) + } + + ctxt.with_new_ctxt( + |span| span.attributes_moved(), + |ctxt| { + let id = impl_trait.attributes(ctxt.db); + 
visitor.visit_attribute_list(ctxt, id); + }, + ); + + ctxt.with_new_ctxt( + |span| span.generic_params_moved(), + |ctxt| { + let id = impl_trait.generic_params(ctxt.db); + visitor.visit_generic_param_list(ctxt, id); + }, + ); + + ctxt.with_new_ctxt( + |span| span.where_clause_moved(), + |ctxt| { + let id = impl_trait.where_clause(ctxt.db); + visitor.visit_where_clause(ctxt, id); + }, + ); + + for item in impl_trait.children_non_nested(ctxt.db) { + visitor.visit_item(&mut VisitorCtxt::with_item(ctxt.db, item), item); + } +} + +pub fn walk_const<'db, V>( + visitor: &mut V, + ctxt: &mut VisitorCtxt<'db, LazyConstSpan<'db>>, + const_: Const<'db>, +) where + V: Visitor<'db> + ?Sized, +{ + if let Some(name) = const_.name(ctxt.db).to_opt() { + ctxt.with_new_ctxt( + |span| span.name_moved(), + |ctxt| { + visitor.visit_ident(ctxt, name); + }, + ) + } + + if let Some(ty) = const_.ty(ctxt.db).to_opt() { + ctxt.with_new_ctxt( + |span| span.ty_moved(), + |ctxt| { + visitor.visit_ty(ctxt, ty); + }, + ) + } + + if let Some(body) = const_.body(ctxt.db).to_opt() { + visitor.visit_body(&mut VisitorCtxt::with_body(ctxt.db, body), body); + } +} + +pub fn walk_use<'db, V>( + visitor: &mut V, + ctxt: &mut VisitorCtxt<'db, LazyUseSpan<'db>>, + use_: Use<'db>, +) where + V: Visitor<'db> + ?Sized, +{ + if let Some(use_path) = use_.path(ctxt.db).to_opt() { + ctxt.with_new_ctxt( + |span| span.path_moved(), + |ctxt| { + visitor.visit_use_path(ctxt, use_path); + }, + ) + } + + if let Some(Partial::Present(UseAlias::Ident(ident))) = use_.alias(ctxt.db) { + ctxt.with_new_ctxt( + |span| span.alias_moved().name_moved(), + |ctxt| { + visitor.visit_ident(ctxt, ident); + }, + ) + } +} + +pub fn walk_body<'db, V>( + visitor: &mut V, + ctxt: &mut VisitorCtxt<'db, LazyBodySpan<'db>>, + body: Body<'db>, +) where + V: Visitor<'db> + ?Sized, +{ + let body_expr = body.expr(ctxt.db); + visit_node_in_body!(visitor, ctxt, &body_expr, expr); +} + +pub fn walk_stmt<'db, V>( + visitor: &mut V, + ctxt: &mut 
VisitorCtxt<'db, LazyStmtSpan<'db>>, + stmt: StmtId, +) where + V: Visitor<'db> + ?Sized, +{ + let Partial::Present(stmt) = stmt.data(ctxt.db, ctxt.body()) else { + return; + }; + + match stmt { + Stmt::Let(pat_id, ty, expr_id) => { + visit_node_in_body!(visitor, ctxt, pat_id, pat); + + if let Some(ty) = ty { + ctxt.with_new_ctxt( + |span| span.into_let_stmt().ty_moved(), + |ctxt| { + visitor.visit_ty(ctxt, *ty); + }, + ) + }; + + if let Some(expr_id) = expr_id { + visit_node_in_body!(visitor, ctxt, expr_id, expr); + } + } + + Stmt::For(pat_id, cond_id, for_body_id) => { + visit_node_in_body!(visitor, ctxt, pat_id, pat); + visit_node_in_body!(visitor, ctxt, cond_id, expr); + visit_node_in_body!(visitor, ctxt, for_body_id, expr); + } + + Stmt::While(cond_id, while_body_id) => { + visit_node_in_body!(visitor, ctxt, cond_id, expr); + visit_node_in_body!(visitor, ctxt, while_body_id, expr); + } + + Stmt::Return(Some(expr_id)) | Stmt::Expr(expr_id) => { + visit_node_in_body!(visitor, ctxt, expr_id, expr); + } + + Stmt::Return(None) | Stmt::Continue | Stmt::Break => {} + } +} + +pub fn walk_expr<'db, V>( + visitor: &mut V, + ctxt: &mut VisitorCtxt<'db, LazyExprSpan<'db>>, + expr: ExprId, +) where + V: Visitor<'db> + ?Sized, +{ + let Partial::Present(data) = expr.data(ctxt.db, ctxt.body()) else { + return; + }; + + match data { + Expr::Lit(lit) => ctxt.with_new_ctxt( + |span| span.into_lit_expr().lit_moved(), + |ctxt| { + visitor.visit_lit(ctxt, *lit); + }, + ), + + Expr::Block(stmts) => { + let s_graph = ctxt.top_mod().scope_graph(ctxt.db); + let scope = ctxt.scope(); + for item in s_graph.child_items(scope) { + let mut new_ctxt = VisitorCtxt::with_item(ctxt.db, item); + visitor.visit_item(&mut new_ctxt, item); + } + + for stmt_id in stmts { + visit_node_in_body!(visitor, ctxt, stmt_id, stmt); + } + } + + Expr::Bin(lhs_id, rhs_id, _) => { + visit_node_in_body!(visitor, ctxt, lhs_id, expr); + visit_node_in_body!(visitor, ctxt, rhs_id, expr); + } + + Expr::Un(expr_id, _) 
=> { + visit_node_in_body!(visitor, ctxt, expr_id, expr); + } + + Expr::Call(callee_id, call_args) => { + visit_node_in_body!(visitor, ctxt, callee_id, expr); + ctxt.with_new_ctxt( + |span| span.into_call_expr(), + |ctxt| { + ctxt.with_new_ctxt( + |span| span.args_moved(), + |ctxt| { + visitor.visit_call_arg_list(ctxt, call_args); + }, + ); + }, + ); + } + + Expr::MethodCall(receiver_id, method_name, generic_args, call_args) => { + visit_node_in_body!(visitor, ctxt, receiver_id, expr); + + ctxt.with_new_ctxt( + |span| span.into_method_call_expr(), + |ctxt| { + if let Some(method_name) = method_name.to_opt() { + ctxt.with_new_ctxt( + |span| span.method_name_moved(), + |ctxt| visitor.visit_ident(ctxt, method_name), + ); + } + + ctxt.with_new_ctxt( + |span| span.generic_args_moved(), + |ctxt| visitor.visit_generic_arg_list(ctxt, *generic_args), + ); + + ctxt.with_new_ctxt( + |span| span.args_moved(), + |ctxt| { + visitor.visit_call_arg_list(ctxt, call_args); + }, + ); + }, + ); + } + + Expr::Path(path) => { + if let Some(path) = path.to_opt() { + ctxt.with_new_ctxt( + |span| span.into_path_expr().path_moved(), + |ctxt| { + visitor.visit_path(ctxt, path); + }, + ); + } + } + + Expr::RecordInit(path, fields) => { + ctxt.with_new_ctxt( + |span| span.into_record_init_expr(), + |ctxt| { + if let Some(path) = path.to_opt() { + ctxt.with_new_ctxt( + |span| span.path_moved(), + |ctxt| { + visitor.visit_path(ctxt, path); + }, + ); + } + + ctxt.with_new_ctxt( + |span| span.fields_moved(), + |ctxt| { + visitor.visit_field_list(ctxt, fields); + }, + ); + }, + ); + } + + Expr::Field(receiver_id, field_name) => { + visit_node_in_body!(visitor, ctxt, receiver_id, expr); + + match field_name { + Partial::Present(FieldIndex::Ident(ident)) => { + ctxt.with_new_ctxt( + |span| span.into_field_expr().accessor_moved(), + |ctxt| visitor.visit_ident(ctxt, *ident), + ); + } + + Partial::Present(FieldIndex::Index(index)) => { + ctxt.with_new_ctxt( + |span| 
span.into_field_expr().accessor_moved().into_lit_span(), + |ctxt| visitor.visit_lit(ctxt, (*index).into()), + ); + } + + Partial::Absent => {} + } + } + + Expr::Tuple(elems) => { + for elem_id in elems { + visit_node_in_body!(visitor, ctxt, elem_id, expr); + } + } + + Expr::Index(lhs_id, rhs_id) => { + visit_node_in_body!(visitor, ctxt, lhs_id, expr); + visit_node_in_body!(visitor, ctxt, rhs_id, expr); + } + + Expr::Array(elems) => { + for elem_id in elems { + visit_node_in_body!(visitor, ctxt, elem_id, expr); + } + } + + Expr::ArrayRep(val, rep) => { + visit_node_in_body!(visitor, ctxt, val, expr); + if let Some(body) = rep.to_opt() { + visitor.visit_body(&mut VisitorCtxt::with_body(ctxt.db, body), body); + } + } + + Expr::If(cond, then, else_) => { + visit_node_in_body!(visitor, ctxt, cond, expr); + visit_node_in_body!(visitor, ctxt, then, expr); + if let Some(else_) = else_ { + visit_node_in_body!(visitor, ctxt, else_, expr); + } + } + + Expr::Match(scrutinee, arms) => { + visit_node_in_body!(visitor, ctxt, scrutinee, expr); + + if let Partial::Present(arms) = arms { + ctxt.with_new_ctxt( + |span| span.into_match_expr().arms_moved(), + |ctxt| { + for (i, arm) in arms.iter().enumerate() { + ctxt.with_new_ctxt( + |span| span.arm_moved(i), + |ctxt| { + visitor.visit_arm(ctxt, arm); + }, + ); + } + }, + ); + } + } + + Expr::Assign(left_expr_id, right_expr_id) => { + visit_node_in_body!(visitor, ctxt, left_expr_id, expr); + visit_node_in_body!(visitor, ctxt, right_expr_id, expr); + } + + Expr::AugAssign(left_expr_id, right_expr_id, _) => { + visit_node_in_body!(visitor, ctxt, left_expr_id, expr); + visit_node_in_body!(visitor, ctxt, right_expr_id, expr); + } + } +} + +pub fn walk_arm<'db, V>( + visitor: &mut V, + ctxt: &mut VisitorCtxt<'db, LazyMatchArmSpan<'db>>, + arm: &MatchArm, +) where + V: Visitor<'db> + ?Sized, +{ + visit_node_in_body!(visitor, ctxt, &arm.pat, pat); + visit_node_in_body!(visitor, ctxt, &arm.body, expr); +} + +pub fn walk_pat<'db, V>(visitor: 
&mut V, ctxt: &mut VisitorCtxt<'db, LazyPatSpan<'db>>, pat: PatId) +where + V: Visitor<'db> + ?Sized, +{ + let Partial::Present(data) = pat.data(ctxt.db, ctxt.body()) else { + return; + }; + + match data { + Pat::Lit(lit) => { + if let Some(lit) = lit.to_opt() { + ctxt.with_new_ctxt( + |span| span.into_lit_pat().lit_moved(), + |ctxt| { + visitor.visit_lit(ctxt, lit); + }, + ) + }; + } + + Pat::Tuple(elems) => { + for elem in elems { + visit_node_in_body!(visitor, ctxt, elem, pat); + } + } + + Pat::Path(path, _) => { + if let Some(path) = path.to_opt() { + ctxt.with_new_ctxt( + |span| span.into_path_pat().path_moved(), + |ctxt| { + visitor.visit_path(ctxt, path); + }, + ) + }; + } + + Pat::PathTuple(path, elems) => { + if let Some(path) = path.to_opt() { + ctxt.with_new_ctxt( + |span| span.into_path_tuple_pat().path_moved(), + |ctxt| { + visitor.visit_path(ctxt, path); + }, + ) + }; + + for elem in elems { + visit_node_in_body!(visitor, ctxt, elem, pat); + } + } + + Pat::Record(path, fields) => ctxt.with_new_ctxt( + |span| span.into_record_pat(), + |ctxt| { + if let Some(path) = path.to_opt() { + ctxt.with_new_ctxt( + |span| span.path_moved(), + |ctxt| { + visitor.visit_path(ctxt, path); + }, + ); + } + + ctxt.with_new_ctxt( + |span| span.fields_moved(), + |ctxt| { + for (i, field) in fields.iter().enumerate() { + ctxt.with_new_ctxt( + |span| span.field_moved(i), + |ctxt| { + if let Some(label) = field.label.to_opt() { + ctxt.with_new_ctxt( + |span| span.name_moved(), + |ctxt| { + visitor.visit_ident(ctxt, label); + }, + ); + } + + visit_node_in_body!(visitor, ctxt, &field.pat, pat); + }, + ); + } + }, + ); + }, + ), + + Pat::Or(lhs, rhs) => { + visit_node_in_body!(visitor, ctxt, lhs, pat); + visit_node_in_body!(visitor, ctxt, rhs, pat); + } + + Pat::WildCard | Pat::Rest => {} + } +} + +pub fn walk_attribute_list<'db, V>( + visitor: &mut V, + ctxt: &mut VisitorCtxt<'db, LazyAttrListSpan<'db>>, + attr: AttrListId<'db>, +) where + V: Visitor<'db> + ?Sized, +{ + for 
(idx, attr) in attr.data(ctxt.db).iter().enumerate() { + ctxt.with_new_ctxt( + |span| span.attr_moved(idx), + |ctxt| { + visitor.visit_attribute(ctxt, attr); + }, + ) + } +} + +pub fn walk_attribute<'db, V>( + visitor: &mut V, + ctxt: &mut VisitorCtxt<'db, LazyAttrSpan<'db>>, + attr: &Attr<'db>, +) where + V: Visitor<'db> + ?Sized, +{ + match attr { + Attr::Normal(normal_attr) => { + ctxt.with_new_ctxt( + |span| span.into_normal_attr(), + |ctxt| { + if let Some(ident) = normal_attr.name.to_opt() { + ctxt.with_new_ctxt( + |span| span.name_moved(), + |ctxt| { + visitor.visit_ident(ctxt, ident); + }, + ) + } + + ctxt.with_new_ctxt( + |span| span.args_moved(), + |ctxt| { + for (i, arg) in normal_attr.args.iter().enumerate() { + ctxt.with_new_ctxt( + |span| span.arg_moved(i), + |ctxt| { + if let Some(key) = arg.key.to_opt() { + ctxt.with_new_ctxt( + |span| span.key_moved(), + |ctxt| { + visitor.visit_ident(ctxt, key); + }, + ); + } + if let Some(value) = arg.value.to_opt() { + ctxt.with_new_ctxt( + |span| span.value_moved(), + |ctxt| { + visitor.visit_ident(ctxt, value); + }, + ); + } + }, + ); + } + }, + ); + }, + ); + } + + Attr::DocComment(doc_comment) => ctxt.with_new_ctxt( + |span| span.into_doc_comment_attr().doc_moved().into_lit_span(), + |ctxt| { + visitor.visit_lit(ctxt, doc_comment.text.into()); + }, + ), + } +} + +pub fn walk_generic_param_list<'db, V>( + visitor: &mut V, + ctxt: &mut VisitorCtxt<'db, LazyGenericParamListSpan<'db>>, + params: GenericParamListId<'db>, +) where + V: Visitor<'db> + ?Sized, +{ + let parent_item = ctxt.scope().item(); + for (i, param) in params.data(ctxt.db).iter().enumerate() { + ctxt.with_new_scoped_ctxt( + ScopeId::GenericParam(parent_item, i), + |span| span.param_moved(i), + |ctxt| { + visitor.visit_generic_param(ctxt, param); + }, + ) + } +} + +pub fn walk_generic_param<'db, V>( + visitor: &mut V, + ctxt: &mut VisitorCtxt<'db, LazyGenericParamSpan<'db>>, + param: &GenericParam<'db>, +) where + V: Visitor<'db> + ?Sized, +{ + 
match param { + GenericParam::Type(ty_param) => ctxt.with_new_ctxt( + |span| span.into_type_param(), + |ctxt| { + if let Some(name) = ty_param.name.to_opt() { + ctxt.with_new_ctxt( + |span| span.name_moved(), + |ctxt| { + visitor.visit_ident(ctxt, name); + }, + ); + } + + ctxt.with_new_ctxt( + |span| span.bounds_moved(), + |ctxt| { + visitor.visit_type_bound_list(ctxt, &ty_param.bounds); + }, + ); + }, + ), + + GenericParam::Const(const_param) => ctxt.with_new_ctxt( + |span| span.into_const_param(), + |ctxt| { + if let Some(name) = const_param.name.to_opt() { + ctxt.with_new_ctxt( + |span| span.name_moved(), + |ctxt| { + visitor.visit_ident(ctxt, name); + }, + ); + } + + if let Some(ty) = const_param.ty.to_opt() { + ctxt.with_new_ctxt( + |span| span.ty_moved(), + |ctxt| { + visitor.visit_ty(ctxt, ty); + }, + ); + } + }, + ), + } +} + +pub fn walk_generic_arg_list<'db, V>( + visitor: &mut V, + ctxt: &mut VisitorCtxt<'db, LazyGenericArgListSpan<'db>>, + args: GenericArgListId<'db>, +) where + V: Visitor<'db> + ?Sized, +{ + for (i, arg) in args.data(ctxt.db).iter().enumerate() { + ctxt.with_new_ctxt( + |span| span.arg_moved(i), + |ctxt| { + visitor.visit_generic_arg(ctxt, arg); + }, + ) + } +} + +pub fn walk_generic_arg<'db, V>( + visitor: &mut V, + ctxt: &mut VisitorCtxt<'db, LazyGenericArgSpan<'db>>, + arg: &GenericArg<'db>, +) where + V: Visitor<'db> + ?Sized, +{ + match arg { + GenericArg::Type(type_arg) => { + if let Some(ty) = type_arg.ty.to_opt() { + ctxt.with_new_ctxt( + |span| span.into_type_arg().ty_moved(), + |ctxt| { + visitor.visit_ty(ctxt, ty); + }, + ) + } + } + + GenericArg::Const(const_arg) => { + if let Some(body) = const_arg.body.to_opt() { + visitor.visit_body(&mut VisitorCtxt::with_body(ctxt.db, body), body); + } + } + } +} + +pub fn walk_call_arg_list<'db, V>( + visitor: &mut V, + ctxt: &mut VisitorCtxt<'db, LazyCallArgListSpan<'db>>, + args: &[CallArg<'db>], +) where + V: Visitor<'db> + ?Sized, +{ + for (idx, arg) in 
args.iter().copied().enumerate() { + ctxt.with_new_ctxt( + |span| span.arg_moved(idx), + |ctxt| { + visitor.visit_call_arg(ctxt, arg); + }, + ) + } +} + +pub fn walk_call_arg<'db, V>( + visitor: &mut V, + ctxt: &mut VisitorCtxt<'db, LazyCallArgSpan<'db>>, + arg: CallArg<'db>, +) where + V: Visitor<'db> + ?Sized, +{ + if let Some(label) = arg.label { + ctxt.with_new_ctxt( + |span| span.label_moved(), + |ctxt| visitor.visit_ident(ctxt, label), + ); + } + + visit_node_in_body!(visitor, ctxt, &arg.expr, expr); +} + +pub fn walk_func_param_list<'db, V>( + visitor: &mut V, + ctxt: &mut VisitorCtxt<'db, LazyFuncParamListSpan<'db>>, + params: FuncParamListId<'db>, +) where + V: Visitor<'db> + ?Sized, +{ + let parent_item = ctxt.scope().item(); + for (idx, param) in params.data(ctxt.db).iter().enumerate() { + ctxt.with_new_scoped_ctxt( + ScopeId::FuncParam(parent_item, idx), + |span| span.param_moved(idx), + |ctxt| { + visitor.visit_func_param(ctxt, param); + }, + ) + } +} + +pub fn walk_func_param<'db, V>( + visitor: &mut V, + ctxt: &mut VisitorCtxt<'db, LazyFuncParamSpan<'db>>, + param: &FuncParam<'db>, +) where + V: Visitor<'db> + ?Sized, +{ + if let Some(FuncParamName::Ident(ident)) = param.label { + ctxt.with_new_ctxt( + |span| span.label_moved(), + |ctxt| visitor.visit_ident(ctxt, ident), + ); + } + + if let Some(FuncParamName::Ident(ident)) = param.name.to_opt() { + ctxt.with_new_ctxt( + |span| span.name_moved(), + |ctxt| visitor.visit_ident(ctxt, ident), + ); + } + + if let Some(ty) = param.ty.to_opt() { + if param.is_self_param(ctxt.db) && param.self_ty_fallback { + ctxt.with_new_ctxt( + |span| span.fallback_self_ty(), + |ctxt| { + visitor.visit_ty(ctxt, ty); + }, + ); + } else { + ctxt.with_new_ctxt(|span| span.ty_moved(), |ctxt| visitor.visit_ty(ctxt, ty)); + } + } +} + +pub fn walk_field_list<'db, V>( + visitor: &mut V, + ctxt: &mut VisitorCtxt<'db, LazyFieldListSpan<'db>>, + fields: &[Field<'db>], +) where + V: Visitor<'db> + ?Sized, +{ + for (idx, field) in 
fields.iter().copied().enumerate() { + ctxt.with_new_ctxt( + |span| span.field_moved(idx), + |ctxt| { + visitor.visit_field(ctxt, field); + }, + ) + } +} + +pub fn walk_field<'db, V>( + visitor: &mut V, + ctxt: &mut VisitorCtxt<'db, LazyFieldSpan<'db>>, + field: Field<'db>, +) where + V: Visitor<'db> + ?Sized, +{ + if let Some(name) = field.label { + ctxt.with_new_ctxt( + |span| span.label_moved(), + |ctxt| visitor.visit_ident(ctxt, name), + ); + } + + visit_node_in_body!(visitor, ctxt, &field.expr, expr); +} + +pub fn walk_field_def_list<'db, V>( + visitor: &mut V, + ctxt: &mut VisitorCtxt<'db, LazyFieldDefListSpan<'db>>, + fields: FieldDefListId<'db>, +) where + V: Visitor<'db> + ?Sized, +{ + let parent = match ctxt.scope() { + ScopeId::Item(item) => FieldParent::Item(item), + ScopeId::Variant(item, idx) => FieldParent::Variant(item, idx), + _ => unreachable!(), + }; + for (idx, field) in fields.data(ctxt.db).iter().enumerate() { + ctxt.with_new_scoped_ctxt( + ScopeId::Field(parent, idx), + |span| span.field_moved(idx), + |ctxt| { + visitor.visit_field_def(ctxt, field); + }, + ) + } +} + +pub fn walk_field_def<'db, V>( + visitor: &mut V, + ctxt: &mut VisitorCtxt<'db, LazyFieldDefSpan<'db>>, + field: &FieldDef<'db>, +) where + V: Visitor<'db> + ?Sized, +{ + if let Some(name) = field.name.to_opt() { + ctxt.with_new_ctxt( + |span| span.name_moved(), + |ctxt| { + visitor.visit_ident(ctxt, name); + }, + ) + } + + if let Some(ty) = field.ty.to_opt() { + ctxt.with_new_ctxt( + |span| span.ty_moved(), + |ctxt| { + visitor.visit_ty(ctxt, ty); + }, + ) + } +} + +pub fn walk_variant_def_list<'db, V>( + visitor: &mut V, + ctxt: &mut VisitorCtxt<'db, LazyVariantDefListSpan<'db>>, + variants: VariantDefListId<'db>, +) where + V: Visitor<'db> + ?Sized, +{ + let parent_item = ctxt.scope().item(); + for (idx, variant) in variants.data(ctxt.db).iter().enumerate() { + ctxt.with_new_scoped_ctxt( + ScopeId::Variant(parent_item, idx), + |span| span.variant_moved(idx), + |ctxt| { + 
visitor.visit_variant_def(ctxt, variant); + }, + ) + } +} + +pub fn walk_variant_def<'db, V>( + visitor: &mut V, + ctxt: &mut VisitorCtxt<'db, LazyVariantDefSpan<'db>>, + variant: &VariantDef<'db>, +) where + V: Visitor<'db> + ?Sized, +{ + if let Some(name) = variant.name.to_opt() { + ctxt.with_new_ctxt( + |span| span.name_moved(), + |ctxt| { + visitor.visit_ident(ctxt, name); + }, + ) + } + + match variant.kind { + VariantKind::Unit => {} + VariantKind::Tuple(t) => ctxt.with_new_ctxt( + |span| span.tuple_type_moved(), + |ctxt| visitor.visit_tuple_type(ctxt, t), + ), + + VariantKind::Record(fields) => ctxt.with_new_ctxt( + |span| span.fields_moved(), + |ctxt| visitor.visit_field_def_list(ctxt, fields), + ), + } +} + +pub fn walk_path<'db, V>( + visitor: &mut V, + ctxt: &mut VisitorCtxt<'db, LazyPathSpan<'db>>, + path: PathId<'db>, +) where + V: Visitor<'db> + ?Sized, +{ + walk_path_impl(visitor, ctxt, path); +} + +fn walk_path_impl<'db, V>( + visitor: &mut V, + ctxt: &mut VisitorCtxt<'db, LazyPathSpan<'db>>, + path: PathId<'db>, +) -> usize +where + V: Visitor<'db> + ?Sized, +{ + let idx = if let Some(parent) = path.parent(ctxt.db()) { + 1 + walk_path_impl(visitor, ctxt, parent) + } else { + 0 + }; + if let Some(ident) = path.ident(ctxt.db).to_opt() { + ctxt.with_new_ctxt( + |span| span.segment_moved(idx).into_atom(), + |ctxt| { + visitor.visit_ident(ctxt, ident); + }, + ); + } + let generic_args = path.generic_args(ctxt.db); + ctxt.with_new_ctxt( + |span| span.segment(idx).generic_args_moved(), + |ctxt| { + visitor.visit_generic_arg_list(ctxt, generic_args); + }, + ); + idx +} + +pub fn walk_use_path<'db, V>( + visitor: &mut V, + ctxt: &mut VisitorCtxt<'db, LazyUsePathSpan<'db>>, + path: UsePathId<'db>, +) where + V: Visitor<'db> + ?Sized, +{ + for (i, segment) in path.data(ctxt.db).iter().enumerate() { + if let Some(UsePathSegment::Ident(ident)) = segment.to_opt() { + ctxt.with_new_ctxt( + |span| span.segment_moved(i).into_atom(), + |ctxt| { + 
visitor.visit_ident(ctxt, ident); + }, + ) + } + } +} + +pub fn walk_ty<'db, V>( + visitor: &mut V, + ctxt: &mut VisitorCtxt<'db, LazyTySpan<'db>>, + ty: TypeId<'db>, +) where + V: Visitor<'db> + ?Sized, +{ + match ty.data(ctxt.db) { + TypeKind::Ptr(ty) => { + if let Some(ty) = ty.to_opt() { + ctxt.with_new_ctxt( + |ctxt| ctxt.into_ptr_type().pointee(), + |ctxt| { + visitor.visit_ty(ctxt, ty); + }, + ) + } + } + + TypeKind::Path(path) => ctxt.with_new_ctxt( + |span| span.into_path_type(), + |ctxt| { + if let Some(path) = path.to_opt() { + ctxt.with_new_ctxt( + |span| span.path_moved(), + |ctxt| visitor.visit_path(ctxt, path), + ); + } + }, + ), + + TypeKind::Tuple(t) => ctxt.with_new_ctxt( + |span| span.into_tuple_type(), + |ctxt| walk_tuple_type(visitor, ctxt, *t), + ), + + TypeKind::Array(elem, body) => ctxt.with_new_ctxt( + |span| span.into_array_type(), + |ctxt| { + if let Some(elem) = elem.to_opt() { + ctxt.with_new_ctxt( + |span| span.elem_moved(), + |ctxt| { + visitor.visit_ty(ctxt, elem); + }, + ) + } + if let Some(body) = body.to_opt() { + visitor.visit_body(&mut VisitorCtxt::with_body(ctxt.db, body), body); + } + }, + ), + + TypeKind::SelfType(generic_args) => ctxt.with_new_ctxt( + |span| span.into_self_type(), + |ctxt| { + ctxt.with_new_ctxt( + |span| span.generic_args_moved(), + |ctxt| { + visitor.visit_generic_arg_list(ctxt, *generic_args); + }, + ); + }, + ), + + TypeKind::Never => {} + } +} + +pub fn walk_tuple_type<'db, V>( + visitor: &mut V, + ctxt: &mut VisitorCtxt<'db, LazyTupleTypeSpan<'db>>, + ty: TupleTypeId<'db>, +) where + V: Visitor<'db> + ?Sized, +{ + for (i, elem) in ty.data(ctxt.db()).iter().enumerate() { + let Some(elem) = elem.to_opt() else { + continue; + }; + ctxt.with_new_ctxt( + |span| span.elem_ty_moved(i), + |ctxt| { + visitor.visit_ty(ctxt, elem); + }, + ) + } +} + +pub fn walk_type_bound_list<'db, V>( + visitor: &mut V, + ctxt: &mut VisitorCtxt<'db, LazyTypeBoundListSpan<'db>>, + bounds: &[TypeBound<'db>], +) where + V: 
Visitor<'db> + ?Sized, +{ + for (idx, bound) in bounds.iter().enumerate() { + ctxt.with_new_ctxt( + |span| span.bound_moved(idx), + |ctxt| { + visitor.visit_type_bound(ctxt, bound); + }, + ) + } +} + +pub fn walk_type_bound<'db, V>( + visitor: &mut V, + ctxt: &mut VisitorCtxt<'db, LazyTypeBoundSpan<'db>>, + bound: &TypeBound<'db>, +) where + V: Visitor<'db> + ?Sized, +{ + match bound { + TypeBound::Trait(trait_ref) => ctxt.with_new_ctxt( + |span| span.trait_bound_moved(), + |ctxt| visitor.visit_trait_ref(ctxt, *trait_ref), + ), + TypeBound::Kind(Partial::Present(kind_bound)) => ctxt.with_new_ctxt( + |span| span.kind_bound_moved(), + |ctxt| { + visitor.visit_kind_bound(ctxt, kind_bound); + }, + ), + _ => {} + } +} + +pub fn walk_trait_ref<'db, V>( + visitor: &mut V, + ctxt: &mut VisitorCtxt<'db, LazyTraitRefSpan<'db>>, + trait_ref: TraitRefId<'db>, +) where + V: Visitor<'db> + ?Sized, +{ + if let Some(path) = trait_ref.path(ctxt.db()).to_opt() { + ctxt.with_new_ctxt( + |span| span.path_moved(), + |ctxt| { + visitor.visit_path(ctxt, path); + }, + ) + } +} + +pub fn walk_super_trait_list<'db, V>( + visitor: &mut V, + ctxt: &mut VisitorCtxt<'db, LazySuperTraitListSpan<'db>>, + super_traits: &[TraitRefId<'db>], +) where + V: Visitor<'db> + ?Sized, +{ + for (idx, super_trait) in super_traits.iter().enumerate() { + ctxt.with_new_ctxt( + |span| span.super_trait_moved(idx), + |ctxt| { + visitor.visit_trait_ref(ctxt, *super_trait); + }, + ) + } +} + +pub fn walk_kind_bound<'db, V>( + visitor: &mut V, + ctxt: &mut VisitorCtxt<'db, LazyKindBoundSpan<'db>>, + bound: &KindBound, +) where + V: Visitor<'db> + ?Sized, +{ + let KindBound::Abs(lhs, rhs) = bound else { + return; + }; + + if let Partial::Present(lhs) = lhs { + ctxt.with_new_ctxt( + |span| span.abs_moved().lhs_moved(), + |ctxt| { + visitor.visit_kind_bound(ctxt, lhs.as_ref()); + }, + ) + } + + if let Partial::Present(rhs) = rhs { + ctxt.with_new_ctxt( + |span| span.abs_moved().rhs_moved(), + |ctxt| { + 
visitor.visit_kind_bound(ctxt, rhs.as_ref()); + }, + ) + } +} + +pub fn walk_where_clause<'db, V>( + visitor: &mut V, + ctxt: &mut VisitorCtxt<'db, LazyWhereClauseSpan<'db>>, + predicates: WhereClauseId<'db>, +) where + V: Visitor<'db> + ?Sized, +{ + for (idx, predicate) in predicates.data(ctxt.db).iter().enumerate() { + ctxt.with_new_ctxt( + |span| span.predicate_moved(idx), + |ctxt| { + visitor.visit_where_predicate(ctxt, predicate); + }, + ) + } +} + +pub fn walk_where_predicate<'db, V>( + visitor: &mut V, + ctxt: &mut VisitorCtxt<'db, LazyWherePredicateSpan<'db>>, + predicate: &WherePredicate<'db>, +) where + V: Visitor<'db> + ?Sized, +{ + if let Some(ty) = predicate.ty.to_opt() { + ctxt.with_new_ctxt( + |span| span.ty_moved(), + |ctxt| { + visitor.visit_ty(ctxt, ty); + }, + ) + } + + ctxt.with_new_ctxt( + |span| span.bounds_moved(), + |ctxt| { + visitor.visit_type_bound_list(ctxt, &predicate.bounds); + }, + ) +} + +use attr::{Attr, AttrListId}; + +/// [`VisitorCtxt`] is used to track the span information and the scope of the +/// current node being visited. +/// The context is updated automatically when entering a new node. Thus, the +/// user need to only construct the context when invoking a visitor. 
+pub struct VisitorCtxt<'db, T> +where + T: LazySpan, +{ + db: &'db dyn HirDb, + span: DynLazySpan<'db>, + scope_stack: Vec>, + + _t: PhantomData, +} + +impl<'db, T> VisitorCtxt<'db, T> +where + T: LazySpan, +{ + pub fn new(db: &'db dyn HirDb, scope: ScopeId<'db>, span: T) -> Self + where + T: Into>, + { + Self { + db, + span: span.into(), + scope_stack: vec![scope], + _t: PhantomData, + } + } + + pub fn db(&self) -> &'db dyn HirDb { + self.db + } + + pub fn ingot(&self) -> IngotId<'db> { + self.scope().ingot(self.db) + } + + pub fn span(&self) -> Option + where + T: SpanDowncast<'db>, + { + let dyn_span: DynLazySpan = self.span.clone(); + T::downcast(dyn_span) + } + + pub fn scope(&self) -> ScopeId<'db> { + *self.scope_stack.last().unwrap() + } + + pub fn top_mod(&self) -> TopLevelMod<'db> { + match self.span.0.as_ref().unwrap().root { + ChainRoot::ItemKind(item) => item.top_mod(self.db), + ChainRoot::TopMod(top_mod) => top_mod, + ChainRoot::Mod(mod_) => mod_.top_mod(self.db), + ChainRoot::Func(func) => func.top_mod(self.db), + ChainRoot::Struct(struct_) => struct_.top_mod(self.db), + ChainRoot::Contract(contract) => contract.top_mod(self.db), + ChainRoot::Enum(enum_) => enum_.top_mod(self.db), + ChainRoot::TypeAlias(alias) => alias.top_mod(self.db), + ChainRoot::Impl(impl_) => impl_.top_mod(self.db), + ChainRoot::Trait(trait_) => trait_.top_mod(self.db), + ChainRoot::ImplTrait(impl_trait) => impl_trait.top_mod(self.db), + ChainRoot::Const(const_) => const_.top_mod(self.db), + ChainRoot::Use(use_) => use_.top_mod(self.db), + ChainRoot::Body(body) => body.top_mod(self.db), + ChainRoot::Stmt(_) | ChainRoot::Expr(_) | ChainRoot::Pat(_) => { + self.body().top_mod(self.db) + } + } + } + + /// Create a new context for visiting a pattern. + /// `scope` is the scope that encloses the pattern. 
+ pub fn with_pat(db: &'db dyn HirDb, scope: ScopeId<'db>, body: Body<'db>, pat: PatId) -> Self { + Self { + db, + span: LazyPatSpan::new(body, pat).into(), + scope_stack: vec![scope], + _t: PhantomData, + } + } + + /// Create a new context for visiting a statement. + /// `scope` is the scope that encloses the statement. + pub fn with_stmt( + db: &'db dyn HirDb, + scope: ScopeId<'db>, + body: Body<'db>, + stmt: StmtId, + ) -> Self { + Self { + db, + span: LazyStmtSpan::new(body, stmt).into(), + scope_stack: vec![scope], + _t: PhantomData, + } + } + + /// Create a new context for visiting an expression. + /// `scope` is the scope that encloses the expression. + pub fn with_expr( + db: &'db dyn HirDb, + scope: ScopeId<'db>, + body: Body<'db>, + expr: ExprId, + ) -> Self { + let scope_id = match expr.data(db, body) { + Partial::Present(Expr::Block(_)) => ScopeId::Block(body, expr), + _ => scope, + }; + + Self { + db, + span: LazyExprSpan::new(body, expr).into(), + scope_stack: vec![scope_id], + _t: PhantomData, + } + } + + /// Returns the body that encloses the current node. + /// # panic + /// Panics when the current node is not enclosed by a body. 
+ pub fn body(&self) -> Body<'db> { + match self.span.0.as_ref().unwrap().root { + ChainRoot::Body(body) => body, + ChainRoot::Expr(expr) => expr.body, + ChainRoot::Stmt(stmt) => stmt.body, + ChainRoot::Pat(pat) => pat.body, + _ => panic!(), + } + } + + fn with_new_scoped_ctxt(&mut self, scope_id: ScopeId<'db>, f1: F1, f2: F2) + where + T: SpanDowncast<'db>, + F1: FnOnce(T) -> U, + F2: FnOnce(&mut VisitorCtxt<'db, U>), + U: LazySpan + SpanDowncast<'db> + Into>, + { + self.scope_stack.push(scope_id); + self.with_new_ctxt(f1, f2); + self.scope_stack.pop(); + } + + fn with_new_ctxt(&mut self, f1: F1, f2: F2) + where + T: SpanDowncast<'db>, + F1: FnOnce(T) -> U, + F2: FnOnce(&mut VisitorCtxt<'db, U>), + U: LazySpan + SpanDowncast<'db> + Into>, + { + let chain_len = self.span.0.as_ref().unwrap().len(); + let mut new_ctxt = self.transition(f1); + + f2(&mut new_ctxt); + + let n_pop = new_ctxt.span.0.as_ref().unwrap().len() - chain_len; + *self = new_ctxt.pop(n_pop); + } + + fn transition(&mut self, f: F) -> VisitorCtxt<'db, U> + where + T: SpanDowncast<'db>, + F: FnOnce(T) -> U, + U: LazySpan + SpanDowncast<'db> + Into>, + { + let dyn_span = mem::replace(&mut self.span, DynLazySpan::invalid()); + let scope_stack = mem::take(&mut self.scope_stack); + let span = T::downcast(dyn_span).unwrap(); + let u = f(span); + + Self { + db: self.db, + span: u.into(), + scope_stack, + _t: PhantomData, + } + .cast() + } + + fn pop(mut self, n_pop: usize) -> VisitorCtxt<'db, U> + where + U: LazySpan, + { + for _ in 0..n_pop { + self.span.0.as_mut().unwrap().pop_transition(); + } + + Self { + db: self.db, + span: self.span, + scope_stack: self.scope_stack, + _t: PhantomData, + } + .cast() + } + + fn cast(self) -> VisitorCtxt<'db, U> { + VisitorCtxt { + db: self.db, + span: self.span, + scope_stack: self.scope_stack, + _t: PhantomData, + } + } +} + +macro_rules! 
define_item_ctxt_ctor { + ($(( + $span_ty:ty, + $ctor:ident($ctor_name:ident: $ctor_ty:ty)),)*) => { + $(impl<'db> VisitorCtxt<'db, $span_ty> { + /// Create a new [`VisitorCtxt`] with the given item as the root of the span chain. + pub fn $ctor(db: &'db dyn HirDb, $ctor_name: $ctor_ty) -> Self { + Self { + db, + span: <$span_ty>::new($ctor_name).into(), + scope_stack: vec![$ctor_name.scope()], + _t: PhantomData, + } + } + })* + }; +} + +define_item_ctxt_ctor! { + (LazyItemSpan<'db>, with_item(item: ItemKind<'db>)), + (LazyTopModSpan<'db>, with_top_mod(top_mod: TopLevelMod<'db>)), + (LazyModSpan<'db>, with_mod(mod_: Mod<'db>)), + (LazyFuncSpan<'db>, with_func(func: Func<'db>)), + (LazyStructSpan<'db>, with_struct(struct_: Struct<'db>)), + (LazyContractSpan<'db>, with_contract(contract: Contract<'db>)), + (LazyEnumSpan<'db>, with_enum(enum_: Enum<'db>)), + (LazyTypeAliasSpan<'db>, with_type_alias(type_alias: TypeAlias<'db>)), + (LazyImplSpan<'db>, with_impl(impl_: Impl<'db>)), + (LazyTraitSpan<'db>, with_trait(trait_: Trait<'db>)), + (LazyImplTraitSpan<'db>, with_impl_trait(impl_trait: ImplTrait<'db>)), + (LazyConstSpan<'db>, with_const(const_: Const<'db>)), + (LazyUseSpan<'db>, with_use(use_: Use<'db>)), + (LazyBodySpan<'db>, with_body(body: Body<'db>)), +} + +macro_rules! visit_node_in_body { + ($visitor:expr, $ctxt:expr, $id:expr, $inner:ident) => { + if let Partial::Present(data) = $id.data($ctxt.db, $ctxt.body()) { + let scope = *$ctxt.scope_stack.last().unwrap(); + paste::paste! 
{ + $visitor.[](&mut VisitorCtxt::[]($ctxt.db, scope, $ctxt.body(), *$id), *$id, data); + + } + } + } +} +use visit_node_in_body; + +#[cfg(test)] +mod tests { + + use super::*; + use crate::test_db::TestDb; + struct MyVisitor<'db> { + generic_param_list: Option>, + attributes: Vec>, + lit_ints: Vec>, + } + + impl<'db> Visitor<'db> for MyVisitor<'db> { + fn visit_attribute( + &mut self, + ctxt: &mut VisitorCtxt<'db, LazyAttrSpan<'db>>, + _attrs: &Attr<'db>, + ) { + self.attributes.push(ctxt.span().unwrap()); + } + + fn visit_generic_param_list( + &mut self, + ctxt: &mut VisitorCtxt<'db, LazyGenericParamListSpan<'db>>, + _params: GenericParamListId<'db>, + ) { + self.generic_param_list = Some(ctxt.span().unwrap()); + } + + fn visit_lit(&mut self, ctxt: &mut VisitorCtxt<'db, LazyLitSpan<'db>>, lit: LitKind<'db>) { + if let LitKind::Int(_) = lit { + self.lit_ints.push(ctxt.span().unwrap()); + } + } + } + + #[test] + fn visitor() { + let mut db = TestDb::default(); + let text = r#" + #attr1 + #attr2 + fn foo() { + 1 + "foo" + 42 + }"#; + + let (ingot, file) = db.standalone_file(text); + + let func = db.expect_item::(ingot, file); + let top_mod = func.top_mod(&db); + + let mut visitor = MyVisitor { + generic_param_list: None, + attributes: Vec::new(), + lit_ints: Vec::new(), + }; + + let mut ctxt = VisitorCtxt::with_func(&db, func); + visitor.visit_func(&mut ctxt, func); + + assert_eq!( + "", + db.text_at(top_mod, &visitor.generic_param_list.unwrap()) + ); + + assert_eq!(visitor.attributes.len(), 2); + assert_eq!("#attr1", db.text_at(top_mod, &visitor.attributes[0])); + assert_eq!("#attr2", db.text_at(top_mod, &visitor.attributes[1])); + + assert_eq!(visitor.lit_ints.len(), 2); + assert_eq!("1", db.text_at(top_mod, &visitor.lit_ints[0])); + assert_eq!("42", db.text_at(top_mod, &visitor.lit_ints[1])); + } +} diff --git a/crates/language-server/Cargo.toml b/crates/language-server/Cargo.toml new file mode 100644 index 0000000000..cef7fed5c8 --- /dev/null +++ 
b/crates/language-server/Cargo.toml @@ -0,0 +1,44 @@ +[package] +name = "fe-language-server" +version = "0.26.0" +edition = "2021" +authors = ["The Fe Developers "] +license = "Apache-2.0" +repository = "https://github.com/ethereum/fe" +description = "An LSP language server for Fe lang" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +salsa.workspace = true +codespan-reporting = "0.11" +act-locally = "0.1.1" +hir = { path = "../hir", package = "fe-hir" } +hir-analysis = { path = "../hir-analysis", package = "fe-hir-analysis" } +camino = "1.1.4" +common = { path = "../common2", package = "fe-common2" } +anyhow = "1.0.71" +serde_json = "1.0.96" +rowan = "0.15.10" +fxhash = "0.2.1" +patricia_tree = "0.6.2" +glob = "0.3.1" +url = "2.4.1" +tokio = { version = "1.35.1", features = ["full", "io-std", "tracing", "net"] } +tokio-macros = "2.2.0" +futures = "0.3.28" +rust-embed = "8.3.0" +futures-batch = "0.6.1" +tracing = "0.1.40" +tracing-subscriber = "0.3.18" +async-lsp = { git = "https://github.com/micahscopes/async-lsp", branch = "pub-inner-type-id" } +tower = "0.4.13" +serde = "1.0.204" +clap = { version = "4.5.18", features = ["derive"] } +async-std = "1.13.0" +async-compat = "0.2.4" +tracing-tree = "0.4.0" + +[dev-dependencies] +fe-compiler-test-utils = { path = "../test-utils" } +dir-test = "0.1" diff --git a/crates/language-server/Makefile b/crates/language-server/Makefile new file mode 100644 index 0000000000..7b1ed08971 --- /dev/null +++ b/crates/language-server/Makefile @@ -0,0 +1,3 @@ +.PHONY: clippy +clippy: + cargo clippy -- -D warnings -A clippy::upper-case-acronyms -A clippy::large-enum-variant diff --git a/crates/language-server/README.md b/crates/language-server/README.md new file mode 100644 index 0000000000..09b722233d --- /dev/null +++ b/crates/language-server/README.md @@ -0,0 +1,121 @@ +# `fe-language-server` +## An LSP server for the Fe programming language +The Fe 
Language Server project aims to develop a robust, efficient, and feature-rich language server that enhances the development experience for Fe developers. This includes implementing a wide range of Language Server Protocol (LSP) features, improving the backend infrastructure, integrating with the VSCode extension, and thorough testing and documentation. + +## Current status: 🚧 experimental 🚧 + +The language server is being developed against an ongoing Fe compiler rewrite, so it doesn't currently support the version of Fe in the main branch. + +## Editor support +An LSP enabled VSCode Extension designed to work with this language server can be found in [the `editors/vscode` directory](./editors/vscode). + +## Development +To build the language server binary, run: +```bash +cargo build +``` + +One straightforward way to interact with the language server during development is to follow [the instructions in the Fe VSCode extension readme](./editors/vscode/README.md). + +## Progress and Roadmap +### May 2023 +- [x] LSP server crate scaffolding +- [x] LSP-based VSCode extension +- [x] Hover prototype + - printing debug information +- [x] Basic development setup and documentation +- [x] Orientation, research and study of rust-analyzer and Fe compiler +### June 2023 +- [x] Diagnostics publishing prototype + - using current deprecated diagnostics functionality +### July 2023 +- [x] Syntax highlighting in the VSCode extension via Textmate grammar file +- Initial work on transitioning to the new compiler architecture + - [x] Salsa2022 database setup for language server + - [x] Overhaul diagnostics types to support types in new compiler crates + - [x] Working prototype of new diagnostics functionality based on @Y-Nak's `name-resolution` branch +### August 2023 +- [x] Improvements to LSP server + VSCode extension development setup +- [x] Improved `document/didChange` handling +- [x] Flexible LSP server global logging support using the `log` crate +- [x] Code style cleanup and 
linting +- Further work on transitioning to the new compiler overhaul + - [x] Go-to definition prototype working for standalone .fe files; based on @Y-Nak's compiler rewrite + - [x] Name resolution from cursor position + - [x] Snap tests and unit tests for go-to definition functionality + - [x] Error handling improvements + - At the time, the rewritten name resolution only supported type resolution in the early resolution stage, but the data types and LSP server implementation should seamlessly support progress on the new compiler's name resolution functionality without many changes +### September 2023 +- [x] Research and study of rust-analyzer's file synchronization functionality +- Go-to definition cleanup +- Initial "workspace" functionality + - a data structure and API for generating, caching and synchronizing necessary salsa inputs to support language server functionality + - should support file synchronization via filesystem watching and LSP file change events alike + - [x] Sketch of initial workspace cache data structures + - Featuring a prefix tree based lookup for salsa inputs representing files, ingots and top-modules + - Supporting local, external and standalone ingot caches. 
+ - [x] Upgraded go-to definition functionality to work with "local" ingots containing multiple modules + - [x] Integration of workspace data structures with existing language server data structures; slight refactor of existing architecture + - [x] Initial workspace sync prototype and tests +- [x] Initial support for subdiagnostics +- [x] Workspace sync API brainstorming and study +### October-December 2023 +- [x] Improve the internal synchronization API for the "workspace" data structure to support an explicit update step +- [x] Integrated the "workspace" data structure with LSP events and a rudimentary filesystem watcher +### January-March 2024 +- Initial prototype supporting concurrency/task management with `tower-lsp` +### July-November 2024 +- Big architecture overhaul, switched from `tower-lsp` to `async-lsp` + - much better control over concurrency and event processing order, working around issues described [here](https://github.com/ebkalderon/tower-lsp/issues/284) + - custom actor implementation supporting lock-free language server state + - handlers with thread-local futures +- Optional stream-based event handling support +- Multithreaded tracing and logging improvements +- Debounced, multithreaded diagnostics +- TCP support; support for multiple clients +- Organize and prepare server architecture for a variety of LSP features +- Implement a task queue system for long-running tasks +- Enhance hover feature to return useful information and documentation +- Initial work on VSCode extension release pipeline + +### To-do and tentative roadmap + +#### Server improvements +- [ ] Implement progress feedback notifications +- [ ] Implement configurable options for the language server + - Diagnostics configuration + - Inlay configuration + +#### LSP Features +- [ ] Expand go-to feature to support variables, functions, implementations and other references +- [ ] Support go-to definitions for Fe standard library +- [ ] Improve diagnostics implementation and 
expand tests +- [ ] Autofill and completions +- [ ] Import completion +- [ ] Refactoring symbol names and file names +- [ ] Inlay hints and annotations +- [ ] File system navigation +- [ ] Show syntax tree, HIR, etc. +- [ ] Syntactic and semantic highlighting + +#### Integration with VSCode and Zed +- [x] Configurable LSP binary +- [ ] VSCode extension release pipeline + - [x] build setup + - [ ] release pipeline github actions +- [ ] Implement configuration and options shortcuts in the VSCode extension +- [ ] Investigate support for running tests/proofs from the VSCode extension +- [ ] Zed integration + +#### Testing and Documentation +- [ ] Better test coverage +- [ ] Document the code, architecture, rationale for decisions, risks, and roadmap +- [ ] Create a catalog of examples that activate various LSP features + +#### Research and Miscellaneous +- [ ] Look into possibility of supporting proof and Fe test functionality in the language server +- [ ] Investigate possible use cases for extending the salsa architecture into the language server more directly, e.g. performance improvements +- [ ] View bytecode for selected code +- [ ] Code actions for smart contract development +- [ ] Would any LSP features be useful for plugging into LLM-based tools? E.g. analytics or descriptions of the codebase? 
+- [ ] Security review and documentation diff --git a/crates/language-server/src/backend/db.rs b/crates/language-server/src/backend/db.rs new file mode 100644 index 0000000000..d92231c60f --- /dev/null +++ b/crates/language-server/src/backend/db.rs @@ -0,0 +1,32 @@ +use common::{impl_db_traits, InputDb}; + +use hir::{HirDb, LowerHirDb, SpannedHirDb}; +use hir_analysis::HirAnalysisDb; +// xxx use salsa::{ParallelDatabase, Snapshot}; + +#[salsa::db] +pub trait LanguageServerDb: + salsa::Database + HirAnalysisDb + HirDb + LowerHirDb + SpannedHirDb + InputDb +{ +} + +#[salsa::db] +impl LanguageServerDb for DB where + DB: Sized + salsa::Database + HirAnalysisDb + HirDb + LowerHirDb + SpannedHirDb + InputDb +{ +} + +#[salsa::db] +#[derive(Default, Clone)] +pub struct LanguageServerDatabase { + storage: salsa::Storage, +} + +impl_db_traits!( + LanguageServerDatabase, + InputDb, + HirDb, + LowerHirDb, + SpannedHirDb, + HirAnalysisDb +); diff --git a/crates/language-server/src/backend/mod.rs b/crates/language-server/src/backend/mod.rs new file mode 100644 index 0000000000..62cdc1557f --- /dev/null +++ b/crates/language-server/src/backend/mod.rs @@ -0,0 +1,34 @@ +pub(crate) mod db; +pub(crate) mod workspace; +use async_lsp::ClientSocket; +use db::LanguageServerDatabase; +use workspace::Workspace; + +// use tower_lsp::Client; + +pub struct Backend { + pub(super) client: ClientSocket, + pub(super) db: LanguageServerDatabase, + pub(super) workspace: Workspace, + #[allow(dead_code)] // TODO: salsa3-compatible parallelism + pub(super) workers: tokio::runtime::Runtime, +} + +impl Backend { + pub fn new(client: ClientSocket) -> Self { + let db = LanguageServerDatabase::default(); + let workspace = Workspace::default(); + + let workers = tokio::runtime::Builder::new_multi_thread() + .worker_threads(1) + .enable_all() + .build() + .unwrap(); + Self { + client, + db, + workspace, + workers, + } + } +} diff --git a/crates/language-server/src/backend/workspace.rs 
b/crates/language-server/src/backend/workspace.rs new file mode 100644 index 0000000000..894fef9489 --- /dev/null +++ b/crates/language-server/src/backend/workspace.rs @@ -0,0 +1,738 @@ +use std::path::{Path, PathBuf}; + +use super::db::LanguageServerDatabase; +use anyhow::Result; +use common::{ + indexmap::IndexSet, + input::{IngotKind, Version}, + InputFile, InputIngot, +}; + +use patricia_tree::StringPatriciaMap; +use salsa::Setter; +use tracing::info; + +use rust_embed::RustEmbed; + +#[derive(RustEmbed)] +#[folder = "../library/std"] +struct StdLib; + +const FE_CONFIG_SUFFIX: &str = "fe.toml"; + +fn ingot_directory_key(path: String) -> String { + path.strip_suffix(FE_CONFIG_SUFFIX) + .unwrap_or(&path) + .to_string() +} + +pub trait IngotFileContext { + fn get_input_for_file_path(&self, path: &str) -> Option<(InputIngot, InputFile)>; + + fn touch_input_for_file_path( + &mut self, + db: &mut LanguageServerDatabase, + path: &str, + ) -> Option<(InputIngot, InputFile)>; + + fn get_ingot_for_file_path(&self, path: &str) -> Option; + + fn touch_ingot_for_file_path( + &mut self, + db: &mut LanguageServerDatabase, + path: &str, + ) -> Option; + + fn remove_input_for_file_path( + &mut self, + db: &mut LanguageServerDatabase, + path: &str, + ) -> Result<()>; +} + +pub struct LocalIngotContext { + pub ingot: InputIngot, + pub files: StringPatriciaMap, +} + +fn ingot_contains_file(ingot_path: &str, file_path: &str) -> bool { + let ingot_path = ingot_path + .strip_suffix(&FE_CONFIG_SUFFIX) + .unwrap_or(ingot_path); + file_path.starts_with(ingot_path) +} + +pub fn get_containing_ingot_mut<'a, T>( + ingots: &'a mut StringPatriciaMap, + path: &'a str, +) -> Option<&'a mut T> { + ingots + .get_longest_common_prefix_mut(path) + .filter(|(ingot_path, _)| ingot_contains_file(ingot_path, path)) + .map(|(_, ingot)| ingot) +} + +pub fn get_containing_ingot<'a, T>( + ingots: &'a StringPatriciaMap, + path: &'a str, +) -> Option<&'a T> { + ingots + .get_longest_common_prefix(path) + 
.filter(|(ingot_path, _)| ingot_contains_file(ingot_path, path)) + .map(|(_, ingot)| ingot) +} + +impl LocalIngotContext { + pub fn new(db: &LanguageServerDatabase, config_path: &str) -> Option { + let ingot = InputIngot::new( + db, + config_path, + IngotKind::Local, + Version::new(0, 0, 0), + IndexSet::new(), + ); + Some(Self { + ingot, + files: StringPatriciaMap::new(), + }) + } +} + +impl IngotFileContext for LocalIngotContext { + fn touch_input_for_file_path( + &mut self, + db: &mut LanguageServerDatabase, + path: &str, + ) -> Option<(InputIngot, InputFile)> { + let ingot = self.touch_ingot_for_file_path(db, path)?; + let input = self + .files + .get(path) + .copied() + .unwrap_or_else(|| InputFile::new(db, path.into(), String::new())); + self.files.insert(path, input); + ingot.set_files(db, self.files.values().copied().collect()); + Some((ingot, input)) + } + + fn get_input_for_file_path(&self, path: &str) -> Option<(InputIngot, InputFile)> { + let ingot = self.get_ingot_for_file_path(path)?; + let file = self.files.get(path).copied()?; + Some((ingot, file)) + } + + fn touch_ingot_for_file_path( + &mut self, + _db: &mut LanguageServerDatabase, + _path: &str, + ) -> Option { + Some(self.ingot) + } + + fn get_ingot_for_file_path(&self, _path: &str) -> Option { + Some(self.ingot) + } + + fn remove_input_for_file_path( + &mut self, + db: &mut LanguageServerDatabase, + path: &str, + ) -> Result<()> { + let file = self.files.remove(path); + + if let Some(_file) = file { + let ingot = self.ingot; + let new_ingot_files = self + .files + .values() + .copied() + .collect::>(); + ingot.set_files(db, new_ingot_files); + Ok(()) + } else { + Err(anyhow::anyhow!("File not found in ingot")) + } + } +} + +pub struct StandaloneIngotContext { + ingots: StringPatriciaMap, + files: StringPatriciaMap, +} + +impl StandaloneIngotContext { + pub fn new() -> Self { + Self { + ingots: StringPatriciaMap::new(), + files: StringPatriciaMap::new(), + } + } +} + +impl IngotFileContext for 
StandaloneIngotContext { + fn touch_input_for_file_path( + &mut self, + db: &mut LanguageServerDatabase, + path: &str, + ) -> Option<(InputIngot, InputFile)> { + let ingot = self.touch_ingot_for_file_path(db, path)?; + let input_file = self + .files + .get(path) + .copied() + .unwrap_or_else(|| InputFile::new(db, path.into(), String::new())); + let mut files = IndexSet::new(); + files.insert(input_file); + ingot.set_files(db, files); + ingot.set_root_file(db, input_file); + self.files.insert(path, input_file); + Some((ingot, input_file)) + } + + fn get_input_for_file_path(&self, path: &str) -> Option<(InputIngot, InputFile)> { + let ingot = self.get_ingot_for_file_path(path)?; + let file = self.files.get(path).copied()?; + Some((ingot, file)) + } + + fn touch_ingot_for_file_path( + &mut self, + db: &mut LanguageServerDatabase, + path: &str, + ) -> Option { + get_containing_ingot_mut(&mut self.ingots, path) + .as_deref() + .copied() + .map_or_else( + || { + let ingot = InputIngot::new( + db, + path, + IngotKind::StandAlone, + Version::new(0, 0, 0), + IndexSet::new(), + ); + self.ingots.insert(path, ingot); + Some(ingot) + }, + Some, + ) + } + + fn get_ingot_for_file_path(&self, path: &str) -> Option { + // this shouldn't mutate, it should only get the ingot or return `None` + get_containing_ingot(&self.ingots, path).copied() + } + + fn remove_input_for_file_path( + &mut self, + _db: &mut LanguageServerDatabase, + path: &str, + ) -> Result<()> { + let file = self.files.remove(path); + if let Some(_file) = file { + self.ingots.remove(path); + } + Ok(()) + } +} + +pub struct Workspace { + pub(crate) ingot_contexts: StringPatriciaMap, + pub(crate) standalone_ingot_context: StandaloneIngotContext, + pub(crate) root_path: Option, +} + +impl Workspace { + pub fn default() -> Self { + Self { + ingot_contexts: StringPatriciaMap::new(), + standalone_ingot_context: StandaloneIngotContext::new(), + root_path: None, + } + } + + pub fn all_files(&self) -> impl Iterator { + // 
Iterate over all files in the ingot contexts + let ingot_files = self + .ingot_contexts + .values() + .flat_map(|ctx| ctx.files.values()); + + // Get the files from the standalone ingot context + let standalone_files = self.standalone_ingot_context.files.values(); + + // Chain the iterators to create a single iterator over all files + ingot_files.chain(standalone_files) + } + + pub fn load_std_lib( + &mut self, + db: &mut LanguageServerDatabase, + root_path: &Path, + ) -> Result<()> { + let root_path_str = root_path.to_str().unwrap(); + self.touch_ingot_for_file_path(db, &format!("{}/std/{}", root_path_str, FE_CONFIG_SUFFIX)) + .unwrap(); + + info!("Loading std lib..."); + + // Collect paths to avoid borrowing `db` mutably during the closure + let paths: Vec<_> = StdLib::iter().collect(); + + for path in paths { + let path_str = path.as_ref(); + let std_path = format!("{}/std/{}", root_path_str, path_str); + info!("adding std file... {:?} --- {:?}", std_path, path_str); + if let Some(file) = StdLib::get(path_str) { + let contents = String::from_utf8(file.data.as_ref().to_vec()); + if let Ok(contents) = contents { + if let Some((_ingot, file)) = self.touch_input_for_file_path(db, &std_path) { + file.set_text(db).to(contents); + } + } + } + } + Ok(()) + } + + pub fn set_workspace_root( + &mut self, + db: &mut LanguageServerDatabase, + root_path: &Path, + ) -> Result<()> { + let path = root_path; + self.root_path = Some(path.to_path_buf()); + self.sync(db) + } + + pub fn ingot_context_from_config_path( + &mut self, + db: &LanguageServerDatabase, + config_path: &str, + ) -> Option<&mut LocalIngotContext> { + let key = ingot_directory_key(config_path.into()); + if self.ingot_contexts.contains_key(&key) { + return self.ingot_contexts.get_mut(&key); + } + let ingot_context = LocalIngotContext::new(db, config_path)?; + self.ingot_contexts.insert(key.clone(), ingot_context); + self.ingot_contexts.get_mut(&key) + } + + fn sync_local_ingots(&mut self, db: &mut 
LanguageServerDatabase, path: &str) { + let config_paths = glob::glob(&format!("{path}/**/{FE_CONFIG_SUFFIX}")) + .unwrap() + .filter_map(Result::ok) + .map(|p| p.to_str().unwrap().to_string()) + .collect::>(); + + let paths = config_paths + .iter() + .map(|s| ingot_directory_key(s.clone())) + .collect::>(); + + for path in &paths { + self.ingot_context_from_config_path(db, path); + } + + let existing_keys: Vec = self.ingot_contexts.keys().collect(); + + let keys_to_remove: Vec = existing_keys + .into_iter() + .filter(|key| !paths.contains(key)) + .collect(); + + for key in keys_to_remove { + self.ingot_contexts.remove(&key); + } + } + + fn sync_ingot_files(&mut self, db: &mut LanguageServerDatabase, config_path: &str) { + assert!(config_path.ends_with(FE_CONFIG_SUFFIX)); + info!("Syncing ingot at {}", config_path); + + let ingot_root = config_path.strip_suffix(FE_CONFIG_SUFFIX).unwrap(); + let actual_paths = glob::glob(&format!("{ingot_root}/src/**/*.fe")) + .unwrap() + .filter_map(Result::ok) + .map(|p| p.to_str().unwrap().to_string()) + .collect::>(); + + info!("Found {} files in ingot", actual_paths.len()); + info!("Syncing ingot files: {:?}", actual_paths); + + let ingot_context = self + .ingot_context_from_config_path(db, config_path) + .unwrap(); + + let previous_ingot_context_file_keys: Vec = ingot_context.files.keys().collect(); + for path in &previous_ingot_context_file_keys { + if !actual_paths.contains(path) { + let _ = ingot_context.remove_input_for_file_path(db, path); + } + } + + for path in actual_paths { + if !previous_ingot_context_file_keys.contains(&path) { + if let Some((_ingot, file)) = ingot_context.touch_input_for_file_path(db, &path) { + if let Ok(contents) = std::fs::read_to_string(&path) { + file.set_text(db).to(contents); + } + } + } + } + + let ingot_context_files = ingot_context + .files + .values() + .copied() + .collect::>(); + + ingot_context.ingot.set_files(db, ingot_context_files); + + // find the root file, which is either at 
`./src/main.fe` or `./src/lib.fe` + let root_file = ingot_context + .files + .values() + .find(|file| { + file.path(db).ends_with("src/main.fe") || file.path(db).ends_with("src/lib.fe") + }) + .copied(); + + if let Some(root_file) = root_file { + info!("Setting root file for ingot: {:?}", root_file.path(db)); + ingot_context.ingot.set_root_file(db, root_file); + } + } +} + +impl IngotFileContext for Workspace { + fn touch_input_for_file_path( + &mut self, + db: &mut LanguageServerDatabase, + path: &str, + ) -> Option<(InputIngot, InputFile)> { + let ctx = get_containing_ingot_mut(&mut self.ingot_contexts, path); + if let Some(ctx) = ctx { + ctx.touch_input_for_file_path(db, path) + } else { + self.standalone_ingot_context + .touch_input_for_file_path(db, path) + } + } + + fn get_input_for_file_path(&self, path: &str) -> Option<(InputIngot, InputFile)> { + let ctx = get_containing_ingot(&self.ingot_contexts, path); + if let Some(ctx) = ctx { + ctx.get_input_for_file_path(path) + } else { + self.standalone_ingot_context.get_input_for_file_path(path) + } + } + + fn touch_ingot_for_file_path( + &mut self, + db: &mut LanguageServerDatabase, + path: &str, + ) -> Option { + let ctx = get_containing_ingot_mut(&mut self.ingot_contexts, path); + if let Some(ctx) = ctx { + ctx.touch_ingot_for_file_path(db, path) + } else { + self.standalone_ingot_context + .touch_ingot_for_file_path(db, path) + } + } + + fn get_ingot_for_file_path(&self, path: &str) -> Option { + let ctx = get_containing_ingot(&self.ingot_contexts, path); + if let Some(ctx) = ctx { + ctx.get_ingot_for_file_path(path) + } else { + self.standalone_ingot_context.get_ingot_for_file_path(path) + } + } + + fn remove_input_for_file_path( + &mut self, + db: &mut LanguageServerDatabase, + path: &str, + ) -> Result<()> { + let ctx = get_containing_ingot_mut(&mut self.ingot_contexts, path); + if let Some(ctx) = ctx { + ctx.remove_input_for_file_path(db, path) + } else { + self.standalone_ingot_context + 
.remove_input_for_file_path(db, path)?; + Ok(()) + } + } +} + +pub trait SyncableIngotFileContext { + fn sync(&mut self, db: &mut LanguageServerDatabase) -> Result<()>; +} + +impl SyncableIngotFileContext for Workspace { + fn sync(&mut self, db: &mut LanguageServerDatabase) -> Result<()> { + let path = { + let path = &self.root_path; + path.clone().unwrap() + }; + + let path_str = path.to_str().unwrap(); + + info!("Syncing workspace at {:?}", path_str); + self.sync_local_ingots(db, path_str); + + let ingot_paths = glob::glob(&format!("{path_str}/**/{FE_CONFIG_SUFFIX}")) + .ok() + .unwrap() + .filter_map(Result::ok) + .filter_map(|p| p.to_str().map(std::string::ToString::to_string)) + .collect::>(); + + info!("Found {} ingots", ingot_paths.len()); + + for ingot_path in ingot_paths { + self.sync_ingot_files(db, &ingot_path); + } + Ok(()) + } +} + +#[cfg(test)] +mod tests { + use hir::lower::map_file_to_mod; + + use crate::backend::workspace::{ + get_containing_ingot_mut, IngotFileContext, Workspace, FE_CONFIG_SUFFIX, + }; + use std::path::PathBuf; + + use super::StandaloneIngotContext; + + #[test] + fn workspace_standalone_context() { + let mut db = crate::backend::db::LanguageServerDatabase::default(); + let file_path = "tests/data/ingot1/src/main.fe"; + + let mut ctx = StandaloneIngotContext::new(); + let file = ctx.touch_input_for_file_path(&mut db, file_path); + + assert!(file.is_some()); + + let ingot = ctx.touch_ingot_for_file_path(&mut db, file_path); + assert!(ingot.is_some()); + assert_eq!( + ingot.unwrap().kind(&db), + common::input::IngotKind::StandAlone + ); + } + + #[test] + fn test_workspace_standalone_ingot() { + let mut workspace: Workspace = Workspace::default(); + let mut db = crate::backend::db::LanguageServerDatabase::default(); + let file_path = "tests/data/ingot1/src/main.fe"; + let file = workspace.touch_input_for_file_path(&mut db, file_path); + assert!(file.is_some()); + } + + #[test] + fn test_get_containing_ingot() { + let config_path = 
"tests/data/ingot1/fe.toml"; + let mut workspace: Workspace = Workspace::default(); + + let _ingot_context_ingot = { + let ingot_context = workspace.ingot_context_from_config_path( + &crate::backend::db::LanguageServerDatabase::default(), + config_path, + ); + + assert!(ingot_context.is_some()); + ingot_context.map(|ctx| ctx.ingot) + }; + + assert!(workspace.ingot_contexts.len() == 1); + + let file_path = "tests/data/ingot1/src/main.fe"; + assert!(workspace + .ingot_contexts + .get_longest_common_prefix(file_path) + .is_some()); + + let containing_ingot = get_containing_ingot_mut(&mut workspace.ingot_contexts, file_path); + + assert!(containing_ingot.as_deref().is_some()); + + let ingot = workspace.touch_ingot_for_file_path( + &mut crate::backend::db::LanguageServerDatabase::default(), + file_path, + ); + assert!(ingot.is_some()); + } + + #[test] + fn test_workspace_local_ingot() { + let config_path = "tests/data/ingot1/fe.toml"; + let mut workspace: Workspace = Workspace::default(); + let mut db = crate::backend::db::LanguageServerDatabase::default(); + + let ingot_context_ingot = { + let ingot_context = workspace.ingot_context_from_config_path(&db, config_path); + + assert!(ingot_context.is_some()); + ingot_context.map(|ctx| ctx.ingot) + }; + + let file_path = "tests/data/ingot1/src/main.fe"; + let (ingot, _file) = workspace + .touch_input_for_file_path(&mut db, file_path) + .unwrap(); + + assert_eq!( + ingot_context_ingot.unwrap().kind(&db), + common::input::IngotKind::Local + ); + assert_eq!(ingot.kind(&db), common::input::IngotKind::Local); + assert_eq!(ingot_context_ingot.unwrap(), ingot); + } + + #[test] + fn test_sync_single_ingot() { + let cargo_manifest_dir = std::env::var("CARGO_MANIFEST_DIR").unwrap(); + let ingot_base_dir = + std::path::Path::new(&cargo_manifest_dir).join("test_files/single_ingot/"); + // let ingot_config_path = &ingot_base_dir.join("fe.toml"); + + let mut workspace: Workspace = Workspace::default(); + let mut db = 
crate::backend::db::LanguageServerDatabase::default(); + + workspace + .set_workspace_root(&mut db, &ingot_base_dir) + .unwrap(); + + assert_eq!(workspace.ingot_contexts.len(), 1); + + let fe_source_path = ingot_base_dir.join("src/main.fe"); + let (ingot, _file) = workspace + .touch_input_for_file_path(&mut db, fe_source_path.to_str().unwrap()) + .unwrap(); + assert!(ingot.kind(&db) == common::input::IngotKind::Local); + } + + #[test] + fn test_sync_nested_ingots() { + let crate_dir = std::env::var("CARGO_MANIFEST_DIR").unwrap(); + let path = format!("{crate_dir}/test_files/nested_ingots"); + assert!( + glob::glob(&format!("{}/**/{}", path, FE_CONFIG_SUFFIX)) + .unwrap() + .count() + == 2 + ); + + let mut workspace: Workspace = Workspace::default(); + let mut db = crate::backend::db::LanguageServerDatabase::default(); + + workspace.sync_local_ingots(&mut db, &path); + + assert!(workspace.ingot_contexts.len() == 2); + + let _ = workspace.set_workspace_root(&mut db, &PathBuf::from(&path)); + + // get all top level modules for .fe files in the workspace + let fe_files = glob::glob(&format!("{path}/**/*.fe")) + .unwrap() + .filter_map(Result::ok) + .map(|p| p.to_str().unwrap().to_string()) + .collect::>(); + + for src_path in fe_files { + let _file = workspace + .touch_input_for_file_path(&mut db, &src_path) + .unwrap(); + // normally would do this but it's not relevant here... 
+ // file.sync(&mut db, None); + + // this would panic if a file has been added to multiple ingots + let (ingot, file) = workspace.get_input_for_file_path(&src_path).unwrap(); + let _top_mod = map_file_to_mod(&db, ingot, file); + } + } + + #[test] + fn test_sync_ingot_files() { + let crate_dir = std::env::var("CARGO_MANIFEST_DIR").unwrap(); + let path = format!("{crate_dir}/test_files/nested_ingots"); + assert!( + glob::glob(&format!("{}/**/{}", path, FE_CONFIG_SUFFIX)) + .unwrap() + .count() + == 2 + ); + + let mut workspace: Workspace = Workspace::default(); + let mut db = crate::backend::db::LanguageServerDatabase::default(); + + workspace.sync_local_ingots(&mut db, &path); + + assert!(workspace.ingot_contexts.len() == 2); + + let foo_config = format!("{}/ingots/foo/{}", path, FE_CONFIG_SUFFIX); + workspace.sync_ingot_files(&mut db, &foo_config); + + let foo_context = workspace + .ingot_context_from_config_path(&db, &foo_config) + .unwrap(); + + assert!(foo_context.files.len() == 1); + + let foo_files = foo_context.files.keys().collect::>(); + for file in foo_files { + let contents = std::fs::read_to_string(&file).unwrap(); + let (_ingot, file) = foo_context + .touch_input_for_file_path(&mut db, &file) + .unwrap(); + + assert!(*file.text(&db) == contents); + } + } + + #[test] + fn test_dangling_fe_source() { + let crate_dir = std::env::var("CARGO_MANIFEST_DIR").unwrap(); + let messy_workspace_path = format!("{crate_dir}/test_files/messy"); + let dangling_path = format!("{crate_dir}/test_files/messy/dangling.fe"); + + let mut workspace: Workspace = Workspace::default(); + let mut db = crate::backend::db::LanguageServerDatabase::default(); + + workspace.sync_local_ingots(&mut db, &messy_workspace_path); + let (d_ingot, _file) = workspace + .touch_input_for_file_path(&mut db, &dangling_path) + .unwrap(); + + assert_eq!(d_ingot.kind(&db), common::input::IngotKind::StandAlone); + + // TODO: make it easier to go both ways between an ingot root path and its config path 
+ let ingot_paths = workspace + .ingot_contexts + .values() + .map(|ctx| format!("{}{}", ctx.ingot.path(&db), FE_CONFIG_SUFFIX)) + .collect::>(); + + for ingot_path in ingot_paths { + workspace.sync_ingot_files(&mut db, &ingot_path); + } + + let non_dangling_file_path = format!("{crate_dir}/test_files/messy/foo/bar/src/main.fe"); + let (n_ingot, _file) = workspace + .touch_input_for_file_path(&mut db, &non_dangling_file_path) + .unwrap(); + + assert_eq!(n_ingot.kind(&db), common::input::IngotKind::Local); + } +} diff --git a/crates/language-server/src/cli.rs b/crates/language-server/src/cli.rs new file mode 100644 index 0000000000..7515618f17 --- /dev/null +++ b/crates/language-server/src/cli.rs @@ -0,0 +1,30 @@ +use clap::{Parser, Subcommand}; + +/// Language Server Protocol (LSP) Server +#[derive(Parser, Debug)] +#[command(name = "fe-analyzer")] +#[command(author = "Your Name ")] +#[command(version = "1.0")] +#[command(about = "LSP server for the Fe language", long_about = None)] +pub struct CliArgs { + /// Choose the communication method + #[command(subcommand)] + pub command: Option, +} + +#[derive(Subcommand, Debug)] +pub enum Commands { + /// Start the LSP server with a TCP listener + Tcp(TcpArgs), +} + +#[derive(Parser, Debug)] +pub struct TcpArgs { + /// Port to listen on (default: 4242) + #[arg(short, long, default_value_t = 4242)] + pub port: u16, + + /// Timeout in seconds to shut down the server if no peers are connected (default: 10) + #[arg(short, long, default_value_t = 10)] + pub timeout: u64, +} diff --git a/crates/language-server/src/fallback.rs b/crates/language-server/src/fallback.rs new file mode 100644 index 0000000000..e57983fe14 --- /dev/null +++ b/crates/language-server/src/fallback.rs @@ -0,0 +1,80 @@ +use std::future::Future; +use std::ops::ControlFlow; +use std::pin::Pin; +use std::task::{Context, Poll}; + +use async_lsp::{AnyEvent, AnyNotification, AnyRequest, LspService, ResponseError}; +use serde_json::Value; +use tower::Service; + 
+use crate::lsp_actor::service::CanHandle; + +pub struct WithFallbackService { + primary: A, + fallback: B, +} + +impl WithFallbackService { + pub fn new(primary: A, fallback: B) -> Self { + Self { primary, fallback } + } +} + +impl Service for WithFallbackService +where + A: Service + + CanHandle, + B: Service, + F: Future> + Send + 'static, +{ + type Response = serde_json::Value; + type Error = ResponseError; + type Future = F; + + fn poll_ready(&mut self, cx: &mut Context<'_>) -> Poll> { + match self.primary.poll_ready(cx) { + Poll::Ready(Ok(())) => self.fallback.poll_ready(cx), + other => other, + } + } + + fn call(&mut self, req: AnyRequest) -> Self::Future { + if self.primary.can_handle(&req) { + self.primary.call(req) + } else { + self.fallback.call(req) + } + } +} + +impl LspService for WithFallbackService +where + A: LspService< + Response = Value, + Error = ResponseError, + Future = Pin> + Send + 'static>>, + > + CanHandle + + CanHandle + + CanHandle, + B: LspService< + Response = Value, + Error = ResponseError, + Future = Pin> + Send + 'static>>, + >, +{ + fn notify(&mut self, notif: AnyNotification) -> ControlFlow> { + if self.primary.can_handle(¬if) { + self.primary.notify(notif) + } else { + self.fallback.notify(notif) + } + } + + fn emit(&mut self, event: AnyEvent) -> ControlFlow> { + if self.primary.can_handle(&event) { + self.primary.emit(event) + } else { + self.fallback.emit(event) + } + } +} diff --git a/crates/language-server/src/functionality/capabilities.rs b/crates/language-server/src/functionality/capabilities.rs new file mode 100644 index 0000000000..e1b1c32de2 --- /dev/null +++ b/crates/language-server/src/functionality/capabilities.rs @@ -0,0 +1,101 @@ +use async_lsp::lsp_types::{HoverProviderCapability, ServerCapabilities}; + +#[cfg(target_arch = "wasm32")] +use crate::util::DummyFilePathConversion; + +pub(crate) fn server_capabilities() -> ServerCapabilities { + ServerCapabilities { + hover_provider: 
Some(HoverProviderCapability::Simple(true)), + // full sync mode for now + text_document_sync: Some(async_lsp::lsp_types::TextDocumentSyncCapability::Kind( + async_lsp::lsp_types::TextDocumentSyncKind::FULL, + )), + // goto definition + definition_provider: Some(async_lsp::lsp_types::OneOf::Left(true)), + // support for workspace add/remove changes + workspace: Some(async_lsp::lsp_types::WorkspaceServerCapabilities { + workspace_folders: Some(async_lsp::lsp_types::WorkspaceFoldersServerCapabilities { + supported: Some(true), + change_notifications: Some(async_lsp::lsp_types::OneOf::Left(true)), + }), + file_operations: Some( + async_lsp::lsp_types::WorkspaceFileOperationsServerCapabilities { + did_create: Some(async_lsp::lsp_types::FileOperationRegistrationOptions { + filters: vec![async_lsp::lsp_types::FileOperationFilter { + scheme: Some(String::from("file")), + pattern: async_lsp::lsp_types::FileOperationPattern { + glob: String::from("**/*"), + options: None, + // options: Some(async_lsp::lsp_types::FileOperationPatternOptions { + // ignore_case: Some(true), + // }), + matches: None, + }, + }], + }), + did_rename: Some(async_lsp::lsp_types::FileOperationRegistrationOptions { + filters: vec![async_lsp::lsp_types::FileOperationFilter { + scheme: Some(String::from("file")), + pattern: async_lsp::lsp_types::FileOperationPattern { + glob: String::from("**/*"), + options: None, + // options: Some(async_lsp::lsp_types::FileOperationPatternOptions { + // ignore_case: Some(true), + // }), + matches: None, + }, + }], + }), + did_delete: Some(async_lsp::lsp_types::FileOperationRegistrationOptions { + filters: vec![async_lsp::lsp_types::FileOperationFilter { + scheme: Some(String::from("file")), + pattern: async_lsp::lsp_types::FileOperationPattern { + glob: String::from("**/*"), + options: None, + // options: Some(async_lsp::lsp_types::FileOperationPatternOptions { + // ignore_case: Some(true), + // }), + matches: None, + }, + }], + }), + will_create: None, + will_rename: 
None, + will_delete: None, + // TODO: implement file operation refactors and workspace cache updates + // will_create: Some(async_lsp::lsp_types::FileOperationRegistrationOptions { + // filters: vec![async_lsp::lsp_types::FileOperationFilter { + // scheme: Some(String::from("file")), + // pattern: async_lsp::lsp_types::FileOperationPattern { + // glob: String::from("**/*"), + // options: None, + // matches: None, + // }, + // }], + // }), + // will_rename: Some(async_lsp::lsp_types::FileOperationRegistrationOptions { + // filters: vec![async_lsp::lsp_types::FileOperationFilter { + // scheme: Some(String::from("file")), + // pattern: async_lsp::lsp_types::FileOperationPattern { + // glob: String::from("**/*"), + // options: None, + // matches: None, + // }, + // }], + // }), + // will_delete: Some(async_lsp::lsp_types::FileOperationRegistrationOptions { + // filters: vec![async_lsp::lsp_types::FileOperationFilter { + // scheme: Some(String::from("file")), + // pattern: async_lsp::lsp_types::FileOperationPattern { + // glob: String::from("**/*"), + // options: None, + // matches: None, + // }, + // }], + // }), + }, + ), + }), + ..Default::default() + } +} diff --git a/crates/language-server/src/functionality/diagnostics.rs b/crates/language-server/src/functionality/diagnostics.rs new file mode 100644 index 0000000000..14e04919a8 --- /dev/null +++ b/crates/language-server/src/functionality/diagnostics.rs @@ -0,0 +1,143 @@ +use std::ops::Range; + +use camino::Utf8Path; +use codespan_reporting as cs; +use common::{diagnostics::CompleteDiagnostic, InputDb, InputFile, InputIngot}; +use cs::files as cs_files; +use fxhash::FxHashMap; +use hir::{ + analysis_pass::AnalysisPassManager, diagnostics::DiagnosticVoucher, lower::map_file_to_mod, + ParsingPass, +}; +use hir_analysis::{ + name_resolution::{DefConflictAnalysisPass, ImportAnalysisPass, PathAnalysisPass}, + ty::{ + AdtDefAnalysisPass, BodyAnalysisPass, FuncAnalysisPass, ImplAnalysisPass, + ImplTraitAnalysisPass, 
TraitAnalysisPass, TypeAliasAnalysisPass, + }, +}; +use url::Url; + +use crate::{ + backend::db::{LanguageServerDatabase, LanguageServerDb}, + util::diag_to_lsp, +}; + +#[salsa::tracked(return_ref)] +pub fn file_line_starts(db: &dyn LanguageServerDb, file: InputFile) -> Vec { + cs::files::line_starts(file.text(db.as_input_db())).collect() +} + +impl<'a> cs_files::Files<'a> for LanguageServerDatabase { + type FileId = InputFile; + type Name = &'a Utf8Path; + type Source = &'a str; + + fn name(&'a self, file_id: Self::FileId) -> Result { + Ok(file_id.path(self).as_path()) + } + + fn source(&'a self, file_id: Self::FileId) -> Result { + Ok(file_id.text(self)) + } + + fn line_index( + &'a self, + file_id: Self::FileId, + byte_index: usize, + ) -> Result { + let starts = file_line_starts(self, file_id); + Ok(starts + .binary_search(&byte_index) + .unwrap_or_else(|next_line| next_line - 1)) + } + + fn line_range( + &'a self, + file_id: Self::FileId, + line_index: usize, + ) -> Result, cs_files::Error> { + let line_starts = file_line_starts(self, file_id); + + let start = *line_starts + .get(line_index) + .ok_or(cs_files::Error::LineTooLarge { + given: line_index, + max: line_starts.len() - 1, + })?; + + let end = if line_index == line_starts.len() - 1 { + file_id.text(self).len() + } else { + *line_starts + .get(line_index + 1) + .ok_or(cs_files::Error::LineTooLarge { + given: line_index, + max: line_starts.len() - 1, + })? 
+ }; + + Ok(Range { start, end }) + } +} + +impl LanguageServerDatabase { + pub fn diagnostics_for_ingot( + &self, + ingot: InputIngot, + ) -> FxHashMap> { + let mut result = + FxHashMap::>::default( + ); + let mut pass_manager = initialize_analysis_pass(self); + let ingot_files = ingot.files(self).iter(); + + for file in ingot_files { + // initialize an empty diagnostic list for this file + // (to clear any previous diagnostics) + result + .entry( + Url::from_file_path(file.path(self)) + .expect("Failed to convert file path to URL"), + ) + .or_default(); + + let top_mod = map_file_to_mod(self, ingot, *file); + let diagnostics = pass_manager.run_on_module(top_mod); + let mut finalized_diags: Vec = diagnostics + .iter() + .map(|d| d.to_complete(self).clone()) + .collect(); + finalized_diags.sort_by(|lhs, rhs| match lhs.error_code.cmp(&rhs.error_code) { + std::cmp::Ordering::Equal => lhs.primary_span().cmp(&rhs.primary_span()), + ord => ord, + }); + for diag in finalized_diags { + let lsp_diags = diag_to_lsp(self.as_input_db(), ingot, diag).clone(); + for (uri, more_diags) in lsp_diags { + let diags = result.entry(uri.clone()).or_insert_with(Vec::new); + diags.extend(more_diags); + } + } + } + + result + } +} + +fn initialize_analysis_pass(db: &LanguageServerDatabase) -> AnalysisPassManager<'_> { + let mut pass_manager = AnalysisPassManager::new(); + pass_manager.add_module_pass(Box::new(ParsingPass::new(db))); + pass_manager.add_module_pass(Box::new(DefConflictAnalysisPass::new(db))); + pass_manager.add_module_pass(Box::new(ImportAnalysisPass::new(db))); + pass_manager.add_module_pass(Box::new(PathAnalysisPass::new(db))); + pass_manager.add_module_pass(Box::new(AdtDefAnalysisPass::new(db))); + pass_manager.add_module_pass(Box::new(TypeAliasAnalysisPass::new(db))); + pass_manager.add_module_pass(Box::new(TraitAnalysisPass::new(db))); + pass_manager.add_module_pass(Box::new(ImplAnalysisPass::new(db))); + 
pass_manager.add_module_pass(Box::new(ImplTraitAnalysisPass::new(db))); + pass_manager.add_module_pass(Box::new(FuncAnalysisPass::new(db))); + pass_manager.add_module_pass(Box::new(BodyAnalysisPass::new(db))); + + pass_manager +} diff --git a/crates/language-server/src/functionality/goto.rs b/crates/language-server/src/functionality/goto.rs new file mode 100644 index 0000000000..cb136950cd --- /dev/null +++ b/crates/language-server/src/functionality/goto.rs @@ -0,0 +1,378 @@ +use async_lsp::ResponseError; +use hir::{ + hir_def::{scope_graph::ScopeId, ItemKind, PathId, TopLevelMod}, + lower::map_file_to_mod, + span::{DynLazySpan, LazySpan}, + visitor::{prelude::LazyPathSpan, Visitor, VisitorCtxt}, + SpannedHirDb, +}; +use hir_analysis::name_resolution::{resolve_path, PathResErrorKind}; + +use crate::{ + backend::{db::LanguageServerDb, Backend}, + util::{to_lsp_location_from_scope, to_offset_from_position}, +}; +pub type Cursor = rowan::TextSize; + +#[derive(Default)] +struct PathSpanCollector<'db> { + paths: Vec<(PathId<'db>, ScopeId<'db>, LazyPathSpan<'db>)>, +} + +impl<'db, 'ast: 'db> Visitor<'ast> for PathSpanCollector<'db> { + fn visit_path(&mut self, ctxt: &mut VisitorCtxt<'ast, LazyPathSpan<'ast>>, path: PathId<'db>) { + let Some(span) = ctxt.span() else { + return; + }; + + let scope = ctxt.scope(); + self.paths.push((path, scope, span)); + } +} + +fn find_path_surrounding_cursor<'db>( + db: &'db dyn LanguageServerDb, + cursor: Cursor, + full_paths: Vec<(PathId<'db>, ScopeId<'db>, LazyPathSpan<'db>)>, +) -> Option<(PathId<'db>, bool, ScopeId<'db>)> { + let hir_db = db.as_hir_db(); + for (path, scope, lazy_span) in full_paths { + let span = lazy_span.resolve(db.as_spanned_hir_db()).unwrap(); + if span.range.contains(cursor) { + for idx in 0..=path.segment_index(hir_db) { + let seg_span = lazy_span + .segment(idx) + .resolve(db.as_spanned_hir_db()) + .unwrap(); + if seg_span.range.contains(cursor) { + return Some(( + path.segment(hir_db, idx).unwrap(), + idx != 
path.segment_index(hir_db), + scope, + )); + } + } + } + } + None +} + +pub fn find_enclosing_item<'db>( + db: &'db dyn SpannedHirDb, + top_mod: TopLevelMod<'db>, + cursor: Cursor, +) -> Option> { + let items = top_mod + .scope_graph(db.as_hir_db()) + .items_dfs(db.as_hir_db()); + + let mut smallest_enclosing_item = None; + let mut smallest_range_size = None; + + for item in items { + let lazy_item_span = DynLazySpan::from(item.lazy_span()); + let item_span = lazy_item_span + .resolve(SpannedHirDb::as_spanned_hir_db(db)) + .unwrap(); + + if item_span.range.contains(cursor) { + let range_size = item_span.range.end() - item_span.range.start(); + if smallest_range_size.is_none() || range_size < smallest_range_size.unwrap() { + smallest_enclosing_item = Some(item); + smallest_range_size = Some(range_size); + } + } + } + + smallest_enclosing_item +} + +pub fn get_goto_target_scopes_for_cursor<'db>( + db: &'db dyn LanguageServerDb, + top_mod: TopLevelMod<'db>, + cursor: Cursor, +) -> Option>> { + let item: ItemKind = find_enclosing_item(db.as_spanned_hir_db(), top_mod, cursor)?; + + let mut visitor_ctxt = VisitorCtxt::with_item(db.as_hir_db(), item); + let mut path_segment_collector = PathSpanCollector::default(); + path_segment_collector.visit_item(&mut visitor_ctxt, item); + + let (path, _is_intermediate, scope) = + find_path_surrounding_cursor(db, cursor, path_segment_collector.paths)?; + + let hdb = db.as_hir_analysis_db(); + + let resolved = resolve_path(hdb, path, scope, false); + let scopes = match resolved { + Ok(r) => r.as_scope(hdb).into_iter().collect::>(), + Err(err) => match err.kind { + PathResErrorKind::NotFound(bucket) => { + bucket.iter_ok().flat_map(|r| r.scope()).collect() + } + PathResErrorKind::Ambiguous(vec) => vec.into_iter().flat_map(|r| r.scope()).collect(), + _ => vec![], + }, + }; + + Some(scopes) +} + +use crate::backend::workspace::IngotFileContext; + +pub async fn handle_goto_definition( + backend: &mut Backend, + params: 
async_lsp::lsp_types::GotoDefinitionParams, +) -> Result, ResponseError> { + // Convert the position to an offset in the file + let params = params.text_document_position_params; + let file_text = std::fs::read_to_string(params.text_document.uri.path()).ok(); + let cursor: Cursor = to_offset_from_position(params.position, file_text.unwrap().as_str()); + + // Get the module and the goto info + let file_path = params.text_document.uri.path(); + let (ingot, file) = backend + .workspace + .get_input_for_file_path(file_path) + .unwrap(); + let top_mod = map_file_to_mod(&backend.db, ingot, file); + + let scopes = + get_goto_target_scopes_for_cursor(&backend.db, top_mod, cursor).unwrap_or_default(); + + let locations = scopes + .iter() + .map(|scope| to_lsp_location_from_scope(backend.db.as_spanned_hir_db(), ingot, *scope)) + .collect::>(); + + let result: Result, ()> = + Ok(Some(async_lsp::lsp_types::GotoDefinitionResponse::Array( + locations + .into_iter() + .filter_map(std::result::Result::ok) + .collect(), + ))); + let response = match result { + Ok(response) => response, + Err(e) => { + eprintln!("Error handling goto definition: {:?}", e); + None + } + }; + Ok(response) +} +// } +#[cfg(test)] +mod tests { + use std::{collections::BTreeMap, path::Path}; + + use common::input::IngotKind; + use dir_test::{dir_test, Fixture}; + use fe_compiler_test_utils::snap_test; + use fxhash::FxHashMap; + use hir::{HirDb, LowerHirDb}; + use salsa::Setter; + + use super::*; + use crate::backend::{ + db::LanguageServerDatabase, + workspace::{IngotFileContext, Workspace}, + }; + + // given a cursor position and a string, convert to cursor line and column + fn line_col_from_cursor(cursor: Cursor, s: &str) -> (usize, usize) { + let mut line = 0; + let mut col = 0; + for (i, c) in s.chars().enumerate() { + if i == Into::::into(cursor) { + return (line, col); + } + if c == '\n' { + line += 1; + col = 0; + } else { + col += 1; + } + } + (line, col) + } + + fn 
extract_multiple_cursor_positions_from_spans( + db: &LanguageServerDatabase, + top_mod: TopLevelMod, + ) -> Vec { + let hir_db = db.as_hir_db(); + let mut visitor_ctxt = VisitorCtxt::with_top_mod(hir_db, top_mod); + let mut path_collector = PathSpanCollector::default(); + path_collector.visit_top_mod(&mut visitor_ctxt, top_mod); + + let mut cursors = Vec::new(); + for (path, _, lazy_span) in path_collector.paths { + for idx in 0..=path.segment_index(hir_db) { + let seg_span = lazy_span + .segment(idx) + .resolve(db.as_spanned_hir_db()) + .unwrap(); + cursors.push(seg_span.range.start()); + } + } + + cursors.sort(); + cursors.dedup(); + + eprintln!("Found cursors: {:?}", cursors); + cursors + } + + fn make_goto_cursors_snapshot( + db: &LanguageServerDatabase, + fixture: &Fixture<&str>, + top_mod: TopLevelMod, + ) -> String { + let cursors = extract_multiple_cursor_positions_from_spans(db, top_mod); + let mut cursor_path_map: BTreeMap = BTreeMap::default(); + + for cursor in &cursors { + let scopes = + get_goto_target_scopes_for_cursor(db, top_mod, *cursor).unwrap_or_default(); + + if !scopes.is_empty() { + cursor_path_map.insert( + *cursor, + scopes + .iter() + .flat_map(|x| x.pretty_path(db)) + .collect::>() + .join("\n"), + ); + } + } + + let cursor_lines = cursor_path_map + .iter() + .map(|(cursor, path)| { + let (cursor_line, cursor_col) = line_col_from_cursor(*cursor, fixture.content()); + format!("cursor position ({cursor_line:?}, {cursor_col:?}), path: {path}") + }) + .collect::>(); + + format!( + "{}\n---\n{}", + fixture + .content() + .lines() + .enumerate() + .map(|(i, line)| format!("{i:?}: {line}")) + .collect::>() + .join("\n"), + cursor_lines.join("\n") + ) + } + + #[dir_test( + dir: "$CARGO_MANIFEST_DIR/test_files/single_ingot", + glob: "**/lib.fe", + )] + fn test_goto_multiple_files(fixture: Fixture<&str>) { + let cargo_manifest_dir = std::env::var("CARGO_MANIFEST_DIR").unwrap(); + let ingot_base_dir = 
Path::new(&cargo_manifest_dir).join("test_files/single_ingot"); + + let mut db = LanguageServerDatabase::default(); + let mut workspace = Workspace::default(); + + let _ = workspace.set_workspace_root(&mut db, &ingot_base_dir); + + let fe_source_path = ingot_base_dir.join(fixture.path()); + let fe_source_path = fe_source_path.to_str().unwrap(); + let (ingot, file) = workspace + .touch_input_for_file_path(&mut db, fixture.path()) + .unwrap(); + assert_eq!(ingot.kind(&db), IngotKind::Local); + + file.set_text(&mut db).to((*fixture.content()).to_string()); + + // Introduce a new scope to limit the lifetime of `top_mod` + { + let (ingot, file) = workspace.get_input_for_file_path(fe_source_path).unwrap(); + let top_mod = map_file_to_mod(db.as_lower_hir_db(), ingot, file); + + let snapshot = make_goto_cursors_snapshot(&db, &fixture, top_mod); + snap_test!(snapshot, fixture.path()); + } + + let ingot = workspace.touch_ingot_for_file_path(&mut db, fixture.path()); + assert_eq!(ingot.unwrap().kind(&db), IngotKind::Local); + } + + #[dir_test( + dir: "$CARGO_MANIFEST_DIR/test_files", + glob: "goto*.fe" + )] + fn test_goto_cursor_target(fixture: Fixture<&str>) { + let db = &mut LanguageServerDatabase::default(); + let workspace = &mut Workspace::default(); + let (ingot, file) = workspace + .touch_input_for_file_path(db, fixture.path()) + .unwrap(); + file.set_text(db).to((*fixture.content()).to_string()); + let top_mod = map_file_to_mod(db.as_lower_hir_db(), ingot, file); + + let snapshot = make_goto_cursors_snapshot(db, &fixture, top_mod); + snap_test!(snapshot, fixture.path()); + } + + #[dir_test( + dir: "$CARGO_MANIFEST_DIR/test_files", + glob: "smallest_enclosing*.fe" + )] + fn test_find_path_surrounding_cursor(fixture: Fixture<&str>) { + let db = &mut LanguageServerDatabase::default(); + let workspace = &mut Workspace::default(); + + let (ingot, file) = workspace + .touch_input_for_file_path(db, fixture.path()) + .unwrap(); + 
file.set_text(db).to((*fixture.content()).to_string()); + let top_mod = map_file_to_mod(db.as_lower_hir_db(), ingot, file); + + let cursors = extract_multiple_cursor_positions_from_spans(db, top_mod); + + let mut cursor_path_map: FxHashMap = FxHashMap::default(); + + for cursor in &cursors { + let mut visitor_ctxt = VisitorCtxt::with_top_mod(db.as_hir_db(), top_mod); + let mut path_collector = PathSpanCollector::default(); + path_collector.visit_top_mod(&mut visitor_ctxt, top_mod); + + let full_paths = path_collector.paths; + + if let Some((path, _, scope)) = find_path_surrounding_cursor(db, *cursor, full_paths) { + let resolved_enclosing_path = resolve_path(db, path, scope, false); + + let res = match resolved_enclosing_path { + Ok(res) => res.pretty_path(db).unwrap(), + Err(err) => match err.kind { + PathResErrorKind::Ambiguous(vec) => vec + .iter() + .map(|r| r.pretty_path(db).unwrap()) + .collect::>() + .join("\n"), + _ => "".into(), + }, + }; + cursor_path_map.insert(*cursor, res); + } + } + + let result = format!( + "{}\n---\n{}", + fixture.content(), + cursor_path_map + .iter() + .map(|(cursor, path)| { format!("cursor position: {cursor:?}, path: {path}") }) + .collect::>() + .join("\n") + ); + snap_test!(result, fixture.path()); + } +} diff --git a/crates/language-server/src/functionality/handlers.rs b/crates/language-server/src/functionality/handlers.rs new file mode 100644 index 0000000000..84ca58d1a9 --- /dev/null +++ b/crates/language-server/src/functionality/handlers.rs @@ -0,0 +1,264 @@ +use crate::backend::Backend; + +use async_lsp::lsp_types::FileChangeType; +use async_lsp::{ + lsp_types::{ + Hover, HoverParams, InitializeParams, InitializeResult, InitializedParams, LogMessageParams, + }, + LanguageClient, ResponseError, +}; +use common::InputDb; +use fxhash::FxHashSet; +use salsa::Setter; + +use super::{capabilities::server_capabilities, hover::hover_helper}; + +use crate::backend::workspace::IngotFileContext; + +use tracing::{error, info, warn}; + 
+#[derive(Debug)] +pub struct FilesNeedDiagnostics(pub Vec); + +#[derive(Debug)] +pub struct NeedsDiagnostics(pub url::Url); + +impl std::fmt::Display for FilesNeedDiagnostics { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "FilesNeedDiagnostics({:?})", self.0) + } +} + +impl std::fmt::Display for NeedsDiagnostics { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "FileNeedsDiagnostics({})", self.0) + } +} + +#[derive(Debug)] +pub struct FileChange { + pub uri: url::Url, + pub kind: ChangeKind, +} + +#[derive(Debug)] +pub enum ChangeKind { + Open(String), + Create, + Edit(Option), + Delete, +} + +impl Backend { + fn update_input_file_text(&mut self, path: &str, contents: String) { + let (_ingot, file) = self + .workspace + .touch_input_for_file_path(&mut self.db, path) + .unwrap(); + file.set_text(&mut self.db).to(contents); + } +} + +pub async fn initialize( + backend: &mut Backend, + message: InitializeParams, +) -> Result { + info!("initializing language server!"); + + let root = message + .workspace_folders + .and_then(|folders| folders.first().cloned()) + .and_then(|folder| folder.uri.to_file_path().ok()) + .unwrap_or_else(|| std::env::current_dir().unwrap()); + + let _ = backend.workspace.set_workspace_root(&mut backend.db, &root); + let _ = backend.workspace.load_std_lib(&mut backend.db, &root); + // let _ = backend.workspace.sync(); + + let capabilities = server_capabilities(); + let initialize_result = InitializeResult { + capabilities, + server_info: Some(async_lsp::lsp_types::ServerInfo { + name: String::from("fe-language-server"), + version: Some(String::from(env!("CARGO_PKG_VERSION"))), + }), + }; + Ok(initialize_result) +} + +pub async fn initialized( + backend: &Backend, + _message: InitializedParams, +) -> Result<(), ResponseError> { + info!("language server initialized! 
recieved notification!"); + + backend.workspace.all_files().for_each(|file| { + let path = file.path(backend.db.as_input_db()); + let _ = backend + .client + .emit(NeedsDiagnostics(url::Url::from_file_path(path).unwrap())); + }); + + let _ = backend.client.clone().log_message(LogMessageParams { + typ: async_lsp::lsp_types::MessageType::INFO, + message: "language server initialized!".to_string(), + }); + Ok(()) +} + +pub async fn handle_exit(_backend: &Backend, _message: ()) -> Result<(), ResponseError> { + info!("shutting down language server"); + Ok(()) +} + +pub async fn handle_did_change_watched_files( + backend: &Backend, + message: async_lsp::lsp_types::DidChangeWatchedFilesParams, +) -> Result<(), ResponseError> { + for event in message.changes { + let kind = match event.typ { + FileChangeType::CHANGED => ChangeKind::Edit(None), + FileChangeType::CREATED => ChangeKind::Create, + FileChangeType::DELETED => ChangeKind::Delete, + _ => unreachable!(), + }; + let _ = backend.client.clone().emit(FileChange { + uri: event.uri, + kind, + }); + } + Ok(()) +} + +pub async fn handle_did_open_text_document( + backend: &Backend, + message: async_lsp::lsp_types::DidOpenTextDocumentParams, +) -> Result<(), ResponseError> { + info!("file opened: {:?}", message.text_document.uri); + let _ = backend.client.clone().emit(FileChange { + uri: message.text_document.uri, + kind: ChangeKind::Open(message.text_document.text), + }); + Ok(()) +} + +pub async fn handle_did_change_text_document( + backend: &Backend, + message: async_lsp::lsp_types::DidChangeTextDocumentParams, +) -> Result<(), ResponseError> { + info!("file changed: {:?}", message.text_document.uri); + let _ = backend.client.clone().emit(FileChange { + uri: message.text_document.uri, + kind: ChangeKind::Edit(Some(message.content_changes[0].text.clone())), + }); + Ok(()) +} + +pub async fn handle_did_save_text_document( + _backend: &Backend, + message: async_lsp::lsp_types::DidSaveTextDocumentParams, +) -> Result<(), 
ResponseError> { + info!("file saved: {:?}", message.text_document.uri); + Ok(()) +} + +pub async fn handle_file_change( + backend: &mut Backend, + message: FileChange, +) -> Result<(), ResponseError> { + let path = message + .uri + .to_file_path() + .unwrap_or_else(|_| panic!("Failed to convert URI to path: {:?}", message.uri)); + + let path = path.to_str().unwrap(); + + match message.kind { + ChangeKind::Open(contents) => { + info!("file opened: {:?}", &path); + backend.update_input_file_text(path, contents); + } + ChangeKind::Create => { + info!("file created: {:?}", &path); + let contents = tokio::fs::read_to_string(&path).await.unwrap(); + backend.update_input_file_text(path, contents) + } + ChangeKind::Edit(contents) => { + info!("file edited: {:?}", &path); + let contents = if let Some(text) = contents { + text + } else { + tokio::fs::read_to_string(&path).await.unwrap() + }; + backend.update_input_file_text(path, contents); + } + ChangeKind::Delete => { + info!("file deleted: {:?}", path); + backend + .workspace + .remove_input_for_file_path(&mut backend.db, path) + .unwrap(); + } + } + + let _ = backend.client.emit(NeedsDiagnostics(message.uri)); + Ok(()) +} + +pub async fn handle_files_need_diagnostics( + backend: &Backend, + message: FilesNeedDiagnostics, +) -> Result<(), ResponseError> { + let FilesNeedDiagnostics(need_diagnostics) = message; + let mut client = backend.client.clone(); + + let ingots_need_diagnostics: FxHashSet<_> = need_diagnostics + .iter() + .filter_map(|NeedsDiagnostics(file)| backend.workspace.get_ingot_for_file_path(file.path())) + .collect(); + + for ingot in ingots_need_diagnostics { + // Get diagnostics per file + let diagnostics_map = backend.db.diagnostics_for_ingot(ingot); + + info!( + "Computed diagnostics: {:?}", + diagnostics_map.keys().collect::>() + ); + for uri in diagnostics_map.keys() { + let diagnostic = diagnostics_map.get(uri).cloned().unwrap_or_default(); + let diagnostics_params = 
async_lsp::lsp_types::PublishDiagnosticsParams { + uri: uri.clone(), + diagnostics: diagnostic, + version: None, + }; + info!("Publishing diagnostics for URI: {:?}", uri); + client.publish_diagnostics(diagnostics_params).unwrap(); + } + } + Ok(()) +} + +pub async fn handle_hover_request( + backend: &Backend, + message: HoverParams, +) -> Result, ResponseError> { + let path = message + .text_document_position_params + .text_document + .uri + .path(); + + let Some((ingot, file)) = backend.workspace.get_input_for_file_path(path) else { + warn!("handle_hover_request failed to get file for path: `{path}`"); + return Ok(None); + }; + + info!("handling hover request in file: {:?}", file); + let response = hover_helper(&backend.db, ingot, file, message).unwrap_or_else(|e| { + error!("Error handling hover: {:?}", e); + None + }); + info!("sending hover response: {:?}", response); + Ok(response) +} diff --git a/crates/language-server/src/functionality/hover.rs b/crates/language-server/src/functionality/hover.rs new file mode 100644 index 0000000000..659280f301 --- /dev/null +++ b/crates/language-server/src/functionality/hover.rs @@ -0,0 +1,61 @@ +use anyhow::Error; +use async_lsp::lsp_types::Hover; +use common::{InputFile, InputIngot}; +use hir::lower::map_file_to_mod; +use tracing::info; + +use super::{ + goto::{get_goto_target_scopes_for_cursor, Cursor}, + item_info::{get_docstring, get_item_definition_markdown, get_item_path_markdown}, +}; +use crate::{backend::db::LanguageServerDb, util::to_offset_from_position}; + +pub fn hover_helper( + db: &dyn LanguageServerDb, + ingot: InputIngot, + file: InputFile, + params: async_lsp::lsp_types::HoverParams, +) -> Result, Error> { + info!("handling hover"); + let file_text = file.text(db.as_input_db()); + + let cursor: Cursor = to_offset_from_position( + params.text_document_position_params.position, + file_text.as_str(), + ); + + let top_mod = map_file_to_mod(db.as_lower_hir_db(), ingot, file); + let goto_info = 
&get_goto_target_scopes_for_cursor(db, top_mod, cursor).unwrap_or_default(); + + let hir_db = db.as_hir_db(); + let scopes_info = goto_info + .iter() + .map(|scope| { + let item = scope.item(); + let pretty_path = get_item_path_markdown(item, hir_db); + let definition_source = get_item_definition_markdown(item, db.as_spanned_hir_db()); + let docs = get_docstring(*scope, hir_db); + + let result = [pretty_path, definition_source, docs] + .iter() + .filter_map(|info| info.clone().map(|info| format!("{}\n", info))) + .collect::>() + .join("\n"); + + result + }) + .collect::>(); + + let info = scopes_info.join("\n---\n"); + + let result = async_lsp::lsp_types::Hover { + contents: async_lsp::lsp_types::HoverContents::Markup( + async_lsp::lsp_types::MarkupContent { + kind: async_lsp::lsp_types::MarkupKind::Markdown, + value: info, + }, + ), + range: None, + }; + Ok(Some(result)) +} diff --git a/crates/language-server/src/functionality/item_info.rs b/crates/language-server/src/functionality/item_info.rs new file mode 100644 index 0000000000..267740909a --- /dev/null +++ b/crates/language-server/src/functionality/item_info.rs @@ -0,0 +1,60 @@ +use hir::{ + hir_def::{scope_graph::ScopeId, Attr, ItemKind}, + span::LazySpan, + HirDb, SpannedHirDb, +}; + +pub fn get_docstring(scope: ScopeId, hir_db: &dyn HirDb) -> Option { + scope + .attrs(hir_db)? 
+ .data(hir_db) + .iter() + .filter_map(|attr| { + if let Attr::DocComment(doc) = attr { + Some(doc.text.data(hir_db).clone()) + } else { + None + } + }) + .reduce(|a, b| a + "\n" + &b) +} + +pub fn get_item_path_markdown(item: ItemKind, hir_db: &dyn HirDb) -> Option { + item.scope() + .pretty_path(hir_db) + .map(|path| format!("```fe\n{}\n```", path)) +} + +pub fn get_item_definition_markdown(item: ItemKind, db: &dyn SpannedHirDb) -> Option { + // TODO: use pending AST features to get the definition without all this text manipulation + let hir_db = db.as_hir_db(); + let span = item.lazy_span().resolve(db)?; + + let mut start: usize = span.range.start().into(); + let mut end: usize = span.range.end().into(); + + // if the item has a body or children, cut that stuff out + let body_start = match item { + ItemKind::Func(func) => Some(func.body(hir_db)?.lazy_span().resolve(db)?.range.start()), + ItemKind::Mod(module) => Some(module.scope().name_span(hir_db)?.resolve(db)?.range.end()), + // TODO: handle other item types + _ => None, + }; + if let Some(body_start) = body_start { + end = body_start.into(); + } + + // let's start at the beginning of the line where the name is defined + let name_span = item.name_span()?.resolve(db); + if let Some(name_span) = name_span { + let mut name_line_start = name_span.range.start().into(); + let file_text = span.file.text(db.as_input_db()).as_str(); + while name_line_start > 0 && file_text.chars().nth(name_line_start - 1).unwrap() != '\n' { + name_line_start -= 1; + } + start = name_line_start; + } + + let item_definition = span.file.text(db.as_input_db()).as_str()[start..end].to_string(); + Some(format!("```fe\n{}\n```", item_definition.trim())) +} diff --git a/crates/language-server/src/functionality/mod.rs b/crates/language-server/src/functionality/mod.rs new file mode 100644 index 0000000000..dcb4a3104d --- /dev/null +++ b/crates/language-server/src/functionality/mod.rs @@ -0,0 +1,6 @@ +mod capabilities; +pub(super) mod 
diagnostics; +pub(super) mod goto; +pub(super) mod handlers; +pub(super) mod hover; +pub(super) mod item_info; diff --git a/crates/language-server/src/logging.rs b/crates/language-server/src/logging.rs new file mode 100644 index 0000000000..d7d3997626 --- /dev/null +++ b/crates/language-server/src/logging.rs @@ -0,0 +1,123 @@ +use async_lsp::{ + lsp_types::{LogMessageParams, MessageType}, + ClientSocket, LanguageClient, +}; +use tracing::{subscriber::set_default, Level, Metadata}; +use tracing_subscriber::{fmt::MakeWriter, layer::SubscriberExt}; +use tracing_tree::HierarchicalLayer; + +use std::{backtrace::Backtrace, sync::Arc}; + +pub fn setup_default_subscriber(client: ClientSocket) -> Option { + let client_socket_writer = ClientSocketWriterMaker::new(client); + let subscriber = tracing_subscriber::registry() + .with(tracing_subscriber::filter::LevelFilter::INFO) + .with( + HierarchicalLayer::new(2) + .with_thread_ids(true) + .with_thread_names(true) + .with_indent_lines(true) + .with_bracketed_fields(true) + .with_ansi(false) + .with_writer(client_socket_writer), + ); + Some(set_default(subscriber)) +} + +pub fn init_fn(client: ClientSocket) -> impl FnOnce() -> Option { + move || setup_default_subscriber(client) +} + +pub(crate) fn setup_panic_hook() { + // Set up a panic hook + std::panic::set_hook(Box::new(|panic_info| { + // Extract the panic message + let payload = panic_info.payload(); + let message = if let Some(s) = payload.downcast_ref::<&str>() { + *s + } else if let Some(s) = payload.downcast_ref::() { + &s[..] 
+ } else { + "Unknown panic message" + }; + + // Get the location of the panic if available + let location = if let Some(location) = panic_info.location() { + format!(" at {}:{}", location.file(), location.line()) + } else { + String::from("Unknown location") + }; + + // Capture the backtrace + let backtrace = Backtrace::capture(); + + // Log the panic information and backtrace + tracing::error!( + "Panic occurred{}: {}\nBacktrace:\n{:?}", + location, + message, + backtrace + ); + })); +} + +pub(crate) struct ClientSocketWriterMaker { + pub(crate) client_socket: Arc, +} + +impl ClientSocketWriterMaker { + pub fn new(client_socket: ClientSocket) -> Self { + ClientSocketWriterMaker { + client_socket: Arc::new(client_socket), + } + } +} + +pub(crate) struct ClientSocketWriter { + client_socket: Arc, + typ: MessageType, +} + +impl std::io::Write for ClientSocketWriter { + fn write(&mut self, buf: &[u8]) -> std::io::Result { + let message = String::from_utf8_lossy(buf).to_string(); + let params = LogMessageParams { + typ: self.typ, + message, + }; + + let mut client_socket = self.client_socket.as_ref(); + _ = client_socket.log_message(params); + Ok(buf.len()) + } + + fn flush(&mut self) -> std::io::Result<()> { + Ok(()) + } +} + +impl<'a> MakeWriter<'a> for ClientSocketWriterMaker { + type Writer = ClientSocketWriter; + + fn make_writer(&'a self) -> Self::Writer { + ClientSocketWriter { + client_socket: self.client_socket.clone(), + typ: MessageType::LOG, + } + } + + fn make_writer_for(&'a self, meta: &Metadata<'_>) -> Self::Writer { + let typ = match *meta.level() { + Level::ERROR => MessageType::ERROR, + Level::WARN => MessageType::WARNING, + Level::INFO => MessageType::INFO, + Level::DEBUG => MessageType::LOG, + Level::TRACE => MessageType::LOG, + }; + + ClientSocketWriter { + client_socket: self.client_socket.clone(), + typ, + } + } +} diff --git a/crates/language-server/src/lsp_actor/mod.rs b/crates/language-server/src/lsp_actor/mod.rs new file mode 100644 index 
0000000000..d53730e9bc --- /dev/null +++ b/crates/language-server/src/lsp_actor/mod.rs @@ -0,0 +1,227 @@ +pub(crate) mod registration; +pub(crate) mod service; + +use async_lsp::{ + lsp_types::{notification::Notification, request::Request}, + AnyEvent, AnyNotification, AnyRequest, ResponseError, +}; +use service::LspActorKey; +use std::collections::HashMap; +use tracing::debug; + +use act_locally::{ + dispatcher::Dispatcher, + handler::{AsyncFunc, AsyncMutatingFunc}, + message::{Message, MessageDowncast, MessageKey, Response}, + types::ActorError, +}; + +type WrapperFunc = + Box) -> Result, ActorError> + Send + Sync>; + +type UnwrapperFunc = + Box) -> Result, ActorError> + Send + Sync>; + +#[allow(dead_code)] +pub trait LspActor { + fn handle_request( + &mut self, + handler: impl for<'a> AsyncFunc<'a, S, R::Params, R::Result, ResponseError> + + Send + + Sync + + 'static, + ) -> &mut Self; + fn handle_notification( + &mut self, + handler: impl for<'a> AsyncFunc<'a, S, N::Params, (), ResponseError> + Send + Sync + 'static, + ) -> &mut Self; + fn handle_event( + &mut self, + handler: impl for<'a> AsyncFunc<'a, S, E, (), ResponseError> + Send + Sync + 'static, + ) -> &mut Self; + fn handle_request_mut( + &mut self, + handler: impl for<'a> AsyncMutatingFunc<'a, S, R::Params, R::Result, ResponseError> + + Send + + Sync + + 'static, + ) -> &mut Self; + fn handle_notification_mut( + &mut self, + handler: impl for<'a> AsyncMutatingFunc<'a, S, N::Params, (), ResponseError> + + Send + + Sync + + 'static, + ) -> &mut Self; + fn handle_event_mut( + &mut self, + handler: impl for<'a> AsyncMutatingFunc<'a, S, E, (), ResponseError> + Send + Sync + 'static, + ) -> &mut Self; +} + +pub struct LspDispatcher { + pub(super) wrappers: HashMap, + pub(super) unwrappers: HashMap, +} + +impl LspDispatcher { + pub fn new() -> Self { + Self { + wrappers: HashMap::new(), + unwrappers: HashMap::new(), + } + } + fn register_wrapper(&mut self, key: MessageKey, wrapper: WrapperFunc) { + let 
MessageKey(key) = key; + self.wrappers.insert(key, wrapper); + } + pub fn register_unwrapper(&mut self, key: MessageKey, unwrapper: UnwrapperFunc) { + let MessageKey(key) = key; + self.unwrappers.insert(key, unwrapper); + } +} + +impl Dispatcher for LspDispatcher { + fn message_key(&self, message: &dyn Message) -> Result, ActorError> { + if let Some(request) = message.downcast_ref::() { + Ok(LspActorKey::from(&request.method).into()) + } else if let Some(notification) = message.downcast_ref::() { + Ok(LspActorKey::from(¬ification.method).into()) + } else if let Some(event) = message.downcast_ref::() { + Ok(LspActorKey::from(event.inner_type_id()).into()) + } else { + Err(ActorError::DispatchError) + } + } + + fn wrap( + &self, + message: Box, + key: MessageKey, + ) -> Result, ActorError> { + let MessageKey(key) = key; + if let Some(wrapper) = self.wrappers.get(&key) { + if let Some(request) = message.downcast_ref::() { + wrapper(Box::new(request.params.clone())) + } else if let Some(notification) = message.downcast_ref::() { + wrapper(Box::new(notification.params.clone())) + // } else if message.is::() { + // wrapper(message) + } else { + wrapper(message) + } + } else { + Err(ActorError::HandlerNotFound) + } + } + + fn unwrap( + &self, + message: Box, + key: MessageKey, + ) -> Result, ActorError> { + let MessageKey(key) = key; + if let Some(unwrapper) = self.unwrappers.get(&key) { + debug!("Found an unwrapper for key {}!", &key); + unwrapper(message) + } else { + Err(ActorError::HandlerNotFound) + } + } +} + +#[cfg(test)] +mod tests { + + use super::*; + use act_locally::builder::ActorBuilder; + use async_lsp::{ + lsp_types::{InitializeParams, InitializeResult}, + RequestId, + }; + use async_lsp::{AnyNotification, AnyRequest, LspService, ResponseError}; + use serde_json::json; + use service::LspActorService; + use std::ops::ControlFlow; + use tower::Service; + + #[derive(Debug)] + enum Initialize {} + + impl Request for Initialize { + type Params = 
InitializeParams; + type Result = InitializeResult; + const METHOD: &'static str = "initialize"; + } + + #[derive(Debug)] + enum Initialized {} + + impl Notification for Initialized { + type Params = (); + const METHOD: &'static str = "initialized"; + } + + struct TestState { + initialized: bool, + } + + #[tokio::test] + async fn test_lsp_actor() { + let actor_ref = ActorBuilder::new() + .with_state_init(|| { + let initial_state = TestState { initialized: false }; + Ok(initial_state) + }) + .spawn() + .expect("Failed to spawn actor"); + + async fn handle_initialize( + state: &mut TestState, + _: InitializeParams, + ) -> Result { + println!("Handling initialize request"); + state.initialized = true; + Ok(InitializeResult::default()) + } + + let mut service = LspActorService::with(actor_ref.clone()); + + service.handle_request_mut::(handle_initialize); + + async fn handle_initialized(state: &mut TestState, _: ()) -> Result<(), ResponseError> { + println!("Handling initialized notification"); + assert!(state.initialized, "State should be initialized"); + Ok(()) + } + service.handle_notification_mut::(handle_initialized); + + // Test initialize request + let init_params = InitializeParams::default(); + let init_request = AnyRequest::stub( + RequestId::Number(1), + Initialize::METHOD.to_string(), + serde_json::to_value(init_params).unwrap(), + ); + + println!("Sending initialize request"); + + let init_result = service.call(init_request).await.unwrap(); + + let init_result_deserialized: InitializeResult = + serde_json::from_value(init_result).unwrap(); + + assert_eq!(init_result_deserialized, InitializeResult::default()); + + // Test initialized notification + let init_notification = AnyNotification::stub(Initialized::METHOD.to_string(), json!(null)); + + println!("Sending initialized notification"); + if let ControlFlow::Break(Err(e)) = service.notify(init_notification) { + panic!("Failed to send Initialized notification: {:?}", e); + } + + // Wait a bit to ensure the 
notification is processed + tokio::time::sleep(std::time::Duration::from_millis(100)).await; + } +} diff --git a/crates/language-server/src/lsp_actor/registration.rs b/crates/language-server/src/lsp_actor/registration.rs new file mode 100644 index 0000000000..b52c623b6d --- /dev/null +++ b/crates/language-server/src/lsp_actor/registration.rs @@ -0,0 +1,219 @@ +use std::sync::Arc; + +use act_locally::{ + handler::{AsyncFunc, AsyncMutatingFunc}, + message::{Message, MessageDowncast, MessageKey, Response, ResponseDowncast}, + types::ActorError, +}; +use async_lsp::{ + lsp_types::{notification::Notification, request::Request}, + AnyEvent, ResponseError, +}; + +use super::{ + service::{LspActorKey, LspActorService}, + LspActor, +}; + +impl LspActor for LspActorService { + fn handle_request( + &mut self, + handler: impl for<'b> AsyncFunc<'b, S, R::Params, R::Result, ResponseError> + + 'static + + Send + + Sync, + ) -> &mut Self { + let param_handler = Box::new( + move |params: Box| -> Result, ActorError> { + let params = params.downcast::().map_err(|_| { + println!("Failed to downcast params to serde_json::Value"); + ActorError::DowncastError + })?; + let typed_params: R::Params = serde_json::from_value(*params).map_err(|e| { + println!("Deserialization error: {:?}", e); + ActorError::CustomError(Box::new(e)) + })?; + + Ok(Box::new(typed_params) as Box) + }, + ); + + Arc::get_mut(&mut self.dispatcher) + .expect("Failed to get mutable reference to dispatcher") + .register_wrapper(MessageKey(R::METHOD.into()), param_handler); + + self.actor_ref + .register_handler_async(MessageKey(R::METHOD.into()), handler); + + let result_unwrapper = Box::new( + move |result: Box| -> Result, ActorError> { + let lsp_result = *result + .downcast::() + .map_err(|_| ActorError::DowncastError)?; + + let json_value = serde_json::to_value(lsp_result) + .map_err(|e| ActorError::CustomError(Box::new(e)))?; + // println!("Unwrapped json result: {:?}", &json_value); + Ok(Box::new(json_value) as 
Box) + }, + ); + + Arc::get_mut(&mut self.dispatcher) + .expect("Failed to get mutable reference to dispatcher") + .register_unwrapper(MessageKey::new(R::METHOD.into()), result_unwrapper); + + self + } + + fn handle_request_mut( + &mut self, + handler: impl for<'b> AsyncMutatingFunc<'b, S, R::Params, R::Result, ResponseError> + + 'static + + Send + + Sync, + ) -> &mut Self { + let param_handler = Box::new( + move |params: Box| -> Result, ActorError> { + let params = params.downcast::().map_err(|_| { + println!("Failed to downcast params to serde_json::Value"); + ActorError::DowncastError + })?; + let typed_params: R::Params = serde_json::from_value(*params).map_err(|e| { + println!("Deserialization error: {:?}", e); + ActorError::CustomError(Box::new(e)) + })?; + + Ok(Box::new(typed_params) as Box) + }, + ); + + Arc::get_mut(&mut self.dispatcher) + .expect("Failed to get mutable reference to dispatcher") + .register_wrapper(MessageKey(R::METHOD.into()), param_handler); + + self.actor_ref + .register_handler_async_mutating(MessageKey(R::METHOD.into()), handler); + + let result_unwrapper = Box::new( + move |result: Box| -> Result, ActorError> { + let lsp_result = *result + .downcast::() + .map_err(|_| ActorError::DowncastError)?; + + let json_value = serde_json::to_value(lsp_result) + .map_err(|e| ActorError::CustomError(Box::new(e)))?; + // println!("Unwrapped json result: {:?}", &json_value); + Ok(Box::new(json_value) as Box) + }, + ); + + Arc::get_mut(&mut self.dispatcher) + .expect("Failed to get mutable reference to dispatcher") + .register_unwrapper(MessageKey::new(R::METHOD.into()), result_unwrapper); + + self + } + + fn handle_notification( + &mut self, + handler: impl for<'b> AsyncFunc<'b, S, N::Params, (), ResponseError> + 'static + Send + Sync, + ) -> &mut Self { + let param_handler = Box::new( + move |params: Box| -> Result, ActorError> { + let params = params.downcast::().map_err(|_| { + println!("Failed to downcast params to serde_json::Value"); + 
ActorError::DowncastError + })?; + let typed_params: N::Params = serde_json::from_value(*params).map_err(|e| { + println!("Deserialization error: {:?}", e); + ActorError::CustomError(Box::new(e)) + })?; + Ok(Box::new(typed_params) as Box) + }, + ); + + Arc::get_mut(&mut self.dispatcher) + .expect("Failed to get mutable reference to dispatcher") + .register_wrapper(MessageKey::new(N::METHOD.into()), param_handler); + + self.actor_ref + .register_handler_async(MessageKey::new(N::METHOD.into()), handler); + self + } + + fn handle_notification_mut( + &mut self, + handler: impl for<'b> AsyncMutatingFunc<'b, S, N::Params, (), ResponseError> + + 'static + + Send + + Sync, + ) -> &mut Self { + let param_handler = Box::new( + move |params: Box| -> Result, ActorError> { + let params = params.downcast::().map_err(|_| { + println!("Failed to downcast params to serde_json::Value"); + ActorError::DowncastError + })?; + let typed_params: N::Params = serde_json::from_value(*params).map_err(|e| { + println!("Deserialization error: {:?}", e); + ActorError::CustomError(Box::new(e)) + })?; + Ok(Box::new(typed_params) as Box) + }, + ); + + Arc::get_mut(&mut self.dispatcher) + .expect("Failed to get mutable reference to dispatcher") + .register_wrapper(MessageKey::new(N::METHOD.into()), param_handler); + + self.actor_ref + .register_handler_async_mutating(MessageKey::new(N::METHOD.into()), handler); + self + } + + fn handle_event( + &mut self, + handler: impl for<'b> AsyncFunc<'b, S, E, (), ResponseError> + 'static + Send + Sync, + ) -> &mut Self { + let wrapper = Box::new( + move |message: Box| -> Result, ActorError> { + let event = message + .downcast::() + .expect("Failed to downcast message to AnyEvent"); + let inner = event.downcast::().expect("Failed to downcast event"); + Ok(Box::new(inner)) + }, + ); + + Arc::get_mut(&mut self.dispatcher) + .expect("Failed to get mutable reference to dispatcher") + .register_wrapper(LspActorKey::of::().into(), wrapper); + + self.actor_ref + 
.register_handler_async(LspActorKey::of::().into(), handler); + self + } + + fn handle_event_mut( + &mut self, + handler: impl for<'b> AsyncMutatingFunc<'b, S, E, (), ResponseError> + Send + Sync + 'static, + ) -> &mut Self { + let wrapper = Box::new( + move |message: Box| -> Result, ActorError> { + let event = message + .downcast::() + .expect("Failed to downcast message to AnyEvent"); + let inner = event.downcast::().expect("Failed to downcast event"); + Ok(Box::new(inner)) + }, + ); + + Arc::get_mut(&mut self.dispatcher) + .expect("Failed to get mutable reference to dispatcher") + .register_wrapper(LspActorKey::of::().into(), wrapper); + + self.actor_ref + .register_handler_async_mutating(LspActorKey::of::().into(), handler); + self + } +} diff --git a/crates/language-server/src/lsp_actor/service.rs b/crates/language-server/src/lsp_actor/service.rs new file mode 100644 index 0000000000..5bc20d1d43 --- /dev/null +++ b/crates/language-server/src/lsp_actor/service.rs @@ -0,0 +1,214 @@ +use std::future::Future; +use std::ops::ControlFlow; +use std::pin::Pin; +use std::sync::Arc; +use std::task::{Context, Poll}; + +use act_locally::actor::ActorRef; +use act_locally::message::MessageKey; +use act_locally::types::ActorError; +use serde_json::Value; +use tracing::info; + +// use async_lsp::can_handle::CanHandle; +use async_lsp::{AnyEvent, AnyNotification, AnyRequest, Error, LspService, ResponseError}; +use std::any::TypeId; +use tower::Service; + +use crate::lsp_actor::LspDispatcher; + +pub struct LspActorService { + pub(super) actor_ref: ActorRef, + pub(super) dispatcher: Arc, +} + +impl LspActorService { + pub fn with(actor_ref: ActorRef) -> Self { + let dispatcher = LspDispatcher::new(); + Self { + actor_ref, + dispatcher: Arc::new(dispatcher), + } + } +} + +type BoxReqFuture = Pin> + Send>>; +impl Service for LspActorService { + type Response = serde_json::Value; + type Error = ResponseError; + // type Future = Pin> + Send>>; + type Future = BoxReqFuture; + + fn 
poll_ready(&mut self, _cx: &mut Context<'_>) -> Poll> { + Poll::Ready(Ok(())) + } + + fn call(&mut self, req: AnyRequest) -> Self::Future { + let method = req.method.clone(); + info!("got LSP request: {method:?}"); + let actor_ref = self.actor_ref.clone(); + let dispatcher = self.dispatcher.clone(); + let method_log = method.clone().to_owned(); + let result = Box::pin(async move { + let dispatcher = dispatcher.as_ref(); + let ask = actor_ref.ask::<_, Self::Response, _>(dispatcher, req); + let lsp_result: Result = ask.await.map_err(|e| match e { + ActorError::HandlerNotFound => ResponseError::new( + async_lsp::ErrorCode::METHOD_NOT_FOUND, + "Method not found".to_string(), + ), + _ => ResponseError::new( + async_lsp::ErrorCode::INTERNAL_ERROR, + format!("There was an internal error... {:?}", e), + ), + }); + info!("Prepared LSP response for: {method_log:?}"); + lsp_result + }); + info!("Prepared future for LSP request: {method:?}"); + result + } +} + +impl LspService for LspActorService { + fn notify(&mut self, notif: AnyNotification) -> ControlFlow> { + let method = notif.method.clone(); + let dispatcher = self.dispatcher.clone(); + match self.actor_ref.tell(dispatcher.as_ref(), notif) { + Ok(()) => ControlFlow::Continue(()), + Err(ActorError::HandlerNotFound) => { + tracing::warn!("Method not found for notification `{}`", method); + ControlFlow::Continue(()) + } + Err(e) => ControlFlow::Break(Err(Error::Response(ResponseError::new( + async_lsp::ErrorCode::INTERNAL_ERROR, + format!( + "Failed to send notification: {:?} for notification `{}`", + e, method + ), + )))), + } + } + + fn emit(&mut self, event: AnyEvent) -> ControlFlow> { + let type_name = event.type_name(); + let dispatcher = self.dispatcher.clone(); + match self.actor_ref.tell(dispatcher.as_ref(), event) { + Ok(()) => ControlFlow::Continue(()), + Err(ActorError::HandlerNotFound) => { + tracing::warn!("Method not found for event: {:?}", type_name); + ControlFlow::Continue(()) + } + Err(e) => 
ControlFlow::Break(Err(Error::Response(ResponseError::new( + async_lsp::ErrorCode::INTERNAL_ERROR, + format!("Failed to emit event: {:?}", e), + )))), + } + } +} + +pub(crate) trait CanHandle { + fn can_handle(&self, item: &T) -> bool; +} + +impl CanHandle for LspActorService { + fn can_handle(&self, req: &AnyRequest) -> bool { + self.dispatcher + .wrappers + .contains_key(&LspActorKey::from(&req.method)) + } +} + +impl CanHandle for LspActorService { + fn can_handle(&self, notif: &AnyNotification) -> bool { + self.dispatcher + .wrappers + .contains_key(&LspActorKey::from(¬if.method)) + } +} + +impl CanHandle for LspActorService { + fn can_handle(&self, event: &AnyEvent) -> bool { + self.dispatcher + .wrappers + .contains_key(&LspActorKey::from(event.inner_type_id())) + } +} + +#[derive(Debug, Clone)] +pub enum LspActorKey { + ByMethod(String), + ByTypeId(TypeId), +} + +impl LspActorKey { + pub fn of() -> Self { + Self::ByTypeId(TypeId::of::()) + } +} + +impl std::fmt::Display for LspActorKey { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + LspActorKey::ByMethod(method) => write!(f, "Method({})", method), + LspActorKey::ByTypeId(type_id) => write!(f, "Custom({:?})", type_id), + } + } +} + +impl From for LspActorKey { + fn from(method: String) -> Self { + LspActorKey::ByMethod(method) + } +} + +impl From<&String> for LspActorKey { + fn from(method: &String) -> Self { + LspActorKey::ByMethod(method.clone()) + } +} + +impl From<&str> for LspActorKey { + fn from(method: &str) -> Self { + LspActorKey::ByMethod(method.to_string()) + } +} + +impl From for LspActorKey { + fn from(type_id: TypeId) -> Self { + LspActorKey::ByTypeId(type_id) + } +} + +impl From for MessageKey { + fn from(val: LspActorKey) -> Self { + MessageKey(val) + } +} + +impl PartialEq for LspActorKey { + fn eq(&self, other: &Self) -> bool { + match (self, other) { + (LspActorKey::ByMethod(a), LspActorKey::ByMethod(b)) => a == b, + (LspActorKey::ByTypeId(a), 
LspActorKey::ByTypeId(b)) => a == b, + _ => false, + } + } +} + +impl std::hash::Hash for LspActorKey { + fn hash(&self, state: &mut H) { + match self { + LspActorKey::ByMethod(method) => { + 0u8.hash(state); + method.hash(state); + } + LspActorKey::ByTypeId(type_id) => { + 1u8.hash(state); + type_id.hash(state); + } + } + } +} + +impl Eq for LspActorKey {} diff --git a/crates/language-server/src/lsp_streams.rs b/crates/language-server/src/lsp_streams.rs new file mode 100644 index 0000000000..3d1c7619ff --- /dev/null +++ b/crates/language-server/src/lsp_streams.rs @@ -0,0 +1,128 @@ +//! # async-lsp-streams +//! +//! This crate provides an extension to the `async-lsp` library, allowing easy creation of +//! stream-based handlers for LSP requests and notifications. + +use async_lsp::router::Router; +use async_lsp::{lsp_types::*, ResponseError}; +use futures::Stream; +use std::fmt::Debug; +use std::pin::Pin; +use std::task::{Context, Poll}; +use tokio::sync::{mpsc, oneshot}; + +/// A stream of LSP request messages with their response channels. +pub struct RequestStream { + receiver: mpsc::Receiver<( + Params, + oneshot::Sender>, + )>, +} + +impl Stream for RequestStream { + type Item = ( + Params, + oneshot::Sender>, + ); + + fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll> { + self.receiver.poll_recv(cx) + } +} + +/// A stream of LSP notification messages. +pub struct NotificationStream { + receiver: mpsc::Receiver, +} + +impl Stream for NotificationStream { + type Item = Params; + + fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll> { + self.receiver.poll_recv(cx) + } +} + +/// A stream of LSP event messages. +pub struct EventStream { + receiver: mpsc::Receiver, +} + +impl Stream for EventStream { + type Item = E; + + fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll> { + self.receiver.poll_recv(cx) + } +} + +/// An extension trait for `RouterBuilder` to add stream-based handlers. 
+#[allow(dead_code)] +pub trait RouterStreams { + /// Creates a stream for handling a specific LSP request. + fn request_stream(&mut self) -> RequestStream + where + R: request::Request, + R::Result: Debug; + + /// Creates a stream for handling a specific LSP notification. + fn notification_stream(&mut self) -> NotificationStream + where + N: notification::Notification; + + /// Creates a stream for handling a specific LSP event. + fn event_stream(&mut self) -> EventStream + where + E: Send + Sync + 'static; +} + +impl RouterStreams for Router { + fn request_stream(&mut self) -> RequestStream + where + R: request::Request, + R::Result: Debug, + { + let (tx, rx) = mpsc::channel(100); + self.request::(move |_, params| { + let tx = tx.clone(); + async move { + let (response_tx, response_rx) = oneshot::channel(); + tx.send((params, response_tx)).await.unwrap(); + response_rx.await.unwrap() + } + }); + RequestStream { receiver: rx } + } + + fn notification_stream(&mut self) -> NotificationStream + where + N: notification::Notification, + { + let (tx, rx) = mpsc::channel(100); + self.notification::(move |_, params| { + let tx = tx.clone(); + tokio::spawn(async move { + tx.send(params).await.unwrap(); + }); + std::ops::ControlFlow::Continue(()) + }); + NotificationStream { receiver: rx } + } + + fn event_stream(&mut self) -> EventStream + where + E: Send + Sync + 'static, + { + let (tx, rx) = mpsc::channel(100); + self.event::(move |_, event| { + let tx = tx.clone(); + tokio::spawn(async move { + if let Err(e) = tx.send(event).await { + tracing::error!("Failed to send event to stream: {}", e); + } + }); + std::ops::ControlFlow::Continue(()) + }); + EventStream { receiver: rx } + } +} diff --git a/crates/language-server/src/main.rs b/crates/language-server/src/main.rs new file mode 100644 index 0000000000..37281e45f5 --- /dev/null +++ b/crates/language-server/src/main.rs @@ -0,0 +1,146 @@ +mod backend; +mod cli; +mod fallback; +mod functionality; +mod logging; +mod 
lsp_actor; +mod lsp_streams; +mod server; +mod util; + +use std::net::SocketAddr; +use std::time::Duration; + +use async_compat::CompatExt; +use async_lsp::concurrency::ConcurrencyLayer; +use async_lsp::panic::CatchUnwindLayer; +use async_lsp::server::LifecycleLayer; +use async_lsp::tracing::TracingLayer; +use async_std::net::TcpListener; +use clap::Parser; +use cli::{CliArgs, Commands}; +use futures::io::AsyncReadExt; +use futures::StreamExt; +use logging::setup_panic_hook; +use server::setup; +use tracing::instrument::WithSubscriber; +use tracing::{error, info}; + +use async_lsp::client_monitor::ClientProcessMonitorLayer; +use std::sync::atomic::{AtomicUsize, Ordering}; +use std::sync::Arc; +use tower::ServiceBuilder; + +#[tokio::main] +async fn main() { + std::env::set_var("RUST_BACKTRACE", "full"); + setup_panic_hook(); + + // Parse CLI arguments + let args = CliArgs::parse(); + + match args.command { + Some(Commands::Tcp(tcp_args)) => { + // Start server with TCP listener + start_tcp_server(tcp_args.port, Duration::from_secs(tcp_args.timeout)).await; + } + None => { + // Start server with stdio + start_stdio_server().await; + } + } +} + +async fn start_stdio_server() { + let (server, client) = async_lsp::MainLoop::new_server(|client| { + let tracing_layer = TracingLayer::default(); + let lsp_service = setup(client.clone(), "LSP actor".to_string()); + ServiceBuilder::new() + .layer(LifecycleLayer::default()) + .layer(CatchUnwindLayer::default()) + .layer(ConcurrencyLayer::default()) + .layer(tracing_layer) + .layer(ClientProcessMonitorLayer::new(client.clone())) + .service(lsp_service) + }); + + let (stdin, stdout) = (tokio::io::stdin(), tokio::io::stdout()); + let (stdin, stdout) = (stdin.compat(), stdout.compat()); + + let logging = logging::setup_default_subscriber(client); + match server.run_buffered(stdin, stdout).await { + Ok(_) => info!("Server finished successfully"), + Err(e) => error!("Server error: {:?}", e), + } + drop(logging); +} + +async fn 
start_tcp_server(port: u16, timeout: Duration) { + let addr = SocketAddr::from(([0, 0, 0, 0], port)); + let listener = TcpListener::bind(&addr) + .await + .expect("Failed to bind to address"); + let mut incoming = listener.incoming(); + let connections_count = Arc::new(AtomicUsize::new(0)); // we will timeout if no clients are connected + + info!("LSP server is listening on {}", addr); + + while let Some(Ok(stream)) = incoming.next().with_current_subscriber().await { + let client_address = stream.peer_addr().unwrap(); + let tracing_layer = TracingLayer::default(); + let connections_count = Arc::clone(&connections_count); + let task = async move { + let (server, client) = async_lsp::MainLoop::new_server(|client| { + let router = setup(client.clone(), format!("LSP actor for {client_address}")); + ServiceBuilder::new() + .layer(tracing_layer) + .layer(LifecycleLayer::default()) + .layer(CatchUnwindLayer::default()) + .layer(ConcurrencyLayer::default()) + .layer(ClientProcessMonitorLayer::new(client.clone())) + .service(router) + }); + let logging = logging::setup_default_subscriber(client); + let current_connections = connections_count.fetch_add(1, Ordering::SeqCst) + 1; + info!( + "New client connected. Total clients: {}", + current_connections + ); + + let (read, write) = stream.split(); + if let Err(e) = server.run_buffered(read, write).await { + error!("Server error for client {}: {:?}", client_address, e); + } else { + info!("Client {} disconnected", client_address); + } + let current_connections = connections_count.fetch_sub(1, Ordering::SeqCst) - 1; + info!( + "Client disconnected. 
Total clients: {}", + current_connections + ); + drop(logging); + }; + tokio::spawn(task.with_current_subscriber()); + } + + let timeout_task = { + let connections_count = Arc::clone(&connections_count); + tokio::spawn(async move { + loop { + tokio::time::sleep(Duration::from_secs(1)).await; + if connections_count.load(Ordering::Relaxed) == 0 { + tokio::time::sleep(timeout).await; + if connections_count.load(Ordering::Relaxed) == 0 { + info!( + "No clients connected for {:?}. Shutting down server.", + timeout + ); + std::process::exit(0); + } + } + } + }) + }; + + timeout_task.await.unwrap(); +} diff --git a/crates/language-server/src/server.rs b/crates/language-server/src/server.rs new file mode 100644 index 0000000000..fae430aed1 --- /dev/null +++ b/crates/language-server/src/server.rs @@ -0,0 +1,92 @@ +use crate::fallback::WithFallbackService; +use crate::functionality::handlers::{FileChange, FilesNeedDiagnostics, NeedsDiagnostics}; +use crate::logging; +use crate::lsp_actor::service::LspActorService; +use crate::lsp_actor::LspActor; +use crate::lsp_streams::RouterStreams; +use act_locally::builder::ActorBuilder; +use async_lsp::lsp_types::notification::{ + self, DidChangeTextDocument, DidChangeWatchedFiles, DidOpenTextDocument, DidSaveTextDocument, + Initialized, +}; +use async_lsp::lsp_types::request::{GotoDefinition, HoverRequest}; +use async_lsp::ClientSocket; +use async_std::stream::StreamExt; +use futures_batch::ChunksTimeoutStreamExt; +// use serde_json::Value; +use tracing::instrument::WithSubscriber; +use tracing::{info, warn}; + +use crate::backend::Backend; +use crate::functionality::{goto, handlers}; +use async_lsp::lsp_types::request::Initialize; +use async_lsp::router::Router; + +pub(crate) fn setup( + client: ClientSocket, + name: String, +) -> WithFallbackService, Router<()>> { + info!("Setting up server"); + let client_for_actor = client.clone(); + let client_for_logging = client.clone(); + let backend_actor = ActorBuilder::new() + 
.with_name(name) + .with_state_init(move || Ok(Backend::new(client_for_actor))) + .with_subscriber_init(logging::init_fn(client_for_logging)) + .spawn() + .expect("Failed to spawn backend actor"); + + let mut lsp_actor_service = LspActorService::with(backend_actor); + + lsp_actor_service + // mutating handlers + .handle_request_mut::(handlers::initialize) + .handle_request_mut::(goto::handle_goto_definition) + .handle_event_mut::(handlers::handle_file_change) + .handle_event::(handlers::handle_files_need_diagnostics) + // non-mutating handlers + .handle_notification::(handlers::initialized) + .handle_request::(handlers::handle_hover_request) + .handle_notification::(handlers::handle_did_open_text_document) + .handle_notification::(handlers::handle_did_change_text_document) + .handle_notification::(handlers::handle_did_change_watched_files) + .handle_notification::(handlers::handle_did_save_text_document) + .handle_notification::(handlers::handle_exit); + + let mut streaming_router = Router::new(()); + setup_streams(client.clone(), &mut streaming_router); + setup_unhandled(&mut streaming_router); + + WithFallbackService::new(lsp_actor_service, streaming_router) +} + +fn setup_streams(client: ClientSocket, router: &mut Router<()>) { + info!("setting up streams"); + + let mut diagnostics_stream = router + .event_stream::() + .chunks_timeout(500, std::time::Duration::from_millis(30)) + .map(FilesNeedDiagnostics) + .fuse(); + + tokio::spawn( + async move { + while let Some(files_need_diagnostics) = diagnostics_stream.next().await { + let _ = client.emit(files_need_diagnostics); + } + } + .with_current_subscriber(), + ); +} + +fn setup_unhandled(router: &mut Router<()>) { + router + .unhandled_notification(|_, params| { + warn!("Unhandled notification: {:?}", params); + std::ops::ControlFlow::Continue(()) + }) + .unhandled_event(|_, params| { + warn!("Unhandled event: {:?}", params); + std::ops::ControlFlow::Continue(()) + }); +} diff --git 
a/crates/language-server/src/util.rs b/crates/language-server/src/util.rs new file mode 100644 index 0000000000..b5f2590ff8 --- /dev/null +++ b/crates/language-server/src/util.rs @@ -0,0 +1,178 @@ +use async_lsp::lsp_types::{ + DiagnosticRelatedInformation, DiagnosticSeverity, NumberOrString, Position, +}; +use common::{ + diagnostics::{CompleteDiagnostic, Severity, Span}, + InputDb, InputIngot, +}; +use fxhash::FxHashMap; +use hir::{hir_def::scope_graph::ScopeId, span::LazySpan, SpannedHirDb}; +use tracing::error; + +pub fn calculate_line_offsets(text: &str) -> Vec { + text.lines() + .scan(0, |state, line| { + let offset = *state; + *state += line.len() + 1; + Some(offset) + }) + .collect() +} + +pub fn to_offset_from_position(position: Position, text: &str) -> rowan::TextSize { + let line_offsets: Vec = calculate_line_offsets(text); + let line_offset = line_offsets[position.line as usize]; + let character_offset = position.character as usize; + + rowan::TextSize::from((line_offset + character_offset) as u32) +} + +pub fn to_lsp_range_from_span( + span: Span, + db: &dyn InputDb, +) -> Result> { + let text = span.file.text(db); + let line_offsets = calculate_line_offsets(text); + let start = span.range.start(); + let end = span.range.end(); + + let start_line = line_offsets + .binary_search(&start.into()) + .unwrap_or_else(|x| x - 1); + + let end_line = line_offsets + .binary_search(&end.into()) + .unwrap_or_else(|x| x - 1); + + let start_character: usize = usize::from(span.range.start()) - line_offsets[start_line]; + let end_character: usize = usize::from(span.range.end()) - line_offsets[end_line]; + + Ok(async_lsp::lsp_types::Range { + start: Position::new(start_line as u32, start_character as u32), + end: Position::new(end_line as u32, end_character as u32), + }) +} + +pub fn to_lsp_location_from_scope( + db: &dyn SpannedHirDb, + ingot: InputIngot, + scope: ScopeId, +) -> Result> { + let lazy_span = scope + .name_span(db.as_hir_db()) + .ok_or("Failed to get name 
span")?; + let span = lazy_span + .resolve(db.as_spanned_hir_db()) + .ok_or("Failed to resolve span")?; + to_lsp_location_from_span(db.as_input_db(), ingot, span) +} + +pub fn severity_to_lsp(is_primary: bool, severity: Severity) -> DiagnosticSeverity { + // We set the severity to `HINT` for a secondary diags. + if !is_primary { + return DiagnosticSeverity::HINT; + }; + + match severity { + Severity::Error => DiagnosticSeverity::ERROR, + Severity::Warning => DiagnosticSeverity::WARNING, + Severity::Note => DiagnosticSeverity::INFORMATION, + } +} + +pub fn diag_to_lsp( + db: &dyn InputDb, + ingot: InputIngot, + diag: CompleteDiagnostic, +) -> FxHashMap> { + let mut result = FxHashMap::default(); + let Ok(primary_location) = to_lsp_location_from_span(db, ingot, diag.primary_span()) else { + return result; + }; + + // TODO: this assumes that all sub_diagnostics point at files in the same ingot, + // which might not be the case + + diag.sub_diagnostics.into_iter().for_each(|sub| { + let is_primary = sub.is_primary(); + + let (location, code, message, related_information) = if is_primary { + ( + primary_location.clone(), + Some(NumberOrString::String(diag.error_code.to_string())), + diag.message.to_string() + "\n" + &sub.message, + None, + ) + } else { + let Some(span) = sub.span else { + return; + }; + + let location = match to_lsp_location_from_span(db, ingot, span) { + Ok(location) => location, + Err(e) => { + error!(e); + return; + } + }; + + ( + location, + None, + sub.message, + Some(vec![DiagnosticRelatedInformation { + location: primary_location.clone(), + message: "original diagnostic".to_string(), + }]), + ) + }; + + let diags = result.entry(location.uri).or_insert_with(Vec::new); + diags.push(async_lsp::lsp_types::Diagnostic { + range: location.range, + severity: Some(severity_to_lsp(is_primary, diag.severity)), + code, + source: None, + message, + related_information, + tags: None, + code_description: None, + data: None, + }); + }); + + result +} + +fn 
to_lsp_location_from_span( + db: &dyn InputDb, + ingot: InputIngot, + span: Span, +) -> Result> { + let uri = span.file.abs_path(db.as_input_db(), ingot); + let range = to_lsp_range_from_span(span, db.as_input_db())?; + let uri = async_lsp::lsp_types::Url::from_file_path(uri) + .map_err(|()| "Failed to convert path to URL")?; + Ok(async_lsp::lsp_types::Location { uri, range }) +} + +#[cfg(target_arch = "wasm32")] +use std::path::Path; + +#[cfg(target_arch = "wasm32")] +pub trait DummyFilePathConversion { + fn to_file_path(&self) -> Result; + fn from_file_path>(path: P) -> Result; +} + +#[cfg(target_arch = "wasm32")] +impl DummyFilePathConversion for async_lsp::lsp_types::Url { + fn to_file_path(&self) -> Result { + // for now we don't support file paths on wasm + Err(()) + } + fn from_file_path>(_path: P) -> Result { + // for now we don't support file paths on wasm + Err(()) + } +} diff --git a/crates/language-server/test_files/goto.fe b/crates/language-server/test_files/goto.fe new file mode 100644 index 0000000000..e3d2bd70fc --- /dev/null +++ b/crates/language-server/test_files/goto.fe @@ -0,0 +1,12 @@ +struct Foo {} +struct Bar {} + +fn main() { + let x: Foo + let y: Bar + let z: baz::Baz +} + +mod baz { + pub struct Baz {} +} \ No newline at end of file diff --git a/crates/language-server/test_files/goto.snap b/crates/language-server/test_files/goto.snap new file mode 100644 index 0000000000..3a74e9e809 --- /dev/null +++ b/crates/language-server/test_files/goto.snap @@ -0,0 +1,23 @@ +--- +source: crates/language-server/src/goto.rs +assertion_line: 283 +expression: snapshot +input_file: crates/language-server/test_files/goto.fe +--- +0: struct Foo {} +1: struct Bar {} +2: +3: fn main() { +4: let x: Foo +5: let y: Bar +6: let z: baz::Baz +7: } +8: +9: mod baz { +10: pub struct Baz {} +11: } +--- +cursor position (4, 11), path: goto::Foo +cursor position (5, 11), path: goto::Bar +cursor position (6, 11), path: goto::baz +cursor position (6, 16), path: 
goto::baz::Baz diff --git a/crates/language-server/test_files/hoverable/fe.toml b/crates/language-server/test_files/hoverable/fe.toml new file mode 100644 index 0000000000..e69de29bb2 diff --git a/crates/language-server/test_files/hoverable/src/lib.fe b/crates/language-server/test_files/hoverable/src/lib.fe new file mode 100644 index 0000000000..a91ad35b84 --- /dev/null +++ b/crates/language-server/test_files/hoverable/src/lib.fe @@ -0,0 +1,31 @@ +use stuff::calculations::{ return_three, return_four } + +/// ## `return_seven` +/// ### a function of numbers +/// #### returns the 3+4=7 +pub fn return_seven() { + return_three() + return_four() +} + +fn calculate() { + return_seven() + let x: stuff::calculations::ambiguous +} + +/// Anything that can be calculated ought to implement a +/// `calculate` function +pub trait Calculatable { + fn calculate(self) +} + +/// A struct for holding numbers like `x` and `y` +struct Numbers { + x: i32, + y: i32 +} + +impl Calculatable for Numbers { + fn calculate(self) { + self.x + self.y + } +} \ No newline at end of file diff --git a/crates/language-server/test_files/hoverable/src/stuff.fe b/crates/language-server/test_files/hoverable/src/stuff.fe new file mode 100644 index 0000000000..b97ffe7660 --- /dev/null +++ b/crates/language-server/test_files/hoverable/src/stuff.fe @@ -0,0 +1,19 @@ +/// ### Calculation helper functions +pub mod calculations { + /// A function that returns `3` + pub fn return_three() -> u32 { + 3 + } + + /// ## A function that returns 4 + pub fn return_four() { + 4 + } + + /// which one is it? + pub mod ambiguous { + + } + /// is it this one? 
+ pub fn ambiguous() {} +} \ No newline at end of file diff --git a/crates/language-server/test_files/lol.fe b/crates/language-server/test_files/lol.fe new file mode 100644 index 0000000000..f08c02f075 --- /dev/null +++ b/crates/language-server/test_files/lol.fe @@ -0,0 +1,12 @@ +struct Foo {} +struct Bar {} + +fn main() { + let x: Foo + let y: Barrr + let z: baz::Bazzz +} + +mod baz { + pub struct Baz {} +} \ No newline at end of file diff --git a/crates/language-server/test_files/messy/dangling.fe b/crates/language-server/test_files/messy/dangling.fe new file mode 100644 index 0000000000..e69de29bb2 diff --git a/crates/language-server/test_files/messy/foo/bar/fe.toml b/crates/language-server/test_files/messy/foo/bar/fe.toml new file mode 100644 index 0000000000..e69de29bb2 diff --git a/crates/language-server/test_files/messy/foo/bar/src/main.fe b/crates/language-server/test_files/messy/foo/bar/src/main.fe new file mode 100644 index 0000000000..e69de29bb2 diff --git a/crates/language-server/test_files/nested_ingots/fe.toml b/crates/language-server/test_files/nested_ingots/fe.toml new file mode 100644 index 0000000000..e69de29bb2 diff --git a/crates/language-server/test_files/nested_ingots/ingots/foo/fe.toml b/crates/language-server/test_files/nested_ingots/ingots/foo/fe.toml new file mode 100644 index 0000000000..e69de29bb2 diff --git a/crates/language-server/test_files/nested_ingots/ingots/foo/src/main.fe b/crates/language-server/test_files/nested_ingots/ingots/foo/src/main.fe new file mode 100644 index 0000000000..5b5a7b8335 --- /dev/null +++ b/crates/language-server/test_files/nested_ingots/ingots/foo/src/main.fe @@ -0,0 +1 @@ +let foo = 1; \ No newline at end of file diff --git a/crates/language-server/test_files/nested_ingots/src/lib.fe b/crates/language-server/test_files/nested_ingots/src/lib.fe new file mode 100644 index 0000000000..e69de29bb2 diff --git a/crates/language-server/test_files/single_ingot/fe.toml 
b/crates/language-server/test_files/single_ingot/fe.toml new file mode 100644 index 0000000000..e69de29bb2 diff --git a/crates/language-server/test_files/single_ingot/src/foo.fe b/crates/language-server/test_files/single_ingot/src/foo.fe new file mode 100644 index 0000000000..99e9264c32 --- /dev/null +++ b/crates/language-server/test_files/single_ingot/src/foo.fe @@ -0,0 +1,8 @@ +pub fn why() { + let x = 5 + x +} + +pub struct Why { + pub x: i32 +} \ No newline at end of file diff --git a/crates/language-server/test_files/single_ingot/src/lib.fe b/crates/language-server/test_files/single_ingot/src/lib.fe new file mode 100644 index 0000000000..5669526f6d --- /dev/null +++ b/crates/language-server/test_files/single_ingot/src/lib.fe @@ -0,0 +1,23 @@ +use ingot::foo::Why + +mod who { + use super::Why + pub mod what { + pub fn how() {} + pub mod how { + use ingot::Why + pub struct When { + x: Why + } + } + } + pub struct Bar { + x: Why + } +} + +fn bar() -> () { + let y: Why + let z = who::what::how + let z: who::what::how::When +} \ No newline at end of file diff --git a/crates/language-server/test_files/single_ingot/src/lib.snap b/crates/language-server/test_files/single_ingot/src/lib.snap new file mode 100644 index 0000000000..a6b73ab514 --- /dev/null +++ b/crates/language-server/test_files/single_ingot/src/lib.snap @@ -0,0 +1,39 @@ +--- +source: crates/language-server/src/functionality/goto.rs +expression: snapshot +input_file: crates/language-server/test_files/single_ingot/src/lib.fe +--- +0: use ingot::foo::Why +1: +2: mod who { +3: use super::Why +4: pub mod what { +5: pub fn how() {} +6: pub mod how { +7: use ingot::Why +8: pub struct When { +9: x: Why +10: } +11: } +12: } +13: pub struct Bar { +14: x: Why +15: } +16: } +17: +18: fn bar() -> () { +19: let y: Why +20: let z = who::what::how +21: let z: who::what::how::When +22: } +--- +cursor position (9, 11), path: lib::foo::Why +cursor position (14, 7), path: lib::foo::Why +cursor position (19, 11), path: 
lib::foo::Why +cursor position (20, 12), path: lib::who +cursor position (20, 17), path: lib::who::what +cursor position (20, 23), path: lib::who::what::how +cursor position (21, 11), path: lib::who +cursor position (21, 16), path: lib::who::what +cursor position (21, 22), path: lib::who::what::how +cursor position (21, 27), path: lib::who::what::how::When diff --git a/crates/language-server/test_files/smallest_enclosing.fe b/crates/language-server/test_files/smallest_enclosing.fe new file mode 100644 index 0000000000..fa1ae4c2ff --- /dev/null +++ b/crates/language-server/test_files/smallest_enclosing.fe @@ -0,0 +1,7 @@ +struct Foo {} +struct Bar {} + +fn main() { + let x: Foo + let y: Bar +} \ No newline at end of file diff --git a/crates/language-server/test_files/smallest_enclosing.snap b/crates/language-server/test_files/smallest_enclosing.snap new file mode 100644 index 0000000000..88e36f1a69 --- /dev/null +++ b/crates/language-server/test_files/smallest_enclosing.snap @@ -0,0 +1,18 @@ +--- +source: crates/language-server/src/goto.rs +assertion_line: 198 +expression: result +input_file: crates/language-server/test_files/smallest_enclosing.fe +--- +struct Foo {} +struct Bar {} + +fn main() { + let x: Foo + let y: Bar +} +--- +cursor position: 64, path: +cursor position: 52, path: smallest_enclosing::Foo +cursor position: 49, path: +cursor position: 67, path: smallest_enclosing::Bar diff --git a/crates/mir/src/analysis/cfg.rs b/crates/mir/src/analysis/cfg.rs index d4de8b2cfa..2d7010c586 100644 --- a/crates/mir/src/analysis/cfg.rs +++ b/crates/mir/src/analysis/cfg.rs @@ -123,7 +123,7 @@ impl<'a> CfgPostOrder<'a> { } } -impl<'a> Iterator for CfgPostOrder<'a> { +impl Iterator for CfgPostOrder<'_> { type Item = BasicBlockId; fn next(&mut self) -> Option { diff --git a/crates/mir/src/analysis/loop_tree.rs b/crates/mir/src/analysis/loop_tree.rs index 8aed611135..e6e82b2eed 100644 --- a/crates/mir/src/analysis/loop_tree.rs +++ b/crates/mir/src/analysis/loop_tree.rs @@ 
-1,9 +1,7 @@ -use id_arena::{Arena, Id}; - use fxhash::FxHashMap; +use id_arena::{Arena, Id}; use super::{cfg::ControlFlowGraph, domtree::DomTree}; - use crate::ir::BasicBlockId; #[derive(Debug, Default, Clone)] @@ -193,7 +191,7 @@ impl<'a, 'b> BlocksInLoopPostOrder<'a, 'b> { } } -impl<'a, 'b> Iterator for BlocksInLoopPostOrder<'a, 'b> { +impl Iterator for BlocksInLoopPostOrder<'_, '_> { type Item = BasicBlockId; fn next(&mut self) -> Option { @@ -239,7 +237,6 @@ enum BlockState { #[cfg(test)] mod tests { use super::*; - use crate::ir::{body_builder::BodyBuilder, FunctionBody, FunctionId, SourceInfo, TypeId}; fn compute_loop(func: &FunctionBody) -> LoopTree { diff --git a/crates/mir/src/db/queries/function.rs b/crates/mir/src/db/queries/function.rs index d48cbe8ab0..e9f0e9f282 100644 --- a/crates/mir/src/db/queries/function.rs +++ b/crates/mir/src/db/queries/function.rs @@ -1,9 +1,9 @@ use std::{collections::BTreeMap, rc::Rc}; -use fe_analyzer::display::Displayable; -use fe_analyzer::namespace::items as analyzer_items; -use fe_analyzer::namespace::items::Item; -use fe_analyzer::namespace::types as analyzer_types; +use fe_analyzer::{ + display::Displayable, + namespace::{items as analyzer_items, items::Item, types as analyzer_types}, +}; use smol_str::SmolStr; @@ -28,8 +28,8 @@ pub fn mir_lowered_monomorphized_func_signature( lower_monomorphized_func_signature(db, analyzer_func, resolved_generics) } -/// Generate MIR function and monomorphize generic parameters as if they were called with unit type -/// NOTE: THIS SHOULD ONLY BE USED IN TEST CODE +/// Generate MIR function and monomorphize generic parameters as if they were +/// called with unit type NOTE: THIS SHOULD ONLY BE USED IN TEST CODE pub fn mir_lowered_pseudo_monomorphized_func_signature( db: &dyn MirDb, analyzer_func: analyzer_items::FunctionId, diff --git a/crates/mir/src/db/queries/types.rs b/crates/mir/src/db/queries/types.rs index a0d13511d5..a60e706bbf 100644 --- a/crates/mir/src/db/queries/types.rs 
+++ b/crates/mir/src/db/queries/types.rs @@ -465,7 +465,7 @@ fn expect_projection_index(value: &Value) -> usize { } fn round_up(value: usize, slot_size: usize) -> usize { - ((value + slot_size - 1) / slot_size) * slot_size + value.div_ceil(slot_size) * slot_size } #[cfg(test)] diff --git a/crates/mir/src/ir/body_order.rs b/crates/mir/src/ir/body_order.rs index 70df3cf76a..b2a0eef249 100644 --- a/crates/mir/src/ir/body_order.rs +++ b/crates/mir/src/ir/body_order.rs @@ -408,7 +408,7 @@ struct BlockIter<'a> { blocks: &'a FxHashMap, } -impl<'a> Iterator for BlockIter<'a> { +impl Iterator for BlockIter<'_> { type Item = BasicBlockId; fn next(&mut self) -> Option { @@ -423,7 +423,7 @@ struct InstIter<'a> { insts: &'a FxHashMap, } -impl<'a> Iterator for InstIter<'a> { +impl Iterator for InstIter<'_> { type Item = InstId; fn next(&mut self) -> Option { diff --git a/crates/mir/src/ir/function.rs b/crates/mir/src/ir/function.rs index 026b7fb0ca..c359f20f71 100644 --- a/crates/mir/src/ir/function.rs +++ b/crates/mir/src/ir/function.rs @@ -1,5 +1,4 @@ -use fe_analyzer::namespace::items as analyzer_items; -use fe_analyzer::namespace::types as analyzer_types; +use fe_analyzer::namespace::{items as analyzer_items, types as analyzer_types}; use fe_common::impl_intern_key; use fxhash::FxHashMap; use id_arena::Arena; diff --git a/crates/mir/src/ir/inst.rs b/crates/mir/src/ir/inst.rs index 4ef76fa906..568f031d71 100644 --- a/crates/mir/src/ir/inst.rs +++ b/crates/mir/src/ir/inst.rs @@ -667,7 +667,7 @@ pub enum BranchInfo<'a> { Switch(ValueId, &'a SwitchTable, Option), } -impl<'a> BranchInfo<'a> { +impl BranchInfo<'_> { pub fn is_not_a_branch(&self) -> bool { matches!(self, BranchInfo::NotBranch) } @@ -695,7 +695,7 @@ pub enum IterBase<'a, T> { Chain(Box>, Box>), } -impl<'a, T> IterBase<'a, T> { +impl IterBase<'_, T> { fn one(value: T) -> Self { Self::One(Some(value)) } @@ -705,7 +705,7 @@ impl<'a, T> IterBase<'a, T> { } } -impl<'a, T> Iterator for IterBase<'a, T> +impl Iterator for 
IterBase<'_, T> where T: Copy, { diff --git a/crates/mir/src/ir/types.rs b/crates/mir/src/ir/types.rs index 9692d858c5..8bdd9995c2 100644 --- a/crates/mir/src/ir/types.rs +++ b/crates/mir/src/ir/types.rs @@ -1,5 +1,4 @@ -use fe_analyzer::namespace::items as analyzer_items; -use fe_analyzer::namespace::types as analyzer_types; +use fe_analyzer::namespace::{items as analyzer_items, types as analyzer_types}; use fe_common::{impl_intern_key, Span}; use smol_str::SmolStr; diff --git a/crates/mir/src/lower/function.rs b/crates/mir/src/lower/function.rs index 358a9f2fac..0ae4ffbc92 100644 --- a/crates/mir/src/lower/function.rs +++ b/crates/mir/src/lower/function.rs @@ -264,8 +264,8 @@ impl<'db, 'a> BodyLowerHelper<'db, 'a> { let value = self.declare_var(name, ty, var.into()); if let Some(init) = init { let (init, _init_ty) = self.lower_expr(init); - // debug_assert_eq!(ty.deref(self.db), init_ty, "vardecl init type mismatch: {} != {}", - // ty.as_string(self.db), + // debug_assert_eq!(ty.deref(self.db), init_ty, "vardecl init type mismatch: {} + // != {}", ty.as_string(self.db), // init_ty.as_string(self.db)); self.builder.map_result(init, value.into()); } diff --git a/crates/parser/src/ast.rs b/crates/parser/src/ast.rs index 5273440f74..5f619588ad 100644 --- a/crates/parser/src/ast.rs +++ b/crates/parser/src/ast.rs @@ -3,9 +3,10 @@ use fe_common::{Span, Spanned}; use indenter::indented; use serde::{Deserialize, Serialize}; pub use smol_str::SmolStr; -use std::fmt; -use std::fmt::Formatter; -use std::fmt::Write; +use std::{ + fmt, + fmt::{Formatter, Write}, +}; use vec1::Vec1; #[derive(Serialize, Deserialize, Debug, PartialEq, Eq, Hash, Clone)] diff --git a/crates/parser/src/grammar/contracts.rs b/crates/parser/src/grammar/contracts.rs index ed234aab57..acc38a4591 100644 --- a/crates/parser/src/grammar/contracts.rs +++ b/crates/parser/src/grammar/contracts.rs @@ -1,9 +1,13 @@ -use super::functions::parse_fn_def; -use super::types::{parse_field, parse_opt_qualifier}; +use 
super::{ + functions::parse_fn_def, + types::{parse_field, parse_opt_qualifier}, +}; -use crate::ast::{Contract, ContractStmt}; -use crate::node::{Node, Span}; -use crate::{ParseFailed, ParseResult, Parser, TokenKind}; +use crate::{ + ast::{Contract, ContractStmt}, + node::{Node, Span}, + ParseFailed, ParseResult, Parser, TokenKind, +}; // Rule: all "statement" level parse functions consume their trailing // newline(s), either directly or via a function they call. diff --git a/crates/parser/src/grammar/expressions.rs b/crates/parser/src/grammar/expressions.rs index 10a752163d..82b6873b85 100644 --- a/crates/parser/src/grammar/expressions.rs +++ b/crates/parser/src/grammar/expressions.rs @@ -1,6 +1,8 @@ -use crate::ast::{self, CallArg, Expr, GenericArg, Path}; -use crate::node::Node; -use crate::{Label, ParseFailed, ParseResult, Parser, Token, TokenKind}; +use crate::{ + ast::{self, CallArg, Expr, GenericArg, Path}, + node::Node, + Label, ParseFailed, ParseResult, Parser, Token, TokenKind, +}; use super::types::parse_generic_args; diff --git a/crates/parser/src/grammar/functions.rs b/crates/parser/src/grammar/functions.rs index 3b2a8dae99..77344ae432 100644 --- a/crates/parser/src/grammar/functions.rs +++ b/crates/parser/src/grammar/functions.rs @@ -1,12 +1,13 @@ -use super::expressions::parse_expr; -use super::types::parse_type_desc; - -use crate::ast::{ - BinOperator, Expr, FuncStmt, Function, FunctionArg, FunctionSignature, GenericParameter, - LiteralPattern, MatchArm, Path, Pattern, TypeDesc, VarDeclTarget, +use super::{expressions::parse_expr, types::parse_type_desc}; + +use crate::{ + ast::{ + BinOperator, Expr, FuncStmt, Function, FunctionArg, FunctionSignature, GenericParameter, + LiteralPattern, MatchArm, Path, Pattern, TypeDesc, VarDeclTarget, + }, + node::{Node, Span}, + Label, ParseFailed, ParseResult, Parser, TokenKind, }; -use crate::node::{Node, Span}; -use crate::{Label, ParseFailed, ParseResult, Parser, TokenKind}; /// Parse a function definition 
without a body. The optional `pub` qualifier /// must be parsed by the caller, and passed in. Next token must be `unsafe` or diff --git a/crates/parser/src/grammar/module.rs b/crates/parser/src/grammar/module.rs index e6c1561695..9abd2d4d9e 100644 --- a/crates/parser/src/grammar/module.rs +++ b/crates/parser/src/grammar/module.rs @@ -1,13 +1,17 @@ -use super::expressions::parse_expr; -use super::functions::parse_fn_def; -use super::types::{ - parse_impl_def, parse_path_tail, parse_struct_def, parse_trait_def, parse_type_alias, - parse_type_desc, +use super::{ + contracts::parse_contract_def, + expressions::parse_expr, + functions::parse_fn_def, + types::{ + parse_enum_def, parse_impl_def, parse_path_tail, parse_struct_def, parse_trait_def, + parse_type_alias, parse_type_desc, + }, +}; +use crate::{ + ast::{ConstantDecl, Module, ModuleStmt, Pragma, Use, UseTree}, + node::{Node, Span}, + Label, ParseFailed, ParseResult, Parser, TokenKind, }; -use super::{contracts::parse_contract_def, types::parse_enum_def}; -use crate::ast::{ConstantDecl, Module, ModuleStmt, Pragma, Use, UseTree}; -use crate::node::{Node, Span}; -use crate::{Label, ParseFailed, ParseResult, Parser, TokenKind}; use semver::VersionReq; diff --git a/crates/parser/src/grammar/types.rs b/crates/parser/src/grammar/types.rs index ba3c6cddfa..d557e04c43 100644 --- a/crates/parser/src/grammar/types.rs +++ b/crates/parser/src/grammar/types.rs @@ -1,11 +1,14 @@ -use crate::ast::{ - self, Enum, Field, GenericArg, Impl, Path, Trait, TypeAlias, TypeDesc, Variant, VariantKind, +use crate::{ + ast::{ + self, Enum, Field, GenericArg, Impl, Path, Trait, TypeAlias, TypeDesc, Variant, VariantKind, + }, + grammar::{ + expressions::parse_expr, + functions::{parse_fn_def, parse_fn_sig}, + }, + node::{Node, Span}, + ParseFailed, ParseResult, Parser, Token, TokenKind, }; -use crate::grammar::expressions::parse_expr; -use crate::grammar::functions::{parse_fn_def, parse_fn_sig}; -use crate::node::{Node, Span}; -use 
crate::Token; -use crate::{ParseFailed, ParseResult, Parser, TokenKind}; use fe_common::diagnostics::Label; use if_chain::if_chain; use smol_str::SmolStr; @@ -34,7 +37,8 @@ pub fn parse_struct_def( let attributes = if let Some(attr) = par.optional(TokenKind::Hash) { let attr_name = par.expect_with_notes(TokenKind::Name, "failed to parse attribute definition", |_| vec!["Note: an attribute name must start with a letter or underscore, and contain letters, numbers, or underscores".into()])?; - // This hints to a future where we would support multiple attributes per field. For now we don't need it. + // This hints to a future where we would support multiple attributes per field. + // For now we don't need it. vec![Node::new(attr_name.text.into(), attr.span + attr_name.span)] } else { vec![] diff --git a/crates/parser/src/lexer.rs b/crates/parser/src/lexer.rs index 980cb76df5..ba6274f98e 100644 --- a/crates/parser/src/lexer.rs +++ b/crates/parser/src/lexer.rs @@ -12,7 +12,7 @@ pub struct Lexer<'a> { impl<'a> Lexer<'a> { /// Create a new lexer with the given source code string. 
- pub fn new(file_id: SourceFileId, src: &'a str) -> Lexer { + pub fn new(file_id: SourceFileId, src: &'a str) -> Lexer<'a> { Lexer { file_id, inner: TokenKind::lexer(src), diff --git a/crates/parser/src/lexer/token.rs b/crates/parser/src/lexer/token.rs index 643ffe74d4..5400982bfc 100644 --- a/crates/parser/src/lexer/token.rs +++ b/crates/parser/src/lexer/token.rs @@ -1,5 +1,4 @@ -use crate::node::Node; -use crate::node::Span; +use crate::node::{Node, Span}; use logos::Logos; use smol_str::SmolStr; use std::ops::Add; diff --git a/crates/parser/src/lib.rs b/crates/parser/src/lib.rs index fbd217b1b5..5556e195e5 100644 --- a/crates/parser/src/lib.rs +++ b/crates/parser/src/lib.rs @@ -7,8 +7,7 @@ pub use parser::{Label, ParseFailed, ParseResult, Parser}; pub mod node; use ast::Module; -use fe_common::diagnostics::Diagnostic; -use fe_common::files::SourceFileId; +use fe_common::{diagnostics::Diagnostic, files::SourceFileId}; /// Parse a [`Module`] from the file content string. /// @@ -18,8 +17,8 @@ use fe_common::files::SourceFileId; /// /// If a fatal parse error occurred, the last element of the `Module::body` will /// be a `ModuleStmt::ParseError`. The parser currently has very limited ability -/// to recover from syntax errors; this is just a first meager attempt at returning a -/// useful AST when there are syntax errors. +/// to recover from syntax errors; this is just a first meager attempt at +/// returning a useful AST when there are syntax errors. /// /// A [`SourceFileId`] is required to associate any diagnostics with the /// underlying file. 
diff --git a/crates/parser/src/parser.rs b/crates/parser/src/parser.rs index 56436d4097..4c7fdaa8a3 100644 --- a/crates/parser/src/parser.rs +++ b/crates/parser/src/parser.rs @@ -1,9 +1,13 @@ pub use fe_common::diagnostics::Label; -use fe_common::diagnostics::{Diagnostic, Severity}; -use fe_common::files::SourceFileId; - -use crate::lexer::{Lexer, Token, TokenKind}; -use crate::node::Span; +use fe_common::{ + diagnostics::{Diagnostic, Severity}, + files::SourceFileId, +}; + +use crate::{ + lexer::{Lexer, Token, TokenKind}, + node::Span, +}; use std::{error, fmt}; #[derive(Debug, PartialEq, Eq, Hash, Copy, Clone)] @@ -359,7 +363,7 @@ impl<'a, 'b> BTParser<'a, 'b> { } } -impl<'a, 'b> std::ops::Deref for BTParser<'a, 'b> { +impl<'a> std::ops::Deref for BTParser<'a, '_> { type Target = Parser<'a>; fn deref(&self) -> &Self::Target { @@ -367,7 +371,7 @@ impl<'a, 'b> std::ops::Deref for BTParser<'a, 'b> { } } -impl<'a, 'b> std::ops::DerefMut for BTParser<'a, 'b> { +impl std::ops::DerefMut for BTParser<'_, '_> { fn deref_mut(&mut self) -> &mut Self::Target { &mut self.parser } diff --git a/crates/parser/tests/cases/errors.rs b/crates/parser/tests/cases/errors.rs index becc803558..dd7fa7b6ea 100644 --- a/crates/parser/tests/cases/errors.rs +++ b/crates/parser/tests/cases/errors.rs @@ -1,8 +1,8 @@ -use fe_common::db::TestDb; -use fe_common::diagnostics::diagnostics_string; -use fe_common::SourceFileId; -use fe_parser::grammar::{expressions, functions, module}; -use fe_parser::Parser; +use fe_common::{db::TestDb, diagnostics::diagnostics_string, SourceFileId}; +use fe_parser::{ + grammar::{expressions, functions, module}, + Parser, +}; use insta::assert_snapshot; pub fn err_string(test_name: &str, mut parse_fn: F, src: &str) -> String diff --git a/crates/parser/tests/cases/parse_ast.rs b/crates/parser/tests/cases/parse_ast.rs index 78a849d58e..8102fe5857 100644 --- a/crates/parser/tests/cases/parse_ast.rs +++ b/crates/parser/tests/cases/parse_ast.rs @@ -1,10 +1,12 @@ -use 
fe_common::db::TestDb; -use fe_common::diagnostics::print_diagnostics; -use fe_common::utils::ron::to_ron_string_pretty; -use fe_common::SourceFileId; -use fe_parser::grammar::{expressions, functions, module, types}; -use fe_parser::node::Node; -use fe_parser::{ast, ParseResult, Parser}; +use fe_common::{ + db::TestDb, diagnostics::print_diagnostics, utils::ron::to_ron_string_pretty, SourceFileId, +}; +use fe_parser::{ + ast, + grammar::{expressions, functions, module, types}, + node::Node, + ParseResult, Parser, +}; use insta::assert_snapshot; use serde::Serialize; use wasm_bindgen_test::wasm_bindgen_test; diff --git a/crates/parser/tests/cases/print_ast.rs b/crates/parser/tests/cases/print_ast.rs index 900f3472d1..d6292efd84 100644 --- a/crates/parser/tests/cases/print_ast.rs +++ b/crates/parser/tests/cases/print_ast.rs @@ -1,6 +1,4 @@ -use fe_common::db::TestDb; -use fe_common::diagnostics::print_diagnostics; -use fe_common::SourceFileId; +use fe_common::{db::TestDb, diagnostics::print_diagnostics, SourceFileId}; use fe_parser::parse_file; use fe_test_files::fixture; use insta::assert_snapshot; diff --git a/crates/parser2/Cargo.toml b/crates/parser2/Cargo.toml new file mode 100644 index 0000000000..ff4ad46ab7 --- /dev/null +++ b/crates/parser2/Cargo.toml @@ -0,0 +1,27 @@ +[package] +name = "fe-parser2" +version = "0.26.0" +authors = ["The Fe Developers "] +edition = "2021" +license = "Apache-2.0" +repository = "https://github.com/ethereum/fe" +description = "Parser lib for Fe." 
+ +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +rowan = "0.15.10" +logos = "0.12.1" +rustc-hash = "1.1.0" +lazy_static = "1.4.0" +derive_more = "0.99" +smallvec = { version = "1.11.2", features = ["union"] } +unwrap-infallible = "0.1.5" + +[dev-dependencies] +fe-compiler-test-utils = { path = "../test-utils" } +dir-test = "0.3" +wasm-bindgen-test = "0.3" + +[target.'cfg(target_arch = "wasm32")'.dependencies] +wasm-bindgen = "0.2" diff --git a/crates/parser2/build.rs b/crates/parser2/build.rs new file mode 100644 index 0000000000..8e048f9218 --- /dev/null +++ b/crates/parser2/build.rs @@ -0,0 +1,4 @@ +fn main() { + #[cfg(test)] + println!("cargo:rerun-if-changed=./test_files"); +} diff --git a/crates/parser2/src/ast/attr.rs b/crates/parser2/src/ast/attr.rs new file mode 100644 index 0000000000..314f82473a --- /dev/null +++ b/crates/parser2/src/ast/attr.rs @@ -0,0 +1,190 @@ +use rowan::ast::{support, AstNode}; + +use super::ast_node; +use crate::{FeLang, SyntaxKind as SK, SyntaxToken}; + +ast_node! { + pub struct AttrList, + SK::AttrList, + IntoIterator, +} +impl AttrList { + /// Returns only normal attributes in the attribute list. + pub fn normal_attrs(&self) -> impl Iterator { + self.iter().filter_map(|attr| match attr.kind() { + AttrKind::Normal(attr) => Some(attr), + AttrKind::DocComment(_) => None, + }) + } + + /// Returns only doc comment attributes in the attribute list. + pub fn doc_attrs(&self) -> impl Iterator { + self.iter().filter_map(|attr| match attr.kind() { + AttrKind::Normal(_) => None, + AttrKind::DocComment(attr) => Some(attr), + }) + } +} + +ast_node! { + /// An attribute, which can be either a normal attribute or a doc comment attribute. + pub struct Attr, + SK::Attr | SK::DocCommentAttr, +} +impl Attr { + /// Returns the kind of the attribute. 
+ pub fn kind(&self) -> AttrKind { + match self.syntax().kind() { + SK::Attr => AttrKind::Normal(AstNode::cast(self.syntax().clone()).unwrap()), + SK::DocCommentAttr => { + AttrKind::DocComment(AstNode::cast(self.syntax().clone()).unwrap()) + } + _ => unreachable!(), + } + } +} + +ast_node! { + /// A normal attribute. + /// `#attr(arg1: Arg, arg2: Arg)` + pub struct NormalAttr, + SK::Attr, +} +impl NormalAttr { + /// Returns the name of the attribute. + /// `foo` in `#foo(..)` + pub fn name(&self) -> Option { + support::token(self.syntax(), SK::Ident) + } + + pub fn args(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! { + /// An attribute argument list. + /// `(arg1: Arg, arg2: Arg)` in `#foo(arg1: Arg, arg2: Arg)` + pub struct AttrArgList, + SK::AttrArgList, + IntoIterator, +} + +ast_node! { + /// An Attribute argument. + /// `arg1: Arg` in `#foo(arg1: Arg, arg2: Arg)` + pub struct AttrArg, + SK::AttrArg +} +impl AttrArg { + /// Returns the key of the attribute argument. + /// `arg1` in `arg1: Arg`. + pub fn key(&self) -> Option { + support::token(self.syntax(), SK::Ident) + } + + /// Returns the value of the attribute argument. + /// `Arg` in `arg1: Arg`. + pub fn value(&self) -> Option { + self.syntax() + .children_with_tokens() + .filter_map(|c| match c.into_token() { + Some(c) if c.kind() == SK::Ident => Some(c), + _ => None, + }) + .nth(1) + } +} + +ast_node! { + pub struct DocCommentAttr, + SK::DocCommentAttr, +} +impl DocCommentAttr { + /// Returns the underlying token of the doc comment, which includes `///`. + pub fn doc(&self) -> Option { + support::token(self.syntax(), SK::DocComment) + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash, derive_more::From, derive_more::TryInto)] +pub enum AttrKind { + /// A normal attribute. + Normal(NormalAttr), + /// A doc comment attribute. + DocComment(DocCommentAttr), +} + +/// A trait for AST nodes that can have an attributes. 
+pub trait AttrListOwner: AstNode { + /// Returns the attribute list of the node. + fn attr_list(&self) -> Option { + support::child(self.syntax()) + } +} + +#[cfg(test)] +mod tests { + use crate::{ + lexer::Lexer, + parser::{attr::AttrListScope, Parser}, + }; + + use wasm_bindgen_test::wasm_bindgen_test; + + use super::*; + + fn parse_attr_list(source: &str) -> AttrList { + let lexer = Lexer::new(source); + let mut parser = Parser::new(lexer); + parser.parse(AttrListScope::default()).unwrap(); + AttrList::cast(parser.finish_to_node().0).unwrap() + } + + #[test] + #[wasm_bindgen_test] + fn attr_list() { + let source = r#" + #foo + /// Doc1 + #cfg(target: evm, abi: solidity) + /// Doc2 + "#; + let attr_list = parse_attr_list(source); + for (i, attr) in attr_list.doc_attrs().enumerate() { + match i { + 0 => assert_eq!(attr.doc().unwrap().text(), "/// Doc1"), + 1 => assert_eq!(attr.doc().unwrap().text(), "/// Doc2"), + _ => unreachable!(), + } + } + + for (i, attr) in attr_list.normal_attrs().enumerate() { + match i { + 0 => { + assert_eq!(attr.name().unwrap().text(), "foo"); + assert!(attr.args().is_none()); + } + + 1 => { + assert_eq!(attr.name().unwrap().text(), "cfg"); + for (i, arg) in attr.args().unwrap().iter().enumerate() { + match i { + 0 => { + assert_eq!(arg.key().unwrap().text(), "target"); + assert_eq!(arg.value().unwrap().text(), "evm"); + } + 1 => { + assert_eq!(arg.key().unwrap().text(), "abi"); + assert_eq!(arg.value().unwrap().text(), "solidity"); + } + _ => unreachable!(), + } + } + } + + _ => unreachable!(), + } + } + } +} diff --git a/crates/parser2/src/ast/expr.rs b/crates/parser2/src/ast/expr.rs new file mode 100644 index 0000000000..b0a133ec48 --- /dev/null +++ b/crates/parser2/src/ast/expr.rs @@ -0,0 +1,1018 @@ +use rowan::ast::{support, AstNode}; + +use super::{ast_node, GenericArgsOwner, LitInt}; +use crate::{SyntaxKind as SK, SyntaxNode, SyntaxToken}; + +ast_node! { + /// An expression. 
+ /// Use [`Self::kind`] to determine the type of expression. + pub struct Expr, + SK::BlockExpr + | SK::BinExpr + | SK::UnExpr + | SK::CallExpr + | SK::MethodCallExpr + | SK::PathExpr + | SK::RecordInitExpr + | SK::FieldExpr + | SK::IndexExpr + | SK::TupleExpr + | SK::ArrayExpr + | SK::ArrayRepExpr + | SK::LitExpr + | SK::IfExpr + | SK::MatchExpr + | SK::ParenExpr + | SK::AssignExpr + | SK::AugAssignExpr, +} + +impl Expr { + /// Returns the kind of expression. + pub fn kind(&self) -> ExprKind { + match self.syntax().kind() { + SK::BlockExpr => ExprKind::Block(AstNode::cast(self.syntax().clone()).unwrap()), + SK::BinExpr => ExprKind::Bin(AstNode::cast(self.syntax().clone()).unwrap()), + SK::UnExpr => ExprKind::Un(AstNode::cast(self.syntax().clone()).unwrap()), + SK::CallExpr => ExprKind::Call(AstNode::cast(self.syntax().clone()).unwrap()), + SK::MethodCallExpr => { + ExprKind::MethodCall(AstNode::cast(self.syntax().clone()).unwrap()) + } + SK::PathExpr => ExprKind::Path(AstNode::cast(self.syntax().clone()).unwrap()), + SK::RecordInitExpr => { + ExprKind::RecordInit(AstNode::cast(self.syntax().clone()).unwrap()) + } + SK::FieldExpr => ExprKind::Field(AstNode::cast(self.syntax().clone()).unwrap()), + SK::IndexExpr => ExprKind::Index(AstNode::cast(self.syntax().clone()).unwrap()), + SK::TupleExpr => ExprKind::Tuple(AstNode::cast(self.syntax().clone()).unwrap()), + SK::ArrayExpr => ExprKind::Array(AstNode::cast(self.syntax().clone()).unwrap()), + SK::ArrayRepExpr => ExprKind::ArrayRep(AstNode::cast(self.syntax().clone()).unwrap()), + SK::LitExpr => ExprKind::Lit(AstNode::cast(self.syntax().clone()).unwrap()), + SK::IfExpr => ExprKind::If(AstNode::cast(self.syntax().clone()).unwrap()), + SK::MatchExpr => ExprKind::Match(AstNode::cast(self.syntax().clone()).unwrap()), + SK::ParenExpr => ExprKind::Paren(AstNode::cast(self.syntax().clone()).unwrap()), + SK::AssignExpr => ExprKind::Assign(AstNode::cast(self.syntax().clone()).unwrap()), + SK::AugAssignExpr => 
ExprKind::AugAssign(AstNode::cast(self.syntax().clone()).unwrap()), + _ => unreachable!(), + } + } +} + +ast_node! { + /// `{ stmt1\n stmt2\n ..}` + pub struct BlockExpr, + SK::BlockExpr, + IntoIterator, +} +impl BlockExpr { + /// Returns the statements in the block. + pub fn stmts(&self) -> impl Iterator { + self.iter() + } + + /// Returns items declared in the block. + pub fn items(&self) -> impl Iterator { + support::children(self.syntax()) + } +} + +ast_node! { + /// `lhs op rhs` + pub struct BinExpr, + SK::BinExpr +} +impl BinExpr { + /// Returns the left-hand side of the binary operation. + pub fn lhs(&self) -> Option { + support::children(self.syntax()).next() + } + + /// Returns the right-hand side of the binary operation. + pub fn rhs(&self) -> Option { + support::children(self.syntax()).nth(1) + } + + /// Returns the operator of the binary operation. + pub fn op(&self) -> Option { + self.syntax() + .children_with_tokens() + .find_map(BinOp::from_node_or_token) + } +} + +ast_node! { + /// `op expr` + pub struct UnExpr, + SK::UnExpr +} +impl UnExpr { + /// Returns the operand of the unary operation. + pub fn expr(&self) -> Option { + support::children(self.syntax()).next() + } + + /// Returns the operator of the unary operation. + pub fn op(&self) -> Option { + self.syntax().children_with_tokens().find_map(|c| match c { + rowan::NodeOrToken::Token(token) => UnOp::from_token(token), + rowan::NodeOrToken::Node(_) => None, + }) + } +} + +ast_node! { + /// `func(arg1, arg2, ..)` + pub struct CallExpr, + SK::CallExpr, +} +impl CallExpr { + /// Returns the callee of the call expression. + pub fn callee(&self) -> Option { + support::child(self.syntax()) + } + + /// Returns the arguments of the call expression. + pub fn args(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! 
{ + /// `obj.method(arg1, arg2, ..)` + pub struct MethodCallExpr, + SK::MethodCallExpr +} +impl GenericArgsOwner for MethodCallExpr {} +impl MethodCallExpr { + /// Returns the receiver of the method call expression. + pub fn receiver(&self) -> Option { + support::child(self.syntax()) + } + + /// Returns the name of the method being called. + pub fn method_name(&self) -> Option { + support::token(self.syntax(), SK::Ident) + } + + /// Returns the arguments of the method call expression. + pub fn args(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! { + /// `path` + pub struct PathExpr, + SK::PathExpr +} +impl PathExpr { + /// Returns the path of the path expression. + pub fn path(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! { + /// `path { field1: expr1, field2: expr2, .. }` + pub struct RecordInitExpr, + SK::RecordInitExpr +} +impl RecordInitExpr { + /// Returns the path of the record init expression. + pub fn path(&self) -> Option { + support::child(self.syntax()) + } + + /// Returns the fields of the record init expression. + pub fn fields(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! { + /// `expr.field` or `expr.0` + pub struct FieldExpr, + SK::FieldExpr +} +impl FieldExpr { + /// Returns the expression being accessed. + pub fn receiver(&self) -> Option { + support::child(self.syntax()) + } + + /// Returns the name of the field. + pub fn field_name(&self) -> Option { + support::token(self.syntax(), SK::Ident) + } + + /// Returns the index number of the field. + pub fn field_index(&self) -> Option { + support::token(self.syntax(), SK::Int).map(|it| LitInt { token: it }) + } + + pub fn name_or_index(&self) -> Option { + self.field_name() + .or_else(|| self.field_index().map(|i| i.token().clone())) + } +} + +ast_node! { + /// `expr[index]` + pub struct IndexExpr, + SK::IndexExpr +} +impl IndexExpr { + /// Returns the expression being indexed. 
+ pub fn expr(&self) -> Option { + support::child(self.syntax()) + } + + /// Returns the index of the index expression. + pub fn index(&self) -> Option { + support::children(self.syntax()).nth(1) + } +} + +ast_node! { + /// `(expr1, expr2, ..)` + pub struct TupleExpr, + SK::TupleExpr, +} +impl TupleExpr { + /// Returns the expressions in the tuple. + pub fn elems(&self) -> impl Iterator> { + self.syntax().children().map(Expr::cast) + } +} + +ast_node! { + /// `[expr1, expr2, ..]` + pub struct ArrayExpr, + SK::ArrayExpr, +} +impl ArrayExpr { + /// Returns the expressions in the array. + /// Returns the expressions in the tuple. + pub fn elems(&self) -> impl Iterator> { + self.syntax().children().map(Expr::cast) + } +} + +ast_node! { + /// `[expr; size]` + pub struct ArrayRepExpr, + SK::ArrayRepExpr, +} +impl ArrayRepExpr { + /// Returns the expression being repeated. + pub fn val(&self) -> Option { + support::child(self.syntax()) + } + + /// Returns the size of the array. + pub fn len(&self) -> Option { + support::children(self.syntax()).nth(1) + } +} + +ast_node! { + pub struct LitExpr, + SK::LitExpr +} +impl LitExpr { + /// Returns the literal of the literal expression. + pub fn lit(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! { + /// `if cond { then } else { else_ }` + pub struct IfExpr, + SK::IfExpr +} +impl IfExpr { + /// Returns the condition of the if expression. + pub fn cond(&self) -> Option { + support::child(self.syntax()) + } + + /// Returns the then block of the if expression. + pub fn then(&self) -> Option { + self.syntax().children().skip(1).find_map(BlockExpr::cast) + } + + /// Returns the else block of the if expression. + pub fn else_(&self) -> Option { + self.syntax().children().filter_map(Expr::cast).nth(2) + } +} + +ast_node! { + /// `match expr { arm1, arm2, .. }` + pub struct MatchExpr, + SK::MatchExpr +} +impl MatchExpr { + /// Returns the expression being matched. 
+ pub fn scrutinee(&self) -> Option { + support::child(self.syntax()) + } + + /// Returns the arms of the match expression. + pub fn arms(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! { + /// `(expr)` + pub struct ParenExpr, + SK::ParenExpr +} +impl ParenExpr { + /// Returns the expression in the parentheses. + pub fn expr(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! { + /// `x = 1` + pub struct AssignExpr, + SK::AssignExpr, +} +impl AssignExpr { + /// Returns the expression of the lhs and rhs of the assignment. + pub fn lhs_expr(&self) -> Option { + support::children(self.syntax()).next() + } + + pub fn rhs_expr(&self) -> Option { + support::children(self.syntax()).nth(1) + } + + pub fn eq(&self) -> Option { + support::token(self.syntax(), SK::Eq) + } +} + +ast_node! { + /// `x += 1` + pub struct AugAssignExpr, + SK::AugAssignExpr, +} +impl AugAssignExpr { + /// Returns the expression of the lhs of the aug assignment. + pub fn lhs_expr(&self) -> Option { + support::children(self.syntax()).next() + } + + pub fn op(&self) -> Option { + self.syntax() + .children_with_tokens() + .find_map(ArithBinOp::from_node_or_token) + } + + /// Returns the expression of the rhs of the assignment. + pub fn rhs_expr(&self) -> Option { + support::children(self.syntax()).nth(1) + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash, derive_more::From, derive_more::TryInto)] +pub enum ExprKind { + Lit(LitExpr), + Block(BlockExpr), + Bin(BinExpr), + Un(UnExpr), + Call(CallExpr), + MethodCall(MethodCallExpr), + Path(PathExpr), + RecordInit(RecordInitExpr), + Field(FieldExpr), + Index(IndexExpr), + Tuple(TupleExpr), + Array(ArrayExpr), + ArrayRep(ArrayRepExpr), + If(IfExpr), + Match(MatchExpr), + Paren(ParenExpr), + Assign(AssignExpr), + AugAssign(AugAssignExpr), +} + +ast_node! { + /// `{ label1: expr1, expr2 }` + pub struct FieldList, + SK::RecordFieldList, + IntoIterator +} +ast_node! 
{ + pub struct RecordField, + SK::RecordField, +} +impl RecordField { + /// Returns the name of the field. + pub fn label(&self) -> Option { + support::token(self.syntax(), SK::Ident) + } + + /// Returns the expression of the field. + pub fn expr(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! { + pub struct MatchArmList, + SK::MatchArmList, + IntoIterator +} +ast_node! { + pub struct MatchArm, + SK::MatchArm +} +impl MatchArm { + /// Returns the pattern of the match arm. + pub fn pat(&self) -> Option { + support::child(self.syntax()) + } + + /// Returns the body of the match arm. + pub fn body(&self) -> Option { + support::child(self.syntax()) + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum BinOp { + Arith(ArithBinOp), + Comp(CompBinOp), + Logical(LogicalBinOp), +} + +impl BinOp { + pub fn syntax(&self) -> crate::NodeOrToken { + match self { + BinOp::Arith(op) => op.syntax(), + BinOp::Comp(op) => op.syntax(), + BinOp::Logical(op) => op.syntax(), + } + } + + pub(super) fn from_node_or_token(node_or_token: crate::NodeOrToken) -> Option { + match node_or_token { + rowan::NodeOrToken::Token(token) => Self::from_token(token), + rowan::NodeOrToken::Node(node) => Self::from_node(node), + } + } + pub(super) fn from_token(token: SyntaxToken) -> Option { + ArithBinOp::from_token(token.clone()) + .map(Self::Arith) + .or_else(|| CompBinOp::from_token(token.clone()).map(Self::Comp)) + .or_else(move || LogicalBinOp::from_token(token).map(Self::Logical)) + } + + pub(super) fn from_node(node: SyntaxNode) -> Option { + ArithBinOp::from_node(node.clone()) + .map(Self::Arith) + .or_else(|| CompBinOp::from_node(node).map(Self::Comp)) + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum UnOp { + /// `+` + Plus(SyntaxToken), + /// `-` + Minus(SyntaxToken), + /// `!` + Not(SyntaxToken), + /// `~` + BitNot(SyntaxToken), +} +impl UnOp { + pub fn syntax(&self) -> SyntaxToken { + match self { + UnOp::Plus(token) => token.clone(), + 
UnOp::Minus(token) => token.clone(), + UnOp::Not(token) => token.clone(), + UnOp::BitNot(token) => token.clone(), + } + } + + fn from_token(token: SyntaxToken) -> Option { + match token.kind() { + SK::Plus => Some(Self::Plus(token)), + SK::Minus => Some(Self::Minus(token)), + SK::Not => Some(Self::Not(token)), + SK::Tilde => Some(Self::BitNot(token)), + _ => None, + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum ArithBinOp { + /// `+` + Add(SyntaxToken), + /// `-` + Sub(SyntaxToken), + /// `*` + Mul(SyntaxToken), + /// `/` + Div(SyntaxToken), + /// `%` + Mod(SyntaxToken), + /// `**` + Pow(SyntaxToken), + /// `<<` + LShift(SyntaxNode), + /// `>>` + RShift(SyntaxNode), + /// `&` + BitAnd(SyntaxToken), + /// `|` + BitOr(SyntaxToken), + /// `^` + BitXor(SyntaxToken), +} +impl ArithBinOp { + pub fn syntax(&self) -> crate::NodeOrToken { + match self { + ArithBinOp::Add(token) => token.clone().into(), + ArithBinOp::Sub(token) => token.clone().into(), + ArithBinOp::Mul(token) => token.clone().into(), + ArithBinOp::Div(token) => token.clone().into(), + ArithBinOp::Mod(token) => token.clone().into(), + ArithBinOp::Pow(token) => token.clone().into(), + ArithBinOp::LShift(node) => node.clone().into(), + ArithBinOp::RShift(node) => node.clone().into(), + ArithBinOp::BitAnd(token) => token.clone().into(), + ArithBinOp::BitOr(token) => token.clone().into(), + ArithBinOp::BitXor(token) => token.clone().into(), + } + } + + pub(super) fn from_node_or_token( + node_or_token: rowan::NodeOrToken, + ) -> Option { + match node_or_token { + rowan::NodeOrToken::Token(token) => Self::from_token(token), + rowan::NodeOrToken::Node(node) => Self::from_node(node), + } + } + + // NOTE: We need to have `from_node` because `<<` and `>>` are not primitive + // tokens in our lexer. 
+ pub(super) fn from_node(node: SyntaxNode) -> Option { + match node.kind() { + SK::LShift => Some(Self::LShift(node)), + SK::RShift => Some(Self::RShift(node)), + _ => None, + } + } + + pub(super) fn from_token(token: SyntaxToken) -> Option { + match token.kind() { + SK::Plus => Some(Self::Add(token)), + SK::Minus => Some(Self::Sub(token)), + SK::Star => Some(Self::Mul(token)), + SK::Slash => Some(Self::Div(token)), + SK::Percent => Some(Self::Mod(token)), + SK::Star2 => Some(Self::Pow(token)), + SK::Amp => Some(Self::BitAnd(token)), + SK::Pipe => Some(Self::BitOr(token)), + SK::Hat => Some(Self::BitXor(token)), + _ => None, + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum CompBinOp { + /// `==` + Eq(SyntaxToken), + /// `!=` + NotEq(SyntaxToken), + /// `<` + Lt(SyntaxToken), + /// `<=` + LtEq(SyntaxNode), + /// `>` + Gt(SyntaxToken), + /// `>=` + GtEq(SyntaxNode), +} +impl CompBinOp { + pub fn syntax(&self) -> crate::NodeOrToken { + match self { + CompBinOp::Eq(token) => token.clone().into(), + CompBinOp::NotEq(token) => token.clone().into(), + CompBinOp::Lt(token) => token.clone().into(), + CompBinOp::LtEq(node) => node.clone().into(), + CompBinOp::Gt(token) => token.clone().into(), + CompBinOp::GtEq(node) => node.clone().into(), + } + } + + pub(super) fn from_token(token: SyntaxToken) -> Option { + match token.kind() { + SK::Eq2 => Some(Self::Eq(token)), + SK::NotEq => Some(Self::NotEq(token)), + SK::Lt => Some(Self::Lt(token)), + SK::Gt => Some(Self::Gt(token)), + _ => None, + } + } + + pub(super) fn from_node(node: SyntaxNode) -> Option { + match node.kind() { + SK::LtEq => Some(Self::LtEq(node)), + SK::GtEq => Some(Self::GtEq(node)), + _ => None, + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum LogicalBinOp { + /// `&&` + And(SyntaxToken), + /// `||` + Or(SyntaxToken), +} +impl LogicalBinOp { + pub fn syntax(&self) -> crate::NodeOrToken { + match self { + LogicalBinOp::And(token) => token.clone().into(), + 
LogicalBinOp::Or(token) => token.clone().into(), + } + } + + pub(super) fn from_token(token: SyntaxToken) -> Option { + match token.kind() { + SK::Amp2 => Some(Self::And(token)), + SK::Pipe2 => Some(Self::Or(token)), + _ => None, + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::{ast::*, lexer::Lexer, parser::Parser}; + + use wasm_bindgen_test::wasm_bindgen_test; + + fn parse_expr(source: &str) -> T + where + T: TryFrom, + { + let lexer = Lexer::new(source); + let mut parser = Parser::new(lexer); + crate::parser::expr::parse_expr(&mut parser).unwrap(); + Expr::cast(parser.finish_to_node().0) + .unwrap() + .kind() + .try_into() + .unwrap() + } + + #[test] + #[wasm_bindgen_test] + fn block_expr() { + let source = r#"{ + let a = 1 + let b = a + 2 + return b + }"#; + let block_expr: BlockExpr = parse_expr(source); + assert_eq!(block_expr.stmts().count(), 3); + } + + #[test] + #[wasm_bindgen_test] + fn bin_expr() { + let bin_expr: BinExpr = parse_expr("1 + 2"); + assert!(matches!(bin_expr.lhs().unwrap().kind(), ExprKind::Lit(_))); + assert!(matches!( + bin_expr.op().unwrap(), + BinOp::Arith(ArithBinOp::Add(_)) + )); + assert!(matches!(bin_expr.rhs().unwrap().kind(), ExprKind::Lit(_))); + + let bin_expr: BinExpr = parse_expr("1 <= 2"); + assert!(matches!( + bin_expr.op().unwrap(), + BinOp::Comp(CompBinOp::LtEq(_)) + )); + } + + #[test] + #[wasm_bindgen_test] + fn un_expr() { + let un_expr: UnExpr = parse_expr("-1"); + assert!(matches!(un_expr.op().unwrap(), UnOp::Minus(_))); + assert!(matches!(un_expr.expr().unwrap().kind(), ExprKind::Lit(_))); + } + + #[test] + #[wasm_bindgen_test] + fn call_expr() { + let call_expr: CallExpr = parse_expr("foo(1, label: 2, 3 + 4)"); + let ExprKind::Path(path) = call_expr.callee().unwrap().kind() else { + panic!(); + }; + + assert_eq!( + path.path() + .unwrap() + .segments() + .next() + .unwrap() + .generic_args() + .unwrap() + .into_iter() + .count(), + 2 + ); + + for (i, arg) in 
call_expr.args().unwrap().into_iter().enumerate() { + match i { + 0 => { + assert!(arg.label().is_none()); + assert!(matches!(arg.expr().unwrap().kind(), ExprKind::Lit(_))) + } + 1 => { + assert_eq!(arg.label().unwrap().text(), "label"); + assert!(matches!(arg.expr().unwrap().kind(), ExprKind::Lit(_))) + } + 2 => { + assert!(arg.label().is_none()); + assert!(matches!(arg.expr().unwrap().kind(), ExprKind::Bin(_))) + } + _ => panic!("unexpected arg"), + } + } + } + + #[test] + #[wasm_bindgen_test] + fn method_call_expr() { + let method_call_expr: MethodCallExpr = parse_expr("foo.bar(1, label: 2, 3 + 4)"); + + assert!(matches!( + method_call_expr.receiver().unwrap().kind(), + ExprKind::Path(_) + )); + + assert_eq!(method_call_expr.method_name().unwrap().text(), "bar"); + + assert!(matches!( + method_call_expr + .generic_args() + .unwrap() + .into_iter() + .collect::>() + .len(), + 1 + )); + + for (i, arg) in method_call_expr.args().unwrap().into_iter().enumerate() { + match i { + 0 => { + assert!(arg.label().is_none()); + assert!(matches!(arg.expr().unwrap().kind(), ExprKind::Lit(_))) + } + 1 => { + assert_eq!(arg.label().unwrap().text(), "label"); + assert!(matches!(arg.expr().unwrap().kind(), ExprKind::Lit(_))) + } + 2 => { + assert!(arg.label().is_none()); + assert!(matches!(arg.expr().unwrap().kind(), ExprKind::Bin(_))) + } + _ => panic!("unexpected arg"), + } + } + } + + #[test] + #[wasm_bindgen_test] + fn record_init_expr() { + let record_init_expr: RecordInitExpr = parse_expr("Foo { a: 1, b: 2, c: 3 }"); + + assert!(record_init_expr.path().is_some()); + for (i, field) in record_init_expr.fields().unwrap().into_iter().enumerate() { + match i { + 0 => { + assert_eq!(field.label().unwrap().text(), "a"); + assert!(matches!(field.expr().unwrap().kind(), ExprKind::Lit(_))) + } + 1 => { + assert_eq!(field.label().unwrap().text(), "b"); + assert!(matches!(field.expr().unwrap().kind(), ExprKind::Lit(_))) + } + 2 => { + assert_eq!(field.label().unwrap().text(), "c"); + 
assert!(matches!(field.expr().unwrap().kind(), ExprKind::Lit(_))) + } + _ => panic!("unexpected field"), + } + } + } + + #[test] + #[wasm_bindgen_test] + fn field_expr() { + let field_expr: FieldExpr = parse_expr("foo(1, 2).bar"); + + assert!(matches!( + field_expr.receiver().unwrap().kind(), + ExprKind::Call(_) + )); + assert_eq!(field_expr.field_name().unwrap().text(), "bar"); + + let field_expr: FieldExpr = parse_expr("(1, 2).1"); + + assert!(matches!( + field_expr.receiver().unwrap().kind(), + ExprKind::Tuple(_) + )); + assert_eq!(field_expr.field_index().unwrap().token().text(), "1"); + } + + #[test] + #[wasm_bindgen_test] + fn tuple_expr() { + let tuple_expr: TupleExpr = parse_expr("(1, 2, 3)"); + + for (i, expr) in tuple_expr.elems().flatten().enumerate() { + match i { + 0 => assert!(matches!(expr.kind(), ExprKind::Lit(_))), + 1 => assert!(matches!(expr.kind(), ExprKind::Lit(_))), + 2 => assert!(matches!(expr.kind(), ExprKind::Lit(_))), + _ => panic!("unexpected expr"), + } + } + } + + #[test] + #[wasm_bindgen_test] + fn array_expr() { + let array_expr: ArrayExpr = parse_expr("[1, 2, 3]"); + + for (i, expr) in array_expr.elems().flatten().enumerate() { + match i { + 0 => assert!(matches!(expr.kind(), ExprKind::Lit(_))), + 1 => assert!(matches!(expr.kind(), ExprKind::Lit(_))), + 2 => assert!(matches!(expr.kind(), ExprKind::Lit(_))), + _ => panic!("unexpected expr"), + } + } + } + + #[test] + #[wasm_bindgen_test] + fn index_expr() { + let index_expr: IndexExpr = parse_expr("foo[1]"); + + assert!(matches!( + index_expr.expr().unwrap().kind(), + ExprKind::Path(_) + )); + assert!(matches!( + index_expr.index().unwrap().kind(), + ExprKind::Lit(_) + )); + } + + #[test] + #[wasm_bindgen_test] + fn array_rep_expr() { + let array_rep_expr: ArrayRepExpr = parse_expr("[1; 2]"); + + assert!(matches!( + array_rep_expr.val().unwrap().kind(), + ExprKind::Lit(_) + )); + assert!(matches!( + array_rep_expr.len().unwrap().kind(), + ExprKind::Lit(_) + )); + } + + #[test] + 
#[wasm_bindgen_test] + fn if_expr() { + let if_expr: IfExpr = parse_expr("if true { 1 } else { 2 }"); + assert!(matches!(if_expr.cond().unwrap().kind(), ExprKind::Lit(_))); + assert!(if_expr.then().is_some()); + + let if_expr: IfExpr = parse_expr("if { true } { return } else { continue }"); + if let ExprKind::Block(stmts) = if_expr.cond().unwrap().kind() { + assert!(matches!( + stmts.into_iter().next().unwrap().kind(), + crate::ast::StmtKind::Expr(_) + )) + } else { + panic!("expected block statement"); + }; + matches!( + if_expr.then().unwrap().into_iter().next().unwrap().kind(), + crate::ast::StmtKind::Return(_) + ); + let ExprKind::Block(else_) = if_expr.else_().unwrap().kind() else { + panic!("expected block statement"); + }; + matches!( + else_.into_iter().next().unwrap().kind(), + crate::ast::StmtKind::Return(_) + ); + + let if_expr: IfExpr = parse_expr("if false { return } else if true { continue }"); + assert!(matches!(if_expr.else_().unwrap().kind(), ExprKind::If(_))); + } + + #[test] + #[wasm_bindgen_test] + fn match_expr() { + let source = r#" + match foo { + Foo::Bar => { 2 }, + Bar::Baz(Int) => (4), + _ => 5, + } + }"#; + + let match_expr: MatchExpr = parse_expr(source); + + assert!(matches!( + match_expr.scrutinee().unwrap().kind(), + ExprKind::Path(_) + )); + let mut count = 0; + for arm in match_expr.arms().unwrap() { + match count { + 0 => { + assert!(matches!(arm.pat().unwrap().kind(), PatKind::Path(_))); + assert!(matches!(arm.body().unwrap().kind(), ExprKind::Block(_))); + } + + 1 => { + assert!(matches!(arm.pat().unwrap().kind(), PatKind::PathTuple(_))); + assert!(matches!(arm.body().unwrap().kind(), ExprKind::Paren(_))); + } + + 2 => { + assert!(matches!(arm.pat().unwrap().kind(), PatKind::WildCard(_))); + assert!(matches!(arm.body().unwrap().kind(), ExprKind::Lit(_))); + } + _ => panic!("unexpected arm"), + } + count += 1; + } + assert_eq!(count, 3) + } + #[test] + #[wasm_bindgen_test] + fn assign() { + let assign_expr: AssignExpr = 
parse_expr(r#"Foo{x, y} = foo"#); + + assert!(matches!( + assign_expr.lhs_expr().unwrap().kind(), + ExprKind::RecordInit(_) + )); + assert!(matches!( + assign_expr.rhs_expr().unwrap().kind(), + ExprKind::Path(_) + )); + } + + #[test] + #[wasm_bindgen_test] + fn aug_assign() { + let aug_assign_expr: AugAssignExpr = parse_expr("x += 1"); + assert!(matches!( + aug_assign_expr.lhs_expr().unwrap().kind(), + ExprKind::Path(_) + )); + assert!(matches!( + aug_assign_expr.op().unwrap(), + crate::ast::ArithBinOp::Add(_) + )); + + let aug_assign_expr: AugAssignExpr = parse_expr("x.y <<= 1"); + assert!(matches!( + aug_assign_expr.lhs_expr().unwrap().kind(), + ExprKind::Field(_) + )); + assert!(matches!( + aug_assign_expr.op().unwrap(), + crate::ast::ArithBinOp::LShift(_) + )); + } +} diff --git a/crates/parser2/src/ast/item.rs b/crates/parser2/src/ast/item.rs new file mode 100644 index 0000000000..9626764f15 --- /dev/null +++ b/crates/parser2/src/ast/item.rs @@ -0,0 +1,806 @@ +use rowan::ast::{support, AstNode}; + +use super::{ast_node, TraitRef, TupleType}; +use crate::{FeLang, SyntaxKind as SK, SyntaxToken}; + +ast_node! { + /// The top-level node of the AST tree. + pub struct Root, + SK::Root, +} +impl Root { + pub fn items(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! { + /// A list of items in a module. + pub struct ItemList, + SK::ItemList, + IntoIterator +} + +ast_node! { + /// A single item in a module. + /// Use `[Item::kind]` to get the specific type of item. 
+ pub struct Item, + SK::Item +} +impl Item { + pub fn kind(&self) -> Option { + support::child(self.syntax()) + .map(ItemKind::Mod) + .or_else(|| support::child(self.syntax()).map(ItemKind::Func)) + .or_else(|| support::child(self.syntax()).map(ItemKind::Struct)) + .or_else(|| support::child(self.syntax()).map(ItemKind::Contract)) + .or_else(|| support::child(self.syntax()).map(ItemKind::Enum)) + .or_else(|| support::child(self.syntax()).map(ItemKind::TypeAlias)) + .or_else(|| support::child(self.syntax()).map(ItemKind::Impl)) + .or_else(|| support::child(self.syntax()).map(ItemKind::Trait)) + .or_else(|| support::child(self.syntax()).map(ItemKind::ImplTrait)) + .or_else(|| support::child(self.syntax()).map(ItemKind::Const)) + .or_else(|| support::child(self.syntax()).map(ItemKind::Use)) + .or_else(|| support::child(self.syntax()).map(ItemKind::Extern)) + } +} + +ast_node! { + pub struct Mod, + SK::Mod, +} +impl super::AttrListOwner for Mod {} +impl super::ItemModifierOwner for Mod {} +impl Mod { + /// Returns the name of the function. + pub fn name(&self) -> Option { + support::token(self.syntax(), SK::Ident) + } + + /// Returns the function's parameter list. + pub fn items(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! { + /// `pub fn foo(_ x: T, from u: U) -> T where T: Trait2 { ... }` + pub struct Func, + SK::Func, +} +impl super::GenericParamsOwner for Func {} +impl super::WhereClauseOwner for Func {} +impl super::AttrListOwner for Func {} +impl super::ItemModifierOwner for Func {} +impl Func { + /// Returns the name of the function. + pub fn name(&self) -> Option { + support::token(self.syntax(), SK::Ident) + } + + /// Returns the function's parameter list. + pub fn params(&self) -> Option { + support::child(self.syntax()) + } + + /// Returns the function's return type. + pub fn ret_ty(&self) -> Option { + support::child(self.syntax()) + } + + /// Returns the function's body. 
+ pub fn body(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! { + pub struct Struct, + SK::Struct, +} +impl super::GenericParamsOwner for Struct {} +impl super::WhereClauseOwner for Struct {} +impl super::AttrListOwner for Struct {} +impl super::ItemModifierOwner for Struct {} +impl Struct { + /// Returns the name of the struct. + pub fn name(&self) -> Option { + support::token(self.syntax(), SK::Ident) + } + + /// Returns the struct's field def list. + pub fn fields(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! { + pub struct Contract, + SK::Contract, +} +impl super::AttrListOwner for Contract {} +impl super::ItemModifierOwner for Contract {} +impl Contract { + /// Returns the name of the contract. + pub fn name(&self) -> Option { + support::token(self.syntax(), SK::Ident) + } + + /// Returns the contract's field def list. + pub fn fields(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! { + pub struct Enum, + SK::Enum, +} +impl super::GenericParamsOwner for Enum {} +impl super::WhereClauseOwner for Enum {} +impl super::AttrListOwner for Enum {} +impl super::ItemModifierOwner for Enum {} +impl Enum { + /// Returns the name of the enum. + pub fn name(&self) -> Option { + support::token(self.syntax(), SK::Ident) + } + + /// Returns the enum's variant def list. + pub fn variants(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! { + /// `type Foo = Bar` + pub struct TypeAlias, + SK::TypeAlias, +} +impl super::GenericParamsOwner for TypeAlias {} +impl super::AttrListOwner for TypeAlias {} +impl super::ItemModifierOwner for TypeAlias {} +impl TypeAlias { + /// Returns the name of the type alias. + /// `Foo` in `type Foo = Bar` + pub fn alias(&self) -> Option { + support::token(self.syntax(), SK::Ident) + } + + /// Returns the type alias's type. + /// `Bar` in `type Foo = Bar` + pub fn ty(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! 
{ + /// `trait Foo<..> where .. { .. }` + pub struct Trait, + SK::Trait, +} +impl super::GenericParamsOwner for Trait {} +impl super::WhereClauseOwner for Trait {} +impl super::AttrListOwner for Trait {} +impl super::ItemModifierOwner for Trait {} +impl Trait { + /// Returns the name of the trait. + /// `Foo` in `trait Foo<..> where .. { .. }` + pub fn name(&self) -> Option { + support::token(self.syntax(), SK::Ident) + } + + /// Returns the trait's item list. + /// `{ .. }` in `trait Foo<..> where .. { .. }` + /// NOTE: Currently only supports `fn` items. + pub fn item_list(&self) -> Option { + support::child(self.syntax()) + } + + pub fn super_trait_list(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! { + pub struct SuperTraitList, + SK::SuperTraitList, + IntoIterator +} +impl SuperTraitList { + pub fn colon(&self) -> Option { + support::token(self.syntax(), SK::Colon) + } +} + +ast_node! { + /// `impl Foo::Bar where .. { .. }` + pub struct Impl, + SK::Impl, +} +impl super::GenericParamsOwner for Impl {} +impl super::WhereClauseOwner for Impl {} +impl super::AttrListOwner for Impl {} +impl Impl { + /// Returns the type of the impl. + /// `Foo::Bar` in `impl Foo::Bar where .. { .. }` + pub fn ty(&self) -> Option { + support::child(self.syntax()) + } + + /// Returns the impl item list. + /// `{ .. }` in `impl Foo::Bar where .. { .. }` + /// NOTE: Currently only supports `fn` items. + pub fn item_list(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! { + /// `impl Foo for Bar { .. }` + pub struct ImplTrait, + SK::ImplTrait, +} +impl super::GenericParamsOwner for ImplTrait {} +impl super::WhereClauseOwner for ImplTrait {} +impl super::AttrListOwner for ImplTrait {} +impl ImplTrait { + /// Returns the trait of the impl. + /// `Foo` in `impl Foo for Bar { .. }` + pub fn trait_ref(&self) -> Option { + support::child(self.syntax()) + } + + /// Returns the type of the impl. + /// `Bar` in `impl Foo for Bar { .. 
}` + pub fn ty(&self) -> Option { + support::child(self.syntax()) + } + + /// Returns the trait impl item list. + /// `{ .. }` in `impl Foo for Bar { .. }` + /// NOTE: Currently only supports `fn` items. + pub fn item_list(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! { + /// `const FOO: u32 = 42;` + pub struct Const, + SK::Const, +} +impl super::AttrListOwner for Const {} +impl ItemModifierOwner for Const {} +impl Const { + /// Returns the name of the const. + /// `FOO` in `const FOO: u32 = 42;` + pub fn name(&self) -> Option { + support::token(self.syntax(), SK::Ident) + } + + /// Returns the type of the const. + /// `u32` in `const FOO: u32 = 42;` + pub fn ty(&self) -> Option { + support::child(self.syntax()) + } + + /// Returns the value of the const. + /// `42` in `const FOO: u32 = 42;` + pub fn value(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! { + /// `use foo::{bar, Baz::*}` + pub struct Use, + SK::Use, +} +impl super::AttrListOwner for Use {} +impl ItemModifierOwner for Use {} +impl Use { + /// Returns the use tree. + /// `foo::{bar, Baz::*}` in `use foo::{bar, Baz::*}` + pub fn use_tree(&self) -> Option { + support::child(self.syntax()) + } + + pub fn has_sub_tree(&self) -> bool { + self.use_tree().is_some_and(|it| it.has_subtree()) + } +} + +ast_node! { + /// `extern { .. }` + pub struct Extern, + SK::Extern, +} +impl super::AttrListOwner for Extern {} +impl Extern { + /// Returns the item list. + /// NOTE: Currently only supports `fn` items. + pub fn extern_block(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! { + pub struct RecordFieldDefList, + SK::RecordFieldDefList, + IntoIterator +} +ast_node! { + pub struct RecordFieldDef, + SK::RecordFieldDef, +} +impl super::AttrListOwner for RecordFieldDef {} +impl RecordFieldDef { + /// Returns the pub keyword if exists. 
+ pub fn pub_kw(&self) -> Option { + support::token(self.syntax(), SK::PubKw) + } + + /// Returns the name of the field. + pub fn name(&self) -> Option { + support::token(self.syntax(), SK::Ident) + } + + /// Returns the type of the field. + pub fn ty(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! { + pub struct VariantDefList, + SK::VariantDefList, + IntoIterator +} + +ast_node! { + /// `Foo(i32, u32)` + pub struct VariantDef, + SK::VariantDef, +} +impl super::AttrListOwner for VariantDef {} +impl VariantDef { + /// Returns the name of the variant. + /// `Foo` in `Foo(i32, u32)` + pub fn name(&self) -> Option { + support::token(self.syntax(), SK::Ident) + } + + /// Returns the kind of the variant. + pub fn kind(&self) -> VariantKind { + support::child(self.syntax()) + .map(VariantKind::Tuple) + .or_else(|| support::child(self.syntax()).map(VariantKind::Record)) + .unwrap_or(VariantKind::Unit) + } + + /// Returns the variant's field def list. + pub fn fields(&self) -> Option { + support::child(self.syntax()) + } + + pub fn tuple_type(&self) -> Option { + support::child(self.syntax()) + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum VariantKind { + Unit, + Tuple(TupleType), + Record(RecordFieldDefList), +} + +ast_node! { + pub struct TraitItemList, + SK::TraitItemList, + IntoIterator, +} + +ast_node! { + pub struct ImplItemList, + SK::ImplItemList, + IntoIterator, +} + +ast_node! { + pub struct ImplTraitItemList, + SK::ImplTraitItemList, + IntoIterator, +} + +ast_node! { + pub struct ExternItemList, + SK::ExternItemList, + IntoIterator, +} + +ast_node! { + /// A modifier on an item. 
+ /// `pub unsafe` + pub struct ItemModifier, + SK::ItemModifier, +} +impl ItemModifier { + pub fn pub_kw(&self) -> Option { + support::token(self.syntax(), SK::PubKw) + } + + pub fn unsafe_kw(&self) -> Option { + support::token(self.syntax(), SK::UnsafeKw) + } +} + +pub trait ItemModifierOwner: AstNode { + fn modifier(&self) -> Option { + support::child(self.syntax()) + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash, derive_more::From, derive_more::TryInto)] +pub enum ItemKind { + Mod(Mod), + Func(Func), + Struct(Struct), + Contract(Contract), + Enum(Enum), + TypeAlias(TypeAlias), + Impl(Impl), + Trait(Trait), + ImplTrait(ImplTrait), + Const(Const), + Use(Use), + Extern(Extern), +} + +#[cfg(test)] +mod tests { + use wasm_bindgen_test::wasm_bindgen_test; + + use super::*; + use crate::{ + ast::{prelude::*, ExprKind, TypeKind}, + lexer::Lexer, + parser::{ItemListScope, Parser}, + }; + + fn parse_item(source: &str) -> T + where + T: TryFrom, + { + let lexer = Lexer::new(source); + let mut parser = Parser::new(lexer); + + let _ = parser.parse(ItemListScope::default()); + let (node, errs) = parser.finish_to_node(); + for e in errs { + eprintln!("{:?}", e); + } + let item_list = ItemList::cast(node).unwrap(); + let mut items = item_list.into_iter().collect::>(); + assert_eq!(items.len(), 1); + items.pop().unwrap().kind().unwrap().try_into().unwrap() + } + + #[test] + #[wasm_bindgen_test] + fn mod_() { + let source = r" + pub mod foo { + pub fn bar() {} + pub struct Baz + } + "; + let mod_: Mod = parse_item(source); + assert_eq!(mod_.name().unwrap().text(), "foo"); + let mut i = 0; + for item in mod_.items().unwrap().into_iter() { + match i { + 0 => { + assert!(matches!(item.kind().unwrap(), ItemKind::Func(_))); + let func: Func = item.kind().unwrap().try_into().unwrap(); + assert_eq!(func.name().unwrap().text(), "bar"); + } + 1 => { + assert!(matches!(item.kind().unwrap(), ItemKind::Struct(_))); + let struct_: Struct = item.kind().unwrap().try_into().unwrap(); + 
assert_eq!(struct_.name().unwrap().text(), "Baz"); + } + _ => panic!(), + } + i += 1; + } + + assert_eq!(i, 2); + } + + #[test] + #[wasm_bindgen_test] + fn func() { + let source = r#" + /// This is doc comment + #evm + pub unsafe fn foo(_ x: T, from u: U) -> (T, U) where T: Trait2 { return } + "#; + let func: Func = parse_item(source); + + assert_eq!(func.name().unwrap().text(), "foo"); + assert_eq!(func.attr_list().unwrap().iter().count(), 2); + assert_eq!(func.generic_params().unwrap().iter().count(), 2); + assert!(func.where_clause().is_some()); + assert!(func.body().is_some()); + assert!(matches!(func.ret_ty().unwrap().kind(), TypeKind::Tuple(_))); + let modifier = func.modifier().unwrap(); + assert!(modifier.pub_kw().is_some()); + assert!(modifier.unsafe_kw().is_some()); + } + + #[test] + #[wasm_bindgen_test] + fn struct_() { + let source = r#" + pub struct Foo where T: Trait2 { + pub x: T, + y: (U, i32), + } + "#; + let s: Struct = parse_item(source); + assert_eq!(s.name().unwrap().text(), "Foo"); + let mut count = 0; + for field in s.fields().unwrap() { + match count { + 0 => { + assert!(field.pub_kw().is_some()); + assert_eq!(field.name().unwrap().text(), "x"); + assert!(matches!(field.ty().unwrap().kind(), TypeKind::Path(_))); + } + 1 => { + assert!(field.pub_kw().is_none()); + assert_eq!(field.name().unwrap().text(), "y"); + assert!(matches!(field.ty().unwrap().kind(), TypeKind::Tuple(_))); + } + _ => unreachable!(), + } + count += 1; + } + assert_eq!(count, 2); + } + + #[test] + #[wasm_bindgen_test] + fn contract() { + let source = r#" + pub contract Foo { + pub x: u32, + y: (i32, u32), + } + "#; + let c: Contract = parse_item(source); + assert_eq!(c.name().unwrap().text(), "Foo"); + let mut count = 0; + for field in c.fields().unwrap() { + match count { + 0 => { + assert!(field.pub_kw().is_some()); + assert_eq!(field.name().unwrap().text(), "x"); + assert!(matches!(field.ty().unwrap().kind(), TypeKind::Path(_))); + } + 1 => { + 
assert!(field.pub_kw().is_none()); + assert_eq!(field.name().unwrap().text(), "y"); + assert!(matches!(field.ty().unwrap().kind(), TypeKind::Tuple(_))); + } + _ => unreachable!(), + } + count += 1; + } + assert_eq!(count, 2); + } + + #[test] + #[wasm_bindgen_test] + fn enum_() { + let source = r#" + pub enum Foo where T: Trait2 { + Bar + Baz(T, U) + Bux { + x: i8 + y: i8 + } + } + "#; + let e: Enum = parse_item(source); + assert_eq!(e.name().unwrap().text(), "Foo"); + + let mut count = 0; + for variant in e.variants().unwrap() { + match count { + 0 => { + assert_eq!(variant.name().unwrap().text(), "Bar"); + assert_eq!(variant.kind(), VariantKind::Unit); + } + 1 => { + assert_eq!(variant.name().unwrap().text(), "Baz"); + assert!(matches!(variant.kind(), VariantKind::Tuple(_))); + } + 2 => { + assert_eq!(variant.name().unwrap().text(), "Bux"); + assert!(matches!(variant.kind(), VariantKind::Record(_))); + } + _ => unreachable!(), + } + count += 1; + } + assert_eq!(count, 3); + } + + #[test] + #[wasm_bindgen_test] + fn type_() { + let source = r#" + type MyError where T: Debug = Error + "#; + let t: TypeAlias = parse_item(source); + assert_eq!(t.alias().unwrap().text(), "MyError"); + assert!(matches!(t.ty().unwrap().kind(), TypeKind::Path(_))); + } + + #[test] + #[wasm_bindgen_test] + fn impl_() { + let source = r#" + impl Foo { + pub fn foo(self, t: T) -> T { return t } + pub fn bar(self) -> u32 { return 1 } + pub fn baz(mut self) { self.x = 1 } + } + "#; + let i: Impl = parse_item(source); + assert!(matches!(i.ty().unwrap().kind(), TypeKind::Path(_))); + assert_eq!(i.item_list().unwrap().iter().count(), 3); + } + + #[test] + #[wasm_bindgen_test] + fn trait_() { + let source = r#" + pub trait Foo { + pub fn foo(self, t: T) -> T + pub fn default(self) -> u32 { return 1 } + } + "#; + let t: Trait = parse_item(source); + assert_eq!(t.name().unwrap().text(), "Foo"); + + let mut count = 0; + for f in t.item_list().unwrap() { + match count { + 0 => { + 
assert!(f.body().is_none()); + } + 1 => { + assert!(f.body().is_some()); + } + _ => unreachable!(), + } + count += 1; + } + assert_eq!(count, 2); + } + + #[test] + #[wasm_bindgen_test] + fn impl_trait() { + let source = r#" + impl Trait::Foo for (i32) { + fn foo(self, _t: T) -> u32 { return 1 }; + }"#; + let i: ImplTrait = parse_item(source); + assert!(i.generic_params().is_none()); + assert!(i.trait_ref().is_some()); + assert!(matches!(i.ty().unwrap().kind(), TypeKind::Tuple(_))); + assert!(i.item_list().unwrap().iter().count() == 1); + } + + #[test] + #[wasm_bindgen_test] + fn const_() { + let source = r#" + pub const FOO: u32 = 1 + 1 + "#; + let c: Const = parse_item(source); + assert_eq!(c.name().unwrap().text(), "FOO"); + assert!(matches!(c.ty().unwrap().kind(), TypeKind::Path(_))); + assert!(matches!(c.value().unwrap().kind(), ExprKind::Bin(_))); + } + + #[test] + #[wasm_bindgen_test] + fn use_() { + let source = r#" + use foo::bar::{bar::*, baz::{Baz, Baz2}} + "#; + let u: Use = parse_item(source); + let use_tree = u.use_tree().unwrap(); + let mut count = 0; + for segment in use_tree.path().unwrap() { + match count { + 0 => { + assert_eq!(segment.ident().unwrap().text(), "foo"); + } + 1 => { + assert_eq!(segment.ident().unwrap().text(), "bar"); + } + _ => unreachable!(), + } + count += 1; + } + + count = 0; + let children = use_tree.children().unwrap(); + for child in children { + match count { + 0 => { + let mut segments = child.path().unwrap().iter(); + assert_eq!(segments.next().unwrap().ident().unwrap().text(), "bar"); + assert!(segments.next().unwrap().glob().is_some()); + assert!(segments.next().is_none()); + assert!(child.children().is_none()); + } + 1 => { + let mut segments = child.path().unwrap().iter(); + assert_eq!(segments.next().unwrap().ident().unwrap().text(), "baz"); + assert!(child.children().unwrap().iter().count() == 2); + } + _ => unreachable!(), + } + count += 1; + } + assert_eq!(count, 2); + + let source = r#" + use {foo as _foo, 
bar::Baz as _} + "#; + let u: Use = parse_item(source); + let use_tree = u.use_tree().unwrap(); + assert!(use_tree.path().is_none()); + let mut count = 0; + for child in use_tree.children().unwrap() { + match count { + 0 => { + let alias = child.alias().unwrap(); + assert_eq!(alias.ident().unwrap().text(), "_foo"); + } + 1 => { + let alias = child.alias().unwrap(); + assert!(alias.underscore().is_some()); + } + _ => unreachable!(), + } + count += 1; + } + assert_eq!(count, 2); + } + + #[test] + #[wasm_bindgen_test] + fn extern_() { + let source = r#" + extern { + pub unsafe fn foo() + pub unsafe fn bar() + }"#; + let e: Extern = parse_item(source); + + for f in e.extern_block().unwrap() { + assert!(f.body().is_none()); + } + assert_eq!(e.extern_block().unwrap().iter().count(), 2); + } +} diff --git a/crates/parser2/src/ast/lit.rs b/crates/parser2/src/ast/lit.rs new file mode 100644 index 0000000000..c88fcbc643 --- /dev/null +++ b/crates/parser2/src/ast/lit.rs @@ -0,0 +1,58 @@ +use rowan::ast::AstNode; + +use crate::{syntax_kind::SyntaxKind as SK, SyntaxToken}; + +use super::ast_node; + +ast_node! 
{ + pub struct Lit, + SK::Lit +} +impl Lit { + pub fn kind(&self) -> LitKind { + let token = self.syntax().first_token().unwrap(); + match token.kind() { + SK::Int => LitKind::Int(LitInt { token }), + SK::TrueKw | SK::FalseKw => LitKind::Bool(LitBool { token }), + SK::String => LitKind::String(LitString { token }), + _ => unreachable!(), + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct LitInt { + pub(super) token: SyntaxToken, +} +impl LitInt { + pub fn token(&self) -> &SyntaxToken { + &self.token + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct LitBool { + token: SyntaxToken, +} +impl LitBool { + pub fn token(&self) -> &SyntaxToken { + &self.token + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct LitString { + token: SyntaxToken, +} +impl LitString { + pub fn token(&self) -> &SyntaxToken { + &self.token + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash, derive_more::From, derive_more::TryInto)] +pub enum LitKind { + Int(LitInt), + Bool(LitBool), + String(LitString), +} diff --git a/crates/parser2/src/ast/mod.rs b/crates/parser2/src/ast/mod.rs new file mode 100644 index 0000000000..c5486f6083 --- /dev/null +++ b/crates/parser2/src/ast/mod.rs @@ -0,0 +1,115 @@ +pub mod attr; +pub mod expr; +pub mod item; +pub mod lit; +pub mod param; +pub mod pat; +pub mod path; +pub mod stmt; +pub mod types; +pub mod use_tree; + +pub use attr::*; +pub use expr::*; +pub use item::*; +pub use lit::*; +pub use param::*; +pub use pat::*; +pub use path::*; +pub use stmt::*; +pub use types::*; +pub use use_tree::*; + +pub type AstChildren = rowan::ast::AstChildren; +pub type SyntaxText = rowan::SyntaxText; +pub type AstPtr = rowan::ast::AstPtr; +pub type SyntaxNodePtr = rowan::ast::SyntaxNodePtr; + +pub mod prelude { + pub use super::{ + AttrListOwner, GenericArgsOwner, GenericParamsOwner, ItemModifierOwner, WhereClauseOwner, + }; + pub use rowan::ast::AstNode; +} + +macro_rules! 
ast_node { + ( + $(#[$attrs: meta])* + $visibility: vis struct $name: ident $({ + $($field_vis: vis $field: ident: $ty: ty),* + })?, + $kind: pat $(,)? + ) => { + $(#[$attrs])* + #[derive(Debug, Clone, PartialEq, Eq, Hash)] + $visibility struct $name { + __syntax: crate::SyntaxNode, + $($($field: $ty),*)? + } + + impl rowan::ast::AstNode for $name { + type Language = $crate::FeLang; + fn can_cast(node: crate::SyntaxKind) -> bool { + matches!(node, $kind) + } + fn cast(node: crate::SyntaxNode) -> Option { + Self::can_cast(node.kind()).then(|| Self{ + __syntax: node.into(), + $($($field: Default::default(),)*)? + }) + } + fn syntax(&self) -> &crate::SyntaxNode { + &self.__syntax + } + } + + impl std::fmt::Display for $name { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + std::fmt::Display::fmt(rowan::ast::AstNode::syntax(self), f) + } + } + }; + ( + $(#[$attrs: meta])* + $visibility: vis struct $name: ident $({ + $($field_vis: vis $field: ident: $ty: ty),* + })?, + $kind: pat, + IntoIterator $(,)? + ) => { + ast_node!{ + $(#[$attrs])* + $visibility struct $name $({ + $($field_vis $field: $ty),* + })?, + $kind + } + impl IntoIterator for $name { + type Item = $item_ty; + type IntoIter = crate::ast::AstChildren<$item_ty>; + + fn into_iter(self) -> Self::IntoIter { + rowan::ast::support::children(rowan::ast::AstNode::syntax(&self)) + } + } + impl IntoIterator for &$name { + type Item = $item_ty; + type IntoIter = crate::ast::AstChildren<$item_ty>; + + fn into_iter(self) -> Self::IntoIter { + rowan::ast::support::children(rowan::ast::AstNode::syntax(self)) + } + } + + impl $name { + /// Returns an iterator over the children of this node. 
+ pub fn iter(&self) -> crate::ast::AstChildren<$item_ty> { + self.into_iter() + } + } + }; +} + +use ast_node; + +use crate::FeLang; diff --git a/crates/parser2/src/ast/param.rs b/crates/parser2/src/ast/param.rs new file mode 100644 index 0000000000..a8aa0bb45e --- /dev/null +++ b/crates/parser2/src/ast/param.rs @@ -0,0 +1,578 @@ +use rowan::ast::{support, AstNode}; + +use super::ast_node; +use crate::{FeLang, SyntaxKind as SK, SyntaxToken}; + +ast_node! { + /// A list of parameters. + /// `(self, a: u256, b: u256)` + pub struct FuncParamList, + SK::FuncParamList, + IntoIterator, +} + +ast_node! { + /// A single parameter. + /// `self` + /// `label a: u256` + pub struct FuncParam, + SK::FnParam, +} +impl FuncParam { + /// Returns the `mut` keyword if the parameter is mutable. + pub fn mut_token(&self) -> Option { + support::token(self.syntax(), SK::MutKw) + } + + /// Returns the `label` if the parameter is labeled. + /// `label` in `label a: u256`. + pub fn label(&self) -> Option { + self.syntax() + .children_with_tokens() + .find_map(|child| match child { + rowan::NodeOrToken::Token(token) => FuncParamLabel::from_token(token), + _ => None, + }) + } + + /// Returns the name of the parameter. + /// `a` in `label a: u256`. + pub fn name(&self) -> Option { + let mut param_names = self.syntax().children_with_tokens().filter_map(|child| { + if let rowan::NodeOrToken::Token(token) = child { + FuncParamName::from_token(token) + } else { + None + } + }); + + let first = param_names.next(); + match param_names.next() { + Some(second) => Some(second), + None => first, + } + } + + /// Returns the type of the parameter. + /// `u256` in `a: u256`. + pub fn ty(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! { + /// A list of generic parameters. + /// `` + pub struct GenericParamList, + SK::GenericParamList, + IntoIterator, + +} + +ast_node! { + /// A generic parameter. 
+ /// `T` + /// `T: Trait` + /// `const N: usize` + pub struct GenericParam, + SK::TypeGenericParam | SK::ConstGenericParam, +} +impl GenericParam { + /// Returns the specific kind of the generic parameter. + pub fn kind(&self) -> GenericParamKind { + match self.syntax().kind() { + SK::TypeGenericParam => { + GenericParamKind::Type(AstNode::cast(self.syntax().clone()).unwrap()) + } + SK::ConstGenericParam => { + GenericParamKind::Const(AstNode::cast(self.syntax().clone()).unwrap()) + } + _ => unreachable!(), + } + } +} + +/// A generic parameter kind. +/// `Type` is either `T` or `T: Trait`. +/// `Const` is `const N: usize`. +#[derive(Debug, Clone, PartialEq, Eq, Hash, derive_more::From, derive_more::TryInto)] +pub enum GenericParamKind { + Type(TypeGenericParam), + Const(ConstGenericParam), +} + +impl GenericParamKind { + pub fn syntax(&self) -> &rowan::SyntaxNode { + match self { + GenericParamKind::Type(param) => param.syntax(), + GenericParamKind::Const(param) => param.syntax(), + } + } +} + +ast_node! { + /// `(label1: arg1, arg2, ..)` + pub struct CallArgList, + SK::CallArgList, + IntoIterator, +} + +ast_node! { + /// `label1: arg1` + pub struct CallArg, + SK::CallArg, +} +impl CallArg { + /// Returns the label of the argument. + /// `label1` in `label1: arg1`. + pub fn label(&self) -> Option { + support::token(self.syntax(), SK::Ident) + } + + /// Returns the expression of the argument. + /// `arg1` in `label1: arg1`. + pub fn expr(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! { + /// A type generic parameter. + /// `T` + /// `T: Trait` + pub struct TypeGenericParam, + SK::TypeGenericParam, +} +impl TypeGenericParam { + pub fn name(&self) -> Option { + support::token(self.syntax(), SK::Ident) + } + + pub fn bounds(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! { + /// A const generic parameter. + /// `const N: usize`. 
+ pub struct ConstGenericParam, + SK::ConstGenericParam, +} +impl ConstGenericParam { + /// Returns the name of the const generic parameter. + pub fn name(&self) -> Option { + support::token(self.syntax(), SK::Ident) + } + + pub fn const_kw(&self) -> Option { + support::token(self.syntax(), SK::ConstKw) + } + + /// Returns the type of the const generic parameter. + pub fn ty(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! { + /// A list of generic arguments. + /// `, + +} + +ast_node! { + /// A generic argument. + /// `T` + /// `T: Trait` + /// `{expr}` + /// `lit` + pub struct GenericArg, + SK::TypeGenericArg | SK::ConstGenericArg, +} +impl GenericArg { + pub fn kind(&self) -> GenericArgKind { + match self.syntax().kind() { + SK::TypeGenericArg => { + GenericArgKind::Type(AstNode::cast(self.syntax().clone()).unwrap()) + } + SK::ConstGenericArg => { + GenericArgKind::Const(AstNode::cast(self.syntax().clone()).unwrap()) + } + _ => unreachable!(), + } + } +} + +ast_node! { + pub struct TypeGenericArg, + SK::TypeGenericArg, +} +impl TypeGenericArg { + pub fn ty(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! { + pub struct ConstGenericArg, + SK::ConstGenericArg, +} +impl ConstGenericArg { + pub fn expr(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! { + /// `where T: Trait` + pub struct WhereClause, + SK::WhereClause, + IntoIterator, +} +impl WhereClause { + pub fn where_kw(&self) -> Option { + support::token(self.syntax(), SK::WhereKw) + } +} + +ast_node! { + /// `T: Trait` + pub struct WherePredicate, + SK::WherePredicate, +} +impl WherePredicate { + /// Returns `T` in `T: Trait`. + pub fn ty(&self) -> Option { + support::child(self.syntax()) + } + + /// Returns `Trait` in `T: Trait`. + pub fn bounds(&self) -> Option { + support::child(self.syntax()) + } +} + +/// A generic argument kind. +/// `Type` is either `Type` or `T: Trait`. +/// `Const` is either `{expr}` or `lit`. 
+#[derive(Debug, Clone, PartialEq, Eq, Hash, derive_more::From, derive_more::TryInto)] +pub enum GenericArgKind { + Type(TypeGenericArg), + Const(ConstGenericArg), +} + +ast_node! { + /// A type bound list. + /// `: Trait + Trait2` + pub struct TypeBoundList, + SK::TypeBoundList, + IntoIterator, +} + +ast_node! { + /// A type bound. + /// `Trait` + /// `Trait` + /// `(* -> *) -> *` + pub struct TypeBound, + SK::TypeBound, +} +impl TypeBound { + /// A path of the type bound. + pub fn trait_bound(&self) -> Option { + support::child(self.syntax()) + } + + pub fn kind_bound(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! { + pub struct TraitRef, + SK::TraitRef +} +impl TraitRef { + /// A path to the trait. + pub fn path(&self) -> Option { + support::child(self.syntax()) + } + + /// A generic argument list for the trait. + pub fn generic_args(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! { + pub struct KindBound, + SK::KindBoundAbs | SK::KindBoundMono +} +impl KindBound { + pub fn mono(&self) -> Option { + match self.syntax().kind() { + SK::KindBoundMono => Some(KindBoundMono::cast(self.syntax().clone()).unwrap()), + _ => None, + } + } + + pub fn abs(&self) -> Option { + match self.syntax().kind() { + SK::KindBoundAbs => Some(KindBoundAbs::cast(self.syntax().clone()).unwrap()), + _ => None, + } + } +} + +ast_node! { + pub struct KindBoundMono, + SK::KindBoundMono, +} + +ast_node! { + pub struct KindBoundAbs, + SK::KindBoundAbs, +} +impl KindBoundAbs { + pub fn lhs(&self) -> Option { + support::child(self.syntax()) + } + + pub fn rhs(&self) -> Option { + support::children(self.syntax()).nth(1) + } + + pub fn arrow(&self) -> Option { + support::token(self.syntax(), SK::Arrow) + } +} + +#[derive(Debug, Clone)] +pub enum KindBoundVariant { + /// `*` + Mono(KindBoundMono), + /// `KindBound -> KindBound` + Abs(KindBoundAbs), +} + +/// A trait for AST nodes that can have generic parameters. 
+pub trait GenericParamsOwner: AstNode { + /// Returns the generic parameter list of the node. + fn generic_params(&self) -> Option { + support::child(self.syntax()) + } +} + +/// A trait for AST nodes that can have generic arguments. +pub trait GenericArgsOwner: AstNode { + /// Returns the generic argument list of the node. + fn generic_args(&self) -> Option { + support::child(self.syntax()) + } +} + +/// A trait for AST nodes that can have a where clause. +pub trait WhereClauseOwner: AstNode { + /// Returns the where clause of the node. + fn where_clause(&self) -> Option { + support::child(self.syntax()) + } +} + +pub enum FuncParamLabel { + /// `label` in `label a: u256` + Ident(SyntaxToken), + /// `_` in `_ a: u256`. + Underscore(SyntaxToken), +} +impl FuncParamLabel { + pub fn syntax(&self) -> SyntaxToken { + match self { + FuncParamLabel::Ident(token) => token, + FuncParamLabel::Underscore(token) => token, + } + .clone() + } + + fn from_token(token: SyntaxToken) -> Option { + match token.kind() { + SK::Ident => Some(FuncParamLabel::Ident(token)), + SK::Underscore => Some(FuncParamLabel::Underscore(token)), + _ => None, + } + } +} + +pub enum FuncParamName { + /// `a` in `label a: u256` + Ident(SyntaxToken), + /// `self` parameter. + SelfParam(SyntaxToken), + /// `_` parameter. 
+ Underscore(SyntaxToken), +} +impl FuncParamName { + pub fn syntax(&self) -> SyntaxToken { + match self { + FuncParamName::Ident(token) => token, + FuncParamName::SelfParam(token) => token, + FuncParamName::Underscore(token) => token, + } + .clone() + } + + fn from_token(token: SyntaxToken) -> Option { + match token.kind() { + SK::Ident => Some(FuncParamName::Ident(token)), + SK::SelfKw => Some(FuncParamName::SelfParam(token)), + SK::Underscore => Some(FuncParamName::Underscore(token)), + _ => None, + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::{ + ast::TypeKind, + lexer::Lexer, + parser::{ + param::{GenericArgListScope, GenericParamListScope, WhereClauseScope}, + Parser, + }, + }; + + use wasm_bindgen_test::wasm_bindgen_test; + + fn parse_generic_params(source: &str) -> GenericParamList { + let lexer = Lexer::new(source); + let mut parser = Parser::new(lexer); + parser.parse(GenericParamListScope::default()).unwrap(); + GenericParamList::cast(parser.finish_to_node().0).unwrap() + } + + fn parse_generic_arg(source: &str) -> GenericArgList { + let lexer = Lexer::new(source); + let mut parser = Parser::new(lexer); + parser.parse(GenericArgListScope::default()).unwrap(); + GenericArgList::cast(parser.finish_to_node().0).unwrap() + } + + fn parse_where_clause(source: &str) -> WhereClause { + let lexer = Lexer::new(source); + let mut parser = Parser::new(lexer); + parser.parse(WhereClauseScope::default()).unwrap(); + WhereClause::cast(parser.finish_to_node().0).unwrap() + } + + #[test] + #[wasm_bindgen_test] + fn generic_param() { + let source = r#", U, const N: usize>"#; + let gp = parse_generic_params(source); + let mut params = gp.into_iter(); + + let GenericParamKind::Type(p1) = params.next().unwrap().kind() else { + panic!("expected type param"); + }; + assert_eq!(p1.name().unwrap().text(), "T"); + let p1_bounds = p1.bounds().unwrap(); + let mut p1_bounds = p1_bounds.iter(); + + assert_eq!( + p1_bounds + .next() + .unwrap() + .trait_bound() 
+ .unwrap() + .path() + .unwrap() + .segments() + .next() + .unwrap() + .ident() + .unwrap() + .text(), + "Trait" + ); + let p1_bounds_trait2 = p1_bounds.next().unwrap(); + + assert_eq!( + p1_bounds_trait2 + .trait_bound() + .unwrap() + .path() + .unwrap() + .segments() + .next() + .unwrap() + .ident() + .unwrap() + .text(), + "Trait2" + ); + + let GenericParamKind::Type(p2) = params.next().unwrap().kind() else { + panic!("expected type param"); + }; + assert_eq!(p2.name().unwrap().text(), "U"); + + let GenericParamKind::Const(p3) = params.next().unwrap().kind() else { + panic!("expected const param"); + }; + assert_eq!(p3.name().unwrap().text(), "N"); + assert!(p3.ty().is_some()); + } + + #[test] + #[wasm_bindgen_test] + fn generic_arg() { + let source = r#""#; + let ga = parse_generic_arg(source); + let mut args = ga.iter(); + + let GenericArgKind::Type(_) = args.next().unwrap().kind() else { + panic!("expected type arg"); + }; + let GenericArgKind::Const(a2) = args.next().unwrap().kind() else { + panic!("expected const arg"); + }; + assert!(a2.expr().is_some()); + } + + #[test] + #[wasm_bindgen_test] + fn where_clause() { + let source = r#"where + T: Trait + Trait2 + *U: Trait3 + (T, U): Trait4 + Trait5 + "#; + let wc = parse_where_clause(source); + let mut count = 0; + for pred in wc { + match count { + 0 => { + assert!(matches!(pred.ty().unwrap().kind(), TypeKind::Path(_))); + assert_eq!(pred.bounds().unwrap().iter().count(), 2); + } + 1 => { + assert!(matches!(pred.ty().unwrap().kind(), TypeKind::Ptr(_))); + assert_eq!(pred.bounds().unwrap().iter().count(), 1); + } + 2 => { + assert!(matches!(pred.ty().unwrap().kind(), TypeKind::Tuple(_))); + assert_eq!(pred.bounds().unwrap().iter().count(), 2); + } + _ => panic!("unexpected predicate"), + } + count += 1; + } + assert!(count == 3); + } +} diff --git a/crates/parser2/src/ast/pat.rs b/crates/parser2/src/ast/pat.rs new file mode 100644 index 0000000000..73a8c0bc16 --- /dev/null +++ 
b/crates/parser2/src/ast/pat.rs @@ -0,0 +1,306 @@ +use rowan::ast::{support, AstNode}; + +use super::ast_node; +use crate::{SyntaxKind as SK, SyntaxToken}; + +ast_node! { + /// A pattern. + /// Use [`Self::kind`] to get the specific kind of the pattern. + pub struct Pat, + SK::WildCardPat + | SK::RestPat + | SK::LitPat + | SK::TuplePat + | SK::PathPat + | SK::PathTuplePat + | SK::RecordPat + | SK::OrPat +} +impl Pat { + /// Returns the specific kind of the pattern. + pub fn kind(&self) -> PatKind { + match self.syntax().kind() { + SK::WildCardPat => PatKind::WildCard(AstNode::cast(self.syntax().clone()).unwrap()), + SK::RestPat => PatKind::Rest(AstNode::cast(self.syntax().clone()).unwrap()), + SK::LitPat => PatKind::Lit(AstNode::cast(self.syntax().clone()).unwrap()), + SK::TuplePat => PatKind::Tuple(AstNode::cast(self.syntax().clone()).unwrap()), + SK::PathPat => PatKind::Path(AstNode::cast(self.syntax().clone()).unwrap()), + SK::PathTuplePat => { + PatKind::PathTuple(PathTuplePat::cast(self.syntax().clone()).unwrap()) + } + SK::RecordPat => PatKind::Record(AstNode::cast(self.syntax().clone()).unwrap()), + SK::OrPat => PatKind::Or(AstNode::cast(self.syntax().clone()).unwrap()), + _ => unreachable!(), + } + } +} + +ast_node! { + /// `_` + pub struct WildCardPat, + SK::WildCardPat, +} + +ast_node! { + /// `..` + pub struct RestPat, + SK::RestPat, +} + +ast_node! { + /// `1` + pub struct LitPat, + SK::LitPat, +} +impl LitPat { + /// Returns the underlying literal. + pub fn lit(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! { + /// `(Foo::Bar, 1, ..)` + pub struct TuplePat, + SK::TuplePat, +} +impl TuplePat { + pub fn elems(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! { + /// `(Foo::Bar, 1, ..)` + pub struct TuplePatElemList, + SK::TuplePatElemList, + IntoIterator +} + +ast_node! 
{ + /// `Foo::Bar` + pub struct PathPat, + SK::PathPat, +} +impl PathPat { + pub fn path(&self) -> Option { + support::child(self.syntax()) + } + + /// Returns the `mut` keyword if the patter is mutable. + pub fn mut_token(&self) -> Option { + support::token(self.syntax(), SK::MutKw) + } +} + +ast_node! { + /// `Foo::Bar(1, 2)` + pub struct PathTuplePat, + SK::PathTuplePat, +} +impl PathTuplePat { + pub fn path(&self) -> Option { + support::child(self.syntax()) + } + pub fn elems(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! { + /// `Foo::Bar{a: 1, b: Foo::baz, c} + pub struct RecordPat, + SK::RecordPat, +} +impl RecordPat { + pub fn path(&self) -> Option { + support::child(self.syntax()) + } + + pub fn fields(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! { + /// `{a: 1, b: Foo::baz, c}` + pub struct RecordPatFieldList, + SK::RecordPatFieldList, + IntoIterator +} + +ast_node! { + /// `a: 1` + pub struct RecordPatField, + SK::RecordPatField, +} +impl RecordPatField { + /// Returns the field name. + pub fn name(&self) -> Option { + support::token(self.syntax(), SK::Ident) + } + + /// Returns the field pattern. + pub fn pat(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! { + /// `Foo::Bar | 1` + pub struct OrPat, + SK::OrPat, +} +impl OrPat { + pub fn lhs(&self) -> Option { + support::child(self.syntax()) + } + pub fn rhs(&self) -> Option { + support::children(self.syntax()).nth(1) + } +} + +/// A specific pattern kind. 
+#[derive(Debug, Clone, PartialEq, Eq, Hash, derive_more::From, derive_more::TryInto)] +pub enum PatKind { + WildCard(WildCardPat), + Rest(RestPat), + Lit(LitPat), + Tuple(TuplePat), + Path(PathPat), + PathTuple(PathTuplePat), + Record(RecordPat), + Or(OrPat), +} + +#[cfg(test)] +mod tests { + use wasm_bindgen_test::wasm_bindgen_test; + + use super::*; + use crate::{lexer::Lexer, parser::Parser}; + + fn parse_pat(source: &str) -> T + where + T: TryFrom, + { + let lexer = Lexer::new(source); + let mut parser = Parser::new(lexer); + crate::parser::pat::parse_pat(&mut parser).unwrap(); + Pat::cast(parser.finish_to_node().0) + .unwrap() + .kind() + .try_into() + .unwrap() + } + + #[test] + #[wasm_bindgen_test] + fn wildcard() { + let _: WildCardPat = parse_pat("_"); + } + + #[test] + #[wasm_bindgen_test] + fn rest() { + let _: RestPat = parse_pat(".."); + } + + #[test] + #[wasm_bindgen_test] + fn lit() { + let _: LitPat = parse_pat("0x1"); + let _: LitPat = parse_pat("true"); + let _: LitPat = parse_pat(r#""foo""#); + } + + #[test] + #[wasm_bindgen_test] + fn tuple() { + let source = r#"(Foo::Bar, true, ..)"#; + let tuple_pat: TuplePat = parse_pat(source); + + for (i, pat) in tuple_pat.elems().unwrap().iter().enumerate() { + match i { + 0 => assert!(matches!(pat.kind(), PatKind::Path(_))), + 1 => assert!(matches!(pat.kind(), PatKind::Lit(_))), + 2 => assert!(matches!(pat.kind(), PatKind::Rest(_))), + _ => panic!("unexpected tuple pat"), + } + } + + let tuple_pat: TuplePat = parse_pat("()"); + assert!(tuple_pat.elems().unwrap().iter().next().is_none()); + } + + #[test] + #[wasm_bindgen_test] + fn path_tuple() { + let source = r#"Self::Bar(1, Foo::Bar)"#; + let path_tuple_pat: PathTuplePat = parse_pat(source); + + for (i, seg) in path_tuple_pat.path().unwrap().segments().enumerate() { + match i { + 0 => assert!(seg.is_self_ty()), + 1 => assert_eq!(seg.ident().unwrap().text(), "Bar"), + _ => panic!("unexpected path tuple pat"), + } + } + + for (i, pat) in 
path_tuple_pat.elems().unwrap().iter().enumerate() { + match i { + 0 => assert!(matches!(pat.kind(), PatKind::Lit(_))), + 1 => assert!(matches!(pat.kind(), PatKind::Path(_))), + _ => panic!("unexpected path tuple pat"), + } + } + } + + #[test] + #[wasm_bindgen_test] + fn record() { + let source = r#"Foo::Bar{a: 1, b: Foo::baz, mut c}"#; + let record_pat: RecordPat = parse_pat(source); + + for (i, seg) in record_pat.path().unwrap().segments().enumerate() { + match i { + 0 => assert_eq!(seg.ident().unwrap().text(), "Foo"), + 1 => assert_eq!(seg.ident().unwrap().text(), "Bar"), + _ => panic!("unexpected record pat"), + } + } + + for (i, field) in record_pat.fields().unwrap().iter().enumerate() { + match i { + 0 => { + assert_eq!(field.name().unwrap().text(), "a"); + assert!(matches!(field.pat().unwrap().kind(), PatKind::Lit(_))); + } + 1 => { + assert_eq!(field.name().unwrap().text(), "b"); + assert!(matches!(field.pat().unwrap().kind(), PatKind::Path(_))); + } + 2 => { + let PatKind::Path(pat) = field.pat().unwrap().kind() else { + panic!("unexpected record pat"); + }; + + assert!(field.name().is_none()); + assert!(matches!(field.pat().unwrap().kind(), PatKind::Path(_))); + assert!(pat.mut_token().is_some()); + } + _ => panic!("unexpected record pat"), + } + } + } + + #[test] + #[wasm_bindgen_test] + fn or() { + let source = r#"Foo::Int | Foo::Float | Foo::Str "#; + let or_pat: OrPat = parse_pat(source); + + assert!(matches!(or_pat.lhs().unwrap().kind(), PatKind::Path(_))); + assert!(matches!(or_pat.rhs().unwrap().kind(), PatKind::Or(_))); + } +} diff --git a/crates/parser2/src/ast/path.rs b/crates/parser2/src/ast/path.rs new file mode 100644 index 0000000000..eb35979d5c --- /dev/null +++ b/crates/parser2/src/ast/path.rs @@ -0,0 +1,110 @@ +use rowan::ast::{support, AstNode}; + +use super::{ast_node, AstChildren, GenericArgsOwner}; +use crate::{syntax_node::SyntaxToken, SyntaxKind as SK}; + +ast_node! { + /// A path. 
+ /// `foo::bar::baz` + pub struct Path, + SK::Path, + IntoIterator, +} +impl Path { + /// Returns the segments of the path. + pub fn segments(&self) -> AstChildren { + support::children(self.syntax()) + } +} + +ast_node! { + /// A path segment. + pub struct PathSegment, + SK::PathSegment +} +impl GenericArgsOwner for PathSegment {} +impl PathSegment { + pub fn kind(&self) -> Option { + match self.syntax().first_child_or_token() { + Some(node) => match node.kind() { + SK::IngotKw => Some(PathSegmentKind::Ingot(node.into_token().unwrap())), + SK::SuperKw => Some(PathSegmentKind::Super(node.into_token().unwrap())), + SK::SelfTypeKw => Some(PathSegmentKind::SelfTy(node.into_token().unwrap())), + SK::SelfKw => Some(PathSegmentKind::Self_(node.into_token().unwrap())), + SK::Ident => Some(PathSegmentKind::Ident(node.into_token().unwrap())), + _ => None, + }, + _ => None, + } + } + /// Returns the identifier of the segment. + pub fn ident(&self) -> Option { + support::token(self.syntax(), SK::Ident) + } + + /// Returns `true` if the segment is a `self` keyword. + pub fn is_self(&self) -> bool { + support::token(self.syntax(), SK::SelfKw).is_some() + } + + /// Returns `true` if the segment is a `Self` keyword. + pub fn is_self_ty(&self) -> bool { + support::token(self.syntax(), SK::SelfTypeKw).is_some() + } +} + +/// A path segment kind. 
+pub enum PathSegmentKind { + /// `ingot` + Ingot(SyntaxToken), + /// `super` + Super(SyntaxToken), + /// `Self` + SelfTy(SyntaxToken), + /// `self` + Self_(SyntaxToken), + /// `foo` + Ident(SyntaxToken), +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::{ + lexer::Lexer, + parser::{path::PathScope, Parser}, + }; + + use wasm_bindgen_test::wasm_bindgen_test; + + fn parse_path(source: &str) -> Path { + let lexer = Lexer::new(source); + let mut parser = Parser::new(lexer); + parser.parse(PathScope::default()).unwrap(); + Path::cast(parser.finish_to_node().0).unwrap() + } + + #[test] + #[wasm_bindgen_test] + fn path_ast() { + let source = r#"self::Foo"#; + let path = parse_path(source); + let mut segments = path.segments(); + + assert!(segments.next().unwrap().is_self()); + assert_eq!(segments.next().unwrap().ident().unwrap().text(), "Foo"); + assert!(segments.next().is_none()); + } + + #[test] + #[wasm_bindgen_test] + fn path_ast2() { + let source = r#"Self::Dep"#; + let path = parse_path(source); + let mut segments = path.segments(); + + assert!(segments.next().unwrap().is_self_ty()); + assert_eq!(segments.next().unwrap().ident().unwrap().text(), "Dep"); + assert!(segments.next().is_none()); + } +} diff --git a/crates/parser2/src/ast/stmt.rs b/crates/parser2/src/ast/stmt.rs new file mode 100644 index 0000000000..0ea4f990e9 --- /dev/null +++ b/crates/parser2/src/ast/stmt.rs @@ -0,0 +1,238 @@ +use rowan::ast::{support, AstNode}; + +use super::ast_node; +use crate::SyntaxKind as SK; + +ast_node! { + /// A statement. + /// Use [`Self::kind`] to get the specific kind of the statement. + pub struct Stmt, + SK::LetStmt + | SK::ForStmt + | SK::WhileStmt + | SK::ContinueStmt + | SK::BreakStmt + | SK::ReturnStmt + | SK::ExprStmt +} +impl Stmt { + /// Returns the specific kind of the statement. 
+ pub fn kind(&self) -> StmtKind { + match self.syntax().kind() { + SK::LetStmt => StmtKind::Let(AstNode::cast(self.syntax().clone()).unwrap()), + SK::ForStmt => StmtKind::For(AstNode::cast(self.syntax().clone()).unwrap()), + SK::WhileStmt => StmtKind::While(AstNode::cast(self.syntax().clone()).unwrap()), + SK::ContinueStmt => StmtKind::Continue(AstNode::cast(self.syntax().clone()).unwrap()), + SK::BreakStmt => StmtKind::Break(AstNode::cast(self.syntax().clone()).unwrap()), + SK::ReturnStmt => StmtKind::Return(AstNode::cast(self.syntax().clone()).unwrap()), + SK::ExprStmt => StmtKind::Expr(AstNode::cast(self.syntax().clone()).unwrap()), + _ => unreachable!(), + } + } +} + +ast_node! { + /// `let x: i32 = 1` + pub struct LetStmt, + SK::LetStmt, +} +impl LetStmt { + /// Returns the pattern of the binding. + pub fn pat(&self) -> Option { + support::child(self.syntax()) + } + + /// Returns the type annotation. + pub fn type_annotation(&self) -> Option { + support::child(self.syntax()) + } + + /// Returns the initializer. + pub fn initializer(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! { + /// `for pat in expr {..}` + pub struct ForStmt, + SK::ForStmt +} +impl ForStmt { + /// Returns the pattern of the binding in the for loop. + pub fn pat(&self) -> Option { + support::child(self.syntax()) + } + + /// Returns the expression of the iterator in the for loop. + pub fn iterable(&self) -> Option { + support::child(self.syntax()) + } + + pub fn body(&self) -> Option { + let mut block_exprs = support::children(self.syntax()); + let first = block_exprs.next(); + match block_exprs.next() { + Some(expr) => Some(expr), + None => first, + } + } +} + +ast_node! { + /// `while cond {..}` + pub struct WhileStmt, + SK::WhileStmt +} +impl WhileStmt { + /// Returns the condition of the while loop. 
+ pub fn cond(&self) -> Option { + support::child(self.syntax()) + } + + pub fn body(&self) -> Option { + let mut block_exprs = support::children(self.syntax()); + let first = block_exprs.next(); + match block_exprs.next() { + Some(expr) => Some(expr), + None => first, + } + } +} + +ast_node! { + /// `continue` + pub struct ContinueStmt, + SK::ContinueStmt +} + +ast_node! { + /// `break` + pub struct BreakStmt, + SK::BreakStmt +} + +ast_node! { + /// `return` or + /// `return expr` + pub struct ReturnStmt, + SK::ReturnStmt +} +impl ReturnStmt { + /// Returns the expression of the return statement. + pub fn expr(&self) -> Option { + support::child(self.syntax()) + } + + /// Returns `true` if there is an expression or `Error` node after `return` + /// keyword. + pub fn has_value(&self) -> bool { + self.syntax().children().count() >= 1 + } +} + +ast_node! { + pub struct ExprStmt, + SK::ExprStmt +} +impl ExprStmt { + /// Returns the expression of the expression statement. + pub fn expr(&self) -> Option { + support::child(self.syntax()) + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash, derive_more::From, derive_more::TryInto)] +pub enum StmtKind { + Let(LetStmt), + For(ForStmt), + While(WhileStmt), + Continue(ContinueStmt), + Break(BreakStmt), + Return(ReturnStmt), + Expr(ExprStmt), +} + +#[cfg(test)] +mod tests { + use wasm_bindgen_test::wasm_bindgen_test; + + use super::*; + use crate::{ + ast::{PatKind, TypeKind}, + lexer::Lexer, + parser::Parser, + }; + + fn parse_stmt(source: &str) -> T + where + T: TryFrom, + { + let lexer = Lexer::new(source); + let mut parser = Parser::new(lexer); + crate::parser::stmt::parse_stmt(&mut parser).unwrap(); + Stmt::cast(parser.finish_to_node().0) + .unwrap() + .kind() + .try_into() + .unwrap() + } + + #[test] + #[wasm_bindgen_test] + fn let_() { + let let_stmt: LetStmt = parse_stmt("let x: i32 = 1"); + + assert!(matches!(let_stmt.pat().unwrap().kind(), PatKind::Path(_))); + assert!(matches!( + 
let_stmt.type_annotation().unwrap().kind(), + TypeKind::Path(_) + )); + assert!(let_stmt.initializer().is_some()); + + let let_stmt: LetStmt = parse_stmt("let x"); + assert!(matches!(let_stmt.pat().unwrap().kind(), PatKind::Path(_))); + assert!(let_stmt.type_annotation().is_none()); + assert!(let_stmt.initializer().is_none()); + } + + #[test] + #[wasm_bindgen_test] + fn for_() { + let source = r#" + for x in foo { + bar + } + "#; + + let for_stmt: ForStmt = parse_stmt(source); + assert!(matches!(for_stmt.pat().unwrap().kind(), PatKind::Path(_))); + assert!(for_stmt.iterable().is_some()); + assert!(for_stmt.body().is_some()); + } + + #[test] + #[wasm_bindgen_test] + fn while_() { + let source = r#" + while { x } { + bar + } + "#; + + let while_stmt: WhileStmt = parse_stmt(source); + assert!(while_stmt.cond().is_some()); + assert!(while_stmt.body().is_some()); + assert_ne!(while_stmt.cond(), while_stmt.body()); + } + + #[test] + #[wasm_bindgen_test] + fn return_() { + let ret_stmt: ReturnStmt = parse_stmt("return x"); + assert!(ret_stmt.expr().is_some()); + + let ret_stmt: ReturnStmt = parse_stmt("return"); + assert!(ret_stmt.expr().is_none()); + } +} diff --git a/crates/parser2/src/ast/types.rs b/crates/parser2/src/ast/types.rs new file mode 100644 index 0000000000..b5c71859f6 --- /dev/null +++ b/crates/parser2/src/ast/types.rs @@ -0,0 +1,228 @@ +use rowan::ast::{support, AstNode}; + +use super::{ast_node, AstChildren}; +use crate::{SyntaxKind as SK, SyntaxToken}; + +ast_node! { + /// A type node. + /// If you want to match a specific kind of type, use `[Type::kind]`. 
+ pub struct Type, + SK::PtrType + | SK::PathType + | SK::SelfType + | SK::TupleType + | SK::ArrayType + | SK::NeverType +} +impl Type { + pub fn kind(&self) -> TypeKind { + match self.syntax().kind() { + SK::PtrType => TypeKind::Ptr(AstNode::cast(self.syntax().clone()).unwrap()), + SK::PathType => TypeKind::Path(AstNode::cast(self.syntax().clone()).unwrap()), + SK::SelfType => TypeKind::SelfType(AstNode::cast(self.syntax().clone()).unwrap()), + SK::TupleType => TypeKind::Tuple(AstNode::cast(self.syntax().clone()).unwrap()), + SK::ArrayType => TypeKind::Array(AstNode::cast(self.syntax().clone()).unwrap()), + SK::NeverType => TypeKind::Never(AstNode::cast(self.syntax().clone()).unwrap()), + _ => unreachable!(), + } + } +} + +ast_node! { + /// A pointer type. + /// `*i32` + pub struct PtrType, + SK::PtrType, +} +impl PtrType { + /// Returns the `*` token. + pub fn star(&self) -> Option { + support::token(self.syntax(), SK::Star) + } + + /// Returns the type pointed to. + pub fn inner(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! { + /// A path type. + /// `foo::Type` + pub struct PathType, + SK::PathType +} +impl PathType { + /// Returns the path of the type. + pub fn path(&self) -> Option { + support::child(self.syntax()) + } +} +impl super::GenericArgsOwner for PathType {} + +ast_node! { + /// A self type. + /// `Self` + pub struct SelfType, + SK::SelfType, +} +impl SelfType { + /// Returns the `Self` keyword. + pub fn self_kw(&self) -> Option { + support::token(self.syntax(), SK::SelfTypeKw) + } +} +impl super::GenericArgsOwner for SelfType {} + +ast_node! { + /// A tuple type. + /// `(i32, foo::Bar)` + pub struct TupleType, + SK::TupleType, + IntoIterator, +} +impl TupleType { + pub fn l_paren(&self) -> Option { + support::token(self.syntax(), SK::LParen) + } + + pub fn r_paren(&self) -> Option { + support::token(self.syntax(), SK::RParen) + } + + /// Returns the types in the tuple. 
+ pub fn elem_tys(&self) -> AstChildren { + support::children(self.syntax()) + } +} + +ast_node! { + /// An array type. + /// `[i32; 4]` + pub struct ArrayType, + SK::ArrayType, +} +impl ArrayType { + pub fn l_bracket(&self) -> Option { + support::token(self.syntax(), SK::LBracket) + } + + pub fn r_bracket(&self) -> Option { + support::token(self.syntax(), SK::LBracket) + } + /// Returns the type of the array elements. + pub fn elem_ty(&self) -> Option { + support::child(self.syntax()) + } + + /// Returns the length of the array. + pub fn len(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! { + pub struct NeverType, + SK::NeverType, +} + +/// A specific kind of type. +#[derive(Debug, Clone, PartialEq, Eq, Hash, derive_more::From, derive_more::TryInto)] +pub enum TypeKind { + Ptr(PtrType), + Path(PathType), + SelfType(SelfType), + Tuple(TupleType), + Array(ArrayType), + Never(NeverType), +} + +#[cfg(test)] +mod tests { + use wasm_bindgen_test::wasm_bindgen_test; + + use super::*; + use crate::{ast::prelude::*, lexer::Lexer, parser}; + + fn parse_type(source: &str) -> T + where + T: TryFrom, + { + let lexer = Lexer::new(source); + let mut parser = parser::Parser::new(lexer); + let _ = parser::type_::parse_type(&mut parser, None); + Type::cast(parser.finish_to_node().0) + .unwrap() + .kind() + .try_into() + .unwrap() + } + + #[test] + #[wasm_bindgen_test] + fn ptr_type() { + let ptr_ty: PtrType = parse_type("*i32"); + + assert_eq!(ptr_ty.star().unwrap().text(), "*"); + assert!(matches!(ptr_ty.inner().unwrap().kind(), TypeKind::Path(_))); + } + + #[test] + #[wasm_bindgen_test] + fn path_type() { + let path_ty: PathType = parse_type("Foo::Bar"); + + for (i, segment) in path_ty.path().unwrap().segments().enumerate() { + match i { + 0 => assert_eq!(segment.ident().unwrap().text(), "Foo"), + 1 => { + assert_eq!(segment.ident().unwrap().text(), "Bar"); + let generic_args = segment.generic_args().unwrap(); + for (i, arg) in 
generic_args.iter().enumerate() { + match i { + 0 => assert!(matches!(arg.kind(), crate::ast::GenericArgKind::Type(_))), + 1 => { + assert!(matches!(arg.kind(), crate::ast::GenericArgKind::Const(_))) + } + _ => panic!(), + } + } + } + _ => panic!(), + } + } + } + + #[test] + #[wasm_bindgen_test] + fn self_type() { + let _: SelfType = parse_type("Self"); + } + + #[test] + #[wasm_bindgen_test] + fn tuple_type() { + let tuple_ty: TupleType = parse_type("((i32, u32), foo::Bar, *usize"); + + for (i, ty) in tuple_ty.elem_tys().enumerate() { + match i { + 0 => assert!(matches!(ty.kind(), TypeKind::Tuple(_))), + 1 => assert!(matches!(ty.kind(), TypeKind::Path(_))), + 2 => assert!(matches!(ty.kind(), TypeKind::Ptr(_))), + _ => panic!(), + } + } + } + + #[test] + #[wasm_bindgen_test] + fn array_type() { + let array_ty: ArrayType = parse_type("[(i32, u32); 1]"); + + assert!(matches!( + array_ty.elem_ty().unwrap().kind(), + TypeKind::Tuple(_) + )); + assert!(array_ty.len().is_some()); + } +} diff --git a/crates/parser2/src/ast/use_tree.rs b/crates/parser2/src/ast/use_tree.rs new file mode 100644 index 0000000000..1ea0bf33d6 --- /dev/null +++ b/crates/parser2/src/ast/use_tree.rs @@ -0,0 +1,128 @@ +use rowan::ast::{support, AstNode}; + +use super::ast_node; + +use crate::{SyntaxKind as SK, SyntaxToken}; + +ast_node! { + /// A use tree. + /// `Foo::Foo2::{Bar::*, Baz::{x, y}}` + pub struct UseTree, + SK::UseTree, +} +impl UseTree { + /// Returns the path of this use tree. + /// `Foo::Foo2` in `Foo::Foo2::{Bar::*, Baz::{x, y}}` + /// + /// NOTE: If the tree root is started with `{}`, then this method will + /// return `None`. + pub fn path(&self) -> Option { + support::child(self.syntax()) + } + + /// Returns the children of this use tree. + /// + /// `Bar::*` and `Baz::{x, y}` in `Foo::Foo2::{Bar::*, Baz::{x, y}}`. + pub fn children(&self) -> Option { + support::child(self.syntax()) + } + + /// Returns `true` if this use tree has children tree. 
+ pub fn has_subtree(&self) -> bool { + self.children().is_some() + } + + //// Returns the alias of this use tree. + /// `Bar` in `Foo as Bar;` + pub fn alias(&self) -> Option { + support::child(self.syntax()) + } +} + +ast_node! { + pub struct UseTreeList, + SK::UseTreeList, + IntoIterator, +} + +ast_node! { + pub struct UsePath, + SK::UsePath, + IntoIterator, +} + +ast_node! { + pub struct UsePathSegment, + SK::UsePathSegment, +} +impl UsePathSegment { + pub fn kind(&self) -> Option { + match self.syntax().first_child_or_token() { + Some(node) => match node.kind() { + SK::IngotKw => Some(UsePathSegmentKind::Ingot(node.into_token().unwrap())), + SK::SuperKw => Some(UsePathSegmentKind::Super(node.into_token().unwrap())), + SK::SelfKw => Some(UsePathSegmentKind::Self_(node.into_token().unwrap())), + SK::Ident => Some(UsePathSegmentKind::Ident(node.into_token().unwrap())), + SK::Star => Some(UsePathSegmentKind::Glob(node.into_token().unwrap())), + _ => None, + }, + _ => None, + } + } + + pub fn ident(&self) -> Option { + support::token(self.syntax(), SK::Ident) + } + + pub fn ingot_token(&self) -> Option { + support::token(self.syntax(), SK::IngotKw) + } + + pub fn super_token(&self) -> Option { + support::token(self.syntax(), SK::SuperKw) + } + + pub fn self_token(&self) -> Option { + support::token(self.syntax(), SK::SelfKw) + } + + pub fn glob(&self) -> Option { + support::token(self.syntax(), SK::Star) + } +} + +ast_node! { + pub struct UseAlias, + SK::UseTreeRename, +} +impl UseAlias { + //// Returns `Some` if the alias is specified as an ident. + pub fn ident(&self) -> Option { + support::token(self.syntax(), SK::Ident) + } + + /// Returns `Some` if the alias is specified as `_`. + pub fn underscore(&self) -> Option { + support::token(self.syntax(), SK::Underscore) + } + + /// Returns `Some` if the alias has a name or `_`. + pub fn alias(&self) -> Option { + self.ident().or_else(|| self.underscore()) + } +} + +/// A path segment in a use tree. 
+pub enum UsePathSegmentKind { + /// `ingot` + Ingot(SyntaxToken), + /// `super` + Super(SyntaxToken), + /// `self` + Self_(SyntaxToken), + /// `foo` + Ident(SyntaxToken), + /// `*` + /// This is only allowed in the last segment of a path. + Glob(SyntaxToken), +} diff --git a/crates/parser2/src/lexer.rs b/crates/parser2/src/lexer.rs new file mode 100644 index 0000000000..a9d6660c35 --- /dev/null +++ b/crates/parser2/src/lexer.rs @@ -0,0 +1,57 @@ +use crate::{ + parser::token_stream::{LexicalToken, TokenStream}, + SyntaxKind, +}; + +pub struct Lexer<'s> { + peek: Option>, + inner: logos::Lexer<'s, SyntaxKind>, +} + +impl<'s> Lexer<'s> { + pub fn new(text: &'s str) -> Self { + Self { + peek: None, + inner: logos::Lexer::new(text), + } + } +} + +impl<'s> TokenStream for Lexer<'s> { + type Token = Token<'s>; + + fn next(&mut self) -> Option { + if let Some(token) = self.peek.take() { + return Some(token); + } + + let syntax_kind = self.inner.next()?; + Some(Token { + syntax_kind, + text: self.inner.slice(), + }) + } + + fn peek(&mut self) -> Option<&Self::Token> { + if self.peek.is_none() { + self.peek = self.next(); + } + self.peek.as_ref() + } +} + +#[derive(Clone)] +pub struct Token<'s> { + syntax_kind: SyntaxKind, + text: &'s str, +} + +impl LexicalToken for Token<'_> { + fn syntax_kind(&self) -> SyntaxKind { + self.syntax_kind + } + + fn text(&self) -> &str { + self.text + } +} diff --git a/crates/parser2/src/lib.rs b/crates/parser2/src/lib.rs new file mode 100644 index 0000000000..1c09055784 --- /dev/null +++ b/crates/parser2/src/lib.rs @@ -0,0 +1,119 @@ +pub mod ast; +pub mod lexer; +pub mod parser; +pub mod syntax_kind; +pub mod syntax_node; + +use rowan::TextSize; +use smallvec::SmallVec; +pub use syntax_kind::SyntaxKind; +pub use syntax_node::{FeLang, GreenNode, NodeOrToken, SyntaxNode, SyntaxToken, TextRange}; + +use parser::RootScope; + +pub fn parse_source_file(text: &str) -> (GreenNode, Vec) { + let lexer = lexer::Lexer::new(text); + let mut parser = 
parser::Parser::new(lexer); + let checkpoint = parser.enter(RootScope::default(), None); + + let _ = parser.parse(parser::ItemListScope::default()); + + parser.leave(checkpoint); + let (node, errs) = parser.finish(); + (node, errs) +} + +/// An parse error which is accumulated in the [`parser::Parser`] while parsing. +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum ParseError { + Expected(SmallVec<[SyntaxKind; 2]>, ExpectedKind, TextSize), + Unexpected(String, TextRange), + Msg(String, TextRange), +} + +impl ParseError { + pub fn expected(tokens: &[SyntaxKind], kind: Option, pos: TextSize) -> Self { + ParseError::Expected( + SmallVec::from_slice(tokens), + kind.unwrap_or(ExpectedKind::Unspecified), + pos, + ) + } + + pub fn msg(&self) -> String { + match self { + ParseError::Expected(_, exp, _) => match exp { + ExpectedKind::Body(kind) => format!("{} requires a body", kind.describe()), + ExpectedKind::Name(kind) => format!("expected name for {}", kind.describe()), + ExpectedKind::ClosingBracket { bracket, parent } => format!( + "missing closing {} for {}", + bracket.describe(), + parent.describe() + ), + ExpectedKind::Separator { separator, element } => { + format!( + "expected {} separator after {}", + separator.describe(), + element.describe() + ) + } + ExpectedKind::TypeSpecifier(kind) => { + format!("missing type bound for {}", kind.describe()) + } + ExpectedKind::Syntax(kind) => format!("expected {}", kind.describe()), + ExpectedKind::Unspecified => self.label(), + }, + ParseError::Unexpected(m, _) => m.clone(), + ParseError::Msg(m, _) => m.clone(), + } + } + + pub fn label(&self) -> String { + match self { + ParseError::Expected(tokens, _, _) => { + if tokens.len() == 1 { + return format!("expected {}", tokens[0].describe()); + } + + let mut s = "expected ".to_string(); + let mut delim = ""; + for (i, t) in tokens.iter().enumerate() { + s.push_str(delim); + s.push_str(t.describe()); + + delim = if i + 2 == tokens.len() { " or " } else { ", " }; + } + s 
+ } + ParseError::Unexpected(_, _) => "unexpected".into(), + ParseError::Msg(msg, _) => msg.clone(), + } + } + + pub fn range(&self) -> TextRange { + match self { + ParseError::Expected(_, _, pos) => TextRange::empty(*pos), + ParseError::Unexpected(_, r) | ParseError::Msg(_, r) => *r, + } + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum ExpectedKind { + Body(SyntaxKind), + Name(SyntaxKind), + ClosingBracket { + bracket: SyntaxKind, + parent: SyntaxKind, + }, + TypeSpecifier(SyntaxKind), + Separator { + separator: SyntaxKind, + element: SyntaxKind, + }, + Syntax(SyntaxKind), + Unspecified, + // TODO: + // - newline after attribute in attrlistscope + // +} diff --git a/crates/parser2/src/parser/attr.rs b/crates/parser2/src/parser/attr.rs new file mode 100644 index 0000000000..68f45d4b4e --- /dev/null +++ b/crates/parser2/src/parser/attr.rs @@ -0,0 +1,118 @@ +use std::convert::Infallible; +use unwrap_infallible::UnwrapInfallible; + +use super::{ + define_scope, parse_list, token_stream::TokenStream, Checkpoint, ErrProof, Parser, Recovery, +}; + +use crate::{ExpectedKind, SyntaxKind}; + +pub(super) fn parse_attr_list( + parser: &mut Parser, +) -> Result, Recovery> { + if let Some(SyntaxKind::DocComment) | Some(SyntaxKind::Pound) = parser.current_kind() { + parser.parse_cp(AttrListScope::default(), None).map(Some) + } else { + Ok(None) + } +} + +define_scope! 
{ pub(crate) AttrListScope, AttrList, (Newline) } +impl super::Parse for AttrListScope { + type Error = Recovery; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + loop { + parser.set_newline_as_trivia(true); + match parser.current_kind() { + Some(SyntaxKind::Pound) => { + parser.parse(AttrScope::default())?; + } + Some(SyntaxKind::DocComment) => parser + .parse(DocCommentAttrScope::default()) + .unwrap_infallible(), + _ => break, + }; + parser.set_newline_as_trivia(false); + if parser.find( + SyntaxKind::Newline, + ExpectedKind::Separator { + separator: SyntaxKind::Newline, + element: SyntaxKind::Attr, + }, + )? { + parser.bump(); + } + } + Ok(()) + } +} + +define_scope! { AttrScope, Attr } +impl super::Parse for AttrScope { + type Error = Recovery; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parser.set_newline_as_trivia(false); + parser.bump_expected(SyntaxKind::Pound); + + parser.set_scope_recovery_stack(&[SyntaxKind::LParen]); + if parser.find(SyntaxKind::Ident, ExpectedKind::Name(SyntaxKind::Attr))? { + parser.bump() + } + + if parser.current_kind() == Some(SyntaxKind::LParen) { + parser.pop_recovery_stack(); + parser.parse(AttrArgListScope::default()) + } else { + Ok(()) + } + } +} + +define_scope! { AttrArgListScope, AttrArgList, (Comma, RParen) } +impl super::Parse for AttrArgListScope { + type Error = Recovery; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parse_list( + parser, + false, + SyntaxKind::AttrArgList, + (SyntaxKind::LParen, SyntaxKind::RParen), + |parser| parser.parse(AttrArgScope::default()), + ) + } +} + +define_scope! 
{ AttrArgScope, AttrArg } +impl super::Parse for AttrArgScope { + type Error = Recovery; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + let expected_err = ExpectedKind::Syntax(SyntaxKind::AttrArg); + + parser.set_scope_recovery_stack(&[SyntaxKind::Ident, SyntaxKind::Colon]); + if parser.find_and_pop(SyntaxKind::Ident, expected_err)? { + parser.bump(); + } + if parser.find_and_pop(SyntaxKind::Colon, expected_err)? { + parser.bump(); + } + if parser.find(SyntaxKind::Ident, expected_err)? { + parser.bump(); + } + Ok(()) + } +} + +define_scope! { DocCommentAttrScope, DocCommentAttr } +impl super::Parse for DocCommentAttrScope { + type Error = Infallible; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parser.bump_expected(SyntaxKind::DocComment); + parser.bump_if(SyntaxKind::Newline); + Ok(()) + } +} diff --git a/crates/parser2/src/parser/expr.rs b/crates/parser2/src/parser/expr.rs new file mode 100644 index 0000000000..415eee496a --- /dev/null +++ b/crates/parser2/src/parser/expr.rs @@ -0,0 +1,508 @@ +use std::convert::{identity, Infallible}; +use unwrap_infallible::UnwrapInfallible; + +use super::{ + define_scope, + expr_atom::{self, is_expr_atom_head}, + param::{CallArgListScope, GenericArgListScope}, + token_stream::TokenStream, + Checkpoint, ErrProof, Parser, Recovery, +}; +use crate::{ExpectedKind, SyntaxKind}; + +/// Parses expression. +pub fn parse_expr(parser: &mut Parser) -> Result<(), Recovery> { + parse_expr_with_min_bp(parser, 0, true) +} + +/// Parses expression except for `struct` initialization expression. +pub fn parse_expr_no_struct( + parser: &mut Parser, +) -> Result<(), Recovery> { + parse_expr_with_min_bp(parser, 0, false) +} + +// Expressions are parsed in Pratt's top-down operator precedence style. +// +/// Parse an expression, stopping if/when we reach an operator that binds less +/// tightly than given binding power. +/// +/// Returns `true` if parsing succeeded, `false` otherwise. 
+fn parse_expr_with_min_bp( + parser: &mut Parser, + min_bp: u8, + allow_struct_init: bool, +) -> Result<(), Recovery> { + let checkpoint = parse_expr_atom(parser, allow_struct_init)?; + + loop { + let is_trivia = parser.set_newline_as_trivia(true); + let Some(kind) = parser.current_kind() else { + parser.set_newline_as_trivia(is_trivia); + break; + }; + parser.set_newline_as_trivia(is_trivia); + + // Parse postfix operators. + match postfix_binding_power(parser) { + Some(lbp) if lbp < min_bp => break, + Some(_) => { + match kind { + SyntaxKind::LBracket => { + parser.parse_cp(IndexExprScope::default(), Some(checkpoint))?; + continue; + } + + SyntaxKind::LParen => { + if parser + .parse_cp(CallExprScope::default(), Some(checkpoint)) + .is_ok() + { + continue; + } + } + + // `expr.method()` + SyntaxKind::Dot => { + if is_method_call(parser) { + parser.parse_cp(MethodExprScope::default(), Some(checkpoint))?; + continue; + } + } + _ => unreachable!(), + } + } + None => {} + } + + if let Some((lbp, _)) = infix_binding_power(parser) { + if lbp < min_bp { + break; + } + + if kind == SyntaxKind::Dot { + parser.parse_cp(FieldExprScope::default(), Some(checkpoint)) + } else if is_assign(parser) { + parser.parse_cp(AssignExprScope::default(), Some(checkpoint)) + } else if is_aug_assign(parser) { + parser.parse_cp(AugAssignExprScope::default(), Some(checkpoint)) + } else { + parser.parse_cp(BinExprScope::default(), Some(checkpoint)) + }?; + continue; + } + break; + } + + Ok(()) +} + +fn parse_expr_atom( + parser: &mut Parser, + allow_struct_init: bool, +) -> Result> { + match parser.current_kind() { + Some(kind) if prefix_binding_power(kind).is_some() => { + parser.parse_cp(UnExprScope::default(), None) + } + Some(kind) if is_expr_atom_head(kind) => { + expr_atom::parse_expr_atom(parser, allow_struct_init) + } + _ => parser + .error_and_recover("expected expression") + .map(|_| parser.checkpoint()), + } +} + +/// Specifies how tightly a prefix unary operator binds to its 
operand. +fn prefix_binding_power(kind: SyntaxKind) -> Option { + use SyntaxKind::*; + match kind { + Not | Plus | Minus | Tilde => Some(145), + _ => None, + } +} + +/// Specifies how tightly a postfix operator binds to its operand. +fn postfix_binding_power(parser: &mut Parser) -> Option { + use SyntaxKind::*; + + let is_trivia = parser.set_newline_as_trivia(true); + if let Some(Dot) = parser.current_kind() { + parser.set_newline_as_trivia(is_trivia); + return Some(151); + } + + parser.set_newline_as_trivia(false); + let power = match parser.current_kind() { + Some(LBracket | LParen) => Some(147), + _ => None, + }; + + parser.set_newline_as_trivia(is_trivia); + power +} + +/// Specifies how tightly does an infix operator bind to its left and right +/// operands. +fn infix_binding_power(parser: &mut Parser) -> Option<(u8, u8)> { + use SyntaxKind::*; + + let is_trivia = parser.set_newline_as_trivia(true); + if let Some(Dot) = parser.current_kind() { + parser.set_newline_as_trivia(is_trivia); + return Some((151, 150)); + } + + parser.set_newline_as_trivia(false); + if is_aug_assign(parser) { + parser.set_newline_as_trivia(is_trivia); + return Some((11, 10)); + } + + let Some(kind) = parser.current_kind() else { + parser.set_newline_as_trivia(is_trivia); + return None; + }; + + let bp = match kind { + Pipe2 => (50, 51), + Amp2 => (60, 61), + NotEq | Eq2 => (70, 71), + Lt => { + if is_lshift(parser) { + (110, 111) + } else { + // `LT` and `LtEq` has the same binding power. + (70, 71) + } + } + Gt => { + if is_rshift(parser) { + (110, 111) + } else { + // `Gt` and `GtEq` has the same binding power. 
+ (70, 71) + } + } + Pipe => (80, 81), + Hat => (90, 91), + Amp => (100, 101), + LShift | RShift => (110, 111), + Plus | Minus => (120, 121), + Star | Slash | Percent => (130, 131), + Star2 => (141, 140), + Eq => { + // `Assign` and `AugAssign` have the same binding power + (11, 10) + } + _ => { + return { + parser.set_newline_as_trivia(is_trivia); + None + } + } + }; + + parser.set_newline_as_trivia(is_trivia); + Some(bp) +} + +define_scope! { UnExprScope, UnExpr } +impl super::Parse for UnExprScope { + type Error = Recovery; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parser.set_newline_as_trivia(false); + let kind = parser.current_kind().unwrap(); + let bp = prefix_binding_power(kind).unwrap(); + parser.bump(); + parse_expr_with_min_bp(parser, bp, true) + } +} + +define_scope! { BinExprScope, BinExpr } +impl super::Parse for BinExprScope { + type Error = Recovery; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parser.set_newline_as_trivia(false); + let (_, rbp) = infix_binding_power(parser).unwrap(); + bump_bin_op(parser); + parse_expr_with_min_bp(parser, rbp, false) + } +} + +define_scope! { AugAssignExprScope, AugAssignExpr } +impl super::Parse for AugAssignExprScope { + type Error = Recovery; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parser.set_newline_as_trivia(false); + let (_, rbp) = infix_binding_power(parser).unwrap(); + bump_aug_assign_op(parser); + parse_expr_with_min_bp(parser, rbp, false) + } +} + +define_scope! { AssignExprScope, AssignExpr } +impl super::Parse for AssignExprScope { + type Error = Recovery; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parser.set_newline_as_trivia(false); + let (_, rbp) = infix_binding_power(parser).unwrap(); + parser.bump_expected(SyntaxKind::Eq); + parse_expr_with_min_bp(parser, rbp, true) + } +} + +define_scope! 
{ IndexExprScope, IndexExpr, (RBracket, Newline) } +impl super::Parse for IndexExprScope { + type Error = Recovery; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parser.set_newline_as_trivia(false); + parser.bump_expected(SyntaxKind::LBracket); + parse_expr(parser)?; + + if parser.find( + SyntaxKind::RBracket, + ExpectedKind::ClosingBracket { + bracket: SyntaxKind::RBracket, + parent: SyntaxKind::IndexExpr, + }, + )? { + parser.bump(); + } + Ok(()) + } +} + +define_scope! { CallExprScope, CallExpr } +impl super::Parse for CallExprScope { + type Error = Recovery; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parser.set_newline_as_trivia(false); + + parser.set_scope_recovery_stack(&[SyntaxKind::LParen]); + if parser.current_kind() == Some(SyntaxKind::Lt) { + parser.parse(GenericArgListScope::default())?; + } + + if parser.find_and_pop( + SyntaxKind::LParen, + ExpectedKind::Syntax(SyntaxKind::CallArgList), + )? { + parser.parse(CallArgListScope::default())?; + } + Ok(()) + } +} + +define_scope! { MethodExprScope, MethodCallExpr } +impl super::Parse for MethodExprScope { + type Error = Recovery; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parser.bump_expected(SyntaxKind::Dot); + parser.set_newline_as_trivia(false); + + parser.set_scope_recovery_stack(&[SyntaxKind::Ident, SyntaxKind::Lt, SyntaxKind::LParen]); + if parser.find_and_pop( + SyntaxKind::Ident, + ExpectedKind::Name(SyntaxKind::MethodCallExpr), + )? { + parser.bump(); + } + + parser.pop_recovery_stack(); + if parser.current_kind() == Some(SyntaxKind::Lt) { + parser.parse(GenericArgListScope::default())?; + } + + if parser.find_and_pop( + SyntaxKind::LParen, + ExpectedKind::Syntax(SyntaxKind::CallArgList), + )? { + parser.parse(CallArgListScope::default())?; + } + Ok(()) + } +} + +define_scope! 
{ FieldExprScope, FieldExpr } +impl super::Parse for FieldExprScope { + type Error = Recovery; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parser.bump_expected(SyntaxKind::Dot); + + parser.expect(&[SyntaxKind::Ident, SyntaxKind::Int], None)?; + parser.bump(); + Ok(()) + } +} + +define_scope! { pub(super) LShiftScope, LShift } +impl super::Parse for LShiftScope { + type Error = Infallible; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parser.bump_expected(SyntaxKind::Lt); + parser.bump_expected(SyntaxKind::Lt); + Ok(()) + } +} + +define_scope! { pub(super) RShiftScope, RShift } +impl super::Parse for RShiftScope { + type Error = Infallible; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parser.bump_expected(SyntaxKind::Gt); + parser.bump_expected(SyntaxKind::Gt); + Ok(()) + } +} + +define_scope! { pub(super) LtEqScope, LtEq } +impl super::Parse for LtEqScope { + type Error = Infallible; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parser.bump_expected(SyntaxKind::Lt); + parser.bump_expected(SyntaxKind::Eq); + Ok(()) + } +} + +define_scope! 
{ pub(super) GtEqScope, GtEq } +impl super::Parse for GtEqScope { + type Error = Infallible; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parser.bump_expected(SyntaxKind::Gt); + parser.bump_expected(SyntaxKind::Eq); + Ok(()) + } +} + +pub(crate) fn is_lshift(parser: &mut Parser) -> bool { + parser.peek_two() == (Some(SyntaxKind::Lt), Some(SyntaxKind::Lt)) +} + +pub(crate) fn is_rshift(parser: &mut Parser) -> bool { + parser.peek_two() == (Some(SyntaxKind::Gt), Some(SyntaxKind::Gt)) +} + +pub(crate) fn is_lt_eq(parser: &mut Parser) -> bool { + parser.peek_two() == (Some(SyntaxKind::Lt), Some(SyntaxKind::Eq)) +} + +fn is_gt_eq(parser: &mut Parser) -> bool { + parser.peek_two() == (Some(SyntaxKind::Gt), Some(SyntaxKind::Eq)) +} + +fn is_aug_assign(parser: &mut Parser) -> bool { + use SyntaxKind::*; + matches!( + parser.peek_three(), + ( + Some(Pipe | Hat | Amp | Plus | Minus | Star | Slash | Percent | Star2), + Some(Eq), + _ + ) | (Some(Lt), Some(Lt), Some(Eq)) + | (Some(Gt), Some(Gt), Some(Eq)) + ) +} + +fn is_assign(parser: &mut Parser) -> bool { + let nt = parser.set_newline_as_trivia(false); + let is_asn = parser.current_kind() == Some(SyntaxKind::Eq); + parser.set_newline_as_trivia(nt); + is_asn +} + +fn bump_bin_op(parser: &mut Parser) { + match parser.current_kind() { + Some(SyntaxKind::Lt) => { + if is_lshift(parser) { + parser.parse(LShiftScope::default()).unwrap_infallible(); + } else if is_lt_eq(parser) { + parser.parse(LtEqScope::default()).unwrap_infallible(); + } else { + parser.bump(); + } + } + Some(SyntaxKind::Gt) => { + if is_rshift(parser) { + parser.parse(RShiftScope::default()).unwrap_infallible(); + } else if is_gt_eq(parser) { + parser.parse(GtEqScope::default()).unwrap_infallible(); + } else { + parser.bump(); + } + } + _ => { + parser.bump(); + } + } +} + +fn bump_aug_assign_op(parser: &mut Parser) -> bool { + use SyntaxKind::*; + match parser.peek_three() { + (Some(Pipe | Hat | Amp | Plus | Minus | Star | Slash 
| Percent | Star2), Some(Eq), _) => { + parser.bump(); + parser.bump(); + true + } + (Some(Lt), Some(Lt), Some(Eq)) => { + parser.parse(LShiftScope::default()).unwrap_infallible(); + parser.bump_expected(SyntaxKind::Eq); + true + } + (Some(Gt), Some(Gt), Some(Eq)) => { + parser.parse(RShiftScope::default()).unwrap_infallible(); + parser.bump_expected(SyntaxKind::Eq); + true + } + _ => false, + } +} + +fn is_method_call(parser: &mut Parser) -> bool { + let is_trivia = parser.set_newline_as_trivia(true); + let res = parser.dry_run(|parser| { + if !parser.bump_if(SyntaxKind::Dot) { + return false; + } + + if !parser.bump_if(SyntaxKind::Ident) { + return false; + } + + if parser.current_kind() == Some(SyntaxKind::Lt) + && (is_lt_eq(parser) + || is_lshift(parser) + || !parser + .parse_ok(GenericArgListScope::default()) + .is_ok_and(identity)) + { + return false; + } + + if parser.current_kind() != Some(SyntaxKind::LParen) { + false + } else { + parser.set_newline_as_trivia(is_trivia); + parser + .parse_ok(CallArgListScope::default()) + .is_ok_and(identity) + } + }); + parser.set_newline_as_trivia(is_trivia); + res +} diff --git a/crates/parser2/src/parser/expr_atom.rs b/crates/parser2/src/parser/expr_atom.rs new file mode 100644 index 0000000000..6b884ea083 --- /dev/null +++ b/crates/parser2/src/parser/expr_atom.rs @@ -0,0 +1,336 @@ +use std::convert::Infallible; + +use rowan::Checkpoint; +use unwrap_infallible::UnwrapInfallible; + +use super::{ + define_scope, + expr::{parse_expr, parse_expr_no_struct}, + item::ItemScope, + parse_list, parse_pat, + stmt::parse_stmt, + token_stream::TokenStream, + ErrProof, Parser, Recovery, +}; +use crate::{ + parser::{lit, path}, + ExpectedKind, SyntaxKind, TextRange, +}; + +// Must be kept in sync with `parse_expr_atom` +pub(super) fn is_expr_atom_head(kind: SyntaxKind) -> bool { + use SyntaxKind::*; + match kind { + IfKw | MatchKw | LBrace | LParen | LBracket => true, + kind if lit::is_lit(kind) => true, + kind if 
path::is_path_segment(kind) => true, + _ => false, + } +} + +/// Panics if `!is_expr_atom_head(parser.current_kind())` +pub(super) fn parse_expr_atom( + parser: &mut Parser, + allow_record_init: bool, +) -> Result> { + use SyntaxKind::*; + match parser.current_kind() { + Some(IfKw) => parser.parse_cp(IfExprScope::default(), None), + Some(MatchKw) => parser.parse_cp(MatchExprScope::default(), None), + Some(LBrace) => parser.parse_cp(BlockExprScope::default(), None), + Some(LParen) => parser.parse_cp(ParenScope::default(), None), + Some(LBracket) => parser.parse_cp(ArrayScope::default(), None), + Some(kind) if lit::is_lit(kind) => Ok(parser + .parse_cp(LitExprScope::default(), None) + .unwrap_infallible()), + Some(kind) if path::is_path_segment(kind) => { + parser.parse_cp(PathExprScope::new(allow_record_init), None) + } + _ => unreachable!(), + } +} + +define_scope! { + pub(crate) BlockExprScope, + BlockExpr, + ( + RBrace, + Newline, + LetKw, + ForKw, + WhileKw, + ContinueKw, + BreakKw, + ReturnKw + ) +} +impl super::Parse for BlockExprScope { + type Error = Recovery; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parser.bump_expected(SyntaxKind::LBrace); + + loop { + parser.set_newline_as_trivia(true); + if parser.current_kind() == Some(SyntaxKind::RBrace) || parser.current_kind().is_none() + { + break; + } + + if parser + .current_kind() + .map(SyntaxKind::is_item_head) + .unwrap_or_default() + { + parser.parse(ItemScope::default())?; + continue; + } + + parse_stmt(parser)?; + + parser.set_newline_as_trivia(false); + parser.expect(&[SyntaxKind::Newline, SyntaxKind::RBrace], None)?; + parser.bump_if(SyntaxKind::Newline); + } + + if parser.find( + SyntaxKind::RBrace, + crate::ExpectedKind::ClosingBracket { + bracket: SyntaxKind::RBrace, + parent: SyntaxKind::BlockExpr, + }, + )? { + parser.bump(); + } + Ok(()) + } +} + +define_scope! 
{ IfExprScope, IfExpr } +impl super::Parse for IfExprScope { + type Error = Recovery; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parser.bump_expected(SyntaxKind::IfKw); + + parser.set_scope_recovery_stack(&[SyntaxKind::LBrace, SyntaxKind::ElseKw]); + parse_expr_no_struct(parser)?; + + if parser.find_and_pop(SyntaxKind::LBrace, ExpectedKind::Body(SyntaxKind::IfExpr))? { + parser.parse(BlockExprScope::default())?; + } + + if parser.current_kind() == Some(SyntaxKind::ElseKw) { + parser.bump(); + + parser.expect(&[SyntaxKind::LBrace, SyntaxKind::IfKw], None)?; + parse_expr(parser)?; + } + Ok(()) + } +} + +define_scope! { MatchExprScope, MatchExpr } +impl super::Parse for MatchExprScope { + type Error = Recovery; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parser.bump_expected(SyntaxKind::MatchKw); + + parse_expr_no_struct(parser)?; + if parser.find( + SyntaxKind::LBrace, + ExpectedKind::Body(SyntaxKind::MatchExpr), + )? { + parser.parse(MatchArmListScope::default())?; + } + Ok(()) + } +} + +define_scope! { MatchArmListScope, MatchArmList, (SyntaxKind::Newline, SyntaxKind::RBrace) } +impl super::Parse for MatchArmListScope { + type Error = Recovery; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parser.bump_expected(SyntaxKind::LBrace); + + loop { + parser.set_newline_as_trivia(true); + if parser.current_kind() == Some(SyntaxKind::RBrace) { + break; + } + + parser.parse(MatchArmScope::default())?; + parser.set_newline_as_trivia(false); + + parser.expect(&[SyntaxKind::Newline, SyntaxKind::RBrace], None)?; + if !parser.bump_if(SyntaxKind::Newline) { + break; + } + } + parser.bump_expected(SyntaxKind::RBrace); + Ok(()) + } +} + +define_scope! 
{ MatchArmScope, MatchArm } +impl super::Parse for MatchArmScope { + type Error = Recovery; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parser.set_newline_as_trivia(false); + + parser.set_scope_recovery_stack(&[SyntaxKind::FatArrow]); + parse_pat(parser)?; + + if parser.find_and_pop(SyntaxKind::FatArrow, ExpectedKind::Unspecified)? { + parser.bump(); + } + parse_expr(parser) + } +} + +define_scope! { pub(crate) LitExprScope, LitExpr } +impl super::Parse for LitExprScope { + type Error = Infallible; + + /// Caller is expected to verify that the next token is a literal. + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parser.parse(lit::LitScope::default()) + } +} + +define_scope! { PathExprScope{ allow_record_init: bool }, PathExpr } +impl super::Parse for PathExprScope { + type Error = Recovery; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parser.or_recover(|p| { + p.parse(path::PathScope::new(true)).map_err(|_| { + crate::ParseError::Msg( + "expected an expression".into(), + TextRange::empty(p.end_of_prev_token), + ) + }) + })?; + + if parser.current_kind() == Some(SyntaxKind::LBrace) && self.allow_record_init { + self.set_kind(SyntaxKind::RecordInitExpr); + parser.parse(RecordFieldListScope::default())?; + } + Ok(()) + } +} + +define_scope! { RecordFieldListScope, RecordFieldList, (RBrace, Comma) } +impl super::Parse for RecordFieldListScope { + type Error = Recovery; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parse_list( + parser, + true, + SyntaxKind::RecordFieldList, + (SyntaxKind::LBrace, SyntaxKind::RBrace), + |parser| parser.parse(RecordFieldScope::default()), + ) + } +} + +define_scope! 
{ RecordFieldScope, RecordField } +impl super::Parse for RecordFieldScope { + type Error = Recovery; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parser.set_newline_as_trivia(false); + + if matches!( + parser.peek_two(), + (Some(SyntaxKind::Ident), Some(SyntaxKind::Colon)) + ) { + parser.bump_if(SyntaxKind::Ident); + parser.bump_expected(SyntaxKind::Colon); + } + + parse_expr(parser) + } +} + +define_scope! { ParenScope, ParenExpr, (RParen, Comma) } +impl super::Parse for ParenScope { + type Error = Recovery; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parser.bump_expected(SyntaxKind::LParen); + + if parser.bump_if(SyntaxKind::RParen) { + self.set_kind(SyntaxKind::TupleExpr); + return Ok(()); + } + + loop { + if parser.bump_if(SyntaxKind::RParen) { + return Ok(()); + } + parse_expr(parser)?; + parser.expect(&[SyntaxKind::RParen, SyntaxKind::Comma], None)?; + + if parser.bump_if(SyntaxKind::Comma) { + self.set_kind(SyntaxKind::TupleExpr); + continue; + } + break; + } + parser.bump_expected(SyntaxKind::RParen); + Ok(()) + } +} + +define_scope! 
{ ArrayScope, ArrayExpr, (RBracket, Comma, SemiColon) } +impl super::Parse for ArrayScope { + type Error = Recovery; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parser.bump_expected(SyntaxKind::LBracket); + + if parser.bump_if(SyntaxKind::RBracket) { + return Ok(()); + } + + parse_expr(parser)?; + parser.expect( + &[ + SyntaxKind::SemiColon, + SyntaxKind::Comma, + SyntaxKind::RBracket, + ], + None, + )?; + + if parser.bump_if(SyntaxKind::SemiColon) { + self.set_kind(SyntaxKind::ArrayRepExpr); + parse_expr(parser)?; + } else { + while parser.bump_if(SyntaxKind::Comma) { + if parser.bump_if(SyntaxKind::RBracket) { + return Ok(()); + } + + parse_expr(parser)?; + parser.expect(&[SyntaxKind::Comma, SyntaxKind::RBracket], None)?; + } + } + + if parser.find( + SyntaxKind::RBracket, + ExpectedKind::ClosingBracket { + bracket: SyntaxKind::RBracket, + parent: SyntaxKind::ArrayExpr, + }, + )? { + parser.bump(); + } + Ok(()) + } +} diff --git a/crates/parser2/src/parser/func.rs b/crates/parser2/src/parser/func.rs new file mode 100644 index 0000000000..2438a83178 --- /dev/null +++ b/crates/parser2/src/parser/func.rs @@ -0,0 +1,149 @@ +use super::{ + define_scope, + expr_atom::BlockExprScope, + param::{parse_generic_params_opt, parse_where_clause_opt, FuncParamListScope}, + token_stream::TokenStream, + type_::parse_type, + ErrProof, Parser, Recovery, +}; +use crate::{ExpectedKind, SyntaxKind}; + +define_scope! 
{ + pub(crate) FuncScope { + fn_def_scope: FuncDefScope + }, + Func +} + +#[derive(Clone, Copy, Debug)] +pub(crate) enum FuncDefScope { + Normal, + Impl, + TraitDef, + Extern, +} +impl Default for FuncDefScope { + fn default() -> Self { + Self::Normal + } +} + +impl super::Parse for FuncScope { + type Error = Recovery; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parser.bump_expected(SyntaxKind::FnKw); + + match self.fn_def_scope { + FuncDefScope::Normal => parse_normal_fn_def_impl(parser, false), + FuncDefScope::Impl => parse_normal_fn_def_impl(parser, true), + FuncDefScope::TraitDef => parse_trait_fn_def_impl(parser), + FuncDefScope::Extern => parse_extern_fn_def_impl(parser), + } + } +} + +fn parse_normal_fn_def_impl( + parser: &mut Parser, + allow_self: bool, +) -> Result<(), Recovery> { + parser.set_scope_recovery_stack(&[ + SyntaxKind::Ident, + SyntaxKind::Lt, + SyntaxKind::LParen, + SyntaxKind::Arrow, + SyntaxKind::WhereKw, + SyntaxKind::LBrace, + ]); + + if parser.find_and_pop(SyntaxKind::Ident, ExpectedKind::Name(SyntaxKind::Func))? { + parser.bump(); + } + + parser.expect_and_pop_recovery_stack()?; + parse_generic_params_opt(parser, false)?; + + if parser.find_and_pop( + SyntaxKind::LParen, + ExpectedKind::Syntax(SyntaxKind::FuncParamList), + )? { + parser.parse(FuncParamListScope::new(allow_self))?; + } + + parser.expect_and_pop_recovery_stack()?; + if parser.bump_if(SyntaxKind::Arrow) { + parse_type(parser, None)?; + } + + parser.expect_and_pop_recovery_stack()?; + parse_where_clause_opt(parser)?; + + if parser.find_and_pop(SyntaxKind::LBrace, ExpectedKind::Body(SyntaxKind::Func))? 
{ + parser.parse(BlockExprScope::default())?; + } + Ok(()) +} + +fn parse_trait_fn_def_impl( + parser: &mut Parser, +) -> Result<(), Recovery> { + parser.set_scope_recovery_stack(&[ + SyntaxKind::Ident, + SyntaxKind::Lt, + SyntaxKind::LParen, + SyntaxKind::Arrow, + SyntaxKind::WhereKw, + SyntaxKind::LBrace, + ]); + + if parser.find_and_pop(SyntaxKind::Ident, ExpectedKind::Name(SyntaxKind::Func))? { + parser.bump(); + } + + parser.expect_and_pop_recovery_stack()?; + parse_generic_params_opt(parser, false)?; + + if parser.find_and_pop( + SyntaxKind::LParen, + ExpectedKind::Syntax(SyntaxKind::FuncParamList), + )? { + parser.parse(FuncParamListScope::new(true))?; + } + + parser.pop_recovery_stack(); + if parser.bump_if(SyntaxKind::Arrow) { + parse_type(parser, None)?; + } + + parser.pop_recovery_stack(); + parse_where_clause_opt(parser)?; + + if parser.current_kind() == Some(SyntaxKind::LBrace) { + parser.parse(BlockExprScope::default())?; + } + Ok(()) +} + +fn parse_extern_fn_def_impl( + parser: &mut Parser, +) -> Result<(), Recovery> { + parser.set_scope_recovery_stack(&[SyntaxKind::Ident, SyntaxKind::LParen, SyntaxKind::Arrow]); + + if parser.find_and_pop(SyntaxKind::Ident, ExpectedKind::Name(SyntaxKind::Func))? { + parser.bump(); + } + + if parser.find_and_pop( + SyntaxKind::LParen, + ExpectedKind::Syntax(SyntaxKind::FuncParamList), + )? 
{ + parser.parse(FuncParamListScope::new(true))?; + } + + parser.pop_recovery_stack(); + if parser.bump_if(SyntaxKind::Arrow) { + parse_type(parser, None)?; + } + + Ok(()) +} diff --git a/crates/parser2/src/parser/item.rs b/crates/parser2/src/parser/item.rs new file mode 100644 index 0000000000..d41f173226 --- /dev/null +++ b/crates/parser2/src/parser/item.rs @@ -0,0 +1,605 @@ +use std::{cell::Cell, convert::Infallible, rc::Rc}; + +use unwrap_infallible::UnwrapInfallible; + +use super::{ + attr::{self, parse_attr_list}, + define_scope, + expr::parse_expr, + func::FuncDefScope, + param::{parse_generic_params_opt, parse_where_clause_opt, TraitRefScope}, + parse_list, + struct_::RecordFieldDefListScope, + token_stream::{LexicalToken, TokenStream}, + type_::{parse_type, TupleTypeScope}, + use_tree::UseTreeScope, + ErrProof, Parser, Recovery, +}; +use crate::{parser::func::FuncScope, ExpectedKind, SyntaxKind}; + +define_scope! { + #[doc(hidden)] + pub ItemListScope {inside_mod: bool}, + ItemList, + ( + ModKw, + FnKw, + StructKw, + ContractKw, + EnumKw, + TraitKw, + ImplKw, + UseKw, + ConstKw, + ExternKw, + TypeKw, + PubKw, + UnsafeKw, + DocComment, + Pound + ) +} +impl super::Parse for ItemListScope { + type Error = Recovery; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + use crate::SyntaxKind::*; + + if self.inside_mod { + parser.bump_expected(LBrace); + parser.set_scope_recovery_stack(&[RBrace]); + } + + loop { + parser.set_newline_as_trivia(true); + if self.inside_mod && parser.bump_if(RBrace) { + break; + } + if parser.current_kind().is_none() { + if self.inside_mod { + parser.add_error(crate::ParseError::expected( + &[RBrace], + Some(ExpectedKind::ClosingBracket { + bracket: RBrace, + parent: Mod, + }), + parser.current_pos, + )); + } + break; + } + + let ok = parser.parse_ok(ItemScope::default())?; + if parser.current_kind().is_none() || (self.inside_mod && parser.bump_if(RBrace)) { + break; + } + if ok { + 
parser.set_newline_as_trivia(false); + if parser.find( + Newline, + ExpectedKind::Separator { + separator: Newline, + element: Item, + }, + )? { + parser.bump(); + } + } + } + Ok(()) + } +} + +define_scope! { + #[doc(hidden)] + pub(super) ItemScope, + Item +} +impl super::Parse for ItemScope { + type Error = Recovery; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + use crate::SyntaxKind::*; + + let mut checkpoint = attr::parse_attr_list(parser)?; + let modifier_scope = ItemModifierScope::default(); + let modifier = match parser.current_kind() { + Some(kind) if kind.is_modifier_head() => { + let modifier_checkpoint = parser.parse_cp(modifier_scope.clone(), None).unwrap(); + checkpoint.get_or_insert(modifier_checkpoint); + modifier_scope.kind.get() + } + _ => ModifierKind::None, + }; + + if modifier.is_unsafe() && parser.current_kind() != Some(FnKw) { + parser.error("expected `fn` after `unsafe` keyword"); + } else if modifier.is_pub() && matches!(parser.current_kind(), Some(ImplKw | ExternKw)) { + let error_msg = format!( + "`pub` can't be used for `{}`", + parser.current_token().unwrap().text() + ); + parser.error(&error_msg); + } + + parser.expect( + &[ + ModKw, FnKw, StructKw, ContractKw, EnumKw, TraitKw, ImplKw, UseKw, ConstKw, + ExternKw, TypeKw, + ], + Some(ExpectedKind::Syntax(SyntaxKind::Item)), + )?; + + match parser.current_kind() { + Some(ModKw) => parser.parse_cp(ModScope::default(), checkpoint), + Some(FnKw) => parser.parse_cp(FuncScope::default(), checkpoint), + Some(StructKw) => parser.parse_cp(super::struct_::StructScope::default(), checkpoint), + Some(ContractKw) => parser.parse_cp(ContractScope::default(), checkpoint), + Some(EnumKw) => parser.parse_cp(EnumScope::default(), checkpoint), + Some(TraitKw) => parser.parse_cp(TraitScope::default(), checkpoint), + Some(ImplKw) => parser.parse_cp(ImplScope::default(), checkpoint), + Some(UseKw) => parser.parse_cp(UseScope::default(), checkpoint), + Some(ConstKw) => 
parser.parse_cp(ConstScope::default(), checkpoint), + Some(ExternKw) => parser.parse_cp(ExternScope::default(), checkpoint), + Some(TypeKw) => parser.parse_cp(TypeAliasScope::default(), checkpoint), + _ => unreachable!(), + }?; + + Ok(()) + } +} + +define_scope! { + ItemModifierScope {kind: Rc>}, + ItemModifier +} +impl super::Parse for ItemModifierScope { + type Error = Infallible; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + let mut modifier_kind = ModifierKind::None; + + loop { + match parser.current_kind() { + Some(kind) if kind.is_modifier_head() => { + let new_kind = modifier_kind.union(kind); + if new_kind == modifier_kind { + parser.unexpected_token_error(format!( + "duplicate {} modifier", + kind.describe(), + )); + } else if kind == SyntaxKind::PubKw && modifier_kind.is_unsafe() { + parser.unexpected_token_error( + "`pub` modifier must come before `unsafe`".into(), + ); + } else { + parser.bump(); + } + modifier_kind = new_kind; + } + _ => break, + } + } + Ok(()) + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +enum ModifierKind { + None, + Pub, + Unsafe, + PubAndUnsafe, +} +impl Default for ModifierKind { + fn default() -> Self { + Self::None + } +} +impl ModifierKind { + fn union(&self, kind: SyntaxKind) -> ModifierKind { + match kind { + SyntaxKind::PubKw => { + if self.is_unsafe() { + Self::PubAndUnsafe + } else { + Self::Pub + } + } + SyntaxKind::UnsafeKw => { + if self.is_pub() { + Self::PubAndUnsafe + } else { + Self::Unsafe + } + } + _ => unreachable!(), + } + } + + fn is_pub(&self) -> bool { + matches!(self, Self::Pub | Self::PubAndUnsafe) + } + + fn is_unsafe(&self) -> bool { + matches!(self, Self::Unsafe | Self::PubAndUnsafe) + } +} + +define_scope! 
{ ModScope, Mod } +impl super::Parse for ModScope { + type Error = Recovery; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parser.bump_expected(SyntaxKind::ModKw); + + parser.set_scope_recovery_stack(&[ + SyntaxKind::Ident, + SyntaxKind::LBrace, + SyntaxKind::RBrace, + ]); + + if parser.find_and_pop(SyntaxKind::Ident, ExpectedKind::Name(SyntaxKind::Mod))? { + parser.bump(); + } + if parser.find_and_pop(SyntaxKind::LBrace, ExpectedKind::Body(SyntaxKind::Mod))? { + parser.parse(ItemListScope::new(true))?; + } + Ok(()) + } +} + +define_scope! { ContractScope, Contract } +impl super::Parse for ContractScope { + type Error = Recovery; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parser.bump_expected(SyntaxKind::ContractKw); + + parser.set_scope_recovery_stack(&[SyntaxKind::Ident, SyntaxKind::LBrace]); + + if parser.find_and_pop(SyntaxKind::Ident, ExpectedKind::Name(SyntaxKind::Contract))? { + parser.bump(); + } + if parser.find_and_pop(SyntaxKind::LBrace, ExpectedKind::Body(SyntaxKind::Contract))? { + parser.parse(RecordFieldDefListScope::default())?; + } + Ok(()) + } +} + +define_scope! { EnumScope, Enum } +impl super::Parse for EnumScope { + type Error = Recovery; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parser.bump_expected(SyntaxKind::EnumKw); + + parser.set_scope_recovery_stack(&[ + SyntaxKind::Ident, + SyntaxKind::Lt, + SyntaxKind::WhereKw, + SyntaxKind::LBrace, + ]); + + if parser.find_and_pop(SyntaxKind::Ident, ExpectedKind::Name(SyntaxKind::Enum))? { + parser.bump(); + } + + parser.pop_recovery_stack(); + parse_generic_params_opt(parser, false)?; + + parser.pop_recovery_stack(); + parse_where_clause_opt(parser)?; + + if parser.find_and_pop(SyntaxKind::LBrace, ExpectedKind::Body(SyntaxKind::Enum))? { + parser.parse(VariantDefListScope::default())?; + } + Ok(()) + } +} + +define_scope! 
{ VariantDefListScope, VariantDefList, (Comma, RBrace) } +impl super::Parse for VariantDefListScope { + type Error = Recovery; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parse_list( + parser, + true, + SyntaxKind::VariantDefList, + (SyntaxKind::LBrace, SyntaxKind::RBrace), + |parser| parser.parse(VariantDefScope::default()), + ) + } +} + +define_scope! { VariantDefScope, VariantDef } +impl super::Parse for VariantDefScope { + type Error = Recovery; + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parse_attr_list(parser)?; + parser.bump_or_recover(SyntaxKind::Ident, "expected ident for the variant name")?; + + if parser.current_kind() == Some(SyntaxKind::LParen) { + parser.parse(TupleTypeScope::default())?; + } else if parser.current_kind() == Some(SyntaxKind::LBrace) { + parser.parse(RecordFieldDefListScope::default())?; + } + Ok(()) + } +} + +define_scope! { TraitScope, Trait } +impl super::Parse for TraitScope { + type Error = Recovery; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parser.bump_expected(SyntaxKind::TraitKw); + parser.set_scope_recovery_stack(&[ + SyntaxKind::Ident, + SyntaxKind::Lt, + SyntaxKind::Colon, + SyntaxKind::WhereKw, + SyntaxKind::LBrace, + ]); + if parser.find_and_pop(SyntaxKind::Ident, ExpectedKind::Name(SyntaxKind::Trait))? { + parser.bump(); + } + + parser.expect_and_pop_recovery_stack()?; + parse_generic_params_opt(parser, false)?; + + parser.expect_and_pop_recovery_stack()?; + if parser.current_kind() == Some(SyntaxKind::Colon) { + parser.parse(SuperTraitListScope::default())?; + } + + parser.expect_and_pop_recovery_stack()?; + parse_where_clause_opt(parser)?; + + if parser.find(SyntaxKind::LBrace, ExpectedKind::Body(SyntaxKind::Trait))? { + parser.parse(TraitItemListScope::default())?; + } + Ok(()) + } +} + +define_scope! 
{SuperTraitListScope, SuperTraitList, (Plus)} +impl super::Parse for SuperTraitListScope { + type Error = Recovery; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parser.bump_expected(SyntaxKind::Colon); + parser.parse(TraitRefScope::default())?; + while parser.bump_if(SyntaxKind::Plus) { + parser.parse(TraitRefScope::default())?; + } + Ok(()) + } +} + +define_scope! { TraitItemListScope, TraitItemList, (RBrace, Newline, FnKw) } +impl super::Parse for TraitItemListScope { + type Error = Recovery; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parse_fn_item_block(parser, false, FuncDefScope::TraitDef) + } +} + +define_scope! { ImplScope, Impl, (ForKw, LBrace) } +impl super::Parse for ImplScope { + type Error = Recovery; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parser.bump_expected(SyntaxKind::ImplKw); + + parse_generic_params_opt(parser, false)?; + + let is_impl_trait = parser.dry_run(|parser| { + parser.parse(TraitRefScope::default()).is_ok() + && parser + .find(SyntaxKind::ForKw, ExpectedKind::Unspecified) + .is_ok_and(|x| x) + }); + + if is_impl_trait { + self.set_kind(SyntaxKind::ImplTrait); + parser.set_scope_recovery_stack(&[ + SyntaxKind::ForKw, + SyntaxKind::WhereKw, + SyntaxKind::LBrace, + ]); + + parser.parse(TraitRefScope::default())?; + if parser.find_and_pop(SyntaxKind::ForKw, ExpectedKind::Unspecified)? { + parser.bump(); + } + } else { + parser.set_scope_recovery_stack(&[SyntaxKind::WhereKw, SyntaxKind::LBrace]); + } + + parse_type(parser, None)?; + + parser.expect_and_pop_recovery_stack()?; + parse_where_clause_opt(parser)?; + + if parser.find_and_pop( + SyntaxKind::LBrace, + ExpectedKind::Body(SyntaxKind::ImplTrait), + )? { + if is_impl_trait { + parser.parse(ImplTraitItemListScope::default())?; + } else { + parser.parse(ImplItemListScope::default())?; + } + } + Ok(()) + } +} + +define_scope! 
{ ImplTraitItemListScope, ImplTraitItemList, (RBrace, FnKw) } +impl super::Parse for ImplTraitItemListScope { + type Error = Recovery; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parse_fn_item_block(parser, false, FuncDefScope::Impl) + } +} + +define_scope! { ImplItemListScope, ImplItemList, (RBrace, FnKw) } +impl super::Parse for ImplItemListScope { + type Error = Recovery; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parse_fn_item_block(parser, true, FuncDefScope::Impl) + } +} + +define_scope! { UseScope, Use } +impl super::Parse for UseScope { + type Error = Recovery; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parser.bump_expected(SyntaxKind::UseKw); + parser.parse(UseTreeScope::default()) + } +} + +define_scope! { ConstScope, Const } +impl super::Parse for ConstScope { + type Error = Recovery; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parse_attr_list(parser)?; + + parser.bump_expected(SyntaxKind::ConstKw); + parser.set_newline_as_trivia(false); + parser.set_scope_recovery_stack(&[SyntaxKind::Ident, SyntaxKind::Colon, SyntaxKind::Eq]); + + if parser.find_and_pop(SyntaxKind::Ident, ExpectedKind::Name(SyntaxKind::Const))? { + parser.bump(); + } + if parser.find_and_pop( + SyntaxKind::Colon, + ExpectedKind::TypeSpecifier(SyntaxKind::Const), + )? { + parser.bump(); + parse_type(parser, None)?; + } + + parser.set_newline_as_trivia(true); + if parser.find_and_pop(SyntaxKind::Eq, ExpectedKind::Unspecified)? { + parser.bump(); + parse_expr(parser)?; + } + Ok(()) + } +} + +define_scope! { ExternScope, Extern } +impl super::Parse for ExternScope { + type Error = Recovery; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parser.bump_expected(SyntaxKind::ExternKw); + + parser.set_scope_recovery_stack(&[SyntaxKind::LBrace]); + if parser.find(SyntaxKind::LBrace, ExpectedKind::Body(SyntaxKind::Extern))? 
{ + parser.parse(ExternItemListScope::default())?; + } + Ok(()) + } +} + +define_scope! { ExternItemListScope, ExternItemList, (PubKw, UnsafeKw, FnKw) } +impl super::Parse for ExternItemListScope { + type Error = Recovery; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parse_fn_item_block(parser, true, FuncDefScope::Extern) + } +} + +define_scope! { TypeAliasScope, TypeAlias } +impl super::Parse for TypeAliasScope { + type Error = Recovery; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parser.set_newline_as_trivia(false); + parser.bump_expected(SyntaxKind::TypeKw); + + parser.set_scope_recovery_stack(&[SyntaxKind::Ident, SyntaxKind::Lt, SyntaxKind::Eq]); + if parser.find_and_pop(SyntaxKind::Ident, ExpectedKind::Name(SyntaxKind::TypeAlias))? { + parser.bump(); + } + + parser.pop_recovery_stack(); + parse_generic_params_opt(parser, true)?; + + if parser.find_and_pop(SyntaxKind::Eq, ExpectedKind::Unspecified)? { + parser.bump(); + parse_type(parser, None)?; + } + Ok(()) + } +} + +/// Currently, `impl` block, `impl trait` block, `trait` block and `extern` +/// block only allow `fn` as their items. This function is used to parse the +/// `fn` item in these blocks. NOTE: This function will be invalidated when +/// these block have their own allowed items, eg. `trait` block will allow +/// `type` item. 
+fn parse_fn_item_block( + parser: &mut Parser, + allow_modifier: bool, + fn_def_scope: FuncDefScope, +) -> Result<(), Recovery> { + parser.bump_expected(SyntaxKind::LBrace); + loop { + parser.set_newline_as_trivia(true); + if matches!(parser.current_kind(), Some(SyntaxKind::RBrace) | None) { + break; + } + + let mut checkpoint = attr::parse_attr_list(parser)?; + + let is_modifier = |kind: Option| match kind { + Some(kind) => kind.is_modifier_head(), + _ => false, + }; + + if is_modifier(parser.current_kind()) { + if allow_modifier { + let modifier_checkpoint = parser + .parse_cp(ItemModifierScope::default(), None) + .unwrap_infallible(); + checkpoint.get_or_insert(modifier_checkpoint); + } else { + while is_modifier(parser.current_kind()) { + let kind = parser.current_kind().unwrap(); + parser.unexpected_token_error(format!( + "{} modifier is not allowed in this block", + kind.describe() + )); + } + } + } + + match parser.current_kind() { + Some(SyntaxKind::FnKw) => { + parser.parse_cp(FuncScope::new(fn_def_scope), checkpoint)?; + + parser.set_newline_as_trivia(false); + parser.expect(&[SyntaxKind::Newline, SyntaxKind::RBrace], None)?; + } + _ => { + let proof = parser.error_msg_on_current_token("only `fn` is allowed in this block"); + parser.try_recover().map_err(|r| r.add_err_proof(proof))?; + } + } + } + + parser.bump_or_recover(SyntaxKind::RBrace, "expected `}` to close the block") +} diff --git a/crates/parser2/src/parser/lit.rs b/crates/parser2/src/parser/lit.rs new file mode 100644 index 0000000000..a2748796c0 --- /dev/null +++ b/crates/parser2/src/parser/lit.rs @@ -0,0 +1,24 @@ +use std::convert::Infallible; + +use crate::SyntaxKind; + +use super::{define_scope, token_stream::TokenStream, Parser}; + +define_scope! { pub(crate) LitScope, Lit } +impl super::Parse for LitScope { + type Error = Infallible; + + /// Caller is expected to verify that the next token is a literal. 
+ fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + assert!(is_lit(parser.current_kind().unwrap())); + parser.bump(); + Ok(()) + } +} + +pub fn is_lit(kind: SyntaxKind) -> bool { + matches!( + kind, + SyntaxKind::Int | SyntaxKind::TrueKw | SyntaxKind::FalseKw | SyntaxKind::String + ) +} diff --git a/crates/parser2/src/parser/mod.rs b/crates/parser2/src/parser/mod.rs new file mode 100644 index 0000000000..801d81f686 --- /dev/null +++ b/crates/parser2/src/parser/mod.rs @@ -0,0 +1,908 @@ +use std::{collections::VecDeque, convert::Infallible}; + +pub(crate) use item::ItemListScope; +use smallvec::SmallVec; + +use self::token_stream::{BackTrackableTokenStream, LexicalToken, TokenStream}; +use crate::{syntax_node::SyntaxNode, ExpectedKind, GreenNode, ParseError, SyntaxKind, TextRange}; + +pub mod token_stream; + +pub use pat::parse_pat; + +pub mod attr; +pub mod expr; +pub mod func; +pub mod item; +pub mod lit; +pub mod param; +pub mod pat; +pub mod path; +pub mod stmt; +pub mod struct_; +pub mod type_; +pub mod use_tree; + +mod expr_atom; + +type Checkpoint = rowan::Checkpoint; + +/// Parser to build a rowan syntax tree. +pub struct Parser { + /// Token stream to parse. + stream: BackTrackableTokenStream, + + builder: rowan::GreenNodeBuilder<'static>, + parents: Vec, + errors: Vec, + + next_trivias: VecDeque, + /// if `is_newline_trivia` is `true`, `Newline` is also regarded as a trivia + /// token. + is_newline_trivia: bool, + + current_pos: rowan::TextSize, + end_of_prev_token: rowan::TextSize, + /// The dry run states which holds the each state of the parser when it + /// enters dry run mode. + dry_run_states: Vec>, +} + +impl Parser { + /// Create a parser with the given token stream. 
+ pub fn new(stream: S) -> Self { + Self { + stream: BackTrackableTokenStream::new(stream), + builder: rowan::GreenNodeBuilder::new(), + parents: Vec::new(), + errors: Vec::new(), + current_pos: rowan::TextSize::from(0), + end_of_prev_token: rowan::TextSize::from(0), + is_newline_trivia: true, + next_trivias: VecDeque::new(), + dry_run_states: Vec::new(), + } + } + + /// Returns the current token of the parser. + pub fn current_token(&mut self) -> Option { + self.peek_non_trivia() + } + + /// Returns the current non-trivia token kind of the parser. + pub fn current_kind(&mut self) -> Option { + self.current_token().map(|tok| tok.syntax_kind()) + } + + /// Sets the newline kind as trivia if `is_trivia` is `true`. Otherwise, the + /// newline kind is not regarded as a trivia. + /// + /// Returns previous value. + pub fn set_newline_as_trivia(&mut self, is_trivia: bool) -> bool { + std::mem::replace(&mut self.is_newline_trivia, is_trivia) + } + + /// Finish the parsing and return the GreeNode. + pub fn finish(self) -> (GreenNode, Vec) { + debug_assert!(self.parents.is_empty()); + debug_assert!(!self.is_dry_run()); + + (self.builder.finish(), self.errors) + } + + /// Finish the parsing and return the SyntaxNode. + /// **NOTE:** This method is mainly used for testing. 
+ pub fn finish_to_node(self) -> (SyntaxNode, Vec) { + let (green_node, errors) = self.finish(); + (SyntaxNode::new_root(green_node), errors) + } + + pub fn set_scope_recovery_stack(&mut self, tokens: &[SyntaxKind]) { + let rec = self.scope_aux_recovery(); + rec.clear(); + rec.extend(tokens.iter().rev().copied()); + } + + pub fn pop_recovery_stack(&mut self) { + self.scope_aux_recovery().pop(); + } + + fn scope_aux_recovery(&mut self) -> &mut SmallVec<[SyntaxKind; 4]> { + &mut self.parents.last_mut().unwrap().aux_recovery_tokens + } + + pub fn expect_and_pop_recovery_stack(&mut self) -> Result<(), Recovery> { + let current = self.current_kind(); + let r = if current.is_some() && self.scope_aux_recovery().contains(¤t.unwrap()) { + Ok(()) + } else { + let pos = self.current_pos; + let (index, unexpected) = self.recover(); + let proof = if unexpected.is_some() { + ErrProof(()) + } else { + let err = ParseError::expected(self.scope_aux_recovery(), None, pos); + self.add_error(err) + }; + self.allow_local_recovery(Err(Recovery(index, proof))) + }; + self.pop_recovery_stack(); + r + } + + pub fn expect( + &mut self, + expected: &[SyntaxKind], + kind: Option, + ) -> Result<(), Recovery> { + let current = self.current_kind(); + + let aux = self.scope_aux_recovery(); + let truncate_to = aux.len(); + aux.extend_from_slice(expected); + + let res = if current.is_some() && aux.contains(¤t.unwrap()) { + Ok(()) + } else { + let pos = self.current_pos; + let (index, unexpected) = self.recover(); + let proof = if unexpected.is_some() { + ErrProof(()) + } else { + self.add_error(ParseError::expected(expected, kind, pos)) + }; + self.pop_recovery_stack(); + self.allow_local_recovery(Err(Recovery(index, proof))) + }; + self.scope_aux_recovery().truncate(truncate_to); + + res + } + + /// Adds the `recovery_tokens` as a temporary recovery token set. + /// These tokens are used as a recovery token set in addition to scope's + /// recovery token set. 
+ /// + /// This is useful when you want to specify auxiliary recovery tokens which + /// are valid only in a limited part of the scope. + pub fn with_recovery_tokens(&mut self, f: F, recovery_tokens: &[SyntaxKind]) -> R + where + F: FnOnce(&mut Self) -> R, + { + let truncate_to = self.scope_aux_recovery().len(); + self.scope_aux_recovery().extend_from_slice(recovery_tokens); + let r = f(self); + self.scope_aux_recovery().truncate(truncate_to); + r + } + + /// Invoke the scope to parse. The scope is wrapped up by the node specified + /// by the scope. + /// + /// # Arguments + /// * If the `checkpoint` is `Some`, the marked branch is wrapped up by the + /// node. + /// * If the `checkpoint` is `None`, the current branch is wrapped up by the + /// node. + /// + /// # Returns + /// * If the parsing succeeds, the first element of the return value is + /// `true`. otherwise, the first element is `false`. + /// * The second element of the return value is the checkpoint of the start + /// of the node. 
+ pub fn parse_cp( + &mut self, + mut scope: T, + checkpoint: Option, + ) -> Result + where + T: Parse + 'static, + E: Recoverable, + { + let checkpoint = self.enter(scope.clone(), checkpoint); + let start_checkpoint = self.checkpoint(); + let res = scope.parse(self); + self.leave(checkpoint); + let res = self.allow_local_recovery(res); + res.map(|_| start_checkpoint) + } + + pub fn parse(&mut self, scope: T) -> Result<(), E> + where + T: Parse + 'static, + E: Recoverable, + { + self.parse_ok(scope).map(|_| ()) + } + + pub fn parse_ok(&mut self, mut scope: T) -> Result + where + T: Parse + 'static, + E: Recoverable, + { + let checkpoint = self.enter(scope.clone(), None); + let res = scope.parse(self); + self.leave(checkpoint); + let ok = res.is_ok(); + let res = self.allow_local_recovery(res); + res.map(|_| ok) + } + + pub fn parses_without_error(&mut self, mut scope: T) -> bool + where + T: Parse + 'static, + E: Recoverable, + { + let checkpoint = self.enter(scope.clone(), None); + let ok = scope.parse(self).is_ok(); + self.leave(checkpoint); + ok && !self.dry_run_states.last().unwrap().err + } + + pub fn or_recover(&mut self, f: F) -> Result<(), Recovery> + where + F: FnOnce(&mut Self) -> Result<(), ParseError>, + { + if let Err(err) = f(self) { + let proof = self.add_error(err); + self.try_recover().map_err(|r| r.add_err_proof(proof))?; + } + Ok(()) + } + + #[doc(hidden)] + /// Enter the scope and return the checkpoint. The checkpoint branch will be + /// wrapped up by the scope's node when [`leave`] is called. + // NOTE: This method is limited to testing and internal usage. + pub fn enter(&mut self, scope: T, checkpoint: Option) -> Checkpoint + where + T: ParsingScope + 'static, + { + // Ensure the leading trivias are added to the parent node. + if !self.parents.is_empty() { + self.bump_trivias(); + } + + self.parents + .push(ScopeEntry::new(Box::new(scope), self.is_newline_trivia)); + // `is_newline_trivia` is always `true` when entering a scope. 
+ self.is_newline_trivia = true; + checkpoint.unwrap_or_else(|| self.checkpoint()) + } + + #[doc(hidden)] + /// Leave the scope and wrap up the checkpoint by the scope's node. + /// Returns `is_err` value for exited scope. + // NOTE: This method is limited to testing and internal usage. + pub fn leave(&mut self, checkpoint: Checkpoint) -> bool { + let scope = self.parents.pop().unwrap(); + self.is_newline_trivia = scope.is_newline_trivia; + + // Ensure the trailing trivias are added to the current node if the current + // scope is the root. + if self.parents.is_empty() { + self.bump_trivias() + } + + if !self.is_dry_run() { + self.builder + .start_node_at(checkpoint, scope.scope.syntax_kind().into()); + self.builder.finish_node(); + } else { + self.dry_run_states.last_mut().unwrap().err |= scope.is_err; + } + scope.is_err + } + + pub fn add_error(&mut self, err: ParseError) -> ErrProof { + self.parents.last_mut().unwrap().is_err = true; + self.errors.push(err); + ErrProof(()) + } + + /// Add `msg` as an error to the error list, then bumps consecutive tokens + /// until a token in the recovery set is found. + /// + /// * If checkpoint is `Some`, the marked branch is wrapped up by an error + /// node. + /// * If checkpoint is `None`, the current branch is wrapped up by an error + /// node. + pub fn error_and_recover(&mut self, msg: &str) -> Result<(), Recovery> { + let proof = self.add_error(ParseError::Msg( + msg.into(), + TextRange::empty(self.end_of_prev_token), + )); + self.try_recover().map_err(|r| r.add_err_proof(proof)) + } + + /// Runs the parser in the dry run mode. + /// + /// Any changes to the parser state will be reverted. + pub fn dry_run(&mut self, f: F) -> R + where + F: FnOnce(&mut Self) -> R, + { + // Enters the dry run mode. 
+ self.stream.set_bt_point(); + self.dry_run_states.push(DryRunState { + pos: self.current_pos, + end_of_prev_token: self.end_of_prev_token, + err_num: self.errors.len(), + next_trivias: self.next_trivias.clone(), + err: false, + }); + + let r = f(self); + + // Leaves the dry run mode. + self.stream.backtrack(); + let state = self.dry_run_states.pop().unwrap(); + self.errors.truncate(state.err_num); + self.current_pos = state.pos; + self.end_of_prev_token = state.end_of_prev_token; + self.next_trivias = state.next_trivias; + + r + } + + /// Bumps the current token and its leading trivias. + pub fn bump(&mut self) { + // Bump leading trivias. + self.bump_trivias(); + + self.bump_raw(); + self.end_of_prev_token = self.current_pos; + } + + /// Bumps the current token if the current token is the `expected` kind. + /// + /// # Panics + /// Panics If the current token is not the `expected` kind. + pub fn bump_expected(&mut self, expected: SyntaxKind) { + assert_eq!(self.current_kind(), Some(expected), "expected {expected:?}"); + self.bump(); + } + + /// Bumps the current token if the current token is the `expected` kind. + /// Return `true` if the current token is the `expected` kind. 
+ pub fn bump_if(&mut self, expected: SyntaxKind) -> bool { + if self.current_kind() == Some(expected) { + self.bump(); + true + } else { + false + } + } + + pub fn find( + &mut self, + kind: SyntaxKind, + err: ExpectedKind, + ) -> Result> { + self.scope_aux_recovery().push(kind); + self.find_and_pop(kind, err) + } + + pub fn find_and_pop( + &mut self, + kind: SyntaxKind, + err: ExpectedKind, + ) -> Result> { + debug_assert_eq!(self.scope_aux_recovery().last(), Some(&kind)); + + let r = if self.current_kind() == Some(kind) { + Ok(true) + } else { + let pos = self.current_pos; + let r = self.try_recover(); + if self.current_kind() == Some(kind) { + Ok(true) + } else { + let proof = self.add_error(ParseError::expected(&[kind], Some(err), pos)); + r.map(|_| false).map_err(|rec| rec.add_err_proof(proof)) + } + }; + self.scope_aux_recovery().pop(); + r + } + + pub fn try_recover(&mut self) -> Result<(), Recovery<()>> { + let (index, _) = self.recover(); + self.allow_local_recovery(Err(Recovery(index, ()))) + } + + /// Consumes tokens until a recovery token is found, and reports an error on + /// any unexpected tokens. + /// Returns the index of the scope that matched the recovery token, + /// and the total string length of the unexpected tokens. 
+ fn recover(&mut self) -> (Option, Option) { + let mut unexpected = None; + let mut match_scope_index = None; + while let Some(kind) = self.current_kind() { + if let Some((scope_index, _)) = self + .parents + .iter() + .enumerate() + .rev() + .find(|(_i, scope)| scope.is_recovery_match(kind)) + { + match_scope_index = Some(scope_index); + break; + } + + if unexpected.is_none() { + if !self.parents.is_empty() { + self.bump_trivias(); + } + unexpected = Some((self.current_pos, self.checkpoint())); + } + self.bump(); + } + + if let Some((start_pos, checkpoint)) = unexpected { + if !self.is_dry_run() { + self.builder + .start_node_at(checkpoint, SyntaxKind::Error.into()); + self.builder.finish_node(); + + self.add_error(ParseError::Unexpected( + format!( + "unexpected syntax while parsing {}", + self.parents.last().unwrap().scope.syntax_kind().describe() + ), + TextRange::new(start_pos, self.current_pos), + )); + } else { + self.dry_run_states.last_mut().unwrap().err = true; + } + } + + ( + match_scope_index.map(ScopeIndex), + unexpected.map(|(start_pos, _)| start_pos), + ) + } + + fn allow_local_recovery(&self, r: Result<(), E>) -> Result<(), E> { + match r { + Ok(()) => Ok(()), + Err(e) if e.is_local_recovery(self) => Ok(()), + _ => r, + } + } + + fn is_current_scope(&self, index: ScopeIndex) -> bool { + index.0 + 1 == self.parents.len() + } + + /// Bumps the current token if the current token is the `expected` kind. + /// Otherwise, reports an error and proceeds the parser to the recovery + /// tokens. + pub fn bump_or_recover( + &mut self, + expected: SyntaxKind, + msg: &str, + ) -> Result<(), Recovery> { + if !self.bump_if(expected) { + let proof = self.add_error(ParseError::Msg( + msg.into(), + TextRange::empty(self.current_pos), + )); + self.try_recover().map_err(|r| r.add_err_proof(proof)) + } else { + Ok(()) + } + } + + fn checkpoint(&mut self) -> Checkpoint { + self.builder.checkpoint() + } + + /// Bumps the current token and + /// current branch. 
+ fn bump_raw(&mut self) { + let tok = match self.next_trivias.pop_front() { + Some(tok) => tok, + None => self.stream.next().unwrap(), + }; + + self.current_pos += rowan::TextSize::of(tok.text()); + if !self.is_dry_run() { + self.builder.token(tok.syntax_kind().into(), tok.text()); + } + } + + fn bump_trivias(&mut self) { + // Bump trivias. + loop { + match self.peek_raw() { + Some(tok) if self.is_trivia(tok.syntax_kind()) => self.bump_raw(), + _ => break, + } + } + } + + /// Peek the next non-trivia token. + fn peek_non_trivia(&mut self) -> Option { + if !self.is_newline_trivia { + for tok in &self.next_trivias { + if tok.syntax_kind() == SyntaxKind::Newline { + return Some(tok.clone()); + } + } + } + + while let Some(next) = self.stream.peek().map(|tok| tok.syntax_kind()) { + if self.is_trivia(next) { + let next = self.stream.next().unwrap(); + self.next_trivias.push_back(next); + continue; + } else { + return self.stream.peek().cloned(); + } + } + + None + } + + fn peek_raw(&mut self) -> Option { + if let Some(tok) = self.next_trivias.front() { + Some(tok.clone()) + } else { + self.stream.peek().cloned() + } + } + + /// Skip trivias (and newlines), then peek the next three tokens. + pub fn peek_three(&mut self) -> (Option, Option, Option) { + self.stream.set_bt_point(); + + while let Some(next) = self.stream.peek().map(|tok| tok.syntax_kind()) { + if !(next.is_trivia() || next == SyntaxKind::Newline) { + break; + } + self.stream.next(); + } + + let tokens = ( + self.stream.next().map(|t| t.syntax_kind()), + self.stream.next().map(|t| t.syntax_kind()), + self.stream.next().map(|t| t.syntax_kind()), + ); + + self.stream.backtrack(); + tokens + } + + /// Skip trivias, then peek the next two tokens. + pub fn peek_two(&mut self) -> (Option, Option) { + let (a, b, _) = self.peek_three(); + (a, b) + } + + /// Add the `msg` to the error list, at `current_pos`. 
+ fn error(&mut self, msg: &str) -> ErrProof { + let pos = self.current_pos; + self.errors + .push(ParseError::Msg(msg.into(), TextRange::new(pos, pos))); + ErrProof(()) + } + + /// Add the `msg` to the error list, on `current_token()`. + /// Bumps trivias. + fn error_msg_on_current_token(&mut self, msg: &str) -> ErrProof { + self.bump_trivias(); + let start = self.current_pos; + let end = if let Some(current_token) = self.current_token() { + start + current_token.text_size() + } else { + start + }; + + self.add_error(ParseError::Msg(msg.into(), TextRange::new(start, end))) + } + + /// Wrap the current token in a `SyntaxKind::Error`, and add a + /// `ParseError::Unexpected`. + fn unexpected_token_error(&mut self, msg: String) { + let checkpoint = self.enter(ErrorScope::default(), None); + + let start_pos = self.current_pos; + self.bump(); + + self.add_error(ParseError::Unexpected( + msg, + TextRange::new(start_pos, self.current_pos), + )); + self.leave(checkpoint); + } + + /// Returns `true` if the parser is in the dry run mode. + fn is_dry_run(&self) -> bool { + !self.dry_run_states.is_empty() + } + + fn is_trivia(&self, kind: SyntaxKind) -> bool { + kind.is_trivia() || (self.is_newline_trivia && kind == SyntaxKind::Newline) + } +} + +pub trait ParsingScope { + /// Returns the recovery method of the current scope. 
+ fn recovery_tokens(&self) -> &[SyntaxKind]; + + fn syntax_kind(&self) -> SyntaxKind; +} + +pub trait Parse: ParsingScope + Clone { + type Error; + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error>; +} + +pub trait ParseInfalible: ParsingScope + Clone { + fn parse(&mut self, parser: &mut Parser); +} + +#[derive(Debug, Copy, Clone)] +pub struct ScopeIndex(usize); +#[derive(Debug, Copy, Clone)] +pub struct Recovery(Option, T); +impl Recovery<()> { + pub fn add_err_proof(self, proof: ErrProof) -> Recovery { + Recovery(self.0, proof) + } +} + +#[derive(Debug)] +pub struct ErrProof(()); + +pub trait Recoverable { + fn is_local_recovery(&self, _parser: &Parser) -> bool { + false + } +} +impl Recoverable for ParseError {} +impl Recoverable for Infallible {} +impl Recoverable for Recovery { + fn is_local_recovery(&self, parser: &Parser) -> bool { + self.0 + .as_ref() + .map(|i| parser.is_current_scope(*i)) + .unwrap_or(false) + } +} + +impl From for ErrProof { + fn from(_: Infallible) -> ErrProof { + ErrProof(()) + } +} + +impl From> for Recovery { + fn from(recovery: Recovery) -> Self { + Self(recovery.0, recovery.1.into()) + } +} + +struct DryRunState { + /// The text position is the position when the dry run started. + pos: rowan::TextSize, + end_of_prev_token: rowan::TextSize, + /// The number of errors when the dry run started. + err_num: usize, + /// The stored trivias when the dry run started. 
+ next_trivias: VecDeque, + err: bool, +} + +struct ScopeEntry { + scope: Box, + is_newline_trivia: bool, + is_err: bool, + aux_recovery_tokens: SmallVec<[SyntaxKind; 4]>, +} +impl ScopeEntry { + fn new(scope: Box, is_newline_trivia: bool) -> Self { + Self { + scope, + is_newline_trivia, + is_err: false, + aux_recovery_tokens: SmallVec::new(), + } + } + + fn is_recovery_match(&self, kind: SyntaxKind) -> bool { + self.scope.recovery_tokens().contains(&kind) || self.aux_recovery_tokens.contains(&kind) + } +} + +impl std::fmt::Debug for ScopeEntry { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("ScopeEntry") + .field("scope", &self.scope.syntax_kind()) + .field("is_newline_trivia", &self.is_newline_trivia) + .field("is_err", &self.is_err) + .field("aux_recovery_tokens", &self.aux_recovery_tokens) + .finish() + } +} + +trait TextSize { + fn text_size(&self) -> rowan::TextSize; +} + +impl TextSize for T +where + T: LexicalToken, +{ + fn text_size(&self) -> rowan::TextSize { + rowan::TextSize::of(self.text()) + } +} + +define_scope! { ErrorScope, Error } +define_scope! { pub RootScope, Root } + +macro_rules! define_scope { + ( + $(#[$attrs: meta])* + $visibility: vis $scope_name: ident $({ $($field: ident: $ty: ty),* })?, + $kind: path + ) => { + crate::parser::define_scope_struct! {$visibility $scope_name {$($($field: $ty), *)?}, $kind} + impl crate::parser::ParsingScope for $scope_name { + fn recovery_tokens(&self) -> &[crate::SyntaxKind] { + &[] + } + + fn syntax_kind(&self) -> crate::SyntaxKind { + self.__inner.get() + } + } + }; + + ( + $(#[$attrs: meta])* + $visibility: vis $scope_name: ident $({ $($field: ident: $ty: ty),* })?, + $kind: path, + ($($recoveries: path), *) + ) => { + crate::parser::define_scope_struct! {$visibility $scope_name {$($($field: $ty), *)?}, $kind} + + impl crate::parser::ParsingScope for $scope_name { + fn recovery_tokens(&self) -> &[crate::SyntaxKind] { + lazy_static::lazy_static! 
{ + pub(super) static ref RECOVERY_TOKENS: smallvec::SmallVec<[SyntaxKind; 4]> = { + #[allow(unused)] + use crate::SyntaxKind::*; + smallvec::SmallVec::from_slice(&[$($recoveries), *]) + }; + } + + &RECOVERY_TOKENS + } + + fn syntax_kind(&self) -> crate::SyntaxKind { + self.__inner.get() + } + } + }; +} + +macro_rules! define_scope_struct { + ( + $(#[$attrs: meta])* + $visibility: vis $scope_name: ident { $($field: ident: $ty: ty),* }, + $kind: path + ) => { + $(#[$attrs])* + #[derive(Debug, Clone)] + $visibility struct $scope_name { + __inner: std::rc::Rc>, + $($field: $ty),* + } + impl $scope_name { + #[allow(unused)] + $visibility fn new($($field: $ty),*) -> Self { + use crate::SyntaxKind::*; + Self { + $($field,)* + __inner: std::cell::Cell::new($kind).into(), + } + } + #[allow(unused)] + fn set_kind(&mut self, kind: crate::SyntaxKind) { + self.__inner.set(kind); + } + } + impl Default for $scope_name { + fn default() -> Self { + use crate::SyntaxKind::*; + Self { + __inner: std::cell::Cell::new($kind).into(), + $($field: Default::default()),* + } + } + } + }; +} + +use define_scope; +#[doc(hidden)] +use define_scope_struct; + +/// Parse a comma-separated list of elements, with trailing commas allowed. 
+/// Panics if `parser.current_kind() != Some(brackets.0)` +fn parse_list( + parser: &mut Parser, + newline_delim: bool, + list_kind: SyntaxKind, + brackets: (SyntaxKind, SyntaxKind), + element: F, +) -> Result<(), Recovery> +where + F: Fn(&mut Parser) -> Result<(), Recovery>, +{ + parser.bump_expected(brackets.0); + + let expected_closing_bracket = Some(ExpectedKind::ClosingBracket { + bracket: brackets.1, + parent: list_kind, + }); + + loop { + if parser.bump_if(brackets.1) { + return Ok(()); + } + + element(parser)?; + + if parser.current_kind() != Some(SyntaxKind::Comma) + && parser.current_kind() != Some(brackets.1) + { + // Recover gracefully if list elements are separated by newline instead of comma + let nt = parser.set_newline_as_trivia(false); + let newline = parser.current_kind() == Some(SyntaxKind::Newline) || { + parser.with_recovery_tokens( + |parser| { + let pos = parser.current_pos; + let (index, unexpected) = parser.recover(); + if unexpected.is_none() { + parser.add_error(ParseError::expected( + &[brackets.1, SyntaxKind::Comma], + expected_closing_bracket, + pos, + )); + } + parser.allow_local_recovery(Err(Recovery(index, ErrProof(())))) + }, + &[SyntaxKind::Newline, SyntaxKind::Comma, brackets.1], + )?; + parser.current_kind() == Some(SyntaxKind::Newline) + }; + parser.set_newline_as_trivia(nt); + + if newline { + parser.add_error(ParseError::expected( + &[brackets.1, SyntaxKind::Comma], + expected_closing_bracket, + parser.current_pos, + )); + if !newline_delim { + return Ok(()); + } + } else { + parser.expect(&[brackets.1, SyntaxKind::Comma], expected_closing_bracket)?; + if !parser.bump_if(SyntaxKind::Comma) { + break; + } + } + } else if !parser.bump_if(SyntaxKind::Comma) { + parser.expect(&[brackets.1], expected_closing_bracket)?; + break; + } + } + parser.bump_expected(brackets.1); + + Ok(()) +} diff --git a/crates/parser2/src/parser/param.rs b/crates/parser2/src/parser/param.rs new file mode 100644 index 0000000000..a1cd39aaec --- 
/dev/null +++ b/crates/parser2/src/parser/param.rs @@ -0,0 +1,491 @@ +use std::convert::Infallible; + +use unwrap_infallible::UnwrapInfallible; + +use crate::{ExpectedKind, ParseError, SyntaxKind}; + +use super::{ + define_scope, + expr::parse_expr, + expr_atom::{BlockExprScope, LitExprScope}, + parse_list, + path::PathScope, + token_stream::TokenStream, + type_::{is_type_start, parse_type}, + ErrProof, Parser, Recovery, +}; + +define_scope! { + pub(crate) FuncParamListScope{ allow_self: bool}, + FuncParamList, + (RParen, Comma) +} +impl super::Parse for FuncParamListScope { + type Error = Recovery; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parse_list( + parser, + false, + SyntaxKind::FuncParamList, + (SyntaxKind::LParen, SyntaxKind::RParen), + |parser| parser.parse(FnParamScope::new(self.allow_self)), + ) + } +} + +define_scope! { FnParamScope{allow_self: bool}, FnParam } +impl super::Parse for FnParamScope { + type Error = Recovery; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parser.bump_if(SyntaxKind::MutKw); + parser.expect( + &[ + SyntaxKind::SelfKw, + SyntaxKind::Ident, + SyntaxKind::Underscore, + ], + None, + )?; + + match parser.current_kind() { + Some(SyntaxKind::SelfKw) => { + if !self.allow_self { + parser.error_msg_on_current_token("`self` is not allowed here"); + } + parser.bump_expected(SyntaxKind::SelfKw); + if parser.bump_if(SyntaxKind::Colon) { + parse_type(parser, None)?; + } + } + Some(SyntaxKind::Ident | SyntaxKind::Underscore) => { + parser.bump(); + + parser.expect( + &[SyntaxKind::Ident, SyntaxKind::Underscore, SyntaxKind::Colon], + None, + )?; + if !parser.bump_if(SyntaxKind::Ident) { + parser.bump_if(SyntaxKind::Underscore); + } + if parser.find( + SyntaxKind::Colon, + ExpectedKind::TypeSpecifier(SyntaxKind::FnParam), + )? 
{ + parser.bump(); + parse_type(parser, None)?; + } + } + _ => unreachable!(), // only reachable if a recovery token is added + }; + Ok(()) + } +} + +define_scope! { + pub(crate) GenericParamListScope {disallow_trait_bound: bool}, + GenericParamList, + (Comma, Gt) +} +impl super::Parse for GenericParamListScope { + type Error = Recovery; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parse_list( + parser, + false, + SyntaxKind::GenericParamList, + (SyntaxKind::Lt, SyntaxKind::Gt), + |parser| { + parser.expect( + &[SyntaxKind::Ident, SyntaxKind::ConstKw, SyntaxKind::Gt], + None, + )?; + match parser.current_kind() { + Some(SyntaxKind::ConstKw) => parser.parse(ConstGenericParamScope::default()), + Some(SyntaxKind::Ident) => { + parser.parse(TypeGenericParamScope::new(self.disallow_trait_bound)) + } + Some(SyntaxKind::Gt) => Ok(()), + _ => unreachable!(), + } + }, + ) + } +} + +define_scope! { ConstGenericParamScope, ConstGenericParam } +impl super::Parse for ConstGenericParamScope { + type Error = Recovery; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parser.set_newline_as_trivia(false); + parser.bump_expected(SyntaxKind::ConstKw); + + parser.set_scope_recovery_stack(&[SyntaxKind::Ident, SyntaxKind::Colon]); + if parser.find_and_pop( + SyntaxKind::Ident, + ExpectedKind::Name(SyntaxKind::ConstGenericParam), + )? { + parser.bump(); + } + if parser.find_and_pop( + SyntaxKind::Colon, + ExpectedKind::TypeSpecifier(SyntaxKind::ConstGenericParam), + )? { + parser.bump(); + parse_type(parser, None)?; + } + + // parse trait bound even though it's not allowed (checked in hir) + if parser.current_kind() == Some(SyntaxKind::Colon) { + parser.parse(TypeBoundListScope::new(true))?; + } + Ok(()) + } +} + +define_scope! 
{ + TypeGenericParamScope {disallow_trait_bound: bool}, + TypeGenericParam +} +impl super::Parse for TypeGenericParamScope { + type Error = Recovery; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parser.set_newline_as_trivia(false); + parser.bump_expected(SyntaxKind::Ident); + + if parser.current_kind() == Some(SyntaxKind::Colon) { + parser.parse(TypeBoundListScope::new(self.disallow_trait_bound))?; + } + Ok(()) + } +} + +define_scope! { + TypeBoundListScope{disallow_trait_bound: bool}, + TypeBoundList, + (Plus) +} +impl super::Parse for TypeBoundListScope { + type Error = Recovery; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parser.bump_expected(SyntaxKind::Colon); + + parser.parse(TypeBoundScope::new(self.disallow_trait_bound))?; + while parser.current_kind() == Some(SyntaxKind::Plus) { + parser.bump_expected(SyntaxKind::Plus); + parser.parse(TypeBoundScope::new(self.disallow_trait_bound))?; + } + Ok(()) + } +} + +define_scope! { + TypeBoundScope{disallow_trait_bound: bool}, + TypeBound +} +impl super::Parse for TypeBoundScope { + type Error = Recovery; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + let is_type_kind = matches!( + parser.current_kind(), + Some(SyntaxKind::LParen | SyntaxKind::Star) + ); + + if is_type_kind { + parse_kind_bound(parser) + } else { + if self.disallow_trait_bound { + return parser.error_and_recover("trait bounds are not allowed here"); + } + parser.parse(TraitRefScope::default()) + } + } +} + +fn parse_kind_bound(parser: &mut Parser) -> Result<(), Recovery> { + let checkpoint = parser.checkpoint(); + let is_newline_trivia = parser.set_newline_as_trivia(false); + + parser.expect(&[SyntaxKind::Star, SyntaxKind::LParen], None)?; + + if parser.bump_if(SyntaxKind::LParen) { + parse_kind_bound(parser)?; + if parser.find( + SyntaxKind::RParen, + ExpectedKind::ClosingBracket { + bracket: SyntaxKind::RParen, + parent: SyntaxKind::TypeBound, + }, + )? 
{ + parser.bump(); + } + } else if parser.current_kind() == Some(SyntaxKind::Star) { + parser + .parse(KindBoundMonoScope::default()) + .unwrap_infallible(); + } else { + // guaranteed by `expected`, unless other recovery + // other tokens are added to the current scope + unreachable!(); + } + + if parser.current_kind() == Some(SyntaxKind::Arrow) { + parser.parse_cp(KindBoundAbsScope::default(), checkpoint.into())?; + } + parser.set_newline_as_trivia(is_newline_trivia); + Ok(()) +} + +define_scope! { KindBoundMonoScope, KindBoundMono } +impl super::Parse for KindBoundMonoScope { + type Error = Infallible; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parser.bump_expected(SyntaxKind::Star); + Ok(()) + } +} + +define_scope! { KindBoundAbsScope, KindBoundAbs } +impl super::Parse for KindBoundAbsScope { + type Error = Recovery; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parser.bump_expected(SyntaxKind::Arrow); + parse_kind_bound(parser) + } +} + +define_scope! { pub(super) TraitRefScope, TraitRef } +impl super::Parse for TraitRefScope { + type Error = Recovery; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parser.or_recover(|parser| { + parser.parse(PathScope::default()).map_err(|_| { + ParseError::expected(&[SyntaxKind::TraitRef], None, parser.end_of_prev_token) + }) + })?; + + if parser.current_kind() == Some(SyntaxKind::Lt) { + parser.parse(GenericArgListScope::default())?; + } + Ok(()) + } +} + +define_scope! 
{ + pub(crate) GenericArgListScope { is_expr: bool }, + GenericArgList, + (Gt, Comma) +} +impl super::Parse for GenericArgListScope { + type Error = Recovery; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parser.bump_expected(SyntaxKind::Lt); + + let err_kind = Some(ExpectedKind::ClosingBracket { + bracket: SyntaxKind::Gt, + parent: SyntaxKind::GenericArgList, + }); + let mut has_seen_comma = false; + loop { + if parser.bump_if(SyntaxKind::Gt) { + return Ok(()); + } + + parser.parse(GenericArgScope::default())?; + + // If we're parsing an expr, recover less aggressively. + if self.is_expr + && !matches!( + parser.current_kind(), + Some(SyntaxKind::Gt | SyntaxKind::Comma) + ) + && !has_seen_comma + { + let p = parser.add_error(ParseError::expected( + &[SyntaxKind::Gt, SyntaxKind::Comma], + err_kind, + parser.current_pos, + )); + return Err(Recovery(None, p)); + } + parser.expect(&[SyntaxKind::Gt, SyntaxKind::Comma], err_kind)?; + if !parser.bump_if(SyntaxKind::Comma) { + break; + } + has_seen_comma = true; + } + parser.bump_expected(SyntaxKind::Gt); + + Ok(()) + } +} + +define_scope! { GenericArgScope, TypeGenericArg } +impl super::Parse for GenericArgScope { + type Error = Recovery; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parser.set_newline_as_trivia(false); + match parser.current_kind() { + Some(SyntaxKind::LBrace) => { + self.set_kind(SyntaxKind::ConstGenericArg); + parser.parse(BlockExprScope::default())?; + } + + Some(kind) if kind.is_literal_leaf() => { + self.set_kind(SyntaxKind::ConstGenericArg); + parser.parse(LitExprScope::default()).unwrap_infallible(); + } + + _ => { + parse_type(parser, None)?; + if parser.current_kind() == Some(SyntaxKind::Colon) { + parser.error_and_recover("type bounds are not allowed here")?; + } + } + } + Ok(()) + } +} + +define_scope! 
{ pub(crate) CallArgListScope, CallArgList, (RParen, Comma) } +impl super::Parse for CallArgListScope { + type Error = Recovery; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parse_list( + parser, + false, + SyntaxKind::CallArgList, + (SyntaxKind::LParen, SyntaxKind::RParen), + |parser| parser.parse(CallArgScope::default()), + ) + } +} + +define_scope! { CallArgScope, CallArg, (Comma, RParen) } +impl super::Parse for CallArgScope { + type Error = Recovery; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parser.set_newline_as_trivia(false); + let has_label = parser.dry_run(|parser| { + parser.bump_if(SyntaxKind::Ident) && parser.bump_if(SyntaxKind::Colon) + }); + + if has_label { + parser.bump_expected(SyntaxKind::Ident); + parser.bump_expected(SyntaxKind::Colon); + } + parse_expr(parser)?; + Ok(()) + } +} + +define_scope! { pub(crate) WhereClauseScope, WhereClause, (Newline) } +impl super::Parse for WhereClauseScope { + type Error = Recovery; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parser.bump_expected(SyntaxKind::WhereKw); + + let mut pred_count = 0; + + loop { + parser.set_newline_as_trivia(true); + match parser.current_kind() { + Some(kind) if is_type_start(kind) => { + parser.parse(WherePredicateScope::default())?; + pred_count += 1; + } + _ => break, + } + + if !parser.bump_if(SyntaxKind::Comma) + && parser.current_kind().is_some() + && is_type_start(parser.current_kind().unwrap()) + { + parser.set_newline_as_trivia(false); + let newline = parser.current_kind() == Some(SyntaxKind::Newline); + parser.set_newline_as_trivia(true); + + if newline { + parser.add_error(ParseError::expected( + &[SyntaxKind::Comma], + None, + parser.current_pos, + )); + } else if parser.find( + SyntaxKind::Comma, + ExpectedKind::Separator { + separator: SyntaxKind::Comma, + element: SyntaxKind::WherePredicate, + }, + )? 
{ + parser.bump(); + } else { + break; + } + } + } + + if pred_count == 0 { + parser.error("`where` clause requires one or more type constraints"); + } + Ok(()) + } +} + +define_scope! { pub(crate) WherePredicateScope, WherePredicate } +impl super::Parse for WherePredicateScope { + type Error = Recovery; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parse_type(parser, None)?; + + if parser.current_kind() == Some(SyntaxKind::Colon) { + parser.parse(TypeBoundListScope::default())?; + } else { + parser.add_error(ParseError::expected( + &[SyntaxKind::Colon], + Some(ExpectedKind::TypeSpecifier(SyntaxKind::WherePredicate)), + parser.end_of_prev_token, + )); + } + Ok(()) + } +} + +pub(crate) fn parse_where_clause_opt( + parser: &mut Parser, +) -> Result<(), Recovery> { + let newline_as_trivia = parser.set_newline_as_trivia(true); + let r = if parser.current_kind() == Some(SyntaxKind::WhereKw) { + parser.parse(WhereClauseScope::default()) + } else { + Ok(()) + }; + parser.set_newline_as_trivia(newline_as_trivia); + r +} + +pub(crate) fn parse_generic_params_opt( + parser: &mut Parser, + disallow_trait_bound: bool, +) -> Result<(), Recovery> { + if parser.current_kind() == Some(SyntaxKind::Lt) { + parser.parse(GenericParamListScope::new(disallow_trait_bound)) + } else { + Ok(()) + } +} diff --git a/crates/parser2/src/parser/pat.rs b/crates/parser2/src/parser/pat.rs new file mode 100644 index 0000000000..fa392a05aa --- /dev/null +++ b/crates/parser2/src/parser/pat.rs @@ -0,0 +1,179 @@ +use std::convert::Infallible; + +use super::{define_scope, path::PathScope, token_stream::TokenStream, ErrProof, Parser, Recovery}; +use crate::{ + parser::{ + lit::{is_lit, LitScope}, + parse_list, + token_stream::LexicalToken, + }, + ParseError, SyntaxKind, +}; + +pub fn parse_pat(parser: &mut Parser) -> Result<(), Recovery> { + use SyntaxKind::*; + parser.bump_trivias(); + let checkpoint = parser.checkpoint(); + let has_mut = parser.bump_if(SyntaxKind::MutKw); 
+
+    let token = parser.current_token();
+    if has_mut {
+        // `mut` is only meaningful on identifier (binding) patterns; emit an
+        // error when it prefixes a wildcard, rest, tuple, or literal pattern.
+        // The two original arms produced the identical message, so they are
+        // merged into a single guarded arm.
+        match token.as_ref().map(|t| t.syntax_kind()) {
+            Some(kind) if matches!(kind, Underscore | Dot2 | LParen) || is_lit(kind) => {
+                parser.error_msg_on_current_token(&format!(
+                    "`mut` is not allowed on `{}`",
+                    token.unwrap().text()
+                ));
+            }
+
+            _ => {}
+        }
+    }
+
+    // Dispatch on the first token of the pattern proper.
+    match parser.current_kind() {
+        Some(Underscore) => parser
+            .parse_cp(WildCardPatScope::default(), Some(checkpoint))
+            .unwrap(),
+        Some(Dot2) => parser
+            .parse_cp(RestPatScope::default(), Some(checkpoint))
+            .unwrap(),
+        Some(LParen) => parser.parse_cp(TuplePatScope::default(), Some(checkpoint))?,
+        Some(kind) if is_lit(kind) => parser
+            .parse_cp(LitPatScope::default(), Some(checkpoint))
+            .unwrap(),
+        _ => parser.parse_cp(PathPatScope::default(), Some(checkpoint))?,
+    };
+
+    // A trailing `|` chains this pattern into an or-pattern.
+    if parser.current_kind() == Some(SyntaxKind::Pipe) {
+        parser.parse_cp(OrPatScope::default(), Some(checkpoint))?;
+    }
+    Ok(())
+}
+
+define_scope! { WildCardPatScope, WildCardPat, (Pipe) }
+impl super::Parse for WildCardPatScope {
+    type Error = Infallible;
+
+    /// `_`
+    fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> {
+        parser.set_newline_as_trivia(false);
+        parser.bump_expected(SyntaxKind::Underscore);
+        Ok(())
+    }
+}
+
+define_scope! { RestPatScope, RestPat }
+impl super::Parse for RestPatScope {
+    type Error = Infallible;
+
+    /// `..`
+    fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> {
+        parser.set_newline_as_trivia(false);
+        parser.bump_expected(SyntaxKind::Dot2);
+        Ok(())
+    }
+}
+
+define_scope! { LitPatScope, LitPat, (Pipe) }
+impl super::Parse for LitPatScope {
+    type Error = Infallible;
+
+    /// A literal pattern, e.g. `1`, `"s"`, `true`.
+    fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> {
+        parser.set_newline_as_trivia(false);
+        parser.parse(LitScope::default())
+    }
+}
+
+define_scope!
{ TuplePatScope, TuplePat } +impl super::Parse for TuplePatScope { + type Error = Recovery; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parser.parse(TuplePatElemListScope::default()) + } +} + +define_scope! { TuplePatElemListScope, TuplePatElemList, (RParen, Comma) } +impl super::Parse for TuplePatElemListScope { + type Error = Recovery; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parse_list( + parser, + false, + SyntaxKind::TuplePatElemList, + (SyntaxKind::LParen, SyntaxKind::RParen), + parse_pat, + ) + } +} + +define_scope! { PathPatScope, PathPat, (Pipe) } +impl super::Parse for PathPatScope { + type Error = Recovery; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parser.or_recover(|p| { + p.parse(PathScope::default()) + .map_err(|e| ParseError::expected(&[SyntaxKind::PathPat], None, e.range().start())) + })?; + + parser.set_newline_as_trivia(false); + if parser.current_kind() == Some(SyntaxKind::LParen) { + self.set_kind(SyntaxKind::PathTuplePat); + parser.parse(TuplePatElemListScope::default()) + } else if parser.current_kind() == Some(SyntaxKind::LBrace) { + self.set_kind(SyntaxKind::RecordPat); + parser.parse(RecordPatFieldListScope::default()) + } else { + Ok(()) + } + } +} + +define_scope! { RecordPatFieldListScope, RecordPatFieldList, (Comma, RBrace) } +impl super::Parse for RecordPatFieldListScope { + type Error = Recovery; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parse_list( + parser, + true, + SyntaxKind::RecordPatFieldList, + (SyntaxKind::LBrace, SyntaxKind::RBrace), + |parser| parser.parse(RecordPatFieldScope::default()), + ) + } +} + +define_scope! 
{ RecordPatFieldScope, RecordPatField } +impl super::Parse for RecordPatFieldScope { + type Error = Recovery; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + let has_label = parser.dry_run(|parser| { + // + parser.bump_if(SyntaxKind::Ident) && parser.bump_if(SyntaxKind::Colon) + }); + if has_label { + parser.bump_expected(SyntaxKind::Ident); + parser.bump_expected(SyntaxKind::Colon); + } + parse_pat(parser) + } +} + +define_scope! { OrPatScope, OrPat, (Pipe) } +impl super::Parse for OrPatScope { + type Error = Recovery; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parser.bump_expected(SyntaxKind::Pipe); + parse_pat(parser) + } +} diff --git a/crates/parser2/src/parser/path.rs b/crates/parser2/src/parser/path.rs new file mode 100644 index 0000000000..c62b258844 --- /dev/null +++ b/crates/parser2/src/parser/path.rs @@ -0,0 +1,73 @@ +use std::convert::identity; + +use crate::{ParseError, SyntaxKind}; + +use super::{ + define_scope, + expr::{is_lshift, is_lt_eq}, + param::GenericArgListScope, + token_stream::TokenStream, + Parser, +}; + +define_scope! { + #[doc(hidden)] + pub PathScope { is_expr: bool }, + Path, + (Colon2) +} +impl super::Parse for PathScope { + type Error = ParseError; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parser.set_newline_as_trivia(false); + parser.parse(PathSegmentScope::new(self.is_expr))?; + while parser.bump_if(SyntaxKind::Colon2) { + parser.parse(PathSegmentScope::default())?; + } + Ok(()) + } +} + +define_scope! 
{ PathSegmentScope { is_expr: bool }, PathSegment } +impl super::Parse for PathSegmentScope { + type Error = ParseError; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + match parser.current_kind() { + Some(kind) if is_path_segment(kind) => { + parser.bump(); + + if parser.current_kind() == Some(SyntaxKind::Lt) + && !(is_lt_eq(parser) || is_lshift(parser)) + && parser.dry_run(|parser| { + parser + .parse_ok(GenericArgListScope::new(self.is_expr)) + .is_ok_and(identity) + }) + { + parser + .parse(GenericArgListScope::new(self.is_expr)) + .expect("dry_run suggests this will succeed"); + } + Ok(()) + } + _ => Err(ParseError::expected( + &[SyntaxKind::PathSegment], + None, + parser.end_of_prev_token, + )), + } + } +} + +pub(super) fn is_path_segment(kind: SyntaxKind) -> bool { + matches!( + kind, + SyntaxKind::SelfTypeKw + | SyntaxKind::SelfKw + | SyntaxKind::IngotKw + | SyntaxKind::SuperKw + | SyntaxKind::Ident + ) +} diff --git a/crates/parser2/src/parser/stmt.rs b/crates/parser2/src/parser/stmt.rs new file mode 100644 index 0000000000..3873c94b19 --- /dev/null +++ b/crates/parser2/src/parser/stmt.rs @@ -0,0 +1,150 @@ +use std::convert::Infallible; + +use unwrap_infallible::UnwrapInfallible; + +use crate::{ExpectedKind, SyntaxKind}; + +use super::{ + define_scope, + expr::{parse_expr, parse_expr_no_struct}, + expr_atom::BlockExprScope, + pat::parse_pat, + token_stream::TokenStream, + type_::parse_type, + ErrProof, Parser, Recovery, +}; + +pub fn parse_stmt(parser: &mut Parser) -> Result<(), Recovery> { + use SyntaxKind::*; + + match parser.current_kind() { + Some(LetKw) => parser.parse(LetStmtScope::default()), + Some(ForKw) => parser.parse(ForStmtScope::default()), + Some(WhileKw) => parser.parse(WhileStmtScope::default()), + Some(ContinueKw) => { + parser + .parse(ContinueStmtScope::default()) + .unwrap_infallible(); + Ok(()) + } + Some(BreakKw) => { + parser.parse(BreakStmtScope::default()).unwrap_infallible(); + Ok(()) + } + 
Some(ReturnKw) => parser.parse(ReturnStmtScope::default()), + _ => parser.parse(ExprStmtScope::default()), + } +} + +define_scope! { LetStmtScope, LetStmt } +impl super::Parse for LetStmtScope { + type Error = Recovery; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parser.bump_expected(SyntaxKind::LetKw); + parser.set_newline_as_trivia(false); + parse_pat(parser)?; + + if parser.current_kind() == Some(SyntaxKind::Colon) { + parser.bump_expected(SyntaxKind::Colon); + parse_type(parser, None)?; + } + + if parser.bump_if(SyntaxKind::Eq) { + parse_expr(parser)?; + } + Ok(()) + } +} + +define_scope! { ForStmtScope, ForStmt } +impl super::Parse for ForStmtScope { + type Error = Recovery; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parser.bump_expected(SyntaxKind::ForKw); + + parser.set_scope_recovery_stack(&[SyntaxKind::InKw, SyntaxKind::Ident, SyntaxKind::LBrace]); + parse_pat(parser)?; + + if parser.find_and_pop(SyntaxKind::InKw, ExpectedKind::Unspecified)? { + parser.bump(); + } + parse_expr_no_struct(parser)?; + + // pop `Ident` recovery token, which is only included because it solves a contrived test case + parser.pop_recovery_stack(); + + if parser.find_and_pop(SyntaxKind::LBrace, ExpectedKind::Body(SyntaxKind::ForStmt))? { + parser.parse(BlockExprScope::default())?; + } + Ok(()) + } +} + +define_scope! { WhileStmtScope, WhileStmt } +impl super::Parse for WhileStmtScope { + type Error = Recovery; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parser.bump_expected(SyntaxKind::WhileKw); + + parser.set_scope_recovery_stack(&[SyntaxKind::LBrace]); + parse_expr_no_struct(parser)?; + + if parser.find_and_pop( + SyntaxKind::LBrace, + ExpectedKind::Body(SyntaxKind::WhileStmt), + )? { + parser.parse(BlockExprScope::default())?; + } + Ok(()) + } +} + +define_scope! 
{ ContinueStmtScope, ContinueStmt } +impl super::Parse for ContinueStmtScope { + type Error = Infallible; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parser.bump_expected(SyntaxKind::ContinueKw); + Ok(()) + } +} + +define_scope! { BreakStmtScope, BreakStmt } +impl super::Parse for BreakStmtScope { + type Error = Infallible; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parser.bump_expected(SyntaxKind::BreakKw); + Ok(()) + } +} + +define_scope! { ReturnStmtScope, ReturnStmt } +impl super::Parse for ReturnStmtScope { + type Error = Recovery; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parser.bump_expected(SyntaxKind::ReturnKw); + parser.set_newline_as_trivia(false); + + if !matches!( + parser.current_kind(), + None | Some(SyntaxKind::Newline | SyntaxKind::RBrace) + ) { + parse_expr(parser)?; + } + Ok(()) + } +} + +define_scope! { ExprStmtScope, ExprStmt } +impl super::Parse for ExprStmtScope { + type Error = Recovery; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parse_expr(parser) + } +} diff --git a/crates/parser2/src/parser/struct_.rs b/crates/parser2/src/parser/struct_.rs new file mode 100644 index 0000000000..d89264cab7 --- /dev/null +++ b/crates/parser2/src/parser/struct_.rs @@ -0,0 +1,107 @@ +use crate::{ExpectedKind, SyntaxKind}; + +use super::{ + attr::parse_attr_list, + define_scope, + func::FuncScope, + param::{parse_generic_params_opt, parse_where_clause_opt}, + parse_list, + token_stream::TokenStream, + type_::parse_type, + ErrProof, Parser, Recovery, +}; + +define_scope! 
{ pub(crate) StructScope, Struct } +impl super::Parse for StructScope { + type Error = Recovery; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parser.bump_expected(SyntaxKind::StructKw); + + parser.set_scope_recovery_stack(&[ + SyntaxKind::Ident, + SyntaxKind::Lt, + SyntaxKind::WhereKw, + SyntaxKind::LBrace, + ]); + + if parser.find_and_pop(SyntaxKind::Ident, ExpectedKind::Name(SyntaxKind::Struct))? { + parser.bump(); + } + + parser.expect_and_pop_recovery_stack()?; + parse_generic_params_opt(parser, false)?; + + parser.expect_and_pop_recovery_stack()?; + parse_where_clause_opt(parser)?; + + if parser.find_and_pop(SyntaxKind::LBrace, ExpectedKind::Body(SyntaxKind::Struct))? { + parser.parse(RecordFieldDefListScope::default())?; + } + Ok(()) + } +} + +define_scope! { + pub(crate) RecordFieldDefListScope, + RecordFieldDefList, + (RBrace, Comma, Newline) +} +impl super::Parse for RecordFieldDefListScope { + type Error = Recovery; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parse_list( + parser, + true, + SyntaxKind::RecordFieldDefList, + (SyntaxKind::LBrace, SyntaxKind::RBrace), + |parser| parser.parse(RecordFieldDefScope::default()), + ) + } +} + +define_scope! { RecordFieldDefScope, RecordFieldDef } +impl super::Parse for RecordFieldDefScope { + type Error = Recovery; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parser.set_newline_as_trivia(false); + parse_attr_list(parser)?; + + parser.bump_if(SyntaxKind::PubKw); + // Since the Fe-V2 doesn't support method definition in a struct, we add an + // ad-hoc check for the method definition in a struct to avoid the confusing + // error message. + // The reason that justifies this ad-hoc check is + // 1. This error is difficult to recover properly with the current parser + // design, and the emitted error message is confusing. + // 2. We anticipate that this error would happen often in the transition period + // to Fe-V2. 
+ if parser.current_kind() == Some(SyntaxKind::FnKw) { + parser.error_msg_on_current_token("function definition in struct is not allowed"); + let checkpoint = parser.enter(super::ErrorScope::new(), None); + parser.parse(FuncScope::default())?; + parser.leave(checkpoint); + return Ok(()); + } + + parser.set_scope_recovery_stack(&[SyntaxKind::Colon]); + + if parser.find( + SyntaxKind::Ident, + ExpectedKind::Name(SyntaxKind::RecordField), + )? { + parser.bump(); + } + + if parser.find( + SyntaxKind::Colon, + ExpectedKind::TypeSpecifier(SyntaxKind::RecordField), + )? { + parser.bump(); + parse_type(parser, None).map(|_| ())?; + } + Ok(()) + } +} diff --git a/crates/parser2/src/parser/token_stream.rs b/crates/parser2/src/parser/token_stream.rs new file mode 100644 index 0000000000..c5e93053a3 --- /dev/null +++ b/crates/parser2/src/parser/token_stream.rs @@ -0,0 +1,118 @@ +use std::collections::VecDeque; + +use crate::SyntaxKind; + +/// This trait works as an abstraction layer to encapsulate the differences +/// between input sources. There are mainly two types of input sources, +/// 1. text in source file +/// 2. tokens stream produced by procedural macros. +pub trait TokenStream { + type Token: LexicalToken; + + /// Returns the next token in the stream. + fn next(&mut self) -> Option; + + /// Returns the next token in the stream without consuming it. + fn peek(&mut self) -> Option<&Self::Token>; +} + +/// This trait represents a single token in the token stream. +pub trait LexicalToken: Clone { + /// Returns `SyntaxKind` of the token. + fn syntax_kind(&self) -> SyntaxKind; + + /// Returns raw text of the token. + fn text(&self) -> &str; +} + +/// This struct is a thin wrapper around `TokenStream` which allows the parser +/// to backtrack. +pub struct BackTrackableTokenStream { + stream: T, + /// Backtrack buffer which stores tokens that have been already consumed. + bt_buffer: VecDeque, + bt_points: Vec, + /// Points to the current position of the backtrack buffer. 
+ bt_cursor: Option, +} + +impl BackTrackableTokenStream { + /// Creates a new `BackTrackableTokenStream` from the given `TokenStream`. + pub fn new(stream: T) -> Self { + Self { + stream, + bt_buffer: VecDeque::new(), + bt_points: Vec::new(), + bt_cursor: None, + } + } + + /// Returns the next token in the stream. + #[allow(clippy::should_implement_trait)] + pub fn next(&mut self) -> Option { + if !self.has_parent() { + if let Some(bt_buffer) = self.bt_buffer.pop_front() { + return Some(bt_buffer); + } else { + return self.stream.next(); + } + } + + if let Some(cursor) = self.bt_cursor { + if cursor < self.bt_buffer.len() { + let token = self.bt_buffer.get(cursor).cloned(); + self.bt_cursor = Some(cursor + 1); + return token; + } else { + self.bt_cursor = Some(cursor + 1); + } + } + + let token = self.stream.next()?; + if self.has_parent() { + self.bt_buffer.push_back(token.clone()); + } + + Some(token) + } + + /// Returns the next token in the stream without consuming it. + pub fn peek(&mut self) -> Option<&T::Token> { + if let Some(cursor) = self.bt_cursor { + if cursor < self.bt_buffer.len() { + return self.bt_buffer.get(cursor); + } + } + + self.stream.peek() + } + + /// Set a backtrack point which allows the parser to backtrack to this + /// point. + pub fn set_bt_point(&mut self) { + if self.has_parent() { + self.bt_points.push(self.bt_cursor.unwrap()); + } else { + self.bt_points.push(0); + self.bt_cursor = Some(0); + } + } + + /// Backtracks to the last backtrack point. + /// + /// # Panics + /// Panics if the `set_bt_point` method has not been called before. + pub fn backtrack(&mut self) { + debug_assert!(self.has_bt_point(), "backtrack without `bt_point`"); + self.bt_cursor = Some(self.bt_points.pop().unwrap()); + } + + /// Returns `true` if the stream has a backtrack point. 
+ pub fn has_bt_point(&mut self) -> bool { + !self.bt_points.is_empty() + } + + pub fn has_parent(&mut self) -> bool { + !self.bt_points.is_empty() + } +} diff --git a/crates/parser2/src/parser/type_.rs b/crates/parser2/src/parser/type_.rs new file mode 100644 index 0000000000..40eca83ff1 --- /dev/null +++ b/crates/parser2/src/parser/type_.rs @@ -0,0 +1,138 @@ +use std::convert::Infallible; + +use super::{ + define_scope, + expr::parse_expr, + param::GenericArgListScope, + parse_list, + path::{is_path_segment, PathScope}, + token_stream::TokenStream, + Checkpoint, ErrProof, Parser, Recovery, +}; +use crate::{ExpectedKind, ParseError, SyntaxKind}; + +pub fn parse_type( + parser: &mut Parser, + checkpoint: Option, +) -> Result> { + match parser.current_kind() { + Some(SyntaxKind::Star) => parser.parse_cp(PtrTypeScope::default(), checkpoint), + Some(SyntaxKind::SelfTypeKw) => parser.parse_cp(SelfTypeScope::new(), checkpoint), + Some(SyntaxKind::LParen) => parser.parse_cp(TupleTypeScope::default(), checkpoint), + Some(SyntaxKind::LBracket) => parser.parse_cp(ArrayTypeScope::default(), checkpoint), + Some(SyntaxKind::Not) => parser + .parse_cp(NeverTypeScope::default(), checkpoint) + .map_err(|e| e.into()), + _ => parser.parse_cp(PathTypeScope::default(), checkpoint), + } +} + +pub(crate) fn is_type_start(kind: SyntaxKind) -> bool { + match kind { + SyntaxKind::Star | SyntaxKind::SelfTypeKw | SyntaxKind::LParen | SyntaxKind::LBracket => { + true + } + kind if is_path_segment(kind) => true, + _ => false, + } +} + +define_scope!(PtrTypeScope, PtrType); +impl super::Parse for PtrTypeScope { + type Error = Recovery; + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parser.set_newline_as_trivia(false); + parser.bump_expected(SyntaxKind::Star); + parse_type(parser, None).map(|_| ()) + } +} + +define_scope!(pub(crate) PathTypeScope , PathType); +impl super::Parse for PathTypeScope { + type Error = Recovery; + fn parse(&mut self, parser: &mut Parser) -> 
Result<(), Self::Error> { + parser.set_newline_as_trivia(false); + + parser.or_recover(|p| { + p.parse(PathScope::default()).map_err(|_| { + ParseError::expected(&[SyntaxKind::PathType], None, p.end_of_prev_token) + }) + })?; + + if parser.current_kind() == Some(SyntaxKind::Lt) { + parser.parse(GenericArgListScope::default())?; + } + Ok(()) + } +} + +define_scope!(pub(super) SelfTypeScope, SelfType); +impl super::Parse for SelfTypeScope { + type Error = Recovery; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parser.set_newline_as_trivia(false); + parser.bump_expected(SyntaxKind::SelfTypeKw); + if parser.current_kind() == Some(SyntaxKind::Lt) { + parser.parse(GenericArgListScope::default())?; + } + Ok(()) + } +} +define_scope! { pub(crate) TupleTypeScope, TupleType, (RParen, Comma) } +impl super::Parse for TupleTypeScope { + type Error = Recovery; + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parse_list( + parser, + false, + SyntaxKind::TupleType, + (SyntaxKind::LParen, SyntaxKind::RParen), + |parser| { + parse_type(parser, None)?; + Ok(()) + }, + ) + } +} + +define_scope! { ArrayTypeScope, ArrayType } +impl super::Parse for ArrayTypeScope { + type Error = Recovery; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parser.set_newline_as_trivia(false); + parser.bump_expected(SyntaxKind::LBracket); + + parser.set_scope_recovery_stack(&[SyntaxKind::SemiColon, SyntaxKind::RBracket]); + + parse_type(parser, None)?; + + if parser.find_and_pop(SyntaxKind::SemiColon, ExpectedKind::Unspecified)? { + parser.bump(); + } + + parse_expr(parser)?; + + if parser.find_and_pop( + SyntaxKind::RBracket, + ExpectedKind::ClosingBracket { + bracket: SyntaxKind::RBracket, + parent: SyntaxKind::ArrayType, + }, + )? { + parser.bump(); + } + Ok(()) + } +} + +define_scope! 
{NeverTypeScope, NeverType}
+impl super::Parse for NeverTypeScope {
+    type Error = Recovery;
+
+    /// The never type: `!`.
+    fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> {
+        parser.bump_expected(SyntaxKind::Not);
+        Ok(())
+    }
+}
diff --git a/crates/parser2/src/parser/use_tree.rs b/crates/parser2/src/parser/use_tree.rs
new file mode 100644
index 0000000000..51eaad579d
--- /dev/null
+++ b/crates/parser2/src/parser/use_tree.rs
@@ -0,0 +1,138 @@
+use std::{cell::Cell, convert::identity, rc::Rc};
+
+use crate::{parser::path::is_path_segment, ParseError, SyntaxKind, TextRange};
+
+use super::{define_scope, parse_list, token_stream::TokenStream, ErrProof, Parser, Recovery};
+
+define_scope! { pub(crate) UseTreeScope, UseTree }
+impl super::Parse for UseTreeScope {
+    type Error = Recovery;
+
+    /// Parses one `use` tree: either a braced group (`{a, b::c}`) or a use
+    /// path optionally followed by `as <alias>` or `::{...}`.
+    fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> {
+        parser.set_newline_as_trivia(false);
+        if let Some(SyntaxKind::LBrace) = parser.current_kind() {
+            return parser.parse(UseTreeListScope::default());
+        }
+
+        let use_path_scope = UsePathScope::default();
+        parser.or_recover(|p| p.parse(use_path_scope.clone()))?;
+        let is_glob = use_path_scope.is_glob.get();
+
+        if parser.current_kind() == Some(SyntaxKind::AsKw) {
+            // `use foo::* as x` is rejected, but the alias is still consumed
+            // so parsing can continue past it.
+            if is_glob {
+                parser.error_msg_on_current_token("can't use `as` with `*`");
+            }
+            // The redundant re-check of `AsKw` that previously guarded this
+            // call was removed; the enclosing branch already established it.
+            parser.or_recover(|p| p.parse(UseTreeAliasScope::default()))?;
+            return Ok(());
+        }
+
+        if !parser.bump_if(SyntaxKind::Colon2) {
+            return Ok(());
+        }
+        if parser.current_kind() == Some(SyntaxKind::LBrace) {
+            if is_glob {
+                parser.error_msg_on_current_token("can't use `*` with `{}`");
+            }
+            parser.parse(UseTreeListScope::default())?;
+        }
+        Ok(())
+    }
+}
+
+define_scope!
{ UseTreeListScope, UseTreeList, (Comma, RBrace) }
+impl super::Parse for UseTreeListScope {
+    type Error = Recovery;
+
+    /// `{tree, tree, ...}` — brace-delimited, comma-separated use trees.
+    fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> {
+        parse_list(
+            parser,
+            true,
+            SyntaxKind::UseTreeList,
+            (SyntaxKind::LBrace, SyntaxKind::RBrace),
+            |parser| parser.parse(UseTreeScope::default()),
+        )
+    }
+}
+
+define_scope! {
+    UsePathScope{ is_glob: Rc>},
+    UsePath,
+    (Colon2)
+}
+impl super::Parse for UsePathScope {
+    type Error = ParseError;
+
+    /// `seg::seg::...` — records in `is_glob` whether the most recent
+    /// segment was `*`.
+    fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> {
+        parser.set_newline_as_trivia(false);
+        parser.parse(UsePathSegmentScope::default())?;
+
+        // Keep consuming `::segment` pairs for as long as a dry run confirms
+        // that a complete pair is actually present.
+        while parser.dry_run(|parser| {
+            parser.bump_if(SyntaxKind::Colon2)
+                && parser
+                    .parse_ok(UsePathSegmentScope::default())
+                    .is_ok_and(identity)
+        }) {
+            // A glob must be the final segment; anything after `*` is an error.
+            if self.is_glob.get() {
+                parser.error_msg_on_current_token("can't specify path after `*`");
+            }
+            parser.bump_expected(SyntaxKind::Colon2);
+            self.is_glob
+                .set(parser.current_kind() == Some(SyntaxKind::Star));
+            parser.parse(UsePathSegmentScope::default())?;
+        }
+        Ok(())
+    }
+}
+
+define_scope! { UsePathSegmentScope, UsePathSegment }
+impl super::Parse for UsePathSegmentScope {
+    type Error = ParseError;
+
+    /// A single segment of a `use` path (identifier-like token or `*`).
+    fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> {
+        if parser.current_kind().is_some_and(is_use_path_segment) {
+            parser.bump();
+            Ok(())
+        } else {
+            Err(ParseError::Msg(
+                "expected identifier or `self`".into(),
+                TextRange::empty(parser.end_of_prev_token),
+            ))
+        }
+    }
+}
+
+define_scope!
{ UseTreeAliasScope, UseTreeRename } +impl super::Parse for UseTreeAliasScope { + type Error = ParseError; + + fn parse(&mut self, parser: &mut Parser) -> Result<(), Self::Error> { + parser.set_newline_as_trivia(false); + parser.bump_expected(SyntaxKind::AsKw); + + match parser.current_kind() { + Some(SyntaxKind::Ident) => parser.bump_expected(SyntaxKind::Ident), + Some(SyntaxKind::Underscore) => parser.bump_expected(SyntaxKind::Underscore), + _ => { + return Err(ParseError::Msg( + "expected identifier or `_`".into(), + TextRange::empty(parser.current_pos), + )) + } + }; + Ok(()) + } +} + +fn is_use_path_segment(kind: SyntaxKind) -> bool { + is_path_segment(kind) || matches!(kind, SyntaxKind::Star) +} diff --git a/crates/parser2/src/syntax_kind.rs b/crates/parser2/src/syntax_kind.rs new file mode 100644 index 0000000000..a87a2798e0 --- /dev/null +++ b/crates/parser2/src/syntax_kind.rs @@ -0,0 +1,801 @@ +//! This module contains the definition of the [`SyntaxKind`]. + +use logos::Logos; + +/// The definition of the `SyntaxKind'. +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Logos)] +#[repr(u16)] +pub enum SyntaxKind { + // Atom kinds. These are leaf nodes. 
+ #[error] + InvalidToken = 0, + #[regex(r"[\n|\r\n|\r]+")] + Newline, + #[regex(r"[ ]+")] + WhiteSpace, + /// `foo` + #[regex("[a-zA-Z_][a-zA-Z0-9_]*")] + Ident, + /// `1`, `0b1010`, `0o77`, `0xff` + #[regex("[0-9]+(?:_[0-9]+)*")] + #[regex("0[bB][0-1]+")] + #[regex("0[oO][0-7]+")] + #[regex("0[xX][0-9a-fA-F]+")] + Int, + /// "MyString" + #[regex(r#""([^"\\]|\\.)*""#)] + String, + /// `(` + #[token("(")] + LParen, + /// `)` + #[token(")")] + RParen, + /// `{` + #[token("{")] + LBrace, + /// `}` + #[token("}")] + RBrace, + /// `[` + #[token("[")] + LBracket, + /// `]` + #[token("]")] + RBracket, + /// `:` + #[token(":")] + Colon, + /// `::` + #[token("::")] + Colon2, + /// `;` + #[token(";")] + SemiColon, + /// `.` + #[token(".")] + Dot, + /// `..` + #[token("..")] + Dot2, + /// `,` + #[token(",")] + Comma, + /// `->` + #[token("->")] + Arrow, + /// `=>` + #[token("=>")] + FatArrow, + /// `_` + #[token("_")] + Underscore, + /// `#` + #[token("#")] + Pound, + /// `// Comment` + #[regex(r"//[^\n\r]*")] + Comment, + /// `/// DocComment` + #[regex(r"///[^\n\r]*")] + DocComment, + + /// `+` + #[token("+")] + Plus, + /// `-` + #[token("-")] + Minus, + /// `*` + #[token("*")] + Star, + #[token("**")] + Star2, + /// `/` + #[token("/")] + Slash, + /// `%` + #[token("%")] + Percent, + #[token("~")] + Tilde, + #[token("!")] + Not, + #[token("^")] + Hat, + /// `&` + #[token("&")] + Amp, + /// `&&` + #[token("&&")] + Amp2, + /// `|` + #[token("|")] + Pipe, + /// `||` + #[token("||")] + Pipe2, + /// `<` + #[token("<")] + Lt, + /// `>` + #[token(">")] + Gt, + /// `=` + #[token("=")] + Eq, + /// `==` + #[token("==")] + Eq2, + /// `!=` + #[token("!=")] + NotEq, + + /// `as`' + #[token("as")] + AsKw, + /// `true' + #[token("true")] + TrueKw, + /// `false` + #[token("false")] + FalseKw, + /// `break` + #[token("break")] + BreakKw, + /// `continue` + #[token("continue")] + ContinueKw, + /// `contract` + #[token("contract")] + ContractKw, + /// `fn` + #[token("fn")] + FnKw, + /// `mod` 
+ #[token("mod")] + ModKw, + /// `const` + #[token("const")] + ConstKw, + /// `if` + #[token("if")] + IfKw, + /// `else` + #[token("else")] + ElseKw, + /// `match` + #[token("match")] + MatchKw, + /// `for` + #[token("for")] + ForKw, + /// `in` + #[token("in")] + InKw, + /// `where` + #[token("where")] + WhereKw, + /// `while` + #[token("while")] + WhileKw, + /// `pub` + #[token("pub")] + PubKw, + /// `return` + #[token("return")] + ReturnKw, + /// `self` + #[token("self")] + SelfKw, + #[token("Self")] + SelfTypeKw, + /// `struct` + #[token("struct")] + StructKw, + /// `enum` + #[token("enum")] + EnumKw, + /// `trait` + #[token("trait")] + TraitKw, + /// `impl` + #[token("impl")] + ImplKw, + /// `type` + #[token("type")] + TypeKw, + /// `let` + #[token("let")] + LetKw, + /// `mut` + #[token("mut")] + MutKw, + /// `use` + #[token("use")] + UseKw, + /// `extern` + #[token("extern")] + ExternKw, + /// `unsafe` + #[token("unsafe")] + UnsafeKw, + /// `ingot` + #[token("ingot")] + IngotKw, + /// `super` + #[token("super")] + SuperKw, + + /// `<<` + LShift, + /// `>>` + RShift, + /// `<=` + LtEq, + /// `>=` + GtEq, + + /// `1', `false`, `"String" + Lit, + + // Expressions. These are non-leaf nodes. + /// { statement-list } + BlockExpr, + /// `x + 1` + BinExpr, + /// `!x` + UnExpr, + /// `foo(x, y)` + CallExpr, + /// `(arg: 1, y)` + CallArgList, + /// `arg: 1`, `y` + CallArg, + /// `foo.bar(x, y)` + MethodCallExpr, + /// `` + GenericArgList, + /// `T` + TypeGenericArg, + /// `1` + ConstGenericArg, + /// `FOO::Bar` + PathExpr, + /// `Foo { x: 1, y: "String"` }` + RecordInitExpr, + /// `{ x: 1, y: "String"` }` + RecordFieldList, + /// `x: 1` + RecordField, + /// `foo.bar`, `foo.0` + FieldExpr, + /// `foo[1]` + IndexExpr, + /// `(x ,y)` + TupleExpr, + /// `[x, y, z]` + ArrayExpr, + /// `[x; 4]` + ArrayRepExpr, + /// `1` + LitExpr, + /// `if x { 1 } else { 2 }` + IfExpr, + /// `match x { pat => { .. 
} }` + MatchExpr, + /// `(1 + 2)` + ParenExpr, + /// x = 1 + AssignExpr, + /// x += 1 + AugAssignExpr, + + // Statements. These are non-leaf nodes. + /// `let x = 1` + LetStmt, + /// `for x in y {..}` + ForStmt, + /// `while expr {..}` + WhileStmt, + /// `continue` + ContinueStmt, + /// `break` + BreakStmt, + + /// `return 1` + ReturnStmt, + /// `1` + ExprStmt, + + // Patterns. These are non-leaf nodes. + /// `_` + WildCardPat, + /// `..` + RestPat, + /// `x` + LitPat, + /// `(x, y)` + TuplePat, + /// `(x, y)` + TuplePatElemList, + /// `Enum::Variant` + PathPat, + /// `Enum::Variant(x, y)` + PathTuplePat, + /// `Struct {x, y}` + RecordPat, + /// `{a: b, y}` + RecordPatFieldList, + /// `a: b` + RecordPatField, + /// `pat1 | pat2` + OrPat, + + // MatchArms. + // `pat => { stmtlist }` + MatchArm, + MatchArmList, + + // Items. These are non-leaf nodes. + Item, + /// `mod s { .. }` + Mod, + /// `fn foo(x: i32) -> i32 { .. }` + Func, + /// `struct Foo { .. }` + Struct, + /// `contract Foo { .. }` + Contract, + /// `enum Foo { .. }` + Enum, + /// `type Foo = i32` + TypeAlias, + /// `impl Foo { .. }` + Impl, + /// `{ fn ... }` + ImplItemList, + /// `trait Foo {..}` + Trait, + /// `: Trait + Trait2` + SuperTraitList, + /// `{ fn foo() {..} }` + TraitItemList, + /// `impl Trait for Foo { .. }` + ImplTrait, + /// `{ fn foo() {..} }` + ImplTraitItemList, + /// `const FOO: i32 = 1` + Const, + /// `use foo::{Foo as Foo1, bar::Baz}` + Use, + /// `foo::{Foo as Foo1, bar::Baz}` + UseTree, + /// `{Foo as Foo1, bar::Baz}` + UseTreeList, + /// `Foo::Bar`, `Foo::*`,`*`. + UsePath, + /// `Foo`, `self` + UsePathSegment, + /// `as Foo` + UseTreeRename, + /// `extern { .. }` + Extern, + /// `extern { .. }` + ExternItemList, + ItemList, + + /// `pub unsafe ` + ItemModifier, + + // Types. These are non-leaf nodes. 
+ /// `*i32` + PtrType, + /// `foo::Type` + PathType, + /// `Self` + SelfType, + /// `(i32, foo::Bar)` + TupleType, + /// `[i32; 4]` + ArrayType, + /// `!` + NeverType, + + // Paths. These are non-leaf nodes. + /// `Segment1::Segment2` + Path, + /// `Segment1` + PathSegment, + + /// `#attr` + Attr, + /// `(key1: value1, key2: value2)` + AttrArgList, + /// `key: value` + AttrArg, + /// `/// Comment` + DocCommentAttr, + AttrList, + + /// `pub` + Visibility, + + /// `x: i32` + RecordFieldDef, + /// `{x: i32, y: u32}` + RecordFieldDefList, + + VariantDef, + VariantDefList, + + /// `T` + /// `T: Trait` + TypeGenericParam, + /// `const N: usize` + ConstGenericParam, + /// `` + GenericParamList, + + /// fn foo(t: T) -> U + FuncSignature, + /// `(x: i32, _ y: mut i32)` + FuncParamList, + + /// `_ x: mut i32` + FnParam, + + /// `foo::Trait1 + Trait2` + TypeBoundList, + /// `TraitBound` or `TypeKind`. + TypeBound, + /// `Trait1` + TraitRef, + /// `* -> *` or `(*-> *) -> *` + KindBoundAbs, + /// `*`. + KindBoundMono, + /// `where Option: Trait1 + Trait2` + WhereClause, + /// `Option: Trait1 + Trait2` + WherePredicate, + + /// Root node of the input source. + Root, + + /// Represents an error branch. + Error, +} + +impl SyntaxKind { + /// Returns `true` if this is a trivia token. + pub fn is_trivia(self) -> bool { + matches!(self, SyntaxKind::WhiteSpace | SyntaxKind::Comment) + } + + /// Returns `true` if the token is a literal leaf. + pub fn is_literal_leaf(self) -> bool { + matches!( + self, + SyntaxKind::Int | SyntaxKind::String | SyntaxKind::TrueKw | SyntaxKind::FalseKw + ) + } + + pub fn is_open_bracket_kind(self) -> bool { + matches!( + self, + SyntaxKind::LBrace | SyntaxKind::LParen | SyntaxKind::LBracket | SyntaxKind::Lt + ) + } + + /// Returns its corresponding open bracket kind if it is a close bracket + /// kind. 
+ pub fn corresponding_open_bracket_kind(self) -> Option { + match self { + SyntaxKind::RBrace => Some(SyntaxKind::LBrace), + SyntaxKind::RParen => Some(SyntaxKind::LParen), + SyntaxKind::RBracket => Some(SyntaxKind::LBracket), + SyntaxKind::Gt => Some(SyntaxKind::Lt), + _ => None, + } + } + + pub(crate) fn is_modifier_head(self) -> bool { + matches!(self, SyntaxKind::PubKw | SyntaxKind::UnsafeKw) + } + + pub(crate) fn is_item_head(self) -> bool { + self.is_modifier_head() + || matches!( + self, + SyntaxKind::ModKw + | SyntaxKind::FnKw + | SyntaxKind::StructKw + | SyntaxKind::ContractKw + | SyntaxKind::EnumKw + | SyntaxKind::TypeKw + | SyntaxKind::ImplKw + | SyntaxKind::TraitKw + | SyntaxKind::ConstKw + | SyntaxKind::UseKw + | SyntaxKind::ExternKw + ) + } + + pub fn describe(self) -> &'static str { + match self { + SyntaxKind::Newline => "newline", + SyntaxKind::Ident => "identifier", + SyntaxKind::Int => "integer", + SyntaxKind::String => "string literal", + SyntaxKind::LParen => "`(`", + SyntaxKind::RParen => "`)`", + SyntaxKind::LBrace => "`{`", + SyntaxKind::RBrace => "`}`", + SyntaxKind::LBracket => "`[`", + SyntaxKind::RBracket => "`]`", + SyntaxKind::Colon => "`:`", + SyntaxKind::Colon2 => "`::`", + SyntaxKind::SemiColon => "`;`", + SyntaxKind::Dot => "`.`", + SyntaxKind::Dot2 => "`..`", + SyntaxKind::Comma => "`,`", + SyntaxKind::Arrow => "`->`", + SyntaxKind::FatArrow => "`=>`", + SyntaxKind::Underscore => "`_`", + SyntaxKind::Pound => "`#`", + SyntaxKind::Plus => "`+`", + SyntaxKind::Minus => "`-`", + SyntaxKind::Star => "`*`", + SyntaxKind::Star2 => "`**`", + SyntaxKind::Slash => "`/`", + SyntaxKind::Percent => "`%`", + SyntaxKind::Tilde => "`~`", + SyntaxKind::Not => "`!`", + SyntaxKind::Hat => "`^`", + SyntaxKind::Amp => "`&`", + SyntaxKind::Amp2 => "`&&`", + SyntaxKind::Pipe => "`|`", + SyntaxKind::Pipe2 => "`||`", + SyntaxKind::Lt => "`<`", + SyntaxKind::Gt => "`>`", + SyntaxKind::Eq => "`=`", + SyntaxKind::Eq2 => "`==`", + SyntaxKind::NotEq => 
"`!=`", + SyntaxKind::AsKw => "`as`", + SyntaxKind::TrueKw => "`true`", + SyntaxKind::FalseKw => "`false`", + SyntaxKind::BreakKw => "`break`", + SyntaxKind::ContinueKw => "`continue`", + SyntaxKind::ContractKw => "`contract`", + SyntaxKind::FnKw => "`fn`", + SyntaxKind::ModKw => "`mod`", + SyntaxKind::ConstKw => "`const`", + SyntaxKind::IfKw => "`if`", + SyntaxKind::ElseKw => "`else`", + SyntaxKind::MatchKw => "`match`", + SyntaxKind::ForKw => "`for`", + SyntaxKind::InKw => "`in`", + SyntaxKind::WhereKw => "`where`", + SyntaxKind::WhileKw => "`while`", + SyntaxKind::PubKw => "`pub`", + SyntaxKind::ReturnKw => "`return`", + SyntaxKind::SelfKw => "`self`", + SyntaxKind::SelfTypeKw => "`Self`", + SyntaxKind::StructKw => "`struct`", + SyntaxKind::EnumKw => "`enum`", + SyntaxKind::TraitKw => "`trait`", + SyntaxKind::ImplKw => "`impl`", + SyntaxKind::TypeKw => "`type`", + SyntaxKind::LetKw => "`let`", + SyntaxKind::MutKw => "`mut`", + SyntaxKind::UseKw => "`use`", + SyntaxKind::ExternKw => "`extern`", + SyntaxKind::UnsafeKw => "`unsafe`", + SyntaxKind::IngotKw => "`ingot`", + SyntaxKind::SuperKw => "`super`", + SyntaxKind::LShift => "`<<`", + SyntaxKind::RShift => "`>>`", + SyntaxKind::LtEq => "`<=`", + SyntaxKind::GtEq => "`>=`", + + SyntaxKind::PathType => "type", + SyntaxKind::TraitRef => "trait name", + SyntaxKind::PathSegment => "path segment", + SyntaxKind::PathPat => "pattern", + + SyntaxKind::ArrayExpr => "array definition", + SyntaxKind::RecordFieldDef => "field", + SyntaxKind::IndexExpr => "index expression", + SyntaxKind::BlockExpr => "block", + SyntaxKind::TypeBound => "type bound", + SyntaxKind::CallArgList => "function call arguments", + + SyntaxKind::InvalidToken => unimplemented!(), + SyntaxKind::WhiteSpace => "whitespace", + SyntaxKind::Comment => "comment", + SyntaxKind::DocComment => "doc comment", + + SyntaxKind::Lit => "literal", + SyntaxKind::BinExpr => "binary expression", + SyntaxKind::UnExpr => "unary expression", + SyntaxKind::CallExpr => 
"function call expression", + SyntaxKind::CallArg => "function call argument", + SyntaxKind::MethodCallExpr => "method call expression", + SyntaxKind::GenericArgList => "generic type argument list", + SyntaxKind::TypeGenericArg => "generic type argument", + SyntaxKind::ConstGenericArg => "generic const argument", + SyntaxKind::PathExpr => "path", + SyntaxKind::RecordInitExpr => "record initialization expression", + SyntaxKind::RecordFieldList => "record field list", + SyntaxKind::RecordField => "field", + SyntaxKind::FieldExpr => "field", + SyntaxKind::TupleExpr => "tuple expression", + SyntaxKind::ArrayRepExpr => "array expression", + SyntaxKind::LitExpr => "literal expression", + SyntaxKind::IfExpr => "`if` expression", + SyntaxKind::MatchExpr => "`match` expression", + SyntaxKind::ParenExpr => "parenthesized expression", + SyntaxKind::AssignExpr => "assignment expression", + SyntaxKind::AugAssignExpr => "augmented assignment expression", + SyntaxKind::LetStmt => "`let` statement", + SyntaxKind::ForStmt => "`for` statement", + SyntaxKind::WhileStmt => "`while` statement", + SyntaxKind::ContinueStmt => "`continue` statement", + SyntaxKind::BreakStmt => "`break` statement", + SyntaxKind::ReturnStmt => "`return` statement", + SyntaxKind::ExprStmt => "`expr` statement", + SyntaxKind::WildCardPat => "wildcard pattern", + SyntaxKind::RestPat => "`..` pattern", + SyntaxKind::LitPat => "literal pattern", + SyntaxKind::TuplePat => "tuple pattern", + SyntaxKind::TuplePatElemList => "tuple pattern element list", + SyntaxKind::PathTuplePat => "path tuple pattern", + SyntaxKind::RecordPat => "record pattern", + SyntaxKind::RecordPatFieldList => "record pattern field list", + SyntaxKind::RecordPatField => "record pattern field", + SyntaxKind::OrPat => "`or` pattern", + SyntaxKind::MatchArm => "`match` arm", + SyntaxKind::MatchArmList => "`match` arm list", + SyntaxKind::Item => "item", + SyntaxKind::Mod => "`mod`", + SyntaxKind::Func => "function definition", + 
SyntaxKind::Struct => "struct definition", + SyntaxKind::Contract => "contract definition", + SyntaxKind::Enum => "enum definition", + SyntaxKind::TypeAlias => "type alias", + SyntaxKind::Impl => "`impl` block", + SyntaxKind::ImplItemList => "`impl` item list", + SyntaxKind::Trait => "trait definition", + SyntaxKind::SuperTraitList => "supertrait list", + SyntaxKind::TraitItemList => "`trait` item list", + SyntaxKind::ImplTrait => "`impl` trait block", + SyntaxKind::ImplTraitItemList => "`impl` trait item list", + SyntaxKind::Const => "const definition", + SyntaxKind::Use => "`use` statement", + SyntaxKind::UseTree => "`use` tree", + SyntaxKind::UseTreeList => "`use` tree list", + SyntaxKind::UsePath => "`use` path", + SyntaxKind::UsePathSegment => "`use` path segment", + SyntaxKind::UseTreeRename => "`use as` rename", + SyntaxKind::Extern => "`extern` block", + SyntaxKind::ExternItemList => "`extern` body", + SyntaxKind::ItemList => "item list", + SyntaxKind::ItemModifier => "item modifier", + SyntaxKind::PtrType => "pointer type", + SyntaxKind::SelfType => "`Self` type", + SyntaxKind::TupleType => "tuple type definition", + SyntaxKind::NeverType => "never type", + SyntaxKind::ArrayType => "array type definition", + SyntaxKind::Path => "path", + SyntaxKind::Attr => "attribute", + SyntaxKind::AttrArgList => "attribute argument list", + SyntaxKind::AttrArg => "attribute argument", + SyntaxKind::DocCommentAttr => "doc comment", + SyntaxKind::AttrList => "attribute list", + SyntaxKind::Visibility => "visibility modifier", + SyntaxKind::RecordFieldDefList => "record field list", + SyntaxKind::VariantDef => "`enum` variant definition", + SyntaxKind::VariantDefList => "`enum` variant list", + SyntaxKind::TypeGenericParam => "generic type parameter", + SyntaxKind::ConstGenericParam => "constant generic parameter", + SyntaxKind::GenericParamList => "generic parameter list", + SyntaxKind::FuncSignature => "function signature", + SyntaxKind::FuncParamList => "function 
parameter list", + SyntaxKind::FnParam => "function parameter", + SyntaxKind::TypeBoundList => "type bound list", + SyntaxKind::KindBoundAbs => "kind bound", + SyntaxKind::KindBoundMono => "kind bound", + SyntaxKind::WhereClause => "`where` clause", + SyntaxKind::WherePredicate => "`where` predicate", + SyntaxKind::Root => "module", + SyntaxKind::Error => todo!(), + } + } + + pub fn is_token(self) -> bool { + matches!( + self, + SyntaxKind::Newline + | SyntaxKind::Ident + | SyntaxKind::Int + | SyntaxKind::String + | SyntaxKind::LParen + | SyntaxKind::RParen + | SyntaxKind::LBrace + | SyntaxKind::RBrace + | SyntaxKind::LBracket + | SyntaxKind::RBracket + | SyntaxKind::Colon + | SyntaxKind::Colon2 + | SyntaxKind::SemiColon + | SyntaxKind::Dot + | SyntaxKind::Dot2 + | SyntaxKind::Comma + | SyntaxKind::Arrow + | SyntaxKind::FatArrow + | SyntaxKind::Underscore + | SyntaxKind::Pound + | SyntaxKind::Plus + | SyntaxKind::Minus + | SyntaxKind::Star + | SyntaxKind::Star2 + | SyntaxKind::Slash + | SyntaxKind::Percent + | SyntaxKind::Tilde + | SyntaxKind::Not + | SyntaxKind::Hat + | SyntaxKind::Amp + | SyntaxKind::Amp2 + | SyntaxKind::Pipe + | SyntaxKind::Pipe2 + | SyntaxKind::Lt + | SyntaxKind::Gt + | SyntaxKind::Eq + | SyntaxKind::Eq2 + | SyntaxKind::NotEq + | SyntaxKind::AsKw + | SyntaxKind::TrueKw + | SyntaxKind::FalseKw + | SyntaxKind::BreakKw + | SyntaxKind::ContinueKw + | SyntaxKind::ContractKw + | SyntaxKind::FnKw + | SyntaxKind::ModKw + | SyntaxKind::ConstKw + | SyntaxKind::IfKw + | SyntaxKind::ElseKw + | SyntaxKind::MatchKw + | SyntaxKind::ForKw + | SyntaxKind::InKw + | SyntaxKind::WhereKw + | SyntaxKind::WhileKw + | SyntaxKind::PubKw + | SyntaxKind::ReturnKw + | SyntaxKind::SelfKw + | SyntaxKind::SelfTypeKw + | SyntaxKind::StructKw + | SyntaxKind::EnumKw + | SyntaxKind::TraitKw + | SyntaxKind::ImplKw + | SyntaxKind::TypeKw + | SyntaxKind::LetKw + | SyntaxKind::MutKw + | SyntaxKind::UseKw + | SyntaxKind::ExternKw + | SyntaxKind::UnsafeKw + | SyntaxKind::IngotKw + | 
SyntaxKind::SuperKw + | SyntaxKind::LShift + | SyntaxKind::RShift + | SyntaxKind::LtEq + | SyntaxKind::GtEq + ) + } +} + +impl From for rowan::SyntaxKind { + fn from(kind: SyntaxKind) -> Self { + Self(kind as u16) + } +} diff --git a/crates/parser2/src/syntax_node.rs b/crates/parser2/src/syntax_node.rs new file mode 100644 index 0000000000..70d47c4431 --- /dev/null +++ b/crates/parser2/src/syntax_node.rs @@ -0,0 +1,22 @@ +use crate::SyntaxKind; + +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub enum FeLang {} + +impl rowan::Language for FeLang { + type Kind = SyntaxKind; + + fn kind_from_raw(raw: rowan::SyntaxKind) -> Self::Kind { + unsafe { std::mem::transmute::(raw.0) } + } + + fn kind_to_raw(kind: Self::Kind) -> rowan::SyntaxKind { + kind.into() + } +} + +pub type SyntaxNode = rowan::SyntaxNode; +pub type SyntaxToken = rowan::SyntaxToken; +pub type GreenNode = rowan::GreenNode; +pub type TextRange = rowan::TextRange; +pub type NodeOrToken = rowan::NodeOrToken; diff --git a/crates/parser2/test_files/error_recovery/exprs/array.fe b/crates/parser2/test_files/error_recovery/exprs/array.fe new file mode 100644 index 0000000000..603646659f --- /dev/null +++ b/crates/parser2/test_files/error_recovery/exprs/array.fe @@ -0,0 +1,2 @@ +[1, 2 a, 3] +[1, 2,] \ No newline at end of file diff --git a/crates/parser2/test_files/error_recovery/exprs/array.snap b/crates/parser2/test_files/error_recovery/exprs/array.snap new file mode 100644 index 0000000000..f5531d8d5d --- /dev/null +++ b/crates/parser2/test_files/error_recovery/exprs/array.snap @@ -0,0 +1,39 @@ +--- +source: crates/parser2/tests/error_recovery.rs +expression: node +input_file: crates/parser2/test_files/error_recovery/exprs/array.fe +--- +Root@0..19 + ArrayExpr@0..11 + LBracket@0..1 "[" + LitExpr@1..2 + Lit@1..2 + Int@1..2 "1" + Comma@2..3 "," + WhiteSpace@3..4 " " + LitExpr@4..5 + Lit@4..5 + Int@4..5 "2" + WhiteSpace@5..6 " " + Error@6..7 + Ident@6..7 "a" + Comma@7..8 "," + 
WhiteSpace@8..9 " " + LitExpr@9..10 + Lit@9..10 + Int@9..10 "3" + RBracket@10..11 "]" + Newline@11..12 "\n" + ArrayExpr@12..19 + LBracket@12..13 "[" + LitExpr@13..14 + Lit@13..14 + Int@13..14 "1" + Comma@14..15 "," + WhiteSpace@15..16 " " + LitExpr@16..17 + Lit@16..17 + Int@16..17 "2" + Comma@17..18 "," + RBracket@18..19 "]" + diff --git a/crates/parser2/test_files/error_recovery/exprs/block.fe b/crates/parser2/test_files/error_recovery/exprs/block.fe new file mode 100644 index 0000000000..f1e1ac3703 --- /dev/null +++ b/crates/parser2/test_files/error_recovery/exprs/block.fe @@ -0,0 +1,5 @@ +{ + let x: i32 u32 = 10 + let y = 10 + +} \ No newline at end of file diff --git a/crates/parser2/test_files/error_recovery/exprs/block.snap b/crates/parser2/test_files/error_recovery/exprs/block.snap new file mode 100644 index 0000000000..997d333a67 --- /dev/null +++ b/crates/parser2/test_files/error_recovery/exprs/block.snap @@ -0,0 +1,48 @@ +--- +source: crates/parser2/tests/error_recovery.rs +expression: node +input_file: crates/parser2/test_files/error_recovery/exprs/block.fe +--- +Root@0..43 + BlockExpr@0..43 + LBrace@0..1 "{" + Newline@1..2 "\n" + WhiteSpace@2..6 " " + LetStmt@6..16 + LetKw@6..9 "let" + WhiteSpace@9..10 " " + PathPat@10..11 + Path@10..11 + PathSegment@10..11 + Ident@10..11 "x" + Colon@11..12 ":" + WhiteSpace@12..13 " " + PathType@13..16 + Path@13..16 + PathSegment@13..16 + Ident@13..16 "i32" + WhiteSpace@16..17 " " + Error@17..25 + Ident@17..20 "u32" + WhiteSpace@20..21 " " + Eq@21..22 "=" + WhiteSpace@22..23 " " + Int@23..25 "10" + Newline@25..26 "\n" + WhiteSpace@26..30 " " + LetStmt@30..40 + LetKw@30..33 "let" + WhiteSpace@33..34 " " + PathPat@34..35 + Path@34..35 + PathSegment@34..35 + Ident@34..35 "y" + WhiteSpace@35..36 " " + Eq@36..37 "=" + WhiteSpace@37..38 " " + LitExpr@38..40 + Lit@38..40 + Int@38..40 "10" + Newline@40..42 "\n\n" + RBrace@42..43 "}" + diff --git a/crates/parser2/test_files/error_recovery/exprs/call.fe 
b/crates/parser2/test_files/error_recovery/exprs/call.fe new file mode 100644 index 0000000000..ebd351f6f0 --- /dev/null +++ b/crates/parser2/test_files/error_recovery/exprs/call.fe @@ -0,0 +1,3 @@ +foo(x, y a, z ;) + +foo(x, y) diff --git a/crates/parser2/test_files/error_recovery/exprs/call.snap b/crates/parser2/test_files/error_recovery/exprs/call.snap new file mode 100644 index 0000000000..ecb375af60 --- /dev/null +++ b/crates/parser2/test_files/error_recovery/exprs/call.snap @@ -0,0 +1,82 @@ +--- +source: crates/parser2/tests/error_recovery.rs +expression: node +input_file: crates/parser2/test_files/error_recovery/exprs/call.fe +--- +Root@0..40 + CallExpr@0..16 + PathExpr@0..3 + Path@0..3 + PathSegment@0..3 + Ident@0..3 "foo" + CallArgList@3..16 + LParen@3..4 "(" + CallArg@4..5 + PathExpr@4..5 + Path@4..5 + PathSegment@4..5 + Ident@4..5 "x" + Comma@5..6 "," + WhiteSpace@6..7 " " + CallArg@7..8 + PathExpr@7..8 + Path@7..8 + PathSegment@7..8 + Ident@7..8 "y" + WhiteSpace@8..9 " " + Error@9..10 + Ident@9..10 "a" + Comma@10..11 "," + WhiteSpace@11..12 " " + CallArg@12..13 + PathExpr@12..13 + Path@12..13 + PathSegment@12..13 + Ident@12..13 "z" + WhiteSpace@13..14 " " + Error@14..15 + SemiColon@14..15 ";" + RParen@15..16 ")" + Newline@16..18 "\n\n" + CallExpr@18..39 + PathExpr@18..33 + Path@18..33 + PathSegment@18..33 + Ident@18..21 "foo" + GenericArgList@21..33 + Lt@21..22 "<" + TypeGenericArg@22..25 + PathType@22..25 + Path@22..25 + PathSegment@22..25 + Ident@22..25 "i32" + Comma@25..26 "," + WhiteSpace@26..27 " " + TypeGenericArg@27..28 + PathType@27..28 + Path@27..28 + PathSegment@27..28 + Ident@27..28 "T" + WhiteSpace@28..29 " " + Error@29..30 + Ident@29..30 "E" + Comma@30..31 "," + WhiteSpace@31..32 " " + Gt@32..33 ">" + CallArgList@33..39 + LParen@33..34 "(" + CallArg@34..35 + PathExpr@34..35 + Path@34..35 + PathSegment@34..35 + Ident@34..35 "x" + Comma@35..36 "," + WhiteSpace@36..37 " " + CallArg@37..38 + PathExpr@37..38 + Path@37..38 + PathSegment@37..38 + 
Ident@37..38 "y" + RParen@38..39 ")" + Newline@39..40 "\n" + diff --git a/crates/parser2/test_files/error_recovery/exprs/if_.fe b/crates/parser2/test_files/error_recovery/exprs/if_.fe new file mode 100644 index 0000000000..5b9f9b43fa --- /dev/null +++ b/crates/parser2/test_files/error_recovery/exprs/if_.fe @@ -0,0 +1,14 @@ +if a b { +} + +if c { + +} else d {} + +if e { } else f if g { } else { } + +if h { + 10 +else { + 1 +} diff --git a/crates/parser2/test_files/error_recovery/exprs/if_.snap b/crates/parser2/test_files/error_recovery/exprs/if_.snap new file mode 100644 index 0000000000..c40a18cfd6 --- /dev/null +++ b/crates/parser2/test_files/error_recovery/exprs/if_.snap @@ -0,0 +1,114 @@ +--- +source: crates/parser2/tests/error_recovery.rs +expression: node +input_file: crates/parser2/test_files/error_recovery/exprs/if_.fe +--- +Root@0..97 + IfExpr@0..10 + IfKw@0..2 "if" + WhiteSpace@2..3 " " + PathExpr@3..4 + Path@3..4 + PathSegment@3..4 + Ident@3..4 "a" + WhiteSpace@4..5 " " + Error@5..6 + Ident@5..6 "b" + WhiteSpace@6..7 " " + BlockExpr@7..10 + LBrace@7..8 "{" + Newline@8..9 "\n" + RBrace@9..10 "}" + Newline@10..12 "\n\n" + IfExpr@12..31 + IfKw@12..14 "if" + WhiteSpace@14..15 " " + PathExpr@15..16 + Path@15..16 + PathSegment@15..16 + Ident@15..16 "c" + WhiteSpace@16..17 " " + BlockExpr@17..21 + LBrace@17..18 "{" + Newline@18..20 "\n\n" + RBrace@20..21 "}" + WhiteSpace@21..22 " " + ElseKw@22..26 "else" + WhiteSpace@26..27 " " + Error@27..28 + Ident@27..28 "d" + WhiteSpace@28..29 " " + BlockExpr@29..31 + LBrace@29..30 "{" + RBrace@30..31 "}" + Newline@31..33 "\n\n" + IfExpr@33..66 + IfKw@33..35 "if" + WhiteSpace@35..36 " " + PathExpr@36..37 + Path@36..37 + PathSegment@36..37 + Ident@36..37 "e" + WhiteSpace@37..38 " " + BlockExpr@38..41 + LBrace@38..39 "{" + WhiteSpace@39..40 " " + RBrace@40..41 "}" + WhiteSpace@41..42 " " + ElseKw@42..46 "else" + WhiteSpace@46..47 " " + Error@47..48 + Ident@47..48 "f" + WhiteSpace@48..49 " " + IfExpr@49..66 + IfKw@49..51 "if" 
+ WhiteSpace@51..52 " " + PathExpr@52..53 + Path@52..53 + PathSegment@52..53 + Ident@52..53 "g" + WhiteSpace@53..54 " " + BlockExpr@54..57 + LBrace@54..55 "{" + WhiteSpace@55..56 " " + RBrace@56..57 "}" + WhiteSpace@57..58 " " + ElseKw@58..62 "else" + WhiteSpace@62..63 " " + BlockExpr@63..66 + LBrace@63..64 "{" + WhiteSpace@64..65 " " + RBrace@65..66 "}" + Newline@66..68 "\n\n" + IfExpr@68..96 + IfKw@68..70 "if" + WhiteSpace@70..71 " " + PathExpr@71..72 + Path@71..72 + PathSegment@71..72 + Ident@71..72 "h" + WhiteSpace@72..73 " " + BlockExpr@73..82 + LBrace@73..74 "{" + Newline@74..75 "\n" + WhiteSpace@75..79 " " + ExprStmt@79..81 + LitExpr@79..81 + Lit@79..81 + Int@79..81 "10" + Newline@81..82 "\n" + ExprStmt@82..82 + ElseKw@82..86 "else" + WhiteSpace@86..87 " " + BlockExpr@87..96 + LBrace@87..88 "{" + Newline@88..89 "\n" + WhiteSpace@89..93 " " + ExprStmt@93..94 + LitExpr@93..94 + Lit@93..94 + Int@93..94 "1" + Newline@94..95 "\n" + RBrace@95..96 "}" + Newline@96..97 "\n" + diff --git a/crates/parser2/test_files/error_recovery/exprs/index.fe b/crates/parser2/test_files/error_recovery/exprs/index.fe new file mode 100644 index 0000000000..afd62778b9 --- /dev/null +++ b/crates/parser2/test_files/error_recovery/exprs/index.fe @@ -0,0 +1,3 @@ +x[1 a] +x[2 + 3 +x[41] \ No newline at end of file diff --git a/crates/parser2/test_files/error_recovery/exprs/index.snap b/crates/parser2/test_files/error_recovery/exprs/index.snap new file mode 100644 index 0000000000..aa3cbcdcae --- /dev/null +++ b/crates/parser2/test_files/error_recovery/exprs/index.snap @@ -0,0 +1,48 @@ +--- +source: crates/parser2/tests/error_recovery.rs +expression: node +input_file: crates/parser2/test_files/error_recovery/exprs/index.fe +--- +Root@0..20 + IndexExpr@0..6 + PathExpr@0..1 + Path@0..1 + PathSegment@0..1 + Ident@0..1 "x" + LBracket@1..2 "[" + LitExpr@2..3 + Lit@2..3 + Int@2..3 "1" + WhiteSpace@3..4 " " + Error@4..5 + Ident@4..5 "a" + RBracket@5..6 "]" + Newline@6..7 "\n" + IndexExpr@7..14 + 
PathExpr@7..8 + Path@7..8 + PathSegment@7..8 + Ident@7..8 "x" + LBracket@8..9 "[" + BinExpr@9..14 + LitExpr@9..10 + Lit@9..10 + Int@9..10 "2" + WhiteSpace@10..11 " " + Plus@11..12 "+" + WhiteSpace@12..13 " " + LitExpr@13..14 + Lit@13..14 + Int@13..14 "3" + Newline@14..15 "\n" + IndexExpr@15..20 + PathExpr@15..16 + Path@15..16 + PathSegment@15..16 + Ident@15..16 "x" + LBracket@16..17 "[" + LitExpr@17..19 + Lit@17..19 + Int@17..19 "41" + RBracket@19..20 "]" + diff --git a/crates/parser2/test_files/error_recovery/exprs/match_.fe b/crates/parser2/test_files/error_recovery/exprs/match_.fe new file mode 100644 index 0000000000..43c649187c --- /dev/null +++ b/crates/parser2/test_files/error_recovery/exprs/match_.fe @@ -0,0 +1,9 @@ +match X => { + Foo() => true + Bar +} + +match X { + Foo(i, j, => true x + Bar => x +} \ No newline at end of file diff --git a/crates/parser2/test_files/error_recovery/exprs/match_.snap b/crates/parser2/test_files/error_recovery/exprs/match_.snap new file mode 100644 index 0000000000..6a6e366e58 --- /dev/null +++ b/crates/parser2/test_files/error_recovery/exprs/match_.snap @@ -0,0 +1,108 @@ +--- +source: crates/parser2/tests/error_recovery.rs +expression: node +input_file: crates/parser2/test_files/error_recovery/exprs/match_.fe +--- +Root@0..94 + MatchExpr@0..40 + MatchKw@0..5 "match" + WhiteSpace@5..6 " " + PathExpr@6..7 + Path@6..7 + PathSegment@6..7 + Ident@6..7 "X" + WhiteSpace@7..9 " " + Error@9..11 + FatArrow@9..11 "=>" + WhiteSpace@11..12 " " + MatchArmList@12..40 + LBrace@12..13 "{" + WhiteSpace@13..14 " " + Newline@14..15 "\n" + WhiteSpace@15..18 " " + MatchArm@18..31 + PathTuplePat@18..23 + Path@18..21 + PathSegment@18..21 + Ident@18..21 "Foo" + TuplePatElemList@21..23 + LParen@21..22 "(" + RParen@22..23 ")" + WhiteSpace@23..24 " " + FatArrow@24..26 "=>" + WhiteSpace@26..27 " " + LitExpr@27..31 + Lit@27..31 + TrueKw@27..31 "true" + Newline@31..32 "\n" + WhiteSpace@32..35 " " + MatchArm@35..38 + PathPat@35..38 + Path@35..38 + 
PathSegment@35..38 + Ident@35..38 "Bar" + Newline@38..39 "\n" + RBrace@39..40 "}" + WhiteSpace@40..41 " " + Newline@41..43 "\n\n" + MatchExpr@43..93 + MatchKw@43..48 "match" + WhiteSpace@48..49 " " + PathExpr@49..50 + Path@49..50 + PathSegment@49..50 + Ident@49..50 "X" + WhiteSpace@50..52 " " + MatchArmList@52..93 + LBrace@52..53 "{" + WhiteSpace@53..54 " " + Newline@54..55 "\n" + WhiteSpace@55..58 " " + MatchArm@58..77 + PathTuplePat@58..70 + Path@58..61 + PathSegment@58..61 + Ident@58..61 "Foo" + TuplePatElemList@61..70 + LParen@61..62 "(" + PathPat@62..63 + Path@62..63 + PathSegment@62..63 + Ident@62..63 "i" + Comma@63..64 "," + WhiteSpace@64..65 " " + PathPat@65..66 + Path@65..66 + PathSegment@65..66 + Ident@65..66 "j" + Comma@66..67 "," + WhiteSpace@67..70 " " + PathPat@70..70 + Path@70..70 + PathSegment@70..70 + FatArrow@70..72 "=>" + WhiteSpace@72..73 " " + LitExpr@73..77 + Lit@73..77 + TrueKw@73..77 "true" + WhiteSpace@77..78 " " + Error@78..79 + Ident@78..79 "x" + Newline@79..80 "\n" + WhiteSpace@80..83 " " + MatchArm@83..91 + PathPat@83..86 + Path@83..86 + PathSegment@83..86 + Ident@83..86 "Bar" + WhiteSpace@86..87 " " + FatArrow@87..89 "=>" + WhiteSpace@89..90 " " + PathExpr@90..91 + Path@90..91 + PathSegment@90..91 + Ident@90..91 "x" + Newline@91..92 "\n" + RBrace@92..93 "}" + WhiteSpace@93..94 " " + diff --git a/crates/parser2/test_files/error_recovery/exprs/method.fe b/crates/parser2/test_files/error_recovery/exprs/method.fe new file mode 100644 index 0000000000..577b390824 --- /dev/null +++ b/crates/parser2/test_files/error_recovery/exprs/method.fe @@ -0,0 +1,5 @@ +foo::bar.baz(1, 2) + +foo::bar.x(1, 2 E,) + +foo::bar.baz() \ No newline at end of file diff --git a/crates/parser2/test_files/error_recovery/exprs/method.snap b/crates/parser2/test_files/error_recovery/exprs/method.snap new file mode 100644 index 0000000000..984d5bafbd --- /dev/null +++ b/crates/parser2/test_files/error_recovery/exprs/method.snap @@ -0,0 +1,106 @@ +--- +source: 
crates/parser2/tests/error_recovery.rs +expression: node +input_file: crates/parser2/test_files/error_recovery/exprs/method.fe +--- +Root@0..78 + MethodCallExpr@0..31 + PathExpr@0..8 + Path@0..8 + PathSegment@0..3 + Ident@0..3 "foo" + Colon2@3..5 "::" + PathSegment@5..8 + Ident@5..8 "bar" + Dot@8..9 "." + Ident@9..12 "baz" + GenericArgList@12..25 + Lt@12..13 "<" + TypeGenericArg@13..16 + PathType@13..16 + Path@13..16 + PathSegment@13..16 + Ident@13..16 "i32" + Comma@16..17 "," + WhiteSpace@17..18 " " + TypeGenericArg@18..21 + PathType@18..21 + Path@18..21 + PathSegment@18..21 + Ident@18..21 "u32" + WhiteSpace@21..22 " " + Error@22..23 + Ident@22..23 "T" + Comma@23..24 "," + Gt@24..25 ">" + CallArgList@25..31 + LParen@25..26 "(" + CallArg@26..27 + LitExpr@26..27 + Lit@26..27 + Int@26..27 "1" + Comma@27..28 "," + WhiteSpace@28..29 " " + CallArg@29..30 + LitExpr@29..30 + Lit@29..30 + Int@29..30 "2" + RParen@30..31 ")" + Newline@31..33 "\n\n" + MethodCallExpr@33..52 + PathExpr@33..41 + Path@33..41 + PathSegment@33..36 + Ident@33..36 "foo" + Colon2@36..38 "::" + PathSegment@38..41 + Ident@38..41 "bar" + Dot@41..42 "." + Ident@42..43 "x" + CallArgList@43..52 + LParen@43..44 "(" + CallArg@44..45 + LitExpr@44..45 + Lit@44..45 + Int@44..45 "1" + Comma@45..46 "," + WhiteSpace@46..47 " " + CallArg@47..48 + LitExpr@47..48 + Lit@47..48 + Int@47..48 "2" + WhiteSpace@48..49 " " + Error@49..50 + Ident@49..50 "E" + Comma@50..51 "," + RParen@51..52 ")" + Newline@52..54 "\n\n" + MethodCallExpr@54..78 + PathExpr@54..62 + Path@54..62 + PathSegment@54..57 + Ident@54..57 "foo" + Colon2@57..59 "::" + PathSegment@59..62 + Ident@59..62 "bar" + Dot@62..63 "." 
+ Ident@63..66 "baz" + GenericArgList@66..76 + Lt@66..67 "<" + TypeGenericArg@67..70 + PathType@67..70 + Path@67..70 + PathSegment@67..70 + Ident@67..70 "i32" + Comma@70..71 "," + WhiteSpace@71..72 " " + TypeGenericArg@72..75 + PathType@72..75 + Path@72..75 + PathSegment@72..75 + Ident@72..75 "u32" + Gt@75..76 ">" + CallArgList@76..78 + LParen@76..77 "(" + RParen@77..78 ")" + diff --git a/crates/parser2/test_files/error_recovery/items/const_.fe b/crates/parser2/test_files/error_recovery/items/const_.fe new file mode 100644 index 0000000000..a9ab53676f --- /dev/null +++ b/crates/parser2/test_files/error_recovery/items/const_.fe @@ -0,0 +1,5 @@ +const X = 10 + +const X: i32 + +const X: ]@ = 1 \ No newline at end of file diff --git a/crates/parser2/test_files/error_recovery/items/const_.snap b/crates/parser2/test_files/error_recovery/items/const_.snap new file mode 100644 index 0000000000..04dc26dcef --- /dev/null +++ b/crates/parser2/test_files/error_recovery/items/const_.snap @@ -0,0 +1,52 @@ +--- +source: crates/parser2/tests/error_recovery.rs +expression: node +input_file: crates/parser2/test_files/error_recovery/items/const_.fe +--- +Root@0..44 + ItemList@0..44 + Item@0..12 + Const@0..12 + ConstKw@0..5 "const" + WhiteSpace@5..6 " " + Ident@6..7 "X" + WhiteSpace@7..8 " " + Eq@8..9 "=" + WhiteSpace@9..10 " " + LitExpr@10..12 + Lit@10..12 + Int@10..12 "10" + Newline@12..14 "\n\n" + Item@14..26 + Const@14..26 + ConstKw@14..19 "const" + WhiteSpace@19..20 " " + Ident@20..21 "X" + Colon@21..22 ":" + WhiteSpace@22..23 " " + PathType@23..26 + Path@23..26 + PathSegment@23..26 + Ident@23..26 "i32" + WhiteSpace@26..27 " " + Newline@27..29 "\n\n" + Item@29..44 + Const@29..44 + ConstKw@29..34 "const" + WhiteSpace@34..35 " " + Ident@35..36 "X" + Colon@36..37 ":" + WhiteSpace@37..38 " " + PathType@38..40 + Path@38..38 + PathSegment@38..38 + Error@38..40 + RBracket@38..39 "]" + InvalidToken@39..40 "@" + WhiteSpace@40..41 " " + Eq@41..42 "=" + WhiteSpace@42..43 " " + 
LitExpr@43..44 + Lit@43..44 + Int@43..44 "1" + diff --git a/crates/parser2/test_files/error_recovery/items/enum_.fe b/crates/parser2/test_files/error_recovery/items/enum_.fe new file mode 100644 index 0000000000..0e85c5b496 --- /dev/null +++ b/crates/parser2/test_files/error_recovery/items/enum_.fe @@ -0,0 +1,15 @@ +pub enum MyEnum { + X(u32, T + A + Y(T, u32) B + Z +} + +pub enum MyEnum2 +where + T: * -> (* -> * + U: * -> * +{ + T(t) + U(U) +} diff --git a/crates/parser2/test_files/error_recovery/items/enum_.snap b/crates/parser2/test_files/error_recovery/items/enum_.snap new file mode 100644 index 0000000000..3b0e168ce1 --- /dev/null +++ b/crates/parser2/test_files/error_recovery/items/enum_.snap @@ -0,0 +1,164 @@ +--- +source: crates/parser2/tests/error_recovery.rs +expression: node +input_file: crates/parser2/test_files/error_recovery/items/enum_.fe +--- +Root@0..150 + ItemList@0..149 + Item@0..63 + Enum@0..63 + ItemModifier@0..3 + PubKw@0..3 "pub" + WhiteSpace@3..4 " " + EnumKw@4..8 "enum" + WhiteSpace@8..9 " " + Ident@9..15 "MyEnum" + GenericParamList@15..18 + Lt@15..16 "<" + TypeGenericParam@16..17 + Ident@16..17 "T" + Gt@17..18 ">" + WhiteSpace@18..19 " " + VariantDefList@19..63 + LBrace@19..20 "{" + Newline@20..21 "\n" + WhiteSpace@21..25 " " + VariantDef@25..33 + Ident@25..26 "X" + TupleType@26..33 + LParen@26..27 "(" + PathType@27..30 + Path@27..30 + PathSegment@27..30 + Ident@27..30 "u32" + Comma@30..31 "," + WhiteSpace@31..32 " " + PathType@32..33 + Path@32..33 + PathSegment@32..33 + Ident@32..33 "T" + Newline@33..34 "\n" + WhiteSpace@34..38 " " + VariantDef@38..39 + Ident@38..39 "A" + Newline@39..40 "\n" + WhiteSpace@40..44 " " + VariantDef@44..53 + Ident@44..45 "Y" + TupleType@45..53 + LParen@45..46 "(" + PathType@46..47 + Path@46..47 + PathSegment@46..47 + Ident@46..47 "T" + Comma@47..48 "," + WhiteSpace@48..49 " " + PathType@49..52 + Path@49..52 + PathSegment@49..52 + Ident@49..52 "u32" + RParen@52..53 ")" + WhiteSpace@53..54 " " + Error@54..55 + 
Ident@54..55 "B" + Newline@55..56 "\n" + WhiteSpace@56..60 " " + VariantDef@60..61 + Ident@60..61 "Z" + Newline@61..62 "\n" + RBrace@62..63 "}" + Newline@63..65 "\n\n" + Item@65..149 + Enum@65..149 + ItemModifier@65..68 + PubKw@65..68 "pub" + WhiteSpace@68..69 " " + EnumKw@69..73 "enum" + WhiteSpace@73..74 " " + Ident@74..81 "MyEnum2" + GenericParamList@81..87 + Lt@81..82 "<" + TypeGenericParam@82..83 + Ident@82..83 "T" + Comma@83..84 "," + WhiteSpace@84..85 " " + TypeGenericParam@85..86 + Ident@85..86 "U" + Gt@86..87 ">" + Newline@87..88 "\n" + WhereClause@88..127 + WhereKw@88..93 "where" + Newline@93..94 "\n" + WhiteSpace@94..98 " " + WherePredicate@98..113 + PathType@98..99 + Path@98..99 + PathSegment@98..99 + Ident@98..99 "T" + TypeBoundList@99..113 + Colon@99..100 ":" + WhiteSpace@100..101 " " + TypeBound@101..113 + KindBoundAbs@101..113 + KindBoundMono@101..102 + Star@101..102 "*" + WhiteSpace@102..103 " " + Arrow@103..105 "->" + WhiteSpace@105..106 " " + LParen@106..107 "(" + KindBoundAbs@107..113 + KindBoundMono@107..108 + Star@107..108 "*" + WhiteSpace@108..109 " " + Arrow@109..111 "->" + WhiteSpace@111..112 " " + KindBoundMono@112..113 + Star@112..113 "*" + Newline@113..114 "\n" + WhiteSpace@114..118 " " + WherePredicate@118..127 + PathType@118..119 + Path@118..119 + PathSegment@118..119 + Ident@118..119 "U" + TypeBoundList@119..127 + Colon@119..120 ":" + WhiteSpace@120..121 " " + TypeBound@121..127 + KindBoundAbs@121..127 + KindBoundMono@121..122 + Star@121..122 "*" + WhiteSpace@122..123 " " + Arrow@123..125 "->" + WhiteSpace@125..126 " " + KindBoundMono@126..127 + Star@126..127 "*" + Newline@127..128 "\n" + VariantDefList@128..149 + LBrace@128..129 "{" + Newline@129..130 "\n" + WhiteSpace@130..134 " " + VariantDef@134..138 + Ident@134..135 "T" + TupleType@135..138 + LParen@135..136 "(" + PathType@136..137 + Path@136..137 + PathSegment@136..137 + Ident@136..137 "t" + RParen@137..138 ")" + Newline@138..139 "\n" + WhiteSpace@139..143 " " + 
VariantDef@143..147 + Ident@143..144 "U" + TupleType@144..147 + LParen@144..145 "(" + PathType@145..146 + Path@145..146 + PathSegment@145..146 + Ident@145..146 "U" + RParen@146..147 ")" + Newline@147..148 "\n" + RBrace@148..149 "}" + Newline@149..150 "\n" + diff --git a/crates/parser2/test_files/error_recovery/items/extern_.fe b/crates/parser2/test_files/error_recovery/items/extern_.fe new file mode 100644 index 0000000000..d33068b690 --- /dev/null +++ b/crates/parser2/test_files/error_recovery/items/extern_.fe @@ -0,0 +1,9 @@ +extern { + pub unsafe fn Foo + + pub fn bar() + + struct Foo { + + pub unsafe fn foo() +} \ No newline at end of file diff --git a/crates/parser2/test_files/error_recovery/items/extern_.snap b/crates/parser2/test_files/error_recovery/items/extern_.snap new file mode 100644 index 0000000000..5f7fbf7824 --- /dev/null +++ b/crates/parser2/test_files/error_recovery/items/extern_.snap @@ -0,0 +1,66 @@ +--- +source: crates/parser2/tests/error_recovery.rs +expression: node +input_file: crates/parser2/test_files/error_recovery/items/extern_.fe +--- +Root@0..93 + ItemList@0..93 + Item@0..54 + Extern@0..54 + ExternKw@0..6 "extern" + WhiteSpace@6..7 " " + ExternItemList@7..54 + LBrace@7..8 "{" + Newline@8..9 "\n" + WhiteSpace@9..13 " " + Func@13..30 + ItemModifier@13..23 + PubKw@13..16 "pub" + WhiteSpace@16..17 " " + UnsafeKw@17..23 "unsafe" + WhiteSpace@23..24 " " + FnKw@24..26 "fn" + WhiteSpace@26..27 " " + Ident@27..30 "Foo" + Newline@30..32 "\n\n" + WhiteSpace@32..36 " " + Func@36..48 + ItemModifier@36..39 + PubKw@36..39 "pub" + WhiteSpace@39..40 " " + FnKw@40..42 "fn" + WhiteSpace@42..43 " " + Ident@43..46 "bar" + FuncParamList@46..48 + LParen@46..47 "(" + RParen@47..48 ")" + Newline@48..50 "\n\n" + WhiteSpace@50..54 " " + Item@54..75 + Struct@54..75 + StructKw@54..60 "struct" + WhiteSpace@60..61 " " + Ident@61..64 "Foo" + WhiteSpace@64..65 " " + RecordFieldDefList@65..75 + LBrace@65..66 "{" + Newline@66..68 "\n\n" + WhiteSpace@68..72 " " + 
RecordFieldDef@72..75 + PubKw@72..75 "pub" + WhiteSpace@75..76 " " + Item@76..93 + Func@76..93 + ItemModifier@76..82 + UnsafeKw@76..82 "unsafe" + WhiteSpace@82..83 " " + FnKw@83..85 "fn" + WhiteSpace@85..86 " " + Ident@86..89 "foo" + FuncParamList@89..91 + LParen@89..90 "(" + RParen@90..91 ")" + Newline@91..92 "\n" + Error@92..93 + RBrace@92..93 "}" + diff --git a/crates/parser2/test_files/error_recovery/items/func.fe b/crates/parser2/test_files/error_recovery/items/func.fe new file mode 100644 index 0000000000..481ba1e975 --- /dev/null +++ b/crates/parser2/test_files/error_recovery/items/func.fe @@ -0,0 +1,10 @@ +fn foo>(x: i32, _ mut y: u32, z: u32) -> T, u where T: Trait2 +{ + +} + +fn foo<<(x: i32) + where T: Trait2 +{ + +} diff --git a/crates/parser2/test_files/error_recovery/items/func.snap b/crates/parser2/test_files/error_recovery/items/func.snap new file mode 100644 index 0000000000..30d836c5ab --- /dev/null +++ b/crates/parser2/test_files/error_recovery/items/func.snap @@ -0,0 +1,156 @@ +--- +source: crates/parser2/tests/error_recovery.rs +expression: node +input_file: crates/parser2/test_files/error_recovery/items/func.fe +--- +Root@0..133 + ItemList@0..132 + Item@0..78 + Func@0..78 + FnKw@0..2 "fn" + WhiteSpace@2..3 " " + Ident@3..6 "foo" + GenericParamList@6..16 + Lt@6..7 "<" + TypeGenericParam@7..15 + Ident@7..8 "T" + TypeBoundList@8..15 + Colon@8..9 ":" + WhiteSpace@9..10 " " + TypeBound@10..15 + TraitRef@10..15 + Path@10..15 + PathSegment@10..15 + Ident@10..15 "Trait" + Gt@15..16 ">" + Error@16..17 + Gt@16..17 ">" + FuncParamList@17..47 + LParen@17..18 "(" + FnParam@18..24 + Ident@18..19 "x" + Colon@19..20 ":" + WhiteSpace@20..21 " " + PathType@21..24 + Path@21..24 + PathSegment@21..24 + Ident@21..24 "i32" + Comma@24..25 "," + WhiteSpace@25..26 " " + FnParam@26..38 + Underscore@26..27 "_" + WhiteSpace@27..28 " " + Error@28..31 + MutKw@28..31 "mut" + WhiteSpace@31..32 " " + Ident@32..33 "y" + Colon@33..34 ":" + WhiteSpace@34..35 " " + PathType@35..38 
+ Path@35..38 + PathSegment@35..38 + Ident@35..38 "u32" + Comma@38..39 "," + WhiteSpace@39..40 " " + FnParam@40..46 + Ident@40..41 "z" + Colon@41..42 ":" + WhiteSpace@42..43 " " + PathType@43..46 + Path@43..46 + PathSegment@43..46 + Ident@43..46 "u32" + RParen@46..47 ")" + WhiteSpace@47..48 " " + Arrow@48..50 "->" + WhiteSpace@50..52 " " + PathType@52..53 + Path@52..53 + PathSegment@52..53 + Ident@52..53 "T" + Error@53..56 + Comma@53..54 "," + WhiteSpace@54..55 " " + Ident@55..56 "u" + WhiteSpace@56..57 " " + WhereClause@57..72 + WhereKw@57..62 "where" + WhiteSpace@62..63 " " + WherePredicate@63..72 + PathType@63..64 + Path@63..64 + PathSegment@63..64 + Ident@63..64 "T" + TypeBoundList@64..72 + Colon@64..65 ":" + WhiteSpace@65..66 " " + TypeBound@66..72 + TraitRef@66..72 + Path@66..72 + PathSegment@66..72 + Ident@66..72 "Trait2" + WhiteSpace@72..73 " " + Newline@73..74 "\n" + BlockExpr@74..78 + LBrace@74..75 "{" + Newline@75..77 "\n\n" + RBrace@77..78 "}" + Newline@78..80 "\n\n" + Item@80..132 + Func@80..132 + FnKw@80..82 "fn" + WhiteSpace@82..83 " " + Ident@83..86 "foo" + GenericParamList@86..98 + Lt@86..87 "<" + Error@87..89 + Lt@87..88 "<" + Lt@88..89 "<" + TypeGenericParam@89..97 + Ident@89..90 "T" + TypeBoundList@90..97 + Colon@90..91 ":" + WhiteSpace@91..92 " " + TypeBound@92..97 + TraitRef@92..97 + Path@92..97 + PathSegment@92..97 + Ident@92..97 "Trait" + Gt@97..98 ">" + FuncParamList@98..106 + LParen@98..99 "(" + FnParam@99..105 + Ident@99..100 "x" + Colon@100..101 ":" + WhiteSpace@101..102 " " + PathType@102..105 + Path@102..105 + PathSegment@102..105 + Ident@102..105 "i32" + RParen@105..106 ")" + Newline@106..107 "\n" + WhiteSpace@107..111 " " + WhereClause@111..126 + WhereKw@111..116 "where" + WhiteSpace@116..117 " " + WherePredicate@117..126 + PathType@117..118 + Path@117..118 + PathSegment@117..118 + Ident@117..118 "T" + TypeBoundList@118..126 + Colon@118..119 ":" + WhiteSpace@119..120 " " + TypeBound@120..126 + TraitRef@120..126 + Path@120..126 + 
PathSegment@120..126 + Ident@120..126 "Trait2" + WhiteSpace@126..127 " " + Newline@127..128 "\n" + BlockExpr@128..132 + LBrace@128..129 "{" + Newline@129..131 "\n\n" + RBrace@131..132 "}" + Newline@132..133 "\n" + diff --git a/crates/parser2/test_files/error_recovery/items/impl_.fe b/crates/parser2/test_files/error_recovery/items/impl_.fe new file mode 100644 index 0000000000..f7192a5d44 --- /dev/null +++ b/crates/parser2/test_files/error_recovery/items/impl_.fe @@ -0,0 +1,6 @@ +impl Foo +{ } \ No newline at end of file diff --git a/crates/parser2/test_files/error_recovery/items/impl_.snap b/crates/parser2/test_files/error_recovery/items/impl_.snap new file mode 100644 index 0000000000..8b7f27281c --- /dev/null +++ b/crates/parser2/test_files/error_recovery/items/impl_.snap @@ -0,0 +1,76 @@ +--- +source: crates/parser2/tests/error_recovery.rs +expression: node +input_file: crates/parser2/test_files/error_recovery/items/impl_.fe +--- +Root@0..56 + ItemList@0..56 + Item@0..37 + Impl@0..37 + ImplKw@0..4 "impl" + WhiteSpace@4..5 " " + PathType@5..17 + Path@5..8 + PathSegment@5..8 + Ident@5..8 "Foo" + GenericArgList@8..17 + Lt@8..9 "<" + TypeGenericArg@9..10 + PathType@9..10 + Path@9..10 + PathSegment@9..10 + Ident@9..10 "T" + Comma@10..11 "," + WhiteSpace@11..12 " " + Newline@12..13 "\n" + WhiteSpace@13..17 " " + TypeGenericArg@17..17 + PathType@17..17 + Path@17..17 + PathSegment@17..17 + WhereClause@17..33 + WhereKw@17..22 "where" + WhiteSpace@22..23 " " + WherePredicate@23..33 + PathType@23..24 + Path@23..24 + PathSegment@23..24 + Ident@23..24 "T" + TypeBoundList@24..33 + Colon@24..25 ":" + WhiteSpace@25..26 " " + TypeBound@26..33 + TraitRef@26..33 + Path@26..33 + PathSegment@26..33 + Ident@26..33 "Integer" + Newline@33..34 "\n" + ImplItemList@34..37 + LBrace@34..35 "{" + WhiteSpace@35..36 " " + RBrace@36..37 "}" + Newline@37..39 "\n\n" + Item@39..56 + Impl@39..56 + ImplKw@39..43 "impl" + WhiteSpace@43..44 " " + PathType@44..52 + Path@44..52 + PathSegment@44..52 + 
Ident@44..47 "Foo" + GenericArgList@47..52 + Lt@47..48 "<" + TypeGenericArg@48..49 + PathType@48..49 + Path@48..49 + PathSegment@48..49 + Ident@48..49 "T" + Comma@49..50 "," + WhiteSpace@50..51 " " + Gt@51..52 ">" + Newline@52..53 "\n" + ImplItemList@53..56 + LBrace@53..54 "{" + WhiteSpace@54..55 " " + RBrace@55..56 "}" + diff --git a/crates/parser2/test_files/error_recovery/items/impl_trait.fe b/crates/parser2/test_files/error_recovery/items/impl_trait.fe new file mode 100644 index 0000000000..b38ce032ff --- /dev/null +++ b/crates/parser2/test_files/error_recovery/items/impl_trait.fe @@ -0,0 +1,5 @@ +impl A for B" + WhiteSpace@14..15 " " + ForKw@15..18 "for" + WhiteSpace@18..19 " " + PathType@19..22 + Path@19..20 + PathSegment@19..20 + Ident@19..20 "B" + GenericArgList@20..22 + Lt@20..21 "<" + TypeGenericArg@21..22 + PathType@21..22 + Path@21..22 + PathSegment@21..22 + Ident@21..22 "T" + WhiteSpace@22..23 " " + WhereClause@23..33 + WhereKw@23..28 "where" + WhiteSpace@28..29 " " + WherePredicate@29..33 + PathType@29..30 + Path@29..30 + PathSegment@29..30 + Ident@29..30 "T" + TypeBoundList@30..33 + Colon@30..31 ":" + WhiteSpace@31..32 " " + TypeBound@32..33 + TraitRef@32..33 + Path@32..33 + PathSegment@32..33 + Ident@32..33 "X" + WhiteSpace@33..34 " " + ImplTraitItemList@34..36 + LBrace@34..35 "{" + RBrace@35..36 "}" + Newline@36..38 "\n\n" + Item@38..71 + ImplTrait@38..71 + ImplKw@38..42 "impl" + WhiteSpace@42..43 " " + TraitRef@43..49 + Path@43..44 + PathSegment@43..44 + Ident@43..44 "C" + GenericArgList@44..49 + Lt@44..45 "<" + TypeGenericArg@45..46 + PathType@45..46 + Path@45..46 + PathSegment@45..46 + Ident@45..46 "T" + Comma@46..47 "," + WhiteSpace@47..48 " " + TypeGenericArg@48..49 + PathType@48..49 + Path@48..49 + PathSegment@48..49 + Ident@48..49 "u" + WhiteSpace@49..50 " " + ForKw@50..53 "for" + WhiteSpace@53..54 " " + PathType@54..57 + Path@54..55 + PathSegment@54..55 + Ident@54..55 "D" + GenericArgList@55..57 + Lt@55..56 "<" + TypeGenericArg@56..57 + 
PathType@56..57 + Path@56..57 + PathSegment@56..57 + Ident@56..57 "T" + WhiteSpace@57..58 " " + WhereClause@58..68 + WhereKw@58..63 "where" + WhiteSpace@63..64 " " + WherePredicate@64..68 + PathType@64..65 + Path@64..65 + PathSegment@64..65 + Ident@64..65 "T" + TypeBoundList@65..68 + Colon@65..66 ":" + WhiteSpace@66..67 " " + TypeBound@67..68 + TraitRef@67..68 + Path@67..68 + PathSegment@67..68 + Ident@67..68 "X" + WhiteSpace@68..69 " " + ImplTraitItemList@69..71 + LBrace@69..70 "{" + RBrace@70..71 "}" + Newline@71..73 "\n\n" + Item@73..90 + ImplTrait@73..90 + ImplKw@73..77 "impl" + WhiteSpace@77..78 " " + TraitRef@78..79 + Path@78..79 + PathSegment@78..79 + Ident@78..79 "E" + WhiteSpace@79..80 " " + Error@80..81 + InvalidToken@80..81 "@" + WhiteSpace@81..82 " " + ForKw@82..85 "for" + WhiteSpace@85..86 " " + PathType@86..87 + Path@86..87 + PathSegment@86..87 + Ident@86..87 "F" + WhiteSpace@87..88 " " + ImplTraitItemList@88..90 + LBrace@88..89 "{" + RBrace@89..90 "}" + diff --git a/crates/parser2/test_files/error_recovery/items/struct_.fe b/crates/parser2/test_files/error_recovery/items/struct_.fe new file mode 100644 index 0000000000..50f0c13817 --- /dev/null +++ b/crates/parser2/test_files/error_recovery/items/struct_.fe @@ -0,0 +1,16 @@ +pub struct i32 { + return 1 + } + + x: i32 +} \ No newline at end of file diff --git a/crates/parser2/test_files/error_recovery/items/struct_.snap b/crates/parser2/test_files/error_recovery/items/struct_.snap new file mode 100644 index 0000000000..2557680498 --- /dev/null +++ b/crates/parser2/test_files/error_recovery/items/struct_.snap @@ -0,0 +1,131 @@ +--- +source: crates/parser2/tests/error_recovery.rs +expression: node +input_file: crates/parser2/test_files/error_recovery/items/struct_.fe +--- +Root@0..160 + ItemList@0..160 + Item@0..74 + Struct@0..74 + ItemModifier@0..3 + PubKw@0..3 "pub" + WhiteSpace@3..4 " " + StructKw@4..10 "struct" + GenericParamList@10..15 + Lt@10..11 "<" + TypeGenericParam@11..12 + Ident@11..12 "T" + 
Comma@12..13 "," + WhiteSpace@13..14 " " + TypeGenericParam@14..15 + Ident@14..15 "U" + Newline@15..16 "\n" + WhereClause@16..39 + WhereKw@16..21 "where" + WhiteSpace@21..22 " " + WherePredicate@22..23 + PathType@22..23 + Path@22..23 + PathSegment@22..23 + Ident@22..23 "T" + WhiteSpace@23..24 " " + Newline@24..25 "\n" + WhiteSpace@25..31 " " + WherePredicate@31..39 + PathType@31..32 + Path@31..32 + PathSegment@31..32 + Ident@31..32 "U" + TypeBoundList@32..39 + Colon@32..33 ":" + WhiteSpace@33..34 " " + TypeBound@34..39 + TraitRef@34..39 + Path@34..39 + PathSegment@34..39 + Ident@34..39 "Trait" + Newline@39..40 "\n" + WhiteSpace@40..44 " " + Newline@44..45 "\n" + RecordFieldDefList@45..74 + LBrace@45..46 "{" + Newline@46..47 "\n" + WhiteSpace@47..51 " " + RecordFieldDef@51..54 + Ident@51..54 "foo" + Newline@54..55 "\n" + WhiteSpace@55..59 " " + RecordFieldDef@59..72 + Ident@59..62 "bar" + Colon@62..63 ":" + WhiteSpace@63..64 " " + PathType@64..72 + Path@64..72 + PathSegment@64..67 + Ident@64..67 "i32" + Colon2@67..69 "::" + PathSegment@69..72 + Ident@69..72 "foo" + Newline@72..73 "\n" + RBrace@73..74 "}" + Newline@74..76 "\n\n" + Item@76..160 + Struct@76..160 + ItemModifier@76..79 + PubKw@76..79 "pub" + WhiteSpace@79..80 " " + StructKw@80..86 "struct" + WhiteSpace@86..87 " " + Ident@87..90 "Foo" + WhiteSpace@90..91 " " + RecordFieldDefList@91..160 + LBrace@91..92 "{" + Newline@92..93 "\n" + WhiteSpace@93..97 " " + RecordFieldDef@97..142 + PubKw@97..100 "pub" + WhiteSpace@100..101 " " + Error@101..142 + Func@101..142 + FnKw@101..103 "fn" + WhiteSpace@103..104 " " + Ident@104..107 "foo" + FuncParamList@107..109 + LParen@107..108 "(" + RParen@108..109 ")" + WhiteSpace@109..111 " " + Arrow@111..113 "->" + WhiteSpace@113..114 " " + PathType@114..117 + Path@114..117 + PathSegment@114..117 + Ident@114..117 "i32" + WhiteSpace@117..118 " " + BlockExpr@118..142 + LBrace@118..119 "{" + Newline@119..120 "\n" + WhiteSpace@120..128 " " + ReturnStmt@128..136 + ReturnKw@128..134 
"return" + WhiteSpace@134..135 " " + LitExpr@135..136 + Lit@135..136 + Int@135..136 "1" + Newline@136..137 "\n" + WhiteSpace@137..141 " " + RBrace@141..142 "}" + Newline@142..143 "\n" + WhiteSpace@143..147 " " + Newline@147..148 "\n" + WhiteSpace@148..152 " " + RecordFieldDef@152..158 + Ident@152..153 "x" + Colon@153..154 ":" + WhiteSpace@154..155 " " + PathType@155..158 + Path@155..158 + PathSegment@155..158 + Ident@155..158 "i32" + Newline@158..159 "\n" + RBrace@159..160 "}" + diff --git a/crates/parser2/test_files/error_recovery/items/trait_.fe b/crates/parser2/test_files/error_recovery/items/trait_.fe new file mode 100644 index 0000000000..fbc4691c6a --- /dev/null +++ b/crates/parser2/test_files/error_recovery/items/trait_.fe @@ -0,0 +1,13 @@ +trait Foo{} + +trait Bar + +trait Bar where T: Add {} + +trait Bar< + where T: Add +{ + +} \ No newline at end of file diff --git a/crates/parser2/test_files/error_recovery/items/trait_.snap b/crates/parser2/test_files/error_recovery/items/trait_.snap new file mode 100644 index 0000000000..99e95864ef --- /dev/null +++ b/crates/parser2/test_files/error_recovery/items/trait_.snap @@ -0,0 +1,141 @@ +--- +source: crates/parser2/tests/error_recovery.rs +expression: node +input_file: crates/parser2/test_files/error_recovery/items/trait_.fe +--- +Root@0..133 + ItemList@0..133 + Item@0..18 + Trait@0..18 + TraitKw@0..5 "trait" + WhiteSpace@5..6 " " + Ident@6..9 "Foo" + GenericParamList@9..16 + Lt@9..10 "<" + TypeGenericParam@10..11 + Ident@10..11 "T" + Comma@11..12 "," + WhiteSpace@12..13 " " + TypeGenericParam@13..14 + Ident@13..14 "Y" + Comma@14..15 "," + Gt@15..16 ">" + TraitItemList@16..18 + LBrace@16..17 "{" + RBrace@17..18 "}" + Newline@18..20 "\n\n" + Item@20..35 + Trait@20..35 + TraitKw@20..25 "trait" + WhiteSpace@25..26 " " + Ident@26..29 "Bar" + GenericParamList@29..32 + Lt@29..30 "<" + TypeGenericParam@30..31 + Ident@30..31 "Y" + Comma@31..32 "," + WhiteSpace@32..33 " " + TraitItemList@33..35 + LBrace@33..34 "{" + 
RBrace@34..35 "}" + Newline@35..37 "\n\n" + Item@37..51 + Trait@37..51 + TraitKw@37..42 "trait" + WhiteSpace@42..43 " " + Ident@43..46 "Bar" + GenericParamList@46..51 + Lt@46..47 "<" + TypeGenericParam@47..48 + Ident@47..48 "T" + Comma@48..49 "," + WhiteSpace@49..50 " " + Gt@50..51 ">" + Newline@51..53 "\n\n" + Item@53..85 + Trait@53..85 + TraitKw@53..58 "trait" + WhiteSpace@58..59 " " + Ident@59..62 "Bar" + GenericParamList@62..69 + Lt@62..63 "<" + TypeGenericParam@63..64 + Ident@63..64 "Y" + Comma@64..65 "," + WhiteSpace@65..66 " " + TypeGenericParam@66..67 + Ident@66..67 "T" + Comma@67..68 "," + Gt@68..69 ">" + WhiteSpace@69..70 " " + WhereClause@70..82 + WhereKw@70..75 "where" + WhiteSpace@75..76 " " + WherePredicate@76..82 + PathType@76..77 + Path@76..77 + PathSegment@76..77 + Ident@76..77 "T" + TypeBoundList@77..82 + Colon@77..78 ":" + WhiteSpace@78..79 " " + TypeBound@79..82 + TraitRef@79..82 + Path@79..82 + PathSegment@79..82 + Ident@79..82 "Add" + WhiteSpace@82..83 " " + TraitItemList@83..85 + LBrace@83..84 "{" + RBrace@84..85 "}" + Newline@85..87 "\n\n" + Item@87..133 + Trait@87..133 + TraitKw@87..92 "trait" + WhiteSpace@92..93 " " + Ident@93..96 "Bar" + GenericParamList@96..108 + Lt@96..97 "<" + Error@97..98 + Lt@97..98 "<" + TypeGenericParam@98..99 + Ident@98..99 "Y" + Comma@99..100 "," + WhiteSpace@100..101 " " + TypeGenericParam@101..107 + Ident@101..102 "K" + TypeBoundList@102..107 + Colon@102..103 ":" + WhiteSpace@103..104 " " + TypeBound@104..107 + TraitRef@104..107 + Path@104..107 + PathSegment@104..107 + Ident@104..107 "Sub" + Gt@107..108 ">" + WhiteSpace@108..110 " " + Newline@110..111 "\n" + WhiteSpace@111..115 " " + WhereClause@115..127 + WhereKw@115..120 "where" + WhiteSpace@120..121 " " + WherePredicate@121..127 + PathType@121..122 + Path@121..122 + PathSegment@121..122 + Ident@121..122 "T" + TypeBoundList@122..127 + Colon@122..123 ":" + WhiteSpace@123..124 " " + TypeBound@124..127 + TraitRef@124..127 + Path@124..127 + PathSegment@124..127 + 
Ident@124..127 "Add" + WhiteSpace@127..128 " " + Newline@128..129 "\n" + TraitItemList@129..133 + LBrace@129..130 "{" + Newline@130..132 "\n\n" + RBrace@132..133 "}" + diff --git a/crates/parser2/test_files/error_recovery/items/type_.fe b/crates/parser2/test_files/error_recovery/items/type_.fe new file mode 100644 index 0000000000..e3b54f5ce3 --- /dev/null +++ b/crates/parser2/test_files/error_recovery/items/type_.fe @@ -0,0 +1,4 @@ +type Result + +type Foo = Result + diff --git a/crates/parser2/test_files/error_recovery/items/type_.snap b/crates/parser2/test_files/error_recovery/items/type_.snap new file mode 100644 index 0000000000..94e1400706 --- /dev/null +++ b/crates/parser2/test_files/error_recovery/items/type_.snap @@ -0,0 +1,90 @@ +--- +source: crates/parser2/tests/error_recovery.rs +expression: node +input_file: crates/parser2/test_files/error_recovery/items/type_.fe +--- +Root@0..72 + ItemList@0..70 + Item@0..29 + TypeAlias@0..29 + TypeKw@0..4 "type" + WhiteSpace@4..5 " " + Ident@5..11 "Result" + GenericParamList@11..14 + Lt@11..12 "<" + TypeGenericParam@12..13 + Ident@12..13 "T" + Comma@13..14 "," + WhiteSpace@14..15 " " + Eq@15..16 "=" + WhiteSpace@16..17 " " + PathType@17..29 + Path@17..29 + PathSegment@17..29 + Ident@17..23 "Result" + GenericArgList@23..29 + Lt@23..24 "<" + TypeGenericArg@24..25 + PathType@24..25 + Path@24..25 + PathSegment@24..25 + Ident@24..25 "T" + Comma@25..26 "," + WhiteSpace@26..27 " " + TypeGenericArg@27..28 + PathType@27..28 + Path@27..28 + PathSegment@27..28 + Ident@27..28 "E" + Gt@28..29 ">" + Newline@29..31 "\n\n" + Item@31..70 + TypeAlias@31..70 + TypeKw@31..35 "type" + WhiteSpace@35..36 " " + Ident@36..39 "Foo" + GenericParamList@39..55 + Lt@39..40 "<" + TypeGenericParam@40..46 + Ident@40..41 "T" + TypeBoundList@41..46 + Colon@41..42 ":" + WhiteSpace@42..43 " " + TypeBound@43..46 + Error@43..46 + Ident@43..46 "i32" + Comma@46..47 "," + WhiteSpace@47..48 " " + TypeGenericParam@48..54 + Ident@48..49 "U" + 
TypeBoundList@49..54 + Colon@49..50 ":" + WhiteSpace@50..51 " " + TypeBound@51..54 + Error@51..54 + Ident@51..54 "i32" + Gt@54..55 ">" + WhiteSpace@55..56 " " + Eq@56..57 "=" + WhiteSpace@57..58 " " + PathType@58..70 + Path@58..70 + PathSegment@58..70 + Ident@58..64 "Result" + GenericArgList@64..70 + Lt@64..65 "<" + TypeGenericArg@65..66 + PathType@65..66 + Path@65..66 + PathSegment@65..66 + Ident@65..66 "T" + Comma@66..67 "," + WhiteSpace@67..68 " " + TypeGenericArg@68..69 + PathType@68..69 + Path@68..69 + PathSegment@68..69 + Ident@68..69 "U" + Gt@69..70 ">" + Newline@70..72 "\n\n" + diff --git a/crates/parser2/test_files/error_recovery/items/use_.fe b/crates/parser2/test_files/error_recovery/items/use_.fe new file mode 100644 index 0000000000..eb4678b476 --- /dev/null +++ b/crates/parser2/test_files/error_recovery/items/use_.fe @@ -0,0 +1,3 @@ +use foo::bar::*::A +use foo::bar::*::{A, B} +use foo::bar::* as B \ No newline at end of file diff --git a/crates/parser2/test_files/error_recovery/items/use_.snap b/crates/parser2/test_files/error_recovery/items/use_.snap new file mode 100644 index 0000000000..5d052e46a7 --- /dev/null +++ b/crates/parser2/test_files/error_recovery/items/use_.snap @@ -0,0 +1,74 @@ +--- +source: crates/parser2/tests/error_recovery.rs +expression: node +input_file: crates/parser2/test_files/error_recovery/items/use_.fe +--- +Root@0..63 + ItemList@0..63 + Item@0..18 + Use@0..18 + UseKw@0..3 "use" + WhiteSpace@3..4 " " + UseTree@4..18 + UsePath@4..18 + UsePathSegment@4..7 + Ident@4..7 "foo" + Colon2@7..9 "::" + UsePathSegment@9..12 + Ident@9..12 "bar" + Colon2@12..14 "::" + UsePathSegment@14..15 + Star@14..15 "*" + Colon2@15..17 "::" + UsePathSegment@17..18 + Ident@17..18 "A" + Newline@18..19 "\n" + Item@19..42 + Use@19..42 + UseKw@19..22 "use" + WhiteSpace@22..23 " " + UseTree@23..42 + UsePath@23..34 + UsePathSegment@23..26 + Ident@23..26 "foo" + Colon2@26..28 "::" + UsePathSegment@28..31 + Ident@28..31 "bar" + Colon2@31..33 "::" + 
UsePathSegment@33..34 + Star@33..34 "*" + Colon2@34..36 "::" + UseTreeList@36..42 + LBrace@36..37 "{" + UseTree@37..38 + UsePath@37..38 + UsePathSegment@37..38 + Ident@37..38 "A" + Comma@38..39 "," + WhiteSpace@39..40 " " + UseTree@40..41 + UsePath@40..41 + UsePathSegment@40..41 + Ident@40..41 "B" + RBrace@41..42 "}" + Newline@42..43 "\n" + Item@43..63 + Use@43..63 + UseKw@43..46 "use" + WhiteSpace@46..47 " " + UseTree@47..63 + UsePath@47..58 + UsePathSegment@47..50 + Ident@47..50 "foo" + Colon2@50..52 "::" + UsePathSegment@52..55 + Ident@52..55 "bar" + Colon2@55..57 "::" + UsePathSegment@57..58 + Star@57..58 "*" + WhiteSpace@58..59 " " + UseTreeRename@59..63 + AsKw@59..61 "as" + WhiteSpace@61..62 " " + Ident@62..63 "B" + diff --git a/crates/parser2/test_files/error_recovery/stmts/for_.fe b/crates/parser2/test_files/error_recovery/stmts/for_.fe new file mode 100644 index 0000000000..38a3af19f2 --- /dev/null +++ b/crates/parser2/test_files/error_recovery/stmts/for_.fe @@ -0,0 +1,9 @@ +{ +for i arr { } + +for in arr { } + +for @ in arr {} + +for @ in arr x y {} +} \ No newline at end of file diff --git a/crates/parser2/test_files/error_recovery/stmts/for_.snap b/crates/parser2/test_files/error_recovery/stmts/for_.snap new file mode 100644 index 0000000000..d74f0a69bf --- /dev/null +++ b/crates/parser2/test_files/error_recovery/stmts/for_.snap @@ -0,0 +1,93 @@ +--- +source: crates/parser2/tests/error_recovery.rs +expression: node +input_file: crates/parser2/test_files/error_recovery/stmts/for_.fe +--- +Root@0..71 + ExprStmt@0..71 + BlockExpr@0..71 + LBrace@0..1 "{" + Newline@1..2 "\n" + ForStmt@2..15 + ForKw@2..5 "for" + WhiteSpace@5..6 " " + PathPat@6..7 + Path@6..7 + PathSegment@6..7 + Ident@6..7 "i" + WhiteSpace@7..8 " " + PathExpr@8..11 + Path@8..11 + PathSegment@8..11 + Ident@8..11 "arr" + WhiteSpace@11..12 " " + BlockExpr@12..15 + LBrace@12..13 "{" + WhiteSpace@13..14 " " + RBrace@14..15 "}" + Newline@15..17 "\n\n" + ForStmt@17..31 + ForKw@17..20 "for" + 
WhiteSpace@20..21 " " + PathPat@21..21 + Path@21..21 + PathSegment@21..21 + InKw@21..23 "in" + WhiteSpace@23..24 " " + PathExpr@24..27 + Path@24..27 + PathSegment@24..27 + Ident@24..27 "arr" + WhiteSpace@27..28 " " + BlockExpr@28..31 + LBrace@28..29 "{" + WhiteSpace@29..30 " " + RBrace@30..31 "}" + Newline@31..33 "\n\n" + ForStmt@33..48 + ForKw@33..36 "for" + WhiteSpace@36..37 " " + PathPat@37..38 + Path@37..37 + PathSegment@37..37 + Error@37..38 + InvalidToken@37..38 "@" + WhiteSpace@38..39 " " + InKw@39..41 "in" + WhiteSpace@41..42 " " + PathExpr@42..45 + Path@42..45 + PathSegment@42..45 + Ident@42..45 "arr" + WhiteSpace@45..46 " " + BlockExpr@46..48 + LBrace@46..47 "{" + RBrace@47..48 "}" + Newline@48..50 "\n\n" + ForStmt@50..69 + ForKw@50..53 "for" + WhiteSpace@53..54 " " + PathPat@54..55 + Path@54..54 + PathSegment@54..54 + Error@54..55 + InvalidToken@54..55 "@" + WhiteSpace@55..56 " " + InKw@56..58 "in" + WhiteSpace@58..59 " " + PathExpr@59..62 + Path@59..62 + PathSegment@59..62 + Ident@59..62 "arr" + WhiteSpace@62..63 " " + Error@63..66 + Ident@63..64 "x" + WhiteSpace@64..65 " " + Ident@65..66 "y" + WhiteSpace@66..67 " " + BlockExpr@67..69 + LBrace@67..68 "{" + RBrace@68..69 "}" + Newline@69..70 "\n" + RBrace@70..71 "}" + diff --git a/crates/parser2/test_files/error_recovery/stmts/while_.fe b/crates/parser2/test_files/error_recovery/stmts/while_.fe new file mode 100644 index 0000000000..ef4ccc095c --- /dev/null +++ b/crates/parser2/test_files/error_recovery/stmts/while_.fe @@ -0,0 +1,7 @@ +while @ {} + +while true { + x + 1 +}} + +while true {} \ No newline at end of file diff --git a/crates/parser2/test_files/error_recovery/stmts/while_.snap b/crates/parser2/test_files/error_recovery/stmts/while_.snap new file mode 100644 index 0000000000..80e8dcfd4a --- /dev/null +++ b/crates/parser2/test_files/error_recovery/stmts/while_.snap @@ -0,0 +1,54 @@ +--- +source: crates/parser2/tests/error_recovery.rs +expression: node +input_file: 
crates/parser2/test_files/error_recovery/stmts/while_.fe +--- +Root@0..56 + WhileStmt@0..10 + WhileKw@0..5 "while" + WhiteSpace@5..6 " " + Error@6..7 + InvalidToken@6..7 "@" + WhiteSpace@7..8 " " + BlockExpr@8..10 + LBrace@8..9 "{" + RBrace@9..10 "}" + Newline@10..12 "\n\n" + WhileStmt@12..36 + WhileKw@12..17 "while" + WhiteSpace@17..18 " " + LitExpr@18..22 + Lit@18..22 + TrueKw@18..22 "true" + WhiteSpace@22..23 " " + BlockExpr@23..36 + LBrace@23..24 "{" + Newline@24..25 "\n" + WhiteSpace@25..29 " " + ExprStmt@29..34 + BinExpr@29..34 + PathExpr@29..30 + Path@29..30 + PathSegment@29..30 + Ident@29..30 "x" + WhiteSpace@30..31 " " + Plus@31..32 "+" + WhiteSpace@32..33 " " + LitExpr@33..34 + Lit@33..34 + Int@33..34 "1" + Newline@34..35 "\n" + RBrace@35..36 "}" + ExprStmt@36..56 + Error@36..56 + RBrace@36..37 "}" + Newline@37..38 "\n" + WhiteSpace@38..42 " " + Newline@42..43 "\n" + WhileKw@43..48 "while" + WhiteSpace@48..49 " " + TrueKw@49..53 "true" + WhiteSpace@53..54 " " + LBrace@54..55 "{" + RBrace@55..56 "}" + diff --git a/crates/parser2/test_files/syntax_node/exprs/array.fe b/crates/parser2/test_files/syntax_node/exprs/array.fe new file mode 100644 index 0000000000..df749291b9 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/exprs/array.fe @@ -0,0 +1,2 @@ +[1, {1 + 2}] +[1; 16] \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/exprs/array.snap b/crates/parser2/test_files/syntax_node/exprs/array.snap new file mode 100644 index 0000000000..5514973774 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/exprs/array.snap @@ -0,0 +1,41 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: node +input_file: crates/parser2/test_files/syntax_node/exprs/array.fe +--- +Root@0..20 + ArrayExpr@0..12 + LBracket@0..1 "[" + LitExpr@1..2 + Lit@1..2 + Int@1..2 "1" + Comma@2..3 "," + WhiteSpace@3..4 " " + BlockExpr@4..11 + LBrace@4..5 "{" + ExprStmt@5..10 + BinExpr@5..10 + LitExpr@5..6 + Lit@5..6 + Int@5..6 "1" + 
WhiteSpace@6..7 " " + Plus@7..8 "+" + WhiteSpace@8..9 " " + LitExpr@9..10 + Lit@9..10 + Int@9..10 "2" + RBrace@10..11 "}" + RBracket@11..12 "]" + Newline@12..13 "\n" + ArrayRepExpr@13..20 + LBracket@13..14 "[" + LitExpr@14..15 + Lit@14..15 + Int@14..15 "1" + SemiColon@15..16 ";" + WhiteSpace@16..17 " " + LitExpr@17..19 + Lit@17..19 + Int@17..19 "16" + RBracket@19..20 "]" + diff --git a/crates/parser2/test_files/syntax_node/exprs/binop.fe b/crates/parser2/test_files/syntax_node/exprs/binop.fe new file mode 100644 index 0000000000..b364268cf4 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/exprs/binop.fe @@ -0,0 +1,14 @@ +1 + 2 * 3 +1 * 2 + 3 +1 < 2 +1 < (2 + 3) +1 < a(foo) +1 <= 2 +1 >= 2 +true || false && 1 < 2 +true || false && (1 < 2) > 3 ^ 2 +a ** 2 ** 3 +1 - 2 - 3 +1 << 3 >> 2 +a.b.c +a.0.c \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/exprs/binop.snap b/crates/parser2/test_files/syntax_node/exprs/binop.snap new file mode 100644 index 0000000000..4bdb51021c --- /dev/null +++ b/crates/parser2/test_files/syntax_node/exprs/binop.snap @@ -0,0 +1,269 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: node +input_file: crates/parser2/test_files/syntax_node/exprs/binop.fe +--- +Root@0..164 + BinExpr@0..9 + LitExpr@0..1 + Lit@0..1 + Int@0..1 "1" + WhiteSpace@1..2 " " + Plus@2..3 "+" + WhiteSpace@3..4 " " + BinExpr@4..9 + LitExpr@4..5 + Lit@4..5 + Int@4..5 "2" + WhiteSpace@5..6 " " + Star@6..7 "*" + WhiteSpace@7..8 " " + LitExpr@8..9 + Lit@8..9 + Int@8..9 "3" + Newline@9..10 "\n" + BinExpr@10..19 + BinExpr@10..15 + LitExpr@10..11 + Lit@10..11 + Int@10..11 "1" + WhiteSpace@11..12 " " + Star@12..13 "*" + WhiteSpace@13..14 " " + LitExpr@14..15 + Lit@14..15 + Int@14..15 "2" + WhiteSpace@15..16 " " + Plus@16..17 "+" + WhiteSpace@17..18 " " + LitExpr@18..19 + Lit@18..19 + Int@18..19 "3" + Newline@19..20 "\n" + BinExpr@20..25 + LitExpr@20..21 + Lit@20..21 + Int@20..21 "1" + WhiteSpace@21..22 " " + Lt@22..23 "<" + 
WhiteSpace@23..24 " " + LitExpr@24..25 + Lit@24..25 + Int@24..25 "2" + Newline@25..26 "\n" + BinExpr@26..37 + LitExpr@26..27 + Lit@26..27 + Int@26..27 "1" + WhiteSpace@27..28 " " + Lt@28..29 "<" + WhiteSpace@29..30 " " + ParenExpr@30..37 + LParen@30..31 "(" + BinExpr@31..36 + LitExpr@31..32 + Lit@31..32 + Int@31..32 "2" + WhiteSpace@32..33 " " + Plus@33..34 "+" + WhiteSpace@34..35 " " + LitExpr@35..36 + Lit@35..36 + Int@35..36 "3" + RParen@36..37 ")" + Newline@37..38 "\n" + BinExpr@38..48 + LitExpr@38..39 + Lit@38..39 + Int@38..39 "1" + WhiteSpace@39..40 " " + Lt@40..41 "<" + WhiteSpace@41..42 " " + CallExpr@42..48 + PathExpr@42..43 + Path@42..43 + PathSegment@42..43 + Ident@42..43 "a" + CallArgList@43..48 + LParen@43..44 "(" + CallArg@44..47 + PathExpr@44..47 + Path@44..47 + PathSegment@44..47 + Ident@44..47 "foo" + RParen@47..48 ")" + Newline@48..49 "\n" + BinExpr@49..55 + LitExpr@49..50 + Lit@49..50 + Int@49..50 "1" + WhiteSpace@50..51 " " + LtEq@51..53 + Lt@51..52 "<" + Eq@52..53 "=" + WhiteSpace@53..54 " " + LitExpr@54..55 + Lit@54..55 + Int@54..55 "2" + Newline@55..56 "\n" + BinExpr@56..62 + LitExpr@56..57 + Lit@56..57 + Int@56..57 "1" + WhiteSpace@57..58 " " + GtEq@58..60 + Gt@58..59 ">" + Eq@59..60 "=" + WhiteSpace@60..61 " " + LitExpr@61..62 + Lit@61..62 + Int@61..62 "2" + Newline@62..63 "\n" + BinExpr@63..85 + LitExpr@63..67 + Lit@63..67 + TrueKw@63..67 "true" + WhiteSpace@67..68 " " + Pipe2@68..70 "||" + WhiteSpace@70..71 " " + BinExpr@71..85 + LitExpr@71..76 + Lit@71..76 + FalseKw@71..76 "false" + WhiteSpace@76..77 " " + Amp2@77..79 "&&" + WhiteSpace@79..80 " " + BinExpr@80..85 + LitExpr@80..81 + Lit@80..81 + Int@80..81 "1" + WhiteSpace@81..82 " " + Lt@82..83 "<" + WhiteSpace@83..84 " " + LitExpr@84..85 + Lit@84..85 + Int@84..85 "2" + Newline@85..86 "\n" + BinExpr@86..118 + LitExpr@86..90 + Lit@86..90 + TrueKw@86..90 "true" + WhiteSpace@90..91 " " + Pipe2@91..93 "||" + WhiteSpace@93..94 " " + BinExpr@94..118 + LitExpr@94..99 + Lit@94..99 + 
FalseKw@94..99 "false" + WhiteSpace@99..100 " " + Amp2@100..102 "&&" + WhiteSpace@102..103 " " + BinExpr@103..118 + ParenExpr@103..110 + LParen@103..104 "(" + BinExpr@104..109 + LitExpr@104..105 + Lit@104..105 + Int@104..105 "1" + WhiteSpace@105..106 " " + Lt@106..107 "<" + WhiteSpace@107..108 " " + LitExpr@108..109 + Lit@108..109 + Int@108..109 "2" + RParen@109..110 ")" + WhiteSpace@110..111 " " + Gt@111..112 ">" + WhiteSpace@112..113 " " + BinExpr@113..118 + LitExpr@113..114 + Lit@113..114 + Int@113..114 "3" + WhiteSpace@114..115 " " + Hat@115..116 "^" + WhiteSpace@116..117 " " + LitExpr@117..118 + Lit@117..118 + Int@117..118 "2" + Newline@118..119 "\n" + BinExpr@119..130 + PathExpr@119..120 + Path@119..120 + PathSegment@119..120 + Ident@119..120 "a" + WhiteSpace@120..121 " " + Star2@121..123 "**" + WhiteSpace@123..124 " " + BinExpr@124..130 + LitExpr@124..125 + Lit@124..125 + Int@124..125 "2" + WhiteSpace@125..126 " " + Star2@126..128 "**" + WhiteSpace@128..129 " " + LitExpr@129..130 + Lit@129..130 + Int@129..130 "3" + Newline@130..131 "\n" + BinExpr@131..140 + BinExpr@131..136 + LitExpr@131..132 + Lit@131..132 + Int@131..132 "1" + WhiteSpace@132..133 " " + Minus@133..134 "-" + WhiteSpace@134..135 " " + LitExpr@135..136 + Lit@135..136 + Int@135..136 "2" + WhiteSpace@136..137 " " + Minus@137..138 "-" + WhiteSpace@138..139 " " + LitExpr@139..140 + Lit@139..140 + Int@139..140 "3" + Newline@140..141 "\n" + BinExpr@141..152 + BinExpr@141..147 + LitExpr@141..142 + Lit@141..142 + Int@141..142 "1" + WhiteSpace@142..143 " " + LShift@143..145 + Lt@143..144 "<" + Lt@144..145 "<" + WhiteSpace@145..146 " " + LitExpr@146..147 + Lit@146..147 + Int@146..147 "3" + WhiteSpace@147..148 " " + RShift@148..150 + Gt@148..149 ">" + Gt@149..150 ">" + WhiteSpace@150..151 " " + LitExpr@151..152 + Lit@151..152 + Int@151..152 "2" + Newline@152..153 "\n" + FieldExpr@153..158 + FieldExpr@153..156 + PathExpr@153..154 + Path@153..154 + PathSegment@153..154 + Ident@153..154 "a" + Dot@154..155 
"." + Ident@155..156 "b" + Dot@156..157 "." + Ident@157..158 "c" + Newline@158..159 "\n" + FieldExpr@159..164 + FieldExpr@159..162 + PathExpr@159..160 + Path@159..160 + PathSegment@159..160 + Ident@159..160 "a" + Dot@160..161 "." + Int@161..162 "0" + Dot@162..163 "." + Ident@163..164 "c" + diff --git a/crates/parser2/test_files/syntax_node/exprs/block.fe b/crates/parser2/test_files/syntax_node/exprs/block.fe new file mode 100644 index 0000000000..bd6c0aed13 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/exprs/block.fe @@ -0,0 +1,7 @@ +{ + use super::Foo + struct Foo {} + fn foo() {} + + let x = 1 +} \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/exprs/block.snap b/crates/parser2/test_files/syntax_node/exprs/block.snap new file mode 100644 index 0000000000..4c6ab0b269 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/exprs/block.snap @@ -0,0 +1,66 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: node +input_file: crates/parser2/test_files/syntax_node/exprs/block.fe +--- +Root@0..75 + BlockExpr@0..75 + LBrace@0..1 "{" + Newline@1..2 "\n" + WhiteSpace@2..6 " " + Item@6..20 + Use@6..20 + UseKw@6..9 "use" + WhiteSpace@9..10 " " + UseTree@10..20 + UsePath@10..20 + UsePathSegment@10..15 + SuperKw@10..15 "super" + Colon2@15..17 "::" + UsePathSegment@17..20 + Ident@17..20 "Foo" + Newline@20..21 "\n" + WhiteSpace@21..25 " " + Item@25..38 + Struct@25..38 + StructKw@25..31 "struct" + WhiteSpace@31..32 " " + Ident@32..35 "Foo" + WhiteSpace@35..36 " " + RecordFieldDefList@36..38 + LBrace@36..37 "{" + RBrace@37..38 "}" + Newline@38..39 "\n" + WhiteSpace@39..43 " " + Item@43..54 + Func@43..54 + FnKw@43..45 "fn" + WhiteSpace@45..46 " " + Ident@46..49 "foo" + FuncParamList@49..51 + LParen@49..50 "(" + RParen@50..51 ")" + WhiteSpace@51..52 " " + BlockExpr@52..54 + LBrace@52..53 "{" + RBrace@53..54 "}" + Newline@54..55 "\n" + WhiteSpace@55..59 " " + Newline@59..60 "\n" + WhiteSpace@60..64 " " + LetStmt@64..73 + 
LetKw@64..67 "let" + WhiteSpace@67..68 " " + PathPat@68..69 + Path@68..69 + PathSegment@68..69 + Ident@68..69 "x" + WhiteSpace@69..70 " " + Eq@70..71 "=" + WhiteSpace@71..72 " " + LitExpr@72..73 + Lit@72..73 + Int@72..73 "1" + Newline@73..74 "\n" + RBrace@74..75 "}" + diff --git a/crates/parser2/test_files/syntax_node/exprs/call.fe b/crates/parser2/test_files/syntax_node/exprs/call.fe new file mode 100644 index 0000000000..499be49d3d --- /dev/null +++ b/crates/parser2/test_files/syntax_node/exprs/call.fe @@ -0,0 +1,14 @@ +foo() +foo::Bar() +foo(x: 1, z: 3) +foo(x: 1, z: 3) +foo(x: 1, 2, z: 3) +foo(1, y: 2, z: 3) + +foo(val1: 2, val2: "String") +foo<[u32; 1], {3 + 4}>(x: 1, y: 2) + +foo::bar(x) + +// Ths should be parsed as `(foo(1))`, not a tuple expression. +(foo < i32, (u32) > (1)) diff --git a/crates/parser2/test_files/syntax_node/exprs/call.snap b/crates/parser2/test_files/syntax_node/exprs/call.snap new file mode 100644 index 0000000000..045db86498 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/exprs/call.snap @@ -0,0 +1,313 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: node +input_file: crates/parser2/test_files/syntax_node/exprs/call.fe +--- +Root@0..290 + CallExpr@0..5 + PathExpr@0..3 + Path@0..3 + PathSegment@0..3 + Ident@0..3 "foo" + CallArgList@3..5 + LParen@3..4 "(" + RParen@4..5 ")" + Newline@5..6 "\n" + CallExpr@6..16 + PathExpr@6..14 + Path@6..14 + PathSegment@6..9 + Ident@6..9 "foo" + Colon2@9..11 "::" + PathSegment@11..14 + Ident@11..14 "Bar" + CallArgList@14..16 + LParen@14..15 "(" + RParen@15..16 ")" + Newline@16..17 "\n" + CallExpr@17..32 + PathExpr@17..20 + Path@17..20 + PathSegment@17..20 + Ident@17..20 "foo" + CallArgList@20..32 + LParen@20..21 "(" + CallArg@21..25 + Ident@21..22 "x" + Colon@22..23 ":" + WhiteSpace@23..24 " " + LitExpr@24..25 + Lit@24..25 + Int@24..25 "1" + Comma@25..26 "," + WhiteSpace@26..27 " " + CallArg@27..31 + Ident@27..28 "z" + Colon@28..29 ":" + WhiteSpace@29..30 " " + LitExpr@30..31 + 
Lit@30..31 + Int@30..31 "3" + RParen@31..32 ")" + Newline@32..33 "\n" + CallExpr@33..48 + PathExpr@33..36 + Path@33..36 + PathSegment@33..36 + Ident@33..36 "foo" + CallArgList@36..48 + LParen@36..37 "(" + CallArg@37..41 + Ident@37..38 "x" + Colon@38..39 ":" + WhiteSpace@39..40 " " + LitExpr@40..41 + Lit@40..41 + Int@40..41 "1" + Comma@41..42 "," + WhiteSpace@42..43 " " + CallArg@43..47 + Ident@43..44 "z" + Colon@44..45 ":" + WhiteSpace@45..46 " " + LitExpr@46..47 + Lit@46..47 + Int@46..47 "3" + RParen@47..48 ")" + Newline@48..49 "\n" + CallExpr@49..67 + PathExpr@49..52 + Path@49..52 + PathSegment@49..52 + Ident@49..52 "foo" + CallArgList@52..67 + LParen@52..53 "(" + CallArg@53..57 + Ident@53..54 "x" + Colon@54..55 ":" + WhiteSpace@55..56 " " + LitExpr@56..57 + Lit@56..57 + Int@56..57 "1" + Comma@57..58 "," + WhiteSpace@58..59 " " + CallArg@59..60 + LitExpr@59..60 + Lit@59..60 + Int@59..60 "2" + Comma@60..61 "," + WhiteSpace@61..62 " " + CallArg@62..66 + Ident@62..63 "z" + Colon@63..64 ":" + WhiteSpace@64..65 " " + LitExpr@65..66 + Lit@65..66 + Int@65..66 "3" + RParen@66..67 ")" + Newline@67..68 "\n" + CallExpr@68..86 + PathExpr@68..71 + Path@68..71 + PathSegment@68..71 + Ident@68..71 "foo" + CallArgList@71..86 + LParen@71..72 "(" + CallArg@72..73 + LitExpr@72..73 + Lit@72..73 + Int@72..73 "1" + Comma@73..74 "," + WhiteSpace@74..75 " " + CallArg@75..79 + Ident@75..76 "y" + Colon@76..77 ":" + WhiteSpace@77..78 " " + LitExpr@78..79 + Lit@78..79 + Int@78..79 "2" + Comma@79..80 "," + WhiteSpace@80..81 " " + CallArg@81..85 + Ident@81..82 "z" + Colon@82..83 ":" + WhiteSpace@83..84 " " + LitExpr@84..85 + Lit@84..85 + Int@84..85 "3" + RParen@85..86 ")" + Newline@86..88 "\n\n" + CallExpr@88..134 + PathExpr@88..109 + Path@88..109 + PathSegment@88..109 + Ident@88..91 "foo" + GenericArgList@91..109 + Lt@91..92 "<" + TypeGenericArg@92..95 + PathType@92..95 + Path@92..95 + PathSegment@92..95 + Ident@92..95 "i32" + Comma@95..96 "," + WhiteSpace@96..97 " " + TypeGenericArg@97..108 
+ PathType@97..108 + Path@97..108 + PathSegment@97..100 + Ident@97..100 "foo" + Colon2@100..102 "::" + PathSegment@102..108 + Ident@102..108 "MyType" + Gt@108..109 ">" + CallArgList@109..134 + LParen@109..110 "(" + CallArg@110..117 + Ident@110..114 "val1" + Colon@114..115 ":" + WhiteSpace@115..116 " " + LitExpr@116..117 + Lit@116..117 + Int@116..117 "2" + Comma@117..118 "," + WhiteSpace@118..119 " " + CallArg@119..133 + Ident@119..123 "val2" + Colon@123..124 ":" + WhiteSpace@124..125 " " + LitExpr@125..133 + Lit@125..133 + String@125..133 "\"String\"" + RParen@133..134 ")" + Newline@134..135 "\n" + CallExpr@135..169 + PathExpr@135..157 + Path@135..157 + PathSegment@135..157 + Ident@135..138 "foo" + GenericArgList@138..157 + Lt@138..139 "<" + TypeGenericArg@139..147 + ArrayType@139..147 + LBracket@139..140 "[" + PathType@140..143 + Path@140..143 + PathSegment@140..143 + Ident@140..143 "u32" + SemiColon@143..144 ";" + WhiteSpace@144..145 " " + LitExpr@145..146 + Lit@145..146 + Int@145..146 "1" + RBracket@146..147 "]" + Comma@147..148 "," + WhiteSpace@148..149 " " + ConstGenericArg@149..156 + BlockExpr@149..156 + LBrace@149..150 "{" + ExprStmt@150..155 + BinExpr@150..155 + LitExpr@150..151 + Lit@150..151 + Int@150..151 "3" + WhiteSpace@151..152 " " + Plus@152..153 "+" + WhiteSpace@153..154 " " + LitExpr@154..155 + Lit@154..155 + Int@154..155 "4" + RBrace@155..156 "}" + Gt@156..157 ">" + CallArgList@157..169 + LParen@157..158 "(" + CallArg@158..162 + Ident@158..159 "x" + Colon@159..160 ":" + WhiteSpace@160..161 " " + LitExpr@161..162 + Lit@161..162 + Int@161..162 "1" + Comma@162..163 "," + WhiteSpace@163..164 " " + CallArg@164..168 + Ident@164..165 "y" + Colon@165..166 ":" + WhiteSpace@166..167 " " + LitExpr@167..168 + Lit@167..168 + Int@167..168 "2" + RParen@168..169 ")" + Newline@169..171 "\n\n" + CallExpr@171..188 + PathExpr@171..185 + Path@171..185 + PathSegment@171..174 + Ident@171..174 "foo" + Colon2@174..176 "::" + PathSegment@176..185 + Ident@176..179 "bar" + 
GenericArgList@179..185 + Lt@179..180 "<" + TypeGenericArg@180..181 + PathType@180..181 + Path@180..181 + PathSegment@180..181 + Ident@180..181 "T" + Comma@181..182 "," + WhiteSpace@182..183 " " + TypeGenericArg@183..184 + PathType@183..184 + Path@183..184 + PathSegment@183..184 + Ident@183..184 "U" + Gt@184..185 ">" + CallArgList@185..188 + LParen@185..186 "(" + CallArg@186..187 + PathExpr@186..187 + Path@186..187 + PathSegment@186..187 + Ident@186..187 "x" + RParen@187..188 ")" + Newline@188..190 "\n\n" + Comment@190..264 "// Ths should be pars ..." + Newline@264..265 "\n" + ParenExpr@265..289 + LParen@265..266 "(" + CallExpr@266..288 + PathExpr@266..284 + Path@266..284 + PathSegment@266..284 + Ident@266..269 "foo" + WhiteSpace@269..270 " " + GenericArgList@270..284 + Lt@270..271 "<" + WhiteSpace@271..272 " " + TypeGenericArg@272..275 + PathType@272..275 + Path@272..275 + PathSegment@272..275 + Ident@272..275 "i32" + Comma@275..276 "," + WhiteSpace@276..277 " " + TypeGenericArg@277..282 + TupleType@277..282 + LParen@277..278 "(" + PathType@278..281 + Path@278..281 + PathSegment@278..281 + Ident@278..281 "u32" + RParen@281..282 ")" + WhiteSpace@282..283 " " + Gt@283..284 ">" + WhiteSpace@284..285 " " + CallArgList@285..288 + LParen@285..286 "(" + CallArg@286..287 + LitExpr@286..287 + Lit@286..287 + Int@286..287 "1" + RParen@287..288 ")" + RParen@288..289 ")" + Newline@289..290 "\n" + diff --git a/crates/parser2/test_files/syntax_node/exprs/expr_path.fe b/crates/parser2/test_files/syntax_node/exprs/expr_path.fe new file mode 100644 index 0000000000..acf6c7fa29 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/exprs/expr_path.fe @@ -0,0 +1,3 @@ +super::Foo +ingot::Bar +Self::Foo \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/exprs/expr_path.snap b/crates/parser2/test_files/syntax_node/exprs/expr_path.snap new file mode 100644 index 0000000000..8b70fe1415 --- /dev/null +++ 
b/crates/parser2/test_files/syntax_node/exprs/expr_path.snap @@ -0,0 +1,30 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: node +input_file: crates/parser2/test_files/syntax_node/exprs/expr_path.fe +--- +Root@0..31 + PathExpr@0..10 + Path@0..10 + PathSegment@0..5 + SuperKw@0..5 "super" + Colon2@5..7 "::" + PathSegment@7..10 + Ident@7..10 "Foo" + Newline@10..11 "\n" + PathExpr@11..21 + Path@11..21 + PathSegment@11..16 + IngotKw@11..16 "ingot" + Colon2@16..18 "::" + PathSegment@18..21 + Ident@18..21 "Bar" + Newline@21..22 "\n" + PathExpr@22..31 + Path@22..31 + PathSegment@22..26 + SelfTypeKw@22..26 "Self" + Colon2@26..28 "::" + PathSegment@28..31 + Ident@28..31 "Foo" + diff --git a/crates/parser2/test_files/syntax_node/exprs/if.fe b/crates/parser2/test_files/syntax_node/exprs/if.fe new file mode 100644 index 0000000000..d823bc0d5c --- /dev/null +++ b/crates/parser2/test_files/syntax_node/exprs/if.fe @@ -0,0 +1,33 @@ +if b {} else {} + +if b {} else { + let x = 1 + x +} + +if b { + let x = 1 + x +} else {} + +if b { + let x = 1 + x +} + +if b { + let x = 1 + x +} else { + let y = 1 + y +} + +if match x { + Scope::Parent => true + Scope::Child => false +} { + return +} else { + 1 +} \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/exprs/if.snap b/crates/parser2/test_files/syntax_node/exprs/if.snap new file mode 100644 index 0000000000..4fe25e692b --- /dev/null +++ b/crates/parser2/test_files/syntax_node/exprs/if.snap @@ -0,0 +1,276 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: node +input_file: crates/parser2/test_files/syntax_node/exprs/if.fe +--- +Root@0..279 + IfExpr@0..15 + IfKw@0..2 "if" + WhiteSpace@2..3 " " + PathExpr@3..4 + Path@3..4 + PathSegment@3..4 + Ident@3..4 "b" + WhiteSpace@4..5 " " + BlockExpr@5..7 + LBrace@5..6 "{" + RBrace@6..7 "}" + WhiteSpace@7..8 " " + ElseKw@8..12 "else" + WhiteSpace@12..13 " " + BlockExpr@13..15 + LBrace@13..14 "{" + RBrace@14..15 "}" + Newline@15..17 "\n\n" + 
IfExpr@17..53 + IfKw@17..19 "if" + WhiteSpace@19..20 " " + PathExpr@20..21 + Path@20..21 + PathSegment@20..21 + Ident@20..21 "b" + WhiteSpace@21..22 " " + BlockExpr@22..24 + LBrace@22..23 "{" + RBrace@23..24 "}" + WhiteSpace@24..25 " " + ElseKw@25..29 "else" + WhiteSpace@29..30 " " + BlockExpr@30..53 + LBrace@30..31 "{" + Newline@31..32 "\n" + WhiteSpace@32..36 " " + LetStmt@36..45 + LetKw@36..39 "let" + WhiteSpace@39..40 " " + PathPat@40..41 + Path@40..41 + PathSegment@40..41 + Ident@40..41 "x" + WhiteSpace@41..42 " " + Eq@42..43 "=" + WhiteSpace@43..44 " " + LitExpr@44..45 + Lit@44..45 + Int@44..45 "1" + Newline@45..46 "\n" + WhiteSpace@46..50 " " + ExprStmt@50..51 + PathExpr@50..51 + Path@50..51 + PathSegment@50..51 + Ident@50..51 "x" + Newline@51..52 "\n" + RBrace@52..53 "}" + Newline@53..55 "\n\n" + IfExpr@55..91 + IfKw@55..57 "if" + WhiteSpace@57..58 " " + PathExpr@58..59 + Path@58..59 + PathSegment@58..59 + Ident@58..59 "b" + WhiteSpace@59..60 " " + BlockExpr@60..83 + LBrace@60..61 "{" + Newline@61..62 "\n" + WhiteSpace@62..66 " " + LetStmt@66..75 + LetKw@66..69 "let" + WhiteSpace@69..70 " " + PathPat@70..71 + Path@70..71 + PathSegment@70..71 + Ident@70..71 "x" + WhiteSpace@71..72 " " + Eq@72..73 "=" + WhiteSpace@73..74 " " + LitExpr@74..75 + Lit@74..75 + Int@74..75 "1" + Newline@75..76 "\n" + WhiteSpace@76..80 " " + ExprStmt@80..81 + PathExpr@80..81 + Path@80..81 + PathSegment@80..81 + Ident@80..81 "x" + Newline@81..82 "\n" + RBrace@82..83 "}" + WhiteSpace@83..84 " " + ElseKw@84..88 "else" + WhiteSpace@88..89 " " + BlockExpr@89..91 + LBrace@89..90 "{" + RBrace@90..91 "}" + Newline@91..93 "\n\n" + IfExpr@93..121 + IfKw@93..95 "if" + WhiteSpace@95..96 " " + PathExpr@96..97 + Path@96..97 + PathSegment@96..97 + Ident@96..97 "b" + WhiteSpace@97..98 " " + BlockExpr@98..121 + LBrace@98..99 "{" + Newline@99..100 "\n" + WhiteSpace@100..104 " " + LetStmt@104..113 + LetKw@104..107 "let" + WhiteSpace@107..108 " " + PathPat@108..109 + Path@108..109 + 
PathSegment@108..109 + Ident@108..109 "x" + WhiteSpace@109..110 " " + Eq@110..111 "=" + WhiteSpace@111..112 " " + LitExpr@112..113 + Lit@112..113 + Int@112..113 "1" + Newline@113..114 "\n" + WhiteSpace@114..118 " " + ExprStmt@118..119 + PathExpr@118..119 + Path@118..119 + PathSegment@118..119 + Ident@118..119 "x" + Newline@119..120 "\n" + RBrace@120..121 "}" + Newline@121..123 "\n\n" + IfExpr@123..180 + IfKw@123..125 "if" + WhiteSpace@125..126 " " + PathExpr@126..127 + Path@126..127 + PathSegment@126..127 + Ident@126..127 "b" + WhiteSpace@127..128 " " + BlockExpr@128..151 + LBrace@128..129 "{" + Newline@129..130 "\n" + WhiteSpace@130..134 " " + LetStmt@134..143 + LetKw@134..137 "let" + WhiteSpace@137..138 " " + PathPat@138..139 + Path@138..139 + PathSegment@138..139 + Ident@138..139 "x" + WhiteSpace@139..140 " " + Eq@140..141 "=" + WhiteSpace@141..142 " " + LitExpr@142..143 + Lit@142..143 + Int@142..143 "1" + Newline@143..144 "\n" + WhiteSpace@144..148 " " + ExprStmt@148..149 + PathExpr@148..149 + Path@148..149 + PathSegment@148..149 + Ident@148..149 "x" + Newline@149..150 "\n" + RBrace@150..151 "}" + WhiteSpace@151..152 " " + ElseKw@152..156 "else" + WhiteSpace@156..157 " " + BlockExpr@157..180 + LBrace@157..158 "{" + Newline@158..159 "\n" + WhiteSpace@159..163 " " + LetStmt@163..172 + LetKw@163..166 "let" + WhiteSpace@166..167 " " + PathPat@167..168 + Path@167..168 + PathSegment@167..168 + Ident@167..168 "y" + WhiteSpace@168..169 " " + Eq@169..170 "=" + WhiteSpace@170..171 " " + LitExpr@171..172 + Lit@171..172 + Int@171..172 "1" + Newline@172..173 "\n" + WhiteSpace@173..177 " " + ExprStmt@177..178 + PathExpr@177..178 + Path@177..178 + PathSegment@177..178 + Ident@177..178 "y" + Newline@178..179 "\n" + RBrace@179..180 "}" + Newline@180..182 "\n\n" + IfExpr@182..279 + IfKw@182..184 "if" + WhiteSpace@184..185 " " + MatchExpr@185..248 + MatchKw@185..190 "match" + WhiteSpace@190..191 " " + PathExpr@191..192 + Path@191..192 + PathSegment@191..192 + Ident@191..192 "x" + 
WhiteSpace@192..193 " " + MatchArmList@193..248 + LBrace@193..194 "{" + Newline@194..195 "\n" + WhiteSpace@195..199 " " + MatchArm@199..220 + PathPat@199..212 + Path@199..212 + PathSegment@199..204 + Ident@199..204 "Scope" + Colon2@204..206 "::" + PathSegment@206..212 + Ident@206..212 "Parent" + WhiteSpace@212..213 " " + FatArrow@213..215 "=>" + WhiteSpace@215..216 " " + LitExpr@216..220 + Lit@216..220 + TrueKw@216..220 "true" + Newline@220..221 "\n" + WhiteSpace@221..225 " " + MatchArm@225..246 + PathPat@225..237 + Path@225..237 + PathSegment@225..230 + Ident@225..230 "Scope" + Colon2@230..232 "::" + PathSegment@232..237 + Ident@232..237 "Child" + WhiteSpace@237..238 " " + FatArrow@238..240 "=>" + WhiteSpace@240..241 " " + LitExpr@241..246 + Lit@241..246 + FalseKw@241..246 "false" + Newline@246..247 "\n" + RBrace@247..248 "}" + WhiteSpace@248..249 " " + BlockExpr@249..264 + LBrace@249..250 "{" + Newline@250..251 "\n" + WhiteSpace@251..255 " " + ReturnStmt@255..261 + ReturnKw@255..261 "return" + WhiteSpace@261..262 " " + Newline@262..263 "\n" + RBrace@263..264 "}" + WhiteSpace@264..265 " " + ElseKw@265..269 "else" + WhiteSpace@269..270 " " + BlockExpr@270..279 + LBrace@270..271 "{" + Newline@271..272 "\n" + WhiteSpace@272..276 " " + ExprStmt@276..277 + LitExpr@276..277 + Lit@276..277 + Int@276..277 "1" + Newline@277..278 "\n" + RBrace@278..279 "}" + diff --git a/crates/parser2/test_files/syntax_node/exprs/index.fe b/crates/parser2/test_files/syntax_node/exprs/index.fe new file mode 100644 index 0000000000..1545b60d77 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/exprs/index.fe @@ -0,0 +1,2 @@ +x[1 + 2] +x[foo.y(1, 2)] \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/exprs/index.snap b/crates/parser2/test_files/syntax_node/exprs/index.snap new file mode 100644 index 0000000000..94a7173502 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/exprs/index.snap @@ -0,0 +1,52 @@ +--- +source: 
crates/parser2/tests/syntax_node.rs +expression: node +input_file: crates/parser2/test_files/syntax_node/exprs/index.fe +--- +Root@0..23 + IndexExpr@0..8 + PathExpr@0..1 + Path@0..1 + PathSegment@0..1 + Ident@0..1 "x" + LBracket@1..2 "[" + BinExpr@2..7 + LitExpr@2..3 + Lit@2..3 + Int@2..3 "1" + WhiteSpace@3..4 " " + Plus@4..5 "+" + WhiteSpace@5..6 " " + LitExpr@6..7 + Lit@6..7 + Int@6..7 "2" + RBracket@7..8 "]" + Newline@8..9 "\n" + IndexExpr@9..23 + PathExpr@9..10 + Path@9..10 + PathSegment@9..10 + Ident@9..10 "x" + LBracket@10..11 "[" + MethodCallExpr@11..22 + PathExpr@11..14 + Path@11..14 + PathSegment@11..14 + Ident@11..14 "foo" + Dot@14..15 "." + Ident@15..16 "y" + CallArgList@16..22 + LParen@16..17 "(" + CallArg@17..18 + LitExpr@17..18 + Lit@17..18 + Int@17..18 "1" + Comma@18..19 "," + WhiteSpace@19..20 " " + CallArg@20..21 + LitExpr@20..21 + Lit@20..21 + Int@20..21 "2" + RParen@21..22 ")" + RBracket@22..23 "]" + diff --git a/crates/parser2/test_files/syntax_node/exprs/match.fe b/crates/parser2/test_files/syntax_node/exprs/match.fe new file mode 100644 index 0000000000..6ddf0c7210 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/exprs/match.fe @@ -0,0 +1,38 @@ +match e {} + +match e { + Enum::Add(x, y) => x + y + Enum::Sub(x, y) => x - y +} + +match (S {x: 1, y: 2}) { + _ => 1 +} + +match e { + Enum::Add(x, y) => x + y + Enum::Sub(x, y) => x - y +} + +match e { + Enum::Add(x, y) => { + x + y + } + Enum::Sub(x, y) => x - y + Enum::Mul(x, y) => { x * y } +} + +match e { Enum::Var(s) => s } + +match { + let x = 1 + Enum::Var(x) + } +{ + Enum::Var(s) => s +} + +match (S {x: Foo::Bar(x), y: 2}) { + S {x: Boo::Bar(x), y} => true + _ => false +} \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/exprs/match.snap b/crates/parser2/test_files/syntax_node/exprs/match.snap new file mode 100644 index 0000000000..1278de74d2 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/exprs/match.snap @@ -0,0 +1,599 @@ +--- +source: 
crates/parser2/tests/syntax_node.rs +expression: node +input_file: crates/parser2/test_files/syntax_node/exprs/match.fe +--- +Root@0..516 + MatchExpr@0..10 + MatchKw@0..5 "match" + WhiteSpace@5..6 " " + PathExpr@6..7 + Path@6..7 + PathSegment@6..7 + Ident@6..7 "e" + WhiteSpace@7..8 " " + MatchArmList@8..10 + LBrace@8..9 "{" + RBrace@9..10 "}" + Newline@10..12 "\n\n" + MatchExpr@12..81 + MatchKw@12..17 "match" + WhiteSpace@17..18 " " + PathExpr@18..19 + Path@18..19 + PathSegment@18..19 + Ident@18..19 "e" + WhiteSpace@19..20 " " + MatchArmList@20..81 + LBrace@20..21 "{" + Newline@21..22 "\n" + WhiteSpace@22..26 " " + MatchArm@26..50 + PathTuplePat@26..41 + Path@26..35 + PathSegment@26..30 + Ident@26..30 "Enum" + Colon2@30..32 "::" + PathSegment@32..35 + Ident@32..35 "Add" + TuplePatElemList@35..41 + LParen@35..36 "(" + PathPat@36..37 + Path@36..37 + PathSegment@36..37 + Ident@36..37 "x" + Comma@37..38 "," + WhiteSpace@38..39 " " + PathPat@39..40 + Path@39..40 + PathSegment@39..40 + Ident@39..40 "y" + RParen@40..41 ")" + WhiteSpace@41..42 " " + FatArrow@42..44 "=>" + WhiteSpace@44..45 " " + BinExpr@45..50 + PathExpr@45..46 + Path@45..46 + PathSegment@45..46 + Ident@45..46 "x" + WhiteSpace@46..47 " " + Plus@47..48 "+" + WhiteSpace@48..49 " " + PathExpr@49..50 + Path@49..50 + PathSegment@49..50 + Ident@49..50 "y" + Newline@50..51 "\n" + WhiteSpace@51..55 " " + MatchArm@55..79 + PathTuplePat@55..70 + Path@55..64 + PathSegment@55..59 + Ident@55..59 "Enum" + Colon2@59..61 "::" + PathSegment@61..64 + Ident@61..64 "Sub" + TuplePatElemList@64..70 + LParen@64..65 "(" + PathPat@65..66 + Path@65..66 + PathSegment@65..66 + Ident@65..66 "x" + Comma@66..67 "," + WhiteSpace@67..68 " " + PathPat@68..69 + Path@68..69 + PathSegment@68..69 + Ident@68..69 "y" + RParen@69..70 ")" + WhiteSpace@70..71 " " + FatArrow@71..73 "=>" + WhiteSpace@73..74 " " + BinExpr@74..79 + PathExpr@74..75 + Path@74..75 + PathSegment@74..75 + Ident@74..75 "x" + WhiteSpace@75..76 " " + Minus@76..77 "-" + 
WhiteSpace@77..78 " " + PathExpr@78..79 + Path@78..79 + PathSegment@78..79 + Ident@78..79 "y" + Newline@79..80 "\n" + RBrace@80..81 "}" + Newline@81..83 "\n\n" + MatchExpr@83..120 + MatchKw@83..88 "match" + WhiteSpace@88..89 " " + ParenExpr@89..105 + LParen@89..90 "(" + RecordInitExpr@90..104 + Path@90..91 + PathSegment@90..91 + Ident@90..91 "S" + WhiteSpace@91..92 " " + RecordFieldList@92..104 + LBrace@92..93 "{" + RecordField@93..97 + Ident@93..94 "x" + Colon@94..95 ":" + WhiteSpace@95..96 " " + LitExpr@96..97 + Lit@96..97 + Int@96..97 "1" + Comma@97..98 "," + WhiteSpace@98..99 " " + RecordField@99..103 + Ident@99..100 "y" + Colon@100..101 ":" + WhiteSpace@101..102 " " + LitExpr@102..103 + Lit@102..103 + Int@102..103 "2" + RBrace@103..104 "}" + RParen@104..105 ")" + WhiteSpace@105..106 " " + MatchArmList@106..120 + LBrace@106..107 "{" + Newline@107..108 "\n" + WhiteSpace@108..112 " " + MatchArm@112..118 + WildCardPat@112..113 + Underscore@112..113 "_" + WhiteSpace@113..114 " " + FatArrow@114..116 "=>" + WhiteSpace@116..117 " " + LitExpr@117..118 + Lit@117..118 + Int@117..118 "1" + Newline@118..119 "\n" + RBrace@119..120 "}" + Newline@120..122 "\n\n" + MatchExpr@122..191 + MatchKw@122..127 "match" + WhiteSpace@127..128 " " + PathExpr@128..129 + Path@128..129 + PathSegment@128..129 + Ident@128..129 "e" + WhiteSpace@129..130 " " + MatchArmList@130..191 + LBrace@130..131 "{" + Newline@131..132 "\n" + WhiteSpace@132..136 " " + MatchArm@136..160 + PathTuplePat@136..151 + Path@136..145 + PathSegment@136..140 + Ident@136..140 "Enum" + Colon2@140..142 "::" + PathSegment@142..145 + Ident@142..145 "Add" + TuplePatElemList@145..151 + LParen@145..146 "(" + PathPat@146..147 + Path@146..147 + PathSegment@146..147 + Ident@146..147 "x" + Comma@147..148 "," + WhiteSpace@148..149 " " + PathPat@149..150 + Path@149..150 + PathSegment@149..150 + Ident@149..150 "y" + RParen@150..151 ")" + WhiteSpace@151..152 " " + FatArrow@152..154 "=>" + WhiteSpace@154..155 " " + BinExpr@155..160 + 
PathExpr@155..156 + Path@155..156 + PathSegment@155..156 + Ident@155..156 "x" + WhiteSpace@156..157 " " + Plus@157..158 "+" + WhiteSpace@158..159 " " + PathExpr@159..160 + Path@159..160 + PathSegment@159..160 + Ident@159..160 "y" + Newline@160..161 "\n" + WhiteSpace@161..165 " " + MatchArm@165..189 + PathTuplePat@165..180 + Path@165..174 + PathSegment@165..169 + Ident@165..169 "Enum" + Colon2@169..171 "::" + PathSegment@171..174 + Ident@171..174 "Sub" + TuplePatElemList@174..180 + LParen@174..175 "(" + PathPat@175..176 + Path@175..176 + PathSegment@175..176 + Ident@175..176 "x" + Comma@176..177 "," + WhiteSpace@177..178 " " + PathPat@178..179 + Path@178..179 + PathSegment@178..179 + Ident@178..179 "y" + RParen@179..180 ")" + WhiteSpace@180..181 " " + FatArrow@181..183 "=>" + WhiteSpace@183..184 " " + BinExpr@184..189 + PathExpr@184..185 + Path@184..185 + PathSegment@184..185 + Ident@184..185 "x" + WhiteSpace@185..186 " " + Minus@186..187 "-" + WhiteSpace@187..188 " " + PathExpr@188..189 + Path@188..189 + PathSegment@188..189 + Ident@188..189 "y" + Newline@189..190 "\n" + RBrace@190..191 "}" + Newline@191..193 "\n\n" + MatchExpr@193..313 + MatchKw@193..198 "match" + WhiteSpace@198..199 " " + PathExpr@199..200 + Path@199..200 + PathSegment@199..200 + Ident@199..200 "e" + WhiteSpace@200..201 " " + MatchArmList@201..313 + LBrace@201..202 "{" + Newline@202..203 "\n" + WhiteSpace@203..207 " " + MatchArm@207..249 + PathTuplePat@207..222 + Path@207..216 + PathSegment@207..211 + Ident@207..211 "Enum" + Colon2@211..213 "::" + PathSegment@213..216 + Ident@213..216 "Add" + TuplePatElemList@216..222 + LParen@216..217 "(" + PathPat@217..218 + Path@217..218 + PathSegment@217..218 + Ident@217..218 "x" + Comma@218..219 "," + WhiteSpace@219..220 " " + PathPat@220..221 + Path@220..221 + PathSegment@220..221 + Ident@220..221 "y" + RParen@221..222 ")" + WhiteSpace@222..223 " " + FatArrow@223..225 "=>" + WhiteSpace@225..226 " " + BlockExpr@226..249 + LBrace@226..227 "{" + 
WhiteSpace@227..228 " " + Newline@228..229 "\n" + WhiteSpace@229..237 " " + ExprStmt@237..242 + BinExpr@237..242 + PathExpr@237..238 + Path@237..238 + PathSegment@237..238 + Ident@237..238 "x" + WhiteSpace@238..239 " " + Plus@239..240 "+" + WhiteSpace@240..241 " " + PathExpr@241..242 + Path@241..242 + PathSegment@241..242 + Ident@241..242 "y" + WhiteSpace@242..243 " " + Newline@243..244 "\n" + WhiteSpace@244..248 " " + RBrace@248..249 "}" + Newline@249..250 "\n" + WhiteSpace@250..254 " " + MatchArm@254..278 + PathTuplePat@254..269 + Path@254..263 + PathSegment@254..258 + Ident@254..258 "Enum" + Colon2@258..260 "::" + PathSegment@260..263 + Ident@260..263 "Sub" + TuplePatElemList@263..269 + LParen@263..264 "(" + PathPat@264..265 + Path@264..265 + PathSegment@264..265 + Ident@264..265 "x" + Comma@265..266 "," + WhiteSpace@266..267 " " + PathPat@267..268 + Path@267..268 + PathSegment@267..268 + Ident@267..268 "y" + RParen@268..269 ")" + WhiteSpace@269..270 " " + FatArrow@270..272 "=>" + WhiteSpace@272..273 " " + BinExpr@273..278 + PathExpr@273..274 + Path@273..274 + PathSegment@273..274 + Ident@273..274 "x" + WhiteSpace@274..275 " " + Minus@275..276 "-" + WhiteSpace@276..277 " " + PathExpr@277..278 + Path@277..278 + PathSegment@277..278 + Ident@277..278 "y" + Newline@278..279 "\n" + WhiteSpace@279..283 " " + MatchArm@283..311 + PathTuplePat@283..298 + Path@283..292 + PathSegment@283..287 + Ident@283..287 "Enum" + Colon2@287..289 "::" + PathSegment@289..292 + Ident@289..292 "Mul" + TuplePatElemList@292..298 + LParen@292..293 "(" + PathPat@293..294 + Path@293..294 + PathSegment@293..294 + Ident@293..294 "x" + Comma@294..295 "," + WhiteSpace@295..296 " " + PathPat@296..297 + Path@296..297 + PathSegment@296..297 + Ident@296..297 "y" + RParen@297..298 ")" + WhiteSpace@298..299 " " + FatArrow@299..301 "=>" + WhiteSpace@301..302 " " + BlockExpr@302..311 + LBrace@302..303 "{" + WhiteSpace@303..304 " " + ExprStmt@304..309 + BinExpr@304..309 + PathExpr@304..305 + Path@304..305 
+ PathSegment@304..305 + Ident@304..305 "x" + WhiteSpace@305..306 " " + Star@306..307 "*" + WhiteSpace@307..308 " " + PathExpr@308..309 + Path@308..309 + PathSegment@308..309 + Ident@308..309 "y" + WhiteSpace@309..310 " " + RBrace@310..311 "}" + Newline@311..312 "\n" + RBrace@312..313 "}" + Newline@313..315 "\n\n" + MatchExpr@315..344 + MatchKw@315..320 "match" + WhiteSpace@320..321 " " + PathExpr@321..322 + Path@321..322 + PathSegment@321..322 + Ident@321..322 "e" + WhiteSpace@322..323 " " + MatchArmList@323..344 + LBrace@323..324 "{" + WhiteSpace@324..325 " " + MatchArm@325..342 + PathTuplePat@325..337 + Path@325..334 + PathSegment@325..329 + Ident@325..329 "Enum" + Colon2@329..331 "::" + PathSegment@331..334 + Ident@331..334 "Var" + TuplePatElemList@334..337 + LParen@334..335 "(" + PathPat@335..336 + Path@335..336 + PathSegment@335..336 + Ident@335..336 "s" + RParen@336..337 ")" + WhiteSpace@337..338 " " + FatArrow@338..340 "=>" + WhiteSpace@340..341 " " + PathExpr@341..342 + Path@341..342 + PathSegment@341..342 + Ident@341..342 "s" + WhiteSpace@342..343 " " + RBrace@343..344 "}" + Newline@344..346 "\n\n" + MatchExpr@346..429 + MatchKw@346..351 "match" + WhiteSpace@351..352 " " + BlockExpr@352..400 + LBrace@352..353 "{" + Newline@353..354 "\n" + WhiteSpace@354..362 " " + LetStmt@362..371 + LetKw@362..365 "let" + WhiteSpace@365..366 " " + PathPat@366..367 + Path@366..367 + PathSegment@366..367 + Ident@366..367 "x" + WhiteSpace@367..368 " " + Eq@368..369 "=" + WhiteSpace@369..370 " " + LitExpr@370..371 + Lit@370..371 + Int@370..371 "1" + Newline@371..372 "\n" + WhiteSpace@372..380 " " + ExprStmt@380..392 + CallExpr@380..392 + PathExpr@380..389 + Path@380..389 + PathSegment@380..384 + Ident@380..384 "Enum" + Colon2@384..386 "::" + PathSegment@386..389 + Ident@386..389 "Var" + CallArgList@389..392 + LParen@389..390 "(" + CallArg@390..391 + PathExpr@390..391 + Path@390..391 + PathSegment@390..391 + Ident@390..391 "x" + RParen@391..392 ")" + Newline@392..393 "\n" + 
WhiteSpace@393..399 " " + RBrace@399..400 "}" + WhiteSpace@400..401 " " + Newline@401..402 "\n" + MatchArmList@402..429 + LBrace@402..403 "{" + WhiteSpace@403..404 " " + Newline@404..405 "\n" + WhiteSpace@405..409 " " + MatchArm@409..426 + PathTuplePat@409..421 + Path@409..418 + PathSegment@409..413 + Ident@409..413 "Enum" + Colon2@413..415 "::" + PathSegment@415..418 + Ident@415..418 "Var" + TuplePatElemList@418..421 + LParen@418..419 "(" + PathPat@419..420 + Path@419..420 + PathSegment@419..420 + Ident@419..420 "s" + RParen@420..421 ")" + WhiteSpace@421..422 " " + FatArrow@422..424 "=>" + WhiteSpace@424..425 " " + PathExpr@425..426 + Path@425..426 + PathSegment@425..426 + Ident@425..426 "s" + WhiteSpace@426..427 " " + Newline@427..428 "\n" + RBrace@428..429 "}" + Newline@429..431 "\n\n" + MatchExpr@431..516 + MatchKw@431..436 "match" + WhiteSpace@436..437 " " + ParenExpr@437..463 + LParen@437..438 "(" + RecordInitExpr@438..462 + Path@438..439 + PathSegment@438..439 + Ident@438..439 "S" + WhiteSpace@439..440 " " + RecordFieldList@440..462 + LBrace@440..441 "{" + RecordField@441..455 + Ident@441..442 "x" + Colon@442..443 ":" + WhiteSpace@443..444 " " + CallExpr@444..455 + PathExpr@444..452 + Path@444..452 + PathSegment@444..447 + Ident@444..447 "Foo" + Colon2@447..449 "::" + PathSegment@449..452 + Ident@449..452 "Bar" + CallArgList@452..455 + LParen@452..453 "(" + CallArg@453..454 + PathExpr@453..454 + Path@453..454 + PathSegment@453..454 + Ident@453..454 "x" + RParen@454..455 ")" + Comma@455..456 "," + WhiteSpace@456..457 " " + RecordField@457..461 + Ident@457..458 "y" + Colon@458..459 ":" + WhiteSpace@459..460 " " + LitExpr@460..461 + Lit@460..461 + Int@460..461 "2" + RBrace@461..462 "}" + RParen@462..463 ")" + WhiteSpace@463..464 " " + MatchArmList@464..516 + LBrace@464..465 "{" + Newline@465..466 "\n" + WhiteSpace@466..470 " " + MatchArm@470..499 + RecordPat@470..491 + Path@470..471 + PathSegment@470..471 + Ident@470..471 "S" + WhiteSpace@471..472 " " + 
RecordPatFieldList@472..491 + LBrace@472..473 "{" + RecordPatField@473..487 + Ident@473..474 "x" + Colon@474..475 ":" + WhiteSpace@475..476 " " + PathTuplePat@476..487 + Path@476..484 + PathSegment@476..479 + Ident@476..479 "Boo" + Colon2@479..481 "::" + PathSegment@481..484 + Ident@481..484 "Bar" + TuplePatElemList@484..487 + LParen@484..485 "(" + PathPat@485..486 + Path@485..486 + PathSegment@485..486 + Ident@485..486 "x" + RParen@486..487 ")" + Comma@487..488 "," + WhiteSpace@488..489 " " + RecordPatField@489..490 + PathPat@489..490 + Path@489..490 + PathSegment@489..490 + Ident@489..490 "y" + RBrace@490..491 "}" + WhiteSpace@491..492 " " + FatArrow@492..494 "=>" + WhiteSpace@494..495 " " + LitExpr@495..499 + Lit@495..499 + TrueKw@495..499 "true" + Newline@499..500 "\n" + WhiteSpace@500..504 " " + MatchArm@504..514 + WildCardPat@504..505 + Underscore@504..505 "_" + WhiteSpace@505..506 " " + FatArrow@506..508 "=>" + WhiteSpace@508..509 " " + LitExpr@509..514 + Lit@509..514 + FalseKw@509..514 "false" + Newline@514..515 "\n" + RBrace@515..516 "}" + diff --git a/crates/parser2/test_files/syntax_node/exprs/method.fe b/crates/parser2/test_files/syntax_node/exprs/method.fe new file mode 100644 index 0000000000..f93bbcc003 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/exprs/method.fe @@ -0,0 +1,11 @@ +x.y() +x.y(1, 2) + +x.y.z(x: 1, y: 2) +x[0].z(x: 1) + +x.y(x: 1, y) + +x +.y() +.z() \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/exprs/method.snap b/crates/parser2/test_files/syntax_node/exprs/method.snap new file mode 100644 index 0000000000..cf7e29a4d9 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/exprs/method.snap @@ -0,0 +1,163 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: node +input_file: crates/parser2/test_files/syntax_node/exprs/method.fe +--- +Root@0..88 + MethodCallExpr@0..5 + PathExpr@0..1 + Path@0..1 + PathSegment@0..1 + Ident@0..1 "x" + Dot@1..2 "." 
+ Ident@2..3 "y" + CallArgList@3..5 + LParen@3..4 "(" + RParen@4..5 ")" + Newline@5..6 "\n" + MethodCallExpr@6..15 + PathExpr@6..7 + Path@6..7 + PathSegment@6..7 + Ident@6..7 "x" + Dot@7..8 "." + Ident@8..9 "y" + CallArgList@9..15 + LParen@9..10 "(" + CallArg@10..11 + LitExpr@10..11 + Lit@10..11 + Int@10..11 "1" + Comma@11..12 "," + WhiteSpace@12..13 " " + CallArg@13..14 + LitExpr@13..14 + Lit@13..14 + Int@13..14 "2" + RParen@14..15 ")" + Newline@15..17 "\n\n" + MethodCallExpr@17..34 + FieldExpr@17..20 + PathExpr@17..18 + Path@17..18 + PathSegment@17..18 + Ident@17..18 "x" + Dot@18..19 "." + Ident@19..20 "y" + Dot@20..21 "." + Ident@21..22 "z" + CallArgList@22..34 + LParen@22..23 "(" + CallArg@23..27 + Ident@23..24 "x" + Colon@24..25 ":" + WhiteSpace@25..26 " " + LitExpr@26..27 + Lit@26..27 + Int@26..27 "1" + Comma@27..28 "," + WhiteSpace@28..29 " " + CallArg@29..33 + Ident@29..30 "y" + Colon@30..31 ":" + WhiteSpace@31..32 " " + LitExpr@32..33 + Lit@32..33 + Int@32..33 "2" + RParen@33..34 ")" + Newline@34..35 "\n" + MethodCallExpr@35..47 + IndexExpr@35..39 + PathExpr@35..36 + Path@35..36 + PathSegment@35..36 + Ident@35..36 "x" + LBracket@36..37 "[" + LitExpr@37..38 + Lit@37..38 + Int@37..38 "0" + RBracket@38..39 "]" + Dot@39..40 "." + Ident@40..41 "z" + CallArgList@41..47 + LParen@41..42 "(" + CallArg@42..46 + Ident@42..43 "x" + Colon@43..44 ":" + WhiteSpace@44..45 " " + LitExpr@45..46 + Lit@45..46 + Int@45..46 "1" + RParen@46..47 ")" + Newline@47..49 "\n\n" + MethodCallExpr@49..75 + PathExpr@49..50 + Path@49..50 + PathSegment@49..50 + Ident@49..50 "x" + Dot@50..51 "." 
+ Ident@51..52 "y" + GenericArgList@52..66 + Lt@52..53 "<" + TypeGenericArg@53..56 + PathType@53..56 + Path@53..56 + PathSegment@53..56 + Ident@53..56 "i32" + Comma@56..57 "," + WhiteSpace@57..58 " " + ConstGenericArg@58..65 + BlockExpr@58..65 + LBrace@58..59 "{" + ExprStmt@59..64 + BinExpr@59..64 + PathExpr@59..60 + Path@59..60 + PathSegment@59..60 + Ident@59..60 "x" + WhiteSpace@60..61 " " + Plus@61..62 "+" + WhiteSpace@62..63 " " + PathExpr@63..64 + Path@63..64 + PathSegment@63..64 + Ident@63..64 "y" + RBrace@64..65 "}" + Gt@65..66 ">" + CallArgList@66..75 + LParen@66..67 "(" + CallArg@67..71 + Ident@67..68 "x" + Colon@68..69 ":" + WhiteSpace@69..70 " " + LitExpr@70..71 + Lit@70..71 + Int@70..71 "1" + Comma@71..72 "," + WhiteSpace@72..73 " " + CallArg@73..74 + PathExpr@73..74 + Path@73..74 + PathSegment@73..74 + Ident@73..74 "y" + RParen@74..75 ")" + Newline@75..77 "\n\n" + MethodCallExpr@77..88 + MethodCallExpr@77..83 + PathExpr@77..78 + Path@77..78 + PathSegment@77..78 + Ident@77..78 "x" + Newline@78..79 "\n" + Dot@79..80 "." + Ident@80..81 "y" + CallArgList@81..83 + LParen@81..82 "(" + RParen@82..83 ")" + Newline@83..84 "\n" + Dot@84..85 "." 
+ Ident@85..86 "z" + CallArgList@86..88 + LParen@86..87 "(" + RParen@87..88 ")" + diff --git a/crates/parser2/test_files/syntax_node/exprs/struct_init.fe b/crates/parser2/test_files/syntax_node/exprs/struct_init.fe new file mode 100644 index 0000000000..4578b13dc2 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/exprs/struct_init.fe @@ -0,0 +1,3 @@ +Struct {x, y} +Struct {x: 1 + 2} +Empty {} \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/exprs/struct_init.snap b/crates/parser2/test_files/syntax_node/exprs/struct_init.snap new file mode 100644 index 0000000000..df44ced52e --- /dev/null +++ b/crates/parser2/test_files/syntax_node/exprs/struct_init.snap @@ -0,0 +1,59 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: node +input_file: crates/parser2/test_files/syntax_node/exprs/struct_init.fe +--- +Root@0..40 + RecordInitExpr@0..13 + Path@0..6 + PathSegment@0..6 + Ident@0..6 "Struct" + WhiteSpace@6..7 " " + RecordFieldList@7..13 + LBrace@7..8 "{" + RecordField@8..9 + PathExpr@8..9 + Path@8..9 + PathSegment@8..9 + Ident@8..9 "x" + Comma@9..10 "," + WhiteSpace@10..11 " " + RecordField@11..12 + PathExpr@11..12 + Path@11..12 + PathSegment@11..12 + Ident@11..12 "y" + RBrace@12..13 "}" + Newline@13..14 "\n" + RecordInitExpr@14..31 + Path@14..20 + PathSegment@14..20 + Ident@14..20 "Struct" + WhiteSpace@20..21 " " + RecordFieldList@21..31 + LBrace@21..22 "{" + RecordField@22..30 + Ident@22..23 "x" + Colon@23..24 ":" + WhiteSpace@24..25 " " + BinExpr@25..30 + LitExpr@25..26 + Lit@25..26 + Int@25..26 "1" + WhiteSpace@26..27 " " + Plus@27..28 "+" + WhiteSpace@28..29 " " + LitExpr@29..30 + Lit@29..30 + Int@29..30 "2" + RBrace@30..31 "}" + Newline@31..32 "\n" + RecordInitExpr@32..40 + Path@32..37 + PathSegment@32..37 + Ident@32..37 "Empty" + WhiteSpace@37..38 " " + RecordFieldList@38..40 + LBrace@38..39 "{" + RBrace@39..40 "}" + diff --git a/crates/parser2/test_files/syntax_node/exprs/tuple.fe 
b/crates/parser2/test_files/syntax_node/exprs/tuple.fe new file mode 100644 index 0000000000..3d445ca973 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/exprs/tuple.fe @@ -0,0 +1,6 @@ +() +( + 1, + 2, + 3 +) \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/exprs/tuple.snap b/crates/parser2/test_files/syntax_node/exprs/tuple.snap new file mode 100644 index 0000000000..84c565f41e --- /dev/null +++ b/crates/parser2/test_files/syntax_node/exprs/tuple.snap @@ -0,0 +1,32 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: node +input_file: crates/parser2/test_files/syntax_node/exprs/tuple.fe +--- +Root@0..26 + TupleExpr@0..2 + LParen@0..1 "(" + RParen@1..2 ")" + Newline@2..3 "\n" + TupleExpr@3..26 + LParen@3..4 "(" + Newline@4..5 "\n" + WhiteSpace@5..9 " " + LitExpr@9..10 + Lit@9..10 + Int@9..10 "1" + Comma@10..11 "," + Newline@11..12 "\n" + WhiteSpace@12..16 " " + LitExpr@16..17 + Lit@16..17 + Int@16..17 "2" + Comma@17..18 "," + Newline@18..19 "\n" + WhiteSpace@19..23 " " + LitExpr@23..24 + Lit@23..24 + Int@23..24 "3" + Newline@24..25 "\n" + RParen@25..26 ")" + diff --git a/crates/parser2/test_files/syntax_node/items/const.fe b/crates/parser2/test_files/syntax_node/items/const.fe new file mode 100644 index 0000000000..16aaad59fd --- /dev/null +++ b/crates/parser2/test_files/syntax_node/items/const.fe @@ -0,0 +1,13 @@ +pub const FOO: i32 = 1 + +const BAR: u256 = { + let b = true + let x = 1 + if b { + 1 + } else if x == 1 { + 2 + } else { + 3 + } +} diff --git a/crates/parser2/test_files/syntax_node/items/const.snap b/crates/parser2/test_files/syntax_node/items/const.snap new file mode 100644 index 0000000000..6b23631b34 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/items/const.snap @@ -0,0 +1,143 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: node +input_file: crates/parser2/test_files/syntax_node/items/const.fe +--- +Root@0..160 + ItemList@0..159 + Item@0..22 + Const@0..22 + 
ItemModifier@0..3 + PubKw@0..3 "pub" + WhiteSpace@3..4 " " + ConstKw@4..9 "const" + WhiteSpace@9..10 " " + Ident@10..13 "FOO" + Colon@13..14 ":" + WhiteSpace@14..15 " " + PathType@15..18 + Path@15..18 + PathSegment@15..18 + Ident@15..18 "i32" + WhiteSpace@18..19 " " + Eq@19..20 "=" + WhiteSpace@20..21 " " + LitExpr@21..22 + Lit@21..22 + Int@21..22 "1" + Newline@22..24 "\n\n" + Item@24..159 + Const@24..159 + ConstKw@24..29 "const" + WhiteSpace@29..30 " " + Ident@30..33 "BAR" + Colon@33..34 ":" + WhiteSpace@34..35 " " + PathType@35..39 + Path@35..39 + PathSegment@35..39 + Ident@35..39 "u256" + WhiteSpace@39..40 " " + Eq@40..41 "=" + WhiteSpace@41..42 " " + BlockExpr@42..159 + LBrace@42..43 "{" + Newline@43..44 "\n" + WhiteSpace@44..48 " " + LetStmt@48..60 + LetKw@48..51 "let" + WhiteSpace@51..52 " " + PathPat@52..53 + Path@52..53 + PathSegment@52..53 + Ident@52..53 "b" + WhiteSpace@53..54 " " + Eq@54..55 "=" + WhiteSpace@55..56 " " + LitExpr@56..60 + Lit@56..60 + TrueKw@56..60 "true" + Newline@60..61 "\n" + WhiteSpace@61..65 " " + LetStmt@65..74 + LetKw@65..68 "let" + WhiteSpace@68..69 " " + PathPat@69..70 + Path@69..70 + PathSegment@69..70 + Ident@69..70 "x" + WhiteSpace@70..71 " " + Eq@71..72 "=" + WhiteSpace@72..73 " " + LitExpr@73..74 + Lit@73..74 + Int@73..74 "1" + Newline@74..75 "\n" + WhiteSpace@75..79 " " + ExprStmt@79..157 + IfExpr@79..157 + IfKw@79..81 "if" + WhiteSpace@81..82 " " + PathExpr@82..83 + Path@82..83 + PathSegment@82..83 + Ident@82..83 "b" + WhiteSpace@83..84 " " + BlockExpr@84..101 + LBrace@84..85 "{" + Newline@85..86 "\n" + WhiteSpace@86..94 " " + ExprStmt@94..95 + LitExpr@94..95 + Lit@94..95 + Int@94..95 "1" + Newline@95..96 "\n" + WhiteSpace@96..100 " " + RBrace@100..101 "}" + WhiteSpace@101..102 " " + ElseKw@102..106 "else" + WhiteSpace@106..107 " " + IfExpr@107..157 + IfKw@107..109 "if" + WhiteSpace@109..110 " " + BinExpr@110..116 + PathExpr@110..111 + Path@110..111 + PathSegment@110..111 + Ident@110..111 "x" + WhiteSpace@111..112 " " + 
Eq2@112..114 "==" + WhiteSpace@114..115 " " + LitExpr@115..116 + Lit@115..116 + Int@115..116 "1" + WhiteSpace@116..117 " " + BlockExpr@117..134 + LBrace@117..118 "{" + Newline@118..119 "\n" + WhiteSpace@119..127 " " + ExprStmt@127..128 + LitExpr@127..128 + Lit@127..128 + Int@127..128 "2" + Newline@128..129 "\n" + WhiteSpace@129..133 " " + RBrace@133..134 "}" + WhiteSpace@134..135 " " + ElseKw@135..139 "else" + WhiteSpace@139..140 " " + BlockExpr@140..157 + LBrace@140..141 "{" + Newline@141..142 "\n" + WhiteSpace@142..150 " " + ExprStmt@150..151 + LitExpr@150..151 + Lit@150..151 + Int@150..151 "3" + Newline@151..152 "\n" + WhiteSpace@152..156 " " + RBrace@156..157 "}" + Newline@157..158 "\n" + RBrace@158..159 "}" + Newline@159..160 "\n" + diff --git a/crates/parser2/test_files/syntax_node/items/contract.fe b/crates/parser2/test_files/syntax_node/items/contract.fe new file mode 100644 index 0000000000..3b65d7f965 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/items/contract.fe @@ -0,0 +1,7 @@ +contract Empty {} + +pub contract C { + x: i32, + y: u256, + z: MyStruct::Encodable, +} \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/items/contract.snap b/crates/parser2/test_files/syntax_node/items/contract.snap new file mode 100644 index 0000000000..b66008047c --- /dev/null +++ b/crates/parser2/test_files/syntax_node/items/contract.snap @@ -0,0 +1,67 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: node +input_file: crates/parser2/test_files/syntax_node/items/contract.fe +--- +Root@0..90 + ItemList@0..90 + Item@0..17 + Contract@0..17 + ContractKw@0..8 "contract" + WhiteSpace@8..9 " " + Ident@9..14 "Empty" + WhiteSpace@14..15 " " + RecordFieldDefList@15..17 + LBrace@15..16 "{" + RBrace@16..17 "}" + Newline@17..19 "\n\n" + Item@19..90 + Contract@19..90 + ItemModifier@19..22 + PubKw@19..22 "pub" + WhiteSpace@22..23 " " + ContractKw@23..31 "contract" + WhiteSpace@31..32 " " + Ident@32..33 "C" + WhiteSpace@33..34 " " + 
RecordFieldDefList@34..90 + LBrace@34..35 "{" + Newline@35..36 "\n" + WhiteSpace@36..40 " " + RecordFieldDef@40..46 + Ident@40..41 "x" + Colon@41..42 ":" + WhiteSpace@42..43 " " + PathType@43..46 + Path@43..46 + PathSegment@43..46 + Ident@43..46 "i32" + Comma@46..47 "," + Newline@47..48 "\n" + WhiteSpace@48..52 " " + RecordFieldDef@52..59 + Ident@52..53 "y" + Colon@53..54 ":" + WhiteSpace@54..55 " " + PathType@55..59 + Path@55..59 + PathSegment@55..59 + Ident@55..59 "u256" + Comma@59..60 "," + Newline@60..61 "\n" + WhiteSpace@61..65 " " + RecordFieldDef@65..87 + Ident@65..66 "z" + Colon@66..67 ":" + WhiteSpace@67..68 " " + PathType@68..87 + Path@68..87 + PathSegment@68..76 + Ident@68..76 "MyStruct" + Colon2@76..78 "::" + PathSegment@78..87 + Ident@78..87 "Encodable" + Comma@87..88 "," + Newline@88..89 "\n" + RBrace@89..90 "}" + diff --git a/crates/parser2/test_files/syntax_node/items/enums.fe b/crates/parser2/test_files/syntax_node/items/enums.fe new file mode 100644 index 0000000000..41e73a2ab2 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/items/enums.fe @@ -0,0 +1,39 @@ +enum Empty {} + +enum Basic { + Unit, + Tup(i32, u32) +} + +enum RecordVariants { + Rectangle { w: u32, h: u32 }, + Circle { r: u32 } +} + +enum Option + where T: Clone +{ + /// Some value of type `T` + Some(T), + + /// No value. 
+ None, +} + +enum BoundEnum +where Foo::Bar: Trait +{ + AddMul(T), + SubDiv(U), +} + +enum HKTEnum *, U, V, W> +where + U: (* -> *) -> *, + V: * -> * -> (* -> *), + W: * -> * -> * -> * +{ + Foo(U) +} + +enum SingleLine { A, B, C { x: i32, y: u8 }, D(i8, i8) } diff --git a/crates/parser2/test_files/syntax_node/items/enums.snap b/crates/parser2/test_files/syntax_node/items/enums.snap new file mode 100644 index 0000000000..cf48357337 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/items/enums.snap @@ -0,0 +1,494 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: node +input_file: crates/parser2/test_files/syntax_node/items/enums.fe +--- +Root@0..547 + ItemList@0..546 + Item@0..13 + Enum@0..13 + EnumKw@0..4 "enum" + WhiteSpace@4..5 " " + Ident@5..10 "Empty" + WhiteSpace@10..11 " " + VariantDefList@11..13 + LBrace@11..12 "{" + RBrace@12..13 "}" + Newline@13..15 "\n\n" + Item@15..57 + Enum@15..57 + EnumKw@15..19 "enum" + WhiteSpace@19..20 " " + Ident@20..25 "Basic" + WhiteSpace@25..26 " " + VariantDefList@26..57 + LBrace@26..27 "{" + Newline@27..28 "\n" + WhiteSpace@28..32 " " + VariantDef@32..36 + Ident@32..36 "Unit" + Comma@36..37 "," + Newline@37..38 "\n" + WhiteSpace@38..42 " " + VariantDef@42..55 + Ident@42..45 "Tup" + TupleType@45..55 + LParen@45..46 "(" + PathType@46..49 + Path@46..49 + PathSegment@46..49 + Ident@46..49 "i32" + Comma@49..50 "," + WhiteSpace@50..51 " " + PathType@51..54 + Path@51..54 + PathSegment@51..54 + Ident@51..54 "u32" + RParen@54..55 ")" + Newline@55..56 "\n" + RBrace@56..57 "}" + Newline@57..59 "\n\n" + Item@59..140 + Enum@59..140 + EnumKw@59..63 "enum" + WhiteSpace@63..64 " " + Ident@64..78 "RecordVariants" + WhiteSpace@78..79 " " + VariantDefList@79..140 + LBrace@79..80 "{" + Newline@80..81 "\n" + WhiteSpace@81..86 " " + VariantDef@86..114 + Ident@86..95 "Rectangle" + WhiteSpace@95..96 " " + RecordFieldDefList@96..114 + LBrace@96..97 "{" + WhiteSpace@97..98 " " + RecordFieldDef@98..104 + Ident@98..99 "w" + 
Colon@99..100 ":" + WhiteSpace@100..101 " " + PathType@101..104 + Path@101..104 + PathSegment@101..104 + Ident@101..104 "u32" + Comma@104..105 "," + WhiteSpace@105..106 " " + RecordFieldDef@106..112 + Ident@106..107 "h" + Colon@107..108 ":" + WhiteSpace@108..109 " " + PathType@109..112 + Path@109..112 + PathSegment@109..112 + Ident@109..112 "u32" + WhiteSpace@112..113 " " + RBrace@113..114 "}" + Comma@114..115 "," + Newline@115..116 "\n" + WhiteSpace@116..121 " " + VariantDef@121..138 + Ident@121..127 "Circle" + WhiteSpace@127..128 " " + RecordFieldDefList@128..138 + LBrace@128..129 "{" + WhiteSpace@129..130 " " + RecordFieldDef@130..136 + Ident@130..131 "r" + Colon@131..132 ":" + WhiteSpace@132..133 " " + PathType@133..136 + Path@133..136 + PathSegment@133..136 + Ident@133..136 "u32" + WhiteSpace@136..137 " " + RBrace@137..138 "}" + Newline@138..139 "\n" + RBrace@139..140 "}" + Newline@140..142 "\n\n" + Item@142..256 + Enum@142..256 + EnumKw@142..146 "enum" + WhiteSpace@146..147 " " + Ident@147..153 "Option" + GenericParamList@153..156 + Lt@153..154 "<" + TypeGenericParam@154..155 + Ident@154..155 "T" + Gt@155..156 ">" + Newline@156..157 "\n" + WhiteSpace@157..161 " " + WhereClause@161..175 + WhereKw@161..166 "where" + WhiteSpace@166..167 " " + WherePredicate@167..175 + PathType@167..168 + Path@167..168 + PathSegment@167..168 + Ident@167..168 "T" + TypeBoundList@168..175 + Colon@168..169 ":" + WhiteSpace@169..170 " " + TypeBound@170..175 + TraitRef@170..175 + Path@170..175 + PathSegment@170..175 + Ident@170..175 "Clone" + Newline@175..176 "\n" + VariantDefList@176..256 + LBrace@176..177 "{" + Newline@177..178 "\n" + WhiteSpace@178..182 " " + VariantDef@182..220 + AttrList@182..209 + DocCommentAttr@182..208 + DocComment@182..208 "/// Some value of typ ..." 
+ Newline@208..209 "\n" + WhiteSpace@209..213 " " + Ident@213..217 "Some" + TupleType@217..220 + LParen@217..218 "(" + PathType@218..219 + Path@218..219 + PathSegment@218..219 + Ident@218..219 "T" + RParen@219..220 ")" + Comma@220..221 "," + Newline@221..222 "\n" + WhiteSpace@222..226 " " + Newline@226..227 "\n" + WhiteSpace@227..231 " " + VariantDef@231..253 + AttrList@231..245 + DocCommentAttr@231..244 + DocComment@231..244 "/// No value." + Newline@244..245 "\n" + WhiteSpace@245..249 " " + Ident@249..253 "None" + Comma@253..254 "," + Newline@254..255 "\n" + RBrace@255..256 "}" + Newline@256..258 "\n\n" + Item@258..360 + Enum@258..360 + EnumKw@258..262 "enum" + WhiteSpace@262..263 " " + Ident@263..272 "BoundEnum" + GenericParamList@272..301 + Lt@272..273 "<" + TypeGenericParam@273..285 + Ident@273..274 "T" + TypeBoundList@274..285 + Colon@274..275 ":" + WhiteSpace@275..276 " " + TypeBound@276..279 + TraitRef@276..279 + Path@276..279 + PathSegment@276..279 + Ident@276..279 "Add" + WhiteSpace@279..280 " " + Plus@280..281 "+" + WhiteSpace@281..282 " " + TypeBound@282..285 + TraitRef@282..285 + Path@282..285 + PathSegment@282..285 + Ident@282..285 "Mul" + WhiteSpace@285..286 " " + Comma@286..287 "," + WhiteSpace@287..288 " " + TypeGenericParam@288..300 + Ident@288..289 "U" + TypeBoundList@289..300 + Colon@289..290 ":" + WhiteSpace@290..291 " " + TypeBound@291..294 + TraitRef@291..294 + Path@291..294 + PathSegment@291..294 + Ident@291..294 "Sub" + WhiteSpace@294..295 " " + Plus@295..296 "+" + WhiteSpace@296..297 " " + TypeBound@297..300 + TraitRef@297..300 + Path@297..300 + PathSegment@297..300 + Ident@297..300 "Div" + Gt@300..301 ">" + Newline@301..302 "\n" + WhereClause@302..326 + WhereKw@302..307 "where" + WhiteSpace@307..308 " " + WherePredicate@308..326 + PathType@308..319 + Path@308..319 + PathSegment@308..311 + Ident@308..311 "Foo" + Colon2@311..313 "::" + PathSegment@313..319 + Ident@313..316 "Bar" + GenericArgList@316..319 + Lt@316..317 "<" + 
TypeGenericArg@317..318 + PathType@317..318 + Path@317..318 + PathSegment@317..318 + Ident@317..318 "T" + Gt@318..319 ">" + TypeBoundList@319..326 + Colon@319..320 ":" + WhiteSpace@320..321 " " + TypeBound@321..326 + TraitRef@321..326 + Path@321..326 + PathSegment@321..326 + Ident@321..326 "Trait" + Newline@326..327 "\n" + VariantDefList@327..360 + LBrace@327..328 "{" + Newline@328..329 "\n" + WhiteSpace@329..333 " " + VariantDef@333..342 + Ident@333..339 "AddMul" + TupleType@339..342 + LParen@339..340 "(" + PathType@340..341 + Path@340..341 + PathSegment@340..341 + Ident@340..341 "T" + RParen@341..342 ")" + Comma@342..343 "," + Newline@343..344 "\n" + WhiteSpace@344..348 " " + VariantDef@348..357 + Ident@348..354 "SubDiv" + TupleType@354..357 + LParen@354..355 "(" + PathType@355..356 + Path@355..356 + PathSegment@355..356 + Ident@355..356 "U" + RParen@356..357 ")" + Comma@357..358 "," + Newline@358..359 "\n" + RBrace@359..360 "}" + Newline@360..362 "\n\n" + Item@362..488 + Enum@362..488 + EnumKw@362..366 "enum" + WhiteSpace@366..367 " " + Ident@367..374 "HKTEnum" + GenericParamList@374..394 + Lt@374..375 "<" + TypeGenericParam@375..384 + Ident@375..376 "T" + TypeBoundList@376..384 + Colon@376..377 ":" + WhiteSpace@377..378 " " + TypeBound@378..384 + KindBoundAbs@378..384 + KindBoundMono@378..379 + Star@378..379 "*" + WhiteSpace@379..380 " " + Arrow@380..382 "->" + WhiteSpace@382..383 " " + KindBoundMono@383..384 + Star@383..384 "*" + Comma@384..385 "," + WhiteSpace@385..386 " " + TypeGenericParam@386..387 + Ident@386..387 "U" + Comma@387..388 "," + WhiteSpace@388..389 " " + TypeGenericParam@389..390 + Ident@389..390 "V" + Comma@390..391 "," + WhiteSpace@391..392 " " + TypeGenericParam@392..393 + Ident@392..393 "W" + Gt@393..394 ">" + Newline@394..395 "\n" + WhereClause@395..470 + WhereKw@395..400 "where" + Newline@400..401 "\n" + WhiteSpace@401..404 " " + WherePredicate@404..420 + PathType@404..405 + Path@404..405 + PathSegment@404..405 + Ident@404..405 "U" + 
TypeBoundList@405..420 + Colon@405..406 ":" + WhiteSpace@406..407 " " + TypeBound@407..420 + KindBoundAbs@407..420 + LParen@407..408 "(" + KindBoundAbs@408..414 + KindBoundMono@408..409 + Star@408..409 "*" + WhiteSpace@409..410 " " + Arrow@410..412 "->" + WhiteSpace@412..413 " " + KindBoundMono@413..414 + Star@413..414 "*" + RParen@414..415 ")" + WhiteSpace@415..416 " " + Arrow@416..418 "->" + WhiteSpace@418..419 " " + KindBoundMono@419..420 + Star@419..420 "*" + Comma@420..421 "," + Newline@421..422 "\n" + WhiteSpace@422..425 " " + WherePredicate@425..446 + PathType@425..426 + Path@425..426 + PathSegment@425..426 + Ident@425..426 "V" + TypeBoundList@426..446 + Colon@426..427 ":" + WhiteSpace@427..428 " " + TypeBound@428..446 + KindBoundAbs@428..446 + KindBoundMono@428..429 + Star@428..429 "*" + WhiteSpace@429..430 " " + Arrow@430..432 "->" + KindBoundAbs@432..446 + WhiteSpace@432..433 " " + KindBoundMono@433..434 + Star@433..434 "*" + WhiteSpace@434..435 " " + Arrow@435..437 "->" + WhiteSpace@437..438 " " + LParen@438..439 "(" + KindBoundAbs@439..445 + KindBoundMono@439..440 + Star@439..440 "*" + WhiteSpace@440..441 " " + Arrow@441..443 "->" + WhiteSpace@443..444 " " + KindBoundMono@444..445 + Star@444..445 "*" + RParen@445..446 ")" + Comma@446..447 "," + Newline@447..448 "\n" + WhiteSpace@448..451 " " + WherePredicate@451..470 + PathType@451..452 + Path@451..452 + PathSegment@451..452 + Ident@451..452 "W" + TypeBoundList@452..470 + Colon@452..453 ":" + WhiteSpace@453..454 " " + TypeBound@454..470 + KindBoundAbs@454..470 + KindBoundMono@454..455 + Star@454..455 "*" + WhiteSpace@455..456 " " + Arrow@456..458 "->" + KindBoundAbs@458..470 + WhiteSpace@458..459 " " + KindBoundMono@459..460 + Star@459..460 "*" + WhiteSpace@460..461 " " + Arrow@461..463 "->" + KindBoundAbs@463..470 + WhiteSpace@463..464 " " + KindBoundMono@464..465 + Star@464..465 "*" + WhiteSpace@465..466 " " + Arrow@466..468 "->" + WhiteSpace@468..469 " " + KindBoundMono@469..470 + Star@469..470 "*" + 
Newline@470..471 "\n" + VariantDefList@471..488 + LBrace@471..472 "{" + Newline@472..473 "\n" + WhiteSpace@473..477 " " + VariantDef@477..486 + Ident@477..480 "Foo" + TupleType@480..486 + LParen@480..481 "(" + PathType@481..485 + Path@481..485 + PathSegment@481..485 + Ident@481..482 "U" + GenericArgList@482..485 + Lt@482..483 "<" + TypeGenericArg@483..484 + PathType@483..484 + Path@483..484 + PathSegment@483..484 + Ident@483..484 "T" + Gt@484..485 ">" + RParen@485..486 ")" + Newline@486..487 "\n" + RBrace@487..488 "}" + Newline@488..490 "\n\n" + Item@490..546 + Enum@490..546 + EnumKw@490..494 "enum" + WhiteSpace@494..495 " " + Ident@495..505 "SingleLine" + WhiteSpace@505..506 " " + VariantDefList@506..546 + LBrace@506..507 "{" + WhiteSpace@507..508 " " + VariantDef@508..509 + Ident@508..509 "A" + Comma@509..510 "," + WhiteSpace@510..511 " " + VariantDef@511..512 + Ident@511..512 "B" + Comma@512..513 "," + WhiteSpace@513..514 " " + VariantDef@514..533 + Ident@514..515 "C" + WhiteSpace@515..516 " " + RecordFieldDefList@516..533 + LBrace@516..517 "{" + WhiteSpace@517..518 " " + RecordFieldDef@518..524 + Ident@518..519 "x" + Colon@519..520 ":" + WhiteSpace@520..521 " " + PathType@521..524 + Path@521..524 + PathSegment@521..524 + Ident@521..524 "i32" + Comma@524..525 "," + WhiteSpace@525..526 " " + RecordFieldDef@526..531 + Ident@526..527 "y" + Colon@527..528 ":" + WhiteSpace@528..529 " " + PathType@529..531 + Path@529..531 + PathSegment@529..531 + Ident@529..531 "u8" + WhiteSpace@531..532 " " + RBrace@532..533 "}" + Comma@533..534 "," + WhiteSpace@534..535 " " + VariantDef@535..544 + Ident@535..536 "D" + TupleType@536..544 + LParen@536..537 "(" + PathType@537..539 + Path@537..539 + PathSegment@537..539 + Ident@537..539 "i8" + Comma@539..540 "," + WhiteSpace@540..541 " " + PathType@541..543 + Path@541..543 + PathSegment@541..543 + Ident@541..543 "i8" + RParen@543..544 ")" + WhiteSpace@544..545 " " + RBrace@545..546 "}" + Newline@546..547 "\n" diff --git 
a/crates/parser2/test_files/syntax_node/items/extern.fe b/crates/parser2/test_files/syntax_node/items/extern.fe new file mode 100644 index 0000000000..000de9c818 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/items/extern.fe @@ -0,0 +1,9 @@ +extern { + +} + +extern { + pub unsafe fn write(loc: *u32, value: u32) -> bool + pub unsafe fn read(loc: *u32, len: usize) -> usize + fn foo() +} \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/items/extern.snap b/crates/parser2/test_files/syntax_node/items/extern.snap new file mode 100644 index 0000000000..5a89154c0f --- /dev/null +++ b/crates/parser2/test_files/syntax_node/items/extern.snap @@ -0,0 +1,116 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: node +input_file: crates/parser2/test_files/syntax_node/items/extern.fe +--- +Root@0..146 + ItemList@0..146 + Item@0..11 + Extern@0..11 + ExternKw@0..6 "extern" + WhiteSpace@6..7 " " + ExternItemList@7..11 + LBrace@7..8 "{" + Newline@8..10 "\n\n" + RBrace@10..11 "}" + Newline@11..13 "\n\n" + Item@13..146 + Extern@13..146 + ExternKw@13..19 "extern" + WhiteSpace@19..20 " " + ExternItemList@20..146 + LBrace@20..21 "{" + Newline@21..22 "\n" + WhiteSpace@22..26 " " + Func@26..76 + ItemModifier@26..36 + PubKw@26..29 "pub" + WhiteSpace@29..30 " " + UnsafeKw@30..36 "unsafe" + WhiteSpace@36..37 " " + FnKw@37..39 "fn" + WhiteSpace@39..40 " " + Ident@40..45 "write" + FuncParamList@45..68 + LParen@45..46 "(" + FnParam@46..55 + Ident@46..49 "loc" + Colon@49..50 ":" + WhiteSpace@50..51 " " + PtrType@51..55 + Star@51..52 "*" + PathType@52..55 + Path@52..55 + PathSegment@52..55 + Ident@52..55 "u32" + Comma@55..56 "," + WhiteSpace@56..57 " " + FnParam@57..67 + Ident@57..62 "value" + Colon@62..63 ":" + WhiteSpace@63..64 " " + PathType@64..67 + Path@64..67 + PathSegment@64..67 + Ident@64..67 "u32" + RParen@67..68 ")" + WhiteSpace@68..69 " " + Arrow@69..71 "->" + WhiteSpace@71..72 " " + PathType@72..76 + Path@72..76 + PathSegment@72..76 + 
Ident@72..76 "bool" + Newline@76..77 "\n" + WhiteSpace@77..81 " " + Func@81..131 + ItemModifier@81..91 + PubKw@81..84 "pub" + WhiteSpace@84..85 " " + UnsafeKw@85..91 "unsafe" + WhiteSpace@91..92 " " + FnKw@92..94 "fn" + WhiteSpace@94..95 " " + Ident@95..99 "read" + FuncParamList@99..122 + LParen@99..100 "(" + FnParam@100..109 + Ident@100..103 "loc" + Colon@103..104 ":" + WhiteSpace@104..105 " " + PtrType@105..109 + Star@105..106 "*" + PathType@106..109 + Path@106..109 + PathSegment@106..109 + Ident@106..109 "u32" + Comma@109..110 "," + WhiteSpace@110..111 " " + FnParam@111..121 + Ident@111..114 "len" + Colon@114..115 ":" + WhiteSpace@115..116 " " + PathType@116..121 + Path@116..121 + PathSegment@116..121 + Ident@116..121 "usize" + RParen@121..122 ")" + WhiteSpace@122..123 " " + Arrow@123..125 "->" + WhiteSpace@125..126 " " + PathType@126..131 + Path@126..131 + PathSegment@126..131 + Ident@126..131 "usize" + Newline@131..132 "\n" + WhiteSpace@132..136 " " + Func@136..144 + FnKw@136..138 "fn" + WhiteSpace@138..139 " " + Ident@139..142 "foo" + FuncParamList@142..144 + LParen@142..143 "(" + RParen@143..144 ")" + Newline@144..145 "\n" + RBrace@145..146 "}" + diff --git a/crates/parser2/test_files/syntax_node/items/func.fe b/crates/parser2/test_files/syntax_node/items/func.fe new file mode 100644 index 0000000000..c727427859 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/items/func.fe @@ -0,0 +1,21 @@ +pub fn foo() { + let x = 1 +} + +fn bar(bar: i32, mut baz: u256) -> i32 { + 1 +} + +fn baz(from sender: address, mut to recipient: address, _ val: u256, _ _: u256) -> i32 { + 1 +} + +fn generics1(t: T, u: Option) -> T + where Result: Trait, + Option: Clone + +{ + t +} + +fn decl(t: MyStruct) -> Result {} \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/items/func.snap b/crates/parser2/test_files/syntax_node/items/func.snap new file mode 100644 index 0000000000..63ea1d801d --- /dev/null +++ 
b/crates/parser2/test_files/syntax_node/items/func.snap @@ -0,0 +1,349 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: node +input_file: crates/parser2/test_files/syntax_node/items/func.fe +--- +Root@0..351 + ItemList@0..351 + Item@0..30 + Func@0..30 + ItemModifier@0..3 + PubKw@0..3 "pub" + WhiteSpace@3..4 " " + FnKw@4..6 "fn" + WhiteSpace@6..7 " " + Ident@7..10 "foo" + FuncParamList@10..12 + LParen@10..11 "(" + RParen@11..12 ")" + WhiteSpace@12..13 " " + BlockExpr@13..30 + LBrace@13..14 "{" + Newline@14..15 "\n" + WhiteSpace@15..19 " " + LetStmt@19..28 + LetKw@19..22 "let" + WhiteSpace@22..23 " " + PathPat@23..24 + Path@23..24 + PathSegment@23..24 + Ident@23..24 "x" + WhiteSpace@24..25 " " + Eq@25..26 "=" + WhiteSpace@26..27 " " + LitExpr@27..28 + Lit@27..28 + Int@27..28 "1" + Newline@28..29 "\n" + RBrace@29..30 "}" + Newline@30..32 "\n\n" + Item@32..80 + Func@32..80 + FnKw@32..34 "fn" + WhiteSpace@34..35 " " + Ident@35..38 "bar" + FuncParamList@38..63 + LParen@38..39 "(" + FnParam@39..47 + Ident@39..42 "bar" + Colon@42..43 ":" + WhiteSpace@43..44 " " + PathType@44..47 + Path@44..47 + PathSegment@44..47 + Ident@44..47 "i32" + Comma@47..48 "," + WhiteSpace@48..49 " " + FnParam@49..62 + MutKw@49..52 "mut" + WhiteSpace@52..53 " " + Ident@53..56 "baz" + Colon@56..57 ":" + WhiteSpace@57..58 " " + PathType@58..62 + Path@58..62 + PathSegment@58..62 + Ident@58..62 "u256" + RParen@62..63 ")" + WhiteSpace@63..64 " " + Arrow@64..66 "->" + WhiteSpace@66..67 " " + PathType@67..70 + Path@67..70 + PathSegment@67..70 + Ident@67..70 "i32" + WhiteSpace@70..71 " " + BlockExpr@71..80 + LBrace@71..72 "{" + Newline@72..73 "\n" + WhiteSpace@73..77 " " + ExprStmt@77..78 + LitExpr@77..78 + Lit@77..78 + Int@77..78 "1" + Newline@78..79 "\n" + RBrace@79..80 "}" + Newline@80..82 "\n\n" + Item@82..178 + Func@82..178 + FnKw@82..84 "fn" + WhiteSpace@84..85 " " + Ident@85..88 "baz" + FuncParamList@88..161 + LParen@88..89 "(" + FnParam@89..109 + Ident@89..93 "from" + 
WhiteSpace@93..94 " " + Ident@94..100 "sender" + Colon@100..101 ":" + WhiteSpace@101..102 " " + PathType@102..109 + Path@102..109 + PathSegment@102..109 + Ident@102..109 "address" + Comma@109..110 "," + WhiteSpace@110..111 " " + FnParam@111..136 + MutKw@111..114 "mut" + WhiteSpace@114..115 " " + Ident@115..117 "to" + WhiteSpace@117..118 " " + Ident@118..127 "recipient" + Colon@127..128 ":" + WhiteSpace@128..129 " " + PathType@129..136 + Path@129..136 + PathSegment@129..136 + Ident@129..136 "address" + Comma@136..137 "," + WhiteSpace@137..138 " " + FnParam@138..149 + Underscore@138..139 "_" + WhiteSpace@139..140 " " + Ident@140..143 "val" + Colon@143..144 ":" + WhiteSpace@144..145 " " + PathType@145..149 + Path@145..149 + PathSegment@145..149 + Ident@145..149 "u256" + Comma@149..150 "," + WhiteSpace@150..151 " " + FnParam@151..160 + Underscore@151..152 "_" + WhiteSpace@152..153 " " + Underscore@153..154 "_" + Colon@154..155 ":" + WhiteSpace@155..156 " " + PathType@156..160 + Path@156..160 + PathSegment@156..160 + Ident@156..160 "u256" + RParen@160..161 ")" + WhiteSpace@161..162 " " + Arrow@162..164 "->" + WhiteSpace@164..165 " " + PathType@165..168 + Path@165..168 + PathSegment@165..168 + Ident@165..168 "i32" + WhiteSpace@168..169 " " + BlockExpr@169..178 + LBrace@169..170 "{" + Newline@170..171 "\n" + WhiteSpace@171..175 " " + ExprStmt@175..176 + LitExpr@175..176 + Lit@175..176 + Int@175..176 "1" + Newline@176..177 "\n" + RBrace@177..178 "}" + Newline@178..180 "\n\n" + Item@180..296 + Func@180..296 + FnKw@180..182 "fn" + WhiteSpace@182..183 " " + Ident@183..192 "generics1" + GenericParamList@192..205 + Lt@192..193 "<" + TypeGenericParam@193..201 + Ident@193..194 "T" + TypeBoundList@194..201 + Colon@194..195 ":" + WhiteSpace@195..196 " " + TypeBound@196..201 + TraitRef@196..201 + Path@196..201 + PathSegment@196..201 + Ident@196..201 "Trait" + Comma@201..202 "," + WhiteSpace@202..203 " " + TypeGenericParam@203..204 + Ident@203..204 "U" + Gt@204..205 ">" + 
FuncParamList@205..225 + LParen@205..206 "(" + FnParam@206..210 + Ident@206..207 "t" + Colon@207..208 ":" + WhiteSpace@208..209 " " + PathType@209..210 + Path@209..210 + PathSegment@209..210 + Ident@209..210 "T" + Comma@210..211 "," + WhiteSpace@211..212 " " + FnParam@212..224 + Ident@212..213 "u" + Colon@213..214 ":" + WhiteSpace@214..215 " " + PathType@215..224 + Path@215..224 + PathSegment@215..224 + Ident@215..221 "Option" + GenericArgList@221..224 + Lt@221..222 "<" + TypeGenericArg@222..223 + PathType@222..223 + Path@222..223 + PathSegment@222..223 + Ident@222..223 "U" + Gt@223..224 ">" + RParen@224..225 ")" + WhiteSpace@225..226 " " + Arrow@226..228 "->" + WhiteSpace@228..229 " " + PathType@229..230 + Path@229..230 + PathSegment@229..230 + Ident@229..230 "T" + Newline@230..231 "\n" + WhiteSpace@231..235 " " + WhereClause@235..285 + WhereKw@235..240 "where" + WhiteSpace@240..241 " " + WherePredicate@241..257 + PathType@241..250 + Path@241..250 + PathSegment@241..250 + Ident@241..247 "Result" + GenericArgList@247..250 + Lt@247..248 "<" + TypeGenericArg@248..249 + PathType@248..249 + Path@248..249 + PathSegment@248..249 + Ident@248..249 "T" + Gt@249..250 ">" + TypeBoundList@250..257 + Colon@250..251 ":" + WhiteSpace@251..252 " " + TypeBound@252..257 + TraitRef@252..257 + Path@252..257 + PathSegment@252..257 + Ident@252..257 "Trait" + Comma@257..258 "," + Newline@258..259 "\n" + WhiteSpace@259..269 " " + WherePredicate@269..285 + PathType@269..278 + Path@269..278 + PathSegment@269..278 + Ident@269..275 "Option" + GenericArgList@275..278 + Lt@275..276 "<" + TypeGenericArg@276..277 + PathType@276..277 + Path@276..277 + PathSegment@276..277 + Ident@276..277 "U" + Gt@277..278 ">" + TypeBoundList@278..285 + Colon@278..279 ":" + WhiteSpace@279..280 " " + TypeBound@280..285 + TraitRef@280..285 + Path@280..285 + PathSegment@280..285 + Ident@280..285 "Clone" + Newline@285..287 "\n\n" + BlockExpr@287..296 + LBrace@287..288 "{" + Newline@288..289 "\n" + WhiteSpace@289..293 
" " + ExprStmt@293..294 + PathExpr@293..294 + Path@293..294 + PathSegment@293..294 + Ident@293..294 "t" + Newline@294..295 "\n" + RBrace@295..296 "}" + Newline@296..298 "\n\n" + Item@298..351 + Func@298..351 + FnKw@298..300 "fn" + WhiteSpace@300..301 " " + Ident@301..305 "decl" + GenericParamList@305..311 + Lt@305..306 "<" + TypeGenericParam@306..307 + Ident@306..307 "T" + Comma@307..308 "," + WhiteSpace@308..309 " " + TypeGenericParam@309..310 + Ident@309..310 "U" + Gt@310..311 ">" + FuncParamList@311..330 + LParen@311..312 "(" + FnParam@312..329 + Ident@312..313 "t" + Colon@313..314 ":" + WhiteSpace@314..315 " " + PathType@315..329 + Path@315..329 + PathSegment@315..329 + Ident@315..323 "MyStruct" + GenericArgList@323..329 + Lt@323..324 "<" + TypeGenericArg@324..325 + PathType@324..325 + Path@324..325 + PathSegment@324..325 + Ident@324..325 "T" + Comma@325..326 "," + WhiteSpace@326..327 " " + TypeGenericArg@327..328 + PathType@327..328 + Path@327..328 + PathSegment@327..328 + Ident@327..328 "U" + Gt@328..329 ">" + RParen@329..330 ")" + WhiteSpace@330..331 " " + Arrow@331..333 "->" + WhiteSpace@333..334 " " + PathType@334..348 + Path@334..348 + PathSegment@334..348 + Ident@334..340 "Result" + GenericArgList@340..348 + Lt@340..341 "<" + TypeGenericArg@341..342 + PathType@341..342 + Path@341..342 + PathSegment@341..342 + Ident@341..342 "T" + Comma@342..343 "," + WhiteSpace@343..344 " " + TypeGenericArg@344..347 + PathType@344..347 + Path@344..347 + PathSegment@344..347 + Ident@344..347 "Err" + Gt@347..348 ">" + WhiteSpace@348..349 " " + BlockExpr@349..351 + LBrace@349..350 "{" + RBrace@350..351 "}" + diff --git a/crates/parser2/test_files/syntax_node/items/impl.fe b/crates/parser2/test_files/syntax_node/items/impl.fe new file mode 100644 index 0000000000..db27e486f0 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/items/impl.fe @@ -0,0 +1,17 @@ +impl Foo::Bar { + pub fn add(self, rhs: Self) -> Self { + Self { + val: self.val + rhs.val + } + } +} + +impl Foo 
+where Foo: Clone +{ + fn add>(self, rhs: U) + where T: Copy + { + (rhs - self.t) + } +} \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/items/impl.snap b/crates/parser2/test_files/syntax_node/items/impl.snap new file mode 100644 index 0000000000..fdf40976fb --- /dev/null +++ b/crates/parser2/test_files/syntax_node/items/impl.snap @@ -0,0 +1,257 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: node +input_file: crates/parser2/test_files/syntax_node/items/impl.fe +--- +Root@0..272 + ItemList@0..272 + Item@0..137 + Impl@0..137 + ImplKw@0..4 "impl" + GenericParamList@4..12 + Lt@4..5 "<" + TypeGenericParam@5..11 + Ident@5..6 "T" + TypeBoundList@6..11 + Colon@6..7 ":" + WhiteSpace@7..8 " " + TypeBound@8..11 + TraitRef@8..11 + Path@8..11 + PathSegment@8..11 + Ident@8..11 "Add" + Gt@11..12 ">" + WhiteSpace@12..13 " " + PathType@13..24 + Path@13..24 + PathSegment@13..16 + Ident@13..16 "Foo" + Colon2@16..18 "::" + PathSegment@18..24 + Ident@18..21 "Bar" + GenericArgList@21..24 + Lt@21..22 "<" + TypeGenericArg@22..23 + PathType@22..23 + Path@22..23 + PathSegment@22..23 + Ident@22..23 "T" + Gt@23..24 ">" + WhiteSpace@24..25 " " + ImplItemList@25..137 + LBrace@25..26 "{" + Newline@26..27 "\n" + WhiteSpace@27..31 " " + Func@31..135 + ItemModifier@31..34 + PubKw@31..34 "pub" + WhiteSpace@34..35 " " + FnKw@35..37 "fn" + WhiteSpace@37..38 " " + Ident@38..41 "add" + FuncParamList@41..58 + LParen@41..42 "(" + FnParam@42..46 + SelfKw@42..46 "self" + Comma@46..47 "," + WhiteSpace@47..48 " " + FnParam@48..57 + Ident@48..51 "rhs" + Colon@51..52 ":" + WhiteSpace@52..53 " " + SelfType@53..57 + SelfTypeKw@53..57 "Self" + RParen@57..58 ")" + WhiteSpace@58..59 " " + Arrow@59..61 "->" + WhiteSpace@61..62 " " + SelfType@62..66 + SelfTypeKw@62..66 "Self" + WhiteSpace@66..67 " " + BlockExpr@67..135 + LBrace@67..68 "{" + Newline@68..69 "\n" + WhiteSpace@69..77 " " + ExprStmt@77..129 + RecordInitExpr@77..129 + Path@77..81 + PathSegment@77..81 + 
SelfTypeKw@77..81 "Self" + WhiteSpace@81..82 " " + RecordFieldList@82..129 + LBrace@82..83 "{" + Newline@83..84 "\n" + WhiteSpace@84..96 " " + RecordField@96..119 + Ident@96..99 "val" + Colon@99..100 ":" + WhiteSpace@100..101 " " + BinExpr@101..119 + FieldExpr@101..109 + PathExpr@101..105 + Path@101..105 + PathSegment@101..105 + SelfKw@101..105 "self" + Dot@105..106 "." + Ident@106..109 "val" + WhiteSpace@109..110 " " + Plus@110..111 "+" + WhiteSpace@111..112 " " + FieldExpr@112..119 + PathExpr@112..115 + Path@112..115 + PathSegment@112..115 + Ident@112..115 "rhs" + Dot@115..116 "." + Ident@116..119 "val" + Newline@119..120 "\n" + WhiteSpace@120..128 " " + RBrace@128..129 "}" + Newline@129..130 "\n" + WhiteSpace@130..134 " " + RBrace@134..135 "}" + Newline@135..136 "\n" + RBrace@136..137 "}" + Newline@137..139 "\n\n" + Item@139..272 + Impl@139..272 + ImplKw@139..143 "impl" + GenericParamList@143..146 + Lt@143..144 "<" + TypeGenericParam@144..145 + Ident@144..145 "T" + Gt@145..146 ">" + WhiteSpace@146..147 " " + PathType@147..153 + Path@147..153 + PathSegment@147..153 + Ident@147..150 "Foo" + GenericArgList@150..153 + Lt@150..151 "<" + TypeGenericArg@151..152 + PathType@151..152 + Path@151..152 + PathSegment@151..152 + Ident@151..152 "T" + Gt@152..153 ">" + WhiteSpace@153..154 " " + Newline@154..155 "\n" + WhereClause@155..174 + WhereKw@155..160 "where" + WhiteSpace@160..161 " " + WherePredicate@161..174 + PathType@161..167 + Path@161..167 + PathSegment@161..167 + Ident@161..164 "Foo" + GenericArgList@164..167 + Lt@164..165 "<" + TypeGenericArg@165..166 + PathType@165..166 + Path@165..166 + PathSegment@165..166 + Ident@165..166 "T" + Gt@166..167 ">" + TypeBoundList@167..174 + Colon@167..168 ":" + WhiteSpace@168..169 " " + TypeBound@169..174 + TraitRef@169..174 + Path@169..174 + PathSegment@169..174 + Ident@169..174 "Clone" + Newline@174..175 "\n" + ImplItemList@175..272 + LBrace@175..176 "{" + Newline@176..177 "\n" + WhiteSpace@177..181 " " + Func@181..270 + 
FnKw@181..183 "fn" + WhiteSpace@183..184 " " + Ident@184..187 "add" + GenericParamList@187..198 + Lt@187..188 "<" + TypeGenericParam@188..197 + Ident@188..189 "U" + TypeBoundList@189..197 + Colon@189..190 ":" + WhiteSpace@190..191 " " + TypeBound@191..197 + TraitRef@191..197 + Path@191..197 + PathSegment@191..197 + Ident@191..194 "Add" + GenericArgList@194..197 + Lt@194..195 "<" + TypeGenericArg@195..196 + PathType@195..196 + Path@195..196 + PathSegment@195..196 + Ident@195..196 "T" + Gt@196..197 ">" + Gt@197..198 ">" + FuncParamList@198..212 + LParen@198..199 "(" + FnParam@199..203 + SelfKw@199..203 "self" + Comma@203..204 "," + WhiteSpace@204..205 " " + FnParam@205..211 + Ident@205..208 "rhs" + Colon@208..209 ":" + WhiteSpace@209..210 " " + PathType@210..211 + Path@210..211 + PathSegment@210..211 + Ident@210..211 "U" + RParen@211..212 ")" + WhiteSpace@212..213 " " + Newline@213..214 "\n" + WhiteSpace@214..222 " " + WhereClause@222..235 + WhereKw@222..227 "where" + WhiteSpace@227..228 " " + WherePredicate@228..235 + PathType@228..229 + Path@228..229 + PathSegment@228..229 + Ident@228..229 "T" + TypeBoundList@229..235 + Colon@229..230 ":" + WhiteSpace@230..231 " " + TypeBound@231..235 + TraitRef@231..235 + Path@231..235 + PathSegment@231..235 + Ident@231..235 "Copy" + Newline@235..236 "\n" + WhiteSpace@236..240 " " + BlockExpr@240..270 + LBrace@240..241 "{" + Newline@241..242 "\n" + WhiteSpace@242..250 " " + ExprStmt@250..264 + ParenExpr@250..264 + LParen@250..251 "(" + BinExpr@251..263 + PathExpr@251..254 + Path@251..254 + PathSegment@251..254 + Ident@251..254 "rhs" + WhiteSpace@254..255 " " + Minus@255..256 "-" + WhiteSpace@256..257 " " + FieldExpr@257..263 + PathExpr@257..261 + Path@257..261 + PathSegment@257..261 + SelfKw@257..261 "self" + Dot@261..262 "." 
+ Ident@262..263 "t" + RParen@263..264 ")" + Newline@264..265 "\n" + WhiteSpace@265..269 " " + RBrace@269..270 "}" + Newline@270..271 "\n" + RBrace@271..272 "}" + diff --git a/crates/parser2/test_files/syntax_node/items/impl_trait.fe b/crates/parser2/test_files/syntax_node/items/impl_trait.fe new file mode 100644 index 0000000000..dfd87c9005 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/items/impl_trait.fe @@ -0,0 +1,22 @@ +impl Trait for F { + fn foo() { + return 1 + } +} + +impl Trait for F +where T: Clone, + U: Bar +{ + fn foo>(t: T) { + do_something(t) + } +} + +impl Trait for F +where U: Bar +{ + fn foo>(t: T) { + do_something(t) + } +} \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/items/impl_trait.snap b/crates/parser2/test_files/syntax_node/items/impl_trait.snap new file mode 100644 index 0000000000..f1d68e1719 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/items/impl_trait.snap @@ -0,0 +1,372 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: node +input_file: crates/parser2/test_files/syntax_node/items/impl_trait.fe +--- +Root@0..334 + ItemList@0..334 + Item@0..67 + ImplTrait@0..67 + ImplKw@0..4 "impl" + GenericParamList@4..7 + Lt@4..5 "<" + TypeGenericParam@5..6 + Ident@5..6 "T" + Gt@6..7 ">" + WhiteSpace@7..8 " " + TraitRef@8..16 + Path@8..16 + PathSegment@8..16 + Ident@8..13 "Trait" + GenericArgList@13..16 + Lt@13..14 "<" + TypeGenericArg@14..15 + PathType@14..15 + Path@14..15 + PathSegment@14..15 + Ident@14..15 "T" + Gt@15..16 ">" + WhiteSpace@16..17 " " + ForKw@17..20 "for" + WhiteSpace@20..21 " " + PathType@21..25 + Path@21..25 + PathSegment@21..25 + Ident@21..22 "F" + GenericArgList@22..25 + Lt@22..23 "<" + TypeGenericArg@23..24 + PathType@23..24 + Path@23..24 + PathSegment@23..24 + Ident@23..24 "T" + Gt@24..25 ">" + WhiteSpace@25..26 " " + ImplTraitItemList@26..67 + LBrace@26..27 "{" + Newline@27..28 "\n" + WhiteSpace@28..32 " " + Func@32..65 + FnKw@32..34 "fn" + 
WhiteSpace@34..35 " " + Ident@35..38 "foo" + FuncParamList@38..40 + LParen@38..39 "(" + RParen@39..40 ")" + WhiteSpace@40..41 " " + BlockExpr@41..65 + LBrace@41..42 "{" + Newline@42..43 "\n" + WhiteSpace@43..51 " " + ReturnStmt@51..59 + ReturnKw@51..57 "return" + WhiteSpace@57..58 " " + LitExpr@58..59 + Lit@58..59 + Int@58..59 "1" + Newline@59..60 "\n" + WhiteSpace@60..64 " " + RBrace@64..65 "}" + Newline@65..66 "\n" + RBrace@66..67 "}" + Newline@67..69 "\n\n" + Item@69..205 + ImplTrait@69..205 + ImplKw@69..73 "impl" + GenericParamList@73..79 + Lt@73..74 "<" + TypeGenericParam@74..75 + Ident@74..75 "T" + Comma@75..76 "," + WhiteSpace@76..77 " " + TypeGenericParam@77..78 + Ident@77..78 "U" + Gt@78..79 ">" + WhiteSpace@79..80 " " + TraitRef@80..91 + Path@80..91 + PathSegment@80..91 + Ident@80..85 "Trait" + GenericArgList@85..91 + Lt@85..86 "<" + TypeGenericArg@86..87 + PathType@86..87 + Path@86..87 + PathSegment@86..87 + Ident@86..87 "T" + Comma@87..88 "," + WhiteSpace@88..89 " " + TypeGenericArg@89..90 + PathType@89..90 + Path@89..90 + PathSegment@89..90 + Ident@89..90 "U" + Gt@90..91 ">" + WhiteSpace@91..92 " " + ForKw@92..95 "for" + WhiteSpace@95..96 " " + PathType@96..100 + Path@96..100 + PathSegment@96..100 + Ident@96..97 "F" + GenericArgList@97..100 + Lt@97..98 "<" + TypeGenericArg@98..99 + PathType@98..99 + Path@98..99 + PathSegment@98..99 + Ident@98..99 "T" + Gt@99..100 ">" + Newline@100..101 "\n" + WhereClause@101..129 + WhereKw@101..106 "where" + WhiteSpace@106..107 " " + WherePredicate@107..115 + PathType@107..108 + Path@107..108 + PathSegment@107..108 + Ident@107..108 "T" + TypeBoundList@108..115 + Colon@108..109 ":" + WhiteSpace@109..110 " " + TypeBound@110..115 + TraitRef@110..115 + Path@110..115 + PathSegment@110..115 + Ident@110..115 "Clone" + Comma@115..116 "," + Newline@116..117 "\n" + WhiteSpace@117..123 " " + WherePredicate@123..129 + PathType@123..124 + Path@123..124 + PathSegment@123..124 + Ident@123..124 "U" + TypeBoundList@124..129 + 
Colon@124..125 ":" + WhiteSpace@125..126 " " + TypeBound@126..129 + TraitRef@126..129 + Path@126..129 + PathSegment@126..129 + Ident@126..129 "Bar" + Newline@129..130 "\n" + ImplTraitItemList@130..205 + LBrace@130..131 "{" + Newline@131..132 "\n" + WhiteSpace@132..136 " " + Func@136..203 + FnKw@136..138 "fn" + WhiteSpace@138..139 " " + Ident@139..142 "foo" + GenericParamList@142..160 + Lt@142..143 "<" + TypeGenericParam@143..159 + Ident@143..144 "T" + TypeBoundList@144..159 + Colon@144..145 ":" + WhiteSpace@145..146 " " + TypeBound@146..159 + TraitRef@146..159 + Path@146..159 + PathSegment@146..159 + Ident@146..156 "OtherTrait" + GenericArgList@156..159 + Lt@156..157 "<" + TypeGenericArg@157..158 + PathType@157..158 + Path@157..158 + PathSegment@157..158 + Ident@157..158 "U" + Gt@158..159 ">" + Gt@159..160 ">" + FuncParamList@160..166 + LParen@160..161 "(" + FnParam@161..165 + Ident@161..162 "t" + Colon@162..163 ":" + WhiteSpace@163..164 " " + PathType@164..165 + Path@164..165 + PathSegment@164..165 + Ident@164..165 "T" + RParen@165..166 ")" + WhiteSpace@166..167 " " + BlockExpr@167..203 + LBrace@167..168 "{" + Newline@168..169 "\n" + WhiteSpace@169..177 " " + ExprStmt@177..197 + CallExpr@177..197 + PathExpr@177..194 + Path@177..194 + PathSegment@177..194 + Ident@177..189 "do_something" + GenericArgList@189..194 + Lt@189..190 "<" + TypeGenericArg@190..193 + PathType@190..193 + Path@190..193 + PathSegment@190..193 + Ident@190..193 "i32" + Gt@193..194 ">" + CallArgList@194..197 + LParen@194..195 "(" + CallArg@195..196 + PathExpr@195..196 + Path@195..196 + PathSegment@195..196 + Ident@195..196 "t" + RParen@196..197 ")" + Newline@197..198 "\n" + WhiteSpace@198..202 " " + RBrace@202..203 "}" + Newline@203..204 "\n" + RBrace@204..205 "}" + Newline@205..207 "\n\n" + Item@207..334 + ImplTrait@207..334 + ImplKw@207..211 "impl" + GenericParamList@211..224 + Lt@211..212 "<" + TypeGenericParam@212..220 + Ident@212..213 "T" + TypeBoundList@213..220 + Colon@213..214 ":" + 
WhiteSpace@214..215 " " + TypeBound@215..220 + TraitRef@215..220 + Path@215..220 + PathSegment@215..220 + Ident@215..220 "Clone" + Comma@220..221 "," + WhiteSpace@221..222 " " + TypeGenericParam@222..223 + Ident@222..223 "U" + Gt@223..224 ">" + WhiteSpace@224..225 " " + TraitRef@225..236 + Path@225..236 + PathSegment@225..236 + Ident@225..230 "Trait" + GenericArgList@230..236 + Lt@230..231 "<" + TypeGenericArg@231..232 + PathType@231..232 + Path@231..232 + PathSegment@231..232 + Ident@231..232 "T" + Comma@232..233 "," + WhiteSpace@233..234 " " + TypeGenericArg@234..235 + PathType@234..235 + Path@234..235 + PathSegment@234..235 + Ident@234..235 "U" + Gt@235..236 ">" + WhiteSpace@236..237 " " + ForKw@237..240 "for" + WhiteSpace@240..241 " " + PathType@241..245 + Path@241..245 + PathSegment@241..245 + Ident@241..242 "F" + GenericArgList@242..245 + Lt@242..243 "<" + TypeGenericArg@243..244 + PathType@243..244 + Path@243..244 + PathSegment@243..244 + Ident@243..244 "U" + Gt@244..245 ">" + Newline@245..246 "\n" + WhereClause@246..258 + WhereKw@246..251 "where" + WhiteSpace@251..252 " " + WherePredicate@252..258 + PathType@252..253 + Path@252..253 + PathSegment@252..253 + Ident@252..253 "U" + TypeBoundList@253..258 + Colon@253..254 ":" + WhiteSpace@254..255 " " + TypeBound@255..258 + TraitRef@255..258 + Path@255..258 + PathSegment@255..258 + Ident@255..258 "Bar" + Newline@258..259 "\n" + ImplTraitItemList@259..334 + LBrace@259..260 "{" + Newline@260..261 "\n" + WhiteSpace@261..265 " " + Func@265..332 + FnKw@265..267 "fn" + WhiteSpace@267..268 " " + Ident@268..271 "foo" + GenericParamList@271..289 + Lt@271..272 "<" + TypeGenericParam@272..288 + Ident@272..273 "T" + TypeBoundList@273..288 + Colon@273..274 ":" + WhiteSpace@274..275 " " + TypeBound@275..288 + TraitRef@275..288 + Path@275..288 + PathSegment@275..288 + Ident@275..285 "OtherTrait" + GenericArgList@285..288 + Lt@285..286 "<" + TypeGenericArg@286..287 + PathType@286..287 + Path@286..287 + PathSegment@286..287 + 
Ident@286..287 "U" + Gt@287..288 ">" + Gt@288..289 ">" + FuncParamList@289..295 + LParen@289..290 "(" + FnParam@290..294 + Ident@290..291 "t" + Colon@291..292 ":" + WhiteSpace@292..293 " " + PathType@293..294 + Path@293..294 + PathSegment@293..294 + Ident@293..294 "T" + RParen@294..295 ")" + WhiteSpace@295..296 " " + BlockExpr@296..332 + LBrace@296..297 "{" + Newline@297..298 "\n" + WhiteSpace@298..306 " " + ExprStmt@306..326 + CallExpr@306..326 + PathExpr@306..323 + Path@306..323 + PathSegment@306..323 + Ident@306..318 "do_something" + GenericArgList@318..323 + Lt@318..319 "<" + TypeGenericArg@319..322 + PathType@319..322 + Path@319..322 + PathSegment@319..322 + Ident@319..322 "i32" + Gt@322..323 ">" + CallArgList@323..326 + LParen@323..324 "(" + CallArg@324..325 + PathExpr@324..325 + Path@324..325 + PathSegment@324..325 + Ident@324..325 "t" + RParen@325..326 ")" + Newline@326..327 "\n" + WhiteSpace@327..331 " " + RBrace@331..332 "}" + Newline@332..333 "\n" + RBrace@333..334 "}" + diff --git a/crates/parser2/test_files/syntax_node/items/mod.fe b/crates/parser2/test_files/syntax_node/items/mod.fe new file mode 100644 index 0000000000..5b810b9b8b --- /dev/null +++ b/crates/parser2/test_files/syntax_node/items/mod.fe @@ -0,0 +1,11 @@ +pub mod foo { + fn foo_foo(bar: i32, mut baz: u256) -> i32 { + 1 + } + + pub struct Foo {} +} + +pub mod bar { + pub struct Bar {} +} \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/items/mod.snap b/crates/parser2/test_files/syntax_node/items/mod.snap new file mode 100644 index 0000000000..60b6e63e9e --- /dev/null +++ b/crates/parser2/test_files/syntax_node/items/mod.snap @@ -0,0 +1,114 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: node +input_file: crates/parser2/test_files/syntax_node/items/mod.fe +--- +Root@0..146 + ItemList@0..146 + Item@0..107 + Mod@0..107 + ItemModifier@0..3 + PubKw@0..3 "pub" + WhiteSpace@3..4 " " + ModKw@4..7 "mod" + WhiteSpace@7..8 " " + Ident@8..11 "foo" + 
WhiteSpace@11..12 " " + ItemList@12..107 + LBrace@12..13 "{" + Newline@13..14 "\n" + WhiteSpace@14..18 " " + Item@18..78 + Func@18..78 + FnKw@18..20 "fn" + WhiteSpace@20..21 " " + Ident@21..28 "foo_foo" + FuncParamList@28..53 + LParen@28..29 "(" + FnParam@29..37 + Ident@29..32 "bar" + Colon@32..33 ":" + WhiteSpace@33..34 " " + PathType@34..37 + Path@34..37 + PathSegment@34..37 + Ident@34..37 "i32" + Comma@37..38 "," + WhiteSpace@38..39 " " + FnParam@39..52 + MutKw@39..42 "mut" + WhiteSpace@42..43 " " + Ident@43..46 "baz" + Colon@46..47 ":" + WhiteSpace@47..48 " " + PathType@48..52 + Path@48..52 + PathSegment@48..52 + Ident@48..52 "u256" + RParen@52..53 ")" + WhiteSpace@53..54 " " + Arrow@54..56 "->" + WhiteSpace@56..57 " " + PathType@57..60 + Path@57..60 + PathSegment@57..60 + Ident@57..60 "i32" + WhiteSpace@60..61 " " + BlockExpr@61..78 + LBrace@61..62 "{" + Newline@62..63 "\n" + WhiteSpace@63..71 " " + ExprStmt@71..72 + LitExpr@71..72 + Lit@71..72 + Int@71..72 "1" + Newline@72..73 "\n" + WhiteSpace@73..77 " " + RBrace@77..78 "}" + Newline@78..79 "\n" + WhiteSpace@79..83 " " + Newline@83..84 "\n" + WhiteSpace@84..88 " " + Item@88..105 + Struct@88..105 + ItemModifier@88..91 + PubKw@88..91 "pub" + WhiteSpace@91..92 " " + StructKw@92..98 "struct" + WhiteSpace@98..99 " " + Ident@99..102 "Foo" + WhiteSpace@102..103 " " + RecordFieldDefList@103..105 + LBrace@103..104 "{" + RBrace@104..105 "}" + Newline@105..106 "\n" + RBrace@106..107 "}" + Newline@107..109 "\n\n" + Item@109..146 + Mod@109..146 + ItemModifier@109..112 + PubKw@109..112 "pub" + WhiteSpace@112..113 " " + ModKw@113..116 "mod" + WhiteSpace@116..117 " " + Ident@117..120 "bar" + WhiteSpace@120..121 " " + ItemList@121..146 + LBrace@121..122 "{" + Newline@122..123 "\n" + WhiteSpace@123..127 " " + Item@127..144 + Struct@127..144 + ItemModifier@127..130 + PubKw@127..130 "pub" + WhiteSpace@130..131 " " + StructKw@131..137 "struct" + WhiteSpace@137..138 " " + Ident@138..141 "Bar" + WhiteSpace@141..142 " " + 
RecordFieldDefList@142..144 + LBrace@142..143 "{" + RBrace@143..144 "}" + Newline@144..145 "\n" + RBrace@145..146 "}" + diff --git a/crates/parser2/test_files/syntax_node/items/path_generic.fe b/crates/parser2/test_files/syntax_node/items/path_generic.fe new file mode 100644 index 0000000000..faf6de6c1e --- /dev/null +++ b/crates/parser2/test_files/syntax_node/items/path_generic.fe @@ -0,0 +1,17 @@ +struct Foo { + t: T +} + +impl Foo { + fn method(self) -> T { + self.t + } +} + +fn foo() { + // Deciding the `Foo` type is not possible without a type argument for `Foo`. + let x = Foo::method() + + // We need this! + let x = Foo::method() +} diff --git a/crates/parser2/test_files/syntax_node/items/path_generic.snap b/crates/parser2/test_files/syntax_node/items/path_generic.snap new file mode 100644 index 0000000000..16997e91ec --- /dev/null +++ b/crates/parser2/test_files/syntax_node/items/path_generic.snap @@ -0,0 +1,169 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: node +input_file: crates/parser2/test_files/syntax_node/items/path_generic.fe +--- +Root@0..270 + ItemList@0..269 + Item@0..26 + Struct@0..26 + StructKw@0..6 "struct" + WhiteSpace@6..7 " " + Ident@7..10 "Foo" + GenericParamList@10..13 + Lt@10..11 "<" + TypeGenericParam@11..12 + Ident@11..12 "T" + Gt@12..13 ">" + WhiteSpace@13..14 " " + RecordFieldDefList@14..26 + LBrace@14..15 "{" + Newline@15..16 "\n" + WhiteSpace@16..20 " " + RecordFieldDef@20..24 + Ident@20..21 "t" + Colon@21..22 ":" + WhiteSpace@22..23 " " + PathType@23..24 + Path@23..24 + PathSegment@23..24 + Ident@23..24 "T" + Newline@24..25 "\n" + RBrace@25..26 "}" + Newline@26..28 "\n\n" + Item@28..94 + Impl@28..94 + ImplKw@28..32 "impl" + GenericParamList@32..35 + Lt@32..33 "<" + TypeGenericParam@33..34 + Ident@33..34 "T" + Gt@34..35 ">" + WhiteSpace@35..36 " " + PathType@36..42 + Path@36..42 + PathSegment@36..42 + Ident@36..39 "Foo" + GenericArgList@39..42 + Lt@39..40 "<" + TypeGenericArg@40..41 + PathType@40..41 + 
Path@40..41 + PathSegment@40..41 + Ident@40..41 "T" + Gt@41..42 ">" + WhiteSpace@42..43 " " + ImplItemList@43..94 + LBrace@43..44 "{" + Newline@44..45 "\n" + WhiteSpace@45..49 " " + Func@49..92 + FnKw@49..51 "fn" + WhiteSpace@51..52 " " + Ident@52..58 "method" + FuncParamList@58..64 + LParen@58..59 "(" + FnParam@59..63 + SelfKw@59..63 "self" + RParen@63..64 ")" + WhiteSpace@64..65 " " + Arrow@65..67 "->" + WhiteSpace@67..68 " " + PathType@68..69 + Path@68..69 + PathSegment@68..69 + Ident@68..69 "T" + WhiteSpace@69..70 " " + BlockExpr@70..92 + LBrace@70..71 "{" + Newline@71..72 "\n" + WhiteSpace@72..80 " " + ExprStmt@80..86 + FieldExpr@80..86 + PathExpr@80..84 + Path@80..84 + PathSegment@80..84 + SelfKw@80..84 "self" + Dot@84..85 "." + Ident@85..86 "t" + Newline@86..87 "\n" + WhiteSpace@87..91 " " + RBrace@91..92 "}" + Newline@92..93 "\n" + RBrace@93..94 "}" + Newline@94..96 "\n\n" + Item@96..269 + Func@96..269 + FnKw@96..98 "fn" + WhiteSpace@98..99 " " + Ident@99..102 "foo" + FuncParamList@102..104 + LParen@102..103 "(" + RParen@103..104 ")" + WhiteSpace@104..105 " " + BlockExpr@105..269 + LBrace@105..106 "{" + Newline@106..107 "\n" + WhiteSpace@107..111 " " + Comment@111..188 "// Deciding the `Foo` ..." + Newline@188..189 "\n" + WhiteSpace@189..193 " " + LetStmt@193..214 + LetKw@193..196 "let" + WhiteSpace@196..197 " " + PathPat@197..198 + Path@197..198 + PathSegment@197..198 + Ident@197..198 "x" + WhiteSpace@198..199 " " + Eq@199..200 "=" + WhiteSpace@200..201 " " + CallExpr@201..214 + PathExpr@201..212 + Path@201..212 + PathSegment@201..204 + Ident@201..204 "Foo" + Colon2@204..206 "::" + PathSegment@206..212 + Ident@206..212 "method" + CallArgList@212..214 + LParen@212..213 "(" + RParen@213..214 ")" + Newline@214..216 "\n\n" + WhiteSpace@216..220 " " + Comment@220..236 "// We need this!" 
+ Newline@236..237 "\n" + WhiteSpace@237..241 " " + LetStmt@241..267 + LetKw@241..244 "let" + WhiteSpace@244..245 " " + PathPat@245..246 + Path@245..246 + PathSegment@245..246 + Ident@245..246 "x" + WhiteSpace@246..247 " " + Eq@247..248 "=" + WhiteSpace@248..249 " " + CallExpr@249..267 + PathExpr@249..265 + Path@249..265 + PathSegment@249..257 + Ident@249..252 "Foo" + GenericArgList@252..257 + Lt@252..253 "<" + TypeGenericArg@253..256 + PathType@253..256 + Path@253..256 + PathSegment@253..256 + Ident@253..256 "i32" + Gt@256..257 ">" + Colon2@257..259 "::" + PathSegment@259..265 + Ident@259..265 "method" + CallArgList@265..267 + LParen@265..266 "(" + RParen@266..267 ")" + Newline@267..268 "\n" + RBrace@268..269 "}" + Newline@269..270 "\n" + diff --git a/crates/parser2/test_files/syntax_node/items/trait.fe b/crates/parser2/test_files/syntax_node/items/trait.fe new file mode 100644 index 0000000000..938bb0940f --- /dev/null +++ b/crates/parser2/test_files/syntax_node/items/trait.fe @@ -0,0 +1,33 @@ +trait Marker {} + +pub trait Foo { + fn foo(t: T, u: U) + + fn default_method(lhs: T, rhs: T) -> i32 { + lhs + lhs - (rhs + rhs) + } +} + +pub trait Add +{ + fn add(self, rhs: Rhs) -> Self + where RHS: Sub +} + + +pub trait Parse { + fn parse(mut self, mut parser: Parser) +} + +impl Parser + where S: TokenStream + Clone +{ + pub fn parse(mut self, mut scope: T, checkpoint: Option) -> (bool, Checkpoint) { + (SyntaxNode::new_root(self.builder.finish()), self.errors) + } +} + + +pub trait SubTrait: Parse + Add +where T: Add +{} \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/items/trait.snap b/crates/parser2/test_files/syntax_node/items/trait.snap new file mode 100644 index 0000000000..ea89210e32 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/items/trait.snap @@ -0,0 +1,552 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: node +input_file: crates/parser2/test_files/syntax_node/items/trait.fe +--- +Root@0..652 + 
ItemList@0..652 + Item@0..15 + Trait@0..15 + TraitKw@0..5 "trait" + WhiteSpace@5..6 " " + Ident@6..12 "Marker" + WhiteSpace@12..13 " " + TraitItemList@13..15 + LBrace@13..14 "{" + RBrace@14..15 "}" + Newline@15..17 "\n\n" + Item@17..182 + Trait@17..182 + ItemModifier@17..20 + PubKw@17..20 "pub" + WhiteSpace@20..21 " " + TraitKw@21..26 "trait" + WhiteSpace@26..27 " " + Ident@27..30 "Foo" + WhiteSpace@30..31 " " + TraitItemList@31..182 + LBrace@31..32 "{" + Newline@32..33 "\n" + WhiteSpace@33..37 " " + Func@37..79 + FnKw@37..39 "fn" + WhiteSpace@39..40 " " + Ident@40..43 "foo" + GenericParamList@43..67 + Lt@43..44 "<" + TypeGenericParam@44..52 + Ident@44..45 "T" + TypeBoundList@45..52 + Colon@45..46 ":" + WhiteSpace@46..47 " " + TypeBound@47..52 + TraitRef@47..52 + Path@47..52 + PathSegment@47..52 + Ident@47..52 "Trait" + Comma@52..53 "," + WhiteSpace@53..54 " " + ConstGenericParam@54..66 + ConstKw@54..59 "const" + WhiteSpace@59..60 " " + Ident@60..61 "U" + Colon@61..62 ":" + WhiteSpace@62..63 " " + PathType@63..66 + Path@63..66 + PathSegment@63..66 + Ident@63..66 "i32" + Gt@66..67 ">" + FuncParamList@67..79 + LParen@67..68 "(" + FnParam@68..72 + Ident@68..69 "t" + Colon@69..70 ":" + WhiteSpace@70..71 " " + PathType@71..72 + Path@71..72 + PathSegment@71..72 + Ident@71..72 "T" + Comma@72..73 "," + WhiteSpace@73..74 " " + FnParam@74..78 + Ident@74..75 "u" + Colon@75..76 ":" + WhiteSpace@76..77 " " + PathType@77..78 + Path@77..78 + PathSegment@77..78 + Ident@77..78 "U" + RParen@78..79 ")" + Newline@79..81 "\n\n" + WhiteSpace@81..85 " " + Func@85..180 + FnKw@85..87 "fn" + WhiteSpace@87..88 " " + Ident@88..102 "default_method" + GenericParamList@102..116 + Lt@102..103 "<" + TypeGenericParam@103..115 + Ident@103..104 "T" + TypeBoundList@104..115 + Colon@104..105 ":" + WhiteSpace@105..106 " " + TypeBound@106..109 + TraitRef@106..109 + Path@106..109 + PathSegment@106..109 + Ident@106..109 "Add" + WhiteSpace@109..110 " " + Plus@110..111 "+" + WhiteSpace@111..112 " " + 
TypeBound@112..115 + TraitRef@112..115 + Path@112..115 + PathSegment@112..115 + Ident@112..115 "Sub" + Gt@115..116 ">" + FuncParamList@116..132 + LParen@116..117 "(" + FnParam@117..123 + Ident@117..120 "lhs" + Colon@120..121 ":" + WhiteSpace@121..122 " " + PathType@122..123 + Path@122..123 + PathSegment@122..123 + Ident@122..123 "T" + Comma@123..124 "," + WhiteSpace@124..125 " " + FnParam@125..131 + Ident@125..128 "rhs" + Colon@128..129 ":" + WhiteSpace@129..130 " " + PathType@130..131 + Path@130..131 + PathSegment@130..131 + Ident@130..131 "T" + RParen@131..132 ")" + WhiteSpace@132..134 " " + Arrow@134..136 "->" + WhiteSpace@136..137 " " + PathType@137..140 + Path@137..140 + PathSegment@137..140 + Ident@137..140 "i32" + WhiteSpace@140..141 " " + BlockExpr@141..180 + LBrace@141..142 "{" + Newline@142..143 "\n" + WhiteSpace@143..151 " " + ExprStmt@151..174 + BinExpr@151..174 + BinExpr@151..160 + PathExpr@151..154 + Path@151..154 + PathSegment@151..154 + Ident@151..154 "lhs" + WhiteSpace@154..155 " " + Plus@155..156 "+" + WhiteSpace@156..157 " " + PathExpr@157..160 + Path@157..160 + PathSegment@157..160 + Ident@157..160 "lhs" + WhiteSpace@160..161 " " + Minus@161..162 "-" + WhiteSpace@162..163 " " + ParenExpr@163..174 + LParen@163..164 "(" + BinExpr@164..173 + PathExpr@164..167 + Path@164..167 + PathSegment@164..167 + Ident@164..167 "rhs" + WhiteSpace@167..168 " " + Plus@168..169 "+" + WhiteSpace@169..170 " " + PathExpr@170..173 + Path@170..173 + PathSegment@170..173 + Ident@170..173 "rhs" + RParen@173..174 ")" + Newline@174..175 "\n" + WhiteSpace@175..179 " " + RBrace@179..180 "}" + Newline@180..181 "\n" + RBrace@181..182 "}" + Newline@182..184 "\n\n" + Item@184..271 + Trait@184..271 + ItemModifier@184..187 + PubKw@184..187 "pub" + WhiteSpace@187..188 " " + TraitKw@188..193 "trait" + WhiteSpace@193..194 " " + Ident@194..197 "Add" + GenericParamList@197..207 + Lt@197..198 "<" + TypeGenericParam@198..206 + Ident@198..201 "RHS" + TypeBoundList@201..206 + Colon@201..202 
":" + WhiteSpace@202..203 " " + TypeBound@203..206 + TraitRef@203..206 + Path@203..206 + PathSegment@203..206 + Ident@203..206 "Add" + Gt@206..207 ">" + WhiteSpace@207..208 " " + Newline@208..209 "\n" + TraitItemList@209..271 + LBrace@209..210 "{" + Newline@210..211 "\n" + WhiteSpace@211..215 " " + Func@215..269 + FnKw@215..217 "fn" + WhiteSpace@217..218 " " + Ident@218..221 "add" + FuncParamList@221..237 + LParen@221..222 "(" + FnParam@222..226 + SelfKw@222..226 "self" + Comma@226..227 "," + WhiteSpace@227..228 " " + FnParam@228..236 + Ident@228..231 "rhs" + Colon@231..232 ":" + WhiteSpace@232..233 " " + PathType@233..236 + Path@233..236 + PathSegment@233..236 + Ident@233..236 "Rhs" + RParen@236..237 ")" + WhiteSpace@237..238 " " + Arrow@238..240 "->" + WhiteSpace@240..241 " " + SelfType@241..245 + SelfTypeKw@241..245 "Self" + WhiteSpace@245..246 " " + Newline@246..247 "\n" + WhiteSpace@247..255 " " + WhereClause@255..269 + WhereKw@255..260 "where" + WhiteSpace@260..261 " " + WherePredicate@261..269 + PathType@261..264 + Path@261..264 + PathSegment@261..264 + Ident@261..264 "RHS" + TypeBoundList@264..269 + Colon@264..265 ":" + WhiteSpace@265..266 " " + TypeBound@266..269 + TraitRef@266..269 + Path@266..269 + PathSegment@266..269 + Ident@266..269 "Sub" + Newline@269..270 "\n" + RBrace@270..271 "}" + Newline@271..274 "\n\n\n" + Item@274..355 + Trait@274..355 + ItemModifier@274..277 + PubKw@274..277 "pub" + WhiteSpace@277..278 " " + TraitKw@278..283 "trait" + WhiteSpace@283..284 " " + Ident@284..289 "Parse" + WhiteSpace@289..290 " " + TraitItemList@290..355 + LBrace@290..291 "{" + Newline@291..292 "\n" + WhiteSpace@292..296 " " + Func@296..353 + FnKw@296..298 "fn" + WhiteSpace@298..299 " " + Ident@299..304 "parse" + GenericParamList@304..320 + Lt@304..305 "<" + TypeGenericParam@305..319 + Ident@305..306 "S" + TypeBoundList@306..319 + Colon@306..307 ":" + WhiteSpace@307..308 " " + TypeBound@308..319 + TraitRef@308..319 + Path@308..319 + PathSegment@308..319 + 
Ident@308..319 "TokenStream" + Gt@319..320 ">" + FuncParamList@320..353 + LParen@320..321 "(" + FnParam@321..329 + MutKw@321..324 "mut" + WhiteSpace@324..325 " " + SelfKw@325..329 "self" + Comma@329..330 "," + WhiteSpace@330..331 " " + FnParam@331..352 + MutKw@331..334 "mut" + WhiteSpace@334..335 " " + Ident@335..341 "parser" + Colon@341..342 ":" + WhiteSpace@342..343 " " + PathType@343..352 + Path@343..352 + PathSegment@343..352 + Ident@343..349 "Parser" + GenericArgList@349..352 + Lt@349..350 "<" + TypeGenericArg@350..351 + PathType@350..351 + Path@350..351 + PathSegment@350..351 + Ident@350..351 "S" + Gt@351..352 ">" + RParen@352..353 ")" + Newline@353..354 "\n" + RBrace@354..355 "}" + Newline@355..357 "\n\n" + Item@357..592 + Impl@357..592 + ImplKw@357..361 "impl" + GenericParamList@361..364 + Lt@361..362 "<" + TypeGenericParam@362..363 + Ident@362..363 "S" + Gt@363..364 ">" + WhiteSpace@364..365 " " + PathType@365..374 + Path@365..374 + PathSegment@365..374 + Ident@365..371 "Parser" + GenericArgList@371..374 + Lt@371..372 "<" + TypeGenericArg@372..373 + PathType@372..373 + Path@372..373 + PathSegment@372..373 + Ident@372..373 "S" + Gt@373..374 ">" + WhiteSpace@374..375 " " + Newline@375..376 "\n" + WhiteSpace@376..380 " " + WhereClause@380..408 + WhereKw@380..385 "where" + WhiteSpace@385..386 " " + WherePredicate@386..408 + PathType@386..387 + Path@386..387 + PathSegment@386..387 + Ident@386..387 "S" + TypeBoundList@387..408 + Colon@387..388 ":" + WhiteSpace@388..389 " " + TypeBound@389..400 + TraitRef@389..400 + Path@389..400 + PathSegment@389..400 + Ident@389..400 "TokenStream" + WhiteSpace@400..401 " " + Plus@401..402 "+" + WhiteSpace@402..403 " " + TypeBound@403..408 + TraitRef@403..408 + Path@403..408 + PathSegment@403..408 + Ident@403..408 "Clone" + Newline@408..409 "\n" + ImplItemList@409..592 + LBrace@409..410 "{" + Newline@410..411 "\n" + WhiteSpace@411..415 " " + Func@415..590 + ItemModifier@415..418 + PubKw@415..418 "pub" + WhiteSpace@418..419 " " + 
FnKw@419..421 "fn" + WhiteSpace@421..422 " " + Ident@422..427 "parse" + GenericParamList@427..437 + Lt@427..428 "<" + TypeGenericParam@428..436 + Ident@428..429 "T" + TypeBoundList@429..436 + Colon@429..430 ":" + WhiteSpace@430..431 " " + TypeBound@431..436 + TraitRef@431..436 + Path@431..436 + PathSegment@431..436 + Ident@431..436 "Parse" + Gt@436..437 ">" + FuncParamList@437..493 + LParen@437..438 "(" + FnParam@438..446 + MutKw@438..441 "mut" + WhiteSpace@441..442 " " + SelfKw@442..446 "self" + Comma@446..447 "," + WhiteSpace@447..448 " " + FnParam@448..460 + MutKw@448..451 "mut" + WhiteSpace@451..452 " " + Ident@452..457 "scope" + Colon@457..458 ":" + WhiteSpace@458..459 " " + PathType@459..460 + Path@459..460 + PathSegment@459..460 + Ident@459..460 "T" + Comma@460..461 "," + WhiteSpace@461..462 " " + FnParam@462..492 + Ident@462..472 "checkpoint" + Colon@472..473 ":" + WhiteSpace@473..474 " " + PathType@474..492 + Path@474..492 + PathSegment@474..492 + Ident@474..480 "Option" + GenericArgList@480..492 + Lt@480..481 "<" + TypeGenericArg@481..491 + PathType@481..491 + Path@481..491 + PathSegment@481..491 + Ident@481..491 "Checkpoint" + Gt@491..492 ">" + RParen@492..493 ")" + WhiteSpace@493..494 " " + Arrow@494..496 "->" + WhiteSpace@496..497 " " + TupleType@497..515 + LParen@497..498 "(" + PathType@498..502 + Path@498..502 + PathSegment@498..502 + Ident@498..502 "bool" + Comma@502..503 "," + WhiteSpace@503..504 " " + PathType@504..514 + Path@504..514 + PathSegment@504..514 + Ident@504..514 "Checkpoint" + RParen@514..515 ")" + WhiteSpace@515..516 " " + BlockExpr@516..590 + LBrace@516..517 "{" + Newline@517..518 "\n" + WhiteSpace@518..526 " " + ExprStmt@526..584 + TupleExpr@526..584 + LParen@526..527 "(" + CallExpr@527..570 + PathExpr@527..547 + Path@527..547 + PathSegment@527..537 + Ident@527..537 "SyntaxNode" + Colon2@537..539 "::" + PathSegment@539..547 + Ident@539..547 "new_root" + CallArgList@547..570 + LParen@547..548 "(" + CallArg@548..569 + 
MethodCallExpr@548..569 + FieldExpr@548..560 + PathExpr@548..552 + Path@548..552 + PathSegment@548..552 + SelfKw@548..552 "self" + Dot@552..553 "." + Ident@553..560 "builder" + Dot@560..561 "." + Ident@561..567 "finish" + CallArgList@567..569 + LParen@567..568 "(" + RParen@568..569 ")" + RParen@569..570 ")" + Comma@570..571 "," + WhiteSpace@571..572 " " + FieldExpr@572..583 + PathExpr@572..576 + Path@572..576 + PathSegment@572..576 + SelfKw@572..576 "self" + Dot@576..577 "." + Ident@577..583 "errors" + RParen@583..584 ")" + Newline@584..585 "\n" + WhiteSpace@585..589 " " + RBrace@589..590 "}" + Newline@590..591 "\n" + RBrace@591..592 "}" + Newline@592..595 "\n\n\n" + Item@595..652 + Trait@595..652 + ItemModifier@595..598 + PubKw@595..598 "pub" + WhiteSpace@598..599 " " + TraitKw@599..604 "trait" + WhiteSpace@604..605 " " + Ident@605..613 "SubTrait" + GenericParamList@613..616 + Lt@613..614 "<" + TypeGenericParam@614..615 + Ident@614..615 "T" + Gt@615..616 ">" + SuperTraitList@616..632 + Colon@616..617 ":" + WhiteSpace@617..618 " " + TraitRef@618..623 + Path@618..623 + PathSegment@618..623 + Ident@618..623 "Parse" + WhiteSpace@623..624 " " + Plus@624..625 "+" + WhiteSpace@625..626 " " + TraitRef@626..632 + Path@626..632 + PathSegment@626..632 + Ident@626..629 "Add" + GenericArgList@629..632 + Lt@629..630 "<" + TypeGenericArg@630..631 + PathType@630..631 + Path@630..631 + PathSegment@630..631 + Ident@630..631 "T" + Gt@631..632 ">" + WhiteSpace@632..633 " " + Newline@633..634 "\n" + WhereClause@634..649 + WhereKw@634..639 "where" + WhiteSpace@639..640 " " + WherePredicate@640..649 + PathType@640..641 + Path@640..641 + PathSegment@640..641 + Ident@640..641 "T" + TypeBoundList@641..649 + Colon@641..642 ":" + WhiteSpace@642..643 " " + TypeBound@643..649 + TraitRef@643..649 + Path@643..649 + PathSegment@643..649 + Ident@643..646 "Add" + GenericArgList@646..649 + Lt@646..647 "<" + TypeGenericArg@647..648 + PathType@647..648 + Path@647..648 + PathSegment@647..648 + 
Ident@647..648 "T" + Gt@648..649 ">" + Newline@649..650 "\n" + TraitItemList@650..652 + LBrace@650..651 "{" + RBrace@651..652 "}" + diff --git a/crates/parser2/test_files/syntax_node/items/type.fe b/crates/parser2/test_files/syntax_node/items/type.fe new file mode 100644 index 0000000000..a0e1b67b43 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/items/type.fe @@ -0,0 +1,3 @@ +pub type Int = i32 + +type Result = Result \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/items/type.snap b/crates/parser2/test_files/syntax_node/items/type.snap new file mode 100644 index 0000000000..b93344028e --- /dev/null +++ b/crates/parser2/test_files/syntax_node/items/type.snap @@ -0,0 +1,57 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: node +input_file: crates/parser2/test_files/syntax_node/items/type.fe +--- +Root@0..54 + ItemList@0..54 + Item@0..18 + TypeAlias@0..18 + ItemModifier@0..3 + PubKw@0..3 "pub" + WhiteSpace@3..4 " " + TypeKw@4..8 "type" + WhiteSpace@8..9 " " + Ident@9..12 "Int" + WhiteSpace@12..13 " " + Eq@13..14 "=" + WhiteSpace@14..15 " " + PathType@15..18 + Path@15..18 + PathSegment@15..18 + Ident@15..18 "i32" + WhiteSpace@18..19 " " + Newline@19..21 "\n\n" + Item@21..54 + TypeAlias@21..54 + TypeKw@21..25 "type" + WhiteSpace@25..26 " " + Ident@26..32 "Result" + GenericParamList@32..35 + Lt@32..33 "<" + TypeGenericParam@33..34 + Ident@33..34 "T" + Gt@34..35 ">" + WhiteSpace@35..36 " " + Eq@36..37 "=" + WhiteSpace@37..38 " " + PathType@38..54 + Path@38..54 + PathSegment@38..54 + Ident@38..44 "Result" + GenericArgList@44..54 + Lt@44..45 "<" + TypeGenericArg@45..46 + PathType@45..46 + Path@45..46 + PathSegment@45..46 + Ident@45..46 "T" + Comma@46..47 "," + WhiteSpace@47..48 " " + TypeGenericArg@48..53 + PathType@48..53 + Path@48..53 + PathSegment@48..53 + Ident@48..53 "Error" + Gt@53..54 ">" + diff --git a/crates/parser2/test_files/syntax_node/items/use.fe b/crates/parser2/test_files/syntax_node/items/use.fe new 
file mode 100644 index 0000000000..bff0d3b26d --- /dev/null +++ b/crates/parser2/test_files/syntax_node/items/use.fe @@ -0,0 +1,16 @@ +use Foo::Bar +pub use Foo::Bar +use Foo::* +use Foo::Bar as Bar1 +use Foo::Trait as _ + +use Foo::{Foo, Bar} +use Foo::{self, Bar} +use Foo::{self, Bar as Bar1} +use Foo::{self as self_, Bar::{Bar as _, Baz}, *} + +use {Foo::Bar as Bar1, Bar::Bar as Bar2, Baz::Bar as Bar3, Trait::T} +use * + +use super::* +use ingot::Foo diff --git a/crates/parser2/test_files/syntax_node/items/use.snap b/crates/parser2/test_files/syntax_node/items/use.snap new file mode 100644 index 0000000000..d425d218c7 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/items/use.snap @@ -0,0 +1,299 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: node +input_file: crates/parser2/test_files/syntax_node/items/use.fe +--- +Root@0..308 + ItemList@0..307 + Item@0..12 + Use@0..12 + UseKw@0..3 "use" + WhiteSpace@3..4 " " + UseTree@4..12 + UsePath@4..12 + UsePathSegment@4..7 + Ident@4..7 "Foo" + Colon2@7..9 "::" + UsePathSegment@9..12 + Ident@9..12 "Bar" + Newline@12..13 "\n" + Item@13..29 + Use@13..29 + ItemModifier@13..16 + PubKw@13..16 "pub" + WhiteSpace@16..17 " " + UseKw@17..20 "use" + WhiteSpace@20..21 " " + UseTree@21..29 + UsePath@21..29 + UsePathSegment@21..24 + Ident@21..24 "Foo" + Colon2@24..26 "::" + UsePathSegment@26..29 + Ident@26..29 "Bar" + Newline@29..30 "\n" + Item@30..40 + Use@30..40 + UseKw@30..33 "use" + WhiteSpace@33..34 " " + UseTree@34..40 + UsePath@34..40 + UsePathSegment@34..37 + Ident@34..37 "Foo" + Colon2@37..39 "::" + UsePathSegment@39..40 + Star@39..40 "*" + Newline@40..41 "\n" + Item@41..61 + Use@41..61 + UseKw@41..44 "use" + WhiteSpace@44..45 " " + UseTree@45..61 + UsePath@45..53 + UsePathSegment@45..48 + Ident@45..48 "Foo" + Colon2@48..50 "::" + UsePathSegment@50..53 + Ident@50..53 "Bar" + WhiteSpace@53..54 " " + UseTreeRename@54..61 + AsKw@54..56 "as" + WhiteSpace@56..57 " " + Ident@57..61 "Bar1" + Newline@61..62 
"\n" + Item@62..81 + Use@62..81 + UseKw@62..65 "use" + WhiteSpace@65..66 " " + UseTree@66..81 + UsePath@66..76 + UsePathSegment@66..69 + Ident@66..69 "Foo" + Colon2@69..71 "::" + UsePathSegment@71..76 + Ident@71..76 "Trait" + WhiteSpace@76..77 " " + UseTreeRename@77..81 + AsKw@77..79 "as" + WhiteSpace@79..80 " " + Underscore@80..81 "_" + Newline@81..83 "\n\n" + Item@83..102 + Use@83..102 + UseKw@83..86 "use" + WhiteSpace@86..87 " " + UseTree@87..102 + UsePath@87..90 + UsePathSegment@87..90 + Ident@87..90 "Foo" + Colon2@90..92 "::" + UseTreeList@92..102 + LBrace@92..93 "{" + UseTree@93..96 + UsePath@93..96 + UsePathSegment@93..96 + Ident@93..96 "Foo" + Comma@96..97 "," + WhiteSpace@97..98 " " + UseTree@98..101 + UsePath@98..101 + UsePathSegment@98..101 + Ident@98..101 "Bar" + RBrace@101..102 "}" + Newline@102..103 "\n" + Item@103..123 + Use@103..123 + UseKw@103..106 "use" + WhiteSpace@106..107 " " + UseTree@107..123 + UsePath@107..110 + UsePathSegment@107..110 + Ident@107..110 "Foo" + Colon2@110..112 "::" + UseTreeList@112..123 + LBrace@112..113 "{" + UseTree@113..117 + UsePath@113..117 + UsePathSegment@113..117 + SelfKw@113..117 "self" + Comma@117..118 "," + WhiteSpace@118..119 " " + UseTree@119..122 + UsePath@119..122 + UsePathSegment@119..122 + Ident@119..122 "Bar" + RBrace@122..123 "}" + Newline@123..124 "\n" + Item@124..152 + Use@124..152 + UseKw@124..127 "use" + WhiteSpace@127..128 " " + UseTree@128..152 + UsePath@128..131 + UsePathSegment@128..131 + Ident@128..131 "Foo" + Colon2@131..133 "::" + UseTreeList@133..152 + LBrace@133..134 "{" + UseTree@134..138 + UsePath@134..138 + UsePathSegment@134..138 + SelfKw@134..138 "self" + Comma@138..139 "," + WhiteSpace@139..140 " " + UseTree@140..151 + UsePath@140..143 + UsePathSegment@140..143 + Ident@140..143 "Bar" + WhiteSpace@143..144 " " + UseTreeRename@144..151 + AsKw@144..146 "as" + WhiteSpace@146..147 " " + Ident@147..151 "Bar1" + RBrace@151..152 "}" + Newline@152..153 "\n" + Item@153..202 + Use@153..202 + 
UseKw@153..156 "use" + WhiteSpace@156..157 " " + UseTree@157..202 + UsePath@157..160 + UsePathSegment@157..160 + Ident@157..160 "Foo" + Colon2@160..162 "::" + UseTreeList@162..202 + LBrace@162..163 "{" + UseTree@163..176 + UsePath@163..167 + UsePathSegment@163..167 + SelfKw@163..167 "self" + WhiteSpace@167..168 " " + UseTreeRename@168..176 + AsKw@168..170 "as" + WhiteSpace@170..171 " " + Ident@171..176 "self_" + Comma@176..177 "," + WhiteSpace@177..178 " " + UseTree@178..198 + UsePath@178..181 + UsePathSegment@178..181 + Ident@178..181 "Bar" + Colon2@181..183 "::" + UseTreeList@183..198 + LBrace@183..184 "{" + UseTree@184..192 + UsePath@184..187 + UsePathSegment@184..187 + Ident@184..187 "Bar" + WhiteSpace@187..188 " " + UseTreeRename@188..192 + AsKw@188..190 "as" + WhiteSpace@190..191 " " + Underscore@191..192 "_" + Comma@192..193 "," + WhiteSpace@193..194 " " + UseTree@194..197 + UsePath@194..197 + UsePathSegment@194..197 + Ident@194..197 "Baz" + RBrace@197..198 "}" + Comma@198..199 "," + WhiteSpace@199..200 " " + UseTree@200..201 + UsePath@200..201 + UsePathSegment@200..201 + Star@200..201 "*" + RBrace@201..202 "}" + Newline@202..204 "\n\n" + Item@204..272 + Use@204..272 + UseKw@204..207 "use" + WhiteSpace@207..208 " " + UseTree@208..272 + UseTreeList@208..272 + LBrace@208..209 "{" + UseTree@209..225 + UsePath@209..217 + UsePathSegment@209..212 + Ident@209..212 "Foo" + Colon2@212..214 "::" + UsePathSegment@214..217 + Ident@214..217 "Bar" + WhiteSpace@217..218 " " + UseTreeRename@218..225 + AsKw@218..220 "as" + WhiteSpace@220..221 " " + Ident@221..225 "Bar1" + Comma@225..226 "," + WhiteSpace@226..227 " " + UseTree@227..243 + UsePath@227..235 + UsePathSegment@227..230 + Ident@227..230 "Bar" + Colon2@230..232 "::" + UsePathSegment@232..235 + Ident@232..235 "Bar" + WhiteSpace@235..236 " " + UseTreeRename@236..243 + AsKw@236..238 "as" + WhiteSpace@238..239 " " + Ident@239..243 "Bar2" + Comma@243..244 "," + WhiteSpace@244..245 " " + UseTree@245..261 + UsePath@245..253 
+ UsePathSegment@245..248 + Ident@245..248 "Baz" + Colon2@248..250 "::" + UsePathSegment@250..253 + Ident@250..253 "Bar" + WhiteSpace@253..254 " " + UseTreeRename@254..261 + AsKw@254..256 "as" + WhiteSpace@256..257 " " + Ident@257..261 "Bar3" + Comma@261..262 "," + WhiteSpace@262..263 " " + UseTree@263..271 + UsePath@263..271 + UsePathSegment@263..268 + Ident@263..268 "Trait" + Colon2@268..270 "::" + UsePathSegment@270..271 + Ident@270..271 "T" + RBrace@271..272 "}" + Newline@272..273 "\n" + Item@273..278 + Use@273..278 + UseKw@273..276 "use" + WhiteSpace@276..277 " " + UseTree@277..278 + UsePath@277..278 + UsePathSegment@277..278 + Star@277..278 "*" + Newline@278..280 "\n\n" + Item@280..292 + Use@280..292 + UseKw@280..283 "use" + WhiteSpace@283..284 " " + UseTree@284..292 + UsePath@284..292 + UsePathSegment@284..289 + SuperKw@284..289 "super" + Colon2@289..291 "::" + UsePathSegment@291..292 + Star@291..292 "*" + Newline@292..293 "\n" + Item@293..307 + Use@293..307 + UseKw@293..296 "use" + WhiteSpace@296..297 " " + UseTree@297..307 + UsePath@297..307 + UsePathSegment@297..302 + IngotKw@297..302 "ingot" + Colon2@302..304 "::" + UsePathSegment@304..307 + Ident@304..307 "Foo" + Newline@307..308 "\n" + diff --git a/crates/parser2/test_files/syntax_node/pats/lit.fe b/crates/parser2/test_files/syntax_node/pats/lit.fe new file mode 100644 index 0000000000..73671527c0 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/pats/lit.fe @@ -0,0 +1,2 @@ +0x1 +"String" \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/pats/lit.snap b/crates/parser2/test_files/syntax_node/pats/lit.snap new file mode 100644 index 0000000000..6f652353b6 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/pats/lit.snap @@ -0,0 +1,14 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: node +input_file: crates/parser2/test_files/syntax_node/pats/lit.fe +--- +Root@0..12 + LitPat@0..3 + Lit@0..3 + Int@0..3 "0x1" + Newline@3..4 "\n" + LitPat@4..12 + 
Lit@4..12 + String@4..12 "\"String\"" + diff --git a/crates/parser2/test_files/syntax_node/pats/or.fe b/crates/parser2/test_files/syntax_node/pats/or.fe new file mode 100644 index 0000000000..8cd692f4c1 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/pats/or.fe @@ -0,0 +1,5 @@ +Foo::Bar | FOO::Baz + +Foo::Bar(1 | 2) | Foo::Baz(..) + +Foo::Bar(1 | 2) | Foo::Baz(Foo::Bar(1 | 2) | Bar::Baz("STRING")) \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/pats/or.snap b/crates/parser2/test_files/syntax_node/pats/or.snap new file mode 100644 index 0000000000..2a8c75fd6c --- /dev/null +++ b/crates/parser2/test_files/syntax_node/pats/or.snap @@ -0,0 +1,134 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: node +input_file: crates/parser2/test_files/syntax_node/pats/or.fe +--- +Root@0..117 + OrPat@0..19 + PathPat@0..8 + Path@0..8 + PathSegment@0..3 + Ident@0..3 "Foo" + Colon2@3..5 "::" + PathSegment@5..8 + Ident@5..8 "Bar" + WhiteSpace@8..9 " " + Pipe@9..10 "|" + WhiteSpace@10..11 " " + PathPat@11..19 + Path@11..19 + PathSegment@11..14 + Ident@11..14 "FOO" + Colon2@14..16 "::" + PathSegment@16..19 + Ident@16..19 "Baz" + Newline@19..21 "\n\n" + OrPat@21..51 + PathTuplePat@21..36 + Path@21..29 + PathSegment@21..24 + Ident@21..24 "Foo" + Colon2@24..26 "::" + PathSegment@26..29 + Ident@26..29 "Bar" + TuplePatElemList@29..36 + LParen@29..30 "(" + OrPat@30..35 + LitPat@30..31 + Lit@30..31 + Int@30..31 "1" + WhiteSpace@31..32 " " + Pipe@32..33 "|" + WhiteSpace@33..34 " " + LitPat@34..35 + Lit@34..35 + Int@34..35 "2" + RParen@35..36 ")" + WhiteSpace@36..37 " " + Pipe@37..38 "|" + WhiteSpace@38..39 " " + PathTuplePat@39..51 + Path@39..47 + PathSegment@39..42 + Ident@39..42 "Foo" + Colon2@42..44 "::" + PathSegment@44..47 + Ident@44..47 "Baz" + TuplePatElemList@47..51 + LParen@47..48 "(" + RestPat@48..50 + Dot2@48..50 ".." 
+ RParen@50..51 ")" + Newline@51..53 "\n\n" + OrPat@53..117 + PathTuplePat@53..68 + Path@53..61 + PathSegment@53..56 + Ident@53..56 "Foo" + Colon2@56..58 "::" + PathSegment@58..61 + Ident@58..61 "Bar" + TuplePatElemList@61..68 + LParen@61..62 "(" + OrPat@62..67 + LitPat@62..63 + Lit@62..63 + Int@62..63 "1" + WhiteSpace@63..64 " " + Pipe@64..65 "|" + WhiteSpace@65..66 " " + LitPat@66..67 + Lit@66..67 + Int@66..67 "2" + RParen@67..68 ")" + WhiteSpace@68..69 " " + Pipe@69..70 "|" + WhiteSpace@70..71 " " + PathTuplePat@71..117 + Path@71..79 + PathSegment@71..74 + Ident@71..74 "Foo" + Colon2@74..76 "::" + PathSegment@76..79 + Ident@76..79 "Baz" + TuplePatElemList@79..117 + LParen@79..80 "(" + OrPat@80..116 + PathTuplePat@80..95 + Path@80..88 + PathSegment@80..83 + Ident@80..83 "Foo" + Colon2@83..85 "::" + PathSegment@85..88 + Ident@85..88 "Bar" + TuplePatElemList@88..95 + LParen@88..89 "(" + OrPat@89..94 + LitPat@89..90 + Lit@89..90 + Int@89..90 "1" + WhiteSpace@90..91 " " + Pipe@91..92 "|" + WhiteSpace@92..93 " " + LitPat@93..94 + Lit@93..94 + Int@93..94 "2" + RParen@94..95 ")" + WhiteSpace@95..96 " " + Pipe@96..97 "|" + WhiteSpace@97..98 " " + PathTuplePat@98..116 + Path@98..106 + PathSegment@98..101 + Ident@98..101 "Bar" + Colon2@101..103 "::" + PathSegment@103..106 + Ident@103..106 "Baz" + TuplePatElemList@106..116 + LParen@106..107 "(" + LitPat@107..115 + Lit@107..115 + String@107..115 "\"STRING\"" + RParen@115..116 ")" + RParen@116..117 ")" + diff --git a/crates/parser2/test_files/syntax_node/pats/path.fe b/crates/parser2/test_files/syntax_node/pats/path.fe new file mode 100644 index 0000000000..7e28afe8ee --- /dev/null +++ b/crates/parser2/test_files/syntax_node/pats/path.fe @@ -0,0 +1 @@ +MyEnum::Foo \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/pats/path.snap b/crates/parser2/test_files/syntax_node/pats/path.snap new file mode 100644 index 0000000000..1e7f37ef48 --- /dev/null +++ 
b/crates/parser2/test_files/syntax_node/pats/path.snap @@ -0,0 +1,13 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: snapshot +--- +Root@0..11 + PathPat@0..11 + Path@0..11 + PathSegment@0..6 + Ident@0..6 "MyEnum" + Colon2@6..8 "::" + PathSegment@8..11 + Ident@8..11 "Foo" + diff --git a/crates/parser2/test_files/syntax_node/pats/path_tuple.fe b/crates/parser2/test_files/syntax_node/pats/path_tuple.fe new file mode 100644 index 0000000000..8a87ada08d --- /dev/null +++ b/crates/parser2/test_files/syntax_node/pats/path_tuple.fe @@ -0,0 +1,16 @@ +Empty() + +MyEnum::Empty() + +MyEnum::Foo(X::Foo, Z::Bar(1, 2), _, ..) + +MyEnum::Foo2( + X::Foo, + Z::Bar(1, 2), + _, + .. +) + +MyEnum::Bind(x) + +MyEnum::OrTuple(Int::I32 | Int::I64 | Int::Any(10)) \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/pats/path_tuple.snap b/crates/parser2/test_files/syntax_node/pats/path_tuple.snap new file mode 100644 index 0000000000..01dd16e43a --- /dev/null +++ b/crates/parser2/test_files/syntax_node/pats/path_tuple.snap @@ -0,0 +1,187 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: node +input_file: crates/parser2/test_files/syntax_node/pats/path_tuple.fe +--- +Root@0..203 + PathTuplePat@0..7 + Path@0..5 + PathSegment@0..5 + Ident@0..5 "Empty" + TuplePatElemList@5..7 + LParen@5..6 "(" + RParen@6..7 ")" + Newline@7..9 "\n\n" + PathTuplePat@9..24 + Path@9..22 + PathSegment@9..15 + Ident@9..15 "MyEnum" + Colon2@15..17 "::" + PathSegment@17..22 + Ident@17..22 "Empty" + TuplePatElemList@22..24 + LParen@22..23 "(" + RParen@23..24 ")" + Newline@24..26 "\n\n" + PathTuplePat@26..67 + Path@26..37 + PathSegment@26..32 + Ident@26..32 "MyEnum" + Colon2@32..34 "::" + PathSegment@34..37 + Ident@34..37 "Foo" + TuplePatElemList@37..67 + LParen@37..38 "(" + PathPat@38..44 + Path@38..44 + PathSegment@38..39 + Ident@38..39 "X" + Colon2@39..41 "::" + PathSegment@41..44 + Ident@41..44 "Foo" + Comma@44..45 "," + WhiteSpace@45..46 " " + 
PathTuplePat@46..58 + Path@46..52 + PathSegment@46..47 + Ident@46..47 "Z" + Colon2@47..49 "::" + PathSegment@49..52 + Ident@49..52 "Bar" + TuplePatElemList@52..58 + LParen@52..53 "(" + LitPat@53..54 + Lit@53..54 + Int@53..54 "1" + Comma@54..55 "," + WhiteSpace@55..56 " " + LitPat@56..57 + Lit@56..57 + Int@56..57 "2" + RParen@57..58 ")" + Comma@58..59 "," + WhiteSpace@59..61 " " + WildCardPat@61..62 + Underscore@61..62 "_" + Comma@62..63 "," + WhiteSpace@63..64 " " + RestPat@64..66 + Dot2@64..66 ".." + RParen@66..67 ")" + Newline@67..69 "\n\n" + PathTuplePat@69..133 + Path@69..81 + PathSegment@69..75 + Ident@69..75 "MyEnum" + Colon2@75..77 "::" + PathSegment@77..81 + Ident@77..81 "Foo2" + TuplePatElemList@81..133 + LParen@81..82 "(" + Newline@82..83 "\n" + WhiteSpace@83..87 " " + PathPat@87..93 + Path@87..93 + PathSegment@87..88 + Ident@87..88 "X" + Colon2@88..90 "::" + PathSegment@90..93 + Ident@90..93 "Foo" + Comma@93..94 "," + WhiteSpace@94..95 " " + Newline@95..96 "\n" + WhiteSpace@96..100 " " + PathTuplePat@100..112 + Path@100..106 + PathSegment@100..101 + Ident@100..101 "Z" + Colon2@101..103 "::" + PathSegment@103..106 + Ident@103..106 "Bar" + TuplePatElemList@106..112 + LParen@106..107 "(" + LitPat@107..108 + Lit@107..108 + Int@107..108 "1" + Comma@108..109 "," + WhiteSpace@109..110 " " + LitPat@110..111 + Lit@110..111 + Int@110..111 "2" + RParen@111..112 ")" + Comma@112..113 "," + WhiteSpace@113..114 " " + Newline@114..115 "\n" + WhiteSpace@115..120 " " + WildCardPat@120..121 + Underscore@120..121 "_" + Comma@121..122 "," + WhiteSpace@122..123 " " + Newline@123..124 "\n" + WhiteSpace@124..129 " " + RestPat@129..131 + Dot2@129..131 ".." 
+ Newline@131..132 "\n" + RParen@132..133 ")" + Newline@133..135 "\n\n" + PathTuplePat@135..150 + Path@135..147 + PathSegment@135..141 + Ident@135..141 "MyEnum" + Colon2@141..143 "::" + PathSegment@143..147 + Ident@143..147 "Bind" + TuplePatElemList@147..150 + LParen@147..148 "(" + PathPat@148..149 + Path@148..149 + PathSegment@148..149 + Ident@148..149 "x" + RParen@149..150 ")" + Newline@150..152 "\n\n" + PathTuplePat@152..203 + Path@152..167 + PathSegment@152..158 + Ident@152..158 "MyEnum" + Colon2@158..160 "::" + PathSegment@160..167 + Ident@160..167 "OrTuple" + TuplePatElemList@167..203 + LParen@167..168 "(" + OrPat@168..202 + PathPat@168..176 + Path@168..176 + PathSegment@168..171 + Ident@168..171 "Int" + Colon2@171..173 "::" + PathSegment@173..176 + Ident@173..176 "I32" + WhiteSpace@176..177 " " + Pipe@177..178 "|" + WhiteSpace@178..179 " " + OrPat@179..202 + PathPat@179..187 + Path@179..187 + PathSegment@179..182 + Ident@179..182 "Int" + Colon2@182..184 "::" + PathSegment@184..187 + Ident@184..187 "I64" + WhiteSpace@187..188 " " + Pipe@188..189 "|" + WhiteSpace@189..190 " " + PathTuplePat@190..202 + Path@190..198 + PathSegment@190..193 + Ident@190..193 "Int" + Colon2@193..195 "::" + PathSegment@195..198 + Ident@195..198 "Any" + TuplePatElemList@198..202 + LParen@198..199 "(" + LitPat@199..201 + Lit@199..201 + Int@199..201 "10" + RParen@201..202 ")" + RParen@202..203 ")" + diff --git a/crates/parser2/test_files/syntax_node/pats/record.fe b/crates/parser2/test_files/syntax_node/pats/record.fe new file mode 100644 index 0000000000..e207b4de03 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/pats/record.fe @@ -0,0 +1,7 @@ +Record {} + +foo::Empty { } + +Record { a, b } +Record { a: x, b: y } +Record {x: (1, a), Foo {x, y} } \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/pats/record.snap b/crates/parser2/test_files/syntax_node/pats/record.snap new file mode 100644 index 0000000000..cc833d76ce --- /dev/null +++ 
b/crates/parser2/test_files/syntax_node/pats/record.snap @@ -0,0 +1,130 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: node +input_file: crates/parser2/test_files/syntax_node/pats/record.fe +--- +Root@0..96 + RecordPat@0..9 + Path@0..6 + PathSegment@0..6 + Ident@0..6 "Record" + WhiteSpace@6..7 " " + RecordPatFieldList@7..9 + LBrace@7..8 "{" + RBrace@8..9 "}" + Newline@9..11 "\n\n" + RecordPat@11..25 + Path@11..21 + PathSegment@11..14 + Ident@11..14 "foo" + Colon2@14..16 "::" + PathSegment@16..21 + Ident@16..21 "Empty" + WhiteSpace@21..22 " " + RecordPatFieldList@22..25 + LBrace@22..23 "{" + WhiteSpace@23..24 " " + RBrace@24..25 "}" + Newline@25..27 "\n\n" + RecordPat@27..42 + Path@27..33 + PathSegment@27..33 + Ident@27..33 "Record" + WhiteSpace@33..34 " " + RecordPatFieldList@34..42 + LBrace@34..35 "{" + WhiteSpace@35..36 " " + RecordPatField@36..37 + PathPat@36..37 + Path@36..37 + PathSegment@36..37 + Ident@36..37 "a" + Comma@37..38 "," + WhiteSpace@38..39 " " + RecordPatField@39..40 + PathPat@39..40 + Path@39..40 + PathSegment@39..40 + Ident@39..40 "b" + WhiteSpace@40..41 " " + RBrace@41..42 "}" + Newline@42..43 "\n" + RecordPat@43..64 + Path@43..49 + PathSegment@43..49 + Ident@43..49 "Record" + WhiteSpace@49..50 " " + RecordPatFieldList@50..64 + LBrace@50..51 "{" + WhiteSpace@51..52 " " + RecordPatField@52..56 + Ident@52..53 "a" + Colon@53..54 ":" + WhiteSpace@54..55 " " + PathPat@55..56 + Path@55..56 + PathSegment@55..56 + Ident@55..56 "x" + Comma@56..57 "," + WhiteSpace@57..58 " " + RecordPatField@58..62 + Ident@58..59 "b" + Colon@59..60 ":" + WhiteSpace@60..61 " " + PathPat@61..62 + Path@61..62 + PathSegment@61..62 + Ident@61..62 "y" + WhiteSpace@62..63 " " + RBrace@63..64 "}" + Newline@64..65 "\n" + RecordPat@65..96 + Path@65..71 + PathSegment@65..71 + Ident@65..71 "Record" + WhiteSpace@71..72 " " + RecordPatFieldList@72..96 + LBrace@72..73 "{" + RecordPatField@73..82 + Ident@73..74 "x" + Colon@74..75 ":" + WhiteSpace@75..76 " " + 
TuplePat@76..82 + TuplePatElemList@76..82 + LParen@76..77 "(" + LitPat@77..78 + Lit@77..78 + Int@77..78 "1" + Comma@78..79 "," + WhiteSpace@79..80 " " + PathPat@80..81 + Path@80..81 + PathSegment@80..81 + Ident@80..81 "a" + RParen@81..82 ")" + Comma@82..83 "," + WhiteSpace@83..84 " " + RecordPatField@84..94 + RecordPat@84..94 + Path@84..87 + PathSegment@84..87 + Ident@84..87 "Foo" + WhiteSpace@87..88 " " + RecordPatFieldList@88..94 + LBrace@88..89 "{" + RecordPatField@89..90 + PathPat@89..90 + Path@89..90 + PathSegment@89..90 + Ident@89..90 "x" + Comma@90..91 "," + WhiteSpace@91..92 " " + RecordPatField@92..93 + PathPat@92..93 + Path@92..93 + PathSegment@92..93 + Ident@92..93 "y" + RBrace@93..94 "}" + WhiteSpace@94..95 " " + RBrace@95..96 "}" + diff --git a/crates/parser2/test_files/syntax_node/pats/rest_pattern.fe b/crates/parser2/test_files/syntax_node/pats/rest_pattern.fe new file mode 100644 index 0000000000..a96aa0ea9d --- /dev/null +++ b/crates/parser2/test_files/syntax_node/pats/rest_pattern.fe @@ -0,0 +1 @@ +.. \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/pats/rest_pattern.snap b/crates/parser2/test_files/syntax_node/pats/rest_pattern.snap new file mode 100644 index 0000000000..caf7a706ba --- /dev/null +++ b/crates/parser2/test_files/syntax_node/pats/rest_pattern.snap @@ -0,0 +1,8 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: snapshot +--- +Root@0..2 + RestPat@0..2 + Dot2@0..2 ".." 
+ diff --git a/crates/parser2/test_files/syntax_node/pats/wilecard.fe b/crates/parser2/test_files/syntax_node/pats/wilecard.fe new file mode 100644 index 0000000000..c9cdc63b07 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/pats/wilecard.fe @@ -0,0 +1 @@ +_ \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/pats/wilecard.snap b/crates/parser2/test_files/syntax_node/pats/wilecard.snap new file mode 100644 index 0000000000..e1751174b5 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/pats/wilecard.snap @@ -0,0 +1,8 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: snapshot +--- +Root@0..1 + WildCardPat@0..1 + Underscore@0..1 "_" + diff --git a/crates/parser2/test_files/syntax_node/stmts/assign.fe b/crates/parser2/test_files/syntax_node/stmts/assign.fe new file mode 100644 index 0000000000..42a9d86103 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/stmts/assign.fe @@ -0,0 +1,2 @@ +x = 1 +Foo{x, y} = foo \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/stmts/assign.snap b/crates/parser2/test_files/syntax_node/stmts/assign.snap new file mode 100644 index 0000000000..a2786ec300 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/stmts/assign.snap @@ -0,0 +1,48 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: node +input_file: crates/parser2/test_files/syntax_node/stmts/assign.fe +--- +Root@0..21 + ExprStmt@0..5 + AssignExpr@0..5 + PathExpr@0..1 + Path@0..1 + PathSegment@0..1 + Ident@0..1 "x" + WhiteSpace@1..2 " " + Eq@2..3 "=" + WhiteSpace@3..4 " " + LitExpr@4..5 + Lit@4..5 + Int@4..5 "1" + Newline@5..6 "\n" + ExprStmt@6..21 + AssignExpr@6..21 + RecordInitExpr@6..15 + Path@6..9 + PathSegment@6..9 + Ident@6..9 "Foo" + RecordFieldList@9..15 + LBrace@9..10 "{" + RecordField@10..11 + PathExpr@10..11 + Path@10..11 + PathSegment@10..11 + Ident@10..11 "x" + Comma@11..12 "," + WhiteSpace@12..13 " " + RecordField@13..14 + PathExpr@13..14 + Path@13..14 + 
PathSegment@13..14 + Ident@13..14 "y" + RBrace@14..15 "}" + WhiteSpace@15..16 " " + Eq@16..17 "=" + WhiteSpace@17..18 " " + PathExpr@18..21 + Path@18..21 + PathSegment@18..21 + Ident@18..21 "foo" + diff --git a/crates/parser2/test_files/syntax_node/stmts/for.fe b/crates/parser2/test_files/syntax_node/stmts/for.fe new file mode 100644 index 0000000000..3804fce951 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/stmts/for.fe @@ -0,0 +1,7 @@ +for i in arr { + sum = sum + i +} + +for Struct {x, y} in s_list.iter() { + sum = sum + x + y +} \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/stmts/for.snap b/crates/parser2/test_files/syntax_node/stmts/for.snap new file mode 100644 index 0000000000..8f67d4e018 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/stmts/for.snap @@ -0,0 +1,122 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: node +input_file: crates/parser2/test_files/syntax_node/stmts/for.fe +--- +Root@0..96 + ForStmt@0..34 + ForKw@0..3 "for" + WhiteSpace@3..4 " " + PathPat@4..5 + Path@4..5 + PathSegment@4..5 + Ident@4..5 "i" + WhiteSpace@5..6 " " + InKw@6..8 "in" + WhiteSpace@8..9 " " + PathExpr@9..12 + Path@9..12 + PathSegment@9..12 + Ident@9..12 "arr" + WhiteSpace@12..13 " " + BlockExpr@13..34 + LBrace@13..14 "{" + Newline@14..15 "\n" + WhiteSpace@15..19 " " + ExprStmt@19..32 + AssignExpr@19..32 + PathExpr@19..22 + Path@19..22 + PathSegment@19..22 + Ident@19..22 "sum" + WhiteSpace@22..23 " " + Eq@23..24 "=" + WhiteSpace@24..25 " " + BinExpr@25..32 + PathExpr@25..28 + Path@25..28 + PathSegment@25..28 + Ident@25..28 "sum" + WhiteSpace@28..29 " " + Plus@29..30 "+" + WhiteSpace@30..31 " " + PathExpr@31..32 + Path@31..32 + PathSegment@31..32 + Ident@31..32 "i" + Newline@32..33 "\n" + RBrace@33..34 "}" + Newline@34..36 "\n\n" + ForStmt@36..96 + ForKw@36..39 "for" + WhiteSpace@39..40 " " + RecordPat@40..53 + Path@40..46 + PathSegment@40..46 + Ident@40..46 "Struct" + WhiteSpace@46..47 " " + 
RecordPatFieldList@47..53 + LBrace@47..48 "{" + RecordPatField@48..49 + PathPat@48..49 + Path@48..49 + PathSegment@48..49 + Ident@48..49 "x" + Comma@49..50 "," + WhiteSpace@50..51 " " + RecordPatField@51..52 + PathPat@51..52 + Path@51..52 + PathSegment@51..52 + Ident@51..52 "y" + RBrace@52..53 "}" + WhiteSpace@53..54 " " + InKw@54..56 "in" + WhiteSpace@56..57 " " + MethodCallExpr@57..70 + PathExpr@57..63 + Path@57..63 + PathSegment@57..63 + Ident@57..63 "s_list" + Dot@63..64 "." + Ident@64..68 "iter" + CallArgList@68..70 + LParen@68..69 "(" + RParen@69..70 ")" + WhiteSpace@70..71 " " + BlockExpr@71..96 + LBrace@71..72 "{" + Newline@72..73 "\n" + WhiteSpace@73..77 " " + ExprStmt@77..94 + AssignExpr@77..94 + PathExpr@77..80 + Path@77..80 + PathSegment@77..80 + Ident@77..80 "sum" + WhiteSpace@80..81 " " + Eq@81..82 "=" + WhiteSpace@82..83 " " + BinExpr@83..94 + BinExpr@83..90 + PathExpr@83..86 + Path@83..86 + PathSegment@83..86 + Ident@83..86 "sum" + WhiteSpace@86..87 " " + Plus@87..88 "+" + WhiteSpace@88..89 " " + PathExpr@89..90 + Path@89..90 + PathSegment@89..90 + Ident@89..90 "x" + WhiteSpace@90..91 " " + Plus@91..92 "+" + WhiteSpace@92..93 " " + PathExpr@93..94 + Path@93..94 + PathSegment@93..94 + Ident@93..94 "y" + Newline@94..95 "\n" + RBrace@95..96 "}" + diff --git a/crates/parser2/test_files/syntax_node/stmts/let.fe b/crates/parser2/test_files/syntax_node/stmts/let.fe new file mode 100644 index 0000000000..4582f9b69c --- /dev/null +++ b/crates/parser2/test_files/syntax_node/stmts/let.fe @@ -0,0 +1,23 @@ +let x + +let x = 1 +let x: i32 = 1 +let mut x: i32 = 1 + +x += 1 + 1 +y <<= 1 >> 2 + +let MyEnum::Foo(x, y) = e + +let S {x, y: z} = s + +let x = if b { + y +} else { + z +} + +let x = match b { + MyEnum::A(x) | MyEnum::B(x) => x + _ => 0 +} \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/stmts/let.snap b/crates/parser2/test_files/syntax_node/stmts/let.snap new file mode 100644 index 0000000000..ffd3cf7c12 --- /dev/null +++ 
b/crates/parser2/test_files/syntax_node/stmts/let.snap @@ -0,0 +1,301 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: node +input_file: crates/parser2/test_files/syntax_node/stmts/let.fe +--- +Root@0..231 + LetStmt@0..5 + LetKw@0..3 "let" + WhiteSpace@3..4 " " + PathPat@4..5 + Path@4..5 + PathSegment@4..5 + Ident@4..5 "x" + Newline@5..7 "\n\n" + LetStmt@7..16 + LetKw@7..10 "let" + WhiteSpace@10..11 " " + PathPat@11..12 + Path@11..12 + PathSegment@11..12 + Ident@11..12 "x" + WhiteSpace@12..13 " " + Eq@13..14 "=" + WhiteSpace@14..15 " " + LitExpr@15..16 + Lit@15..16 + Int@15..16 "1" + Newline@16..17 "\n" + LetStmt@17..31 + LetKw@17..20 "let" + WhiteSpace@20..21 " " + PathPat@21..22 + Path@21..22 + PathSegment@21..22 + Ident@21..22 "x" + Colon@22..23 ":" + WhiteSpace@23..24 " " + PathType@24..27 + Path@24..27 + PathSegment@24..27 + Ident@24..27 "i32" + WhiteSpace@27..28 " " + Eq@28..29 "=" + WhiteSpace@29..30 " " + LitExpr@30..31 + Lit@30..31 + Int@30..31 "1" + Newline@31..32 "\n" + LetStmt@32..50 + LetKw@32..35 "let" + WhiteSpace@35..36 " " + PathPat@36..41 + MutKw@36..39 "mut" + WhiteSpace@39..40 " " + Path@40..41 + PathSegment@40..41 + Ident@40..41 "x" + Colon@41..42 ":" + WhiteSpace@42..43 " " + PathType@43..46 + Path@43..46 + PathSegment@43..46 + Ident@43..46 "i32" + WhiteSpace@46..47 " " + Eq@47..48 "=" + WhiteSpace@48..49 " " + LitExpr@49..50 + Lit@49..50 + Int@49..50 "1" + Newline@50..52 "\n\n" + ExprStmt@52..62 + AugAssignExpr@52..62 + PathExpr@52..53 + Path@52..53 + PathSegment@52..53 + Ident@52..53 "x" + WhiteSpace@53..54 " " + Plus@54..55 "+" + Eq@55..56 "=" + WhiteSpace@56..57 " " + BinExpr@57..62 + LitExpr@57..58 + Lit@57..58 + Int@57..58 "1" + WhiteSpace@58..59 " " + Plus@59..60 "+" + WhiteSpace@60..61 " " + LitExpr@61..62 + Lit@61..62 + Int@61..62 "1" + Newline@62..63 "\n" + ExprStmt@63..75 + AugAssignExpr@63..75 + PathExpr@63..64 + Path@63..64 + PathSegment@63..64 + Ident@63..64 "y" + WhiteSpace@64..65 " " + LShift@65..67 + Lt@65..66 
"<" + Lt@66..67 "<" + Eq@67..68 "=" + WhiteSpace@68..69 " " + BinExpr@69..75 + LitExpr@69..70 + Lit@69..70 + Int@69..70 "1" + WhiteSpace@70..71 " " + RShift@71..73 + Gt@71..72 ">" + Gt@72..73 ">" + WhiteSpace@73..74 " " + LitExpr@74..75 + Lit@74..75 + Int@74..75 "2" + Newline@75..77 "\n\n" + LetStmt@77..102 + LetKw@77..80 "let" + WhiteSpace@80..81 " " + PathTuplePat@81..98 + Path@81..92 + PathSegment@81..87 + Ident@81..87 "MyEnum" + Colon2@87..89 "::" + PathSegment@89..92 + Ident@89..92 "Foo" + TuplePatElemList@92..98 + LParen@92..93 "(" + PathPat@93..94 + Path@93..94 + PathSegment@93..94 + Ident@93..94 "x" + Comma@94..95 "," + WhiteSpace@95..96 " " + PathPat@96..97 + Path@96..97 + PathSegment@96..97 + Ident@96..97 "y" + RParen@97..98 ")" + WhiteSpace@98..99 " " + Eq@99..100 "=" + WhiteSpace@100..101 " " + PathExpr@101..102 + Path@101..102 + PathSegment@101..102 + Ident@101..102 "e" + Newline@102..104 "\n\n" + LetStmt@104..123 + LetKw@104..107 "let" + WhiteSpace@107..108 " " + RecordPat@108..119 + Path@108..109 + PathSegment@108..109 + Ident@108..109 "S" + WhiteSpace@109..110 " " + RecordPatFieldList@110..119 + LBrace@110..111 "{" + RecordPatField@111..112 + PathPat@111..112 + Path@111..112 + PathSegment@111..112 + Ident@111..112 "x" + Comma@112..113 "," + WhiteSpace@113..114 " " + RecordPatField@114..118 + Ident@114..115 "y" + Colon@115..116 ":" + WhiteSpace@116..117 " " + PathPat@117..118 + Path@117..118 + PathSegment@117..118 + Ident@117..118 "z" + RBrace@118..119 "}" + WhiteSpace@119..120 " " + Eq@120..121 "=" + WhiteSpace@121..122 " " + PathExpr@122..123 + Path@122..123 + PathSegment@122..123 + Ident@122..123 "s" + Newline@123..125 "\n\n" + LetStmt@125..162 + LetKw@125..128 "let" + WhiteSpace@128..129 " " + PathPat@129..130 + Path@129..130 + PathSegment@129..130 + Ident@129..130 "x" + WhiteSpace@130..131 " " + Eq@131..132 "=" + WhiteSpace@132..133 " " + IfExpr@133..162 + IfKw@133..135 "if" + WhiteSpace@135..136 " " + PathExpr@136..137 + Path@136..137 + 
PathSegment@136..137 + Ident@136..137 "b" + WhiteSpace@137..138 " " + BlockExpr@138..147 + LBrace@138..139 "{" + Newline@139..140 "\n" + WhiteSpace@140..144 " " + ExprStmt@144..145 + PathExpr@144..145 + Path@144..145 + PathSegment@144..145 + Ident@144..145 "y" + Newline@145..146 "\n" + RBrace@146..147 "}" + WhiteSpace@147..148 " " + ElseKw@148..152 "else" + WhiteSpace@152..153 " " + BlockExpr@153..162 + LBrace@153..154 "{" + Newline@154..155 "\n" + WhiteSpace@155..159 " " + ExprStmt@159..160 + PathExpr@159..160 + Path@159..160 + PathSegment@159..160 + Ident@159..160 "z" + Newline@160..161 "\n" + RBrace@161..162 "}" + Newline@162..164 "\n\n" + LetStmt@164..231 + LetKw@164..167 "let" + WhiteSpace@167..168 " " + PathPat@168..169 + Path@168..169 + PathSegment@168..169 + Ident@168..169 "x" + WhiteSpace@169..170 " " + Eq@170..171 "=" + WhiteSpace@171..172 " " + MatchExpr@172..231 + MatchKw@172..177 "match" + WhiteSpace@177..178 " " + PathExpr@178..179 + Path@178..179 + PathSegment@178..179 + Ident@178..179 "b" + WhiteSpace@179..180 " " + MatchArmList@180..231 + LBrace@180..181 "{" + Newline@181..182 "\n" + WhiteSpace@182..186 " " + MatchArm@186..218 + OrPat@186..213 + PathTuplePat@186..198 + Path@186..195 + PathSegment@186..192 + Ident@186..192 "MyEnum" + Colon2@192..194 "::" + PathSegment@194..195 + Ident@194..195 "A" + TuplePatElemList@195..198 + LParen@195..196 "(" + PathPat@196..197 + Path@196..197 + PathSegment@196..197 + Ident@196..197 "x" + RParen@197..198 ")" + WhiteSpace@198..199 " " + Pipe@199..200 "|" + WhiteSpace@200..201 " " + PathTuplePat@201..213 + Path@201..210 + PathSegment@201..207 + Ident@201..207 "MyEnum" + Colon2@207..209 "::" + PathSegment@209..210 + Ident@209..210 "B" + TuplePatElemList@210..213 + LParen@210..211 "(" + PathPat@211..212 + Path@211..212 + PathSegment@211..212 + Ident@211..212 "x" + RParen@212..213 ")" + WhiteSpace@213..214 " " + FatArrow@214..216 "=>" + WhiteSpace@216..217 " " + PathExpr@217..218 + Path@217..218 + 
PathSegment@217..218 + Ident@217..218 "x" + Newline@218..219 "\n" + WhiteSpace@219..223 " " + MatchArm@223..229 + WildCardPat@223..224 + Underscore@223..224 "_" + WhiteSpace@224..225 " " + FatArrow@225..227 "=>" + WhiteSpace@227..228 " " + LitExpr@228..229 + Lit@228..229 + Int@228..229 "0" + Newline@229..230 "\n" + RBrace@230..231 "}" + diff --git a/crates/parser2/test_files/syntax_node/stmts/while.fe b/crates/parser2/test_files/syntax_node/stmts/while.fe new file mode 100644 index 0000000000..6af89ff648 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/stmts/while.fe @@ -0,0 +1,4 @@ +while i < 10 { + sum = 1 + 2 + i = i + 1 +} \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/stmts/while.snap b/crates/parser2/test_files/syntax_node/stmts/while.snap new file mode 100644 index 0000000000..514bb9b281 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/stmts/while.snap @@ -0,0 +1,69 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: node +input_file: crates/parser2/test_files/syntax_node/stmts/while.fe +--- +Root@0..46 + WhileStmt@0..46 + WhileKw@0..5 "while" + WhiteSpace@5..6 " " + BinExpr@6..12 + PathExpr@6..7 + Path@6..7 + PathSegment@6..7 + Ident@6..7 "i" + WhiteSpace@7..8 " " + Lt@8..9 "<" + WhiteSpace@9..10 " " + LitExpr@10..12 + Lit@10..12 + Int@10..12 "10" + WhiteSpace@12..13 " " + BlockExpr@13..46 + LBrace@13..14 "{" + Newline@14..15 "\n" + WhiteSpace@15..19 " " + ExprStmt@19..30 + AssignExpr@19..30 + PathExpr@19..22 + Path@19..22 + PathSegment@19..22 + Ident@19..22 "sum" + WhiteSpace@22..23 " " + Eq@23..24 "=" + WhiteSpace@24..25 " " + BinExpr@25..30 + LitExpr@25..26 + Lit@25..26 + Int@25..26 "1" + WhiteSpace@26..27 " " + Plus@27..28 "+" + WhiteSpace@28..29 " " + LitExpr@29..30 + Lit@29..30 + Int@29..30 "2" + Newline@30..31 "\n" + WhiteSpace@31..35 " " + ExprStmt@35..44 + AssignExpr@35..44 + PathExpr@35..36 + Path@35..36 + PathSegment@35..36 + Ident@35..36 "i" + WhiteSpace@36..37 " " + Eq@37..38 "=" + 
WhiteSpace@38..39 " " + BinExpr@39..44 + PathExpr@39..40 + Path@39..40 + PathSegment@39..40 + Ident@39..40 "i" + WhiteSpace@40..41 " " + Plus@41..42 "+" + WhiteSpace@42..43 " " + LitExpr@43..44 + Lit@43..44 + Int@43..44 "1" + Newline@44..45 "\n" + RBrace@45..46 "}" + diff --git a/crates/parser2/test_files/syntax_node/structs/attr.fe b/crates/parser2/test_files/syntax_node/structs/attr.fe new file mode 100644 index 0000000000..e1f1dc750c --- /dev/null +++ b/crates/parser2/test_files/syntax_node/structs/attr.fe @@ -0,0 +1,11 @@ +/// DocComment1 +#attr +// normal comment +/// DocComment2 +pub struct StructAttr { + /// This is `x` + x: foo::Bar, + /// This is `y` + #cfg(target: evm) + y: i32 +} \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/structs/attr.snap b/crates/parser2/test_files/syntax_node/structs/attr.snap new file mode 100644 index 0000000000..f65a60c566 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/structs/attr.snap @@ -0,0 +1,81 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: node +input_file: crates/parser2/test_files/syntax_node/structs/attr.fe +--- +Root@0..171 + ItemList@0..171 + Item@0..171 + Struct@0..171 + AttrList@0..56 + DocCommentAttr@0..15 + DocComment@0..15 "/// DocComment1" + Newline@15..16 "\n" + Attr@16..21 + Pound@16..17 "#" + Ident@17..21 "attr" + Newline@21..22 "\n" + Comment@22..39 "// normal comment" + Newline@39..40 "\n" + DocCommentAttr@40..55 + DocComment@40..55 "/// DocComment2" + Newline@55..56 "\n" + ItemModifier@56..59 + PubKw@56..59 "pub" + WhiteSpace@59..60 " " + StructKw@60..66 "struct" + WhiteSpace@66..67 " " + Ident@67..77 "StructAttr" + WhiteSpace@77..78 " " + RecordFieldDefList@78..171 + LBrace@78..79 "{" + Newline@79..80 "\n" + WhiteSpace@80..84 " " + RecordFieldDef@84..115 + AttrList@84..100 + DocCommentAttr@84..99 + DocComment@84..99 "/// This is `x`" + Newline@99..100 "\n" + WhiteSpace@100..104 " " + Ident@104..105 "x" + Colon@105..106 ":" + 
WhiteSpace@106..107 " " + PathType@107..115 + Path@107..115 + PathSegment@107..110 + Ident@107..110 "foo" + Colon2@110..112 "::" + PathSegment@112..115 + Ident@112..115 "Bar" + Comma@115..116 "," + Newline@116..117 "\n" + WhiteSpace@117..121 " " + RecordFieldDef@121..169 + AttrList@121..159 + DocCommentAttr@121..136 + DocComment@121..136 "/// This is `y`" + Newline@136..137 "\n" + WhiteSpace@137..141 " " + Attr@141..158 + Pound@141..142 "#" + Ident@142..145 "cfg" + AttrArgList@145..158 + LParen@145..146 "(" + AttrArg@146..157 + Ident@146..152 "target" + Colon@152..153 ":" + WhiteSpace@153..154 " " + Ident@154..157 "evm" + RParen@157..158 ")" + Newline@158..159 "\n" + WhiteSpace@159..163 " " + Ident@163..164 "y" + Colon@164..165 ":" + WhiteSpace@165..166 " " + PathType@166..169 + Path@166..169 + PathSegment@166..169 + Ident@166..169 "i32" + Newline@169..170 "\n" + RBrace@170..171 "}" + diff --git a/crates/parser2/test_files/syntax_node/structs/empty.fe b/crates/parser2/test_files/syntax_node/structs/empty.fe new file mode 100644 index 0000000000..901512de18 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/structs/empty.fe @@ -0,0 +1,2 @@ +pub struct EmptyStruct { +} \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/structs/empty.snap b/crates/parser2/test_files/syntax_node/structs/empty.snap new file mode 100644 index 0000000000..a633350661 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/structs/empty.snap @@ -0,0 +1,21 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: node +input_file: crates/parser2/test_files/syntax_node/structs/empty.fe +--- +Root@0..26 + ItemList@0..26 + Item@0..26 + Struct@0..26 + ItemModifier@0..3 + PubKw@0..3 "pub" + WhiteSpace@3..4 " " + StructKw@4..10 "struct" + WhiteSpace@10..11 " " + Ident@11..22 "EmptyStruct" + WhiteSpace@22..23 " " + RecordFieldDefList@23..26 + LBrace@23..24 "{" + Newline@24..25 "\n" + RBrace@25..26 "}" + diff --git 
a/crates/parser2/test_files/syntax_node/structs/generics.fe b/crates/parser2/test_files/syntax_node/structs/generics.fe new file mode 100644 index 0000000000..2b1483bcfb --- /dev/null +++ b/crates/parser2/test_files/syntax_node/structs/generics.fe @@ -0,0 +1,38 @@ +pub struct StructWithGenericParam +{ + x: S, + y: T, + z: U, +} + +pub struct StructWithGenericParam2< + S, + T: foo::Trait, + U +> { + x: *(S, *i32), + y: T, + z: U, +} + +pub struct StructWithGenericParam3< + S: foo::Trait + bar::Trait, + T, + U: bar::Trait +> where + T: Trait1 + Trait2, + Option: Trait1 + Trait2, + Result: Trait2 + Trait3, +{ + x: S, + y: T, + z: U, +} + +pub struct MyArr + where + (T, U): Trait + Trait +{ + __inner: [T; N], + __inner2: (T, U) +} \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/structs/generics.snap b/crates/parser2/test_files/syntax_node/structs/generics.snap new file mode 100644 index 0000000000..ae26fa6500 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/structs/generics.snap @@ -0,0 +1,484 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: node +input_file: crates/parser2/test_files/syntax_node/structs/generics.fe +--- +Root@0..563 + ItemList@0..563 + Item@0..76 + Struct@0..76 + ItemModifier@0..3 + PubKw@0..3 "pub" + WhiteSpace@3..4 " " + StructKw@4..10 "struct" + WhiteSpace@10..11 " " + Ident@11..33 "StructWithGenericParam" + GenericParamList@33..42 + Lt@33..34 "<" + TypeGenericParam@34..35 + Ident@34..35 "S" + Comma@35..36 "," + WhiteSpace@36..37 " " + TypeGenericParam@37..38 + Ident@37..38 "T" + Comma@38..39 "," + WhiteSpace@39..40 " " + TypeGenericParam@40..41 + Ident@40..41 "U" + Gt@41..42 ">" + Newline@42..43 "\n" + RecordFieldDefList@43..76 + LBrace@43..44 "{" + Newline@44..45 "\n" + WhiteSpace@45..49 " " + RecordFieldDef@49..53 + Ident@49..50 "x" + Colon@50..51 ":" + WhiteSpace@51..52 " " + PathType@52..53 + Path@52..53 + PathSegment@52..53 + Ident@52..53 "S" + Comma@53..54 "," + Newline@54..55 "\n" + 
WhiteSpace@55..59 " " + RecordFieldDef@59..63 + Ident@59..60 "y" + Colon@60..61 ":" + WhiteSpace@61..62 " " + PathType@62..63 + Path@62..63 + PathSegment@62..63 + Ident@62..63 "T" + Comma@63..64 "," + Newline@64..65 "\n" + WhiteSpace@65..69 " " + RecordFieldDef@69..73 + Ident@69..70 "z" + Colon@70..71 ":" + WhiteSpace@71..72 " " + PathType@72..73 + Path@72..73 + PathSegment@72..73 + Ident@72..73 "U" + Comma@73..74 "," + Newline@74..75 "\n" + RBrace@75..76 "}" + Newline@76..78 "\n\n" + Item@78..190 + Struct@78..190 + ItemModifier@78..81 + PubKw@78..81 "pub" + WhiteSpace@81..82 " " + StructKw@82..88 "struct" + WhiteSpace@88..89 " " + Ident@89..112 "StructWithGenericParam2" + GenericParamList@112..147 + Lt@112..113 "<" + Newline@113..114 "\n" + WhiteSpace@114..118 " " + TypeGenericParam@118..119 + Ident@118..119 "S" + Comma@119..120 "," + Newline@120..121 "\n" + WhiteSpace@121..125 " " + TypeGenericParam@125..138 + Ident@125..126 "T" + TypeBoundList@126..138 + Colon@126..127 ":" + WhiteSpace@127..128 " " + TypeBound@128..138 + TraitRef@128..138 + Path@128..138 + PathSegment@128..131 + Ident@128..131 "foo" + Colon2@131..133 "::" + PathSegment@133..138 + Ident@133..138 "Trait" + Comma@138..139 "," + Newline@139..140 "\n" + WhiteSpace@140..144 " " + TypeGenericParam@144..145 + Ident@144..145 "U" + Newline@145..146 "\n" + Gt@146..147 ">" + WhiteSpace@147..148 " " + RecordFieldDefList@148..190 + LBrace@148..149 "{" + Newline@149..150 "\n" + WhiteSpace@150..154 " " + RecordFieldDef@154..167 + Ident@154..155 "x" + Colon@155..156 ":" + WhiteSpace@156..157 " " + PtrType@157..167 + Star@157..158 "*" + TupleType@158..167 + LParen@158..159 "(" + PathType@159..160 + Path@159..160 + PathSegment@159..160 + Ident@159..160 "S" + Comma@160..161 "," + WhiteSpace@161..162 " " + PtrType@162..166 + Star@162..163 "*" + PathType@163..166 + Path@163..166 + PathSegment@163..166 + Ident@163..166 "i32" + RParen@166..167 ")" + Comma@167..168 "," + Newline@168..169 "\n" + WhiteSpace@169..173 " " + 
RecordFieldDef@173..177 + Ident@173..174 "y" + Colon@174..175 ":" + WhiteSpace@175..176 " " + PathType@176..177 + Path@176..177 + PathSegment@176..177 + Ident@176..177 "T" + Comma@177..178 "," + Newline@178..179 "\n" + WhiteSpace@179..183 " " + RecordFieldDef@183..187 + Ident@183..184 "z" + Colon@184..185 ":" + WhiteSpace@185..186 " " + PathType@186..187 + Path@186..187 + PathSegment@186..187 + Ident@186..187 "U" + Comma@187..188 "," + Newline@188..189 "\n" + RBrace@189..190 "}" + Newline@190..192 "\n\n" + Item@192..414 + Struct@192..414 + ItemModifier@192..195 + PubKw@192..195 "pub" + WhiteSpace@195..196 " " + StructKw@196..202 "struct" + WhiteSpace@202..203 " " + Ident@203..226 "StructWithGenericParam3" + GenericParamList@226..286 + Lt@226..227 "<" + Newline@227..228 "\n" + WhiteSpace@228..232 " " + TypeGenericParam@232..258 + Ident@232..233 "S" + TypeBoundList@233..258 + Colon@233..234 ":" + WhiteSpace@234..235 " " + TypeBound@235..245 + TraitRef@235..245 + Path@235..245 + PathSegment@235..238 + Ident@235..238 "foo" + Colon2@238..240 "::" + PathSegment@240..245 + Ident@240..245 "Trait" + WhiteSpace@245..246 " " + Plus@246..247 "+" + WhiteSpace@247..248 " " + TypeBound@248..258 + TraitRef@248..258 + Path@248..258 + PathSegment@248..251 + Ident@248..251 "bar" + Colon2@251..253 "::" + PathSegment@253..258 + Ident@253..258 "Trait" + Comma@258..259 "," + Newline@259..260 "\n" + WhiteSpace@260..264 " " + TypeGenericParam@264..265 + Ident@264..265 "T" + Comma@265..266 "," + Newline@266..267 "\n" + WhiteSpace@267..271 " " + TypeGenericParam@271..284 + Ident@271..272 "U" + TypeBoundList@272..284 + Colon@272..273 ":" + WhiteSpace@273..274 " " + TypeBound@274..284 + TraitRef@274..284 + Path@274..284 + PathSegment@274..277 + Ident@274..277 "bar" + Colon2@277..279 "::" + PathSegment@279..284 + Ident@279..284 "Trait" + Newline@284..285 "\n" + Gt@285..286 ">" + WhiteSpace@286..287 " " + WhereClause@287..380 + WhereKw@287..292 "where" + Newline@292..293 "\n" + 
WhiteSpace@293..297 " " + WherePredicate@297..315 + PathType@297..298 + Path@297..298 + PathSegment@297..298 + Ident@297..298 "T" + TypeBoundList@298..315 + Colon@298..299 ":" + WhiteSpace@299..300 " " + TypeBound@300..306 + TraitRef@300..306 + Path@300..306 + PathSegment@300..306 + Ident@300..306 "Trait1" + WhiteSpace@306..307 " " + Plus@307..308 "+" + WhiteSpace@308..309 " " + TypeBound@309..315 + TraitRef@309..315 + Path@309..315 + PathSegment@309..315 + Ident@309..315 "Trait2" + Comma@315..316 "," + Newline@316..317 "\n" + WhiteSpace@317..321 " " + WherePredicate@321..347 + PathType@321..330 + Path@321..330 + PathSegment@321..330 + Ident@321..327 "Option" + GenericArgList@327..330 + Lt@327..328 "<" + TypeGenericArg@328..329 + PathType@328..329 + Path@328..329 + PathSegment@328..329 + Ident@328..329 "T" + Gt@329..330 ">" + TypeBoundList@330..347 + Colon@330..331 ":" + WhiteSpace@331..332 " " + TypeBound@332..338 + TraitRef@332..338 + Path@332..338 + PathSegment@332..338 + Ident@332..338 "Trait1" + WhiteSpace@338..339 " " + Plus@339..340 "+" + WhiteSpace@340..341 " " + TypeBound@341..347 + TraitRef@341..347 + Path@341..347 + PathSegment@341..347 + Ident@341..347 "Trait2" + Comma@347..348 "," + Newline@348..349 "\n" + WhiteSpace@349..353 " " + WherePredicate@353..379 + PathType@353..362 + Path@353..362 + PathSegment@353..362 + Ident@353..359 "Result" + GenericArgList@359..362 + Lt@359..360 "<" + TypeGenericArg@360..361 + PathType@360..361 + Path@360..361 + PathSegment@360..361 + Ident@360..361 "U" + Gt@361..362 ">" + TypeBoundList@362..379 + Colon@362..363 ":" + WhiteSpace@363..364 " " + TypeBound@364..370 + TraitRef@364..370 + Path@364..370 + PathSegment@364..370 + Ident@364..370 "Trait2" + WhiteSpace@370..371 " " + Plus@371..372 "+" + WhiteSpace@372..373 " " + TypeBound@373..379 + TraitRef@373..379 + Path@373..379 + PathSegment@373..379 + Ident@373..379 "Trait3" + Comma@379..380 "," + Newline@380..381 "\n" + RecordFieldDefList@381..414 + LBrace@381..382 "{" + 
Newline@382..383 "\n" + WhiteSpace@383..387 " " + RecordFieldDef@387..391 + Ident@387..388 "x" + Colon@388..389 ":" + WhiteSpace@389..390 " " + PathType@390..391 + Path@390..391 + PathSegment@390..391 + Ident@390..391 "S" + Comma@391..392 "," + Newline@392..393 "\n" + WhiteSpace@393..397 " " + RecordFieldDef@397..401 + Ident@397..398 "y" + Colon@398..399 ":" + WhiteSpace@399..400 " " + PathType@400..401 + Path@400..401 + PathSegment@400..401 + Ident@400..401 "T" + Comma@401..402 "," + Newline@402..403 "\n" + WhiteSpace@403..407 " " + RecordFieldDef@407..411 + Ident@407..408 "z" + Colon@408..409 ":" + WhiteSpace@409..410 " " + PathType@410..411 + Path@410..411 + PathSegment@410..411 + Ident@410..411 "U" + Comma@411..412 "," + Newline@412..413 "\n" + RBrace@413..414 "}" + Newline@414..416 "\n\n" + Item@416..563 + Struct@416..563 + ItemModifier@416..419 + PubKw@416..419 "pub" + WhiteSpace@419..420 " " + StructKw@420..426 "struct" + WhiteSpace@426..427 " " + Ident@427..432 "MyArr" + GenericParamList@432..469 + Lt@432..433 "<" + TypeGenericParam@433..449 + Ident@433..434 "T" + TypeBoundList@434..449 + Colon@434..435 ":" + WhiteSpace@435..436 " " + TypeBound@436..449 + TraitRef@436..449 + Path@436..449 + PathSegment@436..439 + Ident@436..439 "std" + Colon2@439..441 "::" + PathSegment@441..444 + Ident@441..444 "ops" + Colon2@444..446 "::" + PathSegment@446..449 + Ident@446..449 "Add" + Comma@449..450 "," + WhiteSpace@450..451 " " + TypeGenericParam@451..452 + Ident@451..452 "U" + Comma@452..453 "," + WhiteSpace@453..454 " " + ConstGenericParam@454..468 + ConstKw@454..459 "const" + WhiteSpace@459..460 " " + Ident@460..461 "N" + Colon@461..462 ":" + WhiteSpace@462..463 " " + PathType@463..468 + Path@463..468 + PathSegment@463..468 + Ident@463..468 "usize" + Gt@468..469 ">" + Newline@469..470 "\n" + WhiteSpace@470..474 " " + WhereClause@474..517 + WhereKw@474..479 "where" + Newline@479..480 "\n" + WhiteSpace@480..488 " " + WherePredicate@488..517 + TupleType@488..494 + 
LParen@488..489 "(" + PathType@489..490 + Path@489..490 + PathSegment@489..490 + Ident@489..490 "T" + Comma@490..491 "," + WhiteSpace@491..492 " " + PathType@492..493 + Path@492..493 + PathSegment@492..493 + Ident@492..493 "U" + RParen@493..494 ")" + TypeBoundList@494..517 + Colon@494..495 ":" + WhiteSpace@495..496 " " + TypeBound@496..501 + TraitRef@496..501 + Path@496..501 + PathSegment@496..501 + Ident@496..501 "Trait" + WhiteSpace@501..502 " " + Plus@502..503 "+" + WhiteSpace@503..504 " " + TypeBound@504..517 + TraitRef@504..517 + Path@504..517 + PathSegment@504..517 + Ident@504..509 "Trait" + GenericArgList@509..517 + Lt@509..510 "<" + TypeGenericArg@510..513 + PathType@510..513 + Path@510..513 + PathSegment@510..513 + Ident@510..513 "i32" + Comma@513..514 "," + WhiteSpace@514..515 " " + TypeGenericArg@515..516 + PathType@515..516 + Path@515..516 + PathSegment@515..516 + Ident@515..516 "Y" + Gt@516..517 ">" + Newline@517..518 "\n" + RecordFieldDefList@518..563 + LBrace@518..519 "{" + Newline@519..520 "\n" + WhiteSpace@520..524 " " + RecordFieldDef@524..539 + Ident@524..531 "__inner" + Colon@531..532 ":" + WhiteSpace@532..533 " " + ArrayType@533..539 + LBracket@533..534 "[" + PathType@534..535 + Path@534..535 + PathSegment@534..535 + Ident@534..535 "T" + SemiColon@535..536 ";" + WhiteSpace@536..537 " " + PathExpr@537..538 + Path@537..538 + PathSegment@537..538 + Ident@537..538 "N" + RBracket@538..539 "]" + Comma@539..540 "," + Newline@540..541 "\n" + WhiteSpace@541..545 " " + RecordFieldDef@545..561 + Ident@545..553 "__inner2" + Colon@553..554 ":" + WhiteSpace@554..555 " " + TupleType@555..561 + LParen@555..556 "(" + PathType@556..557 + Path@556..557 + PathSegment@556..557 + Ident@556..557 "T" + Comma@557..558 "," + WhiteSpace@558..559 " " + PathType@559..560 + Path@559..560 + PathSegment@559..560 + Ident@559..560 "U" + RParen@560..561 ")" + Newline@561..562 "\n" + RBrace@562..563 "}" + diff --git a/crates/parser2/test_files/syntax_node/structs/tupel_field.fe 
b/crates/parser2/test_files/syntax_node/structs/tupel_field.fe new file mode 100644 index 0000000000..fbfd4abe89 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/structs/tupel_field.fe @@ -0,0 +1,9 @@ +struct StructWithTupleField { + x: (i32, u32), + y: ( + i32, + foo::Bar, + u32 + ), + z: () +} \ No newline at end of file diff --git a/crates/parser2/test_files/syntax_node/structs/tupel_field.snap b/crates/parser2/test_files/syntax_node/structs/tupel_field.snap new file mode 100644 index 0000000000..5f2eb756a1 --- /dev/null +++ b/crates/parser2/test_files/syntax_node/structs/tupel_field.snap @@ -0,0 +1,82 @@ +--- +source: crates/parser2/tests/syntax_node.rs +expression: node +input_file: crates/parser2/test_files/syntax_node/structs/tupel_field.fe +--- +Root@0..119 + ItemList@0..119 + Item@0..119 + Struct@0..119 + StructKw@0..6 "struct" + WhiteSpace@6..7 " " + Ident@7..27 "StructWithTupleField" + WhiteSpace@27..28 " " + RecordFieldDefList@28..119 + LBrace@28..29 "{" + Newline@29..30 "\n" + WhiteSpace@30..34 " " + RecordFieldDef@34..47 + Ident@34..35 "x" + Colon@35..36 ":" + WhiteSpace@36..37 " " + TupleType@37..47 + LParen@37..38 "(" + PathType@38..41 + Path@38..41 + PathSegment@38..41 + Ident@38..41 "i32" + Comma@41..42 "," + WhiteSpace@42..43 " " + PathType@43..46 + Path@43..46 + PathSegment@43..46 + Ident@43..46 "u32" + RParen@46..47 ")" + Comma@47..48 "," + Newline@48..49 "\n" + WhiteSpace@49..53 " " + RecordFieldDef@53..106 + Ident@53..54 "y" + Colon@54..55 ":" + WhiteSpace@55..56 " " + TupleType@56..106 + LParen@56..57 "(" + Newline@57..58 "\n" + WhiteSpace@58..66 " " + PathType@66..69 + Path@66..69 + PathSegment@66..69 + Ident@66..69 "i32" + Comma@69..70 "," + Newline@70..71 "\n" + WhiteSpace@71..79 " " + PathType@79..87 + Path@79..87 + PathSegment@79..82 + Ident@79..82 "foo" + Colon2@82..84 "::" + PathSegment@84..87 + Ident@84..87 "Bar" + Comma@87..88 "," + Newline@88..89 "\n" + WhiteSpace@89..97 " " + PathType@97..100 + Path@97..100 + 
PathSegment@97..100 + Ident@97..100 "u32" + Newline@100..101 "\n" + WhiteSpace@101..105 " " + RParen@105..106 ")" + Comma@106..107 "," + Newline@107..108 "\n" + WhiteSpace@108..112 " " + RecordFieldDef@112..117 + Ident@112..113 "z" + Colon@113..114 ":" + WhiteSpace@114..115 " " + TupleType@115..117 + LParen@115..116 "(" + RParen@116..117 ")" + Newline@117..118 "\n" + RBrace@118..119 "}" + diff --git a/crates/parser2/tests/error_recovery.rs b/crates/parser2/tests/error_recovery.rs new file mode 100644 index 0000000000..18635f1e91 --- /dev/null +++ b/crates/parser2/tests/error_recovery.rs @@ -0,0 +1,78 @@ +use dir_test::{dir_test, Fixture}; + +use fe_compiler_test_utils::snap_test; + +mod test_runner; +use test_runner::*; + +#[dir_test( + dir: "$CARGO_MANIFEST_DIR/test_files/error_recovery/items", + glob: "*.fe" +)] +fn test_item_list(fixture: Fixture<&str>) { + let runner = TestRunner::item_list(false); + let node = format! {"{:#?}", runner.run(fixture.content())}; + snap_test!(node, fixture.path()); +} + +#[dir_test( + dir: "$CARGO_MANIFEST_DIR/test_files/error_recovery/stmts", + glob: "*.fe" +)] +fn test_stmt(fixture: Fixture<&str>) { + let runner = TestRunner::stmt_list(false); + let node = format! {"{:#?}", runner.run(fixture.content())}; + snap_test!(node, fixture.path()); +} + +#[dir_test( + dir: "$CARGO_MANIFEST_DIR/test_files/error_recovery/exprs", + glob: "*.fe" +)] +fn test_expr(fixture: Fixture<&str>) { + let runner = TestRunner::expr_list(false); + let node = format! 
{"{:#?}", runner.run(fixture.content())}; + snap_test!(node, fixture.path()); +} + +#[cfg(target_family = "wasm")] +mod wasm { + use super::*; + use wasm_bindgen_test::wasm_bindgen_test; + + #[dir_test( + dir: "$CARGO_MANIFEST_DIR/test_files/error_recovery/items", + glob: "*.fe" + postfix: "wasm" + )] + #[dir_test_attr( + #[wasm_bindgen_test] + )] + fn test_item_list(fixture: Fixture<&str>) { + TestRunner::item_list(false).run(fixture.content()); + } + + #[dir_test( + dir: "$CARGO_MANIFEST_DIR/test_files/error_recovery/stmts", + glob: "*.fe" + postfix: "wasm" + )] + #[dir_test_attr( + #[wasm_bindgen_test] + )] + fn test_stmt(fixture: Fixture<&str>) { + TestRunner::stmt_list(false).run(fixture.content()); + } + + #[dir_test( + dir: "$CARGO_MANIFEST_DIR/test_files/error_recovery/exprs", + glob: "*.fe" + postfix: "wasm" + )] + #[dir_test_attr( + #[wasm_bindgen_test] + )] + fn test_expr(fixture: Fixture<&str>) { + TestRunner::expr_list(false).run(fixture.content()); + } +} diff --git a/crates/parser2/tests/syntax_node.rs b/crates/parser2/tests/syntax_node.rs new file mode 100644 index 0000000000..9d6a6ddbc3 --- /dev/null +++ b/crates/parser2/tests/syntax_node.rs @@ -0,0 +1,123 @@ +use dir_test::{dir_test, Fixture}; + +use fe_compiler_test_utils::snap_test; + +mod test_runner; +use test_runner::*; + +#[dir_test( + dir: "$CARGO_MANIFEST_DIR/test_files/syntax_node/items", + glob: "*.fe" +)] +fn test_item_list(fixture: Fixture<&str>) { + let runner = TestRunner::item_list(true); + let node = format! {"{:#?}", runner.run(fixture.content())}; + snap_test!(node, fixture.path()); +} + +#[dir_test( + dir: "$CARGO_MANIFEST_DIR/test_files/syntax_node/structs", + glob: "*.fe" +)] +fn test_struct(fixture: Fixture<&str>) { + let runner = TestRunner::item_list(true); + let node = format! 
{"{:#?}", runner.run(fixture.content())}; + snap_test!(node, fixture.path()); +} + +#[dir_test( + dir: "$CARGO_MANIFEST_DIR/test_files/syntax_node/stmts", + glob: "*.fe" +)] +fn test_stmt(fixture: Fixture<&str>) { + let runner = TestRunner::stmt_list(true); + let node = format! {"{:#?}", runner.run(fixture.content())}; + snap_test!(node, fixture.path()); +} + +#[dir_test( + dir: "$CARGO_MANIFEST_DIR/test_files/syntax_node/exprs", + glob: "*.fe" + postfix: "expr" +)] +fn test_expr(fixture: Fixture<&str>) { + let runner = TestRunner::expr_list(true); + let node = format! {"{:#?}", runner.run(fixture.content())}; + snap_test!(node, fixture.path()); +} + +#[dir_test( + dir: "$CARGO_MANIFEST_DIR/test_files/syntax_node/pats", + glob: "*.fe" +)] +fn test_pat(fixture: Fixture<&str>) { + let runner = TestRunner::pat_list(true); + let node = format! {"{:#?}", runner.run(fixture.content())}; + snap_test!(node, fixture.path()); +} + +#[cfg(target_family = "wasm")] +mod wasm { + use super::*; + use wasm_bindgen_test::wasm_bindgen_test; + + #[dir_test::dir_test( + dir: "$CARGO_MANIFEST_DIR/test_files/syntax_node/items", + glob: "*.fe" + postfix: "wasm" + )] + #[dir_test_attr( + #[wasm_bindgen_test] + )] + fn test_item_list(fixture: dir_test::Fixture<&str>) { + TestRunner::item_list(true).run(fixture.content()); + } + + #[dir_test::dir_test( + dir: "$CARGO_MANIFEST_DIR/test_files/syntax_node/structs", + glob: "*.fe" + postfix: "wasm" + )] + #[dir_test_attr( + #[wasm_bindgen_test] + )] + fn test_struct(fixture: dir_test::Fixture<&str>) { + TestRunner::item_list(true).run(fixture.content()); + } + + #[dir_test::dir_test( + dir: "$CARGO_MANIFEST_DIR/test_files/syntax_node/stmts", + glob: "*.fe" + postfix: "wasm" + )] + #[dir_test_attr( + #[wasm_bindgen_test] + )] + fn test_stmt(fixture: dir_test::Fixture<&str>) { + TestRunner::stmt_list(true).run(fixture.content()); + } + + #[dir_test::dir_test( + dir: "$CARGO_MANIFEST_DIR/test_files/syntax_node/exprs", + glob: "*.fe" + postfix: 
"wasm" + )] + #[dir_test_attr( + #[wasm_bindgen_test] + )] + fn test_expr(fixture: dir_test::Fixture<&str>) { + TestRunner::expr_list(true).run(fixture.content()); + } + + #[dir_test( + dir: "$CARGO_MANIFEST_DIR/test_files/syntax_node/pats", + glob: "*.fe" + postfix: "wasm" + )] + #[dir_test_attr( + #[wasm_bindgen_test] + )] + fn test_pat(fixture: Fixture<&str>) { + TestRunner::pat_list(true).run(fixture.content()); + } +} diff --git a/crates/parser2/tests/test_runner.rs b/crates/parser2/tests/test_runner.rs new file mode 100644 index 0000000000..ac763c48d6 --- /dev/null +++ b/crates/parser2/tests/test_runner.rs @@ -0,0 +1,110 @@ +#![allow(unused)] + +use fe_parser2::{ + lexer, + parser::{ + expr::parse_expr, item::ItemListScope, parse_pat, stmt::parse_stmt, Parser, RootScope, + }, + syntax_node::SyntaxNode, + SyntaxKind, +}; + +type BoxedParseFn = Box)>; +pub struct TestRunner { + f: BoxedParseFn, + should_success: bool, +} + +impl TestRunner { + /// Constructs a new test runner. + pub fn new(f: F, should_success: bool) -> Self + where + F: Fn(&mut Parser) + 'static, + { + Self { + f: Box::new(f), + should_success, + } + } + + /// Constructs a test runner for parsing a list of expressions. + pub fn item_list(should_success: bool) -> Self { + fn parse(parser: &mut Parser) { + parser.parse(ItemListScope::default()); + } + + Self::new(parse, should_success) + } + + /// Constructs a test runner for parsing a list of statements. + pub fn stmt_list(should_success: bool) -> Self { + fn parse(parser: &mut Parser) { + parser.set_newline_as_trivia(false); + + bump_newlines(parser); + while parser.current_kind().is_some() { + bump_newlines(parser); + parse_stmt(parser); + bump_newlines(parser); + } + } + + Self::new(parse, should_success) + } + + /// Constructs a test runner for parsing a list of expressions. 
+ pub fn expr_list(should_success: bool) -> Self { + fn parse(parser: &mut Parser) { + parser.set_newline_as_trivia(false); + + bump_newlines(parser); + while parser.current_kind().is_some() { + bump_newlines(parser); + parse_expr(parser); + bump_newlines(parser); + } + } + + Self::new(parse, should_success) + } + + /// Constructs a test runner for parsing a list of patterns. + pub fn pat_list(should_success: bool) -> Self { + fn parse(parser: &mut Parser) { + while parser.current_kind().is_some() { + parse_pat(parser); + } + } + + Self::new(parse, should_success) + } + + pub fn run(&self, input: &str) -> SyntaxNode { + let lexer = lexer::Lexer::new(input); + let mut parser = Parser::new(lexer); + + let checkpoint = parser.enter(RootScope::default(), None); + (self.f)(&mut parser); + parser.leave(checkpoint); + + let (cst, errors) = parser.finish_to_node(); + + for error in &errors { + println!("{}@{:?}", error.msg(), error.range()); + } + if self.should_success { + assert! {errors.is_empty()} + } else { + assert! {!errors.is_empty()} + } + assert_eq!(input, cst.to_string()); + + cst + } +} + +pub fn bump_newlines(parser: &mut Parser) { + while parser.current_kind() == Some(SyntaxKind::Newline) { + parser.bump(); + } +} diff --git a/crates/test-utils/src/_macro_support.rs b/crates/test-utils/src/_macro_support.rs new file mode 100644 index 0000000000..13b387142a --- /dev/null +++ b/crates/test-utils/src/_macro_support.rs @@ -0,0 +1,32 @@ +#[doc(hidden)] +pub use insta as _insta; + +/// A macro to assert that a value matches a snapshot. +/// If the snapshot does not exist, it will be created in the same directory as +/// the test file. +#[macro_export] +macro_rules! 
snap_test { + ($value:expr, $fixture_path: expr) => { + let mut settings = $crate::_macro_support::_insta::Settings::new(); + let fixture_path = ::std::path::Path::new($fixture_path); + let fixture_dir = fixture_path.parent().unwrap(); + let fixture_name = fixture_path.file_stem().unwrap().to_str().unwrap(); + + settings.set_snapshot_path(fixture_dir); + settings.set_input_file($fixture_path); + settings.set_prepend_module_to_snapshot(false); + settings.bind(|| { + $crate::_macro_support::_insta::_macro_support::assert_snapshot( + $crate::_macro_support::_insta::_macro_support::AutoName.into(), + &$value, + env!("CARGO_MANIFEST_DIR"), + fixture_name, + module_path!(), + file!(), + line!(), + stringify!($value), + ) + .unwrap() + }) + }; +} diff --git a/crates/test-utils/src/lib.rs b/crates/test-utils/src/lib.rs index bef59af0fe..d7d7b5d02c 100644 --- a/crates/test-utils/src/lib.rs +++ b/crates/test-utils/src/lib.rs @@ -1,12 +1,17 @@ +#[doc(hidden)] +pub mod _macro_support; + use evm_runtime::{ExitReason, Handler}; -use fe_common::diagnostics::print_diagnostics; -use fe_common::utils::keccak; +use fe_common::{diagnostics::print_diagnostics, utils::keccak}; use fe_driver as driver; + use primitive_types::{H160, U256}; -use std::cell::RefCell; -use std::collections::BTreeMap; -use std::fmt::{Display, Formatter}; -use std::str::FromStr; +use std::{ + cell::RefCell, + collections::BTreeMap, + fmt::{Display, Formatter}, + str::FromStr, +}; use yultsur::*; #[macro_export] @@ -476,7 +481,7 @@ fn _deploy_contract( bytecode = constructor.encode_input(bytecode, init_params).unwrap() } - if let evm::Capture::Exit(exit) = executor.create( + let exit = executor.create( address(DEFAULT_CALLER), evm_runtime::CreateScheme::Legacy { caller: address(DEFAULT_CALLER), @@ -484,15 +489,16 @@ fn _deploy_contract( U256::zero(), bytecode, None, - ) { - return ContractHarness::new( + ); + + match exit { + evm::Capture::Exit(exit) => ContractHarness::new( exit.1 .unwrap_or_else(|| 
panic!("Unable to retrieve contract address: {:?}", exit.0)), abi, - ); + ), + _ => panic!("Failed to create contract"), } - - panic!("Failed to create contract") } #[derive(Debug)] @@ -723,7 +729,7 @@ fn execute_runtime_functions(executor: &mut Executor, runtime: &Runtime) -> (Exi .expect("failed to compile Yul"); let bytecode = hex::decode(contract_bytecode.bytecode).expect("failed to decode bytecode"); - if let evm::Capture::Exit((reason, _, output)) = executor.create( + let evm::Capture::Exit((reason, _, output)) = executor.create( address(DEFAULT_CALLER), evm_runtime::CreateScheme::Legacy { caller: address(DEFAULT_CALLER), @@ -731,11 +737,9 @@ fn execute_runtime_functions(executor: &mut Executor, runtime: &Runtime) -> (Exi U256::zero(), bytecode, None, - ) { - (reason, output) - } else { - panic!("EVM trap during test") - } + ); + + (reason, output) } #[allow(dead_code)] diff --git a/crates/tests-legacy/Cargo.toml b/crates/tests-legacy/Cargo.toml index d7367f0c5d..8dbeb759b7 100644 --- a/crates/tests-legacy/Cargo.toml +++ b/crates/tests-legacy/Cargo.toml @@ -28,7 +28,7 @@ yultsur = {git = "https://github.com/fe-lang/yultsur", rev = "ae85470"} insta = { default-features = false, version = "1.26" } pretty_assertions = "1.0.0" wasm-bindgen-test = "0.3.24" -dir-test="0.1" +dir-test="0.3" [target.'cfg(not(target_arch = "wasm32"))'.dev-dependencies] proptest = {version = "1.1.0", default-features = false, features = ["std"]} diff --git a/crates/tests-legacy/src/differential.rs b/crates/tests-legacy/src/differential.rs index 6648ad375e..da911ce830 100644 --- a/crates/tests-legacy/src/differential.rs +++ b/crates/tests-legacy/src/differential.rs @@ -1,9 +1,11 @@ -//! Tests that check for differences between Solidity and Fe implementations of similar contracts +//! Tests that check for differences between Solidity and Fe implementations of +//! 
similar contracts #![cfg(all(feature = "solc-backend", not(target_arch = "wasm32")))] use proptest::prelude::*; -use fe_compiler_test_utils::*; -use fe_compiler_test_utils::{self as test_utils}; +use fe_compiler_test_utils::{ + *, {self as test_utils}, +}; struct DualHarness { fe_harness: ContractHarness, @@ -19,7 +21,7 @@ struct CaptureResult<'a> { input: &'a [ethabi::Token], } -impl<'a> CaptureResult<'a> { +impl CaptureResult<'_> { pub fn assert_fe_max_percentage_more_gas(&self, max_percentage: i64) -> &Self { let fe_percentage: i64 = (self.fe_used_gas as i64 - self.solidity_used_gas as i64) * 100 / self.solidity_used_gas as i64; diff --git a/crates/tests-legacy/src/features.rs b/crates/tests-legacy/src/features.rs index 6f9ff8dbf9..e4890f0df5 100644 --- a/crates/tests-legacy/src/features.rs +++ b/crates/tests-legacy/src/features.rs @@ -9,8 +9,9 @@ use rstest::rstest; use std::collections::BTreeMap; use fe_common::utils::keccak; -use fe_compiler_test_utils::*; -use fe_compiler_test_utils::{self as test_utils}; +use fe_compiler_test_utils::{ + *, {self as test_utils}, +}; const SOME_ADDRESS: &str = "2012301230123012301230123012301230123002"; diff --git a/crates/tests-legacy/src/ingots.rs b/crates/tests-legacy/src/ingots.rs new file mode 100644 index 0000000000..f5475a73de --- /dev/null +++ b/crates/tests-legacy/src/ingots.rs @@ -0,0 +1,79 @@ +#![cfg(feature = "solc-backend")] +use fe_compiler_test_utils::{ + *, {self as test_utils}, +}; + +pub fn deploy_ingot( + executor: &mut Executor, + fixture: &str, + contract_name: &str, + init_params: &[ethabi::Token], +) -> ContractHarness { + test_utils::deploy_contract_from_ingot( + executor, + &format!("ingots/{fixture}/src"), + contract_name, + init_params, + ) +} + +#[test] +fn test_ingot_with_visibility() { + with_executor(&|mut executor| { + let _harness = deploy_ingot(&mut executor, "pub_contract_ingot", "FooBarBing", &[]); + }) +} + +#[test] +fn test_trait_no_ambiguity() { + with_executor(&|mut executor| { + let 
_harness = deploy_ingot(&mut executor, "trait_no_ambiguity", "Foo", &[]); + }) +} + +#[test] +fn test_trait_ingot_check() { + with_executor(&|mut executor| { + let _harness = deploy_ingot(&mut executor, "trait_ingot_check", "Foo", &[]); + }) +} + +#[test] +fn test_ingot_pub_contract() { + with_executor(&|mut executor| { + let _harness = deploy_ingot(&mut executor, "visibility_ingot", "Foo", &[]); + }) +} +#[test] +fn test_basic_ingot() { + with_executor(&|mut executor| { + let harness = deploy_ingot(&mut executor, "basic_ingot", "Foo", &[]); + + harness.test_function( + &mut executor, + "get_my_baz", + &[], + Some(&tuple_token(&[bool_token(true), uint_token(26)])), + ); + + harness.test_function(&mut executor, "get_42", &[], Some(&uint_token(42))); + harness.test_function(&mut executor, "get_26", &[], Some(&uint_token(26))); + harness.test_function( + &mut executor, + "get_my_dyng", + &[], + Some(&tuple_token(&[ + address_token("8"), + uint_token(42), + int_token(-1), + ])), + ); + harness.test_function(&mut executor, "call_on_path", &[], None); + harness.test_function( + &mut executor, + "create_bing_contract", + &[], + Some(&uint_token(90)), + ); + }) +} diff --git a/crates/tests/Cargo.toml b/crates/tests/Cargo.toml index 145339b1cf..e3011e21b3 100644 --- a/crates/tests/Cargo.toml +++ b/crates/tests/Cargo.toml @@ -7,11 +7,11 @@ license = "GPL-3.0-or-later" repository = "https://github.com/ethereum/fe" [dependencies] -fe-test-runner = {path = "../test-runner", version = "^0.26.0"} -fe-driver = {path = "../driver", version = "^0.26.0"} -fe-common = {path = "../common", version = "^0.26.0"} +fe-test-runner = { path = "../test-runner", version = "^0.26.0" } +fe-driver = { path = "../driver", version = "^0.26.0" } +fe-common = { path = "../common", version = "^0.26.0" } -dir-test = "^0.1" +dir-test = "0.3" [features] solc-backend = ["fe-driver/solc-backend"] diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__const_local.snap 
b/crates/tests/src/snapshots/fe_compiler_tests__features__const_local.snap new file mode 100644 index 0000000000..266f1a5ebf --- /dev/null +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__const_local.snap @@ -0,0 +1,6 @@ +--- +source: crates/tests/src/features.rs +expression: "format!(\"{}\", harness.gas_reporter)" +--- +bar([]) used 2323 gas + diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_bool_false.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_bool_false.snap new file mode 100644 index 0000000000..258d191e3d --- /dev/null +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_bool_false.snap @@ -0,0 +1,6 @@ +--- +source: crates/tests/src/features.rs +expression: "format!(\"{}\", harness.gas_reporter)" +--- +bar([]) used 102 gas + diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_bool_true.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_bool_true.snap new file mode 100644 index 0000000000..258d191e3d --- /dev/null +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_bool_true.snap @@ -0,0 +1,6 @@ +--- +source: crates/tests/src/features.rs +expression: "format!(\"{}\", harness.gas_reporter)" +--- +bar([]) used 102 gas + diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_msg_sig.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_msg_sig.snap new file mode 100644 index 0000000000..258d191e3d --- /dev/null +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_msg_sig.snap @@ -0,0 +1,6 @@ +--- +source: crates/tests/src/features.rs +expression: "format!(\"{}\", harness.gas_reporter)" +--- +bar([]) used 102 gas + diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_u128_cast.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_u128_cast.snap new file mode 100644 index 0000000000..258d191e3d --- /dev/null +++ 
b/crates/tests/src/snapshots/fe_compiler_tests__features__return_u128_cast.snap @@ -0,0 +1,6 @@ +--- +source: crates/tests/src/features.rs +expression: "format!(\"{}\", harness.gas_reporter)" +--- +bar([]) used 102 gas + diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__return_u256.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__return_u256.snap new file mode 100644 index 0000000000..258d191e3d --- /dev/null +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__return_u256.snap @@ -0,0 +1,6 @@ +--- +source: crates/tests/src/features.rs +expression: "format!(\"{}\", harness.gas_reporter)" +--- +bar([]) used 102 gas + diff --git a/crates/tests/src/snapshots/fe_compiler_tests__features__while_loop_with_break.snap b/crates/tests/src/snapshots/fe_compiler_tests__features__while_loop_with_break.snap new file mode 100644 index 0000000000..258d191e3d --- /dev/null +++ b/crates/tests/src/snapshots/fe_compiler_tests__features__while_loop_with_break.snap @@ -0,0 +1,6 @@ +--- +source: crates/tests/src/features.rs +expression: "format!(\"{}\", harness.gas_reporter)" +--- +bar([]) used 102 gas + diff --git a/crates/uitest/Cargo.toml b/crates/uitest/Cargo.toml new file mode 100644 index 0000000000..b589347462 --- /dev/null +++ b/crates/uitest/Cargo.toml @@ -0,0 +1,16 @@ +[package] +name = "driver2" +version = "0.26.0" +authors = ["The Fe Developers "] +edition = "2021" +license = "Apache-2.0" +repository = "https://github.com/ethereum/fe" +description = "Provides Fe driver" +publish = false + +[dependencies] +driver = { path = "../driver2", package = "fe-driver2" } +hir = { path = "../hir", package = "fe-hir" } +fe-compiler-test-utils = { path = "../test-utils" } +dir-test = "0.3" +wasm-bindgen-test = "0.3" diff --git a/crates/uitest/build.rs b/crates/uitest/build.rs new file mode 100644 index 0000000000..854eb71fab --- /dev/null +++ b/crates/uitest/build.rs @@ -0,0 +1,4 @@ +fn main() { + #[cfg(test)] + 
println!("cargo:rerun-if-changed=./fixtures"); +} diff --git a/crates/uitest/fixtures/name_resolution/conflict.fe b/crates/uitest/fixtures/name_resolution/conflict.fe new file mode 100644 index 0000000000..aacdd64a21 --- /dev/null +++ b/crates/uitest/fixtures/name_resolution/conflict.fe @@ -0,0 +1,10 @@ +// Value domain. +pub fn Foo() {} +pub const Foo: i32 = 1 + +// Type domain. +pub enum Foo {} +pub struct Foo {} +mod Foo {} +enum Foo {} +type Foo = i32 \ No newline at end of file diff --git a/crates/uitest/fixtures/name_resolution/conflict.snap b/crates/uitest/fixtures/name_resolution/conflict.snap new file mode 100644 index 0000000000..cc692ac138 --- /dev/null +++ b/crates/uitest/fixtures/name_resolution/conflict.snap @@ -0,0 +1,28 @@ +--- +source: crates/uitest/src/lib.rs +expression: diags +input_file: crates/uitest/fixtures/name_resolution/conflict.fe +--- +error[2-0001]: `Foo` conflicts with other definitions + ┌─ conflict.fe:2:8 + │ +2 │ pub fn Foo() {} + │ ^^^ `Foo` is defined here +3 │ pub const Foo: i32 = 1 + │ --- `Foo` is redefined here + +error[2-0001]: `Foo` conflicts with other definitions + ┌─ conflict.fe:6:10 + │ + 6 │ pub enum Foo {} + │ ^^^ `Foo` is defined here + 7 │ pub struct Foo {} + │ --- `Foo` is redefined here + 8 │ mod Foo {} + │ --- `Foo` is redefined here + 9 │ enum Foo {} + │ --- `Foo` is redefined here +10 │ type Foo = i32 + │ --- `Foo` is redefined here + + diff --git a/crates/uitest/fixtures/name_resolution/conflict_field.fe b/crates/uitest/fixtures/name_resolution/conflict_field.fe new file mode 100644 index 0000000000..6fcc98c509 --- /dev/null +++ b/crates/uitest/fixtures/name_resolution/conflict_field.fe @@ -0,0 +1,4 @@ +pub struct MyS { + x: i32, + x: u32, +} \ No newline at end of file diff --git a/crates/uitest/fixtures/name_resolution/conflict_field.snap b/crates/uitest/fixtures/name_resolution/conflict_field.snap new file mode 100644 index 0000000000..9b73251e60 --- /dev/null +++ 
b/crates/uitest/fixtures/name_resolution/conflict_field.snap @@ -0,0 +1,14 @@ +--- +source: crates/uitest/tests/name_resolution.rs +expression: diags +input_file: crates/uitest/fixtures/name_resolution/conflict_field.fe +--- +error[2-0001]: `x` conflicts with other definitions + ┌─ conflict_field.fe:2:5 + │ +2 │ x: i32, + │ ^ `x` is defined here +3 │ x: u32, + │ - `x` is redefined here + + diff --git a/crates/uitest/fixtures/name_resolution/conflict_generics.fe b/crates/uitest/fixtures/name_resolution/conflict_generics.fe new file mode 100644 index 0000000000..2422e2b0a7 --- /dev/null +++ b/crates/uitest/fixtures/name_resolution/conflict_generics.fe @@ -0,0 +1,4 @@ +pub struct MyS { + x: T, + y: U +} \ No newline at end of file diff --git a/crates/uitest/fixtures/name_resolution/conflict_generics.snap b/crates/uitest/fixtures/name_resolution/conflict_generics.snap new file mode 100644 index 0000000000..e989d213b5 --- /dev/null +++ b/crates/uitest/fixtures/name_resolution/conflict_generics.snap @@ -0,0 +1,24 @@ +--- +source: crates/uitest/tests/name_resolution.rs +expression: diags +input_file: crates/uitest/fixtures/name_resolution/conflict_generics.fe +--- +error[2-0001]: `T` conflicts with other definitions + ┌─ conflict_generics.fe:1:16 + │ +1 │ pub struct MyS { + │ ^ - `T` is redefined here + │ │ + │ `T` is defined here + +error[2-0004]: `T` is ambiguous + ┌─ conflict_generics.fe:2:8 + │ +1 │ pub struct MyS { + │ - - candidate `#1` + │ │ + │ candidate `#0` +2 │ x: T, + │ ^ `T` is ambiguous + + diff --git a/crates/uitest/fixtures/name_resolution/conflict_variant.fe b/crates/uitest/fixtures/name_resolution/conflict_variant.fe new file mode 100644 index 0000000000..48b0545914 --- /dev/null +++ b/crates/uitest/fixtures/name_resolution/conflict_variant.fe @@ -0,0 +1,5 @@ +pub enum MyE { + Var1, + Var2, + Var1, +} \ No newline at end of file diff --git a/crates/uitest/fixtures/name_resolution/conflict_variant.snap 
b/crates/uitest/fixtures/name_resolution/conflict_variant.snap new file mode 100644 index 0000000000..a95c1c71d0 --- /dev/null +++ b/crates/uitest/fixtures/name_resolution/conflict_variant.snap @@ -0,0 +1,15 @@ +--- +source: crates/uitest/tests/name_resolution.rs +expression: diags +input_file: crates/uitest/fixtures/name_resolution/conflict_variant.fe +--- +error[2-0001]: `Var1` conflicts with other definitions + ┌─ conflict_variant.fe:2:5 + │ +2 │ Var1, + │ ^^^^ `Var1` is defined here +3 │ Var2, +4 │ Var1, + │ ---- `Var1` is redefined here + + diff --git a/crates/uitest/fixtures/name_resolution/import_alias_cycle.fe b/crates/uitest/fixtures/name_resolution/import_alias_cycle.fe new file mode 100644 index 0000000000..c46e8d2049 --- /dev/null +++ b/crates/uitest/fixtures/name_resolution/import_alias_cycle.fe @@ -0,0 +1,7 @@ +pub mod mod1 { + pub use super::mod2::Foo as Bar // Error +} + +pub mod mod2 { + pub use super::mod1::Bar as Foo // Error +} \ No newline at end of file diff --git a/crates/uitest/fixtures/name_resolution/import_alias_cycle.snap b/crates/uitest/fixtures/name_resolution/import_alias_cycle.snap new file mode 100644 index 0000000000..6efe635fd3 --- /dev/null +++ b/crates/uitest/fixtures/name_resolution/import_alias_cycle.snap @@ -0,0 +1,18 @@ +--- +source: crates/uitest/tests/name_resolution.rs +expression: diags +input_file: crates/uitest/fixtures/name_resolution/import_alias_cycle.fe +--- +error[2-0002]: `Foo` is not found + ┌─ import_alias_cycle.fe:2:26 + │ +2 │ pub use super::mod2::Foo as Bar // Error + │ ^^^ `Foo` is not found + +error[2-0002]: `Bar` is not found + ┌─ import_alias_cycle.fe:6:26 + │ +6 │ pub use super::mod1::Bar as Foo // Error + │ ^^^ `Bar` is not found + + diff --git a/crates/uitest/fixtures/name_resolution/import_ambiguous.fe b/crates/uitest/fixtures/name_resolution/import_ambiguous.fe new file mode 100644 index 0000000000..8ef6d67152 --- /dev/null +++ b/crates/uitest/fixtures/name_resolution/import_ambiguous.fe @@ -0,0 
+1,16 @@ +use foo::* +pub use S + +mod foo { + pub use inner1::* + pub use inner2::* + pub use S + + + pub mod inner1 { + pub struct S {} + } + mod inner2 { + pub struct S {} + } +} \ No newline at end of file diff --git a/crates/uitest/fixtures/name_resolution/import_ambiguous.snap b/crates/uitest/fixtures/name_resolution/import_ambiguous.snap new file mode 100644 index 0000000000..7fdd4c1e4b --- /dev/null +++ b/crates/uitest/fixtures/name_resolution/import_ambiguous.snap @@ -0,0 +1,30 @@ +--- +source: crates/uitest/tests/name_resolution.rs +expression: diags +input_file: crates/uitest/fixtures/name_resolution/import_ambiguous.fe +--- +error[2-0004]: `S` is ambiguous + ┌─ import_ambiguous.fe:2:9 + │ + 2 │ pub use S + │ ^ `S` is ambiguous + · +11 │ pub struct S {} + │ - candidate `#0` + · +14 │ pub struct S {} + │ - candidate `#1` + +error[2-0004]: `S` is ambiguous + ┌─ import_ambiguous.fe:7:13 + │ + 7 │ pub use S + │ ^ `S` is ambiguous + · +11 │ pub struct S {} + │ - candidate `#0` + · +14 │ pub struct S {} + │ - candidate `#1` + + diff --git a/crates/uitest/fixtures/name_resolution/import_ambiguous_builtin.fe b/crates/uitest/fixtures/name_resolution/import_ambiguous_builtin.fe new file mode 100644 index 0000000000..a027553e39 --- /dev/null +++ b/crates/uitest/fixtures/name_resolution/import_ambiguous_builtin.fe @@ -0,0 +1,7 @@ +use foo::* + +use i32::* + +mod foo { + pub mod i32 {} +} diff --git a/crates/uitest/fixtures/name_resolution/import_ambiguous_builtin.snap b/crates/uitest/fixtures/name_resolution/import_ambiguous_builtin.snap new file mode 100644 index 0000000000..d53f66d5b8 --- /dev/null +++ b/crates/uitest/fixtures/name_resolution/import_ambiguous_builtin.snap @@ -0,0 +1,12 @@ +--- +source: crates/uitest/tests/name_resolution.rs +expression: diags +input_file: crates/uitest/fixtures/name_resolution/import_ambiguous_builtin.fe +--- +error[2-0004]: `i32` is ambiguous + ┌─ import_ambiguous_builtin.fe:3:5 + │ +3 │ use i32::* + │ ^^^ `i32` is ambiguous + + 
diff --git a/crates/uitest/fixtures/name_resolution/import_conflict.fe b/crates/uitest/fixtures/name_resolution/import_conflict.fe new file mode 100644 index 0000000000..d98c1fb101 --- /dev/null +++ b/crates/uitest/fixtures/name_resolution/import_conflict.fe @@ -0,0 +1,10 @@ +use foo1::S +use foo2::S + +pub mod foo1 { + pub struct S {} +} + +pub mod foo2 { + pub struct S {} +} \ No newline at end of file diff --git a/crates/uitest/fixtures/name_resolution/import_conflict.snap b/crates/uitest/fixtures/name_resolution/import_conflict.snap new file mode 100644 index 0000000000..58d1d0f5d2 --- /dev/null +++ b/crates/uitest/fixtures/name_resolution/import_conflict.snap @@ -0,0 +1,14 @@ +--- +source: crates/uitest/tests/name_resolution.rs +expression: diags +input_file: crates/uitest/fixtures/name_resolution/import_conflict.fe +--- +error[2-0001]: `S` conflicts with other definitions + ┌─ import_conflict.fe:1:11 + │ +1 │ use foo1::S + │ ^ `S` is defined here +2 │ use foo2::S + │ - `S` is redefined here + + diff --git a/crates/uitest/fixtures/name_resolution/import_cycle.fe b/crates/uitest/fixtures/name_resolution/import_cycle.fe new file mode 100644 index 0000000000..ab4a9ef004 --- /dev/null +++ b/crates/uitest/fixtures/name_resolution/import_cycle.fe @@ -0,0 +1,12 @@ +use Foo as Bar +use Bar as Baz +use Baz as Foo + +pub mod mod1 { + pub use super::mod2::Foo + +} + +pub mod mod2 { + pub use super::mod1::Foo +} \ No newline at end of file diff --git a/crates/uitest/fixtures/name_resolution/import_cycle.snap b/crates/uitest/fixtures/name_resolution/import_cycle.snap new file mode 100644 index 0000000000..4a9390c2c5 --- /dev/null +++ b/crates/uitest/fixtures/name_resolution/import_cycle.snap @@ -0,0 +1,36 @@ +--- +source: crates/uitest/tests/name_resolution.rs +expression: diags +input_file: crates/uitest/fixtures/name_resolution/import_cycle.fe +--- +error[2-0002]: `Foo` is not found + ┌─ import_cycle.fe:1:5 + │ +1 │ use Foo as Bar + │ ^^^ `Foo` is not found + 
+error[2-0002]: `Bar` is not found + ┌─ import_cycle.fe:2:5 + │ +2 │ use Bar as Baz + │ ^^^ `Bar` is not found + +error[2-0002]: `Baz` is not found + ┌─ import_cycle.fe:3:5 + │ +3 │ use Baz as Foo + │ ^^^ `Baz` is not found + +error[2-0002]: `Foo` is not found + ┌─ import_cycle.fe:6:26 + │ +6 │ pub use super::mod2::Foo + │ ^^^ `Foo` is not found + +error[2-0002]: `Foo` is not found + ┌─ import_cycle.fe:11:26 + │ +11 │ pub use super::mod1::Foo + │ ^^^ `Foo` is not found + + diff --git a/crates/uitest/fixtures/name_resolution/import_invisible.fe b/crates/uitest/fixtures/name_resolution/import_invisible.fe new file mode 100644 index 0000000000..1b4b9a0cd5 --- /dev/null +++ b/crates/uitest/fixtures/name_resolution/import_invisible.fe @@ -0,0 +1,21 @@ +use foo::Bar +mod foo { + struct Bar {} +} + +use foo2::Bar +mod foo2 { + use foo3::Bar + + mod foo3 { + pub struct Bar {} + } +} + +use foo3::foo4::Bar +mod foo3 { + mod foo4 { + pub struct Bar {} + } +} + diff --git a/crates/uitest/fixtures/name_resolution/import_invisible.snap b/crates/uitest/fixtures/name_resolution/import_invisible.snap new file mode 100644 index 0000000000..f9dd22bcce --- /dev/null +++ b/crates/uitest/fixtures/name_resolution/import_invisible.snap @@ -0,0 +1,31 @@ +--- +source: crates/uitest/tests/name_resolution.rs +expression: diags +input_file: crates/uitest/fixtures/name_resolution/import_invisible.fe +--- +error[2-0003]: `Bar` is not visible + ┌─ import_invisible.fe:1:10 + │ +1 │ use foo::Bar + │ ^^^ `Bar` is not visible +2 │ mod foo { +3 │ struct Bar {} + │ --- `Bar` is defined here + +error[2-0003]: `Bar` is not visible + ┌─ import_invisible.fe:6:11 + │ +6 │ use foo2::Bar + │ ^^^ `Bar` is not visible +7 │ mod foo2 { +8 │ use foo3::Bar + │ --- `Bar` is defined here + +error[2-0003]: `foo4` is not visible + ┌─ import_invisible.fe:15:11 + │ +15 │ use foo3::foo4::Bar + │ ^^^^ `foo4` is not visible +16 │ mod foo3 { +17 │ mod foo4 { + │ ---- `foo4` is defined here diff --git 
a/crates/uitest/fixtures/name_resolution/import_missing.fe b/crates/uitest/fixtures/name_resolution/import_missing.fe new file mode 100644 index 0000000000..7ba3ff09c5 --- /dev/null +++ b/crates/uitest/fixtures/name_resolution/import_missing.fe @@ -0,0 +1,14 @@ +use foo::Bar + +use foo::{Foo, Bar} + +use foo::bar::Foo + + +mod foo { + pub struct Foo {} + + mod baz { + pub struct Baz {} + } +} \ No newline at end of file diff --git a/crates/uitest/fixtures/name_resolution/import_missing.snap b/crates/uitest/fixtures/name_resolution/import_missing.snap new file mode 100644 index 0000000000..03941e60b1 --- /dev/null +++ b/crates/uitest/fixtures/name_resolution/import_missing.snap @@ -0,0 +1,24 @@ +--- +source: crates/uitest/src/lib.rs +expression: diags +input_file: crates/uitest/fixtures/name_resolution/import_missing.fe +--- +error[2-0002]: `Bar` is not found + ┌─ import_missing.fe:1:10 + │ +1 │ use foo::Bar + │ ^^^ `Bar` is not found + +error[2-0002]: `Bar` is not found + ┌─ import_missing.fe:3:16 + │ +3 │ use foo::{Foo, Bar} + │ ^^^ `Bar` is not found + +error[2-0002]: `bar` is not found + ┌─ import_missing.fe:5:10 + │ +5 │ use foo::bar::Foo + │ ^^^ `bar` is not found + + diff --git a/crates/uitest/fixtures/name_resolution/import_unimpotable.fe b/crates/uitest/fixtures/name_resolution/import_unimpotable.fe new file mode 100644 index 0000000000..b842a96238 --- /dev/null +++ b/crates/uitest/fixtures/name_resolution/import_unimpotable.fe @@ -0,0 +1,11 @@ +use S::{t, T} +use E::T + + +struct S { + t: T +} + +enum E { + Bar(T) +} \ No newline at end of file diff --git a/crates/uitest/fixtures/name_resolution/import_unimpotable.snap b/crates/uitest/fixtures/name_resolution/import_unimpotable.snap new file mode 100644 index 0000000000..6b788b9908 --- /dev/null +++ b/crates/uitest/fixtures/name_resolution/import_unimpotable.snap @@ -0,0 +1,31 @@ +--- +source: crates/uitest/tests/name_resolution.rs +expression: diags +input_file: 
crates/uitest/fixtures/name_resolution/import_unimpotable.fe +--- +error[2-0003]: `T` is not visible + ┌─ import_unimpotable.fe:2:8 + │ +2 │ use E::T + │ ^ `T` is not visible + · +9 │ enum E { + │ - `T` is defined here + +error[2-0005]: `S` can't be used as a middle segment of a path + ┌─ import_unimpotable.fe:1:5 + │ +1 │ use S::{t, T} + │ ^ `S` can't be used as a middle segment of a path + · +5 │ struct S { + │ - `S` is defined here + +error[2-0005]: `S` can't be used as a middle segment of a path + ┌─ import_unimpotable.fe:1:5 + │ +1 │ use S::{t, T} + │ ^ `S` can't be used as a middle segment of a path + · +5 │ struct S { + │ - `S` is defined here diff --git a/crates/uitest/fixtures/name_resolution/path_invalid_domain.fe b/crates/uitest/fixtures/name_resolution/path_invalid_domain.fe new file mode 100644 index 0000000000..4aaaa62a87 --- /dev/null +++ b/crates/uitest/fixtures/name_resolution/path_invalid_domain.fe @@ -0,0 +1,24 @@ +pub const MyC: i32 = 1 + +pub enum MyE { + Var +} + +pub trait MyT {} +pub trait MyTWithGenerics {} + +use MyE::Var + +pub enum MyE2 +where T: MyE, + U: MyTWithGenerics +{ + Variant(MyC), + Variant2(Var) +} + +fn foo(t: T) {} + +pub struct S { + s: foo::T +} diff --git a/crates/uitest/fixtures/name_resolution/path_invalid_domain.snap b/crates/uitest/fixtures/name_resolution/path_invalid_domain.snap new file mode 100644 index 0000000000..e7194ca3d4 --- /dev/null +++ b/crates/uitest/fixtures/name_resolution/path_invalid_domain.snap @@ -0,0 +1,37 @@ +--- +source: crates/uitest/tests/name_resolution.rs +expression: diags +input_file: crates/uitest/fixtures/name_resolution/path_invalid_domain.fe +--- +error[2-0005]: `foo` can't be used as a middle segment of a path + ┌─ path_invalid_domain.fe:23:8 + │ +20 │ fn foo(t: T) {} + │ --- `foo` is defined here + · +23 │ s: foo::T + │ ^^^ `foo` can't be used as a middle segment of a path + +error[2-0006]: expected type item here + ┌─ path_invalid_domain.fe:14:26 + │ +14 │ U: MyTWithGenerics + │ ^^^ 
expected type here, but found trait `MyT` + +error[2-0006]: expected type item here + ┌─ path_invalid_domain.fe:16:13 + │ +16 │ Variant(MyC), + │ ^^^ expected type here, but found const `MyC` + +error[2-0006]: expected type item here + ┌─ path_invalid_domain.fe:17:14 + │ +17 │ Variant2(Var) + │ ^^^ expected type here, but found value `Var` + +error[2-0007]: expected trait item here + ┌─ path_invalid_domain.fe:13:10 + │ +13 │ where T: MyE, + │ ^^^ expected trait here, but found type `MyE` diff --git a/crates/uitest/fixtures/name_resolution/path_missing_generics.fe b/crates/uitest/fixtures/name_resolution/path_missing_generics.fe new file mode 100644 index 0000000000..97da56729d --- /dev/null +++ b/crates/uitest/fixtures/name_resolution/path_missing_generics.fe @@ -0,0 +1,13 @@ +pub trait Trait {} + +pub struct MyS + where T: Trait, + U: Trait, + Z: Trait, +{ + t: T, + u: U, + z: Z +} + +impl MyS {} \ No newline at end of file diff --git a/crates/uitest/fixtures/name_resolution/path_missing_generics.snap b/crates/uitest/fixtures/name_resolution/path_missing_generics.snap new file mode 100644 index 0000000000..5e677041ad --- /dev/null +++ b/crates/uitest/fixtures/name_resolution/path_missing_generics.snap @@ -0,0 +1,24 @@ +--- +source: crates/uitest/tests/name_resolution.rs +expression: diags +input_file: crates/uitest/fixtures/name_resolution/path_missing_generics.fe +--- +error[2-0002]: `Z` is not found + ┌─ path_missing_generics.fe:6:11 + │ +6 │ Z: Trait, + │ ^ `Z` is not found + +error[2-0002]: `Z` is not found + ┌─ path_missing_generics.fe:10:8 + │ +10 │ z: Z + │ ^ `Z` is not found + +error[2-0002]: `V` is not found + ┌─ path_missing_generics.fe:13:33 + │ +13 │ impl MyS {} + │ ^ `V` is not found + + diff --git a/crates/uitest/fixtures/name_resolution/path_shadow.fe b/crates/uitest/fixtures/name_resolution/path_shadow.fe new file mode 100644 index 0000000000..d7c47dd7d0 --- /dev/null +++ b/crates/uitest/fixtures/name_resolution/path_shadow.fe @@ -0,0 +1,7 @@ +pub 
trait T {} +pub struct MyS + where U: T +{ + t: T, + u: U +} \ No newline at end of file diff --git a/crates/uitest/fixtures/name_resolution/path_shadow.snap b/crates/uitest/fixtures/name_resolution/path_shadow.snap new file mode 100644 index 0000000000..035bba2db1 --- /dev/null +++ b/crates/uitest/fixtures/name_resolution/path_shadow.snap @@ -0,0 +1,12 @@ +--- +source: crates/uitest/tests/name_resolution.rs +expression: diags +input_file: crates/uitest/fixtures/name_resolution/path_shadow.fe +--- +error[2-0007]: expected trait item here + ┌─ path_shadow.fe:3:14 + │ +3 │ where U: T + │ ^ expected trait here, but found type `T` + + diff --git a/crates/uitest/fixtures/name_resolution/record_field_visibility.fe b/crates/uitest/fixtures/name_resolution/record_field_visibility.fe new file mode 100644 index 0000000000..8cf9ea4cc6 --- /dev/null +++ b/crates/uitest/fixtures/name_resolution/record_field_visibility.fe @@ -0,0 +1,18 @@ +pub mod my_mod { + pub struct Foo { + pub x: i32, + y: u32 + } + + pub enum Bar { + Variant {x: i32, pub y: u32} + } +} + +fn foo() { + use my_mod::{Foo, Bar} + + let f = Foo {x: 1, y: 2} + + let bar = Bar::Variant {x: 1, y: 2} +} \ No newline at end of file diff --git a/crates/uitest/fixtures/name_resolution/record_field_visibility.snap b/crates/uitest/fixtures/name_resolution/record_field_visibility.snap new file mode 100644 index 0000000000..a0d3143408 --- /dev/null +++ b/crates/uitest/fixtures/name_resolution/record_field_visibility.snap @@ -0,0 +1,22 @@ +--- +source: crates/uitest/tests/name_resolution.rs +expression: diags +input_file: crates/uitest/fixtures/name_resolution/record_field_visibility.fe +--- +error[2-0003]: `y` is not visible + ┌─ record_field_visibility.fe:15:24 + │ + 4 │ y: u32 + │ - `y` is defined here + · +15 │ let f = Foo {x: 1, y: 2} + │ ^^^^ `y` is not visible + +error[2-0003]: `x` is not visible + ┌─ record_field_visibility.fe:17:29 + │ + 8 │ Variant {x: i32, pub y: u32} + │ - `x` is defined here + · +17 │ let bar = 
Bar::Variant {x: 1, y: 2} + │ ^^^^ `x` is not visible diff --git a/crates/uitest/fixtures/name_resolution/trait_visibility.fe b/crates/uitest/fixtures/name_resolution/trait_visibility.fe new file mode 100644 index 0000000000..0012e0e966 --- /dev/null +++ b/crates/uitest/fixtures/name_resolution/trait_visibility.fe @@ -0,0 +1,16 @@ +fn foo(x: i32) -> i32 { + x.foo() + inner::Foo::foo(x) +} + +mod inner { + trait Foo { + fn foo(self) -> Self + } + + impl Foo for i32 { + fn foo(self) -> i32 { + self + } + } +} diff --git a/crates/uitest/fixtures/name_resolution/trait_visibility.snap b/crates/uitest/fixtures/name_resolution/trait_visibility.snap new file mode 100644 index 0000000000..120ffc755b --- /dev/null +++ b/crates/uitest/fixtures/name_resolution/trait_visibility.snap @@ -0,0 +1,13 @@ +--- +source: crates/uitest/tests/name_resolution.rs +expression: diags +input_file: crates/uitest/fixtures/name_resolution/trait_visibility.fe +--- +error[2-0003]: `Foo` is not visible + ┌─ trait_visibility.fe:3:12 + │ +3 │ inner::Foo::foo(x) + │ ^^^ `Foo` is not visible + · +7 │ trait Foo { + │ --- `Foo` is defined here diff --git a/crates/uitest/fixtures/parser/array.fe b/crates/uitest/fixtures/parser/array.fe new file mode 100644 index 0000000000..05e71bdd22 --- /dev/null +++ b/crates/uitest/fixtures/parser/array.fe @@ -0,0 +1,4 @@ +fn f() { +[1, 2 a, 3] +[1, 2,] +} \ No newline at end of file diff --git a/crates/uitest/fixtures/parser/array.snap b/crates/uitest/fixtures/parser/array.snap new file mode 100644 index 0000000000..c0635ba26d --- /dev/null +++ b/crates/uitest/fixtures/parser/array.snap @@ -0,0 +1,12 @@ +--- +source: crates/uitest/tests/parser.rs +expression: diags +input_file: crates/uitest/fixtures/parser/array.fe +--- +error[1-0001]: unexpected syntax while parsing array definition + ┌─ array.fe:2:7 + │ +2 │ [1, 2 a, 3] + │ ^ unexpected + + diff --git a/crates/uitest/fixtures/parser/block.fe b/crates/uitest/fixtures/parser/block.fe new file mode 100644 index 
0000000000..ffdee30cae --- /dev/null +++ b/crates/uitest/fixtures/parser/block.fe @@ -0,0 +1,7 @@ +fn f() { +{ + let x: i32 u32 = 10 + let y = 10 + +} +} \ No newline at end of file diff --git a/crates/uitest/fixtures/parser/block.snap b/crates/uitest/fixtures/parser/block.snap new file mode 100644 index 0000000000..e18fd86102 --- /dev/null +++ b/crates/uitest/fixtures/parser/block.snap @@ -0,0 +1,12 @@ +--- +source: crates/uitest/tests/parser.rs +expression: diags +input_file: crates/uitest/fixtures/parser/block.fe +--- +error[1-0001]: unexpected syntax while parsing block + ┌─ block.fe:3:16 + │ +3 │ let x: i32 u32 = 10 + │ ^^^^^^^^ unexpected + + diff --git a/crates/uitest/fixtures/parser/call.fe b/crates/uitest/fixtures/parser/call.fe new file mode 100644 index 0000000000..8db93b0f78 --- /dev/null +++ b/crates/uitest/fixtures/parser/call.fe @@ -0,0 +1,5 @@ +fn f() { +foo(x, y a, z ;) + +foo(x, y) +} \ No newline at end of file diff --git a/crates/uitest/fixtures/parser/call.snap b/crates/uitest/fixtures/parser/call.snap new file mode 100644 index 0000000000..5cfb86d24a --- /dev/null +++ b/crates/uitest/fixtures/parser/call.snap @@ -0,0 +1,22 @@ +--- +source: crates/uitest/tests/parser.rs +expression: diags +input_file: crates/uitest/fixtures/parser/call.fe +--- +error[1-0001]: unexpected syntax while parsing function call arguments + ┌─ call.fe:2:10 + │ +2 │ foo(x, y a, z ;) + │ ^ unexpected + +error[1-0001]: unexpected syntax while parsing function call arguments + ┌─ call.fe:2:15 + │ +2 │ foo(x, y a, z ;) + │ ^ unexpected + +error[1-0001]: unexpected syntax while parsing generic type argument list + ┌─ call.fe:4:12 + │ +4 │ foo(x, y) + │ ^ unexpected diff --git a/crates/uitest/fixtures/parser/const_.fe b/crates/uitest/fixtures/parser/const_.fe new file mode 100644 index 0000000000..a9ab53676f --- /dev/null +++ b/crates/uitest/fixtures/parser/const_.fe @@ -0,0 +1,5 @@ +const X = 10 + +const X: i32 + +const X: ]@ = 1 \ No newline at end of file diff --git 
a/crates/uitest/fixtures/parser/const_.snap b/crates/uitest/fixtures/parser/const_.snap new file mode 100644 index 0000000000..a8a17904e1 --- /dev/null +++ b/crates/uitest/fixtures/parser/const_.snap @@ -0,0 +1,30 @@ +--- +source: crates/uitest/tests/parser.rs +expression: diags +input_file: crates/uitest/fixtures/parser/const_.fe +--- +error[1-0001]: missing type bound for const definition + ┌─ const_.fe:1:8 + │ +1 │ const X = 10 + │ ^ expected `:` + +error[1-0001]: expected `=` + ┌─ const_.fe:3:13 + │ +3 │ const X: i32 + │ ^ expected `=` + +error[1-0001]: expected type + ┌─ const_.fe:5:9 + │ +5 │ const X: ]@ = 1 + │ ^ expected type + +error[1-0001]: unexpected syntax while parsing type + ┌─ const_.fe:5:10 + │ +5 │ const X: ]@ = 1 + │ ^^ unexpected + + diff --git a/crates/uitest/fixtures/parser/enum_.fe b/crates/uitest/fixtures/parser/enum_.fe new file mode 100644 index 0000000000..753c314d28 --- /dev/null +++ b/crates/uitest/fixtures/parser/enum_.fe @@ -0,0 +1,6 @@ +pub enum MyEnum { + X(u32, T + A + Y(T, u32) A + Z +} \ No newline at end of file diff --git a/crates/uitest/fixtures/parser/enum_.snap b/crates/uitest/fixtures/parser/enum_.snap new file mode 100644 index 0000000000..a17f1482cf --- /dev/null +++ b/crates/uitest/fixtures/parser/enum_.snap @@ -0,0 +1,36 @@ +--- +source: crates/uitest/tests/parser.rs +expression: diags +input_file: crates/uitest/fixtures/parser/enum_.fe +--- +error[1-0001]: missing closing `)` for tuple type definition + ┌─ enum_.fe:2:13 + │ +2 │ X(u32, T + │ ^ expected `)` or `,` + +error[1-0001]: missing closing `}` for `enum` variant list + ┌─ enum_.fe:2:13 + │ +2 │ X(u32, T + │ ^ expected `}` or `,` + +error[1-0001]: missing closing `}` for `enum` variant list + ┌─ enum_.fe:3:6 + │ +3 │ A + │ ^ expected `}` or `,` + +error[1-0001]: unexpected syntax while parsing `enum` variant list + ┌─ enum_.fe:4:15 + │ +4 │ Y(T, u32) A + │ ^ unexpected + +error[1-0001]: missing closing `}` for `enum` variant list + ┌─ enum_.fe:4:16 + │ +4 │ Y(T, 
u32) A + │ ^ expected `}` or `,` + + diff --git a/crates/uitest/fixtures/parser/extern_.fe b/crates/uitest/fixtures/parser/extern_.fe new file mode 100644 index 0000000000..12b50f7186 --- /dev/null +++ b/crates/uitest/fixtures/parser/extern_.fe @@ -0,0 +1,7 @@ +extern { + pub unsafe fn Foo(x: *usize) + + struct Foo { + + pub unsafe fn foo() +} \ No newline at end of file diff --git a/crates/uitest/fixtures/parser/extern_.snap b/crates/uitest/fixtures/parser/extern_.snap new file mode 100644 index 0000000000..bdd33cecdb --- /dev/null +++ b/crates/uitest/fixtures/parser/extern_.snap @@ -0,0 +1,24 @@ +--- +source: crates/uitest/tests/parser.rs +expression: diags +input_file: crates/uitest/fixtures/parser/extern_.fe +--- +error[1-0001]: only `fn` is allowed in this block + ┌─ extern_.fe:4:5 + │ +4 │ struct Foo { + │ ^^^^^^ only `fn` is allowed in this block + +error[1-0001]: expected name for field + ┌─ extern_.fe:6:8 + │ +6 │ pub unsafe fn foo() + │ ^ expected identifier + +error[1-0001]: unexpected syntax while parsing function definition + ┌─ extern_.fe:7:1 + │ +7 │ } + │ ^ unexpected + + diff --git a/crates/uitest/fixtures/parser/fn_missing_body.fe b/crates/uitest/fixtures/parser/fn_missing_body.fe new file mode 100644 index 0000000000..147db222b3 --- /dev/null +++ b/crates/uitest/fixtures/parser/fn_missing_body.fe @@ -0,0 +1,7 @@ +fn foo(x: u8, y: u64) -> u8 + +fn bar() asdf + +fn baz(x: u8) -> u8 { + return 10 +} diff --git a/crates/uitest/fixtures/parser/fn_missing_body.snap b/crates/uitest/fixtures/parser/fn_missing_body.snap new file mode 100644 index 0000000000..40eea7d4fb --- /dev/null +++ b/crates/uitest/fixtures/parser/fn_missing_body.snap @@ -0,0 +1,18 @@ +--- +source: crates/uitest/tests/parser.rs +expression: diags +input_file: crates/uitest/fixtures/parser/fn_missing_body.fe +--- +error[1-0001]: expected `{` or `where` + ┌─ fn_missing_body.fe:1:28 + │ +1 │ fn foo(x: u8, y: u64) -> u8 + │ ^ expected `{` or `where` + +error[1-0001]: unexpected syntax 
while parsing function definition + ┌─ fn_missing_body.fe:3:13 + │ +3 │ fn bar() asdf + │ ^^^^ unexpected + + diff --git a/crates/uitest/fixtures/parser/fn_missing_parameters.fe b/crates/uitest/fixtures/parser/fn_missing_parameters.fe new file mode 100644 index 0000000000..bf8c0e8ede --- /dev/null +++ b/crates/uitest/fixtures/parser/fn_missing_parameters.fe @@ -0,0 +1,8 @@ +fn foo -> u8 {} + +fn bar { +} + +fn baz -> u8 {} + +fn f where T: U {} \ No newline at end of file diff --git a/crates/uitest/fixtures/parser/fn_missing_parameters.snap b/crates/uitest/fixtures/parser/fn_missing_parameters.snap new file mode 100644 index 0000000000..5240af8580 --- /dev/null +++ b/crates/uitest/fixtures/parser/fn_missing_parameters.snap @@ -0,0 +1,30 @@ +--- +source: crates/uitest/tests/parser.rs +expression: diags +input_file: crates/uitest/fixtures/parser/fn_missing_parameters.fe +--- +error[1-0001]: expected function parameter list + ┌─ fn_missing_parameters.fe:1:7 + │ +1 │ fn foo -> u8 {} + │ ^ expected `(` + +error[1-0001]: expected function parameter list + ┌─ fn_missing_parameters.fe:3:7 + │ +3 │ fn bar { + │ ^ expected `(` + +error[1-0001]: expected function parameter list + ┌─ fn_missing_parameters.fe:6:7 + │ +6 │ fn baz -> u8 {} + │ ^ expected `(` + +error[1-0001]: expected function parameter list + ┌─ fn_missing_parameters.fe:8:8 + │ +8 │ fn f where T: U {} + │ ^ expected `(` + + diff --git a/crates/uitest/fixtures/parser/fn_modifiers.fe b/crates/uitest/fixtures/parser/fn_modifiers.fe new file mode 100644 index 0000000000..5c427b184c --- /dev/null +++ b/crates/uitest/fixtures/parser/fn_modifiers.fe @@ -0,0 +1,14 @@ +pub pub struct Foo {} +impl Foo { + pub pub unsafe unsafe fn f() {} + unsafe pub fn g() {} + unsafe unsafe pub unsafe pub pub unsafe fn h() {} +} + +trait T { + fn f(self) +} + +impl T for Foo { + pub unsafe fn f(self) {} +} \ No newline at end of file diff --git a/crates/uitest/fixtures/parser/fn_modifiers.snap 
b/crates/uitest/fixtures/parser/fn_modifiers.snap new file mode 100644 index 0000000000..8e560a1bfb --- /dev/null +++ b/crates/uitest/fixtures/parser/fn_modifiers.snap @@ -0,0 +1,78 @@ +--- +source: crates/uitest/tests/parser.rs +expression: diags +input_file: crates/uitest/fixtures/parser/fn_modifiers.fe +--- +error[1-0001]: duplicate `pub` modifier + ┌─ fn_modifiers.fe:1:5 + │ +1 │ pub pub struct Foo {} + │ ^^^ unexpected + +error[1-0001]: duplicate `pub` modifier + ┌─ fn_modifiers.fe:3:9 + │ +3 │ pub pub unsafe unsafe fn f() {} + │ ^^^ unexpected + +error[1-0001]: duplicate `unsafe` modifier + ┌─ fn_modifiers.fe:3:20 + │ +3 │ pub pub unsafe unsafe fn f() {} + │ ^^^^^^ unexpected + +error[1-0001]: `pub` modifier must come before `unsafe` + ┌─ fn_modifiers.fe:4:12 + │ +4 │ unsafe pub fn g() {} + │ ^^^ unexpected + +error[1-0001]: duplicate `unsafe` modifier + ┌─ fn_modifiers.fe:5:12 + │ +5 │ unsafe unsafe pub unsafe pub pub unsafe fn h() {} + │ ^^^^^^ unexpected + +error[1-0001]: `pub` modifier must come before `unsafe` + ┌─ fn_modifiers.fe:5:19 + │ +5 │ unsafe unsafe pub unsafe pub pub unsafe fn h() {} + │ ^^^ unexpected + +error[1-0001]: duplicate `unsafe` modifier + ┌─ fn_modifiers.fe:5:23 + │ +5 │ unsafe unsafe pub unsafe pub pub unsafe fn h() {} + │ ^^^^^^ unexpected + +error[1-0001]: duplicate `pub` modifier + ┌─ fn_modifiers.fe:5:30 + │ +5 │ unsafe unsafe pub unsafe pub pub unsafe fn h() {} + │ ^^^ unexpected + +error[1-0001]: duplicate `pub` modifier + ┌─ fn_modifiers.fe:5:34 + │ +5 │ unsafe unsafe pub unsafe pub pub unsafe fn h() {} + │ ^^^ unexpected + +error[1-0001]: duplicate `unsafe` modifier + ┌─ fn_modifiers.fe:5:38 + │ +5 │ unsafe unsafe pub unsafe pub pub unsafe fn h() {} + │ ^^^^^^ unexpected + +error[1-0001]: `pub` modifier is not allowed in this block + ┌─ fn_modifiers.fe:13:5 + │ +13 │ pub unsafe fn f(self) {} + │ ^^^ unexpected + +error[1-0001]: `unsafe` modifier is not allowed in this block + ┌─ fn_modifiers.fe:13:9 + │ +13 │ pub unsafe fn 
f(self) {} + │ ^^^^^^ unexpected + + diff --git a/crates/uitest/fixtures/parser/for_.fe b/crates/uitest/fixtures/parser/for_.fe new file mode 100644 index 0000000000..5c6229c883 --- /dev/null +++ b/crates/uitest/fixtures/parser/for_.fe @@ -0,0 +1,9 @@ +fn f() { +for i arr { } + +for in arr { } + +for @ in arr {} + +for @ in arr x y {} +} diff --git a/crates/uitest/fixtures/parser/for_.snap b/crates/uitest/fixtures/parser/for_.snap new file mode 100644 index 0000000000..38a718c03d --- /dev/null +++ b/crates/uitest/fixtures/parser/for_.snap @@ -0,0 +1,48 @@ +--- +source: crates/uitest/tests/parser.rs +expression: diags +input_file: crates/uitest/fixtures/parser/for_.fe +--- +error[1-0001]: expected `in` + ┌─ for_.fe:2:6 + │ +2 │ for i arr { } + │ ^ expected `in` + +error[1-0001]: expected pattern + ┌─ for_.fe:4:4 + │ +4 │ for in arr { } + │ ^ expected pattern + +error[1-0001]: expected pattern + ┌─ for_.fe:6:4 + │ +6 │ for @ in arr {} + │ ^ expected pattern + +error[1-0001]: unexpected syntax while parsing pattern + ┌─ for_.fe:6:5 + │ +6 │ for @ in arr {} + │ ^ unexpected + +error[1-0001]: expected pattern + ┌─ for_.fe:8:4 + │ +8 │ for @ in arr x y {} + │ ^ expected pattern + +error[1-0001]: unexpected syntax while parsing pattern + ┌─ for_.fe:8:5 + │ +8 │ for @ in arr x y {} + │ ^ unexpected + +error[1-0001]: unexpected syntax while parsing `for` statement + ┌─ for_.fe:8:14 + │ +8 │ for @ in arr x y {} + │ ^^^ unexpected + + diff --git a/crates/uitest/fixtures/parser/func.fe b/crates/uitest/fixtures/parser/func.fe new file mode 100644 index 0000000000..b2848720db --- /dev/null +++ b/crates/uitest/fixtures/parser/func.fe @@ -0,0 +1,13 @@ +fn foo>(x: i32, _ mut y: u32, z: u32) -> T, u where T: Trait2 +{ + +} + +fn foo<<(x: i32) + where T: Trait2 +{ + +} + +fn bar() + where T: 75 {} \ No newline at end of file diff --git a/crates/uitest/fixtures/parser/func.snap b/crates/uitest/fixtures/parser/func.snap new file mode 100644 index 0000000000..0d7cdc30b7 --- /dev/null 
+++ b/crates/uitest/fixtures/parser/func.snap @@ -0,0 +1,54 @@ +--- +source: crates/uitest/tests/parser.rs +expression: diags +input_file: crates/uitest/fixtures/parser/func.fe +--- +error[1-0001]: unexpected syntax while parsing function definition + ┌─ func.fe:1:17 + │ +1 │ fn foo>(x: i32, _ mut y: u32, z: u32) -> T, u where T: Trait2 + │ ^ unexpected + +error[1-0001]: unexpected syntax while parsing function parameter + ┌─ func.fe:1:29 + │ +1 │ fn foo>(x: i32, _ mut y: u32, z: u32) -> T, u where T: Trait2 + │ ^^^ unexpected + +error[1-0001]: unexpected syntax while parsing function definition + ┌─ func.fe:1:54 + │ +1 │ fn foo>(x: i32, _ mut y: u32, z: u32) -> T, u where T: Trait2 + │ ^^^ unexpected + +error[1-0001]: unexpected syntax while parsing generic parameter list + ┌─ func.fe:6:8 + │ +6 │ fn foo<<(x: i32) + │ ^^ unexpected + +error[1-0001]: expected trait name + ┌─ func.fe:12:10 + │ +12 │ fn bar() + │ ^ expected trait name + +error[1-0001]: unexpected syntax while parsing trait name + ┌─ func.fe:12:11 + │ +12 │ fn bar() + │ ^ unexpected + +error[1-0001]: expected trait name + ┌─ func.fe:13:13 + │ +13 │ where T: 75 {} + │ ^ expected trait name + +error[1-0001]: unexpected syntax while parsing trait name + ┌─ func.fe:13:14 + │ +13 │ where T: 75 {} + │ ^^ unexpected + + diff --git a/crates/uitest/fixtures/parser/if_.fe b/crates/uitest/fixtures/parser/if_.fe new file mode 100644 index 0000000000..948c00bcdb --- /dev/null +++ b/crates/uitest/fixtures/parser/if_.fe @@ -0,0 +1,18 @@ +fn f() { + +if x y { +} + +if x { + +} else x {} + +if x { } else x if x { } else { } + +if x { + 10 +else { + 1 +} + +} \ No newline at end of file diff --git a/crates/uitest/fixtures/parser/if_.snap b/crates/uitest/fixtures/parser/if_.snap new file mode 100644 index 0000000000..0faed6fa90 --- /dev/null +++ b/crates/uitest/fixtures/parser/if_.snap @@ -0,0 +1,30 @@ +--- +source: crates/uitest/tests/parser.rs +expression: diags +input_file: crates/uitest/fixtures/parser/if_.fe +--- 
+error[1-0001]: unexpected syntax while parsing `if` expression + ┌─ if_.fe:3:6 + │ +3 │ if x y { + │ ^ unexpected + +error[1-0001]: unexpected syntax while parsing `if` expression + ┌─ if_.fe:8:8 + │ +8 │ } else x {} + │ ^ unexpected + +error[1-0001]: unexpected syntax while parsing `if` expression + ┌─ if_.fe:10:15 + │ +10 │ if x { } else x if x { } else { } + │ ^ unexpected + +error[1-0001]: expected expression + ┌─ if_.fe:14:1 + │ +14 │ else { + │ ^ expected expression + + diff --git a/crates/uitest/fixtures/parser/impl_.fe b/crates/uitest/fixtures/parser/impl_.fe new file mode 100644 index 0000000000..f7192a5d44 --- /dev/null +++ b/crates/uitest/fixtures/parser/impl_.fe @@ -0,0 +1,6 @@ +impl Foo +{ } \ No newline at end of file diff --git a/crates/uitest/fixtures/parser/impl_.snap b/crates/uitest/fixtures/parser/impl_.snap new file mode 100644 index 0000000000..47c112976d --- /dev/null +++ b/crates/uitest/fixtures/parser/impl_.snap @@ -0,0 +1,12 @@ +--- +source: crates/uitest/tests/parser.rs +expression: diags +input_file: crates/uitest/fixtures/parser/impl_.fe +--- +error[1-0001]: expected type + ┌─ impl_.fe:1:12 + │ +1 │ impl Foo for Y for Y` for generic type argument list + ┌─ impl_trait.fe:1:23 + │ +1 │ impl X for Y` or `,` + +error[1-0001]: missing closing `>` for generic type argument list + ┌─ impl_trait.fe:3:12 + │ +3 │ impl X` or `,` + +error[1-0001]: missing closing `>` for generic type argument list + ┌─ impl_trait.fe:3:20 + │ +3 │ impl X` or `,` + +error[1-0001]: unexpected syntax while parsing `impl` trait block + ┌─ impl_trait.fe:5:8 + │ +5 │ impl X @ for Y {} + │ ^ unexpected + + diff --git a/crates/uitest/fixtures/parser/index.fe b/crates/uitest/fixtures/parser/index.fe new file mode 100644 index 0000000000..5dc81403c0 --- /dev/null +++ b/crates/uitest/fixtures/parser/index.fe @@ -0,0 +1,5 @@ +fn f() { +x[1 a] +x[2 + 3 +x[41] +} \ No newline at end of file diff --git a/crates/uitest/fixtures/parser/index.snap 
b/crates/uitest/fixtures/parser/index.snap new file mode 100644 index 0000000000..edbface333 --- /dev/null +++ b/crates/uitest/fixtures/parser/index.snap @@ -0,0 +1,18 @@ +--- +source: crates/uitest/tests/parser.rs +expression: diags +input_file: crates/uitest/fixtures/parser/index.fe +--- +error[1-0001]: unexpected syntax while parsing index expression + ┌─ index.fe:2:5 + │ +2 │ x[1 a] + │ ^ unexpected + +error[1-0001]: missing closing `]` for index expression + ┌─ index.fe:3:8 + │ +3 │ x[2 + 3 + │ ^ expected `]` + + diff --git a/crates/uitest/fixtures/parser/match_.fe b/crates/uitest/fixtures/parser/match_.fe new file mode 100644 index 0000000000..dd8d33250d --- /dev/null +++ b/crates/uitest/fixtures/parser/match_.fe @@ -0,0 +1,11 @@ +fn f() { +match X => { + Foo() => true + Bar +} + +match X { + Foo(i, j, => true x + Bar => x +} +} \ No newline at end of file diff --git a/crates/uitest/fixtures/parser/match_.snap b/crates/uitest/fixtures/parser/match_.snap new file mode 100644 index 0000000000..85a9fbc3e4 --- /dev/null +++ b/crates/uitest/fixtures/parser/match_.snap @@ -0,0 +1,30 @@ +--- +source: crates/uitest/tests/parser.rs +expression: diags +input_file: crates/uitest/fixtures/parser/match_.fe +--- +error[1-0001]: unexpected syntax while parsing `match` expression + ┌─ match_.fe:2:10 + │ +2 │ match X => { + │ ^^ unexpected + +error[1-0001]: expected `=>` + ┌─ match_.fe:4:7 + │ +4 │ Bar + │ ^ expected `=>` + +error[1-0001]: expected pattern + ┌─ match_.fe:8:13 + │ +8 │ Foo(i, j, => true x + │ ^ expected pattern + +error[1-0001]: unexpected syntax while parsing `match` arm list + ┌─ match_.fe:8:24 + │ +8 │ Foo(i, j, => true x + │ ^ unexpected + + diff --git a/crates/uitest/fixtures/parser/method.fe b/crates/uitest/fixtures/parser/method.fe new file mode 100644 index 0000000000..60ec01baf8 --- /dev/null +++ b/crates/uitest/fixtures/parser/method.fe @@ -0,0 +1,7 @@ +fn f() { +foo::bar.baz(1, 2) + +foo::bar.x(1, 2 E,) + +foo::bar.baz() +} \ No newline at end of 
file diff --git a/crates/uitest/fixtures/parser/method.snap b/crates/uitest/fixtures/parser/method.snap new file mode 100644 index 0000000000..dea18a27a7 --- /dev/null +++ b/crates/uitest/fixtures/parser/method.snap @@ -0,0 +1,18 @@ +--- +source: crates/uitest/tests/parser.rs +expression: diags +input_file: crates/uitest/fixtures/parser/method.fe +--- +error[1-0001]: unexpected syntax while parsing generic type argument list + ┌─ method.fe:2:23 + │ +2 │ foo::bar.baz(1, 2) + │ ^ unexpected + +error[1-0001]: unexpected syntax while parsing function call arguments + ┌─ method.fe:4:17 + │ +4 │ foo::bar.x(1, 2 E,) + │ ^ unexpected + + diff --git a/crates/uitest/fixtures/parser/operators.fe b/crates/uitest/fixtures/parser/operators.fe new file mode 100644 index 0000000000..d38754bd34 --- /dev/null +++ b/crates/uitest/fixtures/parser/operators.fe @@ -0,0 +1,11 @@ +fn f() { + x += 1 + x + = 1 + x -= 1 + x - = 1 + x * = 1 + x << 1 + x < < 1 + x <= 1 + x < = 1 +} \ No newline at end of file diff --git a/crates/uitest/fixtures/parser/operators.snap b/crates/uitest/fixtures/parser/operators.snap new file mode 100644 index 0000000000..d15e9a7f48 --- /dev/null +++ b/crates/uitest/fixtures/parser/operators.snap @@ -0,0 +1,66 @@ +--- +source: crates/uitest/tests/parser.rs +expression: diags +input_file: crates/uitest/fixtures/parser/operators.fe +--- +error[1-0001]: expected expression + ┌─ operators.fe:3:8 + │ +3 │ x + = 1 + │ ^ expected expression + +error[1-0001]: unexpected syntax while parsing binary expression + ┌─ operators.fe:3:9 + │ +3 │ x + = 1 + │ ^^^ unexpected + +error[1-0001]: expected expression + ┌─ operators.fe:5:8 + │ +5 │ x - = 1 + │ ^ expected expression + +error[1-0001]: unexpected syntax while parsing binary expression + ┌─ operators.fe:5:12 + │ +5 │ x - = 1 + │ ^^^ unexpected + +error[1-0001]: expected expression + ┌─ operators.fe:6:8 + │ +6 │ x * = 1 + │ ^ expected expression + +error[1-0001]: unexpected syntax while parsing binary expression + ┌─ 
operators.fe:6:9 + │ +6 │ x * = 1 + │ ^^^ unexpected + +error[1-0001]: expected expression + ┌─ operators.fe:8:8 + │ +8 │ x < < 1 + │ ^ expected expression + +error[1-0001]: unexpected syntax while parsing binary expression + ┌─ operators.fe:8:9 + │ +8 │ x < < 1 + │ ^^^ unexpected + +error[1-0001]: expected expression + ┌─ operators.fe:10:8 + │ +10 │ x < = 1 + │ ^ expected expression + +error[1-0001]: unexpected syntax while parsing binary expression + ┌─ operators.fe:10:9 + │ +10 │ x < = 1 + │ ^^^ unexpected + + diff --git a/crates/uitest/fixtures/parser/struct_.fe b/crates/uitest/fixtures/parser/struct_.fe new file mode 100644 index 0000000000..50f0c13817 --- /dev/null +++ b/crates/uitest/fixtures/parser/struct_.fe @@ -0,0 +1,16 @@ +pub struct i32 { + return 1 + } + + x: i32 +} \ No newline at end of file diff --git a/crates/uitest/fixtures/parser/struct_.snap b/crates/uitest/fixtures/parser/struct_.snap new file mode 100644 index 0000000000..ac499b6e46 --- /dev/null +++ b/crates/uitest/fixtures/parser/struct_.snap @@ -0,0 +1,54 @@ +--- +source: crates/uitest/tests/parser.rs +expression: diags +input_file: crates/uitest/fixtures/parser/struct_.fe +--- +error[1-0001]: expected name for struct definition + ┌─ struct_.fe:1:11 + │ +1 │ pub struct` for generic parameter list + ┌─ struct_.fe:1:16 + │ +1 │ pub struct` or `,` + +error[1-0001]: missing type bound for `where` predicate + ┌─ struct_.fe:2:8 + │ +2 │ where T + │ ^ expected `:` + +error[1-0001]: expected `,` + ┌─ struct_.fe:2:8 + │ +2 │ where T + │ ^ expected `,` + +error[1-0001]: missing type bound for field + ┌─ struct_.fe:6:8 + │ +6 │ foo + │ ^ expected `:` + +error[1-0001]: missing closing `}` for record field list + ┌─ struct_.fe:6:8 + │ +6 │ foo + │ ^ expected `}` or `,` + +error[1-0001]: function definition in struct is not allowed + ┌─ struct_.fe:11:9 + │ +11 │ pub fn foo() -> i32 { + │ ^^ function definition in struct is not allowed + +error[1-0001]: missing closing `}` for record field list + ┌─ 
struct_.fe:13:6 + │ +13 │ } + │ ^ expected `}` or `,` + + diff --git a/crates/uitest/fixtures/parser/struct_field_missing_comma.fe b/crates/uitest/fixtures/parser/struct_field_missing_comma.fe new file mode 100644 index 0000000000..2a644665c7 --- /dev/null +++ b/crates/uitest/fixtures/parser/struct_field_missing_comma.fe @@ -0,0 +1,5 @@ +struct S { + x: u8 + y: i8 + ,z: i8, +} diff --git a/crates/uitest/fixtures/parser/struct_field_missing_comma.snap b/crates/uitest/fixtures/parser/struct_field_missing_comma.snap new file mode 100644 index 0000000000..904c376215 --- /dev/null +++ b/crates/uitest/fixtures/parser/struct_field_missing_comma.snap @@ -0,0 +1,12 @@ +--- +source: crates/uitest/tests/parser.rs +expression: diags +input_file: crates/uitest/fixtures/parser/struct_field_missing_comma.fe +--- +error[1-0001]: missing closing `}` for record field list + ┌─ struct_field_missing_comma.fe:2:10 + │ +2 │ x: u8 + │ ^ expected `}` or `,` + + diff --git a/crates/uitest/fixtures/parser/struct_missing_body.fe b/crates/uitest/fixtures/parser/struct_missing_body.fe new file mode 100644 index 0000000000..03c461c7d1 --- /dev/null +++ b/crates/uitest/fixtures/parser/struct_missing_body.fe @@ -0,0 +1,6 @@ + +struct S + +struct T + +struct Foo {} diff --git a/crates/uitest/fixtures/parser/struct_missing_body.snap b/crates/uitest/fixtures/parser/struct_missing_body.snap new file mode 100644 index 0000000000..02a04799f0 --- /dev/null +++ b/crates/uitest/fixtures/parser/struct_missing_body.snap @@ -0,0 +1,18 @@ +--- +source: crates/uitest/tests/parser.rs +expression: diags +input_file: crates/uitest/fixtures/parser/struct_missing_body.fe +--- +error[1-0001]: expected `{`, `where` or `<` + ┌─ struct_missing_body.fe:2:9 + │ +2 │ struct S + │ ^ expected `{`, `where` or `<` + +error[1-0001]: expected `{` or `where` + ┌─ struct_missing_body.fe:4:12 + │ +4 │ struct T + │ ^ expected `{` or `where` + + diff --git a/crates/uitest/fixtures/parser/trait_.fe 
b/crates/uitest/fixtures/parser/trait_.fe new file mode 100644 index 0000000000..fbc4691c6a --- /dev/null +++ b/crates/uitest/fixtures/parser/trait_.fe @@ -0,0 +1,13 @@ +trait Foo{} + +trait Bar + +trait Bar where T: Add {} + +trait Bar< + where T: Add +{ + +} \ No newline at end of file diff --git a/crates/uitest/fixtures/parser/trait_.snap b/crates/uitest/fixtures/parser/trait_.snap new file mode 100644 index 0000000000..305c3fdb5a --- /dev/null +++ b/crates/uitest/fixtures/parser/trait_.snap @@ -0,0 +1,24 @@ +--- +source: crates/uitest/tests/parser.rs +expression: diags +input_file: crates/uitest/fixtures/parser/trait_.fe +--- +error[1-0001]: expected identifier, `const` or `>` + ┌─ trait_.fe:3:13 + │ +3 │ trait Bar` + +error[1-0001]: expected `{`, `where` or `:` + ┌─ trait_.fe:5:15 + │ +5 │ trait Bar + │ ^ expected `{`, `where` or `:` + +error[1-0001]: unexpected syntax while parsing generic parameter list + ┌─ trait_.fe:9:11 + │ +9 │ trait Bar< + │ ^ unexpected + + diff --git a/crates/uitest/fixtures/parser/trait_pub_fn.fe b/crates/uitest/fixtures/parser/trait_pub_fn.fe new file mode 100644 index 0000000000..e3391630a1 --- /dev/null +++ b/crates/uitest/fixtures/parser/trait_pub_fn.fe @@ -0,0 +1,5 @@ +trait Fooable { + pub fn foo(mut self) + pub unsafe fn bar(self) + fn x(self) +} diff --git a/crates/uitest/fixtures/parser/trait_pub_fn.snap b/crates/uitest/fixtures/parser/trait_pub_fn.snap new file mode 100644 index 0000000000..7cf14e7b3b --- /dev/null +++ b/crates/uitest/fixtures/parser/trait_pub_fn.snap @@ -0,0 +1,24 @@ +--- +source: crates/uitest/tests/parser.rs +expression: diags +input_file: crates/uitest/fixtures/parser/trait_pub_fn.fe +--- +error[1-0001]: `pub` modifier is not allowed in this block + ┌─ trait_pub_fn.fe:2:5 + │ +2 │ pub fn foo(mut self) + │ ^^^ unexpected + +error[1-0001]: `pub` modifier is not allowed in this block + ┌─ trait_pub_fn.fe:3:5 + │ +3 │ pub unsafe fn bar(self) + │ ^^^ unexpected + +error[1-0001]: `unsafe` modifier is not 
allowed in this block + ┌─ trait_pub_fn.fe:3:9 + │ +3 │ pub unsafe fn bar(self) + │ ^^^^^^ unexpected + + diff --git a/crates/uitest/fixtures/parser/type_.fe b/crates/uitest/fixtures/parser/type_.fe new file mode 100644 index 0000000000..1de5391495 --- /dev/null +++ b/crates/uitest/fixtures/parser/type_.fe @@ -0,0 +1 @@ +type Result \ No newline at end of file diff --git a/crates/uitest/fixtures/parser/type_.snap b/crates/uitest/fixtures/parser/type_.snap new file mode 100644 index 0000000000..07d2723c3e --- /dev/null +++ b/crates/uitest/fixtures/parser/type_.snap @@ -0,0 +1,12 @@ +--- +source: crates/uitest/tests/parser.rs +expression: diags +input_file: crates/uitest/fixtures/parser/type_.fe +--- +error[1-0001]: expected identifier, `const` or `>` + ┌─ type_.fe:1:15 + │ +1 │ type Result + │ ^ expected identifier, `const` or `>` + + diff --git a/crates/uitest/fixtures/parser/use_.fe b/crates/uitest/fixtures/parser/use_.fe new file mode 100644 index 0000000000..eb4678b476 --- /dev/null +++ b/crates/uitest/fixtures/parser/use_.fe @@ -0,0 +1,3 @@ +use foo::bar::*::A +use foo::bar::*::{A, B} +use foo::bar::* as B \ No newline at end of file diff --git a/crates/uitest/fixtures/parser/use_.snap b/crates/uitest/fixtures/parser/use_.snap new file mode 100644 index 0000000000..1441a59d99 --- /dev/null +++ b/crates/uitest/fixtures/parser/use_.snap @@ -0,0 +1,24 @@ +--- +source: crates/uitest/tests/parser.rs +expression: diags +input_file: crates/uitest/fixtures/parser/use_.fe +--- +error[1-0001]: can't specify path after `*` + ┌─ use_.fe:1:16 + │ +1 │ use foo::bar::*::A + │ ^^ can't specify path after `*` + +error[1-0001]: can't use `*` with `{}` + ┌─ use_.fe:2:18 + │ +2 │ use foo::bar::*::{A, B} + │ ^ can't use `*` with `{}` + +error[1-0001]: can't use `as` with `*` + ┌─ use_.fe:3:17 + │ +3 │ use foo::bar::* as B + │ ^^ can't use `as` with `*` + + diff --git a/crates/uitest/fixtures/parser/while_.fe b/crates/uitest/fixtures/parser/while_.fe new file mode 100644 index 
0000000000..b247fcac3a --- /dev/null +++ b/crates/uitest/fixtures/parser/while_.fe @@ -0,0 +1,9 @@ +fn f() { +while @ {} + +while true { + x + 1 +}} + +while true {} +} diff --git a/crates/uitest/fixtures/parser/while_.snap b/crates/uitest/fixtures/parser/while_.snap new file mode 100644 index 0000000000..a8a480c2ec --- /dev/null +++ b/crates/uitest/fixtures/parser/while_.snap @@ -0,0 +1,25 @@ +--- +source: crates/uitest/tests/parser.rs +expression: diags +input_file: crates/uitest/fixtures/parser/while_.fe +--- +error[1-0001]: expected expression + ┌─ while_.fe:2:6 + │ +2 │ while @ {} + │ ^ expected expression + +error[1-0001]: unexpected syntax while parsing `while` statement + ┌─ while_.fe:2:7 + │ +2 │ while @ {} + │ ^ unexpected + +error[1-0001]: unexpected syntax while parsing item + ┌─ while_.fe:8:1 + │ +8 │ ╭ while true {} +9 │ │ } + │ ╰─^ unexpected + + diff --git a/crates/uitest/fixtures/ty/const_ty/const_ty_expected.fe b/crates/uitest/fixtures/ty/const_ty/const_ty_expected.fe new file mode 100644 index 0000000000..c75cfcb6e2 --- /dev/null +++ b/crates/uitest/fixtures/ty/const_ty/const_ty_expected.fe @@ -0,0 +1,4 @@ +pub struct Foo { + N: u256 +} +fn foo(x: Foo) {} \ No newline at end of file diff --git a/crates/uitest/fixtures/ty/const_ty/const_ty_expected.snap b/crates/uitest/fixtures/ty/const_ty/const_ty_expected.snap new file mode 100644 index 0000000000..0d9dfc57da --- /dev/null +++ b/crates/uitest/fixtures/ty/const_ty/const_ty_expected.snap @@ -0,0 +1,12 @@ +--- +source: crates/uitest/tests/ty.rs +expression: diags +input_file: crates/uitest/fixtures/ty/const_ty/const_ty_expected.fe +--- +error[3-0012]: expected const type + ┌─ const_ty_expected.fe:4:11 + │ +4 │ fn foo(x: Foo) {} + │ ^^^^^^^^ expected const type of `u256` here + + diff --git a/crates/uitest/fixtures/ty/const_ty/const_ty_mismatch.fe b/crates/uitest/fixtures/ty/const_ty/const_ty_mismatch.fe new file mode 100644 index 0000000000..5995f4e0d3 --- /dev/null +++ 
b/crates/uitest/fixtures/ty/const_ty/const_ty_mismatch.fe @@ -0,0 +1,26 @@ +pub struct Foo { + N: u256 +} + +pub struct Foo2 { + t: T, + N: u256 +} + + +pub struct Bar { + N: bool +} + +pub fn foo(x: Foo) {} +pub fn foo2(x: Foo2) {} + +pub fn bar(x: Bar<3>) {} + +pub struct Bar2 { + N: u64 +} + +pub enum Baz { + MyField{N: u64, x: i32} +} \ No newline at end of file diff --git a/crates/uitest/fixtures/ty/const_ty/const_ty_mismatch.snap b/crates/uitest/fixtures/ty/const_ty/const_ty_mismatch.snap new file mode 100644 index 0000000000..d38228121a --- /dev/null +++ b/crates/uitest/fixtures/ty/const_ty/const_ty_mismatch.snap @@ -0,0 +1,48 @@ +--- +source: crates/uitest/tests/ty.rs +expression: diags +input_file: crates/uitest/fixtures/ty/const_ty/const_ty_mismatch.fe +--- +error[3-0011]: given type doesn't match the expected const type + ┌─ const_ty_mismatch.fe:2:8 + │ +2 │ N: u256 + │ ^^^^ expected `u32` type here, but `u256` is given + +error[3-0011]: given type doesn't match the expected const type + ┌─ const_ty_mismatch.fe:7:8 + │ +7 │ N: u256 + │ ^^^^ expected `u32` type here, but `u256` is given + +error[3-0011]: given type doesn't match the expected const type + ┌─ const_ty_mismatch.fe:15:15 + │ +15 │ pub fn foo(x: Foo) {} + │ ^^^^^^^^^ expected `u32` type here, but `bool` is given + +error[3-0011]: given type doesn't match the expected const type + ┌─ const_ty_mismatch.fe:16:16 + │ +16 │ pub fn foo2(x: Foo2) {} + │ ^^^^^^^^^^^^^^^^ expected `u32` type here, but `bool` is given + +error[3-0011]: given type doesn't match the expected const type + ┌─ const_ty_mismatch.fe:18:15 + │ +18 │ pub fn bar(x: Bar<3>) {} + │ ^^^^^^ expected `bool` type here, but `{integer}` is given + +error[3-0011]: given type doesn't match the expected const type + ┌─ const_ty_mismatch.fe:21:8 + │ +21 │ N: u64 + │ ^^^ expected `u32` type here, but `u64` is given + +error[3-0011]: given type doesn't match the expected const type + ┌─ const_ty_mismatch.fe:25:16 + │ +25 │ MyField{N: u64, x: 
i32} + │ ^^^ expected `u32` type here, but `u64` is given + + diff --git a/crates/uitest/fixtures/ty/const_ty/normal_type_expected.fe b/crates/uitest/fixtures/ty/const_ty/normal_type_expected.fe new file mode 100644 index 0000000000..0268988a90 --- /dev/null +++ b/crates/uitest/fixtures/ty/const_ty/normal_type_expected.fe @@ -0,0 +1,9 @@ +pub struct Foo { + t: T +} +pub fn foo(x: Foo<1>) {} + +pub struct Bar { + u: T, + n: N, +} \ No newline at end of file diff --git a/crates/uitest/fixtures/ty/const_ty/normal_type_expected.snap b/crates/uitest/fixtures/ty/const_ty/normal_type_expected.snap new file mode 100644 index 0000000000..c2f0b3d0f7 --- /dev/null +++ b/crates/uitest/fixtures/ty/const_ty/normal_type_expected.snap @@ -0,0 +1,18 @@ +--- +source: crates/uitest/tests/ty.rs +expression: diags +input_file: crates/uitest/fixtures/ty/const_ty/normal_type_expected.fe +--- +error[3-0013]: expected a normal type + ┌─ normal_type_expected.fe:4:15 + │ +4 │ pub fn foo(x: Foo<1>) {} + │ ^^^^^^ expected a normal type here, but `1` is given + +error[3-0013]: expected a normal type + ┌─ normal_type_expected.fe:8:8 + │ +8 │ n: N, + │ ^ expected a normal type here, but `const N: u32` is given + + diff --git a/crates/uitest/fixtures/ty/const_ty/trait_const_ty.fe b/crates/uitest/fixtures/ty/const_ty/trait_const_ty.fe new file mode 100644 index 0000000000..468ddff41e --- /dev/null +++ b/crates/uitest/fixtures/ty/const_ty/trait_const_ty.fe @@ -0,0 +1,9 @@ +pub trait Trait {} +impl Trait for i32 {} +impl Trait for i64 {} +pub struct Foo + where T: Trait +{} + +pub trait Trait2 {} +impl Trait2<1> for i32 {} diff --git a/crates/uitest/fixtures/ty/const_ty/trait_const_ty.snap b/crates/uitest/fixtures/ty/const_ty/trait_const_ty.snap new file mode 100644 index 0000000000..b0b96d67bf --- /dev/null +++ b/crates/uitest/fixtures/ty/const_ty/trait_const_ty.snap @@ -0,0 +1,30 @@ +--- +source: crates/uitest/tests/ty.rs +expression: diags +input_file: 
crates/uitest/fixtures/ty/const_ty/trait_const_ty.fe +--- +error[3-0011]: given type doesn't match the expected const type + ┌─ trait_const_ty.fe:3:20 + │ +3 │ impl Trait for i64 {} + │ ^^^^^^^^ expected `u32` type here, but `u64` is given + +error[3-0011]: given type doesn't match the expected const type + ┌─ trait_const_ty.fe:5:14 + │ +5 │ where T: Trait + │ ^^^^^^^^ expected `u32` type here, but `u64` is given + +error[3-0012]: expected const type + ┌─ trait_const_ty.fe:2:6 + │ +2 │ impl Trait for i32 {} + │ ^^^^^^^^^^ expected const type of `u32` here + +error[3-0013]: expected a normal type + ┌─ trait_const_ty.fe:9:6 + │ +9 │ impl Trait2<1> for i32 {} + │ ^^^^^^^^^ expected a normal type here, but `1` is given + + diff --git a/crates/uitest/fixtures/ty/def/alias_arg_mismatch.fe b/crates/uitest/fixtures/ty/def/alias_arg_mismatch.fe new file mode 100644 index 0000000000..14d8d8c5ac --- /dev/null +++ b/crates/uitest/fixtures/ty/def/alias_arg_mismatch.fe @@ -0,0 +1,14 @@ +struct S2 { + t: T1, + t2: T1, + u: T2, + u2: T2, +} + +pub struct S { + t: T, + u: U, +} +type T1 = S +type T2 = T1 + diff --git a/crates/uitest/fixtures/ty/def/alias_arg_mismatch.snap b/crates/uitest/fixtures/ty/def/alias_arg_mismatch.snap new file mode 100644 index 0000000000..9e5ccc4bdd --- /dev/null +++ b/crates/uitest/fixtures/ty/def/alias_arg_mismatch.snap @@ -0,0 +1,23 @@ +--- +source: crates/uitest/tests/ty.rs +expression: diags +input_file: crates/uitest/fixtures/ty/alias_arg_mismatch.fe +--- +error[3-0003]: all type parameters of type alias must be given + ┌─ alias_arg_mismatch.fe:2:8 + │ + 2 │ t: T1, + │ ^^^^^^^ expected at least 2 arguments here + · +12 │ type T1 = S + │ ----------------------- type alias defined here + +error[3-0003]: all type parameters of type alias must be given + ┌─ alias_arg_mismatch.fe:13:14 + │ +12 │ type T1 = S + │ ----------------------- type alias defined here +13 │ type T2 = T1 + │ ^^^^^ expected at least 2 arguments here + + diff --git 
a/crates/uitest/fixtures/ty/def/alias_cycle.fe b/crates/uitest/fixtures/ty/def/alias_cycle.fe new file mode 100644 index 0000000000..1249e93804 --- /dev/null +++ b/crates/uitest/fixtures/ty/def/alias_cycle.fe @@ -0,0 +1,5 @@ +type T1 = T1 + +type T2 = T3 +type T3 = T4 +type T4 = T2 diff --git a/crates/uitest/fixtures/ty/def/alias_cycle.snap b/crates/uitest/fixtures/ty/def/alias_cycle.snap new file mode 100644 index 0000000000..6c7e39beec --- /dev/null +++ b/crates/uitest/fixtures/ty/def/alias_cycle.snap @@ -0,0 +1,20 @@ +--- +source: crates/uitest/tests/ty.rs +expression: diags +input_file: crates/uitest/fixtures/ty/def/alias_cycle.fe +--- +error[3-0004]: recursive type alias cycle is detected + ┌─ alias_cycle.fe:1:11 + │ +1 │ type T1 = T1 + │ ^^ cycle happens here + +error[3-0004]: recursive type alias cycle is detected + ┌─ alias_cycle.fe:5:11 + │ +3 │ type T2 = T3 + │ -- type alias defined here +4 │ type T3 = T4 + │ -- type alias defined here +5 │ type T4 = T2 + │ ^^ cycle happens here diff --git a/crates/uitest/fixtures/ty/def/alias_kind_mismatch.fe b/crates/uitest/fixtures/ty/def/alias_kind_mismatch.fe new file mode 100644 index 0000000000..178d05c312 --- /dev/null +++ b/crates/uitest/fixtures/ty/def/alias_kind_mismatch.fe @@ -0,0 +1,13 @@ +pub struct S0 { + t: T, + u: U, +} + +type T1 = S0 +type T2 = S0 +type T3 = S0 + + +pub struct S1 { + t: T3, +} diff --git a/crates/uitest/fixtures/ty/def/alias_kind_mismatch.snap b/crates/uitest/fixtures/ty/def/alias_kind_mismatch.snap new file mode 100644 index 0000000000..8bc2b933ca --- /dev/null +++ b/crates/uitest/fixtures/ty/def/alias_kind_mismatch.snap @@ -0,0 +1,22 @@ +--- +source: crates/uitest/tests/ty.rs +expression: diags +input_file: crates/uitest/fixtures/ty/def/alias_kind_mismatch.fe +--- +error[3-0001]: invalid type argument kind + ┌─ alias_kind_mismatch.fe:7:11 + │ +7 │ type T2 = S0 + │ ^^^^^^^^^^^ expected `*` kind, but `S0` has `(* -> (* -> *))` kind + +error[3-0001]: invalid type argument kind + ┌─ 
alias_kind_mismatch.fe:12:8 + │ +12 │ t: T3, + │ ^^^^^^^^^^^ expected `*` kind, but `S0` has `(* -> (* -> *))` kind + +error[3-0016]: too many generic args; expected 2, given 3 + ┌─ alias_kind_mismatch.fe:6:11 + │ +6 │ type T1 = S0 + │ ^^^^^^^^^^^^^^^^^ too many generic args; expected 2, given 3 diff --git a/crates/uitest/fixtures/ty/def/alias_non_mono.fe b/crates/uitest/fixtures/ty/def/alias_non_mono.fe new file mode 100644 index 0000000000..33f46a3f07 --- /dev/null +++ b/crates/uitest/fixtures/ty/def/alias_non_mono.fe @@ -0,0 +1,7 @@ +pub struct Foo { + t: T, + u: U, +} + +pub type T = Foo +pub type T2 = Foo \ No newline at end of file diff --git a/crates/uitest/fixtures/ty/def/alias_non_mono.snap b/crates/uitest/fixtures/ty/def/alias_non_mono.snap new file mode 100644 index 0000000000..29c768f2f1 --- /dev/null +++ b/crates/uitest/fixtures/ty/def/alias_non_mono.snap @@ -0,0 +1,6 @@ +--- +source: crates/uitest/tests/ty.rs +expression: diags +input_file: crates/uitest/fixtures/ty/alias_non_mono.fe +--- + diff --git a/crates/uitest/fixtures/ty/def/const_generics_cycle.fe b/crates/uitest/fixtures/ty/def/const_generics_cycle.fe new file mode 100644 index 0000000000..ea05b06bdb --- /dev/null +++ b/crates/uitest/fixtures/ty/def/const_generics_cycle.fe @@ -0,0 +1,2 @@ +pub struct Foo {} +pub struct Bar {} \ No newline at end of file diff --git a/crates/uitest/fixtures/ty/def/const_generics_cycle.snap b/crates/uitest/fixtures/ty/def/const_generics_cycle.snap new file mode 100644 index 0000000000..6532429053 --- /dev/null +++ b/crates/uitest/fixtures/ty/def/const_generics_cycle.snap @@ -0,0 +1,16 @@ +--- +source: crates/uitest/tests/ty.rs +expression: diags +input_file: crates/uitest/fixtures/ty/def/const_generics_cycle.fe +--- +error[3-0009]: invalid const parameter type + ┌─ const_generics_cycle.fe:1:28 + │ +1 │ pub struct Foo {} + │ ^ only integer or bool types are allowed as a const parameter type + +error[3-0009]: invalid const parameter type + ┌─ 
const_generics_cycle.fe:2:28 + │ +2 │ pub struct Bar {} + │ ^^^ only integer or bool types are allowed as a const parameter type diff --git a/crates/uitest/fixtures/ty/def/const_generics_invalid_ty.fe b/crates/uitest/fixtures/ty/def/const_generics_invalid_ty.fe new file mode 100644 index 0000000000..4cd439542c --- /dev/null +++ b/crates/uitest/fixtures/ty/def/const_generics_invalid_ty.fe @@ -0,0 +1,4 @@ +pub struct Foo {} +pub struct Bar {} + +pub fn foo() {} \ No newline at end of file diff --git a/crates/uitest/fixtures/ty/def/const_generics_invalid_ty.snap b/crates/uitest/fixtures/ty/def/const_generics_invalid_ty.snap new file mode 100644 index 0000000000..34a8ffe5b4 --- /dev/null +++ b/crates/uitest/fixtures/ty/def/const_generics_invalid_ty.snap @@ -0,0 +1,16 @@ +--- +source: crates/uitest/tests/ty.rs +expression: diags +input_file: crates/uitest/fixtures/ty/def/const_generics_invalid_ty.fe +--- +error[3-0009]: invalid const parameter type + ┌─ const_generics_invalid_ty.fe:2:25 + │ +2 │ pub struct Bar {} + │ ^^^ only integer or bool types are allowed as a const parameter type + +error[3-0009]: invalid const parameter type + ┌─ const_generics_invalid_ty.fe:4:21 + │ +4 │ pub fn foo() {} + │ ^^^ only integer or bool types are allowed as a const parameter type diff --git a/crates/uitest/fixtures/ty/def/const_generics_trait_bound.fe b/crates/uitest/fixtures/ty/def/const_generics_trait_bound.fe new file mode 100644 index 0000000000..16ad0947f9 --- /dev/null +++ b/crates/uitest/fixtures/ty/def/const_generics_trait_bound.fe @@ -0,0 +1,9 @@ +pub trait Trait {} + +pub struct Foo +where U: Trait +{} + +pub fn foo() +where U: Trait +{} \ No newline at end of file diff --git a/crates/uitest/fixtures/ty/def/const_generics_trait_bound.snap b/crates/uitest/fixtures/ty/def/const_generics_trait_bound.snap new file mode 100644 index 0000000000..dfc8ee558a --- /dev/null +++ b/crates/uitest/fixtures/ty/def/const_generics_trait_bound.snap @@ -0,0 +1,18 @@ +--- +source: 
crates/uitest/tests/ty.rs +expression: diags +input_file: crates/uitest/fixtures/ty/def/const_generics_trait_bound.fe +--- +error[6-0006]: trait bound for const type is not allowed + ┌─ const_generics_trait_bound.fe:4:7 + │ +4 │ where U: Trait + │ ^ `const U: u32` is a const type + +error[6-0006]: trait bound for const type is not allowed + ┌─ const_generics_trait_bound.fe:8:7 + │ +8 │ where U: Trait + │ ^ `const U: bool` is a const type + + diff --git a/crates/uitest/fixtures/ty/def/duplicated_arg_name.fe b/crates/uitest/fixtures/ty/def/duplicated_arg_name.fe new file mode 100644 index 0000000000..b582c10570 --- /dev/null +++ b/crates/uitest/fixtures/ty/def/duplicated_arg_name.fe @@ -0,0 +1,9 @@ +pub fn foo(x: i32, y x: u64) {} + +trait Foo { + fn foo(x y: i32, z y: i32) {} +} + +impl Foo for i32 { + fn foo(x y: i32, z y: i32) {} +} \ No newline at end of file diff --git a/crates/uitest/fixtures/ty/def/duplicated_arg_name.snap b/crates/uitest/fixtures/ty/def/duplicated_arg_name.snap new file mode 100644 index 0000000000..c16c057172 --- /dev/null +++ b/crates/uitest/fixtures/ty/def/duplicated_arg_name.snap @@ -0,0 +1,30 @@ +--- +source: crates/uitest/tests/ty.rs +expression: diags +input_file: crates/uitest/fixtures/ty/def/duplicated_arg_name.fe +--- +error[3-0008]: duplicated argument name in function definition is not allowed + ┌─ duplicated_arg_name.fe:1:22 + │ +1 │ pub fn foo(x: i32, y x: u64) {} + │ - ^ duplicated argument name `x` + │ │ + │ conflict with this argument name + +error[3-0008]: duplicated argument name in function definition is not allowed + ┌─ duplicated_arg_name.fe:4:24 + │ +4 │ fn foo(x y: i32, z y: i32) {} + │ - ^ duplicated argument name `y` + │ │ + │ conflict with this argument name + +error[3-0008]: duplicated argument name in function definition is not allowed + ┌─ duplicated_arg_name.fe:8:24 + │ +8 │ fn foo(x y: i32, z y: i32) {} + │ - ^ duplicated argument name `y` + │ │ + │ conflict with this argument name + + diff --git 
a/crates/uitest/fixtures/ty/def/generic_param_conflict.fe b/crates/uitest/fixtures/ty/def/generic_param_conflict.fe new file mode 100644 index 0000000000..8b02d3b335 --- /dev/null +++ b/crates/uitest/fixtures/ty/def/generic_param_conflict.fe @@ -0,0 +1,13 @@ +pub enum Result { + Ok(T), + Err(E), +} + +impl Result +{ + fn foo(self: Self) {} + + fn bar(self: Self) { + fn baz(t: T) {} + } +} diff --git a/crates/uitest/fixtures/ty/def/generic_param_conflict.snap b/crates/uitest/fixtures/ty/def/generic_param_conflict.snap new file mode 100644 index 0000000000..9eedb24f4e --- /dev/null +++ b/crates/uitest/fixtures/ty/def/generic_param_conflict.snap @@ -0,0 +1,21 @@ +--- +source: crates/uitest/tests/ty.rs +expression: diags +input_file: crates/uitest/fixtures/ty/def/generic_param_conflict.fe +--- +error[3-0007]: generic parameter is already defined in the parent item + ┌─ generic_param_conflict.fe:8:12 + │ +6 │ impl Result + │ - conflict with this generic parameter +7 │ { +8 │ fn foo(self: Self) {} + │ ^ `E` is already defined + +error[3-0007]: generic parameter is already defined in the parent item + ┌─ generic_param_conflict.fe:11:16 + │ +10 │ fn bar(self: Self) { + │ - conflict with this generic parameter +11 │ fn baz(t: T) {} + │ ^ `T` is already defined diff --git a/crates/uitest/fixtures/ty/def/impl_conflict.fe b/crates/uitest/fixtures/ty/def/impl_conflict.fe new file mode 100644 index 0000000000..16b705e86e --- /dev/null +++ b/crates/uitest/fixtures/ty/def/impl_conflict.fe @@ -0,0 +1,16 @@ +pub enum Result { + Ok(T), + Err(E), +} + +impl Result { + fn foo(self) {} +} + +impl Result { + fn foo(self) {} +} + +impl Result { + fn foo(self: Self) {} +} \ No newline at end of file diff --git a/crates/uitest/fixtures/ty/def/impl_conflict.snap b/crates/uitest/fixtures/ty/def/impl_conflict.snap new file mode 100644 index 0000000000..f8599e95d9 --- /dev/null +++ b/crates/uitest/fixtures/ty/def/impl_conflict.snap @@ -0,0 +1,22 @@ +--- +source: crates/uitest/tests/ty.rs 
+expression: diags +input_file: crates/uitest/fixtures/ty/def/impl_conflict.fe +--- +error[6-0000]: conflict method implementation + ┌─ impl_conflict.fe:11:8 + │ + 7 │ fn foo(self) {} + │ --- conflict with this method implementation + · +11 │ fn foo(self) {} + │ ^^^ conflict method implementation + +error[6-0000]: conflict method implementation + ┌─ impl_conflict.fe:15:8 + │ + 7 │ fn foo(self) {} + │ --- conflict with this method implementation + · +15 │ fn foo(self: Self) {} + │ ^^^ conflict method implementation diff --git a/crates/uitest/fixtures/ty/def/impl_foreign.fe b/crates/uitest/fixtures/ty/def/impl_foreign.fe new file mode 100644 index 0000000000..0e9bf4239c --- /dev/null +++ b/crates/uitest/fixtures/ty/def/impl_foreign.fe @@ -0,0 +1,6 @@ +impl i32 {} + +impl T {} + +impl *> T { +} \ No newline at end of file diff --git a/crates/uitest/fixtures/ty/def/impl_foreign.snap b/crates/uitest/fixtures/ty/def/impl_foreign.snap new file mode 100644 index 0000000000..338471c22a --- /dev/null +++ b/crates/uitest/fixtures/ty/def/impl_foreign.snap @@ -0,0 +1,24 @@ +--- +source: crates/uitest/tests/ty.rs +expression: diags +input_file: crates/uitest/fixtures/ty/def/impl_foreign.fe +--- +error[6-0011]: inherent impl is not allowed + ┌─ impl_foreign.fe:1:6 + │ +1 │ impl i32 {} + │ ^^^ inherent impl is not allowed for foreign type `i32` + +error[6-0011]: inherent impl is not allowed + ┌─ impl_foreign.fe:3:9 + │ +3 │ impl T {} + │ ^ inherent impl is not allowed for non nominal type + +error[6-0011]: inherent impl is not allowed + ┌─ impl_foreign.fe:5:17 + │ +5 │ impl *> T { + │ ^^^^^^ inherent impl is not allowed for non nominal type + + diff --git a/crates/uitest/fixtures/ty/def/invalid_self_ty.fe b/crates/uitest/fixtures/ty/def/invalid_self_ty.fe new file mode 100644 index 0000000000..a18e81d815 --- /dev/null +++ b/crates/uitest/fixtures/ty/def/invalid_self_ty.fe @@ -0,0 +1,21 @@ +pub trait Foo { + fn foo(self: i32) +} + +impl Foo for Option { + fn foo(self: i32) {} +} + 
+pub enum Option { + Some(T), + None, + +} + +impl Option { + fn method1(self: i32) {} +} + +impl Option { + fn method2(self: i32) {} +} \ No newline at end of file diff --git a/crates/uitest/fixtures/ty/def/invalid_self_ty.snap b/crates/uitest/fixtures/ty/def/invalid_self_ty.snap new file mode 100644 index 0000000000..14bc3ac9f2 --- /dev/null +++ b/crates/uitest/fixtures/ty/def/invalid_self_ty.snap @@ -0,0 +1,30 @@ +--- +source: crates/uitest/tests/ty.rs +expression: diags +input_file: crates/uitest/fixtures/ty/def/invalid_self_ty.fe +--- +error[6-0010]: invalid type for `self` argument + ┌─ invalid_self_ty.fe:2:18 + │ +2 │ fn foo(self: i32) + │ ^^^ type of `self` must starts with `Self`, but the given type is `i32` + +error[6-0010]: invalid type for `self` argument + ┌─ invalid_self_ty.fe:6:18 + │ +6 │ fn foo(self: i32) {} + │ ^^^ type of `self` must starts with `Self` or `Option`, but the given type is `i32` + +error[6-0010]: invalid type for `self` argument + ┌─ invalid_self_ty.fe:16:22 + │ +16 │ fn method1(self: i32) {} + │ ^^^ type of `self` must starts with `Self` or `Option`, but the given type is `i32` + +error[6-0010]: invalid type for `self` argument + ┌─ invalid_self_ty.fe:20:22 + │ +20 │ fn method2(self: i32) {} + │ ^^^ type of `self` must starts with `Self` or `Option`, but the given type is `i32` + + diff --git a/crates/uitest/fixtures/ty/def/kind_bound.fe b/crates/uitest/fixtures/ty/def/kind_bound.fe new file mode 100644 index 0000000000..fef8948dc7 --- /dev/null +++ b/crates/uitest/fixtures/ty/def/kind_bound.fe @@ -0,0 +1,53 @@ +// * -> * +pub struct Wrapper1 +{ + value: T +} + +// (* -> *) -> * -> * +pub struct Wrapper2 +where T: * -> * +{ + val: T +} + + +// ((* -> *) -> *) -> (* -> *) -> * +pub struct Wrapper3 +where T: (* -> *) -> * -> *, + U: * -> * +{ + value: T +} + +pub struct Foo { + foo_x: Wrapper2, + foo_err: Wrapper2, +} + + +pub struct Bar { + bar_x: Wrapper3, + bar_y: Wrapper1>, + bar_err1: Wrapper3, + bar_err2: Wrapper3, + bar_err3: 
wrapper3, +} + +pub struct InvalidBound *> +where T: (* -> *) -> * +{ + val: T +} + +pub struct InvalidBound2 +where Self: * +{ + val: i32 +} + +pub struct InvalidBound3 +where Wrapper1: * +{ + val: Wrapper1 +} \ No newline at end of file diff --git a/crates/uitest/fixtures/ty/def/kind_bound.snap b/crates/uitest/fixtures/ty/def/kind_bound.snap new file mode 100644 index 0000000000..fb46598dd1 --- /dev/null +++ b/crates/uitest/fixtures/ty/def/kind_bound.snap @@ -0,0 +1,48 @@ +--- +source: crates/uitest/tests/ty.rs +expression: diags +input_file: crates/uitest/fixtures/ty/kind_bound.fe +--- +error[2-0002]: `wrapper3` is not found + ┌─ kind_bound.fe:34:15 + │ +34 │ bar_err3: wrapper3, + │ ^^^^^^^^ `wrapper3` is not found + +error[3-0001]: invalid type argument kind + ┌─ kind_bound.fe:25:14 + │ +25 │ foo_err: Wrapper2, + │ ^^^^^^^^^^^^^^^^^^ expected `(* -> *)` kind, but `i32` has `*` kind + +error[3-0001]: invalid type argument kind + ┌─ kind_bound.fe:32:15 + │ +32 │ bar_err1: Wrapper3, + │ ^^^^^^^^^^^^^^^^^^ expected `((* -> *) -> (* -> *))` kind, but `i32` has `*` kind + +error[3-0001]: invalid type argument kind + ┌─ kind_bound.fe:33:15 + │ +33 │ bar_err2: Wrapper3, + │ ^^^^^^^^^^^^^^^^^^^^^^^ expected `(* -> *)` kind, but `u32` has `*` kind + +error[3-0005]: duplicate type bound is not allowed. 
+ ┌─ kind_bound.fe:38:10 + │ +38 │ where T: (* -> *) -> * + │ ^^^^^^^^^^^^^ `T` is already declared with `(* -> *)` kind, but found `((* -> *) -> *)` kind here + +error[6-0005]: trait bound for concrete type is not allowed + ┌─ kind_bound.fe:44:7 + │ +44 │ where Self: * + │ ^^^^ `InvalidBound2` is a concrete type + +error[6-0005]: trait bound for concrete type is not allowed + ┌─ kind_bound.fe:50:7 + │ +50 │ where Wrapper1: * + │ ^^^^^^^^^^^^^ `Wrapper1` is a concrete type + + diff --git a/crates/uitest/fixtures/ty/def/kind_mismatch.fe b/crates/uitest/fixtures/ty/def/kind_mismatch.fe new file mode 100644 index 0000000000..76b15389aa --- /dev/null +++ b/crates/uitest/fixtures/ty/def/kind_mismatch.fe @@ -0,0 +1,11 @@ +pub struct Foo { + t: T, + u: U, +} + +pub struct Bar { + foo: Foo, + bar: Foo, + baz: Foo, + baz: Foo>, +} \ No newline at end of file diff --git a/crates/uitest/fixtures/ty/def/kind_mismatch.snap b/crates/uitest/fixtures/ty/def/kind_mismatch.snap new file mode 100644 index 0000000000..c2603e1776 --- /dev/null +++ b/crates/uitest/fixtures/ty/def/kind_mismatch.snap @@ -0,0 +1,36 @@ +--- +source: crates/uitest/tests/ty.rs +expression: diags +input_file: crates/uitest/fixtures/ty/def/kind_mismatch.fe +--- +error[2-0001]: `baz` conflicts with other definitions + ┌─ kind_mismatch.fe:9:5 + │ + 9 │ baz: Foo, + │ ^^^ `baz` is defined here +10 │ baz: Foo>, + │ --- `baz` is redefined here + +error[3-0001]: invalid type argument kind + ┌─ kind_mismatch.fe:7:10 + │ +7 │ foo: Foo, + │ ^^^^^^^^^^^^^ expected `*` kind, but `Foo` has `(* -> (* -> *))` kind + +error[3-0001]: invalid type argument kind + ┌─ kind_mismatch.fe:8:10 + │ +8 │ bar: Foo, + │ ^^^^^^^^^^^^^ expected `*` kind, but `Foo` has `(* -> (* -> *))` kind + +error[3-0016]: too many generic args; expected 2, given 3 + ┌─ kind_mismatch.fe:9:10 + │ +9 │ baz: Foo, + │ ^^^^^^^^^^^^^^^^^^^ too many generic args; expected 2, given 3 + +error[3-0016]: too many generic args; expected 2, given 3 + ┌─ 
kind_mismatch.fe:10:10 + │ +10 │ baz: Foo>, + │ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ too many generic args; expected 2, given 3 diff --git a/crates/uitest/fixtures/ty/def/not_fully_applied.fe b/crates/uitest/fixtures/ty/def/not_fully_applied.fe new file mode 100644 index 0000000000..49b97847ca --- /dev/null +++ b/crates/uitest/fixtures/ty/def/not_fully_applied.fe @@ -0,0 +1,28 @@ +pub struct Gen { + t: T, + u: U, +} + +pub struct MyS { + f: Gen, + u: (i32, i32), +} + +pub enum MyE { + Variant(Gen, Gen), + Variant2{ x: Gen, y: Gen } +} + +pub contract MyC { + f: Gen, +} + + +impl Gen { + fn foo(self) {} + + fn bar(self: Self) {} +} + + +fn foo(gen: Gen) {} \ No newline at end of file diff --git a/crates/uitest/fixtures/ty/def/not_fully_applied.snap b/crates/uitest/fixtures/ty/def/not_fully_applied.snap new file mode 100644 index 0000000000..5d16496d44 --- /dev/null +++ b/crates/uitest/fixtures/ty/def/not_fully_applied.snap @@ -0,0 +1,54 @@ +--- +source: crates/uitest/tests/ty.rs +expression: diags +input_file: crates/uitest/fixtures/ty/def/not_fully_applied.fe +--- +error[3-0000]: expected `*` kind in this context + ┌─ not_fully_applied.fe:7:8 + │ +7 │ f: Gen, + │ ^^^^^^^^ expected `*` kind here + +error[3-0000]: expected `*` kind in this context + ┌─ not_fully_applied.fe:12:13 + │ +12 │ Variant(Gen, Gen), + │ ^^^^^^^^ expected `*` kind here + +error[3-0000]: expected `*` kind in this context + ┌─ not_fully_applied.fe:12:23 + │ +12 │ Variant(Gen, Gen), + │ ^^^^^^^^ expected `*` kind here + +error[3-0000]: expected `*` kind in this context + ┌─ not_fully_applied.fe:13:36 + │ +13 │ Variant2{ x: Gen, y: Gen } + │ ^^^^^^^^ expected `*` kind here + +error[3-0000]: expected `*` kind in this context + ┌─ not_fully_applied.fe:17:8 + │ +17 │ f: Gen, + │ ^^^^^^^^ expected `*` kind here + +error[3-0000]: expected `*` kind in this context + ┌─ not_fully_applied.fe:22:12 + │ +22 │ fn foo(self) {} + │ ^^^^ expected `*` kind here + +error[3-0000]: expected `*` kind in this context + ┌─ 
not_fully_applied.fe:24:18 + │ +24 │ fn bar(self: Self) {} + │ ^^^^ expected `*` kind here + +error[3-0000]: expected `*` kind in this context + ┌─ not_fully_applied.fe:28:13 + │ +28 │ fn foo(gen: Gen) {} + │ ^^^ expected `*` kind here + + diff --git a/crates/uitest/fixtures/ty/def/not_star_kind.fe b/crates/uitest/fixtures/ty/def/not_star_kind.fe new file mode 100644 index 0000000000..95b0f6ca4c --- /dev/null +++ b/crates/uitest/fixtures/ty/def/not_star_kind.fe @@ -0,0 +1,30 @@ +pub struct Gen { + t: T, + u: U, +} + +pub struct MyS { + f: Gen, + u: (i32, i32), +} + +pub enum MyE { + Variant(Gen, Gen), + Variant2{ x: Gen, y: Gen }, +} + +pub contract MyC { + f: Gen, +} + + +impl Gen { + fn foo(self) {} + + fn bar(self: Self) {} +} + + +fn foo(gen: Gen) {} + +fn foo(gen: Gen) -> Gen {} \ No newline at end of file diff --git a/crates/uitest/fixtures/ty/def/not_star_kind.snap b/crates/uitest/fixtures/ty/def/not_star_kind.snap new file mode 100644 index 0000000000..e11059b5d0 --- /dev/null +++ b/crates/uitest/fixtures/ty/def/not_star_kind.snap @@ -0,0 +1,81 @@ +--- +source: crates/uitest/tests/ty.rs +expression: diags +input_file: crates/uitest/fixtures/ty/def/not_star_kind.fe +--- +error[2-0001]: `foo` conflicts with other definitions + ┌─ not_star_kind.fe:28:4 + │ +28 │ fn foo(gen: Gen) {} + │ ^^^ `foo` is defined here +29 │ +30 │ fn foo(gen: Gen) -> Gen {} + │ --- `foo` is redefined here + +error[2-0002]: `T` is not found + ┌─ not_star_kind.fe:21:10 + │ +21 │ impl Gen { + │ ^ `T` is not found + +error[3-0000]: expected `*` kind in this context + ┌─ not_star_kind.fe:7:8 + │ +7 │ f: Gen, + │ ^^^^^^^^ expected `*` kind here + +error[3-0000]: expected `*` kind in this context + ┌─ not_star_kind.fe:12:13 + │ +12 │ Variant(Gen, Gen), + │ ^^^^^^^^ expected `*` kind here + +error[3-0000]: expected `*` kind in this context + ┌─ not_star_kind.fe:12:23 + │ +12 │ Variant(Gen, Gen), + │ ^^^^^^^^ expected `*` kind here + +error[3-0000]: expected `*` kind in this context + ┌─ 
not_star_kind.fe:13:36 + │ +13 │ Variant2{ x: Gen, y: Gen }, + │ ^^^^^^^^ expected `*` kind here + +error[3-0000]: expected `*` kind in this context + ┌─ not_star_kind.fe:17:8 + │ +17 │ f: Gen, + │ ^^^^^^^^ expected `*` kind here + +error[3-0000]: expected `*` kind in this context + ┌─ not_star_kind.fe:22:12 + │ +22 │ fn foo(self) {} + │ ^^^^ expected `*` kind here + +error[3-0000]: expected `*` kind in this context + ┌─ not_star_kind.fe:24:18 + │ +24 │ fn bar(self: Self) {} + │ ^^^^ expected `*` kind here + +error[3-0000]: expected `*` kind in this context + ┌─ not_star_kind.fe:28:13 + │ +28 │ fn foo(gen: Gen) {} + │ ^^^ expected `*` kind here + +error[3-0000]: expected `*` kind in this context + ┌─ not_star_kind.fe:30:13 + │ +30 │ fn foo(gen: Gen) -> Gen {} + │ ^^^^^^^^ expected `*` kind here + +error[3-0000]: expected `*` kind in this context + ┌─ not_star_kind.fe:30:26 + │ +30 │ fn foo(gen: Gen) -> Gen {} + │ ^^^ expected `*` kind here + + diff --git a/crates/uitest/fixtures/ty/def/recursive_type.fe b/crates/uitest/fixtures/ty/def/recursive_type.fe new file mode 100644 index 0000000000..0a750cca77 --- /dev/null +++ b/crates/uitest/fixtures/ty/def/recursive_type.fe @@ -0,0 +1,24 @@ +pub struct S1 { + s: S1 +} + +pub struct S2 { + s: S3 +} + +pub struct S3 { + s: S4 +} + +pub struct S4 { + s: S2 +} + +pub struct S5 { + s: S6, + t: T, +} + +pub struct S6 { + s: S5 +} \ No newline at end of file diff --git a/crates/uitest/fixtures/ty/def/recursive_type.snap b/crates/uitest/fixtures/ty/def/recursive_type.snap new file mode 100644 index 0000000000..daf6f6bf12 --- /dev/null +++ b/crates/uitest/fixtures/ty/def/recursive_type.snap @@ -0,0 +1,46 @@ +--- +source: crates/uitest/tests/ty.rs +expression: diags +input_file: crates/uitest/fixtures/ty/recursive_type.fe +--- +error[3-0002]: recursive type is not allowed + ┌─ recursive_type.fe:1:12 + │ +1 │ pub struct S1 { + │ ^^ recursive type definition +2 │ s: S1 + │ -- recursion occurs here + +error[3-0002]: recursive type is 
not allowed + ┌─ recursive_type.fe:5:12 + │ +5 │ pub struct S2 { + │ ^^ recursive type definition +6 │ s: S3 + │ -- recursion occurs here + +error[3-0002]: recursive type is not allowed + ┌─ recursive_type.fe:9:12 + │ + 9 │ pub struct S3 { + │ ^^ recursive type definition +10 │ s: S4 + │ -- recursion occurs here + +error[3-0002]: recursive type is not allowed + ┌─ recursive_type.fe:13:12 + │ +13 │ pub struct S4 { + │ ^^ recursive type definition +14 │ s: S2 + │ -- recursion occurs here + +error[3-0002]: recursive type is not allowed + ┌─ recursive_type.fe:22:12 + │ +22 │ pub struct S6 { + │ ^^ recursive type definition +23 │ s: S5 + │ ------ recursion occurs here + + diff --git a/crates/uitest/fixtures/ty/def/trait_arg_mismatch.fe b/crates/uitest/fixtures/ty/def/trait_arg_mismatch.fe new file mode 100644 index 0000000000..aab206b456 --- /dev/null +++ b/crates/uitest/fixtures/ty/def/trait_arg_mismatch.fe @@ -0,0 +1,11 @@ +pub trait Foo *> {} + +enum Option { + Some(T), + None, +} + +impl Foo for i32 {} +impl Foo> for i32 {} +impl Foo