From e5c5d094ebc74fd4288315370bc796e66468668e Mon Sep 17 00:00:00 2001 From: Micah Date: Wed, 10 Jan 2024 21:03:45 -0500 Subject: [PATCH 01/66] Start moving LSP server to tower --- Cargo.lock | 479 +++++++++++++++++- crates/language-server/Cargo.toml | 6 + crates/language-server/src/backend.rs | 200 ++++++++ crates/language-server/src/diagnostics.rs | 2 +- .../src/handlers/notifications.rs | 158 +++--- .../language-server/src/handlers/request.rs | 56 +- crates/language-server/src/main.rs | 20 +- crates/language-server/src/server.rs | 94 ++-- 8 files changed, 843 insertions(+), 172 deletions(-) create mode 100644 crates/language-server/src/backend.rs diff --git a/Cargo.lock b/Cargo.lock index 4c1f38447..36963c444 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2,6 +2,21 @@ # It is not intended for manual editing. version = 3 +[[package]] +name = "addr2line" +version = "0.21.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a30b2e23b9e17a9f90641c7ab1549cd9b44f296d3ccbf309d2863cfe398a0cb" +dependencies = [ + "gimli", +] + +[[package]] +name = "adler" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" + [[package]] name = "ahash" version = "0.7.6" @@ -100,6 +115,17 @@ version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8da52d66c7071e2e3fa2a1e5c6d088fec47b593032b254f5e980de8ea54454d6" +[[package]] +name = "async-trait" +version = "0.1.77" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c980ee35e870bd1a4d2c8294d4c04d0499e67bca1e4b5cefcc693c2fa00caea9" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.48", +] + [[package]] name = "atty" version = "0.2.14" @@ -129,6 +155,21 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" +[[package]] +name = "backtrace" +version = "0.3.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2089b7e3f35b9dd2d0ed921ead4f6d318c27680d4a5bd167b3ee120edb105837" +dependencies = [ + "addr2line", + "cc", + "cfg-if 1.0.0", + "libc", + "miniz_oxide", + "object", + "rustc-demangle", +] + [[package]] name = "base16ct" version = "0.2.0" @@ -304,7 +345,7 @@ dependencies = [ "heck 0.4.1", "proc-macro2", "quote", - "syn 2.0.27", + "syn 2.0.48", ] [[package]] @@ -724,7 +765,7 @@ checksum = "48016319042fb7c87b78d2993084a831793a897a5cd1a2a67cab9d1eeb4b7d76" dependencies = [ "proc-macro2", "quote", - "syn 2.0.27", + "syn 2.0.48", ] [[package]] @@ -1121,6 +1162,7 @@ dependencies = [ "fe-hir", "fe-hir-analysis", "fe-macros", + "futures", "fxhash", "glob", "indexmap", @@ -1132,6 +1174,11 @@ dependencies = [ "salsa-2022", "serde", "serde_json", + "tokio", + "tokio-macros", + "tower-lsp", + "tracing", + "tracing-subscriber", "url", ] @@ -1149,7 +1196,7 @@ dependencies = [ "glob", "proc-macro2", "quote", - "syn 2.0.27", + "syn 2.0.48", ] [[package]] @@ -1291,6 +1338,95 @@ version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c" +[[package]] +name = "futures" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "645c6916888f6cb6350d2550b80fb63e734897a8498abe35cfb732b6487804b0" +dependencies = [ + "futures-channel", + 
"futures-core", + "futures-executor", + "futures-io", + "futures-sink", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-channel" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eac8f7d7865dcb88bd4373ab671c8cf4508703796caa2b1985a9ca867b3fcb78" +dependencies = [ + "futures-core", + "futures-sink", +] + +[[package]] +name = "futures-core" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dfc6580bb841c5a68e9ef15c77ccc837b40a7504914d52e47b8b0e9bbda25a1d" + +[[package]] +name = "futures-executor" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a576fc72ae164fca6b9db127eaa9a9dda0d61316034f33a0a0d4eda41f02b01d" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-io" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a44623e20b9681a318efdd71c299b6b222ed6f231972bfe2f224ebad6311f0c1" + +[[package]] +name = "futures-macro" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.48", +] + +[[package]] +name = "futures-sink" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9fb8e00e87438d937621c1c6269e53f536c14d3fbd6a042bb24879e57d474fb5" + +[[package]] +name = "futures-task" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38d84fa142264698cdce1a9f9172cf383a0c82de1bddcf3092901442c4097004" + +[[package]] +name = "futures-util" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d6401deb83407ab3da39eba7e33987a73c3df0c82b4bb5813ee871c19c41d48" +dependencies = [ + "futures-channel", + "futures-core", + "futures-io", + "futures-macro", + "futures-sink", + "futures-task", + "memchr", + "pin-project-lite", + "pin-utils", + "slab", +] + [[package]] name = "fxhash" version = "0.2.1" @@ -1324,6 +1460,12 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "gimli" +version = "0.28.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4271d37baee1b8c7e4b708028c57d816cf9d2434acb33a549475f78c181f6253" + [[package]] name = "glob" version = "0.3.1" @@ -1449,6 +1591,12 @@ dependencies = [ "digest", ] +[[package]] +name = "httparse" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d897f394bad6a705d5f4104762e116a75639e470d80901eed05a860a95cb1904" + [[package]] name = "id-arena" version = "2.2.1" @@ -1644,9 +1792,9 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.142" +version = "0.2.152" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a987beff54b60ffa6d51982e1aa1146bc42f19bd26be28b0586f252fccf5317" +checksum = "13e3bf6590cbc649f4d1a3eefc9d5d6eb746f5200ffb04e5e142700b8faa56e7" [[package]] name = "libm" @@ -1722,9 +1870,9 @@ dependencies = [ [[package]] name = "lsp-types" -version = "0.94.0" +version = "0.94.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b63735a13a1f9cd4f4835223d828ed9c2e35c8c5e61837774399f558b6a1237" +checksum = 
"c66bfd44a06ae10647fe3f8214762e9369fd4248df1350924b4ef9e770a85ea1" dependencies = [ "bitflags", "serde", @@ -1748,6 +1896,36 @@ dependencies = [ "autocfg", ] +[[package]] +name = "miniz_oxide" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7810e0be55b428ada41041c41f32c9f1a42817901b4ccf45fa3d4b6561e74c7" +dependencies = [ + "adler", +] + +[[package]] +name = "mio" +version = "0.8.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f3d0b296e374a4e6f3c7b0a1f5a51d748a0d34c85e7dc48fc3fa9a87657fe09" +dependencies = [ + "libc", + "wasi", + "windows-sys 0.48.0", +] + +[[package]] +name = "nu-ansi-term" +version = "0.46.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84" +dependencies = [ + "overload", + "winapi", +] + [[package]] name = "num" version = "0.4.0" @@ -1835,6 +2013,15 @@ dependencies = [ "libc", ] +[[package]] +name = "object" +version = "0.32.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a6a622008b6e321afc04970976f62ee297fdbaa6f95318ca343e3eebb9648441" +dependencies = [ + "memchr", +] + [[package]] name = "once_cell" version = "1.18.0" @@ -1862,6 +2049,12 @@ dependencies = [ "winapi", ] +[[package]] +name = "overload" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" + [[package]] name = "parity-scale-codec" version = "3.4.0" @@ -1968,6 +2161,38 @@ dependencies = [ "indexmap", ] +[[package]] +name = "pin-project" +version = "1.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0302c4a0442c456bd56f841aee5c3bfd17967563f6fadc9ceb9f9c23cf3807e0" +dependencies = [ + "pin-project-internal", +] + +[[package]] +name = "pin-project-internal" +version = "1.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "266c042b60c9c76b8d53061e52b2e0d1116abc57cefc8c5cd671619a56ac3690" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.48", +] + +[[package]] +name = "pin-project-lite" +version = "0.2.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8afb450f006bf6385ca15ef45d71d2288452bc3683ce2e2cacc0d18e4be60b58" + +[[package]] +name = "pin-utils" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" + [[package]] name = "plotters" version = "0.3.4" @@ -2064,9 +2289,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.66" +version = "1.0.78" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "18fb31db3f9bddb2ea821cde30a9f70117e3f119938b5ee630b7403aa6e2ead9" +checksum = "e2422ad645d89c99f8f3e6b88a9fdeca7fabeac836b1002371c4367c8f984aae" dependencies = [ "unicode-ident", ] @@ -2097,9 +2322,9 @@ checksum = "a993555f31e5a609f617c12db6250dedcac1b0a85076912c436e6fc9b2c8e6a3" [[package]] name = "quote" -version = "1.0.32" +version = "1.0.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50f3b39ccfb720540debaa0164757101c08ecb8d326b15358ce76a62c7e85965" +checksum = "291ec9ab5efd934aaf503a6466c5d5251535d108ee747472c3977cc5acc868ef" dependencies = [ "proc-macro2", ] @@ -2380,6 +2605,12 @@ version = "1.0.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "62cc5760263ea229d367e7dff3c0cbf09e4797a125bd87059a6c095804f3b2d1" +[[package]] +name = "rustc-demangle" +version = "0.1.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d626bb9dae77e28219937af045c257c28bfd3f69333c512553507f5f9798cb76" + [[package]] name = "rustc-hash" version = "1.1.0" @@ -2619,7 +2850,7 @@ checksum = "a4e7b8c5dc823e3b90651ff1d3808419cd14e5ad76de04feaf37da114e7a306f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.27", + "syn 2.0.48", ] [[package]] @@ -2641,7 +2872,7 @@ checksum = "e168eaaf71e8f9bd6037feb05190485708e019f4fd87d161b3c0a0d37daf85e5" dependencies = [ "proc-macro2", "quote", - "syn 2.0.27", + "syn 2.0.48", ] [[package]] @@ -2674,6 +2905,24 @@ dependencies = [ "keccak", ] +[[package]] +name = "sharded-slab" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6" +dependencies = [ + "lazy_static", +] + +[[package]] +name = "signal-hook-registry" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d8229b473baa5980ac72ef434c4415e70c4b5e71b423043adb4ba059f89c99a1" +dependencies = [ + "libc", +] + [[package]] name = "signature" version = "2.1.0" @@ -2690,6 +2939,15 @@ version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "420acb44afdae038210c99e69aae24109f32f15500aa708e81d46c9f29d55fcf" +[[package]] +name = "slab" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67" +dependencies = [ + "autocfg", +] + [[package]] name = "smallvec" version = "1.10.0" @@ -2705,6 +2963,16 @@ dependencies = [ "serde", ] +[[package]] +name = "socket2" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b5fac59a5cb5dd637972e5fca70daf0523c9067fcdc4842f053dae04a18f8e9" +dependencies = [ + "libc", + "windows-sys 0.48.0", +] + [[package]] name = "solc" version = "0.1.0" @@ -2773,7 +3041,7 @@ dependencies = [ "proc-macro2", "quote", "rustversion", - "syn 2.0.27", + "syn 2.0.48", ] [[package]] @@ -2808,9 +3076,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.27" +version = "2.0.48" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b60f673f44a8255b9c8c657daf66a596d435f2da81a555b06dc644d080ba45e0" +checksum = "0f3531638e407dfc0814761abb7c00a5b54992b849452a0646b7f65c9f770f3f" dependencies = [ "proc-macro2", "quote", @@ -2870,7 +3138,17 @@ checksum = "f9456a42c5b0d803c8cd86e73dd7cc9edd429499f37a3550d286d5e86720569f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.27", + "syn 2.0.48", +] + +[[package]] +name = "thread_local" +version = "1.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3fdd6f064ccff2d6567adcb3873ca630700f00b5ad3f060c25b5dcfd9a4ce152" +dependencies = [ + "cfg-if 1.0.0", + "once_cell", ] [[package]] @@ -2907,6 +3185,50 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" +[[package]] +name = "tokio" +version = "1.35.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c89b4efa943be685f629b149f53829423f8f5531ea21249408e8e2f8671ec104" 
+dependencies = [ + "backtrace", + "bytes", + "libc", + "mio", + "num_cpus", + "parking_lot 0.12.1", + "pin-project-lite", + "signal-hook-registry", + "socket2", + "tokio-macros", + "windows-sys 0.48.0", +] + +[[package]] +name = "tokio-macros" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.48", +] + +[[package]] +name = "tokio-util" +version = "0.7.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5419f34732d9eb6ee4c3578b7989078579b7f039cbbb9ca2c4da015749371e15" +dependencies = [ + "bytes", + "futures-core", + "futures-sink", + "pin-project-lite", + "tokio", + "tracing", +] + [[package]] name = "toml" version = "0.5.11" @@ -2933,6 +3255,123 @@ dependencies = [ "winnow", ] +[[package]] +name = "tower" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c" +dependencies = [ + "futures-core", + "futures-util", + "pin-project", + "pin-project-lite", + "tower-layer", + "tower-service", +] + +[[package]] +name = "tower-layer" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c20c8dbed6283a09604c3e69b4b7eeb54e298b8a600d4d5ecb5ad39de609f1d0" + +[[package]] +name = "tower-lsp" +version = "0.20.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d4ba052b54a6627628d9b3c34c176e7eda8359b7da9acd497b9f20998d118508" +dependencies = [ + "async-trait", + "auto_impl", + "bytes", + "dashmap", + "futures", + "httparse", + "lsp-types", + "memchr", + "serde", + "serde_json", + "tokio", + "tokio-util", + "tower", + "tower-lsp-macros", + "tracing", +] + +[[package]] +name = "tower-lsp-macros" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "84fd902d4e0b9a4b27f2f440108dc034e1758628a9b702f8ec61ad66355422fa" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.48", +] + +[[package]] +name = "tower-service" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52" + +[[package]] +name = "tracing" +version = "0.1.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" +dependencies = [ + "pin-project-lite", + "tracing-attributes", + "tracing-core", +] + +[[package]] +name = "tracing-attributes" +version = "0.1.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.48", +] + +[[package]] +name = "tracing-core" +version = "0.1.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" +dependencies = [ + "once_cell", + "valuable", +] + +[[package]] +name = "tracing-log" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3" +dependencies = [ + "log", + "once_cell", + "tracing-core", +] + +[[package]] +name = "tracing-subscriber" +version = "0.3.18" +source 
= "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b" +dependencies = [ + "nu-ansi-term", + "sharded-slab", + "smallvec", + "thread_local", + "tracing-core", + "tracing-log", +] + [[package]] name = "triehash" version = "0.8.4" @@ -3024,6 +3463,12 @@ version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a" +[[package]] +name = "valuable" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" + [[package]] name = "vec1" version = "1.10.1" diff --git a/crates/language-server/Cargo.toml b/crates/language-server/Cargo.toml index 1d53425b3..94170cc2b 100644 --- a/crates/language-server/Cargo.toml +++ b/crates/language-server/Cargo.toml @@ -35,3 +35,9 @@ log = "0.4" patricia_tree = "0.6.2" glob = "0.3.1" url = "2.4.1" +tower-lsp = "0.20.0" +tokio = { version = "1.35.1", features = ["full", "io-std"] } +tokio-macros = "2.2.0" +futures = "0.3.28" +tracing = { version = "0.1.40", features = ["async-await"] } +tracing-subscriber = "0.3.18" diff --git a/crates/language-server/src/backend.rs b/crates/language-server/src/backend.rs new file mode 100644 index 000000000..794d91c52 --- /dev/null +++ b/crates/language-server/src/backend.rs @@ -0,0 +1,200 @@ +use std::sync::{Arc, Mutex, MutexGuard}; + +use crate::db::LanguageServerDatabase; +use crate::server::server_capabilities; +use crate::workspace::Workspace; +use anyhow::Result; +use crossbeam_channel::{Receiver, Sender}; +use log::{info, Level, Metadata, Record}; +use log::{LevelFilter, SetLoggerError}; +use lsp_server::Message; +use lsp_types::{InitializeParams, InitializeResult}; +use lsp_types::notification::Notification; +use lsp_types::request::Request; +use tower_lsp::{Client, LanguageServer, LspService, Server}; + +use crate::handlers::notifications::{handle_document_did_change, handle_watched_file_changes, handle_document_did_close}; +use crate::handlers::request::handle_goto_definition; +use crate::handlers::{notifications::handle_document_did_open, request::handle_hover}; + +pub struct Backend { + // pub(crate) sender: Arc>>, + client: Client, + pub(crate) db: Arc>, + pub(crate) workspace: Workspace, + +} + +#[tower_lsp::async_trait] +impl LanguageServer for Backend { + fn initialize(&self, _: InitializeParams) -> Result { + // let initialize_params: InitializeParams = serde_json::from_value(_initialize_params)?; + + let capabilities = server_capabilities(); + + let initialize_result = lsp_types::InitializeResult { + capabilities, + server_info: Some(lsp_types::ServerInfo { + name: String::from("fe-language-server"), + version: Some(String::from(env!("CARGO_PKG_VERSION"))), + }), + }; + Ok(initialize_result) + } + fn shutdown(&self) -> tower_lsp::jsonrpc::Result<()> { + Ok(()) + } +} + +impl Backend { + pub fn new(client: Client) -> Self { + let db = Arc::new(Mutex::new(LanguageServerDatabase::default())); + Self { + client, + db, + workspace: Workspace::default(), + } + } + + fn db(&self) -> MutexGuard { + self.db.lock().unwrap() + } + + pub fn run(&mut self, receiver: Receiver) -> Result<()> { + info!("Fe Language Server listening..."); + + // watch the workspace root for changes + self.send(lsp_server::Message::Request(lsp_server::Request::new( + 28_716_283.into(), + String::from("client/registerCapability"), + 
lsp_types::RegistrationParams { + registrations: vec![lsp_types::Registration { + id: String::from("watch-fe-files"), + method: String::from("workspace/didChangeWatchedFiles"), + register_options: Some( + serde_json::to_value(lsp_types::DidChangeWatchedFilesRegistrationOptions { + watchers: vec![lsp_types::FileSystemWatcher { + glob_pattern: lsp_types::GlobPattern::String("**/*.fe".to_string()), + kind: None, // kind: Some(WatchKind::Create | WatchKind::Change | WatchKind::Delete), + }], + }) + .unwrap(), + ), + }], + }, + )))?; + + while let Some(msg) = self.next_message(&receiver) { + if let lsp_server::Message::Notification(notification) = &msg { + if notification.method == lsp_types::notification::Exit::METHOD { + return Ok(()); + } + } + + let _ = self.handle_message(msg); + } + Ok(()) + } + + + // fn handle_message(&mut self, msg: lsp_server::Message) -> Result<()> { + // if let lsp_server::Message::Request(req) = msg { + // info!("REQUEST: {:?}", req); + + // match req.method.as_str() { + // // TODO: implement actually useful hover handler + // lsp_types::request::HoverRequest::METHOD => handle_hover(self, req)?, + // // goto definition + // lsp_types::request::GotoDefinition::METHOD => handle_goto_definition(self, req)?, + // lsp_types::request::GotoTypeDefinition::METHOD => { + // handle_goto_definition(self, req)?; + // } + // lsp_types::request::GotoImplementation::METHOD => { + // handle_goto_definition(self, req)?; + // } + // lsp_types::request::GotoDeclaration::METHOD => handle_goto_definition(self, req)?, + // _ => {} + // } + // } else if let lsp_server::Message::Notification(note) = msg { + // // log the notification to the console + // info!("NOTIFICATION: {:?}", note); + + // match note.method.as_str() { + // lsp_types::notification::DidOpenTextDocument::METHOD => { + // handle_document_did_open(self, note)?; + // } + // // TODO: this is currently something of a hack to deal with + // // file renames. We should be using the workspace + // // "will change" requests instead. 
+ // lsp_types::notification::DidCloseTextDocument::METHOD => { + // handle_document_did_close(self, note)?; + // } + // lsp_types::notification::DidChangeTextDocument::METHOD => { + // handle_document_did_change(self, note)?; + // } + // lsp_types::notification::DidChangeWatchedFiles::METHOD => { + // handle_watched_file_changes(self, note)?; + // } + // _ => {} + // } + // } else if let lsp_server::Message::Response(resp) = msg { + // info!("RESPONSE: {:?}", resp); + // } + + // Ok(()) + // } + + + pub fn init_logger(&self, level: Level) -> Result<(), SetLoggerError> { + let logger = LspLogger { + level, + client: Arc::new(Mutex::new(self.client.clone())), + // sender: self.sender.clone(), + }; + let static_logger = Box::leak(Box::new(logger)); + log::set_logger(static_logger)?; + log::set_max_level(LevelFilter::Debug); + Ok(()) + } +} + +pub struct LspLogger { + level: Level, + client: Arc>, + // sender: Arc>>, +} + +impl LspLogger { + // fn send(&self, msg: Message) -> Result<()> { + // let sender = self.sender.lock().unwrap(); + // sender.send(msg)?; + // Ok(()) + // } +} + +impl log::Log for LspLogger { + fn enabled(&self, metadata: &Metadata) -> bool { + let logger = self; + metadata.level() <= logger.level + } + + fn log(&self, record: &Record) { + if self.enabled(record.metadata()) { + let message = format!("{} - {}", record.level(), record.args()); + let client = self.client.lock().unwrap(); + let _ = client.log_message( + + match record.level() { + Level::Error => lsp_types::MessageType::ERROR, + Level::Warn => lsp_types::MessageType::WARNING, + Level::Info => lsp_types::MessageType::INFO, + Level::Debug => lsp_types::MessageType::LOG, + Level::Trace => lsp_types::MessageType::LOG, + }, + message + ); + } + } + + fn flush(&self) {} +} diff --git a/crates/language-server/src/diagnostics.rs b/crates/language-server/src/diagnostics.rs index 4d1d57721..58ac430e8 100644 --- a/crates/language-server/src/diagnostics.rs +++ b/crates/language-server/src/diagnostics.rs @@ -18,7 +18,7 @@ pub trait ToCsDiag { impl ToCsDiag for T where - T: DiagnosticVoucher, + T: DiagnosticVoucher + Sync, { fn to_cs(&self, db: &LanguageServerDatabase) -> cs_diag::Diagnostic { let complete = self.to_complete(db); diff --git a/crates/language-server/src/handlers/notifications.rs b/crates/language-server/src/handlers/notifications.rs index 93ebb4f19..fbc328e5d 100644 --- a/crates/language-server/src/handlers/notifications.rs +++ b/crates/language-server/src/handlers/notifications.rs @@ -4,7 +4,8 @@ use fxhash::FxHashMap; use serde::Deserialize; use crate::{ - state::ServerState, + backend::Backend, + // state::ServerState, util::diag_to_lsp, workspace::{IngotFileContext, SyncableIngotFileContext, SyncableInputFile}, }; @@ -13,11 +14,11 @@ use crate::{ use crate::util::DummyFilePathConversion; fn run_diagnostics( - state: &mut ServerState, + state: &Backend, path: &str, ) -> Vec { - let db = &mut state.db; - let workspace = &mut state.workspace; + let db = &mut *state.db.lock().unwrap(); + let workspace = &mut *state.workspace.lock().unwrap(); let file_path = path; let top_mod = workspace.top_mod_from_file_path(db, file_path).unwrap(); db.analyze_top_mod(top_mod); @@ -25,14 +26,15 @@ fn run_diagnostics( } pub fn get_diagnostics( - state: &mut ServerState, + state: &Backend, uri: lsp_types::Url, ) -> Result>, Error> { let diags = run_diagnostics(state, uri.to_file_path().unwrap().to_str().unwrap()); + let db = &mut *state.db.lock().unwrap(); let diagnostics = diags .into_iter() - .flat_map(|diag| 
diag_to_lsp(diag, &state.db).clone()); + .flat_map(|diag| diag_to_lsp(diag, db).clone()); // we need to reduce the diagnostics to a map from URL to Vec let mut result = FxHashMap::>::default(); @@ -49,24 +51,27 @@ pub fn get_diagnostics( } pub fn handle_document_did_open( - state: &mut ServerState, + state: &mut Backend, note: lsp_server::Notification, ) -> Result<(), Error> { let params = lsp_types::DidOpenTextDocumentParams::deserialize(note.params)?; - let input = state - .workspace - .input_from_file_path( - &mut state.db, - params - .text_document - .uri - .to_file_path() - .unwrap() - .to_str() - .unwrap(), - ) - .unwrap(); - let _ = input.sync(&mut state.db, None); + { + let db = &mut *state.db.lock().unwrap(); + let workspace = &mut *state.workspace.lock().unwrap(); + let input = workspace + .input_from_file_path( + db, + params + .text_document + .uri + .to_file_path() + .unwrap() + .to_str() + .unwrap(), + ) + .unwrap(); + let _ = input.sync(db, None); + } let diagnostics = get_diagnostics(state, params.text_document.uri.clone())?; send_diagnostics(state, diagnostics) } @@ -77,14 +82,15 @@ pub fn handle_document_did_open( // The fix: handle document renaming more explicitly in the "will rename" flow, along with the document // rename refactor. pub fn handle_document_did_close( - state: &mut ServerState, + state: &mut Backend, note: lsp_server::Notification, ) -> Result<(), Error> { let params = lsp_types::DidCloseTextDocumentParams::deserialize(note.params)?; - let input = state - .workspace + let db = &mut *state.db.lock().unwrap(); + let workspace = &mut *state.workspace.lock().unwrap(); + let input = workspace .input_from_file_path( - &mut state.db, + db, params .text_document .uri @@ -94,38 +100,41 @@ pub fn handle_document_did_close( .unwrap(), ) .unwrap(); - input.sync(&mut state.db, None) + input.sync(db, None) } pub fn handle_document_did_change( - state: &mut ServerState, + state: &mut Backend, note: lsp_server::Notification, ) -> Result<(), Error> { let params = lsp_types::DidChangeTextDocumentParams::deserialize(note.params)?; - let input = state - .workspace - .input_from_file_path( - &mut state.db, - params - .text_document - .uri - .to_file_path() - .unwrap() - .to_str() - .unwrap(), - ) - .unwrap(); - let _ = input.sync(&mut state.db, Some(params.content_changes[0].text.clone())); + { + let db = &mut *state.db.lock().unwrap(); + let workspace = &mut *state.workspace.lock().unwrap(); + let input = workspace + .input_from_file_path( + db, + params + .text_document + .uri + .to_file_path() + .unwrap() + .to_str() + .unwrap(), + ) + .unwrap(); + let _ = input.sync(db, Some(params.content_changes[0].text.clone())); + } let diagnostics = get_diagnostics(state, params.text_document.uri.clone())?; // info!("sending diagnostics... 
{:?}", diagnostics); send_diagnostics(state, diagnostics) } -fn send_diagnostics( - state: &mut ServerState, +pub fn send_diagnostics( + _state: &mut Backend, diagnostics: FxHashMap>, ) -> Result<(), Error> { - let results = diagnostics.into_iter().map(|(uri, diags)| { + let _results = diagnostics.into_iter().map(|(uri, diags)| { let result = lsp_types::PublishDiagnosticsParams { uri, diagnostics: diags, @@ -137,16 +146,16 @@ fn send_diagnostics( }) }); - results.for_each(|result| { - let sender = state.sender.lock().unwrap(); - let _ = sender.send(result); - }); + // results.for_each(|result| { + // let sender = state.client; + // let _ = sender.send(result); + // }); Ok(()) } pub fn handle_watched_file_changes( - state: &mut ServerState, + state: &mut Backend, note: lsp_server::Notification, ) -> Result<(), Error> { let params = lsp_types::DidChangeWatchedFilesParams::deserialize(note.params)?; @@ -156,30 +165,35 @@ pub fn handle_watched_file_changes( let uri = change.uri; let path = uri.to_file_path().unwrap(); - match change.typ { - lsp_types::FileChangeType::CREATED => { - // TODO: handle this more carefully! - // this is inefficient, a hack for now - let _ = state.workspace.sync(&mut state.db); - let input = state - .workspace - .input_from_file_path(&mut state.db, path.to_str().unwrap()) - .unwrap(); - let _ = input.sync(&mut state.db, None); - } - lsp_types::FileChangeType::CHANGED => { - let input = state - .workspace - .input_from_file_path(&mut state.db, path.to_str().unwrap()) - .unwrap(); - let _ = input.sync(&mut state.db, None); - } - lsp_types::FileChangeType::DELETED => { - // TODO: handle this more carefully! - // this is inefficient, a hack for now - let _ = state.workspace.sync(&mut state.db); + // TODO: sort out the mutable/immutable borrow issues here + { + let db = &mut state.db.lock().unwrap(); + let workspace = &mut state.workspace.lock().unwrap(); + match change.typ { + lsp_types::FileChangeType::CREATED => { + // TODO: handle this more carefully! + // this is inefficient, a hack for now + // let db = state.db(); + // let db = &mut state.db.lock().unwrap(); + let _ = workspace.sync(db); + let input = workspace + .input_from_file_path(db, path.to_str().unwrap()) + .unwrap(); + let _ = input.sync(db, None); + } + lsp_types::FileChangeType::CHANGED => { + let input = workspace + .input_from_file_path(db, path.to_str().unwrap()) + .unwrap(); + let _ = input.sync(db, None); + } + lsp_types::FileChangeType::DELETED => { + // TODO: handle this more carefully! 
+ // this is inefficient, a hack for now + let _ = workspace.sync(db); + } + _ => {} } - _ => {} } // collect diagnostics for the file if change.typ != lsp_types::FileChangeType::DELETED { diff --git a/crates/language-server/src/handlers/request.rs b/crates/language-server/src/handlers/request.rs index f30f08fa8..455234dc5 100644 --- a/crates/language-server/src/handlers/request.rs +++ b/crates/language-server/src/handlers/request.rs @@ -7,17 +7,16 @@ use lsp_server::{Response, ResponseError}; use serde::Deserialize; use crate::{ + backend::Backend, goto::{goto_enclosing_path, Cursor}, - state::ServerState, util::{to_lsp_location_from_scope, to_offset_from_position}, workspace::IngotFileContext, }; -pub fn handle_hover( - state: &mut ServerState, - req: lsp_server::Request, -) -> Result<(), anyhow::Error> { +pub fn handle_hover(state: &mut Backend, req: lsp_server::Request) -> Result<(), anyhow::Error> { // TODO: get more relevant information for the hover + let db = &mut *state.db.lock().unwrap(); + let workspace = &mut state.workspace; let params = lsp_types::HoverParams::deserialize(req.params)?; let file_path = ¶ms .text_document_position_params @@ -41,16 +40,15 @@ pub fn handle_hover( ); // let file_path = std::path::Path::new(file_path); info!("getting hover info for file_path: {:?}", file_path); - let ingot = state - .workspace - .input_from_file_path(&mut state.db, file_path) - .map(|input| input.ingot(&state.db)); + let ingot = workspace + .input_from_file_path(db, file_path) + .map(|input| input.ingot(db)); // info!("got ingot: {:?} of type {:?}", ingot, ingot.map(|ingot| ingot.kind(&mut state.db))); let ingot_info: Option = { let ingot_type = match ingot { - Some(ingot) => match ingot.kind(&state.db) { + Some(ingot) => match ingot.kind(db) { IngotKind::StandAlone => None, IngotKind::Local => Some("Local ingot"), IngotKind::External => Some("External ingot"), @@ -58,11 +56,11 @@ pub fn handle_hover( }, None => Some("No ingot information available"), }; - let ingot_file_count = ingot.unwrap().files(&state.db).len(); + let ingot_file_count = ingot.unwrap().files(db).len(); let ingot_path = ingot .unwrap() - .path(&state.db) - .strip_prefix(&state.workspace.root_path.clone().unwrap_or("".into())) + .path(db) + .strip_prefix(workspace.root_path.clone().unwrap_or("".into())) .ok(); ingot_type.map(|ingot_type| { @@ -70,22 +68,21 @@ pub fn handle_hover( }) }; - let top_mod = state - .workspace - .top_mod_from_file_path(&mut state.db, file_path) + let top_mod = workspace + .top_mod_from_file_path(db, file_path) .unwrap(); - let early_resolution = goto_enclosing_path(&mut state.db, top_mod, cursor); + let early_resolution = goto_enclosing_path(db, top_mod, cursor); let goto_info = match early_resolution { Some(EarlyResolvedPath::Full(bucket)) => bucket .iter() - .map(|x| x.pretty_path(&state.db).unwrap()) + .map(|x| x.pretty_path(db).unwrap()) .collect::>() .join("\n"), Some(EarlyResolvedPath::Partial { res, unresolved_from: _, - }) => res.pretty_path(&state.db).unwrap(), + }) => res.pretty_path(db).unwrap(), None => String::from("No goto info available"), }; @@ -102,24 +99,26 @@ pub fn handle_hover( }), range: None, }; - let response_message = Response { + let _response_message = Response { id: req.id, result: Some(serde_json::to_value(result)?), error: None, }; - state.send_response(response_message)?; + // state.send_response(response_message)?; Ok(()) } use lsp_types::TextDocumentPositionParams; pub fn handle_goto_definition( - state: &mut ServerState, + state: &mut Backend, req: 
lsp_server::Request,
 ) -> Result<(), anyhow::Error> {
     info!("handling goto definition request: {:?}", req);
     let params = TextDocumentPositionParams::deserialize(req.params)?;
+    let db = &mut *state.db.lock().unwrap();
+    let workspace = &mut state.workspace;
 
     // Convert the position to an offset in the file
     let file_text = std::fs::read_to_string(params.text_document.uri.path())?;
@@ -127,11 +126,10 @@ pub fn handle_goto_definition(
 
     // Get the module and the goto info
     let file_path = params.text_document.uri.path();
-    let top_mod = state
-        .workspace
-        .top_mod_from_file_path(&mut state.db, file_path)
+    let top_mod = workspace
+        .top_mod_from_file_path(db, file_path)
         .unwrap();
-    let goto_info = goto_enclosing_path(&mut state.db, top_mod, cursor);
+    let goto_info = goto_enclosing_path(db, top_mod, cursor);
 
     // Convert the goto info to a Location
     let scopes = match goto_info {
@@ -152,13 +150,13 @@ pub fn handle_goto_definition(
     let locations = scopes
         .iter()
         .filter_map(|scope| *scope)
-        .map(|scope| to_lsp_location_from_scope(scope, &state.db))
+        .map(|scope| to_lsp_location_from_scope(scope, db))
         .collect::<Vec<_>>();
 
     let errors = scopes
         .iter()
         .filter_map(|scope| *scope)
-        .map(|scope| to_lsp_location_from_scope(scope, &state.db))
+        .map(|scope| to_lsp_location_from_scope(scope, db))
         .filter_map(std::result::Result::err)
         .map(|err| err.to_string())
         .collect::<Vec<_>>()
@@ -186,6 +184,6 @@ pub fn handle_goto_definition(
 
     info!("goto definition response: {:?}", response_message);
 
-    state.send_response(response_message)?;
+    // state.send_response(response_message)?;
     Ok(())
 }
diff --git a/crates/language-server/src/main.rs b/crates/language-server/src/main.rs
index e5f3e7dca..f15cc8c08 100644
--- a/crates/language-server/src/main.rs
+++ b/crates/language-server/src/main.rs
@@ -2,18 +2,30 @@ mod db;
 mod diagnostics;
 mod goto;
 mod server;
-mod state;
+// mod state;
+mod backend;
 mod util;
 mod workspace;
 
+use backend::Backend;
 use db::Jar;
 
 mod handlers {
     pub mod notifications;
     pub mod request;
 }
-use server::run_server;
+// use server::run_server;
 
-fn main() {
-    let _ = run_server();
+// fn main() {
+//     // let _ = run_server();
+// }
+#[tokio_macros::main]
+async fn main() {
+    let stdin = tokio::io::stdin();
+    let stdout = tokio::io::stdout();
+
+    let (service, socket) = tower_lsp::LspService::build(Backend::new).finish();
+    tower_lsp::Server::new(stdin, stdout, socket)
+        .serve(service)
+        .await;
 }
diff --git a/crates/language-server/src/server.rs b/crates/language-server/src/server.rs
index c3ef4ffcf..f2bcc718c 100644
--- a/crates/language-server/src/server.rs
+++ b/crates/language-server/src/server.rs
@@ -1,12 +1,9 @@
-use super::state::ServerState;
-use anyhow::Result;
-use lsp_server::{Connection, Notification};
-use lsp_types::{HoverProviderCapability, InitializeParams, ServerCapabilities};
+use lsp_types::{HoverProviderCapability, ServerCapabilities};
 
 #[cfg(target_arch = "wasm32")]
 use crate::util::DummyFilePathConversion;
 
-fn server_capabilities() -> ServerCapabilities {
+pub(crate) fn server_capabilities() -> ServerCapabilities {
     ServerCapabilities {
         hover_provider: Some(HoverProviderCapability::Simple(true)),
         // full sync mode for now
@@ -102,56 +99,55 @@ fn server_capabilities() -> ServerCapabilities {
     }
 }
 
-pub fn run_server() -> Result<()> {
-    let (connection, io_threads) = Connection::stdio();
+// pub fn run_server() -> Result<()> {
+//     let (connection, io_threads) = Connection::stdio();
 
-    let (request_id, _initialize_params) = connection.initialize_start()?;
-    let initialize_params:
InitializeParams = serde_json::from_value(_initialize_params)?; - // let debug_params = initialize_params.clone(); - // todo: actually use initialization params +// let (request_id, _initialize_params) = connection.initialize_start()?; +// let initialize_params: InitializeParams = serde_json::from_value(_initialize_params)?; - let capabilities = server_capabilities(); +// let capabilities = server_capabilities(); - let initialize_result = lsp_types::InitializeResult { - capabilities, - server_info: Some(lsp_types::ServerInfo { - name: String::from("fe-language-server"), - version: Some(String::from(env!("CARGO_PKG_VERSION"))), - }), - }; +// let initialize_result = lsp_types::InitializeResult { +// capabilities, +// server_info: Some(lsp_types::ServerInfo { +// name: String::from("fe-language-server"), +// version: Some(String::from(env!("CARGO_PKG_VERSION"))), +// }), +// }; - let initialize_result = serde_json::to_value(initialize_result).unwrap(); +// let initialize_result = serde_json::to_value(initialize_result).unwrap(); - connection.initialize_finish(request_id, initialize_result)?; - // send a "hello" message to the client - connection - .sender - .send(lsp_server::Message::Notification(Notification { - method: String::from("window/showMessage"), - params: serde_json::to_value(lsp_types::ShowMessageParams { - typ: lsp_types::MessageType::INFO, - message: String::from("hello from the Fe language server"), - }) - .unwrap(), - }))?; +// connection.initialize_finish(request_id, initialize_result)?; +// // send a "hello" message to the client +// connection +// .sender +// .send(lsp_server::Message::Notification(Notification { +// method: String::from("window/showMessage"), +// params: serde_json::to_value(lsp_types::ShowMessageParams { +// typ: lsp_types::MessageType::INFO, +// message: String::from("hello from the Fe language server"), +// }) +// .unwrap(), +// }))?; - let mut state = ServerState::new(connection.sender); - let _ = state.init_logger(log::Level::Info); - state.workspace.set_workspace_root( - &mut state.db, - initialize_params - .root_uri - .unwrap() - .to_file_path() - .ok() - .unwrap(), - )?; - // info!("TESTING"); - // info!("initialized with params: {:?}", debug_params); +// // let client = Backend::new(connection.sender.clone()); +// let mut state = ServerState::new(connection.sender); +// let _ = state.init_logger(log::Level::Info); +// state.workspace.set_workspace_root( +// &mut state.db, +// initialize_params +// .root_uri +// .unwrap() +// .to_file_path() +// .ok() +// .unwrap(), +// )?; +// // info!("TESTING"); +// // info!("initialized with params: {:?}", debug_params); - let result = state.run(connection.receiver); +// let result = state.run(connection.receiver); - io_threads.join().unwrap(); +// io_threads.join().unwrap(); - result -} +// result +// } From 1e30c00bfd3abb0dc26138482431ca54307e0b57 Mon Sep 17 00:00:00 2001 From: Micah Date: Fri, 12 Jan 2024 14:29:29 -0500 Subject: [PATCH 02/66] tower-lsp logging --- crates/language-server/src/backend.rs | 200 +++++++++++++++++--------- 1 file changed, 134 insertions(+), 66 deletions(-) diff --git a/crates/language-server/src/backend.rs b/crates/language-server/src/backend.rs index 794d91c52..b26ecbea2 100644 --- a/crates/language-server/src/backend.rs +++ b/crates/language-server/src/backend.rs @@ -1,33 +1,34 @@ -use std::sync::{Arc, Mutex, MutexGuard}; +use std::fmt::Write; + +use std::sync::{Arc, Mutex}; use crate::db::LanguageServerDatabase; +use crate::handlers::notifications::get_diagnostics; use 
crate::server::server_capabilities; -use crate::workspace::Workspace; +use crate::workspace::{IngotFileContext, SyncableInputFile, Workspace}; use anyhow::Result; -use crossbeam_channel::{Receiver, Sender}; + use log::{info, Level, Metadata, Record}; use log::{LevelFilter, SetLoggerError}; use lsp_server::Message; -use lsp_types::{InitializeParams, InitializeResult}; -use lsp_types::notification::Notification; -use lsp_types::request::Request; -use tower_lsp::{Client, LanguageServer, LspService, Server}; +use lsp_types::{InitializeParams, InitializeResult, TextDocumentItem}; -use crate::handlers::notifications::{handle_document_did_change, handle_watched_file_changes, handle_document_did_close}; -use crate::handlers::request::handle_goto_definition; -use crate::handlers::{notifications::handle_document_did_open, request::handle_hover}; +use tokio::task; +use tower_lsp::{Client, LanguageServer}; pub struct Backend { // pub(crate) sender: Arc>>, - client: Client, + pub(crate) client: Client, pub(crate) db: Arc>, - pub(crate) workspace: Workspace, - + pub(crate) workspace: Arc>, } #[tower_lsp::async_trait] impl LanguageServer for Backend { - fn initialize(&self, _: InitializeParams) -> Result { + async fn initialize( + &self, + initialize_params: InitializeParams, + ) -> tower_lsp::jsonrpc::Result { // let initialize_params: InitializeParams = serde_json::from_value(_initialize_params)?; let capabilities = server_capabilities(); @@ -39,63 +40,122 @@ impl LanguageServer for Backend { version: Some(String::from(env!("CARGO_PKG_VERSION"))), }), }; + let _ = self.init_logger(log::Level::Info); + let workspace = &mut *self.workspace.lock().unwrap(); + let db = &mut *self.db.lock().unwrap(); + let _ = workspace.set_workspace_root( + db, + initialize_params + .root_uri + .unwrap() + .to_file_path() + .ok() + .unwrap(), + ); + info!("TESTING"); + // info!("initialized with params: {:?}", debug_params); Ok(initialize_result) } - fn shutdown(&self) -> tower_lsp::jsonrpc::Result<()> { + async fn shutdown(&self) -> tower_lsp::jsonrpc::Result<()> { Ok(()) } + + async fn did_open(&self, params: lsp_types::DidOpenTextDocumentParams) { + // let _ = + info!("did open: {:?}", params); + { + let db = &mut *self.db.lock().unwrap(); + let workspace = &mut *self.workspace.lock().unwrap(); + let input = workspace + .input_from_file_path( + db, + params + .text_document + .uri + .to_file_path() + .unwrap() + .to_str() + .unwrap(), + ) + .unwrap(); + let _ = input.sync(db, None); + } + self.on_change(TextDocumentItem { + uri: params.text_document.uri, + language_id: params.text_document.language_id, + version: params.text_document.version, + text: params.text_document.text, + }) + .await; + } } +use tracing::Subscriber; +use tracing_subscriber::prelude::*; +use tracing_subscriber::FmtSubscriber; +use tracing_subscriber::Layer; + impl Backend { pub fn new(client: Client) -> Self { let db = Arc::new(Mutex::new(LanguageServerDatabase::default())); + let workspace = Arc::new(Mutex::new(Workspace::default())); Self { client, db, - workspace: Workspace::default(), + workspace, } } + async fn on_change(&self, params: TextDocumentItem) { + let diagnostics = get_diagnostics(self, params.uri.clone()) + .unwrap() + .into_iter() + .map(|(uri, diags)| self.client.publish_diagnostics(uri, diags, None)) + .collect::>(); - fn db(&self) -> MutexGuard { - self.db.lock().unwrap() + futures::future::join_all(diagnostics).await; } - pub fn run(&mut self, receiver: Receiver) -> Result<()> { - info!("Fe Language Server listening..."); - - 
// watch the workspace root for changes - self.send(lsp_server::Message::Request(lsp_server::Request::new( - 28_716_283.into(), - String::from("client/registerCapability"), - lsp_types::RegistrationParams { - registrations: vec![lsp_types::Registration { - id: String::from("watch-fe-files"), - method: String::from("workspace/didChangeWatchedFiles"), - register_options: Some( - serde_json::to_value(lsp_types::DidChangeWatchedFilesRegistrationOptions { - watchers: vec![lsp_types::FileSystemWatcher { - glob_pattern: lsp_types::GlobPattern::String("**/*.fe".to_string()), - kind: None, // kind: Some(WatchKind::Create | WatchKind::Change | WatchKind::Delete), - }], - }) - .unwrap(), - ), - }], - }, - )))?; - - while let Some(msg) = self.next_message(&receiver) { - if let lsp_server::Message::Notification(notification) = &msg { - if notification.method == lsp_types::notification::Exit::METHOD { - return Ok(()); - } - } - - let _ = self.handle_message(msg); - } - Ok(()) - } + // pub(crate) fn db(&self) -> &mut LanguageServerDatabase { + // let mut db = self.db.lock().unwrap(); + // &mut *db + // } + + // pub fn run(&mut self, receiver: Receiver) -> Result<()> { + // info!("Fe Language Server listening..."); + + // // watch the workspace root for changes + // self.send(lsp_server::Message::Request(lsp_server::Request::new( + // 28_716_283.into(), + // String::from("client/registerCapability"), + // lsp_types::RegistrationParams { + // registrations: vec![lsp_types::Registration { + // id: String::from("watch-fe-files"), + // method: String::from("workspace/didChangeWatchedFiles"), + // register_options: Some( + // serde_json::to_value(lsp_types::DidChangeWatchedFilesRegistrationOptions { + // watchers: vec![lsp_types::FileSystemWatcher { + // glob_pattern: lsp_types::GlobPattern::String("**/*.fe".to_string()), + // kind: None, // kind: Some(WatchKind::Create | WatchKind::Change | WatchKind::Delete), + // }], + // }) + // .unwrap(), + // ), + // }], + // }, + // )))?; + + // while let Some(msg) = self.next_message(&receiver) { + // if let lsp_server::Message::Notification(notification) = &msg { + // if notification.method == lsp_types::notification::Exit::METHOD { + // return Ok(()); + // } + // } + + // let _ = self.handle_message(msg); + // } + // Ok(()) + // } // fn handle_message(&mut self, msg: lsp_server::Message) -> Result<()> { // if let lsp_server::Message::Request(req) = msg { @@ -148,7 +208,7 @@ impl Backend { pub fn init_logger(&self, level: Level) -> Result<(), SetLoggerError> { let logger = LspLogger { level, - client: Arc::new(Mutex::new(self.client.clone())), + client: Arc::new(tokio::sync::Mutex::new(self.client.clone())), // sender: self.sender.clone(), }; let static_logger = Box::leak(Box::new(logger)); @@ -160,7 +220,7 @@ impl Backend { pub struct LspLogger { level: Level, - client: Arc>, + client: Arc>, // sender: Arc>>, } @@ -178,21 +238,29 @@ impl log::Log for LspLogger { metadata.level() <= logger.level } + // TODO: investigate performance implications of this fn log(&self, record: &Record) { if self.enabled(record.metadata()) { let message = format!("{} - {}", record.level(), record.args()); - let client = self.client.lock().unwrap(); - let _ = client.log_message( - - match record.level() { - Level::Error => lsp_types::MessageType::ERROR, - Level::Warn => lsp_types::MessageType::WARNING, - Level::Info => lsp_types::MessageType::INFO, - Level::Debug => lsp_types::MessageType::LOG, - Level::Trace => lsp_types::MessageType::LOG, + let level = record.level(); + let client = 
self.client.clone(); + tokio::task::spawn(async move { + // let client = client.lock().unwrap(); + let client = client.lock().await; + // let client = client.lock().unwrap(); + client + .log_message( + match level { + log::Level::Error => lsp_types::MessageType::ERROR, + log::Level::Warn => lsp_types::MessageType::WARNING, + log::Level::Info => lsp_types::MessageType::INFO, + log::Level::Debug => lsp_types::MessageType::LOG, + log::Level::Trace => lsp_types::MessageType::LOG, }, - message - ); + message, + ) + .await + }); } } From 9589ef1cba60641f62443c035f6da690da001e30 Mon Sep 17 00:00:00 2001 From: Micah Date: Fri, 12 Jan 2024 14:36:04 -0500 Subject: [PATCH 03/66] language server clippy/fmt --- crates/language-server/src/backend.rs | 12 +++++------- crates/language-server/src/handlers/notifications.rs | 5 +---- crates/language-server/src/handlers/request.rs | 8 ++------ 3 files changed, 8 insertions(+), 17 deletions(-) diff --git a/crates/language-server/src/backend.rs b/crates/language-server/src/backend.rs index b26ecbea2..f16efcc0b 100644 --- a/crates/language-server/src/backend.rs +++ b/crates/language-server/src/backend.rs @@ -1,4 +1,4 @@ -use std::fmt::Write; + use std::sync::{Arc, Mutex}; @@ -10,10 +10,10 @@ use anyhow::Result; use log::{info, Level, Metadata, Record}; use log::{LevelFilter, SetLoggerError}; -use lsp_server::Message; + use lsp_types::{InitializeParams, InitializeResult, TextDocumentItem}; -use tokio::task; + use tower_lsp::{Client, LanguageServer}; pub struct Backend { @@ -92,7 +92,7 @@ impl LanguageServer for Backend { use tracing::Subscriber; use tracing_subscriber::prelude::*; -use tracing_subscriber::FmtSubscriber; + use tracing_subscriber::Layer; impl Backend { @@ -115,7 +115,6 @@ impl Backend { futures::future::join_all(diagnostics).await; } - // pub(crate) fn db(&self) -> &mut LanguageServerDatabase { // let mut db = self.db.lock().unwrap(); // &mut *db @@ -204,7 +203,6 @@ impl Backend { // Ok(()) // } - pub fn init_logger(&self, level: Level) -> Result<(), SetLoggerError> { let logger = LspLogger { level, @@ -259,7 +257,7 @@ impl log::Log for LspLogger { }, message, ) - .await + .await; }); } } diff --git a/crates/language-server/src/handlers/notifications.rs b/crates/language-server/src/handlers/notifications.rs index fbc328e5d..c491a3959 100644 --- a/crates/language-server/src/handlers/notifications.rs +++ b/crates/language-server/src/handlers/notifications.rs @@ -13,10 +13,7 @@ use crate::{ #[cfg(target_arch = "wasm32")] use crate::util::DummyFilePathConversion; -fn run_diagnostics( - state: &Backend, - path: &str, -) -> Vec { +fn run_diagnostics(state: &Backend, path: &str) -> Vec { let db = &mut *state.db.lock().unwrap(); let workspace = &mut *state.workspace.lock().unwrap(); let file_path = path; diff --git a/crates/language-server/src/handlers/request.rs b/crates/language-server/src/handlers/request.rs index 455234dc5..bee756165 100644 --- a/crates/language-server/src/handlers/request.rs +++ b/crates/language-server/src/handlers/request.rs @@ -68,9 +68,7 @@ pub fn handle_hover(state: &mut Backend, req: lsp_server::Request) -> Result<(), }) }; - let top_mod = workspace - .top_mod_from_file_path(db, file_path) - .unwrap(); + let top_mod = workspace.top_mod_from_file_path(db, file_path).unwrap(); let early_resolution = goto_enclosing_path(db, top_mod, cursor); let goto_info = match early_resolution { @@ -126,9 +124,7 @@ pub fn handle_goto_definition( // Get the module and the goto info let file_path = params.text_document.uri.path(); - let top_mod = 
workspace - .top_mod_from_file_path(db, file_path) - .unwrap(); + let top_mod = workspace.top_mod_from_file_path(db, file_path).unwrap(); let goto_info = goto_enclosing_path(db, top_mod, cursor); // Convert the goto info to a Location From 936986f6754d19d2368a976ab37c686f68b87822 Mon Sep 17 00:00:00 2001 From: Micah Date: Mon, 15 Jan 2024 12:40:52 -0500 Subject: [PATCH 04/66] language server tower refactor minor cleanup --- crates/language-server/src/backend.rs | 162 ++++-------------- .../src/handlers/notifications.rs | 62 ++++--- .../language-server/src/handlers/request.rs | 17 +- 3 files changed, 81 insertions(+), 160 deletions(-) diff --git a/crates/language-server/src/backend.rs b/crates/language-server/src/backend.rs index f16efcc0b..186c5e3ff 100644 --- a/crates/language-server/src/backend.rs +++ b/crates/language-server/src/backend.rs @@ -1,5 +1,3 @@ - - use std::sync::{Arc, Mutex}; use crate::db::LanguageServerDatabase; @@ -8,19 +6,35 @@ use crate::server::server_capabilities; use crate::workspace::{IngotFileContext, SyncableInputFile, Workspace}; use anyhow::Result; -use log::{info, Level, Metadata, Record}; -use log::{LevelFilter, SetLoggerError}; +use log::{info, Level, LevelFilter, Metadata, Record, SetLoggerError}; -use lsp_types::{InitializeParams, InitializeResult, TextDocumentItem}; +use lsp_types::{DidChangeWatchedFilesRegistrationOptions, FileSystemWatcher, GlobPattern, InitializeParams, InitializeResult, Registration, TextDocumentItem}; use tower_lsp::{Client, LanguageServer}; +async fn register_capabilities(client: Arc>) -> Result<()> { + let client = client.lock().await; + let registration = Registration { + id: String::from("watch-fe-files"), + method: String::from("workspace/didChangeWatchedFiles"), + register_options: Some(serde_json::to_value(DidChangeWatchedFilesRegistrationOptions { + watchers: vec![FileSystemWatcher { + glob_pattern: GlobPattern::String("**/*.fe".to_string()), + kind: None, + }], + }).unwrap()), + }; + client.register_capability(vec![registration]).await; + + Ok(()) +} + pub struct Backend { // pub(crate) sender: Arc>>, - pub(crate) client: Client, - pub(crate) db: Arc>, - pub(crate) workspace: Arc>, + pub(crate) client: Arc>, + pub(crate) db: Arc>, + pub(crate) workspace: Arc>, } #[tower_lsp::async_trait] @@ -41,8 +55,8 @@ impl LanguageServer for Backend { }), }; let _ = self.init_logger(log::Level::Info); - let workspace = &mut *self.workspace.lock().unwrap(); - let db = &mut *self.db.lock().unwrap(); + let workspace = &mut *self.workspace.lock().await; + let db = &mut *self.db.lock().await; let _ = workspace.set_workspace_root( db, initialize_params @@ -52,8 +66,8 @@ impl LanguageServer for Backend { .ok() .unwrap(), ); - info!("TESTING"); - // info!("initialized with params: {:?}", debug_params); + let client = self.client.clone(); + let _ = register_capabilities(client).await; Ok(initialize_result) } async fn shutdown(&self) -> tower_lsp::jsonrpc::Result<()> { @@ -64,8 +78,8 @@ impl LanguageServer for Backend { // let _ = info!("did open: {:?}", params); { - let db = &mut *self.db.lock().unwrap(); - let workspace = &mut *self.workspace.lock().unwrap(); + let db = &mut *self.db.lock().await; + let workspace = &mut *self.workspace.lock().await; let input = workspace .input_from_file_path( db, @@ -90,124 +104,35 @@ impl LanguageServer for Backend { } } -use tracing::Subscriber; -use tracing_subscriber::prelude::*; - -use tracing_subscriber::Layer; - impl Backend { pub fn new(client: Client) -> Self { - let db = 
Arc::new(Mutex::new(LanguageServerDatabase::default())); - let workspace = Arc::new(Mutex::new(Workspace::default())); + let db = Arc::new(tokio::sync::Mutex::new(LanguageServerDatabase::default())); + let workspace = Arc::new(tokio::sync::Mutex::new(Workspace::default())); + let client = Arc::new(tokio::sync::Mutex::new(client)); Self { client, db, workspace, } } + async fn on_change(&self, params: TextDocumentItem) { - let diagnostics = get_diagnostics(self, params.uri.clone()) + let client = self.client.lock().await; + let db = &mut *self.db.lock().await; + let workspace = &mut *self.workspace.lock().await; + let diagnostics = get_diagnostics(db, workspace, params.uri.clone()) .unwrap() .into_iter() - .map(|(uri, diags)| self.client.publish_diagnostics(uri, diags, None)) + .map(|(uri, diags)| client.publish_diagnostics(uri, diags, None)) .collect::>(); futures::future::join_all(diagnostics).await; } - // pub(crate) fn db(&self) -> &mut LanguageServerDatabase { - // let mut db = self.db.lock().unwrap(); - // &mut *db - // } - - // pub fn run(&mut self, receiver: Receiver) -> Result<()> { - // info!("Fe Language Server listening..."); - - // // watch the workspace root for changes - // self.send(lsp_server::Message::Request(lsp_server::Request::new( - // 28_716_283.into(), - // String::from("client/registerCapability"), - // lsp_types::RegistrationParams { - // registrations: vec![lsp_types::Registration { - // id: String::from("watch-fe-files"), - // method: String::from("workspace/didChangeWatchedFiles"), - // register_options: Some( - // serde_json::to_value(lsp_types::DidChangeWatchedFilesRegistrationOptions { - // watchers: vec![lsp_types::FileSystemWatcher { - // glob_pattern: lsp_types::GlobPattern::String("**/*.fe".to_string()), - // kind: None, // kind: Some(WatchKind::Create | WatchKind::Change | WatchKind::Delete), - // }], - // }) - // .unwrap(), - // ), - // }], - // }, - // )))?; - - // while let Some(msg) = self.next_message(&receiver) { - // if let lsp_server::Message::Notification(notification) = &msg { - // if notification.method == lsp_types::notification::Exit::METHOD { - // return Ok(()); - // } - // } - - // let _ = self.handle_message(msg); - // } - // Ok(()) - // } - - // fn handle_message(&mut self, msg: lsp_server::Message) -> Result<()> { - // if let lsp_server::Message::Request(req) = msg { - // info!("REQUEST: {:?}", req); - - // match req.method.as_str() { - // // TODO: implement actually useful hover handler - // lsp_types::request::HoverRequest::METHOD => handle_hover(self, req)?, - // // goto definition - // lsp_types::request::GotoDefinition::METHOD => handle_goto_definition(self, req)?, - // lsp_types::request::GotoTypeDefinition::METHOD => { - // handle_goto_definition(self, req)?; - // } - // lsp_types::request::GotoImplementation::METHOD => { - // handle_goto_definition(self, req)?; - // } - // lsp_types::request::GotoDeclaration::METHOD => handle_goto_definition(self, req)?, - // _ => {} - // } - // } else if let lsp_server::Message::Notification(note) = msg { - // // log the notification to the console - // info!("NOTIFICATION: {:?}", note); - - // match note.method.as_str() { - // lsp_types::notification::DidOpenTextDocument::METHOD => { - // handle_document_did_open(self, note)?; - // } - // // TODO: this is currently something of a hack to deal with - // // file renames. We should be using the workspace - // // "will change" requests instead. 
- // lsp_types::notification::DidCloseTextDocument::METHOD => { - // handle_document_did_close(self, note)?; - // } - // lsp_types::notification::DidChangeTextDocument::METHOD => { - // handle_document_did_change(self, note)?; - // } - // lsp_types::notification::DidChangeWatchedFiles::METHOD => { - // handle_watched_file_changes(self, note)?; - // } - // _ => {} - // } - // } else if let lsp_server::Message::Response(resp) = msg { - // info!("RESPONSE: {:?}", resp); - // } - - // Ok(()) - // } - pub fn init_logger(&self, level: Level) -> Result<(), SetLoggerError> { let logger = LspLogger { level, - client: Arc::new(tokio::sync::Mutex::new(self.client.clone())), - // sender: self.sender.clone(), + client: self.client.clone(), }; let static_logger = Box::leak(Box::new(logger)); log::set_logger(static_logger)?; @@ -219,15 +144,6 @@ impl Backend { pub struct LspLogger { level: Level, client: Arc>, - // sender: Arc>>, -} - -impl LspLogger { - // fn send(&self, msg: Message) -> Result<()> { - // let sender = self.sender.lock().unwrap(); - // sender.send(msg)?; - // Ok(()) - // } } impl log::Log for LspLogger { @@ -243,9 +159,7 @@ impl log::Log for LspLogger { let level = record.level(); let client = self.client.clone(); tokio::task::spawn(async move { - // let client = client.lock().unwrap(); let client = client.lock().await; - // let client = client.lock().unwrap(); client .log_message( match level { diff --git a/crates/language-server/src/handlers/notifications.rs b/crates/language-server/src/handlers/notifications.rs index c491a3959..731d6f0d9 100644 --- a/crates/language-server/src/handlers/notifications.rs +++ b/crates/language-server/src/handlers/notifications.rs @@ -5,17 +5,21 @@ use serde::Deserialize; use crate::{ backend::Backend, - // state::ServerState, + db::LanguageServerDatabase, util::diag_to_lsp, - workspace::{IngotFileContext, SyncableIngotFileContext, SyncableInputFile}, + workspace::{IngotFileContext, SyncableIngotFileContext, SyncableInputFile, Workspace}, }; #[cfg(target_arch = "wasm32")] use crate::util::DummyFilePathConversion; -fn run_diagnostics(state: &Backend, path: &str) -> Vec { - let db = &mut *state.db.lock().unwrap(); - let workspace = &mut *state.workspace.lock().unwrap(); +fn run_diagnostics( + db: &mut LanguageServerDatabase, + workspace: &mut Workspace, + path: &str, +) -> Vec { + // let db = &mut *state.db.lock().unwrap(); + // let workspace = &mut *state.workspace.lock().unwrap(); let file_path = path; let top_mod = workspace.top_mod_from_file_path(db, file_path).unwrap(); db.analyze_top_mod(top_mod); @@ -23,11 +27,12 @@ fn run_diagnostics(state: &Backend, path: &str) -> Vec Result>, Error> { - let diags = run_diagnostics(state, uri.to_file_path().unwrap().to_str().unwrap()); - let db = &mut *state.db.lock().unwrap(); + let diags = run_diagnostics(db, workspace, uri.to_file_path().unwrap().to_str().unwrap()); + // let db = &mut *state.db.lock().unwrap(); let diagnostics = diags .into_iter() @@ -48,13 +53,14 @@ pub fn get_diagnostics( } pub fn handle_document_did_open( - state: &mut Backend, + db: &mut LanguageServerDatabase, + workspace: &mut Workspace, note: lsp_server::Notification, ) -> Result<(), Error> { let params = lsp_types::DidOpenTextDocumentParams::deserialize(note.params)?; { - let db = &mut *state.db.lock().unwrap(); - let workspace = &mut *state.workspace.lock().unwrap(); + // let db = &mut *state.db.lock().unwrap(); + // let workspace = &mut *state.workspace.lock().unwrap(); let input = workspace .input_from_file_path( db, @@ -69,8 +75,8 @@ 
pub fn handle_document_did_open( .unwrap(); let _ = input.sync(db, None); } - let diagnostics = get_diagnostics(state, params.text_document.uri.clone())?; - send_diagnostics(state, diagnostics) + let diagnostics = get_diagnostics(db, workspace, params.text_document.uri.clone())?; + send_diagnostics(diagnostics) } // Currently this is used to handle document renaming since the "document open" handler is called @@ -79,12 +85,14 @@ pub fn handle_document_did_open( // The fix: handle document renaming more explicitly in the "will rename" flow, along with the document // rename refactor. pub fn handle_document_did_close( + db: &mut LanguageServerDatabase, + workspace: &mut Workspace, state: &mut Backend, note: lsp_server::Notification, ) -> Result<(), Error> { let params = lsp_types::DidCloseTextDocumentParams::deserialize(note.params)?; - let db = &mut *state.db.lock().unwrap(); - let workspace = &mut *state.workspace.lock().unwrap(); + // let db = &mut *state.db.lock().unwrap(); + // let workspace = &mut *state.workspace.lock().unwrap(); let input = workspace .input_from_file_path( db, @@ -101,13 +109,14 @@ pub fn handle_document_did_close( } pub fn handle_document_did_change( - state: &mut Backend, + db: &mut LanguageServerDatabase, + workspace: &mut Workspace, note: lsp_server::Notification, ) -> Result<(), Error> { let params = lsp_types::DidChangeTextDocumentParams::deserialize(note.params)?; { - let db = &mut *state.db.lock().unwrap(); - let workspace = &mut *state.workspace.lock().unwrap(); + // let db = &mut *state.db.lock().unwrap(); + // let workspace = &mut *state.workspace.lock().unwrap(); let input = workspace .input_from_file_path( db, @@ -122,13 +131,13 @@ pub fn handle_document_did_change( .unwrap(); let _ = input.sync(db, Some(params.content_changes[0].text.clone())); } - let diagnostics = get_diagnostics(state, params.text_document.uri.clone())?; + let diagnostics = get_diagnostics(db, workspace, params.text_document.uri.clone())?; // info!("sending diagnostics... {:?}", diagnostics); - send_diagnostics(state, diagnostics) + send_diagnostics(diagnostics) } pub fn send_diagnostics( - _state: &mut Backend, + // _state: &mut Backend, diagnostics: FxHashMap>, ) -> Result<(), Error> { let _results = diagnostics.into_iter().map(|(uri, diags)| { @@ -152,7 +161,8 @@ pub fn send_diagnostics( } pub fn handle_watched_file_changes( - state: &mut Backend, + db: &mut LanguageServerDatabase, + workspace: &mut Workspace, note: lsp_server::Notification, ) -> Result<(), Error> { let params = lsp_types::DidChangeWatchedFilesParams::deserialize(note.params)?; @@ -164,8 +174,8 @@ pub fn handle_watched_file_changes( // TODO: sort out the mutable/immutable borrow issues here { - let db = &mut state.db.lock().unwrap(); - let workspace = &mut state.workspace.lock().unwrap(); + // let db = &mut state.db.lock().unwrap(); + // let workspace = &mut state.workspace.lock().unwrap(); match change.typ { lsp_types::FileChangeType::CREATED => { // TODO: handle this more carefully! @@ -194,7 +204,7 @@ pub fn handle_watched_file_changes( } // collect diagnostics for the file if change.typ != lsp_types::FileChangeType::DELETED { - let diags = get_diagnostics(state, uri.clone())?; + let diags = get_diagnostics(db, workspace, uri.clone())?; for (uri, more_diags) in diags { let diags = diagnostics.entry(uri).or_insert_with(Vec::new); diags.extend(more_diags); @@ -202,6 +212,6 @@ pub fn handle_watched_file_changes( } } // info!("sending diagnostics... 
{:?}", diagnostics); - send_diagnostics(state, diagnostics) + send_diagnostics(diagnostics) // Ok(()) } diff --git a/crates/language-server/src/handlers/request.rs b/crates/language-server/src/handlers/request.rs index bee756165..3ed45c175 100644 --- a/crates/language-server/src/handlers/request.rs +++ b/crates/language-server/src/handlers/request.rs @@ -7,16 +7,14 @@ use lsp_server::{Response, ResponseError}; use serde::Deserialize; use crate::{ - backend::Backend, - goto::{goto_enclosing_path, Cursor}, - util::{to_lsp_location_from_scope, to_offset_from_position}, - workspace::IngotFileContext, + backend::Backend, db::LanguageServerDatabase, goto::{goto_enclosing_path, Cursor}, util::{to_lsp_location_from_scope, to_offset_from_position}, workspace::{IngotFileContext, Workspace} }; -pub fn handle_hover(state: &mut Backend, req: lsp_server::Request) -> Result<(), anyhow::Error> { +pub fn handle_hover( + db: &mut LanguageServerDatabase, + workspace: &mut Workspace, + req: lsp_server::Request) -> Result<(), anyhow::Error> { // TODO: get more relevant information for the hover - let db = &mut *state.db.lock().unwrap(); - let workspace = &mut state.workspace; let params = lsp_types::HoverParams::deserialize(req.params)?; let file_path = ¶ms .text_document_position_params @@ -110,13 +108,12 @@ pub fn handle_hover(state: &mut Backend, req: lsp_server::Request) -> Result<(), use lsp_types::TextDocumentPositionParams; pub fn handle_goto_definition( - state: &mut Backend, + db: &mut LanguageServerDatabase, + workspace: &mut Workspace, req: lsp_server::Request, ) -> Result<(), anyhow::Error> { info!("handling goto definition request: {:?}", req); let params = TextDocumentPositionParams::deserialize(req.params)?; - let db = &mut *state.db.lock().unwrap(); - let workspace = &mut state.workspace; // Convert the position to an offset in the file let file_text = std::fs::read_to_string(params.text_document.uri.path())?; From dbda90482cd8f89482ed1f3e856f518b0d17ab24 Mon Sep 17 00:00:00 2001 From: Micah Date: Thu, 18 Jan 2024 18:29:20 -0500 Subject: [PATCH 05/66] language server tower-lsp refactor cleanup; WIP --- Cargo.lock | 69 ------- crates/language-server/Cargo.toml | 2 - crates/language-server/src/backend.rs | 180 +++-------------- .../src/{server.rs => capabilities.rs} | 56 +---- crates/language-server/src/cursor.rs | 0 crates/language-server/src/diagnostics.rs | 40 +++- crates/language-server/src/globals.rs | 1 + .../src/handlers/notifications.rs | 9 +- .../language-server/src/handlers/request.rs | 8 +- crates/language-server/src/language_server.rs | 128 ++++++++++++ crates/language-server/src/logger.rs | 57 ++++++ crates/language-server/src/main.rs | 11 +- crates/language-server/src/state.rs | 191 ------------------ 13 files changed, 266 insertions(+), 486 deletions(-) rename crates/language-server/src/{server.rs => capabilities.rs} (71%) delete mode 100644 crates/language-server/src/cursor.rs create mode 100644 crates/language-server/src/globals.rs create mode 100644 crates/language-server/src/language_server.rs create mode 100644 crates/language-server/src/logger.rs delete mode 100644 crates/language-server/src/state.rs diff --git a/Cargo.lock b/Cargo.lock index 36963c444..a7d671385 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1177,8 +1177,6 @@ dependencies = [ "tokio", "tokio-macros", "tower-lsp", - "tracing", - "tracing-subscriber", "url", ] @@ -1916,16 +1914,6 @@ dependencies = [ "windows-sys 0.48.0", ] -[[package]] -name = "nu-ansi-term" -version = "0.46.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84" -dependencies = [ - "overload", - "winapi", -] - [[package]] name = "num" version = "0.4.0" @@ -2049,12 +2037,6 @@ dependencies = [ "winapi", ] -[[package]] -name = "overload" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" - [[package]] name = "parity-scale-codec" version = "3.4.0" @@ -2905,15 +2887,6 @@ dependencies = [ "keccak", ] -[[package]] -name = "sharded-slab" -version = "0.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6" -dependencies = [ - "lazy_static", -] - [[package]] name = "signal-hook-registry" version = "1.4.1" @@ -3141,16 +3114,6 @@ dependencies = [ "syn 2.0.48", ] -[[package]] -name = "thread_local" -version = "1.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fdd6f064ccff2d6567adcb3873ca630700f00b5ad3f060c25b5dcfd9a4ce152" -dependencies = [ - "cfg-if 1.0.0", - "once_cell", -] - [[package]] name = "tiny-keccak" version = "2.0.2" @@ -3344,32 +3307,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" dependencies = [ "once_cell", - "valuable", -] - -[[package]] -name = "tracing-log" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3" -dependencies = [ - "log", - "once_cell", - "tracing-core", -] - -[[package]] -name = "tracing-subscriber" -version = "0.3.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b" -dependencies = [ - "nu-ansi-term", - "sharded-slab", - "smallvec", - "thread_local", - "tracing-core", - "tracing-log", ] [[package]] @@ -3463,12 +3400,6 @@ version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a" -[[package]] -name = "valuable" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" - [[package]] name = "vec1" version = "1.10.1" diff --git a/crates/language-server/Cargo.toml b/crates/language-server/Cargo.toml index 94170cc2b..eb79c02cf 100644 --- a/crates/language-server/Cargo.toml +++ b/crates/language-server/Cargo.toml @@ -39,5 +39,3 @@ tower-lsp = "0.20.0" tokio = { version = "1.35.1", features = ["full", "io-std"] } tokio-macros = "2.2.0" futures = "0.3.28" -tracing = { version = "0.1.40", features = ["async-await"] } -tracing-subscriber = "0.3.18" diff --git a/crates/language-server/src/backend.rs b/crates/language-server/src/backend.rs index 186c5e3ff..9274225a6 100644 --- a/crates/language-server/src/backend.rs +++ b/crates/language-server/src/backend.rs @@ -1,180 +1,56 @@ -use std::sync::{Arc, Mutex}; +use std::sync::Arc; +use tokio::sync::Mutex; use crate::db::LanguageServerDatabase; -use crate::handlers::notifications::get_diagnostics; -use crate::server::server_capabilities; -use crate::workspace::{IngotFileContext, SyncableInputFile, Workspace}; -use anyhow::Result; -use 
log::{info, Level, LevelFilter, Metadata, Record, SetLoggerError}; -use lsp_types::{DidChangeWatchedFilesRegistrationOptions, FileSystemWatcher, GlobPattern, InitializeParams, InitializeResult, Registration, TextDocumentItem}; + +use crate::workspace::{Workspace}; +use anyhow::Result; -use tower_lsp::{Client, LanguageServer}; -async fn register_capabilities(client: Arc>) -> Result<()> { - let client = client.lock().await; - let registration = Registration { - id: String::from("watch-fe-files"), - method: String::from("workspace/didChangeWatchedFiles"), - register_options: Some(serde_json::to_value(DidChangeWatchedFilesRegistrationOptions { - watchers: vec![FileSystemWatcher { - glob_pattern: GlobPattern::String("**/*.fe".to_string()), - kind: None, - }], - }).unwrap()), - }; - client.register_capability(vec![registration]).await; - Ok(()) -} +use lsp_types::{ + DidChangeWatchedFilesRegistrationOptions, FileSystemWatcher, GlobPattern, Registration, +}; +use tower_lsp::{Client}; pub struct Backend { // pub(crate) sender: Arc>>, - pub(crate) client: Arc>, + pub(crate) client: Arc>, pub(crate) db: Arc>, pub(crate) workspace: Arc>, } -#[tower_lsp::async_trait] -impl LanguageServer for Backend { - async fn initialize( - &self, - initialize_params: InitializeParams, - ) -> tower_lsp::jsonrpc::Result { - // let initialize_params: InitializeParams = serde_json::from_value(_initialize_params)?; - - let capabilities = server_capabilities(); - - let initialize_result = lsp_types::InitializeResult { - capabilities, - server_info: Some(lsp_types::ServerInfo { - name: String::from("fe-language-server"), - version: Some(String::from(env!("CARGO_PKG_VERSION"))), - }), - }; - let _ = self.init_logger(log::Level::Info); - let workspace = &mut *self.workspace.lock().await; - let db = &mut *self.db.lock().await; - let _ = workspace.set_workspace_root( - db, - initialize_params - .root_uri - .unwrap() - .to_file_path() - .ok() - .unwrap(), - ); - let client = self.client.clone(); - let _ = register_capabilities(client).await; - Ok(initialize_result) - } - async fn shutdown(&self) -> tower_lsp::jsonrpc::Result<()> { - Ok(()) - } - - async fn did_open(&self, params: lsp_types::DidOpenTextDocumentParams) { - // let _ = - info!("did open: {:?}", params); - { - let db = &mut *self.db.lock().await; - let workspace = &mut *self.workspace.lock().await; - let input = workspace - .input_from_file_path( - db, - params - .text_document - .uri - .to_file_path() - .unwrap() - .to_str() - .unwrap(), - ) - .unwrap(); - let _ = input.sync(db, None); - } - self.on_change(TextDocumentItem { - uri: params.text_document.uri, - language_id: params.text_document.language_id, - version: params.text_document.version, - text: params.text_document.text, - }) - .await; - } -} impl Backend { pub fn new(client: Client) -> Self { - let db = Arc::new(tokio::sync::Mutex::new(LanguageServerDatabase::default())); - let workspace = Arc::new(tokio::sync::Mutex::new(Workspace::default())); - let client = Arc::new(tokio::sync::Mutex::new(client)); + let db = Arc::new(Mutex::new(LanguageServerDatabase::default())); + let workspace = Arc::new(Mutex::new(Workspace::default())); + let client = Arc::new(Mutex::new(client)); Self { client, db, workspace, } } - - async fn on_change(&self, params: TextDocumentItem) { + pub(crate) async fn register_watchers(&self) -> Result<()> { let client = self.client.lock().await; - let db = &mut *self.db.lock().await; - let workspace = &mut *self.workspace.lock().await; - let diagnostics = get_diagnostics(db, workspace, 
params.uri.clone()) - .unwrap() - .into_iter() - .map(|(uri, diags)| client.publish_diagnostics(uri, diags, None)) - .collect::>(); - - futures::future::join_all(diagnostics).await; - } - - pub fn init_logger(&self, level: Level) -> Result<(), SetLoggerError> { - let logger = LspLogger { - level, - client: self.client.clone(), + let registration = Registration { + id: String::from("watch-fe-files"), + method: String::from("workspace/didChangeWatchedFiles"), + register_options: Some( + serde_json::to_value(DidChangeWatchedFilesRegistrationOptions { + watchers: vec![FileSystemWatcher { + glob_pattern: GlobPattern::String("**/*.fe".to_string()), + kind: None, + }], + }) + .unwrap(), + ), }; - let static_logger = Box::leak(Box::new(logger)); - log::set_logger(static_logger)?; - log::set_max_level(LevelFilter::Debug); - Ok(()) - } -} - -pub struct LspLogger { - level: Level, - client: Arc>, -} - -impl log::Log for LspLogger { - fn enabled(&self, metadata: &Metadata) -> bool { - let logger = self; - metadata.level() <= logger.level + Ok(client.register_capability(vec![registration]).await?) } - // TODO: investigate performance implications of this - fn log(&self, record: &Record) { - if self.enabled(record.metadata()) { - let message = format!("{} - {}", record.level(), record.args()); - let level = record.level(); - let client = self.client.clone(); - tokio::task::spawn(async move { - let client = client.lock().await; - client - .log_message( - match level { - log::Level::Error => lsp_types::MessageType::ERROR, - log::Level::Warn => lsp_types::MessageType::WARNING, - log::Level::Info => lsp_types::MessageType::INFO, - log::Level::Debug => lsp_types::MessageType::LOG, - log::Level::Trace => lsp_types::MessageType::LOG, - }, - message, - ) - .await; - }); - } - } - - fn flush(&self) {} -} +} \ No newline at end of file diff --git a/crates/language-server/src/server.rs b/crates/language-server/src/capabilities.rs similarity index 71% rename from crates/language-server/src/server.rs rename to crates/language-server/src/capabilities.rs index f2bcc718c..43f821099 100644 --- a/crates/language-server/src/server.rs +++ b/crates/language-server/src/capabilities.rs @@ -94,60 +94,6 @@ pub(crate) fn server_capabilities() -> ServerCapabilities { // }), }), }), - // ..Default::default() ..Default::default() } -} - -// pub fn run_server() -> Result<()> { -// let (connection, io_threads) = Connection::stdio(); - -// let (request_id, _initialize_params) = connection.initialize_start()?; -// let initialize_params: InitializeParams = serde_json::from_value(_initialize_params)?; - -// let capabilities = server_capabilities(); - -// let initialize_result = lsp_types::InitializeResult { -// capabilities, -// server_info: Some(lsp_types::ServerInfo { -// name: String::from("fe-language-server"), -// version: Some(String::from(env!("CARGO_PKG_VERSION"))), -// }), -// }; - -// let initialize_result = serde_json::to_value(initialize_result).unwrap(); - -// connection.initialize_finish(request_id, initialize_result)?; -// // send a "hello" message to the client -// connection -// .sender -// .send(lsp_server::Message::Notification(Notification { -// method: String::from("window/showMessage"), -// params: serde_json::to_value(lsp_types::ShowMessageParams { -// typ: lsp_types::MessageType::INFO, -// message: String::from("hello from the Fe language server"), -// }) -// .unwrap(), -// }))?; - -// // let client = Backend::new(connection.sender.clone()); -// let mut state = ServerState::new(connection.sender); -// let _ = 
state.init_logger(log::Level::Info); -// state.workspace.set_workspace_root( -// &mut state.db, -// initialize_params -// .root_uri -// .unwrap() -// .to_file_path() -// .ok() -// .unwrap(), -// )?; -// // info!("TESTING"); -// // info!("initialized with params: {:?}", debug_params); - -// let result = state.run(connection.receiver); - -// io_threads.join().unwrap(); - -// result -// } +} \ No newline at end of file diff --git a/crates/language-server/src/cursor.rs b/crates/language-server/src/cursor.rs deleted file mode 100644 index e69de29bb..000000000 diff --git a/crates/language-server/src/diagnostics.rs b/crates/language-server/src/diagnostics.rs index 58ac430e8..67bebe0ec 100644 --- a/crates/language-server/src/diagnostics.rs +++ b/crates/language-server/src/diagnostics.rs @@ -1,6 +1,7 @@ use std::ops::Range; use camino::Utf8Path; +use clap::Error; use codespan_reporting as cs; use cs::{diagnostic as cs_diag, files as cs_files}; @@ -8,9 +9,10 @@ use common::{ diagnostics::{LabelStyle, Severity}, InputFile, }; +use fxhash::FxHashMap; use hir::diagnostics::DiagnosticVoucher; -use crate::db::{LanguageServerDatabase, LanguageServerDb}; +use crate::{db::{LanguageServerDatabase, LanguageServerDb}, util::diag_to_lsp, workspace::{IngotFileContext, Workspace}}; pub trait ToCsDiag { fn to_cs(&self, db: &LanguageServerDatabase) -> cs_diag::Diagnostic; @@ -120,3 +122,39 @@ impl<'a> cs_files::Files<'a> for LanguageServerDatabase { Ok(Range { start, end }) } } + +fn run_diagnostics( + db: &mut LanguageServerDatabase, + workspace: &mut Workspace, + path: &str, +) -> Vec { + let file_path = path; + let top_mod = workspace.top_mod_from_file_path(db, file_path).unwrap(); + db.analyze_top_mod(top_mod); + db.finalize_diags() +} + +pub fn get_diagnostics( + db: &mut LanguageServerDatabase, + workspace: &mut Workspace, + uri: lsp_types::Url, +) -> Result>, Error> { + let diags = run_diagnostics(db, workspace, uri.to_file_path().unwrap().to_str().unwrap()); + + let diagnostics = diags + .into_iter() + .flat_map(|diag| diag_to_lsp(diag, db).clone()); + + // we need to reduce the diagnostics to a map from URL to Vec + let mut result = FxHashMap::>::default(); + + // add a null diagnostic to the result for the given URL + let _ = result.entry(uri.clone()).or_insert_with(Vec::new); + + diagnostics.for_each(|(uri, more_diags)| { + let diags = result.entry(uri).or_insert_with(Vec::new); + diags.extend(more_diags); + }); + + Ok(result) +} diff --git a/crates/language-server/src/globals.rs b/crates/language-server/src/globals.rs new file mode 100644 index 000000000..e676d2565 --- /dev/null +++ b/crates/language-server/src/globals.rs @@ -0,0 +1 @@ +pub(crate) const LANGUAGE_ID : &str = "fe"; \ No newline at end of file diff --git a/crates/language-server/src/handlers/notifications.rs b/crates/language-server/src/handlers/notifications.rs index 731d6f0d9..79bfc1429 100644 --- a/crates/language-server/src/handlers/notifications.rs +++ b/crates/language-server/src/handlers/notifications.rs @@ -4,10 +4,7 @@ use fxhash::FxHashMap; use serde::Deserialize; use crate::{ - backend::Backend, - db::LanguageServerDatabase, - util::diag_to_lsp, - workspace::{IngotFileContext, SyncableIngotFileContext, SyncableInputFile, Workspace}, + backend::Backend, db::LanguageServerDatabase, diagnostics::get_diagnostics, workspace::{IngotFileContext, SyncableIngotFileContext, SyncableInputFile, Workspace} }; #[cfg(target_arch = "wasm32")] @@ -211,7 +208,5 @@ pub fn handle_watched_file_changes( } } } - // info!("sending diagnostics... 
{:?}", diagnostics); - send_diagnostics(diagnostics) - // Ok(()) + Ok(()) } diff --git a/crates/language-server/src/handlers/request.rs b/crates/language-server/src/handlers/request.rs index 3ed45c175..e4e8cbb1c 100644 --- a/crates/language-server/src/handlers/request.rs +++ b/crates/language-server/src/handlers/request.rs @@ -7,13 +7,17 @@ use lsp_server::{Response, ResponseError}; use serde::Deserialize; use crate::{ - backend::Backend, db::LanguageServerDatabase, goto::{goto_enclosing_path, Cursor}, util::{to_lsp_location_from_scope, to_offset_from_position}, workspace::{IngotFileContext, Workspace} + db::LanguageServerDatabase, + goto::{goto_enclosing_path, Cursor}, + util::{to_lsp_location_from_scope, to_offset_from_position}, + workspace::{IngotFileContext, Workspace}, }; pub fn handle_hover( db: &mut LanguageServerDatabase, workspace: &mut Workspace, - req: lsp_server::Request) -> Result<(), anyhow::Error> { + req: lsp_server::Request, +) -> Result<(), anyhow::Error> { // TODO: get more relevant information for the hover let params = lsp_types::HoverParams::deserialize(req.params)?; let file_path = ¶ms diff --git a/crates/language-server/src/language_server.rs b/crates/language-server/src/language_server.rs new file mode 100644 index 000000000..4a7d0b506 --- /dev/null +++ b/crates/language-server/src/language_server.rs @@ -0,0 +1,128 @@ +use log::info; +use lsp_types::{DidCloseTextDocumentParams, InitializeParams, InitializeResult, TextDocumentItem}; +use tower_lsp::LanguageServer; + +use crate::{ + backend::Backend, + capabilities::server_capabilities, + diagnostics::get_diagnostics, + globals::LANGUAGE_ID, + workspace::{IngotFileContext, SyncableInputFile}, +}; + +#[tower_lsp::async_trait] +impl LanguageServer for Backend { + async fn initialize( + &self, + initialize_params: InitializeParams, + ) -> tower_lsp::jsonrpc::Result { + // initialize + let capabilities = server_capabilities(); + let initialize_result = lsp_types::InitializeResult { + capabilities, + server_info: Some(lsp_types::ServerInfo { + name: String::from("fe-language-server"), + version: Some(String::from(env!("CARGO_PKG_VERSION"))), + }), + }; + // setup logging + let _ = self.init_logger(log::Level::Info); + + // setup workspace + let workspace = &mut *self.workspace.lock().await; + let db = &mut *self.db.lock().await; + let _ = workspace.set_workspace_root( + db, + initialize_params + .root_uri + .unwrap() + .to_file_path() + .ok() + .unwrap(), + ); + + // register watchers + let _ = self.register_watchers().await; + + Ok(initialize_result) + } + async fn shutdown(&self) -> tower_lsp::jsonrpc::Result<()> { + Ok(()) + } + + async fn did_open(&self, params: lsp_types::DidOpenTextDocumentParams) { + info!("did open: {:?}", params); + self.on_change(TextDocumentItem { + uri: params.text_document.uri, + language_id: LANGUAGE_ID.to_string(), + version: params.text_document.version, + text: params.text_document.text, + }) + .await; + } + + async fn did_change(&self, params: lsp_types::DidChangeTextDocumentParams) { + info!("did change: {:?}", params); + self.on_change(TextDocumentItem { + uri: params.text_document.uri, + language_id: LANGUAGE_ID.to_string(), + version: params.text_document.version, + text: params.content_changes[0].text.clone(), + }) + .await; + } + + + // Currently this is used to handle document renaming since the "document open" handler is called + // before the "document was renamed" handler. 
+ // + // The fix: handle document renaming more explicitly in the "will rename" flow, along with the document + // rename refactor. + async fn did_close( + &self, + params: DidCloseTextDocumentParams, + ) { + let workspace = &mut *self.workspace.lock().await; + let db = &mut *self.db.lock().await; + let input = workspace + .input_from_file_path( + db, + params + .text_document + .uri + .to_file_path() + .unwrap() + .to_str() + .unwrap(), + ) + .unwrap(); + let _ = input.sync(db, None); + } +} + +impl Backend { + async fn on_change(&self, params: TextDocumentItem) { + let client = self.client.lock().await; + let db = &mut *self.db.lock().await; + let workspace = &mut *self.workspace.lock().await; + let input = workspace + .input_from_file_path( + db, + params + .uri + .to_file_path() + .expect("Failed to convert URI to file path") + .to_str() + .expect("Failed to convert file path to string"), + ) + .unwrap(); + let _ = input.sync(db, Some(params.text)); + let diagnostics = get_diagnostics(db, workspace, params.uri.clone()) + .unwrap() + .into_iter() + .map(|(uri, diags)| client.publish_diagnostics(uri, diags, None)) + .collect::>(); + + futures::future::join_all(diagnostics).await; + } +} diff --git a/crates/language-server/src/logger.rs b/crates/language-server/src/logger.rs new file mode 100644 index 000000000..681f2c098 --- /dev/null +++ b/crates/language-server/src/logger.rs @@ -0,0 +1,57 @@ +use std::sync::Arc; + +use log::{Level, LevelFilter, Metadata, Record, SetLoggerError}; +use tower_lsp::Client; + +use crate::backend::Backend; + +pub struct Logger { + pub(crate) level: Level, + pub(crate) client: Arc>, +} + +impl log::Log for Logger { + fn enabled(&self, metadata: &Metadata) -> bool { + let logger = self; + metadata.level() <= logger.level + } + + // TODO: investigate performance implications of spawning tasks for each log message + fn log(&self, record: &Record) { + if self.enabled(record.metadata()) { + let message = format!("{} - {}", record.level(), record.args()); + let level = record.level(); + let client = self.client.clone(); + tokio::task::spawn(async move { + let client = client.lock().await; + client + .log_message( + match level { + log::Level::Error => lsp_types::MessageType::ERROR, + log::Level::Warn => lsp_types::MessageType::WARNING, + log::Level::Info => lsp_types::MessageType::INFO, + log::Level::Debug => lsp_types::MessageType::LOG, + log::Level::Trace => lsp_types::MessageType::LOG, + }, + message, + ) + .await; + }); + } + } + + fn flush(&self) {} +} + +impl Backend { + pub fn init_logger(&self, level: Level) -> Result<(), SetLoggerError> { + let logger = Logger { + level, + client: self.client.clone(), + }; + let static_logger = Box::leak(Box::new(logger)); + log::set_logger(static_logger)?; + log::set_max_level(LevelFilter::Debug); + Ok(()) + } +} \ No newline at end of file diff --git a/crates/language-server/src/main.rs b/crates/language-server/src/main.rs index f15cc8c08..ead06af12 100644 --- a/crates/language-server/src/main.rs +++ b/crates/language-server/src/main.rs @@ -1,11 +1,13 @@ mod db; mod diagnostics; mod goto; -mod server; -// mod state; +mod capabilities; mod backend; mod util; mod workspace; +mod globals; +mod language_server; +mod logger; use backend::Backend; use db::Jar; @@ -14,11 +16,6 @@ mod handlers { pub mod request; } -// use server::run_server; - -// fn main() { -// // let _ = run_server(); -// } #[tokio_macros::main] async fn main() { let stdin = tokio::io::stdin(); diff --git a/crates/language-server/src/state.rs 
b/crates/language-server/src/state.rs deleted file mode 100644 index 2c063a724..000000000 --- a/crates/language-server/src/state.rs +++ /dev/null @@ -1,191 +0,0 @@ -use std::sync::{Arc, Mutex}; - -use crate::db::LanguageServerDatabase; -use crate::workspace::Workspace; -use anyhow::Result; -use crossbeam_channel::{Receiver, Sender}; -use log::{info, Level, Metadata, Record}; -use log::{LevelFilter, SetLoggerError}; -use lsp_server::Message; -use lsp_types::notification::Notification; -use lsp_types::request::Request; - -use crate::handlers::notifications::{ - handle_document_did_change, handle_document_did_close, handle_watched_file_changes, -}; -use crate::handlers::request::handle_goto_definition; -use crate::handlers::{notifications::handle_document_did_open, request::handle_hover}; - -pub struct ServerState { - pub(crate) sender: Arc>>, - pub(crate) db: LanguageServerDatabase, - pub(crate) workspace: Workspace, -} - -impl ServerState { - pub fn new(sender: Sender) -> Self { - let sender = Arc::new(Mutex::new(sender)); - - Self { - sender, - db: LanguageServerDatabase::default(), - workspace: Workspace::default(), - } - } - - fn send(&mut self, msg: Message) -> Result<()> { - info!("SEND: {:?}", msg); - let sender = self.sender.lock().unwrap(); - sender.send(msg)?; - Ok(()) - } - - pub fn run(&mut self, receiver: Receiver) -> Result<()> { - info!("Fe Language Server listening..."); - - // watch the workspace root for changes - self.send(lsp_server::Message::Request(lsp_server::Request::new( - 28_716_283.into(), - String::from("client/registerCapability"), - lsp_types::RegistrationParams { - registrations: vec![lsp_types::Registration { - id: String::from("watch-fe-files"), - method: String::from("workspace/didChangeWatchedFiles"), - register_options: Some( - serde_json::to_value(lsp_types::DidChangeWatchedFilesRegistrationOptions { - watchers: vec![lsp_types::FileSystemWatcher { - glob_pattern: lsp_types::GlobPattern::String("**/*.fe".to_string()), - kind: None, // kind: Some(WatchKind::Create | WatchKind::Change | WatchKind::Delete), - }], - }) - .unwrap(), - ), - }], - }, - )))?; - - while let Some(msg) = self.next_message(&receiver) { - if let lsp_server::Message::Notification(notification) = &msg { - if notification.method == lsp_types::notification::Exit::METHOD { - return Ok(()); - } - } - - let _ = self.handle_message(msg); - } - Ok(()) - } - - fn next_message(&self, receiver: &Receiver) -> Option { - crossbeam_channel::select! 
{ - recv(receiver) -> msg => msg.ok() - } - } - - fn handle_message(&mut self, msg: lsp_server::Message) -> Result<()> { - if let lsp_server::Message::Request(req) = msg { - info!("REQUEST: {:?}", req); - - match req.method.as_str() { - // TODO: implement actually useful hover handler - lsp_types::request::HoverRequest::METHOD => handle_hover(self, req)?, - // goto definition - lsp_types::request::GotoDefinition::METHOD => handle_goto_definition(self, req)?, - lsp_types::request::GotoTypeDefinition::METHOD => { - handle_goto_definition(self, req)?; - } - lsp_types::request::GotoImplementation::METHOD => { - handle_goto_definition(self, req)?; - } - lsp_types::request::GotoDeclaration::METHOD => handle_goto_definition(self, req)?, - _ => {} - } - } else if let lsp_server::Message::Notification(note) = msg { - // log the notification to the console - info!("NOTIFICATION: {:?}", note); - - match note.method.as_str() { - lsp_types::notification::DidOpenTextDocument::METHOD => { - handle_document_did_open(self, note)?; - } - // TODO: this is currently something of a hack to deal with - // file renames. We should be using the workspace - // "will change" requests instead. - lsp_types::notification::DidCloseTextDocument::METHOD => { - handle_document_did_close(self, note)?; - } - lsp_types::notification::DidChangeTextDocument::METHOD => { - handle_document_did_change(self, note)?; - } - lsp_types::notification::DidChangeWatchedFiles::METHOD => { - handle_watched_file_changes(self, note)?; - } - _ => {} - } - } else if let lsp_server::Message::Response(resp) = msg { - info!("RESPONSE: {:?}", resp); - } - - Ok(()) - } - - pub(crate) fn send_response(&mut self, response: lsp_server::Response) -> Result<()> { - self.send(lsp_server::Message::Response(response))?; - Ok(()) - } - - pub fn init_logger(&self, level: Level) -> Result<(), SetLoggerError> { - let logger = LspLogger { - level, - sender: self.sender.clone(), - }; - let static_logger = Box::leak(Box::new(logger)); - log::set_logger(static_logger)?; - log::set_max_level(LevelFilter::Debug); - Ok(()) - } -} - -pub struct LspLogger { - level: Level, - sender: Arc>>, -} - -impl LspLogger { - fn send(&self, msg: Message) -> Result<()> { - let sender = self.sender.lock().unwrap(); - sender.send(msg)?; - Ok(()) - } -} - -impl log::Log for LspLogger { - fn enabled(&self, metadata: &Metadata) -> bool { - let logger = self; - metadata.level() <= logger.level - } - - fn log(&self, record: &Record) { - if self.enabled(record.metadata()) { - let message = format!("{} - {}", record.level(), record.args()); - let _ = self.send(lsp_server::Message::Notification( - lsp_server::Notification { - method: String::from("window/logMessage"), - params: serde_json::to_value(lsp_types::LogMessageParams { - typ: match record.level() { - Level::Error => lsp_types::MessageType::ERROR, - Level::Warn => lsp_types::MessageType::WARNING, - Level::Info => lsp_types::MessageType::INFO, - Level::Debug => lsp_types::MessageType::LOG, - Level::Trace => lsp_types::MessageType::LOG, - }, - message, - }) - .unwrap(), - }, - )); - } - } - - fn flush(&self) {} -} From f28784e96c22684fca94c226e5244c52368b7626 Mon Sep 17 00:00:00 2001 From: Micah Date: Fri, 19 Jan 2024 10:09:07 -0500 Subject: [PATCH 06/66] language server clippy/fmt --- crates/language-server/src/backend.rs | 14 +++----------- crates/language-server/src/capabilities.rs | 2 +- crates/language-server/src/diagnostics.rs | 6 +++++- crates/language-server/src/globals.rs | 2 +- .../language-server/src/handlers/notifications.rs 
| 4 +++- crates/language-server/src/language_server.rs | 6 +----- crates/language-server/src/logger.rs | 2 +- crates/language-server/src/main.rs | 10 +++++----- 8 files changed, 20 insertions(+), 26 deletions(-) diff --git a/crates/language-server/src/backend.rs b/crates/language-server/src/backend.rs index 9274225a6..cc93c50e4 100644 --- a/crates/language-server/src/backend.rs +++ b/crates/language-server/src/backend.rs @@ -3,18 +3,12 @@ use tokio::sync::Mutex; use crate::db::LanguageServerDatabase; - - -use crate::workspace::{Workspace}; +use crate::workspace::Workspace; use anyhow::Result; - - - - use lsp_types::{ DidChangeWatchedFilesRegistrationOptions, FileSystemWatcher, GlobPattern, Registration, }; -use tower_lsp::{Client}; +use tower_lsp::Client; pub struct Backend { // pub(crate) sender: Arc>>, @@ -23,7 +17,6 @@ pub struct Backend { pub(crate) workspace: Arc>, } - impl Backend { pub fn new(client: Client) -> Self { let db = Arc::new(Mutex::new(LanguageServerDatabase::default())); @@ -52,5 +45,4 @@ impl Backend { }; Ok(client.register_capability(vec![registration]).await?) } - -} \ No newline at end of file +} diff --git a/crates/language-server/src/capabilities.rs b/crates/language-server/src/capabilities.rs index 43f821099..a915da550 100644 --- a/crates/language-server/src/capabilities.rs +++ b/crates/language-server/src/capabilities.rs @@ -96,4 +96,4 @@ pub(crate) fn server_capabilities() -> ServerCapabilities { }), ..Default::default() } -} \ No newline at end of file +} diff --git a/crates/language-server/src/diagnostics.rs b/crates/language-server/src/diagnostics.rs index 67bebe0ec..40e4fa59b 100644 --- a/crates/language-server/src/diagnostics.rs +++ b/crates/language-server/src/diagnostics.rs @@ -12,7 +12,11 @@ use common::{ use fxhash::FxHashMap; use hir::diagnostics::DiagnosticVoucher; -use crate::{db::{LanguageServerDatabase, LanguageServerDb}, util::diag_to_lsp, workspace::{IngotFileContext, Workspace}}; +use crate::{ + db::{LanguageServerDatabase, LanguageServerDb}, + util::diag_to_lsp, + workspace::{IngotFileContext, Workspace}, +}; pub trait ToCsDiag { fn to_cs(&self, db: &LanguageServerDatabase) -> cs_diag::Diagnostic; diff --git a/crates/language-server/src/globals.rs b/crates/language-server/src/globals.rs index e676d2565..e6ef17dad 100644 --- a/crates/language-server/src/globals.rs +++ b/crates/language-server/src/globals.rs @@ -1 +1 @@ -pub(crate) const LANGUAGE_ID : &str = "fe"; \ No newline at end of file +pub(crate) const LANGUAGE_ID: &str = "fe"; diff --git a/crates/language-server/src/handlers/notifications.rs b/crates/language-server/src/handlers/notifications.rs index 79bfc1429..40ab6f6cb 100644 --- a/crates/language-server/src/handlers/notifications.rs +++ b/crates/language-server/src/handlers/notifications.rs @@ -4,7 +4,9 @@ use fxhash::FxHashMap; use serde::Deserialize; use crate::{ - backend::Backend, db::LanguageServerDatabase, diagnostics::get_diagnostics, workspace::{IngotFileContext, SyncableIngotFileContext, SyncableInputFile, Workspace} + db::LanguageServerDatabase, + diagnostics::get_diagnostics, + workspace::{IngotFileContext, SyncableIngotFileContext, SyncableInputFile, Workspace}, }; #[cfg(target_arch = "wasm32")] diff --git a/crates/language-server/src/language_server.rs b/crates/language-server/src/language_server.rs index 4a7d0b506..9f9538424 100644 --- a/crates/language-server/src/language_server.rs +++ b/crates/language-server/src/language_server.rs @@ -72,16 +72,12 @@ impl LanguageServer for Backend { .await; } - // Currently this is 
used to handle document renaming since the "document open" handler is called // before the "document was renamed" handler. // // The fix: handle document renaming more explicitly in the "will rename" flow, along with the document // rename refactor. - async fn did_close( - &self, - params: DidCloseTextDocumentParams, - ) { + async fn did_close(&self, params: DidCloseTextDocumentParams) { let workspace = &mut *self.workspace.lock().await; let db = &mut *self.db.lock().await; let input = workspace diff --git a/crates/language-server/src/logger.rs b/crates/language-server/src/logger.rs index 681f2c098..d4db82887 100644 --- a/crates/language-server/src/logger.rs +++ b/crates/language-server/src/logger.rs @@ -54,4 +54,4 @@ impl Backend { log::set_max_level(LevelFilter::Debug); Ok(()) } -} \ No newline at end of file +} diff --git a/crates/language-server/src/main.rs b/crates/language-server/src/main.rs index ead06af12..e6f4b937d 100644 --- a/crates/language-server/src/main.rs +++ b/crates/language-server/src/main.rs @@ -1,13 +1,13 @@ +mod backend; +mod capabilities; mod db; mod diagnostics; -mod goto; -mod capabilities; -mod backend; -mod util; -mod workspace; mod globals; +mod goto; mod language_server; mod logger; +mod util; +mod workspace; use backend::Backend; use db::Jar; From 54a1c5905c0da46a73abb980799df425dc1917a9 Mon Sep 17 00:00:00 2001 From: Micah Date: Mon, 22 Jan 2024 15:37:13 -0500 Subject: [PATCH 07/66] Language server prevent deadlocks --- crates/language-server/src/backend.rs | 19 ++- .../language-server/src/handlers/request.rs | 2 - crates/language-server/src/language_server.rs | 108 +++++++++++++++--- crates/language-server/src/logger.rs | 2 +- crates/language-server/src/workspace.rs | 70 ++++++++++-- 5 files changed, 172 insertions(+), 29 deletions(-) diff --git a/crates/language-server/src/backend.rs b/crates/language-server/src/backend.rs index cc93c50e4..c21462faa 100644 --- a/crates/language-server/src/backend.rs +++ b/crates/language-server/src/backend.rs @@ -11,13 +11,24 @@ use lsp_types::{ use tower_lsp::Client; pub struct Backend { - // pub(crate) sender: Arc>>, - pub(crate) client: Arc>, - pub(crate) db: Arc>, - pub(crate) workspace: Arc>, + client: Arc>, + db: Arc>, + workspace: Arc>, } impl Backend { + pub(crate) fn db(&self) -> Arc> { + self.db.clone() + } + + pub(crate) fn workspace(&self) -> Arc> { + self.workspace.clone() + } + + pub(crate) fn client(&self) -> Arc> { + self.client.clone() + } + pub fn new(client: Client) -> Self { let db = Arc::new(Mutex::new(LanguageServerDatabase::default())); let workspace = Arc::new(Mutex::new(Workspace::default())); diff --git a/crates/language-server/src/handlers/request.rs b/crates/language-server/src/handlers/request.rs index e4e8cbb1c..3caba33cb 100644 --- a/crates/language-server/src/handlers/request.rs +++ b/crates/language-server/src/handlers/request.rs @@ -142,8 +142,6 @@ pub fn handle_goto_definition( None => return Ok(()), }; - // info!("scopes: {:?}", scopes); - let locations = scopes .iter() .filter_map(|scope| *scope) diff --git a/crates/language-server/src/language_server.rs b/crates/language-server/src/language_server.rs index 9f9538424..317db6c52 100644 --- a/crates/language-server/src/language_server.rs +++ b/crates/language-server/src/language_server.rs @@ -1,21 +1,22 @@ use log::info; -use lsp_types::{DidCloseTextDocumentParams, InitializeParams, InitializeResult, TextDocumentItem}; -use tower_lsp::LanguageServer; +use lsp_types::{ + DidChangeWatchedFilesParams, DidCloseTextDocumentParams, 
InitializeParams, InitializeResult, + TextDocumentItem, +}; + +use tower_lsp::{jsonrpc::Result, LanguageServer}; use crate::{ backend::Backend, capabilities::server_capabilities, diagnostics::get_diagnostics, globals::LANGUAGE_ID, - workspace::{IngotFileContext, SyncableInputFile}, + workspace::{IngotFileContext, SyncableIngotFileContext, SyncableInputFile}, }; #[tower_lsp::async_trait] impl LanguageServer for Backend { - async fn initialize( - &self, - initialize_params: InitializeParams, - ) -> tower_lsp::jsonrpc::Result { + async fn initialize(&self, initialize_params: InitializeParams) -> Result { // initialize let capabilities = server_capabilities(); let initialize_result = lsp_types::InitializeResult { @@ -29,8 +30,11 @@ impl LanguageServer for Backend { let _ = self.init_logger(log::Level::Info); // setup workspace - let workspace = &mut *self.workspace.lock().await; - let db = &mut *self.db.lock().await; + let workspace = self.workspace(); + let workspace = &mut workspace.lock().await; + let db = self.db(); + let db = &mut db.lock().await; + let _ = workspace.set_workspace_root( db, initialize_params @@ -52,6 +56,12 @@ impl LanguageServer for Backend { async fn did_open(&self, params: lsp_types::DidOpenTextDocumentParams) { info!("did open: {:?}", params); + let workspace = self.workspace(); + let workspace = &mut workspace.lock().await; + let db = self.db(); + let db = &mut db.lock().await; + let _ = workspace.sync(db); + self.on_change(TextDocumentItem { uri: params.text_document.uri, language_id: LANGUAGE_ID.to_string(), @@ -78,8 +88,11 @@ impl LanguageServer for Backend { // The fix: handle document renaming more explicitly in the "will rename" flow, along with the document // rename refactor. async fn did_close(&self, params: DidCloseTextDocumentParams) { - let workspace = &mut *self.workspace.lock().await; - let db = &mut *self.db.lock().await; + let workspace = self.workspace(); + let workspace = &mut workspace.lock().await; + let db = self.db(); + let db = &mut db.lock().await; + let input = workspace .input_from_file_path( db, @@ -94,13 +107,80 @@ impl LanguageServer for Backend { .unwrap(); let _ = input.sync(db, None); } + async fn did_change_watched_files(&self, params: DidChangeWatchedFilesParams) { + let workspace = self.workspace(); + let workspace = &mut workspace.lock().await; + let db = self.db(); + let db = &mut db.lock().await; + + let changes = params.changes; + for change in changes { + let uri = change.uri; + let path = uri.to_file_path().unwrap(); + + match change.typ { + lsp_types::FileChangeType::CREATED => { + // TODO: handle this more carefully! + // this is inefficient, a hack for now + let _ = workspace.sync(db); + let input = workspace + .input_from_file_path(db, path.to_str().unwrap()) + .unwrap(); + let _ = input.sync(db, None); + } + lsp_types::FileChangeType::CHANGED => { + let input = workspace + .input_from_file_path(db, path.to_str().unwrap()) + .unwrap(); + let _ = input.sync(db, None); + } + lsp_types::FileChangeType::DELETED => { + // TODO: handle this more carefully! 
+ // this is inefficient, a hack for now + let _ = workspace.sync(db); + } + _ => {} + } + // collect diagnostics for the file + if change.typ != lsp_types::FileChangeType::DELETED { + // let diags = get_diagnostics(db, workspace, uri.clone()); + // for (uri, more_diags) in diags.ok().unwrap() { + // let diags = diagnostics.entry(uri).or_insert_with(Vec::new); + // diags.extend(more_diags); + // } + let text = std::fs::read_to_string(path).unwrap(); + self.on_change(TextDocumentItem { + uri: uri.clone(), + language_id: LANGUAGE_ID.to_string(), + version: 0, + text, + }) + .await; + } + } + } + + // async fn will_rename_files(&self, params: RenameFilesParams) -> Result> { + // let workspace = &mut *self.workspace.lock().await; + // let db = &mut *self.db.lock().await; + + // for file in params.files { + // let _ = workspace.rename_file(db, &*file.old_uri, &*file.new_uri); + // } + + // // TODO: implement file rename auto-refactoring + // Ok(None) + // } } impl Backend { async fn on_change(&self, params: TextDocumentItem) { - let client = self.client.lock().await; - let db = &mut *self.db.lock().await; - let workspace = &mut *self.workspace.lock().await; + let workspace = self.workspace(); + let workspace = &mut workspace.lock().await; + let db = self.db(); + let db = &mut db.lock().await; + let client = self.client(); + let client = &mut *client.lock().await; let input = workspace .input_from_file_path( db, diff --git a/crates/language-server/src/logger.rs b/crates/language-server/src/logger.rs index d4db82887..ea9e59319 100644 --- a/crates/language-server/src/logger.rs +++ b/crates/language-server/src/logger.rs @@ -47,7 +47,7 @@ impl Backend { pub fn init_logger(&self, level: Level) -> Result<(), SetLoggerError> { let logger = Logger { level, - client: self.client.clone(), + client: self.client(), }; let static_logger = Box::leak(Box::new(logger)); log::set_logger(static_logger)?; diff --git a/crates/language-server/src/workspace.rs b/crates/language-server/src/workspace.rs index 9e933029b..2b7c819f5 100644 --- a/crates/language-server/src/workspace.rs +++ b/crates/language-server/src/workspace.rs @@ -34,6 +34,12 @@ pub trait IngotFileContext { db: &mut LanguageServerDatabase, path: &str, ) -> Option; + fn rename_file( + &mut self, + db: &mut LanguageServerDatabase, + old_path: &str, + new_path: &str, + ) -> Result<()>; } pub struct LocalIngotContext { @@ -108,6 +114,20 @@ impl IngotFileContext for LocalIngotContext { let file = self.input_from_file_path(db, path)?; Some(map_file_to_mod(db, file)) } + + fn rename_file( + &mut self, + db: &mut LanguageServerDatabase, + old_path: &str, + new_path: &str, + ) -> Result<()> { + let file = self.files.remove(old_path); + if let Some(file) = file { + file.set_path(db).to(new_path.into()); + self.files.insert(new_path, file); + } + Ok(()) + } } pub struct StandaloneIngotContext { @@ -176,6 +196,20 @@ impl IngotFileContext for StandaloneIngotContext { let file = self.input_from_file_path(db, path)?; Some(map_file_to_mod(db, file)) } + + fn rename_file( + &mut self, + db: &mut LanguageServerDatabase, + old_path: &str, + new_path: &str, + ) -> Result<()> { + let file = self.files.remove(old_path); + if let Some(file) = file { + file.set_path(db).to(new_path.into()); + self.files.insert(new_path, file); + } + Ok(()) + } } pub struct Workspace { @@ -251,27 +285,27 @@ impl Workspace { info!("Syncing ingot at {}", config_path); let ingot_root = config_path.strip_suffix(FE_CONFIG_SUFFIX).unwrap(); - let paths = 
&glob::glob(&format!("{ingot_root}/src/**/*.fe")) + let actual_paths = &glob::glob(&format!("{ingot_root}/src/**/*.fe")) .unwrap() .map(|p| p.unwrap().to_str().unwrap().to_string()) .collect::>(); - info!("Found {} files in ingot", paths.len()); - info!("Syncing ingot files: {:?}", paths); + info!("Found {} files in ingot", actual_paths.len()); + info!("Syncing ingot files: {:?}", actual_paths); let ingot_context = self .ingot_context_from_config_path(db, config_path) .unwrap(); - let ingot_context_file_keys = &ingot_context.files.keys().collect::>(); - for path in ingot_context_file_keys { - if !paths.contains(path) { + let previous_ingot_context_file_keys = &ingot_context.files.keys().collect::>(); + for path in previous_ingot_context_file_keys { + if !actual_paths.contains(path) { ingot_context.files.remove(path); } } - for path in paths { - if !ingot_context_file_keys.contains(path) { + for path in actual_paths { + if !previous_ingot_context_file_keys.contains(path) { let file = ingot_context.input_from_file_path(db, path); let contents = std::fs::read_to_string(path).unwrap(); file.unwrap().set_text(db).to(contents); @@ -342,6 +376,21 @@ impl IngotFileContext for Workspace { .top_mod_from_file_path(db, path) } } + + fn rename_file( + &mut self, + db: &mut LanguageServerDatabase, + old_path: &str, + new_path: &str, + ) -> Result<()> { + let ctx = get_containing_ingot(&mut self.ingot_contexts, old_path); + if ctx.is_some() { + ctx.unwrap().rename_file(db, old_path, new_path) + } else { + self.standalone_ingot_context + .rename_file(db, old_path, new_path) + } + } } pub trait SyncableInputFile { @@ -349,6 +398,7 @@ pub trait SyncableInputFile { fn sync_from_fs(&self, db: &mut LanguageServerDatabase) -> Result<()>; fn sync_from_text(&self, db: &mut LanguageServerDatabase, contents: String) -> Result<()>; fn remove_from_ingot(&self, db: &mut LanguageServerDatabase) -> Result<()>; + // fn rename(&self, db: &mut LanguageServerDatabase, new_path: String) -> Result<()>; } impl SyncableInputFile for InputFile { @@ -366,6 +416,10 @@ impl SyncableInputFile for InputFile { // check to see if the file actually exists anymore: let path = self.path(db); if !path.exists() { + info!( + "File {:?} no longer exists... 
removing from workspace", + path + ); // if not let's remove it from the ingot self.remove_from_ingot(db) } else if let Some(contents) = contents { From 41e206a52e4cb2bd2a375d525ad059a6806f1ea3 Mon Sep 17 00:00:00 2001 From: Micah Date: Thu, 25 Jan 2024 19:21:53 -0500 Subject: [PATCH 08/66] language server: clarify async shared state design --- crates/language-server/src/backend.rs | 27 ++-- .../src/handlers/notifications.rs | 2 +- crates/language-server/src/language_server.rs | 147 +++++++++--------- crates/language-server/src/logger.rs | 22 ++- 4 files changed, 103 insertions(+), 95 deletions(-) diff --git a/crates/language-server/src/backend.rs b/crates/language-server/src/backend.rs index c21462faa..b9f0c0496 100644 --- a/crates/language-server/src/backend.rs +++ b/crates/language-server/src/backend.rs @@ -1,5 +1,6 @@ use std::sync::Arc; -use tokio::sync::Mutex; +use std::sync::{Mutex, MutexGuard}; +// use tokio::sync::{Mutex, MutexGuard}; use crate::db::LanguageServerDatabase; @@ -11,28 +12,24 @@ use lsp_types::{ use tower_lsp::Client; pub struct Backend { - client: Arc>, - db: Arc>, - workspace: Arc>, + pub(crate) client: Arc>, + pub(crate) db: Arc>, + pub(crate) workspace: Arc>, } impl Backend { - pub(crate) fn db(&self) -> Arc> { - self.db.clone() - } - - pub(crate) fn workspace(&self) -> Arc> { - self.workspace.clone() - } + // pub(crate) fn db(&self) -> MutexGuard { + // self.db.lock().unwrap() + // } - pub(crate) fn client(&self) -> Arc> { - self.client.clone() - } + // pub(crate) fn workspace(&self) -> MutexGuard { + // self.workspace.lock().unwrap() + // } pub fn new(client: Client) -> Self { let db = Arc::new(Mutex::new(LanguageServerDatabase::default())); let workspace = Arc::new(Mutex::new(Workspace::default())); - let client = Arc::new(Mutex::new(client)); + let client = Arc::new(tokio::sync::Mutex::new(client)); Self { client, db, diff --git a/crates/language-server/src/handlers/notifications.rs b/crates/language-server/src/handlers/notifications.rs index 40ab6f6cb..d4c261ae8 100644 --- a/crates/language-server/src/handlers/notifications.rs +++ b/crates/language-server/src/handlers/notifications.rs @@ -179,7 +179,7 @@ pub fn handle_watched_file_changes( lsp_types::FileChangeType::CREATED => { // TODO: handle this more carefully! 
// this is inefficient, a hack for now - // let db = state.db(); + // let db = state.db.lock().unwrap(); // let db = &mut state.db.lock().unwrap(); let _ = workspace.sync(db); let input = workspace diff --git a/crates/language-server/src/language_server.rs b/crates/language-server/src/language_server.rs index 317db6c52..4e1cf587e 100644 --- a/crates/language-server/src/language_server.rs +++ b/crates/language-server/src/language_server.rs @@ -4,7 +4,8 @@ use lsp_types::{ TextDocumentItem, }; -use tower_lsp::{jsonrpc::Result, LanguageServer}; +use tokio::sync::MutexGuard; +use tower_lsp::{jsonrpc::Result, Client, LanguageServer}; use crate::{ backend::Backend, @@ -27,23 +28,24 @@ impl LanguageServer for Backend { }), }; // setup logging - let _ = self.init_logger(log::Level::Info); + { + let _ = self.init_logger(log::Level::Info); + } // setup workspace - let workspace = self.workspace(); - let workspace = &mut workspace.lock().await; - let db = self.db(); - let db = &mut db.lock().await; - - let _ = workspace.set_workspace_root( - db, - initialize_params - .root_uri - .unwrap() - .to_file_path() - .ok() - .unwrap(), - ); + { + let workspace = &mut self.workspace.lock().unwrap(); + let db = &mut self.db.lock().unwrap(); + let _ = workspace.set_workspace_root( + db, + initialize_params + .root_uri + .unwrap() + .to_file_path() + .ok() + .unwrap(), + ); + } // register watchers let _ = self.register_watchers().await; @@ -56,29 +58,35 @@ impl LanguageServer for Backend { async fn did_open(&self, params: lsp_types::DidOpenTextDocumentParams) { info!("did open: {:?}", params); - let workspace = self.workspace(); - let workspace = &mut workspace.lock().await; - let db = self.db(); - let db = &mut db.lock().await; - let _ = workspace.sync(db); + { + let workspace = &mut self.workspace.lock().unwrap(); + let db = &mut self.db.lock().unwrap(); + let _ = workspace.sync(db); + } - self.on_change(TextDocumentItem { - uri: params.text_document.uri, - language_id: LANGUAGE_ID.to_string(), - version: params.text_document.version, - text: params.text_document.text, - }) + on_change( + self, + TextDocumentItem { + uri: params.text_document.uri, + language_id: LANGUAGE_ID.to_string(), + version: params.text_document.version, + text: params.text_document.text, + }, + ) .await; } async fn did_change(&self, params: lsp_types::DidChangeTextDocumentParams) { info!("did change: {:?}", params); - self.on_change(TextDocumentItem { - uri: params.text_document.uri, - language_id: LANGUAGE_ID.to_string(), - version: params.text_document.version, - text: params.content_changes[0].text.clone(), - }) + on_change( + self, + TextDocumentItem { + uri: params.text_document.uri, + language_id: LANGUAGE_ID.to_string(), + version: params.text_document.version, + text: params.content_changes[0].text.clone(), + }, + ) .await; } @@ -88,10 +96,10 @@ impl LanguageServer for Backend { // The fix: handle document renaming more explicitly in the "will rename" flow, along with the document // rename refactor. 
async fn did_close(&self, params: DidCloseTextDocumentParams) { - let workspace = self.workspace(); - let workspace = &mut workspace.lock().await; - let db = self.db(); - let db = &mut db.lock().await; + let workspace = &mut self.workspace.lock().unwrap(); + // let workspace = &mut workspace.lock().await; + let db = &mut self.db.lock().unwrap(); + // let db = &mut db.lock().await; let input = workspace .input_from_file_path( @@ -108,11 +116,6 @@ impl LanguageServer for Backend { let _ = input.sync(db, None); } async fn did_change_watched_files(&self, params: DidChangeWatchedFilesParams) { - let workspace = self.workspace(); - let workspace = &mut workspace.lock().await; - let db = self.db(); - let db = &mut db.lock().await; - let changes = params.changes; for change in changes { let uri = change.uri; @@ -122,6 +125,8 @@ impl LanguageServer for Backend { lsp_types::FileChangeType::CREATED => { // TODO: handle this more carefully! // this is inefficient, a hack for now + let workspace = &mut self.workspace.lock().unwrap(); + let db = &mut self.db.lock().unwrap(); let _ = workspace.sync(db); let input = workspace .input_from_file_path(db, path.to_str().unwrap()) @@ -129,6 +134,8 @@ impl LanguageServer for Backend { let _ = input.sync(db, None); } lsp_types::FileChangeType::CHANGED => { + let workspace = &mut self.workspace.lock().unwrap(); + let db = &mut self.db.lock().unwrap(); let input = workspace .input_from_file_path(db, path.to_str().unwrap()) .unwrap(); @@ -137,50 +144,34 @@ impl LanguageServer for Backend { lsp_types::FileChangeType::DELETED => { // TODO: handle this more carefully! // this is inefficient, a hack for now + let workspace = &mut self.workspace.lock().unwrap(); + let db = &mut self.db.lock().unwrap(); let _ = workspace.sync(db); } _ => {} } // collect diagnostics for the file if change.typ != lsp_types::FileChangeType::DELETED { - // let diags = get_diagnostics(db, workspace, uri.clone()); - // for (uri, more_diags) in diags.ok().unwrap() { - // let diags = diagnostics.entry(uri).or_insert_with(Vec::new); - // diags.extend(more_diags); - // } let text = std::fs::read_to_string(path).unwrap(); - self.on_change(TextDocumentItem { - uri: uri.clone(), - language_id: LANGUAGE_ID.to_string(), - version: 0, - text, - }) + on_change( + self, + TextDocumentItem { + uri: uri.clone(), + language_id: LANGUAGE_ID.to_string(), + version: 0, + text, + }, + ) .await; } } } - - // async fn will_rename_files(&self, params: RenameFilesParams) -> Result> { - // let workspace = &mut *self.workspace.lock().await; - // let db = &mut *self.db.lock().await; - - // for file in params.files { - // let _ = workspace.rename_file(db, &*file.old_uri, &*file.new_uri); - // } - - // // TODO: implement file rename auto-refactoring - // Ok(None) - // } } -impl Backend { - async fn on_change(&self, params: TextDocumentItem) { - let workspace = self.workspace(); - let workspace = &mut workspace.lock().await; - let db = self.db(); - let db = &mut db.lock().await; - let client = self.client(); - let client = &mut *client.lock().await; +async fn on_change(backend: &Backend, params: TextDocumentItem) { + let diagnostics = { + let workspace = &mut backend.workspace.lock().unwrap(); + let db = &mut backend.db.lock().unwrap(); let input = workspace .input_from_file_path( db, @@ -193,12 +184,16 @@ impl Backend { ) .unwrap(); let _ = input.sync(db, Some(params.text)); - let diagnostics = get_diagnostics(db, workspace, params.uri.clone()) + get_diagnostics(db, workspace, params.uri.clone()) + }; + + let client = 
backend.client.lock().await; + let diagnostics = + diagnostics .unwrap() .into_iter() .map(|(uri, diags)| client.publish_diagnostics(uri, diags, None)) .collect::>(); - futures::future::join_all(diagnostics).await; - } + futures::future::join_all(diagnostics).await; } diff --git a/crates/language-server/src/logger.rs b/crates/language-server/src/logger.rs index ea9e59319..9712274c0 100644 --- a/crates/language-server/src/logger.rs +++ b/crates/language-server/src/logger.rs @@ -22,8 +22,8 @@ impl log::Log for Logger { let message = format!("{} - {}", record.level(), record.args()); let level = record.level(); let client = self.client.clone(); - tokio::task::spawn(async move { - let client = client.lock().await; + tokio::spawn(async move { + let mut client = client.lock().await; client .log_message( match level { @@ -37,6 +37,22 @@ impl log::Log for Logger { ) .await; }); + // let client = self.client.clone(); + // tokio::task::spawn_blocking(async move { + // let client = client.lock().await; + // client + // .log_message( + // match level { + // log::Level::Error => lsp_types::MessageType::ERROR, + // log::Level::Warn => lsp_types::MessageType::WARNING, + // log::Level::Info => lsp_types::MessageType::INFO, + // log::Level::Debug => lsp_types::MessageType::LOG, + // log::Level::Trace => lsp_types::MessageType::LOG, + // }, + // message, + // ) + // .await; + // }); } } @@ -47,7 +63,7 @@ impl Backend { pub fn init_logger(&self, level: Level) -> Result<(), SetLoggerError> { let logger = Logger { level, - client: self.client(), + client: self.client.clone(), }; let static_logger = Box::leak(Box::new(logger)); log::set_logger(static_logger)?; From f3dc655a58e624bc0d9fdc33c7817334f2d7878f Mon Sep 17 00:00:00 2001 From: Micah Date: Fri, 26 Jan 2024 11:44:54 -0500 Subject: [PATCH 09/66] language server code cleanup --- crates/language-server/src/backend.rs | 11 +---------- crates/language-server/src/language_server.rs | 14 ++++++-------- crates/language-server/src/logger.rs | 18 +----------------- crates/language-server/src/workspace.rs | 4 ++-- 4 files changed, 10 insertions(+), 37 deletions(-) diff --git a/crates/language-server/src/backend.rs b/crates/language-server/src/backend.rs index b9f0c0496..7f00b494d 100644 --- a/crates/language-server/src/backend.rs +++ b/crates/language-server/src/backend.rs @@ -1,6 +1,5 @@ use std::sync::Arc; -use std::sync::{Mutex, MutexGuard}; -// use tokio::sync::{Mutex, MutexGuard}; +use std::sync::Mutex; use crate::db::LanguageServerDatabase; @@ -18,14 +17,6 @@ pub struct Backend { } impl Backend { - // pub(crate) fn db(&self) -> MutexGuard { - // self.db.lock().unwrap() - // } - - // pub(crate) fn workspace(&self) -> MutexGuard { - // self.workspace.lock().unwrap() - // } - pub fn new(client: Client) -> Self { let db = Arc::new(Mutex::new(LanguageServerDatabase::default())); let workspace = Arc::new(Mutex::new(Workspace::default())); diff --git a/crates/language-server/src/language_server.rs b/crates/language-server/src/language_server.rs index 4e1cf587e..24edecce8 100644 --- a/crates/language-server/src/language_server.rs +++ b/crates/language-server/src/language_server.rs @@ -4,8 +4,7 @@ use lsp_types::{ TextDocumentItem, }; -use tokio::sync::MutexGuard; -use tower_lsp::{jsonrpc::Result, Client, LanguageServer}; +use tower_lsp::{jsonrpc::Result, LanguageServer}; use crate::{ backend::Backend, @@ -188,12 +187,11 @@ async fn on_change(backend: &Backend, params: TextDocumentItem) { }; let client = backend.client.lock().await; - let diagnostics = - diagnostics - 
.unwrap() - .into_iter() - .map(|(uri, diags)| client.publish_diagnostics(uri, diags, None)) - .collect::>(); + let diagnostics = diagnostics + .unwrap() + .into_iter() + .map(|(uri, diags)| client.publish_diagnostics(uri, diags, None)) + .collect::>(); futures::future::join_all(diagnostics).await; } diff --git a/crates/language-server/src/logger.rs b/crates/language-server/src/logger.rs index 9712274c0..77485bbab 100644 --- a/crates/language-server/src/logger.rs +++ b/crates/language-server/src/logger.rs @@ -23,7 +23,7 @@ impl log::Log for Logger { let level = record.level(); let client = self.client.clone(); tokio::spawn(async move { - let mut client = client.lock().await; + let client = client.lock().await; client .log_message( match level { @@ -37,22 +37,6 @@ impl log::Log for Logger { ) .await; }); - // let client = self.client.clone(); - // tokio::task::spawn_blocking(async move { - // let client = client.lock().await; - // client - // .log_message( - // match level { - // log::Level::Error => lsp_types::MessageType::ERROR, - // log::Level::Warn => lsp_types::MessageType::WARNING, - // log::Level::Info => lsp_types::MessageType::INFO, - // log::Level::Debug => lsp_types::MessageType::LOG, - // log::Level::Trace => lsp_types::MessageType::LOG, - // }, - // message, - // ) - // .await; - // }); } } diff --git a/crates/language-server/src/workspace.rs b/crates/language-server/src/workspace.rs index 2b7c819f5..702f37817 100644 --- a/crates/language-server/src/workspace.rs +++ b/crates/language-server/src/workspace.rs @@ -384,8 +384,8 @@ impl IngotFileContext for Workspace { new_path: &str, ) -> Result<()> { let ctx = get_containing_ingot(&mut self.ingot_contexts, old_path); - if ctx.is_some() { - ctx.unwrap().rename_file(db, old_path, new_path) + if let Some(ctx) = ctx { + ctx.rename_file(db, old_path, new_path) } else { self.standalone_ingot_context .rename_file(db, old_path, new_path) From fcdd0d9596cb3bad4e184b36ea3a9059245022cf Mon Sep 17 00:00:00 2001 From: Micah Date: Mon, 29 Jan 2024 16:25:32 -0500 Subject: [PATCH 10/66] workspace file removal improvement --- crates/language-server/src/workspace.rs | 45 ++++++++++++++++--------- 1 file changed, 29 insertions(+), 16 deletions(-) diff --git a/crates/language-server/src/workspace.rs b/crates/language-server/src/workspace.rs index 702f37817..73750654f 100644 --- a/crates/language-server/src/workspace.rs +++ b/crates/language-server/src/workspace.rs @@ -40,6 +40,7 @@ pub trait IngotFileContext { old_path: &str, new_path: &str, ) -> Result<()>; + fn remove_file(&mut self, db: &mut LanguageServerDatabase, path: &str) -> Result<()>; } pub struct LocalIngotContext { @@ -128,6 +129,14 @@ impl IngotFileContext for LocalIngotContext { } Ok(()) } + + fn remove_file(&mut self, db: &mut LanguageServerDatabase, path: &str) -> Result<()> { + let file = self.files.remove(path); + if let Some(file) = file { + file.remove_from_ingot(db)?; + } + Ok(()) + } } pub struct StandaloneIngotContext { @@ -210,6 +219,15 @@ impl IngotFileContext for StandaloneIngotContext { } Ok(()) } + + fn remove_file(&mut self, db: &mut LanguageServerDatabase, path: &str) -> Result<()> { + let file = self.files.remove(path); + if let Some(file) = file { + file.remove_from_ingot(db)?; + } + self.ingots.remove(path); + Ok(()) + } } pub struct Workspace { @@ -300,7 +318,7 @@ impl Workspace { let previous_ingot_context_file_keys = &ingot_context.files.keys().collect::>(); for path in previous_ingot_context_file_keys { if !actual_paths.contains(path) { - 
ingot_context.files.remove(path); + ingot_context.remove_file(db, path); } } @@ -391,6 +409,16 @@ impl IngotFileContext for Workspace { .rename_file(db, old_path, new_path) } } + + fn remove_file(&mut self, db: &mut LanguageServerDatabase, path: &str) -> Result<()> { + let ctx = get_containing_ingot(&mut self.ingot_contexts, path); + if let Some(ctx) = ctx { + ctx.remove_file(db, path) + } else { + self.standalone_ingot_context.remove_file(db, path)?; + Ok(()) + } + } } pub trait SyncableInputFile { @@ -465,21 +493,6 @@ impl SyncableIngotFileContext for Workspace { for ingot_path in ingot_paths { self.sync_ingot_files(db, &ingot_path); } - - let paths = glob::glob(&format!("{path}/src/**/*.fe")) - .ok() - .unwrap() - .filter_map(|p| { - p.ok() - .unwrap() - .to_str() - .map(std::string::ToString::to_string) - }) - .collect::>(); - - for path in paths { - self.input_from_file_path(db, &path); - } Ok(()) } } From 7a67675a096641290d29cd6ab37881cb642ae968 Mon Sep 17 00:00:00 2001 From: Micah Date: Thu, 29 Feb 2024 10:00:26 -0600 Subject: [PATCH 11/66] generate LSP event channels automatically --- Cargo.lock | 22 ++ crates/language-server-macros/Cargo.toml | 12 + crates/language-server-macros/src/lib.rs | 132 +++++++ crates/language-server/Cargo.toml | 2 + crates/language-server/src/backend.rs | 65 ++-- crates/language-server/src/dispatcher.rs | 0 .../src/handlers/notifications.rs | 30 +- crates/language-server/src/language_server.rs | 354 +++++++++++------- crates/language-server/src/logger.rs | 4 +- crates/language-server/src/main.rs | 10 +- crates/language-server/test_files/lol.fe | 12 + 11 files changed, 445 insertions(+), 198 deletions(-) create mode 100644 crates/language-server-macros/Cargo.toml create mode 100644 crates/language-server-macros/src/lib.rs create mode 100644 crates/language-server/src/dispatcher.rs create mode 100644 crates/language-server/test_files/lol.fe diff --git a/Cargo.lock b/Cargo.lock index a7d671385..8ea6d3e03 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1161,6 +1161,7 @@ dependencies = [ "fe-driver2", "fe-hir", "fe-hir-analysis", + "fe-language-server-macros", "fe-macros", "futures", "fxhash", @@ -1176,10 +1177,20 @@ dependencies = [ "serde_json", "tokio", "tokio-macros", + "tokio-stream", "tower-lsp", "url", ] +[[package]] +name = "fe-language-server-macros" +version = "0.23.0" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.48", +] + [[package]] name = "fe-library" version = "0.23.0" @@ -3178,6 +3189,17 @@ dependencies = [ "syn 2.0.48", ] +[[package]] +name = "tokio-stream" +version = "0.1.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "397c988d37662c7dda6d2208364a706264bf3d6138b11d436cbac0ad38832842" +dependencies = [ + "futures-core", + "pin-project-lite", + "tokio", +] + [[package]] name = "tokio-util" version = "0.7.10" diff --git a/crates/language-server-macros/Cargo.toml b/crates/language-server-macros/Cargo.toml new file mode 100644 index 000000000..f3b42833e --- /dev/null +++ b/crates/language-server-macros/Cargo.toml @@ -0,0 +1,12 @@ +[package] +name = "fe-language-server-macros" +version = "0.23.0" +edition = "2021" + +[lib] +proc-macro = true + +[dependencies] +proc-macro2 = "1" +quote = "1" +syn = { version = "2", features = ["full"] } \ No newline at end of file diff --git a/crates/language-server-macros/src/lib.rs b/crates/language-server-macros/src/lib.rs new file mode 100644 index 000000000..22ba25ce9 --- /dev/null +++ b/crates/language-server-macros/src/lib.rs @@ -0,0 +1,132 @@ +extern crate 
proc_macro; + +use proc_macro::TokenStream; +use quote::{format_ident, quote}; +use syn::{ + parse_macro_input, FnArg, ImplItem, ItemImpl, +}; + +/// Macro for generating tokio channels from [`lsp-types`](https://docs.rs/lsp-types). +/// +/// This procedural macro annotates the `tower_lsp::LanguageServer` trait implementation and generates +/// a struct full of tokio broadcast channels that can be used to signal the server to handle +/// defined requests and notifications. +#[proc_macro_attribute] +pub fn dispatcher(_attr: TokenStream, item: TokenStream) -> TokenStream { + let lang_server_trait_impl = parse_macro_input!(item as ItemImpl); + + let method_calls = parse_method_calls(&lang_server_trait_impl); + let channel_struct = gen_channel_struct(&method_calls); + + let tokens = quote! { + #channel_struct + #lang_server_trait_impl + }; + + tokens.into() + // item +} + +struct LspTypeChannel<'a> { + tx_name: syn::Ident, + rx_name: syn::Ident, + params: Option<&'a syn::Type>, +} + +fn parse_method_calls(lang_server_trait: &ItemImpl) -> Vec { + let mut calls = Vec::new(); + + for item in &lang_server_trait.items { + let method = match item { + ImplItem::Fn(m) => m, + _ => continue, + }; + + let params = method.sig.inputs.iter().nth(1).and_then(|arg| match arg { + FnArg::Typed(pat) => Some(&*pat.ty), + _ => None, + }); + + // let result = match &method.sig.output { + // ReturnType::Default => None, + // ReturnType::Type(_, ty) => Some(&**ty), + // }; + + let handler_name = &method.sig.ident; + let tx_name = format_ident!("{}_tx", handler_name); + let rx_name = format_ident!("{}_rx", handler_name); + + calls.push(LspTypeChannel { + tx_name, + rx_name, + params, + }); + } + + calls +} + +fn gen_channel_struct(channels: &[LspTypeChannel]) -> proc_macro2::TokenStream { + // unit type + let unit_type = syn::Type::Tuple(syn::TypeTuple { + paren_token: syn::token::Paren::default(), + elems: syn::punctuated::Punctuated::new(), + }); + + let channel_declarations: proc_macro2::TokenStream = channels + .iter() + .map(|channel| { + let tx = &channel.tx_name; + let rx = &channel.rx_name; + let params = channel.params; + + // if params is None we need to use the type of () as the default + let params = match params { + Some(params) => params, + None => &unit_type, + }; + quote! { + pub #tx: tokio::sync::broadcast::Sender<#params>, + pub #rx: tokio::sync::broadcast::Receiver<#params>, + } + }) + .collect(); + + let channel_instantiations: proc_macro2::TokenStream = channels + .iter() + .map(|channel| { + let tx = &channel.tx_name; + let rx = &channel.rx_name; + quote! { + let (#tx, #rx) = tokio::sync::broadcast::channel(100); + } + }) + .collect(); + + let channel_assignments: proc_macro2::TokenStream = channels + .iter() + .map(|channel| { + let tx = &channel.tx_name; + let rx = &channel.rx_name; + quote! { + #tx, + #rx, + } + }) + .collect(); + + quote! 
{ + pub struct LspChannels { + #channel_declarations + } + + impl LspChannels { + pub fn new() -> Self { + #channel_instantiations + Self { + #channel_assignments + } + } + } + } +} diff --git a/crates/language-server/Cargo.toml b/crates/language-server/Cargo.toml index eb79c02cf..f9c013559 100644 --- a/crates/language-server/Cargo.toml +++ b/crates/language-server/Cargo.toml @@ -13,6 +13,7 @@ description = "An LSP language server for Fe lang" salsa = { git = "https://github.com/salsa-rs/salsa", package = "salsa-2022" } codespan-reporting = "0.11" hir = { path = "../hir", package = "fe-hir" } +language-server-macros = { path = "../language-server-macros", package = "fe-language-server-macros" } macros = { path = "../macros", package = "fe-macros" } hir-analysis = { path = "../hir-analysis", package = "fe-hir-analysis" } camino = "1.1.4" @@ -39,3 +40,4 @@ tower-lsp = "0.20.0" tokio = { version = "1.35.1", features = ["full", "io-std"] } tokio-macros = "2.2.0" futures = "0.3.28" +tokio-stream = { version = "0.1.14", features = ["sync"] } diff --git a/crates/language-server/src/backend.rs b/crates/language-server/src/backend.rs index 7f00b494d..69a4b51ff 100644 --- a/crates/language-server/src/backend.rs +++ b/crates/language-server/src/backend.rs @@ -1,47 +1,60 @@ use std::sync::Arc; -use std::sync::Mutex; +use tokio::sync::Mutex; use crate::db::LanguageServerDatabase; +use crate::language_server::Server; use crate::workspace::Workspace; use anyhow::Result; +use log::info; use lsp_types::{ DidChangeWatchedFilesRegistrationOptions, FileSystemWatcher, GlobPattern, Registration, }; +use tokio_stream::wrappers::BroadcastStream; use tower_lsp::Client; +use tokio_stream::StreamExt; pub struct Backend { - pub(crate) client: Arc>, - pub(crate) db: Arc>, - pub(crate) workspace: Arc>, + // pub(crate) client: &'a Client, + pub(crate) client: Arc>, + pub(crate) db: LanguageServerDatabase, + pub(crate) workspace: Workspace, } impl Backend { - pub fn new(client: Client) -> Self { - let db = Arc::new(Mutex::new(LanguageServerDatabase::default())); - let workspace = Arc::new(Mutex::new(Workspace::default())); - let client = Arc::new(tokio::sync::Mutex::new(client)); - Self { + pub fn new(client: Arc>, server: &Server) -> Self { + // let db = Arc::new(Mutex::new(LanguageServerDatabase::default())); + // let workspace = Arc::new(Mutex::new(Workspace::default())); + // let client = Arc::new(tokio::sync::Mutex::new(client)); + let workspace = Workspace::default(); + let db = LanguageServerDatabase::default(); + let backend = Self { client, db, workspace, - } - } - pub(crate) async fn register_watchers(&self) -> Result<()> { - let client = self.client.lock().await; - let registration = Registration { - id: String::from("watch-fe-files"), - method: String::from("workspace/didChangeWatchedFiles"), - register_options: Some( - serde_json::to_value(DidChangeWatchedFilesRegistrationOptions { - watchers: vec![FileSystemWatcher { - glob_pattern: GlobPattern::String("**/*.fe".to_string()), - kind: None, - }], - }) - .unwrap(), - ), }; - Ok(client.register_capability(vec![registration]).await?) 
+ + //subscribe to server initialize event + let mut stream = BroadcastStream::new(server.dispatch.initialize_tx.subscribe()); + tokio::spawn(async move { + while let Some(event) = stream.next().await { + match event { + Ok(params) => { + info!("initialize event received: {:?}", params); + // Handle the event here + } + Err(e) => { + eprintln!("Error receiving event: {:?}", e); + // Handle the error here + } + } + } + }); + + // server.dispatch.initialize_rx.resubscribe(move |params| { + // info!("initialize event received: {:?}", params); + // }); + + backend } } diff --git a/crates/language-server/src/dispatcher.rs b/crates/language-server/src/dispatcher.rs new file mode 100644 index 000000000..e69de29bb diff --git a/crates/language-server/src/handlers/notifications.rs b/crates/language-server/src/handlers/notifications.rs index d4c261ae8..38234e1a4 100644 --- a/crates/language-server/src/handlers/notifications.rs +++ b/crates/language-server/src/handlers/notifications.rs @@ -4,9 +4,7 @@ use fxhash::FxHashMap; use serde::Deserialize; use crate::{ - db::LanguageServerDatabase, - diagnostics::get_diagnostics, - workspace::{IngotFileContext, SyncableIngotFileContext, SyncableInputFile, Workspace}, + backend::Backend, db::LanguageServerDatabase, diagnostics::get_diagnostics, workspace::{IngotFileContext, SyncableIngotFileContext, SyncableInputFile, Workspace} }; #[cfg(target_arch = "wasm32")] @@ -25,32 +23,6 @@ fn run_diagnostics( db.finalize_diags() } -pub fn get_diagnostics( - db: &mut LanguageServerDatabase, - workspace: &mut Workspace, - uri: lsp_types::Url, -) -> Result>, Error> { - let diags = run_diagnostics(db, workspace, uri.to_file_path().unwrap().to_str().unwrap()); - // let db = &mut *state.db.lock().unwrap(); - - let diagnostics = diags - .into_iter() - .flat_map(|diag| diag_to_lsp(diag, db).clone()); - - // we need to reduce the diagnostics to a map from URL to Vec - let mut result = FxHashMap::>::default(); - - // add a null diagnostic to the result for the given URL - let _ = result.entry(uri.clone()).or_insert_with(Vec::new); - - diagnostics.for_each(|(uri, more_diags)| { - let diags = result.entry(uri).or_insert_with(Vec::new); - diags.extend(more_diags); - }); - - Ok(result) -} - pub fn handle_document_did_open( db: &mut LanguageServerDatabase, workspace: &mut Workspace, diff --git a/crates/language-server/src/language_server.rs b/crates/language-server/src/language_server.rs index 24edecce8..a0413e959 100644 --- a/crates/language-server/src/language_server.rs +++ b/crates/language-server/src/language_server.rs @@ -1,22 +1,91 @@ +use std::{any::Any, sync::Arc}; + use log::info; use lsp_types::{ - DidChangeWatchedFilesParams, DidCloseTextDocumentParams, InitializeParams, InitializeResult, - TextDocumentItem, + DidChangeWatchedFilesParams, DidChangeWatchedFilesRegistrationOptions, DidCloseTextDocumentParams, FileSystemWatcher, GlobPattern, InitializeParams, InitializeResult, Registration, TextDocumentItem }; -use tower_lsp::{jsonrpc::Result, LanguageServer}; +use tower_lsp::{jsonrpc::Result, Client, LanguageServer}; use crate::{ backend::Backend, capabilities::server_capabilities, diagnostics::get_diagnostics, - globals::LANGUAGE_ID, workspace::{IngotFileContext, SyncableIngotFileContext, SyncableInputFile}, }; +// This is replaced by the `dispatcher` procedural macro! 
+// struct Dispatch { +// tx_initialize: tokio::sync::mpsc::Sender, +// initialize_stream: tokio_stream::wrappers::ReceiverStream, +// tx_did_open: tokio::sync::mpsc::Sender, +// did_open_stream: tokio_stream::wrappers::ReceiverStream, +// tx_did_change: tokio::sync::mpsc::Sender, +// did_change_stream: tokio_stream::wrappers::ReceiverStream, +// tx_did_close: tokio::sync::mpsc::Sender, +// did_close_stream: tokio_stream::wrappers::ReceiverStream, +// tx_did_change_watched_files: tokio::sync::mpsc::Sender, +// did_change_watched_files_stream: tokio_stream::wrappers::ReceiverStream, +// } + +// impl Dispatch { +// fn new() -> Self { +// let (tx_initialize, rx_initialize) = tokio::sync::mpsc::channel(16); +// let (tx_did_open, rx_did_open) = tokio::sync::mpsc::channel(16); +// let (tx_did_change, rx_did_change) = tokio::sync::mpsc::channel(16); +// let (tx_did_close, rx_did_close) = tokio::sync::mpsc::channel(16); +// let (tx_did_change_watched_files, rx_did_change_watched_files) = tokio::sync::mpsc::channel(16); +// Self { +// tx_initialize, +// tx_did_open, +// tx_did_change, +// tx_did_close, +// tx_did_change_watched_files, +// initialize_stream: tokio_stream::wrappers::ReceiverStream::new(rx_initialize), +// did_open_stream: tokio_stream::wrappers::ReceiverStream::new(rx_did_open), +// did_close_stream: tokio_stream::wrappers::ReceiverStream::new(rx_did_close), +// did_change_stream: tokio_stream::wrappers::ReceiverStream::new(rx_did_change), +// did_change_watched_files_stream: tokio_stream::wrappers::ReceiverStream::new(rx_did_change_watched_files), +// } +// } +// } + +pub(crate) struct Server { + pub(crate) dispatch: LspChannels, + pub(crate) client: Arc>, +} + +impl Server { + pub(crate) async fn register_watchers(&self) -> Result<()> { + let client = self.client.lock().await; + let registration = Registration { + id: String::from("watch-fe-files"), + method: String::from("workspace/didChangeWatchedFiles"), + register_options: Some( + serde_json::to_value(DidChangeWatchedFilesRegistrationOptions { + watchers: vec![FileSystemWatcher { + glob_pattern: GlobPattern::String("**/*.fe".to_string()), + kind: None, + }], + }) + .unwrap(), + ), + }; + Ok(client.register_capability(vec![registration]).await?) 
+ } + + pub(crate) fn new(client: Client) -> Self { + let dispatch = LspChannels::new(); + let client = Arc::new(tokio::sync::Mutex::new(client)); + Self { dispatch, client } + } +} + +#[language_server_macros::dispatcher] #[tower_lsp::async_trait] -impl LanguageServer for Backend { +impl LanguageServer for Server { async fn initialize(&self, initialize_params: InitializeParams) -> Result { + let _ = self.dispatch.initialize_tx.send(initialize_params); // initialize let capabilities = server_capabilities(); let initialize_result = lsp_types::InitializeResult { @@ -32,19 +101,19 @@ impl LanguageServer for Backend { } // setup workspace - { - let workspace = &mut self.workspace.lock().unwrap(); - let db = &mut self.db.lock().unwrap(); - let _ = workspace.set_workspace_root( - db, - initialize_params - .root_uri - .unwrap() - .to_file_path() - .ok() - .unwrap(), - ); - } + // { + // let workspace = &mut self.workspace.lock().unwrap(); + // let db = &mut self.db.lock().unwrap(); + // let _ = workspace.set_workspace_root( + // db, + // initialize_params + // .root_uri + // .unwrap() + // .to_file_path() + // .ok() + // .unwrap(), + // ); + // } // register watchers let _ = self.register_watchers().await; @@ -56,37 +125,40 @@ impl LanguageServer for Backend { } async fn did_open(&self, params: lsp_types::DidOpenTextDocumentParams) { - info!("did open: {:?}", params); - { - let workspace = &mut self.workspace.lock().unwrap(); - let db = &mut self.db.lock().unwrap(); - let _ = workspace.sync(db); - } + let _ = self.dispatch.did_open_tx.send(params); + // self.tx.send(Box::new(params)); + // info!("did open: {:?}", params); + // { + // let workspace = &mut self.workspace.lock().unwrap(); + // let db = &mut self.db.lock().unwrap(); + // let _ = workspace.sync(db); + // } - on_change( - self, - TextDocumentItem { - uri: params.text_document.uri, - language_id: LANGUAGE_ID.to_string(), - version: params.text_document.version, - text: params.text_document.text, - }, - ) - .await; + // on_change( + // self, + // TextDocumentItem { + // uri: params.text_document.uri, + // language_id: LANGUAGE_ID.to_string(), + // version: params.text_document.version, + // text: params.text_document.text, + // }, + // ) + // .await; } async fn did_change(&self, params: lsp_types::DidChangeTextDocumentParams) { - info!("did change: {:?}", params); - on_change( - self, - TextDocumentItem { - uri: params.text_document.uri, - language_id: LANGUAGE_ID.to_string(), - version: params.text_document.version, - text: params.content_changes[0].text.clone(), - }, - ) - .await; + let _ = self.dispatch.did_change_tx.send(params); + // info!("did change: {:?}", params); + // on_change( + // self, + // TextDocumentItem { + // uri: params.text_document.uri, + // language_id: LANGUAGE_ID.to_string(), + // version: params.text_document.version, + // text: params.content_changes[0].text.clone(), + // }, + // ) + // .await; } // Currently this is used to handle document renaming since the "document open" handler is called @@ -95,103 +167,105 @@ impl LanguageServer for Backend { // The fix: handle document renaming more explicitly in the "will rename" flow, along with the document // rename refactor. 
async fn did_close(&self, params: DidCloseTextDocumentParams) { - let workspace = &mut self.workspace.lock().unwrap(); - // let workspace = &mut workspace.lock().await; - let db = &mut self.db.lock().unwrap(); - // let db = &mut db.lock().await; - - let input = workspace - .input_from_file_path( - db, - params - .text_document - .uri - .to_file_path() - .unwrap() - .to_str() - .unwrap(), - ) - .unwrap(); - let _ = input.sync(db, None); + let _ = self.dispatch.did_close_tx.send(params); + // let workspace = &mut self.workspace.lock().unwrap(); + // // let workspace = &mut workspace.lock().await; + // let db = &mut self.db.lock().unwrap(); + // // let db = &mut db.lock().await; + + // let input = workspace + // .input_from_file_path( + // db, + // params + // .text_document + // .uri + // .to_file_path() + // .unwrap() + // .to_str() + // .unwrap(), + // ) + // .unwrap(); + // let _ = input.sync(db, None); } async fn did_change_watched_files(&self, params: DidChangeWatchedFilesParams) { - let changes = params.changes; - for change in changes { - let uri = change.uri; - let path = uri.to_file_path().unwrap(); - - match change.typ { - lsp_types::FileChangeType::CREATED => { - // TODO: handle this more carefully! - // this is inefficient, a hack for now - let workspace = &mut self.workspace.lock().unwrap(); - let db = &mut self.db.lock().unwrap(); - let _ = workspace.sync(db); - let input = workspace - .input_from_file_path(db, path.to_str().unwrap()) - .unwrap(); - let _ = input.sync(db, None); - } - lsp_types::FileChangeType::CHANGED => { - let workspace = &mut self.workspace.lock().unwrap(); - let db = &mut self.db.lock().unwrap(); - let input = workspace - .input_from_file_path(db, path.to_str().unwrap()) - .unwrap(); - let _ = input.sync(db, None); - } - lsp_types::FileChangeType::DELETED => { - // TODO: handle this more carefully! - // this is inefficient, a hack for now - let workspace = &mut self.workspace.lock().unwrap(); - let db = &mut self.db.lock().unwrap(); - let _ = workspace.sync(db); - } - _ => {} - } - // collect diagnostics for the file - if change.typ != lsp_types::FileChangeType::DELETED { - let text = std::fs::read_to_string(path).unwrap(); - on_change( - self, - TextDocumentItem { - uri: uri.clone(), - language_id: LANGUAGE_ID.to_string(), - version: 0, - text, - }, - ) - .await; - } - } + let _ = self.dispatch.did_change_watched_files_tx.send(params); + // let changes = params.changes; + // for change in changes { + // let uri = change.uri; + // let path = uri.to_file_path().unwrap(); + + // match change.typ { + // lsp_types::FileChangeType::CREATED => { + // // TODO: handle this more carefully! + // // this is inefficient, a hack for now + // let workspace = &mut self.workspace.lock().unwrap(); + // let db = &mut self.db.lock().unwrap(); + // let _ = workspace.sync(db); + // let input = workspace + // .input_from_file_path(db, path.to_str().unwrap()) + // .unwrap(); + // let _ = input.sync(db, None); + // } + // lsp_types::FileChangeType::CHANGED => { + // let workspace = &mut self.workspace.lock().unwrap(); + // let db = &mut self.db.lock().unwrap(); + // let input = workspace + // .input_from_file_path(db, path.to_str().unwrap()) + // .unwrap(); + // let _ = input.sync(db, None); + // } + // lsp_types::FileChangeType::DELETED => { + // // TODO: handle this more carefully! 
+ // // this is inefficient, a hack for now + // let workspace = &mut self.workspace.lock().unwrap(); + // let db = &mut self.db.lock().unwrap(); + // let _ = workspace.sync(db); + // } + // _ => {} + // } + // // collect diagnostics for the file + // if change.typ != lsp_types::FileChangeType::DELETED { + // let text = std::fs::read_to_string(path).unwrap(); + // on_change( + // self, + // TextDocumentItem { + // uri: uri.clone(), + // language_id: LANGUAGE_ID.to_string(), + // version: 0, + // text, + // }, + // ) + // .await; + // } + // } } } -async fn on_change(backend: &Backend, params: TextDocumentItem) { - let diagnostics = { - let workspace = &mut backend.workspace.lock().unwrap(); - let db = &mut backend.db.lock().unwrap(); - let input = workspace - .input_from_file_path( - db, - params - .uri - .to_file_path() - .expect("Failed to convert URI to file path") - .to_str() - .expect("Failed to convert file path to string"), - ) - .unwrap(); - let _ = input.sync(db, Some(params.text)); - get_diagnostics(db, workspace, params.uri.clone()) - }; - - let client = backend.client.lock().await; - let diagnostics = diagnostics - .unwrap() - .into_iter() - .map(|(uri, diags)| client.publish_diagnostics(uri, diags, None)) - .collect::>(); - - futures::future::join_all(diagnostics).await; -} +// async fn on_change(backend: &Backend, params: TextDocumentItem) { +// let diagnostics = { +// let workspace = &mut backend.workspace.lock().unwrap(); +// let db = &mut backend.db.lock().unwrap(); +// let input = workspace +// .input_from_file_path( +// db, +// params +// .uri +// .to_file_path() +// .expect("Failed to convert URI to file path") +// .to_str() +// .expect("Failed to convert file path to string"), +// ) +// .unwrap(); +// let _ = input.sync(db, Some(params.text)); +// get_diagnostics(db, workspace, params.uri.clone()) +// }; + +// let client = backend.client.lock().await; +// let diagnostics = diagnostics +// .unwrap() +// .into_iter() +// .map(|(uri, diags)| client.publish_diagnostics(uri, diags, None)) +// .collect::>(); + +// futures::future::join_all(diagnostics).await; +// } diff --git a/crates/language-server/src/logger.rs b/crates/language-server/src/logger.rs index 77485bbab..cacd09968 100644 --- a/crates/language-server/src/logger.rs +++ b/crates/language-server/src/logger.rs @@ -3,7 +3,7 @@ use std::sync::Arc; use log::{Level, LevelFilter, Metadata, Record, SetLoggerError}; use tower_lsp::Client; -use crate::backend::Backend; +use crate::{backend::Backend, language_server::Server}; pub struct Logger { pub(crate) level: Level, @@ -43,7 +43,7 @@ impl log::Log for Logger { fn flush(&self) {} } -impl Backend { +impl Server { pub fn init_logger(&self, level: Level) -> Result<(), SetLoggerError> { let logger = Logger { level, diff --git a/crates/language-server/src/main.rs b/crates/language-server/src/main.rs index e6f4b937d..09cd84744 100644 --- a/crates/language-server/src/main.rs +++ b/crates/language-server/src/main.rs @@ -10,18 +10,26 @@ mod util; mod workspace; use backend::Backend; +// use backend::Backend; use db::Jar; +use language_server::Server; mod handlers { pub mod notifications; pub mod request; } + + #[tokio_macros::main] async fn main() { let stdin = tokio::io::stdin(); let stdout = tokio::io::stdout(); - let (service, socket) = tower_lsp::LspService::build(Backend::new).finish(); + let (service, socket) = tower_lsp::LspService::build(Server::new).finish(); + + let server = service.inner(); + + let _backend = Backend::new(server.client.clone(), server); 
tower_lsp::Server::new(stdin, stdout, socket) .serve(service) .await; diff --git a/crates/language-server/test_files/lol.fe b/crates/language-server/test_files/lol.fe new file mode 100644 index 000000000..f08c02f07 --- /dev/null +++ b/crates/language-server/test_files/lol.fe @@ -0,0 +1,12 @@ +struct Foo {} +struct Bar {} + +fn main() { + let x: Foo + let y: Barrr + let z: baz::Bazzz +} + +mod baz { + pub struct Baz {} +} \ No newline at end of file From f56052a7721e75978542605dc46bf1e18c516ef9 Mon Sep 17 00:00:00 2001 From: Micah Date: Thu, 29 Feb 2024 21:02:58 -0600 Subject: [PATCH 12/66] attempt at oneshot responses --- Cargo.lock | 1 + crates/language-server-macros/src/lib.rs | 143 ++++++++++++++++-- crates/language-server/src/backend.rs | 67 ++++---- .../src/handlers/notifications.rs | 2 +- crates/language-server/src/language_server.rs | 44 ++---- crates/language-server/src/logger.rs | 2 +- crates/language-server/src/main.rs | 4 +- 7 files changed, 187 insertions(+), 76 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 8ea6d3e03..fbd7c6564 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3198,6 +3198,7 @@ dependencies = [ "futures-core", "pin-project-lite", "tokio", + "tokio-util", ] [[package]] diff --git a/crates/language-server-macros/src/lib.rs b/crates/language-server-macros/src/lib.rs index 22ba25ce9..f67a485e4 100644 --- a/crates/language-server-macros/src/lib.rs +++ b/crates/language-server-macros/src/lib.rs @@ -2,9 +2,7 @@ extern crate proc_macro; use proc_macro::TokenStream; use quote::{format_ident, quote}; -use syn::{ - parse_macro_input, FnArg, ImplItem, ItemImpl, -}; +use syn::{parse_macro_input, FnArg, ImplItem, ItemImpl, ReturnType}; /// Macro for generating tokio channels from [`lsp-types`](https://docs.rs/lsp-types). /// @@ -28,9 +26,13 @@ pub fn dispatcher(_attr: TokenStream, item: TokenStream) -> TokenStream { } struct LspTypeChannel<'a> { + // handler_name: &'a syn::Ident, tx_name: syn::Ident, + dispatcher_name: syn::Ident, + subscriber_name: syn::Ident, rx_name: syn::Ident, params: Option<&'a syn::Type>, + result: Option<&'a syn::Type>, } fn parse_method_calls(lang_server_trait: &ItemImpl) -> Vec { @@ -47,19 +49,25 @@ fn parse_method_calls(lang_server_trait: &ItemImpl) -> Vec { _ => None, }); - // let result = match &method.sig.output { - // ReturnType::Default => None, - // ReturnType::Type(_, ty) => Some(&**ty), - // }; + let result = match &method.sig.output { + ReturnType::Default => None, + ReturnType::Type(_, ty) => Some(&**ty), + }; let handler_name = &method.sig.ident; let tx_name = format_ident!("{}_tx", handler_name); + let dispatcher_name = format_ident!("dispatch_{}", handler_name); + let subscriber_name = format_ident!("subscribe_{}", handler_name); + let rx_name = format_ident!("{}_rx", handler_name); calls.push(LspTypeChannel { tx_name, rx_name, + dispatcher_name, + subscriber_name, params, + result, }); } @@ -79,15 +87,27 @@ fn gen_channel_struct(channels: &[LspTypeChannel]) -> proc_macro2::TokenStream { let tx = &channel.tx_name; let rx = &channel.rx_name; let params = channel.params; + let result = channel.result; // if params is None we need to use the type of () as the default let params = match params { Some(params) => params, None => &unit_type, }; + + let sender_type = match result { + Some(result) => quote! { tokio::sync::broadcast::Sender<(#params, OneshotResponder<#result>)> }, + None => quote! { tokio::sync::broadcast::Sender<#params> }, + }; + + let receiver_type = match result { + Some(result) => quote! 
{ tokio::sync::broadcast::Receiver<(#params, OneshotResponder<#result>)> }, + None => quote! { tokio::sync::broadcast::Receiver<#params> }, + }; + quote! { - pub #tx: tokio::sync::broadcast::Sender<#params>, - pub #rx: tokio::sync::broadcast::Receiver<#params>, + pub #tx: #sender_type, + pub #rx: #receiver_type, } }) .collect(); @@ -115,7 +135,111 @@ fn gen_channel_struct(channels: &[LspTypeChannel]) -> proc_macro2::TokenStream { }) .collect(); + let dispatch_functions: proc_macro2::TokenStream = channels + .iter() + .map(|channel| { + let tx = &channel.tx_name; + let rx = &channel.rx_name; + let params = &channel.params; + + let params_type = match params { + Some(params) => params, + None => &unit_type, + }; + let subscriber_name = &channel.subscriber_name; + let dispatcher_name = &channel.dispatcher_name; + let dispatcher_result = match channel.result { + Some(result) => quote!{tokio::sync::oneshot::Receiver<#result>}, + None => quote!{()}, + // Some(result) => quote! { tokio::sync::broadcast::Receiver<(#params, OneshotResponder>)> }, + // None => quote! { tokio::sync::broadcast::Receiver<#params> }, + }; + let receiver_type = match channel.result { + Some(result) => quote! { tokio::sync::broadcast::Receiver<(#params_type, OneshotResponder<#result>)> }, + None => quote! { tokio::sync::broadcast::Receiver<#params_type> }, + }; + + let dispatcher_payload = match params { + Some(params) => quote! { params }, + None => quote! { () }, + }; + + let dispatcher_send_payload = match channel.result { + Some(result) => quote!{ + let (tx, rx) = tokio::sync::oneshot::channel::<#result>(); + self.#tx.send((#dispatcher_payload, OneshotResponder::from(tx))).unwrap(); + rx + }, + None => quote!{ + self.#tx.send(#dispatcher_payload).unwrap(); + }, + }; + + let dispatcher_fn = match params { + Some(params) => quote! { + pub fn #dispatcher_name(&self, params: #params) -> #dispatcher_result { + #dispatcher_send_payload + } + }, + None => quote! { + pub fn #dispatcher_name(&self) -> #dispatcher_result { + #dispatcher_send_payload + } + }, + }; + + // Some(result) => quote! { tokio::sync::broadcast::Sender<(#params, OneshotResponder>)> }, + // None => quote! { tokio::sync::broadcast::Sender<#params> }, + let subscriber_fn = match params { + Some(_params) => quote! { + pub fn #subscriber_name(&self) -> #receiver_type { + self.#tx.subscribe() + } + }, + None => quote! { + pub fn #subscriber_name(&self) -> #receiver_type { + self.#tx.subscribe() + } + }, + }; + + quote! { + #dispatcher_fn + #subscriber_fn + } + }) + .collect(); + quote! 
{ + use std::fmt::Debug; + #[derive(Debug)] + pub struct OneshotResponder{ + sender: std::sync::Arc>>> + } + impl Clone for OneshotResponder { + fn clone(&self) -> OneshotResponder { + Self { + sender: self.sender.clone() + } + } + } + + + impl OneshotResponder { + pub fn from(sender: tokio::sync::oneshot::Sender) -> Self { + Self { + sender: std::sync::Arc::new(std::sync::Mutex::new(Some(sender))) + } + } + pub fn respond(self, response: T) { + let mut sender = self.sender.lock().unwrap(); + // sender.send(response.clone()); + if let Some(sender) = sender.take() { + let _ = sender.send(response).unwrap(); + } + } + } + pub struct LspChannels { #channel_declarations } @@ -127,6 +251,7 @@ fn gen_channel_struct(channels: &[LspTypeChannel]) -> proc_macro2::TokenStream { #channel_assignments } } + #dispatch_functions } } } diff --git a/crates/language-server/src/backend.rs b/crates/language-server/src/backend.rs index 69a4b51ff..e86f4fe6a 100644 --- a/crates/language-server/src/backend.rs +++ b/crates/language-server/src/backend.rs @@ -1,60 +1,69 @@ use std::sync::Arc; use tokio::sync::Mutex; +// use tokio::sync::oneshot::Receiver; + +use crate::capabilities::server_capabilities; use crate::db::LanguageServerDatabase; use crate::language_server::Server; use crate::workspace::Workspace; -use anyhow::Result; + use log::info; -use lsp_types::{ - DidChangeWatchedFilesRegistrationOptions, FileSystemWatcher, GlobPattern, Registration, -}; + use tokio_stream::wrappers::BroadcastStream; -use tower_lsp::Client; use tokio_stream::StreamExt; +use tower_lsp::Client; -pub struct Backend { +pub struct Backend<'a> { // pub(crate) client: &'a Client, + pub(crate) server: &'a Server, pub(crate) client: Arc>, pub(crate) db: LanguageServerDatabase, pub(crate) workspace: Workspace, } -impl Backend { - pub fn new(client: Arc>, server: &Server) -> Self { - // let db = Arc::new(Mutex::new(LanguageServerDatabase::default())); - // let workspace = Arc::new(Mutex::new(Workspace::default())); - // let client = Arc::new(tokio::sync::Mutex::new(client)); +impl<'a> Backend<'a> { + pub fn new(client: Arc>, server: &'a Server) -> Self { let workspace = Workspace::default(); let db = LanguageServerDatabase::default(); let backend = Self { + server, client, db, workspace, }; - //subscribe to server initialize event - let mut stream = BroadcastStream::new(server.dispatch.initialize_tx.subscribe()); + backend + } + pub fn setup_streams(mut self) { + let mut stream = BroadcastStream::new(self.server.dispatch.subscribe_initialize()); tokio::spawn(async move { - while let Some(event) = stream.next().await { - match event { - Ok(params) => { - info!("initialize event received: {:?}", params); - // Handle the event here - } - Err(e) => { - eprintln!("Error receiving event: {:?}", e); - // Handle the error here - } + while let Some(result) = stream.next().await { + if let Ok((initialization_params, responder)) = result { + info!("initializing language server: {:?}", initialization_params); + // setup workspace + let capabilities = server_capabilities(); + let initialize_result = lsp_types::InitializeResult { + capabilities, + server_info: Some(lsp_types::ServerInfo { + name: String::from("fe-language-server"), + version: Some(String::from(env!("CARGO_PKG_VERSION"))), + }), + }; + let _ = self.workspace.set_workspace_root( + &mut self.db, + initialization_params + .root_uri + .unwrap() + .to_file_path() + .ok() + .unwrap(), + ); + + responder.respond(Ok(initialize_result)); } } }); - - // server.dispatch.initialize_rx.resubscribe(move 
|params| { - // info!("initialize event received: {:?}", params); - // }); - - backend } } diff --git a/crates/language-server/src/handlers/notifications.rs b/crates/language-server/src/handlers/notifications.rs index 38234e1a4..f64fadb7d 100644 --- a/crates/language-server/src/handlers/notifications.rs +++ b/crates/language-server/src/handlers/notifications.rs @@ -58,7 +58,7 @@ pub fn handle_document_did_open( pub fn handle_document_did_close( db: &mut LanguageServerDatabase, workspace: &mut Workspace, - state: &mut Backend, + _state: &mut Backend, note: lsp_server::Notification, ) -> Result<(), Error> { let params = lsp_types::DidCloseTextDocumentParams::deserialize(note.params)?; diff --git a/crates/language-server/src/language_server.rs b/crates/language-server/src/language_server.rs index a0413e959..91428b5f5 100644 --- a/crates/language-server/src/language_server.rs +++ b/crates/language-server/src/language_server.rs @@ -1,17 +1,14 @@ -use std::{any::Any, sync::Arc}; +use std::{sync::Arc}; + -use log::info; use lsp_types::{ - DidChangeWatchedFilesParams, DidChangeWatchedFilesRegistrationOptions, DidCloseTextDocumentParams, FileSystemWatcher, GlobPattern, InitializeParams, InitializeResult, Registration, TextDocumentItem + DidChangeWatchedFilesParams, DidChangeWatchedFilesRegistrationOptions, DidCloseTextDocumentParams, FileSystemWatcher, GlobPattern, InitializeParams, InitializeResult, Registration }; use tower_lsp::{jsonrpc::Result, Client, LanguageServer}; use crate::{ - backend::Backend, capabilities::server_capabilities, - diagnostics::get_diagnostics, - workspace::{IngotFileContext, SyncableIngotFileContext, SyncableInputFile}, }; // This is replaced by the `dispatcher` procedural macro! @@ -85,41 +82,18 @@ impl Server { #[tower_lsp::async_trait] impl LanguageServer for Server { async fn initialize(&self, initialize_params: InitializeParams) -> Result { - let _ = self.dispatch.initialize_tx.send(initialize_params); - // initialize - let capabilities = server_capabilities(); - let initialize_result = lsp_types::InitializeResult { - capabilities, - server_info: Some(lsp_types::ServerInfo { - name: String::from("fe-language-server"), - version: Some(String::from(env!("CARGO_PKG_VERSION"))), - }), - }; - // setup logging - { - let _ = self.init_logger(log::Level::Info); - } + let rx = self.dispatch.dispatch_initialize(initialize_params); - // setup workspace - // { - // let workspace = &mut self.workspace.lock().unwrap(); - // let db = &mut self.db.lock().unwrap(); - // let _ = workspace.set_workspace_root( - // db, - // initialize_params - // .root_uri - // .unwrap() - // .to_file_path() - // .ok() - // .unwrap(), - // ); - // } + // setup logging + let _ = self.init_logger(log::Level::Info); // register watchers let _ = self.register_watchers().await; - Ok(initialize_result) + let initialize_result = rx.await.unwrap(); + initialize_result } + async fn shutdown(&self) -> tower_lsp::jsonrpc::Result<()> { Ok(()) } diff --git a/crates/language-server/src/logger.rs b/crates/language-server/src/logger.rs index cacd09968..2022a2b0a 100644 --- a/crates/language-server/src/logger.rs +++ b/crates/language-server/src/logger.rs @@ -3,7 +3,7 @@ use std::sync::Arc; use log::{Level, LevelFilter, Metadata, Record, SetLoggerError}; use tower_lsp::Client; -use crate::{backend::Backend, language_server::Server}; +use crate::{language_server::Server}; pub struct Logger { pub(crate) level: Level, diff --git a/crates/language-server/src/main.rs b/crates/language-server/src/main.rs index 
09cd84744..95fdf15d8 100644 --- a/crates/language-server/src/main.rs +++ b/crates/language-server/src/main.rs @@ -29,7 +29,9 @@ async fn main() { let server = service.inner(); - let _backend = Backend::new(server.client.clone(), server); + let backend = Backend::new(server.client.clone(), server); + backend.setup_streams(); + tower_lsp::Server::new(stdin, stdout, socket) .serve(service) .await; From 843ed9a3572cf651cc49149335494e6a97ad5ade Mon Sep 17 00:00:00 2001 From: Micah Date: Thu, 29 Feb 2024 23:05:40 -0600 Subject: [PATCH 13/66] stream handlers for LSP functionality --- crates/language-server/src/backend.rs | 199 ++++++++++++++++-- crates/language-server/src/language_server.rs | 179 +--------------- 2 files changed, 190 insertions(+), 188 deletions(-) diff --git a/crates/language-server/src/backend.rs b/crates/language-server/src/backend.rs index e86f4fe6a..16a88db8f 100644 --- a/crates/language-server/src/backend.rs +++ b/crates/language-server/src/backend.rs @@ -1,3 +1,6 @@ +use crate::workspace::SyncableIngotFileContext; +use futures::TryStreamExt; +use lsp_types::TextDocumentItem; use std::sync::Arc; use tokio::sync::Mutex; @@ -6,8 +9,10 @@ use tokio::sync::Mutex; use crate::capabilities::server_capabilities; use crate::db::LanguageServerDatabase; +use crate::diagnostics::get_diagnostics; +use crate::globals::LANGUAGE_ID; use crate::language_server::Server; -use crate::workspace::Workspace; +use crate::workspace::{IngotFileContext, SyncableInputFile, Workspace}; use log::info; @@ -16,30 +21,36 @@ use tokio_stream::StreamExt; use tower_lsp::Client; pub struct Backend<'a> { - // pub(crate) client: &'a Client, pub(crate) server: &'a Server, - pub(crate) client: Arc>, - pub(crate) db: LanguageServerDatabase, - pub(crate) workspace: Workspace, + pub(crate) client: Arc>, + pub(crate) db: Arc>, + pub(crate) workspace: Arc>, + runtime: tokio::runtime::Runtime, } impl<'a> Backend<'a> { - pub fn new(client: Arc>, server: &'a Server) -> Self { - let workspace = Workspace::default(); - let db = LanguageServerDatabase::default(); + pub fn new(client: Arc>, server: &'a Server) -> Self { + let workspace = Arc::new(Mutex::new(Workspace::default())); + let db = Arc::new(Mutex::new(LanguageServerDatabase::default())); + let runtime = tokio::runtime::Runtime::new().unwrap(); let backend = Self { server, client, db, workspace, + runtime, }; backend } - pub fn setup_streams(mut self) { - let mut stream = BroadcastStream::new(self.server.dispatch.subscribe_initialize()); - tokio::spawn(async move { - while let Some(result) = stream.next().await { + pub fn setup_streams(self) { + let db = self.db.clone(); + let workspace = self.workspace.clone(); + let client = self.client.clone(); + let mut initialized_stream = + BroadcastStream::new(self.server.dispatch.subscribe_initialize()); + self.runtime.spawn(async move { + while let Some(result) = initialized_stream.next().await { if let Ok((initialization_params, responder)) = result { info!("initializing language server: {:?}", initialization_params); // setup workspace @@ -51,8 +62,10 @@ impl<'a> Backend<'a> { version: Some(String::from(env!("CARGO_PKG_VERSION"))), }), }; - let _ = self.workspace.set_workspace_root( - &mut self.db, + let db = &mut self.db.lock().await; + let workspace = &mut self.workspace.lock().await; + let _ = workspace.set_workspace_root( + db, initialization_params .root_uri .unwrap() @@ -65,5 +78,163 @@ impl<'a> Backend<'a> { } } }); + + let mut shutdown_stream = BroadcastStream::new(self.server.dispatch.subscribe_shutdown()); + 
self.runtime.spawn(async move { + while let Some(result) = shutdown_stream.next().await { + if let Ok((_, responder)) = result { + info!("shutting down language server"); + responder.respond(Ok(())); + } + } + }); + + let did_open_stream = BroadcastStream::new(self.server.dispatch.subscribe_did_open()); + let did_change_stream = BroadcastStream::new(self.server.dispatch.subscribe_did_change()); + + let mut change_stream = tokio_stream::StreamExt::merge( + did_open_stream.map_ok(|params| TextDocumentItem { + uri: params.text_document.uri, + language_id: LANGUAGE_ID.to_string(), + version: params.text_document.version, + text: params.text_document.text, + }), + did_change_stream.map_ok(|params| TextDocumentItem { + uri: params.text_document.uri, + language_id: LANGUAGE_ID.to_string(), + version: params.text_document.version, + text: params.content_changes[0].text.clone(), + }), + ); + + let workspace_clone = workspace.clone(); + let client_clone = client.clone(); + let db_clone = db.clone(); + self.runtime.spawn(async move { + let workspace = &mut workspace_clone.lock().await; + let client = &mut client_clone.lock().await; + let db = &mut db_clone.lock().await; + while let Some(Ok(doc)) = change_stream.next().await { + on_change(client, workspace, db, doc).await; + } + }); + + let workspace_clone = workspace.clone(); + let client_clone = client.clone(); + let db_clone = db.clone(); + let mut did_close_stream = BroadcastStream::new(self.server.dispatch.subscribe_did_close()); + self.runtime.spawn(async move { + let workspace = &mut workspace_clone.lock().await; + let client = &mut client_clone.lock().await; + let db = &mut db_clone.lock().await; + while let Some(Ok(params)) = did_close_stream.next().await { + let input = workspace + .input_from_file_path( + db, + params + .text_document + .uri + .to_file_path() + .unwrap() + .to_str() + .unwrap(), + ) + .unwrap(); + let _ = input.sync(db, None); + } + }); + + let workspace_clone = workspace.clone(); + let client_clone = client.clone(); + let db_clone = db.clone(); + let mut did_change_watched_files_stream = + BroadcastStream::new(self.server.dispatch.subscribe_did_change_watched_files()); + self.runtime.spawn(async move { + let workspace = &mut workspace_clone.lock().await; + let client = &mut client_clone.lock().await; + let db = &mut db_clone.lock().await; + + while let Some(Ok(params)) = did_change_watched_files_stream.next().await { + let changes = params.changes; + for change in changes { + let uri = change.uri; + let path = uri.to_file_path().unwrap(); + + match change.typ { + lsp_types::FileChangeType::CREATED => { + // TODO: handle this more carefully! + // this is inefficient, a hack for now + let _ = workspace.sync(db); + let input = workspace + .input_from_file_path(db, path.to_str().unwrap()) + .unwrap(); + let _ = input.sync(db, None); + } + lsp_types::FileChangeType::CHANGED => { + let input = workspace + .input_from_file_path(db, path.to_str().unwrap()) + .unwrap(); + let _ = input.sync(db, None); + } + lsp_types::FileChangeType::DELETED => { + // TODO: handle this more carefully! 
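+                            // a deleted file has nothing left to sync on its own, so fall back to re-scanning the whole workspace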
+ // this is inefficient, a hack for now + let _ = workspace.sync(db); + } + _ => {} + } + // collect diagnostics for the file + if change.typ != lsp_types::FileChangeType::DELETED { + let text = std::fs::read_to_string(path).unwrap(); + on_change( + client, + workspace, + db, + TextDocumentItem { + uri: uri.clone(), + language_id: LANGUAGE_ID.to_string(), + version: 0, + text, + }, + ) + .await; + } + } + } + }); } } + +async fn on_change( + client: &mut Client, + workspace: &mut Workspace, + db: &mut LanguageServerDatabase, + params: TextDocumentItem, +) { + let diagnostics = { + // let workspace = &mut workspace.lock().await; + // let db = &mut db.lock().await; + let input = workspace + .input_from_file_path( + db, + params + .uri + .to_file_path() + .expect("Failed to convert URI to file path") + .to_str() + .expect("Failed to convert file path to string"), + ) + .unwrap(); + let _ = input.sync(db, Some(params.text)); + get_diagnostics(db, workspace, params.uri.clone()) + }; + + // let client = client.lock().await; + let diagnostics = diagnostics + .unwrap() + .into_iter() + .map(|(uri, diags)| client.publish_diagnostics(uri, diags, None)) + .collect::>(); + + futures::future::join_all(diagnostics).await; +} diff --git a/crates/language-server/src/language_server.rs b/crates/language-server/src/language_server.rs index 91428b5f5..f105712a3 100644 --- a/crates/language-server/src/language_server.rs +++ b/crates/language-server/src/language_server.rs @@ -7,46 +7,6 @@ use lsp_types::{ use tower_lsp::{jsonrpc::Result, Client, LanguageServer}; -use crate::{ - capabilities::server_capabilities, -}; - -// This is replaced by the `dispatcher` procedural macro! -// struct Dispatch { -// tx_initialize: tokio::sync::mpsc::Sender, -// initialize_stream: tokio_stream::wrappers::ReceiverStream, -// tx_did_open: tokio::sync::mpsc::Sender, -// did_open_stream: tokio_stream::wrappers::ReceiverStream, -// tx_did_change: tokio::sync::mpsc::Sender, -// did_change_stream: tokio_stream::wrappers::ReceiverStream, -// tx_did_close: tokio::sync::mpsc::Sender, -// did_close_stream: tokio_stream::wrappers::ReceiverStream, -// tx_did_change_watched_files: tokio::sync::mpsc::Sender, -// did_change_watched_files_stream: tokio_stream::wrappers::ReceiverStream, -// } - -// impl Dispatch { -// fn new() -> Self { -// let (tx_initialize, rx_initialize) = tokio::sync::mpsc::channel(16); -// let (tx_did_open, rx_did_open) = tokio::sync::mpsc::channel(16); -// let (tx_did_change, rx_did_change) = tokio::sync::mpsc::channel(16); -// let (tx_did_close, rx_did_close) = tokio::sync::mpsc::channel(16); -// let (tx_did_change_watched_files, rx_did_change_watched_files) = tokio::sync::mpsc::channel(16); -// Self { -// tx_initialize, -// tx_did_open, -// tx_did_change, -// tx_did_close, -// tx_did_change_watched_files, -// initialize_stream: tokio_stream::wrappers::ReceiverStream::new(rx_initialize), -// did_open_stream: tokio_stream::wrappers::ReceiverStream::new(rx_did_open), -// did_close_stream: tokio_stream::wrappers::ReceiverStream::new(rx_did_close), -// did_change_stream: tokio_stream::wrappers::ReceiverStream::new(rx_did_change), -// did_change_watched_files_stream: tokio_stream::wrappers::ReceiverStream::new(rx_did_change_watched_files), -// } -// } -// } - pub(crate) struct Server { pub(crate) dispatch: LspChannels, pub(crate) client: Arc>, @@ -99,147 +59,18 @@ impl LanguageServer for Server { } async fn did_open(&self, params: lsp_types::DidOpenTextDocumentParams) { - let _ = self.dispatch.did_open_tx.send(params); - 
// self.tx.send(Box::new(params)); - // info!("did open: {:?}", params); - // { - // let workspace = &mut self.workspace.lock().unwrap(); - // let db = &mut self.db.lock().unwrap(); - // let _ = workspace.sync(db); - // } - - // on_change( - // self, - // TextDocumentItem { - // uri: params.text_document.uri, - // language_id: LANGUAGE_ID.to_string(), - // version: params.text_document.version, - // text: params.text_document.text, - // }, - // ) - // .await; + self.dispatch.dispatch_did_open(params); } async fn did_change(&self, params: lsp_types::DidChangeTextDocumentParams) { - let _ = self.dispatch.did_change_tx.send(params); - // info!("did change: {:?}", params); - // on_change( - // self, - // TextDocumentItem { - // uri: params.text_document.uri, - // language_id: LANGUAGE_ID.to_string(), - // version: params.text_document.version, - // text: params.content_changes[0].text.clone(), - // }, - // ) - // .await; + self.dispatch.dispatch_did_change(params); } - // Currently this is used to handle document renaming since the "document open" handler is called - // before the "document was renamed" handler. - // - // The fix: handle document renaming more explicitly in the "will rename" flow, along with the document - // rename refactor. async fn did_close(&self, params: DidCloseTextDocumentParams) { - let _ = self.dispatch.did_close_tx.send(params); - // let workspace = &mut self.workspace.lock().unwrap(); - // // let workspace = &mut workspace.lock().await; - // let db = &mut self.db.lock().unwrap(); - // // let db = &mut db.lock().await; - - // let input = workspace - // .input_from_file_path( - // db, - // params - // .text_document - // .uri - // .to_file_path() - // .unwrap() - // .to_str() - // .unwrap(), - // ) - // .unwrap(); - // let _ = input.sync(db, None); + self.dispatch.dispatch_did_close(params); } - async fn did_change_watched_files(&self, params: DidChangeWatchedFilesParams) { - let _ = self.dispatch.did_change_watched_files_tx.send(params); - // let changes = params.changes; - // for change in changes { - // let uri = change.uri; - // let path = uri.to_file_path().unwrap(); - // match change.typ { - // lsp_types::FileChangeType::CREATED => { - // // TODO: handle this more carefully! - // // this is inefficient, a hack for now - // let workspace = &mut self.workspace.lock().unwrap(); - // let db = &mut self.db.lock().unwrap(); - // let _ = workspace.sync(db); - // let input = workspace - // .input_from_file_path(db, path.to_str().unwrap()) - // .unwrap(); - // let _ = input.sync(db, None); - // } - // lsp_types::FileChangeType::CHANGED => { - // let workspace = &mut self.workspace.lock().unwrap(); - // let db = &mut self.db.lock().unwrap(); - // let input = workspace - // .input_from_file_path(db, path.to_str().unwrap()) - // .unwrap(); - // let _ = input.sync(db, None); - // } - // lsp_types::FileChangeType::DELETED => { - // // TODO: handle this more carefully! 
- // // this is inefficient, a hack for now - // let workspace = &mut self.workspace.lock().unwrap(); - // let db = &mut self.db.lock().unwrap(); - // let _ = workspace.sync(db); - // } - // _ => {} - // } - // // collect diagnostics for the file - // if change.typ != lsp_types::FileChangeType::DELETED { - // let text = std::fs::read_to_string(path).unwrap(); - // on_change( - // self, - // TextDocumentItem { - // uri: uri.clone(), - // language_id: LANGUAGE_ID.to_string(), - // version: 0, - // text, - // }, - // ) - // .await; - // } - // } + async fn did_change_watched_files(&self, params: DidChangeWatchedFilesParams) { + self.dispatch.dispatch_did_change_watched_files(params); } } - -// async fn on_change(backend: &Backend, params: TextDocumentItem) { -// let diagnostics = { -// let workspace = &mut backend.workspace.lock().unwrap(); -// let db = &mut backend.db.lock().unwrap(); -// let input = workspace -// .input_from_file_path( -// db, -// params -// .uri -// .to_file_path() -// .expect("Failed to convert URI to file path") -// .to_str() -// .expect("Failed to convert file path to string"), -// ) -// .unwrap(); -// let _ = input.sync(db, Some(params.text)); -// get_diagnostics(db, workspace, params.uri.clone()) -// }; - -// let client = backend.client.lock().await; -// let diagnostics = diagnostics -// .unwrap() -// .into_iter() -// .map(|(uri, diags)| client.publish_diagnostics(uri, diags, None)) -// .collect::>(); - -// futures::future::join_all(diagnostics).await; -// } From 63fd1cdd8a8fa8f54e1070f711d450c162f85b44 Mon Sep 17 00:00:00 2001 From: Micah Date: Thu, 29 Feb 2024 23:10:40 -0600 Subject: [PATCH 14/66] formatting and cleanup --- crates/language-server-macros/src/lib.rs | 4 ++-- crates/language-server/src/backend.rs | 11 +++++------ crates/language-server/src/handlers/notifications.rs | 5 ++++- crates/language-server/src/language_server.rs | 12 ++++++------ crates/language-server/src/logger.rs | 2 +- crates/language-server/src/main.rs | 2 -- 6 files changed, 18 insertions(+), 18 deletions(-) diff --git a/crates/language-server-macros/src/lib.rs b/crates/language-server-macros/src/lib.rs index f67a485e4..edb6dcf18 100644 --- a/crates/language-server-macros/src/lib.rs +++ b/crates/language-server-macros/src/lib.rs @@ -139,7 +139,7 @@ fn gen_channel_struct(channels: &[LspTypeChannel]) -> proc_macro2::TokenStream { .iter() .map(|channel| { let tx = &channel.tx_name; - let rx = &channel.rx_name; + // let rx = &channel.rx_name; let params = &channel.params; let params_type = match params { @@ -160,7 +160,7 @@ fn gen_channel_struct(channels: &[LspTypeChannel]) -> proc_macro2::TokenStream { }; let dispatcher_payload = match params { - Some(params) => quote! { params }, + Some(_params) => quote! { params }, None => quote! 
{ () }, }; diff --git a/crates/language-server/src/backend.rs b/crates/language-server/src/backend.rs index 16a88db8f..bf2cf8c22 100644 --- a/crates/language-server/src/backend.rs +++ b/crates/language-server/src/backend.rs @@ -33,15 +33,14 @@ impl<'a> Backend<'a> { let workspace = Arc::new(Mutex::new(Workspace::default())); let db = Arc::new(Mutex::new(LanguageServerDatabase::default())); let runtime = tokio::runtime::Runtime::new().unwrap(); - let backend = Self { + + Self { server, client, db, workspace, runtime, - }; - - backend + } } pub fn setup_streams(self) { let db = self.db.clone(); @@ -125,7 +124,7 @@ impl<'a> Backend<'a> { let mut did_close_stream = BroadcastStream::new(self.server.dispatch.subscribe_did_close()); self.runtime.spawn(async move { let workspace = &mut workspace_clone.lock().await; - let client = &mut client_clone.lock().await; + let _client = &mut client_clone.lock().await; let db = &mut db_clone.lock().await; while let Some(Ok(params)) = did_close_stream.next().await { let input = workspace @@ -153,7 +152,7 @@ impl<'a> Backend<'a> { let workspace = &mut workspace_clone.lock().await; let client = &mut client_clone.lock().await; let db = &mut db_clone.lock().await; - + while let Some(Ok(params)) = did_change_watched_files_stream.next().await { let changes = params.changes; for change in changes { diff --git a/crates/language-server/src/handlers/notifications.rs b/crates/language-server/src/handlers/notifications.rs index f64fadb7d..c6cb30a2e 100644 --- a/crates/language-server/src/handlers/notifications.rs +++ b/crates/language-server/src/handlers/notifications.rs @@ -4,7 +4,10 @@ use fxhash::FxHashMap; use serde::Deserialize; use crate::{ - backend::Backend, db::LanguageServerDatabase, diagnostics::get_diagnostics, workspace::{IngotFileContext, SyncableIngotFileContext, SyncableInputFile, Workspace} + backend::Backend, + db::LanguageServerDatabase, + diagnostics::get_diagnostics, + workspace::{IngotFileContext, SyncableIngotFileContext, SyncableInputFile, Workspace}, }; #[cfg(target_arch = "wasm32")] diff --git a/crates/language-server/src/language_server.rs b/crates/language-server/src/language_server.rs index f105712a3..5f91d9f90 100644 --- a/crates/language-server/src/language_server.rs +++ b/crates/language-server/src/language_server.rs @@ -1,8 +1,9 @@ -use std::{sync::Arc}; - +use std::sync::Arc; use lsp_types::{ - DidChangeWatchedFilesParams, DidChangeWatchedFilesRegistrationOptions, DidCloseTextDocumentParams, FileSystemWatcher, GlobPattern, InitializeParams, InitializeResult, Registration + DidChangeWatchedFilesParams, DidChangeWatchedFilesRegistrationOptions, + DidCloseTextDocumentParams, FileSystemWatcher, GlobPattern, InitializeParams, InitializeResult, + Registration, }; use tower_lsp::{jsonrpc::Result, Client, LanguageServer}; @@ -28,7 +29,7 @@ impl Server { .unwrap(), ), }; - Ok(client.register_capability(vec![registration]).await?) 
+ client.register_capability(vec![registration]).await } pub(crate) fn new(client: Client) -> Self { @@ -50,8 +51,7 @@ impl LanguageServer for Server { // register watchers let _ = self.register_watchers().await; - let initialize_result = rx.await.unwrap(); - initialize_result + rx.await.unwrap() } async fn shutdown(&self) -> tower_lsp::jsonrpc::Result<()> { diff --git a/crates/language-server/src/logger.rs b/crates/language-server/src/logger.rs index 2022a2b0a..0010c8ddf 100644 --- a/crates/language-server/src/logger.rs +++ b/crates/language-server/src/logger.rs @@ -3,7 +3,7 @@ use std::sync::Arc; use log::{Level, LevelFilter, Metadata, Record, SetLoggerError}; use tower_lsp::Client; -use crate::{language_server::Server}; +use crate::language_server::Server; pub struct Logger { pub(crate) level: Level, diff --git a/crates/language-server/src/main.rs b/crates/language-server/src/main.rs index 95fdf15d8..b6766ede4 100644 --- a/crates/language-server/src/main.rs +++ b/crates/language-server/src/main.rs @@ -18,8 +18,6 @@ mod handlers { pub mod request; } - - #[tokio_macros::main] async fn main() { let stdin = tokio::io::stdin(); From 763a95691d5430bcbe4ffea2f2f34acdaf7c6d25 Mon Sep 17 00:00:00 2001 From: Micah Date: Fri, 1 Mar 2024 14:57:21 -0600 Subject: [PATCH 15/66] language server concurrency progress --- crates/language-server-macros/src/lib.rs | 12 +- crates/language-server/src/backend.rs | 350 ++++++++++-------- crates/language-server/src/language_server.rs | 53 ++- crates/language-server/src/main.rs | 53 ++- crates/library/std/src/math.fe | 1 + 5 files changed, 287 insertions(+), 182 deletions(-) diff --git a/crates/language-server-macros/src/lib.rs b/crates/language-server-macros/src/lib.rs index edb6dcf18..37a6dff43 100644 --- a/crates/language-server-macros/src/lib.rs +++ b/crates/language-server-macros/src/lib.rs @@ -167,7 +167,15 @@ fn gen_channel_struct(channels: &[LspTypeChannel]) -> proc_macro2::TokenStream { let dispatcher_send_payload = match channel.result { Some(result) => quote!{ let (tx, rx) = tokio::sync::oneshot::channel::<#result>(); - self.#tx.send((#dispatcher_payload, OneshotResponder::from(tx))).unwrap(); + // let payload = #dispatcher_payload.clone(); + let oneshot = OneshotResponder::from(tx); + let broadcast = self.#tx.clone(); + tokio::spawn(async move { + tokio::time::sleep(tokio::time::Duration::from_millis(100)).await; + info!("sending oneshot sender: {:?}", #dispatcher_payload); + broadcast.send((#dispatcher_payload, oneshot)).unwrap(); + }); + info!("returning oneshot receiver: {:?}", rx); rx }, None => quote!{ @@ -232,9 +240,11 @@ fn gen_channel_struct(channels: &[LspTypeChannel]) -> proc_macro2::TokenStream { } } pub fn respond(self, response: T) { + info!("responding with: {:?}", response); let mut sender = self.sender.lock().unwrap(); // sender.send(response.clone()); if let Some(sender) = sender.take() { + info!("sending response: {:?} and {:?}", response, sender); let _ = sender.send(response).unwrap(); } } diff --git a/crates/language-server/src/backend.rs b/crates/language-server/src/backend.rs index bf2cf8c22..098d5e434 100644 --- a/crates/language-server/src/backend.rs +++ b/crates/language-server/src/backend.rs @@ -1,7 +1,9 @@ use crate::workspace::SyncableIngotFileContext; +use futures::stream::FuturesUnordered; use futures::TryStreamExt; use lsp_types::TextDocumentItem; use std::sync::Arc; +use tokio::join; use tokio::sync::Mutex; // use tokio::sync::oneshot::Receiver; @@ -11,7 +13,7 @@ use crate::db::LanguageServerDatabase; use 
crate::diagnostics::get_diagnostics; use crate::globals::LANGUAGE_ID; -use crate::language_server::Server; +use crate::language_server::{LspChannels, Server}; use crate::workspace::{IngotFileContext, SyncableInputFile, Workspace}; use log::info; @@ -20,196 +22,230 @@ use tokio_stream::wrappers::BroadcastStream; use tokio_stream::StreamExt; use tower_lsp::Client; -pub struct Backend<'a> { - pub(crate) server: &'a Server, - pub(crate) client: Arc>, +pub struct Backend { + // pub(crate) server: Arc>, + pub(crate) messaging: Arc>, + pub(crate) client: Arc>, pub(crate) db: Arc>, pub(crate) workspace: Arc>, - runtime: tokio::runtime::Runtime, + // runtime: tokio::runtime::Runtime, } -impl<'a> Backend<'a> { - pub fn new(client: Arc>, server: &'a Server) -> Self { +impl Backend { + pub fn new(client: Arc>, messaging: Arc>) -> Self { let workspace = Arc::new(Mutex::new(Workspace::default())); let db = Arc::new(Mutex::new(LanguageServerDatabase::default())); - let runtime = tokio::runtime::Runtime::new().unwrap(); + // let runtime = tokio::runtime::Runtime::new().unwrap(); Self { - server, + messaging, client, db, workspace, - runtime, + // runtime, } } - pub fn setup_streams(self) { - let db = self.db.clone(); - let workspace = self.workspace.clone(); - let client = self.client.clone(); - let mut initialized_stream = - BroadcastStream::new(self.server.dispatch.subscribe_initialize()); - self.runtime.spawn(async move { - while let Some(result) = initialized_stream.next().await { - if let Ok((initialization_params, responder)) = result { - info!("initializing language server: {:?}", initialization_params); - // setup workspace - let capabilities = server_capabilities(); - let initialize_result = lsp_types::InitializeResult { - capabilities, - server_info: Some(lsp_types::ServerInfo { - name: String::from("fe-language-server"), - version: Some(String::from(env!("CARGO_PKG_VERSION"))), - }), - }; - let db = &mut self.db.lock().await; - let workspace = &mut self.workspace.lock().await; - let _ = workspace.set_workspace_root( - db, - initialization_params - .root_uri - .unwrap() - .to_file_path() - .ok() - .unwrap(), - ); - - responder.respond(Ok(initialize_result)); + pub async fn setup_streams( + self, + messaging: &LspChannels, // , db: &LanguageServerDatabase, workspace: &Workspace, client: &Client + ) { + // let db = self.db.clone(); + // let workspace = self.workspace.clone(); + // let client = self.client.clone(); + // let messaging = self.messaging.clone(); + + // info!("hmm, that's weird"); + info!("setting up streams"); + + let init_handler = { + let db = self.db.clone(); + let workspace = self.workspace.clone(); + let mut initialized_stream = BroadcastStream::new(messaging.subscribe_initialize()); + async move { + while let Some(result) = initialized_stream.next().await { + info!("received initialize request {:?}", result); + if let Ok((initialization_params, responder)) = result { + info!("initializing language server: {:?}", initialization_params); + // setup workspace + let db = &mut db.lock().await; + let workspace = &mut workspace.lock().await; + let _ = workspace.set_workspace_root( + db, + initialization_params + .root_uri + .unwrap() + .to_file_path() + .ok() + .unwrap(), + ); + + info!("initializing language server!"); + // responder.respond(Ok(initialize_result)); + } } } - }); + }; - let mut shutdown_stream = BroadcastStream::new(self.server.dispatch.subscribe_shutdown()); - self.runtime.spawn(async move { - while let Some(result) = shutdown_stream.next().await { - if let Ok((_, 
responder)) = result { - info!("shutting down language server"); - responder.respond(Ok(())); + // let mut shutdown_stream = BroadcastStream::new(messaging.subscribe_shutdown()); + // tokio::spawn(async move { + // while let Some(result) = shutdown_stream.next().await { + // if let Ok((_, responder)) = result { + // info!("shutting down language server"); + // responder.respond(Ok(())); + // } + // } + // }); + let on_change_handler = { + let db = self.db.clone(); + let workspace = self.workspace.clone(); + let client = self.client.clone(); + + let did_open_stream = BroadcastStream::new(messaging.subscribe_did_open()); + let did_change_stream = BroadcastStream::new(messaging.subscribe_did_change()); + + let mut change_stream = tokio_stream::StreamExt::merge( + did_open_stream.map_ok(|params| TextDocumentItem { + uri: params.text_document.uri, + language_id: LANGUAGE_ID.to_string(), + version: params.text_document.version, + text: params.text_document.text, + }), + did_change_stream.map_ok(|params| TextDocumentItem { + uri: params.text_document.uri, + language_id: LANGUAGE_ID.to_string(), + version: params.text_document.version, + text: params.content_changes[0].text.clone(), + }), + ); + + // let workspace_clone = workspace.clone(); + // let client_clone = client.clone(); + // let db_clone = db.clone(); + async move { + // let workspace = &mut workspace.lock().await; + // let client = &mut client.lock().await; + // let db = &mut db.lock().await; + info!("listening for changes"); + while let Some(Ok(doc)) = change_stream.next().await { + info!("change detected: {:?}", doc.uri); + on_change(client.clone(), workspace.clone(), db.clone(), doc).await; } } - }); + }; - let did_open_stream = BroadcastStream::new(self.server.dispatch.subscribe_did_open()); - let did_change_stream = BroadcastStream::new(self.server.dispatch.subscribe_did_change()); - - let mut change_stream = tokio_stream::StreamExt::merge( - did_open_stream.map_ok(|params| TextDocumentItem { - uri: params.text_document.uri, - language_id: LANGUAGE_ID.to_string(), - version: params.text_document.version, - text: params.text_document.text, - }), - did_change_stream.map_ok(|params| TextDocumentItem { - uri: params.text_document.uri, - language_id: LANGUAGE_ID.to_string(), - version: params.text_document.version, - text: params.content_changes[0].text.clone(), - }), - ); - - let workspace_clone = workspace.clone(); - let client_clone = client.clone(); - let db_clone = db.clone(); - self.runtime.spawn(async move { - let workspace = &mut workspace_clone.lock().await; - let client = &mut client_clone.lock().await; - let db = &mut db_clone.lock().await; - while let Some(Ok(doc)) = change_stream.next().await { - on_change(client, workspace, db, doc).await; + let did_close_handler = { + let workspace_clone = self.workspace.clone(); + let client_clone = self.client.clone(); + let db_clone = self.db.clone(); + let mut did_close_stream = BroadcastStream::new(messaging.subscribe_did_close()); + async move { + let workspace = &mut workspace_clone.lock().await; + let _client = &mut client_clone.lock().await; + let db = &mut db_clone.lock().await; + while let Some(Ok(params)) = did_close_stream.next().await { + let input = workspace + .input_from_file_path( + db, + params + .text_document + .uri + .to_file_path() + .unwrap() + .to_str() + .unwrap(), + ) + .unwrap(); + let _ = input.sync(db, None); + } } - }); + }; - let workspace_clone = workspace.clone(); - let client_clone = client.clone(); - let db_clone = db.clone(); - let mut did_close_stream 
= BroadcastStream::new(self.server.dispatch.subscribe_did_close()); - self.runtime.spawn(async move { - let workspace = &mut workspace_clone.lock().await; - let _client = &mut client_clone.lock().await; - let db = &mut db_clone.lock().await; - while let Some(Ok(params)) = did_close_stream.next().await { - let input = workspace - .input_from_file_path( - db, - params - .text_document - .uri - .to_file_path() - .unwrap() - .to_str() - .unwrap(), - ) - .unwrap(); - let _ = input.sync(db, None); - } - }); + let did_change_watched_files_handler = { + let workspace_clone = self.workspace.clone(); + let client_clone = self.client.clone(); + let db_clone = self.db.clone(); + let mut did_change_watched_files_stream = + BroadcastStream::new(messaging.subscribe_did_change_watched_files()); + async move { + let workspace = &mut workspace_clone.lock().await; + let client = &mut client_clone.lock().await; + let db = &mut db_clone.lock().await; - let workspace_clone = workspace.clone(); - let client_clone = client.clone(); - let db_clone = db.clone(); - let mut did_change_watched_files_stream = - BroadcastStream::new(self.server.dispatch.subscribe_did_change_watched_files()); - self.runtime.spawn(async move { - let workspace = &mut workspace_clone.lock().await; - let client = &mut client_clone.lock().await; - let db = &mut db_clone.lock().await; - - while let Some(Ok(params)) = did_change_watched_files_stream.next().await { - let changes = params.changes; - for change in changes { - let uri = change.uri; - let path = uri.to_file_path().unwrap(); - - match change.typ { - lsp_types::FileChangeType::CREATED => { - // TODO: handle this more carefully! - // this is inefficient, a hack for now - let _ = workspace.sync(db); - let input = workspace - .input_from_file_path(db, path.to_str().unwrap()) - .unwrap(); - let _ = input.sync(db, None); - } - lsp_types::FileChangeType::CHANGED => { - let input = workspace - .input_from_file_path(db, path.to_str().unwrap()) - .unwrap(); - let _ = input.sync(db, None); + while let Some(Ok(params)) = did_change_watched_files_stream.next().await { + let changes = params.changes; + for change in changes { + let uri = change.uri; + let path = uri.to_file_path().unwrap(); + + match change.typ { + lsp_types::FileChangeType::CREATED => { + // TODO: handle this more carefully! + // this is inefficient, a hack for now + let _ = workspace.sync(db); + let input = workspace + .input_from_file_path(db, path.to_str().unwrap()) + .unwrap(); + let _ = input.sync(db, None); + } + lsp_types::FileChangeType::CHANGED => { + let input = workspace + .input_from_file_path(db, path.to_str().unwrap()) + .unwrap(); + let _ = input.sync(db, None); + } + lsp_types::FileChangeType::DELETED => { + // TODO: handle this more carefully! + // this is inefficient, a hack for now + let _ = workspace.sync(db); + } + _ => {} } - lsp_types::FileChangeType::DELETED => { - // TODO: handle this more carefully! 
- // this is inefficient, a hack for now - let _ = workspace.sync(db); + // collect diagnostics for the file + if change.typ != lsp_types::FileChangeType::DELETED { + let text = std::fs::read_to_string(path).unwrap(); + on_change( + self.client.clone(), + self.workspace.clone(), + self.db.clone(), + TextDocumentItem { + uri: uri.clone(), + language_id: LANGUAGE_ID.to_string(), + version: 0, + text, + }, + ) + .await; } - _ => {} - } - // collect diagnostics for the file - if change.typ != lsp_types::FileChangeType::DELETED { - let text = std::fs::read_to_string(path).unwrap(); - on_change( - client, - workspace, - db, - TextDocumentItem { - uri: uri.clone(), - language_id: LANGUAGE_ID.to_string(), - version: 0, - text, - }, - ) - .await; } } } + }; + // join!( + // init_handler, + // on_change_handler, + // did_close_handler, + // did_change_watched_files_handler + // ); + tokio::spawn(async move { + join!( + init_handler, + on_change_handler, + did_close_handler, + did_change_watched_files_handler + ); }); } } async fn on_change( - client: &mut Client, - workspace: &mut Workspace, - db: &mut LanguageServerDatabase, + client: Arc>, + workspace: Arc>, + db: Arc>, params: TextDocumentItem, ) { + let workspace = &mut workspace.lock().await; + let db = &mut db.lock().await; + let client = &mut client.lock().await; let diagnostics = { // let workspace = &mut workspace.lock().await; // let db = &mut db.lock().await; diff --git a/crates/language-server/src/language_server.rs b/crates/language-server/src/language_server.rs index 5f91d9f90..411fc5688 100644 --- a/crates/language-server/src/language_server.rs +++ b/crates/language-server/src/language_server.rs @@ -1,15 +1,17 @@ -use std::sync::Arc; +use std::{sync::Arc}; + +use log::info; use lsp_types::{ - DidChangeWatchedFilesParams, DidChangeWatchedFilesRegistrationOptions, - DidCloseTextDocumentParams, FileSystemWatcher, GlobPattern, InitializeParams, InitializeResult, - Registration, + DidChangeWatchedFilesParams, DidChangeWatchedFilesRegistrationOptions, DidCloseTextDocumentParams, FileSystemWatcher, GlobPattern, InitializeParams, InitializeResult, Registration }; use tower_lsp::{jsonrpc::Result, Client, LanguageServer}; +use crate::capabilities::server_capabilities; + pub(crate) struct Server { - pub(crate) dispatch: LspChannels, + pub(crate) messaging: Arc>, pub(crate) client: Arc>, } @@ -29,13 +31,13 @@ impl Server { .unwrap(), ), }; - client.register_capability(vec![registration]).await + Ok(client.register_capability(vec![registration]).await?) 
} pub(crate) fn new(client: Client) -> Self { - let dispatch = LspChannels::new(); + let messaging = Arc::new(tokio::sync::Mutex::new(LspChannels::new())); let client = Arc::new(tokio::sync::Mutex::new(client)); - Self { dispatch, client } + Self { messaging, client } } } @@ -43,15 +45,28 @@ impl Server { #[tower_lsp::async_trait] impl LanguageServer for Server { async fn initialize(&self, initialize_params: InitializeParams) -> Result { - let rx = self.dispatch.dispatch_initialize(initialize_params); - // setup logging - let _ = self.init_logger(log::Level::Info); - + // let _ = self.init_logger(log::Level::Info); + // info!("initialized logger"); + // info!("initializing language server: {:?}", initialize_params); + let messaging = self.messaging.lock().await; + let rx = messaging.dispatch_initialize(initialize_params); + info!("awaiting initialization result"); + // let initialize_result = rx.await.unwrap(); // register watchers let _ = self.register_watchers().await; + info!("registered watchers"); - rx.await.unwrap() + info!("received initialization result"); + let capabilities = server_capabilities(); + let initialize_result = lsp_types::InitializeResult { + capabilities, + server_info: Some(lsp_types::ServerInfo { + name: String::from("fe-language-server"), + version: Some(String::from(env!("CARGO_PKG_VERSION"))), + }), + }; + Ok(initialize_result) } async fn shutdown(&self) -> tower_lsp::jsonrpc::Result<()> { @@ -59,18 +74,22 @@ impl LanguageServer for Server { } async fn did_open(&self, params: lsp_types::DidOpenTextDocumentParams) { - self.dispatch.dispatch_did_open(params); + let messaging = self.messaging.lock().await; + messaging.dispatch_did_open(params); } async fn did_change(&self, params: lsp_types::DidChangeTextDocumentParams) { - self.dispatch.dispatch_did_change(params); + let messaging = self.messaging.lock().await; + messaging.dispatch_did_change(params); } async fn did_close(&self, params: DidCloseTextDocumentParams) { - self.dispatch.dispatch_did_close(params); + let messaging = self.messaging.lock().await; + messaging.dispatch_did_close(params); } async fn did_change_watched_files(&self, params: DidChangeWatchedFilesParams) { - self.dispatch.dispatch_did_change_watched_files(params); + let messaging = self.messaging.lock().await; + messaging.dispatch_did_change_watched_files(params); } } diff --git a/crates/language-server/src/main.rs b/crates/language-server/src/main.rs index b6766ede4..f53c4a19f 100644 --- a/crates/language-server/src/main.rs +++ b/crates/language-server/src/main.rs @@ -9,10 +9,15 @@ mod logger; mod util; mod workspace; +use std::sync::Arc; + use backend::Backend; // use backend::Backend; use db::Jar; -use language_server::Server; +use futures::future::join_all; +use language_server::{LspChannels, Server}; +use log::info; +use tower_lsp::Client; mod handlers { pub mod notifications; pub mod request; @@ -20,17 +25,51 @@ mod handlers { #[tokio_macros::main] async fn main() { + // let runtime = tokio::runtime::Builder::new_multi_thread() + // .worker_threads(2) + // .enable_all() + // .build() + // .unwrap(); + + // let runtime2 = tokio::runtime::Builder::new_multi_thread() + // .worker_threads(2) + // .enable_all() + // .build() + // .unwrap(); + let stdin = tokio::io::stdin(); let stdout = tokio::io::stdout(); let (service, socket) = tower_lsp::LspService::build(Server::new).finish(); - let server = service.inner(); + server.init_logger(log::Level::Info).unwrap(); + info!("initialized logger"); + + let client = server.client.clone(); + let 
messaging = server.messaging.clone(); + let messaging_clone = messaging.clone(); + + // tokio::spawn( + + let _ = tokio::join!( + async move { + info!("spawning backend"); + let backend = Backend::new(client, messaging); + let messaging = messaging_clone.lock().await; + backend.setup_streams(&*messaging).await; + info!("setup streams"); + }, + async move { + info!("spawning server"); + tower_lsp::Server::new(stdin, stdout, socket) + .serve(service) + .await; + } + ); + // ); - let backend = Backend::new(server.client.clone(), server); - backend.setup_streams(); + // ); - tower_lsp::Server::new(stdin, stdout, socket) - .serve(service) - .await; + // { + // } } diff --git a/crates/library/std/src/math.fe b/crates/library/std/src/math.fe index bc37ee673..79f6317f0 100644 --- a/crates/library/std/src/math.fe +++ b/crates/library/std/src/math.fe @@ -1,3 +1,4 @@ + pub fn min(_ x: u256, _ y: u256) -> u256 { if x < y { return x From 07eaa4256b0c0d9f896852dec712c7faf84c09d9 Mon Sep 17 00:00:00 2001 From: Micah Date: Fri, 1 Mar 2024 16:21:42 -0600 Subject: [PATCH 16/66] no deadlocks --- crates/language-server/src/backend.rs | 416 ++++++++++-------- crates/language-server/src/language_server.rs | 15 +- crates/language-server/src/main.rs | 33 +- 3 files changed, 258 insertions(+), 206 deletions(-) diff --git a/crates/language-server/src/backend.rs b/crates/language-server/src/backend.rs index 098d5e434..71bc21dac 100644 --- a/crates/language-server/src/backend.rs +++ b/crates/language-server/src/backend.rs @@ -47,193 +47,263 @@ impl Backend { } pub async fn setup_streams( self, - messaging: &LspChannels, // , db: &LanguageServerDatabase, workspace: &Workspace, client: &Client + // messaging: &LspChannels, // , db: &LanguageServerDatabase, workspace: &Workspace, client: &Client ) { - // let db = self.db.clone(); - // let workspace = self.workspace.clone(); - // let client = self.client.clone(); - // let messaging = self.messaging.clone(); - - // info!("hmm, that's weird"); info!("setting up streams"); + info!("what's next"); + + let db_wrapped = self.db.clone(); + let workspace_wrapped = self.workspace.clone(); + let client_wrapped = self.client.clone(); + let messaging = self.messaging.clone(); + let messaging = messaging.lock().await; + + let mut initialized_stream = BroadcastStream::new(messaging.subscribe_initialize()).fuse(); + let mut shutdown_stream = BroadcastStream::new(messaging.subscribe_shutdown()).fuse(); + let did_open_stream = BroadcastStream::new(messaging.subscribe_did_open()).fuse(); + let did_change_stream = BroadcastStream::new(messaging.subscribe_did_change()).fuse(); + let mut change_stream = tokio_stream::StreamExt::merge( + did_open_stream.map_ok(|params| TextDocumentItem { + uri: params.text_document.uri, + language_id: LANGUAGE_ID.to_string(), + version: params.text_document.version, + text: params.text_document.text, + }), + did_change_stream.map_ok(|params| TextDocumentItem { + uri: params.text_document.uri, + language_id: LANGUAGE_ID.to_string(), + version: params.text_document.version, + text: params.content_changes[0].text.clone(), + }), + ) + .fuse(); + let mut did_close_stream = BroadcastStream::new(messaging.subscribe_did_close()).fuse(); + let mut did_change_watched_files_stream = + BroadcastStream::new(messaging.subscribe_did_change_watched_files()).fuse(); - let init_handler = { - let db = self.db.clone(); - let workspace = self.workspace.clone(); - let mut initialized_stream = BroadcastStream::new(messaging.subscribe_initialize()); - async move { - while let 
Some(result) = initialized_stream.next().await { - info!("received initialize request {:?}", result); - if let Ok((initialization_params, responder)) = result { - info!("initializing language server: {:?}", initialization_params); - // setup workspace - let db = &mut db.lock().await; - let workspace = &mut workspace.lock().await; - let _ = workspace.set_workspace_root( - db, - initialization_params - .root_uri - .unwrap() - .to_file_path() - .ok() - .unwrap(), - ); + // This is very important! We absolutely need to drop the messaging lock here. + std::mem::drop(messaging); + info!("streams set up, looping on them now"); + loop { + tokio::select! { + Some(result) = initialized_stream.next() => { + let db = &mut db_wrapped.lock().await; + let workspace = &mut workspace_wrapped.lock().await; + let client = &mut client_wrapped.lock().await; + info!("received initialize request {:?}", result); + if let Ok((initialization_params, responder)) = result { + info!("initializing language server: {:?}", initialization_params); + // setup workspace + let _ = workspace.set_workspace_root( + db, + initialization_params + .root_uri + .unwrap() + .to_file_path() + .ok() + .unwrap(), + ); - info!("initializing language server!"); - // responder.respond(Ok(initialize_result)); + info!("initializing language server!"); + // responder.respond(Ok(initialize_result)); + } + } + // Some(result) = shutdown_stream.next() => { + // if let Ok((_, responder)) = result { + // info!("shutting down language server"); + // responder.respond(Ok(())); + // } + // } + Some(Ok(doc)) = change_stream.next() => { + info!("change detected: {:?}", doc.uri); + on_change(client_wrapped.clone(), workspace_wrapped.clone(), db_wrapped.clone(), doc).await; + } + Some(Ok(params)) = did_close_stream.next() => { + let db = &mut db_wrapped.lock().await; + let workspace = &mut workspace_wrapped.lock().await; + let client = &mut client_wrapped.lock().await; + let input = workspace + .input_from_file_path( + db, + params + .text_document + .uri + .to_file_path() + .unwrap() + .to_str() + .unwrap(), + ) + .unwrap(); + let _ = input.sync(db, None); + } + Some(Ok(params)) = did_change_watched_files_stream.next() => { + let db = &mut db_wrapped.lock().await; + let workspace = &mut workspace_wrapped.lock().await; + let client = &mut client_wrapped.lock().await; + let changes = params.changes; + for change in changes { + let uri = change.uri; + let path = uri.to_file_path().unwrap(); + + match change.typ { + lsp_types::FileChangeType::CREATED => { + // TODO: handle this more carefully! + // this is inefficient, a hack for now + let _ = workspace.sync(db); + let input = workspace + .input_from_file_path(db, path.to_str().unwrap()) + .unwrap(); + let _ = input.sync(db, None); + } + lsp_types::FileChangeType::CHANGED => { + let input = workspace + .input_from_file_path(db, path.to_str().unwrap()) + .unwrap(); + let _ = input.sync(db, None); + } + lsp_types::FileChangeType::DELETED => { + // TODO: handle this more carefully! 
+ // this is inefficient, a hack for now + let _ = workspace.sync(db); + } + _ => {} + } + // collect diagnostics for the file + if change.typ != lsp_types::FileChangeType::DELETED { + let text = std::fs::read_to_string(path).unwrap(); + on_change( + self.client.clone(), + self.workspace.clone(), + self.db.clone(), + TextDocumentItem { + uri: uri.clone(), + language_id: LANGUAGE_ID.to_string(), + version: 0, + text, + }, + ) + .await; + } + } } + } - } - }; - // let mut shutdown_stream = BroadcastStream::new(messaging.subscribe_shutdown()); - // tokio::spawn(async move { - // while let Some(result) = shutdown_stream.next().await { - // if let Ok((_, responder)) = result { - // info!("shutting down language server"); - // responder.respond(Ok(())); - // } - // } - // }); - let on_change_handler = { - let db = self.db.clone(); - let workspace = self.workspace.clone(); - let client = self.client.clone(); + // while let Some(result) = initialized_stream.next().await { + // info!("received initialize request {:?}", result); + // if let Ok((initialization_params, responder)) = result { + // info!("initializing language server: {:?}", initialization_params); + // // setup workspace + // let db = &mut db.lock().await; + // let workspace = &mut workspace.lock().await; + // let _ = workspace.set_workspace_root( + // db, + // initialization_params + // .root_uri + // .unwrap() + // .to_file_path() + // .ok() + // .unwrap(), + // ); - let did_open_stream = BroadcastStream::new(messaging.subscribe_did_open()); - let did_change_stream = BroadcastStream::new(messaging.subscribe_did_change()); + // info!("initializing language server!"); + // // responder.respond(Ok(initialize_result)); + // } + // } + // } - let mut change_stream = tokio_stream::StreamExt::merge( - did_open_stream.map_ok(|params| TextDocumentItem { - uri: params.text_document.uri, - language_id: LANGUAGE_ID.to_string(), - version: params.text_document.version, - text: params.text_document.text, - }), - did_change_stream.map_ok(|params| TextDocumentItem { - uri: params.text_document.uri, - language_id: LANGUAGE_ID.to_string(), - version: params.text_document.version, - text: params.content_changes[0].text.clone(), - }), - ); + // tokio::spawn(async move { + // while let Some(result) = shutdown_stream.next().await { + // if let Ok((_, responder)) = result { + // info!("shutting down language server"); + // responder.respond(Ok(())); + // } + // } + // }); - // let workspace_clone = workspace.clone(); - // let client_clone = client.clone(); - // let db_clone = db.clone(); - async move { - // let workspace = &mut workspace.lock().await; - // let client = &mut client.lock().await; - // let db = &mut db.lock().await; - info!("listening for changes"); - while let Some(Ok(doc)) = change_stream.next().await { - info!("change detected: {:?}", doc.uri); - on_change(client.clone(), workspace.clone(), db.clone(), doc).await; - } - } - }; + // async move { + // info!("listening for changes"); + // while let Some(Ok(doc)) = change_stream.next().await { + // info!("change detected: {:?}", doc.uri); + // on_change(client.clone(), workspace.clone(), db.clone(), doc).await; + // } + // } - let did_close_handler = { - let workspace_clone = self.workspace.clone(); - let client_clone = self.client.clone(); - let db_clone = self.db.clone(); - let mut did_close_stream = BroadcastStream::new(messaging.subscribe_did_close()); - async move { - let workspace = &mut workspace_clone.lock().await; - let _client = &mut client_clone.lock().await; - let db = &mut 
db_clone.lock().await; - while let Some(Ok(params)) = did_close_stream.next().await { - let input = workspace - .input_from_file_path( - db, - params - .text_document - .uri - .to_file_path() - .unwrap() - .to_str() - .unwrap(), - ) - .unwrap(); - let _ = input.sync(db, None); - } - } - }; + // async move { + // let workspace = &mut workspace_clone.lock().await; + // let _client = &mut client_clone.lock().await; + // let db = &mut db_clone.lock().await; + // while let Some(Ok(params)) = did_close_stream.next().await { + // let input = workspace + // .input_from_file_path( + // db, + // params + // .text_document + // .uri + // .to_file_path() + // .unwrap() + // .to_str() + // .unwrap(), + // ) + // .unwrap(); + // let _ = input.sync(db, None); + // } + // } - let did_change_watched_files_handler = { - let workspace_clone = self.workspace.clone(); - let client_clone = self.client.clone(); - let db_clone = self.db.clone(); - let mut did_change_watched_files_stream = - BroadcastStream::new(messaging.subscribe_did_change_watched_files()); - async move { - let workspace = &mut workspace_clone.lock().await; - let client = &mut client_clone.lock().await; - let db = &mut db_clone.lock().await; + // async move { + // let workspace = &mut workspace_clone.lock().await; + // let client = &mut client_clone.lock().await; + // let db = &mut db_clone.lock().await; - while let Some(Ok(params)) = did_change_watched_files_stream.next().await { - let changes = params.changes; - for change in changes { - let uri = change.uri; - let path = uri.to_file_path().unwrap(); + // while let Some(Ok(params)) = did_change_watched_files_stream.next().await { + // let changes = params.changes; + // for change in changes { + // let uri = change.uri; + // let path = uri.to_file_path().unwrap(); - match change.typ { - lsp_types::FileChangeType::CREATED => { - // TODO: handle this more carefully! - // this is inefficient, a hack for now - let _ = workspace.sync(db); - let input = workspace - .input_from_file_path(db, path.to_str().unwrap()) - .unwrap(); - let _ = input.sync(db, None); - } - lsp_types::FileChangeType::CHANGED => { - let input = workspace - .input_from_file_path(db, path.to_str().unwrap()) - .unwrap(); - let _ = input.sync(db, None); - } - lsp_types::FileChangeType::DELETED => { - // TODO: handle this more carefully! - // this is inefficient, a hack for now - let _ = workspace.sync(db); - } - _ => {} - } - // collect diagnostics for the file - if change.typ != lsp_types::FileChangeType::DELETED { - let text = std::fs::read_to_string(path).unwrap(); - on_change( - self.client.clone(), - self.workspace.clone(), - self.db.clone(), - TextDocumentItem { - uri: uri.clone(), - language_id: LANGUAGE_ID.to_string(), - version: 0, - text, - }, - ) - .await; - } - } - } - } - }; - // join!( - // init_handler, - // on_change_handler, - // did_close_handler, - // did_change_watched_files_handler - // ); - tokio::spawn(async move { - join!( - init_handler, - on_change_handler, - did_close_handler, - did_change_watched_files_handler - ); - }); + // match change.typ { + // lsp_types::FileChangeType::CREATED => { + // // TODO: handle this more carefully! 
+ // // this is inefficient, a hack for now + // let _ = workspace.sync(db); + // let input = workspace + // .input_from_file_path(db, path.to_str().unwrap()) + // .unwrap(); + // let _ = input.sync(db, None); + // } + // lsp_types::FileChangeType::CHANGED => { + // let input = workspace + // .input_from_file_path(db, path.to_str().unwrap()) + // .unwrap(); + // let _ = input.sync(db, None); + // } + // lsp_types::FileChangeType::DELETED => { + // // TODO: handle this more carefully! + // // this is inefficient, a hack for now + // let _ = workspace.sync(db); + // } + // _ => {} + // } + // // collect diagnostics for the file + // if change.typ != lsp_types::FileChangeType::DELETED { + // let text = std::fs::read_to_string(path).unwrap(); + // on_change( + // self.client.clone(), + // self.workspace.clone(), + // self.db.clone(), + // TextDocumentItem { + // uri: uri.clone(), + // language_id: LANGUAGE_ID.to_string(), + // version: 0, + // text, + // }, + // ) + // .await; + // } + // } + // } + // } + } } } diff --git a/crates/language-server/src/language_server.rs b/crates/language-server/src/language_server.rs index 411fc5688..cb95538ae 100644 --- a/crates/language-server/src/language_server.rs +++ b/crates/language-server/src/language_server.rs @@ -1,9 +1,10 @@ -use std::{sync::Arc}; - +use std::sync::Arc; use log::info; use lsp_types::{ - DidChangeWatchedFilesParams, DidChangeWatchedFilesRegistrationOptions, DidCloseTextDocumentParams, FileSystemWatcher, GlobPattern, InitializeParams, InitializeResult, Registration + DidChangeWatchedFilesParams, DidChangeWatchedFilesRegistrationOptions, + DidCloseTextDocumentParams, FileSystemWatcher, GlobPattern, InitializeParams, InitializeResult, + Registration, }; use tower_lsp::{jsonrpc::Result, Client, LanguageServer}; @@ -46,11 +47,11 @@ impl Server { impl LanguageServer for Server { async fn initialize(&self, initialize_params: InitializeParams) -> Result { // setup logging - // let _ = self.init_logger(log::Level::Info); - // info!("initialized logger"); + let _ = self.init_logger(log::Level::Info); + info!("initialized logger"); // info!("initializing language server: {:?}", initialize_params); - let messaging = self.messaging.lock().await; - let rx = messaging.dispatch_initialize(initialize_params); + // let messaging = self.messaging.lock().await; + // let rx = messaging.dispatch_initialize(initialize_params); info!("awaiting initialization result"); // let initialize_result = rx.await.unwrap(); // register watchers diff --git a/crates/language-server/src/main.rs b/crates/language-server/src/main.rs index f53c4a19f..28704cc1e 100644 --- a/crates/language-server/src/main.rs +++ b/crates/language-server/src/main.rs @@ -42,34 +42,15 @@ async fn main() { let (service, socket) = tower_lsp::LspService::build(Server::new).finish(); let server = service.inner(); - server.init_logger(log::Level::Info).unwrap(); - info!("initialized logger"); + // server.init_logger(log::Level::Info).unwrap(); + // info!("initialized logger"); let client = server.client.clone(); let messaging = server.messaging.clone(); - let messaging_clone = messaging.clone(); + info!("spawning backend"); + let backend = Backend::new(client, messaging); - // tokio::spawn( - - let _ = tokio::join!( - async move { - info!("spawning backend"); - let backend = Backend::new(client, messaging); - let messaging = messaging_clone.lock().await; - backend.setup_streams(&*messaging).await; - info!("setup streams"); - }, - async move { - info!("spawning server"); - 
tower_lsp::Server::new(stdin, stdout, socket) - .serve(service) - .await; - } - ); - // ); - - // ); - - // { - // } + tokio::spawn(backend.setup_streams()); + info!("spawning server"); + tower_lsp::Server::new(stdin, stdout, socket).serve(service).await; } From 71d48a76d56be02daf043c4b4cb156ceb1626bf1 Mon Sep 17 00:00:00 2001 From: Micah Date: Fri, 1 Mar 2024 16:57:34 -0600 Subject: [PATCH 17/66] no need for state locks! --- crates/language-server-macros/src/lib.rs | 1 - crates/language-server/src/backend.rs | 344 ++++++------------ crates/language-server/src/language_server.rs | 16 +- crates/language-server/src/main.rs | 4 +- 4 files changed, 114 insertions(+), 251 deletions(-) diff --git a/crates/language-server-macros/src/lib.rs b/crates/language-server-macros/src/lib.rs index 37a6dff43..49a269882 100644 --- a/crates/language-server-macros/src/lib.rs +++ b/crates/language-server-macros/src/lib.rs @@ -141,7 +141,6 @@ fn gen_channel_struct(channels: &[LspTypeChannel]) -> proc_macro2::TokenStream { let tx = &channel.tx_name; // let rx = &channel.rx_name; let params = &channel.params; - let params_type = match params { Some(params) => params, None => &unit_type, diff --git a/crates/language-server/src/backend.rs b/crates/language-server/src/backend.rs index 71bc21dac..a10c9ab00 100644 --- a/crates/language-server/src/backend.rs +++ b/crates/language-server/src/backend.rs @@ -1,19 +1,15 @@ use crate::workspace::SyncableIngotFileContext; -use futures::stream::FuturesUnordered; use futures::TryStreamExt; use lsp_types::TextDocumentItem; use std::sync::Arc; -use tokio::join; use tokio::sync::Mutex; -// use tokio::sync::oneshot::Receiver; - use crate::capabilities::server_capabilities; use crate::db::LanguageServerDatabase; use crate::diagnostics::get_diagnostics; use crate::globals::LANGUAGE_ID; -use crate::language_server::{LspChannels, Server}; +use crate::language_server::LspChannels; use crate::workspace::{IngotFileContext, SyncableInputFile, Workspace}; use log::info; @@ -23,37 +19,30 @@ use tokio_stream::StreamExt; use tower_lsp::Client; pub struct Backend { - // pub(crate) server: Arc>, pub(crate) messaging: Arc>, pub(crate) client: Arc>, - pub(crate) db: Arc>, - pub(crate) workspace: Arc>, - // runtime: tokio::runtime::Runtime, + pub(crate) db: LanguageServerDatabase, + pub(crate) workspace: Workspace, } impl Backend { pub fn new(client: Arc>, messaging: Arc>) -> Self { - let workspace = Arc::new(Mutex::new(Workspace::default())); - let db = Arc::new(Mutex::new(LanguageServerDatabase::default())); - // let runtime = tokio::runtime::Runtime::new().unwrap(); + let db = LanguageServerDatabase::default(); + let workspace = Workspace::default(); Self { messaging, client, db, workspace, - // runtime, } } - pub async fn setup_streams( - self, - // messaging: &LspChannels, // , db: &LanguageServerDatabase, workspace: &Workspace, client: &Client - ) { + pub async fn setup_streams(mut self) { info!("setting up streams"); info!("what's next"); + let workspace = &mut self.workspace; + let db = &mut self.db; - let db_wrapped = self.db.clone(); - let workspace_wrapped = self.workspace.clone(); let client_wrapped = self.client.clone(); let messaging = self.messaging.clone(); let messaging = messaging.lock().await; @@ -82,243 +71,125 @@ impl Backend { BroadcastStream::new(messaging.subscribe_did_change_watched_files()).fuse(); // This is very important! We absolutely need to drop the messaging lock here. 
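        // (the LanguageServer handlers in language_server.rs lock this same messaging mutex to dispatch each message; holding the guard across the select! loop below would block them forever)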
+ // TODO: make this more ergonomic and foolproof somehow std::mem::drop(messaging); + info!("streams set up, looping on them now"); loop { tokio::select! { - Some(result) = initialized_stream.next() => { - let db = &mut db_wrapped.lock().await; - let workspace = &mut workspace_wrapped.lock().await; - let client = &mut client_wrapped.lock().await; - info!("received initialize request {:?}", result); - if let Ok((initialization_params, responder)) = result { - info!("initializing language server: {:?}", initialization_params); - // setup workspace - let _ = workspace.set_workspace_root( - db, - initialization_params - .root_uri - .unwrap() - .to_file_path() - .ok() - .unwrap(), - ); - - info!("initializing language server!"); - // responder.respond(Ok(initialize_result)); + Some(result) = initialized_stream.next() => { + info!("received initialize request {:?}", result); + if let Ok((initialization_params, responder)) = result { + info!("initializing language server: {:?}", initialization_params); + // setup workspace + let _ = workspace.set_workspace_root( + db, + initialization_params + .root_uri + .unwrap() + .to_file_path() + .ok() + .unwrap(), + ); + + let capabilities = server_capabilities(); + let initialize_result = lsp_types::InitializeResult { + capabilities, + server_info: Some(lsp_types::ServerInfo { + name: String::from("fe-language-server"), + version: Some(String::from(env!("CARGO_PKG_VERSION"))), + }), + }; + info!("initializing language server!"); + responder.respond(Ok(initialize_result)); + } + } + Some(result) = shutdown_stream.next() => { + if let Ok((_, responder)) = result { + info!("shutting down language server"); + responder.respond(Ok(())); + } + } + Some(Ok(doc)) = change_stream.next() => { + info!("change detected: {:?}", doc.uri); + on_change(client_wrapped.clone(), workspace, db, doc).await; + } + Some(Ok(params)) = did_close_stream.next() => { + let client = &mut client_wrapped.lock().await; + let input = workspace + .input_from_file_path( + db, + params + .text_document + .uri + .to_file_path() + .unwrap() + .to_str() + .unwrap(), + ) + .unwrap(); + let _ = input.sync(db, None); + } + Some(Ok(params)) = did_change_watched_files_stream.next() => { + let changes = params.changes; + for change in changes { + let uri = change.uri; + let path = uri.to_file_path().unwrap(); + + match change.typ { + lsp_types::FileChangeType::CREATED => { + // TODO: handle this more carefully! 
+ // this is inefficient, a hack for now + let _ = workspace.sync(db); + let input = workspace + .input_from_file_path(db, path.to_str().unwrap()) + .unwrap(); + let _ = input.sync(db, None); } - } - // Some(result) = shutdown_stream.next() => { - // if let Ok((_, responder)) = result { - // info!("shutting down language server"); - // responder.respond(Ok(())); - // } - // } - Some(Ok(doc)) = change_stream.next() => { - info!("change detected: {:?}", doc.uri); - on_change(client_wrapped.clone(), workspace_wrapped.clone(), db_wrapped.clone(), doc).await; - } - Some(Ok(params)) = did_close_stream.next() => { - let db = &mut db_wrapped.lock().await; - let workspace = &mut workspace_wrapped.lock().await; - let client = &mut client_wrapped.lock().await; - let input = workspace - .input_from_file_path( - db, - params - .text_document - .uri - .to_file_path() - .unwrap() - .to_str() - .unwrap(), - ) - .unwrap(); - let _ = input.sync(db, None); - } - Some(Ok(params)) = did_change_watched_files_stream.next() => { - let db = &mut db_wrapped.lock().await; - let workspace = &mut workspace_wrapped.lock().await; - let client = &mut client_wrapped.lock().await; - let changes = params.changes; - for change in changes { - let uri = change.uri; - let path = uri.to_file_path().unwrap(); - - match change.typ { - lsp_types::FileChangeType::CREATED => { - // TODO: handle this more carefully! - // this is inefficient, a hack for now - let _ = workspace.sync(db); - let input = workspace - .input_from_file_path(db, path.to_str().unwrap()) - .unwrap(); - let _ = input.sync(db, None); - } - lsp_types::FileChangeType::CHANGED => { - let input = workspace - .input_from_file_path(db, path.to_str().unwrap()) - .unwrap(); - let _ = input.sync(db, None); - } - lsp_types::FileChangeType::DELETED => { - // TODO: handle this more carefully! - // this is inefficient, a hack for now - let _ = workspace.sync(db); - } - _ => {} + lsp_types::FileChangeType::CHANGED => { + let input = workspace + .input_from_file_path(db, path.to_str().unwrap()) + .unwrap(); + let _ = input.sync(db, None); } - // collect diagnostics for the file - if change.typ != lsp_types::FileChangeType::DELETED { - let text = std::fs::read_to_string(path).unwrap(); - on_change( - self.client.clone(), - self.workspace.clone(), - self.db.clone(), - TextDocumentItem { - uri: uri.clone(), - language_id: LANGUAGE_ID.to_string(), - version: 0, - text, - }, - ) - .await; + lsp_types::FileChangeType::DELETED => { + // TODO: handle this more carefully! 
+ // this is inefficient, a hack for now + let _ = workspace.sync(db); } + _ => {} + } + // collect diagnostics for the file + if change.typ != lsp_types::FileChangeType::DELETED { + let text = std::fs::read_to_string(path).unwrap(); + on_change( + self.client.clone(), + workspace, + db, + TextDocumentItem { + uri: uri.clone(), + language_id: LANGUAGE_ID.to_string(), + version: 0, + text, + }, + ) + .await; } } - } - - // while let Some(result) = initialized_stream.next().await { - // info!("received initialize request {:?}", result); - // if let Ok((initialization_params, responder)) = result { - // info!("initializing language server: {:?}", initialization_params); - // // setup workspace - // let db = &mut db.lock().await; - // let workspace = &mut workspace.lock().await; - // let _ = workspace.set_workspace_root( - // db, - // initialization_params - // .root_uri - // .unwrap() - // .to_file_path() - // .ok() - // .unwrap(), - // ); - - // info!("initializing language server!"); - // // responder.respond(Ok(initialize_result)); - // } - // } - // } - - // tokio::spawn(async move { - // while let Some(result) = shutdown_stream.next().await { - // if let Ok((_, responder)) = result { - // info!("shutting down language server"); - // responder.respond(Ok(())); - // } - // } - // }); - - // async move { - // info!("listening for changes"); - // while let Some(Ok(doc)) = change_stream.next().await { - // info!("change detected: {:?}", doc.uri); - // on_change(client.clone(), workspace.clone(), db.clone(), doc).await; - // } - // } - - // async move { - // let workspace = &mut workspace_clone.lock().await; - // let _client = &mut client_clone.lock().await; - // let db = &mut db_clone.lock().await; - // while let Some(Ok(params)) = did_close_stream.next().await { - // let input = workspace - // .input_from_file_path( - // db, - // params - // .text_document - // .uri - // .to_file_path() - // .unwrap() - // .to_str() - // .unwrap(), - // ) - // .unwrap(); - // let _ = input.sync(db, None); - // } - // } - - // async move { - // let workspace = &mut workspace_clone.lock().await; - // let client = &mut client_clone.lock().await; - // let db = &mut db_clone.lock().await; - - // while let Some(Ok(params)) = did_change_watched_files_stream.next().await { - // let changes = params.changes; - // for change in changes { - // let uri = change.uri; - // let path = uri.to_file_path().unwrap(); - - // match change.typ { - // lsp_types::FileChangeType::CREATED => { - // // TODO: handle this more carefully! - // // this is inefficient, a hack for now - // let _ = workspace.sync(db); - // let input = workspace - // .input_from_file_path(db, path.to_str().unwrap()) - // .unwrap(); - // let _ = input.sync(db, None); - // } - // lsp_types::FileChangeType::CHANGED => { - // let input = workspace - // .input_from_file_path(db, path.to_str().unwrap()) - // .unwrap(); - // let _ = input.sync(db, None); - // } - // lsp_types::FileChangeType::DELETED => { - // // TODO: handle this more carefully! 
- // // this is inefficient, a hack for now - // let _ = workspace.sync(db); - // } - // _ => {} - // } - // // collect diagnostics for the file - // if change.typ != lsp_types::FileChangeType::DELETED { - // let text = std::fs::read_to_string(path).unwrap(); - // on_change( - // self.client.clone(), - // self.workspace.clone(), - // self.db.clone(), - // TextDocumentItem { - // uri: uri.clone(), - // language_id: LANGUAGE_ID.to_string(), - // version: 0, - // text, - // }, - // ) - // .await; - // } - // } - // } - // } + } } } } async fn on_change( client: Arc>, - workspace: Arc>, - db: Arc>, + workspace: &mut Workspace, + db: &mut LanguageServerDatabase, params: TextDocumentItem, ) { - let workspace = &mut workspace.lock().await; - let db = &mut db.lock().await; let client = &mut client.lock().await; let diagnostics = { - // let workspace = &mut workspace.lock().await; - // let db = &mut db.lock().await; let input = workspace .input_from_file_path( db, @@ -334,7 +205,6 @@ async fn on_change( get_diagnostics(db, workspace, params.uri.clone()) }; - // let client = client.lock().await; let diagnostics = diagnostics .unwrap() .into_iter() diff --git a/crates/language-server/src/language_server.rs b/crates/language-server/src/language_server.rs index cb95538ae..5e7b16f08 100644 --- a/crates/language-server/src/language_server.rs +++ b/crates/language-server/src/language_server.rs @@ -50,24 +50,16 @@ impl LanguageServer for Server { let _ = self.init_logger(log::Level::Info); info!("initialized logger"); // info!("initializing language server: {:?}", initialize_params); - // let messaging = self.messaging.lock().await; - // let rx = messaging.dispatch_initialize(initialize_params); + let messaging = self.messaging.lock().await; + let rx = messaging.dispatch_initialize(initialize_params); info!("awaiting initialization result"); - // let initialize_result = rx.await.unwrap(); + let initialize_result = rx.await.unwrap(); // register watchers let _ = self.register_watchers().await; info!("registered watchers"); info!("received initialization result"); - let capabilities = server_capabilities(); - let initialize_result = lsp_types::InitializeResult { - capabilities, - server_info: Some(lsp_types::ServerInfo { - name: String::from("fe-language-server"), - version: Some(String::from(env!("CARGO_PKG_VERSION"))), - }), - }; - Ok(initialize_result) + initialize_result } async fn shutdown(&self) -> tower_lsp::jsonrpc::Result<()> { diff --git a/crates/language-server/src/main.rs b/crates/language-server/src/main.rs index 28704cc1e..3cb451f93 100644 --- a/crates/language-server/src/main.rs +++ b/crates/language-server/src/main.rs @@ -52,5 +52,7 @@ async fn main() { tokio::spawn(backend.setup_streams()); info!("spawning server"); - tower_lsp::Server::new(stdin, stdout, socket).serve(service).await; + tower_lsp::Server::new(stdin, stdout, socket) + .serve(service) + .await; } From a9e69a7007008afcc67c31a3600cac5795824793 Mon Sep 17 00:00:00 2001 From: Micah Date: Tue, 5 Mar 2024 10:18:02 -0600 Subject: [PATCH 18/66] goto, hover; prevent panic in LSP channel response --- crates/language-server-macros/src/lib.rs | 81 +++++++++++-------- crates/language-server/src/backend.rs | 14 +++- .../language-server/src/handlers/request.rs | 59 +++++--------- crates/language-server/src/language_server.rs | 21 ++++- crates/language-server/src/main.rs | 8 +- 5 files changed, 100 insertions(+), 83 deletions(-) diff --git a/crates/language-server-macros/src/lib.rs b/crates/language-server-macros/src/lib.rs index 
49a269882..f71096dc2 100644 --- a/crates/language-server-macros/src/lib.rs +++ b/crates/language-server-macros/src/lib.rs @@ -172,13 +172,20 @@ fn gen_channel_struct(channels: &[LspTypeChannel]) -> proc_macro2::TokenStream { tokio::spawn(async move { tokio::time::sleep(tokio::time::Duration::from_millis(100)).await; info!("sending oneshot sender: {:?}", #dispatcher_payload); - broadcast.send((#dispatcher_payload, oneshot)).unwrap(); + match broadcast.send((#dispatcher_payload, oneshot)) { + Ok(_) => info!("sent oneshot sender"), + Err(e) => error!("failed to send oneshot sender"), + } }); info!("returning oneshot receiver: {:?}", rx); rx }, None => quote!{ - self.#tx.send(#dispatcher_payload).unwrap(); + // self.#tx.send(#dispatcher_payload).unwrap(); + match self.#tx.send(#dispatcher_payload) { + Ok(_) => info!("sent notification"), + Err(e) => error!("failed to send notification: {:?}", e), + } }, }; @@ -218,49 +225,53 @@ fn gen_channel_struct(channels: &[LspTypeChannel]) -> proc_macro2::TokenStream { .collect(); quote! { - use std::fmt::Debug; - #[derive(Debug)] - pub struct OneshotResponder{ - sender: std::sync::Arc>>> - } - impl Clone for OneshotResponder { - fn clone(&self) -> OneshotResponder { - Self { - sender: self.sender.clone() + use std::fmt::Debug; + #[derive(Debug)] + pub struct OneshotResponder{ + sender: std::sync::Arc>>> + } + impl Clone for OneshotResponder { + fn clone(&self) -> OneshotResponder { + Self { + sender: self.sender.clone() + } } } - } - impl OneshotResponder { - pub fn from(sender: tokio::sync::oneshot::Sender) -> Self { - Self { - sender: std::sync::Arc::new(std::sync::Mutex::new(Some(sender))) + impl OneshotResponder { + pub fn from(sender: tokio::sync::oneshot::Sender) -> Self { + Self { + sender: std::sync::Arc::new(std::sync::Mutex::new(Some(sender))) + } } - } - pub fn respond(self, response: T) { - info!("responding with: {:?}", response); - let mut sender = self.sender.lock().unwrap(); - // sender.send(response.clone()); - if let Some(sender) = sender.take() { - info!("sending response: {:?} and {:?}", response, sender); - let _ = sender.send(response).unwrap(); + pub fn respond(self, response: T) { + info!("responding with: {:?}", response); + let mut sender = self.sender.lock().unwrap(); + + // sender.send(response.clone()); + if let Some(sender) = sender.take() { + info!("sending response: {:?} and {:?}", response, sender); + match sender.send(response) { + Ok(_) => info!("Response sent successfully"), + Err(e) => error!("Failed to send response: {:?}", e), + } + } } } - } - pub struct LspChannels { - #channel_declarations - } + pub struct LspChannels { + #channel_declarations + } - impl LspChannels { - pub fn new() -> Self { - #channel_instantiations - Self { - #channel_assignments + impl LspChannels { + pub fn new() -> Self { + #channel_instantiations + Self { + #channel_assignments + } } + #dispatch_functions } - #dispatch_functions } - } } diff --git a/crates/language-server/src/backend.rs b/crates/language-server/src/backend.rs index a10c9ab00..bb0606279 100644 --- a/crates/language-server/src/backend.rs +++ b/crates/language-server/src/backend.rs @@ -1,3 +1,4 @@ +use crate::handlers::request::{handle_goto_definition, handle_hover}; use crate::workspace::SyncableIngotFileContext; use futures::TryStreamExt; use lsp_types::TextDocumentItem; @@ -70,6 +71,9 @@ impl Backend { let mut did_change_watched_files_stream = BroadcastStream::new(messaging.subscribe_did_change_watched_files()).fuse(); + let mut hover_stream = 
BroadcastStream::new(messaging.subscribe_hover()).fuse(); + let mut goto_definition_stream = BroadcastStream::new(messaging.subscribe_goto_definition()).fuse(); + // This is very important! We absolutely need to drop the messaging lock here. // TODO: make this more ergonomic and foolproof somehow std::mem::drop(messaging); @@ -115,7 +119,7 @@ impl Backend { on_change(client_wrapped.clone(), workspace, db, doc).await; } Some(Ok(params)) = did_close_stream.next() => { - let client = &mut client_wrapped.lock().await; + let _client = &mut client_wrapped.lock().await; let input = workspace .input_from_file_path( db, @@ -177,6 +181,14 @@ impl Backend { } } } + Some(Ok((params, responder))) = hover_stream.next() => { + let response = handle_hover(db, workspace, params); + responder.respond(response); + } + Some(Ok((params, responder))) = goto_definition_stream.next() => { + let response = handle_goto_definition(db, workspace, params); + responder.respond(response); + } } } } diff --git a/crates/language-server/src/handlers/request.rs b/crates/language-server/src/handlers/request.rs index 3caba33cb..ced172057 100644 --- a/crates/language-server/src/handlers/request.rs +++ b/crates/language-server/src/handlers/request.rs @@ -5,6 +5,7 @@ use hir_analysis::name_resolution::{EarlyResolvedPath, NameRes}; use log::info; use lsp_server::{Response, ResponseError}; use serde::Deserialize; +use tower_lsp::jsonrpc::Result; use crate::{ db::LanguageServerDatabase, @@ -16,16 +17,16 @@ use crate::{ pub fn handle_hover( db: &mut LanguageServerDatabase, workspace: &mut Workspace, - req: lsp_server::Request, -) -> Result<(), anyhow::Error> { + params: lsp_types::HoverParams, +) -> Result> { + info!("handling hover"); // TODO: get more relevant information for the hover - let params = lsp_types::HoverParams::deserialize(req.params)?; let file_path = ¶ms .text_document_position_params .text_document .uri .path(); - let file = std::fs::File::open(file_path)?; + let file = std::fs::File::open(file_path).unwrap(); let reader = std::io::BufReader::new(file); let line = reader .lines() @@ -33,7 +34,7 @@ pub fn handle_hover( .unwrap() .unwrap(); - let file_text = std::fs::read_to_string(file_path)?; + let file_text = std::fs::read_to_string(file_path).unwrap(); // let cursor: Cursor = params.text_document_position_params.position.into(); let cursor: Cursor = to_offset_from_position( @@ -99,29 +100,20 @@ pub fn handle_hover( }), range: None, }; - let _response_message = Response { - id: req.id, - result: Some(serde_json::to_value(result)?), - error: None, - }; - - // state.send_response(response_message)?; - Ok(()) + Ok(Some(result)) } -use lsp_types::TextDocumentPositionParams; +use lsp_types::{lsif::ResultSet, GotoDefinitionResponse, Hover, TextDocumentPositionParams, GotoDefinitionParams}; pub fn handle_goto_definition( db: &mut LanguageServerDatabase, workspace: &mut Workspace, - req: lsp_server::Request, -) -> Result<(), anyhow::Error> { - info!("handling goto definition request: {:?}", req); - let params = TextDocumentPositionParams::deserialize(req.params)?; - + params: GotoDefinitionParams, +) -> Result> { // Convert the position to an offset in the file - let file_text = std::fs::read_to_string(params.text_document.uri.path())?; - let cursor: Cursor = to_offset_from_position(params.position, file_text.as_str()); + let params = params.text_document_position_params; + let file_text = std::fs::read_to_string(params.text_document.uri.path()).ok(); + let cursor: Cursor = to_offset_from_position(params.position, 
file_text.unwrap().as_str()); // Get the module and the goto info let file_path = params.text_document.uri.path(); @@ -139,7 +131,7 @@ pub fn handle_goto_definition( }) => { vec![res.scope()] } - None => return Ok(()), + None => return Ok(None), }; let locations = scopes @@ -163,22 +155,11 @@ pub fn handle_goto_definition( data: None, }); - // Send the response - let response_message = Response { - id: req.id, - result: Some(serde_json::to_value( - lsp_types::GotoDefinitionResponse::Array( - locations - .into_iter() - .filter_map(std::result::Result::ok) - .collect(), - ), - )?), - error, - }; - - info!("goto definition response: {:?}", response_message); - // state.send_response(response_message)?; - Ok(()) + Ok(Some(lsp_types::GotoDefinitionResponse::Array( + locations + .into_iter() + .filter_map(std::result::Result::ok) + .collect(), + ))) } diff --git a/crates/language-server/src/language_server.rs b/crates/language-server/src/language_server.rs index 5e7b16f08..a9bc787ee 100644 --- a/crates/language-server/src/language_server.rs +++ b/crates/language-server/src/language_server.rs @@ -1,6 +1,6 @@ use std::sync::Arc; -use log::info; +use log::{info, error}; use lsp_types::{ DidChangeWatchedFilesParams, DidChangeWatchedFilesRegistrationOptions, DidCloseTextDocumentParams, FileSystemWatcher, GlobPattern, InitializeParams, InitializeResult, @@ -9,8 +9,6 @@ use lsp_types::{ use tower_lsp::{jsonrpc::Result, Client, LanguageServer}; -use crate::capabilities::server_capabilities; - pub(crate) struct Server { pub(crate) messaging: Arc>, pub(crate) client: Arc>, @@ -32,7 +30,7 @@ impl Server { .unwrap(), ), }; - Ok(client.register_capability(vec![registration]).await?) + client.register_capability(vec![registration]).await } pub(crate) fn new(client: Client) -> Self { @@ -85,4 +83,19 @@ impl LanguageServer for Server { let messaging = self.messaging.lock().await; messaging.dispatch_did_change_watched_files(params); } + + async fn hover(&self, params: lsp_types::HoverParams) -> Result> { + let messaging = self.messaging.lock().await; + let rx = messaging.dispatch_hover(params); + rx.await.unwrap() + } + + async fn goto_definition( + &self, + params: lsp_types::GotoDefinitionParams, + ) -> Result> { + let messaging = self.messaging.lock().await; + let rx = messaging.dispatch_goto_definition(params); + rx.await.unwrap() + } } diff --git a/crates/language-server/src/main.rs b/crates/language-server/src/main.rs index 3cb451f93..a539bf42f 100644 --- a/crates/language-server/src/main.rs +++ b/crates/language-server/src/main.rs @@ -9,15 +9,15 @@ mod logger; mod util; mod workspace; -use std::sync::Arc; + use backend::Backend; // use backend::Backend; use db::Jar; -use futures::future::join_all; -use language_server::{LspChannels, Server}; + +use language_server::{Server}; use log::info; -use tower_lsp::Client; + mod handlers { pub mod notifications; pub mod request; From bbef3a3434630de7f9d0b29dfba546161c554929 Mon Sep 17 00:00:00 2001 From: Micah Date: Tue, 5 Mar 2024 10:26:37 -0600 Subject: [PATCH 19/66] formatting --- crates/language-server-macros/src/lib.rs | 72 +++++++++---------- crates/language-server/src/backend.rs | 3 +- .../language-server/src/handlers/request.rs | 10 +-- crates/language-server/src/language_server.rs | 2 +- crates/language-server/src/main.rs | 4 +- 5 files changed, 46 insertions(+), 45 deletions(-) diff --git a/crates/language-server-macros/src/lib.rs b/crates/language-server-macros/src/lib.rs index f71096dc2..6da875edb 100644 --- a/crates/language-server-macros/src/lib.rs 
+++ b/crates/language-server-macros/src/lib.rs @@ -225,53 +225,53 @@ fn gen_channel_struct(channels: &[LspTypeChannel]) -> proc_macro2::TokenStream { .collect(); quote! { - use std::fmt::Debug; - #[derive(Debug)] - pub struct OneshotResponder{ - sender: std::sync::Arc>>> - } - impl Clone for OneshotResponder { - fn clone(&self) -> OneshotResponder { - Self { - sender: self.sender.clone() - } + use std::fmt::Debug; + #[derive(Debug)] + pub struct OneshotResponder{ + sender: std::sync::Arc>>> + } + impl Clone for OneshotResponder { + fn clone(&self) -> OneshotResponder { + Self { + sender: self.sender.clone() } } + } - impl OneshotResponder { - pub fn from(sender: tokio::sync::oneshot::Sender) -> Self { - Self { - sender: std::sync::Arc::new(std::sync::Mutex::new(Some(sender))) - } + impl OneshotResponder { + pub fn from(sender: tokio::sync::oneshot::Sender) -> Self { + Self { + sender: std::sync::Arc::new(std::sync::Mutex::new(Some(sender))) } - pub fn respond(self, response: T) { - info!("responding with: {:?}", response); - let mut sender = self.sender.lock().unwrap(); - - // sender.send(response.clone()); - if let Some(sender) = sender.take() { - info!("sending response: {:?} and {:?}", response, sender); - match sender.send(response) { - Ok(_) => info!("Response sent successfully"), - Err(e) => error!("Failed to send response: {:?}", e), - } + } + pub fn respond(self, response: T) { + info!("responding with: {:?}", response); + let mut sender = self.sender.lock().unwrap(); + + // sender.send(response.clone()); + if let Some(sender) = sender.take() { + info!("sending response: {:?} and {:?}", response, sender); + match sender.send(response) { + Ok(_) => info!("Response sent successfully"), + Err(e) => error!("Failed to send response: {:?}", e), } } } + } - pub struct LspChannels { - #channel_declarations - } + pub struct LspChannels { + #channel_declarations + } - impl LspChannels { - pub fn new() -> Self { - #channel_instantiations - Self { - #channel_assignments - } + impl LspChannels { + pub fn new() -> Self { + #channel_instantiations + Self { + #channel_assignments } - #dispatch_functions } + #dispatch_functions } + } } diff --git a/crates/language-server/src/backend.rs b/crates/language-server/src/backend.rs index bb0606279..5a209eb55 100644 --- a/crates/language-server/src/backend.rs +++ b/crates/language-server/src/backend.rs @@ -72,7 +72,8 @@ impl Backend { BroadcastStream::new(messaging.subscribe_did_change_watched_files()).fuse(); let mut hover_stream = BroadcastStream::new(messaging.subscribe_hover()).fuse(); - let mut goto_definition_stream = BroadcastStream::new(messaging.subscribe_goto_definition()).fuse(); + let mut goto_definition_stream = + BroadcastStream::new(messaging.subscribe_goto_definition()).fuse(); // This is very important! We absolutely need to drop the messaging lock here. 
// TODO: make this more ergonomic and foolproof somehow diff --git a/crates/language-server/src/handlers/request.rs b/crates/language-server/src/handlers/request.rs index ced172057..94ed940bd 100644 --- a/crates/language-server/src/handlers/request.rs +++ b/crates/language-server/src/handlers/request.rs @@ -3,8 +3,8 @@ use std::io::BufRead; use common::input::IngotKind; use hir_analysis::name_resolution::{EarlyResolvedPath, NameRes}; use log::info; -use lsp_server::{Response, ResponseError}; -use serde::Deserialize; +use lsp_server::{ResponseError}; + use tower_lsp::jsonrpc::Result; use crate::{ @@ -103,7 +103,9 @@ pub fn handle_hover( Ok(Some(result)) } -use lsp_types::{lsif::ResultSet, GotoDefinitionResponse, Hover, TextDocumentPositionParams, GotoDefinitionParams}; +use lsp_types::{ + GotoDefinitionParams, GotoDefinitionResponse, Hover, +}; pub fn handle_goto_definition( db: &mut LanguageServerDatabase, @@ -149,7 +151,7 @@ pub fn handle_goto_definition( .collect::>() .join("\n"); - let error = (!errors.is_empty()).then_some(ResponseError { + let _error = (!errors.is_empty()).then_some(ResponseError { code: lsp_types::error_codes::SERVER_CANCELLED as i32, message: errors, data: None, diff --git a/crates/language-server/src/language_server.rs b/crates/language-server/src/language_server.rs index a9bc787ee..7644cf5e4 100644 --- a/crates/language-server/src/language_server.rs +++ b/crates/language-server/src/language_server.rs @@ -1,6 +1,6 @@ use std::sync::Arc; -use log::{info, error}; +use log::{error, info}; use lsp_types::{ DidChangeWatchedFilesParams, DidChangeWatchedFilesRegistrationOptions, DidCloseTextDocumentParams, FileSystemWatcher, GlobPattern, InitializeParams, InitializeResult, diff --git a/crates/language-server/src/main.rs b/crates/language-server/src/main.rs index a539bf42f..3e1111f27 100644 --- a/crates/language-server/src/main.rs +++ b/crates/language-server/src/main.rs @@ -9,13 +9,11 @@ mod logger; mod util; mod workspace; - - use backend::Backend; // use backend::Backend; use db::Jar; -use language_server::{Server}; +use language_server::Server; use log::info; mod handlers { From c9f9984319d6a4aa654a22b13f2f51d572cb1623 Mon Sep 17 00:00:00 2001 From: Micah Date: Tue, 5 Mar 2024 10:39:16 -0600 Subject: [PATCH 20/66] clippy --- .../src/handlers/notifications.rs | 189 ------------------ .../language-server/src/handlers/request.rs | 6 +- crates/language-server/src/main.rs | 1 - crates/language-server/src/workspace.rs | 2 +- 4 files changed, 3 insertions(+), 195 deletions(-) delete mode 100644 crates/language-server/src/handlers/notifications.rs diff --git a/crates/language-server/src/handlers/notifications.rs b/crates/language-server/src/handlers/notifications.rs deleted file mode 100644 index c6cb30a2e..000000000 --- a/crates/language-server/src/handlers/notifications.rs +++ /dev/null @@ -1,189 +0,0 @@ -use anyhow::{Error, Result}; -use fxhash::FxHashMap; -// use log::info; -use serde::Deserialize; - -use crate::{ - backend::Backend, - db::LanguageServerDatabase, - diagnostics::get_diagnostics, - workspace::{IngotFileContext, SyncableIngotFileContext, SyncableInputFile, Workspace}, -}; - -#[cfg(target_arch = "wasm32")] -use crate::util::DummyFilePathConversion; - -fn run_diagnostics( - db: &mut LanguageServerDatabase, - workspace: &mut Workspace, - path: &str, -) -> Vec { - // let db = &mut *state.db.lock().unwrap(); - // let workspace = &mut *state.workspace.lock().unwrap(); - let file_path = path; - let top_mod = workspace.top_mod_from_file_path(db, file_path).unwrap(); 
- db.analyze_top_mod(top_mod); - db.finalize_diags() -} - -pub fn handle_document_did_open( - db: &mut LanguageServerDatabase, - workspace: &mut Workspace, - note: lsp_server::Notification, -) -> Result<(), Error> { - let params = lsp_types::DidOpenTextDocumentParams::deserialize(note.params)?; - { - // let db = &mut *state.db.lock().unwrap(); - // let workspace = &mut *state.workspace.lock().unwrap(); - let input = workspace - .input_from_file_path( - db, - params - .text_document - .uri - .to_file_path() - .unwrap() - .to_str() - .unwrap(), - ) - .unwrap(); - let _ = input.sync(db, None); - } - let diagnostics = get_diagnostics(db, workspace, params.text_document.uri.clone())?; - send_diagnostics(diagnostics) -} - -// Currently this is used to handle document renaming since the "document open" handler is called -// before the "document was renamed" handler. -// -// The fix: handle document renaming more explicitly in the "will rename" flow, along with the document -// rename refactor. -pub fn handle_document_did_close( - db: &mut LanguageServerDatabase, - workspace: &mut Workspace, - _state: &mut Backend, - note: lsp_server::Notification, -) -> Result<(), Error> { - let params = lsp_types::DidCloseTextDocumentParams::deserialize(note.params)?; - // let db = &mut *state.db.lock().unwrap(); - // let workspace = &mut *state.workspace.lock().unwrap(); - let input = workspace - .input_from_file_path( - db, - params - .text_document - .uri - .to_file_path() - .unwrap() - .to_str() - .unwrap(), - ) - .unwrap(); - input.sync(db, None) -} - -pub fn handle_document_did_change( - db: &mut LanguageServerDatabase, - workspace: &mut Workspace, - note: lsp_server::Notification, -) -> Result<(), Error> { - let params = lsp_types::DidChangeTextDocumentParams::deserialize(note.params)?; - { - // let db = &mut *state.db.lock().unwrap(); - // let workspace = &mut *state.workspace.lock().unwrap(); - let input = workspace - .input_from_file_path( - db, - params - .text_document - .uri - .to_file_path() - .unwrap() - .to_str() - .unwrap(), - ) - .unwrap(); - let _ = input.sync(db, Some(params.content_changes[0].text.clone())); - } - let diagnostics = get_diagnostics(db, workspace, params.text_document.uri.clone())?; - // info!("sending diagnostics... 
{:?}", diagnostics); - send_diagnostics(diagnostics) -} - -pub fn send_diagnostics( - // _state: &mut Backend, - diagnostics: FxHashMap>, -) -> Result<(), Error> { - let _results = diagnostics.into_iter().map(|(uri, diags)| { - let result = lsp_types::PublishDiagnosticsParams { - uri, - diagnostics: diags, - version: None, - }; - lsp_server::Message::Notification(lsp_server::Notification { - method: String::from("textDocument/publishDiagnostics"), - params: serde_json::to_value(result).unwrap(), - }) - }); - - // results.for_each(|result| { - // let sender = state.client; - // let _ = sender.send(result); - // }); - - Ok(()) -} - -pub fn handle_watched_file_changes( - db: &mut LanguageServerDatabase, - workspace: &mut Workspace, - note: lsp_server::Notification, -) -> Result<(), Error> { - let params = lsp_types::DidChangeWatchedFilesParams::deserialize(note.params)?; - let changes = params.changes; - let mut diagnostics = FxHashMap::>::default(); - for change in changes { - let uri = change.uri; - let path = uri.to_file_path().unwrap(); - - // TODO: sort out the mutable/immutable borrow issues here - { - // let db = &mut state.db.lock().unwrap(); - // let workspace = &mut state.workspace.lock().unwrap(); - match change.typ { - lsp_types::FileChangeType::CREATED => { - // TODO: handle this more carefully! - // this is inefficient, a hack for now - // let db = state.db.lock().unwrap(); - // let db = &mut state.db.lock().unwrap(); - let _ = workspace.sync(db); - let input = workspace - .input_from_file_path(db, path.to_str().unwrap()) - .unwrap(); - let _ = input.sync(db, None); - } - lsp_types::FileChangeType::CHANGED => { - let input = workspace - .input_from_file_path(db, path.to_str().unwrap()) - .unwrap(); - let _ = input.sync(db, None); - } - lsp_types::FileChangeType::DELETED => { - // TODO: handle this more carefully! 
- // this is inefficient, a hack for now - let _ = workspace.sync(db); - } - _ => {} - } - } - // collect diagnostics for the file - if change.typ != lsp_types::FileChangeType::DELETED { - let diags = get_diagnostics(db, workspace, uri.clone())?; - for (uri, more_diags) in diags { - let diags = diagnostics.entry(uri).or_insert_with(Vec::new); - diags.extend(more_diags); - } - } - } - Ok(()) -} diff --git a/crates/language-server/src/handlers/request.rs b/crates/language-server/src/handlers/request.rs index 94ed940bd..98764a776 100644 --- a/crates/language-server/src/handlers/request.rs +++ b/crates/language-server/src/handlers/request.rs @@ -3,7 +3,6 @@ use std::io::BufRead; use common::input::IngotKind; use hir_analysis::name_resolution::{EarlyResolvedPath, NameRes}; use log::info; -use lsp_server::{ResponseError}; use tower_lsp::jsonrpc::Result; @@ -13,6 +12,7 @@ use crate::{ util::{to_lsp_location_from_scope, to_offset_from_position}, workspace::{IngotFileContext, Workspace}, }; +use lsp_server::ResponseError; pub fn handle_hover( db: &mut LanguageServerDatabase, @@ -103,9 +103,7 @@ pub fn handle_hover( Ok(Some(result)) } -use lsp_types::{ - GotoDefinitionParams, GotoDefinitionResponse, Hover, -}; +use lsp_types::{GotoDefinitionParams, GotoDefinitionResponse, Hover}; pub fn handle_goto_definition( db: &mut LanguageServerDatabase, diff --git a/crates/language-server/src/main.rs b/crates/language-server/src/main.rs index 3e1111f27..11552e4a3 100644 --- a/crates/language-server/src/main.rs +++ b/crates/language-server/src/main.rs @@ -17,7 +17,6 @@ use language_server::Server; use log::info; mod handlers { - pub mod notifications; pub mod request; } diff --git a/crates/language-server/src/workspace.rs b/crates/language-server/src/workspace.rs index 73750654f..951ea042c 100644 --- a/crates/language-server/src/workspace.rs +++ b/crates/language-server/src/workspace.rs @@ -318,7 +318,7 @@ impl Workspace { let previous_ingot_context_file_keys = &ingot_context.files.keys().collect::>(); for path in previous_ingot_context_file_keys { if !actual_paths.contains(path) { - ingot_context.remove_file(db, path); + let _ = ingot_context.remove_file(db, path); } } From a3079dba750fcc2d85c83aa48f2d5ff0e569ff00 Mon Sep 17 00:00:00 2001 From: Micah Date: Tue, 5 Mar 2024 10:58:54 -0600 Subject: [PATCH 21/66] pull some LSP channel code out of proc macro for better debugging --- crates/language-server-macros/src/lib.rs | 52 ++----------------- crates/language-server/src/language_server.rs | 2 + crates/language-server/src/main.rs | 15 +----- .../language-server/src/oneshot_responder.rs | 35 +++++++++++++ 4 files changed, 43 insertions(+), 61 deletions(-) create mode 100644 crates/language-server/src/oneshot_responder.rs diff --git a/crates/language-server-macros/src/lib.rs b/crates/language-server-macros/src/lib.rs index 6da875edb..a2819d267 100644 --- a/crates/language-server-macros/src/lib.rs +++ b/crates/language-server-macros/src/lib.rs @@ -166,22 +166,17 @@ fn gen_channel_struct(channels: &[LspTypeChannel]) -> proc_macro2::TokenStream { let dispatcher_send_payload = match channel.result { Some(result) => quote!{ let (tx, rx) = tokio::sync::oneshot::channel::<#result>(); - // let payload = #dispatcher_payload.clone(); let oneshot = OneshotResponder::from(tx); let broadcast = self.#tx.clone(); - tokio::spawn(async move { - tokio::time::sleep(tokio::time::Duration::from_millis(100)).await; - info!("sending oneshot sender: {:?}", #dispatcher_payload); - match broadcast.send((#dispatcher_payload, oneshot)) { - 
Ok(_) => info!("sent oneshot sender"), - Err(e) => error!("failed to send oneshot sender"), - } - }); + info!("sending oneshot sender: {:?}", #dispatcher_payload); + match broadcast.send((#dispatcher_payload, oneshot)) { + Ok(_) => info!("sent oneshot sender"), + Err(e) => error!("failed to send oneshot sender"), + } info!("returning oneshot receiver: {:?}", rx); rx }, None => quote!{ - // self.#tx.send(#dispatcher_payload).unwrap(); match self.#tx.send(#dispatcher_payload) { Ok(_) => info!("sent notification"), Err(e) => error!("failed to send notification: {:?}", e), @@ -202,8 +197,6 @@ fn gen_channel_struct(channels: &[LspTypeChannel]) -> proc_macro2::TokenStream { }, }; - // Some(result) => quote! { tokio::sync::broadcast::Sender<(#params, OneshotResponder>)> }, - // None => quote! { tokio::sync::broadcast::Sender<#params> }, let subscriber_fn = match params { Some(_params) => quote! { pub fn #subscriber_name(&self) -> #receiver_type { @@ -225,41 +218,6 @@ fn gen_channel_struct(channels: &[LspTypeChannel]) -> proc_macro2::TokenStream { .collect(); quote! { - use std::fmt::Debug; - #[derive(Debug)] - pub struct OneshotResponder{ - sender: std::sync::Arc>>> - } - impl Clone for OneshotResponder { - fn clone(&self) -> OneshotResponder { - Self { - sender: self.sender.clone() - } - } - } - - - impl OneshotResponder { - pub fn from(sender: tokio::sync::oneshot::Sender) -> Self { - Self { - sender: std::sync::Arc::new(std::sync::Mutex::new(Some(sender))) - } - } - pub fn respond(self, response: T) { - info!("responding with: {:?}", response); - let mut sender = self.sender.lock().unwrap(); - - // sender.send(response.clone()); - if let Some(sender) = sender.take() { - info!("sending response: {:?} and {:?}", response, sender); - match sender.send(response) { - Ok(_) => info!("Response sent successfully"), - Err(e) => error!("Failed to send response: {:?}", e), - } - } - } - } - pub struct LspChannels { #channel_declarations } diff --git a/crates/language-server/src/language_server.rs b/crates/language-server/src/language_server.rs index 7644cf5e4..eb0d6e27d 100644 --- a/crates/language-server/src/language_server.rs +++ b/crates/language-server/src/language_server.rs @@ -7,6 +7,8 @@ use lsp_types::{ Registration, }; +use crate::oneshot_responder::OneshotResponder; + use tower_lsp::{jsonrpc::Result, Client, LanguageServer}; pub(crate) struct Server { diff --git a/crates/language-server/src/main.rs b/crates/language-server/src/main.rs index 11552e4a3..201b7a9a1 100644 --- a/crates/language-server/src/main.rs +++ b/crates/language-server/src/main.rs @@ -6,6 +6,7 @@ mod globals; mod goto; mod language_server; mod logger; +mod oneshot_responder; mod util; mod workspace; @@ -22,25 +23,11 @@ mod handlers { #[tokio_macros::main] async fn main() { - // let runtime = tokio::runtime::Builder::new_multi_thread() - // .worker_threads(2) - // .enable_all() - // .build() - // .unwrap(); - - // let runtime2 = tokio::runtime::Builder::new_multi_thread() - // .worker_threads(2) - // .enable_all() - // .build() - // .unwrap(); - let stdin = tokio::io::stdin(); let stdout = tokio::io::stdout(); let (service, socket) = tower_lsp::LspService::build(Server::new).finish(); let server = service.inner(); - // server.init_logger(log::Level::Info).unwrap(); - // info!("initialized logger"); let client = server.client.clone(); let messaging = server.messaging.clone(); diff --git a/crates/language-server/src/oneshot_responder.rs b/crates/language-server/src/oneshot_responder.rs new file mode 100644 index 
000000000..45fcb89d0 --- /dev/null +++ b/crates/language-server/src/oneshot_responder.rs @@ -0,0 +1,35 @@ +use std::fmt::Debug; + +use log::{error, info}; +#[derive(Debug)] +pub struct OneshotResponder { + pub(crate) sender: std::sync::Arc>>>, +} +impl Clone for OneshotResponder { + fn clone(&self) -> OneshotResponder { + Self { + sender: self.sender.clone(), + } + } +} + +impl OneshotResponder { + pub fn from(sender: tokio::sync::oneshot::Sender) -> Self { + Self { + sender: std::sync::Arc::new(std::sync::Mutex::new(Some(sender))), + } + } + pub fn respond(self, response: T) { + info!("responding with: {:?}", response); + let mut sender = self.sender.lock().unwrap(); + + // sender.send(response.clone()); + if let Some(sender) = sender.take() { + info!("sending response: {:?} and {:?}", response, sender); + match sender.send(response) { + Ok(_) => info!("Response sent successfully"), + Err(e) => error!("Failed to send response: {:?}", e), + } + } + } +} From e1e9efbc5c9d839a3e6135d23bc81134aa15352d Mon Sep 17 00:00:00 2001 From: Micah Date: Tue, 5 Mar 2024 14:19:30 -0600 Subject: [PATCH 22/66] avoid unnecessary manual spawns --- crates/language-server/src/language_server.rs | 3 +- crates/language-server/src/logger.rs | 62 +++++++++---------- crates/language-server/src/main.rs | 21 ++++--- 3 files changed, 45 insertions(+), 41 deletions(-) diff --git a/crates/language-server/src/language_server.rs b/crates/language-server/src/language_server.rs index eb0d6e27d..979d98acc 100644 --- a/crates/language-server/src/language_server.rs +++ b/crates/language-server/src/language_server.rs @@ -38,6 +38,7 @@ impl Server { pub(crate) fn new(client: Client) -> Self { let messaging = Arc::new(tokio::sync::Mutex::new(LspChannels::new())); let client = Arc::new(tokio::sync::Mutex::new(client)); + Self { messaging, client } } } @@ -47,8 +48,6 @@ impl Server { impl LanguageServer for Server { async fn initialize(&self, initialize_params: InitializeParams) -> Result { // setup logging - let _ = self.init_logger(log::Level::Info); - info!("initialized logger"); // info!("initializing language server: {:?}", initialize_params); let messaging = self.messaging.lock().await; let rx = messaging.dispatch_initialize(initialize_params); diff --git a/crates/language-server/src/logger.rs b/crates/language-server/src/logger.rs index 0010c8ddf..918a80d58 100644 --- a/crates/language-server/src/logger.rs +++ b/crates/language-server/src/logger.rs @@ -1,13 +1,12 @@ use std::sync::Arc; use log::{Level, LevelFilter, Metadata, Record, SetLoggerError}; +use lsp_types::MessageType; use tower_lsp::Client; -use crate::language_server::Server; - pub struct Logger { pub(crate) level: Level, - pub(crate) client: Arc>, + log_sender: tokio::sync::mpsc::UnboundedSender<(String, MessageType)>, } impl log::Log for Logger { @@ -16,42 +15,43 @@ impl log::Log for Logger { metadata.level() <= logger.level } - // TODO: investigate performance implications of spawning tasks for each log message fn log(&self, record: &Record) { if self.enabled(record.metadata()) { let message = format!("{} - {}", record.level(), record.args()); - let level = record.level(); - let client = self.client.clone(); - tokio::spawn(async move { - let client = client.lock().await; - client - .log_message( - match level { - log::Level::Error => lsp_types::MessageType::ERROR, - log::Level::Warn => lsp_types::MessageType::WARNING, - log::Level::Info => lsp_types::MessageType::INFO, - log::Level::Debug => lsp_types::MessageType::LOG, - log::Level::Trace => 
lsp_types::MessageType::LOG, - }, - message, - ) - .await; - }); + let message_type = match record.level() { + log::Level::Error => MessageType::ERROR, + log::Level::Warn => MessageType::WARNING, + log::Level::Info => MessageType::INFO, + log::Level::Debug => MessageType::LOG, + log::Level::Trace => MessageType::LOG, + }; + self.log_sender.send((message, message_type)).unwrap(); } } fn flush(&self) {} } -impl Server { - pub fn init_logger(&self, level: Level) -> Result<(), SetLoggerError> { - let logger = Logger { - level, - client: self.client.clone(), - }; - let static_logger = Box::leak(Box::new(logger)); - log::set_logger(static_logger)?; - log::set_max_level(LevelFilter::Debug); - Ok(()) +pub fn setup_logger( + level: Level, +) -> Result, SetLoggerError> { + let (log_sender, log_receiver) = + tokio::sync::mpsc::unbounded_channel::<(String, MessageType)>(); + let logger = Logger { level, log_sender }; + let static_logger = Box::leak(Box::new(logger)); + log::set_logger(static_logger)?; + log::set_max_level(LevelFilter::Debug); + Ok(log_receiver) +} + +pub async fn handle_log_messages( + mut rx: tokio::sync::mpsc::UnboundedReceiver<(String, MessageType)>, + client: Arc>, +) -> tokio::sync::mpsc::UnboundedReceiver { + loop { + let (message, message_type) = rx.recv().await.unwrap(); + // let message_type = match + let client = client.lock().await; + client.log_message(message_type, message).await; } } diff --git a/crates/language-server/src/main.rs b/crates/language-server/src/main.rs index 201b7a9a1..d5839f5dd 100644 --- a/crates/language-server/src/main.rs +++ b/crates/language-server/src/main.rs @@ -11,11 +11,11 @@ mod util; mod workspace; use backend::Backend; -// use backend::Backend; use db::Jar; use language_server::Server; -use log::info; + +use crate::logger::{handle_log_messages, setup_logger}; mod handlers { pub mod request; @@ -31,12 +31,17 @@ async fn main() { let client = server.client.clone(); let messaging = server.messaging.clone(); - info!("spawning backend"); let backend = Backend::new(client, messaging); - tokio::spawn(backend.setup_streams()); - info!("spawning server"); - tower_lsp::Server::new(stdin, stdout, socket) - .serve(service) - .await; + let rx = setup_logger(log::Level::Info).unwrap(); + + tokio::select! 
{ + // setup logging + _ = handle_log_messages(rx, server.client.clone()) => {}, + // setup streams + _ = backend.setup_streams() => {}, + // start the server + _ = tower_lsp::Server::new(stdin, stdout, socket) + .serve(service) => {} + } } From 45ef9f36cf0e1ba86ef45c9409147f37d746b0d7 Mon Sep 17 00:00:00 2001 From: Micah Date: Tue, 5 Mar 2024 18:17:16 -0600 Subject: [PATCH 23/66] separate tokio runtime for stream handling --- crates/language-server/src/backend.rs | 2 +- crates/language-server/src/main.rs | 11 +++++++++-- 2 files changed, 10 insertions(+), 3 deletions(-) diff --git a/crates/language-server/src/backend.rs b/crates/language-server/src/backend.rs index 5a209eb55..690e38f6d 100644 --- a/crates/language-server/src/backend.rs +++ b/crates/language-server/src/backend.rs @@ -38,7 +38,7 @@ impl Backend { workspace, } } - pub async fn setup_streams(mut self) { + pub async fn handle_streams(mut self) { info!("setting up streams"); info!("what's next"); let workspace = &mut self.workspace; diff --git a/crates/language-server/src/main.rs b/crates/language-server/src/main.rs index d5839f5dd..9ee9355e3 100644 --- a/crates/language-server/src/main.rs +++ b/crates/language-server/src/main.rs @@ -35,11 +35,18 @@ async fn main() { let rx = setup_logger(log::Level::Info).unwrap(); + // separate runtime for the backend + let backend_runtime = tokio::runtime::Builder::new_multi_thread() + .worker_threads(4) + .enable_all() + .build() + .unwrap(); + + backend_runtime.spawn(backend.handle_streams()); + tokio::select! { // setup logging _ = handle_log_messages(rx, server.client.clone()) => {}, - // setup streams - _ = backend.setup_streams() => {}, // start the server _ = tower_lsp::Server::new(stdin, stdout, socket) .serve(service) => {} From c37db9267d56dd24feb89773657bca17e9eb013f Mon Sep 17 00:00:00 2001 From: Micah Date: Wed, 6 Mar 2024 03:36:35 -0600 Subject: [PATCH 24/66] generated struct name customization --- crates/language-server-macros/src/lib.rs | 63 ++++++++++--------- crates/language-server/src/backend.rs | 6 +- crates/language-server/src/language_server.rs | 26 ++++---- .../test_files/single_ingot/src/foo.fe | 2 +- 4 files changed, 50 insertions(+), 47 deletions(-) diff --git a/crates/language-server-macros/src/lib.rs b/crates/language-server-macros/src/lib.rs index a2819d267..fd796e92d 100644 --- a/crates/language-server-macros/src/lib.rs +++ b/crates/language-server-macros/src/lib.rs @@ -10,11 +10,14 @@ use syn::{parse_macro_input, FnArg, ImplItem, ItemImpl, ReturnType}; /// a struct full of tokio broadcast channels that can be used to signal the server to handle /// defined requests and notifications. #[proc_macro_attribute] -pub fn dispatcher(_attr: TokenStream, item: TokenStream) -> TokenStream { +pub fn message_channels(attr: TokenStream, item: TokenStream) -> TokenStream { + let attr = parse_macro_input!(attr as Option); + let channel_struct_name = format_ident!("{}", attr.map_or("MessageChannels".to_string(), |attr| attr.to_string())); + let lang_server_trait_impl = parse_macro_input!(item as ItemImpl); let method_calls = parse_method_calls(&lang_server_trait_impl); - let channel_struct = gen_channel_struct(&method_calls); + let channel_struct = gen_channel_struct(&method_calls, channel_struct_name); let tokens = quote! 
{ #channel_struct @@ -25,17 +28,17 @@ pub fn dispatcher(_attr: TokenStream, item: TokenStream) -> TokenStream { // item } -struct LspTypeChannel<'a> { +struct MessageTypeChannel<'a> { // handler_name: &'a syn::Ident, tx_name: syn::Ident, - dispatcher_name: syn::Ident, - subscriber_name: syn::Ident, + sender_fn_name: syn::Ident, + subscribe_fn_name: syn::Ident, rx_name: syn::Ident, params: Option<&'a syn::Type>, result: Option<&'a syn::Type>, } -fn parse_method_calls(lang_server_trait: &ItemImpl) -> Vec { +fn parse_method_calls(lang_server_trait: &ItemImpl) -> Vec { let mut calls = Vec::new(); for item in &lang_server_trait.items { @@ -56,16 +59,16 @@ fn parse_method_calls(lang_server_trait: &ItemImpl) -> Vec { let handler_name = &method.sig.ident; let tx_name = format_ident!("{}_tx", handler_name); - let dispatcher_name = format_ident!("dispatch_{}", handler_name); - let subscriber_name = format_ident!("subscribe_{}", handler_name); + let sender_fn_name = format_ident!("send_{}", handler_name); + let subscribe_fn_name = format_ident!("subscribe_{}", handler_name); let rx_name = format_ident!("{}_rx", handler_name); - calls.push(LspTypeChannel { + calls.push(MessageTypeChannel { tx_name, rx_name, - dispatcher_name, - subscriber_name, + sender_fn_name, + subscribe_fn_name, params, result, }); @@ -74,7 +77,7 @@ fn parse_method_calls(lang_server_trait: &ItemImpl) -> Vec { calls } -fn gen_channel_struct(channels: &[LspTypeChannel]) -> proc_macro2::TokenStream { +fn gen_channel_struct(channels: &[MessageTypeChannel], channel_struct_name: syn::Ident) -> proc_macro2::TokenStream { // unit type let unit_type = syn::Type::Tuple(syn::TypeTuple { paren_token: syn::token::Paren::default(), @@ -135,7 +138,7 @@ fn gen_channel_struct(channels: &[LspTypeChannel]) -> proc_macro2::TokenStream { }) .collect(); - let dispatch_functions: proc_macro2::TokenStream = channels + let send_functions: proc_macro2::TokenStream = channels .iter() .map(|channel| { let tx = &channel.tx_name; @@ -145,9 +148,9 @@ fn gen_channel_struct(channels: &[LspTypeChannel]) -> proc_macro2::TokenStream { Some(params) => params, None => &unit_type, }; - let subscriber_name = &channel.subscriber_name; - let dispatcher_name = &channel.dispatcher_name; - let dispatcher_result = match channel.result { + let subscribe_fn_name = &channel.subscribe_fn_name; + let sender_fn_name = &channel.sender_fn_name; + let sender_fn_result = match channel.result { Some(result) => quote!{tokio::sync::oneshot::Receiver<#result>}, None => quote!{()}, // Some(result) => quote! { tokio::sync::broadcast::Receiver<(#params, OneshotResponder>)> }, @@ -158,18 +161,18 @@ fn gen_channel_struct(channels: &[LspTypeChannel]) -> proc_macro2::TokenStream { None => quote! { tokio::sync::broadcast::Receiver<#params_type> }, }; - let dispatcher_payload = match params { + let payload = match params { Some(_params) => quote! { params }, None => quote! 
{ () }, }; - let dispatcher_send_payload = match channel.result { + let send_payload = match channel.result { Some(result) => quote!{ let (tx, rx) = tokio::sync::oneshot::channel::<#result>(); let oneshot = OneshotResponder::from(tx); let broadcast = self.#tx.clone(); - info!("sending oneshot sender: {:?}", #dispatcher_payload); - match broadcast.send((#dispatcher_payload, oneshot)) { + info!("sending oneshot sender: {:?}", #payload); + match broadcast.send((#payload, oneshot)) { Ok(_) => info!("sent oneshot sender"), Err(e) => error!("failed to send oneshot sender"), } @@ -177,7 +180,7 @@ fn gen_channel_struct(channels: &[LspTypeChannel]) -> proc_macro2::TokenStream { rx }, None => quote!{ - match self.#tx.send(#dispatcher_payload) { + match self.#tx.send(#payload) { Ok(_) => info!("sent notification"), Err(e) => error!("failed to send notification: {:?}", e), } @@ -186,25 +189,25 @@ fn gen_channel_struct(channels: &[LspTypeChannel]) -> proc_macro2::TokenStream { let dispatcher_fn = match params { Some(params) => quote! { - pub fn #dispatcher_name(&self, params: #params) -> #dispatcher_result { - #dispatcher_send_payload + pub fn #sender_fn_name(&self, params: #params) -> #sender_fn_result { + #send_payload } }, None => quote! { - pub fn #dispatcher_name(&self) -> #dispatcher_result { - #dispatcher_send_payload + pub fn #sender_fn_name(&self) -> #sender_fn_result { + #send_payload } }, }; let subscriber_fn = match params { Some(_params) => quote! { - pub fn #subscriber_name(&self) -> #receiver_type { + pub fn #subscribe_fn_name(&self) -> #receiver_type { self.#tx.subscribe() } }, None => quote! { - pub fn #subscriber_name(&self) -> #receiver_type { + pub fn #subscribe_fn_name(&self) -> #receiver_type { self.#tx.subscribe() } }, @@ -218,18 +221,18 @@ fn gen_channel_struct(channels: &[LspTypeChannel]) -> proc_macro2::TokenStream { .collect(); quote! 
{ - pub struct LspChannels { + pub struct #channel_struct_name { #channel_declarations } - impl LspChannels { + impl #channel_struct_name { pub fn new() -> Self { #channel_instantiations Self { #channel_assignments } } - #dispatch_functions + #send_functions } } } diff --git a/crates/language-server/src/backend.rs b/crates/language-server/src/backend.rs index 690e38f6d..c622cb8f7 100644 --- a/crates/language-server/src/backend.rs +++ b/crates/language-server/src/backend.rs @@ -10,7 +10,7 @@ use crate::db::LanguageServerDatabase; use crate::diagnostics::get_diagnostics; use crate::globals::LANGUAGE_ID; -use crate::language_server::LspChannels; +use crate::language_server::MessageChannels; use crate::workspace::{IngotFileContext, SyncableInputFile, Workspace}; use log::info; @@ -20,14 +20,14 @@ use tokio_stream::StreamExt; use tower_lsp::Client; pub struct Backend { - pub(crate) messaging: Arc>, + pub(crate) messaging: Arc>, pub(crate) client: Arc>, pub(crate) db: LanguageServerDatabase, pub(crate) workspace: Workspace, } impl Backend { - pub fn new(client: Arc>, messaging: Arc>) -> Self { + pub fn new(client: Arc>, messaging: Arc>) -> Self { let db = LanguageServerDatabase::default(); let workspace = Workspace::default(); diff --git a/crates/language-server/src/language_server.rs b/crates/language-server/src/language_server.rs index 979d98acc..50fba0f72 100644 --- a/crates/language-server/src/language_server.rs +++ b/crates/language-server/src/language_server.rs @@ -12,7 +12,7 @@ use crate::oneshot_responder::OneshotResponder; use tower_lsp::{jsonrpc::Result, Client, LanguageServer}; pub(crate) struct Server { - pub(crate) messaging: Arc>, + pub(crate) messaging: Arc>, pub(crate) client: Arc>, } @@ -36,24 +36,24 @@ impl Server { } pub(crate) fn new(client: Client) -> Self { - let messaging = Arc::new(tokio::sync::Mutex::new(LspChannels::new())); + let messaging = Arc::new(tokio::sync::Mutex::new(MessageChannels::new())); let client = Arc::new(tokio::sync::Mutex::new(client)); Self { messaging, client } } } -#[language_server_macros::dispatcher] +#[language_server_macros::message_channels(MessageChannels)] #[tower_lsp::async_trait] impl LanguageServer for Server { async fn initialize(&self, initialize_params: InitializeParams) -> Result { - // setup logging - // info!("initializing language server: {:?}", initialize_params); + // forward the initialize request to the messaging system let messaging = self.messaging.lock().await; - let rx = messaging.dispatch_initialize(initialize_params); + let rx = messaging.send_initialize(initialize_params); info!("awaiting initialization result"); let initialize_result = rx.await.unwrap(); - // register watchers + + // register file watchers let _ = self.register_watchers().await; info!("registered watchers"); @@ -67,27 +67,27 @@ impl LanguageServer for Server { async fn did_open(&self, params: lsp_types::DidOpenTextDocumentParams) { let messaging = self.messaging.lock().await; - messaging.dispatch_did_open(params); + messaging.send_did_open(params); } async fn did_change(&self, params: lsp_types::DidChangeTextDocumentParams) { let messaging = self.messaging.lock().await; - messaging.dispatch_did_change(params); + messaging.send_did_change(params); } async fn did_close(&self, params: DidCloseTextDocumentParams) { let messaging = self.messaging.lock().await; - messaging.dispatch_did_close(params); + messaging.send_did_close(params); } async fn did_change_watched_files(&self, params: DidChangeWatchedFilesParams) { let messaging = 
self.messaging.lock().await; - messaging.dispatch_did_change_watched_files(params); + messaging.send_did_change_watched_files(params); } async fn hover(&self, params: lsp_types::HoverParams) -> Result> { let messaging = self.messaging.lock().await; - let rx = messaging.dispatch_hover(params); + let rx = messaging.send_hover(params); rx.await.unwrap() } @@ -96,7 +96,7 @@ impl LanguageServer for Server { params: lsp_types::GotoDefinitionParams, ) -> Result> { let messaging = self.messaging.lock().await; - let rx = messaging.dispatch_goto_definition(params); + let rx = messaging.send_goto_definition(params); rx.await.unwrap() } } diff --git a/crates/language-server/test_files/single_ingot/src/foo.fe b/crates/language-server/test_files/single_ingot/src/foo.fe index c2251ee70..178d9ff50 100644 --- a/crates/language-server/test_files/single_ingot/src/foo.fe +++ b/crates/language-server/test_files/single_ingot/src/foo.fe @@ -4,5 +4,5 @@ pub fn foo() { } pub struct Foo { - pub x: i32 + pub x: i32; } \ No newline at end of file From ff33a22292e44baa6ff66853024281e639d242ea Mon Sep 17 00:00:00 2001 From: Micah Date: Wed, 6 Mar 2024 03:45:14 -0600 Subject: [PATCH 25/66] lsp channel responder cleanup --- crates/language-server/src/dispatcher.rs | 0 .../language-server/src/oneshot_responder.rs | 23 ++++++++++++------- 2 files changed, 15 insertions(+), 8 deletions(-) delete mode 100644 crates/language-server/src/dispatcher.rs diff --git a/crates/language-server/src/dispatcher.rs b/crates/language-server/src/dispatcher.rs deleted file mode 100644 index e69de29bb..000000000 diff --git a/crates/language-server/src/oneshot_responder.rs b/crates/language-server/src/oneshot_responder.rs index 45fcb89d0..10c9c1304 100644 --- a/crates/language-server/src/oneshot_responder.rs +++ b/crates/language-server/src/oneshot_responder.rs @@ -1,10 +1,11 @@ use std::fmt::Debug; -use log::{error, info}; +use log::{debug, error}; #[derive(Debug)] pub struct OneshotResponder { pub(crate) sender: std::sync::Arc>>>, } + impl Clone for OneshotResponder { fn clone(&self) -> OneshotResponder { Self { @@ -20,15 +21,21 @@ impl OneshotResponder { } } pub fn respond(self, response: T) { - info!("responding with: {:?}", response); + debug!("responding with: {:?}", response); let mut sender = self.sender.lock().unwrap(); - // sender.send(response.clone()); - if let Some(sender) = sender.take() { - info!("sending response: {:?} and {:?}", response, sender); - match sender.send(response) { - Ok(_) => info!("Response sent successfully"), - Err(e) => error!("Failed to send response: {:?}", e), + match sender.take() { + Some(sender) => { + debug!("sending response: {:?} and {:?}", response, sender); + match sender.send(response) { + Ok(_) => { + debug!("Response sent successfully") + }, + Err(e) => error!("Failed to send response: {:?}", e), + } + } + None => { + error!("OneshotResponder already responded"); } } } From d9c35b6d4d8659f8156a04f45f123485789145f2 Mon Sep 17 00:00:00 2001 From: Micah Date: Wed, 6 Mar 2024 03:54:09 -0600 Subject: [PATCH 26/66] cleanup; remove unneeded lock --- crates/language-server/src/backend.rs | 10 +++------- crates/language-server/src/language_server.rs | 1 + 2 files changed, 4 insertions(+), 7 deletions(-) diff --git a/crates/language-server/src/backend.rs b/crates/language-server/src/backend.rs index c622cb8f7..dfbc367e8 100644 --- a/crates/language-server/src/backend.rs +++ b/crates/language-server/src/backend.rs @@ -40,11 +40,10 @@ impl Backend { } pub async fn handle_streams(mut self) { info!("setting 
up streams"); - info!("what's next"); let workspace = &mut self.workspace; let db = &mut self.db; - let client_wrapped = self.client.clone(); + let client = self.client.clone(); let messaging = self.messaging.clone(); let messaging = messaging.lock().await; @@ -83,9 +82,8 @@ impl Backend { loop { tokio::select! { Some(result) = initialized_stream.next() => { - info!("received initialize request {:?}", result); if let Ok((initialization_params, responder)) = result { - info!("initializing language server: {:?}", initialization_params); + info!("initializing language server!"); // setup workspace let _ = workspace.set_workspace_root( db, @@ -105,7 +103,6 @@ impl Backend { version: Some(String::from(env!("CARGO_PKG_VERSION"))), }), }; - info!("initializing language server!"); responder.respond(Ok(initialize_result)); } } @@ -117,10 +114,9 @@ impl Backend { } Some(Ok(doc)) = change_stream.next() => { info!("change detected: {:?}", doc.uri); - on_change(client_wrapped.clone(), workspace, db, doc).await; + on_change(client.clone(), workspace, db, doc).await; } Some(Ok(params)) = did_close_stream.next() => { - let _client = &mut client_wrapped.lock().await; let input = workspace .input_from_file_path( db, diff --git a/crates/language-server/src/language_server.rs b/crates/language-server/src/language_server.rs index 50fba0f72..a2dda55b8 100644 --- a/crates/language-server/src/language_server.rs +++ b/crates/language-server/src/language_server.rs @@ -50,6 +50,7 @@ impl LanguageServer for Server { // forward the initialize request to the messaging system let messaging = self.messaging.lock().await; let rx = messaging.send_initialize(initialize_params); + info!("awaiting initialization result"); let initialize_result = rx.await.unwrap(); From 8e7d64c5da70003c6a9e85f2e1e92fd9e7131fee Mon Sep 17 00:00:00 2001 From: Micah Date: Wed, 6 Mar 2024 03:57:56 -0600 Subject: [PATCH 27/66] exclude language-server from wasm tests --- .github/workflows/main.yml | 2 +- Makefile | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index aa8f9d7fc..ebceb1540 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -102,7 +102,7 @@ jobs: # wasm-pack needs a Cargo.toml with a 'package' field. # (see https://github.com/rustwasm/wasm-pack/issues/642) # This will still run all tests in the workspace. 
- run: wasm-pack test --node crates/fe --workspace + run: wasm-pack test --node crates/fe --workspace --exclude fe-language-server release: # Only run this when we push a tag diff --git a/Makefile b/Makefile index f0849e099..976cff8b6 100644 --- a/Makefile +++ b/Makefile @@ -71,7 +71,7 @@ docker-wasm-test: --volume "$(shell pwd):/mnt" \ --workdir '/mnt' \ davesque/rust-wasm \ - wasm-pack test --node -- --workspace + wasm-pack test --node -- --workspace --exclude fe-language-server .PHONY: coverage coverage: From 29b219733c2a61f96b5047ebd0d1ab71241a4b52 Mon Sep 17 00:00:00 2001 From: Micah Date: Wed, 6 Mar 2024 03:58:37 -0600 Subject: [PATCH 28/66] formatting --- crates/language-server-macros/src/lib.rs | 10 ++++++++-- crates/language-server/src/oneshot_responder.rs | 2 +- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/crates/language-server-macros/src/lib.rs b/crates/language-server-macros/src/lib.rs index fd796e92d..151739aef 100644 --- a/crates/language-server-macros/src/lib.rs +++ b/crates/language-server-macros/src/lib.rs @@ -12,7 +12,10 @@ use syn::{parse_macro_input, FnArg, ImplItem, ItemImpl, ReturnType}; #[proc_macro_attribute] pub fn message_channels(attr: TokenStream, item: TokenStream) -> TokenStream { let attr = parse_macro_input!(attr as Option); - let channel_struct_name = format_ident!("{}", attr.map_or("MessageChannels".to_string(), |attr| attr.to_string())); + let channel_struct_name = format_ident!( + "{}", + attr.map_or("MessageChannels".to_string(), |attr| attr.to_string()) + ); let lang_server_trait_impl = parse_macro_input!(item as ItemImpl); @@ -77,7 +80,10 @@ fn parse_method_calls(lang_server_trait: &ItemImpl) -> Vec { calls } -fn gen_channel_struct(channels: &[MessageTypeChannel], channel_struct_name: syn::Ident) -> proc_macro2::TokenStream { +fn gen_channel_struct( + channels: &[MessageTypeChannel], + channel_struct_name: syn::Ident, +) -> proc_macro2::TokenStream { // unit type let unit_type = syn::Type::Tuple(syn::TypeTuple { paren_token: syn::token::Paren::default(), diff --git a/crates/language-server/src/oneshot_responder.rs b/crates/language-server/src/oneshot_responder.rs index 10c9c1304..1c094d2ab 100644 --- a/crates/language-server/src/oneshot_responder.rs +++ b/crates/language-server/src/oneshot_responder.rs @@ -30,7 +30,7 @@ impl OneshotResponder { match sender.send(response) { Ok(_) => { debug!("Response sent successfully") - }, + } Err(e) => error!("Failed to send response: {:?}", e), } } From 4b109a51eea6a3ef22219367b7af329bd5466d0a Mon Sep 17 00:00:00 2001 From: Micah Date: Fri, 8 Mar 2024 14:26:31 -0600 Subject: [PATCH 29/66] separate workspace file methods by mutability --- crates/language-server/src/backend.rs | 8 +- crates/language-server/src/goto.rs | 8 +- .../language-server/src/handlers/request.rs | 2 +- crates/language-server/src/workspace.rs | 171 +++++++++++++----- 4 files changed, 135 insertions(+), 54 deletions(-) diff --git a/crates/language-server/src/backend.rs b/crates/language-server/src/backend.rs index dfbc367e8..85148add5 100644 --- a/crates/language-server/src/backend.rs +++ b/crates/language-server/src/backend.rs @@ -118,7 +118,7 @@ impl Backend { } Some(Ok(params)) = did_close_stream.next() => { let input = workspace - .input_from_file_path( + .touch_input_from_file_path( db, params .text_document @@ -143,13 +143,13 @@ impl Backend { // this is inefficient, a hack for now let _ = workspace.sync(db); let input = workspace - .input_from_file_path(db, path.to_str().unwrap()) + .touch_input_from_file_path(db, 
path.to_str().unwrap()) .unwrap(); let _ = input.sync(db, None); } lsp_types::FileChangeType::CHANGED => { let input = workspace - .input_from_file_path(db, path.to_str().unwrap()) + .touch_input_from_file_path(db, path.to_str().unwrap()) .unwrap(); let _ = input.sync(db, None); } @@ -200,7 +200,7 @@ async fn on_change( let client = &mut client.lock().await; let diagnostics = { let input = workspace - .input_from_file_path( + .touch_input_from_file_path( db, params .uri diff --git a/crates/language-server/src/goto.rs b/crates/language-server/src/goto.rs index 038694cf4..730348927 100644 --- a/crates/language-server/src/goto.rs +++ b/crates/language-server/src/goto.rs @@ -131,7 +131,7 @@ mod tests { let fe_source_path = ingot_base_dir.join(fixture.path()); let fe_source_path = fe_source_path.to_str().unwrap(); - let input = workspace.input_from_file_path(db, fixture.path()); + let input = workspace.touch_input_from_file_path(db, fixture.path()); assert_eq!(input.unwrap().ingot(db).kind(db), IngotKind::Local); input @@ -142,7 +142,7 @@ mod tests { .top_mod_from_file_path(db, fe_source_path) .unwrap(); - let ingot = workspace.ingot_from_file_path(db, fixture.path()); + let ingot = workspace.touch_ingot_from_file_path(db, fixture.path()); assert_eq!(ingot.unwrap().kind(db), IngotKind::Local); let cursors = extract_multiple_cursor_positions_from_spans(db, top_mod); @@ -192,7 +192,7 @@ mod tests { fn test_goto_enclosing_path(fixture: Fixture<&str>) { let db = &mut LanguageServerDatabase::default(); let workspace = &mut Workspace::default(); - let input = workspace.input_from_file_path(db, fixture.path()).unwrap(); + let input = workspace.touch_input_from_file_path(db, fixture.path()).unwrap(); input.set_text(db).to((*fixture.content()).to_string()); let top_mod = workspace .top_mod_from_file_path(db, fixture.path()) @@ -247,7 +247,7 @@ mod tests { let workspace = &mut Workspace::default(); workspace - .input_from_file_path(db, fixture.path()) + .touch_input_from_file_path(db, fixture.path()) .unwrap() .set_text(db) .to((*fixture.content()).to_string()); diff --git a/crates/language-server/src/handlers/request.rs b/crates/language-server/src/handlers/request.rs index 98764a776..716acf8f1 100644 --- a/crates/language-server/src/handlers/request.rs +++ b/crates/language-server/src/handlers/request.rs @@ -44,7 +44,7 @@ pub fn handle_hover( // let file_path = std::path::Path::new(file_path); info!("getting hover info for file_path: {:?}", file_path); let ingot = workspace - .input_from_file_path(db, file_path) + .touch_input_from_file_path(db, file_path) .map(|input| input.ingot(db)); // info!("got ingot: {:?} of type {:?}", ingot, ingot.map(|ingot| ingot.kind(&mut state.db))); diff --git a/crates/language-server/src/workspace.rs b/crates/language-server/src/workspace.rs index 951ea042c..792408106 100644 --- a/crates/language-server/src/workspace.rs +++ b/crates/language-server/src/workspace.rs @@ -19,19 +19,29 @@ fn ingot_directory_key(path: String) -> String { } pub trait IngotFileContext { - fn input_from_file_path( + fn touch_input_from_file_path( &mut self, db: &mut LanguageServerDatabase, path: &str, ) -> Option; - fn ingot_from_file_path( + fn get_ingot_from_file_path( + &self, + db: &LanguageServerDatabase, + path: &str, + ) -> Option; + fn touch_ingot_from_file_path( &mut self, db: &mut LanguageServerDatabase, path: &str, ) -> Option; + fn get_input_from_file_path( + &self, + db: &LanguageServerDatabase, + path: &str, + ) -> Option; fn top_mod_from_file_path( - &mut self, - db: &mut 
LanguageServerDatabase, + &self, + db: &LanguageServerDatabase, path: &str, ) -> Option; fn rename_file( @@ -55,7 +65,7 @@ fn ingot_contains_file(ingot_path: &str, file_path: &str) -> bool { file_path.starts_with(ingot_path) } -pub fn get_containing_ingot<'a, T>( +pub fn get_containing_ingot_mut<'a, T>( ingots: &'a mut StringPatriciaMap, path: &'a str, ) -> Option<&'a mut T> { @@ -65,6 +75,16 @@ pub fn get_containing_ingot<'a, T>( .map(|(_, ingot)| ingot) } +pub fn get_containing_ingot<'a, T>( + ingots: &'a StringPatriciaMap, + path: &'a str, +) -> Option<&'a T> { + ingots + .get_longest_common_prefix(path) + .filter(|(ingot_path, _)| ingot_contains_file(ingot_path, path)) + .map(|(_, ingot)| ingot) +} + impl LocalIngotContext { pub fn new(db: &LanguageServerDatabase, config_path: &str) -> Option { let ingot = InputIngot::new( @@ -82,12 +102,12 @@ impl LocalIngotContext { } impl IngotFileContext for LocalIngotContext { - fn input_from_file_path( + fn touch_input_from_file_path( &mut self, db: &mut LanguageServerDatabase, path: &str, ) -> Option { - let ingot = self.ingot_from_file_path(db, path)?; + let ingot = self.touch_ingot_from_file_path(db, path)?; let input = self.files.get(path).map_or_else( || { let file = InputFile::new(db, ingot, path.into(), String::new()); @@ -99,7 +119,15 @@ impl IngotFileContext for LocalIngotContext { input } - fn ingot_from_file_path( + fn get_input_from_file_path( + &self, + _db: &LanguageServerDatabase, + path: &str, + ) -> Option { + self.files.get(path).copied() + } + + fn touch_ingot_from_file_path( &mut self, _db: &mut LanguageServerDatabase, _path: &str, @@ -107,12 +135,20 @@ impl IngotFileContext for LocalIngotContext { Some(self.ingot) } + fn get_ingot_from_file_path( + &self, + _db: &LanguageServerDatabase, + _path: &str, + ) -> Option { + Some(self.ingot) + } + fn top_mod_from_file_path( - &mut self, - db: &mut LanguageServerDatabase, + &self, + db: &LanguageServerDatabase, path: &str, ) -> Option { - let file = self.input_from_file_path(db, path)?; + let file = self.get_input_from_file_path(db, path)?; Some(map_file_to_mod(db, file)) } @@ -154,12 +190,12 @@ impl StandaloneIngotContext { } impl IngotFileContext for StandaloneIngotContext { - fn input_from_file_path( + fn touch_input_from_file_path( &mut self, db: &mut LanguageServerDatabase, path: &str, ) -> Option { - let ingot = self.ingot_from_file_path(db, path)?; + let ingot = self.touch_ingot_from_file_path(db, path)?; let input_file = self.files.get(path).map_or_else( || { let file = InputFile::new(db, ingot, path.into(), String::new()); @@ -173,12 +209,20 @@ impl IngotFileContext for StandaloneIngotContext { input_file } - fn ingot_from_file_path( + fn get_input_from_file_path( + &self, + _db: &LanguageServerDatabase, + path: &str, + ) -> Option { + self.files.get(path).copied() + } + + fn touch_ingot_from_file_path( &mut self, _db: &mut LanguageServerDatabase, path: &str, ) -> Option { - get_containing_ingot(&mut self.ingots, path) + get_containing_ingot_mut(&mut self.ingots, path) .as_deref() .copied() .map_or_else( @@ -197,12 +241,23 @@ impl IngotFileContext for StandaloneIngotContext { ) } + fn get_ingot_from_file_path( + &self, + _db: &LanguageServerDatabase, + path: &str, + ) -> Option { + // this shouldn't mutate, it should only get the ingot or return `None` + get_containing_ingot(&self.ingots, path) + .as_deref() + .copied() + } + fn top_mod_from_file_path( - &mut self, - db: &mut LanguageServerDatabase, + &self, + db: &LanguageServerDatabase, path: &str, ) -> Option { - let 
file = self.input_from_file_path(db, path)?; + let file = self.get_input_from_file_path(db, path)?; Some(map_file_to_mod(db, file)) } @@ -324,7 +379,7 @@ impl Workspace { for path in actual_paths { if !previous_ingot_context_file_keys.contains(path) { - let file = ingot_context.input_from_file_path(db, path); + let file = ingot_context.touch_input_from_file_path(db, path); let contents = std::fs::read_to_string(path).unwrap(); file.unwrap().set_text(db).to(contents); } @@ -355,38 +410,64 @@ impl Workspace { } impl IngotFileContext for Workspace { - fn input_from_file_path( + fn touch_input_from_file_path( &mut self, db: &mut LanguageServerDatabase, path: &str, ) -> Option { - let ctx = get_containing_ingot(&mut self.ingot_contexts, path); + let ctx = get_containing_ingot_mut(&mut self.ingot_contexts, path); if let Some(ctx) = ctx { - ctx.input_from_file_path(db, path) + ctx.touch_input_from_file_path(db, path) } else { - self.standalone_ingot_context.input_from_file_path(db, path) + self.standalone_ingot_context.touch_input_from_file_path(db, path) } } - fn ingot_from_file_path( + fn get_input_from_file_path( + &self, + db: &LanguageServerDatabase, + path: &str, + ) -> Option { + let ctx = get_containing_ingot(&self.ingot_contexts, path); + if let Some(ctx) = ctx { + ctx.get_input_from_file_path(db, path) + } else { + self.standalone_ingot_context.get_input_from_file_path(db, path) + } + } + + fn touch_ingot_from_file_path( &mut self, db: &mut LanguageServerDatabase, path: &str, ) -> Option { - let ctx = get_containing_ingot(&mut self.ingot_contexts, path); + let ctx = get_containing_ingot_mut(&mut self.ingot_contexts, path); + if let Some(ctx) = ctx { + Some(ctx.touch_ingot_from_file_path(db, path).unwrap()) + } else { + self.standalone_ingot_context.touch_ingot_from_file_path(db, path) + } + } + + fn get_ingot_from_file_path( + &self, + db: &LanguageServerDatabase, + path: &str, + ) -> Option { + let ctx = get_containing_ingot(&self.ingot_contexts, path); if let Some(ctx) = ctx { - Some(ctx.ingot_from_file_path(db, path).unwrap()) + ctx.get_ingot_from_file_path(db, path) } else { - self.standalone_ingot_context.ingot_from_file_path(db, path) + self.standalone_ingot_context.get_ingot_from_file_path(db, path) } } fn top_mod_from_file_path( - &mut self, - db: &mut LanguageServerDatabase, + &self, + db: &LanguageServerDatabase, path: &str, ) -> Option { - let ctx = get_containing_ingot(&mut self.ingot_contexts, path); + let ctx = get_containing_ingot(&self.ingot_contexts, path); if let Some(ctx) = ctx { Some(ctx.top_mod_from_file_path(db, path).unwrap()) } else { @@ -401,7 +482,7 @@ impl IngotFileContext for Workspace { old_path: &str, new_path: &str, ) -> Result<()> { - let ctx = get_containing_ingot(&mut self.ingot_contexts, old_path); + let ctx = get_containing_ingot_mut(&mut self.ingot_contexts, old_path); if let Some(ctx) = ctx { ctx.rename_file(db, old_path, new_path) } else { @@ -411,7 +492,7 @@ impl IngotFileContext for Workspace { } fn remove_file(&mut self, db: &mut LanguageServerDatabase, path: &str) -> Result<()> { - let ctx = get_containing_ingot(&mut self.ingot_contexts, path); + let ctx = get_containing_ingot_mut(&mut self.ingot_contexts, path); if let Some(ctx) = ctx { ctx.remove_file(db, path) } else { @@ -500,7 +581,7 @@ impl SyncableIngotFileContext for Workspace { #[cfg(test)] mod tests { - use crate::workspace::{get_containing_ingot, IngotFileContext, Workspace, FE_CONFIG_SUFFIX}; + use crate::workspace::{get_containing_ingot_mut, IngotFileContext, Workspace, 
FE_CONFIG_SUFFIX}; use std::path::PathBuf; use super::StandaloneIngotContext; @@ -511,11 +592,11 @@ mod tests { let file_path = "tests/data/ingot1/src/main.fe"; let ctx = &mut StandaloneIngotContext::new(); - let file = ctx.input_from_file_path(&mut db, file_path); + let file = ctx.touch_input_from_file_path(&mut db, file_path); assert!(file.is_some()); - let ingot = ctx.ingot_from_file_path(&mut db, file_path); + let ingot = ctx.touch_ingot_from_file_path(&mut db, file_path); assert!(ingot.is_some()); assert_eq!( ingot.unwrap().kind(&db), @@ -529,7 +610,7 @@ mod tests { let mut workspace = Workspace::default(); let mut db = crate::db::LanguageServerDatabase::default(); let file_path = "tests/data/ingot1/src/main.fe"; - let file = workspace.input_from_file_path(&mut db, file_path); + let file = workspace.touch_input_from_file_path(&mut db, file_path); assert!(file.is_some()); } @@ -556,12 +637,12 @@ mod tests { .get_longest_common_prefix(file_path) .is_some()); - let containing_ingot = get_containing_ingot(&mut workspace.ingot_contexts, file_path); + let containing_ingot = get_containing_ingot_mut(&mut workspace.ingot_contexts, file_path); assert!(containing_ingot.as_deref().is_some()); let ingot = workspace - .ingot_from_file_path(&mut crate::db::LanguageServerDatabase::default(), file_path); + .touch_ingot_from_file_path(&mut crate::db::LanguageServerDatabase::default(), file_path); assert!(ingot.is_some()); } @@ -579,10 +660,10 @@ mod tests { }; let file_path = "tests/data/ingot1/src/main.fe"; - let file = workspace.input_from_file_path(&mut db, file_path); + let file = workspace.touch_input_from_file_path(&mut db, file_path); assert!(file.is_some()); - let ingot = workspace.ingot_from_file_path(&mut db, file_path); + let ingot = workspace.touch_ingot_from_file_path(&mut db, file_path); assert!(ingot.is_some()); assert_eq!(file.map(|f| f.ingot(&db)).unwrap(), ingot.unwrap()); @@ -611,7 +692,7 @@ mod tests { assert_eq!(workspace.ingot_contexts.len(), 1); let fe_source_path = ingot_base_dir.join("src/main.fe"); - let input = workspace.input_from_file_path(&mut db, fe_source_path.to_str().unwrap()); + let input = workspace.touch_input_from_file_path(&mut db, fe_source_path.to_str().unwrap()); assert!(input.is_some()); assert!(input.unwrap().ingot(&db).kind(&db) == common::input::IngotKind::Local); } @@ -644,7 +725,7 @@ mod tests { .collect::>(); for src_path in fe_files { - let _file = workspace.input_from_file_path(&mut db, &src_path).unwrap(); + let _file = workspace.touch_input_from_file_path(&mut db, &src_path).unwrap(); // normally would do this but it's not relevant here... 
// file.sync(&mut db, None); @@ -683,7 +764,7 @@ mod tests { let foo_files = foo_context.files.keys().collect::>(); for file in foo_files { let contents = std::fs::read_to_string(&file).unwrap(); - let file = foo_context.input_from_file_path(&mut db, &file).unwrap(); + let file = foo_context.touch_input_from_file_path(&mut db, &file).unwrap(); assert!(*file.text(&db) == contents); } @@ -700,7 +781,7 @@ mod tests { workspace.sync_local_ingots(&mut db, &messy_workspace_path); let dangling_file = workspace - .input_from_file_path(&mut db, &dangling_path) + .touch_input_from_file_path(&mut db, &dangling_path) .unwrap(); assert_eq!( @@ -721,7 +802,7 @@ mod tests { let non_dangling_file_path = format!("{crate_dir}/test_files/messy/foo/bar/src/main.fe"); let non_dangling_input = workspace - .input_from_file_path(&mut db, &non_dangling_file_path) + .touch_input_from_file_path(&mut db, &non_dangling_file_path) .unwrap(); assert_eq!( From e2dd3850bf784b2c508fa75205913ca9abda649e Mon Sep 17 00:00:00 2001 From: Micah Date: Fri, 8 Mar 2024 15:48:28 -0600 Subject: [PATCH 30/66] language server get rid of unnecessary mutable references --- crates/language-server/src/backend.rs | 65 ++++++++++++++--------- crates/language-server/src/db.rs | 21 ++------ crates/language-server/src/diagnostics.rs | 12 ++--- 3 files changed, 52 insertions(+), 46 deletions(-) diff --git a/crates/language-server/src/backend.rs b/crates/language-server/src/backend.rs index 85148add5..9d3f5f089 100644 --- a/crates/language-server/src/backend.rs +++ b/crates/language-server/src/backend.rs @@ -114,7 +114,8 @@ impl Backend { } Some(Ok(doc)) = change_stream.next() => { info!("change detected: {:?}", doc.uri); - on_change(client.clone(), workspace, db, doc).await; + update_inputs(workspace, db, &doc).await; + handle_diagnostics(client.clone(), workspace, db, &doc).await; } Some(Ok(params)) = did_close_stream.next() => { let input = workspace @@ -163,15 +164,21 @@ impl Backend { // collect diagnostics for the file if change.typ != lsp_types::FileChangeType::DELETED { let text = std::fs::read_to_string(path).unwrap(); - on_change( + update_inputs(workspace, db, &TextDocumentItem { + uri: uri.clone(), + language_id: LANGUAGE_ID.to_string(), + version: 0, + text: text.clone(), + }).await; + handle_diagnostics( self.client.clone(), workspace, db, - TextDocumentItem { + &TextDocumentItem { uri: uri.clone(), language_id: LANGUAGE_ID.to_string(), version: 0, - text, + text: text, }, ) .await; @@ -191,34 +198,44 @@ impl Backend { } } -async fn on_change( - client: Arc>, +async fn update_inputs( workspace: &mut Workspace, db: &mut LanguageServerDatabase, - params: TextDocumentItem, + params: &TextDocumentItem, +) { + let input = workspace + .touch_input_from_file_path( + db, + params + .uri + .to_file_path() + .expect("Failed to convert URI to file path") + .to_str() + .expect("Failed to convert file path to string"), + ) + .unwrap(); + let _ = input.sync(db, Some(params.text.clone())); +} + +async fn handle_diagnostics( + client: Arc>, + workspace: &Workspace, + db: &LanguageServerDatabase, + params: &TextDocumentItem, ) { - let client = &mut client.lock().await; - let diagnostics = { - let input = workspace - .touch_input_from_file_path( - db, - params - .uri - .to_file_path() - .expect("Failed to convert URI to file path") - .to_str() - .expect("Failed to convert file path to string"), - ) - .unwrap(); - let _ = input.sync(db, Some(params.text)); - get_diagnostics(db, workspace, params.uri.clone()) - }; + // let client = &mut 
client.lock().await; + let diagnostics = get_diagnostics(db, workspace, params.uri.clone()); let diagnostics = diagnostics .unwrap() .into_iter() - .map(|(uri, diags)| client.publish_diagnostics(uri, diags, None)) + .map(|(uri, diags)| async { + let client = client.clone(); + let client = client.lock().await; + client.publish_diagnostics(uri, diags, None).await + }) .collect::>(); + futures::future::join_all(diagnostics).await; } diff --git a/crates/language-server/src/db.rs b/crates/language-server/src/db.rs index 3bdadfd23..7c1d0a80e 100644 --- a/crates/language-server/src/db.rs +++ b/crates/language-server/src/db.rs @@ -36,23 +36,13 @@ impl LanguageServerDb for DB where )] pub struct LanguageServerDatabase { storage: salsa::Storage, - diags: Vec>, } impl LanguageServerDatabase { - pub fn analyze_top_mod(&mut self, top_mod: TopLevelMod) { - self.run_on_file_with_pass_manager(top_mod, initialize_analysis_pass); - } - - pub fn run_on_file_with_pass_manager(&mut self, top_mod: TopLevelMod, pm_builder: F) - where - F: FnOnce(&Self) -> AnalysisPassManager<'_>, + pub fn analyze_top_mod(&self, top_mod: TopLevelMod) -> Vec> { - self.diags.clear(); - self.diags = { - let mut pass_manager = pm_builder(self); - pass_manager.run_on_module(top_mod) - }; + let mut pass_manager = initialize_analysis_pass(self); + pass_manager.run_on_module(top_mod) } pub fn find_enclosing_item( @@ -85,8 +75,8 @@ impl LanguageServerDatabase { smallest_enclosing_item } - pub fn finalize_diags(&self) -> Vec { - let mut diags: Vec<_> = self.diags.iter().map(|d| d.to_complete(self)).collect(); + pub fn finalize_diags(&self, diags: Vec>) -> Vec { + let mut diags: Vec<_> = diags.iter().map(|d| d.to_complete(self)).collect(); diags.sort_by(|lhs, rhs| match lhs.error_code.cmp(&rhs.error_code) { std::cmp::Ordering::Equal => lhs.primary_span().cmp(&rhs.primary_span()), ord => ord, @@ -103,7 +93,6 @@ impl Default for LanguageServerDatabase { fn default() -> Self { let db = Self { storage: Default::default(), - diags: Vec::new(), }; db.prefill(); db diff --git a/crates/language-server/src/diagnostics.rs b/crates/language-server/src/diagnostics.rs index 40e4fa59b..17b3247b6 100644 --- a/crates/language-server/src/diagnostics.rs +++ b/crates/language-server/src/diagnostics.rs @@ -128,19 +128,19 @@ impl<'a> cs_files::Files<'a> for LanguageServerDatabase { } fn run_diagnostics( - db: &mut LanguageServerDatabase, - workspace: &mut Workspace, + db: &LanguageServerDatabase, + workspace: &Workspace, path: &str, ) -> Vec { let file_path = path; let top_mod = workspace.top_mod_from_file_path(db, file_path).unwrap(); - db.analyze_top_mod(top_mod); - db.finalize_diags() + let diags = db.analyze_top_mod(top_mod); + db.finalize_diags(diags) } pub fn get_diagnostics( - db: &mut LanguageServerDatabase, - workspace: &mut Workspace, + db: &LanguageServerDatabase, + workspace: &Workspace, uri: lsp_types::Url, ) -> Result>, Error> { let diags = run_diagnostics(db, workspace, uri.to_file_path().unwrap().to_str().unwrap()); From 6356ba41627e6b1c9dd1e06279f789737c8be3b1 Mon Sep 17 00:00:00 2001 From: Micah Date: Fri, 8 Mar 2024 16:48:08 -0600 Subject: [PATCH 31/66] language server: even Arc/RwLock wrapped salsadb is not Send/Sync --- crates/language-server/src/backend.rs | 135 ++++++++++-------- crates/language-server/src/db.rs | 2 +- crates/language-server/src/goto.rs | 2 +- .../language-server/src/handlers/request.rs | 34 ++--- crates/language-server/src/language_server.rs | 24 ++-- crates/language-server/src/logger.rs | 4 +- 
crates/language-server/src/main.rs | 10 +- 7 files changed, 113 insertions(+), 98 deletions(-) diff --git a/crates/language-server/src/backend.rs b/crates/language-server/src/backend.rs index 9d3f5f089..eb837c145 100644 --- a/crates/language-server/src/backend.rs +++ b/crates/language-server/src/backend.rs @@ -3,7 +3,8 @@ use crate::workspace::SyncableIngotFileContext; use futures::TryStreamExt; use lsp_types::TextDocumentItem; use std::sync::Arc; -use tokio::sync::Mutex; +use tokio::sync::RwLock; +// use tokio::sync::Mutex; use crate::capabilities::server_capabilities; use crate::db::LanguageServerDatabase; @@ -20,15 +21,15 @@ use tokio_stream::StreamExt; use tower_lsp::Client; pub struct Backend { - pub(crate) messaging: Arc>, - pub(crate) client: Arc>, - pub(crate) db: LanguageServerDatabase, + pub(crate) messaging: Arc>, + pub(crate) client: Arc>, + pub(crate) db: Arc>, pub(crate) workspace: Workspace, } impl Backend { - pub fn new(client: Arc>, messaging: Arc>) -> Self { - let db = LanguageServerDatabase::default(); + pub fn new(client: Arc>, messaging: Arc>) -> Self { + let db = Arc::new(RwLock::new(LanguageServerDatabase::default())); let workspace = Workspace::default(); Self { @@ -41,11 +42,8 @@ impl Backend { pub async fn handle_streams(mut self) { info!("setting up streams"); let workspace = &mut self.workspace; - let db = &mut self.db; - - let client = self.client.clone(); let messaging = self.messaging.clone(); - let messaging = messaging.lock().await; + let messaging = messaging.read().await; let mut initialized_stream = BroadcastStream::new(messaging.subscribe_initialize()).fuse(); let mut shutdown_stream = BroadcastStream::new(messaging.subscribe_shutdown()).fuse(); @@ -85,8 +83,11 @@ impl Backend { if let Ok((initialization_params, responder)) = result { info!("initializing language server!"); // setup workspace + + let db = self.db.clone(); + let mut db_write = db.write().await; let _ = workspace.set_workspace_root( - db, + &mut db_write, initialization_params .root_uri .unwrap() @@ -114,66 +115,73 @@ impl Backend { } Some(Ok(doc)) = change_stream.next() => { info!("change detected: {:?}", doc.uri); - update_inputs(workspace, db, &doc).await; - handle_diagnostics(client.clone(), workspace, db, &doc).await; - } - Some(Ok(params)) = did_close_stream.next() => { - let input = workspace - .touch_input_from_file_path( - db, - params - .text_document - .uri - .to_file_path() - .unwrap() - .to_str() - .unwrap(), - ) - .unwrap(); - let _ = input.sync(db, None); + update_inputs(workspace, self.db.clone(), &doc).await; + handle_diagnostics(self.client.clone(), workspace, self.db.clone(), &doc).await; } + // Some(Ok(params)) = did_close_stream.next() => { + // let input = workspace + // .touch_input_from_file_path( + // db, + // params + // .text_document + // .uri + // .to_file_path() + // .unwrap() + // .to_str() + // .unwrap(), + // ) + // .unwrap(); + // let _ = input.sync(db, None); + // } Some(Ok(params)) = did_change_watched_files_stream.next() => { let changes = params.changes; for change in changes { let uri = change.uri; let path = uri.to_file_path().unwrap(); - match change.typ { - lsp_types::FileChangeType::CREATED => { - // TODO: handle this more carefully! 
- // this is inefficient, a hack for now - let _ = workspace.sync(db); - let input = workspace - .touch_input_from_file_path(db, path.to_str().unwrap()) - .unwrap(); - let _ = input.sync(db, None); - } - lsp_types::FileChangeType::CHANGED => { - let input = workspace - .touch_input_from_file_path(db, path.to_str().unwrap()) - .unwrap(); - let _ = input.sync(db, None); - } - lsp_types::FileChangeType::DELETED => { - // TODO: handle this more carefully! - // this is inefficient, a hack for now - let _ = workspace.sync(db); + { + let db = self.db.clone(); + let db_write = &mut db.write().await; + + match change.typ { + lsp_types::FileChangeType::CREATED => { + // TODO: handle this more carefully! + // this is inefficient, a hack for now + let _ = workspace.sync(db_write); + let input = workspace + .touch_input_from_file_path(db_write, path.to_str().unwrap()) + .unwrap(); + let _ = input.sync(db_write, None); + } + lsp_types::FileChangeType::CHANGED => { + let input = workspace + .touch_input_from_file_path(db_write, path.to_str().unwrap()) + .unwrap(); + let _ = input.sync(db_write, None); + } + lsp_types::FileChangeType::DELETED => { + // TODO: handle this more carefully! + // this is inefficient, a hack for now + let _ = workspace.sync(db_write); + } + _ => {} } - _ => {} } // collect diagnostics for the file if change.typ != lsp_types::FileChangeType::DELETED { let text = std::fs::read_to_string(path).unwrap(); - update_inputs(workspace, db, &TextDocumentItem { + update_inputs(workspace, self.db.clone(), &TextDocumentItem { uri: uri.clone(), language_id: LANGUAGE_ID.to_string(), version: 0, text: text.clone(), - }).await; + }); + + // let db_read = db.read().await; handle_diagnostics( self.client.clone(), workspace, - db, + self.db.clone(), &TextDocumentItem { uri: uri.clone(), language_id: LANGUAGE_ID.to_string(), @@ -186,11 +194,15 @@ impl Backend { } } Some(Ok((params, responder))) = hover_stream.next() => { - let response = handle_hover(db, workspace, params); + let db = self.db.clone(); + let db_read = db.read().await; + let response = handle_hover(&db_read, workspace, params); responder.respond(response); } Some(Ok((params, responder))) = goto_definition_stream.next() => { - let response = handle_goto_definition(db, workspace, params); + let db = self.db.clone(); + let db_read = db.read().await; + let response = handle_goto_definition(&db_read, workspace, params); responder.respond(response); } } @@ -200,9 +212,10 @@ impl Backend { async fn update_inputs( workspace: &mut Workspace, - db: &mut LanguageServerDatabase, + db: Arc>, params: &TextDocumentItem, ) { + let db = &mut db.write().await; let input = workspace .touch_input_from_file_path( db, @@ -218,20 +231,22 @@ async fn update_inputs( } async fn handle_diagnostics( - client: Arc>, + client: Arc>, workspace: &Workspace, - db: &LanguageServerDatabase, + db: Arc>, params: &TextDocumentItem, ) { - // let client = &mut client.lock().await; - let diagnostics = get_diagnostics(db, workspace, params.uri.clone()); + let diagnostics = { + let db_read = &db.read().await; + get_diagnostics(db_read, workspace, params.uri.clone()) + }; let diagnostics = diagnostics .unwrap() .into_iter() .map(|(uri, diags)| async { let client = client.clone(); - let client = client.lock().await; + let client = client.read().await; client.publish_diagnostics(uri, diags, None).await }) .collect::>(); diff --git a/crates/language-server/src/db.rs b/crates/language-server/src/db.rs index 7c1d0a80e..5cf426dfb 100644 --- a/crates/language-server/src/db.rs +++ 
b/crates/language-server/src/db.rs @@ -46,7 +46,7 @@ impl LanguageServerDatabase { } pub fn find_enclosing_item( - &mut self, + &self, top_mod: TopLevelMod, cursor: Cursor, ) -> Option { diff --git a/crates/language-server/src/goto.rs b/crates/language-server/src/goto.rs index 730348927..0e00ee1d3 100644 --- a/crates/language-server/src/goto.rs +++ b/crates/language-server/src/goto.rs @@ -61,7 +61,7 @@ fn smallest_enclosing_path(cursor: Cursor, path_map: &GotoPathMap) -> Option Option { diff --git a/crates/language-server/src/handlers/request.rs b/crates/language-server/src/handlers/request.rs index 716acf8f1..bcb8d805d 100644 --- a/crates/language-server/src/handlers/request.rs +++ b/crates/language-server/src/handlers/request.rs @@ -15,39 +15,33 @@ use crate::{ use lsp_server::ResponseError; pub fn handle_hover( - db: &mut LanguageServerDatabase, - workspace: &mut Workspace, + db: &LanguageServerDatabase, + workspace: &Workspace, params: lsp_types::HoverParams, ) -> Result> { - info!("handling hover"); - // TODO: get more relevant information for the hover let file_path = ¶ms .text_document_position_params .text_document .uri .path(); - let file = std::fs::File::open(file_path).unwrap(); - let reader = std::io::BufReader::new(file); - let line = reader + + info!("handling hover"); + info!("getting hover info for file_path: {:?}", file_path); + let input = workspace.get_input_from_file_path(db, file_path); + let ingot = input.map(|input| input.ingot(db)); + + // TODO: get more relevant information for the hover + let file_text = input.unwrap().text(db).to_string(); + let line = file_text .lines() .nth(params.text_document_position_params.position.line as usize) .unwrap() - .unwrap(); - - let file_text = std::fs::read_to_string(file_path).unwrap(); + .to_string(); - // let cursor: Cursor = params.text_document_position_params.position.into(); let cursor: Cursor = to_offset_from_position( params.text_document_position_params.position, file_text.as_str(), ); - // let file_path = std::path::Path::new(file_path); - info!("getting hover info for file_path: {:?}", file_path); - let ingot = workspace - .touch_input_from_file_path(db, file_path) - .map(|input| input.ingot(db)); - - // info!("got ingot: {:?} of type {:?}", ingot, ingot.map(|ingot| ingot.kind(&mut state.db))); let ingot_info: Option = { let ingot_type = match ingot { @@ -106,8 +100,8 @@ pub fn handle_hover( use lsp_types::{GotoDefinitionParams, GotoDefinitionResponse, Hover}; pub fn handle_goto_definition( - db: &mut LanguageServerDatabase, - workspace: &mut Workspace, + db: &LanguageServerDatabase, + workspace: &Workspace, params: GotoDefinitionParams, ) -> Result> { // Convert the position to an offset in the file diff --git a/crates/language-server/src/language_server.rs b/crates/language-server/src/language_server.rs index a2dda55b8..ed8f87b10 100644 --- a/crates/language-server/src/language_server.rs +++ b/crates/language-server/src/language_server.rs @@ -12,13 +12,13 @@ use crate::oneshot_responder::OneshotResponder; use tower_lsp::{jsonrpc::Result, Client, LanguageServer}; pub(crate) struct Server { - pub(crate) messaging: Arc>, - pub(crate) client: Arc>, + pub(crate) messaging: Arc>, + pub(crate) client: Arc>, } impl Server { pub(crate) async fn register_watchers(&self) -> Result<()> { - let client = self.client.lock().await; + let client = self.client.read().await; let registration = Registration { id: String::from("watch-fe-files"), method: String::from("workspace/didChangeWatchedFiles"), @@ -36,8 +36,8 @@ impl Server { } 
pub(crate) fn new(client: Client) -> Self { - let messaging = Arc::new(tokio::sync::Mutex::new(MessageChannels::new())); - let client = Arc::new(tokio::sync::Mutex::new(client)); + let messaging = Arc::new(tokio::sync::RwLock::new(MessageChannels::new())); + let client = Arc::new(tokio::sync::RwLock::new(client)); Self { messaging, client } } @@ -48,7 +48,7 @@ impl Server { impl LanguageServer for Server { async fn initialize(&self, initialize_params: InitializeParams) -> Result { // forward the initialize request to the messaging system - let messaging = self.messaging.lock().await; + let messaging = self.messaging.read().await; let rx = messaging.send_initialize(initialize_params); info!("awaiting initialization result"); @@ -67,27 +67,27 @@ impl LanguageServer for Server { } async fn did_open(&self, params: lsp_types::DidOpenTextDocumentParams) { - let messaging = self.messaging.lock().await; + let messaging = self.messaging.read().await; messaging.send_did_open(params); } async fn did_change(&self, params: lsp_types::DidChangeTextDocumentParams) { - let messaging = self.messaging.lock().await; + let messaging = self.messaging.read().await; messaging.send_did_change(params); } async fn did_close(&self, params: DidCloseTextDocumentParams) { - let messaging = self.messaging.lock().await; + let messaging = self.messaging.read().await; messaging.send_did_close(params); } async fn did_change_watched_files(&self, params: DidChangeWatchedFilesParams) { - let messaging = self.messaging.lock().await; + let messaging = self.messaging.read().await; messaging.send_did_change_watched_files(params); } async fn hover(&self, params: lsp_types::HoverParams) -> Result> { - let messaging = self.messaging.lock().await; + let messaging = self.messaging.read().await; let rx = messaging.send_hover(params); rx.await.unwrap() } @@ -96,7 +96,7 @@ impl LanguageServer for Server { &self, params: lsp_types::GotoDefinitionParams, ) -> Result> { - let messaging = self.messaging.lock().await; + let messaging = self.messaging.read().await; let rx = messaging.send_goto_definition(params); rx.await.unwrap() } diff --git a/crates/language-server/src/logger.rs b/crates/language-server/src/logger.rs index 918a80d58..5a652ff01 100644 --- a/crates/language-server/src/logger.rs +++ b/crates/language-server/src/logger.rs @@ -46,12 +46,12 @@ pub fn setup_logger( pub async fn handle_log_messages( mut rx: tokio::sync::mpsc::UnboundedReceiver<(String, MessageType)>, - client: Arc>, + client: Arc>, ) -> tokio::sync::mpsc::UnboundedReceiver { loop { let (message, message_type) = rx.recv().await.unwrap(); // let message_type = match - let client = client.lock().await; + let client = client.read().await; client.log_message(message_type, message).await; } } diff --git a/crates/language-server/src/main.rs b/crates/language-server/src/main.rs index 9ee9355e3..c48334ccf 100644 --- a/crates/language-server/src/main.rs +++ b/crates/language-server/src/main.rs @@ -36,8 +36,14 @@ async fn main() { let rx = setup_logger(log::Level::Info).unwrap(); // separate runtime for the backend - let backend_runtime = tokio::runtime::Builder::new_multi_thread() - .worker_threads(4) + // let backend_runtime = tokio::runtime::Builder::new_multi_thread() + // .worker_threads(4) + // .enable_all() + // .build() + // .unwrap(); + + // use a single threaded runtime instead + let backend_runtime = tokio::runtime::Builder::new_current_thread() .enable_all() .build() .unwrap(); From dad57c1ad9bd6b9e5d5cf06f71fbafcba57787c8 Mon Sep 17 00:00:00 2001 From: Micah Date: 
Fri, 8 Mar 2024 17:27:24 -0600 Subject: [PATCH 32/66] language server: reflock issues are due to salsa! --- crates/analyzer/src/context.rs | 6 +- crates/analyzer/src/db/queries/contracts.rs | 1 + crates/analyzer/src/db/queries/enums.rs | 1 + crates/analyzer/src/db/queries/functions.rs | 16 +++++- crates/analyzer/src/db/queries/impls.rs | 1 + crates/analyzer/src/db/queries/module.rs | 3 +- crates/analyzer/src/db/queries/structs.rs | 1 + crates/analyzer/src/db/queries/traits.rs | 1 + crates/analyzer/src/db/queries/types.rs | 1 + crates/analyzer/src/lib.rs | 14 +++++ crates/analyzer/src/namespace/scopes.rs | 62 +++++++++++---------- crates/language-server/src/backend.rs | 25 +++++---- crates/language-server/src/main.rs | 10 +++- crates/test-utils/src/lib.rs | 8 +-- 14 files changed, 100 insertions(+), 50 deletions(-) diff --git a/crates/analyzer/src/context.rs b/crates/analyzer/src/context.rs index 7cb202d0b..9ba6b7de3 100644 --- a/crates/analyzer/src/context.rs +++ b/crates/analyzer/src/context.rs @@ -26,7 +26,7 @@ use indexmap::IndexMap; use num_bigint::BigInt; use smol_str::SmolStr; use std::{ - cell::RefCell, + sync::RwLock, collections::HashMap, fmt::{self, Debug}, hash::Hash, @@ -320,7 +320,7 @@ impl DiagnosticVoucher { #[derive(Default)] pub struct TempContext { - pub diagnostics: RefCell>, + pub diagnostics: RwLock>, } impl AnalyzerContext for TempContext { fn db(&self) -> &dyn AnalyzerDb { @@ -396,7 +396,7 @@ impl AnalyzerContext for TempContext { } fn add_diagnostic(&self, diag: Diagnostic) { - self.diagnostics.borrow_mut().push(diag) + self.diagnostics.write().unwrap().push(diag) } fn get_context_type(&self) -> Option { diff --git a/crates/analyzer/src/db/queries/contracts.rs b/crates/analyzer/src/db/queries/contracts.rs index 44ac44bf8..a1b1d3d82 100644 --- a/crates/analyzer/src/db/queries/contracts.rs +++ b/crates/analyzer/src/db/queries/contracts.rs @@ -11,6 +11,7 @@ use crate::{ types::{self, Type}, }, traversal::types::type_desc, + TakeableRwLock, }; use fe_common::diagnostics::Label; use fe_parser::ast; diff --git a/crates/analyzer/src/db/queries/enums.rs b/crates/analyzer/src/db/queries/enums.rs index 23f4a9a38..d08a5172e 100644 --- a/crates/analyzer/src/db/queries/enums.rs +++ b/crates/analyzer/src/db/queries/enums.rs @@ -19,6 +19,7 @@ use crate::{ }, traversal::types::type_desc, AnalyzerDb, + TakeableRwLock, }; pub fn enum_all_variants(db: &dyn AnalyzerDb, enum_: EnumId) -> Rc<[EnumVariantId]> { diff --git a/crates/analyzer/src/db/queries/functions.rs b/crates/analyzer/src/db/queries/functions.rs index 9876a28a6..37959d45e 100644 --- a/crates/analyzer/src/db/queries/functions.rs +++ b/crates/analyzer/src/db/queries/functions.rs @@ -12,6 +12,7 @@ use crate::{ functions::traverse_statements, types::{type_desc, type_desc_to_trait}, }, + TakeableRwLock, }; use fe_common::diagnostics::Label; use fe_parser::{ @@ -344,9 +345,20 @@ pub fn function_body(db: &dyn AnalyzerDb, function: FunctionId) -> Analysis Result<(), Ve Err(diagnostics) } } + + +use std::sync::RwLock; +use std::mem; + +pub trait TakeableRwLock { + fn take(&self) -> T; +} +impl TakeableRwLock for RwLock { + fn take(&self) -> T { + let mut write_guard = self.write().unwrap(); + mem::replace(&mut *write_guard, T::default()) + } +} diff --git a/crates/analyzer/src/namespace/scopes.rs b/crates/analyzer/src/namespace/scopes.rs index affaf3da9..fe813e8cb 100644 --- a/crates/analyzer/src/namespace/scopes.rs +++ b/crates/analyzer/src/namespace/scopes.rs @@ -20,21 +20,21 @@ use fe_parser::{ Label, }; use indexmap::IndexMap; 
-use std::{cell::RefCell, collections::BTreeMap}; +use std::{sync::RwLock, collections::BTreeMap}; pub struct ItemScope<'a> { db: &'a dyn AnalyzerDb, module: ModuleId, - expressions: RefCell>, - pub diagnostics: RefCell>, + expressions: RwLock>, + pub diagnostics: RwLock>, } impl<'a> ItemScope<'a> { pub fn new(db: &'a dyn AnalyzerDb, module: ModuleId) -> Self { Self { db, module, - expressions: RefCell::new(IndexMap::default()), - diagnostics: RefCell::new(vec![]), + expressions: RwLock::new(IndexMap::default()), + diagnostics: RwLock::new(vec![]), } } } @@ -46,18 +46,20 @@ impl<'a> AnalyzerContext for ItemScope<'a> { fn add_expression(&self, node: &Node, attributes: ExpressionAttributes) { self.expressions - .borrow_mut() + .write() + .unwrap() .insert(node.id, attributes) .expect_none("expression attributes already exist"); } fn update_expression(&self, node: &Node, f: &dyn Fn(&mut ExpressionAttributes)) { - f(self.expressions.borrow_mut().get_mut(&node.id).unwrap()) + f(self.expressions.write().unwrap().get_mut(&node.id).unwrap()) } fn expr_typ(&self, expr: &Node) -> Type { self.expressions - .borrow() + .read() + .unwrap() .get(&expr.id) .unwrap() .typ @@ -172,7 +174,7 @@ impl<'a> AnalyzerContext for ItemScope<'a> { } fn add_diagnostic(&self, diag: Diagnostic) { - self.diagnostics.borrow_mut().push(diag) + self.diagnostics.write().unwrap().push(diag) } /// Gets `std::context::Context` if it exists @@ -194,8 +196,8 @@ impl<'a> AnalyzerContext for ItemScope<'a> { pub struct FunctionScope<'a> { pub db: &'a dyn AnalyzerDb, pub function: FunctionId, - pub body: RefCell, - pub diagnostics: RefCell>, + pub body: RwLock, + pub diagnostics: RwLock>, } impl<'a> FunctionScope<'a> { @@ -203,8 +205,8 @@ impl<'a> FunctionScope<'a> { Self { db, function, - body: RefCell::new(FunctionBody::default()), - diagnostics: RefCell::new(vec![]), + body: RwLock::new(FunctionBody::default()), + diagnostics: RwLock::new(vec![]), } } @@ -215,7 +217,8 @@ impl<'a> FunctionScope<'a> { pub fn map_variable_type(&self, node: &Node, typ: TypeId) { self.add_node(node); self.body - .borrow_mut() + .write() + .unwrap() .var_types .insert(node.id, typ) .expect_none("variable has already registered") @@ -224,14 +227,15 @@ impl<'a> FunctionScope<'a> { pub fn map_pattern_matrix(&self, node: &Node, matrix: PatternMatrix) { debug_assert!(matches!(node.kind, ast::FuncStmt::Match { .. 
})); self.body - .borrow_mut() + .write() + .unwrap() .matches .insert(node.id, matrix) .expect_none("match statement attributes already exists") } fn add_node(&self, node: &Node) { - self.body.borrow_mut().spans.insert(node.id, node.span); + self.body.write().unwrap().spans.insert(node.id, node.span); } } @@ -241,13 +245,13 @@ impl<'a> AnalyzerContext for FunctionScope<'a> { } fn add_diagnostic(&self, diag: Diagnostic) { - self.diagnostics.borrow_mut().push(diag) + self.diagnostics.write().unwrap().push(diag) } fn add_expression(&self, node: &Node, attributes: ExpressionAttributes) { self.add_node(node); self.body - .borrow_mut() + .write().unwrap() .expressions .insert(node.id, attributes) .expect_none("expression attributes already exist"); @@ -256,7 +260,7 @@ impl<'a> AnalyzerContext for FunctionScope<'a> { fn update_expression(&self, node: &Node, f: &dyn Fn(&mut ExpressionAttributes)) { f(self .body - .borrow_mut() + .write().unwrap() .expressions .get_mut(&node.id) .unwrap()) @@ -264,7 +268,7 @@ impl<'a> AnalyzerContext for FunctionScope<'a> { fn expr_typ(&self, expr: &Node) -> Type { self.body - .borrow() + .read().unwrap() .expressions .get(&expr.id) .unwrap() @@ -274,7 +278,7 @@ impl<'a> AnalyzerContext for FunctionScope<'a> { fn add_constant(&self, _name: &Node, expr: &Node, value: Constant) { self.body - .borrow_mut() + .write().unwrap() .expressions .get_mut(&expr.id) .expect("expression attributes must exist before adding constant value") @@ -305,13 +309,13 @@ impl<'a> AnalyzerContext for FunctionScope<'a> { // TODO: should probably take the Expr::Call node, rather than the function node self.add_node(node); self.body - .borrow_mut() + .write().unwrap() .calls .insert(node.id, call_type) .expect_none("call attributes already exist"); } fn get_call(&self, node: &Node) -> Option { - self.body.borrow().calls.get(&node.id).cloned() + self.body.read().unwrap().calls.get(&node.id).cloned() } fn is_in_function(&self) -> bool { @@ -448,7 +452,7 @@ pub struct BlockScope<'a, 'b> { pub parent: Option<&'a BlockScope<'a, 'b>>, /// Maps Name -> (Type, is_const, span) pub variable_defs: BTreeMap, - pub constant_defs: RefCell>, + pub constant_defs: RwLock>, pub typ: BlockScopeType, } @@ -500,7 +504,7 @@ impl AnalyzerContext for BlockScope<'_, '_> { fn add_constant(&self, name: &Node, expr: &Node, value: Constant) { self.constant_defs - .borrow_mut() + .write().unwrap() .insert(name.kind.clone().to_string(), value.clone()) .expect_none("expression attributes already exist"); @@ -512,7 +516,7 @@ impl AnalyzerContext for BlockScope<'_, '_> { name: &ast::SmolStr, span: Span, ) -> Result, IncompleteItem> { - if let Some(constant) = self.constant_defs.borrow().get(name.as_str()) { + if let Some(constant) = self.constant_defs.read().unwrap().get(name.as_str()) { Ok(Some(constant.clone())) } else if let Some(parent) = self.parent { parent.constant_value_by_name(name, span) @@ -567,7 +571,7 @@ impl AnalyzerContext for BlockScope<'_, '_> { } fn add_diagnostic(&self, diag: Diagnostic) { - self.root.diagnostics.borrow_mut().push(diag) + self.root.diagnostics.write().unwrap().push(diag) } fn get_context_type(&self) -> Option { @@ -581,7 +585,7 @@ impl<'a, 'b> BlockScope<'a, 'b> { root, parent: None, variable_defs: BTreeMap::new(), - constant_defs: RefCell::new(BTreeMap::new()), + constant_defs: RwLock::new(BTreeMap::new()), typ, } } @@ -591,7 +595,7 @@ impl<'a, 'b> BlockScope<'a, 'b> { root: self.root, parent: Some(self), variable_defs: BTreeMap::new(), - constant_defs: RefCell::new(BTreeMap::new()), + 
constant_defs: RwLock::new(BTreeMap::new()), typ, } } diff --git a/crates/language-server/src/backend.rs b/crates/language-server/src/backend.rs index eb837c145..33f7465f9 100644 --- a/crates/language-server/src/backend.rs +++ b/crates/language-server/src/backend.rs @@ -28,17 +28,22 @@ pub struct Backend { } impl Backend { - pub fn new(client: Arc>, messaging: Arc>) -> Self { - let db = Arc::new(RwLock::new(LanguageServerDatabase::default())); - let workspace = Workspace::default(); + // pub fn new( + // client: Arc>, + // messaging: Arc>, + // db: Arc>, + // workspace: Workspace, + // ) -> Self { + // let db = Arc::new(RwLock::new(LanguageServerDatabase::default())); + // let workspace = Workspace::default(); - Self { - messaging, - client, - db, - workspace, - } - } + // Self { + // messaging, + // client, + // db, + // workspace, + // } + // } pub async fn handle_streams(mut self) { info!("setting up streams"); let workspace = &mut self.workspace; diff --git a/crates/language-server/src/main.rs b/crates/language-server/src/main.rs index c48334ccf..3bef230cd 100644 --- a/crates/language-server/src/main.rs +++ b/crates/language-server/src/main.rs @@ -10,10 +10,13 @@ mod oneshot_responder; mod util; mod workspace; +use std::sync::Arc; + use backend::Backend; use db::Jar; use language_server::Server; +use tokio::sync::RwLock; use crate::logger::{handle_log_messages, setup_logger}; @@ -31,7 +34,12 @@ async fn main() { let client = server.client.clone(); let messaging = server.messaging.clone(); - let backend = Backend::new(client, messaging); + let backend = Backend{ + client, + messaging, + db: Arc::new(RwLock::new(db::LanguageServerDatabase::default())), + workspace: workspace::Workspace::default(), + }; let rx = setup_logger(log::Level::Info).unwrap(); diff --git a/crates/test-utils/src/lib.rs b/crates/test-utils/src/lib.rs index d0fc0207e..89e8dae4b 100644 --- a/crates/test-utils/src/lib.rs +++ b/crates/test-utils/src/lib.rs @@ -7,7 +7,7 @@ use fe_driver as driver; use primitive_types::{H160, U256}; use std::{ - cell::RefCell, + sync::RwLock, collections::BTreeMap, fmt::{Display, Formatter}, str::FromStr, @@ -31,12 +31,12 @@ macro_rules! assert_harness_gas_report { #[derive(Default, Debug)] pub struct GasReporter { - records: RefCell>, + records: RwLock>, } impl GasReporter { pub fn add_record(&self, description: &str, gas_used: u64) { - self.records.borrow_mut().push(GasRecord { + self.records.write().unwrap().push(GasRecord { description: description.to_string(), gas_used, }) @@ -50,7 +50,7 @@ impl GasReporter { impl Display for GasReporter { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - for record in self.records.borrow().iter() { + for record in self.records.read().unwrap().iter() { writeln!(f, "{} used {} gas", record.description, record.gas_used)?; } From 693f67be35846e190674502f8f617bb623dd33a5 Mon Sep 17 00:00:00 2001 From: Micah Date: Sun, 10 Mar 2024 16:28:23 -0500 Subject: [PATCH 33/66] Revert "language server: reflock issues are due to salsa!" This reverts commit 76daadb044f8c2a97819e782559d0b6a41af1f2a. 
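This revert and the one after it, together with the earlier fallback from a multi-threaded to a current-thread tokio runtime, trace back to one bound: Arc<tokio::sync::RwLock<T>> is only Send + Sync when T itself is Send + Sync, and salsa's storage is not. Below is a minimal illustrative sketch of that bound, using a made-up stand-in type with an Rc inside rather than the actual database; all names in it are hypothetical, not types from these patches.

use std::rc::Rc;
use std::sync::Arc;
use tokio::sync::RwLock;

// Hypothetical stand-in for a database whose internals are not thread-safe;
// the Rc field makes this type neither Send nor Sync.
#[allow(dead_code)]
struct NotThreadSafeDb {
    interned: Rc<Vec<String>>,
}

// Compiles only for values that may be moved to another thread.
fn require_send<T: Send>(_value: &T) {}

fn main() {
    // With a thread-safe payload the Arc<RwLock<...>> wrapper is enough...
    require_send(&Arc::new(RwLock::new(0u32)));

    // ...but Arc<RwLock<T>> is Send/Sync only when T itself is Send + Sync,
    // so wrapping the non-thread-safe database does not help: uncommenting
    // the line below is a compile error, and tokio::spawn on a multi-threaded
    // runtime runs into the same bound.
    let db = Arc::new(RwLock::new(NotThreadSafeDb {
        interned: Rc::new(Vec::new()),
    }));
    // require_send(&db);
    drop(db);
}

Under that constraint the RefCell-to-RwLock conversion in the analyzer buys nothing on its own, which is presumably why it is rolled back here and the server keeps the database on a single thread instead.
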
--- crates/analyzer/src/context.rs | 6 +- crates/analyzer/src/db/queries/contracts.rs | 1 - crates/analyzer/src/db/queries/enums.rs | 1 - crates/analyzer/src/db/queries/functions.rs | 16 +----- crates/analyzer/src/db/queries/impls.rs | 1 - crates/analyzer/src/db/queries/module.rs | 3 +- crates/analyzer/src/db/queries/structs.rs | 1 - crates/analyzer/src/db/queries/traits.rs | 1 - crates/analyzer/src/db/queries/types.rs | 1 - crates/analyzer/src/lib.rs | 14 ----- crates/analyzer/src/namespace/scopes.rs | 62 ++++++++++----------- crates/language-server/src/backend.rs | 25 ++++----- crates/language-server/src/main.rs | 10 +--- crates/test-utils/src/lib.rs | 8 +-- 14 files changed, 50 insertions(+), 100 deletions(-) diff --git a/crates/analyzer/src/context.rs b/crates/analyzer/src/context.rs index 9ba6b7de3..7cb202d0b 100644 --- a/crates/analyzer/src/context.rs +++ b/crates/analyzer/src/context.rs @@ -26,7 +26,7 @@ use indexmap::IndexMap; use num_bigint::BigInt; use smol_str::SmolStr; use std::{ - sync::RwLock, + cell::RefCell, collections::HashMap, fmt::{self, Debug}, hash::Hash, @@ -320,7 +320,7 @@ impl DiagnosticVoucher { #[derive(Default)] pub struct TempContext { - pub diagnostics: RwLock>, + pub diagnostics: RefCell>, } impl AnalyzerContext for TempContext { fn db(&self) -> &dyn AnalyzerDb { @@ -396,7 +396,7 @@ impl AnalyzerContext for TempContext { } fn add_diagnostic(&self, diag: Diagnostic) { - self.diagnostics.write().unwrap().push(diag) + self.diagnostics.borrow_mut().push(diag) } fn get_context_type(&self) -> Option { diff --git a/crates/analyzer/src/db/queries/contracts.rs b/crates/analyzer/src/db/queries/contracts.rs index a1b1d3d82..44ac44bf8 100644 --- a/crates/analyzer/src/db/queries/contracts.rs +++ b/crates/analyzer/src/db/queries/contracts.rs @@ -11,7 +11,6 @@ use crate::{ types::{self, Type}, }, traversal::types::type_desc, - TakeableRwLock, }; use fe_common::diagnostics::Label; use fe_parser::ast; diff --git a/crates/analyzer/src/db/queries/enums.rs b/crates/analyzer/src/db/queries/enums.rs index d08a5172e..23f4a9a38 100644 --- a/crates/analyzer/src/db/queries/enums.rs +++ b/crates/analyzer/src/db/queries/enums.rs @@ -19,7 +19,6 @@ use crate::{ }, traversal::types::type_desc, AnalyzerDb, - TakeableRwLock, }; pub fn enum_all_variants(db: &dyn AnalyzerDb, enum_: EnumId) -> Rc<[EnumVariantId]> { diff --git a/crates/analyzer/src/db/queries/functions.rs b/crates/analyzer/src/db/queries/functions.rs index 37959d45e..9876a28a6 100644 --- a/crates/analyzer/src/db/queries/functions.rs +++ b/crates/analyzer/src/db/queries/functions.rs @@ -12,7 +12,6 @@ use crate::{ functions::traverse_statements, types::{type_desc, type_desc_to_trait}, }, - TakeableRwLock, }; use fe_common::diagnostics::Label; use fe_parser::{ @@ -345,20 +344,9 @@ pub fn function_body(db: &dyn AnalyzerDb, function: FunctionId) -> Analysis Result<(), Ve Err(diagnostics) } } - - -use std::sync::RwLock; -use std::mem; - -pub trait TakeableRwLock { - fn take(&self) -> T; -} -impl TakeableRwLock for RwLock { - fn take(&self) -> T { - let mut write_guard = self.write().unwrap(); - mem::replace(&mut *write_guard, T::default()) - } -} diff --git a/crates/analyzer/src/namespace/scopes.rs b/crates/analyzer/src/namespace/scopes.rs index fe813e8cb..affaf3da9 100644 --- a/crates/analyzer/src/namespace/scopes.rs +++ b/crates/analyzer/src/namespace/scopes.rs @@ -20,21 +20,21 @@ use fe_parser::{ Label, }; use indexmap::IndexMap; -use std::{sync::RwLock, collections::BTreeMap}; +use std::{cell::RefCell, collections::BTreeMap}; pub 
struct ItemScope<'a> { db: &'a dyn AnalyzerDb, module: ModuleId, - expressions: RwLock>, - pub diagnostics: RwLock>, + expressions: RefCell>, + pub diagnostics: RefCell>, } impl<'a> ItemScope<'a> { pub fn new(db: &'a dyn AnalyzerDb, module: ModuleId) -> Self { Self { db, module, - expressions: RwLock::new(IndexMap::default()), - diagnostics: RwLock::new(vec![]), + expressions: RefCell::new(IndexMap::default()), + diagnostics: RefCell::new(vec![]), } } } @@ -46,20 +46,18 @@ impl<'a> AnalyzerContext for ItemScope<'a> { fn add_expression(&self, node: &Node, attributes: ExpressionAttributes) { self.expressions - .write() - .unwrap() + .borrow_mut() .insert(node.id, attributes) .expect_none("expression attributes already exist"); } fn update_expression(&self, node: &Node, f: &dyn Fn(&mut ExpressionAttributes)) { - f(self.expressions.write().unwrap().get_mut(&node.id).unwrap()) + f(self.expressions.borrow_mut().get_mut(&node.id).unwrap()) } fn expr_typ(&self, expr: &Node) -> Type { self.expressions - .read() - .unwrap() + .borrow() .get(&expr.id) .unwrap() .typ @@ -174,7 +172,7 @@ impl<'a> AnalyzerContext for ItemScope<'a> { } fn add_diagnostic(&self, diag: Diagnostic) { - self.diagnostics.write().unwrap().push(diag) + self.diagnostics.borrow_mut().push(diag) } /// Gets `std::context::Context` if it exists @@ -196,8 +194,8 @@ impl<'a> AnalyzerContext for ItemScope<'a> { pub struct FunctionScope<'a> { pub db: &'a dyn AnalyzerDb, pub function: FunctionId, - pub body: RwLock, - pub diagnostics: RwLock>, + pub body: RefCell, + pub diagnostics: RefCell>, } impl<'a> FunctionScope<'a> { @@ -205,8 +203,8 @@ impl<'a> FunctionScope<'a> { Self { db, function, - body: RwLock::new(FunctionBody::default()), - diagnostics: RwLock::new(vec![]), + body: RefCell::new(FunctionBody::default()), + diagnostics: RefCell::new(vec![]), } } @@ -217,8 +215,7 @@ impl<'a> FunctionScope<'a> { pub fn map_variable_type(&self, node: &Node, typ: TypeId) { self.add_node(node); self.body - .write() - .unwrap() + .borrow_mut() .var_types .insert(node.id, typ) .expect_none("variable has already registered") @@ -227,15 +224,14 @@ impl<'a> FunctionScope<'a> { pub fn map_pattern_matrix(&self, node: &Node, matrix: PatternMatrix) { debug_assert!(matches!(node.kind, ast::FuncStmt::Match { .. 
})); self.body - .write() - .unwrap() + .borrow_mut() .matches .insert(node.id, matrix) .expect_none("match statement attributes already exists") } fn add_node(&self, node: &Node) { - self.body.write().unwrap().spans.insert(node.id, node.span); + self.body.borrow_mut().spans.insert(node.id, node.span); } } @@ -245,13 +241,13 @@ impl<'a> AnalyzerContext for FunctionScope<'a> { } fn add_diagnostic(&self, diag: Diagnostic) { - self.diagnostics.write().unwrap().push(diag) + self.diagnostics.borrow_mut().push(diag) } fn add_expression(&self, node: &Node, attributes: ExpressionAttributes) { self.add_node(node); self.body - .write().unwrap() + .borrow_mut() .expressions .insert(node.id, attributes) .expect_none("expression attributes already exist"); @@ -260,7 +256,7 @@ impl<'a> AnalyzerContext for FunctionScope<'a> { fn update_expression(&self, node: &Node, f: &dyn Fn(&mut ExpressionAttributes)) { f(self .body - .write().unwrap() + .borrow_mut() .expressions .get_mut(&node.id) .unwrap()) @@ -268,7 +264,7 @@ impl<'a> AnalyzerContext for FunctionScope<'a> { fn expr_typ(&self, expr: &Node) -> Type { self.body - .read().unwrap() + .borrow() .expressions .get(&expr.id) .unwrap() @@ -278,7 +274,7 @@ impl<'a> AnalyzerContext for FunctionScope<'a> { fn add_constant(&self, _name: &Node, expr: &Node, value: Constant) { self.body - .write().unwrap() + .borrow_mut() .expressions .get_mut(&expr.id) .expect("expression attributes must exist before adding constant value") @@ -309,13 +305,13 @@ impl<'a> AnalyzerContext for FunctionScope<'a> { // TODO: should probably take the Expr::Call node, rather than the function node self.add_node(node); self.body - .write().unwrap() + .borrow_mut() .calls .insert(node.id, call_type) .expect_none("call attributes already exist"); } fn get_call(&self, node: &Node) -> Option { - self.body.read().unwrap().calls.get(&node.id).cloned() + self.body.borrow().calls.get(&node.id).cloned() } fn is_in_function(&self) -> bool { @@ -452,7 +448,7 @@ pub struct BlockScope<'a, 'b> { pub parent: Option<&'a BlockScope<'a, 'b>>, /// Maps Name -> (Type, is_const, span) pub variable_defs: BTreeMap, - pub constant_defs: RwLock>, + pub constant_defs: RefCell>, pub typ: BlockScopeType, } @@ -504,7 +500,7 @@ impl AnalyzerContext for BlockScope<'_, '_> { fn add_constant(&self, name: &Node, expr: &Node, value: Constant) { self.constant_defs - .write().unwrap() + .borrow_mut() .insert(name.kind.clone().to_string(), value.clone()) .expect_none("expression attributes already exist"); @@ -516,7 +512,7 @@ impl AnalyzerContext for BlockScope<'_, '_> { name: &ast::SmolStr, span: Span, ) -> Result, IncompleteItem> { - if let Some(constant) = self.constant_defs.read().unwrap().get(name.as_str()) { + if let Some(constant) = self.constant_defs.borrow().get(name.as_str()) { Ok(Some(constant.clone())) } else if let Some(parent) = self.parent { parent.constant_value_by_name(name, span) @@ -571,7 +567,7 @@ impl AnalyzerContext for BlockScope<'_, '_> { } fn add_diagnostic(&self, diag: Diagnostic) { - self.root.diagnostics.write().unwrap().push(diag) + self.root.diagnostics.borrow_mut().push(diag) } fn get_context_type(&self) -> Option { @@ -585,7 +581,7 @@ impl<'a, 'b> BlockScope<'a, 'b> { root, parent: None, variable_defs: BTreeMap::new(), - constant_defs: RwLock::new(BTreeMap::new()), + constant_defs: RefCell::new(BTreeMap::new()), typ, } } @@ -595,7 +591,7 @@ impl<'a, 'b> BlockScope<'a, 'b> { root: self.root, parent: Some(self), variable_defs: BTreeMap::new(), - constant_defs: RwLock::new(BTreeMap::new()), + 
constant_defs: RefCell::new(BTreeMap::new()), typ, } } diff --git a/crates/language-server/src/backend.rs b/crates/language-server/src/backend.rs index 33f7465f9..eb837c145 100644 --- a/crates/language-server/src/backend.rs +++ b/crates/language-server/src/backend.rs @@ -28,22 +28,17 @@ pub struct Backend { } impl Backend { - // pub fn new( - // client: Arc>, - // messaging: Arc>, - // db: Arc>, - // workspace: Workspace, - // ) -> Self { - // let db = Arc::new(RwLock::new(LanguageServerDatabase::default())); - // let workspace = Workspace::default(); + pub fn new(client: Arc>, messaging: Arc>) -> Self { + let db = Arc::new(RwLock::new(LanguageServerDatabase::default())); + let workspace = Workspace::default(); - // Self { - // messaging, - // client, - // db, - // workspace, - // } - // } + Self { + messaging, + client, + db, + workspace, + } + } pub async fn handle_streams(mut self) { info!("setting up streams"); let workspace = &mut self.workspace; diff --git a/crates/language-server/src/main.rs b/crates/language-server/src/main.rs index 3bef230cd..c48334ccf 100644 --- a/crates/language-server/src/main.rs +++ b/crates/language-server/src/main.rs @@ -10,13 +10,10 @@ mod oneshot_responder; mod util; mod workspace; -use std::sync::Arc; - use backend::Backend; use db::Jar; use language_server::Server; -use tokio::sync::RwLock; use crate::logger::{handle_log_messages, setup_logger}; @@ -34,12 +31,7 @@ async fn main() { let client = server.client.clone(); let messaging = server.messaging.clone(); - let backend = Backend{ - client, - messaging, - db: Arc::new(RwLock::new(db::LanguageServerDatabase::default())), - workspace: workspace::Workspace::default(), - }; + let backend = Backend::new(client, messaging); let rx = setup_logger(log::Level::Info).unwrap(); diff --git a/crates/test-utils/src/lib.rs b/crates/test-utils/src/lib.rs index 89e8dae4b..d0fc0207e 100644 --- a/crates/test-utils/src/lib.rs +++ b/crates/test-utils/src/lib.rs @@ -7,7 +7,7 @@ use fe_driver as driver; use primitive_types::{H160, U256}; use std::{ - sync::RwLock, + cell::RefCell, collections::BTreeMap, fmt::{Display, Formatter}, str::FromStr, @@ -31,12 +31,12 @@ macro_rules! assert_harness_gas_report { #[derive(Default, Debug)] pub struct GasReporter { - records: RwLock>, + records: RefCell>, } impl GasReporter { pub fn add_record(&self, description: &str, gas_used: u64) { - self.records.write().unwrap().push(GasRecord { + self.records.borrow_mut().push(GasRecord { description: description.to_string(), gas_used, }) @@ -50,7 +50,7 @@ impl GasReporter { impl Display for GasReporter { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - for record in self.records.read().unwrap().iter() { + for record in self.records.borrow().iter() { writeln!(f, "{} used {} gas", record.description, record.gas_used)?; } From 6a2cc6f2e71b2e414700d50bb64d974780ec4a32 Mon Sep 17 00:00:00 2001 From: Micah Date: Sun, 10 Mar 2024 16:28:26 -0500 Subject: [PATCH 34/66] Revert "language server: even Arc/RwLock wrapped salsadb is not Send/Sync" This reverts commit 4ef6a176a434b73cb0ce070dedf2e4aedac9b004. 
--- crates/language-server/src/backend.rs | 135 ++++++++---------- crates/language-server/src/db.rs | 2 +- crates/language-server/src/goto.rs | 2 +- .../language-server/src/handlers/request.rs | 34 +++-- crates/language-server/src/language_server.rs | 24 ++-- crates/language-server/src/logger.rs | 4 +- crates/language-server/src/main.rs | 10 +- 7 files changed, 98 insertions(+), 113 deletions(-) diff --git a/crates/language-server/src/backend.rs b/crates/language-server/src/backend.rs index eb837c145..9d3f5f089 100644 --- a/crates/language-server/src/backend.rs +++ b/crates/language-server/src/backend.rs @@ -3,8 +3,7 @@ use crate::workspace::SyncableIngotFileContext; use futures::TryStreamExt; use lsp_types::TextDocumentItem; use std::sync::Arc; -use tokio::sync::RwLock; -// use tokio::sync::Mutex; +use tokio::sync::Mutex; use crate::capabilities::server_capabilities; use crate::db::LanguageServerDatabase; @@ -21,15 +20,15 @@ use tokio_stream::StreamExt; use tower_lsp::Client; pub struct Backend { - pub(crate) messaging: Arc>, - pub(crate) client: Arc>, - pub(crate) db: Arc>, + pub(crate) messaging: Arc>, + pub(crate) client: Arc>, + pub(crate) db: LanguageServerDatabase, pub(crate) workspace: Workspace, } impl Backend { - pub fn new(client: Arc>, messaging: Arc>) -> Self { - let db = Arc::new(RwLock::new(LanguageServerDatabase::default())); + pub fn new(client: Arc>, messaging: Arc>) -> Self { + let db = LanguageServerDatabase::default(); let workspace = Workspace::default(); Self { @@ -42,8 +41,11 @@ impl Backend { pub async fn handle_streams(mut self) { info!("setting up streams"); let workspace = &mut self.workspace; + let db = &mut self.db; + + let client = self.client.clone(); let messaging = self.messaging.clone(); - let messaging = messaging.read().await; + let messaging = messaging.lock().await; let mut initialized_stream = BroadcastStream::new(messaging.subscribe_initialize()).fuse(); let mut shutdown_stream = BroadcastStream::new(messaging.subscribe_shutdown()).fuse(); @@ -83,11 +85,8 @@ impl Backend { if let Ok((initialization_params, responder)) = result { info!("initializing language server!"); // setup workspace - - let db = self.db.clone(); - let mut db_write = db.write().await; let _ = workspace.set_workspace_root( - &mut db_write, + db, initialization_params .root_uri .unwrap() @@ -115,73 +114,66 @@ impl Backend { } Some(Ok(doc)) = change_stream.next() => { info!("change detected: {:?}", doc.uri); - update_inputs(workspace, self.db.clone(), &doc).await; - handle_diagnostics(self.client.clone(), workspace, self.db.clone(), &doc).await; + update_inputs(workspace, db, &doc).await; + handle_diagnostics(client.clone(), workspace, db, &doc).await; + } + Some(Ok(params)) = did_close_stream.next() => { + let input = workspace + .touch_input_from_file_path( + db, + params + .text_document + .uri + .to_file_path() + .unwrap() + .to_str() + .unwrap(), + ) + .unwrap(); + let _ = input.sync(db, None); } - // Some(Ok(params)) = did_close_stream.next() => { - // let input = workspace - // .touch_input_from_file_path( - // db, - // params - // .text_document - // .uri - // .to_file_path() - // .unwrap() - // .to_str() - // .unwrap(), - // ) - // .unwrap(); - // let _ = input.sync(db, None); - // } Some(Ok(params)) = did_change_watched_files_stream.next() => { let changes = params.changes; for change in changes { let uri = change.uri; let path = uri.to_file_path().unwrap(); - { - let db = self.db.clone(); - let db_write = &mut db.write().await; - - match change.typ { - 
lsp_types::FileChangeType::CREATED => { - // TODO: handle this more carefully! - // this is inefficient, a hack for now - let _ = workspace.sync(db_write); - let input = workspace - .touch_input_from_file_path(db_write, path.to_str().unwrap()) - .unwrap(); - let _ = input.sync(db_write, None); - } - lsp_types::FileChangeType::CHANGED => { - let input = workspace - .touch_input_from_file_path(db_write, path.to_str().unwrap()) - .unwrap(); - let _ = input.sync(db_write, None); - } - lsp_types::FileChangeType::DELETED => { - // TODO: handle this more carefully! - // this is inefficient, a hack for now - let _ = workspace.sync(db_write); - } - _ => {} + match change.typ { + lsp_types::FileChangeType::CREATED => { + // TODO: handle this more carefully! + // this is inefficient, a hack for now + let _ = workspace.sync(db); + let input = workspace + .touch_input_from_file_path(db, path.to_str().unwrap()) + .unwrap(); + let _ = input.sync(db, None); + } + lsp_types::FileChangeType::CHANGED => { + let input = workspace + .touch_input_from_file_path(db, path.to_str().unwrap()) + .unwrap(); + let _ = input.sync(db, None); } + lsp_types::FileChangeType::DELETED => { + // TODO: handle this more carefully! + // this is inefficient, a hack for now + let _ = workspace.sync(db); + } + _ => {} } // collect diagnostics for the file if change.typ != lsp_types::FileChangeType::DELETED { let text = std::fs::read_to_string(path).unwrap(); - update_inputs(workspace, self.db.clone(), &TextDocumentItem { + update_inputs(workspace, db, &TextDocumentItem { uri: uri.clone(), language_id: LANGUAGE_ID.to_string(), version: 0, text: text.clone(), - }); - - // let db_read = db.read().await; + }).await; handle_diagnostics( self.client.clone(), workspace, - self.db.clone(), + db, &TextDocumentItem { uri: uri.clone(), language_id: LANGUAGE_ID.to_string(), @@ -194,15 +186,11 @@ impl Backend { } } Some(Ok((params, responder))) = hover_stream.next() => { - let db = self.db.clone(); - let db_read = db.read().await; - let response = handle_hover(&db_read, workspace, params); + let response = handle_hover(db, workspace, params); responder.respond(response); } Some(Ok((params, responder))) = goto_definition_stream.next() => { - let db = self.db.clone(); - let db_read = db.read().await; - let response = handle_goto_definition(&db_read, workspace, params); + let response = handle_goto_definition(db, workspace, params); responder.respond(response); } } @@ -212,10 +200,9 @@ impl Backend { async fn update_inputs( workspace: &mut Workspace, - db: Arc>, + db: &mut LanguageServerDatabase, params: &TextDocumentItem, ) { - let db = &mut db.write().await; let input = workspace .touch_input_from_file_path( db, @@ -231,22 +218,20 @@ async fn update_inputs( } async fn handle_diagnostics( - client: Arc>, + client: Arc>, workspace: &Workspace, - db: Arc>, + db: &LanguageServerDatabase, params: &TextDocumentItem, ) { - let diagnostics = { - let db_read = &db.read().await; - get_diagnostics(db_read, workspace, params.uri.clone()) - }; + // let client = &mut client.lock().await; + let diagnostics = get_diagnostics(db, workspace, params.uri.clone()); let diagnostics = diagnostics .unwrap() .into_iter() .map(|(uri, diags)| async { let client = client.clone(); - let client = client.read().await; + let client = client.lock().await; client.publish_diagnostics(uri, diags, None).await }) .collect::>(); diff --git a/crates/language-server/src/db.rs b/crates/language-server/src/db.rs index 5cf426dfb..7c1d0a80e 100644 --- a/crates/language-server/src/db.rs +++ 
b/crates/language-server/src/db.rs @@ -46,7 +46,7 @@ impl LanguageServerDatabase { } pub fn find_enclosing_item( - &self, + &mut self, top_mod: TopLevelMod, cursor: Cursor, ) -> Option { diff --git a/crates/language-server/src/goto.rs b/crates/language-server/src/goto.rs index 0e00ee1d3..730348927 100644 --- a/crates/language-server/src/goto.rs +++ b/crates/language-server/src/goto.rs @@ -61,7 +61,7 @@ fn smallest_enclosing_path(cursor: Cursor, path_map: &GotoPathMap) -> Option Option { diff --git a/crates/language-server/src/handlers/request.rs b/crates/language-server/src/handlers/request.rs index bcb8d805d..716acf8f1 100644 --- a/crates/language-server/src/handlers/request.rs +++ b/crates/language-server/src/handlers/request.rs @@ -15,33 +15,39 @@ use crate::{ use lsp_server::ResponseError; pub fn handle_hover( - db: &LanguageServerDatabase, - workspace: &Workspace, + db: &mut LanguageServerDatabase, + workspace: &mut Workspace, params: lsp_types::HoverParams, ) -> Result> { + info!("handling hover"); + // TODO: get more relevant information for the hover let file_path = ¶ms .text_document_position_params .text_document .uri .path(); - - info!("handling hover"); - info!("getting hover info for file_path: {:?}", file_path); - let input = workspace.get_input_from_file_path(db, file_path); - let ingot = input.map(|input| input.ingot(db)); - - // TODO: get more relevant information for the hover - let file_text = input.unwrap().text(db).to_string(); - let line = file_text + let file = std::fs::File::open(file_path).unwrap(); + let reader = std::io::BufReader::new(file); + let line = reader .lines() .nth(params.text_document_position_params.position.line as usize) .unwrap() - .to_string(); + .unwrap(); + + let file_text = std::fs::read_to_string(file_path).unwrap(); + // let cursor: Cursor = params.text_document_position_params.position.into(); let cursor: Cursor = to_offset_from_position( params.text_document_position_params.position, file_text.as_str(), ); + // let file_path = std::path::Path::new(file_path); + info!("getting hover info for file_path: {:?}", file_path); + let ingot = workspace + .touch_input_from_file_path(db, file_path) + .map(|input| input.ingot(db)); + + // info!("got ingot: {:?} of type {:?}", ingot, ingot.map(|ingot| ingot.kind(&mut state.db))); let ingot_info: Option = { let ingot_type = match ingot { @@ -100,8 +106,8 @@ pub fn handle_hover( use lsp_types::{GotoDefinitionParams, GotoDefinitionResponse, Hover}; pub fn handle_goto_definition( - db: &LanguageServerDatabase, - workspace: &Workspace, + db: &mut LanguageServerDatabase, + workspace: &mut Workspace, params: GotoDefinitionParams, ) -> Result> { // Convert the position to an offset in the file diff --git a/crates/language-server/src/language_server.rs b/crates/language-server/src/language_server.rs index ed8f87b10..a2dda55b8 100644 --- a/crates/language-server/src/language_server.rs +++ b/crates/language-server/src/language_server.rs @@ -12,13 +12,13 @@ use crate::oneshot_responder::OneshotResponder; use tower_lsp::{jsonrpc::Result, Client, LanguageServer}; pub(crate) struct Server { - pub(crate) messaging: Arc>, - pub(crate) client: Arc>, + pub(crate) messaging: Arc>, + pub(crate) client: Arc>, } impl Server { pub(crate) async fn register_watchers(&self) -> Result<()> { - let client = self.client.read().await; + let client = self.client.lock().await; let registration = Registration { id: String::from("watch-fe-files"), method: String::from("workspace/didChangeWatchedFiles"), @@ -36,8 +36,8 @@ impl Server { } 
pub(crate) fn new(client: Client) -> Self { - let messaging = Arc::new(tokio::sync::RwLock::new(MessageChannels::new())); - let client = Arc::new(tokio::sync::RwLock::new(client)); + let messaging = Arc::new(tokio::sync::Mutex::new(MessageChannels::new())); + let client = Arc::new(tokio::sync::Mutex::new(client)); Self { messaging, client } } @@ -48,7 +48,7 @@ impl Server { impl LanguageServer for Server { async fn initialize(&self, initialize_params: InitializeParams) -> Result { // forward the initialize request to the messaging system - let messaging = self.messaging.read().await; + let messaging = self.messaging.lock().await; let rx = messaging.send_initialize(initialize_params); info!("awaiting initialization result"); @@ -67,27 +67,27 @@ impl LanguageServer for Server { } async fn did_open(&self, params: lsp_types::DidOpenTextDocumentParams) { - let messaging = self.messaging.read().await; + let messaging = self.messaging.lock().await; messaging.send_did_open(params); } async fn did_change(&self, params: lsp_types::DidChangeTextDocumentParams) { - let messaging = self.messaging.read().await; + let messaging = self.messaging.lock().await; messaging.send_did_change(params); } async fn did_close(&self, params: DidCloseTextDocumentParams) { - let messaging = self.messaging.read().await; + let messaging = self.messaging.lock().await; messaging.send_did_close(params); } async fn did_change_watched_files(&self, params: DidChangeWatchedFilesParams) { - let messaging = self.messaging.read().await; + let messaging = self.messaging.lock().await; messaging.send_did_change_watched_files(params); } async fn hover(&self, params: lsp_types::HoverParams) -> Result> { - let messaging = self.messaging.read().await; + let messaging = self.messaging.lock().await; let rx = messaging.send_hover(params); rx.await.unwrap() } @@ -96,7 +96,7 @@ impl LanguageServer for Server { &self, params: lsp_types::GotoDefinitionParams, ) -> Result> { - let messaging = self.messaging.read().await; + let messaging = self.messaging.lock().await; let rx = messaging.send_goto_definition(params); rx.await.unwrap() } diff --git a/crates/language-server/src/logger.rs b/crates/language-server/src/logger.rs index 5a652ff01..918a80d58 100644 --- a/crates/language-server/src/logger.rs +++ b/crates/language-server/src/logger.rs @@ -46,12 +46,12 @@ pub fn setup_logger( pub async fn handle_log_messages( mut rx: tokio::sync::mpsc::UnboundedReceiver<(String, MessageType)>, - client: Arc>, + client: Arc>, ) -> tokio::sync::mpsc::UnboundedReceiver { loop { let (message, message_type) = rx.recv().await.unwrap(); // let message_type = match - let client = client.read().await; + let client = client.lock().await; client.log_message(message_type, message).await; } } diff --git a/crates/language-server/src/main.rs b/crates/language-server/src/main.rs index c48334ccf..9ee9355e3 100644 --- a/crates/language-server/src/main.rs +++ b/crates/language-server/src/main.rs @@ -36,14 +36,8 @@ async fn main() { let rx = setup_logger(log::Level::Info).unwrap(); // separate runtime for the backend - // let backend_runtime = tokio::runtime::Builder::new_multi_thread() - // .worker_threads(4) - // .enable_all() - // .build() - // .unwrap(); - - // use a single threaded runtime instead - let backend_runtime = tokio::runtime::Builder::new_current_thread() + let backend_runtime = tokio::runtime::Builder::new_multi_thread() + .worker_threads(4) .enable_all() .build() .unwrap(); From 64e24f1a66c4326025b92e1a65659964a5039453 Mon Sep 17 00:00:00 2001 From: Micah Date: 
Sun, 10 Mar 2024 16:53:42 -0500 Subject: [PATCH 35/66] language server snapshot mechanism --- .../src/name_resolution/diagnostics.rs | 4 ++ crates/hir-analysis/src/ty/diagnostics.rs | 16 +++++ crates/hir/src/diagnostics.rs | 10 ++++ crates/hir/src/lower/parse.rs | 4 ++ crates/language-server/src/backend.rs | 59 +++++++++++-------- crates/language-server/src/db.rs | 13 +++- crates/language-server/src/diagnostics.rs | 14 ++--- crates/language-server/src/goto.rs | 18 +++--- .../language-server/src/handlers/request.rs | 47 ++++++++------- crates/language-server/src/language_server.rs | 24 ++++---- crates/language-server/src/logger.rs | 4 +- crates/language-server/src/workspace.rs | 35 ++++++----- 12 files changed, 154 insertions(+), 94 deletions(-) diff --git a/crates/hir-analysis/src/name_resolution/diagnostics.rs b/crates/hir-analysis/src/name_resolution/diagnostics.rs index 34c0bd274..3fe6c025d 100644 --- a/crates/hir-analysis/src/name_resolution/diagnostics.rs +++ b/crates/hir-analysis/src/name_resolution/diagnostics.rs @@ -292,4 +292,8 @@ impl DiagnosticVoucher for NameResDiag { CompleteDiagnostic::new(self.severity(), message, sub_diags, vec![], error_code) } + + fn clone_box(&self) -> Box { + Box::new(self.clone()) + } } diff --git a/crates/hir-analysis/src/ty/diagnostics.rs b/crates/hir-analysis/src/ty/diagnostics.rs index 100a6328e..44ef63b65 100644 --- a/crates/hir-analysis/src/ty/diagnostics.rs +++ b/crates/hir-analysis/src/ty/diagnostics.rs @@ -516,6 +516,10 @@ impl DiagnosticVoucher for TyLowerDiag { CompleteDiagnostic::new(severity, message, sub_diags, vec![], error_code) } + + fn clone_box(&self) -> Box { + Box::new(self.clone()) + } } #[derive(Debug, Clone, PartialEq, Eq, Hash)] @@ -614,6 +618,10 @@ impl DiagnosticVoucher for TraitLowerDiag { CompleteDiagnostic::new(severity, message, sub_diags, vec![], error_code) } + + fn clone_box(&self) -> Box { + Box::new(self.clone()) + } } #[derive(Debug, Clone, PartialEq, Eq, Hash)] @@ -813,6 +821,10 @@ impl DiagnosticVoucher for TraitConstraintDiag { CompleteDiagnostic::new(severity, message, sub_diags, vec![], error_code) } + + fn clone_box(&self) -> Box { + Box::new(self.clone()) + } } #[derive(Debug, Clone, PartialEq, Eq, Hash)] @@ -1259,4 +1271,8 @@ impl DiagnosticVoucher for ImplDiag { CompleteDiagnostic::new(severity, message, sub_diags, vec![], error_code) } + + fn clone_box(&self) -> Box { + Box::new(self.clone()) + } } diff --git a/crates/hir/src/diagnostics.rs b/crates/hir/src/diagnostics.rs index 4ec953039..106ff6af2 100644 --- a/crates/hir/src/diagnostics.rs +++ b/crates/hir/src/diagnostics.rs @@ -24,6 +24,7 @@ pub trait DiagnosticVoucher: Send { fn error_code(&self) -> GlobalErrorCode; /// Makes a [`CompleteDiagnostic`]. 
fn to_complete(&self, db: &dyn SpannedHirDb) -> CompleteDiagnostic; + fn clone_box(&self) -> Box; } impl DiagnosticVoucher for CompleteDiagnostic { @@ -34,6 +35,10 @@ impl DiagnosticVoucher for CompleteDiagnostic { fn to_complete(&self, _db: &dyn SpannedHirDb) -> CompleteDiagnostic { self.clone() } + + fn clone_box(&self) -> Box { + Box::new(self.clone()) + } } impl DiagnosticVoucher for Box { @@ -44,4 +49,9 @@ impl DiagnosticVoucher for Box { fn to_complete(&self, db: &dyn SpannedHirDb) -> CompleteDiagnostic { self.as_ref().to_complete(db) } + + fn clone_box(&self) -> Box { + self.as_ref().clone_box() + } } + diff --git a/crates/hir/src/lower/parse.rs b/crates/hir/src/lower/parse.rs index c1e6fed2d..fbdabec06 100644 --- a/crates/hir/src/lower/parse.rs +++ b/crates/hir/src/lower/parse.rs @@ -54,4 +54,8 @@ impl DiagnosticVoucher for ParserError { error_code, ) } + + fn clone_box(&self) -> Box { + Box::new(self.clone()) + } } diff --git a/crates/language-server/src/backend.rs b/crates/language-server/src/backend.rs index 9d3f5f089..f98ccc411 100644 --- a/crates/language-server/src/backend.rs +++ b/crates/language-server/src/backend.rs @@ -2,8 +2,9 @@ use crate::handlers::request::{handle_goto_definition, handle_hover}; use crate::workspace::SyncableIngotFileContext; use futures::TryStreamExt; use lsp_types::TextDocumentItem; +use salsa::{ParallelDatabase, Snapshot}; use std::sync::Arc; -use tokio::sync::Mutex; +use tokio::sync::{Mutex, RwLock}; use crate::capabilities::server_capabilities; use crate::db::LanguageServerDatabase; @@ -20,16 +21,16 @@ use tokio_stream::StreamExt; use tower_lsp::Client; pub struct Backend { - pub(crate) messaging: Arc>, - pub(crate) client: Arc>, + pub(crate) messaging: Arc>, + pub(crate) client: Arc>, pub(crate) db: LanguageServerDatabase, - pub(crate) workspace: Workspace, + pub(crate) workspace: Arc>, } impl Backend { - pub fn new(client: Arc>, messaging: Arc>) -> Self { + pub fn new(client: Arc>, messaging: Arc>) -> Self { let db = LanguageServerDatabase::default(); - let workspace = Workspace::default(); + let workspace = Arc::new(RwLock::new(Workspace::default())); Self { messaging, @@ -40,12 +41,12 @@ impl Backend { } pub async fn handle_streams(mut self) { info!("setting up streams"); - let workspace = &mut self.workspace; + let workspace = self.workspace.clone(); let db = &mut self.db; let client = self.client.clone(); let messaging = self.messaging.clone(); - let messaging = messaging.lock().await; + let messaging = messaging.read().await; let mut initialized_stream = BroadcastStream::new(messaging.subscribe_initialize()).fuse(); let mut shutdown_stream = BroadcastStream::new(messaging.subscribe_shutdown()).fuse(); @@ -85,6 +86,8 @@ impl Backend { if let Ok((initialization_params, responder)) = result { info!("initializing language server!"); // setup workspace + // let workspace = self.workspace.clone(); + let mut workspace = self.workspace.write().await; let _ = workspace.set_workspace_root( db, initialization_params @@ -114,10 +117,13 @@ impl Backend { } Some(Ok(doc)) = change_stream.next() => { info!("change detected: {:?}", doc.uri); - update_inputs(workspace, db, &doc).await; - handle_diagnostics(client.clone(), workspace, db, &doc).await; + update_inputs(workspace.clone(), db, &doc).await; + tokio::spawn( + handle_diagnostics(client.clone(), workspace.clone(), db.snapshot(), doc) + ); } Some(Ok(params)) = did_close_stream.next() => { + let workspace = &mut workspace.write().await; let input = workspace .touch_input_from_file_path( db, @@ -142,6 
+148,7 @@ impl Backend { lsp_types::FileChangeType::CREATED => { // TODO: handle this more carefully! // this is inefficient, a hack for now + let workspace = &mut workspace.write().await; let _ = workspace.sync(db); let input = workspace .touch_input_from_file_path(db, path.to_str().unwrap()) @@ -149,12 +156,14 @@ impl Backend { let _ = input.sync(db, None); } lsp_types::FileChangeType::CHANGED => { + let workspace = &mut workspace.write().await; let input = workspace .touch_input_from_file_path(db, path.to_str().unwrap()) .unwrap(); let _ = input.sync(db, None); } lsp_types::FileChangeType::DELETED => { + let workspace = &mut workspace.write().await; // TODO: handle this more carefully! // this is inefficient, a hack for now let _ = workspace.sync(db); @@ -164,7 +173,7 @@ impl Backend { // collect diagnostics for the file if change.typ != lsp_types::FileChangeType::DELETED { let text = std::fs::read_to_string(path).unwrap(); - update_inputs(workspace, db, &TextDocumentItem { + update_inputs(workspace.clone(), db, &TextDocumentItem { uri: uri.clone(), language_id: LANGUAGE_ID.to_string(), version: 0, @@ -172,9 +181,9 @@ impl Backend { }).await; handle_diagnostics( self.client.clone(), - workspace, - db, - &TextDocumentItem { + workspace.clone(), + db.snapshot(), + TextDocumentItem { uri: uri.clone(), language_id: LANGUAGE_ID.to_string(), version: 0, @@ -186,11 +195,13 @@ impl Backend { } } Some(Ok((params, responder))) = hover_stream.next() => { - let response = handle_hover(db, workspace, params); + let workspace = &workspace.read().await; + let response = handle_hover(&db.snapshot(), workspace, params); responder.respond(response); } Some(Ok((params, responder))) = goto_definition_stream.next() => { - let response = handle_goto_definition(db, workspace, params); + let workspace = &workspace.read().await; + let response = handle_goto_definition(&db.snapshot(), workspace, params); responder.respond(response); } } @@ -199,10 +210,11 @@ impl Backend { } async fn update_inputs( - workspace: &mut Workspace, + workspace: Arc>, db: &mut LanguageServerDatabase, params: &TextDocumentItem, ) { + let workspace = &mut workspace.write().await; let input = workspace .touch_input_from_file_path( db, @@ -218,20 +230,21 @@ async fn update_inputs( } async fn handle_diagnostics( - client: Arc>, - workspace: &Workspace, - db: &LanguageServerDatabase, - params: &TextDocumentItem, + client: Arc>, + workspace: Arc>, + db: Snapshot, + params: TextDocumentItem, ) { + let workspace = &workspace.read().await; // let client = &mut client.lock().await; - let diagnostics = get_diagnostics(db, workspace, params.uri.clone()); + let diagnostics = get_diagnostics(&db, workspace, params.uri.clone()); let diagnostics = diagnostics .unwrap() .into_iter() .map(|(uri, diags)| async { let client = client.clone(); - let client = client.lock().await; + let client = client.read().await; client.publish_diagnostics(uri, diags, None).await }) .collect::>(); diff --git a/crates/language-server/src/db.rs b/crates/language-server/src/db.rs index 7c1d0a80e..17898a8fe 100644 --- a/crates/language-server/src/db.rs +++ b/crates/language-server/src/db.rs @@ -10,6 +10,7 @@ use hir_analysis::{ name_resolution::{DefConflictAnalysisPass, ImportAnalysisPass, PathAnalysisPass}, HirAnalysisDb, }; +use salsa::{ParallelDatabase, Snapshot}; use crate::goto::Cursor; @@ -46,7 +47,7 @@ impl LanguageServerDatabase { } pub fn find_enclosing_item( - &mut self, + &self, top_mod: TopLevelMod, cursor: Cursor, ) -> Option { @@ -75,7 +76,7 @@ impl 
LanguageServerDatabase { smallest_enclosing_item } - pub fn finalize_diags(&self, diags: Vec>) -> Vec { + pub fn finalize_diags(&self, diags: &Vec>) -> Vec { let mut diags: Vec<_> = diags.iter().map(|d| d.to_complete(self)).collect(); diags.sort_by(|lhs, rhs| match lhs.error_code.cmp(&rhs.error_code) { std::cmp::Ordering::Equal => lhs.primary_span().cmp(&rhs.primary_span()), @@ -99,6 +100,14 @@ impl Default for LanguageServerDatabase { } } +impl ParallelDatabase for LanguageServerDatabase { + fn snapshot(&self) -> Snapshot { + Snapshot::new(LanguageServerDatabase { + storage: self.storage.snapshot(), + }) + } +} + fn initialize_analysis_pass(db: &LanguageServerDatabase) -> AnalysisPassManager<'_> { let mut pass_manager = AnalysisPassManager::new(); pass_manager.add_module_pass(Box::new(ParsingPass::new(db))); diff --git a/crates/language-server/src/diagnostics.rs b/crates/language-server/src/diagnostics.rs index 17b3247b6..ed07147d8 100644 --- a/crates/language-server/src/diagnostics.rs +++ b/crates/language-server/src/diagnostics.rs @@ -6,11 +6,11 @@ use codespan_reporting as cs; use cs::{diagnostic as cs_diag, files as cs_files}; use common::{ - diagnostics::{LabelStyle, Severity}, - InputFile, + diagnostics::{LabelStyle, Severity}, InputDb, InputFile }; use fxhash::FxHashMap; use hir::diagnostics::DiagnosticVoucher; +use salsa::Snapshot; use crate::{ db::{LanguageServerDatabase, LanguageServerDb}, @@ -128,18 +128,18 @@ impl<'a> cs_files::Files<'a> for LanguageServerDatabase { } fn run_diagnostics( - db: &LanguageServerDatabase, + db: &Snapshot, workspace: &Workspace, path: &str, ) -> Vec { let file_path = path; - let top_mod = workspace.top_mod_from_file_path(db, file_path).unwrap(); - let diags = db.analyze_top_mod(top_mod); + let top_mod = workspace.top_mod_from_file_path(&db, file_path).unwrap(); + let diags = &db.analyze_top_mod(top_mod); db.finalize_diags(diags) } pub fn get_diagnostics( - db: &LanguageServerDatabase, + db: &Snapshot, workspace: &Workspace, uri: lsp_types::Url, ) -> Result>, Error> { @@ -147,7 +147,7 @@ pub fn get_diagnostics( let diagnostics = diags .into_iter() - .flat_map(|diag| diag_to_lsp(diag, db).clone()); + .flat_map(|diag| diag_to_lsp(diag, db.as_input_db()).clone()); // we need to reduce the diagnostics to a map from URL to Vec let mut result = FxHashMap::>::default(); diff --git a/crates/language-server/src/goto.rs b/crates/language-server/src/goto.rs index 730348927..3fe708c5e 100644 --- a/crates/language-server/src/goto.rs +++ b/crates/language-server/src/goto.rs @@ -4,7 +4,8 @@ use hir::{ visitor::{prelude::LazyPathSpan, Visitor, VisitorCtxt}, HirDb, }; -use hir_analysis::name_resolution::EarlyResolvedPath; +use hir_analysis::{name_resolution::EarlyResolvedPath, HirAnalysisDb}; +use salsa::Snapshot; use crate::db::{LanguageServerDatabase, LanguageServerDb}; use common::diagnostics::Span; @@ -61,7 +62,7 @@ fn smallest_enclosing_path(cursor: Cursor, path_map: &GotoPathMap) -> Option, top_mod: TopLevelMod, cursor: Cursor, ) -> Option { @@ -81,7 +82,7 @@ pub fn goto_enclosing_path( let (path_id, scope_id) = goto_starting_path; // Resolve path. 
- let resolved_path = hir_analysis::name_resolution::resolve_path_early(db, path_id, scope_id); + let resolved_path = hir_analysis::name_resolution::resolve_path_early(db.as_hir_analysis_db(), path_id, scope_id); Some(resolved_path) } @@ -94,6 +95,7 @@ mod tests { use common::input::IngotKind; use dir_test::{dir_test, Fixture}; use fe_compiler_test_utils::snap_test; + use salsa::ParallelDatabase; use std::path::Path; fn extract_multiple_cursor_positions_from_spans( @@ -139,7 +141,7 @@ mod tests { .set_text(db) .to((*fixture.content()).to_string()); let top_mod = workspace - .top_mod_from_file_path(db, fe_source_path) + .top_mod_from_file_path(&db.snapshot(), fe_source_path) .unwrap(); let ingot = workspace.touch_ingot_from_file_path(db, fixture.path()); @@ -149,7 +151,7 @@ mod tests { let mut cursor_path_map: FxHashMap = FxHashMap::default(); for cursor in &cursors { - let early_resolution = goto_enclosing_path(db, top_mod, *cursor); + let early_resolution = goto_enclosing_path(&db.snapshot(), top_mod, *cursor); let goto_info = match early_resolution { Some(EarlyResolvedPath::Full(bucket)) => { @@ -195,7 +197,7 @@ mod tests { let input = workspace.touch_input_from_file_path(db, fixture.path()).unwrap(); input.set_text(db).to((*fixture.content()).to_string()); let top_mod = workspace - .top_mod_from_file_path(db, fixture.path()) + .top_mod_from_file_path(&db.snapshot(), fixture.path()) .unwrap(); let cursors = extract_multiple_cursor_positions_from_spans(db, top_mod); @@ -203,7 +205,7 @@ mod tests { let mut cursor_path_map: FxHashMap = FxHashMap::default(); for cursor in &cursors { - let resolved_path = goto_enclosing_path(db, top_mod, *cursor); + let resolved_path = goto_enclosing_path(&db.snapshot(), top_mod, *cursor); if let Some(path) = resolved_path { match path { @@ -252,7 +254,7 @@ mod tests { .set_text(db) .to((*fixture.content()).to_string()); let top_mod = workspace - .top_mod_from_file_path(db, fixture.path()) + .top_mod_from_file_path(&db.snapshot(), fixture.path()) .unwrap(); let cursors = extract_multiple_cursor_positions_from_spans(db, top_mod); diff --git a/crates/language-server/src/handlers/request.rs b/crates/language-server/src/handlers/request.rs index 716acf8f1..a204c66d2 100644 --- a/crates/language-server/src/handlers/request.rs +++ b/crates/language-server/src/handlers/request.rs @@ -1,9 +1,11 @@ use std::io::BufRead; -use common::input::IngotKind; -use hir_analysis::name_resolution::{EarlyResolvedPath, NameRes}; +use common::{input::IngotKind, InputDb}; +use hir::SpannedHirDb; +use hir_analysis::{name_resolution::{EarlyResolvedPath, NameRes}, HirAnalysisDb}; use log::info; +use salsa::{ParallelDatabase, Snapshot}; use tower_lsp::jsonrpc::Result; use crate::{ @@ -15,8 +17,8 @@ use crate::{ use lsp_server::ResponseError; pub fn handle_hover( - db: &mut LanguageServerDatabase, - workspace: &mut Workspace, + db: &Snapshot, + workspace: &Workspace, params: lsp_types::HoverParams, ) -> Result> { info!("handling hover"); @@ -26,15 +28,16 @@ pub fn handle_hover( .text_document .uri .path(); - let file = std::fs::File::open(file_path).unwrap(); - let reader = std::io::BufReader::new(file); - let line = reader + info!("getting hover info for file_path: {:?}", file_path); + let input = workspace.get_input_from_file_path(db, file_path); + let ingot = input.map(|input| input.ingot(db.as_input_db())); + + let file_text = input.unwrap().text(db.as_input_db()); + let line = file_text .lines() .nth(params.text_document_position_params.position.line as usize) - .unwrap() .unwrap(); 
- let file_text = std::fs::read_to_string(file_path).unwrap(); // let cursor: Cursor = params.text_document_position_params.position.into(); let cursor: Cursor = to_offset_from_position( @@ -42,16 +45,12 @@ pub fn handle_hover( file_text.as_str(), ); // let file_path = std::path::Path::new(file_path); - info!("getting hover info for file_path: {:?}", file_path); - let ingot = workspace - .touch_input_from_file_path(db, file_path) - .map(|input| input.ingot(db)); // info!("got ingot: {:?} of type {:?}", ingot, ingot.map(|ingot| ingot.kind(&mut state.db))); let ingot_info: Option = { let ingot_type = match ingot { - Some(ingot) => match ingot.kind(db) { + Some(ingot) => match ingot.kind(db.as_input_db()) { IngotKind::StandAlone => None, IngotKind::Local => Some("Local ingot"), IngotKind::External => Some("External ingot"), @@ -59,10 +58,10 @@ pub fn handle_hover( }, None => Some("No ingot information available"), }; - let ingot_file_count = ingot.unwrap().files(db).len(); + let ingot_file_count = ingot.unwrap().files(db.as_input_db()).len(); let ingot_path = ingot .unwrap() - .path(db) + .path(db.as_input_db()) .strip_prefix(workspace.root_path.clone().unwrap_or("".into())) .ok(); @@ -71,19 +70,19 @@ pub fn handle_hover( }) }; - let top_mod = workspace.top_mod_from_file_path(db, file_path).unwrap(); + let top_mod = workspace.top_mod_from_file_path(&db.snapshot(), file_path).unwrap(); let early_resolution = goto_enclosing_path(db, top_mod, cursor); let goto_info = match early_resolution { Some(EarlyResolvedPath::Full(bucket)) => bucket .iter() - .map(|x| x.pretty_path(db).unwrap()) + .map(|x| x.pretty_path(db.as_hir_analysis_db()).unwrap()) .collect::>() .join("\n"), Some(EarlyResolvedPath::Partial { res, unresolved_from: _, - }) => res.pretty_path(db).unwrap(), + }) => res.pretty_path(db.as_hir_analysis_db()).unwrap(), None => String::from("No goto info available"), }; @@ -106,8 +105,8 @@ pub fn handle_hover( use lsp_types::{GotoDefinitionParams, GotoDefinitionResponse, Hover}; pub fn handle_goto_definition( - db: &mut LanguageServerDatabase, - workspace: &mut Workspace, + db: &Snapshot, + workspace: &Workspace, params: GotoDefinitionParams, ) -> Result> { // Convert the position to an offset in the file @@ -117,7 +116,7 @@ pub fn handle_goto_definition( // Get the module and the goto info let file_path = params.text_document.uri.path(); - let top_mod = workspace.top_mod_from_file_path(db, file_path).unwrap(); + let top_mod = workspace.top_mod_from_file_path(&db.snapshot(), file_path).unwrap(); let goto_info = goto_enclosing_path(db, top_mod, cursor); // Convert the goto info to a Location @@ -137,13 +136,13 @@ pub fn handle_goto_definition( let locations = scopes .iter() .filter_map(|scope| *scope) - .map(|scope| to_lsp_location_from_scope(scope, db)) + .map(|scope| to_lsp_location_from_scope(scope, db.as_spanned_hir_db())) .collect::>(); let errors = scopes .iter() .filter_map(|scope| *scope) - .map(|scope| to_lsp_location_from_scope(scope, db)) + .map(|scope| to_lsp_location_from_scope(scope, db.as_spanned_hir_db())) .filter_map(std::result::Result::err) .map(|err| err.to_string()) .collect::>() diff --git a/crates/language-server/src/language_server.rs b/crates/language-server/src/language_server.rs index a2dda55b8..ed8f87b10 100644 --- a/crates/language-server/src/language_server.rs +++ b/crates/language-server/src/language_server.rs @@ -12,13 +12,13 @@ use crate::oneshot_responder::OneshotResponder; use tower_lsp::{jsonrpc::Result, Client, LanguageServer}; pub(crate) struct Server { - 
pub(crate) messaging: Arc>, - pub(crate) client: Arc>, + pub(crate) messaging: Arc>, + pub(crate) client: Arc>, } impl Server { pub(crate) async fn register_watchers(&self) -> Result<()> { - let client = self.client.lock().await; + let client = self.client.read().await; let registration = Registration { id: String::from("watch-fe-files"), method: String::from("workspace/didChangeWatchedFiles"), @@ -36,8 +36,8 @@ impl Server { } pub(crate) fn new(client: Client) -> Self { - let messaging = Arc::new(tokio::sync::Mutex::new(MessageChannels::new())); - let client = Arc::new(tokio::sync::Mutex::new(client)); + let messaging = Arc::new(tokio::sync::RwLock::new(MessageChannels::new())); + let client = Arc::new(tokio::sync::RwLock::new(client)); Self { messaging, client } } @@ -48,7 +48,7 @@ impl Server { impl LanguageServer for Server { async fn initialize(&self, initialize_params: InitializeParams) -> Result { // forward the initialize request to the messaging system - let messaging = self.messaging.lock().await; + let messaging = self.messaging.read().await; let rx = messaging.send_initialize(initialize_params); info!("awaiting initialization result"); @@ -67,27 +67,27 @@ impl LanguageServer for Server { } async fn did_open(&self, params: lsp_types::DidOpenTextDocumentParams) { - let messaging = self.messaging.lock().await; + let messaging = self.messaging.read().await; messaging.send_did_open(params); } async fn did_change(&self, params: lsp_types::DidChangeTextDocumentParams) { - let messaging = self.messaging.lock().await; + let messaging = self.messaging.read().await; messaging.send_did_change(params); } async fn did_close(&self, params: DidCloseTextDocumentParams) { - let messaging = self.messaging.lock().await; + let messaging = self.messaging.read().await; messaging.send_did_close(params); } async fn did_change_watched_files(&self, params: DidChangeWatchedFilesParams) { - let messaging = self.messaging.lock().await; + let messaging = self.messaging.read().await; messaging.send_did_change_watched_files(params); } async fn hover(&self, params: lsp_types::HoverParams) -> Result> { - let messaging = self.messaging.lock().await; + let messaging = self.messaging.read().await; let rx = messaging.send_hover(params); rx.await.unwrap() } @@ -96,7 +96,7 @@ impl LanguageServer for Server { &self, params: lsp_types::GotoDefinitionParams, ) -> Result> { - let messaging = self.messaging.lock().await; + let messaging = self.messaging.read().await; let rx = messaging.send_goto_definition(params); rx.await.unwrap() } diff --git a/crates/language-server/src/logger.rs b/crates/language-server/src/logger.rs index 918a80d58..5a652ff01 100644 --- a/crates/language-server/src/logger.rs +++ b/crates/language-server/src/logger.rs @@ -46,12 +46,12 @@ pub fn setup_logger( pub async fn handle_log_messages( mut rx: tokio::sync::mpsc::UnboundedReceiver<(String, MessageType)>, - client: Arc>, + client: Arc>, ) -> tokio::sync::mpsc::UnboundedReceiver { loop { let (message, message_type) = rx.recv().await.unwrap(); // let message_type = match - let client = client.lock().await; + let client = client.read().await; client.log_message(message_type, message).await; } } diff --git a/crates/language-server/src/workspace.rs b/crates/language-server/src/workspace.rs index 792408106..006d7267e 100644 --- a/crates/language-server/src/workspace.rs +++ b/crates/language-server/src/workspace.rs @@ -5,9 +5,10 @@ use common::{ input::{IngotKind, Version}, InputFile, InputIngot, }; -use hir::{hir_def::TopLevelMod, 
lower::map_file_to_mod}; +use hir::{hir_def::TopLevelMod, lower::map_file_to_mod, LowerHirDb}; use log::info; use patricia_tree::StringPatriciaMap; +use salsa::Snapshot; use crate::db::LanguageServerDatabase; @@ -26,7 +27,7 @@ pub trait IngotFileContext { ) -> Option; fn get_ingot_from_file_path( &self, - db: &LanguageServerDatabase, + db: &Snapshot, path: &str, ) -> Option; fn touch_ingot_from_file_path( @@ -36,12 +37,12 @@ pub trait IngotFileContext { ) -> Option; fn get_input_from_file_path( &self, - db: &LanguageServerDatabase, + db: &Snapshot, path: &str, ) -> Option; fn top_mod_from_file_path( &self, - db: &LanguageServerDatabase, + db: &Snapshot, path: &str, ) -> Option; fn rename_file( @@ -121,7 +122,7 @@ impl IngotFileContext for LocalIngotContext { fn get_input_from_file_path( &self, - _db: &LanguageServerDatabase, + _db: &Snapshot, path: &str, ) -> Option { self.files.get(path).copied() @@ -137,7 +138,7 @@ impl IngotFileContext for LocalIngotContext { fn get_ingot_from_file_path( &self, - _db: &LanguageServerDatabase, + _db: &Snapshot, _path: &str, ) -> Option { Some(self.ingot) @@ -145,11 +146,11 @@ impl IngotFileContext for LocalIngotContext { fn top_mod_from_file_path( &self, - db: &LanguageServerDatabase, + db: &Snapshot, path: &str, ) -> Option { let file = self.get_input_from_file_path(db, path)?; - Some(map_file_to_mod(db, file)) + Some(map_file_to_mod(db.as_lower_hir_db(), file)) } fn rename_file( @@ -211,7 +212,7 @@ impl IngotFileContext for StandaloneIngotContext { fn get_input_from_file_path( &self, - _db: &LanguageServerDatabase, + _db: &Snapshot, path: &str, ) -> Option { self.files.get(path).copied() @@ -243,7 +244,7 @@ impl IngotFileContext for StandaloneIngotContext { fn get_ingot_from_file_path( &self, - _db: &LanguageServerDatabase, + _db: &Snapshot, path: &str, ) -> Option { // this shouldn't mutate, it should only get the ingot or return `None` @@ -254,11 +255,11 @@ impl IngotFileContext for StandaloneIngotContext { fn top_mod_from_file_path( &self, - db: &LanguageServerDatabase, + db: &Snapshot, path: &str, ) -> Option { let file = self.get_input_from_file_path(db, path)?; - Some(map_file_to_mod(db, file)) + Some(map_file_to_mod(db.as_lower_hir_db(), file)) } fn rename_file( @@ -425,7 +426,7 @@ impl IngotFileContext for Workspace { fn get_input_from_file_path( &self, - db: &LanguageServerDatabase, + db: &Snapshot, path: &str, ) -> Option { let ctx = get_containing_ingot(&self.ingot_contexts, path); @@ -451,7 +452,7 @@ impl IngotFileContext for Workspace { fn get_ingot_from_file_path( &self, - db: &LanguageServerDatabase, + db: &Snapshot, path: &str, ) -> Option { let ctx = get_containing_ingot(&self.ingot_contexts, path); @@ -464,7 +465,7 @@ impl IngotFileContext for Workspace { fn top_mod_from_file_path( &self, - db: &LanguageServerDatabase, + db: &Snapshot, path: &str, ) -> Option { let ctx = get_containing_ingot(&self.ingot_contexts, path); @@ -581,6 +582,8 @@ impl SyncableIngotFileContext for Workspace { #[cfg(test)] mod tests { + use salsa::ParallelDatabase; + use crate::workspace::{get_containing_ingot_mut, IngotFileContext, Workspace, FE_CONFIG_SUFFIX}; use std::path::PathBuf; @@ -730,7 +733,7 @@ mod tests { // file.sync(&mut db, None); // this would panic if a file has been added to multiple ingots - let _top_mod = workspace.top_mod_from_file_path(&mut db, src_path.as_str()); + let _top_mod = workspace.top_mod_from_file_path(&db.snapshot(), src_path.as_str()); } } From 9c46292b854bd4af735e038c75aaab130eb66ec9 Mon Sep 17 00:00:00 2001 From: Micah 
Date: Sun, 10 Mar 2024 20:44:46 -0500 Subject: [PATCH 36/66] multithreaded diagnostics --- crates/language-server/src/backend.rs | 67 ++++++++++++------- .../language-server/src/handlers/request.rs | 23 ++++--- .../test_files/single_ingot/src/foo.fe | 3 +- 3 files changed, 59 insertions(+), 34 deletions(-) diff --git a/crates/language-server/src/backend.rs b/crates/language-server/src/backend.rs index f98ccc411..8d7d0027a 100644 --- a/crates/language-server/src/backend.rs +++ b/crates/language-server/src/backend.rs @@ -117,9 +117,13 @@ impl Backend { } Some(Ok(doc)) = change_stream.next() => { info!("change detected: {:?}", doc.uri); - update_inputs(workspace.clone(), db, &doc).await; + update_inputs(workspace.clone(), db, doc.clone()).await; + + let db = db.snapshot(); + let client = client.clone(); + let workspace = workspace.clone(); tokio::spawn( - handle_diagnostics(client.clone(), workspace.clone(), db.snapshot(), doc) + async move { handle_diagnostics(client, workspace, db, doc.uri).await } ); } Some(Ok(params)) = did_close_stream.next() => { @@ -173,36 +177,53 @@ impl Backend { // collect diagnostics for the file if change.typ != lsp_types::FileChangeType::DELETED { let text = std::fs::read_to_string(path).unwrap(); - update_inputs(workspace.clone(), db, &TextDocumentItem { + update_inputs(workspace.clone(), db, TextDocumentItem { uri: uri.clone(), language_id: LANGUAGE_ID.to_string(), version: 0, text: text.clone(), }).await; - handle_diagnostics( - self.client.clone(), - workspace.clone(), - db.snapshot(), - TextDocumentItem { - uri: uri.clone(), - language_id: LANGUAGE_ID.to_string(), - version: 0, - text: text, - }, - ) - .await; + + let client = client.clone(); + let workspace = workspace.clone(); + let db = db.snapshot(); + + tokio::spawn( + async move { + handle_diagnostics( + client, + workspace, + db, + uri.clone(), + ).await + } + ); } } } Some(Ok((params, responder))) = hover_stream.next() => { - let workspace = &workspace.read().await; - let response = handle_hover(&db.snapshot(), workspace, params); + let db = db.snapshot(); + let workspace = workspace.clone(); + let response = match tokio::spawn(handle_hover(db, workspace, params)).await { + Ok(response) => response, + Err(e) => { + eprintln!("Error handling hover: {:?}", e); + Ok(None) + } + }; responder.respond(response); } Some(Ok((params, responder))) = goto_definition_stream.next() => { - let workspace = &workspace.read().await; - let response = handle_goto_definition(&db.snapshot(), workspace, params); - responder.respond(response); + let db = db.snapshot(); + let workspace = workspace.clone(); + let response = match handle_goto_definition(db, workspace, params).await { + Ok(response) => response, + Err(e) => { + eprintln!("Error handling goto definition: {:?}", e); + None + } + }; + responder.respond(Ok(response)); } } } @@ -212,7 +233,7 @@ impl Backend { async fn update_inputs( workspace: Arc>, db: &mut LanguageServerDatabase, - params: &TextDocumentItem, + params: TextDocumentItem, ) { let workspace = &mut workspace.write().await; let input = workspace @@ -233,11 +254,11 @@ async fn handle_diagnostics( client: Arc>, workspace: Arc>, db: Snapshot, - params: TextDocumentItem, + url: lsp_types::Url, ) { let workspace = &workspace.read().await; // let client = &mut client.lock().await; - let diagnostics = get_diagnostics(&db, workspace, params.uri.clone()); + let diagnostics = get_diagnostics(&db, workspace, url.clone()); let diagnostics = diagnostics .unwrap() diff --git 
a/crates/language-server/src/handlers/request.rs b/crates/language-server/src/handlers/request.rs index a204c66d2..643394a0e 100644 --- a/crates/language-server/src/handlers/request.rs +++ b/crates/language-server/src/handlers/request.rs @@ -1,4 +1,4 @@ -use std::io::BufRead; +use std::sync::Arc; use common::{input::IngotKind, InputDb}; use hir::SpannedHirDb; @@ -6,6 +6,7 @@ use hir_analysis::{name_resolution::{EarlyResolvedPath, NameRes}, HirAnalysisDb} use log::info; use salsa::{ParallelDatabase, Snapshot}; +use tokio::sync::RwLock; use tower_lsp::jsonrpc::Result; use crate::{ @@ -16,11 +17,12 @@ use crate::{ }; use lsp_server::ResponseError; -pub fn handle_hover( - db: &Snapshot, - workspace: &Workspace, +pub async fn handle_hover( + db: Snapshot, + workspace: Arc>, params: lsp_types::HoverParams, ) -> Result> { + let workspace = workspace.read().await; info!("handling hover"); // TODO: get more relevant information for the hover let file_path = ¶ms @@ -29,7 +31,7 @@ pub fn handle_hover( .uri .path(); info!("getting hover info for file_path: {:?}", file_path); - let input = workspace.get_input_from_file_path(db, file_path); + let input = workspace.get_input_from_file_path(&db, file_path); let ingot = input.map(|input| input.ingot(db.as_input_db())); let file_text = input.unwrap().text(db.as_input_db()); @@ -71,7 +73,7 @@ pub fn handle_hover( }; let top_mod = workspace.top_mod_from_file_path(&db.snapshot(), file_path).unwrap(); - let early_resolution = goto_enclosing_path(db, top_mod, cursor); + let early_resolution = goto_enclosing_path(&db, top_mod, cursor); let goto_info = match early_resolution { Some(EarlyResolvedPath::Full(bucket)) => bucket @@ -104,11 +106,12 @@ pub fn handle_hover( use lsp_types::{GotoDefinitionParams, GotoDefinitionResponse, Hover}; -pub fn handle_goto_definition( - db: &Snapshot, - workspace: &Workspace, +pub async fn handle_goto_definition( + db: Snapshot, + workspace: Arc>, params: GotoDefinitionParams, ) -> Result> { + let workspace = workspace.read().await; // Convert the position to an offset in the file let params = params.text_document_position_params; let file_text = std::fs::read_to_string(params.text_document.uri.path()).ok(); @@ -117,7 +120,7 @@ pub fn handle_goto_definition( // Get the module and the goto info let file_path = params.text_document.uri.path(); let top_mod = workspace.top_mod_from_file_path(&db.snapshot(), file_path).unwrap(); - let goto_info = goto_enclosing_path(db, top_mod, cursor); + let goto_info = goto_enclosing_path(&db, top_mod, cursor); // Convert the goto info to a Location let scopes = match goto_info { diff --git a/crates/language-server/test_files/single_ingot/src/foo.fe b/crates/language-server/test_files/single_ingot/src/foo.fe index 178d9ff50..180cd1e35 100644 --- a/crates/language-server/test_files/single_ingot/src/foo.fe +++ b/crates/language-server/test_files/single_ingot/src/foo.fe @@ -4,5 +4,6 @@ pub fn foo() { } pub struct Foo { - pub x: i32; + pub x: i32 + } \ No newline at end of file From 93a2c1b30a2f19d7c2c8682667d09cd63e71e641 Mon Sep 17 00:00:00 2001 From: Micah Date: Mon, 11 Mar 2024 01:05:00 -0500 Subject: [PATCH 37/66] language server: use mpsc instead of broadcast channels --- crates/language-server-macros/src/lib.rs | 176 +++++++++++------- crates/language-server/src/backend.rs | 67 +++---- crates/language-server/src/language_server.rs | 60 +++--- crates/language-server/src/logger.rs | 4 +- crates/language-server/src/main.rs | 11 +- 5 files changed, 175 insertions(+), 143 deletions(-) diff --git 
a/crates/language-server-macros/src/lib.rs b/crates/language-server-macros/src/lib.rs index 151739aef..63256a1c3 100644 --- a/crates/language-server-macros/src/lib.rs +++ b/crates/language-server-macros/src/lib.rs @@ -7,20 +7,29 @@ use syn::{parse_macro_input, FnArg, ImplItem, ItemImpl, ReturnType}; /// Macro for generating tokio channels from [`lsp-types`](https://docs.rs/lsp-types). /// /// This procedural macro annotates the `tower_lsp::LanguageServer` trait implementation and generates -/// a struct full of tokio broadcast channels that can be used to signal the server to handle +/// a struct full of tokio mpsc channels that can be used to signal the server to handle /// defined requests and notifications. #[proc_macro_attribute] pub fn message_channels(attr: TokenStream, item: TokenStream) -> TokenStream { - let attr = parse_macro_input!(attr as Option); - let channel_struct_name = format_ident!( - "{}", - attr.map_or("MessageChannels".to_string(), |attr| attr.to_string()) + // let attr = parse_macro_input!(attr as Option); + let channel_senders_struct_name = format_ident!( + "MessageSenders", + // attr.clone().map_or("MessageSenders".to_string(), |attr| attr.to_string()) + ); + + let channel_receivers_struct_name = format_ident!( + "MessageReceivers", + // attr.map_or("MessageReceivers".to_string(), |attr| attr.to_string()) ); let lang_server_trait_impl = parse_macro_input!(item as ItemImpl); let method_calls = parse_method_calls(&lang_server_trait_impl); - let channel_struct = gen_channel_struct(&method_calls, channel_struct_name); + let channel_struct = gen_channel_structs( + &method_calls, + channel_senders_struct_name, + channel_receivers_struct_name, + ); let tokens = quote! { #channel_struct @@ -34,6 +43,7 @@ pub fn message_channels(attr: TokenStream, item: TokenStream) -> TokenStream { struct MessageTypeChannel<'a> { // handler_name: &'a syn::Ident, tx_name: syn::Ident, + stream_name: syn::Ident, sender_fn_name: syn::Ident, subscribe_fn_name: syn::Ident, rx_name: syn::Ident, @@ -62,6 +72,7 @@ fn parse_method_calls(lang_server_trait: &ItemImpl) -> Vec { let handler_name = &method.sig.ident; let tx_name = format_ident!("{}_tx", handler_name); + let stream_name = format_ident!("{}_stream", handler_name); let sender_fn_name = format_ident!("send_{}", handler_name); let subscribe_fn_name = format_ident!("subscribe_{}", handler_name); @@ -69,6 +80,7 @@ fn parse_method_calls(lang_server_trait: &ItemImpl) -> Vec { calls.push(MessageTypeChannel { tx_name, + stream_name, rx_name, sender_fn_name, subscribe_fn_name, @@ -80,9 +92,10 @@ fn parse_method_calls(lang_server_trait: &ItemImpl) -> Vec { calls } -fn gen_channel_struct( +fn gen_channel_structs( channels: &[MessageTypeChannel], - channel_struct_name: syn::Ident, + channel_senders_struct_name: syn::Ident, + channel_receivers_struct_name: syn::Ident, ) -> proc_macro2::TokenStream { // unit type let unit_type = syn::Type::Tuple(syn::TypeTuple { @@ -90,11 +103,11 @@ fn gen_channel_struct( elems: syn::punctuated::Punctuated::new(), }); - let channel_declarations: proc_macro2::TokenStream = channels + let channel_senders_declarations: proc_macro2::TokenStream = channels .iter() .map(|channel| { let tx = &channel.tx_name; - let rx = &channel.rx_name; + // let rx = &channel.rx_name; let params = channel.params; let result = channel.result; @@ -105,18 +118,50 @@ fn gen_channel_struct( }; let sender_type = match result { - Some(result) => quote! { tokio::sync::broadcast::Sender<(#params, OneshotResponder<#result>)> }, - None => quote! 
{ tokio::sync::broadcast::Sender<#params> }, + Some(result) => quote! { tokio::sync::mpsc::Sender<(#params, tokio::sync::oneshot::Sender<#result>)> }, + None => quote! { tokio::sync::mpsc::Sender<#params> }, + }; + + quote! { + pub #tx: #sender_type, + } + }) + .collect(); + + let channel_receivers_declarations: proc_macro2::TokenStream = channels + .iter() + .map(|channel| { + // let tx = &channel.tx_name; + let rx = &channel.rx_name; + let stream_name = &channel.stream_name; + let params = channel.params; + let result = channel.result; + + // if params is None we need to use the type of () as the default + let params = match params { + Some(params) => params, + None => &unit_type, }; + // let sender_type = match result { + // Some(result) => quote! { tokio::sync::mpsc::Sender<(#params, tokio::sync::oneshot::Sender<#result>)> }, + // None => quote! { tokio::sync::mpsc::Sender<#params> }, + // }; + let receiver_type = match result { - Some(result) => quote! { tokio::sync::broadcast::Receiver<(#params, OneshotResponder<#result>)> }, - None => quote! { tokio::sync::broadcast::Receiver<#params> }, + Some(result) => quote! { tokio::sync::mpsc::Receiver<(#params, tokio::sync::oneshot::Sender<#result>)> }, + None => quote! { tokio::sync::mpsc::Receiver<#params> }, + }; + + let stream_type = match result { + Some(result) => quote! { tokio_stream::wrappers::ReceiverStream<(#params, tokio::sync::oneshot::Sender<#result>)> }, + None => quote! { tokio_stream::wrappers::ReceiverStream<#params> }, }; quote! { - pub #tx: #sender_type, - pub #rx: #receiver_type, + // pub #tx: #sender_type, + // pub #rx: #receiver_type, + pub #stream_name: #stream_type, } }) .collect(); @@ -127,44 +172,42 @@ fn gen_channel_struct( let tx = &channel.tx_name; let rx = &channel.rx_name; quote! { - let (#tx, #rx) = tokio::sync::broadcast::channel(100); + let (#tx, #rx) = tokio::sync::mpsc::channel(100); } }) .collect(); - let channel_assignments: proc_macro2::TokenStream = channels + let channel_senders_assignments: proc_macro2::TokenStream = channels .iter() .map(|channel| { let tx = &channel.tx_name; - let rx = &channel.rx_name; quote! { #tx, - #rx, } }) .collect(); - let send_functions: proc_macro2::TokenStream = channels + let channel_receivers_assignments: proc_macro2::TokenStream = channels + .iter() + .map(|channel| { + let stream_name = &channel.stream_name; + let rx = &channel.rx_name; + quote! { + // #rx, + #stream_name: tokio_stream::wrappers::ReceiverStream::new(#rx), + } + }) + .collect(); + + let sender_dispatch_functions: proc_macro2::TokenStream = channels .iter() .map(|channel| { let tx = &channel.tx_name; - // let rx = &channel.rx_name; let params = &channel.params; - let params_type = match params { - Some(params) => params, - None => &unit_type, - }; - let subscribe_fn_name = &channel.subscribe_fn_name; let sender_fn_name = &channel.sender_fn_name; let sender_fn_result = match channel.result { - Some(result) => quote!{tokio::sync::oneshot::Receiver<#result>}, - None => quote!{()}, - // Some(result) => quote! { tokio::sync::broadcast::Receiver<(#params, OneshotResponder>)> }, - // None => quote! { tokio::sync::broadcast::Receiver<#params> }, - }; - let receiver_type = match channel.result { - Some(result) => quote! { tokio::sync::broadcast::Receiver<(#params_type, OneshotResponder<#result>)> }, - None => quote! { tokio::sync::broadcast::Receiver<#params_type> }, + Some(result) => quote! {tokio::sync::oneshot::Receiver<#result>}, + None => quote! 
{()}, }; let payload = match params { @@ -173,20 +216,19 @@ fn gen_channel_struct( }; let send_payload = match channel.result { - Some(result) => quote!{ - let (tx, rx) = tokio::sync::oneshot::channel::<#result>(); - let oneshot = OneshotResponder::from(tx); - let broadcast = self.#tx.clone(); + Some(result) => quote! { + let (oneshot_tx, oneshot_rx) = tokio::sync::oneshot::channel::<#result>(); + let mpsc = self.#tx.clone(); info!("sending oneshot sender: {:?}", #payload); - match broadcast.send((#payload, oneshot)) { + match mpsc.send((#payload, oneshot_tx)).await { Ok(_) => info!("sent oneshot sender"), Err(e) => error!("failed to send oneshot sender"), } - info!("returning oneshot receiver: {:?}", rx); - rx + info!("returning oneshot receiver: {:?}", oneshot_rx); + oneshot_rx }, - None => quote!{ - match self.#tx.send(#payload) { + None => quote! { + match self.#tx.send(#payload).await { Ok(_) => info!("sent notification"), Err(e) => error!("failed to send notification: {:?}", e), } @@ -195,50 +237,46 @@ fn gen_channel_struct( let dispatcher_fn = match params { Some(params) => quote! { - pub fn #sender_fn_name(&self, params: #params) -> #sender_fn_result { + pub async fn #sender_fn_name(&self, params: #params) -> #sender_fn_result { #send_payload } }, None => quote! { - pub fn #sender_fn_name(&self) -> #sender_fn_result { + pub async fn #sender_fn_name(&self) -> #sender_fn_result { #send_payload } }, }; - let subscriber_fn = match params { - Some(_params) => quote! { - pub fn #subscribe_fn_name(&self) -> #receiver_type { - self.#tx.subscribe() - } - }, - None => quote! { - pub fn #subscribe_fn_name(&self) -> #receiver_type { - self.#tx.subscribe() - } - }, - }; - quote! { #dispatcher_fn - #subscriber_fn } }) .collect(); quote! { - pub struct #channel_struct_name { - #channel_declarations + pub struct #channel_receivers_struct_name { + #channel_receivers_declarations } - impl #channel_struct_name { - pub fn new() -> Self { - #channel_instantiations - Self { - #channel_assignments + pub struct #channel_senders_struct_name { + #channel_senders_declarations + } + + pub fn setup_message_channels() -> (#channel_senders_struct_name, #channel_receivers_struct_name) { + #channel_instantiations + ( + #channel_senders_struct_name { + #channel_senders_assignments + }, + #channel_receivers_struct_name { + #channel_receivers_assignments } - } - #send_functions + ) + } + + impl #channel_senders_struct_name { + #sender_dispatch_functions } } } diff --git a/crates/language-server/src/backend.rs b/crates/language-server/src/backend.rs index 8d7d0027a..66c06c2e8 100644 --- a/crates/language-server/src/backend.rs +++ b/crates/language-server/src/backend.rs @@ -11,24 +11,24 @@ use crate::db::LanguageServerDatabase; use crate::diagnostics::get_diagnostics; use crate::globals::LANGUAGE_ID; -use crate::language_server::MessageChannels; +use crate::language_server::MessageReceivers; use crate::workspace::{IngotFileContext, SyncableInputFile, Workspace}; use log::info; -use tokio_stream::wrappers::BroadcastStream; +use tokio_stream::wrappers::{BroadcastStream, ReceiverStream}; use tokio_stream::StreamExt; use tower_lsp::Client; pub struct Backend { - pub(crate) messaging: Arc>, - pub(crate) client: Arc>, + pub(crate) messaging: MessageReceivers, + pub(crate) client: Client, pub(crate) db: LanguageServerDatabase, pub(crate) workspace: Arc>, } impl Backend { - pub fn new(client: Arc>, messaging: Arc>) -> Self { + pub fn new(client: Client, messaging: MessageReceivers) -> Self { let db = 
LanguageServerDatabase::default(); let workspace = Arc::new(RwLock::new(Workspace::default())); @@ -45,21 +45,22 @@ impl Backend { let db = &mut self.db; let client = self.client.clone(); - let messaging = self.messaging.clone(); - let messaging = messaging.read().await; + let messaging = self.messaging; + // let messaging = self.messaging.clone(); + // let messaging = messaging.read().await; - let mut initialized_stream = BroadcastStream::new(messaging.subscribe_initialize()).fuse(); - let mut shutdown_stream = BroadcastStream::new(messaging.subscribe_shutdown()).fuse(); - let did_open_stream = BroadcastStream::new(messaging.subscribe_did_open()).fuse(); - let did_change_stream = BroadcastStream::new(messaging.subscribe_did_change()).fuse(); + let mut initialized_stream = messaging.initialize_stream.fuse(); + let mut shutdown_stream = messaging.shutdown_stream.fuse(); + let did_open_stream = messaging.did_open_stream.fuse(); + let did_change_stream = messaging.did_change_stream.fuse(); let mut change_stream = tokio_stream::StreamExt::merge( - did_open_stream.map_ok(|params| TextDocumentItem { + did_open_stream.map(|params| TextDocumentItem { uri: params.text_document.uri, language_id: LANGUAGE_ID.to_string(), version: params.text_document.version, text: params.text_document.text, }), - did_change_stream.map_ok(|params| TextDocumentItem { + did_change_stream.map(|params| TextDocumentItem { uri: params.text_document.uri, language_id: LANGUAGE_ID.to_string(), version: params.text_document.version, @@ -67,23 +68,19 @@ impl Backend { }), ) .fuse(); - let mut did_close_stream = BroadcastStream::new(messaging.subscribe_did_close()).fuse(); + let mut did_close_stream = messaging.did_close_stream.fuse(); let mut did_change_watched_files_stream = - BroadcastStream::new(messaging.subscribe_did_change_watched_files()).fuse(); + messaging.did_change_watched_files_stream.fuse(); - let mut hover_stream = BroadcastStream::new(messaging.subscribe_hover()).fuse(); + let mut hover_stream = messaging.hover_stream.fuse(); let mut goto_definition_stream = - BroadcastStream::new(messaging.subscribe_goto_definition()).fuse(); - - // This is very important! We absolutely need to drop the messaging lock here. - // TODO: make this more ergonomic and foolproof somehow - std::mem::drop(messaging); + messaging.goto_definition_stream.fuse(); info!("streams set up, looping on them now"); loop { tokio::select! 
{ Some(result) = initialized_stream.next() => { - if let Ok((initialization_params, responder)) = result { + if let (initialization_params, responder) = result { info!("initializing language server!"); // setup workspace // let workspace = self.workspace.clone(); @@ -106,16 +103,16 @@ impl Backend { version: Some(String::from(env!("CARGO_PKG_VERSION"))), }), }; - responder.respond(Ok(initialize_result)); + responder.send(Ok(initialize_result)); } } Some(result) = shutdown_stream.next() => { - if let Ok((_, responder)) = result { + if let (_, responder) = result { info!("shutting down language server"); - responder.respond(Ok(())); + responder.send(Ok(())); } } - Some(Ok(doc)) = change_stream.next() => { + Some(doc) = change_stream.next() => { info!("change detected: {:?}", doc.uri); update_inputs(workspace.clone(), db, doc.clone()).await; @@ -126,7 +123,7 @@ impl Backend { async move { handle_diagnostics(client, workspace, db, doc.uri).await } ); } - Some(Ok(params)) = did_close_stream.next() => { + Some(params) = did_close_stream.next() => { let workspace = &mut workspace.write().await; let input = workspace .touch_input_from_file_path( @@ -142,7 +139,7 @@ impl Backend { .unwrap(); let _ = input.sync(db, None); } - Some(Ok(params)) = did_change_watched_files_stream.next() => { + Some(params) = did_change_watched_files_stream.next() => { let changes = params.changes; for change in changes { let uri = change.uri; @@ -201,7 +198,7 @@ impl Backend { } } } - Some(Ok((params, responder))) = hover_stream.next() => { + Some((params, responder)) = hover_stream.next() => { let db = db.snapshot(); let workspace = workspace.clone(); let response = match tokio::spawn(handle_hover(db, workspace, params)).await { @@ -211,9 +208,9 @@ impl Backend { Ok(None) } }; - responder.respond(response); + responder.send(response); } - Some(Ok((params, responder))) = goto_definition_stream.next() => { + Some((params, responder)) = goto_definition_stream.next() => { let db = db.snapshot(); let workspace = workspace.clone(); let response = match handle_goto_definition(db, workspace, params).await { @@ -223,7 +220,7 @@ impl Backend { None } }; - responder.respond(Ok(response)); + responder.send(Ok(response)); } } } @@ -251,21 +248,19 @@ async fn update_inputs( } async fn handle_diagnostics( - client: Arc>, + client: Client, workspace: Arc>, db: Snapshot, url: lsp_types::Url, ) { let workspace = &workspace.read().await; - // let client = &mut client.lock().await; let diagnostics = get_diagnostics(&db, workspace, url.clone()); + let client = client.clone(); let diagnostics = diagnostics .unwrap() .into_iter() .map(|(uri, diags)| async { - let client = client.clone(); - let client = client.read().await; client.publish_diagnostics(uri, diags, None).await }) .collect::>(); diff --git a/crates/language-server/src/language_server.rs b/crates/language-server/src/language_server.rs index ed8f87b10..f3b143fc1 100644 --- a/crates/language-server/src/language_server.rs +++ b/crates/language-server/src/language_server.rs @@ -7,18 +7,15 @@ use lsp_types::{ Registration, }; -use crate::oneshot_responder::OneshotResponder; - use tower_lsp::{jsonrpc::Result, Client, LanguageServer}; pub(crate) struct Server { - pub(crate) messaging: Arc>, - pub(crate) client: Arc>, + pub(crate) messaging: MessageSenders, + pub(crate) client: Client, } impl Server { pub(crate) async fn register_watchers(&self) -> Result<()> { - let client = self.client.read().await; let registration = Registration { id: String::from("watch-fe-files"), method: 
String::from("workspace/didChangeWatchedFiles"), @@ -32,13 +29,10 @@ impl Server { .unwrap(), ), }; - client.register_capability(vec![registration]).await + self.client.register_capability(vec![registration]).await } - pub(crate) fn new(client: Client) -> Self { - let messaging = Arc::new(tokio::sync::RwLock::new(MessageChannels::new())); - let client = Arc::new(tokio::sync::RwLock::new(client)); - + pub(crate) fn new(client: Client, messaging: MessageSenders) -> Self { Self { messaging, client } } } @@ -48,18 +42,25 @@ impl Server { impl LanguageServer for Server { async fn initialize(&self, initialize_params: InitializeParams) -> Result { // forward the initialize request to the messaging system - let messaging = self.messaging.read().await; - let rx = messaging.send_initialize(initialize_params); + // let messaging = self.messaging.read().await; + let rx = self.messaging.send_initialize(initialize_params).await; info!("awaiting initialization result"); - let initialize_result = rx.await.unwrap(); - - // register file watchers - let _ = self.register_watchers().await; - info!("registered watchers"); - - info!("received initialization result"); - initialize_result + match rx.await { + Ok(initialize_result) => { + // register file watchers + if let Err(e) = self.register_watchers().await { + error!("Failed to register file watchers: {}", e); + } else { + info!("registered watchers"); + } + initialize_result + }, + Err(e) => { + error!("Failed to initialize: {}", e); + return Err(tower_lsp::jsonrpc::Error::internal_error()); + } + } } async fn shutdown(&self) -> tower_lsp::jsonrpc::Result<()> { @@ -67,28 +68,23 @@ impl LanguageServer for Server { } async fn did_open(&self, params: lsp_types::DidOpenTextDocumentParams) { - let messaging = self.messaging.read().await; - messaging.send_did_open(params); + self.messaging.send_did_open(params).await; } async fn did_change(&self, params: lsp_types::DidChangeTextDocumentParams) { - let messaging = self.messaging.read().await; - messaging.send_did_change(params); + self.messaging.send_did_change(params).await; } async fn did_close(&self, params: DidCloseTextDocumentParams) { - let messaging = self.messaging.read().await; - messaging.send_did_close(params); + self.messaging.send_did_close(params).await; } async fn did_change_watched_files(&self, params: DidChangeWatchedFilesParams) { - let messaging = self.messaging.read().await; - messaging.send_did_change_watched_files(params); + self.messaging.send_did_change_watched_files(params).await; } async fn hover(&self, params: lsp_types::HoverParams) -> Result> { - let messaging = self.messaging.read().await; - let rx = messaging.send_hover(params); + let rx = self.messaging.send_hover(params).await; rx.await.unwrap() } @@ -96,8 +92,8 @@ impl LanguageServer for Server { &self, params: lsp_types::GotoDefinitionParams, ) -> Result> { - let messaging = self.messaging.read().await; - let rx = messaging.send_goto_definition(params); + // let messaging = self.messaging.read().await; + let rx = self.messaging.send_goto_definition(params).await; rx.await.unwrap() } } diff --git a/crates/language-server/src/logger.rs b/crates/language-server/src/logger.rs index 5a652ff01..a1ce23684 100644 --- a/crates/language-server/src/logger.rs +++ b/crates/language-server/src/logger.rs @@ -46,12 +46,10 @@ pub fn setup_logger( pub async fn handle_log_messages( mut rx: tokio::sync::mpsc::UnboundedReceiver<(String, MessageType)>, - client: Arc>, + client: Client, ) -> tokio::sync::mpsc::UnboundedReceiver { loop { let (message, 
message_type) = rx.recv().await.unwrap(); - // let message_type = match - let client = client.read().await; client.log_message(message_type, message).await; } } diff --git a/crates/language-server/src/main.rs b/crates/language-server/src/main.rs index 9ee9355e3..983a1e8f1 100644 --- a/crates/language-server/src/main.rs +++ b/crates/language-server/src/main.rs @@ -10,10 +10,13 @@ mod oneshot_responder; mod util; mod workspace; +use std::sync::Arc; + use backend::Backend; use db::Jar; use language_server::Server; +use tokio::sync::RwLock; use crate::logger::{handle_log_messages, setup_logger}; @@ -26,12 +29,14 @@ async fn main() { let stdin = tokio::io::stdin(); let stdout = tokio::io::stdout(); - let (service, socket) = tower_lsp::LspService::build(Server::new).finish(); + // let message_channels = language_server::MessageChannels::new(); + let (message_senders, message_receivers) = language_server::setup_message_channels(); + let (service, socket) = + tower_lsp::LspService::build(|client| Server::new(client, message_senders)).finish(); let server = service.inner(); let client = server.client.clone(); - let messaging = server.messaging.clone(); - let backend = Backend::new(client, messaging); + let backend = Backend::new(client, message_receivers); let rx = setup_logger(log::Level::Info).unwrap(); From d530bdd3e1743faac6133b7f16861e12e54c5f54 Mon Sep 17 00:00:00 2001 From: Micah Date: Mon, 11 Mar 2024 01:07:46 -0500 Subject: [PATCH 38/66] language server: proc macro cleanup --- crates/hir/src/diagnostics.rs | 1 - crates/language-server-macros/src/lib.rs | 22 +----- crates/language-server/src/backend.rs | 76 +++++++++---------- crates/language-server/src/db.rs | 14 ++-- crates/language-server/src/diagnostics.rs | 5 +- crates/language-server/src/goto.rs | 10 ++- .../language-server/src/handlers/request.rs | 16 ++-- crates/language-server/src/language_server.rs | 4 +- crates/language-server/src/logger.rs | 2 - crates/language-server/src/main.rs | 4 - .../language-server/src/oneshot_responder.rs | 42 ---------- crates/language-server/src/workspace.rs | 42 ++++++---- 12 files changed, 91 insertions(+), 147 deletions(-) delete mode 100644 crates/language-server/src/oneshot_responder.rs diff --git a/crates/hir/src/diagnostics.rs b/crates/hir/src/diagnostics.rs index 106ff6af2..0157b8c4e 100644 --- a/crates/hir/src/diagnostics.rs +++ b/crates/hir/src/diagnostics.rs @@ -54,4 +54,3 @@ impl DiagnosticVoucher for Box { self.as_ref().clone_box() } } - diff --git a/crates/language-server-macros/src/lib.rs b/crates/language-server-macros/src/lib.rs index 63256a1c3..becdf37b4 100644 --- a/crates/language-server-macros/src/lib.rs +++ b/crates/language-server-macros/src/lib.rs @@ -10,7 +10,7 @@ use syn::{parse_macro_input, FnArg, ImplItem, ItemImpl, ReturnType}; /// a struct full of tokio mpsc channels that can be used to signal the server to handle /// defined requests and notifications. 
#[proc_macro_attribute] -pub fn message_channels(attr: TokenStream, item: TokenStream) -> TokenStream { +pub fn message_channels(_attr: TokenStream, item: TokenStream) -> TokenStream { // let attr = parse_macro_input!(attr as Option); let channel_senders_struct_name = format_ident!( "MessageSenders", @@ -45,7 +45,7 @@ struct MessageTypeChannel<'a> { tx_name: syn::Ident, stream_name: syn::Ident, sender_fn_name: syn::Ident, - subscribe_fn_name: syn::Ident, + // subscribe_fn_name: syn::Ident, rx_name: syn::Ident, params: Option<&'a syn::Type>, result: Option<&'a syn::Type>, @@ -74,7 +74,6 @@ fn parse_method_calls(lang_server_trait: &ItemImpl) -> Vec { let tx_name = format_ident!("{}_tx", handler_name); let stream_name = format_ident!("{}_stream", handler_name); let sender_fn_name = format_ident!("send_{}", handler_name); - let subscribe_fn_name = format_ident!("subscribe_{}", handler_name); let rx_name = format_ident!("{}_rx", handler_name); @@ -83,7 +82,6 @@ fn parse_method_calls(lang_server_trait: &ItemImpl) -> Vec { stream_name, rx_name, sender_fn_name, - subscribe_fn_name, params, result, }); @@ -107,7 +105,6 @@ fn gen_channel_structs( .iter() .map(|channel| { let tx = &channel.tx_name; - // let rx = &channel.rx_name; let params = channel.params; let result = channel.result; @@ -131,8 +128,6 @@ fn gen_channel_structs( let channel_receivers_declarations: proc_macro2::TokenStream = channels .iter() .map(|channel| { - // let tx = &channel.tx_name; - let rx = &channel.rx_name; let stream_name = &channel.stream_name; let params = channel.params; let result = channel.result; @@ -142,25 +137,12 @@ fn gen_channel_structs( Some(params) => params, None => &unit_type, }; - - // let sender_type = match result { - // Some(result) => quote! { tokio::sync::mpsc::Sender<(#params, tokio::sync::oneshot::Sender<#result>)> }, - // None => quote! { tokio::sync::mpsc::Sender<#params> }, - // }; - - let receiver_type = match result { - Some(result) => quote! { tokio::sync::mpsc::Receiver<(#params, tokio::sync::oneshot::Sender<#result>)> }, - None => quote! { tokio::sync::mpsc::Receiver<#params> }, - }; - let stream_type = match result { Some(result) => quote! { tokio_stream::wrappers::ReceiverStream<(#params, tokio::sync::oneshot::Sender<#result>)> }, None => quote! { tokio_stream::wrappers::ReceiverStream<#params> }, }; quote! 
{ - // pub #tx: #sender_type, - // pub #rx: #receiver_type, pub #stream_name: #stream_type, } }) diff --git a/crates/language-server/src/backend.rs b/crates/language-server/src/backend.rs index 66c06c2e8..0302b5555 100644 --- a/crates/language-server/src/backend.rs +++ b/crates/language-server/src/backend.rs @@ -1,10 +1,10 @@ use crate::handlers::request::{handle_goto_definition, handle_hover}; use crate::workspace::SyncableIngotFileContext; -use futures::TryStreamExt; + use lsp_types::TextDocumentItem; use salsa::{ParallelDatabase, Snapshot}; use std::sync::Arc; -use tokio::sync::{Mutex, RwLock}; +use tokio::sync::RwLock; use crate::capabilities::server_capabilities; use crate::db::LanguageServerDatabase; @@ -16,7 +16,6 @@ use crate::workspace::{IngotFileContext, SyncableInputFile, Workspace}; use log::info; -use tokio_stream::wrappers::{BroadcastStream, ReceiverStream}; use tokio_stream::StreamExt; use tower_lsp::Client; @@ -69,48 +68,44 @@ impl Backend { ) .fuse(); let mut did_close_stream = messaging.did_close_stream.fuse(); - let mut did_change_watched_files_stream = - messaging.did_change_watched_files_stream.fuse(); + let mut did_change_watched_files_stream = messaging.did_change_watched_files_stream.fuse(); let mut hover_stream = messaging.hover_stream.fuse(); - let mut goto_definition_stream = - messaging.goto_definition_stream.fuse(); + let mut goto_definition_stream = messaging.goto_definition_stream.fuse(); info!("streams set up, looping on them now"); loop { tokio::select! { Some(result) = initialized_stream.next() => { - if let (initialization_params, responder) = result { - info!("initializing language server!"); - // setup workspace - // let workspace = self.workspace.clone(); - let mut workspace = self.workspace.write().await; - let _ = workspace.set_workspace_root( - db, - initialization_params - .root_uri - .unwrap() - .to_file_path() - .ok() - .unwrap(), - ); + let (initialization_params, responder) = result; + info!("initializing language server!"); + // setup workspace + // let workspace = self.workspace.clone(); + let mut workspace = self.workspace.write().await; + let _ = workspace.set_workspace_root( + db, + initialization_params + .root_uri + .unwrap() + .to_file_path() + .ok() + .unwrap(), + ); - let capabilities = server_capabilities(); - let initialize_result = lsp_types::InitializeResult { - capabilities, - server_info: Some(lsp_types::ServerInfo { - name: String::from("fe-language-server"), - version: Some(String::from(env!("CARGO_PKG_VERSION"))), - }), - }; - responder.send(Ok(initialize_result)); - } + let capabilities = server_capabilities(); + let initialize_result = lsp_types::InitializeResult { + capabilities, + server_info: Some(lsp_types::ServerInfo { + name: String::from("fe-language-server"), + version: Some(String::from(env!("CARGO_PKG_VERSION"))), + }), + }; + let _ = responder.send(Ok(initialize_result)); } Some(result) = shutdown_stream.next() => { - if let (_, responder) = result { - info!("shutting down language server"); - responder.send(Ok(())); - } + let (_, responder) = result; + info!("shutting down language server"); + let _ = responder.send(Ok(())); } Some(doc) = change_stream.next() => { info!("change detected: {:?}", doc.uri); @@ -180,7 +175,7 @@ impl Backend { version: 0, text: text.clone(), }).await; - + let client = client.clone(); let workspace = workspace.clone(); let db = db.snapshot(); @@ -208,7 +203,7 @@ impl Backend { Ok(None) } }; - responder.send(response); + let _ = responder.send(response); } Some((params, responder)) = 
goto_definition_stream.next() => { let db = db.snapshot(); @@ -220,7 +215,7 @@ impl Backend { None } }; - responder.send(Ok(response)); + let _ = responder.send(Ok(response)); } } } @@ -260,11 +255,8 @@ async fn handle_diagnostics( let diagnostics = diagnostics .unwrap() .into_iter() - .map(|(uri, diags)| async { - client.publish_diagnostics(uri, diags, None).await - }) + .map(|(uri, diags)| async { client.publish_diagnostics(uri, diags, None).await }) .collect::>(); - futures::future::join_all(diagnostics).await; } diff --git a/crates/language-server/src/db.rs b/crates/language-server/src/db.rs index 17898a8fe..145f2c96a 100644 --- a/crates/language-server/src/db.rs +++ b/crates/language-server/src/db.rs @@ -40,17 +40,12 @@ pub struct LanguageServerDatabase { } impl LanguageServerDatabase { - pub fn analyze_top_mod(&self, top_mod: TopLevelMod) -> Vec> - { + pub fn analyze_top_mod(&self, top_mod: TopLevelMod) -> Vec> { let mut pass_manager = initialize_analysis_pass(self); pass_manager.run_on_module(top_mod) } - pub fn find_enclosing_item( - &self, - top_mod: TopLevelMod, - cursor: Cursor, - ) -> Option { + pub fn find_enclosing_item(&self, top_mod: TopLevelMod, cursor: Cursor) -> Option { let items = top_mod .scope_graph(self.as_hir_db()) .items_dfs(self.as_hir_db()); @@ -76,7 +71,10 @@ impl LanguageServerDatabase { smallest_enclosing_item } - pub fn finalize_diags(&self, diags: &Vec>) -> Vec { + pub fn finalize_diags( + &self, + diags: &Vec>, + ) -> Vec { let mut diags: Vec<_> = diags.iter().map(|d| d.to_complete(self)).collect(); diags.sort_by(|lhs, rhs| match lhs.error_code.cmp(&rhs.error_code) { std::cmp::Ordering::Equal => lhs.primary_span().cmp(&rhs.primary_span()), diff --git a/crates/language-server/src/diagnostics.rs b/crates/language-server/src/diagnostics.rs index ed07147d8..d87a821a8 100644 --- a/crates/language-server/src/diagnostics.rs +++ b/crates/language-server/src/diagnostics.rs @@ -6,7 +6,8 @@ use codespan_reporting as cs; use cs::{diagnostic as cs_diag, files as cs_files}; use common::{ - diagnostics::{LabelStyle, Severity}, InputDb, InputFile + diagnostics::{LabelStyle, Severity}, + InputDb, InputFile, }; use fxhash::FxHashMap; use hir::diagnostics::DiagnosticVoucher; @@ -133,7 +134,7 @@ fn run_diagnostics( path: &str, ) -> Vec { let file_path = path; - let top_mod = workspace.top_mod_from_file_path(&db, file_path).unwrap(); + let top_mod = workspace.top_mod_from_file_path(db, file_path).unwrap(); let diags = &db.analyze_top_mod(top_mod); db.finalize_diags(diags) } diff --git a/crates/language-server/src/goto.rs b/crates/language-server/src/goto.rs index 3fe708c5e..00ecb73c2 100644 --- a/crates/language-server/src/goto.rs +++ b/crates/language-server/src/goto.rs @@ -82,7 +82,11 @@ pub fn goto_enclosing_path( let (path_id, scope_id) = goto_starting_path; // Resolve path. 
- let resolved_path = hir_analysis::name_resolution::resolve_path_early(db.as_hir_analysis_db(), path_id, scope_id); + let resolved_path = hir_analysis::name_resolution::resolve_path_early( + db.as_hir_analysis_db(), + path_id, + scope_id, + ); Some(resolved_path) } @@ -194,7 +198,9 @@ mod tests { fn test_goto_enclosing_path(fixture: Fixture<&str>) { let db = &mut LanguageServerDatabase::default(); let workspace = &mut Workspace::default(); - let input = workspace.touch_input_from_file_path(db, fixture.path()).unwrap(); + let input = workspace + .touch_input_from_file_path(db, fixture.path()) + .unwrap(); input.set_text(db).to((*fixture.content()).to_string()); let top_mod = workspace .top_mod_from_file_path(&db.snapshot(), fixture.path()) diff --git a/crates/language-server/src/handlers/request.rs b/crates/language-server/src/handlers/request.rs index 643394a0e..26b79b935 100644 --- a/crates/language-server/src/handlers/request.rs +++ b/crates/language-server/src/handlers/request.rs @@ -2,7 +2,10 @@ use std::sync::Arc; use common::{input::IngotKind, InputDb}; use hir::SpannedHirDb; -use hir_analysis::{name_resolution::{EarlyResolvedPath, NameRes}, HirAnalysisDb}; +use hir_analysis::{ + name_resolution::{EarlyResolvedPath, NameRes}, + HirAnalysisDb, +}; use log::info; use salsa::{ParallelDatabase, Snapshot}; @@ -22,7 +25,7 @@ pub async fn handle_hover( workspace: Arc>, params: lsp_types::HoverParams, ) -> Result> { - let workspace = workspace.read().await; + let workspace = workspace.read().await; info!("handling hover"); // TODO: get more relevant information for the hover let file_path = ¶ms @@ -40,7 +43,6 @@ pub async fn handle_hover( .nth(params.text_document_position_params.position.line as usize) .unwrap(); - // let cursor: Cursor = params.text_document_position_params.position.into(); let cursor: Cursor = to_offset_from_position( params.text_document_position_params.position, @@ -72,7 +74,9 @@ pub async fn handle_hover( }) }; - let top_mod = workspace.top_mod_from_file_path(&db.snapshot(), file_path).unwrap(); + let top_mod = workspace + .top_mod_from_file_path(&db.snapshot(), file_path) + .unwrap(); let early_resolution = goto_enclosing_path(&db, top_mod, cursor); let goto_info = match early_resolution { @@ -119,7 +123,9 @@ pub async fn handle_goto_definition( // Get the module and the goto info let file_path = params.text_document.uri.path(); - let top_mod = workspace.top_mod_from_file_path(&db.snapshot(), file_path).unwrap(); + let top_mod = workspace + .top_mod_from_file_path(&db.snapshot(), file_path) + .unwrap(); let goto_info = goto_enclosing_path(&db, top_mod, cursor); // Convert the goto info to a Location diff --git a/crates/language-server/src/language_server.rs b/crates/language-server/src/language_server.rs index f3b143fc1..e2379f384 100644 --- a/crates/language-server/src/language_server.rs +++ b/crates/language-server/src/language_server.rs @@ -1,5 +1,3 @@ -use std::sync::Arc; - use log::{error, info}; use lsp_types::{ DidChangeWatchedFilesParams, DidChangeWatchedFilesRegistrationOptions, @@ -55,7 +53,7 @@ impl LanguageServer for Server { info!("registered watchers"); } initialize_result - }, + } Err(e) => { error!("Failed to initialize: {}", e); return Err(tower_lsp::jsonrpc::Error::internal_error()); diff --git a/crates/language-server/src/logger.rs b/crates/language-server/src/logger.rs index a1ce23684..17f051c4d 100644 --- a/crates/language-server/src/logger.rs +++ b/crates/language-server/src/logger.rs @@ -1,5 +1,3 @@ -use std::sync::Arc; - use log::{Level, 
LevelFilter, Metadata, Record, SetLoggerError}; use lsp_types::MessageType; use tower_lsp::Client; diff --git a/crates/language-server/src/main.rs b/crates/language-server/src/main.rs index 983a1e8f1..8270ec87f 100644 --- a/crates/language-server/src/main.rs +++ b/crates/language-server/src/main.rs @@ -6,17 +6,13 @@ mod globals; mod goto; mod language_server; mod logger; -mod oneshot_responder; mod util; mod workspace; -use std::sync::Arc; - use backend::Backend; use db::Jar; use language_server::Server; -use tokio::sync::RwLock; use crate::logger::{handle_log_messages, setup_logger}; diff --git a/crates/language-server/src/oneshot_responder.rs b/crates/language-server/src/oneshot_responder.rs deleted file mode 100644 index 1c094d2ab..000000000 --- a/crates/language-server/src/oneshot_responder.rs +++ /dev/null @@ -1,42 +0,0 @@ -use std::fmt::Debug; - -use log::{debug, error}; -#[derive(Debug)] -pub struct OneshotResponder { - pub(crate) sender: std::sync::Arc>>>, -} - -impl Clone for OneshotResponder { - fn clone(&self) -> OneshotResponder { - Self { - sender: self.sender.clone(), - } - } -} - -impl OneshotResponder { - pub fn from(sender: tokio::sync::oneshot::Sender) -> Self { - Self { - sender: std::sync::Arc::new(std::sync::Mutex::new(Some(sender))), - } - } - pub fn respond(self, response: T) { - debug!("responding with: {:?}", response); - let mut sender = self.sender.lock().unwrap(); - - match sender.take() { - Some(sender) => { - debug!("sending response: {:?} and {:?}", response, sender); - match sender.send(response) { - Ok(_) => { - debug!("Response sent successfully") - } - Err(e) => error!("Failed to send response: {:?}", e), - } - } - None => { - error!("OneshotResponder already responded"); - } - } - } -} diff --git a/crates/language-server/src/workspace.rs b/crates/language-server/src/workspace.rs index 006d7267e..a22388d90 100644 --- a/crates/language-server/src/workspace.rs +++ b/crates/language-server/src/workspace.rs @@ -243,14 +243,12 @@ impl IngotFileContext for StandaloneIngotContext { } fn get_ingot_from_file_path( - &self, - _db: &Snapshot, - path: &str, - ) -> Option { + &self, + _db: &Snapshot, + path: &str, + ) -> Option { // this shouldn't mutate, it should only get the ingot or return `None` - get_containing_ingot(&self.ingots, path) - .as_deref() - .copied() + get_containing_ingot(&self.ingots, path).copied() } fn top_mod_from_file_path( @@ -420,7 +418,8 @@ impl IngotFileContext for Workspace { if let Some(ctx) = ctx { ctx.touch_input_from_file_path(db, path) } else { - self.standalone_ingot_context.touch_input_from_file_path(db, path) + self.standalone_ingot_context + .touch_input_from_file_path(db, path) } } @@ -433,7 +432,8 @@ impl IngotFileContext for Workspace { if let Some(ctx) = ctx { ctx.get_input_from_file_path(db, path) } else { - self.standalone_ingot_context.get_input_from_file_path(db, path) + self.standalone_ingot_context + .get_input_from_file_path(db, path) } } @@ -446,7 +446,8 @@ impl IngotFileContext for Workspace { if let Some(ctx) = ctx { Some(ctx.touch_ingot_from_file_path(db, path).unwrap()) } else { - self.standalone_ingot_context.touch_ingot_from_file_path(db, path) + self.standalone_ingot_context + .touch_ingot_from_file_path(db, path) } } @@ -459,7 +460,8 @@ impl IngotFileContext for Workspace { if let Some(ctx) = ctx { ctx.get_ingot_from_file_path(db, path) } else { - self.standalone_ingot_context.get_ingot_from_file_path(db, path) + self.standalone_ingot_context + .get_ingot_from_file_path(db, path) } } @@ -584,7 +586,9 @@ mod 
tests { use salsa::ParallelDatabase; - use crate::workspace::{get_containing_ingot_mut, IngotFileContext, Workspace, FE_CONFIG_SUFFIX}; + use crate::workspace::{ + get_containing_ingot_mut, IngotFileContext, Workspace, FE_CONFIG_SUFFIX, + }; use std::path::PathBuf; use super::StandaloneIngotContext; @@ -644,8 +648,10 @@ mod tests { assert!(containing_ingot.as_deref().is_some()); - let ingot = workspace - .touch_ingot_from_file_path(&mut crate::db::LanguageServerDatabase::default(), file_path); + let ingot = workspace.touch_ingot_from_file_path( + &mut crate::db::LanguageServerDatabase::default(), + file_path, + ); assert!(ingot.is_some()); } @@ -728,7 +734,9 @@ mod tests { .collect::>(); for src_path in fe_files { - let _file = workspace.touch_input_from_file_path(&mut db, &src_path).unwrap(); + let _file = workspace + .touch_input_from_file_path(&mut db, &src_path) + .unwrap(); // normally would do this but it's not relevant here... // file.sync(&mut db, None); @@ -767,7 +775,9 @@ mod tests { let foo_files = foo_context.files.keys().collect::>(); for file in foo_files { let contents = std::fs::read_to_string(&file).unwrap(); - let file = foo_context.touch_input_from_file_path(&mut db, &file).unwrap(); + let file = foo_context + .touch_input_from_file_path(&mut db, &file) + .unwrap(); assert!(*file.text(&db) == contents); } From bdbf12408d66c31e0e79dbe9525e7bae1c101c6e Mon Sep 17 00:00:00 2001 From: Micah Date: Mon, 11 Mar 2024 13:24:47 -0500 Subject: [PATCH 39/66] language server buffer/release mechanism --- Cargo.lock | 49 +++++- crates/language-server/Cargo.toml | 4 + crates/language-server/src/backend.rs | 23 ++- .../src/buffer_release_stream.rs | 157 ++++++++++++++++++ crates/language-server/src/main.rs | 1 + 5 files changed, 229 insertions(+), 5 deletions(-) create mode 100644 crates/language-server/src/buffer_release_stream.rs diff --git a/Cargo.lock b/Cargo.lock index fbd7c6564..06d4df12d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -626,6 +626,16 @@ dependencies = [ "parking_lot_core 0.9.7", ] +[[package]] +name = "debounced" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3d8b0346b9fa0aa01a3fa4bcce48d62f8738e9c2956e92f275bbf6cf9d6fab5" +dependencies = [ + "futures-timer", + "futures-util", +] + [[package]] name = "der" version = "0.7.4" @@ -1154,6 +1164,7 @@ dependencies = [ "clap 4.3.12", "codespan-reporting", "crossbeam-channel", + "debounced", "dir-test", "fe-analyzer", "fe-common2", @@ -1163,6 +1174,7 @@ dependencies = [ "fe-hir-analysis", "fe-language-server-macros", "fe-macros", + "fork_stream", "futures", "fxhash", "glob", @@ -1171,10 +1183,12 @@ dependencies = [ "lsp-server", "lsp-types", "patricia_tree", + "pin-project", "rowan", "salsa-2022", "serde", "serde_json", + "stream-operators", "tokio", "tokio-macros", "tokio-stream", @@ -1326,6 +1340,16 @@ version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" +[[package]] +name = "fork_stream" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc54cf296aa5a82dfffcc911fc7a37b0dcba605725bbb4db486f7b24d7667f9d" +dependencies = [ + "futures", + "pin-project", +] + [[package]] name = "form_urlencoded" version = "1.2.0" @@ -1418,6 +1442,12 @@ version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "38d84fa142264698cdce1a9f9172cf383a0c82de1bddcf3092901442c4097004" 
+[[package]] +name = "futures-timer" +version = "3.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f288b0a4f20f9a56b5d1da57e2227c661b7b16168e2f72365f57b63326e29b24" + [[package]] name = "futures-util" version = "0.3.30" @@ -2156,18 +2186,18 @@ dependencies = [ [[package]] name = "pin-project" -version = "1.1.4" +version = "1.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0302c4a0442c456bd56f841aee5c3bfd17967563f6fadc9ceb9f9c23cf3807e0" +checksum = "b6bf43b791c5b9e34c3d182969b4abb522f9343702850a2e57f460d00d09b4b3" dependencies = [ "pin-project-internal", ] [[package]] name = "pin-project-internal" -version = "1.1.4" +version = "1.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "266c042b60c9c76b8d53061e52b2e0d1116abc57cefc8c5cd671619a56ac3690" +checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965" dependencies = [ "proc-macro2", "quote", @@ -2978,6 +3008,17 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" +[[package]] +name = "stream-operators" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "903e4d7cdada44bf788a0949ae2864bd4ae959deed241e57037e4efe01f7dc1b" +dependencies = [ + "pin-project-lite", + "tokio", + "tokio-stream", +] + [[package]] name = "strsim" version = "0.10.0" diff --git a/crates/language-server/Cargo.toml b/crates/language-server/Cargo.toml index f9c013559..f833fd092 100644 --- a/crates/language-server/Cargo.toml +++ b/crates/language-server/Cargo.toml @@ -41,3 +41,7 @@ tokio = { version = "1.35.1", features = ["full", "io-std"] } tokio-macros = "2.2.0" futures = "0.3.28" tokio-stream = { version = "0.1.14", features = ["sync"] } +fork_stream = "0.1.0" +debounced = "0.1.0" +stream-operators = "0.1.1" +pin-project = "1.1.5" diff --git a/crates/language-server/src/backend.rs b/crates/language-server/src/backend.rs index 0302b5555..80b4259de 100644 --- a/crates/language-server/src/backend.rs +++ b/crates/language-server/src/backend.rs @@ -1,8 +1,10 @@ use crate::handlers::request::{handle_goto_definition, handle_hover}; use crate::workspace::SyncableIngotFileContext; +use futures::StreamExt; use lsp_types::TextDocumentItem; use salsa::{ParallelDatabase, Snapshot}; +use tokio_stream::wrappers::ReceiverStream; use std::sync::Arc; use tokio::sync::RwLock; @@ -16,8 +18,11 @@ use crate::workspace::{IngotFileContext, SyncableInputFile, Workspace}; use log::info; -use tokio_stream::StreamExt; +use fork_stream::StreamExt as _; +// use tokio_stream::StreamExt; +use stream_operators::StreamOps; use tower_lsp::Client; +use debounced::debounced; pub struct Backend { pub(crate) messaging: MessageReceivers, @@ -69,6 +74,22 @@ impl Backend { .fuse(); let mut did_close_stream = messaging.did_close_stream.fuse(); let mut did_change_watched_files_stream = messaging.did_change_watched_files_stream.fuse(); + + // let flat_did_change_watched_files = + // did_change_watched_files_stream.flat_map(|params| tokio_stream::iter(params.changes)); + // let need_filesystem_sync = flat_did_change_watched_files.fork().filter(|change| { + // let change_type = change.typ.clone(); + // async move { + // matches!( + // change_type, + // lsp_types::FileChangeType::CREATED | lsp_types::FileChangeType::DELETED + // ) + // } + // }); + + // let (filesystem_synced_tx, filesystem_synced_rx) 
= tokio::sync::mpsc::channel::<()>(1); + // let filesystem_just_synced_stream = ReceiverStream::new(filesystem_synced_rx); + // let need_filesystem_sync_debounced = need_filesystem_sync.debounce_time(std::time::Duration::from_millis(50)); let mut hover_stream = messaging.hover_stream.fuse(); let mut goto_definition_stream = messaging.goto_definition_stream.fuse(); diff --git a/crates/language-server/src/buffer_release_stream.rs b/crates/language-server/src/buffer_release_stream.rs new file mode 100644 index 000000000..4b8c367a7 --- /dev/null +++ b/crates/language-server/src/buffer_release_stream.rs @@ -0,0 +1,157 @@ +use std::{ + collections::VecDeque, pin::{pin, Pin}, task::{Context, Poll} +}; + +use futures::stream::Stream; +use futures::stream::StreamExt; +use tokio::sync::mpsc::{unbounded_channel, UnboundedReceiver, UnboundedSender}; + +use pin_project::pin_project; + +#[pin_project] +struct AccumulatingStream { + #[pin] + input_stream: I, + #[pin] + trigger_stream: T, + // #[pin] + pending_buffer: VecDeque, + // #[pin] + ready_buffer: VecDeque +} + +impl AccumulatingStream +where + I: Stream, + T: Stream, +{ + fn new(input_stream: I, trigger_stream: T) -> Self { + AccumulatingStream { + input_stream, + trigger_stream, + pending_buffer: VecDeque::new(), + ready_buffer: VecDeque::new(), + } + } +} +impl Stream for AccumulatingStream +where + I: Stream, + T: Stream, +{ + type Item = I::Item; + + fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll> { + let this = self.project(); + + let ready_buffer : &mut VecDeque = this.ready_buffer; + // Drain the existing accumulated values + if let Some(item) = ready_buffer.pop_front() { + return Poll::Ready(Some(item)); + } + + // // Check if the trigger_stream has a new value + if let Poll::Ready(Some(_)) = this.trigger_stream.poll_next(cx) { + // move the pending buffer to the ready buffer + let pending_buffer : &mut VecDeque = this.pending_buffer; + let ready_buffer : &mut VecDeque = this.ready_buffer; + ready_buffer.append(pending_buffer); + } + + // Check if the input_stream has a new value + if let Poll::Ready(Some(item)) = this.input_stream.poll_next(cx) { + let pending_buffer : &mut VecDeque = this.pending_buffer; + pending_buffer.push_back(item); + } + + Poll::Pending + } +} + +// impl Stream for AccumulatingStream +// where +// I: Stream, +// T: Stream, +// { +// type Item = I::Item; + +// fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll> { +// let this = self.project(); + +// let trigger_stream: Pin<&mut T> = this.trigger_stream; +// // let accumulation_receiver : Pin<&mut UnboundedReceiver> = this.accumulation_receiver; +// let output_sender: Pin<&mut UnboundedSender>> = this.output_sender; +// if let Poll::Ready(Some(_)) = trigger_stream.poll_next(cx) { +// let (new_accumulation_sender, new_accumulation_receiver) = unbounded_channel(); +// let old_accumulation_receiver = +// std::mem::replace(this.accumulation_receiver, new_accumulation_receiver); +// // we also need to replace the accumulation sender but it's pinned +// let _ = std::mem::replace(this.accumulation_sender, new_accumulation_sender); +// let _ = output_sender.send(old_accumulation_receiver); +// } + +// let mut output_receiver: Pin<&mut UnboundedReceiver>> = +// this.output_receiver; +// if let Poll::Ready(Some(mut inner)) = output_receiver.poll_recv(cx) { +// if let Poll::Ready(Some(item)) = inner.poll_recv(cx) { +// return Poll::Ready(Some(item)); +// } +// } + +// let input_stream: Pin<&mut I> = this.input_stream; +// // let 
accumulation_sender: Pin<&mut UnboundedSender> = self.accumulation_sender; +// if let Poll::Ready(Some(item)) = input_stream.poll_next(cx) { +// let _ = this.accumulation_sender.send(item); +// } + +// Poll::Pending +// } +// } + +// how about some tests +#[cfg(test)] +mod tests { + use std::sync::Arc; + use fork_stream::StreamExt as _; + use tokio::sync::Mutex; + + use super::*; + use futures::stream::StreamExt; + use tokio::time::{timeout, Duration}; + use tokio_stream::wrappers::{BroadcastStream, UnboundedReceiverStream}; + + #[tokio::test] + async fn test_accumulating_stream() { + let (trigger_sender, mut trigger_receiver) = tokio::sync::broadcast::channel(100); + let (input_sender, input_receiver) = unbounded_channel(); + // let (output_sender, mut output_receiver) = tokio::sync::broadcast::channel(1); + + let mut output = vec![]; + + let mut accumulating_stream = AccumulatingStream::new( + UnboundedReceiverStream::from(input_receiver), + BroadcastStream::from(trigger_sender.subscribe()), + ); + + input_sender.send(1).unwrap(); + input_sender.send(2).unwrap(); + input_sender.send(3).unwrap(); + trigger_sender.send(()).unwrap(); + // tokio::time::sleep(Duration::from_millis(1000)).await; + + while let Some(item) = accumulating_stream.next().await { + output.push(item); + } + + assert_eq!(output, vec![1, 2, 3]); + + input_sender.send(4).unwrap(); + input_sender.send(5).unwrap(); + input_sender.send(6).unwrap(); + trigger_sender.send(()).unwrap(); + input_sender.send(7).unwrap(); + input_sender.send(8).unwrap(); + input_sender.send(9).unwrap(); + input_sender.send(10).unwrap(); + } +} diff --git a/crates/language-server/src/main.rs b/crates/language-server/src/main.rs index 8270ec87f..84c7139b8 100644 --- a/crates/language-server/src/main.rs +++ b/crates/language-server/src/main.rs @@ -8,6 +8,7 @@ mod language_server; mod logger; mod util; mod workspace; +mod buffer_release_stream; use backend::Backend; use db::Jar; From e5c1a4b4a73136b3323ffae2d4ab3ccc97101e56 Mon Sep 17 00:00:00 2001 From: Micah Date: Mon, 11 Mar 2024 14:52:11 -0500 Subject: [PATCH 40/66] buffer/release stream --- Cargo.lock | 29 ++- crates/language-server/Cargo.toml | 2 + crates/language-server/src/backend.rs | 212 +++++++++++------- .../src/buffer_release_stream.rs | 157 ------------- crates/language-server/src/language_server.rs | 16 +- crates/language-server/src/main.rs | 2 +- .../src/stream_buffer_until.rs | 155 +++++++++++++ 7 files changed, 329 insertions(+), 244 deletions(-) delete mode 100644 crates/language-server/src/buffer_release_stream.rs create mode 100644 crates/language-server/src/stream_buffer_until.rs diff --git a/Cargo.lock b/Cargo.lock index 06d4df12d..ea5b0d954 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1176,12 +1176,14 @@ dependencies = [ "fe-macros", "fork_stream", "futures", + "futures-concurrency", "fxhash", "glob", "indexmap", "log", "lsp-server", "lsp-types", + "merge-streams", "patricia_tree", "pin-project", "rowan", @@ -1396,6 +1398,19 @@ dependencies = [ "futures-sink", ] +[[package]] +name = "futures-concurrency" +version = "7.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b590a729e1cbaf9ae3ec294143ea034d93cbb1de01c884d04bcd0af8b613d02" +dependencies = [ + "bitvec", + "futures-core", + "pin-project", + "slab", + "smallvec", +] + [[package]] name = "futures-core" version = "0.3.30" @@ -1935,6 +1950,16 @@ dependencies = [ "autocfg", ] +[[package]] +name = "merge-streams" +version = "0.1.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "4f84f6452969abd246e7ac1fe4fe75906c76e8ec88d898df9aef37e0f3b6a7c2" +dependencies = [ + "futures-core", + "pin-project", +] + [[package]] name = "miniz_oxide" version = "0.7.1" @@ -2964,9 +2989,9 @@ dependencies = [ [[package]] name = "smallvec" -version = "1.10.0" +version = "1.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0" +checksum = "e6ecd384b10a64542d77071bd64bd7b231f4ed5940fba55e98c3de13824cf3d7" [[package]] name = "smol_str" diff --git a/crates/language-server/Cargo.toml b/crates/language-server/Cargo.toml index f833fd092..1545cf434 100644 --- a/crates/language-server/Cargo.toml +++ b/crates/language-server/Cargo.toml @@ -45,3 +45,5 @@ fork_stream = "0.1.0" debounced = "0.1.0" stream-operators = "0.1.1" pin-project = "1.1.5" +merge-streams = "0.1.2" +futures-concurrency = "7.5.0" diff --git a/crates/language-server/src/backend.rs b/crates/language-server/src/backend.rs index 80b4259de..5ca4301bd 100644 --- a/crates/language-server/src/backend.rs +++ b/crates/language-server/src/backend.rs @@ -1,10 +1,15 @@ use crate::handlers::request::{handle_goto_definition, handle_hover}; +use crate::stream_buffer_until::BufferUntilStreamExt as _; use crate::workspace::SyncableIngotFileContext; +use fork_stream::StreamExt as _; use futures::StreamExt; +use futures_concurrency::prelude::*; use lsp_types::TextDocumentItem; use salsa::{ParallelDatabase, Snapshot}; +use stream_operators::StreamOps; use tokio_stream::wrappers::ReceiverStream; + use std::sync::Arc; use tokio::sync::RwLock; @@ -18,11 +23,9 @@ use crate::workspace::{IngotFileContext, SyncableInputFile, Workspace}; use log::info; -use fork_stream::StreamExt as _; // use tokio_stream::StreamExt; -use stream_operators::StreamOps; + use tower_lsp::Client; -use debounced::debounced; pub struct Backend { pub(crate) messaging: MessageReceivers, @@ -55,9 +58,68 @@ impl Backend { let mut initialized_stream = messaging.initialize_stream.fuse(); let mut shutdown_stream = messaging.shutdown_stream.fuse(); + let mut did_close_stream = messaging.did_close_stream.fuse(); + let mut did_change_watched_files_stream = messaging.did_change_watched_files_stream.fork(); + + let mut need_filesystem_sync = did_change_watched_files_stream + .clone() + .debounce_time(std::time::Duration::from_millis(10)); + + // let need_filesystem_sync = flat_did_change_watched_files.filter(|change| { + // let change_type = change.typ.clone(); + // async move { + // matches!( + // change_type, + // lsp_types::FileChangeType::CREATED | lsp_types::FileChangeType::DELETED + // ) + // } + // }); + + let (filesystem_recently_synced_tx, filesystem_recently_synced_rx) = + tokio::sync::mpsc::channel::<()>(1); + let filesystem_recently_synced_stream = ReceiverStream::new(filesystem_recently_synced_rx); + + let flat_did_change_watched_files = + did_change_watched_files_stream.map(|params| futures::stream::iter(params.changes)).flatten().fork(); + + let did_change_watched_file_stream = flat_did_change_watched_files.clone().filter(|change| { + let change_type = change.typ.clone(); + Box::pin(async move { + matches!(change_type, lsp_types::FileChangeType::CHANGED) + }) + }); + + let did_create_watched_file_stream = flat_did_change_watched_files.clone().filter(|change| { + let change_type = change.typ.clone(); + Box::pin(async move { + matches!(change_type, lsp_types::FileChangeType::CREATED) + }) + }); let 
did_open_stream = messaging.did_open_stream.fuse(); let did_change_stream = messaging.did_change_stream.fuse(); - let mut change_stream = tokio_stream::StreamExt::merge( + let change_stream = ( + did_change_watched_file_stream.map(|change| { + let uri = change.uri; + let path = uri.to_file_path().unwrap(); + let text = std::fs::read_to_string(path).unwrap(); + TextDocumentItem { + uri: uri.clone(), + language_id: LANGUAGE_ID.to_string(), + version: 0, + text, + } + }), + did_create_watched_file_stream.map(|change| { + let uri = change.uri; + let path = uri.to_file_path().unwrap(); + let text = std::fs::read_to_string(path).unwrap(); + TextDocumentItem { + uri: uri.clone(), + language_id: LANGUAGE_ID.to_string(), + version: 0, + text, + } + }), did_open_stream.map(|params| TextDocumentItem { uri: params.text_document.uri, language_id: LANGUAGE_ID.to_string(), @@ -70,26 +132,11 @@ impl Backend { version: params.text_document.version, text: params.content_changes[0].text.clone(), }), - ) + ).merge() .fuse(); - let mut did_close_stream = messaging.did_close_stream.fuse(); - let mut did_change_watched_files_stream = messaging.did_change_watched_files_stream.fuse(); - - // let flat_did_change_watched_files = - // did_change_watched_files_stream.flat_map(|params| tokio_stream::iter(params.changes)); - // let need_filesystem_sync = flat_did_change_watched_files.fork().filter(|change| { - // let change_type = change.typ.clone(); - // async move { - // matches!( - // change_type, - // lsp_types::FileChangeType::CREATED | lsp_types::FileChangeType::DELETED - // ) - // } - // }); + let mut change_stream = change_stream.buffer_until(filesystem_recently_synced_stream); - // let (filesystem_synced_tx, filesystem_synced_rx) = tokio::sync::mpsc::channel::<()>(1); - // let filesystem_just_synced_stream = ReceiverStream::new(filesystem_synced_rx); - // let need_filesystem_sync_debounced = need_filesystem_sync.debounce_time(std::time::Duration::from_millis(50)); + // let need_filesystem_sync_debounced = need_filesystem_sync; //.debounce_time(std::time::Duration::from_millis(10)); let mut hover_stream = messaging.hover_stream.fuse(); let mut goto_definition_stream = messaging.goto_definition_stream.fuse(); @@ -128,6 +175,15 @@ impl Backend { info!("shutting down language server"); let _ = responder.send(Ok(())); } + Some(_) = need_filesystem_sync.next() => { + let workspace = &mut workspace.write().await; + let _ = workspace.sync(db); + filesystem_recently_synced_tx.send(()).await.unwrap(); + } + // Some(_) = need_filesystem_sync_debounced.next() => { + // info!("filesystem recently synced"); + // // let _ = filesystem_recently_synced_tx.send(()); + // } Some(doc) = change_stream.next() => { info!("change detected: {:?}", doc.uri); update_inputs(workspace.clone(), db, doc.clone()).await; @@ -155,65 +211,65 @@ impl Backend { .unwrap(); let _ = input.sync(db, None); } - Some(params) = did_change_watched_files_stream.next() => { - let changes = params.changes; - for change in changes { - let uri = change.uri; - let path = uri.to_file_path().unwrap(); + // Some(params) = did_change_watched_files_stream.next() => { + // let changes = params.changes; + // for change in changes { + // let uri = change.uri; + // let path = uri.to_file_path().unwrap(); - match change.typ { - lsp_types::FileChangeType::CREATED => { - // TODO: handle this more carefully! 
- // this is inefficient, a hack for now - let workspace = &mut workspace.write().await; - let _ = workspace.sync(db); - let input = workspace - .touch_input_from_file_path(db, path.to_str().unwrap()) - .unwrap(); - let _ = input.sync(db, None); - } - lsp_types::FileChangeType::CHANGED => { - let workspace = &mut workspace.write().await; - let input = workspace - .touch_input_from_file_path(db, path.to_str().unwrap()) - .unwrap(); - let _ = input.sync(db, None); - } - lsp_types::FileChangeType::DELETED => { - let workspace = &mut workspace.write().await; - // TODO: handle this more carefully! - // this is inefficient, a hack for now - let _ = workspace.sync(db); - } - _ => {} - } - // collect diagnostics for the file - if change.typ != lsp_types::FileChangeType::DELETED { - let text = std::fs::read_to_string(path).unwrap(); - update_inputs(workspace.clone(), db, TextDocumentItem { - uri: uri.clone(), - language_id: LANGUAGE_ID.to_string(), - version: 0, - text: text.clone(), - }).await; + // match change.typ { + // lsp_types::FileChangeType::CREATED => { + // // TODO: handle this more carefully! + // // this is inefficient, a hack for now + // let workspace = &mut workspace.write().await; + // let _ = workspace.sync(db); + // let input = workspace + // .touch_input_from_file_path(db, path.to_str().unwrap()) + // .unwrap(); + // let _ = input.sync(db, None); + // } + // lsp_types::FileChangeType::CHANGED => { + // let workspace = &mut workspace.write().await; + // let input = workspace + // .touch_input_from_file_path(db, path.to_str().unwrap()) + // .unwrap(); + // let _ = input.sync(db, None); + // } + // lsp_types::FileChangeType::DELETED => { + // let workspace = &mut workspace.write().await; + // // TODO: handle this more carefully! + // // this is inefficient, a hack for now + // let _ = workspace.sync(db); + // } + // _ => {} + // } + // // collect diagnostics for the file + // if change.typ != lsp_types::FileChangeType::DELETED { + // let text = std::fs::read_to_string(path).unwrap(); + // update_inputs(workspace.clone(), db, TextDocumentItem { + // uri: uri.clone(), + // language_id: LANGUAGE_ID.to_string(), + // version: 0, + // text: text.clone(), + // }).await; - let client = client.clone(); - let workspace = workspace.clone(); - let db = db.snapshot(); + // let client = client.clone(); + // let workspace = workspace.clone(); + // let db = db.snapshot(); - tokio::spawn( - async move { - handle_diagnostics( - client, - workspace, - db, - uri.clone(), - ).await - } - ); - } - } - } + // tokio::spawn( + // async move { + // handle_diagnostics( + // client, + // workspace, + // db, + // uri.clone(), + // ).await + // } + // ); + // } + // } + // } Some((params, responder)) = hover_stream.next() => { let db = db.snapshot(); let workspace = workspace.clone(); diff --git a/crates/language-server/src/buffer_release_stream.rs b/crates/language-server/src/buffer_release_stream.rs deleted file mode 100644 index 4b8c367a7..000000000 --- a/crates/language-server/src/buffer_release_stream.rs +++ /dev/null @@ -1,157 +0,0 @@ -use std::{ - collections::VecDeque, pin::{pin, Pin}, task::{Context, Poll} -}; - -use futures::stream::Stream; -use futures::stream::StreamExt; -use tokio::sync::mpsc::{unbounded_channel, UnboundedReceiver, UnboundedSender}; - -use pin_project::pin_project; - -#[pin_project] -struct AccumulatingStream { - #[pin] - input_stream: I, - #[pin] - trigger_stream: T, - // #[pin] - pending_buffer: VecDeque, - // #[pin] - ready_buffer: VecDeque -} - -impl AccumulatingStream -where 
- I: Stream, - T: Stream, -{ - fn new(input_stream: I, trigger_stream: T) -> Self { - AccumulatingStream { - input_stream, - trigger_stream, - pending_buffer: VecDeque::new(), - ready_buffer: VecDeque::new(), - } - } -} -impl Stream for AccumulatingStream -where - I: Stream, - T: Stream, -{ - type Item = I::Item; - - fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll> { - let this = self.project(); - - let ready_buffer : &mut VecDeque = this.ready_buffer; - // Drain the existing accumulated values - if let Some(item) = ready_buffer.pop_front() { - return Poll::Ready(Some(item)); - } - - // // Check if the trigger_stream has a new value - if let Poll::Ready(Some(_)) = this.trigger_stream.poll_next(cx) { - // move the pending buffer to the ready buffer - let pending_buffer : &mut VecDeque = this.pending_buffer; - let ready_buffer : &mut VecDeque = this.ready_buffer; - ready_buffer.append(pending_buffer); - } - - // Check if the input_stream has a new value - if let Poll::Ready(Some(item)) = this.input_stream.poll_next(cx) { - let pending_buffer : &mut VecDeque = this.pending_buffer; - pending_buffer.push_back(item); - } - - Poll::Pending - } -} - -// impl Stream for AccumulatingStream -// where -// I: Stream, -// T: Stream, -// { -// type Item = I::Item; - -// fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll> { -// let this = self.project(); - -// let trigger_stream: Pin<&mut T> = this.trigger_stream; -// // let accumulation_receiver : Pin<&mut UnboundedReceiver> = this.accumulation_receiver; -// let output_sender: Pin<&mut UnboundedSender>> = this.output_sender; -// if let Poll::Ready(Some(_)) = trigger_stream.poll_next(cx) { -// let (new_accumulation_sender, new_accumulation_receiver) = unbounded_channel(); -// let old_accumulation_receiver = -// std::mem::replace(this.accumulation_receiver, new_accumulation_receiver); -// // we also need to replace the accumulation sender but it's pinned -// let _ = std::mem::replace(this.accumulation_sender, new_accumulation_sender); -// let _ = output_sender.send(old_accumulation_receiver); -// } - -// let mut output_receiver: Pin<&mut UnboundedReceiver>> = -// this.output_receiver; -// if let Poll::Ready(Some(mut inner)) = output_receiver.poll_recv(cx) { -// if let Poll::Ready(Some(item)) = inner.poll_recv(cx) { -// return Poll::Ready(Some(item)); -// } -// } - -// let input_stream: Pin<&mut I> = this.input_stream; -// // let accumulation_sender: Pin<&mut UnboundedSender> = self.accumulation_sender; -// if let Poll::Ready(Some(item)) = input_stream.poll_next(cx) { -// let _ = this.accumulation_sender.send(item); -// } - -// Poll::Pending -// } -// } - -// how about some tests -#[cfg(test)] -mod tests { - use std::sync::Arc; - use fork_stream::StreamExt as _; - use tokio::sync::Mutex; - - use super::*; - use futures::stream::StreamExt; - use tokio::time::{timeout, Duration}; - use tokio_stream::wrappers::{BroadcastStream, UnboundedReceiverStream}; - - #[tokio::test] - async fn test_accumulating_stream() { - let (trigger_sender, mut trigger_receiver) = tokio::sync::broadcast::channel(100); - let (input_sender, input_receiver) = unbounded_channel(); - // let (output_sender, mut output_receiver) = tokio::sync::broadcast::channel(1); - - let mut output = vec![]; - - let mut accumulating_stream = AccumulatingStream::new( - UnboundedReceiverStream::from(input_receiver), - BroadcastStream::from(trigger_sender.subscribe()), - ); - - input_sender.send(1).unwrap(); - input_sender.send(2).unwrap(); - input_sender.send(3).unwrap(); - 
trigger_sender.send(()).unwrap(); - // tokio::time::sleep(Duration::from_millis(1000)).await; - - while let Some(item) = accumulating_stream.next().await { - output.push(item); - } - - assert_eq!(output, vec![1, 2, 3]); - - input_sender.send(4).unwrap(); - input_sender.send(5).unwrap(); - input_sender.send(6).unwrap(); - trigger_sender.send(()).unwrap(); - input_sender.send(7).unwrap(); - input_sender.send(8).unwrap(); - input_sender.send(9).unwrap(); - input_sender.send(10).unwrap(); - } -} diff --git a/crates/language-server/src/language_server.rs b/crates/language-server/src/language_server.rs index e2379f384..0628e1f96 100644 --- a/crates/language-server/src/language_server.rs +++ b/crates/language-server/src/language_server.rs @@ -46,12 +46,6 @@ impl LanguageServer for Server { info!("awaiting initialization result"); match rx.await { Ok(initialize_result) => { - // register file watchers - if let Err(e) = self.register_watchers().await { - error!("Failed to register file watchers: {}", e); - } else { - info!("registered watchers"); - } initialize_result } Err(e) => { @@ -61,6 +55,16 @@ impl LanguageServer for Server { } } + async fn initialized(&self, _params: lsp_types::InitializedParams) { + info!("initialized... registering file watchers"); + // register file watchers + if let Err(e) = self.register_watchers().await { + error!("Failed to register file watchers: {}", e); + } else { + info!("registered watchers"); + } + } + async fn shutdown(&self) -> tower_lsp::jsonrpc::Result<()> { Ok(()) } diff --git a/crates/language-server/src/main.rs b/crates/language-server/src/main.rs index 84c7139b8..7d47a4566 100644 --- a/crates/language-server/src/main.rs +++ b/crates/language-server/src/main.rs @@ -1,4 +1,5 @@ mod backend; +mod stream_buffer_until; mod capabilities; mod db; mod diagnostics; @@ -8,7 +9,6 @@ mod language_server; mod logger; mod util; mod workspace; -mod buffer_release_stream; use backend::Backend; use db::Jar; diff --git a/crates/language-server/src/stream_buffer_until.rs b/crates/language-server/src/stream_buffer_until.rs new file mode 100644 index 000000000..eee9b4730 --- /dev/null +++ b/crates/language-server/src/stream_buffer_until.rs @@ -0,0 +1,155 @@ +use futures::stream::Stream; +use std::{ + collections::VecDeque, + fmt::Debug, + pin::{pin, Pin}, + task::{Context, Poll}, +}; + +use pin_project::pin_project; + +#[pin_project(project_replace)] +pub struct BufferUntilStream { + #[pin] + input_stream: I, + #[pin] + trigger_stream: T, + pending_buffer: VecDeque, + ready_buffer: VecDeque, +} + +impl BufferUntilStream +where + I: Stream, + T: Stream, +{ + fn new(input_stream: I, trigger_stream: T) -> Self { + BufferUntilStream { + input_stream, + trigger_stream, + pending_buffer: VecDeque::new(), + ready_buffer: VecDeque::new(), + } + } +} +impl Stream for BufferUntilStream +where + I: Stream, + T: Stream, +{ + type Item = I::Item; + + fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll> { + let mut this = self.project(); + let ready_buffer: &mut VecDeque = this.ready_buffer; + let pending_buffer: &mut VecDeque = this.pending_buffer; + + // Check if the input_stream has a new value + while let Poll::Ready(Some(item)) = this.input_stream.as_mut().poll_next(cx) { + println!("Received item from input_stream: {:?}", item); + pending_buffer.push_back(item); + } + + // Drain the ready buffer + if let Some(item) = ready_buffer.pop_front() { + println!("Returning item from ready_buffer: {:?}", item); + return Poll::Ready(Some(item)); + } + + // Check if the 
trigger_stream has a new value + if let Poll::Ready(Some(_)) = this.trigger_stream.poll_next(cx) { + // Move the pending buffer to the ready buffer + println!("Triggered, moving pending_buffer to ready_buffer"); + ready_buffer.append(pending_buffer); + println!("Ready buffer length after trigger: {}", ready_buffer.len()); + + // Return the next item from the ready buffer + if let Some(item) = ready_buffer.pop_front() { + println!("Returning item from ready_buffer after trigger: {:?}", item); + return Poll::Ready(Some(item)); + } + } + + Poll::Pending + } +} + +pub trait BufferUntilStreamExt: Sized +where + I: Stream, + T: Stream, +{ + fn buffer_until(self, trigger: T) -> BufferUntilStream; +} + +impl BufferUntilStreamExt for I +where + I: Stream, + T: Stream, +{ + fn buffer_until(self, trigger: T) -> BufferUntilStream { + BufferUntilStream::new(self, trigger) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use futures::{stream::StreamExt, FutureExt}; + use tokio_stream::wrappers::{BroadcastStream, UnboundedReceiverStream}; + + #[tokio::test] + async fn test_accumulating_stream() { + println!("running test_accumulating_stream"); + let (trigger_sender, trigger_receiver) = tokio::sync::broadcast::channel(100); + let (input_sender, input_receiver) = tokio::sync::mpsc::unbounded_channel(); + + let mut output = vec![]; + + let mut accumulating_stream = BufferUntilStream::new( + UnboundedReceiverStream::from(input_receiver), + BroadcastStream::from(trigger_receiver), + ); + + input_sender.send(1).unwrap(); + input_sender.send(2).unwrap(); + input_sender.send(3).unwrap(); + + while let Some(item) = accumulating_stream.next().now_or_never().flatten() + // timeout(Duration::from_millis(0), accumulating_stream.next()).await + { + output.push(item); + } + assert_eq!(output, Vec::::new()); + + trigger_sender.send(()).unwrap(); + + while let Some(item) = accumulating_stream.next().now_or_never().flatten() { + output.push(item); + } + assert_eq!(output, vec![1, 2, 3]); + + input_sender.send(4).unwrap(); + input_sender.send(5).unwrap(); + input_sender.send(6).unwrap(); + while let Some(item) = accumulating_stream.next().now_or_never().flatten() { + output.push(item); + } + + assert_eq!(output, vec![1, 2, 3]); + trigger_sender.send(()).unwrap(); + while let Some(item) = accumulating_stream.next().now_or_never().flatten() { + output.push(item); + } + + assert_eq!(output, vec![1, 2, 3, 4, 5, 6]); + input_sender.send(7).unwrap(); + input_sender.send(8).unwrap(); + input_sender.send(9).unwrap(); + input_sender.send(10).unwrap(); + while let Some(item) = accumulating_stream.next().now_or_never().flatten() { + output.push(item); + } + assert_eq!(output, vec![1, 2, 3, 4, 5, 6]); + } +} From cb492ff9fee319ebf3a6967bb3a3d555823af1a6 Mon Sep 17 00:00:00 2001 From: Micah Date: Mon, 11 Mar 2024 22:31:34 -0500 Subject: [PATCH 41/66] higher order stream buffer --- .../src/stream_buffer_until.rs | 84 +++++++++++++------ 1 file changed, 59 insertions(+), 25 deletions(-) diff --git a/crates/language-server/src/stream_buffer_until.rs b/crates/language-server/src/stream_buffer_until.rs index eee9b4730..0300070f9 100644 --- a/crates/language-server/src/stream_buffer_until.rs +++ b/crates/language-server/src/stream_buffer_until.rs @@ -1,4 +1,6 @@ use futures::stream::Stream; +use futures::stream::{iter, Iter}; +use log::info; use std::{ collections::VecDeque, fmt::Debug, @@ -18,12 +20,12 @@ pub struct BufferUntilStream { ready_buffer: VecDeque, } -impl BufferUntilStream +impl<'s, I, T, U> BufferUntilStream where I: 
Stream, T: Stream, { - fn new(input_stream: I, trigger_stream: T) -> Self { + pub fn new(input_stream: I, trigger_stream: T) -> Self { BufferUntilStream { input_stream, trigger_stream, @@ -31,46 +33,71 @@ where ready_buffer: VecDeque::new(), } } + + pub fn input_stream_mut(&mut self) -> &mut I { + &mut self.input_stream + } + + pub fn input_stream(&self) -> &I { + &self.input_stream + } + + pub fn trigger_stream_mut(&mut self) -> &mut T { + &mut self.trigger_stream + } + + pub fn trigger_stream(&self) -> &T { + &self.trigger_stream + } } impl Stream for BufferUntilStream where I: Stream, T: Stream, { - type Item = I::Item; + type Item = Iter>; fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll> { let mut this = self.project(); let ready_buffer: &mut VecDeque = this.ready_buffer; let pending_buffer: &mut VecDeque = this.pending_buffer; + let mut finished = false; + // Check if the input_stream has a new value while let Poll::Ready(Some(item)) = this.input_stream.as_mut().poll_next(cx) { - println!("Received item from input_stream: {:?}", item); + info!("Received item from input_stream: {:?}", item); pending_buffer.push_back(item); } - // Drain the ready buffer - if let Some(item) = ready_buffer.pop_front() { - println!("Returning item from ready_buffer: {:?}", item); - return Poll::Ready(Some(item)); + if let Poll::Ready(None) = this.input_stream.as_mut().poll_next(cx) { + info!("input_stream finished"); + finished = true; } - // Check if the trigger_stream has a new value - if let Poll::Ready(Some(_)) = this.trigger_stream.poll_next(cx) { - // Move the pending buffer to the ready buffer - println!("Triggered, moving pending_buffer to ready_buffer"); - ready_buffer.append(pending_buffer); - println!("Ready buffer length after trigger: {}", ready_buffer.len()); - - // Return the next item from the ready buffer - if let Some(item) = ready_buffer.pop_front() { - println!("Returning item from ready_buffer after trigger: {:?}", item); - return Poll::Ready(Some(item)); + match this.trigger_stream.as_mut().poll_next(cx) { + Poll::Ready(Some(_)) => { + info!("Triggered, moving pending_buffer to ready_buffer"); + ready_buffer.append(pending_buffer); + } + Poll::Ready(None) => { + ready_buffer.append(pending_buffer); + } + _ => { + finished = true; } } - Poll::Pending + // Send any ready buffer or finish up + if ready_buffer.len() > 0 { + info!("Returning items stream from ready_buffer"); + let current_ready_buffer = std::mem::replace(this.ready_buffer, VecDeque::new()); + return Poll::Ready(Some(iter(current_ready_buffer.into_iter()))); + } else if finished { + return Poll::Ready(None); + } else { + Poll::Pending + } } } @@ -82,7 +109,7 @@ where fn buffer_until(self, trigger: T) -> BufferUntilStream; } -impl BufferUntilStreamExt for I +impl<'s, I, T, U: Debug> BufferUntilStreamExt for I where I: Stream, T: Stream, @@ -109,15 +136,13 @@ mod tests { let mut accumulating_stream = BufferUntilStream::new( UnboundedReceiverStream::from(input_receiver), BroadcastStream::from(trigger_receiver), - ); + ).flatten(); input_sender.send(1).unwrap(); input_sender.send(2).unwrap(); input_sender.send(3).unwrap(); - while let Some(item) = accumulating_stream.next().now_or_never().flatten() - // timeout(Duration::from_millis(0), accumulating_stream.next()).await - { + while let Some(item) = accumulating_stream.next().now_or_never().flatten() { output.push(item); } assert_eq!(output, Vec::::new()); @@ -151,5 +176,14 @@ mod tests { output.push(item); } assert_eq!(output, vec![1, 2, 3, 4, 5, 6]); + + 
drop(trigger_sender); + + while let Some(item) = accumulating_stream.next().now_or_never().flatten() { + output.push(item); + } + assert_eq!(output, vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10]); } + + // TODO: write tests for end of input stream } From 7ae47ad00752a05d8fa86eb717c68367905e9710 Mon Sep 17 00:00:00 2001 From: Micah Date: Mon, 11 Mar 2024 22:31:48 -0500 Subject: [PATCH 42/66] language server tweaks --- Cargo.lock | 470 +++++++++++++++++- crates/language-server-macros/src/lib.rs | 2 +- crates/language-server/Cargo.toml | 3 +- crates/language-server/src/backend.rs | 17 +- crates/language-server/src/language_server.rs | 2 + crates/language-server/src/main.rs | 3 +- 6 files changed, 470 insertions(+), 27 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index ea5b0d954..49f08f50f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -115,6 +115,28 @@ version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8da52d66c7071e2e3fa2a1e5c6d088fec47b593032b254f5e980de8ea54454d6" +[[package]] +name = "async-stream" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd56dd203fef61ac097dd65721a419ddccb106b2d2b70ba60a6b529f03961a51" +dependencies = [ + "async-stream-impl", + "futures-core", + "pin-project-lite", +] + +[[package]] +name = "async-stream-impl" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.48", +] + [[package]] name = "async-trait" version = "0.1.77" @@ -155,6 +177,51 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" +[[package]] +name = "axum" +version = "0.6.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b829e4e32b91e643de6eafe82b1d90675f5874230191a4ffbc1b336dec4d6bf" +dependencies = [ + "async-trait", + "axum-core", + "bitflags", + "bytes", + "futures-util", + "http", + "http-body", + "hyper", + "itoa", + "matchit", + "memchr", + "mime", + "percent-encoding", + "pin-project-lite", + "rustversion", + "serde", + "sync_wrapper", + "tower", + "tower-layer", + "tower-service", +] + +[[package]] +name = "axum-core" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "759fa577a247914fd3f7f76d62972792636412fbfd634cd452f6a385a74d2d2c" +dependencies = [ + "async-trait", + "bytes", + "futures-util", + "http", + "http-body", + "mime", + "rustversion", + "tower-layer", + "tower-service", +] + [[package]] name = "backtrace" version = "0.3.69" @@ -185,6 +252,12 @@ dependencies = [ "byteorder", ] +[[package]] +name = "base64" +version = "0.21.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" + [[package]] name = "beef" version = "0.5.2" @@ -293,7 +366,7 @@ dependencies = [ "bitflags", "clap_derive 3.2.18", "clap_lex 0.2.4", - "indexmap", + "indexmap 1.9.3", "once_cell", "strsim", "termcolor", @@ -408,6 +481,43 @@ dependencies = [ "winapi", ] +[[package]] +name = "console-api" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fd326812b3fd01da5bb1af7d340d0d555fd3d4b641e7f1dfcf5962a902952787" +dependencies = [ + "futures-core", + "prost", + "prost-types", + 
"tonic", + "tracing-core", +] + +[[package]] +name = "console-subscriber" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7481d4c57092cd1c19dd541b92bdce883de840df30aa5d03fd48a3935c01842e" +dependencies = [ + "console-api", + "crossbeam-channel", + "crossbeam-utils", + "futures-task", + "hdrhistogram", + "humantime", + "prost-types", + "serde", + "serde_json", + "thread_local", + "tokio", + "tokio-stream", + "tonic", + "tracing", + "tracing-core", + "tracing-subscriber", +] + [[package]] name = "console_error_panic_hook" version = "0.1.7" @@ -451,6 +561,15 @@ version = "0.91.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9a59bcbca89c3f1b70b93ab3cbba5e5e0cbf3e63dadb23c7525cb142e21a9d4c" +[[package]] +name = "crc32fast" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b3855a8a784b474f333699ef2bbca9db2c4a1f6d9088a90a2d25b1eb53111eaa" +dependencies = [ + "cfg-if 1.0.0", +] + [[package]] name = "criterion" version = "0.3.6" @@ -778,6 +897,12 @@ dependencies = [ "syn 2.0.48", ] +[[package]] +name = "equivalent" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" + [[package]] name = "errno" version = "0.3.1" @@ -933,7 +1058,7 @@ dependencies = [ "fe-test-runner", "fs_extra", "include_dir", - "indexmap", + "indexmap 1.9.3", "walkdir", ] @@ -957,7 +1082,7 @@ dependencies = [ "fe-test-files", "hex", "if_chain", - "indexmap", + "indexmap 1.9.3", "insta", "num-bigint", "num-traits", @@ -982,7 +1107,7 @@ dependencies = [ "fe-common", "fe-mir", "fxhash", - "indexmap", + "indexmap 1.9.3", "num-bigint", "salsa", "smol_str", @@ -998,7 +1123,7 @@ dependencies = [ "difference", "fe-library", "hex", - "indexmap", + "indexmap 1.9.3", "num-bigint", "num-traits", "once_cell", @@ -1035,7 +1160,7 @@ dependencies = [ "fe-yulc", "getrandom", "hex", - "indexmap", + "indexmap 1.9.3", "insta", "primitive-types", "serde_json", @@ -1091,7 +1216,7 @@ dependencies = [ "fe-parser", "fe-test-runner", "fe-yulc", - "indexmap", + "indexmap 1.9.3", "serde_json", "smol_str", "toml", @@ -1163,6 +1288,7 @@ dependencies = [ "camino", "clap 4.3.12", "codespan-reporting", + "console-subscriber", "crossbeam-channel", "debounced", "dir-test", @@ -1179,7 +1305,7 @@ dependencies = [ "futures-concurrency", "fxhash", "glob", - "indexmap", + "indexmap 1.9.3", "log", "lsp-server", "lsp-types", @@ -1236,7 +1362,7 @@ dependencies = [ "fe-test-files", "fxhash", "id-arena", - "indexmap", + "indexmap 1.9.3", "num-bigint", "num-integer", "num-traits", @@ -1303,7 +1429,7 @@ dependencies = [ name = "fe-yulc" version = "0.23.0" dependencies = [ - "indexmap", + "indexmap 1.9.3", "serde_json", "solc", ] @@ -1336,6 +1462,16 @@ version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" +[[package]] +name = "flate2" +version = "1.0.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "46303f565772937ffe1d394a4fac6f411c6013172fadde9dcdb1e147a086940e" +dependencies = [ + "crc32fast", + "miniz_oxide", +] + [[package]] name = "fnv" version = "1.0.7" @@ -1537,6 +1673,25 @@ dependencies = [ "subtle", ] +[[package]] +name = "h2" +version = "0.3.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"bb2c4422095b67ee78da96fbb51a4cc413b3b25883c7717ff7ca1ab31022c9c9" +dependencies = [ + "bytes", + "fnv", + "futures-core", + "futures-sink", + "futures-util", + "http", + "indexmap 2.2.5", + "slab", + "tokio", + "tokio-util", + "tracing", +] + [[package]] name = "half" version = "1.8.2" @@ -1576,6 +1731,12 @@ dependencies = [ "ahash 0.8.3", ] +[[package]] +name = "hashbrown" +version = "0.14.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604" + [[package]] name = "hashlink" version = "0.8.1" @@ -1585,6 +1746,19 @@ dependencies = [ "hashbrown 0.12.3", ] +[[package]] +name = "hdrhistogram" +version = "7.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "765c9198f173dd59ce26ff9f95ef0aafd0a0fe01fb9d72841bc5066a4c06511d" +dependencies = [ + "base64 0.21.7", + "byteorder", + "flate2", + "nom", + "num-traits", +] + [[package]] name = "heck" version = "0.3.3" @@ -1645,12 +1819,82 @@ dependencies = [ "digest", ] +[[package]] +name = "http" +version = "0.2.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1" +dependencies = [ + "bytes", + "fnv", + "itoa", +] + +[[package]] +name = "http-body" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" +dependencies = [ + "bytes", + "http", + "pin-project-lite", +] + [[package]] name = "httparse" version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d897f394bad6a705d5f4104762e116a75639e470d80901eed05a860a95cb1904" +[[package]] +name = "httpdate" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" + +[[package]] +name = "humantime" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" + +[[package]] +name = "hyper" +version = "0.14.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf96e135eb83a2a8ddf766e426a841d8ddd7449d5f00d34ea02b41d2f19eef80" +dependencies = [ + "bytes", + "futures-channel", + "futures-core", + "futures-util", + "h2", + "http", + "http-body", + "httparse", + "httpdate", + "itoa", + "pin-project-lite", + "socket2", + "tokio", + "tower-service", + "tracing", + "want", +] + +[[package]] +name = "hyper-timeout" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbb958482e8c7be4bc3cf272a766a2b0bf1a6755e7a6ae777f017a31d11b13b1" +dependencies = [ + "hyper", + "pin-project-lite", + "tokio", + "tokio-io-timeout", +] + [[package]] name = "id-arena" version = "2.2.1" @@ -1746,6 +1990,16 @@ dependencies = [ "hashbrown 0.12.3", ] +[[package]] +name = "indexmap" +version = "2.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b0b929d511467233429c45a44ac1dcaa21ba0f5ba11e4879e6ed28ddb4f9df4" +dependencies = [ + "equivalent", + "hashbrown 0.14.3", +] + [[package]] name = "insta" version = "1.29.0" @@ -1935,6 +2189,21 @@ dependencies = [ "url", ] +[[package]] +name = "matchers" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum 
= "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558" +dependencies = [ + "regex-automata 0.1.10", +] + +[[package]] +name = "matchit" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0e7465ac9959cc2b1404e8e2367b43684a6d13790fe23056cc8c6c5a6b7bcb94" + [[package]] name = "memchr" version = "2.6.4" @@ -1960,6 +2229,18 @@ dependencies = [ "pin-project", ] +[[package]] +name = "mime" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" + +[[package]] +name = "minimal-lexical" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" + [[package]] name = "miniz_oxide" version = "0.7.1" @@ -1980,6 +2261,16 @@ dependencies = [ "windows-sys 0.48.0", ] +[[package]] +name = "nom" +version = "7.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a" +dependencies = [ + "memchr", + "minimal-lexical", +] + [[package]] name = "num" version = "0.4.0" @@ -2206,7 +2497,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4dd7d28ee937e54fe3080c91faa1c3a46c06de6252988a7f4592ba2310ef22a4" dependencies = [ "fixedbitset", - "indexmap", + "indexmap 1.9.3", ] [[package]] @@ -2362,6 +2653,38 @@ dependencies = [ "unarray", ] +[[package]] +name = "prost" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "146c289cda302b98a28d40c8b3b90498d6e526dd24ac2ecea73e4e491685b94a" +dependencies = [ + "bytes", + "prost-derive", +] + +[[package]] +name = "prost-derive" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "efb6c9a1dd1def8e2124d17e83a20af56f1570d6c2d2bd9e266ccb768df3840e" +dependencies = [ + "anyhow", + "itertools", + "proc-macro2", + "quote", + "syn 2.0.48", +] + +[[package]] +name = "prost-types" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "193898f59edcf43c26227dcd4c8427f00d99d61e95dcde58dabd49fa291d470e" +dependencies = [ + "prost", +] + [[package]] name = "quick-error" version = "2.0.1" @@ -2467,10 +2790,19 @@ checksum = "380b951a9c5e80ddfd6136919eef32310721aa4aacd4889a8d39124b026ab343" dependencies = [ "aho-corasick", "memchr", - "regex-automata", + "regex-automata 0.4.3", "regex-syntax 0.8.2", ] +[[package]] +name = "regex-automata" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" +dependencies = [ + "regex-syntax 0.6.29", +] + [[package]] name = "regex-automata" version = "0.4.3" @@ -2602,7 +2934,7 @@ version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2ece421e0c4129b90e4a35b6f625e472e96c552136f5093a2f4fa2bbb75a62d5" dependencies = [ - "base64", + "base64 0.10.1", "bitflags", "serde", ] @@ -2722,7 +3054,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4b84d9f96071f3f3be0dc818eae3327625d8ebc95b58da37d6850724f31d3403" dependencies = [ "crossbeam-utils", - "indexmap", + "indexmap 1.9.3", "lock_api", "log", "oorandom", @@ -2742,7 +3074,7 @@ dependencies = [ "crossbeam-utils", "dashmap", 
"hashlink", - "indexmap", + "indexmap 1.9.3", "log", "parking_lot 0.12.1", "rustc-hash", @@ -2953,6 +3285,15 @@ dependencies = [ "keccak", ] +[[package]] +name = "sharded-slab" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6" +dependencies = [ + "lazy_static", +] + [[package]] name = "signal-hook-registry" version = "1.4.1" @@ -3135,6 +3476,12 @@ dependencies = [ "unicode-ident", ] +[[package]] +name = "sync_wrapper" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" + [[package]] name = "tap" version = "1.0.1" @@ -3191,6 +3538,16 @@ dependencies = [ "syn 2.0.48", ] +[[package]] +name = "thread_local" +version = "1.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b9ef9bad013ada3808854ceac7b46812a6465ba368859a37e2100283d2d719c" +dependencies = [ + "cfg-if 1.0.0", + "once_cell", +] + [[package]] name = "tiny-keccak" version = "2.0.2" @@ -3241,9 +3598,20 @@ dependencies = [ "signal-hook-registry", "socket2", "tokio-macros", + "tracing", "windows-sys 0.48.0", ] +[[package]] +name = "tokio-io-timeout" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30b74022ada614a1b4834de765f9bb43877f910cc8ce4be40e89042c9223a8bf" +dependencies = [ + "pin-project-lite", + "tokio", +] + [[package]] name = "tokio-macros" version = "2.2.0" @@ -3302,11 +3670,38 @@ version = "0.19.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "239410c8609e8125456927e6707163a3b1fdb40561e4b803bc041f466ccfdc13" dependencies = [ - "indexmap", + "indexmap 1.9.3", "toml_datetime", "winnow", ] +[[package]] +name = "tonic" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d560933a0de61cf715926b9cac824d4c883c2c43142f787595e48280c40a1d0e" +dependencies = [ + "async-stream", + "async-trait", + "axum", + "base64 0.21.7", + "bytes", + "h2", + "http", + "http-body", + "hyper", + "hyper-timeout", + "percent-encoding", + "pin-project", + "prost", + "tokio", + "tokio-stream", + "tower", + "tower-layer", + "tower-service", + "tracing", +] + [[package]] name = "tower" version = "0.4.13" @@ -3315,10 +3710,16 @@ checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c" dependencies = [ "futures-core", "futures-util", + "indexmap 1.9.3", "pin-project", "pin-project-lite", + "rand", + "slab", + "tokio", + "tokio-util", "tower-layer", "tower-service", + "tracing", ] [[package]] @@ -3396,6 +3797,22 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" dependencies = [ "once_cell", + "valuable", +] + +[[package]] +name = "tracing-subscriber" +version = "0.3.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b" +dependencies = [ + "matchers", + "once_cell", + "regex", + "sharded-slab", + "thread_local", + "tracing", + "tracing-core", ] [[package]] @@ -3408,6 +3825,12 @@ dependencies = [ "rlp", ] +[[package]] +name = "try-lock" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" + 
[[package]] name = "typenum" version = "1.16.0" @@ -3489,6 +3912,12 @@ version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a" +[[package]] +name = "valuable" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" + [[package]] name = "vec1" version = "1.10.1" @@ -3523,6 +3952,15 @@ dependencies = [ "winapi-util", ] +[[package]] +name = "want" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" +dependencies = [ + "try-lock", +] + [[package]] name = "wasi" version = "0.11.0+wasi-snapshot-preview1" diff --git a/crates/language-server-macros/src/lib.rs b/crates/language-server-macros/src/lib.rs index becdf37b4..ffeaf4f33 100644 --- a/crates/language-server-macros/src/lib.rs +++ b/crates/language-server-macros/src/lib.rs @@ -154,7 +154,7 @@ fn gen_channel_structs( let tx = &channel.tx_name; let rx = &channel.rx_name; quote! { - let (#tx, #rx) = tokio::sync::mpsc::channel(100); + let (#tx, #rx) = tokio::sync::mpsc::channel(10000); } }) .collect(); diff --git a/crates/language-server/Cargo.toml b/crates/language-server/Cargo.toml index 1545cf434..4c951d3c4 100644 --- a/crates/language-server/Cargo.toml +++ b/crates/language-server/Cargo.toml @@ -37,7 +37,7 @@ patricia_tree = "0.6.2" glob = "0.3.1" url = "2.4.1" tower-lsp = "0.20.0" -tokio = { version = "1.35.1", features = ["full", "io-std"] } +tokio = { version = "1.35.1", features = ["full", "io-std", "tracing"] } tokio-macros = "2.2.0" futures = "0.3.28" tokio-stream = { version = "0.1.14", features = ["sync"] } @@ -47,3 +47,4 @@ stream-operators = "0.1.1" pin-project = "1.1.5" merge-streams = "0.1.2" futures-concurrency = "7.5.0" +console-subscriber = "0.2.0" diff --git a/crates/language-server/src/backend.rs b/crates/language-server/src/backend.rs index 5ca4301bd..e127b8508 100644 --- a/crates/language-server/src/backend.rs +++ b/crates/language-server/src/backend.rs @@ -134,7 +134,7 @@ impl Backend { }), ).merge() .fuse(); - let mut change_stream = change_stream.buffer_until(filesystem_recently_synced_stream); + let mut change_stream = change_stream.buffer_until(filesystem_recently_synced_stream).flatten(); // let need_filesystem_sync_debounced = need_filesystem_sync; //.debounce_time(std::time::Duration::from_millis(10)); @@ -273,13 +273,14 @@ impl Backend { Some((params, responder)) = hover_stream.next() => { let db = db.snapshot(); let workspace = workspace.clone(); - let response = match tokio::spawn(handle_hover(db, workspace, params)).await { - Ok(response) => response, - Err(e) => { - eprintln!("Error handling hover: {:?}", e); - Ok(None) - } - }; + let response = handle_hover(db, workspace, params).await; + // let response = match tokio::spawn(handle_hover(db, workspace, params)).await { + // Ok(response) => response, + // Err(e) => { + // eprintln!("Error handling hover: {:?}", e); + // Ok(None) + // } + // }; let _ = responder.send(response); } Some((params, responder)) = goto_definition_stream.next() => { diff --git a/crates/language-server/src/language_server.rs b/crates/language-server/src/language_server.rs index 0628e1f96..5eda855f0 100644 --- a/crates/language-server/src/language_server.rs +++ b/crates/language-server/src/language_server.rs @@ -74,6 +74,7 @@ impl LanguageServer 
for Server { } async fn did_change(&self, params: lsp_types::DidChangeTextDocumentParams) { + info!("sending did change to channel of capacity {}", self.messaging.did_change_tx.capacity()); self.messaging.send_did_change(params).await; } @@ -86,6 +87,7 @@ impl LanguageServer for Server { } async fn hover(&self, params: lsp_types::HoverParams) -> Result> { + info!("sending hover to channel of capacity {}", self.messaging.hover_tx.capacity()); let rx = self.messaging.send_hover(params).await; rx.await.unwrap() } diff --git a/crates/language-server/src/main.rs b/crates/language-server/src/main.rs index 7d47a4566..57ced7b46 100644 --- a/crates/language-server/src/main.rs +++ b/crates/language-server/src/main.rs @@ -25,6 +25,7 @@ mod handlers { async fn main() { let stdin = tokio::io::stdin(); let stdout = tokio::io::stdout(); + console_subscriber::init(); // let message_channels = language_server::MessageChannels::new(); let (message_senders, message_receivers) = language_server::setup_message_channels(); @@ -39,7 +40,7 @@ async fn main() { // separate runtime for the backend let backend_runtime = tokio::runtime::Builder::new_multi_thread() - .worker_threads(4) + .worker_threads(1) .enable_all() .build() .unwrap(); From dd983aa7688c551f2ff47fc59fdd2fec15e04af9 Mon Sep 17 00:00:00 2001 From: Micah Date: Tue, 12 Mar 2024 00:34:27 -0500 Subject: [PATCH 43/66] use unbounded channels --- crates/language-server-macros/src/lib.rs | 26 ++--- crates/language-server/src/backend.rs | 105 ++++++++++-------- crates/language-server/src/language_server.rs | 26 ++--- crates/language-server/src/logger.rs | 2 + crates/language-server/src/main.rs | 16 +-- .../src/stream_buffer_until.rs | 35 +++--- .../test_files/single_ingot/src/foo.fe | 1 - 7 files changed, 112 insertions(+), 99 deletions(-) diff --git a/crates/language-server-macros/src/lib.rs b/crates/language-server-macros/src/lib.rs index ffeaf4f33..99150f982 100644 --- a/crates/language-server-macros/src/lib.rs +++ b/crates/language-server-macros/src/lib.rs @@ -115,8 +115,8 @@ fn gen_channel_structs( }; let sender_type = match result { - Some(result) => quote! { tokio::sync::mpsc::Sender<(#params, tokio::sync::oneshot::Sender<#result>)> }, - None => quote! { tokio::sync::mpsc::Sender<#params> }, + Some(result) => quote! { tokio::sync::mpsc::UnboundedSender<(#params, tokio::sync::oneshot::Sender<#result>)> }, + None => quote! { tokio::sync::mpsc::UnboundedSender<#params> }, }; quote! { @@ -138,8 +138,8 @@ fn gen_channel_structs( None => &unit_type, }; let stream_type = match result { - Some(result) => quote! { tokio_stream::wrappers::ReceiverStream<(#params, tokio::sync::oneshot::Sender<#result>)> }, - None => quote! { tokio_stream::wrappers::ReceiverStream<#params> }, + Some(result) => quote! { tokio_stream::wrappers::UnboundedReceiverStream<(#params, tokio::sync::oneshot::Sender<#result>)> }, + None => quote! { tokio_stream::wrappers::UnboundedReceiverStream<#params> }, }; quote! { @@ -154,7 +154,7 @@ fn gen_channel_structs( let tx = &channel.tx_name; let rx = &channel.rx_name; quote! { - let (#tx, #rx) = tokio::sync::mpsc::channel(10000); + let (#tx, #rx) = tokio::sync::mpsc::unbounded_channel(); } }) .collect(); @@ -176,7 +176,7 @@ fn gen_channel_structs( let rx = &channel.rx_name; quote! 
{ // #rx, - #stream_name: tokio_stream::wrappers::ReceiverStream::new(#rx), + #stream_name: tokio_stream::wrappers::UnboundedReceiverStream::new(#rx), } }) .collect(); @@ -202,29 +202,23 @@ fn gen_channel_structs( let (oneshot_tx, oneshot_rx) = tokio::sync::oneshot::channel::<#result>(); let mpsc = self.#tx.clone(); info!("sending oneshot sender: {:?}", #payload); - match mpsc.send((#payload, oneshot_tx)).await { - Ok(_) => info!("sent oneshot sender"), - Err(e) => error!("failed to send oneshot sender"), - } + mpsc.send((#payload, oneshot_tx)).expect("send payload with oneshot"); info!("returning oneshot receiver: {:?}", oneshot_rx); oneshot_rx }, None => quote! { - match self.#tx.send(#payload).await { - Ok(_) => info!("sent notification"), - Err(e) => error!("failed to send notification: {:?}", e), - } + self.#tx.send(#payload).expect("send payload"); }, }; let dispatcher_fn = match params { Some(params) => quote! { - pub async fn #sender_fn_name(&self, params: #params) -> #sender_fn_result { + pub fn #sender_fn_name(&self, params: #params) -> #sender_fn_result { #send_payload } }, None => quote! { - pub async fn #sender_fn_name(&self) -> #sender_fn_result { + pub fn #sender_fn_name(&self) -> #sender_fn_result { #send_payload } }, diff --git a/crates/language-server/src/backend.rs b/crates/language-server/src/backend.rs index e127b8508..55e5fd191 100644 --- a/crates/language-server/src/backend.rs +++ b/crates/language-server/src/backend.rs @@ -1,14 +1,15 @@ use crate::handlers::request::{handle_goto_definition, handle_hover}; -use crate::stream_buffer_until::BufferUntilStreamExt as _; + use crate::workspace::SyncableIngotFileContext; use fork_stream::StreamExt as _; +use futures::stream::iter; use futures::StreamExt; use futures_concurrency::prelude::*; use lsp_types::TextDocumentItem; use salsa::{ParallelDatabase, Snapshot}; use stream_operators::StreamOps; -use tokio_stream::wrappers::ReceiverStream; +use tokio_stream::wrappers::UnboundedReceiverStream; use std::sync::Arc; use tokio::sync::RwLock; @@ -32,6 +33,7 @@ pub struct Backend { pub(crate) client: Client, pub(crate) db: LanguageServerDatabase, pub(crate) workspace: Arc>, + workers: tokio::runtime::Runtime, } impl Backend { @@ -39,11 +41,17 @@ impl Backend { let db = LanguageServerDatabase::default(); let workspace = Arc::new(RwLock::new(Workspace::default())); + let workers = tokio::runtime::Builder::new_multi_thread() + .worker_threads(4) + .enable_all() + .build() + .unwrap(); Self { messaging, client, db, workspace, + workers, } } pub async fn handle_streams(mut self) { @@ -59,45 +67,51 @@ impl Backend { let mut initialized_stream = messaging.initialize_stream.fuse(); let mut shutdown_stream = messaging.shutdown_stream.fuse(); let mut did_close_stream = messaging.did_close_stream.fuse(); - let mut did_change_watched_files_stream = messaging.did_change_watched_files_stream.fork(); + let did_change_watched_files_stream = messaging.did_change_watched_files_stream.fork(); + + // let mut need_filesystem_sync = did_change_watched_files_stream + // .clone() + // .debounce_time(std::time::Duration::from_millis(500)); let mut need_filesystem_sync = did_change_watched_files_stream .clone() - .debounce_time(std::time::Duration::from_millis(10)); - - // let need_filesystem_sync = flat_did_change_watched_files.filter(|change| { - // let change_type = change.typ.clone(); - // async move { - // matches!( - // change_type, - // lsp_types::FileChangeType::CREATED | lsp_types::FileChangeType::DELETED - // ) - // } - // }); + .map(|params| 
iter(params.changes.into_iter())) + .flatten() + .filter(|change| { + let change_type = change.typ; + Box::pin(async move { + matches!( + change_type, + lsp_types::FileChangeType::CREATED | lsp_types::FileChangeType::DELETED + ) + }) + }) + .debounce_time(std::time::Duration::from_millis(500)); let (filesystem_recently_synced_tx, filesystem_recently_synced_rx) = - tokio::sync::mpsc::channel::<()>(1); - let filesystem_recently_synced_stream = ReceiverStream::new(filesystem_recently_synced_rx); + tokio::sync::mpsc::unbounded_channel::<()>(); + let _filesystem_recently_synced_stream = + UnboundedReceiverStream::new(filesystem_recently_synced_rx); - let flat_did_change_watched_files = - did_change_watched_files_stream.map(|params| futures::stream::iter(params.changes)).flatten().fork(); + let flat_did_change_watched_files = did_change_watched_files_stream + .map(|params| futures::stream::iter(params.changes)) + .flatten() + .fork(); - let did_change_watched_file_stream = flat_did_change_watched_files.clone().filter(|change| { - let change_type = change.typ.clone(); - Box::pin(async move { - matches!(change_type, lsp_types::FileChangeType::CHANGED) - }) - }); + let did_change_watched_file_stream = + flat_did_change_watched_files.clone().filter(|change| { + let change_type = change.typ; + Box::pin(async move { matches!(change_type, lsp_types::FileChangeType::CHANGED) }) + }); - let did_create_watched_file_stream = flat_did_change_watched_files.clone().filter(|change| { - let change_type = change.typ.clone(); - Box::pin(async move { - matches!(change_type, lsp_types::FileChangeType::CREATED) - }) - }); + let did_create_watched_file_stream = + flat_did_change_watched_files.clone().filter(|change| { + let change_type = change.typ; + Box::pin(async move { matches!(change_type, lsp_types::FileChangeType::CREATED) }) + }); let did_open_stream = messaging.did_open_stream.fuse(); let did_change_stream = messaging.did_change_stream.fuse(); - let change_stream = ( + let mut change_stream = ( did_change_watched_file_stream.map(|change| { let uri = change.uri; let path = uri.to_file_path().unwrap(); @@ -132,9 +146,10 @@ impl Backend { version: params.text_document.version, text: params.content_changes[0].text.clone(), }), - ).merge() - .fuse(); - let mut change_stream = change_stream.buffer_until(filesystem_recently_synced_stream).flatten(); + ) + .merge() + .fuse(); + // let mut change_stream = change_stream.buffer_until(filesystem_recently_synced_stream).flatten(); // let need_filesystem_sync_debounced = need_filesystem_sync; //.debounce_time(std::time::Duration::from_millis(10)); @@ -176,9 +191,10 @@ impl Backend { let _ = responder.send(Ok(())); } Some(_) = need_filesystem_sync.next() => { + info!("filesystem recently synced"); let workspace = &mut workspace.write().await; let _ = workspace.sync(db); - filesystem_recently_synced_tx.send(()).await.unwrap(); + filesystem_recently_synced_tx.send(()).unwrap(); } // Some(_) = need_filesystem_sync_debounced.next() => { // info!("filesystem recently synced"); @@ -191,7 +207,7 @@ impl Backend { let db = db.snapshot(); let client = client.clone(); let workspace = workspace.clone(); - tokio::spawn( + self.workers.spawn( async move { handle_diagnostics(client, workspace, db, doc.uri).await } ); } @@ -273,14 +289,14 @@ impl Backend { Some((params, responder)) = hover_stream.next() => { let db = db.snapshot(); let workspace = workspace.clone(); - let response = handle_hover(db, workspace, params).await; - // let response = match tokio::spawn(handle_hover(db, 
workspace, params)).await { - // Ok(response) => response, - // Err(e) => { - // eprintln!("Error handling hover: {:?}", e); - // Ok(None) - // } - // }; + // let response = handle_hover(db, workspace, params).await; + let response = match self.workers.spawn(handle_hover(db, workspace, params)).await { + Ok(response) => response, + Err(e) => { + eprintln!("Error handling hover: {:?}", e); + Ok(None) + } + }; let _ = responder.send(response); } Some((params, responder)) = goto_definition_stream.next() => { @@ -296,6 +312,7 @@ impl Backend { let _ = responder.send(Ok(response)); } } + tokio::task::yield_now().await; } } } diff --git a/crates/language-server/src/language_server.rs b/crates/language-server/src/language_server.rs index 5eda855f0..843ca47bd 100644 --- a/crates/language-server/src/language_server.rs +++ b/crates/language-server/src/language_server.rs @@ -41,13 +41,11 @@ impl LanguageServer for Server { async fn initialize(&self, initialize_params: InitializeParams) -> Result { // forward the initialize request to the messaging system // let messaging = self.messaging.read().await; - let rx = self.messaging.send_initialize(initialize_params).await; + let rx = self.messaging.send_initialize(initialize_params); info!("awaiting initialization result"); match rx.await { - Ok(initialize_result) => { - initialize_result - } + Ok(initialize_result) => initialize_result, Err(e) => { error!("Failed to initialize: {}", e); return Err(tower_lsp::jsonrpc::Error::internal_error()); @@ -70,26 +68,26 @@ impl LanguageServer for Server { } async fn did_open(&self, params: lsp_types::DidOpenTextDocumentParams) { - self.messaging.send_did_open(params).await; + self.messaging.send_did_open(params); } async fn did_change(&self, params: lsp_types::DidChangeTextDocumentParams) { - info!("sending did change to channel of capacity {}", self.messaging.did_change_tx.capacity()); - self.messaging.send_did_change(params).await; + // info!("sending did change to channel of capacity {}", self.messaging.did_change_tx.capacity()); + self.messaging.send_did_change(params); } async fn did_close(&self, params: DidCloseTextDocumentParams) { - self.messaging.send_did_close(params).await; + self.messaging.send_did_close(params); } async fn did_change_watched_files(&self, params: DidChangeWatchedFilesParams) { - self.messaging.send_did_change_watched_files(params).await; + self.messaging.send_did_change_watched_files(params); } async fn hover(&self, params: lsp_types::HoverParams) -> Result> { - info!("sending hover to channel of capacity {}", self.messaging.hover_tx.capacity()); - let rx = self.messaging.send_hover(params).await; - rx.await.unwrap() + // info!("sending hover to channel of capacity {}", self.messaging.hover_tx.capacity()); + let rx = self.messaging.send_hover(params); + rx.await.expect("hover response") } async fn goto_definition( @@ -97,7 +95,7 @@ impl LanguageServer for Server { params: lsp_types::GotoDefinitionParams, ) -> Result> { // let messaging = self.messaging.read().await; - let rx = self.messaging.send_goto_definition(params).await; - rx.await.unwrap() + let rx = self.messaging.send_goto_definition(params); + rx.await.expect("goto definition response") } } diff --git a/crates/language-server/src/logger.rs b/crates/language-server/src/logger.rs index 17f051c4d..a05fdee90 100644 --- a/crates/language-server/src/logger.rs +++ b/crates/language-server/src/logger.rs @@ -1,5 +1,6 @@ use log::{Level, LevelFilter, Metadata, Record, SetLoggerError}; use lsp_types::MessageType; +use 
tokio::task::yield_now; use tower_lsp::Client; pub struct Logger { @@ -49,5 +50,6 @@ pub async fn handle_log_messages( loop { let (message, message_type) = rx.recv().await.unwrap(); client.log_message(message_type, message).await; + yield_now().await; } } diff --git a/crates/language-server/src/main.rs b/crates/language-server/src/main.rs index 57ced7b46..26b66553f 100644 --- a/crates/language-server/src/main.rs +++ b/crates/language-server/src/main.rs @@ -1,5 +1,4 @@ mod backend; -mod stream_buffer_until; mod capabilities; mod db; mod diagnostics; @@ -7,6 +6,7 @@ mod globals; mod goto; mod language_server; mod logger; +mod stream_buffer_until; mod util; mod workspace; @@ -39,13 +39,13 @@ async fn main() { let rx = setup_logger(log::Level::Info).unwrap(); // separate runtime for the backend - let backend_runtime = tokio::runtime::Builder::new_multi_thread() - .worker_threads(1) - .enable_all() - .build() - .unwrap(); + // let backend_runtime = tokio::runtime::Builder::new_multi_thread() + // .worker_threads(4) + // .enable_all() + // .build() + // .unwrap(); - backend_runtime.spawn(backend.handle_streams()); + // backend_runtime.spawn(backend.handle_streams()); tokio::select! { // setup logging @@ -53,5 +53,7 @@ async fn main() { // start the server _ = tower_lsp::Server::new(stdin, stdout, socket) .serve(service) => {} + // backend + _ = backend.handle_streams() => {} } } diff --git a/crates/language-server/src/stream_buffer_until.rs b/crates/language-server/src/stream_buffer_until.rs index 0300070f9..3248e6821 100644 --- a/crates/language-server/src/stream_buffer_until.rs +++ b/crates/language-server/src/stream_buffer_until.rs @@ -34,21 +34,21 @@ where } } - pub fn input_stream_mut(&mut self) -> &mut I { - &mut self.input_stream - } + // pub fn input_stream_mut(&mut self) -> &mut I { + // &mut self.input_stream + // } - pub fn input_stream(&self) -> &I { - &self.input_stream - } + // pub fn input_stream(&self) -> &I { + // &self.input_stream + // } - pub fn trigger_stream_mut(&mut self) -> &mut T { - &mut self.trigger_stream - } + // pub fn trigger_stream_mut(&mut self) -> &mut T { + // &mut self.trigger_stream + // } - pub fn trigger_stream(&self) -> &T { - &self.trigger_stream - } + // pub fn trigger_stream(&self) -> &T { + // &self.trigger_stream + // } } impl Stream for BufferUntilStream where @@ -89,10 +89,10 @@ where } // Send any ready buffer or finish up - if ready_buffer.len() > 0 { + if !ready_buffer.is_empty() { info!("Returning items stream from ready_buffer"); - let current_ready_buffer = std::mem::replace(this.ready_buffer, VecDeque::new()); - return Poll::Ready(Some(iter(current_ready_buffer.into_iter()))); + let current_ready_buffer = std::mem::take(this.ready_buffer); + Poll::Ready(Some(iter(current_ready_buffer))) } else if finished { return Poll::Ready(None); } else { @@ -136,7 +136,8 @@ mod tests { let mut accumulating_stream = BufferUntilStream::new( UnboundedReceiverStream::from(input_receiver), BroadcastStream::from(trigger_receiver), - ).flatten(); + ) + .flatten(); input_sender.send(1).unwrap(); input_sender.send(2).unwrap(); @@ -184,6 +185,6 @@ mod tests { } assert_eq!(output, vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10]); } - + // TODO: write tests for end of input stream } diff --git a/crates/language-server/test_files/single_ingot/src/foo.fe b/crates/language-server/test_files/single_ingot/src/foo.fe index 180cd1e35..c2251ee70 100644 --- a/crates/language-server/test_files/single_ingot/src/foo.fe +++ b/crates/language-server/test_files/single_ingot/src/foo.fe @@ -5,5 
+5,4 @@ pub fn foo() { pub struct Foo { pub x: i32 - } \ No newline at end of file From 55dbc4d3c75fdac00b1b680022f673cd0267ff58 Mon Sep 17 00:00:00 2001 From: Micah Date: Wed, 13 Mar 2024 19:33:29 -0500 Subject: [PATCH 44/66] language server tower-lsp rewrite cleanup --- .vscode/launch.json | 5 +- crates/language-server/Cargo.toml | 2 - crates/language-server/src/backend.rs | 182 +++++++----------- .../language-server/src/handlers/request.rs | 12 +- 4 files changed, 83 insertions(+), 118 deletions(-) diff --git a/.vscode/launch.json b/.vscode/launch.json index 3b7fb3e6a..f835c32ec 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -7,7 +7,8 @@ { "args": [ "--extensionDevelopmentPath=${workspaceFolder}/crates/language-server/editors/vscode", - "${workspaceFolder}/crates/" + "${workspaceFolder}/crates/", + "--disable-extensions" ], "name": "Launch Fe VSCode Extension", "outFiles": [ @@ -20,6 +21,8 @@ "env": { "RUST_BACKTRACE": "1" } + // we need to disable rust-analyzer it's so slow + }, ] } \ No newline at end of file diff --git a/crates/language-server/Cargo.toml b/crates/language-server/Cargo.toml index 4c951d3c4..3eb70bc2a 100644 --- a/crates/language-server/Cargo.toml +++ b/crates/language-server/Cargo.toml @@ -22,8 +22,6 @@ fe-analyzer = {path = "../analyzer", version = "^0.23.0"} driver = { path = "../driver2", package = "fe-driver2" } common = { path = "../common2", package = "fe-common2" } anyhow = "1.0.71" -crossbeam-channel = "0.5.8" -lsp-server = "0.7.0" lsp-types = "0.94.0" serde = "1.0.162" serde_json = "1.0.96" diff --git a/crates/language-server/src/backend.rs b/crates/language-server/src/backend.rs index 55e5fd191..e45c43f19 100644 --- a/crates/language-server/src/backend.rs +++ b/crates/language-server/src/backend.rs @@ -1,5 +1,6 @@ use crate::handlers::request::{handle_goto_definition, handle_hover}; +use crate::stream_buffer_until::BufferUntilStreamExt; use crate::workspace::SyncableIngotFileContext; use fork_stream::StreamExt as _; @@ -9,7 +10,7 @@ use futures_concurrency::prelude::*; use lsp_types::TextDocumentItem; use salsa::{ParallelDatabase, Snapshot}; use stream_operators::StreamOps; -use tokio_stream::wrappers::UnboundedReceiverStream; +use tokio_stream::wrappers::{IntervalStream, UnboundedReceiverStream}; use std::sync::Arc; use tokio::sync::RwLock; @@ -73,7 +74,7 @@ impl Backend { // .clone() // .debounce_time(std::time::Duration::from_millis(500)); - let mut need_filesystem_sync = did_change_watched_files_stream + let mut need_filetree_sync = did_change_watched_files_stream .clone() .map(|params| iter(params.changes.into_iter())) .flatten() @@ -85,13 +86,13 @@ impl Backend { lsp_types::FileChangeType::CREATED | lsp_types::FileChangeType::DELETED ) }) - }) - .debounce_time(std::time::Duration::from_millis(500)); + }); + // .debounce_time(std::time::Duration::from_millis(50)); let (filesystem_recently_synced_tx, filesystem_recently_synced_rx) = tokio::sync::mpsc::unbounded_channel::<()>(); - let _filesystem_recently_synced_stream = - UnboundedReceiverStream::new(filesystem_recently_synced_rx); + let filesystem_recently_synced_stream = + UnboundedReceiverStream::new(filesystem_recently_synced_rx).fork(); let flat_did_change_watched_files = did_change_watched_files_stream .map(|params| futures::stream::iter(params.changes)) @@ -112,28 +113,28 @@ impl Backend { let did_open_stream = messaging.did_open_stream.fuse(); let did_change_stream = messaging.did_change_stream.fuse(); let mut change_stream = ( - did_change_watched_file_stream.map(|change| { - let 
uri = change.uri; - let path = uri.to_file_path().unwrap(); - let text = std::fs::read_to_string(path).unwrap(); - TextDocumentItem { - uri: uri.clone(), - language_id: LANGUAGE_ID.to_string(), - version: 0, - text, - } - }), - did_create_watched_file_stream.map(|change| { - let uri = change.uri; - let path = uri.to_file_path().unwrap(); - let text = std::fs::read_to_string(path).unwrap(); - TextDocumentItem { - uri: uri.clone(), - language_id: LANGUAGE_ID.to_string(), - version: 0, - text, - } - }), + // did_change_watched_file_stream.map(|change| { + // let uri = change.uri; + // let path = uri.to_file_path().unwrap(); + // let text = std::fs::read_to_string(path).unwrap(); + // TextDocumentItem { + // uri: uri.clone(), + // language_id: LANGUAGE_ID.to_string(), + // version: 0, + // text, + // } + // }), + // did_create_watched_file_stream.map(|change| { + // let uri = change.uri; + // let path = uri.to_file_path().unwrap(); + // let text = std::fs::read_to_string(path).unwrap(); + // TextDocumentItem { + // uri: uri.clone(), + // language_id: LANGUAGE_ID.to_string(), + // version: 0, + // text, + // } + // }), did_open_stream.map(|params| TextDocumentItem { uri: params.text_document.uri, language_id: LANGUAGE_ID.to_string(), @@ -148,10 +149,8 @@ impl Backend { }), ) .merge() + .debounce_time(std::time::Duration::from_millis(20)) .fuse(); - // let mut change_stream = change_stream.buffer_until(filesystem_recently_synced_stream).flatten(); - - // let need_filesystem_sync_debounced = need_filesystem_sync; //.debounce_time(std::time::Duration::from_millis(10)); let mut hover_stream = messaging.hover_stream.fuse(); let mut goto_definition_stream = messaging.goto_definition_stream.fuse(); @@ -175,6 +174,8 @@ impl Backend { .unwrap(), ); + let _ = workspace.sync(db); + let capabilities = server_capabilities(); let initialize_result = lsp_types::InitializeResult { capabilities, @@ -190,19 +191,45 @@ impl Backend { info!("shutting down language server"); let _ = responder.send(Ok(())); } - Some(_) = need_filesystem_sync.next() => { - info!("filesystem recently synced"); - let workspace = &mut workspace.write().await; - let _ = workspace.sync(db); - filesystem_recently_synced_tx.send(()).unwrap(); + Some(change) = need_filetree_sync.next() => { + let change_type = change.typ.clone(); + let path_buf = change.uri.to_file_path().unwrap(); + let path = path_buf.to_str().unwrap(); + match change_type { + lsp_types::FileChangeType::CREATED => { + // let workspace = &mut workspace.write().await; + // workspace.sync(db).expect("Failed to sync workspace"); + // let input = workspace + // .touch_input_from_file_path(db, path) + // .unwrap(); + + // let ingot = workspace + // .touch_ingot_from_file_path(db, path) + // .unwrap(); + + // let config_path = ingot.path(db).as_str(); + // let mut context = workspace.ingot_context_from_config_path(db, config_path).expect("Failed to get ingot context"); + // let files = context.files.insert(path, input); + + } + lsp_types::FileChangeType::DELETED => { + let workspace = &mut workspace.write().await; + let input = workspace + .touch_input_from_file_path( + db, + path + ) + .unwrap(); + let _ = input.sync(db, None); + } + _ => {} + } } - // Some(_) = need_filesystem_sync_debounced.next() => { - // info!("filesystem recently synced"); - // // let _ = filesystem_recently_synced_tx.send(()); - // } Some(doc) = change_stream.next() => { info!("change detected: {:?}", doc.uri); - update_inputs(workspace.clone(), db, doc.clone()).await; + update_input(workspace.clone(), 
db, doc.clone()).await; + // wait 2 seconds before syncing the filesystem + tokio::time::sleep(std::time::Duration::from_secs(2)).await; let db = db.snapshot(); let client = client.clone(); @@ -227,65 +254,6 @@ impl Backend { .unwrap(); let _ = input.sync(db, None); } - // Some(params) = did_change_watched_files_stream.next() => { - // let changes = params.changes; - // for change in changes { - // let uri = change.uri; - // let path = uri.to_file_path().unwrap(); - - // match change.typ { - // lsp_types::FileChangeType::CREATED => { - // // TODO: handle this more carefully! - // // this is inefficient, a hack for now - // let workspace = &mut workspace.write().await; - // let _ = workspace.sync(db); - // let input = workspace - // .touch_input_from_file_path(db, path.to_str().unwrap()) - // .unwrap(); - // let _ = input.sync(db, None); - // } - // lsp_types::FileChangeType::CHANGED => { - // let workspace = &mut workspace.write().await; - // let input = workspace - // .touch_input_from_file_path(db, path.to_str().unwrap()) - // .unwrap(); - // let _ = input.sync(db, None); - // } - // lsp_types::FileChangeType::DELETED => { - // let workspace = &mut workspace.write().await; - // // TODO: handle this more carefully! - // // this is inefficient, a hack for now - // let _ = workspace.sync(db); - // } - // _ => {} - // } - // // collect diagnostics for the file - // if change.typ != lsp_types::FileChangeType::DELETED { - // let text = std::fs::read_to_string(path).unwrap(); - // update_inputs(workspace.clone(), db, TextDocumentItem { - // uri: uri.clone(), - // language_id: LANGUAGE_ID.to_string(), - // version: 0, - // text: text.clone(), - // }).await; - - // let client = client.clone(); - // let workspace = workspace.clone(); - // let db = db.snapshot(); - - // tokio::spawn( - // async move { - // handle_diagnostics( - // client, - // workspace, - // db, - // uri.clone(), - // ).await - // } - // ); - // } - // } - // } Some((params, responder)) = hover_stream.next() => { let db = db.snapshot(); let workspace = workspace.clone(); @@ -317,24 +285,20 @@ impl Backend { } } -async fn update_inputs( +async fn update_input( workspace: Arc>, db: &mut LanguageServerDatabase, - params: TextDocumentItem, + path: &str, + contents: Option, ) { let workspace = &mut workspace.write().await; let input = workspace .touch_input_from_file_path( db, - params - .uri - .to_file_path() - .expect("Failed to convert URI to file path") - .to_str() - .expect("Failed to convert file path to string"), + path ) .unwrap(); - let _ = input.sync(db, Some(params.text.clone())); + let _ = input.sync(db, contents); } async fn handle_diagnostics( diff --git a/crates/language-server/src/handlers/request.rs b/crates/language-server/src/handlers/request.rs index 26b79b935..7fe561011 100644 --- a/crates/language-server/src/handlers/request.rs +++ b/crates/language-server/src/handlers/request.rs @@ -18,7 +18,7 @@ use crate::{ util::{to_lsp_location_from_scope, to_offset_from_position}, workspace::{IngotFileContext, Workspace}, }; -use lsp_server::ResponseError; +// use tower_lsp::lsp_types::{ResponseError, Url}; pub async fn handle_hover( db: Snapshot, @@ -157,11 +157,11 @@ pub async fn handle_goto_definition( .collect::>() .join("\n"); - let _error = (!errors.is_empty()).then_some(ResponseError { - code: lsp_types::error_codes::SERVER_CANCELLED as i32, - message: errors, - data: None, - }); + // let _error = (!errors.is_empty()).then_some(ResponseError{ + // code: lsp_types::error_codes::SERVER_CANCELLED as i32, + // message: 
errors, + // data: None, + // }); // state.send_response(response_message)?; Ok(Some(lsp_types::GotoDefinitionResponse::Array( From e746872eeb102302bb7d1d3da081de32c8f8a9ee Mon Sep 17 00:00:00 2001 From: Micah Date: Mon, 18 Mar 2024 23:49:16 -0500 Subject: [PATCH 45/66] workspace cleanup; fix panic --- .vscode/launch.json | 2 +- Cargo.lock | 14 -- crates/language-server/src/backend.rs | 151 ++++-------- crates/language-server/src/db.rs | 2 +- crates/language-server/src/diagnostics.rs | 4 +- crates/language-server/src/goto.rs | 7 +- .../language-server/src/handlers/request.rs | 12 +- .../src/stream_buffer_until.rs | 4 +- crates/language-server/src/workspace.rs | 223 ++++++------------ 9 files changed, 142 insertions(+), 277 deletions(-) diff --git a/.vscode/launch.json b/.vscode/launch.json index f835c32ec..564278de6 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -19,7 +19,7 @@ "type": "extensionHost", // we need to enable backtrace on the extension host "env": { - "RUST_BACKTRACE": "1" + "RUST_BACKTRACE": "full" } // we need to disable rust-analyzer it's so slow diff --git a/Cargo.lock b/Cargo.lock index 49f08f50f..2a6584274 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1289,7 +1289,6 @@ dependencies = [ "clap 4.3.12", "codespan-reporting", "console-subscriber", - "crossbeam-channel", "debounced", "dir-test", "fe-analyzer", @@ -1307,7 +1306,6 @@ dependencies = [ "glob", "indexmap 1.9.3", "log", - "lsp-server", "lsp-types", "merge-streams", "patricia_tree", @@ -2164,18 +2162,6 @@ dependencies = [ "syn 1.0.109", ] -[[package]] -name = "lsp-server" -version = "0.7.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37ea9ae5a5082ca3b6ae824fc7666cd206b99168a4d4c769ad8fe9cc740df6a6" -dependencies = [ - "crossbeam-channel", - "log", - "serde", - "serde_json", -] - [[package]] name = "lsp-types" version = "0.94.1" diff --git a/crates/language-server/src/backend.rs b/crates/language-server/src/backend.rs index e45c43f19..49025c93b 100644 --- a/crates/language-server/src/backend.rs +++ b/crates/language-server/src/backend.rs @@ -1,16 +1,16 @@ use crate::handlers::request::{handle_goto_definition, handle_hover}; -use crate::stream_buffer_until::BufferUntilStreamExt; + use crate::workspace::SyncableIngotFileContext; use fork_stream::StreamExt as _; -use futures::stream::iter; + use futures::StreamExt; use futures_concurrency::prelude::*; use lsp_types::TextDocumentItem; use salsa::{ParallelDatabase, Snapshot}; use stream_operators::StreamOps; -use tokio_stream::wrappers::{IntervalStream, UnboundedReceiverStream}; + use std::sync::Arc; use tokio::sync::RwLock; @@ -67,33 +67,9 @@ impl Backend { let mut initialized_stream = messaging.initialize_stream.fuse(); let mut shutdown_stream = messaging.shutdown_stream.fuse(); - let mut did_close_stream = messaging.did_close_stream.fuse(); + // let mut did_close_stream = messaging.did_close_stream.fuse(); let did_change_watched_files_stream = messaging.did_change_watched_files_stream.fork(); - // let mut need_filesystem_sync = did_change_watched_files_stream - // .clone() - // .debounce_time(std::time::Duration::from_millis(500)); - - let mut need_filetree_sync = did_change_watched_files_stream - .clone() - .map(|params| iter(params.changes.into_iter())) - .flatten() - .filter(|change| { - let change_type = change.typ; - Box::pin(async move { - matches!( - change_type, - lsp_types::FileChangeType::CREATED | lsp_types::FileChangeType::DELETED - ) - }) - }); - // 
.debounce_time(std::time::Duration::from_millis(50)); - - let (filesystem_recently_synced_tx, filesystem_recently_synced_rx) = - tokio::sync::mpsc::unbounded_channel::<()>(); - let filesystem_recently_synced_stream = - UnboundedReceiverStream::new(filesystem_recently_synced_rx).fork(); - let flat_did_change_watched_files = did_change_watched_files_stream .map(|params| futures::stream::iter(params.changes)) .flatten() @@ -110,31 +86,38 @@ impl Backend { let change_type = change.typ; Box::pin(async move { matches!(change_type, lsp_types::FileChangeType::CREATED) }) }); + + let mut did_delete_watch_file_stream = + flat_did_change_watched_files.clone().filter(|change| { + let change_type = change.typ; + Box::pin(async move { matches!(change_type, lsp_types::FileChangeType::DELETED) }) + }); + let did_open_stream = messaging.did_open_stream.fuse(); let did_change_stream = messaging.did_change_stream.fuse(); let mut change_stream = ( - // did_change_watched_file_stream.map(|change| { - // let uri = change.uri; - // let path = uri.to_file_path().unwrap(); - // let text = std::fs::read_to_string(path).unwrap(); - // TextDocumentItem { - // uri: uri.clone(), - // language_id: LANGUAGE_ID.to_string(), - // version: 0, - // text, - // } - // }), - // did_create_watched_file_stream.map(|change| { - // let uri = change.uri; - // let path = uri.to_file_path().unwrap(); - // let text = std::fs::read_to_string(path).unwrap(); - // TextDocumentItem { - // uri: uri.clone(), - // language_id: LANGUAGE_ID.to_string(), - // version: 0, - // text, - // } - // }), + did_change_watched_file_stream.map(|change| { + let uri = change.uri; + let path = uri.to_file_path().unwrap(); + let text = std::fs::read_to_string(path).unwrap(); + TextDocumentItem { + uri: uri.clone(), + language_id: LANGUAGE_ID.to_string(), + version: 0, + text, + } + }), + did_create_watched_file_stream.map(|change| { + let uri = change.uri; + let path = uri.to_file_path().unwrap(); + let text = std::fs::read_to_string(path).unwrap(); + TextDocumentItem { + uri: uri.clone(), + language_id: LANGUAGE_ID.to_string(), + version: 0, + text, + } + }), did_open_stream.map(|params| TextDocumentItem { uri: params.text_document.uri, language_id: LANGUAGE_ID.to_string(), @@ -191,45 +174,19 @@ impl Backend { info!("shutting down language server"); let _ = responder.send(Ok(())); } - Some(change) = need_filetree_sync.next() => { - let change_type = change.typ.clone(); - let path_buf = change.uri.to_file_path().unwrap(); - let path = path_buf.to_str().unwrap(); - match change_type { - lsp_types::FileChangeType::CREATED => { - // let workspace = &mut workspace.write().await; - // workspace.sync(db).expect("Failed to sync workspace"); - // let input = workspace - // .touch_input_from_file_path(db, path) - // .unwrap(); - - // let ingot = workspace - // .touch_ingot_from_file_path(db, path) - // .unwrap(); - - // let config_path = ingot.path(db).as_str(); - // let mut context = workspace.ingot_context_from_config_path(db, config_path).expect("Failed to get ingot context"); - // let files = context.files.insert(path, input); - - } - lsp_types::FileChangeType::DELETED => { - let workspace = &mut workspace.write().await; - let input = workspace - .touch_input_from_file_path( - db, - path - ) - .unwrap(); - let _ = input.sync(db, None); - } - _ => {} - } + Some(deleted) = did_delete_watch_file_stream.next() => { + let path = deleted.uri.to_file_path().unwrap(); + info!("file deleted: {:?}", path); + let path = path.to_str().unwrap(); + let workspace = 
workspace.clone(); + let _ = workspace.write().await.remove_input_for_file_path(db, path); } Some(doc) = change_stream.next() => { info!("change detected: {:?}", doc.uri); - update_input(workspace.clone(), db, doc.clone()).await; - // wait 2 seconds before syncing the filesystem - tokio::time::sleep(std::time::Duration::from_secs(2)).await; + let path_buf = doc.uri.to_file_path().unwrap(); + let path = path_buf.to_str().unwrap(); + let contents = Some(doc.text); + update_input(workspace.clone(), db, path, contents).await; let db = db.snapshot(); let client = client.clone(); @@ -238,22 +195,6 @@ impl Backend { async move { handle_diagnostics(client, workspace, db, doc.uri).await } ); } - Some(params) = did_close_stream.next() => { - let workspace = &mut workspace.write().await; - let input = workspace - .touch_input_from_file_path( - db, - params - .text_document - .uri - .to_file_path() - .unwrap() - .to_str() - .unwrap(), - ) - .unwrap(); - let _ = input.sync(db, None); - } Some((params, responder)) = hover_stream.next() => { let db = db.snapshot(); let workspace = workspace.clone(); @@ -291,6 +232,7 @@ async fn update_input( path: &str, contents: Option, ) { + info!("updating input for {:?}", path); let workspace = &mut workspace.write().await; let input = workspace .touch_input_from_file_path( @@ -298,7 +240,9 @@ async fn update_input( path ) .unwrap(); - let _ = input.sync(db, contents); + if let Some(contents) = contents { + let _ = input.sync_from_text(db, contents); + } } async fn handle_diagnostics( @@ -307,6 +251,7 @@ async fn handle_diagnostics( db: Snapshot, url: lsp_types::Url, ) { + info!("handling diagnostics for {:?}", url); let workspace = &workspace.read().await; let diagnostics = get_diagnostics(&db, workspace, url.clone()); diff --git a/crates/language-server/src/db.rs b/crates/language-server/src/db.rs index 145f2c96a..89bac2c06 100644 --- a/crates/language-server/src/db.rs +++ b/crates/language-server/src/db.rs @@ -73,7 +73,7 @@ impl LanguageServerDatabase { pub fn finalize_diags( &self, - diags: &Vec>, + diags: &[Box], ) -> Vec { let mut diags: Vec<_> = diags.iter().map(|d| d.to_complete(self)).collect(); diags.sort_by(|lhs, rhs| match lhs.error_code.cmp(&rhs.error_code) { diff --git a/crates/language-server/src/diagnostics.rs b/crates/language-server/src/diagnostics.rs index d87a821a8..3af80c502 100644 --- a/crates/language-server/src/diagnostics.rs +++ b/crates/language-server/src/diagnostics.rs @@ -10,7 +10,7 @@ use common::{ InputDb, InputFile, }; use fxhash::FxHashMap; -use hir::diagnostics::DiagnosticVoucher; +use hir::{diagnostics::DiagnosticVoucher, LowerHirDb}; use salsa::Snapshot; use crate::{ @@ -134,7 +134,7 @@ fn run_diagnostics( path: &str, ) -> Vec { let file_path = path; - let top_mod = workspace.top_mod_from_file_path(db, file_path).unwrap(); + let top_mod = workspace.top_mod_from_file_path(db.as_lower_hir_db(), file_path).unwrap(); let diags = &db.analyze_top_mod(top_mod); db.finalize_diags(diags) } diff --git a/crates/language-server/src/goto.rs b/crates/language-server/src/goto.rs index 00ecb73c2..a424284e3 100644 --- a/crates/language-server/src/goto.rs +++ b/crates/language-server/src/goto.rs @@ -99,6 +99,7 @@ mod tests { use common::input::IngotKind; use dir_test::{dir_test, Fixture}; use fe_compiler_test_utils::snap_test; + use hir::LowerHirDb; use salsa::ParallelDatabase; use std::path::Path; @@ -145,7 +146,7 @@ mod tests { .set_text(db) .to((*fixture.content()).to_string()); let top_mod = workspace - .top_mod_from_file_path(&db.snapshot(), 
fe_source_path) + .top_mod_from_file_path(db.as_lower_hir_db(), fe_source_path) .unwrap(); let ingot = workspace.touch_ingot_from_file_path(db, fixture.path()); @@ -203,7 +204,7 @@ mod tests { .unwrap(); input.set_text(db).to((*fixture.content()).to_string()); let top_mod = workspace - .top_mod_from_file_path(&db.snapshot(), fixture.path()) + .top_mod_from_file_path(db.as_lower_hir_db(), fixture.path()) .unwrap(); let cursors = extract_multiple_cursor_positions_from_spans(db, top_mod); @@ -260,7 +261,7 @@ mod tests { .set_text(db) .to((*fixture.content()).to_string()); let top_mod = workspace - .top_mod_from_file_path(&db.snapshot(), fixture.path()) + .top_mod_from_file_path(db.as_lower_hir_db(), fixture.path()) .unwrap(); let cursors = extract_multiple_cursor_positions_from_spans(db, top_mod); diff --git a/crates/language-server/src/handlers/request.rs b/crates/language-server/src/handlers/request.rs index 7fe561011..f615ce08f 100644 --- a/crates/language-server/src/handlers/request.rs +++ b/crates/language-server/src/handlers/request.rs @@ -1,14 +1,14 @@ use std::sync::Arc; use common::{input::IngotKind, InputDb}; -use hir::SpannedHirDb; +use hir::{LowerHirDb, SpannedHirDb}; use hir_analysis::{ name_resolution::{EarlyResolvedPath, NameRes}, HirAnalysisDb, }; use log::info; -use salsa::{ParallelDatabase, Snapshot}; +use salsa::{Snapshot}; use tokio::sync::RwLock; use tower_lsp::jsonrpc::Result; @@ -34,7 +34,7 @@ pub async fn handle_hover( .uri .path(); info!("getting hover info for file_path: {:?}", file_path); - let input = workspace.get_input_from_file_path(&db, file_path); + let input = workspace.get_input_from_file_path(file_path); let ingot = input.map(|input| input.ingot(db.as_input_db())); let file_text = input.unwrap().text(db.as_input_db()); @@ -75,7 +75,7 @@ pub async fn handle_hover( }; let top_mod = workspace - .top_mod_from_file_path(&db.snapshot(), file_path) + .top_mod_from_file_path(db.as_lower_hir_db(), file_path) .unwrap(); let early_resolution = goto_enclosing_path(&db, top_mod, cursor); @@ -124,7 +124,7 @@ pub async fn handle_goto_definition( // Get the module and the goto info let file_path = params.text_document.uri.path(); let top_mod = workspace - .top_mod_from_file_path(&db.snapshot(), file_path) + .top_mod_from_file_path(db.as_lower_hir_db(), file_path) .unwrap(); let goto_info = goto_enclosing_path(&db, top_mod, cursor); @@ -148,7 +148,7 @@ pub async fn handle_goto_definition( .map(|scope| to_lsp_location_from_scope(scope, db.as_spanned_hir_db())) .collect::>(); - let errors = scopes + let _errors = scopes .iter() .filter_map(|scope| *scope) .map(|scope| to_lsp_location_from_scope(scope, db.as_spanned_hir_db())) diff --git a/crates/language-server/src/stream_buffer_until.rs b/crates/language-server/src/stream_buffer_until.rs index 3248e6821..43403f880 100644 --- a/crates/language-server/src/stream_buffer_until.rs +++ b/crates/language-server/src/stream_buffer_until.rs @@ -20,7 +20,7 @@ pub struct BufferUntilStream { ready_buffer: VecDeque, } -impl<'s, I, T, U> BufferUntilStream +impl BufferUntilStream where I: Stream, T: Stream, @@ -109,7 +109,7 @@ where fn buffer_until(self, trigger: T) -> BufferUntilStream; } -impl<'s, I, T, U: Debug> BufferUntilStreamExt for I +impl BufferUntilStreamExt for I where I: Stream, T: Stream, diff --git a/crates/language-server/src/workspace.rs b/crates/language-server/src/workspace.rs index a22388d90..2db39b4ef 100644 --- a/crates/language-server/src/workspace.rs +++ b/crates/language-server/src/workspace.rs @@ -8,7 +8,6 @@ use 
common::{ use hir::{hir_def::TopLevelMod, lower::map_file_to_mod, LowerHirDb}; use log::info; use patricia_tree::StringPatriciaMap; -use salsa::Snapshot; use crate::db::LanguageServerDatabase; @@ -20,38 +19,24 @@ fn ingot_directory_key(path: String) -> String { } pub trait IngotFileContext { + fn get_input_from_file_path(&self, path: &str) -> Option; fn touch_input_from_file_path( &mut self, db: &mut LanguageServerDatabase, path: &str, ) -> Option; - fn get_ingot_from_file_path( - &self, - db: &Snapshot, - path: &str, - ) -> Option; + fn get_ingot_from_file_path(&self, path: &str) -> Option; fn touch_ingot_from_file_path( &mut self, db: &mut LanguageServerDatabase, path: &str, ) -> Option; - fn get_input_from_file_path( - &self, - db: &Snapshot, - path: &str, - ) -> Option; - fn top_mod_from_file_path( - &self, - db: &Snapshot, - path: &str, - ) -> Option; - fn rename_file( + fn top_mod_from_file_path(&self, db: &dyn LowerHirDb, path: &str) -> Option; + fn remove_input_for_file_path( &mut self, db: &mut LanguageServerDatabase, - old_path: &str, - new_path: &str, + path: &str, ) -> Result<()>; - fn remove_file(&mut self, db: &mut LanguageServerDatabase, path: &str) -> Result<()>; } pub struct LocalIngotContext { @@ -117,14 +102,11 @@ impl IngotFileContext for LocalIngotContext { |file| Some(*file), ); self.files.insert(path, input.unwrap()); + ingot.set_files(db, self.files.values().copied().collect()); input } - fn get_input_from_file_path( - &self, - _db: &Snapshot, - path: &str, - ) -> Option { + fn get_input_from_file_path(&self, path: &str) -> Option { self.files.get(path).copied() } @@ -136,43 +118,52 @@ impl IngotFileContext for LocalIngotContext { Some(self.ingot) } - fn get_ingot_from_file_path( - &self, - _db: &Snapshot, - _path: &str, - ) -> Option { + fn get_ingot_from_file_path(&self, _path: &str) -> Option { Some(self.ingot) } fn top_mod_from_file_path( &self, - db: &Snapshot, + db: &dyn LowerHirDb, path: &str, ) -> Option { - let file = self.get_input_from_file_path(db, path)?; + let file = self.get_input_from_file_path(path)?; Some(map_file_to_mod(db.as_lower_hir_db(), file)) } - fn rename_file( + // fn rename_file( + // &mut self, + // db: &mut LanguageServerDatabase, + // old_path: &str, + // new_path: &str, + // ) -> Result<()> { + // let file = self.files.remove(old_path); + // if let Some(file) = file { + // file.set_path(db).to(new_path.into()); + // self.files.insert(new_path, file); + // } + // Ok(()) + // } + + fn remove_input_for_file_path( &mut self, db: &mut LanguageServerDatabase, - old_path: &str, - new_path: &str, + path: &str, ) -> Result<()> { - let file = self.files.remove(old_path); - if let Some(file) = file { - file.set_path(db).to(new_path.into()); - self.files.insert(new_path, file); - } - Ok(()) - } - - fn remove_file(&mut self, db: &mut LanguageServerDatabase, path: &str) -> Result<()> { let file = self.files.remove(path); - if let Some(file) = file { - file.remove_from_ingot(db)?; + + if let Some(_file) = file { + let ingot = self.ingot; + let new_ingot_files = self + .files + .values() + .copied() + .collect::>(); + ingot.set_files(db, new_ingot_files); + Ok(()) + } else { + Err(anyhow::anyhow!("File not found in ingot")) } - Ok(()) } } @@ -210,11 +201,7 @@ impl IngotFileContext for StandaloneIngotContext { input_file } - fn get_input_from_file_path( - &self, - _db: &Snapshot, - path: &str, - ) -> Option { + fn get_input_from_file_path(&self, path: &str) -> Option { self.files.get(path).copied() } @@ -242,44 +229,25 @@ impl IngotFileContext for 
StandaloneIngotContext { ) } - fn get_ingot_from_file_path( - &self, - _db: &Snapshot, - path: &str, - ) -> Option { + fn get_ingot_from_file_path(&self, path: &str) -> Option { // this shouldn't mutate, it should only get the ingot or return `None` get_containing_ingot(&self.ingots, path).copied() } - fn top_mod_from_file_path( - &self, - db: &Snapshot, - path: &str, - ) -> Option { - let file = self.get_input_from_file_path(db, path)?; + fn top_mod_from_file_path(&self, db: &dyn LowerHirDb, path: &str) -> Option { + let file = self.get_input_from_file_path(path)?; Some(map_file_to_mod(db.as_lower_hir_db(), file)) } - fn rename_file( + fn remove_input_for_file_path( &mut self, - db: &mut LanguageServerDatabase, - old_path: &str, - new_path: &str, + _db: &mut LanguageServerDatabase, + path: &str, ) -> Result<()> { - let file = self.files.remove(old_path); - if let Some(file) = file { - file.set_path(db).to(new_path.into()); - self.files.insert(new_path, file); - } - Ok(()) - } - - fn remove_file(&mut self, db: &mut LanguageServerDatabase, path: &str) -> Result<()> { let file = self.files.remove(path); - if let Some(file) = file { - file.remove_from_ingot(db)?; + if let Some(_file) = file { + self.ingots.remove(path); } - self.ingots.remove(path); Ok(()) } } @@ -372,7 +340,7 @@ impl Workspace { let previous_ingot_context_file_keys = &ingot_context.files.keys().collect::>(); for path in previous_ingot_context_file_keys { if !actual_paths.contains(path) { - let _ = ingot_context.remove_file(db, path); + let _ = ingot_context.remove_input_for_file_path(db, path); } } @@ -423,17 +391,12 @@ impl IngotFileContext for Workspace { } } - fn get_input_from_file_path( - &self, - db: &Snapshot, - path: &str, - ) -> Option { + fn get_input_from_file_path(&self, path: &str) -> Option { let ctx = get_containing_ingot(&self.ingot_contexts, path); if let Some(ctx) = ctx { - ctx.get_input_from_file_path(db, path) + ctx.get_input_from_file_path(path) } else { - self.standalone_ingot_context - .get_input_from_file_path(db, path) + self.standalone_ingot_context.get_input_from_file_path(path) } } @@ -451,25 +414,16 @@ impl IngotFileContext for Workspace { } } - fn get_ingot_from_file_path( - &self, - db: &Snapshot, - path: &str, - ) -> Option { + fn get_ingot_from_file_path(&self, path: &str) -> Option { let ctx = get_containing_ingot(&self.ingot_contexts, path); if let Some(ctx) = ctx { - ctx.get_ingot_from_file_path(db, path) + ctx.get_ingot_from_file_path(path) } else { - self.standalone_ingot_context - .get_ingot_from_file_path(db, path) + self.standalone_ingot_context.get_ingot_from_file_path(path) } } - fn top_mod_from_file_path( - &self, - db: &Snapshot, - path: &str, - ) -> Option { + fn top_mod_from_file_path(&self, db: &dyn LowerHirDb, path: &str) -> Option { let ctx = get_containing_ingot(&self.ingot_contexts, path); if let Some(ctx) = ctx { Some(ctx.top_mod_from_file_path(db, path).unwrap()) @@ -479,38 +433,26 @@ impl IngotFileContext for Workspace { } } - fn rename_file( + fn remove_input_for_file_path( &mut self, db: &mut LanguageServerDatabase, - old_path: &str, - new_path: &str, + path: &str, ) -> Result<()> { - let ctx = get_containing_ingot_mut(&mut self.ingot_contexts, old_path); - if let Some(ctx) = ctx { - ctx.rename_file(db, old_path, new_path) - } else { - self.standalone_ingot_context - .rename_file(db, old_path, new_path) - } - } - - fn remove_file(&mut self, db: &mut LanguageServerDatabase, path: &str) -> Result<()> { let ctx = get_containing_ingot_mut(&mut self.ingot_contexts, path); 
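        // Routing note (descriptive comment, not part of the original patch):
        // `get_containing_ingot_mut` resolves `path` against the patricia map of
        // ingot contexts keyed per ingot config directory, so a file is handled by
        // its enclosing local ingot context when one exists and falls back to the
        // standalone context below when no local ingot matches.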
if let Some(ctx) = ctx { - ctx.remove_file(db, path) + ctx.remove_input_for_file_path(db, path) } else { - self.standalone_ingot_context.remove_file(db, path)?; + self.standalone_ingot_context + .remove_input_for_file_path(db, path)?; Ok(()) } } } pub trait SyncableInputFile { - fn sync(&self, db: &mut LanguageServerDatabase, contents: Option) -> Result<()>; + // fn sync(&self, db: &mut LanguageServerDatabase, contents: Option) -> Result<()>; fn sync_from_fs(&self, db: &mut LanguageServerDatabase) -> Result<()>; fn sync_from_text(&self, db: &mut LanguageServerDatabase, contents: String) -> Result<()>; - fn remove_from_ingot(&self, db: &mut LanguageServerDatabase) -> Result<()>; - // fn rename(&self, db: &mut LanguageServerDatabase, new_path: String) -> Result<()>; } impl SyncableInputFile for InputFile { @@ -524,29 +466,22 @@ impl SyncableInputFile for InputFile { self.set_text(db).to(contents); Ok(()) } - fn sync(&self, db: &mut LanguageServerDatabase, contents: Option) -> Result<()> { - // check to see if the file actually exists anymore: - let path = self.path(db); - if !path.exists() { - info!( - "File {:?} no longer exists... removing from workspace", - path - ); - // if not let's remove it from the ingot - self.remove_from_ingot(db) - } else if let Some(contents) = contents { - self.sync_from_text(db, contents) - } else { - self.sync_from_fs(db) - } - } - fn remove_from_ingot(&self, db: &mut LanguageServerDatabase) -> Result<()> { - let ingot = self.ingot(db); - let mut files = ingot.files(db).clone(); - files.remove(self); - ingot.set_files(db, files); - Ok(()) - } + // fn sync(&self, db: &mut LanguageServerDatabase, contents: Option) -> Result<()> { + // // check to see if the file actually exists anymore: + // let path = self.path(db); + // if !path.exists() { + // info!( + // "File {:?} no longer exists... 
removing from workspace", + // path + // ); + // // if not let's remove it from the ingot + // self.remove_from_ingot(db) + // } else if let Some(contents) = contents { + // self.sync_from_text(db, contents) + // } else { + // self.sync_from_fs(db) + // } + // } } pub trait SyncableIngotFileContext { @@ -584,8 +519,6 @@ impl SyncableIngotFileContext for Workspace { #[cfg(test)] mod tests { - use salsa::ParallelDatabase; - use crate::workspace::{ get_containing_ingot_mut, IngotFileContext, Workspace, FE_CONFIG_SUFFIX, }; @@ -741,7 +674,7 @@ mod tests { // file.sync(&mut db, None); // this would panic if a file has been added to multiple ingots - let _top_mod = workspace.top_mod_from_file_path(&db.snapshot(), src_path.as_str()); + let _top_mod = workspace.top_mod_from_file_path(&db, src_path.as_str()); } } From b464e12a3586a8a907987c1fde87a2e8b0686b19 Mon Sep 17 00:00:00 2001 From: Micah Date: Tue, 19 Mar 2024 00:18:30 -0500 Subject: [PATCH 46/66] formatting --- crates/language-server/src/backend.rs | 9 +-------- crates/language-server/src/db.rs | 5 +---- crates/language-server/src/diagnostics.rs | 4 +++- crates/language-server/src/handlers/request.rs | 2 +- crates/language-server/src/workspace.rs | 6 +----- 5 files changed, 7 insertions(+), 19 deletions(-) diff --git a/crates/language-server/src/backend.rs b/crates/language-server/src/backend.rs index 49025c93b..a2dd937e1 100644 --- a/crates/language-server/src/backend.rs +++ b/crates/language-server/src/backend.rs @@ -1,6 +1,5 @@ use crate::handlers::request::{handle_goto_definition, handle_hover}; - use crate::workspace::SyncableIngotFileContext; use fork_stream::StreamExt as _; @@ -11,7 +10,6 @@ use lsp_types::TextDocumentItem; use salsa::{ParallelDatabase, Snapshot}; use stream_operators::StreamOps; - use std::sync::Arc; use tokio::sync::RwLock; @@ -234,12 +232,7 @@ async fn update_input( ) { info!("updating input for {:?}", path); let workspace = &mut workspace.write().await; - let input = workspace - .touch_input_from_file_path( - db, - path - ) - .unwrap(); + let input = workspace.touch_input_from_file_path(db, path).unwrap(); if let Some(contents) = contents { let _ = input.sync_from_text(db, contents); } diff --git a/crates/language-server/src/db.rs b/crates/language-server/src/db.rs index 89bac2c06..e9b0760e0 100644 --- a/crates/language-server/src/db.rs +++ b/crates/language-server/src/db.rs @@ -71,10 +71,7 @@ impl LanguageServerDatabase { smallest_enclosing_item } - pub fn finalize_diags( - &self, - diags: &[Box], - ) -> Vec { + pub fn finalize_diags(&self, diags: &[Box]) -> Vec { let mut diags: Vec<_> = diags.iter().map(|d| d.to_complete(self)).collect(); diags.sort_by(|lhs, rhs| match lhs.error_code.cmp(&rhs.error_code) { std::cmp::Ordering::Equal => lhs.primary_span().cmp(&rhs.primary_span()), diff --git a/crates/language-server/src/diagnostics.rs b/crates/language-server/src/diagnostics.rs index 3af80c502..524fed1ca 100644 --- a/crates/language-server/src/diagnostics.rs +++ b/crates/language-server/src/diagnostics.rs @@ -134,7 +134,9 @@ fn run_diagnostics( path: &str, ) -> Vec { let file_path = path; - let top_mod = workspace.top_mod_from_file_path(db.as_lower_hir_db(), file_path).unwrap(); + let top_mod = workspace + .top_mod_from_file_path(db.as_lower_hir_db(), file_path) + .unwrap(); let diags = &db.analyze_top_mod(top_mod); db.finalize_diags(diags) } diff --git a/crates/language-server/src/handlers/request.rs b/crates/language-server/src/handlers/request.rs index f615ce08f..d5f394d1d 100644 --- 
a/crates/language-server/src/handlers/request.rs +++ b/crates/language-server/src/handlers/request.rs @@ -8,7 +8,7 @@ use hir_analysis::{ }; use log::info; -use salsa::{Snapshot}; +use salsa::Snapshot; use tokio::sync::RwLock; use tower_lsp::jsonrpc::Result; diff --git a/crates/language-server/src/workspace.rs b/crates/language-server/src/workspace.rs index 2db39b4ef..a23f5fd12 100644 --- a/crates/language-server/src/workspace.rs +++ b/crates/language-server/src/workspace.rs @@ -122,11 +122,7 @@ impl IngotFileContext for LocalIngotContext { Some(self.ingot) } - fn top_mod_from_file_path( - &self, - db: &dyn LowerHirDb, - path: &str, - ) -> Option { + fn top_mod_from_file_path(&self, db: &dyn LowerHirDb, path: &str) -> Option { let file = self.get_input_from_file_path(path)?; Some(map_file_to_mod(db.as_lower_hir_db(), file)) } From ea9351f7a58443d1032f12876da92354eeb58f7c Mon Sep 17 00:00:00 2001 From: Micah Date: Tue, 19 Mar 2024 00:28:29 -0500 Subject: [PATCH 47/66] remove unused stream modifier; rustfmt --- crates/language-server/src/backend.rs | 4 +- crates/language-server/src/diagnostics.rs | 2 +- crates/language-server/src/goto.rs | 14 +- .../language-server/src/handlers/request.rs | 11 +- crates/language-server/src/main.rs | 1 - .../src/stream_buffer_until.rs | 190 ------------------ crates/language-server/src/workspace.rs | 138 +++++-------- 7 files changed, 65 insertions(+), 295 deletions(-) delete mode 100644 crates/language-server/src/stream_buffer_until.rs diff --git a/crates/language-server/src/backend.rs b/crates/language-server/src/backend.rs index a2dd937e1..6c1923d13 100644 --- a/crates/language-server/src/backend.rs +++ b/crates/language-server/src/backend.rs @@ -23,8 +23,6 @@ use crate::workspace::{IngotFileContext, SyncableInputFile, Workspace}; use log::info; -// use tokio_stream::StreamExt; - use tower_lsp::Client; pub struct Backend { @@ -232,7 +230,7 @@ async fn update_input( ) { info!("updating input for {:?}", path); let workspace = &mut workspace.write().await; - let input = workspace.touch_input_from_file_path(db, path).unwrap(); + let input = workspace.touch_input_for_file_path(db, path).unwrap(); if let Some(contents) = contents { let _ = input.sync_from_text(db, contents); } diff --git a/crates/language-server/src/diagnostics.rs b/crates/language-server/src/diagnostics.rs index 524fed1ca..1b288bd36 100644 --- a/crates/language-server/src/diagnostics.rs +++ b/crates/language-server/src/diagnostics.rs @@ -135,7 +135,7 @@ fn run_diagnostics( ) -> Vec { let file_path = path; let top_mod = workspace - .top_mod_from_file_path(db.as_lower_hir_db(), file_path) + .top_mod_for_file_path(db.as_lower_hir_db(), file_path) .unwrap(); let diags = &db.analyze_top_mod(top_mod); db.finalize_diags(diags) diff --git a/crates/language-server/src/goto.rs b/crates/language-server/src/goto.rs index a424284e3..d6f2a7cf8 100644 --- a/crates/language-server/src/goto.rs +++ b/crates/language-server/src/goto.rs @@ -138,7 +138,7 @@ mod tests { let fe_source_path = ingot_base_dir.join(fixture.path()); let fe_source_path = fe_source_path.to_str().unwrap(); - let input = workspace.touch_input_from_file_path(db, fixture.path()); + let input = workspace.touch_input_for_file_path(db, fixture.path()); assert_eq!(input.unwrap().ingot(db).kind(db), IngotKind::Local); input @@ -146,10 +146,10 @@ mod tests { .set_text(db) .to((*fixture.content()).to_string()); let top_mod = workspace - .top_mod_from_file_path(db.as_lower_hir_db(), fe_source_path) + .top_mod_for_file_path(db.as_lower_hir_db(), 
fe_source_path) .unwrap(); - let ingot = workspace.touch_ingot_from_file_path(db, fixture.path()); + let ingot = workspace.touch_ingot_for_file_path(db, fixture.path()); assert_eq!(ingot.unwrap().kind(db), IngotKind::Local); let cursors = extract_multiple_cursor_positions_from_spans(db, top_mod); @@ -200,11 +200,11 @@ mod tests { let db = &mut LanguageServerDatabase::default(); let workspace = &mut Workspace::default(); let input = workspace - .touch_input_from_file_path(db, fixture.path()) + .touch_input_for_file_path(db, fixture.path()) .unwrap(); input.set_text(db).to((*fixture.content()).to_string()); let top_mod = workspace - .top_mod_from_file_path(db.as_lower_hir_db(), fixture.path()) + .top_mod_for_file_path(db.as_lower_hir_db(), fixture.path()) .unwrap(); let cursors = extract_multiple_cursor_positions_from_spans(db, top_mod); @@ -256,12 +256,12 @@ mod tests { let workspace = &mut Workspace::default(); workspace - .touch_input_from_file_path(db, fixture.path()) + .touch_input_for_file_path(db, fixture.path()) .unwrap() .set_text(db) .to((*fixture.content()).to_string()); let top_mod = workspace - .top_mod_from_file_path(db.as_lower_hir_db(), fixture.path()) + .top_mod_for_file_path(db.as_lower_hir_db(), fixture.path()) .unwrap(); let cursors = extract_multiple_cursor_positions_from_spans(db, top_mod); diff --git a/crates/language-server/src/handlers/request.rs b/crates/language-server/src/handlers/request.rs index d5f394d1d..eb81a3bbc 100644 --- a/crates/language-server/src/handlers/request.rs +++ b/crates/language-server/src/handlers/request.rs @@ -18,7 +18,6 @@ use crate::{ util::{to_lsp_location_from_scope, to_offset_from_position}, workspace::{IngotFileContext, Workspace}, }; -// use tower_lsp::lsp_types::{ResponseError, Url}; pub async fn handle_hover( db: Snapshot, @@ -34,7 +33,7 @@ pub async fn handle_hover( .uri .path(); info!("getting hover info for file_path: {:?}", file_path); - let input = workspace.get_input_from_file_path(file_path); + let input = workspace.get_input_for_file_path(file_path); let ingot = input.map(|input| input.ingot(db.as_input_db())); let file_text = input.unwrap().text(db.as_input_db()); @@ -43,14 +42,10 @@ pub async fn handle_hover( .nth(params.text_document_position_params.position.line as usize) .unwrap(); - // let cursor: Cursor = params.text_document_position_params.position.into(); let cursor: Cursor = to_offset_from_position( params.text_document_position_params.position, file_text.as_str(), ); - // let file_path = std::path::Path::new(file_path); - - // info!("got ingot: {:?} of type {:?}", ingot, ingot.map(|ingot| ingot.kind(&mut state.db))); let ingot_info: Option = { let ingot_type = match ingot { @@ -75,7 +70,7 @@ pub async fn handle_hover( }; let top_mod = workspace - .top_mod_from_file_path(db.as_lower_hir_db(), file_path) + .top_mod_for_file_path(db.as_lower_hir_db(), file_path) .unwrap(); let early_resolution = goto_enclosing_path(&db, top_mod, cursor); @@ -124,7 +119,7 @@ pub async fn handle_goto_definition( // Get the module and the goto info let file_path = params.text_document.uri.path(); let top_mod = workspace - .top_mod_from_file_path(db.as_lower_hir_db(), file_path) + .top_mod_for_file_path(db.as_lower_hir_db(), file_path) .unwrap(); let goto_info = goto_enclosing_path(&db, top_mod, cursor); diff --git a/crates/language-server/src/main.rs b/crates/language-server/src/main.rs index 26b66553f..9897d845b 100644 --- a/crates/language-server/src/main.rs +++ b/crates/language-server/src/main.rs @@ -6,7 +6,6 @@ mod globals; mod 
goto; mod language_server; mod logger; -mod stream_buffer_until; mod util; mod workspace; diff --git a/crates/language-server/src/stream_buffer_until.rs b/crates/language-server/src/stream_buffer_until.rs deleted file mode 100644 index 43403f880..000000000 --- a/crates/language-server/src/stream_buffer_until.rs +++ /dev/null @@ -1,190 +0,0 @@ -use futures::stream::Stream; -use futures::stream::{iter, Iter}; -use log::info; -use std::{ - collections::VecDeque, - fmt::Debug, - pin::{pin, Pin}, - task::{Context, Poll}, -}; - -use pin_project::pin_project; - -#[pin_project(project_replace)] -pub struct BufferUntilStream { - #[pin] - input_stream: I, - #[pin] - trigger_stream: T, - pending_buffer: VecDeque, - ready_buffer: VecDeque, -} - -impl BufferUntilStream -where - I: Stream, - T: Stream, -{ - pub fn new(input_stream: I, trigger_stream: T) -> Self { - BufferUntilStream { - input_stream, - trigger_stream, - pending_buffer: VecDeque::new(), - ready_buffer: VecDeque::new(), - } - } - - // pub fn input_stream_mut(&mut self) -> &mut I { - // &mut self.input_stream - // } - - // pub fn input_stream(&self) -> &I { - // &self.input_stream - // } - - // pub fn trigger_stream_mut(&mut self) -> &mut T { - // &mut self.trigger_stream - // } - - // pub fn trigger_stream(&self) -> &T { - // &self.trigger_stream - // } -} -impl Stream for BufferUntilStream -where - I: Stream, - T: Stream, -{ - type Item = Iter>; - - fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll> { - let mut this = self.project(); - let ready_buffer: &mut VecDeque = this.ready_buffer; - let pending_buffer: &mut VecDeque = this.pending_buffer; - - let mut finished = false; - - // Check if the input_stream has a new value - while let Poll::Ready(Some(item)) = this.input_stream.as_mut().poll_next(cx) { - info!("Received item from input_stream: {:?}", item); - pending_buffer.push_back(item); - } - - if let Poll::Ready(None) = this.input_stream.as_mut().poll_next(cx) { - info!("input_stream finished"); - finished = true; - } - - match this.trigger_stream.as_mut().poll_next(cx) { - Poll::Ready(Some(_)) => { - info!("Triggered, moving pending_buffer to ready_buffer"); - ready_buffer.append(pending_buffer); - } - Poll::Ready(None) => { - ready_buffer.append(pending_buffer); - } - _ => { - finished = true; - } - } - - // Send any ready buffer or finish up - if !ready_buffer.is_empty() { - info!("Returning items stream from ready_buffer"); - let current_ready_buffer = std::mem::take(this.ready_buffer); - Poll::Ready(Some(iter(current_ready_buffer))) - } else if finished { - return Poll::Ready(None); - } else { - Poll::Pending - } - } -} - -pub trait BufferUntilStreamExt: Sized -where - I: Stream, - T: Stream, -{ - fn buffer_until(self, trigger: T) -> BufferUntilStream; -} - -impl BufferUntilStreamExt for I -where - I: Stream, - T: Stream, -{ - fn buffer_until(self, trigger: T) -> BufferUntilStream { - BufferUntilStream::new(self, trigger) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use futures::{stream::StreamExt, FutureExt}; - use tokio_stream::wrappers::{BroadcastStream, UnboundedReceiverStream}; - - #[tokio::test] - async fn test_accumulating_stream() { - println!("running test_accumulating_stream"); - let (trigger_sender, trigger_receiver) = tokio::sync::broadcast::channel(100); - let (input_sender, input_receiver) = tokio::sync::mpsc::unbounded_channel(); - - let mut output = vec![]; - - let mut accumulating_stream = BufferUntilStream::new( - UnboundedReceiverStream::from(input_receiver), - 
BroadcastStream::from(trigger_receiver), - ) - .flatten(); - - input_sender.send(1).unwrap(); - input_sender.send(2).unwrap(); - input_sender.send(3).unwrap(); - - while let Some(item) = accumulating_stream.next().now_or_never().flatten() { - output.push(item); - } - assert_eq!(output, Vec::::new()); - - trigger_sender.send(()).unwrap(); - - while let Some(item) = accumulating_stream.next().now_or_never().flatten() { - output.push(item); - } - assert_eq!(output, vec![1, 2, 3]); - - input_sender.send(4).unwrap(); - input_sender.send(5).unwrap(); - input_sender.send(6).unwrap(); - while let Some(item) = accumulating_stream.next().now_or_never().flatten() { - output.push(item); - } - - assert_eq!(output, vec![1, 2, 3]); - trigger_sender.send(()).unwrap(); - while let Some(item) = accumulating_stream.next().now_or_never().flatten() { - output.push(item); - } - - assert_eq!(output, vec![1, 2, 3, 4, 5, 6]); - input_sender.send(7).unwrap(); - input_sender.send(8).unwrap(); - input_sender.send(9).unwrap(); - input_sender.send(10).unwrap(); - while let Some(item) = accumulating_stream.next().now_or_never().flatten() { - output.push(item); - } - assert_eq!(output, vec![1, 2, 3, 4, 5, 6]); - - drop(trigger_sender); - - while let Some(item) = accumulating_stream.next().now_or_never().flatten() { - output.push(item); - } - assert_eq!(output, vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10]); - } - - // TODO: write tests for end of input stream -} diff --git a/crates/language-server/src/workspace.rs b/crates/language-server/src/workspace.rs index a23f5fd12..e5893092f 100644 --- a/crates/language-server/src/workspace.rs +++ b/crates/language-server/src/workspace.rs @@ -19,19 +19,19 @@ fn ingot_directory_key(path: String) -> String { } pub trait IngotFileContext { - fn get_input_from_file_path(&self, path: &str) -> Option; - fn touch_input_from_file_path( + fn get_input_for_file_path(&self, path: &str) -> Option; + fn touch_input_for_file_path( &mut self, db: &mut LanguageServerDatabase, path: &str, ) -> Option; - fn get_ingot_from_file_path(&self, path: &str) -> Option; - fn touch_ingot_from_file_path( + fn get_ingot_for_file_path(&self, path: &str) -> Option; + fn touch_ingot_for_file_path( &mut self, db: &mut LanguageServerDatabase, path: &str, ) -> Option; - fn top_mod_from_file_path(&self, db: &dyn LowerHirDb, path: &str) -> Option; + fn top_mod_for_file_path(&self, db: &dyn LowerHirDb, path: &str) -> Option; fn remove_input_for_file_path( &mut self, db: &mut LanguageServerDatabase, @@ -88,12 +88,12 @@ impl LocalIngotContext { } impl IngotFileContext for LocalIngotContext { - fn touch_input_from_file_path( + fn touch_input_for_file_path( &mut self, db: &mut LanguageServerDatabase, path: &str, ) -> Option { - let ingot = self.touch_ingot_from_file_path(db, path)?; + let ingot = self.touch_ingot_for_file_path(db, path)?; let input = self.files.get(path).map_or_else( || { let file = InputFile::new(db, ingot, path.into(), String::new()); @@ -106,11 +106,11 @@ impl IngotFileContext for LocalIngotContext { input } - fn get_input_from_file_path(&self, path: &str) -> Option { + fn get_input_for_file_path(&self, path: &str) -> Option { self.files.get(path).copied() } - fn touch_ingot_from_file_path( + fn touch_ingot_for_file_path( &mut self, _db: &mut LanguageServerDatabase, _path: &str, @@ -118,29 +118,15 @@ impl IngotFileContext for LocalIngotContext { Some(self.ingot) } - fn get_ingot_from_file_path(&self, _path: &str) -> Option { + fn get_ingot_for_file_path(&self, _path: &str) -> Option { Some(self.ingot) } - fn 
top_mod_from_file_path(&self, db: &dyn LowerHirDb, path: &str) -> Option { - let file = self.get_input_from_file_path(path)?; + fn top_mod_for_file_path(&self, db: &dyn LowerHirDb, path: &str) -> Option { + let file = self.get_input_for_file_path(path)?; Some(map_file_to_mod(db.as_lower_hir_db(), file)) } - // fn rename_file( - // &mut self, - // db: &mut LanguageServerDatabase, - // old_path: &str, - // new_path: &str, - // ) -> Result<()> { - // let file = self.files.remove(old_path); - // if let Some(file) = file { - // file.set_path(db).to(new_path.into()); - // self.files.insert(new_path, file); - // } - // Ok(()) - // } - fn remove_input_for_file_path( &mut self, db: &mut LanguageServerDatabase, @@ -178,12 +164,12 @@ impl StandaloneIngotContext { } impl IngotFileContext for StandaloneIngotContext { - fn touch_input_from_file_path( + fn touch_input_for_file_path( &mut self, db: &mut LanguageServerDatabase, path: &str, ) -> Option { - let ingot = self.touch_ingot_from_file_path(db, path)?; + let ingot = self.touch_ingot_for_file_path(db, path)?; let input_file = self.files.get(path).map_or_else( || { let file = InputFile::new(db, ingot, path.into(), String::new()); @@ -197,11 +183,11 @@ impl IngotFileContext for StandaloneIngotContext { input_file } - fn get_input_from_file_path(&self, path: &str) -> Option { + fn get_input_for_file_path(&self, path: &str) -> Option { self.files.get(path).copied() } - fn touch_ingot_from_file_path( + fn touch_ingot_for_file_path( &mut self, _db: &mut LanguageServerDatabase, path: &str, @@ -225,13 +211,13 @@ impl IngotFileContext for StandaloneIngotContext { ) } - fn get_ingot_from_file_path(&self, path: &str) -> Option { + fn get_ingot_for_file_path(&self, path: &str) -> Option { // this shouldn't mutate, it should only get the ingot or return `None` get_containing_ingot(&self.ingots, path).copied() } - fn top_mod_from_file_path(&self, db: &dyn LowerHirDb, path: &str) -> Option { - let file = self.get_input_from_file_path(path)?; + fn top_mod_for_file_path(&self, db: &dyn LowerHirDb, path: &str) -> Option { + let file = self.get_input_for_file_path(path)?; Some(map_file_to_mod(db.as_lower_hir_db(), file)) } @@ -273,7 +259,7 @@ impl Workspace { self.sync(db) } - pub fn ingot_context_from_config_path( + pub fn ingot_context_for_config_path( &mut self, db: &LanguageServerDatabase, config_path: &str, @@ -300,7 +286,7 @@ impl Workspace { .collect::>(); for path in paths { - self.ingot_context_from_config_path(db, path); + self.ingot_context_for_config_path(db, path); } let existing_keys: Vec = self.ingot_contexts.keys().collect(); @@ -329,9 +315,7 @@ impl Workspace { info!("Found {} files in ingot", actual_paths.len()); info!("Syncing ingot files: {:?}", actual_paths); - let ingot_context = self - .ingot_context_from_config_path(db, config_path) - .unwrap(); + let ingot_context = self.ingot_context_for_config_path(db, config_path).unwrap(); let previous_ingot_context_file_keys = &ingot_context.files.keys().collect::>(); for path in previous_ingot_context_file_keys { @@ -342,7 +326,7 @@ impl Workspace { for path in actual_paths { if !previous_ingot_context_file_keys.contains(path) { - let file = ingot_context.touch_input_from_file_path(db, path); + let file = ingot_context.touch_input_for_file_path(db, path); let contents = std::fs::read_to_string(path).unwrap(); file.unwrap().set_text(db).to(contents); } @@ -373,59 +357,59 @@ impl Workspace { } impl IngotFileContext for Workspace { - fn touch_input_from_file_path( + fn touch_input_for_file_path( &mut self, 
db: &mut LanguageServerDatabase, path: &str, ) -> Option { let ctx = get_containing_ingot_mut(&mut self.ingot_contexts, path); if let Some(ctx) = ctx { - ctx.touch_input_from_file_path(db, path) + ctx.touch_input_for_file_path(db, path) } else { self.standalone_ingot_context - .touch_input_from_file_path(db, path) + .touch_input_for_file_path(db, path) } } - fn get_input_from_file_path(&self, path: &str) -> Option { + fn get_input_for_file_path(&self, path: &str) -> Option { let ctx = get_containing_ingot(&self.ingot_contexts, path); if let Some(ctx) = ctx { - ctx.get_input_from_file_path(path) + ctx.get_input_for_file_path(path) } else { - self.standalone_ingot_context.get_input_from_file_path(path) + self.standalone_ingot_context.get_input_for_file_path(path) } } - fn touch_ingot_from_file_path( + fn touch_ingot_for_file_path( &mut self, db: &mut LanguageServerDatabase, path: &str, ) -> Option { let ctx = get_containing_ingot_mut(&mut self.ingot_contexts, path); if let Some(ctx) = ctx { - Some(ctx.touch_ingot_from_file_path(db, path).unwrap()) + Some(ctx.touch_ingot_for_file_path(db, path).unwrap()) } else { self.standalone_ingot_context - .touch_ingot_from_file_path(db, path) + .touch_ingot_for_file_path(db, path) } } - fn get_ingot_from_file_path(&self, path: &str) -> Option { + fn get_ingot_for_file_path(&self, path: &str) -> Option { let ctx = get_containing_ingot(&self.ingot_contexts, path); if let Some(ctx) = ctx { - ctx.get_ingot_from_file_path(path) + ctx.get_ingot_for_file_path(path) } else { - self.standalone_ingot_context.get_ingot_from_file_path(path) + self.standalone_ingot_context.get_ingot_for_file_path(path) } } - fn top_mod_from_file_path(&self, db: &dyn LowerHirDb, path: &str) -> Option { + fn top_mod_for_file_path(&self, db: &dyn LowerHirDb, path: &str) -> Option { let ctx = get_containing_ingot(&self.ingot_contexts, path); if let Some(ctx) = ctx { - Some(ctx.top_mod_from_file_path(db, path).unwrap()) + Some(ctx.top_mod_for_file_path(db, path).unwrap()) } else { self.standalone_ingot_context - .top_mod_from_file_path(db, path) + .top_mod_for_file_path(db, path) } } @@ -462,22 +446,6 @@ impl SyncableInputFile for InputFile { self.set_text(db).to(contents); Ok(()) } - // fn sync(&self, db: &mut LanguageServerDatabase, contents: Option) -> Result<()> { - // // check to see if the file actually exists anymore: - // let path = self.path(db); - // if !path.exists() { - // info!( - // "File {:?} no longer exists... 
removing from workspace", - // path - // ); - // // if not let's remove it from the ingot - // self.remove_from_ingot(db) - // } else if let Some(contents) = contents { - // self.sync_from_text(db, contents) - // } else { - // self.sync_from_fs(db) - // } - // } } pub trait SyncableIngotFileContext { @@ -528,11 +496,11 @@ mod tests { let file_path = "tests/data/ingot1/src/main.fe"; let ctx = &mut StandaloneIngotContext::new(); - let file = ctx.touch_input_from_file_path(&mut db, file_path); + let file = ctx.touch_input_for_file_path(&mut db, file_path); assert!(file.is_some()); - let ingot = ctx.touch_ingot_from_file_path(&mut db, file_path); + let ingot = ctx.touch_ingot_for_file_path(&mut db, file_path); assert!(ingot.is_some()); assert_eq!( ingot.unwrap().kind(&db), @@ -546,7 +514,7 @@ mod tests { let mut workspace = Workspace::default(); let mut db = crate::db::LanguageServerDatabase::default(); let file_path = "tests/data/ingot1/src/main.fe"; - let file = workspace.touch_input_from_file_path(&mut db, file_path); + let file = workspace.touch_input_for_file_path(&mut db, file_path); assert!(file.is_some()); } @@ -556,7 +524,7 @@ mod tests { let mut workspace = Workspace::default(); let _ingot_context_ingot = { - let ingot_context = workspace.ingot_context_from_config_path( + let ingot_context = workspace.ingot_context_for_config_path( &crate::db::LanguageServerDatabase::default(), config_path, ); @@ -577,7 +545,7 @@ mod tests { assert!(containing_ingot.as_deref().is_some()); - let ingot = workspace.touch_ingot_from_file_path( + let ingot = workspace.touch_ingot_for_file_path( &mut crate::db::LanguageServerDatabase::default(), file_path, ); @@ -591,17 +559,17 @@ mod tests { let mut db = crate::db::LanguageServerDatabase::default(); let ingot_context_ingot = { - let ingot_context = workspace.ingot_context_from_config_path(&db, config_path); + let ingot_context = workspace.ingot_context_for_config_path(&db, config_path); assert!(ingot_context.is_some()); ingot_context.map(|ctx| ctx.ingot) }; let file_path = "tests/data/ingot1/src/main.fe"; - let file = workspace.touch_input_from_file_path(&mut db, file_path); + let file = workspace.touch_input_for_file_path(&mut db, file_path); assert!(file.is_some()); - let ingot = workspace.touch_ingot_from_file_path(&mut db, file_path); + let ingot = workspace.touch_ingot_for_file_path(&mut db, file_path); assert!(ingot.is_some()); assert_eq!(file.map(|f| f.ingot(&db)).unwrap(), ingot.unwrap()); @@ -630,7 +598,7 @@ mod tests { assert_eq!(workspace.ingot_contexts.len(), 1); let fe_source_path = ingot_base_dir.join("src/main.fe"); - let input = workspace.touch_input_from_file_path(&mut db, fe_source_path.to_str().unwrap()); + let input = workspace.touch_input_for_file_path(&mut db, fe_source_path.to_str().unwrap()); assert!(input.is_some()); assert!(input.unwrap().ingot(&db).kind(&db) == common::input::IngotKind::Local); } @@ -664,13 +632,13 @@ mod tests { for src_path in fe_files { let _file = workspace - .touch_input_from_file_path(&mut db, &src_path) + .touch_input_for_file_path(&mut db, &src_path) .unwrap(); // normally would do this but it's not relevant here... 
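            // (note: the `sync(&mut db, None)` call referenced below is the old
            // `SyncableInputFile::sync` entry point that was dropped in favor of
            // `sync_from_fs` / `sync_from_text` earlier in this series)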
// file.sync(&mut db, None); // this would panic if a file has been added to multiple ingots - let _top_mod = workspace.top_mod_from_file_path(&db, src_path.as_str()); + let _top_mod = workspace.top_mod_for_file_path(&db, src_path.as_str()); } } @@ -696,7 +664,7 @@ mod tests { workspace.sync_ingot_files(&mut db, &foo_config); let foo_context = workspace - .ingot_context_from_config_path(&db, &foo_config) + .ingot_context_for_config_path(&db, &foo_config) .unwrap(); assert!(foo_context.files.len() == 1); @@ -705,7 +673,7 @@ mod tests { for file in foo_files { let contents = std::fs::read_to_string(&file).unwrap(); let file = foo_context - .touch_input_from_file_path(&mut db, &file) + .touch_input_for_file_path(&mut db, &file) .unwrap(); assert!(*file.text(&db) == contents); @@ -723,7 +691,7 @@ mod tests { workspace.sync_local_ingots(&mut db, &messy_workspace_path); let dangling_file = workspace - .touch_input_from_file_path(&mut db, &dangling_path) + .touch_input_for_file_path(&mut db, &dangling_path) .unwrap(); assert_eq!( @@ -744,7 +712,7 @@ mod tests { let non_dangling_file_path = format!("{crate_dir}/test_files/messy/foo/bar/src/main.fe"); let non_dangling_input = workspace - .touch_input_from_file_path(&mut db, &non_dangling_file_path) + .touch_input_for_file_path(&mut db, &non_dangling_file_path) .unwrap(); assert_eq!( From a844f57f9e40b9fcf01c47e35e2b2d439feb8591 Mon Sep 17 00:00:00 2001 From: Micah Date: Tue, 19 Mar 2024 01:51:57 -0500 Subject: [PATCH 48/66] language server: load fe std lib in language server TODO: figure out how to load the standard lib into the salsa db and how to include it in diagnostics --- Cargo.lock | 159 +++++++++++++++++++++--- crates/language-server/Cargo.toml | 2 + crates/language-server/src/backend.rs | 14 ++- crates/language-server/src/goto.rs | 6 +- crates/language-server/src/workspace.rs | 46 ++++++- 5 files changed, 195 insertions(+), 32 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 2a6584274..7861f9e5a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -905,23 +905,12 @@ checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" [[package]] name = "errno" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4bcfec3a70f97c962c307b2d2c56e358cf1d00b558d74262b5f929ee8cc7e73a" -dependencies = [ - "errno-dragonfly", - "libc", - "windows-sys 0.48.0", -] - -[[package]] -name = "errno-dragonfly" -version = "0.1.2" +version = "0.3.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa68f1b12764fab894d2755d2518754e71b4fd80ecfb822714a1206c2aab39bf" +checksum = "a258e46cdc063eb8519c00b9fc845fc47bcfca4130e2f08e88665ceda8474245" dependencies = [ - "cc", "libc", + "windows-sys 0.52.0", ] [[package]] @@ -1220,7 +1209,7 @@ dependencies = [ "serde_json", "smol_str", "toml", - "vfs", + "vfs 0.5.2", ] [[package]] @@ -1311,6 +1300,7 @@ dependencies = [ "patricia_tree", "pin-project", "rowan", + "rust-embed", "salsa-2022", "serde", "serde_json", @@ -1320,6 +1310,7 @@ dependencies = [ "tokio-stream", "tower-lsp", "url", + "vfs 0.12.0", ] [[package]] @@ -1442,6 +1433,18 @@ dependencies = [ "subtle", ] +[[package]] +name = "filetime" +version = "0.2.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ee447700ac8aa0b2f2bd7bc4462ad686ba06baa6727ac149a2d6277f0d240fd" +dependencies = [ + "cfg-if 1.0.0", + "libc", + "redox_syscall 0.4.1", + "windows-sys 0.52.0", +] + [[package]] name = "fixed-hash" version = "0.8.0" @@ -2768,6 
+2771,15 @@ dependencies = [ "bitflags", ] +[[package]] +name = "redox_syscall" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa" +dependencies = [ + "bitflags", +] + [[package]] name = "regex" version = "1.10.2" @@ -2971,6 +2983,40 @@ version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "62cc5760263ea229d367e7dff3c0cbf09e4797a125bd87059a6c095804f3b2d1" +[[package]] +name = "rust-embed" +version = "8.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fb78f46d0066053d16d4ca7b898e9343bc3530f71c61d5ad84cd404ada068745" +dependencies = [ + "rust-embed-impl", + "rust-embed-utils", + "walkdir", +] + +[[package]] +name = "rust-embed-impl" +version = "8.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b91ac2a3c6c0520a3fb3dd89321177c3c692937c4eb21893378219da10c44fc8" +dependencies = [ + "proc-macro2", + "quote", + "rust-embed-utils", + "syn 2.0.48", + "walkdir", +] + +[[package]] +name = "rust-embed-utils" +version = "8.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "86f69089032567ffff4eada41c573fc43ff466c7db7c5688b2e7969584345581" +dependencies = [ + "sha2", + "walkdir", +] + [[package]] name = "rustc-demangle" version = "0.1.23" @@ -3191,9 +3237,9 @@ checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" [[package]] name = "serde" -version = "1.0.176" +version = "1.0.197" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76dc28c9523c5d70816e393136b86d48909cfb27cecaa902d338c19ed47164dc" +checksum = "3fb1c873e1b9b056a4dc4c0c198b24c3ffa059243875552b2bd0933b1aee4ce2" dependencies = [ "serde_derive", ] @@ -3210,9 +3256,9 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.176" +version = "1.0.197" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4e7b8c5dc823e3b90651ff1d3808419cd14e5ad76de04feaf37da114e7a306f" +checksum = "7eb0b34b42edc17f6b7cac84a52a1c5f0e1bb2227e997ca9011ea3dd34e8610b" dependencies = [ "proc-macro2", "quote", @@ -3928,6 +3974,15 @@ dependencies = [ "thiserror", ] +[[package]] +name = "vfs" +version = "0.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "654cd097e182a71dbf899178e6b5662c2157dd0b8afd5975de18008f6fc173d1" +dependencies = [ + "filetime", +] + [[package]] name = "walkdir" version = "2.3.3" @@ -4102,6 +4157,15 @@ dependencies = [ "windows-targets 0.48.1", ] +[[package]] +name = "windows-sys" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +dependencies = [ + "windows-targets 0.52.4", +] + [[package]] name = "windows-targets" version = "0.42.2" @@ -4132,6 +4196,21 @@ dependencies = [ "windows_x86_64_msvc 0.48.0", ] +[[package]] +name = "windows-targets" +version = "0.52.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7dd37b7e5ab9018759f893a1952c9420d060016fc19a472b4bb20d1bdd694d1b" +dependencies = [ + "windows_aarch64_gnullvm 0.52.4", + "windows_aarch64_msvc 0.52.4", + "windows_i686_gnu 0.52.4", + "windows_i686_msvc 0.52.4", + "windows_x86_64_gnu 0.52.4", + "windows_x86_64_gnullvm 0.52.4", + "windows_x86_64_msvc 0.52.4", +] + [[package]] name = 
"windows_aarch64_gnullvm" version = "0.42.2" @@ -4144,6 +4223,12 @@ version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "91ae572e1b79dba883e0d315474df7305d12f569b400fcf90581b06062f7e1bc" +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bcf46cf4c365c6f2d1cc93ce535f2c8b244591df96ceee75d8e83deb70a9cac9" + [[package]] name = "windows_aarch64_msvc" version = "0.42.2" @@ -4156,6 +4241,12 @@ version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b2ef27e0d7bdfcfc7b868b317c1d32c641a6fe4629c171b8928c7b08d98d7cf3" +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da9f259dd3bcf6990b55bffd094c4f7235817ba4ceebde8e6d11cd0c5633b675" + [[package]] name = "windows_i686_gnu" version = "0.42.2" @@ -4168,6 +4259,12 @@ version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "622a1962a7db830d6fd0a69683c80a18fda201879f0f447f065a3b7467daa241" +[[package]] +name = "windows_i686_gnu" +version = "0.52.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b474d8268f99e0995f25b9f095bc7434632601028cf86590aea5c8a5cb7801d3" + [[package]] name = "windows_i686_msvc" version = "0.42.2" @@ -4180,6 +4277,12 @@ version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4542c6e364ce21bf45d69fdd2a8e455fa38d316158cfd43b3ac1c5b1b19f8e00" +[[package]] +name = "windows_i686_msvc" +version = "0.52.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1515e9a29e5bed743cb4415a9ecf5dfca648ce85ee42e15873c3cd8610ff8e02" + [[package]] name = "windows_x86_64_gnu" version = "0.42.2" @@ -4192,6 +4295,12 @@ version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ca2b8a661f7628cbd23440e50b05d705db3686f894fc9580820623656af974b1" +[[package]] +name = "windows_x86_64_gnu" +version = "0.52.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5eee091590e89cc02ad514ffe3ead9eb6b660aedca2183455434b93546371a03" + [[package]] name = "windows_x86_64_gnullvm" version = "0.42.2" @@ -4204,6 +4313,12 @@ version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7896dbc1f41e08872e9d5e8f8baa8fdd2677f29468c4e156210174edc7f7b953" +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77ca79f2451b49fa9e2af39f0747fe999fcda4f5e241b2898624dca97a1f2177" + [[package]] name = "windows_x86_64_msvc" version = "0.42.2" @@ -4216,6 +4331,12 @@ version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a" +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32b752e52a2da0ddfbdbcc6fceadfeede4c939ed16d13e648833a61dfb611ed8" + [[package]] name = "winnow" version = "0.4.1" diff --git a/crates/language-server/Cargo.toml b/crates/language-server/Cargo.toml index 3eb70bc2a..cc5ad1968 100644 --- a/crates/language-server/Cargo.toml +++ b/crates/language-server/Cargo.toml @@ -46,3 +46,5 @@ pin-project 
= "1.1.5" merge-streams = "0.1.2" futures-concurrency = "7.5.0" console-subscriber = "0.2.0" +vfs = "0.12.0" +rust-embed = "8.3.0" diff --git a/crates/language-server/src/backend.rs b/crates/language-server/src/backend.rs index 6c1923d13..d0fbd59d3 100644 --- a/crates/language-server/src/backend.rs +++ b/crates/language-server/src/backend.rs @@ -142,17 +142,21 @@ impl Backend { info!("initializing language server!"); // setup workspace // let workspace = self.workspace.clone(); - let mut workspace = self.workspace.write().await; - let _ = workspace.set_workspace_root( - db, + + let root = initialization_params .root_uri .unwrap() .to_file_path() .ok() - .unwrap(), - ); + .unwrap(); + let mut workspace = self.workspace.write().await; + let _ = workspace.set_workspace_root( + db, + &root + ); + let _ = workspace.load_std_lib(db, &root); let _ = workspace.sync(db); let capabilities = server_capabilities(); diff --git a/crates/language-server/src/goto.rs b/crates/language-server/src/goto.rs index d6f2a7cf8..e06bc9d8e 100644 --- a/crates/language-server/src/goto.rs +++ b/crates/language-server/src/goto.rs @@ -132,7 +132,7 @@ mod tests { let ingot_base_dir = Path::new(&cargo_manifest_dir).join("test_files/single_ingot"); let db = &mut LanguageServerDatabase::default(); - let workspace = &mut Workspace::default(); + let workspace = &mut Workspace::default(db); let _ = workspace.set_workspace_root(db, ingot_base_dir.clone()); @@ -198,7 +198,7 @@ mod tests { )] fn test_goto_enclosing_path(fixture: Fixture<&str>) { let db = &mut LanguageServerDatabase::default(); - let workspace = &mut Workspace::default(); + let workspace = &mut Workspace::default(db); let input = workspace .touch_input_for_file_path(db, fixture.path()) .unwrap(); @@ -253,7 +253,7 @@ mod tests { )] fn test_smallest_enclosing_path(fixture: Fixture<&str>) { let db = &mut LanguageServerDatabase::default(); - let workspace = &mut Workspace::default(); + let workspace = &mut Workspace::default(db); workspace .touch_input_for_file_path(db, fixture.path()) diff --git a/crates/language-server/src/workspace.rs b/crates/language-server/src/workspace.rs index e5893092f..68e338d3a 100644 --- a/crates/language-server/src/workspace.rs +++ b/crates/language-server/src/workspace.rs @@ -1,4 +1,4 @@ -use std::{collections::BTreeSet, path::PathBuf}; +use std::{borrow::Cow, collections::BTreeSet, path::PathBuf}; use anyhow::Result; use common::{ @@ -11,6 +11,12 @@ use patricia_tree::StringPatriciaMap; use crate::db::LanguageServerDatabase; +use rust_embed::RustEmbed; + +#[derive(RustEmbed)] +#[folder = "../library/std"] +struct StdLib; + const FE_CONFIG_SUFFIX: &str = "fe.toml"; fn ingot_directory_key(path: String) -> String { path.strip_suffix(FE_CONFIG_SUFFIX) @@ -249,13 +255,43 @@ impl Workspace { } } + pub fn load_std_lib( + &mut self, + db: &mut LanguageServerDatabase, + root_path: &PathBuf, + ) -> Result<()> { + let root_path = root_path.to_str().unwrap(); + self + .touch_ingot_for_file_path(db, &format!("{}/std/fe.toml", root_path)) + .unwrap(); + + info!("Loading std lib..."); + + StdLib::iter().for_each(|path: Cow<'static, str>| { + let path = path.as_ref(); + let std_path = format!("{}/std/{}", root_path, path); + info!("adding std file... 
{:?} --- {:?}", std_path, path); + if let Some(file) = StdLib::get(path) { + let contents = String::from_utf8(file.data.as_ref().to_vec()); + if let Ok(contents) = contents { + let input = self.touch_input_for_file_path( + db, + &std_path, + ); + input.unwrap().set_text(db).to(contents); + }; + }; + }); + Ok(()) + } + pub fn set_workspace_root( &mut self, db: &mut LanguageServerDatabase, - root_path: PathBuf, + root_path: &PathBuf, ) -> Result<()> { let path = root_path; - self.root_path = Some(path); + self.root_path = Some(path.to_path_buf()); self.sync(db) } @@ -592,7 +628,7 @@ mod tests { let mut workspace = Workspace::default(); let mut db = crate::db::LanguageServerDatabase::default(); - let _ = workspace.set_workspace_root(&mut db, ingot_base_dir.clone()); + let _ = workspace.set_workspace_root(&mut db, &ingot_base_dir); // panic!("wtf? {:?}", ingot_base_dir); assert_eq!(workspace.ingot_contexts.len(), 1); @@ -621,7 +657,7 @@ mod tests { assert!(workspace.ingot_contexts.len() == 2); - let _ = workspace.set_workspace_root(&mut db, PathBuf::from(&path)); + let _ = workspace.set_workspace_root(&mut db, &PathBuf::from(&path)); // get all top level modules for .fe files in the workspace let fe_files = glob::glob(&format!("{path}/**/*.fe")) From ac213e61815814ce502ad0aeae76094b0ba8dc6b Mon Sep 17 00:00:00 2001 From: Micah Date: Tue, 19 Mar 2024 13:52:44 -0500 Subject: [PATCH 49/66] batched ingot-wide diagnostics; cleanup --- Cargo.lock | 12 ++ crates/language-server/Cargo.toml | 1 + crates/language-server/src/backend.rs | 60 ++++-- crates/language-server/src/diagnostics.rs | 2 +- crates/language-server/src/goto.rs | 14 +- .../language-server/src/handlers/request.rs | 9 +- crates/language-server/src/main.rs | 1 + .../src/stream_buffer_until.rs | 189 ++++++++++++++++++ crates/language-server/src/workspace.rs | 46 +++-- 9 files changed, 285 insertions(+), 49 deletions(-) create mode 100644 crates/language-server/src/stream_buffer_until.rs diff --git a/Cargo.lock b/Cargo.lock index 7861f9e5a..f2f185eba 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1290,6 +1290,7 @@ dependencies = [ "fe-macros", "fork_stream", "futures", + "futures-batch", "futures-concurrency", "fxhash", "glob", @@ -1525,6 +1526,17 @@ dependencies = [ "futures-util", ] +[[package]] +name = "futures-batch" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6f444c45a1cb86f2a7e301469fd50a82084a60dadc25d94529a8312276ecb71a" +dependencies = [ + "futures", + "futures-timer", + "pin-utils", +] + [[package]] name = "futures-channel" version = "0.3.30" diff --git a/crates/language-server/Cargo.toml b/crates/language-server/Cargo.toml index cc5ad1968..0351f3845 100644 --- a/crates/language-server/Cargo.toml +++ b/crates/language-server/Cargo.toml @@ -48,3 +48,4 @@ futures-concurrency = "7.5.0" console-subscriber = "0.2.0" vfs = "0.12.0" rust-embed = "8.3.0" +futures-batch = "0.6.1" diff --git a/crates/language-server/src/backend.rs b/crates/language-server/src/backend.rs index d0fbd59d3..785cebb02 100644 --- a/crates/language-server/src/backend.rs +++ b/crates/language-server/src/backend.rs @@ -2,13 +2,16 @@ use crate::handlers::request::{handle_goto_definition, handle_hover}; use crate::workspace::SyncableIngotFileContext; +use common::InputDb; use fork_stream::StreamExt as _; +use futures_batch::ChunksTimeoutStreamExt; +use fxhash::FxHashSet; use futures::StreamExt; use futures_concurrency::prelude::*; use lsp_types::TextDocumentItem; use salsa::{ParallelDatabase, Snapshot}; -use 
stream_operators::StreamOps; +use tokio_stream::wrappers::UnboundedReceiverStream; use std::sync::Arc; use tokio::sync::RwLock; @@ -23,6 +26,8 @@ use crate::workspace::{IngotFileContext, SyncableInputFile, Workspace}; use log::info; +// use tokio_stream::StreamExt; + use tower_lsp::Client; pub struct Backend { @@ -58,12 +63,9 @@ impl Backend { let client = self.client.clone(); let messaging = self.messaging; - // let messaging = self.messaging.clone(); - // let messaging = messaging.read().await; let mut initialized_stream = messaging.initialize_stream.fuse(); let mut shutdown_stream = messaging.shutdown_stream.fuse(); - // let mut did_close_stream = messaging.did_close_stream.fuse(); let did_change_watched_files_stream = messaging.did_change_watched_files_stream.fork(); let flat_did_change_watched_files = did_change_watched_files_stream @@ -83,11 +85,13 @@ impl Backend { Box::pin(async move { matches!(change_type, lsp_types::FileChangeType::CREATED) }) }); - let mut did_delete_watch_file_stream = - flat_did_change_watched_files.clone().filter(|change| { + let mut did_delete_watch_file_stream = flat_did_change_watched_files + .clone() + .filter(|change| { let change_type = change.typ; Box::pin(async move { matches!(change_type, lsp_types::FileChangeType::DELETED) }) - }); + }) + .fuse(); let did_open_stream = messaging.did_open_stream.fuse(); let did_change_stream = messaging.did_change_stream.fuse(); @@ -128,7 +132,12 @@ impl Backend { }), ) .merge() - .debounce_time(std::time::Duration::from_millis(20)) + .fuse(); + + let (tx_needs_diagnostics, rx_needs_diagnostics) = tokio::sync::mpsc::unbounded_channel(); + + let mut diagnostics_stream = UnboundedReceiverStream::from(rx_needs_diagnostics) + .chunks_timeout(500, std::time::Duration::from_millis(30)) .fuse(); let mut hover_stream = messaging.hover_stream.fuse(); @@ -140,8 +149,6 @@ impl Backend { Some(result) = initialized_stream.next() => { let (initialization_params, responder) = result; info!("initializing language server!"); - // setup workspace - // let workspace = self.workspace.clone(); let root = initialization_params @@ -180,6 +187,7 @@ impl Backend { let path = path.to_str().unwrap(); let workspace = workspace.clone(); let _ = workspace.write().await.remove_input_for_file_path(db, path); + let _ = tx_needs_diagnostics.send(path.to_string()); } Some(doc) = change_stream.next() => { info!("change detected: {:?}", doc.uri); @@ -187,18 +195,36 @@ impl Backend { let path = path_buf.to_str().unwrap(); let contents = Some(doc.text); update_input(workspace.clone(), db, path, contents).await; + let _ = tx_needs_diagnostics.send(path.to_string()); + } + Some(files_need_diagnostics) = diagnostics_stream.next() => { + info!("files need diagnostics: {:?}", files_need_diagnostics); + let mut ingots_need_diagnostics = FxHashSet::default(); + for file in files_need_diagnostics { + let workspace = workspace.clone(); + let workspace = workspace.read().await; + let ingot = workspace.get_ingot_for_file_path(&file).unwrap(); + ingots_need_diagnostics.insert(ingot); + } - let db = db.snapshot(); - let client = client.clone(); - let workspace = workspace.clone(); - self.workers.spawn( - async move { handle_diagnostics(client, workspace, db, doc.uri).await } - ); + info!("ingots need diagnostics: {:?}", ingots_need_diagnostics); + for ingot in ingots_need_diagnostics.into_iter() { + for file in ingot.files(db.as_input_db()) { + let file = *file; + let path = file.path(db.as_input_db()); + let path = lsp_types::Url::from_file_path(path).unwrap(); + let 
db = db.snapshot(); + let client = client.clone(); + let workspace = workspace.clone(); + self.workers.spawn( + async move { handle_diagnostics(client.clone(), workspace.clone(), db, path).await } + ); + } + } } Some((params, responder)) = hover_stream.next() => { let db = db.snapshot(); let workspace = workspace.clone(); - // let response = handle_hover(db, workspace, params).await; let response = match self.workers.spawn(handle_hover(db, workspace, params)).await { Ok(response) => response, Err(e) => { diff --git a/crates/language-server/src/diagnostics.rs b/crates/language-server/src/diagnostics.rs index 1b288bd36..524fed1ca 100644 --- a/crates/language-server/src/diagnostics.rs +++ b/crates/language-server/src/diagnostics.rs @@ -135,7 +135,7 @@ fn run_diagnostics( ) -> Vec { let file_path = path; let top_mod = workspace - .top_mod_for_file_path(db.as_lower_hir_db(), file_path) + .top_mod_from_file_path(db.as_lower_hir_db(), file_path) .unwrap(); let diags = &db.analyze_top_mod(top_mod); db.finalize_diags(diags) diff --git a/crates/language-server/src/goto.rs b/crates/language-server/src/goto.rs index e06bc9d8e..bdb09f902 100644 --- a/crates/language-server/src/goto.rs +++ b/crates/language-server/src/goto.rs @@ -132,9 +132,9 @@ mod tests { let ingot_base_dir = Path::new(&cargo_manifest_dir).join("test_files/single_ingot"); let db = &mut LanguageServerDatabase::default(); - let workspace = &mut Workspace::default(db); + let workspace = &mut Workspace::default(); - let _ = workspace.set_workspace_root(db, ingot_base_dir.clone()); + let _ = workspace.set_workspace_root(db, &ingot_base_dir); let fe_source_path = ingot_base_dir.join(fixture.path()); let fe_source_path = fe_source_path.to_str().unwrap(); @@ -146,7 +146,7 @@ mod tests { .set_text(db) .to((*fixture.content()).to_string()); let top_mod = workspace - .top_mod_for_file_path(db.as_lower_hir_db(), fe_source_path) + .top_mod_from_file_path(db.as_lower_hir_db(), fe_source_path) .unwrap(); let ingot = workspace.touch_ingot_for_file_path(db, fixture.path()); @@ -198,13 +198,13 @@ mod tests { )] fn test_goto_enclosing_path(fixture: Fixture<&str>) { let db = &mut LanguageServerDatabase::default(); - let workspace = &mut Workspace::default(db); + let workspace = &mut Workspace::default(); let input = workspace .touch_input_for_file_path(db, fixture.path()) .unwrap(); input.set_text(db).to((*fixture.content()).to_string()); let top_mod = workspace - .top_mod_for_file_path(db.as_lower_hir_db(), fixture.path()) + .top_mod_from_file_path(db.as_lower_hir_db(), fixture.path()) .unwrap(); let cursors = extract_multiple_cursor_positions_from_spans(db, top_mod); @@ -253,7 +253,7 @@ mod tests { )] fn test_smallest_enclosing_path(fixture: Fixture<&str>) { let db = &mut LanguageServerDatabase::default(); - let workspace = &mut Workspace::default(db); + let workspace = &mut Workspace::default(); workspace .touch_input_for_file_path(db, fixture.path()) @@ -261,7 +261,7 @@ mod tests { .set_text(db) .to((*fixture.content()).to_string()); let top_mod = workspace - .top_mod_for_file_path(db.as_lower_hir_db(), fixture.path()) + .top_mod_from_file_path(db.as_lower_hir_db(), fixture.path()) .unwrap(); let cursors = extract_multiple_cursor_positions_from_spans(db, top_mod); diff --git a/crates/language-server/src/handlers/request.rs b/crates/language-server/src/handlers/request.rs index eb81a3bbc..b5951ec23 100644 --- a/crates/language-server/src/handlers/request.rs +++ b/crates/language-server/src/handlers/request.rs @@ -18,6 +18,7 @@ use crate::{ 
util::{to_lsp_location_from_scope, to_offset_from_position}, workspace::{IngotFileContext, Workspace}, }; +// use tower_lsp::lsp_types::{ResponseError, Url}; pub async fn handle_hover( db: Snapshot, @@ -42,10 +43,14 @@ pub async fn handle_hover( .nth(params.text_document_position_params.position.line as usize) .unwrap(); + // let cursor: Cursor = params.text_document_position_params.position.into(); let cursor: Cursor = to_offset_from_position( params.text_document_position_params.position, file_text.as_str(), ); + // let file_path = std::path::Path::new(file_path); + + // info!("got ingot: {:?} of type {:?}", ingot, ingot.map(|ingot| ingot.kind(&mut state.db))); let ingot_info: Option = { let ingot_type = match ingot { @@ -70,7 +75,7 @@ pub async fn handle_hover( }; let top_mod = workspace - .top_mod_for_file_path(db.as_lower_hir_db(), file_path) + .top_mod_from_file_path(db.as_lower_hir_db(), file_path) .unwrap(); let early_resolution = goto_enclosing_path(&db, top_mod, cursor); @@ -119,7 +124,7 @@ pub async fn handle_goto_definition( // Get the module and the goto info let file_path = params.text_document.uri.path(); let top_mod = workspace - .top_mod_for_file_path(db.as_lower_hir_db(), file_path) + .top_mod_from_file_path(db.as_lower_hir_db(), file_path) .unwrap(); let goto_info = goto_enclosing_path(&db, top_mod, cursor); diff --git a/crates/language-server/src/main.rs b/crates/language-server/src/main.rs index 9897d845b..26b66553f 100644 --- a/crates/language-server/src/main.rs +++ b/crates/language-server/src/main.rs @@ -6,6 +6,7 @@ mod globals; mod goto; mod language_server; mod logger; +mod stream_buffer_until; mod util; mod workspace; diff --git a/crates/language-server/src/stream_buffer_until.rs b/crates/language-server/src/stream_buffer_until.rs new file mode 100644 index 000000000..4503b9edb --- /dev/null +++ b/crates/language-server/src/stream_buffer_until.rs @@ -0,0 +1,189 @@ +use futures::stream::Stream; +use futures::stream::{iter, Iter}; +use std::{ + collections::VecDeque, + fmt::Debug, + pin::{pin, Pin}, + task::{Context, Poll}, +}; +use tokio_stream::wrappers::IntervalStream; + +use pin_project::pin_project; + +#[pin_project(project_replace)] +pub struct BufferUntilStream { + #[pin] + input_stream: I, + #[pin] + trigger_stream: T, + pending_buffer: VecDeque, + ready_buffer: VecDeque, +} + +impl BufferUntilStream +where + I: Stream, + T: Stream, +{ + pub fn new(input_stream: I, trigger_stream: T) -> Self { + BufferUntilStream { + input_stream, + trigger_stream, + pending_buffer: VecDeque::new(), + ready_buffer: VecDeque::new(), + } + } +} +impl Stream for BufferUntilStream +where + I: Stream, + T: Stream, +{ + type Item = Iter>; + + fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll> { + let mut this = self.project(); + let ready_buffer: &mut VecDeque = this.ready_buffer; + let pending_buffer: &mut VecDeque = this.pending_buffer; + + let mut finished = false; + + // Check if the input_stream has a new value + while let Poll::Ready(Some(item)) = this.input_stream.as_mut().poll_next(cx) { + // info!("Received item from input_stream: {:?}", item); + pending_buffer.push_back(item); + } + + if let Poll::Ready(None) = this.input_stream.as_mut().poll_next(cx) { + // info!("input_stream finished"); + finished = true; + } + + match this.trigger_stream.as_mut().poll_next(cx) { + Poll::Ready(Some(_)) => { + // info!("Triggered, moving pending_buffer to ready_buffer"); + ready_buffer.append(pending_buffer); + } + Poll::Ready(None) => { + 
ready_buffer.append(pending_buffer); + } + _ => { + finished = true; + } + } + + // Send any ready buffer or finish up + if !ready_buffer.is_empty() { + // info!("Returning items stream from ready_buffer"); + let current_ready_buffer = std::mem::take(this.ready_buffer); + Poll::Ready(Some(iter(current_ready_buffer))) + } else if finished { + return Poll::Ready(None); + } else { + Poll::Pending + } + } +} + +pub trait BufferUntilStreamExt: Sized +where + S: Stream, +{ + fn buffer_until(self, trigger: T) -> BufferUntilStream + where + T: Stream; + fn debounce_buffer_until( + self, + duration: std::time::Duration, + ) -> BufferUntilStream; +} + +impl BufferUntilStreamExt for S +where + S: Stream, +{ + fn buffer_until(self, trigger: T) -> BufferUntilStream + where + T: Stream, + { + BufferUntilStream::new(self, trigger) + } + + fn debounce_buffer_until( + self, + duration: std::time::Duration, + ) -> BufferUntilStream { + let trigger = IntervalStream::new(tokio::time::interval(duration)); + BufferUntilStream::new(self, trigger) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use futures::{stream::StreamExt, FutureExt}; + use tokio_stream::wrappers::{BroadcastStream, UnboundedReceiverStream}; + + #[tokio::test] + async fn test_accumulating_stream() { + println!("running test_accumulating_stream"); + let (trigger_sender, trigger_receiver) = tokio::sync::broadcast::channel(100); + let (input_sender, input_receiver) = tokio::sync::mpsc::unbounded_channel(); + + let mut output = vec![]; + + let mut accumulating_stream = BufferUntilStream::new( + UnboundedReceiverStream::from(input_receiver), + BroadcastStream::from(trigger_receiver), + ) + .flatten(); + + input_sender.send(1).unwrap(); + input_sender.send(2).unwrap(); + input_sender.send(3).unwrap(); + + while let Some(item) = accumulating_stream.next().now_or_never().flatten() { + output.push(item); + } + assert_eq!(output, Vec::::new()); + + trigger_sender.send(()).unwrap(); + + while let Some(item) = accumulating_stream.next().now_or_never().flatten() { + output.push(item); + } + assert_eq!(output, vec![1, 2, 3]); + + input_sender.send(4).unwrap(); + input_sender.send(5).unwrap(); + input_sender.send(6).unwrap(); + while let Some(item) = accumulating_stream.next().now_or_never().flatten() { + output.push(item); + } + + assert_eq!(output, vec![1, 2, 3]); + trigger_sender.send(()).unwrap(); + while let Some(item) = accumulating_stream.next().now_or_never().flatten() { + output.push(item); + } + + assert_eq!(output, vec![1, 2, 3, 4, 5, 6]); + input_sender.send(7).unwrap(); + input_sender.send(8).unwrap(); + input_sender.send(9).unwrap(); + input_sender.send(10).unwrap(); + while let Some(item) = accumulating_stream.next().now_or_never().flatten() { + output.push(item); + } + assert_eq!(output, vec![1, 2, 3, 4, 5, 6]); + + drop(trigger_sender); + + while let Some(item) = accumulating_stream.next().now_or_never().flatten() { + output.push(item); + } + assert_eq!(output, vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10]); + } + + // TODO: write tests for end of input stream +} diff --git a/crates/language-server/src/workspace.rs b/crates/language-server/src/workspace.rs index 68e338d3a..21420a4a2 100644 --- a/crates/language-server/src/workspace.rs +++ b/crates/language-server/src/workspace.rs @@ -1,4 +1,8 @@ -use std::{borrow::Cow, collections::BTreeSet, path::PathBuf}; +use std::{ + borrow::Cow, + collections::BTreeSet, + path::{Path, PathBuf}, +}; use anyhow::Result; use common::{ @@ -37,7 +41,7 @@ pub trait IngotFileContext { db: &mut 
LanguageServerDatabase, path: &str, ) -> Option; - fn top_mod_for_file_path(&self, db: &dyn LowerHirDb, path: &str) -> Option; + fn top_mod_from_file_path(&self, db: &dyn LowerHirDb, path: &str) -> Option; fn remove_input_for_file_path( &mut self, db: &mut LanguageServerDatabase, @@ -128,7 +132,7 @@ impl IngotFileContext for LocalIngotContext { Some(self.ingot) } - fn top_mod_for_file_path(&self, db: &dyn LowerHirDb, path: &str) -> Option { + fn top_mod_from_file_path(&self, db: &dyn LowerHirDb, path: &str) -> Option { let file = self.get_input_for_file_path(path)?; Some(map_file_to_mod(db.as_lower_hir_db(), file)) } @@ -222,7 +226,7 @@ impl IngotFileContext for StandaloneIngotContext { get_containing_ingot(&self.ingots, path).copied() } - fn top_mod_for_file_path(&self, db: &dyn LowerHirDb, path: &str) -> Option { + fn top_mod_from_file_path(&self, db: &dyn LowerHirDb, path: &str) -> Option { let file = self.get_input_for_file_path(path)?; Some(map_file_to_mod(db.as_lower_hir_db(), file)) } @@ -258,11 +262,10 @@ impl Workspace { pub fn load_std_lib( &mut self, db: &mut LanguageServerDatabase, - root_path: &PathBuf, + root_path: &Path, ) -> Result<()> { let root_path = root_path.to_str().unwrap(); - self - .touch_ingot_for_file_path(db, &format!("{}/std/fe.toml", root_path)) + self.touch_ingot_for_file_path(db, &format!("{}/std/fe.toml", root_path)) .unwrap(); info!("Loading std lib..."); @@ -274,10 +277,7 @@ impl Workspace { if let Some(file) = StdLib::get(path) { let contents = String::from_utf8(file.data.as_ref().to_vec()); if let Ok(contents) = contents { - let input = self.touch_input_for_file_path( - db, - &std_path, - ); + let input = self.touch_input_for_file_path(db, &std_path); input.unwrap().set_text(db).to(contents); }; }; @@ -288,14 +288,14 @@ impl Workspace { pub fn set_workspace_root( &mut self, db: &mut LanguageServerDatabase, - root_path: &PathBuf, + root_path: &Path, ) -> Result<()> { let path = root_path; self.root_path = Some(path.to_path_buf()); self.sync(db) } - pub fn ingot_context_for_config_path( + pub fn ingot_context_from_config_path( &mut self, db: &LanguageServerDatabase, config_path: &str, @@ -322,7 +322,7 @@ impl Workspace { .collect::>(); for path in paths { - self.ingot_context_for_config_path(db, path); + self.ingot_context_from_config_path(db, path); } let existing_keys: Vec = self.ingot_contexts.keys().collect(); @@ -351,7 +351,9 @@ impl Workspace { info!("Found {} files in ingot", actual_paths.len()); info!("Syncing ingot files: {:?}", actual_paths); - let ingot_context = self.ingot_context_for_config_path(db, config_path).unwrap(); + let ingot_context = self + .ingot_context_from_config_path(db, config_path) + .unwrap(); let previous_ingot_context_file_keys = &ingot_context.files.keys().collect::>(); for path in previous_ingot_context_file_keys { @@ -439,13 +441,13 @@ impl IngotFileContext for Workspace { } } - fn top_mod_for_file_path(&self, db: &dyn LowerHirDb, path: &str) -> Option { + fn top_mod_from_file_path(&self, db: &dyn LowerHirDb, path: &str) -> Option { let ctx = get_containing_ingot(&self.ingot_contexts, path); if let Some(ctx) = ctx { - Some(ctx.top_mod_for_file_path(db, path).unwrap()) + Some(ctx.top_mod_from_file_path(db, path).unwrap()) } else { self.standalone_ingot_context - .top_mod_for_file_path(db, path) + .top_mod_from_file_path(db, path) } } @@ -560,7 +562,7 @@ mod tests { let mut workspace = Workspace::default(); let _ingot_context_ingot = { - let ingot_context = workspace.ingot_context_for_config_path( + let ingot_context = 
workspace.ingot_context_from_config_path( &crate::db::LanguageServerDatabase::default(), config_path, ); @@ -595,7 +597,7 @@ mod tests { let mut db = crate::db::LanguageServerDatabase::default(); let ingot_context_ingot = { - let ingot_context = workspace.ingot_context_for_config_path(&db, config_path); + let ingot_context = workspace.ingot_context_from_config_path(&db, config_path); assert!(ingot_context.is_some()); ingot_context.map(|ctx| ctx.ingot) @@ -674,7 +676,7 @@ mod tests { // file.sync(&mut db, None); // this would panic if a file has been added to multiple ingots - let _top_mod = workspace.top_mod_for_file_path(&db, src_path.as_str()); + let _top_mod = workspace.top_mod_from_file_path(&db, src_path.as_str()); } } @@ -700,7 +702,7 @@ mod tests { workspace.sync_ingot_files(&mut db, &foo_config); let foo_context = workspace - .ingot_context_for_config_path(&db, &foo_config) + .ingot_context_from_config_path(&db, &foo_config) .unwrap(); assert!(foo_context.files.len() == 1); From e56f9a4d3f9a6d283afade46e044efbc121886d0 Mon Sep 17 00:00:00 2001 From: Micah Date: Tue, 19 Mar 2024 17:00:38 -0500 Subject: [PATCH 50/66] test files for language server hover docs --- crates/language-server/test_files/docstrings/fe.toml | 0 crates/language-server/test_files/docstrings/src/lib.fe | 8 ++++++++ .../language-server/test_files/docstrings/src/stuff.fe | 9 +++++++++ 3 files changed, 17 insertions(+) create mode 100644 crates/language-server/test_files/docstrings/fe.toml create mode 100644 crates/language-server/test_files/docstrings/src/lib.fe create mode 100644 crates/language-server/test_files/docstrings/src/stuff.fe diff --git a/crates/language-server/test_files/docstrings/fe.toml b/crates/language-server/test_files/docstrings/fe.toml new file mode 100644 index 000000000..e69de29bb diff --git a/crates/language-server/test_files/docstrings/src/lib.fe b/crates/language-server/test_files/docstrings/src/lib.fe new file mode 100644 index 000000000..90a5c05f2 --- /dev/null +++ b/crates/language-server/test_files/docstrings/src/lib.fe @@ -0,0 +1,8 @@ +use stuff::{ return_three, return_four } + +/// ## `return_seven` +/// ### a function of numbers +/// #### returns the 3+4=7 +fn return_seven() { + return_three() + return_four() +} diff --git a/crates/language-server/test_files/docstrings/src/stuff.fe b/crates/language-server/test_files/docstrings/src/stuff.fe new file mode 100644 index 000000000..b904aa95f --- /dev/null +++ b/crates/language-server/test_files/docstrings/src/stuff.fe @@ -0,0 +1,9 @@ +/// A function that returns `3` +pub fn return_three() { + 3 +} + +/// ## A function that returns 4 +pub fn return_four() { + 4 +} \ No newline at end of file From fcd226481f970a72d2ecbadf1ce4b772c06fa097 Mon Sep 17 00:00:00 2001 From: Micah Date: Wed, 27 Mar 2024 00:28:01 -0500 Subject: [PATCH 51/66] language server: use `tracing` and `tracing-subscriber` --- Cargo.lock | 97 +++++++++++++++++++ crates/language-server/Cargo.toml | 3 + crates/language-server/src/backend.rs | 2 +- .../language-server/src/handlers/request.rs | 2 +- crates/language-server/src/language_server.rs | 2 +- crates/language-server/src/logger.rs | 93 +++++++++++------- crates/language-server/src/main.rs | 6 +- crates/language-server/src/util.rs | 2 +- crates/language-server/src/workspace.rs | 2 +- 9 files changed, 163 insertions(+), 46 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index f2f185eba..ef2f1757d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -765,6 +765,15 @@ dependencies = [ "zeroize", ] +[[package]] +name = "deranged" 
+version = "0.3.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b42b6fa04a440b495c8b04d0e71b707c585f83cb9cb28cf8cd0d976c315e31b4" +dependencies = [ + "powerfmt", +] + [[package]] name = "derive_more" version = "0.99.17" @@ -1310,6 +1319,9 @@ dependencies = [ "tokio-macros", "tokio-stream", "tower-lsp", + "tracing", + "tracing-appender", + "tracing-subscriber", "url", "vfs 0.12.0", ] @@ -2272,6 +2284,16 @@ dependencies = [ "minimal-lexical", ] +[[package]] +name = "nu-ansi-term" +version = "0.46.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84" +dependencies = [ + "overload", + "winapi", +] + [[package]] name = "num" version = "0.4.0" @@ -2306,6 +2328,12 @@ dependencies = [ "num-traits", ] +[[package]] +name = "num-conv" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9" + [[package]] name = "num-integer" version = "0.1.45" @@ -2395,6 +2423,12 @@ dependencies = [ "winapi", ] +[[package]] +name = "overload" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" + [[package]] name = "parity-scale-codec" version = "3.4.0" @@ -2561,6 +2595,12 @@ dependencies = [ "plotters-backend", ] +[[package]] +name = "powerfmt" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" + [[package]] name = "ppv-lite86" version = "0.2.17" @@ -3592,6 +3632,37 @@ dependencies = [ "once_cell", ] +[[package]] +name = "time" +version = "0.3.34" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8248b6521bb14bc45b4067159b9b6ad792e2d6d754d6c41fb50e29fefe38749" +dependencies = [ + "deranged", + "itoa", + "num-conv", + "powerfmt", + "serde", + "time-core", + "time-macros", +] + +[[package]] +name = "time-core" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" + +[[package]] +name = "time-macros" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ba3a3ef41e6672a2f0f001392bb5dcd3ff0a9992d618ca761a11c3121547774" +dependencies = [ + "num-conv", + "time-core", +] + [[package]] name = "tiny-keccak" version = "2.0.2" @@ -3823,6 +3894,18 @@ dependencies = [ "tracing-core", ] +[[package]] +name = "tracing-appender" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3566e8ce28cc0a3fe42519fc80e6b4c943cc4c8cef275620eb8dac2d3d4e06cf" +dependencies = [ + "crossbeam-channel", + "thiserror", + "time", + "tracing-subscriber", +] + [[package]] name = "tracing-attributes" version = "0.1.27" @@ -3844,6 +3927,17 @@ dependencies = [ "valuable", ] +[[package]] +name = "tracing-log" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3" +dependencies = [ + "log", + "once_cell", + "tracing-core", +] + [[package]] name = "tracing-subscriber" version = "0.3.18" @@ -3851,12 +3945,15 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b" dependencies = [ "matchers", + "nu-ansi-term", "once_cell", "regex", "sharded-slab", + "smallvec", "thread_local", "tracing", "tracing-core", + "tracing-log", ] [[package]] diff --git a/crates/language-server/Cargo.toml b/crates/language-server/Cargo.toml index 0351f3845..0b5260b54 100644 --- a/crates/language-server/Cargo.toml +++ b/crates/language-server/Cargo.toml @@ -49,3 +49,6 @@ console-subscriber = "0.2.0" vfs = "0.12.0" rust-embed = "8.3.0" futures-batch = "0.6.1" +tracing = "0.1.40" +tracing-subscriber = "0.3.18" +tracing-appender = "0.2.3" diff --git a/crates/language-server/src/backend.rs b/crates/language-server/src/backend.rs index 785cebb02..93fdd526a 100644 --- a/crates/language-server/src/backend.rs +++ b/crates/language-server/src/backend.rs @@ -24,7 +24,7 @@ use crate::globals::LANGUAGE_ID; use crate::language_server::MessageReceivers; use crate::workspace::{IngotFileContext, SyncableInputFile, Workspace}; -use log::info; +use tracing::info; // use tokio_stream::StreamExt; diff --git a/crates/language-server/src/handlers/request.rs b/crates/language-server/src/handlers/request.rs index b5951ec23..483e7d928 100644 --- a/crates/language-server/src/handlers/request.rs +++ b/crates/language-server/src/handlers/request.rs @@ -6,7 +6,7 @@ use hir_analysis::{ name_resolution::{EarlyResolvedPath, NameRes}, HirAnalysisDb, }; -use log::info; +use tracing::info; use salsa::Snapshot; use tokio::sync::RwLock; diff --git a/crates/language-server/src/language_server.rs b/crates/language-server/src/language_server.rs index 843ca47bd..300e23140 100644 --- a/crates/language-server/src/language_server.rs +++ b/crates/language-server/src/language_server.rs @@ -1,9 +1,9 @@ -use log::{error, info}; use lsp_types::{ DidChangeWatchedFilesParams, DidChangeWatchedFilesRegistrationOptions, DidCloseTextDocumentParams, FileSystemWatcher, GlobPattern, InitializeParams, InitializeResult, Registration, }; +use tracing::{error, info}; use tower_lsp::{jsonrpc::Result, Client, LanguageServer}; diff --git a/crates/language-server/src/logger.rs b/crates/language-server/src/logger.rs index a05fdee90..61eb2d24c 100644 --- a/crates/language-server/src/logger.rs +++ b/crates/language-server/src/logger.rs @@ -1,55 +1,74 @@ -use log::{Level, LevelFilter, Metadata, Record, SetLoggerError}; +use std::io::Write; + use lsp_types::MessageType; use tokio::task::yield_now; use tower_lsp::Client; +use tracing_subscriber::fmt::writer::MakeWriterExt; +use tracing_subscriber::fmt::MakeWriter; +use tracing_subscriber::layer::SubscriberExt; +use tracing_subscriber::prelude::*; -pub struct Logger { - pub(crate) level: Level, +pub async fn handle_log_messages( + mut rx: tokio::sync::mpsc::UnboundedReceiver<(String, MessageType)>, + client: Client, +) -> tokio::sync::mpsc::UnboundedReceiver { + loop { + if let Some((message, message_type)) = rx.recv().await { + client.log_message(message_type, message).await; + yield_now().await; + } + } +} + +#[derive(Clone)] +pub struct LoggerLayer { log_sender: tokio::sync::mpsc::UnboundedSender<(String, MessageType)>, } -impl log::Log for Logger { - fn enabled(&self, metadata: &Metadata) -> bool { - let logger = self; - metadata.level() <= logger.level +impl Write for LoggerLayer { + fn write(&mut self, buf: &[u8]) -> std::io::Result { + let message = String::from_utf8_lossy(buf).to_string(); + let _ = self.log_sender.send((message, MessageType::LOG)); + Ok(buf.len()) } - fn log(&self, record: &Record) { - if 
self.enabled(record.metadata()) { - let message = format!("{} - {}", record.level(), record.args()); - let message_type = match record.level() { - log::Level::Error => MessageType::ERROR, - log::Level::Warn => MessageType::WARNING, - log::Level::Info => MessageType::INFO, - log::Level::Debug => MessageType::LOG, - log::Level::Trace => MessageType::LOG, - }; - self.log_sender.send((message, message_type)).unwrap(); - } + fn flush(&mut self) -> std::io::Result<()> { + Ok(()) } +} - fn flush(&self) {} +impl MakeWriter<'_> for LoggerLayer { + type Writer = Self; + fn make_writer(&self) -> Self::Writer { + self.clone() + } } pub fn setup_logger( - level: Level, -) -> Result, SetLoggerError> { + level: tracing::Level, +) -> Result, Box> +{ let (log_sender, log_receiver) = tokio::sync::mpsc::unbounded_channel::<(String, MessageType)>(); - let logger = Logger { level, log_sender }; - let static_logger = Box::leak(Box::new(logger)); - log::set_logger(static_logger)?; - log::set_max_level(LevelFilter::Debug); - Ok(log_receiver) -} + let logger = LoggerLayer { log_sender }; + let logger = logger.with_max_level(level); -pub async fn handle_log_messages( - mut rx: tokio::sync::mpsc::UnboundedReceiver<(String, MessageType)>, - client: Client, -) -> tokio::sync::mpsc::UnboundedReceiver { - loop { - let (message, message_type) = rx.recv().await.unwrap(); - client.log_message(message_type, message).await; - yield_now().await; - } + let pretty_logger = tracing_subscriber::fmt::layer() + .event_format(tracing_subscriber::fmt::format::format().pretty()) + .with_ansi(false) + .with_writer(logger); + + #[cfg(tokio_unstable)] + let console_layer = console_subscriber::spawn(); + + #[cfg(tokio_unstable)] + tracing_subscriber::registry() + .with(pretty_logger) + .with(console_layer) + .init(); + + #[cfg(not(tokio_unstable))] + tracing_subscriber::registry().with(pretty_logger).init(); + + Ok(log_receiver) } diff --git a/crates/language-server/src/main.rs b/crates/language-server/src/main.rs index 26b66553f..e07c86cec 100644 --- a/crates/language-server/src/main.rs +++ b/crates/language-server/src/main.rs @@ -12,6 +12,7 @@ mod workspace; use backend::Backend; use db::Jar; +use tracing::Level; use language_server::Server; @@ -25,9 +26,8 @@ mod handlers { async fn main() { let stdin = tokio::io::stdin(); let stdout = tokio::io::stdout(); - console_subscriber::init(); + let rx = setup_logger(Level::INFO).unwrap(); - // let message_channels = language_server::MessageChannels::new(); let (message_senders, message_receivers) = language_server::setup_message_channels(); let (service, socket) = tower_lsp::LspService::build(|client| Server::new(client, message_senders)).finish(); @@ -36,8 +36,6 @@ async fn main() { let client = server.client.clone(); let backend = Backend::new(client, message_receivers); - let rx = setup_logger(log::Level::Info).unwrap(); - // separate runtime for the backend // let backend_runtime = tokio::runtime::Builder::new_multi_thread() // .worker_threads(4) diff --git a/crates/language-server/src/util.rs b/crates/language-server/src/util.rs index eb7e393c3..b76278814 100644 --- a/crates/language-server/src/util.rs +++ b/crates/language-server/src/util.rs @@ -4,8 +4,8 @@ use common::{ }; use fxhash::FxHashMap; use hir::{hir_def::scope_graph::ScopeId, span::LazySpan, SpannedHirDb}; -use log::error; use lsp_types::Position; +use tracing::error; use url::Url; pub fn calculate_line_offsets(text: &str) -> Vec { diff --git a/crates/language-server/src/workspace.rs b/crates/language-server/src/workspace.rs 
index 21420a4a2..2dd8c087d 100644 --- a/crates/language-server/src/workspace.rs +++ b/crates/language-server/src/workspace.rs @@ -10,8 +10,8 @@ use common::{ InputFile, InputIngot, }; use hir::{hir_def::TopLevelMod, lower::map_file_to_mod, LowerHirDb}; -use log::info; use patricia_tree::StringPatriciaMap; +use tracing::info; use crate::db::LanguageServerDatabase; From 497a69d9750edb4f3ea28fc4ff46c897c9744bf1 Mon Sep 17 00:00:00 2001 From: Micah Date: Wed, 27 Mar 2024 00:54:43 -0500 Subject: [PATCH 52/66] temporarily suppress dead code warning --- crates/hir-analysis/src/ty/def_analysis.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/hir-analysis/src/ty/def_analysis.rs b/crates/hir-analysis/src/ty/def_analysis.rs index 4a074b42a..22323479c 100644 --- a/crates/hir-analysis/src/ty/def_analysis.rs +++ b/crates/hir-analysis/src/ty/def_analysis.rs @@ -889,7 +889,7 @@ enum DefKind { Adt(AdtDef), Trait(TraitDef), ImplTrait(Implementor), - Impl(HirImpl, TyId), + Impl(HirImpl, #[allow(dead_code)] TyId), Func(FuncDef), } From 92a45cd7c6289a779b5fc8b9d4c1a75b7f24b876 Mon Sep 17 00:00:00 2001 From: Micah Date: Wed, 27 Mar 2024 20:40:18 -0500 Subject: [PATCH 53/66] language server channel macro docs --- crates/language-server-macros/src/lib.rs | 98 ++++++++++++++++++- crates/language-server/src/language_server.rs | 6 +- 2 files changed, 94 insertions(+), 10 deletions(-) diff --git a/crates/language-server-macros/src/lib.rs b/crates/language-server-macros/src/lib.rs index 99150f982..3c0e5a316 100644 --- a/crates/language-server-macros/src/lib.rs +++ b/crates/language-server-macros/src/lib.rs @@ -4,14 +4,27 @@ use proc_macro::TokenStream; use quote::{format_ident, quote}; use syn::{parse_macro_input, FnArg, ImplItem, ItemImpl, ReturnType}; -/// Macro for generating tokio channels from [`lsp-types`](https://docs.rs/lsp-types). +/// Generates message channels and dispatch methods for a `tower_lsp::LanguageServer` implementation. /// -/// This procedural macro annotates the `tower_lsp::LanguageServer` trait implementation and generates -/// a struct full of tokio mpsc channels that can be used to signal the server to handle -/// defined requests and notifications. +/// This macro generates two structs: +/// - `MessageSenders`: Contains `tokio::sync::mpsc::UnboundedSender` channels for each method in the `LanguageServer` trait. +/// - `MessageReceivers`: Contains `tokio_stream::wrappers::UnboundedReceiverStream` streams for each method in the `LanguageServer` trait. +/// +/// It also generates a `setup_message_channels` function that initializes the channels and returns instances of the `MessageSenders` and `MessageReceivers` structs. +/// +/// # Example +/// +/// ```rust,ignore +/// use tower_lsp::LanguageServer; +/// +/// #[language_server_macros::message_channels] +/// #[tower_lsp::async_trait] +/// impl LanguageServer for Server { +/// // ... +/// } +/// ``` #[proc_macro_attribute] pub fn message_channels(_attr: TokenStream, item: TokenStream) -> TokenStream { - // let attr = parse_macro_input!(attr as Option); let channel_senders_struct_name = format_ident!( "MessageSenders", // attr.clone().map_or("MessageSenders".to_string(), |attr| attr.to_string()) @@ -213,11 +226,15 @@ fn gen_channel_structs( let dispatcher_fn = match params { Some(params) => quote! { + /// Forward the LSP request parameters to the designated channel. + /// + /// A oneshot receiver is returned which can optionally be used to get a response back from the channel listener.
pub fn #sender_fn_name(&self, params: #params) -> #sender_fn_result { #send_payload } }, None => quote! { + /// Forward the LSP notification parameters to the designated channel. pub fn #sender_fn_name(&self) -> #sender_fn_result { #send_payload } @@ -231,14 +248,85 @@ fn gen_channel_structs( .collect(); quote! { + /// Struct containing `tokio::sync::mpsc::UnboundedSender` channels for each method in the `LanguageServer` trait. + /// + /// This struct is generated by the `#[message_channels]` macro. For each method in the `LanguageServer` trait, + /// it generates a corresponding field with a name in the format `<method_name>_tx`. + /// + /// For each implemented LSP notification method, a channel of type `tokio::sync::mpsc::UnboundedSender<Params>` is generated, where `Params` is the method's parameter type. + /// For each implemented LSP request method, a channel of type `tokio::sync::mpsc::UnboundedSender<(Params, tokio::sync::oneshot::Sender<Result>)>` is generated, where `Params` is the method's parameter type and `Result` is the method's return type. + /// + /// The macro also generates corresponding `send_<method_name>` helper methods for each implemented LSP method to allow sending + /// requests or notifications through the respective channels. + /// + /// # Example + /// + /// ```rust,ignore + /// use tower_lsp::{LanguageServer, Client, jsonrpc::Result}; + /// use lsp_types::{InitializeParams, InitializeResult}; + /// + /// struct Backend { + /// messaging: MessageSenders, + /// client: Client, + /// } + /// + /// #[tower_lsp::async_trait] + /// impl LanguageServer for Backend { + /// async fn initialize(&self, params: InitializeParams) -> Result<InitializeResult> { + /// let rx = self.messaging.send_initialize(params); + /// + /// match rx.await { + /// Ok(result) => { + /// self.client.log_message(lsp_types::MessageType::INFO, "Server initialized!").await; + /// Ok(result) + /// } + /// Err(e) => { + /// self.client.log_message(lsp_types::MessageType::ERROR, format!("Failed to initialize: {:?}", e)).await; + /// Err(jsonrpc::Error::internal_error()) + /// } + /// } + /// } + /// + /// // Other LanguageServer methods... + /// } + /// ``` pub struct #channel_receivers_struct_name { #channel_receivers_declarations } + /// Struct containing `tokio_stream::wrappers::UnboundedReceiverStream` streams for each implemented `LanguageServer` trait method. + /// + /// This struct is generated by the `#[message_channels]` macro. For each implemented method of the `LanguageServer` trait, + /// it generates a corresponding field with a name in the format `<method_name>_stream`. + /// + /// The type of each field depends on the signature of the corresponding `LanguageServer` method: + /// - If the method has a return type, the field type is `tokio_stream::wrappers::UnboundedReceiverStream<(Params, tokio::sync::oneshot::Sender<Result>)>`, + /// where `Params` is the type of the method's parameter and `Result` is the return type. + /// - If the method doesn't have a return type, the field type is `tokio_stream::wrappers::UnboundedReceiverStream<Params>`. + /// + /// These streams can be used to handle incoming requests or notifications for each `LanguageServer` method. + /// + /// # Example + /// + /// ```rust,ignore + /// let (senders, receivers) = setup_message_channels();
 + /// let mut initialized_stream = receivers.initialize_stream.fuse(); + /// loop { + /// select! { + /// Some((params, responder)) = initialized_stream.next() => { + /// // Handle initialization request + /// let result = lsp_types::InitializeResult { ...
}; + /// let _ = responder.send(Ok(result)); + /// } + /// // ... + /// } + /// } + /// ``` pub struct #channel_senders_struct_name { #channel_senders_declarations } + /// Initializes the message channels and returns instances of the `MessageSenders` and `MessageReceivers` structs. pub fn setup_message_channels() -> (#channel_senders_struct_name, #channel_receivers_struct_name) { #channel_instantiations ( diff --git a/crates/language-server/src/language_server.rs b/crates/language-server/src/language_server.rs index 300e23140..87b68f677 100644 --- a/crates/language-server/src/language_server.rs +++ b/crates/language-server/src/language_server.rs @@ -35,12 +35,11 @@ impl Server { } } -#[language_server_macros::message_channels(MessageChannels)] +#[language_server_macros::message_channels] #[tower_lsp::async_trait] impl LanguageServer for Server { async fn initialize(&self, initialize_params: InitializeParams) -> Result { // forward the initialize request to the messaging system - // let messaging = self.messaging.read().await; let rx = self.messaging.send_initialize(initialize_params); info!("awaiting initialization result"); @@ -72,7 +71,6 @@ impl LanguageServer for Server { } async fn did_change(&self, params: lsp_types::DidChangeTextDocumentParams) { - // info!("sending did change to channel of capacity {}", self.messaging.did_change_tx.capacity()); self.messaging.send_did_change(params); } @@ -85,7 +83,6 @@ impl LanguageServer for Server { } async fn hover(&self, params: lsp_types::HoverParams) -> Result> { - // info!("sending hover to channel of capacity {}", self.messaging.hover_tx.capacity()); let rx = self.messaging.send_hover(params); rx.await.expect("hover response") } @@ -94,7 +91,6 @@ impl LanguageServer for Server { &self, params: lsp_types::GotoDefinitionParams, ) -> Result> { - // let messaging = self.messaging.read().await; let rx = self.messaging.send_goto_definition(params); rx.await.expect("goto definition response") } From a40b5e0e0f752e716d29a56ddab759c7ca5763a4 Mon Sep 17 00:00:00 2001 From: Micah Date: Wed, 27 Mar 2024 22:38:11 -0500 Subject: [PATCH 54/66] cleanup stream/backend organization --- crates/language-server/src/backend.rs | 272 +----------------- .../language-server/src/{ => backend}/db.rs | 0 .../language-server/src/backend/handlers.rs | 183 ++++++++++++ .../request.rs => backend/helpers.rs} | 21 +- crates/language-server/src/backend/streams.rs | 107 +++++++ .../src/{ => backend}/workspace.rs | 22 +- crates/language-server/src/diagnostics.rs | 4 +- crates/language-server/src/goto.rs | 4 +- crates/language-server/src/main.rs | 19 +- .../src/{language_server.rs => server.rs} | 0 .../src/stream_buffer_until.rs | 189 ------------ 11 files changed, 326 insertions(+), 495 deletions(-) rename crates/language-server/src/{ => backend}/db.rs (100%) create mode 100644 crates/language-server/src/backend/handlers.rs rename crates/language-server/src/{handlers/request.rs => backend/helpers.rs} (87%) create mode 100644 crates/language-server/src/backend/streams.rs rename crates/language-server/src/{ => backend}/workspace.rs (96%) rename crates/language-server/src/{language_server.rs => server.rs} (100%) delete mode 100644 crates/language-server/src/stream_buffer_until.rs diff --git a/crates/language-server/src/backend.rs b/crates/language-server/src/backend.rs index 93fdd526a..b4a9ce449 100644 --- a/crates/language-server/src/backend.rs +++ b/crates/language-server/src/backend.rs @@ -1,45 +1,24 @@ -use crate::handlers::request::{handle_goto_definition, handle_hover}; - 
-use crate::workspace::SyncableIngotFileContext; - -use common::InputDb; -use fork_stream::StreamExt as _; -use futures_batch::ChunksTimeoutStreamExt; -use fxhash::FxHashSet; - -use futures::StreamExt; -use futures_concurrency::prelude::*; -use lsp_types::TextDocumentItem; -use salsa::{ParallelDatabase, Snapshot}; -use tokio_stream::wrappers::UnboundedReceiverStream; - +pub(crate) mod db; +mod handlers; +mod helpers; +pub(crate) mod streams; +pub(crate) mod workspace; +use db::LanguageServerDatabase; use std::sync::Arc; use tokio::sync::RwLock; - -use crate::capabilities::server_capabilities; -use crate::db::LanguageServerDatabase; - -use crate::diagnostics::get_diagnostics; -use crate::globals::LANGUAGE_ID; -use crate::language_server::MessageReceivers; -use crate::workspace::{IngotFileContext, SyncableInputFile, Workspace}; - -use tracing::info; - -// use tokio_stream::StreamExt; +use workspace::Workspace; use tower_lsp::Client; pub struct Backend { - pub(crate) messaging: MessageReceivers, - pub(crate) client: Client, - pub(crate) db: LanguageServerDatabase, - pub(crate) workspace: Arc>, + client: Client, + db: LanguageServerDatabase, + workspace: Arc>, workers: tokio::runtime::Runtime, } impl Backend { - pub fn new(client: Client, messaging: MessageReceivers) -> Self { + pub fn new(client: Client) -> Self { let db = LanguageServerDatabase::default(); let workspace = Arc::new(RwLock::new(Workspace::default())); @@ -49,239 +28,10 @@ impl Backend { .build() .unwrap(); Self { - messaging, client, db, workspace, workers, } } - pub async fn handle_streams(mut self) { - info!("setting up streams"); - let workspace = self.workspace.clone(); - let db = &mut self.db; - - let client = self.client.clone(); - let messaging = self.messaging; - - let mut initialized_stream = messaging.initialize_stream.fuse(); - let mut shutdown_stream = messaging.shutdown_stream.fuse(); - let did_change_watched_files_stream = messaging.did_change_watched_files_stream.fork(); - - let flat_did_change_watched_files = did_change_watched_files_stream - .map(|params| futures::stream::iter(params.changes)) - .flatten() - .fork(); - - let did_change_watched_file_stream = - flat_did_change_watched_files.clone().filter(|change| { - let change_type = change.typ; - Box::pin(async move { matches!(change_type, lsp_types::FileChangeType::CHANGED) }) - }); - - let did_create_watched_file_stream = - flat_did_change_watched_files.clone().filter(|change| { - let change_type = change.typ; - Box::pin(async move { matches!(change_type, lsp_types::FileChangeType::CREATED) }) - }); - - let mut did_delete_watch_file_stream = flat_did_change_watched_files - .clone() - .filter(|change| { - let change_type = change.typ; - Box::pin(async move { matches!(change_type, lsp_types::FileChangeType::DELETED) }) - }) - .fuse(); - - let did_open_stream = messaging.did_open_stream.fuse(); - let did_change_stream = messaging.did_change_stream.fuse(); - let mut change_stream = ( - did_change_watched_file_stream.map(|change| { - let uri = change.uri; - let path = uri.to_file_path().unwrap(); - let text = std::fs::read_to_string(path).unwrap(); - TextDocumentItem { - uri: uri.clone(), - language_id: LANGUAGE_ID.to_string(), - version: 0, - text, - } - }), - did_create_watched_file_stream.map(|change| { - let uri = change.uri; - let path = uri.to_file_path().unwrap(); - let text = std::fs::read_to_string(path).unwrap(); - TextDocumentItem { - uri: uri.clone(), - language_id: LANGUAGE_ID.to_string(), - version: 0, - text, - } - }), - 
did_open_stream.map(|params| TextDocumentItem { - uri: params.text_document.uri, - language_id: LANGUAGE_ID.to_string(), - version: params.text_document.version, - text: params.text_document.text, - }), - did_change_stream.map(|params| TextDocumentItem { - uri: params.text_document.uri, - language_id: LANGUAGE_ID.to_string(), - version: params.text_document.version, - text: params.content_changes[0].text.clone(), - }), - ) - .merge() - .fuse(); - - let (tx_needs_diagnostics, rx_needs_diagnostics) = tokio::sync::mpsc::unbounded_channel(); - - let mut diagnostics_stream = UnboundedReceiverStream::from(rx_needs_diagnostics) - .chunks_timeout(500, std::time::Duration::from_millis(30)) - .fuse(); - - let mut hover_stream = messaging.hover_stream.fuse(); - let mut goto_definition_stream = messaging.goto_definition_stream.fuse(); - - info!("streams set up, looping on them now"); - loop { - tokio::select! { - Some(result) = initialized_stream.next() => { - let (initialization_params, responder) = result; - info!("initializing language server!"); - - let root = - initialization_params - .root_uri - .unwrap() - .to_file_path() - .ok() - .unwrap(); - - let mut workspace = self.workspace.write().await; - let _ = workspace.set_workspace_root( - db, - &root - ); - let _ = workspace.load_std_lib(db, &root); - let _ = workspace.sync(db); - - let capabilities = server_capabilities(); - let initialize_result = lsp_types::InitializeResult { - capabilities, - server_info: Some(lsp_types::ServerInfo { - name: String::from("fe-language-server"), - version: Some(String::from(env!("CARGO_PKG_VERSION"))), - }), - }; - let _ = responder.send(Ok(initialize_result)); - } - Some(result) = shutdown_stream.next() => { - let (_, responder) = result; - info!("shutting down language server"); - let _ = responder.send(Ok(())); - } - Some(deleted) = did_delete_watch_file_stream.next() => { - let path = deleted.uri.to_file_path().unwrap(); - info!("file deleted: {:?}", path); - let path = path.to_str().unwrap(); - let workspace = workspace.clone(); - let _ = workspace.write().await.remove_input_for_file_path(db, path); - let _ = tx_needs_diagnostics.send(path.to_string()); - } - Some(doc) = change_stream.next() => { - info!("change detected: {:?}", doc.uri); - let path_buf = doc.uri.to_file_path().unwrap(); - let path = path_buf.to_str().unwrap(); - let contents = Some(doc.text); - update_input(workspace.clone(), db, path, contents).await; - let _ = tx_needs_diagnostics.send(path.to_string()); - } - Some(files_need_diagnostics) = diagnostics_stream.next() => { - info!("files need diagnostics: {:?}", files_need_diagnostics); - let mut ingots_need_diagnostics = FxHashSet::default(); - for file in files_need_diagnostics { - let workspace = workspace.clone(); - let workspace = workspace.read().await; - let ingot = workspace.get_ingot_for_file_path(&file).unwrap(); - ingots_need_diagnostics.insert(ingot); - } - - info!("ingots need diagnostics: {:?}", ingots_need_diagnostics); - for ingot in ingots_need_diagnostics.into_iter() { - for file in ingot.files(db.as_input_db()) { - let file = *file; - let path = file.path(db.as_input_db()); - let path = lsp_types::Url::from_file_path(path).unwrap(); - let db = db.snapshot(); - let client = client.clone(); - let workspace = workspace.clone(); - self.workers.spawn( - async move { handle_diagnostics(client.clone(), workspace.clone(), db, path).await } - ); - } - } - } - Some((params, responder)) = hover_stream.next() => { - let db = db.snapshot(); - let workspace = workspace.clone(); - let 
response = match self.workers.spawn(handle_hover(db, workspace, params)).await { - Ok(response) => response, - Err(e) => { - eprintln!("Error handling hover: {:?}", e); - Ok(None) - } - }; - let _ = responder.send(response); - } - Some((params, responder)) = goto_definition_stream.next() => { - let db = db.snapshot(); - let workspace = workspace.clone(); - let response = match handle_goto_definition(db, workspace, params).await { - Ok(response) => response, - Err(e) => { - eprintln!("Error handling goto definition: {:?}", e); - None - } - }; - let _ = responder.send(Ok(response)); - } - } - tokio::task::yield_now().await; - } - } -} - -async fn update_input( - workspace: Arc>, - db: &mut LanguageServerDatabase, - path: &str, - contents: Option, -) { - info!("updating input for {:?}", path); - let workspace = &mut workspace.write().await; - let input = workspace.touch_input_for_file_path(db, path).unwrap(); - if let Some(contents) = contents { - let _ = input.sync_from_text(db, contents); - } -} - -async fn handle_diagnostics( - client: Client, - workspace: Arc>, - db: Snapshot, - url: lsp_types::Url, -) { - info!("handling diagnostics for {:?}", url); - let workspace = &workspace.read().await; - let diagnostics = get_diagnostics(&db, workspace, url.clone()); - - let client = client.clone(); - let diagnostics = diagnostics - .unwrap() - .into_iter() - .map(|(uri, diags)| async { client.publish_diagnostics(uri, diags, None).await }) - .collect::>(); - - futures::future::join_all(diagnostics).await; } diff --git a/crates/language-server/src/db.rs b/crates/language-server/src/backend/db.rs similarity index 100% rename from crates/language-server/src/db.rs rename to crates/language-server/src/backend/db.rs diff --git a/crates/language-server/src/backend/handlers.rs b/crates/language-server/src/backend/handlers.rs new file mode 100644 index 000000000..ac5b94c47 --- /dev/null +++ b/crates/language-server/src/backend/handlers.rs @@ -0,0 +1,183 @@ +use super::helpers::{goto_helper, hover_helper}; +use crate::backend::Backend; + +use crate::backend::workspace::SyncableIngotFileContext; + +use common::InputDb; +use fxhash::FxHashSet; + +use lsp_types::TextDocumentItem; +use salsa::{ParallelDatabase, Snapshot}; + +use std::sync::Arc; +use tokio::sync::RwLock; + +use crate::backend::db::LanguageServerDatabase; +use crate::capabilities::server_capabilities; + +use crate::backend::workspace::{IngotFileContext, SyncableInputFile, Workspace}; +use crate::diagnostics::get_diagnostics; + +use tracing::info; + +use tower_lsp::Client; + +impl Backend { + pub(super) async fn handle_initialized( + &mut self, + params: lsp_types::InitializeParams, + responder: tokio::sync::oneshot::Sender< + Result, + >, + ) { + info!("initializing language server!"); + + let root = params.root_uri.unwrap().to_file_path().ok().unwrap(); + + let mut workspace = self.workspace.write().await; + let _ = workspace.set_workspace_root(&mut self.db, &root); + let _ = workspace.load_std_lib(&mut self.db, &root); + let _ = workspace.sync(&mut self.db); + + let capabilities = server_capabilities(); + let initialize_result = lsp_types::InitializeResult { + capabilities, + server_info: Some(lsp_types::ServerInfo { + name: String::from("fe-language-server"), + version: Some(String::from(env!("CARGO_PKG_VERSION"))), + }), + }; + let _ = responder.send(Ok(initialize_result)); + } + + pub(super) async fn handle_shutdown( + &mut self, + responder: tokio::sync::oneshot::Sender>, + ) { + info!("shutting down language server"); + let _ = 
responder.send(Ok(())); + } + + pub(super) async fn handle_deleted( + &mut self, + params: lsp_types::FileEvent, + tx_needs_diagnostics: tokio::sync::mpsc::UnboundedSender, + ) { + let path = params.uri.to_file_path().unwrap(); + info!("file deleted: {:?}", path); + let path = path.to_str().unwrap(); + let workspace = self.workspace.clone(); + let _ = workspace + .write() + .await + .remove_input_for_file_path(&mut self.db, path); + let _ = tx_needs_diagnostics.send(path.to_string()); + } + + pub(super) async fn handle_change( + &mut self, + doc: TextDocumentItem, + tx_needs_diagnostics: tokio::sync::mpsc::UnboundedSender, + ) { + info!("change detected: {:?}", doc.uri); + let path_buf = doc.uri.to_file_path().unwrap(); + let path = path_buf.to_str().unwrap(); + let contents = Some(doc.text); + if let Some(contents) = contents { + let workspace = &mut self.workspace.write().await; + let input = workspace + .touch_input_for_file_path(&mut self.db, path) + .unwrap(); + let _ = input.sync_from_text(&mut self.db, contents); + } + let _ = tx_needs_diagnostics.send(path.to_string()); + } + + pub(super) async fn handle_diagnostics(&mut self, files_need_diagnostics: Vec) { + info!("files need diagnostics: {:?}", files_need_diagnostics); + let mut ingots_need_diagnostics = FxHashSet::default(); + for file in files_need_diagnostics { + let workspace = self.workspace.clone(); + let workspace = workspace.read().await; + let ingot = workspace.get_ingot_for_file_path(&file).unwrap(); + ingots_need_diagnostics.insert(ingot); + } + + info!("ingots need diagnostics: {:?}", ingots_need_diagnostics); + for ingot in ingots_need_diagnostics.into_iter() { + for file in ingot.files(self.db.as_input_db()) { + let file = *file; + let path = file.path(self.db.as_input_db()); + let path = lsp_types::Url::from_file_path(path).unwrap(); + let db = self.db.snapshot(); + let client = self.client.clone(); + let workspace = self.workspace.clone(); + self.workers.spawn(async move { + diagnostics_workload(client.clone(), workspace.clone(), db, path).await + }); + } + } + } + + pub(super) async fn handle_hover( + &mut self, + params: lsp_types::HoverParams, + responder: tokio::sync::oneshot::Sender< + Result, tower_lsp::jsonrpc::Error>, + >, + ) { + let db = self.db.snapshot(); + let workspace = self.workspace.clone(); + let response = match self + .workers + .spawn(hover_helper(db, workspace, params)) + .await + { + Ok(response) => response, + Err(e) => { + eprintln!("Error handling hover: {:?}", e); + Ok(None) + } + }; + let _ = responder.send(response); + } + + pub(super) async fn handle_goto_definition( + &mut self, + params: lsp_types::GotoDefinitionParams, + responder: tokio::sync::oneshot::Sender< + Result, tower_lsp::jsonrpc::Error>, + >, + ) { + let db = self.db.snapshot(); + let workspace = self.workspace.clone(); + let response = match goto_helper(db, workspace, params).await { + Ok(response) => response, + Err(e) => { + eprintln!("Error handling goto definition: {:?}", e); + None + } + }; + let _ = responder.send(Ok(response)); + } +} + +pub(super) async fn diagnostics_workload( + client: Client, + workspace: Arc>, + db: Snapshot, + url: lsp_types::Url, +) { + info!("handling diagnostics for {:?}", url); + let workspace = &workspace.read().await; + let diagnostics = get_diagnostics(&db, workspace, url.clone()); + + let client = client.clone(); + let diagnostics = diagnostics + .unwrap() + .into_iter() + .map(|(uri, diags)| async { client.publish_diagnostics(uri, diags, None).await }) + .collect::>(); + + 
futures::future::join_all(diagnostics).await; +} diff --git a/crates/language-server/src/handlers/request.rs b/crates/language-server/src/backend/helpers.rs similarity index 87% rename from crates/language-server/src/handlers/request.rs rename to crates/language-server/src/backend/helpers.rs index 483e7d928..b7b41e5c5 100644 --- a/crates/language-server/src/handlers/request.rs +++ b/crates/language-server/src/backend/helpers.rs @@ -13,21 +13,19 @@ use tokio::sync::RwLock; use tower_lsp::jsonrpc::Result; use crate::{ - db::LanguageServerDatabase, + backend::db::LanguageServerDatabase, + backend::workspace::{IngotFileContext, Workspace}, goto::{goto_enclosing_path, Cursor}, util::{to_lsp_location_from_scope, to_offset_from_position}, - workspace::{IngotFileContext, Workspace}, }; -// use tower_lsp::lsp_types::{ResponseError, Url}; -pub async fn handle_hover( +pub async fn hover_helper( db: Snapshot, workspace: Arc>, params: lsp_types::HoverParams, ) -> Result> { let workspace = workspace.read().await; info!("handling hover"); - // TODO: get more relevant information for the hover let file_path = ¶ms .text_document_position_params .text_document @@ -43,14 +41,10 @@ pub async fn handle_hover( .nth(params.text_document_position_params.position.line as usize) .unwrap(); - // let cursor: Cursor = params.text_document_position_params.position.into(); let cursor: Cursor = to_offset_from_position( params.text_document_position_params.position, file_text.as_str(), ); - // let file_path = std::path::Path::new(file_path); - - // info!("got ingot: {:?} of type {:?}", ingot, ingot.map(|ingot| ingot.kind(&mut state.db))); let ingot_info: Option = { let ingot_type = match ingot { @@ -110,7 +104,7 @@ pub async fn handle_hover( use lsp_types::{GotoDefinitionParams, GotoDefinitionResponse, Hover}; -pub async fn handle_goto_definition( +pub async fn goto_helper( db: Snapshot, workspace: Arc>, params: GotoDefinitionParams, @@ -157,13 +151,6 @@ pub async fn handle_goto_definition( .collect::>() .join("\n"); - // let _error = (!errors.is_empty()).then_some(ResponseError{ - // code: lsp_types::error_codes::SERVER_CANCELLED as i32, - // message: errors, - // data: None, - // }); - - // state.send_response(response_message)?; Ok(Some(lsp_types::GotoDefinitionResponse::Array( locations .into_iter() diff --git a/crates/language-server/src/backend/streams.rs b/crates/language-server/src/backend/streams.rs new file mode 100644 index 000000000..f9f33df0b --- /dev/null +++ b/crates/language-server/src/backend/streams.rs @@ -0,0 +1,107 @@ +use crate::backend::Backend; +use fork_stream::StreamExt as _; +use futures_batch::ChunksTimeoutStreamExt; + +use crate::globals::LANGUAGE_ID; +use crate::server::MessageReceivers; +use futures::StreamExt; +use futures_concurrency::prelude::*; +use lsp_types::TextDocumentItem; +use tokio_stream::wrappers::UnboundedReceiverStream; + +use tracing::info; + +pub async fn setup_streams(backend: &mut Backend, mut receivers: MessageReceivers) { + info!("setting up streams"); + let mut initialized_stream = receivers.initialize_stream.fuse(); + let mut shutdown_stream = receivers.shutdown_stream.fuse(); + let did_change_watched_files_stream = receivers.did_change_watched_files_stream.fork(); + + let flat_did_change_watched_files = did_change_watched_files_stream + .map(|params| futures::stream::iter(params.changes)) + .flatten() + .fork(); + + let did_change_watched_file_stream = flat_did_change_watched_files.clone().filter(|change| { + let change_type = change.typ; + Box::pin(async move { 
matches!(change_type, lsp_types::FileChangeType::CHANGED) }) + }); + + let did_create_watched_file_stream = flat_did_change_watched_files.clone().filter(|change| { + let change_type = change.typ; + Box::pin(async move { matches!(change_type, lsp_types::FileChangeType::CREATED) }) + }); + + let mut did_delete_watch_file_stream = flat_did_change_watched_files + .clone() + .filter(|change| { + let change_type = change.typ; + Box::pin(async move { matches!(change_type, lsp_types::FileChangeType::DELETED) }) + }) + .fuse(); + + let did_open_stream = (&mut receivers.did_open_stream).fuse(); + let did_change_stream = (&mut receivers.did_change_stream).fuse(); + let mut change_stream = ( + did_change_watched_file_stream.map(|change| { + let uri = change.uri; + let path = uri.to_file_path().unwrap(); + let text = std::fs::read_to_string(path).unwrap(); + TextDocumentItem { + uri: uri.clone(), + language_id: LANGUAGE_ID.to_string(), + version: 0, + text, + } + }), + did_create_watched_file_stream.map(|change| { + let uri = change.uri; + let path = uri.to_file_path().unwrap(); + let text = std::fs::read_to_string(path).unwrap(); + TextDocumentItem { + uri: uri.clone(), + language_id: LANGUAGE_ID.to_string(), + version: 0, + text, + } + }), + did_open_stream.map(|params| TextDocumentItem { + uri: params.text_document.uri, + language_id: LANGUAGE_ID.to_string(), + version: params.text_document.version, + text: params.text_document.text, + }), + did_change_stream.map(|params| TextDocumentItem { + uri: params.text_document.uri, + language_id: LANGUAGE_ID.to_string(), + version: params.text_document.version, + text: params.content_changes[0].text.clone(), + }), + ) + .merge() + .fuse(); + + let (tx_needs_diagnostics, rx_needs_diagnostics) = + tokio::sync::mpsc::unbounded_channel::(); + + let mut diagnostics_stream = UnboundedReceiverStream::from(rx_needs_diagnostics) + .chunks_timeout(500, std::time::Duration::from_millis(30)) + .fuse(); + + let mut hover_stream = (&mut receivers.hover_stream).fuse(); + let mut goto_definition_stream = (&mut receivers.goto_definition_stream).fuse(); + + info!("streams set up, looping on them now"); + loop { + tokio::select! 
{ + Some((params, responder)) = initialized_stream.next() => backend.handle_initialized(params, responder).await, + Some((_, responder)) = shutdown_stream.next() => backend.handle_shutdown(responder).await, + Some(params) = did_delete_watch_file_stream.next() => backend.handle_deleted(params, tx_needs_diagnostics.clone()).await, + Some(params) = change_stream.next() => backend.handle_change(params, tx_needs_diagnostics.clone()).await, + Some(files_need_diagnostics) = diagnostics_stream.next() => backend.handle_diagnostics(files_need_diagnostics).await, + Some((params, responder)) = hover_stream.next() => backend.handle_hover(params, responder).await, + Some((params, responder)) = goto_definition_stream.next() => backend.handle_goto_definition(params, responder).await, + } + tokio::task::yield_now().await; + } +} diff --git a/crates/language-server/src/workspace.rs b/crates/language-server/src/backend/workspace.rs similarity index 96% rename from crates/language-server/src/workspace.rs rename to crates/language-server/src/backend/workspace.rs index 2dd8c087d..ae16fe2cf 100644 --- a/crates/language-server/src/workspace.rs +++ b/crates/language-server/src/backend/workspace.rs @@ -13,7 +13,7 @@ use hir::{hir_def::TopLevelMod, lower::map_file_to_mod, LowerHirDb}; use patricia_tree::StringPatriciaMap; use tracing::info; -use crate::db::LanguageServerDatabase; +use super::db::LanguageServerDatabase; use rust_embed::RustEmbed; @@ -521,7 +521,7 @@ impl SyncableIngotFileContext for Workspace { #[cfg(test)] mod tests { - use crate::workspace::{ + use crate::backend::workspace::{ get_containing_ingot_mut, IngotFileContext, Workspace, FE_CONFIG_SUFFIX, }; use std::path::PathBuf; @@ -530,7 +530,7 @@ mod tests { #[test] fn test_standalone_context() { - let mut db = crate::db::LanguageServerDatabase::default(); + let mut db = crate::backend::db::LanguageServerDatabase::default(); let file_path = "tests/data/ingot1/src/main.fe"; let ctx = &mut StandaloneIngotContext::new(); @@ -550,7 +550,7 @@ mod tests { #[test] fn test_workspace_standalone_ingot() { let mut workspace = Workspace::default(); - let mut db = crate::db::LanguageServerDatabase::default(); + let mut db = crate::backend::db::LanguageServerDatabase::default(); let file_path = "tests/data/ingot1/src/main.fe"; let file = workspace.touch_input_for_file_path(&mut db, file_path); assert!(file.is_some()); @@ -563,7 +563,7 @@ mod tests { let _ingot_context_ingot = { let ingot_context = workspace.ingot_context_from_config_path( - &crate::db::LanguageServerDatabase::default(), + &crate::backend::db::LanguageServerDatabase::default(), config_path, ); @@ -584,7 +584,7 @@ mod tests { assert!(containing_ingot.as_deref().is_some()); let ingot = workspace.touch_ingot_for_file_path( - &mut crate::db::LanguageServerDatabase::default(), + &mut crate::backend::db::LanguageServerDatabase::default(), file_path, ); assert!(ingot.is_some()); @@ -594,7 +594,7 @@ mod tests { fn test_workspace_local_ingot() { let config_path = "tests/data/ingot1/fe.toml"; let mut workspace = Workspace::default(); - let mut db = crate::db::LanguageServerDatabase::default(); + let mut db = crate::backend::db::LanguageServerDatabase::default(); let ingot_context_ingot = { let ingot_context = workspace.ingot_context_from_config_path(&db, config_path); @@ -628,7 +628,7 @@ mod tests { let _ingot_config_path = &ingot_base_dir.join("fe.toml"); let mut workspace = Workspace::default(); - let mut db = crate::db::LanguageServerDatabase::default(); + let mut db = 
crate::backend::db::LanguageServerDatabase::default(); let _ = workspace.set_workspace_root(&mut db, &ingot_base_dir); // panic!("wtf? {:?}", ingot_base_dir); @@ -653,7 +653,7 @@ mod tests { ); let mut workspace = Workspace::default(); - let mut db = crate::db::LanguageServerDatabase::default(); + let mut db = crate::backend::db::LanguageServerDatabase::default(); workspace.sync_local_ingots(&mut db, &path); @@ -692,7 +692,7 @@ mod tests { ); let mut workspace = Workspace::default(); - let mut db = crate::db::LanguageServerDatabase::default(); + let mut db = crate::backend::db::LanguageServerDatabase::default(); workspace.sync_local_ingots(&mut db, &path); @@ -725,7 +725,7 @@ mod tests { let dangling_path = format!("{crate_dir}/test_files/messy/dangling.fe"); let mut workspace = Workspace::default(); - let mut db = crate::db::LanguageServerDatabase::default(); + let mut db = crate::backend::db::LanguageServerDatabase::default(); workspace.sync_local_ingots(&mut db, &messy_workspace_path); let dangling_file = workspace diff --git a/crates/language-server/src/diagnostics.rs b/crates/language-server/src/diagnostics.rs index 524fed1ca..23c05205c 100644 --- a/crates/language-server/src/diagnostics.rs +++ b/crates/language-server/src/diagnostics.rs @@ -14,9 +14,9 @@ use hir::{diagnostics::DiagnosticVoucher, LowerHirDb}; use salsa::Snapshot; use crate::{ - db::{LanguageServerDatabase, LanguageServerDb}, + backend::db::{LanguageServerDatabase, LanguageServerDb}, + backend::workspace::{IngotFileContext, Workspace}, util::diag_to_lsp, - workspace::{IngotFileContext, Workspace}, }; pub trait ToCsDiag { diff --git a/crates/language-server/src/goto.rs b/crates/language-server/src/goto.rs index bdb09f902..b6e8ed2a1 100644 --- a/crates/language-server/src/goto.rs +++ b/crates/language-server/src/goto.rs @@ -7,7 +7,7 @@ use hir::{ use hir_analysis::{name_resolution::EarlyResolvedPath, HirAnalysisDb}; use salsa::Snapshot; -use crate::db::{LanguageServerDatabase, LanguageServerDb}; +use crate::backend::db::{LanguageServerDatabase, LanguageServerDb}; use common::diagnostics::Span; use hir::span::LazySpan; @@ -93,7 +93,7 @@ pub fn goto_enclosing_path( #[cfg(test)] mod tests { - use crate::workspace::{IngotFileContext, Workspace}; + use crate::backend::workspace::{IngotFileContext, Workspace}; use super::*; use common::input::IngotKind; diff --git a/crates/language-server/src/main.rs b/crates/language-server/src/main.rs index e07c86cec..42e81ca90 100644 --- a/crates/language-server/src/main.rs +++ b/crates/language-server/src/main.rs @@ -1,40 +1,33 @@ mod backend; mod capabilities; -mod db; mod diagnostics; mod globals; mod goto; -mod language_server; mod logger; -mod stream_buffer_until; +mod server; mod util; -mod workspace; +use backend::db::Jar; use backend::Backend; -use db::Jar; use tracing::Level; -use language_server::Server; +use server::Server; use crate::logger::{handle_log_messages, setup_logger}; -mod handlers { - pub mod request; -} - #[tokio_macros::main] async fn main() { let stdin = tokio::io::stdin(); let stdout = tokio::io::stdout(); let rx = setup_logger(Level::INFO).unwrap(); - let (message_senders, message_receivers) = language_server::setup_message_channels(); + let (message_senders, message_receivers) = server::setup_message_channels(); let (service, socket) = tower_lsp::LspService::build(|client| Server::new(client, message_senders)).finish(); let server = service.inner(); let client = server.client.clone(); - let backend = Backend::new(client, message_receivers); + let mut backend = 
Backend::new(client); // separate runtime for the backend // let backend_runtime = tokio::runtime::Builder::new_multi_thread() @@ -52,6 +45,6 @@ async fn main() { _ = tower_lsp::Server::new(stdin, stdout, socket) .serve(service) => {} // backend - _ = backend.handle_streams() => {} + _ = backend::streams::setup_streams(&mut backend, message_receivers) => {} } } diff --git a/crates/language-server/src/language_server.rs b/crates/language-server/src/server.rs similarity index 100% rename from crates/language-server/src/language_server.rs rename to crates/language-server/src/server.rs diff --git a/crates/language-server/src/stream_buffer_until.rs b/crates/language-server/src/stream_buffer_until.rs deleted file mode 100644 index 4503b9edb..000000000 --- a/crates/language-server/src/stream_buffer_until.rs +++ /dev/null @@ -1,189 +0,0 @@ -use futures::stream::Stream; -use futures::stream::{iter, Iter}; -use std::{ - collections::VecDeque, - fmt::Debug, - pin::{pin, Pin}, - task::{Context, Poll}, -}; -use tokio_stream::wrappers::IntervalStream; - -use pin_project::pin_project; - -#[pin_project(project_replace)] -pub struct BufferUntilStream { - #[pin] - input_stream: I, - #[pin] - trigger_stream: T, - pending_buffer: VecDeque, - ready_buffer: VecDeque, -} - -impl BufferUntilStream -where - I: Stream, - T: Stream, -{ - pub fn new(input_stream: I, trigger_stream: T) -> Self { - BufferUntilStream { - input_stream, - trigger_stream, - pending_buffer: VecDeque::new(), - ready_buffer: VecDeque::new(), - } - } -} -impl Stream for BufferUntilStream -where - I: Stream, - T: Stream, -{ - type Item = Iter>; - - fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll> { - let mut this = self.project(); - let ready_buffer: &mut VecDeque = this.ready_buffer; - let pending_buffer: &mut VecDeque = this.pending_buffer; - - let mut finished = false; - - // Check if the input_stream has a new value - while let Poll::Ready(Some(item)) = this.input_stream.as_mut().poll_next(cx) { - // info!("Received item from input_stream: {:?}", item); - pending_buffer.push_back(item); - } - - if let Poll::Ready(None) = this.input_stream.as_mut().poll_next(cx) { - // info!("input_stream finished"); - finished = true; - } - - match this.trigger_stream.as_mut().poll_next(cx) { - Poll::Ready(Some(_)) => { - // info!("Triggered, moving pending_buffer to ready_buffer"); - ready_buffer.append(pending_buffer); - } - Poll::Ready(None) => { - ready_buffer.append(pending_buffer); - } - _ => { - finished = true; - } - } - - // Send any ready buffer or finish up - if !ready_buffer.is_empty() { - // info!("Returning items stream from ready_buffer"); - let current_ready_buffer = std::mem::take(this.ready_buffer); - Poll::Ready(Some(iter(current_ready_buffer))) - } else if finished { - return Poll::Ready(None); - } else { - Poll::Pending - } - } -} - -pub trait BufferUntilStreamExt: Sized -where - S: Stream, -{ - fn buffer_until(self, trigger: T) -> BufferUntilStream - where - T: Stream; - fn debounce_buffer_until( - self, - duration: std::time::Duration, - ) -> BufferUntilStream; -} - -impl BufferUntilStreamExt for S -where - S: Stream, -{ - fn buffer_until(self, trigger: T) -> BufferUntilStream - where - T: Stream, - { - BufferUntilStream::new(self, trigger) - } - - fn debounce_buffer_until( - self, - duration: std::time::Duration, - ) -> BufferUntilStream { - let trigger = IntervalStream::new(tokio::time::interval(duration)); - BufferUntilStream::new(self, trigger) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use 
futures::{stream::StreamExt, FutureExt}; - use tokio_stream::wrappers::{BroadcastStream, UnboundedReceiverStream}; - - #[tokio::test] - async fn test_accumulating_stream() { - println!("running test_accumulating_stream"); - let (trigger_sender, trigger_receiver) = tokio::sync::broadcast::channel(100); - let (input_sender, input_receiver) = tokio::sync::mpsc::unbounded_channel(); - - let mut output = vec![]; - - let mut accumulating_stream = BufferUntilStream::new( - UnboundedReceiverStream::from(input_receiver), - BroadcastStream::from(trigger_receiver), - ) - .flatten(); - - input_sender.send(1).unwrap(); - input_sender.send(2).unwrap(); - input_sender.send(3).unwrap(); - - while let Some(item) = accumulating_stream.next().now_or_never().flatten() { - output.push(item); - } - assert_eq!(output, Vec::::new()); - - trigger_sender.send(()).unwrap(); - - while let Some(item) = accumulating_stream.next().now_or_never().flatten() { - output.push(item); - } - assert_eq!(output, vec![1, 2, 3]); - - input_sender.send(4).unwrap(); - input_sender.send(5).unwrap(); - input_sender.send(6).unwrap(); - while let Some(item) = accumulating_stream.next().now_or_never().flatten() { - output.push(item); - } - - assert_eq!(output, vec![1, 2, 3]); - trigger_sender.send(()).unwrap(); - while let Some(item) = accumulating_stream.next().now_or_never().flatten() { - output.push(item); - } - - assert_eq!(output, vec![1, 2, 3, 4, 5, 6]); - input_sender.send(7).unwrap(); - input_sender.send(8).unwrap(); - input_sender.send(9).unwrap(); - input_sender.send(10).unwrap(); - while let Some(item) = accumulating_stream.next().now_or_never().flatten() { - output.push(item); - } - assert_eq!(output, vec![1, 2, 3, 4, 5, 6]); - - drop(trigger_sender); - - while let Some(item) = accumulating_stream.next().now_or_never().flatten() { - output.push(item); - } - assert_eq!(output, vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10]); - } - - // TODO: write tests for end of input stream -} From 9e296cef5884bfff4d5ecd6213babc7d18ef2af6 Mon Sep 17 00:00:00 2001 From: Micah Date: Wed, 27 Mar 2024 23:51:28 -0500 Subject: [PATCH 55/66] more language server reorganization --- crates/language-server/src/backend/db.rs | 4 +- .../src/{backend.rs => backend/mod.rs} | 11 +-- crates/language-server/src/config.rs | 0 .../src/{ => functionality}/capabilities.rs | 0 .../src/{ => functionality}/diagnostics.rs | 25 +++++- .../src/{ => functionality}/goto.rs | 77 ++++++++++++++++++- .../{backend => functionality}/handlers.rs | 38 ++------- .../helpers.rs => functionality/hover.rs} | 70 ++--------------- .../language-server/src/functionality/mod.rs | 6 ++ .../src/{backend => functionality}/streams.rs | 0 crates/language-server/src/main.rs | 6 +- 11 files changed, 125 insertions(+), 112 deletions(-) rename crates/language-server/src/{backend.rs => backend/mod.rs} (78%) delete mode 100644 crates/language-server/src/config.rs rename crates/language-server/src/{ => functionality}/capabilities.rs (100%) rename crates/language-server/src/{ => functionality}/diagnostics.rs (87%) rename crates/language-server/src/{ => functionality}/goto.rs (81%) rename crates/language-server/src/{backend => functionality}/handlers.rs (85%) rename crates/language-server/src/{backend/helpers.rs => functionality/hover.rs} (60%) create mode 100644 crates/language-server/src/functionality/mod.rs rename crates/language-server/src/{backend => functionality}/streams.rs (100%) diff --git a/crates/language-server/src/backend/db.rs b/crates/language-server/src/backend/db.rs index e9b0760e0..057ba2139 
100644 --- a/crates/language-server/src/backend/db.rs +++ b/crates/language-server/src/backend/db.rs @@ -12,10 +12,10 @@ use hir_analysis::{ }; use salsa::{ParallelDatabase, Snapshot}; -use crate::goto::Cursor; +use crate::functionality::goto::Cursor; #[salsa::jar(db = LanguageServerDb)] -pub struct Jar(crate::diagnostics::file_line_starts); +pub struct Jar(crate::functionality::diagnostics::file_line_starts); pub trait LanguageServerDb: salsa::DbWithJar + HirAnalysisDb + HirDb + LowerHirDb + SpannedHirDb + InputDb diff --git a/crates/language-server/src/backend.rs b/crates/language-server/src/backend/mod.rs similarity index 78% rename from crates/language-server/src/backend.rs rename to crates/language-server/src/backend/mod.rs index b4a9ce449..89f339ff6 100644 --- a/crates/language-server/src/backend.rs +++ b/crates/language-server/src/backend/mod.rs @@ -1,7 +1,4 @@ pub(crate) mod db; -mod handlers; -mod helpers; -pub(crate) mod streams; pub(crate) mod workspace; use db::LanguageServerDatabase; use std::sync::Arc; @@ -11,10 +8,10 @@ use workspace::Workspace; use tower_lsp::Client; pub struct Backend { - client: Client, - db: LanguageServerDatabase, - workspace: Arc>, - workers: tokio::runtime::Runtime, + pub(super) client: Client, + pub(super) db: LanguageServerDatabase, + pub(super) workspace: Arc>, + pub(super) workers: tokio::runtime::Runtime, } impl Backend { diff --git a/crates/language-server/src/config.rs b/crates/language-server/src/config.rs deleted file mode 100644 index e69de29bb..000000000 diff --git a/crates/language-server/src/capabilities.rs b/crates/language-server/src/functionality/capabilities.rs similarity index 100% rename from crates/language-server/src/capabilities.rs rename to crates/language-server/src/functionality/capabilities.rs diff --git a/crates/language-server/src/diagnostics.rs b/crates/language-server/src/functionality/diagnostics.rs similarity index 87% rename from crates/language-server/src/diagnostics.rs rename to crates/language-server/src/functionality/diagnostics.rs index 23c05205c..5ccaaa1ca 100644 --- a/crates/language-server/src/diagnostics.rs +++ b/crates/language-server/src/functionality/diagnostics.rs @@ -1,4 +1,4 @@ -use std::ops::Range; +use std::{ops::Range, sync::Arc}; use camino::Utf8Path; use clap::Error; @@ -12,6 +12,9 @@ use common::{ use fxhash::FxHashMap; use hir::{diagnostics::DiagnosticVoucher, LowerHirDb}; use salsa::Snapshot; +use tokio::sync::RwLock; +use tower_lsp::Client; +use tracing::info; use crate::{ backend::db::{LanguageServerDatabase, LanguageServerDb}, @@ -165,3 +168,23 @@ pub fn get_diagnostics( Ok(result) } + +pub(super) async fn diagnostics_workload( + client: Client, + workspace: Arc>, + db: Snapshot, + url: lsp_types::Url, +) { + info!("handling diagnostics for {:?}", url); + let workspace = &workspace.read().await; + let diagnostics = get_diagnostics(&db, workspace, url.clone()); + + let client = client.clone(); + let diagnostics = diagnostics + .unwrap() + .into_iter() + .map(|(uri, diags)| async { client.publish_diagnostics(uri, diags, None).await }) + .collect::>(); + + futures::future::join_all(diagnostics).await; +} diff --git a/crates/language-server/src/goto.rs b/crates/language-server/src/functionality/goto.rs similarity index 81% rename from crates/language-server/src/goto.rs rename to crates/language-server/src/functionality/goto.rs index b6e8ed2a1..d6d748197 100644 --- a/crates/language-server/src/goto.rs +++ b/crates/language-server/src/functionality/goto.rs @@ -2,12 +2,18 @@ use fxhash::FxHashMap; 
use hir::{ hir_def::{scope_graph::ScopeId, ItemKind, PathId, TopLevelMod}, visitor::{prelude::LazyPathSpan, Visitor, VisitorCtxt}, - HirDb, + HirDb, LowerHirDb, SpannedHirDb, +}; +use hir_analysis::{ + name_resolution::{EarlyResolvedPath, NameRes}, + HirAnalysisDb, }; -use hir_analysis::{name_resolution::EarlyResolvedPath, HirAnalysisDb}; use salsa::Snapshot; -use crate::backend::db::{LanguageServerDatabase, LanguageServerDb}; +use crate::{ + backend::db::{LanguageServerDatabase, LanguageServerDb}, + util::{to_lsp_location_from_scope, to_offset_from_position}, +}; use common::diagnostics::Span; use hir::span::LazySpan; @@ -91,6 +97,71 @@ pub fn goto_enclosing_path( Some(resolved_path) } +use lsp_types::{GotoDefinitionParams, GotoDefinitionResponse}; + +use crate::backend::workspace::{IngotFileContext, Workspace}; +// use crate::diagnostics::get_diagnostics; + +use std::sync::Arc; +use tokio::sync::RwLock; + +use tower_lsp::jsonrpc::Result; + +pub async fn goto_helper( + db: Snapshot, + workspace: Arc>, + params: GotoDefinitionParams, +) -> Result> { + let workspace = workspace.read().await; + // Convert the position to an offset in the file + let params = params.text_document_position_params; + let file_text = std::fs::read_to_string(params.text_document.uri.path()).ok(); + let cursor: Cursor = to_offset_from_position(params.position, file_text.unwrap().as_str()); + + // Get the module and the goto info + let file_path = params.text_document.uri.path(); + let top_mod = workspace + .top_mod_from_file_path(db.as_lower_hir_db(), file_path) + .unwrap(); + let goto_info = goto_enclosing_path(&db, top_mod, cursor); + + // Convert the goto info to a Location + let scopes = match goto_info { + Some(EarlyResolvedPath::Full(bucket)) => { + bucket.iter().map(NameRes::scope).collect::>() + } + Some(EarlyResolvedPath::Partial { + res, + unresolved_from: _, + }) => { + vec![res.scope()] + } + None => return Ok(None), + }; + + let locations = scopes + .iter() + .filter_map(|scope| *scope) + .map(|scope| to_lsp_location_from_scope(scope, db.as_spanned_hir_db())) + .collect::>(); + + let _errors = scopes + .iter() + .filter_map(|scope| *scope) + .map(|scope| to_lsp_location_from_scope(scope, db.as_spanned_hir_db())) + .filter_map(std::result::Result::err) + .map(|err| err.to_string()) + .collect::>() + .join("\n"); + + Ok(Some(lsp_types::GotoDefinitionResponse::Array( + locations + .into_iter() + .filter_map(std::result::Result::ok) + .collect(), + ))) +} + #[cfg(test)] mod tests { use crate::backend::workspace::{IngotFileContext, Workspace}; diff --git a/crates/language-server/src/backend/handlers.rs b/crates/language-server/src/functionality/handlers.rs similarity index 85% rename from crates/language-server/src/backend/handlers.rs rename to crates/language-server/src/functionality/handlers.rs index ac5b94c47..2ea1856ac 100644 --- a/crates/language-server/src/backend/handlers.rs +++ b/crates/language-server/src/functionality/handlers.rs @@ -1,27 +1,23 @@ -use super::helpers::{goto_helper, hover_helper}; +use super::goto::goto_helper; +use super::hover::hover_helper; use crate::backend::Backend; use crate::backend::workspace::SyncableIngotFileContext; +use crate::functionality::diagnostics::diagnostics_workload; use common::InputDb; use fxhash::FxHashSet; use lsp_types::TextDocumentItem; -use salsa::{ParallelDatabase, Snapshot}; +use salsa::ParallelDatabase; -use std::sync::Arc; -use tokio::sync::RwLock; +use super::capabilities::server_capabilities; -use crate::backend::db::LanguageServerDatabase; -use 
crate::capabilities::server_capabilities; - -use crate::backend::workspace::{IngotFileContext, SyncableInputFile, Workspace}; -use crate::diagnostics::get_diagnostics; +use crate::backend::workspace::{IngotFileContext, SyncableInputFile}; +// use crate::diagnostics::get_diagnostics; use tracing::info; -use tower_lsp::Client; - impl Backend { pub(super) async fn handle_initialized( &mut self, @@ -161,23 +157,3 @@ impl Backend { let _ = responder.send(Ok(response)); } } - -pub(super) async fn diagnostics_workload( - client: Client, - workspace: Arc>, - db: Snapshot, - url: lsp_types::Url, -) { - info!("handling diagnostics for {:?}", url); - let workspace = &workspace.read().await; - let diagnostics = get_diagnostics(&db, workspace, url.clone()); - - let client = client.clone(); - let diagnostics = diagnostics - .unwrap() - .into_iter() - .map(|(uri, diags)| async { client.publish_diagnostics(uri, diags, None).await }) - .collect::>(); - - futures::future::join_all(diagnostics).await; -} diff --git a/crates/language-server/src/backend/helpers.rs b/crates/language-server/src/functionality/hover.rs similarity index 60% rename from crates/language-server/src/backend/helpers.rs rename to crates/language-server/src/functionality/hover.rs index b7b41e5c5..6df2b0847 100644 --- a/crates/language-server/src/backend/helpers.rs +++ b/crates/language-server/src/functionality/hover.rs @@ -1,11 +1,9 @@ use std::sync::Arc; use common::{input::IngotKind, InputDb}; -use hir::{LowerHirDb, SpannedHirDb}; -use hir_analysis::{ - name_resolution::{EarlyResolvedPath, NameRes}, - HirAnalysisDb, -}; +use hir::LowerHirDb; +use hir_analysis::{name_resolution::EarlyResolvedPath, HirAnalysisDb}; +use lsp_types::Hover; use tracing::info; use salsa::Snapshot; @@ -15,10 +13,11 @@ use tower_lsp::jsonrpc::Result; use crate::{ backend::db::LanguageServerDatabase, backend::workspace::{IngotFileContext, Workspace}, - goto::{goto_enclosing_path, Cursor}, - util::{to_lsp_location_from_scope, to_offset_from_position}, + util::to_offset_from_position, }; +use super::goto::{goto_enclosing_path, Cursor}; + pub async fn hover_helper( db: Snapshot, workspace: Arc>, @@ -101,60 +100,3 @@ pub async fn hover_helper( }; Ok(Some(result)) } - -use lsp_types::{GotoDefinitionParams, GotoDefinitionResponse, Hover}; - -pub async fn goto_helper( - db: Snapshot, - workspace: Arc>, - params: GotoDefinitionParams, -) -> Result> { - let workspace = workspace.read().await; - // Convert the position to an offset in the file - let params = params.text_document_position_params; - let file_text = std::fs::read_to_string(params.text_document.uri.path()).ok(); - let cursor: Cursor = to_offset_from_position(params.position, file_text.unwrap().as_str()); - - // Get the module and the goto info - let file_path = params.text_document.uri.path(); - let top_mod = workspace - .top_mod_from_file_path(db.as_lower_hir_db(), file_path) - .unwrap(); - let goto_info = goto_enclosing_path(&db, top_mod, cursor); - - // Convert the goto info to a Location - let scopes = match goto_info { - Some(EarlyResolvedPath::Full(bucket)) => { - bucket.iter().map(NameRes::scope).collect::>() - } - Some(EarlyResolvedPath::Partial { - res, - unresolved_from: _, - }) => { - vec![res.scope()] - } - None => return Ok(None), - }; - - let locations = scopes - .iter() - .filter_map(|scope| *scope) - .map(|scope| to_lsp_location_from_scope(scope, db.as_spanned_hir_db())) - .collect::>(); - - let _errors = scopes - .iter() - .filter_map(|scope| *scope) - .map(|scope| 
to_lsp_location_from_scope(scope, db.as_spanned_hir_db())) - .filter_map(std::result::Result::err) - .map(|err| err.to_string()) - .collect::>() - .join("\n"); - - Ok(Some(lsp_types::GotoDefinitionResponse::Array( - locations - .into_iter() - .filter_map(std::result::Result::ok) - .collect(), - ))) -} diff --git a/crates/language-server/src/functionality/mod.rs b/crates/language-server/src/functionality/mod.rs new file mode 100644 index 000000000..81e4e4c36 --- /dev/null +++ b/crates/language-server/src/functionality/mod.rs @@ -0,0 +1,6 @@ +mod capabilities; +pub(super) mod diagnostics; +pub(super) mod goto; +pub(super) mod handlers; +pub(super) mod hover; +pub(crate) mod streams; diff --git a/crates/language-server/src/backend/streams.rs b/crates/language-server/src/functionality/streams.rs similarity index 100% rename from crates/language-server/src/backend/streams.rs rename to crates/language-server/src/functionality/streams.rs diff --git a/crates/language-server/src/main.rs b/crates/language-server/src/main.rs index 42e81ca90..a08642148 100644 --- a/crates/language-server/src/main.rs +++ b/crates/language-server/src/main.rs @@ -1,8 +1,6 @@ mod backend; -mod capabilities; -mod diagnostics; +mod functionality; mod globals; -mod goto; mod logger; mod server; mod util; @@ -45,6 +43,6 @@ async fn main() { _ = tower_lsp::Server::new(stdin, stdout, socket) .serve(service) => {} // backend - _ = backend::streams::setup_streams(&mut backend, message_receivers) => {} + _ = functionality::streams::setup_streams(&mut backend, message_receivers) => {} } } From 3fd2feaef97affbb971297194ab951ea848bfcb6 Mon Sep 17 00:00:00 2001 From: Micah Date: Thu, 28 Mar 2024 02:58:50 -0500 Subject: [PATCH 56/66] remove unused dependencies from language server --- Cargo.lock | 139 +----------------------------- crates/language-server/Cargo.toml | 12 --- 2 files changed, 1 insertion(+), 150 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index ef2f1757d..b2b0a07ab 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -745,16 +745,6 @@ dependencies = [ "parking_lot_core 0.9.7", ] -[[package]] -name = "debounced" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3d8b0346b9fa0aa01a3fa4bcce48d62f8738e9c2956e92f275bbf6cf9d6fab5" -dependencies = [ - "futures-timer", - "futures-util", -] - [[package]] name = "der" version = "0.7.4" @@ -765,15 +755,6 @@ dependencies = [ "zeroize", ] -[[package]] -name = "deranged" -version = "0.3.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b42b6fa04a440b495c8b04d0e71b707c585f83cb9cb28cf8cd0d976c315e31b4" -dependencies = [ - "powerfmt", -] - [[package]] name = "derive_more" version = "0.99.17" @@ -1218,7 +1199,7 @@ dependencies = [ "serde_json", "smol_str", "toml", - "vfs 0.5.2", + "vfs", ] [[package]] @@ -1287,43 +1268,31 @@ dependencies = [ "clap 4.3.12", "codespan-reporting", "console-subscriber", - "debounced", "dir-test", - "fe-analyzer", "fe-common2", "fe-compiler-test-utils", - "fe-driver2", "fe-hir", "fe-hir-analysis", "fe-language-server-macros", - "fe-macros", "fork_stream", "futures", "futures-batch", "futures-concurrency", "fxhash", "glob", - "indexmap 1.9.3", - "log", "lsp-types", - "merge-streams", "patricia_tree", - "pin-project", "rowan", "rust-embed", "salsa-2022", - "serde", "serde_json", - "stream-operators", "tokio", "tokio-macros", "tokio-stream", "tower-lsp", "tracing", - "tracing-appender", "tracing-subscriber", "url", - "vfs 0.12.0", ] [[package]] @@ -1446,18 +1415,6 @@ 
dependencies = [ "subtle", ] -[[package]] -name = "filetime" -version = "0.2.23" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ee447700ac8aa0b2f2bd7bc4462ad686ba06baa6727ac149a2d6277f0d240fd" -dependencies = [ - "cfg-if 1.0.0", - "libc", - "redox_syscall 0.4.1", - "windows-sys 0.52.0", -] - [[package]] name = "fixed-hash" version = "0.8.0" @@ -2232,16 +2189,6 @@ dependencies = [ "autocfg", ] -[[package]] -name = "merge-streams" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f84f6452969abd246e7ac1fe4fe75906c76e8ec88d898df9aef37e0f3b6a7c2" -dependencies = [ - "futures-core", - "pin-project", -] - [[package]] name = "mime" version = "0.3.17" @@ -2328,12 +2275,6 @@ dependencies = [ "num-traits", ] -[[package]] -name = "num-conv" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9" - [[package]] name = "num-integer" version = "0.1.45" @@ -2595,12 +2536,6 @@ dependencies = [ "plotters-backend", ] -[[package]] -name = "powerfmt" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" - [[package]] name = "ppv-lite86" version = "0.2.17" @@ -2823,15 +2758,6 @@ dependencies = [ "bitflags", ] -[[package]] -name = "redox_syscall" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa" -dependencies = [ - "bitflags", -] - [[package]] name = "regex" version = "1.10.2" @@ -3458,17 +3384,6 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" -[[package]] -name = "stream-operators" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "903e4d7cdada44bf788a0949ae2864bd4ae959deed241e57037e4efe01f7dc1b" -dependencies = [ - "pin-project-lite", - "tokio", - "tokio-stream", -] - [[package]] name = "strsim" version = "0.10.0" @@ -3632,37 +3547,6 @@ dependencies = [ "once_cell", ] -[[package]] -name = "time" -version = "0.3.34" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8248b6521bb14bc45b4067159b9b6ad792e2d6d754d6c41fb50e29fefe38749" -dependencies = [ - "deranged", - "itoa", - "num-conv", - "powerfmt", - "serde", - "time-core", - "time-macros", -] - -[[package]] -name = "time-core" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" - -[[package]] -name = "time-macros" -version = "0.2.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ba3a3ef41e6672a2f0f001392bb5dcd3ff0a9992d618ca761a11c3121547774" -dependencies = [ - "num-conv", - "time-core", -] - [[package]] name = "tiny-keccak" version = "2.0.2" @@ -3894,18 +3778,6 @@ dependencies = [ "tracing-core", ] -[[package]] -name = "tracing-appender" -version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3566e8ce28cc0a3fe42519fc80e6b4c943cc4c8cef275620eb8dac2d3d4e06cf" -dependencies = [ - "crossbeam-channel", - "thiserror", - "time", - "tracing-subscriber", -] - [[package]] name = "tracing-attributes" 
version = "0.1.27" @@ -4083,15 +3955,6 @@ dependencies = [ "thiserror", ] -[[package]] -name = "vfs" -version = "0.12.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "654cd097e182a71dbf899178e6b5662c2157dd0b8afd5975de18008f6fc173d1" -dependencies = [ - "filetime", -] - [[package]] name = "walkdir" version = "2.3.3" diff --git a/crates/language-server/Cargo.toml b/crates/language-server/Cargo.toml index 0b5260b54..e591ca323 100644 --- a/crates/language-server/Cargo.toml +++ b/crates/language-server/Cargo.toml @@ -14,23 +14,17 @@ salsa = { git = "https://github.com/salsa-rs/salsa", package = "salsa-2022" } codespan-reporting = "0.11" hir = { path = "../hir", package = "fe-hir" } language-server-macros = { path = "../language-server-macros", package = "fe-language-server-macros" } -macros = { path = "../macros", package = "fe-macros" } hir-analysis = { path = "../hir-analysis", package = "fe-hir-analysis" } camino = "1.1.4" clap = { version = "4.3", features = ["derive"] } -fe-analyzer = {path = "../analyzer", version = "^0.23.0"} -driver = { path = "../driver2", package = "fe-driver2" } common = { path = "../common2", package = "fe-common2" } anyhow = "1.0.71" lsp-types = "0.94.0" -serde = "1.0.162" serde_json = "1.0.96" -indexmap = "1.6.2" rowan = "0.15.10" fxhash = "0.2.1" dir-test = "0.1" fe-compiler-test-utils = { path = "../test-utils" } -log = "0.4" patricia_tree = "0.6.2" glob = "0.3.1" url = "2.4.1" @@ -40,15 +34,9 @@ tokio-macros = "2.2.0" futures = "0.3.28" tokio-stream = { version = "0.1.14", features = ["sync"] } fork_stream = "0.1.0" -debounced = "0.1.0" -stream-operators = "0.1.1" -pin-project = "1.1.5" -merge-streams = "0.1.2" futures-concurrency = "7.5.0" console-subscriber = "0.2.0" -vfs = "0.12.0" rust-embed = "8.3.0" futures-batch = "0.6.1" tracing = "0.1.40" tracing-subscriber = "0.3.18" -tracing-appender = "0.2.3" From 5f00e7657788fda1fd463d898e1e43e3e008606e Mon Sep 17 00:00:00 2001 From: Micah Date: Thu, 28 Mar 2024 01:14:03 -0500 Subject: [PATCH 57/66] improved go-to and hover functionality --- crates/language-server/src/backend/db.rs | 36 +- .../language-server/src/functionality/goto.rs | 448 ++++++++++-------- .../src/functionality/handlers.rs | 24 +- .../src/functionality/hover.rs | 84 ++-- .../src/functionality/item_info.rs | 77 +++ .../language-server/src/functionality/mod.rs | 1 + .../test_files/docstrings/src/lib.fe | 8 - .../test_files/docstrings/src/stuff.fe | 9 - crates/language-server/test_files/goto.snap | 38 +- .../{docstrings => hoverable}/fe.toml | 0 .../test_files/hoverable/src/lib.fe | 31 ++ .../test_files/hoverable/src/stuff.fe | 17 + .../test_files/single_ingot/src/foo.fe | 4 +- .../test_files/single_ingot/src/lib.fe | 29 +- .../test_files/single_ingot/src/lib.snap | 66 +-- 15 files changed, 494 insertions(+), 378 deletions(-) create mode 100644 crates/language-server/src/functionality/item_info.rs delete mode 100644 crates/language-server/test_files/docstrings/src/lib.fe delete mode 100644 crates/language-server/test_files/docstrings/src/stuff.fe rename crates/language-server/test_files/{docstrings => hoverable}/fe.toml (100%) create mode 100644 crates/language-server/test_files/hoverable/src/lib.fe create mode 100644 crates/language-server/test_files/hoverable/src/stuff.fe diff --git a/crates/language-server/src/backend/db.rs b/crates/language-server/src/backend/db.rs index 057ba2139..d4f3eb311 100644 --- a/crates/language-server/src/backend/db.rs +++ b/crates/language-server/src/backend/db.rs @@ 
-1,9 +1,6 @@ use common::{diagnostics::CompleteDiagnostic, InputDb}; use hir::{ - analysis_pass::AnalysisPassManager, - diagnostics::DiagnosticVoucher, - hir_def::{ItemKind, TopLevelMod}, - span::{DynLazySpan, LazySpan}, + analysis_pass::AnalysisPassManager, diagnostics::DiagnosticVoucher, hir_def::TopLevelMod, HirDb, LowerHirDb, ParsingPass, SpannedHirDb, }; use hir_analysis::{ @@ -12,8 +9,6 @@ use hir_analysis::{ }; use salsa::{ParallelDatabase, Snapshot}; -use crate::functionality::goto::Cursor; - #[salsa::jar(db = LanguageServerDb)] pub struct Jar(crate::functionality::diagnostics::file_line_starts); @@ -45,32 +40,6 @@ impl LanguageServerDatabase { pass_manager.run_on_module(top_mod) } - pub fn find_enclosing_item(&self, top_mod: TopLevelMod, cursor: Cursor) -> Option { - let items = top_mod - .scope_graph(self.as_hir_db()) - .items_dfs(self.as_hir_db()); - - let mut smallest_enclosing_item = None; - let mut smallest_range_size = None; - - for item in items { - let lazy_item_span = DynLazySpan::from(item.lazy_span()); - let item_span = lazy_item_span - .resolve(SpannedHirDb::as_spanned_hir_db(self)) - .unwrap(); - - if item_span.range.contains(cursor) { - let range_size = item_span.range.end() - item_span.range.start(); - if smallest_range_size.is_none() || range_size < smallest_range_size.unwrap() { - smallest_enclosing_item = Some(item); - smallest_range_size = Some(range_size); - } - } - } - - smallest_enclosing_item - } - pub fn finalize_diags(&self, diags: &[Box]) -> Vec { let mut diags: Vec<_> = diags.iter().map(|d| d.to_complete(self)).collect(); diags.sort_by(|lhs, rhs| match lhs.error_code.cmp(&rhs.error_code) { @@ -79,6 +48,9 @@ impl LanguageServerDatabase { }); diags } + pub fn as_language_server_db(&self) -> &dyn LanguageServerDb { + >::as_jar_db::<'_>(self) + } } impl salsa::Database for LanguageServerDatabase { diff --git a/crates/language-server/src/functionality/goto.rs b/crates/language-server/src/functionality/goto.rs index d6d748197..05672cdd5 100644 --- a/crates/language-server/src/functionality/goto.rs +++ b/crates/language-server/src/functionality/goto.rs @@ -1,17 +1,14 @@ use fxhash::FxHashMap; use hir::{ - hir_def::{scope_graph::ScopeId, ItemKind, PathId, TopLevelMod}, - visitor::{prelude::LazyPathSpan, Visitor, VisitorCtxt}, - HirDb, LowerHirDb, SpannedHirDb, + hir_def::{scope_graph::ScopeId, IdentId, ItemKind, Partial, PathId, TopLevelMod}, + span::DynLazySpan, + visitor::{prelude::LazyPathSpan, walk_path, Visitor, VisitorCtxt}, + LowerHirDb, SpannedHirDb, }; -use hir_analysis::{ - name_resolution::{EarlyResolvedPath, NameRes}, - HirAnalysisDb, -}; -use salsa::Snapshot; +use hir_analysis::name_resolution::{EarlyResolvedPath, NameDomain, NameRes}; use crate::{ - backend::db::{LanguageServerDatabase, LanguageServerDb}, + backend::{db::LanguageServerDb, Backend}, util::{to_lsp_location_from_scope, to_offset_from_position}, }; use common::diagnostics::Span; @@ -19,16 +16,20 @@ use hir::span::LazySpan; pub type GotoEnclosingPath = (PathId, ScopeId); pub type GotoPathMap = FxHashMap; +pub type GotoEnclosingSegment = (IdentId, ScopeId); +pub type GotoSegmentMap = FxHashMap; pub struct PathSpanCollector<'db> { path_map: GotoPathMap, + ident_map: GotoSegmentMap, db: &'db dyn LanguageServerDb, } impl<'db> PathSpanCollector<'db> { - pub fn new(db: &'db LanguageServerDatabase) -> Self { + pub fn new(db: &'db dyn LanguageServerDb) -> Self { Self { path_map: FxHashMap::default(), + ident_map: FxHashMap::default(), db, } } @@ -47,6 +48,25 @@ impl<'db> Visitor for 
PathSpanCollector<'db> { let scope = ctxt.scope(); self.path_map.insert(span, (path, scope)); + walk_path(self, ctxt, path); + } + + fn visit_ident( + &mut self, + ctxt: &mut VisitorCtxt<'_, hir::visitor::prelude::LazySpanAtom>, + ident: hir::hir_def::IdentId, + ) { + // keep track of `Span` --> `(IdentId, ScopeId)` so we can get more detailed information + // about the part of the path over which the cursor is hovering + let Some(span) = ctxt + .span() + .and_then(|lazy_span| lazy_span.resolve(self.db.as_spanned_hir_db())) + else { + return; + }; + + let scope = ctxt.scope(); + self.ident_map.insert(span, (ident, scope)); } } @@ -67,112 +87,191 @@ fn smallest_enclosing_path(cursor: Cursor, path_map: &GotoPathMap) -> Option, - top_mod: TopLevelMod, +fn smallest_enclosing_ident( cursor: Cursor, -) -> Option { - // Find the innermost item enclosing the cursor. - let item: ItemKind = db.find_enclosing_item(top_mod, cursor)?; + ident_map: &GotoSegmentMap, +) -> Option { + let mut smallest_enclosing_segment = None; + let mut smallest_range_size = None; - let mut visitor_ctxt = VisitorCtxt::with_item(db.as_hir_db(), item); - let mut path_collector = PathSpanCollector::new(db); - path_collector.visit_item(&mut visitor_ctxt, item); + for (span, enclosing_segment) in ident_map { + if span.range.contains(cursor) { + let range_size = span.range.end() - span.range.start(); + if smallest_range_size.is_none() || range_size < smallest_range_size.unwrap() { + smallest_enclosing_segment = Some(*enclosing_segment); + smallest_range_size = Some(range_size); + } + } + } - // can we do this without the cache? - let path_map = path_collector.path_map; + smallest_enclosing_segment +} - // Find the path that encloses the cursor. - let goto_starting_path = smallest_enclosing_path(cursor, &path_map)?; +pub fn find_enclosing_item( + db: &dyn SpannedHirDb, + top_mod: TopLevelMod, + cursor: Cursor, +) -> Option { + let items = top_mod + .scope_graph(db.as_hir_db()) + .items_dfs(db.as_hir_db()); - let (path_id, scope_id) = goto_starting_path; + let mut smallest_enclosing_item = None; + let mut smallest_range_size = None; - // Resolve path. 
- let resolved_path = hir_analysis::name_resolution::resolve_path_early( - db.as_hir_analysis_db(), - path_id, - scope_id, - ); + for item in items { + let lazy_item_span = DynLazySpan::from(item.lazy_span()); + let item_span = lazy_item_span + .resolve(SpannedHirDb::as_spanned_hir_db(db)) + .unwrap(); - Some(resolved_path) + if item_span.range.contains(cursor) { + let range_size = item_span.range.end() - item_span.range.start(); + if smallest_range_size.is_none() || range_size < smallest_range_size.unwrap() { + smallest_enclosing_item = Some(item); + smallest_range_size = Some(range_size); + } + } + } + + smallest_enclosing_item } -use lsp_types::{GotoDefinitionParams, GotoDefinitionResponse}; +pub fn get_goto_target_scopes_for_cursor( + db: &dyn LanguageServerDb, + top_mod: TopLevelMod, + cursor: Cursor, +) -> Option> { + let item: ItemKind = find_enclosing_item(db.as_spanned_hir_db(), top_mod, cursor)?; -use crate::backend::workspace::{IngotFileContext, Workspace}; -// use crate::diagnostics::get_diagnostics; + let mut visitor_ctxt = VisitorCtxt::with_item(db.as_hir_db(), item); + let mut path_collector = PathSpanCollector::new(db); + path_collector.visit_item(&mut visitor_ctxt, item); + + let (cursor_ident, _) = smallest_enclosing_ident(cursor, &path_collector.ident_map)?; + let (cursor_path, cursor_path_scope) = + smallest_enclosing_path(cursor, &path_collector.path_map)?; + + // we need to get the segment upto and including `cursor_ident` + let mut segments = cursor_path.segments(db.as_hir_db()).clone(); + let is_partial = if let Some(pos) = segments.iter().position(|ident| match ident { + Partial::Present(ident) => *ident == cursor_ident, + Partial::Absent => false, + }) { + segments.truncate(pos + 1); + segments.len() < cursor_path.segments(db.as_hir_db()).len() + } else { + false + }; + + let resolved_segments = hir_analysis::name_resolution::resolve_segments_early( + db.as_hir_analysis_db(), + segments.as_slice(), + cursor_path_scope, + ); -use std::sync::Arc; -use tokio::sync::RwLock; - -use tower_lsp::jsonrpc::Result; - -pub async fn goto_helper( - db: Snapshot, - workspace: Arc>, - params: GotoDefinitionParams, -) -> Result> { - let workspace = workspace.read().await; - // Convert the position to an offset in the file - let params = params.text_document_position_params; - let file_text = std::fs::read_to_string(params.text_document.uri.path()).ok(); - let cursor: Cursor = to_offset_from_position(params.position, file_text.unwrap().as_str()); - - // Get the module and the goto info - let file_path = params.text_document.uri.path(); - let top_mod = workspace - .top_mod_from_file_path(db.as_lower_hir_db(), file_path) - .unwrap(); - let goto_info = goto_enclosing_path(&db, top_mod, cursor); - - // Convert the goto info to a Location - let scopes = match goto_info { - Some(EarlyResolvedPath::Full(bucket)) => { - bucket.iter().map(NameRes::scope).collect::>() + let scopes = match resolved_segments { + EarlyResolvedPath::Full(bucket) => { + if is_partial { + match bucket.pick(NameDomain::Type) { + Ok(res) => res.scope().iter().cloned().collect::>(), + _ => bucket.iter().filter_map(NameRes::scope).collect::>(), + } + } else { + bucket.iter().filter_map(NameRes::scope).collect::>() + } } - Some(EarlyResolvedPath::Partial { + EarlyResolvedPath::Partial { res, unresolved_from: _, - }) => { - vec![res.scope()] - } - None => return Ok(None), + } => res.scope().iter().cloned().collect::>(), }; - let locations = scopes - .iter() - .filter_map(|scope| *scope) - .map(|scope| 
to_lsp_location_from_scope(scope, db.as_spanned_hir_db())) - .collect::>(); - - let _errors = scopes - .iter() - .filter_map(|scope| *scope) - .map(|scope| to_lsp_location_from_scope(scope, db.as_spanned_hir_db())) - .filter_map(std::result::Result::err) - .map(|err| err.to_string()) - .collect::>() - .join("\n"); - - Ok(Some(lsp_types::GotoDefinitionResponse::Array( - locations - .into_iter() - .filter_map(std::result::Result::ok) - .collect(), - ))) + Some(scopes) } +use crate::backend::workspace::IngotFileContext; +// use crate::diagnostics::get_diagnostics; + +// use tower_lsp::jsonrpc::Result; + +impl Backend { + pub(super) async fn handle_goto_definition( + &mut self, + params: lsp_types::GotoDefinitionParams, + responder: tokio::sync::oneshot::Sender< + Result, tower_lsp::jsonrpc::Error>, + >, + ) { + let workspace = self.workspace.clone(); + let workspace = workspace.read().await; + // Convert the position to an offset in the file + let params = params.text_document_position_params; + let file_text = std::fs::read_to_string(params.text_document.uri.path()).ok(); + let cursor: Cursor = to_offset_from_position(params.position, file_text.unwrap().as_str()); + + // Get the module and the goto info + let file_path = params.text_document.uri.path(); + let top_mod = workspace + .top_mod_from_file_path(self.db.as_lower_hir_db(), file_path) + .unwrap(); + + let scopes = + get_goto_target_scopes_for_cursor(&self.db, top_mod, cursor).unwrap_or_default(); + + let locations = scopes + .iter() + .map(|scope| to_lsp_location_from_scope(*scope, self.db.as_spanned_hir_db())) + .collect::>(); + + let result: Result, ()> = + Ok(Some(lsp_types::GotoDefinitionResponse::Array( + locations + .into_iter() + .filter_map(std::result::Result::ok) + .collect(), + ))); + let response = match result { + Ok(response) => response, + Err(e) => { + eprintln!("Error handling goto definition: {:?}", e); + None + } + }; + let _ = responder.send(Ok(response)); + } +} #[cfg(test)] mod tests { - use crate::backend::workspace::{IngotFileContext, Workspace}; + use crate::backend::{ + db::LanguageServerDatabase, + workspace::{IngotFileContext, Workspace}, + }; use super::*; use common::input::IngotKind; use dir_test::{dir_test, Fixture}; use fe_compiler_test_utils::snap_test; - use hir::LowerHirDb; - use salsa::ParallelDatabase; - use std::path::Path; + use hir::{HirDb, LowerHirDb}; + use std::{collections::BTreeMap, path::Path}; + + // given a cursor position and a string, convert to cursor line and column + fn line_col_from_cursor(cursor: Cursor, s: &str) -> (usize, usize) { + let mut line = 0; + let mut col = 0; + for (i, c) in s.chars().enumerate() { + if i == Into::::into(cursor) { + return (line, col); + } + if c == '\n' { + line += 1; + col = 0; + } else { + col += 1; + } + } + (line, col) + } fn extract_multiple_cursor_positions_from_spans( db: &mut LanguageServerDatabase, @@ -182,18 +281,62 @@ mod tests { let mut path_collector = PathSpanCollector::new(db); path_collector.visit_top_mod(&mut visitor_ctxt, top_mod); - let path_map = path_collector.path_map; + let ident_map = path_collector.ident_map; let mut cursors = Vec::new(); - for (span, _) in path_map { + for (span, _) in ident_map { let cursor = span.range.start(); - // println!("cursor from span: {:?}, {:?}", span, cursor); cursors.push(cursor); } cursors } + fn make_goto_cursors_snapshot( + db: &mut LanguageServerDatabase, + fixture: &Fixture<&str>, + top_mod: TopLevelMod, + ) -> String { + let cursors = extract_multiple_cursor_positions_from_spans(db, 
top_mod); + let mut cursor_path_map: BTreeMap = BTreeMap::default(); + + for cursor in &cursors { + let scopes = + get_goto_target_scopes_for_cursor(db, top_mod, *cursor).unwrap_or_default(); + + if !scopes.is_empty() { + cursor_path_map.insert( + *cursor, + scopes + .iter() + .flat_map(|x| x.pretty_path(db)) + .collect::>() + .join("\n"), + ); + } + } + + let cursor_lines = cursor_path_map + .iter() + .map(|(cursor, path)| { + let (cursor_line, cursor_col) = line_col_from_cursor(*cursor, fixture.content()); + format!("cursor position ({cursor_line:?}, {cursor_col:?}), path: {path}") + }) + .collect::>(); + + format!( + "{}\n---\n{}", + fixture + .content() + .lines() + .enumerate() + .map(|(i, line)| format!("{i:?}: {line}")) + .collect::>() + .join("\n"), + cursor_lines.join("\n") + ) + } + #[dir_test( dir: "$CARGO_MANIFEST_DIR/test_files/single_ingot", glob: "**/lib.fe", @@ -223,51 +366,15 @@ mod tests { let ingot = workspace.touch_ingot_for_file_path(db, fixture.path()); assert_eq!(ingot.unwrap().kind(db), IngotKind::Local); - let cursors = extract_multiple_cursor_positions_from_spans(db, top_mod); - let mut cursor_path_map: FxHashMap = FxHashMap::default(); - - for cursor in &cursors { - let early_resolution = goto_enclosing_path(&db.snapshot(), top_mod, *cursor); - - let goto_info = match early_resolution { - Some(EarlyResolvedPath::Full(bucket)) => { - if !bucket.is_empty() { - bucket - .iter() - .map(|x| x.pretty_path(db).unwrap()) - .collect::>() - .join("\n") - } else { - String::from("`NameResBucket` is empty") - } - } - Some(EarlyResolvedPath::Partial { - res, - unresolved_from: _, - }) => res.pretty_path(db).unwrap(), - None => String::from("No resolution available"), - }; - - cursor_path_map.insert(*cursor, goto_info); - } - - let result = format!( - "{}\n---\n{}", - fixture.content(), - cursor_path_map - .iter() - .map(|(cursor, path)| { format!("cursor position: {cursor:?}, path: {path:?}") }) - .collect::>() - .join("\n") - ); - snap_test!(result, fixture.path()); + let snapshot = make_goto_cursors_snapshot(db, &fixture, top_mod); + snap_test!(snapshot, fixture.path()); } #[dir_test( dir: "$CARGO_MANIFEST_DIR/test_files", glob: "goto*.fe" )] - fn test_goto_enclosing_path(fixture: Fixture<&str>) { + fn test_goto_cursor_target(fixture: Fixture<&str>) { let db = &mut LanguageServerDatabase::default(); let workspace = &mut Workspace::default(); let input = workspace @@ -278,44 +385,8 @@ mod tests { .top_mod_from_file_path(db.as_lower_hir_db(), fixture.path()) .unwrap(); - let cursors = extract_multiple_cursor_positions_from_spans(db, top_mod); - - let mut cursor_path_map: FxHashMap = FxHashMap::default(); - - for cursor in &cursors { - let resolved_path = goto_enclosing_path(&db.snapshot(), top_mod, *cursor); - - if let Some(path) = resolved_path { - match path { - EarlyResolvedPath::Full(bucket) => { - let path = bucket - .iter() - .map(|x| x.pretty_path(db).unwrap()) - .collect::>() - .join("\n"); - cursor_path_map.insert(*cursor, path); - } - EarlyResolvedPath::Partial { - res, - unresolved_from: _, - } => { - let path = res.pretty_path(db).unwrap(); - cursor_path_map.insert(*cursor, path); - } - } - } - } - - let result = format!( - "{}\n---\n{}", - fixture.content(), - cursor_path_map - .iter() - .map(|(cursor, path)| { format!("cursor position: {cursor:?}, path: {path}") }) - .collect::>() - .join("\n") - ); - snap_test!(result, fixture.path()); + let snapshot = make_goto_cursors_snapshot(db, &fixture, top_mod); + snap_test!(snapshot, fixture.path()); } #[dir_test( @@ 
-347,24 +418,23 @@ mod tests { let path_map = path_collector.path_map; let enclosing_path = smallest_enclosing_path(*cursor, &path_map); - let resolved_enclosing_path = hir_analysis::name_resolution::resolve_path_early( - db, - enclosing_path.unwrap().0, - enclosing_path.unwrap().1, - ); - - let res = match resolved_enclosing_path { - EarlyResolvedPath::Full(bucket) => bucket - .iter() - .map(|x| x.pretty_path(db).unwrap()) - .collect::>() - .join("\n"), - EarlyResolvedPath::Partial { - res, - unresolved_from: _, - } => res.pretty_path(db).unwrap(), - }; - cursor_path_map.insert(*cursor, res); + if let Some((path, scope)) = enclosing_path { + let resolved_enclosing_path = + hir_analysis::name_resolution::resolve_path_early(db, path, scope); + + let res = match resolved_enclosing_path { + EarlyResolvedPath::Full(bucket) => bucket + .iter() + .map(|x| x.pretty_path(db).unwrap()) + .collect::>() + .join("\n"), + EarlyResolvedPath::Partial { + res, + unresolved_from: _, + } => res.pretty_path(db).unwrap(), + }; + cursor_path_map.insert(*cursor, res); + } } let result = format!( diff --git a/crates/language-server/src/functionality/handlers.rs b/crates/language-server/src/functionality/handlers.rs index 2ea1856ac..26078a706 100644 --- a/crates/language-server/src/functionality/handlers.rs +++ b/crates/language-server/src/functionality/handlers.rs @@ -1,5 +1,3 @@ -use super::goto::goto_helper; -use super::hover::hover_helper; use crate::backend::Backend; use crate::backend::workspace::SyncableIngotFileContext; @@ -11,10 +9,9 @@ use fxhash::FxHashSet; use lsp_types::TextDocumentItem; use salsa::ParallelDatabase; -use super::capabilities::server_capabilities; +use super::{capabilities::server_capabilities, hover::hover_helper}; use crate::backend::workspace::{IngotFileContext, SyncableInputFile}; -// use crate::diagnostics::get_diagnostics; use tracing::info; @@ -137,23 +134,4 @@ impl Backend { }; let _ = responder.send(response); } - - pub(super) async fn handle_goto_definition( - &mut self, - params: lsp_types::GotoDefinitionParams, - responder: tokio::sync::oneshot::Sender< - Result, tower_lsp::jsonrpc::Error>, - >, - ) { - let db = self.db.snapshot(); - let workspace = self.workspace.clone(); - let response = match goto_helper(db, workspace, params).await { - Ok(response) => response, - Err(e) => { - eprintln!("Error handling goto definition: {:?}", e); - None - } - }; - let _ = responder.send(Ok(response)); - } } diff --git a/crates/language-server/src/functionality/hover.rs b/crates/language-server/src/functionality/hover.rs index 6df2b0847..c2b0c8871 100644 --- a/crates/language-server/src/functionality/hover.rs +++ b/crates/language-server/src/functionality/hover.rs @@ -1,8 +1,9 @@ use std::sync::Arc; -use common::{input::IngotKind, InputDb}; +use common::InputDb; use hir::LowerHirDb; -use hir_analysis::{name_resolution::EarlyResolvedPath, HirAnalysisDb}; +use hir::{HirDb, SpannedHirDb}; + use lsp_types::Hover; use tracing::info; @@ -16,7 +17,8 @@ use crate::{ util::to_offset_from_position, }; -use super::goto::{goto_enclosing_path, Cursor}; +use super::goto::{get_goto_target_scopes_for_cursor, Cursor}; +use super::item_info::{get_item_definition_markdown, get_item_docstring, get_item_path_markdown}; pub async fn hover_helper( db: Snapshot, @@ -31,71 +33,47 @@ pub async fn hover_helper( .uri .path(); info!("getting hover info for file_path: {:?}", file_path); - let input = workspace.get_input_for_file_path(file_path); - let ingot = input.map(|input| input.ingot(db.as_input_db())); + let 
input = workspace.get_input_for_file_path(file_path); let file_text = input.unwrap().text(db.as_input_db()); - let line = file_text - .lines() - .nth(params.text_document_position_params.position.line as usize) - .unwrap(); let cursor: Cursor = to_offset_from_position( params.text_document_position_params.position, file_text.as_str(), ); - let ingot_info: Option = { - let ingot_type = match ingot { - Some(ingot) => match ingot.kind(db.as_input_db()) { - IngotKind::StandAlone => None, - IngotKind::Local => Some("Local ingot"), - IngotKind::External => Some("External ingot"), - IngotKind::Std => Some("Standard library"), - }, - None => Some("No ingot information available"), - }; - let ingot_file_count = ingot.unwrap().files(db.as_input_db()).len(); - let ingot_path = ingot - .unwrap() - .path(db.as_input_db()) - .strip_prefix(workspace.root_path.clone().unwrap_or("".into())) - .ok(); - - ingot_type.map(|ingot_type| { - format!("{ingot_type} with {ingot_file_count} files at path: {ingot_path:?}") - }) - }; - let top_mod = workspace .top_mod_from_file_path(db.as_lower_hir_db(), file_path) .unwrap(); - let early_resolution = goto_enclosing_path(&db, top_mod, cursor); + let goto_info = &get_goto_target_scopes_for_cursor(db.as_language_server_db(), top_mod, cursor) + .unwrap_or_default(); - let goto_info = match early_resolution { - Some(EarlyResolvedPath::Full(bucket)) => bucket - .iter() - .map(|x| x.pretty_path(db.as_hir_analysis_db()).unwrap()) - .collect::>() - .join("\n"), - Some(EarlyResolvedPath::Partial { - res, - unresolved_from: _, - }) => res.pretty_path(db.as_hir_analysis_db()).unwrap(), - None => String::from("No goto info available"), - }; + let hir_db = db.as_hir_db(); + let scopes_info = goto_info + .iter() + .map(|scope| { + let item = &scope.item(); + let pretty_path = get_item_path_markdown(item, hir_db); + let definition_source = get_item_definition_markdown(item, db.as_spanned_hir_db()); + let docs = get_item_docstring(item, hir_db); + + let result = [pretty_path, definition_source, docs] + .iter() + .filter_map(|info| info.clone().map(|info| format!("{}\n", info))) + .collect::>() + .join("\n"); + + result + }) + .collect::>(); + + let info = scopes_info.join("\n---\n"); let result = lsp_types::Hover { contents: lsp_types::HoverContents::Markup(lsp_types::MarkupContent { - kind: lsp_types::MarkupKind::Markdown, - value: format!( - "### Hovering over:\n```{}```\n\n{}\n\n### Goto Info: \n\n{}\n\n### Ingot info: \n\n{:?}", - &line, - serde_json::to_string_pretty(¶ms).unwrap(), - goto_info, - ingot_info, - ), - }), + kind: lsp_types::MarkupKind::Markdown, + value: info, + }), range: None, }; Ok(Some(result)) diff --git a/crates/language-server/src/functionality/item_info.rs b/crates/language-server/src/functionality/item_info.rs new file mode 100644 index 000000000..b7f0e1e27 --- /dev/null +++ b/crates/language-server/src/functionality/item_info.rs @@ -0,0 +1,77 @@ +use hir::{ + hir_def::{Attr, ItemKind}, + span::LazySpan, + HirDb, SpannedHirDb, +}; + +pub fn get_item_docstring(item: &ItemKind, hir_db: &dyn HirDb) -> Option { + let docstring = match item { + ItemKind::Func(func) => func.attributes(hir_db).data(hir_db).clone(), + ItemKind::Mod(mod_) => mod_.attributes(hir_db).data(hir_db).clone(), + ItemKind::Struct(struct_) => struct_.attributes(hir_db).data(hir_db).clone(), + ItemKind::Enum(enum_) => enum_.attributes(hir_db).data(hir_db).clone(), + ItemKind::TypeAlias(type_alias) => type_alias.attributes(hir_db).data(hir_db).clone(), + ItemKind::Trait(trait_) => 
trait_.attributes(hir_db).data(hir_db).clone(),
+        ItemKind::Impl(impl_) => impl_.attributes(hir_db).data(hir_db).clone(),
+        // ItemKind::Body(body) => body.attributes(hir_db).data(hir_db).clone(),
+        // ItemKind::Const(const_) => const_.attributes(hir_db).data(hir_db).clone(),
+        // ItemKind::Use(use_) => use_.attributes(hir_db).data(hir_db).clone(),
+        ItemKind::Contract(contract) => contract.attributes(hir_db).data(hir_db).clone(),
+        _ => vec![],
+    }
+    .iter()
+    .filter_map(|attr| {
+        if let Attr::DocComment(doc) = attr {
+            Some(doc.text.data(hir_db).clone())
+        } else {
+            None
+        }
+    })
+    .collect::>();
+
+    if docstring.is_empty() {
+        None
+    } else {
+        Some(docstring.join("\n"))
+    }
+}
+
+pub fn get_item_path_markdown(item: &ItemKind, hir_db: &dyn HirDb) -> Option {
+    item.scope()
+        .pretty_path(hir_db)
+        .map(|path| format!("```fe\n{}\n```", path))
+}
+
+pub fn get_item_definition_markdown(item: &ItemKind, db: &dyn SpannedHirDb) -> Option {
+    // TODO: use pending AST features to get the definition without all this text manipulation
+    let hir_db = db.as_hir_db();
+    let span = item.lazy_span().resolve(db)?;
+
+    let mut start: usize = span.range.start().into();
+    let mut end: usize = span.range.end().into();
+
+    // if the item has a body or children, cut that stuff out
+    let body_start = match item {
+        ItemKind::Func(func) => Some(func.body(hir_db)?.lazy_span().resolve(db)?.range.start()),
+        ItemKind::Mod(module) => Some(module.scope().name_span(hir_db)?.resolve(db)?.range.end()),
+        // TODO: handle other item types
+        _ => None,
+    };
+    if let Some(body_start) = body_start {
+        end = body_start.into();
+    }
+
+    // let's start at the beginning of the line where the name is defined
+    let name_span = item.name_span()?.resolve(db);
+    if let Some(name_span) = name_span {
+        let mut name_line_start = name_span.range.start().into();
+        let file_text = span.file.text(db.as_input_db()).as_str();
+        while name_line_start > 0 && file_text.chars().nth(name_line_start - 1).unwrap() != '\n' {
+            name_line_start -= 1;
+        }
+        start = name_line_start;
+    }
+
+    let item_definition = span.file.text(db.as_input_db()).as_str()[start..end].to_string();
+    Some(format!("```fe\n{}\n```", item_definition.trim()))
+}
diff --git a/crates/language-server/src/functionality/mod.rs b/crates/language-server/src/functionality/mod.rs
index 81e4e4c36..8d55ac65f 100644
--- a/crates/language-server/src/functionality/mod.rs
+++ b/crates/language-server/src/functionality/mod.rs
@@ -3,4 +3,5 @@ pub(super) mod diagnostics;
 pub(super) mod goto;
 pub(super) mod handlers;
 pub(super) mod hover;
+pub(super) mod item_info;
 pub(crate) mod streams;
diff --git a/crates/language-server/test_files/docstrings/src/lib.fe b/crates/language-server/test_files/docstrings/src/lib.fe
deleted file mode 100644
index 90a5c05f2..000000000
--- a/crates/language-server/test_files/docstrings/src/lib.fe
+++ /dev/null
@@ -1,8 +0,0 @@
-use stuff::{ return_three, return_four }
-
-/// ## `return_seven`
-/// ### a function of numbers
-/// #### returns the 3+4=7
-fn return_seven() {
-    return_three() + return_four()
-}
diff --git a/crates/language-server/test_files/docstrings/src/stuff.fe b/crates/language-server/test_files/docstrings/src/stuff.fe
deleted file mode 100644
index b904aa95f..000000000
--- a/crates/language-server/test_files/docstrings/src/stuff.fe
+++ /dev/null
@@ -1,9 +0,0 @@
-/// A function that returns `3`
-pub fn return_three() {
-    3
-}
-
-/// ## A function that returns 4
-pub fn return_four() {
-    4
-}
\ No newline at end of file
diff --git
a/crates/language-server/test_files/goto.snap b/crates/language-server/test_files/goto.snap index a3956e258..3a74e9e80 100644 --- a/crates/language-server/test_files/goto.snap +++ b/crates/language-server/test_files/goto.snap @@ -1,25 +1,23 @@ --- source: crates/language-server/src/goto.rs -assertion_line: 153 -expression: result +assertion_line: 283 +expression: snapshot input_file: crates/language-server/test_files/goto.fe --- -struct Foo {} -struct Bar {} - -fn main() { - let x: Foo - let y: Bar - let z: baz::Baz -} - -mod baz { - pub struct Baz {} -} +0: struct Foo {} +1: struct Bar {} +2: +3: fn main() { +4: let x: Foo +5: let y: Bar +6: let z: baz::Baz +7: } +8: +9: mod baz { +10: pub struct Baz {} +11: } --- -cursor position: 64, path: -cursor position: 82, path: goto::baz::Baz -cursor position: 79, path: -cursor position: 52, path: goto::Foo -cursor position: 49, path: -cursor position: 67, path: goto::Bar +cursor position (4, 11), path: goto::Foo +cursor position (5, 11), path: goto::Bar +cursor position (6, 11), path: goto::baz +cursor position (6, 16), path: goto::baz::Baz diff --git a/crates/language-server/test_files/docstrings/fe.toml b/crates/language-server/test_files/hoverable/fe.toml similarity index 100% rename from crates/language-server/test_files/docstrings/fe.toml rename to crates/language-server/test_files/hoverable/fe.toml diff --git a/crates/language-server/test_files/hoverable/src/lib.fe b/crates/language-server/test_files/hoverable/src/lib.fe new file mode 100644 index 000000000..a91ad35b8 --- /dev/null +++ b/crates/language-server/test_files/hoverable/src/lib.fe @@ -0,0 +1,31 @@ +use stuff::calculations::{ return_three, return_four } + +/// ## `return_seven` +/// ### a function of numbers +/// #### returns the 3+4=7 +pub fn return_seven() { + return_three() + return_four() +} + +fn calculate() { + return_seven() + let x: stuff::calculations::ambiguous +} + +/// Anything that can be calculated ought to implement a +/// `calculate` function +pub trait Calculatable { + fn calculate(self) +} + +/// A struct for holding numbers like `x` and `y` +struct Numbers { + x: i32, + y: i32 +} + +impl Calculatable for Numbers { + fn calculate(self) { + self.x + self.y + } +} \ No newline at end of file diff --git a/crates/language-server/test_files/hoverable/src/stuff.fe b/crates/language-server/test_files/hoverable/src/stuff.fe new file mode 100644 index 000000000..c9692a1ef --- /dev/null +++ b/crates/language-server/test_files/hoverable/src/stuff.fe @@ -0,0 +1,17 @@ +/// ### Calculation helper functions +pub mod calculations { + /// A function that returns `3` + pub fn return_three() -> u32 { + 3 + } + + /// ## A function that returns 4 + pub fn return_four() { + 4 + } + + pub mod ambiguous { + + } + pub fn ambiguous() {} +} \ No newline at end of file diff --git a/crates/language-server/test_files/single_ingot/src/foo.fe b/crates/language-server/test_files/single_ingot/src/foo.fe index c2251ee70..99e9264c3 100644 --- a/crates/language-server/test_files/single_ingot/src/foo.fe +++ b/crates/language-server/test_files/single_ingot/src/foo.fe @@ -1,8 +1,8 @@ -pub fn foo() { +pub fn why() { let x = 5 x } -pub struct Foo { +pub struct Why { pub x: i32 } \ No newline at end of file diff --git a/crates/language-server/test_files/single_ingot/src/lib.fe b/crates/language-server/test_files/single_ingot/src/lib.fe index cc513131d..5669526f6 100644 --- a/crates/language-server/test_files/single_ingot/src/lib.fe +++ b/crates/language-server/test_files/single_ingot/src/lib.fe @@ -1,18 
+1,23 @@ -use foo::Foo +use ingot::foo::Why -mod baz { - use super::Foo - - pub struct Bar { - x: Foo - } - - fn bar() -> () { - let x: Foo +mod who { + use super::Why + pub mod what { + pub fn how() {} + pub mod how { + use ingot::Why + pub struct When { + x: Why + } } + } + pub struct Bar { + x: Why + } } fn bar() -> () { - let y: Foo - let z: baz::Bar + let y: Why + let z = who::what::how + let z: who::what::how::When } \ No newline at end of file diff --git a/crates/language-server/test_files/single_ingot/src/lib.snap b/crates/language-server/test_files/single_ingot/src/lib.snap index 07d1a8119..2326668ff 100644 --- a/crates/language-server/test_files/single_ingot/src/lib.snap +++ b/crates/language-server/test_files/single_ingot/src/lib.snap @@ -1,35 +1,41 @@ --- source: crates/language-server/src/goto.rs -assertion_line: 170 -expression: result +assertion_line: 264 +expression: snapshot input_file: crates/language-server/test_files/single_ingot/src/lib.fe --- -use foo::Foo - -mod baz { - use super::Foo - - pub struct Bar { - x: Foo - } - - fn bar() -> () { - let x: Foo - } -} - -fn bar() -> () { - let y: Foo - let z: baz::Bar -} +0: use ingot::foo::Why +1: +2: mod who { +3: use super::Why +4: pub mod what { +5: pub fn how() {} +6: pub mod how { +7: use ingot::Why +8: pub struct When { +9: x: Why +10: } +11: } +12: } +13: pub struct Bar { +14: x: Why +15: } +16: } +17: +18: fn bar() -> () { +19: let y: Why +20: let z = who::what::how +21: let z: who::what::how::When +22: } --- -cursor position: 80, path: "lib::foo::Foo" -cursor position: 29, path: "No resolution available" -cursor position: 183, path: "lib::baz::Bar" -cursor position: 180, path: "`NameResBucket` is empty" -cursor position: 168, path: "lib::foo::Foo" -cursor position: 21, path: "No resolution available" -cursor position: 165, path: "`NameResBucket` is empty" -cursor position: 127, path: "lib::foo::Foo" -cursor position: 124, path: "`NameResBucket` is empty" -cursor position: 60, path: "No resolution available" +cursor position (9, 11), path: lib::foo::Why +cursor position (14, 7), path: lib::foo::Why +cursor position (19, 11), path: lib::foo::Why +cursor position (20, 12), path: lib::who +cursor position (20, 17), path: lib::who::what +cursor position (20, 23), path: lib::who::what::how +lib::who::what::how +cursor position (21, 11), path: lib::who +cursor position (21, 16), path: lib::who::what +cursor position (21, 22), path: lib::who::what::how +cursor position (21, 27), path: lib::who::what::how::When From 988bda0e5be3d7d6399ffe21ada7ca99e11c5b32 Mon Sep 17 00:00:00 2001 From: Micah Date: Thu, 28 Mar 2024 02:19:18 -0500 Subject: [PATCH 58/66] vscode extension comment toggling, autoclose --- .../vscode/language-configuration.json | 36 +++++++++++++++++++ .../editors/vscode/package.json | 9 ++--- 2 files changed, 41 insertions(+), 4 deletions(-) create mode 100644 crates/language-server/editors/vscode/language-configuration.json diff --git a/crates/language-server/editors/vscode/language-configuration.json b/crates/language-server/editors/vscode/language-configuration.json new file mode 100644 index 000000000..7ab55b9b0 --- /dev/null +++ b/crates/language-server/editors/vscode/language-configuration.json @@ -0,0 +1,36 @@ +{ + "comments": { + "lineComment": "//", + }, + "brackets": [ + ["{", "}"], + ["[", "]"], + ["(", ")"] + ], + "autoClosingPairs": [ + { "open": "{", "close": "}" }, + { "open": "[", "close": "]" }, + { "open": "(", "close": ")" }, + { "open": "'", "close": "'", "notIn": ["string", "comment"] }, + { 
"open": "\"", "close": "\"", "notIn": ["string"] }, + ], + "autoCloseBefore": ";:.,=}])>` \n\t", + "surroundingPairs": [ + ["{", "}"], + ["[", "]"], + ["(", ")"], + ["'", "'"], + ["\"", "\""], + ], + "folding": { + "markers": { + "start": "^\\s*//\\s*#?region\\b", + "end": "^\\s*//\\s*#?endregion\\b" + } + }, + "wordPattern": "(-?\\d*\\.\\d\\w*)|([^\\`\\~\\!\\@\\#\\%\\^\\&\\*\\(\\)\\-\\=\\+\\[\\{\\]\\}\\\\\\|\\;\\:\\'\\\"\\,\\.\\<\\>\\/\\?\\s]+)", + "indentationRules": { + "increaseIndentPattern": "^((?!\\/\\/).)*(\\{[^}\"'`]*|\\([^)\"'`]*|\\[[^\\]\"'`]*)$", + "decreaseIndentPattern": "^((?!.*?\\/\\*).*\\*/)?\\s*[\\)\\}\\]].*$" + } +} diff --git a/crates/language-server/editors/vscode/package.json b/crates/language-server/editors/vscode/package.json index eaff77187..d2e33c8b1 100644 --- a/crates/language-server/editors/vscode/package.json +++ b/crates/language-server/editors/vscode/package.json @@ -25,14 +25,15 @@ "id": "fe", "extensions": [ ".fe" - ] + ], + "configuration": "./language-configuration.json" } ], "grammars": [ { - "language": "fe", - "scopeName": "source.fe", - "path": "./fe.tmLanguage.json" + "language": "fe", + "scopeName": "source.fe", + "path": "./fe.tmLanguage.json" } ] }, From 04b3fc2fa0c4019eb6d99cd5464597821c0d5e27 Mon Sep 17 00:00:00 2001 From: Micah Date: Thu, 28 Mar 2024 18:02:55 -0500 Subject: [PATCH 59/66] fix hanging diagnostic workers --- Cargo.lock | 11 +++ crates/language-server/Cargo.toml | 1 + crates/language-server/src/backend/db.rs | 30 ++++++- .../src/functionality/diagnostics.rs | 80 ++----------------- .../src/functionality/handlers.rs | 65 +++++++++------ .../src/functionality/hover.rs | 32 ++------ crates/language-server/src/logger.rs | 8 +- .../test_files/hoverable/src/stuff.fe | 2 + 8 files changed, 105 insertions(+), 124 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index b2b0a07ab..9b678eb63 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1288,6 +1288,7 @@ dependencies = [ "serde_json", "tokio", "tokio-macros", + "tokio-rayon", "tokio-stream", "tower-lsp", "tracing", @@ -3622,6 +3623,16 @@ dependencies = [ "syn 2.0.48", ] +[[package]] +name = "tokio-rayon" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7cf33a76e0b1dd03b778f83244137bd59887abf25c0e87bc3e7071105f457693" +dependencies = [ + "rayon", + "tokio", +] + [[package]] name = "tokio-stream" version = "0.1.14" diff --git a/crates/language-server/Cargo.toml b/crates/language-server/Cargo.toml index e591ca323..7360c4a94 100644 --- a/crates/language-server/Cargo.toml +++ b/crates/language-server/Cargo.toml @@ -40,3 +40,4 @@ rust-embed = "8.3.0" futures-batch = "0.6.1" tracing = "0.1.40" tracing-subscriber = "0.3.18" +tokio-rayon = "2.1.0" diff --git a/crates/language-server/src/backend/db.rs b/crates/language-server/src/backend/db.rs index d4f3eb311..b30c4895d 100644 --- a/crates/language-server/src/backend/db.rs +++ b/crates/language-server/src/backend/db.rs @@ -1,7 +1,8 @@ -use common::{diagnostics::CompleteDiagnostic, InputDb}; +use common::{diagnostics::CompleteDiagnostic, InputDb, InputFile}; +use fxhash::FxHashMap; use hir::{ analysis_pass::AnalysisPassManager, diagnostics::DiagnosticVoucher, hir_def::TopLevelMod, - HirDb, LowerHirDb, ParsingPass, SpannedHirDb, + lower::map_file_to_mod, HirDb, LowerHirDb, ParsingPass, SpannedHirDb, }; use hir_analysis::{ name_resolution::{DefConflictAnalysisPass, ImportAnalysisPass, PathAnalysisPass}, @@ -9,6 +10,8 @@ use hir_analysis::{ }; use salsa::{ParallelDatabase, Snapshot}; +use 
crate::util::diag_to_lsp; + #[salsa::jar(db = LanguageServerDb)] pub struct Jar(crate::functionality::diagnostics::file_line_starts); @@ -48,6 +51,29 @@ impl LanguageServerDatabase { }); diags } + + pub fn get_lsp_diagnostics( + &self, + files: Vec, + ) -> FxHashMap> { + let mut result = FxHashMap::>::default(); + files + .iter() + .flat_map(|file| { + let top_mod = map_file_to_mod(self, *file); + let diagnostics = self.analyze_top_mod(top_mod); + self.finalize_diags(&diagnostics) + .into_iter() + .flat_map(|diag| diag_to_lsp(diag, self.as_input_db()).clone()) + }) + .for_each(|(uri, more_diags)| { + let _ = result.entry(uri.clone()).or_insert_with(Vec::new); + let diags = result.entry(uri).or_insert_with(Vec::new); + diags.extend(more_diags); + }); + result + } + pub fn as_language_server_db(&self) -> &dyn LanguageServerDb { >::as_jar_db::<'_>(self) } diff --git a/crates/language-server/src/functionality/diagnostics.rs b/crates/language-server/src/functionality/diagnostics.rs index 5ccaaa1ca..0663e900e 100644 --- a/crates/language-server/src/functionality/diagnostics.rs +++ b/crates/language-server/src/functionality/diagnostics.rs @@ -1,27 +1,19 @@ -use std::{ops::Range, sync::Arc}; +use std::ops::Range; use camino::Utf8Path; -use clap::Error; + use codespan_reporting as cs; use cs::{diagnostic as cs_diag, files as cs_files}; use common::{ diagnostics::{LabelStyle, Severity}, - InputDb, InputFile, -}; -use fxhash::FxHashMap; -use hir::{diagnostics::DiagnosticVoucher, LowerHirDb}; -use salsa::Snapshot; -use tokio::sync::RwLock; -use tower_lsp::Client; -use tracing::info; - -use crate::{ - backend::db::{LanguageServerDatabase, LanguageServerDb}, - backend::workspace::{IngotFileContext, Workspace}, - util::diag_to_lsp, + InputFile, }; +use hir::diagnostics::DiagnosticVoucher; + +use crate::backend::db::{LanguageServerDatabase, LanguageServerDb}; + pub trait ToCsDiag { fn to_cs(&self, db: &LanguageServerDatabase) -> cs_diag::Diagnostic; } @@ -130,61 +122,3 @@ impl<'a> cs_files::Files<'a> for LanguageServerDatabase { Ok(Range { start, end }) } } - -fn run_diagnostics( - db: &Snapshot, - workspace: &Workspace, - path: &str, -) -> Vec { - let file_path = path; - let top_mod = workspace - .top_mod_from_file_path(db.as_lower_hir_db(), file_path) - .unwrap(); - let diags = &db.analyze_top_mod(top_mod); - db.finalize_diags(diags) -} - -pub fn get_diagnostics( - db: &Snapshot, - workspace: &Workspace, - uri: lsp_types::Url, -) -> Result>, Error> { - let diags = run_diagnostics(db, workspace, uri.to_file_path().unwrap().to_str().unwrap()); - - let diagnostics = diags - .into_iter() - .flat_map(|diag| diag_to_lsp(diag, db.as_input_db()).clone()); - - // we need to reduce the diagnostics to a map from URL to Vec - let mut result = FxHashMap::>::default(); - - // add a null diagnostic to the result for the given URL - let _ = result.entry(uri.clone()).or_insert_with(Vec::new); - - diagnostics.for_each(|(uri, more_diags)| { - let diags = result.entry(uri).or_insert_with(Vec::new); - diags.extend(more_diags); - }); - - Ok(result) -} - -pub(super) async fn diagnostics_workload( - client: Client, - workspace: Arc>, - db: Snapshot, - url: lsp_types::Url, -) { - info!("handling diagnostics for {:?}", url); - let workspace = &workspace.read().await; - let diagnostics = get_diagnostics(&db, workspace, url.clone()); - - let client = client.clone(); - let diagnostics = diagnostics - .unwrap() - .into_iter() - .map(|(uri, diags)| async { client.publish_diagnostics(uri, diags, None).await }) - .collect::>(); - - 
futures::future::join_all(diagnostics).await; -} diff --git a/crates/language-server/src/functionality/handlers.rs b/crates/language-server/src/functionality/handlers.rs index 26078a706..b8e30e23b 100644 --- a/crates/language-server/src/functionality/handlers.rs +++ b/crates/language-server/src/functionality/handlers.rs @@ -1,9 +1,9 @@ use crate::backend::Backend; use crate::backend::workspace::SyncableIngotFileContext; -use crate::functionality::diagnostics::diagnostics_workload; use common::InputDb; +use futures::TryFutureExt; use fxhash::FxHashSet; use lsp_types::TextDocumentItem; @@ -97,19 +97,26 @@ impl Backend { } info!("ingots need diagnostics: {:?}", ingots_need_diagnostics); - for ingot in ingots_need_diagnostics.into_iter() { - for file in ingot.files(self.db.as_input_db()) { - let file = *file; - let path = file.path(self.db.as_input_db()); - let path = lsp_types::Url::from_file_path(path).unwrap(); - let db = self.db.snapshot(); - let client = self.client.clone(); - let workspace = self.workspace.clone(); - self.workers.spawn(async move { - diagnostics_workload(client.clone(), workspace.clone(), db, path).await + let ingot_files_need_diagnostics = ingots_need_diagnostics + .into_iter() + .flat_map(|ingot| ingot.files(self.db.as_input_db())) + .cloned() + .collect::>(); + + let db = self.db.snapshot(); + let client = self.client.clone(); + let compute_and_send_diagnostics = self + .workers + .spawn_blocking(move || db.get_lsp_diagnostics(ingot_files_need_diagnostics)) + .and_then(|diagnostics| async move { + let client = client.clone(); + let send_diagnostics = diagnostics.into_iter().map(|(path, diagnostic)| async { + client.publish_diagnostics(path, diagnostic, None).await }); - } - } + futures::future::join_all(send_diagnostics).await; + Ok(()) + }); + tokio::spawn(compute_and_send_diagnostics); } pub(super) async fn handle_hover( @@ -121,17 +128,27 @@ impl Backend { ) { let db = self.db.snapshot(); let workspace = self.workspace.clone(); - let response = match self - .workers - .spawn(hover_helper(db, workspace, params)) - .await - { - Ok(response) => response, - Err(e) => { - eprintln!("Error handling hover: {:?}", e); - Ok(None) + let file = workspace.read().await.get_input_for_file_path( + params + .text_document_position_params + .text_document + .uri + .path(), + ); + match file { + Some(file) => { + let response = match hover_helper(db, file, params) { + Ok(response) => response, + Err(e) => { + eprintln!("Error handling hover: {:?}", e); + None + } + }; + let _ = responder.send(Ok(response)); } - }; - let _ = responder.send(response); + None => { + let _ = responder.send(Ok(None)); + } + } } } diff --git a/crates/language-server/src/functionality/hover.rs b/crates/language-server/src/functionality/hover.rs index c2b0c8871..73ca2675d 100644 --- a/crates/language-server/src/functionality/hover.rs +++ b/crates/language-server/src/functionality/hover.rs @@ -1,6 +1,5 @@ -use std::sync::Arc; - -use common::InputDb; +use common::{InputDb, InputFile}; +use hir::lower::map_file_to_mod; use hir::LowerHirDb; use hir::{HirDb, SpannedHirDb}; @@ -8,43 +7,28 @@ use lsp_types::Hover; use tracing::info; use salsa::Snapshot; -use tokio::sync::RwLock; + use tower_lsp::jsonrpc::Result; -use crate::{ - backend::db::LanguageServerDatabase, - backend::workspace::{IngotFileContext, Workspace}, - util::to_offset_from_position, -}; +use crate::{backend::db::LanguageServerDatabase, util::to_offset_from_position}; use super::goto::{get_goto_target_scopes_for_cursor, Cursor}; use 
super::item_info::{get_item_definition_markdown, get_item_docstring, get_item_path_markdown};

-pub async fn hover_helper(
+pub fn hover_helper(
     db: Snapshot,
-    workspace: Arc>,
+    input: InputFile,
     params: lsp_types::HoverParams,
 ) -> Result> {
-    let workspace = workspace.read().await;
     info!("handling hover");
-    let file_path = &params
-        .text_document_position_params
-        .text_document
-        .uri
-        .path();
-    info!("getting hover info for file_path: {:?}", file_path);
-
-    let input = workspace.get_input_for_file_path(file_path);
-    let file_text = input.unwrap().text(db.as_input_db());
+    let file_text = input.text(db.as_input_db());
     let cursor: Cursor = to_offset_from_position(
         params.text_document_position_params.position,
         file_text.as_str(),
     );

-    let top_mod = workspace
-        .top_mod_from_file_path(db.as_lower_hir_db(), file_path)
-        .unwrap();
+    let top_mod = map_file_to_mod(db.as_lower_hir_db(), input);

     let goto_info = &get_goto_target_scopes_for_cursor(db.as_language_server_db(), top_mod, cursor)
         .unwrap_or_default();
diff --git a/crates/language-server/src/logger.rs b/crates/language-server/src/logger.rs
index 61eb2d24c..0af0cb990 100644
--- a/crates/language-server/src/logger.rs
+++ b/crates/language-server/src/logger.rs
@@ -51,7 +51,13 @@ pub fn setup_logger(
     let (log_sender, log_receiver) =
         tokio::sync::mpsc::unbounded_channel::<(String, MessageType)>();
     let logger = LoggerLayer { log_sender };
-    let logger = logger.with_max_level(level);
+    let logger = logger
+        .with_filter(|metadata| {
+            metadata
+                .module_path()
+                .map_or(false, |path| path.starts_with("fe_language_server"))
+        })
+        .with_max_level(level);

     let pretty_logger = tracing_subscriber::fmt::layer()
         .event_format(tracing_subscriber::fmt::format::format().pretty())
diff --git a/crates/language-server/test_files/hoverable/src/stuff.fe b/crates/language-server/test_files/hoverable/src/stuff.fe
index c9692a1ef..b97ffe766 100644
--- a/crates/language-server/test_files/hoverable/src/stuff.fe
+++ b/crates/language-server/test_files/hoverable/src/stuff.fe
@@ -10,8 +10,10 @@ pub mod calculations {
         4
     }

+    /// which one is it?
     pub mod ambiguous {

     }
+    /// is it this one?
pub fn ambiguous() {} } \ No newline at end of file From 1c02042b2b6d545ca5a9be500b4907c05a05984e Mon Sep 17 00:00:00 2001 From: Micah Date: Thu, 28 Mar 2024 18:09:13 -0500 Subject: [PATCH 60/66] no need for workspace lock --- crates/language-server/src/backend/mod.rs | 6 ++--- .../language-server/src/functionality/goto.rs | 4 +--- .../src/functionality/handlers.rs | 23 ++++++------------- 3 files changed, 10 insertions(+), 23 deletions(-) diff --git a/crates/language-server/src/backend/mod.rs b/crates/language-server/src/backend/mod.rs index 89f339ff6..e77ce263a 100644 --- a/crates/language-server/src/backend/mod.rs +++ b/crates/language-server/src/backend/mod.rs @@ -1,8 +1,6 @@ pub(crate) mod db; pub(crate) mod workspace; use db::LanguageServerDatabase; -use std::sync::Arc; -use tokio::sync::RwLock; use workspace::Workspace; use tower_lsp::Client; @@ -10,14 +8,14 @@ use tower_lsp::Client; pub struct Backend { pub(super) client: Client, pub(super) db: LanguageServerDatabase, - pub(super) workspace: Arc>, + pub(super) workspace: Workspace, pub(super) workers: tokio::runtime::Runtime, } impl Backend { pub fn new(client: Client) -> Self { let db = LanguageServerDatabase::default(); - let workspace = Arc::new(RwLock::new(Workspace::default())); + let workspace = Workspace::default(); let workers = tokio::runtime::Builder::new_multi_thread() .worker_threads(4) diff --git a/crates/language-server/src/functionality/goto.rs b/crates/language-server/src/functionality/goto.rs index 05672cdd5..f63f5df11 100644 --- a/crates/language-server/src/functionality/goto.rs +++ b/crates/language-server/src/functionality/goto.rs @@ -203,8 +203,6 @@ impl Backend { Result, tower_lsp::jsonrpc::Error>, >, ) { - let workspace = self.workspace.clone(); - let workspace = workspace.read().await; // Convert the position to an offset in the file let params = params.text_document_position_params; let file_text = std::fs::read_to_string(params.text_document.uri.path()).ok(); @@ -212,7 +210,7 @@ impl Backend { // Get the module and the goto info let file_path = params.text_document.uri.path(); - let top_mod = workspace + let top_mod = self.workspace .top_mod_from_file_path(self.db.as_lower_hir_db(), file_path) .unwrap(); diff --git a/crates/language-server/src/functionality/handlers.rs b/crates/language-server/src/functionality/handlers.rs index b8e30e23b..61d155286 100644 --- a/crates/language-server/src/functionality/handlers.rs +++ b/crates/language-server/src/functionality/handlers.rs @@ -27,10 +27,9 @@ impl Backend { let root = params.root_uri.unwrap().to_file_path().ok().unwrap(); - let mut workspace = self.workspace.write().await; - let _ = workspace.set_workspace_root(&mut self.db, &root); - let _ = workspace.load_std_lib(&mut self.db, &root); - let _ = workspace.sync(&mut self.db); + let _ = self.workspace.set_workspace_root(&mut self.db, &root); + let _ = self.workspace.load_std_lib(&mut self.db, &root); + let _ = self.workspace.sync(&mut self.db); let capabilities = server_capabilities(); let initialize_result = lsp_types::InitializeResult { @@ -59,11 +58,7 @@ impl Backend { let path = params.uri.to_file_path().unwrap(); info!("file deleted: {:?}", path); let path = path.to_str().unwrap(); - let workspace = self.workspace.clone(); - let _ = workspace - .write() - .await - .remove_input_for_file_path(&mut self.db, path); + let _ = self.workspace.remove_input_for_file_path(&mut self.db, path); let _ = tx_needs_diagnostics.send(path.to_string()); } @@ -77,8 +72,7 @@ impl Backend { let path = 
path_buf.to_str().unwrap(); let contents = Some(doc.text); if let Some(contents) = contents { - let workspace = &mut self.workspace.write().await; - let input = workspace + let input = self.workspace .touch_input_for_file_path(&mut self.db, path) .unwrap(); let _ = input.sync_from_text(&mut self.db, contents); @@ -90,9 +84,7 @@ impl Backend { info!("files need diagnostics: {:?}", files_need_diagnostics); let mut ingots_need_diagnostics = FxHashSet::default(); for file in files_need_diagnostics { - let workspace = self.workspace.clone(); - let workspace = workspace.read().await; - let ingot = workspace.get_ingot_for_file_path(&file).unwrap(); + let ingot = self.workspace.get_ingot_for_file_path(&file).unwrap(); ingots_need_diagnostics.insert(ingot); } @@ -127,8 +119,7 @@ impl Backend { >, ) { let db = self.db.snapshot(); - let workspace = self.workspace.clone(); - let file = workspace.read().await.get_input_for_file_path( + let file = self.workspace.get_input_for_file_path( params .text_document_position_params .text_document From 4bd60a4ebb3a9327f9ae46ae7c4d364f71d7786c Mon Sep 17 00:00:00 2001 From: Micah Date: Thu, 28 Mar 2024 18:21:11 -0500 Subject: [PATCH 61/66] cleanup hover/diagnostic handlers --- .../src/functionality/handlers.rs | 60 ++++++++----------- 1 file changed, 26 insertions(+), 34 deletions(-) diff --git a/crates/language-server/src/functionality/handlers.rs b/crates/language-server/src/functionality/handlers.rs index 61d155286..2b52f8b01 100644 --- a/crates/language-server/src/functionality/handlers.rs +++ b/crates/language-server/src/functionality/handlers.rs @@ -13,7 +13,7 @@ use super::{capabilities::server_capabilities, hover::hover_helper}; use crate::backend::workspace::{IngotFileContext, SyncableInputFile}; -use tracing::info; +use tracing::{error, info}; impl Backend { pub(super) async fn handle_initialized( @@ -58,7 +58,9 @@ impl Backend { let path = params.uri.to_file_path().unwrap(); info!("file deleted: {:?}", path); let path = path.to_str().unwrap(); - let _ = self.workspace.remove_input_for_file_path(&mut self.db, path); + let _ = self + .workspace + .remove_input_for_file_path(&mut self.db, path); let _ = tx_needs_diagnostics.send(path.to_string()); } @@ -72,7 +74,8 @@ impl Backend { let path = path_buf.to_str().unwrap(); let contents = Some(doc.text); if let Some(contents) = contents { - let input = self.workspace + let input = self + .workspace .touch_input_for_file_path(&mut self.db, path) .unwrap(); let _ = input.sync_from_text(&mut self.db, contents); @@ -81,31 +84,26 @@ impl Backend { } pub(super) async fn handle_diagnostics(&mut self, files_need_diagnostics: Vec) { - info!("files need diagnostics: {:?}", files_need_diagnostics); - let mut ingots_need_diagnostics = FxHashSet::default(); - for file in files_need_diagnostics { - let ingot = self.workspace.get_ingot_for_file_path(&file).unwrap(); - ingots_need_diagnostics.insert(ingot); - } - - info!("ingots need diagnostics: {:?}", ingots_need_diagnostics); - let ingot_files_need_diagnostics = ingots_need_diagnostics + let ingot_files_need_diagnostics: FxHashSet<_> = files_need_diagnostics .into_iter() + .filter_map(|file| self.workspace.get_ingot_for_file_path(&file)) .flat_map(|ingot| ingot.files(self.db.as_input_db())) .cloned() - .collect::>(); + .collect(); let db = self.db.snapshot(); let client = self.client.clone(); let compute_and_send_diagnostics = self .workers - .spawn_blocking(move || db.get_lsp_diagnostics(ingot_files_need_diagnostics)) + .spawn_blocking(move || { + 
db.get_lsp_diagnostics(ingot_files_need_diagnostics.into_iter().collect()) + }) .and_then(|diagnostics| async move { - let client = client.clone(); - let send_diagnostics = diagnostics.into_iter().map(|(path, diagnostic)| async { - client.publish_diagnostics(path, diagnostic, None).await - }); - futures::future::join_all(send_diagnostics).await; + futures::future::join_all(diagnostics.into_iter().map(|(path, diagnostic)| { + let client = client.clone(); + async move { client.publish_diagnostics(path, diagnostic, None).await } + })) + .await; Ok(()) }); tokio::spawn(compute_and_send_diagnostics); @@ -126,20 +124,14 @@ impl Backend { .uri .path(), ); - match file { - Some(file) => { - let response = match hover_helper(db, file, params) { - Ok(response) => response, - Err(e) => { - eprintln!("Error handling hover: {:?}", e); - None - } - }; - let _ = responder.send(Ok(response)); - } - None => { - let _ = responder.send(Ok(None)); - } - } + + let response = file.and_then(|file| { + hover_helper(db, file, params).unwrap_or_else(|e| { + error!("Error handling hover: {:?}", e); + None + }) + }); + + let _ = responder.send(Ok(response)); } } From 6ae80ca4c947c116bf2317ecd578d5dd4dd7d269 Mon Sep 17 00:00:00 2001 From: Micah Date: Thu, 28 Mar 2024 18:28:47 -0500 Subject: [PATCH 62/66] cleanup language server database --- Cargo.lock | 12 ---- crates/language-server/Cargo.toml | 2 - crates/language-server/src/backend/db.rs | 61 ++--------------- .../src/functionality/diagnostics.rs | 65 +++++++++++++++++-- .../language-server/src/functionality/goto.rs | 6 +- 5 files changed, 67 insertions(+), 79 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 9b678eb63..c36d2f25b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1265,7 +1265,6 @@ version = "0.23.0" dependencies = [ "anyhow", "camino", - "clap 4.3.12", "codespan-reporting", "console-subscriber", "dir-test", @@ -1288,7 +1287,6 @@ dependencies = [ "serde_json", "tokio", "tokio-macros", - "tokio-rayon", "tokio-stream", "tower-lsp", "tracing", @@ -3623,16 +3621,6 @@ dependencies = [ "syn 2.0.48", ] -[[package]] -name = "tokio-rayon" -version = "2.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7cf33a76e0b1dd03b778f83244137bd59887abf25c0e87bc3e7071105f457693" -dependencies = [ - "rayon", - "tokio", -] - [[package]] name = "tokio-stream" version = "0.1.14" diff --git a/crates/language-server/Cargo.toml b/crates/language-server/Cargo.toml index 7360c4a94..af212f6cb 100644 --- a/crates/language-server/Cargo.toml +++ b/crates/language-server/Cargo.toml @@ -16,7 +16,6 @@ hir = { path = "../hir", package = "fe-hir" } language-server-macros = { path = "../language-server-macros", package = "fe-language-server-macros" } hir-analysis = { path = "../hir-analysis", package = "fe-hir-analysis" } camino = "1.1.4" -clap = { version = "4.3", features = ["derive"] } common = { path = "../common2", package = "fe-common2" } anyhow = "1.0.71" lsp-types = "0.94.0" @@ -40,4 +39,3 @@ rust-embed = "8.3.0" futures-batch = "0.6.1" tracing = "0.1.40" tracing-subscriber = "0.3.18" -tokio-rayon = "2.1.0" diff --git a/crates/language-server/src/backend/db.rs b/crates/language-server/src/backend/db.rs index b30c4895d..a063b82e2 100644 --- a/crates/language-server/src/backend/db.rs +++ b/crates/language-server/src/backend/db.rs @@ -1,16 +1,8 @@ -use common::{diagnostics::CompleteDiagnostic, InputDb, InputFile}; -use fxhash::FxHashMap; -use hir::{ - analysis_pass::AnalysisPassManager, diagnostics::DiagnosticVoucher, hir_def::TopLevelMod, - 
lower::map_file_to_mod, HirDb, LowerHirDb, ParsingPass, SpannedHirDb, -}; -use hir_analysis::{ - name_resolution::{DefConflictAnalysisPass, ImportAnalysisPass, PathAnalysisPass}, - HirAnalysisDb, -}; -use salsa::{ParallelDatabase, Snapshot}; +use common::InputDb; -use crate::util::diag_to_lsp; +use hir::{HirDb, LowerHirDb, SpannedHirDb}; +use hir_analysis::HirAnalysisDb; +use salsa::{ParallelDatabase, Snapshot}; #[salsa::jar(db = LanguageServerDb)] pub struct Jar(crate::functionality::diagnostics::file_line_starts); @@ -38,42 +30,6 @@ pub struct LanguageServerDatabase { } impl LanguageServerDatabase { - pub fn analyze_top_mod(&self, top_mod: TopLevelMod) -> Vec> { - let mut pass_manager = initialize_analysis_pass(self); - pass_manager.run_on_module(top_mod) - } - - pub fn finalize_diags(&self, diags: &[Box]) -> Vec { - let mut diags: Vec<_> = diags.iter().map(|d| d.to_complete(self)).collect(); - diags.sort_by(|lhs, rhs| match lhs.error_code.cmp(&rhs.error_code) { - std::cmp::Ordering::Equal => lhs.primary_span().cmp(&rhs.primary_span()), - ord => ord, - }); - diags - } - - pub fn get_lsp_diagnostics( - &self, - files: Vec, - ) -> FxHashMap> { - let mut result = FxHashMap::>::default(); - files - .iter() - .flat_map(|file| { - let top_mod = map_file_to_mod(self, *file); - let diagnostics = self.analyze_top_mod(top_mod); - self.finalize_diags(&diagnostics) - .into_iter() - .flat_map(|diag| diag_to_lsp(diag, self.as_input_db()).clone()) - }) - .for_each(|(uri, more_diags)| { - let _ = result.entry(uri.clone()).or_insert_with(Vec::new); - let diags = result.entry(uri).or_insert_with(Vec::new); - diags.extend(more_diags); - }); - result - } - pub fn as_language_server_db(&self) -> &dyn LanguageServerDb { >::as_jar_db::<'_>(self) } @@ -100,12 +56,3 @@ impl ParallelDatabase for LanguageServerDatabase { }) } } - -fn initialize_analysis_pass(db: &LanguageServerDatabase) -> AnalysisPassManager<'_> { - let mut pass_manager = AnalysisPassManager::new(); - pass_manager.add_module_pass(Box::new(ParsingPass::new(db))); - pass_manager.add_module_pass(Box::new(DefConflictAnalysisPass::new(db))); - pass_manager.add_module_pass(Box::new(ImportAnalysisPass::new(db))); - pass_manager.add_module_pass(Box::new(PathAnalysisPass::new(db))); - pass_manager -} diff --git a/crates/language-server/src/functionality/diagnostics.rs b/crates/language-server/src/functionality/diagnostics.rs index 0663e900e..8540d0a5e 100644 --- a/crates/language-server/src/functionality/diagnostics.rs +++ b/crates/language-server/src/functionality/diagnostics.rs @@ -6,13 +6,23 @@ use codespan_reporting as cs; use cs::{diagnostic as cs_diag, files as cs_files}; use common::{ - diagnostics::{LabelStyle, Severity}, - InputFile, + diagnostics::{CompleteDiagnostic, LabelStyle, Severity}, + InputDb, InputFile, }; -use hir::diagnostics::DiagnosticVoucher; +use fxhash::FxHashMap; +use hir::{ + analysis_pass::AnalysisPassManager, diagnostics::DiagnosticVoucher, hir_def::TopLevelMod, + lower::map_file_to_mod, ParsingPass, +}; +use hir_analysis::name_resolution::{ + DefConflictAnalysisPass, ImportAnalysisPass, PathAnalysisPass, +}; -use crate::backend::db::{LanguageServerDatabase, LanguageServerDb}; +use crate::{ + backend::db::{LanguageServerDatabase, LanguageServerDb}, + util::diag_to_lsp, +}; pub trait ToCsDiag { fn to_cs(&self, db: &LanguageServerDatabase) -> cs_diag::Diagnostic; @@ -122,3 +132,50 @@ impl<'a> cs_files::Files<'a> for LanguageServerDatabase { Ok(Range { start, end }) } } + +impl LanguageServerDatabase { + pub fn 
analyze_top_mod(&self, top_mod: TopLevelMod) -> Vec> { + let mut pass_manager = initialize_analysis_pass(self); + pass_manager.run_on_module(top_mod) + } + + pub fn finalize_diags(&self, diags: &[Box]) -> Vec { + let mut diags: Vec<_> = diags.iter().map(|d| d.to_complete(self)).collect(); + diags.sort_by(|lhs, rhs| match lhs.error_code.cmp(&rhs.error_code) { + std::cmp::Ordering::Equal => lhs.primary_span().cmp(&rhs.primary_span()), + ord => ord, + }); + diags + } + + pub fn get_lsp_diagnostics( + &self, + files: Vec, + ) -> FxHashMap> { + let mut result = FxHashMap::>::default(); + files + .iter() + .flat_map(|file| { + let top_mod = map_file_to_mod(self, *file); + let diagnostics = self.analyze_top_mod(top_mod); + self.finalize_diags(&diagnostics) + .into_iter() + .flat_map(|diag| diag_to_lsp(diag, self.as_input_db()).clone()) + }) + .for_each(|(uri, more_diags)| { + let _ = result.entry(uri.clone()).or_insert_with(Vec::new); + let diags = result.entry(uri).or_insert_with(Vec::new); + diags.extend(more_diags); + }); + result + } +} + +fn initialize_analysis_pass(db: &LanguageServerDatabase) -> AnalysisPassManager<'_> { + let mut pass_manager = AnalysisPassManager::new(); + pass_manager.add_module_pass(Box::new(ParsingPass::new(db))); + pass_manager.add_module_pass(Box::new(DefConflictAnalysisPass::new(db))); + pass_manager.add_module_pass(Box::new(ImportAnalysisPass::new(db))); + pass_manager.add_module_pass(Box::new(PathAnalysisPass::new(db))); + pass_manager +} diff --git a/crates/language-server/src/functionality/goto.rs b/crates/language-server/src/functionality/goto.rs index f63f5df11..4c0d1fb01 100644 --- a/crates/language-server/src/functionality/goto.rs +++ b/crates/language-server/src/functionality/goto.rs @@ -191,9 +191,6 @@ pub fn get_goto_target_scopes_for_cursor( } use crate::backend::workspace::IngotFileContext; -// use crate::diagnostics::get_diagnostics; - -// use tower_lsp::jsonrpc::Result; impl Backend { pub(super) async fn handle_goto_definition( @@ -210,7 +207,8 @@ impl Backend { // Get the module and the goto info let file_path = params.text_document.uri.path(); - let top_mod = self.workspace + let top_mod = self + .workspace .top_mod_from_file_path(self.db.as_lower_hir_db(), file_path) .unwrap(); From cdd3a1b3281183f744bb7540d43e7d723cdd32c6 Mon Sep 17 00:00:00 2001 From: Micah Date: Mon, 1 Apr 2024 02:02:32 -0500 Subject: [PATCH 63/66] Applied some suggestions from review --- crates/language-server/src/backend/db.rs | 10 ++++---- .../language-server/src/backend/workspace.rs | 19 --------------- .../language-server/src/functionality/goto.rs | 2 +- .../src/functionality/handlers.rs | 8 +++---- .../src/functionality/hover.rs | 20 +++++++--------- .../src/functionality/item_info.rs | 24 +++++++++---------- 6 files changed, 30 insertions(+), 53 deletions(-) diff --git a/crates/language-server/src/backend/db.rs b/crates/language-server/src/backend/db.rs index a063b82e2..0dd0fc1dd 100644 --- a/crates/language-server/src/backend/db.rs +++ b/crates/language-server/src/backend/db.rs @@ -29,11 +29,11 @@ pub struct LanguageServerDatabase { storage: salsa::Storage, } -impl LanguageServerDatabase { - pub fn as_language_server_db(&self) -> &dyn LanguageServerDb { - >::as_jar_db::<'_>(self) - } -} +// impl LanguageServerDatabase { +// pub fn as_language_server_db(&self) -> &dyn LanguageServerDb { +// >::as_jar_db::<'_>(self) +// } +// } impl salsa::Database for LanguageServerDatabase { fn salsa_event(&self, _: salsa::Event) {} diff --git 
a/crates/language-server/src/backend/workspace.rs b/crates/language-server/src/backend/workspace.rs index ae16fe2cf..e1590fc23 100644 --- a/crates/language-server/src/backend/workspace.rs +++ b/crates/language-server/src/backend/workspace.rs @@ -467,25 +467,6 @@ impl IngotFileContext for Workspace { } } -pub trait SyncableInputFile { - // fn sync(&self, db: &mut LanguageServerDatabase, contents: Option) -> Result<()>; - fn sync_from_fs(&self, db: &mut LanguageServerDatabase) -> Result<()>; - fn sync_from_text(&self, db: &mut LanguageServerDatabase, contents: String) -> Result<()>; -} - -impl SyncableInputFile for InputFile { - fn sync_from_fs(&self, db: &mut LanguageServerDatabase) -> Result<()> { - let path = self.path(db); - let contents = std::fs::read_to_string(path)?; - self.set_text(db).to(contents); - Ok(()) - } - fn sync_from_text(&self, db: &mut LanguageServerDatabase, contents: String) -> Result<()> { - self.set_text(db).to(contents); - Ok(()) - } -} - pub trait SyncableIngotFileContext { fn sync(&mut self, db: &mut LanguageServerDatabase) -> Result<()>; } diff --git a/crates/language-server/src/functionality/goto.rs b/crates/language-server/src/functionality/goto.rs index 4c0d1fb01..0ea037888 100644 --- a/crates/language-server/src/functionality/goto.rs +++ b/crates/language-server/src/functionality/goto.rs @@ -194,7 +194,7 @@ use crate::backend::workspace::IngotFileContext; impl Backend { pub(super) async fn handle_goto_definition( - &mut self, + &self, params: lsp_types::GotoDefinitionParams, responder: tokio::sync::oneshot::Sender< Result, tower_lsp::jsonrpc::Error>, diff --git a/crates/language-server/src/functionality/handlers.rs b/crates/language-server/src/functionality/handlers.rs index 2b52f8b01..8e6b01c44 100644 --- a/crates/language-server/src/functionality/handlers.rs +++ b/crates/language-server/src/functionality/handlers.rs @@ -11,7 +11,7 @@ use salsa::ParallelDatabase; use super::{capabilities::server_capabilities, hover::hover_helper}; -use crate::backend::workspace::{IngotFileContext, SyncableInputFile}; +use crate::backend::workspace::IngotFileContext; use tracing::{error, info}; @@ -78,7 +78,7 @@ impl Backend { .workspace .touch_input_for_file_path(&mut self.db, path) .unwrap(); - let _ = input.sync_from_text(&mut self.db, contents); + input.set_text(&mut self.db).to(contents); } let _ = tx_needs_diagnostics.send(path.to_string()); } @@ -116,7 +116,7 @@ impl Backend { Result, tower_lsp::jsonrpc::Error>, >, ) { - let db = self.db.snapshot(); + // let db = self.db.snapshot(); let file = self.workspace.get_input_for_file_path( params .text_document_position_params @@ -126,7 +126,7 @@ impl Backend { ); let response = file.and_then(|file| { - hover_helper(db, file, params).unwrap_or_else(|e| { + hover_helper(&self.db, file, params).unwrap_or_else(|e| { error!("Error handling hover: {:?}", e); None }) diff --git a/crates/language-server/src/functionality/hover.rs b/crates/language-server/src/functionality/hover.rs index 73ca2675d..4a3570d34 100644 --- a/crates/language-server/src/functionality/hover.rs +++ b/crates/language-server/src/functionality/hover.rs @@ -1,22 +1,19 @@ -use common::{InputDb, InputFile}; +use common::InputFile; use hir::lower::map_file_to_mod; -use hir::LowerHirDb; -use hir::{HirDb, SpannedHirDb}; use lsp_types::Hover; use tracing::info; -use salsa::Snapshot; - use tower_lsp::jsonrpc::Result; -use crate::{backend::db::LanguageServerDatabase, util::to_offset_from_position}; +use crate::backend::db::LanguageServerDb; +use 
crate::util::to_offset_from_position; use super::goto::{get_goto_target_scopes_for_cursor, Cursor}; use super::item_info::{get_item_definition_markdown, get_item_docstring, get_item_path_markdown}; pub fn hover_helper( - db: Snapshot, + db: &dyn LanguageServerDb, input: InputFile, params: lsp_types::HoverParams, ) -> Result> { @@ -29,17 +26,16 @@ pub fn hover_helper( ); let top_mod = map_file_to_mod(db.as_lower_hir_db(), input); - let goto_info = &get_goto_target_scopes_for_cursor(db.as_language_server_db(), top_mod, cursor) - .unwrap_or_default(); + let goto_info = &get_goto_target_scopes_for_cursor(db, top_mod, cursor).unwrap_or_default(); let hir_db = db.as_hir_db(); let scopes_info = goto_info .iter() .map(|scope| { let item = &scope.item(); - let pretty_path = get_item_path_markdown(item, hir_db); - let definition_source = get_item_definition_markdown(item, db.as_spanned_hir_db()); - let docs = get_item_docstring(item, hir_db); + let pretty_path = get_item_path_markdown(*item, hir_db); + let definition_source = get_item_definition_markdown(*item, db.as_spanned_hir_db()); + let docs = get_item_docstring(*item, hir_db); let result = [pretty_path, definition_source, docs] .iter() diff --git a/crates/language-server/src/functionality/item_info.rs b/crates/language-server/src/functionality/item_info.rs index b7f0e1e27..94cc48c7b 100644 --- a/crates/language-server/src/functionality/item_info.rs +++ b/crates/language-server/src/functionality/item_info.rs @@ -4,20 +4,20 @@ use hir::{ HirDb, SpannedHirDb, }; -pub fn get_item_docstring(item: &ItemKind, hir_db: &dyn HirDb) -> Option { +pub fn get_item_docstring(item: ItemKind, hir_db: &dyn HirDb) -> Option { let docstring = match item { - ItemKind::Func(func) => func.attributes(hir_db).data(hir_db).clone(), - ItemKind::Mod(mod_) => mod_.attributes(hir_db).data(hir_db).clone(), - ItemKind::Struct(struct_) => struct_.attributes(hir_db).data(hir_db).clone(), - ItemKind::Enum(enum_) => enum_.attributes(hir_db).data(hir_db).clone(), - ItemKind::TypeAlias(type_alias) => type_alias.attributes(hir_db).data(hir_db).clone(), - ItemKind::Trait(trait_) => trait_.attributes(hir_db).data(hir_db).clone(), - ItemKind::Impl(impl_) => impl_.attributes(hir_db).data(hir_db).clone(), + ItemKind::Func(func) => func.attributes(hir_db).data(hir_db), + ItemKind::Mod(mod_) => mod_.attributes(hir_db).data(hir_db), + ItemKind::Struct(struct_) => struct_.attributes(hir_db).data(hir_db), + ItemKind::Enum(enum_) => enum_.attributes(hir_db).data(hir_db), + ItemKind::TypeAlias(type_alias) => type_alias.attributes(hir_db).data(hir_db), + ItemKind::Trait(trait_) => trait_.attributes(hir_db).data(hir_db), + ItemKind::Impl(impl_) => impl_.attributes(hir_db).data(hir_db), // ItemKind::Body(body) => body.attributes(hir_db).data(hir_db).clone(), // ItemKind::Const(const_) => const_.attributes(hir_db).data(hir_db).clone(), // ItemKind::Use(use_) => use_.attributes(hir_db).data(hir_db).clone(), - ItemKind::Contract(contract) => contract.attributes(hir_db).data(hir_db).clone(), - _ => vec![], + ItemKind::Contract(contract) => contract.attributes(hir_db).data(hir_db), + _ => return None, } .iter() .filter_map(|attr| { @@ -36,13 +36,13 @@ pub fn get_item_docstring(item: &ItemKind, hir_db: &dyn HirDb) -> Option } } -pub fn get_item_path_markdown(item: &ItemKind, hir_db: &dyn HirDb) -> Option { +pub fn get_item_path_markdown(item: ItemKind, hir_db: &dyn HirDb) -> Option { item.scope() .pretty_path(hir_db) .map(|path| format!("```fe\n{}\n```", path)) } -pub fn 
get_item_definition_markdown(item: &ItemKind, db: &dyn SpannedHirDb) -> Option<String> { +pub fn get_item_definition_markdown(item: ItemKind, db: &dyn SpannedHirDb) -> Option<String> { // TODO: use pending AST features to get the definition without all this text manipulation let hir_db = db.as_hir_db(); let span = item.lazy_span().resolve(db)?; From 824966eeae12854138e8469595675539e5aa3dda Mon Sep 17 00:00:00 2001 From: Micah Date: Mon, 1 Apr 2024 09:59:46 -0500 Subject: [PATCH 64/66] Apply suggestions from code review Co-authored-by: Yoshitomo Nakanishi Co-authored-by: Sean Billig --- .../language-server/src/functionality/goto.rs | 6 +- .../src/functionality/streams.rs | 91 +++++++------------ 2 files changed, 35 insertions(+), 62 deletions(-) diff --git a/crates/language-server/src/functionality/goto.rs b/crates/language-server/src/functionality/goto.rs index 0ea037888..a00bfed07 100644 --- a/crates/language-server/src/functionality/goto.rs +++ b/crates/language-server/src/functionality/goto.rs @@ -14,10 +14,8 @@ use crate::{ use common::diagnostics::Span; use hir::span::LazySpan; -pub type GotoEnclosingPath = (PathId, ScopeId); -pub type GotoPathMap = FxHashMap<Span, GotoEnclosingPath>; -pub type GotoEnclosingSegment = (IdentId, ScopeId); -pub type GotoSegmentMap = FxHashMap<Span, GotoEnclosingSegment>; +type GotoEnclosingPath = (PathId, ScopeId); +type GotoEnclosingSegment = (IdentId, ScopeId); pub struct PathSpanCollector<'db> { path_map: GotoPathMap, diff --git a/crates/language-server/src/functionality/streams.rs index f9f33df0b..d46e173f6 100644 --- a/crates/language-server/src/functionality/streams.rs +++ b/crates/language-server/src/functionality/streams.rs @@ -11,71 +11,47 @@ use tokio_stream::wrappers::UnboundedReceiverStream; use tracing::info; +pub struct FileChange { + pub uri: url::Url, + pub kind: ChangeKind, +} +pub enum ChangeKind { + Open(String), + Create, + Edit(Option<String>), + Delete, +} + pub async fn setup_streams(backend: &mut Backend, mut receivers: MessageReceivers) { info!("setting up streams"); let mut initialized_stream = receivers.initialize_stream.fuse(); let mut shutdown_stream = receivers.shutdown_stream.fuse(); - let did_change_watched_files_stream = receivers.did_change_watched_files_stream.fork(); - - let flat_did_change_watched_files = did_change_watched_files_stream - .map(|params| futures::stream::iter(params.changes)) - .flatten() - .fork(); - - let did_change_watched_file_stream = flat_did_change_watched_files.clone().filter(|change| { - let change_type = change.typ; - Box::pin(async move { matches!(change_type, lsp_types::FileChangeType::CHANGED) }) - }); - let did_create_watched_file_stream = flat_did_change_watched_files.clone().filter(|change| { - let change_type = change.typ; - Box::pin(async move { matches!(change_type, lsp_types::FileChangeType::CREATED) }) - }); - - let mut did_delete_watch_file_stream = flat_did_change_watched_files - .clone() - .filter(|change| { - let change_type = change.typ; - Box::pin(async move { matches!(change_type, lsp_types::FileChangeType::DELETED) }) - }) - .fuse(); - - let did_open_stream = (&mut receivers.did_open_stream).fuse(); - let did_change_stream = (&mut receivers.did_change_stream).fuse(); let mut change_stream = ( - did_change_watched_file_stream.map(|change| { - let uri = change.uri; - let path = uri.to_file_path().unwrap(); - let text = std::fs::read_to_string(path).unwrap(); - TextDocumentItem { - uri: uri.clone(), - language_id: LANGUAGE_ID.to_string(), - version: 0, - text, - } - }), -
did_create_watched_file_stream.map(|change| { - let uri = change.uri; - let path = uri.to_file_path().unwrap(); - let text = std::fs::read_to_string(path).unwrap(); - TextDocumentItem { - uri: uri.clone(), - language_id: LANGUAGE_ID.to_string(), - version: 0, - text, - } - }), - did_open_stream.map(|params| TextDocumentItem { + receivers + .did_change_watched_files_stream + .map(|params| futures::stream::iter(params.changes)) + .flatten() + .fuse() + .map(|event| { + let kind = match event.typ { + FileChangeType::CHANGED => ChangeKind::Edit(None), + FileChangeType::CREATED => ChangeKind::Create, + FileChangeType::DELETED => ChangeKind::Delete, + _ => unreachable!(), + }; + FileChange { + uri: event.uri, + kind, + } + }), + receivers.did_open_stream.fuse().map(|params| FileChange { uri: params.text_document.uri, - language_id: LANGUAGE_ID.to_string(), - version: params.text_document.version, - text: params.text_document.text, + kind: ChangeKind::Open(params.text_document.text), }), - did_change_stream.map(|params| TextDocumentItem { + receivers.did_change_stream.fuse().map(|params| FileChange { uri: params.text_document.uri, - language_id: LANGUAGE_ID.to_string(), - version: params.text_document.version, - text: params.content_changes[0].text.clone(), + kind: ChangeKind::Edit(Some(params.content_changes[0].text.clone())), }), ) .merge() @@ -96,8 +72,7 @@ pub async fn setup_streams(backend: &mut Backend, mut receivers: MessageReceiver tokio::select! { Some((params, responder)) = initialized_stream.next() => backend.handle_initialized(params, responder).await, Some((_, responder)) = shutdown_stream.next() => backend.handle_shutdown(responder).await, - Some(params) = did_delete_watch_file_stream.next() => backend.handle_deleted(params, tx_needs_diagnostics.clone()).await, - Some(params) = change_stream.next() => backend.handle_change(params, tx_needs_diagnostics.clone()).await, + Some(change) = change_stream.next() => backend.handle_change(change, tx_needs_diagnostics.clone()).await, Some(files_need_diagnostics) = diagnostics_stream.next() => backend.handle_diagnostics(files_need_diagnostics).await, Some((params, responder)) = hover_stream.next() => backend.handle_hover(params, responder).await, Some((params, responder)) = goto_definition_stream.next() => backend.handle_goto_definition(params, responder).await, From c697a17feedead7948e5c570fcbb260bc6ad2975 Mon Sep 17 00:00:00 2001 From: Micah Date: Mon, 1 Apr 2024 10:43:08 -0500 Subject: [PATCH 65/66] Integrate review suggestions --- .../language-server/src/functionality/goto.rs | 8 +- .../src/functionality/handlers.rs | 73 ++++++++++++------- .../src/functionality/streams.rs | 5 +- crates/language-server/src/globals.rs | 1 - crates/language-server/src/main.rs | 1 - .../fixtures/features/numeric_sizes.fe | 12 +-- 6 files changed, 57 insertions(+), 43 deletions(-) delete mode 100644 crates/language-server/src/globals.rs diff --git a/crates/language-server/src/functionality/goto.rs b/crates/language-server/src/functionality/goto.rs index a00bfed07..d7ba911e1 100644 --- a/crates/language-server/src/functionality/goto.rs +++ b/crates/language-server/src/functionality/goto.rs @@ -18,8 +18,8 @@ type GotoEnclosingPath = (PathId, ScopeId); type GotoEnclosingSegment = (IdentId, ScopeId); pub struct PathSpanCollector<'db> { - path_map: GotoPathMap, - ident_map: GotoSegmentMap, + path_map: FxHashMap, + ident_map: FxHashMap, db: &'db dyn LanguageServerDb, } @@ -68,7 +68,7 @@ impl<'db> Visitor for PathSpanCollector<'db> { } } -fn 
smallest_enclosing_path(cursor: Cursor, path_map: &GotoPathMap) -> Option { +fn smallest_enclosing_path(cursor: Cursor, path_map: &FxHashMap) -> Option { let mut smallest_enclosing_path = None; let mut smallest_range_size = None; @@ -87,7 +87,7 @@ fn smallest_enclosing_path(cursor: Cursor, path_map: &GotoPathMap) -> Option ) -> Option { let mut smallest_enclosing_segment = None; let mut smallest_range_size = None; diff --git a/crates/language-server/src/functionality/handlers.rs b/crates/language-server/src/functionality/handlers.rs index 8e6b01c44..38b71e1f3 100644 --- a/crates/language-server/src/functionality/handlers.rs +++ b/crates/language-server/src/functionality/handlers.rs @@ -6,10 +6,13 @@ use common::InputDb; use futures::TryFutureExt; use fxhash::FxHashSet; -use lsp_types::TextDocumentItem; use salsa::ParallelDatabase; -use super::{capabilities::server_capabilities, hover::hover_helper}; +use super::{ + capabilities::server_capabilities, + hover::hover_helper, + streams::{ChangeKind, FileChange}, +}; use crate::backend::workspace::IngotFileContext; @@ -50,37 +53,53 @@ impl Backend { let _ = responder.send(Ok(())); } - pub(super) async fn handle_deleted( + pub(super) async fn handle_change( &mut self, - params: lsp_types::FileEvent, + change: FileChange, tx_needs_diagnostics: tokio::sync::mpsc::UnboundedSender, ) { - let path = params.uri.to_file_path().unwrap(); - info!("file deleted: {:?}", path); + let path = change + .uri + .to_file_path() + .unwrap_or_else(|_| panic!("Failed to convert URI to path: {:?}", change.uri)); + let path = path.to_str().unwrap(); - let _ = self - .workspace - .remove_input_for_file_path(&mut self.db, path); - let _ = tx_needs_diagnostics.send(path.to_string()); - } - pub(super) async fn handle_change( - &mut self, - doc: TextDocumentItem, - tx_needs_diagnostics: tokio::sync::mpsc::UnboundedSender, - ) { - info!("change detected: {:?}", doc.uri); - let path_buf = doc.uri.to_file_path().unwrap(); - let path = path_buf.to_str().unwrap(); - let contents = Some(doc.text); - if let Some(contents) = contents { - let input = self - .workspace - .touch_input_for_file_path(&mut self.db, path) - .unwrap(); - input.set_text(&mut self.db).to(contents); + match change.kind { + ChangeKind::Open(contents) => { + info!("file opened: {:?}", &path); + self.update_input_file_text(path, contents); + } + ChangeKind::Create => { + info!("file created: {:?}", &path); + let contents = tokio::fs::read_to_string(&path).await.unwrap(); + self.update_input_file_text(path, contents) + } + ChangeKind::Edit(contents) => { + info!("file edited: {:?}", &path); + let contents = if let Some(text) = contents { + text + } else { + tokio::fs::read_to_string(&path).await.unwrap() + }; + self.update_input_file_text(path, contents); + } + ChangeKind::Delete => { + info!("file deleted: {:?}", path); + self.workspace + .remove_input_for_file_path(&mut self.db, path) + .unwrap(); + } } - let _ = tx_needs_diagnostics.send(path.to_string()); + tx_needs_diagnostics.send(path.to_string()).unwrap(); + } + + fn update_input_file_text(&mut self, path: &str, contents: String) { + let input = self + .workspace + .touch_input_for_file_path(&mut self.db, path) + .unwrap(); + input.set_text(&mut self.db).to(contents); } pub(super) async fn handle_diagnostics(&mut self, files_need_diagnostics: Vec) { diff --git a/crates/language-server/src/functionality/streams.rs b/crates/language-server/src/functionality/streams.rs index d46e173f6..2fabbf14f 100644 --- 
a/crates/language-server/src/functionality/streams.rs +++ b/crates/language-server/src/functionality/streams.rs @@ -1,12 +1,9 @@ use crate::backend::Backend; -use fork_stream::StreamExt as _; use futures_batch::ChunksTimeoutStreamExt; - -use crate::globals::LANGUAGE_ID; use crate::server::MessageReceivers; use futures::StreamExt; use futures_concurrency::prelude::*; -use lsp_types::TextDocumentItem; +use lsp_types::FileChangeType; use tokio_stream::wrappers::UnboundedReceiverStream; use tracing::info; diff --git a/crates/language-server/src/globals.rs b/crates/language-server/src/globals.rs deleted file mode 100644 index e6ef17dad..000000000 --- a/crates/language-server/src/globals.rs +++ /dev/null @@ -1 +0,0 @@ -pub(crate) const LANGUAGE_ID: &str = "fe"; diff --git a/crates/language-server/src/main.rs b/crates/language-server/src/main.rs index a08642148..b578a17cc 100644 --- a/crates/language-server/src/main.rs +++ b/crates/language-server/src/main.rs @@ -1,6 +1,5 @@ mod backend; mod functionality; -mod globals; mod logger; mod server; mod util; diff --git a/crates/test-files/fixtures/features/numeric_sizes.fe b/crates/test-files/fixtures/features/numeric_sizes.fe index fdec05e3a..9773757e6 100644 --- a/crates/test-files/fixtures/features/numeric_sizes.fe +++ b/crates/test-files/fixtures/features/numeric_sizes.fe @@ -27,8 +27,8 @@ const U128_MAX: u128 = 340282366920938463463374607431768211455 const I256_MIN: i256 = -57896044618658097711785492504343953926634992332820282019728792003956564819968 const I256_MAX: i256 = 57896044618658097711785492504343953926634992332820282019728792003956564819967 -const U256_MIN: u256 = 0 -const U256_MAX: u256 = 115792089237316195423570985008687907853269984665640564039457584007913129639935 +const u256_MIN: u256 = 0 +const u256_MAX: u256 = 115792089237316195423570985008687907853269984665640564039457584007913129639935 contract Foo { @@ -38,7 +38,7 @@ contract Foo { assert u32::min() == U32_MIN assert u64::min() == U64_MIN assert u128::min() == U128_MIN - assert u256::min() == U256_MIN + assert u256::min() == u256_MIN // TODO: Investigate why these can't be compared against their const values assert i8::min() == get_i8_const_min() assert i16::min() == get_i16_const_min() @@ -52,7 +52,7 @@ contract Foo { assert u32::max() == U32_MAX assert u64::max() == U64_MAX assert u128::max() == U128_MAX - assert u256::max() == U256_MAX + assert u256::max() == u256_MAX assert i8::max() == I8_MAX assert i16::max() == I16_MAX @@ -106,7 +106,7 @@ contract Foo { } pub fn get_u256_const_min() -> u256 { - return U256_MIN + return u256_MIN } pub fn get_i8_min() -> i8 { @@ -202,7 +202,7 @@ contract Foo { } pub fn get_u256_const_max() -> u256 { - return U256_MAX + return u256_MAX } pub fn get_i8_max() -> i8 { From d8ecf229b7a2992210f0ec6183fd05f256f50d1e Mon Sep 17 00:00:00 2001 From: Micah Date: Mon, 1 Apr 2024 12:12:07 -0500 Subject: [PATCH 66/66] Implement @y-nak's suggested goto improvements --- .../language-server/src/functionality/goto.rs | 145 +++++++----------- .../src/functionality/streams.rs | 2 +- .../test_files/single_ingot/src/lib.snap | 4 +- 3 files changed, 62 insertions(+), 89 deletions(-) diff --git a/crates/language-server/src/functionality/goto.rs b/crates/language-server/src/functionality/goto.rs index d7ba911e1..4403bba52 100644 --- a/crates/language-server/src/functionality/goto.rs +++ b/crates/language-server/src/functionality/goto.rs @@ -2,7 +2,7 @@ use fxhash::FxHashMap; use hir::{ hir_def::{scope_graph::ScopeId, IdentId, ItemKind, Partial, PathId, 
TopLevelMod}, span::DynLazySpan, - visitor::{prelude::LazyPathSpan, walk_path, Visitor, VisitorCtxt}, + visitor::{prelude::LazyPathSpan, Visitor, VisitorCtxt}, LowerHirDb, SpannedHirDb, }; use hir_analysis::name_resolution::{EarlyResolvedPath, NameDomain, NameRes}; @@ -14,81 +14,64 @@ use crate::{ use common::diagnostics::Span; use hir::span::LazySpan; -type GotoEnclosingPath = (PathId, ScopeId); -type GotoEnclosingSegment = (IdentId, ScopeId); +pub type Cursor = rowan::TextSize; +struct GotoEnclosingPathSegment { + path: PathId, + idx: usize, + scope: ScopeId, +} +impl GotoEnclosingPathSegment { + fn segments<'db>(&self, db: &'db dyn LanguageServerDb) -> &'db [Partial] { + &self.path.segments(db.as_hir_db())[0..self.idx + 1] + } + fn is_intermediate(&self, db: &dyn LanguageServerDb) -> bool { + self.idx < self.path.segments(db.as_hir_db()).len() - 1 + } +} -pub struct PathSpanCollector<'db> { - path_map: FxHashMap, - ident_map: FxHashMap, +struct PathSegmentSpanCollector<'db> { + segment_map: FxHashMap, db: &'db dyn LanguageServerDb, } -impl<'db> PathSpanCollector<'db> { - pub fn new(db: &'db dyn LanguageServerDb) -> Self { +impl<'db> PathSegmentSpanCollector<'db> { + fn new(db: &'db dyn LanguageServerDb) -> Self { Self { - path_map: FxHashMap::default(), - ident_map: FxHashMap::default(), + segment_map: FxHashMap::default(), db, } } } -pub type Cursor = rowan::TextSize; - -impl<'db> Visitor for PathSpanCollector<'db> { +impl<'db> Visitor for PathSegmentSpanCollector<'db> { fn visit_path(&mut self, ctxt: &mut VisitorCtxt<'_, LazyPathSpan>, path: PathId) { - let Some(span) = ctxt - .span() - .and_then(|lazy_span| lazy_span.resolve(self.db.as_spanned_hir_db())) - else { - return; - }; - - let scope = ctxt.scope(); - self.path_map.insert(span, (path, scope)); - walk_path(self, ctxt, path); - } - - fn visit_ident( - &mut self, - ctxt: &mut VisitorCtxt<'_, hir::visitor::prelude::LazySpanAtom>, - ident: hir::hir_def::IdentId, - ) { - // keep track of `Span` --> `(IdentId, ScopeId)` so we can get more detailed information - // about the part of the path over which the cursor is hovering - let Some(span) = ctxt - .span() - .and_then(|lazy_span| lazy_span.resolve(self.db.as_spanned_hir_db())) - else { + let Some(path_span) = ctxt.span() else { return; }; let scope = ctxt.scope(); - self.ident_map.insert(span, (ident, scope)); - } -} - -fn smallest_enclosing_path(cursor: Cursor, path_map: &FxHashMap) -> Option { - let mut smallest_enclosing_path = None; - let mut smallest_range_size = None; - - for (span, enclosing_path) in path_map { - if span.range.contains(cursor) { - let range_size = span.range.end() - span.range.start(); - if smallest_range_size.is_none() || range_size < smallest_range_size.unwrap() { - smallest_enclosing_path = Some(*enclosing_path); - smallest_range_size = Some(range_size); - } + for i in 0..path.segments(self.db.as_hir_db()).iter().len() { + let Some(segment_span) = path_span.segment(i).resolve(self.db.as_spanned_hir_db()) + else { + continue; + }; + + self.segment_map.insert( + segment_span, + GotoEnclosingPathSegment { + path, + idx: i, + scope, + }, + ); } } - - smallest_enclosing_path } -fn smallest_enclosing_ident( +fn smallest_enclosing_segment( cursor: Cursor, - ident_map: &FxHashMap -) -> Option { + ident_map: &FxHashMap, +) -> Option<&GotoEnclosingPathSegment> { let mut smallest_enclosing_segment = None; let mut smallest_range_size = None; @@ -96,7 +79,7 @@ fn smallest_enclosing_ident( if span.range.contains(cursor) { let range_size = span.range.end() - 
span.range.start(); if smallest_range_size.is_none() || range_size < smallest_range_size.unwrap() { - smallest_enclosing_segment = Some(*enclosing_segment); + smallest_enclosing_segment = Some(enclosing_segment); smallest_range_size = Some(range_size); } } @@ -143,34 +126,23 @@ pub fn get_goto_target_scopes_for_cursor( let item: ItemKind = find_enclosing_item(db.as_spanned_hir_db(), top_mod, cursor)?; let mut visitor_ctxt = VisitorCtxt::with_item(db.as_hir_db(), item); - let mut path_collector = PathSpanCollector::new(db); - path_collector.visit_item(&mut visitor_ctxt, item); - - let (cursor_ident, _) = smallest_enclosing_ident(cursor, &path_collector.ident_map)?; - let (cursor_path, cursor_path_scope) = - smallest_enclosing_path(cursor, &path_collector.path_map)?; - - // we need to get the segment upto and including `cursor_ident` - let mut segments = cursor_path.segments(db.as_hir_db()).clone(); - let is_partial = if let Some(pos) = segments.iter().position(|ident| match ident { - Partial::Present(ident) => *ident == cursor_ident, - Partial::Absent => false, - }) { - segments.truncate(pos + 1); - segments.len() < cursor_path.segments(db.as_hir_db()).len() - } else { - false - }; + let mut path_segment_collector = PathSegmentSpanCollector::new(db); + path_segment_collector.visit_item(&mut visitor_ctxt, item); + + let cursor_segment = smallest_enclosing_segment(cursor, &path_segment_collector.segment_map)?; + let segments = cursor_segment.segments(db); + let is_intermediate_segment = cursor_segment.is_intermediate(db); + // let is_partial = cursor_segment.idx < cursor_segment.path.segments(db.as_jar_db()).len(); let resolved_segments = hir_analysis::name_resolution::resolve_segments_early( db.as_hir_analysis_db(), - segments.as_slice(), - cursor_path_scope, + segments, + cursor_segment.scope, ); let scopes = match resolved_segments { EarlyResolvedPath::Full(bucket) => { - if is_partial { + if is_intermediate_segment { match bucket.pick(NameDomain::Type) { Ok(res) => res.scope().iter().cloned().collect::>(), _ => bucket.iter().filter_map(NameRes::scope).collect::>(), @@ -272,13 +244,14 @@ mod tests { top_mod: TopLevelMod, ) -> Vec { let mut visitor_ctxt = VisitorCtxt::with_top_mod(db.as_hir_db(), top_mod); - let mut path_collector = PathSpanCollector::new(db); + // let mut path_collector = PathSpanCollector::new(db); + let mut path_collector = PathSegmentSpanCollector::new(db); path_collector.visit_top_mod(&mut visitor_ctxt, top_mod); - let ident_map = path_collector.ident_map; + let segment_map = path_collector.segment_map; let mut cursors = Vec::new(); - for (span, _) in ident_map { + for (span, _) in segment_map { let cursor = span.range.start(); cursors.push(cursor); } @@ -406,15 +379,15 @@ mod tests { for cursor in &cursors { let mut visitor_ctxt = VisitorCtxt::with_top_mod(db.as_hir_db(), top_mod); - let mut path_collector = PathSpanCollector::new(db); + let mut path_collector = PathSegmentSpanCollector::new(db); path_collector.visit_top_mod(&mut visitor_ctxt, top_mod); - let path_map = path_collector.path_map; - let enclosing_path = smallest_enclosing_path(*cursor, &path_map); + let path_map = path_collector.segment_map; + let enclosing_path_segment = smallest_enclosing_segment(*cursor, &path_map); - if let Some((path, scope)) = enclosing_path { + if let Some(GotoEnclosingPathSegment { path, scope, .. 
}) = enclosing_path_segment { let resolved_enclosing_path = - hir_analysis::name_resolution::resolve_path_early(db, path, scope); + hir_analysis::name_resolution::resolve_path_early(db, *path, *scope); let res = match resolved_enclosing_path { EarlyResolvedPath::Full(bucket) => bucket diff --git a/crates/language-server/src/functionality/streams.rs b/crates/language-server/src/functionality/streams.rs index 2fabbf14f..573e326e7 100644 --- a/crates/language-server/src/functionality/streams.rs +++ b/crates/language-server/src/functionality/streams.rs @@ -1,7 +1,7 @@ use crate::backend::Backend; -use futures_batch::ChunksTimeoutStreamExt; use crate::server::MessageReceivers; use futures::StreamExt; +use futures_batch::ChunksTimeoutStreamExt; use futures_concurrency::prelude::*; use lsp_types::FileChangeType; use tokio_stream::wrappers::UnboundedReceiverStream; diff --git a/crates/language-server/test_files/single_ingot/src/lib.snap b/crates/language-server/test_files/single_ingot/src/lib.snap index 2326668ff..680a6b6af 100644 --- a/crates/language-server/test_files/single_ingot/src/lib.snap +++ b/crates/language-server/test_files/single_ingot/src/lib.snap @@ -1,6 +1,6 @@ --- -source: crates/language-server/src/goto.rs -assertion_line: 264 +source: crates/language-server/src/functionality/goto.rs +assertion_line: 337 expression: snapshot input_file: crates/language-server/test_files/single_ingot/src/lib.fe ---