diff --git a/src/tools/rust-analyzer/.github/workflows/ci.yaml b/src/tools/rust-analyzer/.github/workflows/ci.yaml index 5cf4a8fd43939..d82e46016dc2a 100644 --- a/src/tools/rust-analyzer/.github/workflows/ci.yaml +++ b/src/tools/rust-analyzer/.github/workflows/ci.yaml @@ -1,14 +1,10 @@ -# Please make sure that the `needs` fields for both `end-success` and `end-failure` +# Please make sure that the `needs` field for the `conclusion` job # are updated when adding new jobs! name: CI on: pull_request: - push: - branches: - - auto - - try - - automation/bors/try + merge_group: env: CARGO_INCREMENTAL: 0 @@ -237,20 +233,21 @@ jobs: - name: check for typos run: typos - end-success: - name: bors build finished - if: github.event.pusher.name == 'bors' && success() - runs-on: ubuntu-latest + conclusion: needs: [rust, rust-cross, typescript, typo-check] - steps: - - name: Mark the job as successful - run: exit 0 - - end-failure: - name: bors build finished - if: github.event.pusher.name == 'bors' && !success() + # We need to ensure this job does *not* get skipped if its dependencies fail, + # because a skipped job is considered a success by GitHub. So we have to + # override `if:`. We use `!cancelled()` to ensure the job still does not run + # when the workflow is canceled manually. + # + # ALL THE PREVIOUS JOBS NEED TO BE ADDED TO THE `needs` SECTION OF THIS JOB! + if: ${{ !cancelled() }} runs-on: ubuntu-latest - needs: [rust, rust-cross, typescript, typo-check] steps: - - name: Mark the job as a failure - run: exit 1 + # Manually check the status of all dependencies. `if: failure()` does not work. + - name: Conclusion + run: | + # Print the dependent jobs to see them in the CI log + jq -C <<< '${{ toJson(needs) }}' + # Check if all jobs that we depend on (in the needs array) were successful.
+ jq --exit-status 'all(.result == "success")' <<< '${{ toJson(needs) }}' diff --git a/src/tools/rust-analyzer/.gitignore b/src/tools/rust-analyzer/.gitignore index 68c87a6b1ed41..c4470a45078a2 100644 --- a/src/tools/rust-analyzer/.gitignore +++ b/src/tools/rust-analyzer/.gitignore @@ -1,6 +1,5 @@ -/target/ +target/ /dist/ -crates/*/target **/*.rs.bk **/*.rs.pending-snap .idea/* diff --git a/src/tools/rust-analyzer/Cargo.lock b/src/tools/rust-analyzer/Cargo.lock index fd569571b38bf..695c37f6d7bfc 100644 --- a/src/tools/rust-analyzer/Cargo.lock +++ b/src/tools/rust-analyzer/Cargo.lock @@ -164,6 +164,7 @@ dependencies = [ "rustc-hash 2.0.0", "syntax", "syntax-bridge", + "tracing", "tt", ] @@ -556,6 +557,7 @@ dependencies = [ "syntax-bridge", "test-fixture", "test-utils", + "text-size", "tracing", "triomphe", "tt", @@ -670,7 +672,6 @@ dependencies = [ "syntax", "test-fixture", "test-utils", - "text-edit", "toolchain", "tracing", "triomphe", @@ -692,7 +693,6 @@ dependencies = [ "syntax", "test-fixture", "test-utils", - "text-edit", "tracing", ] @@ -711,7 +711,6 @@ dependencies = [ "syntax", "test-fixture", "test-utils", - "text-edit", "tracing", ] @@ -743,7 +742,6 @@ dependencies = [ "syntax", "test-fixture", "test-utils", - "text-edit", "tracing", "triomphe", ] @@ -765,7 +763,6 @@ dependencies = [ "syntax", "test-fixture", "test-utils", - "text-edit", "tracing", ] @@ -784,7 +781,6 @@ dependencies = [ "syntax", "test-fixture", "test-utils", - "text-edit", "triomphe", ] @@ -1497,9 +1493,9 @@ dependencies = [ [[package]] name = "ra-ap-rustc_abi" -version = "0.73.0" +version = "0.75.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "879ece0781e3c1cb670b9f29775c81a43a16db789d1296fad6bc5c74065b2fac" +checksum = "d5bc2cfc7264d84215a08875ef90a1d35f76b5c9ad1993515d2da7e4e40b2b4b" dependencies = [ "bitflags 2.6.0", "ra-ap-rustc_index", @@ -1508,9 +1504,9 @@ dependencies = [ [[package]] name = "ra-ap-rustc_index" -version = "0.73.0" +version = "0.75.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6910087ff89bb9f3db114bfcd86b5139042731fe7278d3ff4ceaa69a140154a7" +checksum = "e8929140697812e5dd09e19cf446d85146332363f0dbc125d4214834c34ead96" dependencies = [ "arrayvec", "ra-ap-rustc_index_macros", @@ -1519,9 +1515,9 @@ dependencies = [ [[package]] name = "ra-ap-rustc_index_macros" -version = "0.73.0" +version = "0.75.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b6f7bd12b678fbb37444ba77f3b0cfc13b7394a6dc7b0c799491fc9df0a9997" +checksum = "514a3f5d04c8b4a2750f29746cc9abb1f78deb7e72e4ad1dc95bbc608f3db157" dependencies = [ "proc-macro2", "quote", @@ -1530,9 +1526,9 @@ dependencies = [ [[package]] name = "ra-ap-rustc_lexer" -version = "0.73.0" +version = "0.75.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "119bc05b5b6bc3e7f5b67ce8b8080e185da94bd83c447f91b6b3f3ecf60cbab1" +checksum = "276fcb1205da071a0cd64416f3f0e198043c11f176c5b501a45dbf0cb33979f2" dependencies = [ "unicode-properties", "unicode-xid", @@ -1540,9 +1536,9 @@ dependencies = [ [[package]] name = "ra-ap-rustc_parse_format" -version = "0.73.0" +version = "0.75.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70ed6150ae71d905c064dc88d7824ebb0fa81083f45d7477cba7b57176f2f635" +checksum = "961b30b22cfac296b14b72e9f95e79c16cebc8c926872755fb1568a6c4243a62" dependencies = [ "ra-ap-rustc_index", "ra-ap-rustc_lexer", @@ -1550,9 +1546,9 @@ dependencies = [ 
[[package]] name = "ra-ap-rustc_pattern_analysis" -version = "0.73.0" +version = "0.75.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e830862a0ec85fce211d34735315686bb8d6a12d418d6d735fb534aa1cd3293" +checksum = "614232513814a4b714fea7f11345d31c0c277bca3089bb6ca1ec20870bfc022a" dependencies = [ "ra-ap-rustc_index", "rustc-hash 2.0.0", @@ -1884,9 +1880,9 @@ checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" [[package]] name = "smol_str" -version = "0.3.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "66eaf762c5af19db3108300515c8aa7a50efc90ff745f4c62288052ebf9fdd25" +checksum = "9676b89cd56310a87b93dec47b11af744f34d5fc9f367b829474eec0a891350d" dependencies = [ "borsh", "serde", @@ -1978,7 +1974,6 @@ dependencies = [ "smol_str", "stdx", "test-utils", - "text-edit", "tracing", "triomphe", ] @@ -2026,14 +2021,6 @@ dependencies = [ "tracing", ] -[[package]] -name = "text-edit" -version = "0.0.0" -dependencies = [ - "itertools", - "text-size", -] - [[package]] name = "text-size" version = "1.1.1" diff --git a/src/tools/rust-analyzer/Cargo.toml b/src/tools/rust-analyzer/Cargo.toml index 9db62de9abfc0..3aa93b7b7b4b9 100644 --- a/src/tools/rust-analyzer/Cargo.toml +++ b/src/tools/rust-analyzer/Cargo.toml @@ -4,7 +4,7 @@ exclude = ["crates/proc-macro-srv/proc-macro-test/imp"] resolver = "2" [workspace.package] -rust-version = "1.81" +rust-version = "1.82" edition = "2021" license = "MIT OR Apache-2.0" authors = ["rust-analyzer team"] @@ -79,17 +79,16 @@ span = { path = "./crates/span", version = "0.0.0" } stdx = { path = "./crates/stdx", version = "0.0.0" } syntax = { path = "./crates/syntax", version = "0.0.0" } syntax-bridge = { path = "./crates/syntax-bridge", version = "0.0.0" } -text-edit = { path = "./crates/text-edit", version = "0.0.0" } toolchain = { path = "./crates/toolchain", version = "0.0.0" } tt = { path = "./crates/tt", version = "0.0.0" } vfs-notify = { path = "./crates/vfs-notify", version = "0.0.0" } vfs = { path = "./crates/vfs", version = "0.0.0" } -ra-ap-rustc_lexer = { version = "0.73", default-features = false } -ra-ap-rustc_parse_format = { version = "0.73", default-features = false } -ra-ap-rustc_index = { version = "0.73", default-features = false } -ra-ap-rustc_abi = { version = "0.73", default-features = false } -ra-ap-rustc_pattern_analysis = { version = "0.73", default-features = false } +ra-ap-rustc_lexer = { version = "0.75", default-features = false } +ra-ap-rustc_parse_format = { version = "0.75", default-features = false } +ra-ap-rustc_index = { version = "0.75", default-features = false } +ra-ap-rustc_abi = { version = "0.75", default-features = false } +ra-ap-rustc_pattern_analysis = { version = "0.75", default-features = false } # local crates that aren't published to crates.io. These should not have versions. 
test-fixture = { path = "./crates/test-fixture" } @@ -145,7 +144,7 @@ smallvec = { version = "1.10.0", features = [ "union", "const_generics", ] } -smol_str = "0.3.1" +smol_str = "0.3.2" snap = "1.1.0" text-size = "1.1.1" tracing = "0.1.40" diff --git a/src/tools/rust-analyzer/crates/cfg/Cargo.toml b/src/tools/rust-analyzer/crates/cfg/Cargo.toml index 29b7ad6f8fe2a..040bddbd7fd3a 100644 --- a/src/tools/rust-analyzer/crates/cfg/Cargo.toml +++ b/src/tools/rust-analyzer/crates/cfg/Cargo.toml @@ -14,6 +14,7 @@ doctest = false [dependencies] rustc-hash.workspace = true +tracing.workspace = true # locals deps tt = { workspace = true, optional = true } diff --git a/src/tools/rust-analyzer/crates/cfg/src/lib.rs b/src/tools/rust-analyzer/crates/cfg/src/lib.rs index c2d4008605618..6a6213a871fda 100644 --- a/src/tools/rust-analyzer/crates/cfg/src/lib.rs +++ b/src/tools/rust-analyzer/crates/cfg/src/lib.rs @@ -9,7 +9,7 @@ use std::fmt; use rustc_hash::FxHashSet; -use intern::Symbol; +use intern::{sym, Symbol}; pub use cfg_expr::{CfgAtom, CfgExpr}; pub use dnf::DnfExpr; @@ -24,11 +24,17 @@ pub use dnf::DnfExpr; /// of key and value in `key_values`. /// /// See: -#[derive(Clone, PartialEq, Eq, Default)] +#[derive(Clone, PartialEq, Eq)] pub struct CfgOptions { enabled: FxHashSet, } +impl Default for CfgOptions { + fn default() -> Self { + Self { enabled: FxHashSet::from_iter([CfgAtom::Flag(sym::true_.clone())]) } + } +} + impl fmt::Debug for CfgOptions { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let mut items = self @@ -54,23 +60,37 @@ impl CfgOptions { } pub fn insert_atom(&mut self, key: Symbol) { - self.enabled.insert(CfgAtom::Flag(key)); + self.insert_any_atom(CfgAtom::Flag(key)); } pub fn insert_key_value(&mut self, key: Symbol, value: Symbol) { - self.enabled.insert(CfgAtom::KeyValue { key, value }); + self.insert_any_atom(CfgAtom::KeyValue { key, value }); } pub fn apply_diff(&mut self, diff: CfgDiff) { for atom in diff.enable { - self.enabled.insert(atom); + self.insert_any_atom(atom); } for atom in diff.disable { + let (CfgAtom::Flag(sym) | CfgAtom::KeyValue { key: sym, .. }) = &atom; + if *sym == sym::true_ || *sym == sym::false_ { + tracing::error!("cannot remove `true` or `false` from cfg"); + continue; + } self.enabled.remove(&atom); } } + fn insert_any_atom(&mut self, atom: CfgAtom) { + let (CfgAtom::Flag(sym) | CfgAtom::KeyValue { key: sym, .. 
}) = &atom; + if *sym == sym::true_ || *sym == sym::false_ { + tracing::error!("cannot insert `true` or `false` to cfg"); + return; + } + self.enabled.insert(atom); + } + pub fn get_cfg_keys(&self) -> impl Iterator { self.enabled.iter().map(|it| match it { CfgAtom::Flag(key) => key, @@ -88,7 +108,7 @@ impl CfgOptions { impl Extend for CfgOptions { fn extend>(&mut self, iter: T) { - iter.into_iter().for_each(|cfg_flag| _ = self.enabled.insert(cfg_flag)); + iter.into_iter().for_each(|cfg_flag| self.insert_any_atom(cfg_flag)); } } diff --git a/src/tools/rust-analyzer/crates/hir-def/Cargo.toml b/src/tools/rust-analyzer/crates/hir-def/Cargo.toml index c8ba5da449e99..375f18d9fe1f8 100644 --- a/src/tools/rust-analyzer/crates/hir-def/Cargo.toml +++ b/src/tools/rust-analyzer/crates/hir-def/Cargo.toml @@ -29,6 +29,7 @@ smallvec.workspace = true hashbrown.workspace = true triomphe.workspace = true rustc_apfloat = "0.2.0" +text-size.workspace = true ra-ap-rustc_parse_format.workspace = true ra-ap-rustc_abi.workspace = true diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body.rs b/src/tools/rust-analyzer/crates/hir-def/src/body.rs index 9535b5aea7c73..5a386f6cf8d14 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/body.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/body.rs @@ -10,6 +10,7 @@ use std::ops::{Deref, Index}; use base_db::CrateId; use cfg::{CfgExpr, CfgOptions}; +use either::Either; use hir_expand::{name::Name, ExpandError, InFile}; use la_arena::{Arena, ArenaMap, Idx, RawIdx}; use rustc_hash::FxHashMap; @@ -22,15 +23,33 @@ use crate::{ db::DefDatabase, expander::Expander, hir::{ - dummy_expr_id, Binding, BindingId, Expr, ExprId, Label, LabelId, Pat, PatId, RecordFieldPat, + dummy_expr_id, Array, AsmOperand, Binding, BindingId, Expr, ExprId, ExprOrPatId, Label, + LabelId, Pat, PatId, RecordFieldPat, Statement, }, item_tree::AttrOwner, nameres::DefMap, path::{ModPath, Path}, src::HasSource, + type_ref::{TypeRef, TypeRefId, TypesMap, TypesSourceMap}, BlockId, DefWithBodyId, HasModule, Lookup, }; +/// A wrapper around [`span::SyntaxContextId`] that is intended only for comparisons. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct HygieneId(pub(crate) span::SyntaxContextId); + +impl HygieneId { + pub const ROOT: Self = Self(span::SyntaxContextId::ROOT); + + pub fn new(ctx: span::SyntaxContextId) -> Self { + Self(ctx) + } + + pub(crate) fn is_root(self) -> bool { + self.0.is_root() + } +} + /// The body of an item (function, const etc.). #[derive(Debug, Eq, PartialEq)] pub struct Body { @@ -51,8 +70,25 @@ pub struct Body { pub self_param: Option, /// The `ExprId` of the actual body expression. pub body_expr: ExprId, + pub types: TypesMap, /// Block expressions in this body that may contain inner items. block_scopes: Vec, + + /// A map from binding to its hygiene ID. + /// + /// Bindings that don't come from macro expansion are not allocated to save space, so not all bindings appear here. + /// If a binding does not appear here it has `SyntaxContextId::ROOT`. + /// + /// Note that this may not be the direct `SyntaxContextId` of the binding's expansion, because transparent + /// expansions are attributed to their parent expansion (recursively). + binding_hygiene: FxHashMap, + /// A map from an variable usages to their hygiene ID. + /// + /// Expressions that can be recorded here are single segment path, although not all single segments path refer + /// to variables and have hygiene (some refer to items, we don't know at this stage). 
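The `crates/cfg` hunk above gives every `CfgOptions` an always-enabled `true` atom and rejects any attempt to insert or remove `true`/`false`, so those atoms keep their literal meaning. A minimal sketch of that invariant, using `String` and `std::collections::HashSet` in place of the real `Symbol`/`FxHashSet`; the `remove_atom` and `check` helpers here are illustrative stand-ins for `apply_diff` and cfg evaluation, not the actual API:

```rust
use std::collections::HashSet;

// Simplified stand-in for the CfgOptions invariant: the set always contains
// `true`, and `true`/`false` can neither be inserted nor removed explicitly.
struct CfgOptions {
    enabled: HashSet<String>,
}

impl Default for CfgOptions {
    fn default() -> Self {
        Self { enabled: HashSet::from(["true".to_owned()]) }
    }
}

impl CfgOptions {
    fn insert_atom(&mut self, key: &str) {
        if key == "true" || key == "false" {
            eprintln!("cannot insert `true` or `false` to cfg"); // real code uses tracing::error!
            return;
        }
        self.enabled.insert(key.to_owned());
    }

    fn remove_atom(&mut self, key: &str) {
        if key == "true" || key == "false" {
            eprintln!("cannot remove `true` or `false` from cfg");
            return;
        }
        self.enabled.remove(key);
    }

    fn check(&self, key: &str) -> bool {
        self.enabled.contains(key)
    }
}

fn main() {
    let mut cfg = CfgOptions::default();
    cfg.insert_atom("unix");
    cfg.insert_atom("false"); // rejected, invariant preserved
    cfg.remove_atom("true");  // rejected as well
    assert!(cfg.check("true") && cfg.check("unix") && !cfg.check("false"));
}
```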
+ expr_hygiene: FxHashMap, + /// A map from a destructuring assignment possible variable usages to their hygiene ID. + pat_hygiene: FxHashMap, } pub type ExprPtr = AstPtr; @@ -67,9 +103,12 @@ pub type LabelSource = InFile; pub type FieldPtr = AstPtr; pub type FieldSource = InFile; -pub type PatFieldPtr = AstPtr; +pub type PatFieldPtr = AstPtr>; pub type PatFieldSource = InFile; +pub type ExprOrPatPtr = AstPtr>; +pub type ExprOrPatSource = InFile; + /// An item body together with the mapping from syntax nodes to HIR expression /// IDs. This is needed to go from e.g. a position in a file to the HIR /// expression containing it; but for type inference etc., we want to operate on @@ -83,11 +122,13 @@ pub type PatFieldSource = InFile; /// this properly for macros. #[derive(Default, Debug, Eq, PartialEq)] pub struct BodySourceMap { - expr_map: FxHashMap, + // AST expressions can create patterns in destructuring assignments. Therefore, `ExprSource` can also map + // to `PatId`, and `PatId` can also map to `ExprSource` (the other way around is unaffected). + expr_map: FxHashMap, expr_map_back: ArenaMap, pat_map: FxHashMap, - pat_map_back: ArenaMap, + pat_map_back: ArenaMap, label_map: FxHashMap, label_map_back: ArenaMap, @@ -100,10 +141,13 @@ pub struct BodySourceMap { field_map_back: FxHashMap, pat_field_map_back: FxHashMap, + types: TypesSourceMap, + + // FIXME: Make this a sane struct. template_map: Option< Box<( // format_args! - FxHashMap>, + FxHashMap)>, // asm! FxHashMap>>, )>, @@ -261,6 +305,10 @@ impl Body { pats, bindings, binding_owners, + binding_hygiene, + expr_hygiene, + pat_hygiene, + types, } = self; block_scopes.shrink_to_fit(); exprs.shrink_to_fit(); @@ -268,6 +316,10 @@ impl Body { pats.shrink_to_fit(); bindings.shrink_to_fit(); binding_owners.shrink_to_fit(); + binding_hygiene.shrink_to_fit(); + expr_hygiene.shrink_to_fit(); + pat_hygiene.shrink_to_fit(); + types.shrink_to_fit(); } pub fn walk_bindings_in_pat(&self, pat_id: PatId, mut f: impl FnMut(BindingId)) { @@ -286,7 +338,8 @@ impl Body { | Pat::Path(..) | Pat::ConstBlock(..) | Pat::Wild - | Pat::Missing => {} + | Pat::Missing + | Pat::Expr(_) => {} &Pat::Bind { subpat, .. } => { if let Some(subpat) = subpat { f(subpat); @@ -322,6 +375,162 @@ impl Body { None => true, } } + + pub fn walk_child_exprs(&self, expr_id: ExprId, mut f: impl FnMut(ExprId)) { + let expr = &self[expr_id]; + match expr { + Expr::Continue { .. } + | Expr::Const(_) + | Expr::Missing + | Expr::Path(_) + | Expr::OffsetOf(_) + | Expr::Literal(_) + | Expr::Underscore => {} + Expr::InlineAsm(it) => it.operands.iter().for_each(|(_, op)| match op { + AsmOperand::In { expr, .. } + | AsmOperand::Out { expr: Some(expr), .. } + | AsmOperand::InOut { expr, .. } => f(*expr), + AsmOperand::SplitInOut { in_expr, out_expr, .. } => { + f(*in_expr); + if let Some(out_expr) = out_expr { + f(*out_expr); + } + } + AsmOperand::Out { expr: None, .. } + | AsmOperand::Const(_) + | AsmOperand::Label(_) + | AsmOperand::Sym(_) => (), + }), + Expr::If { condition, then_branch, else_branch } => { + f(*condition); + f(*then_branch); + if let &Some(else_branch) = else_branch { + f(else_branch); + } + } + Expr::Let { expr, .. } => { + f(*expr); + } + Expr::Block { statements, tail, .. } + | Expr::Unsafe { statements, tail, .. } + | Expr::Async { statements, tail, .. } => { + for stmt in statements.iter() { + match stmt { + Statement::Let { initializer, else_branch, pat, .. 
} => { + if let &Some(expr) = initializer { + f(expr); + } + if let &Some(expr) = else_branch { + f(expr); + } + self.walk_exprs_in_pat(*pat, &mut f); + } + Statement::Expr { expr: expression, .. } => f(*expression), + Statement::Item(_) => (), + } + } + if let &Some(expr) = tail { + f(expr); + } + } + Expr::Loop { body, .. } => f(*body), + Expr::Call { callee, args, .. } => { + f(*callee); + args.iter().copied().for_each(f); + } + Expr::MethodCall { receiver, args, .. } => { + f(*receiver); + args.iter().copied().for_each(f); + } + Expr::Match { expr, arms } => { + f(*expr); + arms.iter().map(|arm| arm.expr).for_each(f); + } + Expr::Break { expr, .. } + | Expr::Return { expr } + | Expr::Yield { expr } + | Expr::Yeet { expr } => { + if let &Some(expr) = expr { + f(expr); + } + } + Expr::Become { expr } => f(*expr), + Expr::RecordLit { fields, spread, .. } => { + for field in fields.iter() { + f(field.expr); + } + if let &Some(expr) = spread { + f(expr); + } + } + Expr::Closure { body, .. } => { + f(*body); + } + Expr::BinaryOp { lhs, rhs, .. } => { + f(*lhs); + f(*rhs); + } + Expr::Range { lhs, rhs, .. } => { + if let &Some(lhs) = rhs { + f(lhs); + } + if let &Some(rhs) = lhs { + f(rhs); + } + } + Expr::Index { base, index, .. } => { + f(*base); + f(*index); + } + Expr::Field { expr, .. } + | Expr::Await { expr } + | Expr::Cast { expr, .. } + | Expr::Ref { expr, .. } + | Expr::UnaryOp { expr, .. } + | Expr::Box { expr } => { + f(*expr); + } + Expr::Tuple { exprs, .. } => exprs.iter().copied().for_each(f), + Expr::Array(a) => match a { + Array::ElementList { elements, .. } => elements.iter().copied().for_each(f), + Array::Repeat { initializer, repeat } => { + f(*initializer); + f(*repeat) + } + }, + &Expr::Assignment { target, value } => { + self.walk_exprs_in_pat(target, &mut f); + f(value); + } + } + } + + pub fn walk_exprs_in_pat(&self, pat_id: PatId, f: &mut impl FnMut(ExprId)) { + self.walk_pats(pat_id, &mut |pat| { + if let Pat::Expr(expr) | Pat::ConstBlock(expr) = self[pat] { + f(expr); + } + }); + } + + fn binding_hygiene(&self, binding: BindingId) -> HygieneId { + self.binding_hygiene.get(&binding).copied().unwrap_or(HygieneId::ROOT) + } + + pub fn expr_path_hygiene(&self, expr: ExprId) -> HygieneId { + self.expr_hygiene.get(&expr).copied().unwrap_or(HygieneId::ROOT) + } + + pub fn pat_path_hygiene(&self, pat: PatId) -> HygieneId { + self.pat_hygiene.get(&pat).copied().unwrap_or(HygieneId::ROOT) + } + + pub fn expr_or_pat_path_hygiene(&self, id: ExprOrPatId) -> HygieneId { + match id { + ExprOrPatId::ExprId(id) => self.expr_path_hygiene(id), + ExprOrPatId::PatId(id) => self.pat_path_hygiene(id), + } + } } impl Default for Body { @@ -336,6 +545,10 @@ impl Default for Body { block_scopes: Default::default(), binding_owners: Default::default(), self_param: Default::default(), + binding_hygiene: Default::default(), + expr_hygiene: Default::default(), + pat_hygiene: Default::default(), + types: Default::default(), } } } @@ -372,14 +585,29 @@ impl Index for Body { } } +impl Index for Body { + type Output = TypeRef; + + fn index(&self, b: TypeRefId) -> &TypeRef { + &self.types[b] + } +} + // FIXME: Change `node_` prefix to something more reasonable. // Perhaps `expr_syntax` and `expr_id`? 
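The new `binding_hygiene`/`expr_hygiene`/`pat_hygiene` maps and their accessors above use a sparse-storage convention: only non-root syntax contexts are recorded, and lookups fall back to `HygieneId::ROOT`, so bodies without macro expansions pay almost nothing. A self-contained sketch of that pattern (the `u32` expression IDs and the `Hygienes` struct are hypothetical simplifications, not rust-analyzer types):

```rust
use std::collections::HashMap;

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct HygieneId(u32);

impl HygieneId {
    const ROOT: Self = HygieneId(0);
    fn is_root(self) -> bool { self.0 == 0 }
}

#[derive(Default)]
struct Hygienes {
    // Keyed by a stand-in expression ID; only non-root entries are stored.
    map: HashMap<u32, HygieneId>,
}

impl Hygienes {
    fn record(&mut self, expr: u32, hygiene: HygieneId) {
        if !hygiene.is_root() {
            self.map.insert(expr, hygiene);
        }
    }

    fn lookup(&self, expr: u32) -> HygieneId {
        self.map.get(&expr).copied().unwrap_or(HygieneId::ROOT)
    }
}

fn main() {
    let mut h = Hygienes::default();
    h.record(0, HygieneId::ROOT); // not stored
    h.record(1, HygieneId(7));    // stored
    assert_eq!(h.lookup(0), HygieneId::ROOT);
    assert_eq!(h.lookup(1), HygieneId(7));
    assert_eq!(h.map.len(), 1);
}
```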
impl BodySourceMap { + pub fn expr_or_pat_syntax(&self, id: ExprOrPatId) -> Result { + match id { + ExprOrPatId::ExprId(id) => self.expr_syntax(id).map(|it| it.map(AstPtr::wrap_left)), + ExprOrPatId::PatId(id) => self.pat_syntax(id), + } + } + pub fn expr_syntax(&self, expr: ExprId) -> Result { self.expr_map_back.get(expr).cloned().ok_or(SyntheticSyntax) } - pub fn node_expr(&self, node: InFile<&ast::Expr>) -> Option { + pub fn node_expr(&self, node: InFile<&ast::Expr>) -> Option { let src = node.map(AstPtr::new); self.expr_map.get(&src).cloned() } @@ -395,7 +623,7 @@ impl BodySourceMap { self.expansions.iter().map(|(&a, &b)| (a, b)) } - pub fn pat_syntax(&self, pat: PatId) -> Result { + pub fn pat_syntax(&self, pat: PatId) -> Result { self.pat_map_back.get(pat).cloned().ok_or(SyntheticSyntax) } @@ -428,7 +656,7 @@ impl BodySourceMap { self.pat_field_map_back[&pat] } - pub fn macro_expansion_expr(&self, node: InFile<&ast::MacroExpr>) -> Option { + pub fn macro_expansion_expr(&self, node: InFile<&ast::MacroExpr>) -> Option { let src = node.map(AstPtr::new).map(AstPtr::upcast::).map(AstPtr::upcast); self.expr_map.get(&src).copied() } @@ -442,9 +670,11 @@ impl BodySourceMap { pub fn implicit_format_args( &self, node: InFile<&ast::FormatArgsExpr>, - ) -> Option<&[(syntax::TextRange, Name)]> { + ) -> Option<(HygieneId, &[(syntax::TextRange, Name)])> { let src = node.map(AstPtr::new).map(AstPtr::upcast::); - self.template_map.as_ref()?.0.get(self.expr_map.get(&src)?).map(std::ops::Deref::deref) + let (hygiene, names) = + self.template_map.as_ref()?.0.get(&self.expr_map.get(&src)?.as_expr()?)?; + Some((*hygiene, &**names)) } pub fn asm_template_args( @@ -452,8 +682,8 @@ impl BodySourceMap { node: InFile<&ast::AsmExpr>, ) -> Option<(ExprId, &[Vec<(syntax::TextRange, usize)>])> { let src = node.map(AstPtr::new).map(AstPtr::upcast::); - let expr = self.expr_map.get(&src)?; - Some(*expr).zip(self.template_map.as_ref()?.1.get(expr).map(std::ops::Deref::deref)) + let expr = self.expr_map.get(&src)?.as_expr()?; + Some(expr).zip(self.template_map.as_ref()?.1.get(&expr).map(std::ops::Deref::deref)) } /// Get a reference to the body source map's diagnostics. 
@@ -476,6 +706,7 @@ impl BodySourceMap { template_map, diagnostics, binding_definitions, + types, } = self; if let Some(template_map) = template_map { template_map.0.shrink_to_fit(); @@ -492,14 +723,6 @@ impl BodySourceMap { expansions.shrink_to_fit(); diagnostics.shrink_to_fit(); binding_definitions.shrink_to_fit(); - } - - pub fn template_map( - &self, - ) -> Option<&( - FxHashMap, Vec<(tt::TextRange, Name)>>, - FxHashMap, Vec>>, - )> { - self.template_map.as_deref() + types.shrink_to_fit(); } } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs index 9c547574ecb1f..0b108b54e671b 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs @@ -9,9 +9,10 @@ use base_db::CrateId; use either::Either; use hir_expand::{ name::{AsName, Name}, - InFile, + span_map::{ExpansionSpanMap, SpanMap}, + InFile, MacroDefId, }; -use intern::{sym, Interned, Symbol}; +use intern::{sym, Symbol}; use rustc_hash::FxHashMap; use span::AstIdMap; use stdx::never; @@ -22,10 +23,11 @@ use syntax::{ }, AstNode, AstPtr, AstToken as _, SyntaxNodePtr, }; +use text_size::TextSize; use triomphe::Arc; use crate::{ - body::{Body, BodyDiagnostic, BodySourceMap, ExprPtr, LabelPtr, PatPtr}, + body::{Body, BodyDiagnostic, BodySourceMap, ExprPtr, HygieneId, LabelPtr, PatPtr}, builtin_type::BuiltinUint, data::adt::StructKind, db::DefDatabase, @@ -37,8 +39,8 @@ use crate::{ FormatPlaceholder, FormatSign, FormatTrait, }, Array, Binding, BindingAnnotation, BindingId, BindingProblems, CaptureBy, ClosureKind, - Expr, ExprId, Label, LabelId, Literal, LiteralOrConst, MatchArm, Movability, OffsetOf, Pat, - PatId, RecordFieldPat, RecordLitField, Statement, + Expr, ExprId, Item, Label, LabelId, Literal, LiteralOrConst, MatchArm, Movability, + OffsetOf, Pat, PatId, RecordFieldPat, RecordLitField, Statement, }, item_scope::BuiltinShadowMode, lang_item::LangItem, @@ -46,7 +48,7 @@ use crate::{ nameres::{DefMap, MacroSubNs}, path::{GenericArgs, Path}, type_ref::{Mutability, Rawness, TypeRef}, - AdtId, BlockId, BlockLoc, ConstBlockLoc, DefWithBodyId, ModuleDefId, UnresolvedMacro, + AdtId, BlockId, BlockLoc, ConstBlockLoc, DefWithBodyId, MacroId, ModuleDefId, UnresolvedMacro, }; type FxIndexSet = indexmap::IndexSet>; @@ -60,6 +62,17 @@ pub(super) fn lower( krate: CrateId, is_async_fn: bool, ) -> (Body, BodySourceMap) { + // We cannot leave the root span map empty and let any identifier from it be treated as root, + // because when inside nested macros `SyntaxContextId`s from the outer macro will be interleaved + // with the inner macro, and that will cause confusion because they won't be the same as `ROOT` + // even though they should be the same. Also, when the body comes from multiple expansions, their + // hygiene is different. 
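The comment above explains why the lowering keeps the expansion span map around: it needs accurate syntax contexts to attribute hygiene. At the surface level, this is the behaviour being modelled; identifiers introduced by a `macro_rules!` expansion live in their own syntax context and do not interfere with identically named bindings at the call site. A minimal illustration in plain Rust:

```rust
// A binding introduced inside a macro expansion is invisible to the caller.
macro_rules! define_inner_x {
    () => {
        let x = "from the macro"; // different syntax context than the caller's `x`
        let _ = x;
    };
}

fn main() {
    let x = "from the caller";
    define_inner_x!();
    // Resolves to the caller's binding: the macro's `x` has different hygiene.
    assert_eq!(x, "from the caller");
}
```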
+ let span_map = expander.current_file_id().macro_file().map(|_| { + let SpanMap::ExpansionSpanMap(span_map) = expander.span_map(db) else { + panic!("in a macro file there should be `ExpansionSpanMap`"); + }; + Arc::clone(span_map) + }); ExprCollector { db, owner, @@ -70,11 +83,12 @@ pub(super) fn lower( body: Body::default(), expander, current_try_block_label: None, - is_lowering_assignee_expr: false, is_lowering_coroutine: false, label_ribs: Vec::new(), current_binding_owner: None, awaitable_context: None, + current_span_map: span_map, + current_block_legacy_macro_defs_count: FxHashMap::default(), } .collect(params, body, is_async_fn) } @@ -89,9 +103,14 @@ struct ExprCollector<'a> { body: Body, source_map: BodySourceMap, - is_lowering_assignee_expr: bool, is_lowering_coroutine: bool, + /// Legacy (`macro_rules!`) macros can have multiple definitions and shadow each other, + /// and we need to find the current definition. So we track the number of definitions we saw. + current_block_legacy_macro_defs_count: FxHashMap, + + current_span_map: Option>, + current_try_block_label: Option, // points to the expression that a try expression will target (replaces current_try_block_label) // catch_scope: Option, @@ -110,31 +129,27 @@ struct ExprCollector<'a> { #[derive(Clone, Debug)] struct LabelRib { kind: RibKind, - // Once we handle macro hygiene this will need to be a map - label: Option<(Name, LabelId)>, } impl LabelRib { fn new(kind: RibKind) -> Self { - LabelRib { kind, label: None } - } - fn new_normal(label: (Name, LabelId)) -> Self { - LabelRib { kind: RibKind::Normal, label: Some(label) } + LabelRib { kind } } } -#[derive(Copy, Clone, Debug, PartialEq, Eq)] +#[derive(Clone, Debug, PartialEq, Eq)] enum RibKind { - Normal, + Normal(Name, LabelId, HygieneId), Closure, Constant, + MacroDef(Box), } impl RibKind { /// This rib forbids referring to labels defined in upwards ribs. - fn is_label_barrier(self) -> bool { + fn is_label_barrier(&self) -> bool { match self { - RibKind::Normal => false, + RibKind::Normal(..) 
| RibKind::MacroDef(_) => false, RibKind::Closure | RibKind::Constant => true, } } @@ -147,7 +162,7 @@ enum Awaitable { #[derive(Debug, Default)] struct BindingList { - map: FxHashMap, + map: FxHashMap<(Name, HygieneId), BindingId>, is_used: FxHashMap, reject_new: bool, } @@ -157,9 +172,16 @@ impl BindingList { &mut self, ec: &mut ExprCollector<'_>, name: Name, + hygiene: HygieneId, mode: BindingAnnotation, ) -> BindingId { - let id = *self.map.entry(name).or_insert_with_key(|n| ec.alloc_binding(n.clone(), mode)); + let id = *self.map.entry((name, hygiene)).or_insert_with_key(|(name, _)| { + let id = ec.alloc_binding(name.clone(), mode); + if !hygiene.is_root() { + ec.body.binding_hygiene.insert(id, hygiene); + } + id + }); if ec.body.bindings[id].mode != mode { ec.body.bindings[id].problems = Some(BindingProblems::BoundInconsistently); } @@ -213,6 +235,13 @@ impl ExprCollector<'_> { Name::new_symbol_root(sym::self_.clone()), BindingAnnotation::new(is_mutable, false), ); + let hygiene = self_param + .name() + .map(|name| self.hygiene_id_for(name.syntax().text_range().start())) + .unwrap_or(HygieneId::ROOT); + if !hygiene.is_root() { + self.body.binding_hygiene.insert(binding_id, hygiene); + } self.body.self_param = Some(binding_id); self.source_map.self_param = Some(self.expander.in_file(AstPtr::new(&self_param))); } @@ -245,8 +274,8 @@ impl ExprCollector<'_> { (self.body, self.source_map) } - fn ctx(&self) -> LowerCtx<'_> { - self.expander.ctx(self.db) + fn ctx(&mut self) -> LowerCtx<'_> { + self.expander.ctx(self.db, &mut self.body.types, &mut self.source_map.types) } fn collect_expr(&mut self, expr: ast::Expr) -> ExprId { @@ -290,13 +319,14 @@ impl ExprCollector<'_> { }) } Some(ast::BlockModifier::Label(label)) => { - let label = self.collect_label(label); - self.with_labeled_rib(label, |this| { + let label_hygiene = self.hygiene_id_for(label.syntax().text_range().start()); + let label_id = self.collect_label(label); + self.with_labeled_rib(label_id, label_hygiene, |this| { this.collect_block_(e, |id, statements, tail| Expr::Block { id, statements, tail, - label: Some(label), + label: Some(label_id), }) }) } @@ -338,9 +368,14 @@ impl ExprCollector<'_> { None => self.collect_block(e), }, ast::Expr::LoopExpr(e) => { - let label = e.label().map(|label| self.collect_label(label)); + let label = e.label().map(|label| { + ( + self.hygiene_id_for(label.syntax().text_range().start()), + self.collect_label(label), + ) + }); let body = self.collect_labelled_block_opt(label, e.loop_body()); - self.alloc_expr(Expr::Loop { body, label }, syntax_ptr) + self.alloc_expr(Expr::Loop { body, label: label.map(|it| it.1) }, syntax_ptr) } ast::Expr::WhileExpr(e) => self.collect_while_loop(syntax_ptr, e), ast::Expr::ForExpr(e) => self.collect_for_loop(syntax_ptr, e), @@ -359,14 +394,7 @@ impl ExprCollector<'_> { } else { Box::default() }; - self.alloc_expr( - Expr::Call { - callee, - args, - is_assignee_expr: self.is_lowering_assignee_expr, - }, - syntax_ptr, - ) + self.alloc_expr(Expr::Call { callee, args }, syntax_ptr) } } ast::Expr::MethodCallExpr(e) => { @@ -407,12 +435,15 @@ impl ExprCollector<'_> { self.alloc_expr(Expr::Match { expr, arms }, syntax_ptr) } ast::Expr::PathExpr(e) => { - let path = e - .path() - .and_then(|path| self.expander.parse_path(self.db, path)) - .map(Expr::Path) - .unwrap_or(Expr::Missing); - self.alloc_expr(path, syntax_ptr) + let (path, hygiene) = self + .collect_expr_path(e) + .map(|(path, hygiene)| (Expr::Path(path), hygiene)) + .unwrap_or((Expr::Missing, HygieneId::ROOT)); + 
let expr_id = self.alloc_expr(path, syntax_ptr); + if !hygiene.is_root() { + self.body.expr_hygiene.insert(expr_id, hygiene); + } + expr_id } ast::Expr::ContinueExpr(e) => { let label = self.resolve_label(e.lifetime()).unwrap_or_else(|e| { @@ -433,7 +464,7 @@ impl ExprCollector<'_> { let inner = self.collect_expr_opt(e.expr()); // make the paren expr point to the inner expression as well for IDE resolution let src = self.expander.in_file(syntax_ptr); - self.source_map.expr_map.insert(src, inner); + self.source_map.expr_map.insert(src, inner.into()); inner } ast::Expr::ReturnExpr(e) => { @@ -455,9 +486,7 @@ impl ExprCollector<'_> { self.alloc_expr(Expr::Yeet { expr }, syntax_ptr) } ast::Expr::RecordExpr(e) => { - let path = - e.path().and_then(|path| self.expander.parse_path(self.db, path)).map(Box::new); - let is_assignee_expr = self.is_lowering_assignee_expr; + let path = e.path().and_then(|path| self.parse_path(path)).map(Box::new); let record_lit = if let Some(nfl) = e.record_expr_field_list() { let fields = nfl .fields() @@ -476,16 +505,9 @@ impl ExprCollector<'_> { }) .collect(); let spread = nfl.spread().map(|s| self.collect_expr(s)); - let ellipsis = nfl.dotdot_token().is_some(); - Expr::RecordLit { path, fields, spread, ellipsis, is_assignee_expr } + Expr::RecordLit { path, fields, spread } } else { - Expr::RecordLit { - path, - fields: Box::default(), - spread: None, - ellipsis: false, - is_assignee_expr, - } + Expr::RecordLit { path, fields: Box::default(), spread: None } }; self.alloc_expr(record_lit, syntax_ptr) @@ -511,7 +533,7 @@ impl ExprCollector<'_> { ast::Expr::TryExpr(e) => self.collect_try_operator(syntax_ptr, e), ast::Expr::CastExpr(e) => { let expr = self.collect_expr_opt(e.expr()); - let type_ref = Interned::new(TypeRef::from_ast_opt(&self.ctx(), e.ty())); + let type_ref = TypeRef::from_ast_opt(&self.ctx(), e.ty()); self.alloc_expr(Expr::Cast { expr, type_ref }, syntax_ptr) } ast::Expr::RefExpr(e) => { @@ -550,16 +572,13 @@ impl ExprCollector<'_> { arg_types.reserve_exact(num_params); for param in pl.params() { let pat = this.collect_pat_top(param.pat()); - let type_ref = - param.ty().map(|it| Interned::new(TypeRef::from_ast(&this.ctx(), it))); + let type_ref = param.ty().map(|it| TypeRef::from_ast(&this.ctx(), it)); args.push(pat); arg_types.push(type_ref); } } - let ret_type = e - .ret_type() - .and_then(|r| r.ty()) - .map(|it| Interned::new(TypeRef::from_ast(&this.ctx(), it))); + let ret_type = + e.ret_type().and_then(|r| r.ty()).map(|it| TypeRef::from_ast(&this.ctx(), it)); let prev_is_lowering_coroutine = mem::take(&mut this.is_lowering_coroutine); let prev_try_block_label = this.current_try_block_label.take(); @@ -602,12 +621,14 @@ impl ExprCollector<'_> { ast::Expr::BinExpr(e) => { let op = e.op_kind(); if let Some(ast::BinaryOp::Assignment { op: None }) = op { - self.is_lowering_assignee_expr = true; + let target = self.collect_expr_as_pat_opt(e.lhs()); + let value = self.collect_expr_opt(e.rhs()); + self.alloc_expr(Expr::Assignment { target, value }, syntax_ptr) + } else { + let lhs = self.collect_expr_opt(e.lhs()); + let rhs = self.collect_expr_opt(e.rhs()); + self.alloc_expr(Expr::BinaryOp { lhs, rhs, op }, syntax_ptr) } - let lhs = self.collect_expr_opt(e.lhs()); - self.is_lowering_assignee_expr = false; - let rhs = self.collect_expr_opt(e.rhs()); - self.alloc_expr(Expr::BinaryOp { lhs, rhs, op }, syntax_ptr) } ast::Expr::TupleExpr(e) => { let mut exprs: Vec<_> = e.fields().map(|expr| self.collect_expr(expr)).collect(); @@ -617,13 +638,7 @@ impl 
ExprCollector<'_> { exprs.insert(0, self.missing_expr()); } - self.alloc_expr( - Expr::Tuple { - exprs: exprs.into_boxed_slice(), - is_assignee_expr: self.is_lowering_assignee_expr, - }, - syntax_ptr, - ) + self.alloc_expr(Expr::Tuple { exprs: exprs.into_boxed_slice() }, syntax_ptr) } ast::Expr::ArrayExpr(e) => { let kind = e.kind(); @@ -631,13 +646,7 @@ impl ExprCollector<'_> { match kind { ArrayExprKind::ElementList(e) => { let elements = e.map(|expr| self.collect_expr(expr)).collect(); - self.alloc_expr( - Expr::Array(Array::ElementList { - elements, - is_assignee_expr: self.is_lowering_assignee_expr, - }), - syntax_ptr, - ) + self.alloc_expr(Expr::Array(Array::ElementList { elements }), syntax_ptr) } ArrayExprKind::Repeat { initializer, repeat } => { let initializer = self.collect_expr_opt(initializer); @@ -664,8 +673,7 @@ impl ExprCollector<'_> { ast::Expr::IndexExpr(e) => { let base = self.collect_expr_opt(e.base()); let index = self.collect_expr_opt(e.index()); - let is_assignee_expr = self.is_lowering_assignee_expr; - self.alloc_expr(Expr::Index { base, index, is_assignee_expr }, syntax_ptr) + self.alloc_expr(Expr::Index { base, index }, syntax_ptr) } ast::Expr::RangeExpr(e) => { let lhs = e.start().map(|lhs| self.collect_expr(lhs)); @@ -688,7 +696,7 @@ impl ExprCollector<'_> { // Make the macro-call point to its expanded expression so we can query // semantics on syntax pointers to the macro let src = self.expander.in_file(syntax_ptr); - self.source_map.expr_map.insert(src, id); + self.source_map.expr_map.insert(src, id.into()); id } None => self.alloc_expr(Expr::Missing, syntax_ptr), @@ -697,7 +705,7 @@ impl ExprCollector<'_> { ast::Expr::UnderscoreExpr(_) => self.alloc_expr(Expr::Underscore, syntax_ptr), ast::Expr::AsmExpr(e) => self.lower_inline_asm(e, syntax_ptr), ast::Expr::OffsetOfExpr(e) => { - let container = Interned::new(TypeRef::from_ast_opt(&self.ctx(), e.ty())); + let container = TypeRef::from_ast_opt(&self.ctx(), e.ty()); let fields = e.fields().map(|it| it.as_name()).collect(); self.alloc_expr(Expr::OffsetOf(OffsetOf { container, fields }), syntax_ptr) } @@ -705,6 +713,200 @@ impl ExprCollector<'_> { }) } + fn parse_path(&mut self, path: ast::Path) -> Option { + self.expander.parse_path(self.db, path, &mut self.body.types, &mut self.source_map.types) + } + + fn collect_expr_path(&mut self, e: ast::PathExpr) -> Option<(Path, HygieneId)> { + e.path().and_then(|path| { + let path = self.parse_path(path)?; + // Need to enable `mod_path.len() < 1` for `self`. + let may_be_variable = matches!(&path, Path::BarePath(mod_path) if mod_path.len() <= 1); + let hygiene = if may_be_variable { + self.hygiene_id_for(e.syntax().text_range().start()) + } else { + HygieneId::ROOT + }; + Some((path, hygiene)) + }) + } + + fn collect_expr_as_pat_opt(&mut self, expr: Option) -> PatId { + match expr { + Some(expr) => self.collect_expr_as_pat(expr), + _ => self.missing_pat(), + } + } + + fn collect_expr_as_pat(&mut self, expr: ast::Expr) -> PatId { + self.maybe_collect_expr_as_pat(&expr).unwrap_or_else(|| { + let src = self.expander.in_file(AstPtr::new(&expr).wrap_left()); + let expr = self.collect_expr(expr); + // Do not use `alloc_pat_from_expr()` here, it will override the entry in `expr_map`. 
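`collect_expr_as_pat`/`maybe_collect_expr_as_pat` above exist because in a destructuring assignment the left-hand side is parsed as an expression but must be lowered as a pattern. A short illustration of the surface feature being lowered (plain Rust, nothing rust-analyzer-specific; the `Point` struct is made up for the example):

```rust
fn main() {
    let (mut a, mut b) = (0, 1);
    assert_eq!((a, b), (0, 1));

    (a, b) = (b, a);            // tuple expression lowered as a tuple pattern
    assert_eq!((a, b), (1, 0));

    [a, b] = [b + 10, a + 10];  // array expression lowered as a slice pattern
    assert_eq!((a, b), (10, 11));

    (a, ..) = (5, 6, 7);        // `..` (a full range expression) becomes a rest pattern
    (_, b) = (8, 9);            // `_` becomes a wildcard pattern
    assert_eq!((a, b), (5, 9));

    struct Point { x: i32, y: i32 }
    Point { x: a, y: b } = Point { x: 3, y: 4 }; // record expression lowered as a record pattern
    assert_eq!((a, b), (3, 4));
}
```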
+ let id = self.body.pats.alloc(Pat::Expr(expr)); + self.source_map.pat_map_back.insert(id, src); + id + }) + } + + fn maybe_collect_expr_as_pat(&mut self, expr: &ast::Expr) -> Option { + self.check_cfg(expr)?; + let syntax_ptr = AstPtr::new(expr); + + let result = match expr { + ast::Expr::UnderscoreExpr(_) => self.alloc_pat_from_expr(Pat::Wild, syntax_ptr), + ast::Expr::ParenExpr(e) => { + // We special-case `(..)` for consistency with patterns. + if let Some(ast::Expr::RangeExpr(range)) = e.expr() { + if range.is_range_full() { + return Some(self.alloc_pat_from_expr( + Pat::Tuple { args: Box::default(), ellipsis: Some(0) }, + syntax_ptr, + )); + } + } + return e.expr().and_then(|expr| self.maybe_collect_expr_as_pat(&expr)); + } + ast::Expr::TupleExpr(e) => { + let (ellipsis, args) = collect_tuple(self, e.fields()); + self.alloc_pat_from_expr(Pat::Tuple { args, ellipsis }, syntax_ptr) + } + ast::Expr::ArrayExpr(e) => { + if e.semicolon_token().is_some() { + return None; + } + + let mut elements = e.exprs(); + let prefix = elements + .by_ref() + .map_while(|elem| collect_possibly_rest(self, elem).left()) + .collect(); + let suffix = elements.map(|elem| self.collect_expr_as_pat(elem)).collect(); + self.alloc_pat_from_expr(Pat::Slice { prefix, slice: None, suffix }, syntax_ptr) + } + ast::Expr::CallExpr(e) => { + let path = collect_path(self, e.expr()?)?; + let path = path.path().and_then(|path| self.parse_path(path)).map(Box::new); + let (ellipsis, args) = collect_tuple(self, e.arg_list()?.args()); + self.alloc_pat_from_expr(Pat::TupleStruct { path, args, ellipsis }, syntax_ptr) + } + ast::Expr::PathExpr(e) => { + let (path, hygiene) = self + .collect_expr_path(e.clone()) + .map(|(path, hygiene)| (Pat::Path(path), hygiene)) + .unwrap_or((Pat::Missing, HygieneId::ROOT)); + let pat_id = self.alloc_pat_from_expr(path, syntax_ptr); + if !hygiene.is_root() { + self.body.pat_hygiene.insert(pat_id, hygiene); + } + pat_id + } + ast::Expr::MacroExpr(e) => { + let e = e.macro_call()?; + let macro_ptr = AstPtr::new(&e); + let src = self.expander.in_file(AstPtr::new(expr)); + let id = self.collect_macro_call(e, macro_ptr, true, |this, expansion| { + this.collect_expr_as_pat_opt(expansion) + }); + self.source_map.expr_map.insert(src, id.into()); + id + } + ast::Expr::RecordExpr(e) => { + let path = e.path().and_then(|path| self.parse_path(path)).map(Box::new); + let record_field_list = e.record_expr_field_list()?; + let ellipsis = record_field_list.dotdot_token().is_some(); + // FIXME: Report an error here if `record_field_list.spread().is_some()`. + let args = record_field_list + .fields() + .filter_map(|f| { + self.check_cfg(&f)?; + let field_expr = f.expr()?; + let pat = self.collect_expr_as_pat(field_expr); + let name = f.field_name()?.as_name(); + let src = self.expander.in_file(AstPtr::new(&f).wrap_left()); + self.source_map.pat_field_map_back.insert(pat, src); + Some(RecordFieldPat { name, pat }) + }) + .collect(); + self.alloc_pat_from_expr(Pat::Record { path, args, ellipsis }, syntax_ptr) + } + _ => return None, + }; + return Some(result); + + fn collect_path(this: &mut ExprCollector<'_>, expr: ast::Expr) -> Option { + match expr { + ast::Expr::PathExpr(e) => Some(e), + ast::Expr::MacroExpr(mac) => { + let call = mac.macro_call()?; + { + let macro_ptr = AstPtr::new(&call); + this.collect_macro_call(call, macro_ptr, true, |this, expanded_path| { + collect_path(this, expanded_path?) 
+ }) + } + } + _ => None, + } + } + + fn collect_possibly_rest( + this: &mut ExprCollector<'_>, + expr: ast::Expr, + ) -> Either { + match &expr { + ast::Expr::RangeExpr(e) if e.is_range_full() => Either::Right(()), + ast::Expr::MacroExpr(mac) => match mac.macro_call() { + Some(call) => { + let macro_ptr = AstPtr::new(&call); + let pat = this.collect_macro_call( + call, + macro_ptr, + true, + |this, expanded_expr| match expanded_expr { + Some(expanded_pat) => collect_possibly_rest(this, expanded_pat), + None => Either::Left(this.missing_pat()), + }, + ); + if let Either::Left(pat) = pat { + let src = this.expander.in_file(AstPtr::new(&expr).wrap_left()); + this.source_map.pat_map_back.insert(pat, src); + } + pat + } + None => { + let ptr = AstPtr::new(&expr); + Either::Left(this.alloc_pat_from_expr(Pat::Missing, ptr)) + } + }, + _ => Either::Left(this.collect_expr_as_pat(expr)), + } + } + + fn collect_tuple( + this: &mut ExprCollector<'_>, + fields: ast::AstChildren, + ) -> (Option, Box<[la_arena::Idx]>) { + let mut ellipsis = None; + let args = fields + .enumerate() + .filter_map(|(idx, elem)| { + match collect_possibly_rest(this, elem) { + Either::Left(pat) => Some(pat), + Either::Right(()) => { + if ellipsis.is_none() { + ellipsis = Some(idx as u32); + } + // FIXME: Report an error here otherwise. + None + } + } + }) + .collect(); + (ellipsis, args) + } + } + fn initialize_binding_owner( &mut self, syntax_ptr: AstPtr, @@ -744,7 +946,7 @@ impl ExprCollector<'_> { let old_label = self.current_try_block_label.replace(label); let ptr = AstPtr::new(&e).upcast(); - let (btail, expr_id) = self.with_labeled_rib(label, |this| { + let (btail, expr_id) = self.with_labeled_rib(label, HygieneId::ROOT, |this| { let mut btail = None; let block = this.collect_block_(e, |id, statements, tail| { btail = tail; @@ -755,17 +957,13 @@ impl ExprCollector<'_> { let callee = self.alloc_expr_desugared_with_ptr(Expr::Path(try_from_output), ptr); let next_tail = match btail { - Some(tail) => self.alloc_expr_desugared_with_ptr( - Expr::Call { callee, args: Box::new([tail]), is_assignee_expr: false }, - ptr, - ), + Some(tail) => self + .alloc_expr_desugared_with_ptr(Expr::Call { callee, args: Box::new([tail]) }, ptr), None => { - let unit = self.alloc_expr_desugared_with_ptr( - Expr::Tuple { exprs: Box::new([]), is_assignee_expr: false }, - ptr, - ); + let unit = + self.alloc_expr_desugared_with_ptr(Expr::Tuple { exprs: Box::new([]) }, ptr); self.alloc_expr_desugared_with_ptr( - Expr::Call { callee, args: Box::new([unit]), is_assignee_expr: false }, + Expr::Call { callee, args: Box::new([unit]) }, ptr, ) } @@ -792,7 +990,9 @@ impl ExprCollector<'_> { /// FIXME: Rustc wraps the condition in a construct equivalent to `{ let _t = ; _t }` /// to preserve drop semantics. We should probably do the same in future. 
fn collect_while_loop(&mut self, syntax_ptr: AstPtr, e: ast::WhileExpr) -> ExprId { - let label = e.label().map(|label| self.collect_label(label)); + let label = e.label().map(|label| { + (self.hygiene_id_for(label.syntax().text_range().start()), self.collect_label(label)) + }); let body = self.collect_labelled_block_opt(label, e.loop_body()); // Labels can also be used in the condition expression, like this: @@ -809,9 +1009,9 @@ impl ExprCollector<'_> { // } // ``` let condition = match label { - Some(label) => { - self.with_labeled_rib(label, |this| this.collect_expr_opt(e.condition())) - } + Some((label_hygiene, label)) => self.with_labeled_rib(label, label_hygiene, |this| { + this.collect_expr_opt(e.condition()) + }), None => self.collect_expr_opt(e.condition()), }; @@ -820,7 +1020,7 @@ impl ExprCollector<'_> { Expr::If { condition, then_branch: body, else_branch: Some(break_expr) }, syntax_ptr, ); - self.alloc_expr(Expr::Loop { body: if_expr, label }, syntax_ptr) + self.alloc_expr(Expr::Loop { body: if_expr, label: label.map(|it| it.1) }, syntax_ptr) } /// Desugar `ast::ForExpr` from: `[opt_ident]: for in ` into: @@ -851,15 +1051,11 @@ impl ExprCollector<'_> { let head = self.collect_expr_opt(e.iterable()); let into_iter_fn_expr = self.alloc_expr(Expr::Path(into_iter_fn), syntax_ptr); let iterator = self.alloc_expr( - Expr::Call { - callee: into_iter_fn_expr, - args: Box::new([head]), - is_assignee_expr: false, - }, + Expr::Call { callee: into_iter_fn_expr, args: Box::new([head]) }, syntax_ptr, ); let none_arm = MatchArm { - pat: self.alloc_pat_desugared(Pat::Path(Box::new(option_none))), + pat: self.alloc_pat_desugared(Pat::Path(option_none)), guard: None, expr: self.alloc_expr(Expr::Break { expr: None, label: None }, syntax_ptr), }; @@ -868,7 +1064,9 @@ impl ExprCollector<'_> { args: Box::new([self.collect_pat_top(e.pat())]), ellipsis: None, }; - let label = e.label().map(|label| self.collect_label(label)); + let label = e.label().map(|label| { + (self.hygiene_id_for(label.syntax().text_range().start()), self.collect_label(label)) + }); let some_arm = MatchArm { pat: self.alloc_pat_desugared(some_pat), guard: None, @@ -884,11 +1082,7 @@ impl ExprCollector<'_> { ); let iter_next_fn_expr = self.alloc_expr(Expr::Path(iter_next_fn), syntax_ptr); let iter_next_expr = self.alloc_expr( - Expr::Call { - callee: iter_next_fn_expr, - args: Box::new([iter_expr_mut]), - is_assignee_expr: false, - }, + Expr::Call { callee: iter_next_fn_expr, args: Box::new([iter_expr_mut]) }, syntax_ptr, ); let loop_inner = self.alloc_expr( @@ -904,7 +1098,8 @@ impl ExprCollector<'_> { }, syntax_ptr, ); - let loop_outer = self.alloc_expr(Expr::Loop { body: loop_inner, label }, syntax_ptr); + let loop_outer = self + .alloc_expr(Expr::Loop { body: loop_inner, label: label.map(|it| it.1) }, syntax_ptr); let iter_binding = self.alloc_binding(iter_name, BindingAnnotation::Mutable); let iter_pat = self.alloc_pat_desugared(Pat::Bind { id: iter_binding, subpat: None }); self.add_definition_to_binding(iter_binding, iter_pat); @@ -942,10 +1137,8 @@ impl ExprCollector<'_> { }; let operand = self.collect_expr_opt(e.expr()); let try_branch = self.alloc_expr(Expr::Path(try_branch), syntax_ptr); - let expr = self.alloc_expr( - Expr::Call { callee: try_branch, args: Box::new([operand]), is_assignee_expr: false }, - syntax_ptr, - ); + let expr = self + .alloc_expr(Expr::Call { callee: try_branch, args: Box::new([operand]) }, syntax_ptr); let continue_name = Name::generate_new_name(self.body.bindings.len()); let continue_binding 
= self.alloc_binding(continue_name.clone(), BindingAnnotation::Unannotated); @@ -975,10 +1168,8 @@ impl ExprCollector<'_> { expr: { let it = self.alloc_expr(Expr::Path(Path::from(break_name)), syntax_ptr); let callee = self.alloc_expr(Expr::Path(try_from_residual), syntax_ptr); - let result = self.alloc_expr( - Expr::Call { callee, args: Box::new([it]), is_assignee_expr: false }, - syntax_ptr, - ); + let result = + self.alloc_expr(Expr::Call { callee, args: Box::new([it]) }, syntax_ptr); self.alloc_expr( match self.current_try_block_label { Some(label) => Expr::Break { expr: Some(result), label: Some(label) }, @@ -1065,7 +1256,14 @@ impl ExprCollector<'_> { // FIXME: Report parse errors here } + let SpanMap::ExpansionSpanMap(new_span_map) = self.expander.span_map(self.db) + else { + panic!("just expanded a macro, ExpansionSpanMap should be available"); + }; + let old_span_map = + mem::replace(&mut self.current_span_map, Some(new_span_map.clone())); let id = collector(self, Some(expansion.tree())); + self.current_span_map = old_span_map; self.ast_id_map = prev_ast_id_map; self.expander.exit(mark); id @@ -1108,7 +1306,7 @@ impl ExprCollector<'_> { // Make the macro-call point to its expanded expression so we can query // semantics on syntax pointers to the macro let src = self.expander.in_file(syntax_ptr); - self.source_map.expr_map.insert(src, tail); + self.source_map.expr_map.insert(src, tail.into()); }) } @@ -1119,8 +1317,7 @@ impl ExprCollector<'_> { return; } let pat = self.collect_pat_top(stmt.pat()); - let type_ref = - stmt.ty().map(|it| Interned::new(TypeRef::from_ast(&self.ctx(), it))); + let type_ref = stmt.ty().map(|it| TypeRef::from_ast(&self.ctx(), it)); let initializer = stmt.initializer().map(|e| self.collect_expr(e)); let else_branch = stmt .let_else() @@ -1145,10 +1342,46 @@ impl ExprCollector<'_> { statements.push(Statement::Expr { expr, has_semi }); } } - ast::Stmt::Item(_item) => statements.push(Statement::Item), + ast::Stmt::Item(ast::Item::MacroDef(macro_)) => { + let Some(name) = macro_.name() else { + statements.push(Statement::Item(Item::Other)); + return; + }; + let name = name.as_name(); + let macro_id = self.def_map.modules[DefMap::ROOT].scope.get(&name).take_macros(); + self.collect_macro_def(statements, macro_id); + } + ast::Stmt::Item(ast::Item::MacroRules(macro_)) => { + let Some(name) = macro_.name() else { + statements.push(Statement::Item(Item::Other)); + return; + }; + let name = name.as_name(); + let macro_defs_count = + self.current_block_legacy_macro_defs_count.entry(name.clone()).or_insert(0); + let macro_id = self.def_map.modules[DefMap::ROOT] + .scope + .get_legacy_macro(&name) + .and_then(|it| it.get(*macro_defs_count)) + .copied(); + *macro_defs_count += 1; + self.collect_macro_def(statements, macro_id); + } + ast::Stmt::Item(_item) => statements.push(Statement::Item(Item::Other)), } } + fn collect_macro_def(&mut self, statements: &mut Vec, macro_id: Option) { + let Some(macro_id) = macro_id else { + never!("def map should have macro definition, but it doesn't"); + statements.push(Statement::Item(Item::Other)); + return; + }; + let macro_id = self.db.macro_def(macro_id); + statements.push(Statement::Item(Item::MacroDef(Box::new(macro_id)))); + self.label_ribs.push(LabelRib::new(RibKind::MacroDef(Box::new(macro_id)))); + } + fn collect_block(&mut self, block: ast::BlockExpr) -> ExprId { self.collect_block_(block, |id, statements, tail| Expr::Block { id, @@ -1194,6 +1427,7 @@ impl ExprCollector<'_> { }; let prev_def_map = mem::replace(&mut 
self.def_map, def_map); let prev_local_module = mem::replace(&mut self.expander.module, module); + let prev_legacy_macros_count = mem::take(&mut self.current_block_legacy_macro_defs_count); let mut statements = Vec::new(); block.statements().for_each(|s| self.collect_stmt(&mut statements, s)); @@ -1216,6 +1450,7 @@ impl ExprCollector<'_> { self.def_map = prev_def_map; self.expander.module = prev_local_module; + self.current_block_legacy_macro_defs_count = prev_legacy_macros_count; expr_id } @@ -1228,11 +1463,13 @@ impl ExprCollector<'_> { fn collect_labelled_block_opt( &mut self, - label: Option, + label: Option<(HygieneId, LabelId)>, expr: Option, ) -> ExprId { match label { - Some(label) => self.with_labeled_rib(label, |this| this.collect_block_opt(expr)), + Some((hygiene, label)) => { + self.with_labeled_rib(label, hygiene, |this| this.collect_block_opt(expr)) + } None => self.collect_block_opt(expr), } } @@ -1250,6 +1487,10 @@ impl ExprCollector<'_> { let pattern = match &pat { ast::Pat::IdentPat(bp) => { let name = bp.name().map(|nr| nr.as_name()).unwrap_or_else(Name::missing); + let hygiene = bp + .name() + .map(|name| self.hygiene_id_for(name.syntax().text_range().start())) + .unwrap_or(HygieneId::ROOT); let annotation = BindingAnnotation::new(bp.mut_token().is_some(), bp.ref_token().is_some()); @@ -1285,12 +1526,12 @@ impl ExprCollector<'_> { } // shadowing statics is an error as well, so we just ignore that case here _ => { - let id = binding_list.find(self, name, annotation); + let id = binding_list.find(self, name, hygiene, annotation); (Some(id), Pat::Bind { id, subpat }) } } } else { - let id = binding_list.find(self, name, annotation); + let id = binding_list.find(self, name, hygiene, annotation); (Some(id), Pat::Bind { id, subpat }) }; @@ -1302,8 +1543,7 @@ impl ExprCollector<'_> { return pat; } ast::Pat::TupleStructPat(p) => { - let path = - p.path().and_then(|path| self.expander.parse_path(self.db, path)).map(Box::new); + let path = p.path().and_then(|path| self.parse_path(path)).map(Box::new); let (args, ellipsis) = self.collect_tuple_pat( p.fields(), comma_follows_token(p.l_paren_token()), @@ -1317,8 +1557,7 @@ impl ExprCollector<'_> { Pat::Ref { pat, mutability } } ast::Pat::PathPat(p) => { - let path = - p.path().and_then(|path| self.expander.parse_path(self.db, path)).map(Box::new); + let path = p.path().and_then(|path| self.parse_path(path)); path.map(Pat::Path).unwrap_or(Pat::Missing) } ast::Pat::OrPat(p) => 'b: { @@ -1348,6 +1587,10 @@ impl ExprCollector<'_> { for (id, _) in current_is_used.into_iter() { binding_list.check_is_used(self, id); } + if let &[pat] = &*pats { + // Leading pipe without real OR pattern. Leaving an one-item OR pattern may confuse later stages. 
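The check above handles a leading `|` with a single alternative: that is legal pattern syntax, and flattening it avoids carrying a one-element `Pat::Or` into later stages. For reference, the surface syntax in question:

```rust
// A leading `|` is accepted in patterns; with a single alternative there is
// no real OR pattern to represent.
fn main() {
    match Some(3) {
        | Some(n) => assert_eq!(n, 3), // single alternative with a leading pipe
        | None => {}
    }
    match 2 {
        | 1 | 2 => {}
        _ => unreachable!(),
    }
}
```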
+ return pat; + } Pat::Or(pats.into()) } ast::Pat::ParenPat(p) => return self.collect_pat_opt(p.pat(), binding_list), @@ -1361,8 +1604,7 @@ impl ExprCollector<'_> { } ast::Pat::WildcardPat(_) => Pat::Wild, ast::Pat::RecordPat(p) => { - let path = - p.path().and_then(|path| self.expander.parse_path(self.db, path)).map(Box::new); + let path = p.path().and_then(|path| self.parse_path(path)).map(Box::new); let record_pat_field_list = &p.record_pat_field_list().expect("every struct should have a field list"); let args = record_pat_field_list @@ -1372,7 +1614,7 @@ impl ExprCollector<'_> { let ast_pat = f.pat()?; let pat = self.collect_pat(ast_pat, binding_list); let name = f.field_name()?.as_name(); - let src = self.expander.in_file(AstPtr::new(&f)); + let src = self.expander.in_file(AstPtr::new(&f).wrap_right()); self.source_map.pat_field_map_back.insert(pat, src); Some(RecordFieldPat { name, pat }) }) @@ -1569,20 +1811,51 @@ impl ExprCollector<'_> { lifetime: Option, ) -> Result, BodyDiagnostic> { let Some(lifetime) = lifetime else { return Ok(None) }; + let (mut hygiene_id, mut hygiene_info) = match &self.current_span_map { + None => (HygieneId::ROOT, None), + Some(span_map) => { + let span = span_map.span_at(lifetime.syntax().text_range().start()); + let ctx = self.db.lookup_intern_syntax_context(span.ctx); + let hygiene_id = HygieneId::new(ctx.opaque_and_semitransparent); + let hygiene_info = ctx.outer_expn.map(|expansion| { + let expansion = self.db.lookup_intern_macro_call(expansion); + (ctx.parent, expansion.def) + }); + (hygiene_id, hygiene_info) + } + }; let name = Name::new_lifetime(&lifetime); for (rib_idx, rib) in self.label_ribs.iter().enumerate().rev() { - if let Some((label_name, id)) = &rib.label { - if *label_name == name { - return if self.is_label_valid_from_rib(rib_idx) { - Ok(Some(*id)) - } else { - Err(BodyDiagnostic::UnreachableLabel { - name, - node: self.expander.in_file(AstPtr::new(&lifetime)), - }) - }; + match &rib.kind { + RibKind::Normal(label_name, id, label_hygiene) => { + if *label_name == name && *label_hygiene == hygiene_id { + return if self.is_label_valid_from_rib(rib_idx) { + Ok(Some(*id)) + } else { + Err(BodyDiagnostic::UnreachableLabel { + name, + node: self.expander.in_file(AstPtr::new(&lifetime)), + }) + }; + } } + RibKind::MacroDef(macro_id) => { + if let Some((parent_ctx, label_macro_id)) = hygiene_info { + if label_macro_id == **macro_id { + // A macro is allowed to refer to labels from before its declaration. + // Therefore, if we got to the rib of its declaration, give up its hygiene + // and use its parent expansion. + let parent_ctx = self.db.lookup_intern_syntax_context(parent_ctx); + hygiene_id = HygieneId::new(parent_ctx.opaque_and_semitransparent); + hygiene_info = parent_ctx.outer_expn.map(|expansion| { + let expansion = self.db.lookup_intern_macro_call(expansion); + (parent_ctx.parent, expansion.def) + }); + } + } + } + _ => {} } } @@ -1596,28 +1869,44 @@ impl ExprCollector<'_> { !self.label_ribs[rib_index + 1..].iter().any(|rib| rib.kind.is_label_barrier()) } + fn pop_label_rib(&mut self) { + // We need to pop all macro defs, plus one rib. + while let Some(LabelRib { kind: RibKind::MacroDef(_) }) = self.label_ribs.pop() { + // Do nothing. 
+ } + } + fn with_label_rib(&mut self, kind: RibKind, f: impl FnOnce(&mut Self) -> T) -> T { self.label_ribs.push(LabelRib::new(kind)); let res = f(self); - self.label_ribs.pop(); + self.pop_label_rib(); res } - fn with_labeled_rib(&mut self, label: LabelId, f: impl FnOnce(&mut Self) -> T) -> T { - self.label_ribs.push(LabelRib::new_normal((self.body[label].name.clone(), label))); + fn with_labeled_rib( + &mut self, + label: LabelId, + hygiene: HygieneId, + f: impl FnOnce(&mut Self) -> T, + ) -> T { + self.label_ribs.push(LabelRib::new(RibKind::Normal( + self.body[label].name.clone(), + label, + hygiene, + ))); let res = f(self); - self.label_ribs.pop(); + self.pop_label_rib(); res } fn with_opt_labeled_rib( &mut self, - label: Option, + label: Option<(HygieneId, LabelId)>, f: impl FnOnce(&mut Self) -> T, ) -> T { match label { None => f(self), - Some(label) => self.with_labeled_rib(label, f), + Some((hygiene, label)) => self.with_labeled_rib(label, hygiene, f), } } // endregion: labels @@ -1666,28 +1955,39 @@ impl ExprCollector<'_> { _ => None, }); let mut mappings = vec![]; - let fmt = match template.and_then(|it| self.expand_macros_to_string(it)) { + let (fmt, hygiene) = match template.and_then(|it| self.expand_macros_to_string(it)) { Some((s, is_direct_literal)) => { let call_ctx = self.expander.syntax_context(); - format_args::parse( + let hygiene = self.hygiene_id_for(s.syntax().text_range().start()); + let fmt = format_args::parse( &s, fmt_snippet, args, is_direct_literal, - |name| self.alloc_expr_desugared(Expr::Path(Path::from(name))), + |name| { + let expr_id = self.alloc_expr_desugared(Expr::Path(Path::from(name))); + if !hygiene.is_root() { + self.body.expr_hygiene.insert(expr_id, hygiene); + } + expr_id + }, |name, span| { if let Some(span) = span { mappings.push((span, name)) } }, call_ctx, - ) + ); + (fmt, hygiene) } - None => FormatArgs { - template: Default::default(), - arguments: args.finish(), - orphans: Default::default(), - }, + None => ( + FormatArgs { + template: Default::default(), + arguments: args.finish(), + orphans: Default::default(), + }, + HygieneId::ROOT, + ), }; // Create a list of all _unique_ (argument, format trait) combinations. 
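// Illustrative sketch of what the hygiene bookkeeping in the format_args lowering above
// is for (the macro name `fmt_local` is hypothetical, not taken from the patch): an
// implicit `{x}` capture inside a macro_rules! body must resolve to the macro-local `x`,
// which only works if the desugared `Expr::Path(x)` carries the format literal's hygiene.
macro_rules! fmt_local {
    () => {{
        let x = 42;
        // Implicit capture: the `x` synthesized from the string literal keeps the
        // literal's (macro-body) hygiene, so it finds the `let x` above.
        format!("{x}")
    }};
}

fn main() {
    assert_eq!(fmt_local!(), "42");
}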
@@ -1723,10 +2023,8 @@ impl ExprCollector<'_> { } }) .collect(); - let lit_pieces = self.alloc_expr_desugared(Expr::Array(Array::ElementList { - elements: lit_pieces, - is_assignee_expr: false, - })); + let lit_pieces = + self.alloc_expr_desugared(Expr::Array(Array::ElementList { elements: lit_pieces })); let lit_pieces = self.alloc_expr_desugared(Expr::Ref { expr: lit_pieces, rawness: Rawness::Ref, @@ -1743,10 +2041,7 @@ impl ExprCollector<'_> { Some(self.make_format_spec(placeholder, &mut argmap)) }) .collect(); - let array = self.alloc_expr_desugared(Expr::Array(Array::ElementList { - elements, - is_assignee_expr: false, - })); + let array = self.alloc_expr_desugared(Expr::Array(Array::ElementList { elements })); self.alloc_expr_desugared(Expr::Ref { expr: array, rawness: Rawness::Ref, @@ -1756,10 +2051,8 @@ impl ExprCollector<'_> { let arguments = &*fmt.arguments.arguments; let args = if arguments.is_empty() { - let expr = self.alloc_expr_desugared(Expr::Array(Array::ElementList { - elements: Box::default(), - is_assignee_expr: false, - })); + let expr = self + .alloc_expr_desugared(Expr::Array(Array::ElementList { elements: Box::default() })); self.alloc_expr_desugared(Expr::Ref { expr, rawness: Rawness::Ref, @@ -1786,10 +2079,8 @@ impl ExprCollector<'_> { self.make_argument(arg, ty) }) .collect(); - let array = self.alloc_expr_desugared(Expr::Array(Array::ElementList { - elements: args, - is_assignee_expr: false, - })); + let array = + self.alloc_expr_desugared(Expr::Array(Array::ElementList { elements: args })); self.alloc_expr_desugared(Expr::Ref { expr: array, rawness: Rawness::Ref, @@ -1822,11 +2113,8 @@ impl ExprCollector<'_> { let new_v1_formatted = self.alloc_expr_desugared(Expr::Path(new_v1_formatted)); let unsafe_arg_new = self.alloc_expr_desugared(Expr::Path(unsafe_arg_new)); - let unsafe_arg_new = self.alloc_expr_desugared(Expr::Call { - callee: unsafe_arg_new, - args: Box::default(), - is_assignee_expr: false, - }); + let unsafe_arg_new = + self.alloc_expr_desugared(Expr::Call { callee: unsafe_arg_new, args: Box::default() }); let unsafe_arg_new = self.alloc_expr_desugared(Expr::Unsafe { id: None, // We collect the unused expressions here so that we still infer them instead of @@ -1843,11 +2131,14 @@ impl ExprCollector<'_> { Expr::Call { callee: new_v1_formatted, args: Box::new([lit_pieces, args, format_options, unsafe_arg_new]), - is_assignee_expr: false, }, syntax_ptr, ); - self.source_map.template_map.get_or_insert_with(Default::default).0.insert(idx, mappings); + self.source_map + .template_map + .get_or_insert_with(Default::default) + .0 + .insert(idx, (hygiene, mappings)); idx } @@ -1938,7 +2229,6 @@ impl ExprCollector<'_> { self.alloc_expr_desugared(Expr::Call { callee: format_placeholder_new, args: Box::new([position, fill, align, flags, precision, width]), - is_assignee_expr: false, }) } @@ -1980,11 +2270,7 @@ impl ExprCollector<'_> { Some(count_is) => self.alloc_expr_desugared(Expr::Path(count_is)), None => self.missing_expr(), }; - self.alloc_expr_desugared(Expr::Call { - callee: count_is, - args: Box::new([args]), - is_assignee_expr: false, - }) + self.alloc_expr_desugared(Expr::Call { callee: count_is, args: Box::new([args]) }) } Some(FormatCount::Argument(arg)) => { if let Ok(arg_index) = arg.index { @@ -2005,7 +2291,6 @@ impl ExprCollector<'_> { self.alloc_expr_desugared(Expr::Call { callee: count_param, args: Box::new([args]), - is_assignee_expr: false, }) } else { // FIXME: This drops arg causing it to potentially not be resolved/type checked @@ -2054,11 
+2339,7 @@ impl ExprCollector<'_> { Some(new_fn) => self.alloc_expr_desugared(Expr::Path(new_fn)), None => self.missing_expr(), }; - self.alloc_expr_desugared(Expr::Call { - callee: new_fn, - args: Box::new([arg]), - is_assignee_expr: false, - }) + self.alloc_expr_desugared(Expr::Call { callee: new_fn, args: Box::new([arg]) }) } // endregion: format @@ -2082,7 +2363,7 @@ impl ExprCollector<'_> { let src = self.expander.in_file(ptr); let id = self.body.exprs.alloc(expr); self.source_map.expr_map_back.insert(id, src); - self.source_map.expr_map.insert(src, id); + self.source_map.expr_map.insert(src, id.into()); id } // FIXME: desugared exprs don't have ptr, that's wrong and should be fixed. @@ -2110,10 +2391,17 @@ impl ExprCollector<'_> { binding } + fn alloc_pat_from_expr(&mut self, pat: Pat, ptr: ExprPtr) -> PatId { + let src = self.expander.in_file(ptr); + let id = self.body.pats.alloc(pat); + self.source_map.expr_map.insert(src, id.into()); + self.source_map.pat_map_back.insert(id, src.map(AstPtr::wrap_left)); + id + } fn alloc_pat(&mut self, pat: Pat, ptr: PatPtr) -> PatId { let src = self.expander.in_file(ptr); let id = self.body.pats.alloc(pat); - self.source_map.pat_map_back.insert(id, src); + self.source_map.pat_map_back.insert(id, src.map(AstPtr::wrap_right)); self.source_map.pat_map.insert(src, id); id } @@ -2151,6 +2439,17 @@ impl ExprCollector<'_> { self.awaitable_context = orig; res } + + /// If this returns `HygieneId::ROOT`, do not allocate to save space. + fn hygiene_id_for(&self, span_start: TextSize) -> HygieneId { + match &self.current_span_map { + None => HygieneId::ROOT, + Some(span_map) => { + let ctx = span_map.span_at(span_start).ctx; + HygieneId(self.db.lookup_intern_syntax_context(ctx).opaque_and_semitransparent) + } + } + } } fn comma_follows_token(t: Option) -> bool { diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body/lower/asm.rs b/src/tools/rust-analyzer/crates/hir-def/src/body/lower/asm.rs index 4213370ac195a..c1b58dbdd0cb3 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/body/lower/asm.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/body/lower/asm.rs @@ -158,9 +158,7 @@ impl ExprCollector<'_> { AsmOperand::Const(self.collect_expr_opt(c.expr())) } ast::AsmOperand::AsmSym(s) => { - let Some(path) = - s.path().and_then(|p| self.expander.parse_path(self.db, p)) - else { + let Some(path) = s.path().and_then(|p| self.parse_path(p)) else { continue; }; AsmOperand::Sym(path) diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body/pretty.rs b/src/tools/rust-analyzer/crates/hir-def/src/body/pretty.rs index 37167fcb8155b..f8b6eef342226 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/body/pretty.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/body/pretty.rs @@ -11,7 +11,6 @@ use crate::{ Statement, }, pretty::{print_generic_args, print_path, print_type_ref}, - type_ref::TypeRef, }; use super::*; @@ -69,20 +68,20 @@ pub(super) fn print_body_hir( }; if let DefWithBodyId::FunctionId(it) = owner { p.buf.push('('); - let function_data = &db.function_data(it); + let function_data = db.function_data(it); let (mut params, ret_type) = (function_data.params.iter(), &function_data.ret_type); if let Some(self_param) = body.self_param { p.print_binding(self_param); p.buf.push_str(": "); if let Some(ty) = params.next() { - p.print_type_ref(ty); + p.print_type_ref(*ty, &function_data.types_map); p.buf.push_str(", "); } } body.params.iter().zip(params).for_each(|(¶m, ty)| { p.print_pat(param); p.buf.push_str(": "); - p.print_type_ref(ty); + 
p.print_type_ref(*ty, &function_data.types_map); p.buf.push_str(", "); }); // remove the last ", " in param list @@ -92,7 +91,7 @@ pub(super) fn print_body_hir( p.buf.push(')'); // return type p.buf.push_str(" -> "); - p.print_type_ref(ret_type); + p.print_type_ref(*ret_type, &function_data.types_map); p.buf.push(' '); } p.print_expr(body.body_expr); @@ -242,7 +241,7 @@ impl Printer<'_> { Expr::InlineAsm(_) => w!(self, "builtin#asm(_)"), Expr::OffsetOf(offset_of) => { w!(self, "builtin#offset_of("); - self.print_type_ref(&offset_of.container); + self.print_type_ref(offset_of.container, &self.body.types); let edition = self.edition; w!( self, @@ -277,7 +276,7 @@ impl Printer<'_> { w!(self, "loop "); self.print_expr(*body); } - Expr::Call { callee, args, is_assignee_expr: _ } => { + Expr::Call { callee, args } => { self.print_expr(*callee); w!(self, "("); if !args.is_empty() { @@ -296,7 +295,7 @@ impl Printer<'_> { if let Some(args) = generic_args { w!(self, "::<"); let edition = self.edition; - print_generic_args(self.db, args, self, edition).unwrap(); + print_generic_args(self.db, args, &self.body.types, self, edition).unwrap(); w!(self, ">"); } w!(self, "("); @@ -372,7 +371,7 @@ impl Printer<'_> { self.print_expr(*expr); } } - Expr::RecordLit { path, fields, spread, ellipsis, is_assignee_expr: _ } => { + Expr::RecordLit { path, fields, spread } => { match path { Some(path) => self.print_path(path), None => w!(self, "�"), @@ -391,9 +390,6 @@ impl Printer<'_> { p.print_expr(*spread); wln!(p); } - if *ellipsis { - wln!(p, ".."); - } }); w!(self, "}}"); } @@ -408,7 +404,7 @@ impl Printer<'_> { Expr::Cast { expr, type_ref } => { self.print_expr(*expr); w!(self, " as "); - self.print_type_ref(type_ref); + self.print_type_ref(*type_ref, &self.body.types); } Expr::Ref { expr, rawness, mutability } => { w!(self, "&"); @@ -466,7 +462,7 @@ impl Printer<'_> { w!(self, ") "); } } - Expr::Index { base, index, is_assignee_expr: _ } => { + Expr::Index { base, index } => { self.print_expr(*base); w!(self, "["); self.print_expr(*index); @@ -496,18 +492,18 @@ impl Printer<'_> { self.print_pat(*pat); if let Some(ty) = ty { w!(self, ": "); - self.print_type_ref(ty); + self.print_type_ref(*ty, &self.body.types); } } w!(self, "|"); if let Some(ret_ty) = ret_type { w!(self, " -> "); - self.print_type_ref(ret_ty); + self.print_type_ref(*ret_ty, &self.body.types); } self.whitespace(); self.print_expr(*body); } - Expr::Tuple { exprs, is_assignee_expr: _ } => { + Expr::Tuple { exprs } => { w!(self, "("); for expr in exprs.iter() { self.print_expr(*expr); @@ -519,7 +515,7 @@ impl Printer<'_> { w!(self, "["); if !matches!(arr, Array::ElementList { elements, .. 
} if elements.is_empty()) { self.indented(|p| match arr { - Array::ElementList { elements, is_assignee_expr: _ } => { + Array::ElementList { elements } => { for elem in elements.iter() { p.print_expr(*elem); w!(p, ", "); @@ -551,6 +547,11 @@ impl Printer<'_> { Expr::Const(id) => { w!(self, "const {{ /* {id:?} */ }}"); } + &Expr::Assignment { target, value } => { + self.print_pat(target); + w!(self, " = "); + self.print_expr(value); + } } } @@ -719,6 +720,9 @@ impl Printer<'_> { w!(self, "const "); self.print_expr(*c); } + Pat::Expr(expr) => { + self.print_expr(*expr); + } } } @@ -729,7 +733,7 @@ impl Printer<'_> { self.print_pat(*pat); if let Some(ty) = type_ref { w!(self, ": "); - self.print_type_ref(ty); + self.print_type_ref(*ty, &self.body.types); } if let Some(init) = initializer { w!(self, " = "); @@ -748,7 +752,7 @@ impl Printer<'_> { } wln!(self); } - Statement::Item => (), + Statement::Item(_) => (), } } @@ -787,14 +791,14 @@ impl Printer<'_> { } } - fn print_type_ref(&mut self, ty: &TypeRef) { + fn print_type_ref(&mut self, ty: TypeRefId, map: &TypesMap) { let edition = self.edition; - print_type_ref(self.db, ty, self, edition).unwrap(); + print_type_ref(self.db, ty, map, self, edition).unwrap(); } fn print_path(&mut self, path: &Path) { let edition = self.edition; - print_path(self.db, path, self, edition).unwrap(); + print_path(self.db, path, &self.body.types, self, edition).unwrap(); } fn print_binding(&mut self, id: BindingId) { diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body/scope.rs b/src/tools/rust-analyzer/crates/hir-def/src/body/scope.rs index bf201ca834792..63a7a9af201da 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/body/scope.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/body/scope.rs @@ -1,12 +1,12 @@ //! Name resolution for expressions. -use hir_expand::name::Name; +use hir_expand::{name::Name, MacroDefId}; use la_arena::{Arena, ArenaMap, Idx, IdxRange, RawIdx}; use triomphe::Arc; use crate::{ - body::Body, + body::{Body, HygieneId}, db::DefDatabase, - hir::{Binding, BindingId, Expr, ExprId, LabelId, Pat, PatId, Statement}, + hir::{Binding, BindingId, Expr, ExprId, Item, LabelId, Pat, PatId, Statement}, BlockId, ConstBlockId, DefWithBodyId, }; @@ -22,6 +22,7 @@ pub struct ExprScopes { #[derive(Debug, PartialEq, Eq)] pub struct ScopeEntry { name: Name, + hygiene: HygieneId, binding: BindingId, } @@ -30,6 +31,10 @@ impl ScopeEntry { &self.name } + pub(crate) fn hygiene(&self) -> HygieneId { + self.hygiene + } + pub fn binding(&self) -> BindingId { self.binding } @@ -40,6 +45,8 @@ pub struct ScopeData { parent: Option, block: Option, label: Option<(LabelId, Name)>, + // FIXME: We can compress this with an enum for this and `label`/`block` if memory usage matters. + macro_def: Option>, entries: IdxRange, } @@ -62,6 +69,12 @@ impl ExprScopes { self.scopes[scope].block } + /// If `scope` refers to a macro def scope, returns the corresponding `MacroId`. + #[allow(clippy::borrowed_box)] // If we return `&MacroDefId` we need to move it, this way we just clone the `Box`. + pub fn macro_def(&self, scope: ScopeId) -> Option<&Box> { + self.scopes[scope].macro_def.as_ref() + } + /// If `scope` refers to a labeled expression scope, returns the corresponding `Label`. 
pub fn label(&self, scope: ScopeId) -> Option<(LabelId, Name)> { self.scopes[scope].label.clone() @@ -102,7 +115,7 @@ impl ExprScopes { }; let mut root = scopes.root_scope(); if let Some(self_param) = body.self_param { - scopes.add_bindings(body, root, self_param); + scopes.add_bindings(body, root, self_param, body.binding_hygiene(self_param)); } scopes.add_params_bindings(body, root, &body.params); compute_expr_scopes(body.body_expr, body, &mut scopes, &mut root, resolve_const_block); @@ -114,6 +127,7 @@ impl ExprScopes { parent: None, block: None, label: None, + macro_def: None, entries: empty_entries(self.scope_entries.len()), }) } @@ -123,6 +137,7 @@ impl ExprScopes { parent: Some(parent), block: None, label: None, + macro_def: None, entries: empty_entries(self.scope_entries.len()), }) } @@ -132,6 +147,7 @@ impl ExprScopes { parent: Some(parent), block: None, label, + macro_def: None, entries: empty_entries(self.scope_entries.len()), }) } @@ -146,21 +162,38 @@ impl ExprScopes { parent: Some(parent), block, label, + macro_def: None, entries: empty_entries(self.scope_entries.len()), }) } - fn add_bindings(&mut self, body: &Body, scope: ScopeId, binding: BindingId) { + fn new_macro_def_scope(&mut self, parent: ScopeId, macro_id: Box) -> ScopeId { + self.scopes.alloc(ScopeData { + parent: Some(parent), + block: None, + label: None, + macro_def: Some(macro_id), + entries: empty_entries(self.scope_entries.len()), + }) + } + + fn add_bindings( + &mut self, + body: &Body, + scope: ScopeId, + binding: BindingId, + hygiene: HygieneId, + ) { let Binding { name, .. } = &body.bindings[binding]; - let entry = self.scope_entries.alloc(ScopeEntry { name: name.clone(), binding }); + let entry = self.scope_entries.alloc(ScopeEntry { name: name.clone(), binding, hygiene }); self.scopes[scope].entries = IdxRange::new_inclusive(self.scopes[scope].entries.start()..=entry); } fn add_pat_bindings(&mut self, body: &Body, scope: ScopeId, pat: PatId) { let pattern = &body[pat]; - if let Pat::Bind { id, .. } = pattern { - self.add_bindings(body, scope, *id); + if let Pat::Bind { id, .. } = *pattern { + self.add_bindings(body, scope, id, body.binding_hygiene(id)); } pattern.walk_child_pats(|pat| self.add_pat_bindings(body, scope, pat)); @@ -206,7 +239,10 @@ fn compute_block_scopes( Statement::Expr { expr, .. 
} => { compute_expr_scopes(*expr, body, scopes, scope, resolve_const_block); } - Statement::Item => (), + Statement::Item(Item::MacroDef(macro_id)) => { + *scope = scopes.new_macro_def_scope(*scope, macro_id.clone()); + } + Statement::Item(Item::Other) => (), } } if let Some(expr) = tail { @@ -282,7 +318,7 @@ fn compute_expr_scopes( *scope = scopes.new_scope(*scope); scopes.add_pat_bindings(body, *scope, pat); } - e => e.walk_child_exprs(|e| compute_expr_scopes(scopes, e, scope)), + _ => body.walk_child_exprs(expr, |e| compute_expr_scopes(scopes, e, scope)), }; } @@ -333,6 +369,8 @@ mod tests { let expr_id = source_map .node_expr(InFile { file_id: file_id.into(), value: &marker.into() }) + .unwrap() + .as_expr() .unwrap(); let scope = scopes.scope_for(expr_id); @@ -488,8 +526,11 @@ fn foo() { let expr_scope = { let expr_ast = name_ref.syntax().ancestors().find_map(ast::Expr::cast).unwrap(); - let expr_id = - source_map.node_expr(InFile { file_id: file_id.into(), value: &expr_ast }).unwrap(); + let expr_id = source_map + .node_expr(InFile { file_id: file_id.into(), value: &expr_ast }) + .unwrap() + .as_expr() + .unwrap(); scopes.scope_for(expr_id).unwrap() }; diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body/tests.rs b/src/tools/rust-analyzer/crates/hir-def/src/body/tests.rs index dd3e79c874d85..3b29d98d198f5 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/body/tests.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/body/tests.rs @@ -370,3 +370,37 @@ fn f(a: i32, b: u32) -> String { }"#]] .assert_eq(&body.pretty_print(&db, def, Edition::CURRENT)) } + +#[test] +fn destructuring_assignment_tuple_macro() { + // This is a funny one. `let m!()() = Bar()` is an error in rustc, because `m!()()` isn't a valid pattern, + // but in destructuring assignment it is valid, because `m!()()` is a valid expression, and destructuring + // assignments start their lives as expressions. So we have to do the same. + + let (db, body, def) = lower( + r#" +struct Bar(); + +macro_rules! 
m { + () => { Bar }; +} + +fn foo() { + m!()() = Bar(); +} +"#, + ); + + let (_, source_map) = db.body_with_source_map(def); + assert_eq!(source_map.diagnostics(), &[]); + + for (_, def_map) in body.blocks(&db) { + assert_eq!(def_map.diagnostics(), &[]); + } + + expect![[r#" + fn foo() -> () { + Bar() = Bar(); + }"#]] + .assert_eq(&body.pretty_print(&db, def, Edition::CURRENT)) +} diff --git a/src/tools/rust-analyzer/crates/hir-def/src/data.rs b/src/tools/rust-analyzer/crates/hir-def/src/data.rs index 263fad51d78ec..f49018eaf3813 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/data.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/data.rs @@ -6,7 +6,7 @@ use base_db::CrateId; use hir_expand::{ name::Name, AstId, ExpandResult, HirFileId, InFile, MacroCallId, MacroCallKind, MacroDefKind, }; -use intern::{sym, Interned, Symbol}; +use intern::{sym, Symbol}; use la_arena::{Idx, RawIdx}; use smallvec::SmallVec; use syntax::{ast, Parse}; @@ -25,7 +25,7 @@ use crate::{ DefMap, MacroSubNs, }, path::ImportAlias, - type_ref::{TraitRef, TypeBound, TypeRef}, + type_ref::{TraitRef, TypeBound, TypeRefId, TypesMap}, visibility::RawVisibility, AssocItemId, AstIdWithPath, ConstId, ConstLoc, ExternCrateId, FunctionId, FunctionLoc, HasModule, ImplId, Intern, ItemContainerId, ItemLoc, Lookup, Macro2Id, MacroRulesId, ModuleId, @@ -35,13 +35,14 @@ use crate::{ #[derive(Debug, Clone, PartialEq, Eq)] pub struct FunctionData { pub name: Name, - pub params: Box<[Interned]>, - pub ret_type: Interned, + pub params: Box<[TypeRefId]>, + pub ret_type: TypeRefId, pub attrs: Attrs, pub visibility: RawVisibility, pub abi: Option, pub legacy_const_generics_indices: Option>>, pub rustc_allow_incoherent_impl: bool, + pub types_map: Arc, flags: FnFlags, } @@ -110,13 +111,14 @@ impl FunctionData { .filter(|&(idx, _)| { item_tree.attrs(db, krate, attr_owner(idx)).is_cfg_enabled(cfg_options) }) - .filter_map(|(_, param)| param.type_ref.clone()) + .filter_map(|(_, param)| param.type_ref) .collect(), - ret_type: func.ret_type.clone(), + ret_type: func.ret_type, attrs: item_tree.attrs(db, krate, ModItem::from(loc.id.value).into()), visibility, abi: func.abi.clone(), legacy_const_generics_indices, + types_map: func.types_map.clone(), flags, rustc_allow_incoherent_impl, }) @@ -182,13 +184,14 @@ fn parse_rustc_legacy_const_generics(tt: &crate::tt::Subtree) -> Box<[u32]> { #[derive(Debug, Clone, PartialEq, Eq)] pub struct TypeAliasData { pub name: Name, - pub type_ref: Option>, + pub type_ref: Option, pub visibility: RawVisibility, pub is_extern: bool, pub rustc_has_incoherent_inherent_impls: bool, pub rustc_allow_incoherent_impl: bool, /// Bounds restricting the type alias itself (eg. `type Ty: Bound;` in a trait or impl). 
- pub bounds: Box<[Interned]>, + pub bounds: Box<[TypeBound]>, + pub types_map: Arc, } impl TypeAliasData { @@ -216,12 +219,13 @@ impl TypeAliasData { Arc::new(TypeAliasData { name: typ.name.clone(), - type_ref: typ.type_ref.clone(), + type_ref: typ.type_ref, visibility, is_extern: matches!(loc.container, ItemContainerId::ExternBlockId(_)), rustc_has_incoherent_inherent_impls, rustc_allow_incoherent_impl, bounds: typ.bounds.clone(), + types_map: typ.types_map.clone(), }) } } @@ -343,13 +347,14 @@ impl TraitAliasData { #[derive(Debug, PartialEq, Eq)] pub struct ImplData { - pub target_trait: Option>, - pub self_ty: Interned, + pub target_trait: Option, + pub self_ty: TypeRefId, pub items: Box<[AssocItemId]>, pub is_negative: bool, pub is_unsafe: bool, // box it as the vec is usually empty anyways pub macro_calls: Option, MacroCallId)>>>, + pub types_map: Arc, } impl ImplData { @@ -368,7 +373,7 @@ impl ImplData { let item_tree = tree_id.item_tree(db); let impl_def = &item_tree[tree_id.value]; let target_trait = impl_def.target_trait.clone(); - let self_ty = impl_def.self_ty.clone(); + let self_ty = impl_def.self_ty; let is_negative = impl_def.is_negative; let is_unsafe = impl_def.is_unsafe; @@ -387,6 +392,7 @@ impl ImplData { is_negative, is_unsafe, macro_calls, + types_map: impl_def.types_map.clone(), }), DefDiagnostics::new(diagnostics), ) @@ -532,10 +538,11 @@ impl ExternCrateDeclData { pub struct ConstData { /// `None` for `const _: () = ();` pub name: Option, - pub type_ref: Interned, + pub type_ref: TypeRefId, pub visibility: RawVisibility, pub rustc_allow_incoherent_impl: bool, pub has_body: bool, + pub types_map: Arc, } impl ConstData { @@ -556,10 +563,11 @@ impl ConstData { Arc::new(ConstData { name: konst.name.clone(), - type_ref: konst.type_ref.clone(), + type_ref: konst.type_ref, visibility, rustc_allow_incoherent_impl, has_body: konst.has_body, + types_map: konst.types_map.clone(), }) } } @@ -567,12 +575,13 @@ impl ConstData { #[derive(Debug, Clone, PartialEq, Eq)] pub struct StaticData { pub name: Name, - pub type_ref: Interned, + pub type_ref: TypeRefId, pub visibility: RawVisibility, pub mutable: bool, pub is_extern: bool, pub has_safe_kw: bool, pub has_unsafe_kw: bool, + pub types_map: Arc, } impl StaticData { @@ -583,12 +592,13 @@ impl StaticData { Arc::new(StaticData { name: statik.name.clone(), - type_ref: statik.type_ref.clone(), + type_ref: statik.type_ref, visibility: item_tree[statik.visibility].clone(), mutable: statik.mutable, is_extern: matches!(loc.container, ItemContainerId::ExternBlockId(_)), has_safe_kw: statik.has_safe_kw, has_unsafe_kw: statik.has_unsafe_kw, + types_map: statik.types_map.clone(), }) } } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/data/adt.rs b/src/tools/rust-analyzer/crates/hir-def/src/data/adt.rs index ba54451e594f7..068ebb3b7e910 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/data/adt.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/data/adt.rs @@ -6,7 +6,7 @@ use cfg::CfgOptions; use either::Either; use hir_expand::name::Name; -use intern::{sym, Interned}; +use intern::sym; use la_arena::Arena; use rustc_abi::{Align, Integer, IntegerType, ReprFlags, ReprOptions}; use triomphe::Arc; @@ -21,7 +21,7 @@ use crate::{ lang_item::LangItem, nameres::diagnostics::{DefDiagnostic, DefDiagnostics}, tt::{Delimiter, DelimiterKind, Leaf, Subtree, TokenTree}, - type_ref::TypeRef, + type_ref::{TypeRefId, TypesMap}, visibility::RawVisibility, EnumId, EnumVariantId, LocalFieldId, LocalModuleId, Lookup, StructId, UnionId, VariantId, }; 
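// A minimal sketch of the access pattern the adt changes below introduce: field types are
// now `TypeRefId` indices resolved through the variant's own `TypesMap`. The helper name
// `first_field_type` is hypothetical and assumes the `VariantData`, `FieldData`, `TypesMap`,
// and `TypeRef` definitions from this diff.
fn first_field_type(variant: &VariantData) -> Option<&TypeRef> {
    // `fields()` yields an arena of `FieldData`; each `type_ref` is an id, not a type.
    let (_, field) = variant.fields().iter().next()?;
    // `types_map()` returns the map the id indexes into (`TypesMap::EMPTY` for unit variants).
    Some(&variant.types_map()[field.type_ref])
}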
@@ -73,8 +73,8 @@ pub struct EnumVariantData { #[derive(Debug, Clone, PartialEq, Eq)] pub enum VariantData { - Record(Arena), - Tuple(Arena), + Record { fields: Arena, types_map: Arc }, + Tuple { fields: Arena, types_map: Arc }, Unit, } @@ -82,7 +82,7 @@ pub enum VariantData { #[derive(Debug, Clone, PartialEq, Eq)] pub struct FieldData { pub name: Name, - pub type_ref: Interned, + pub type_ref: TypeRefId, pub visibility: RawVisibility, } @@ -208,7 +208,7 @@ impl StructData { } let strukt = &item_tree[loc.id.value]; - let (data, diagnostics) = lower_fields( + let (fields, diagnostics) = lower_fields( db, krate, loc.container.local_id, @@ -219,12 +219,13 @@ impl StructData { &strukt.fields, None, ); + let types_map = strukt.types_map.clone(); ( Arc::new(StructData { name: strukt.name.clone(), variant_data: Arc::new(match strukt.shape { - FieldsShape::Record => VariantData::Record(data), - FieldsShape::Tuple => VariantData::Tuple(data), + FieldsShape::Record => VariantData::Record { fields, types_map }, + FieldsShape::Tuple => VariantData::Tuple { fields, types_map }, FieldsShape::Unit => VariantData::Unit, }), repr, @@ -258,7 +259,7 @@ impl StructData { } let union = &item_tree[loc.id.value]; - let (data, diagnostics) = lower_fields( + let (fields, diagnostics) = lower_fields( db, krate, loc.container.local_id, @@ -269,10 +270,11 @@ impl StructData { &union.fields, None, ); + let types_map = union.types_map.clone(); ( Arc::new(StructData { name: union.name.clone(), - variant_data: Arc::new(VariantData::Record(data)), + variant_data: Arc::new(VariantData::Record { fields, types_map }), repr, visibility: item_tree[union.visibility].clone(), flags, @@ -360,7 +362,7 @@ impl EnumVariantData { let item_tree = loc.id.item_tree(db); let variant = &item_tree[loc.id.value]; - let (data, diagnostics) = lower_fields( + let (fields, diagnostics) = lower_fields( db, krate, container.local_id, @@ -371,13 +373,14 @@ impl EnumVariantData { &variant.fields, Some(item_tree[loc.parent.lookup(db).id.value].visibility), ); + let types_map = variant.types_map.clone(); ( Arc::new(EnumVariantData { name: variant.name.clone(), variant_data: Arc::new(match variant.shape { - FieldsShape::Record => VariantData::Record(data), - FieldsShape::Tuple => VariantData::Tuple(data), + FieldsShape::Record => VariantData::Record { fields, types_map }, + FieldsShape::Tuple => VariantData::Tuple { fields, types_map }, FieldsShape::Unit => VariantData::Unit, }), }), @@ -390,11 +393,20 @@ impl VariantData { pub fn fields(&self) -> &Arena { const EMPTY: &Arena = &Arena::new(); match &self { - VariantData::Record(fields) | VariantData::Tuple(fields) => fields, + VariantData::Record { fields, .. } | VariantData::Tuple { fields, .. } => fields, _ => EMPTY, } } + pub fn types_map(&self) -> &TypesMap { + match &self { + VariantData::Record { types_map, .. } | VariantData::Tuple { types_map, .. } => { + types_map + } + VariantData::Unit => TypesMap::EMPTY, + } + } + // FIXME: Linear lookup pub fn field(&self, name: &Name) -> Option { self.fields().iter().find_map(|(id, data)| if &data.name == name { Some(id) } else { None }) @@ -402,8 +414,8 @@ impl VariantData { pub fn kind(&self) -> StructKind { match self { - VariantData::Record(_) => StructKind::Record, - VariantData::Tuple(_) => StructKind::Tuple, + VariantData::Record { .. } => StructKind::Record, + VariantData::Tuple { .. 
} => StructKind::Tuple, VariantData::Unit => StructKind::Unit, } } @@ -463,7 +475,7 @@ fn lower_field( ) -> FieldData { FieldData { name: field.name.clone(), - type_ref: field.type_ref.clone(), + type_ref: field.type_ref, visibility: item_tree[override_visibility.unwrap_or(field.visibility)].clone(), } } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/db.rs b/src/tools/rust-analyzer/crates/hir-def/src/db.rs index aeda302f35c5e..d7e83ce33e896 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/db.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/db.rs @@ -2,7 +2,7 @@ use base_db::{ra_salsa, CrateId, SourceDatabase, Upcast}; use either::Either; use hir_expand::{db::ExpandDatabase, HirFileId, MacroDefId}; -use intern::{sym, Interned}; +use intern::sym; use la_arena::ArenaMap; use span::{EditionedFileId, MacroCallId}; use syntax::{ast, AstPtr}; @@ -18,9 +18,10 @@ use crate::{ }, generics::GenericParams, import_map::ImportMap, - item_tree::{AttrOwner, ItemTree}, + item_tree::{AttrOwner, ItemTree, ItemTreeSourceMaps}, lang_item::{self, LangItem, LangItemTarget, LangItems}, nameres::{diagnostics::DefDiagnostics, DefMap}, + type_ref::TypesSourceMap, visibility::{self, Visibility}, AttrDefId, BlockId, BlockLoc, ConstBlockId, ConstBlockLoc, ConstId, ConstLoc, DefWithBodyId, EnumId, EnumLoc, EnumVariantId, EnumVariantLoc, ExternBlockId, ExternBlockLoc, ExternCrateId, @@ -91,6 +92,18 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + Upcast Arc; + #[ra_salsa::invoke(ItemTree::file_item_tree_with_source_map_query)] + fn file_item_tree_with_source_map( + &self, + file_id: HirFileId, + ) -> (Arc, Arc); + + #[ra_salsa::invoke(ItemTree::block_item_tree_with_source_map_query)] + fn block_item_tree_with_source_map( + &self, + block_id: BlockId, + ) -> (Arc, Arc); + #[ra_salsa::invoke(DefMap::crate_def_map_query)] fn crate_def_map(&self, krate: CrateId) -> Arc; @@ -187,7 +200,14 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + Upcast Arc; #[ra_salsa::invoke(GenericParams::generic_params_query)] - fn generic_params(&self, def: GenericDefId) -> Interned; + fn generic_params(&self, def: GenericDefId) -> Arc; + + /// If this returns `None` for the source map, that means it is the same as with the item tree. 
+ #[ra_salsa::invoke(GenericParams::generic_params_with_source_map_query)] + fn generic_params_with_source_map( + &self, + def: GenericDefId, + ) -> (Arc, Option>); // region:attrs diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expander.rs b/src/tools/rust-analyzer/crates/hir-def/src/expander.rs index 6d8b4445f75bc..d430733fcadfe 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/expander.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/expander.rs @@ -14,6 +14,7 @@ use span::SyntaxContextId; use syntax::{ast, Parse}; use triomphe::Arc; +use crate::type_ref::{TypesMap, TypesSourceMap}; use crate::{ attr::Attrs, db::DefDatabase, lower::LowerCtx, path::Path, AsMacroCall, MacroId, ModuleId, UnresolvedMacro, @@ -49,6 +50,10 @@ impl Expander { } } + pub(crate) fn span_map(&self, db: &dyn DefDatabase) -> &SpanMap { + self.span_map.get_or_init(|| db.span_map(self.current_file_id)) + } + pub fn krate(&self) -> CrateId { self.module.krate } @@ -110,8 +115,19 @@ impl Expander { mark.bomb.defuse(); } - pub fn ctx<'a>(&self, db: &'a dyn DefDatabase) -> LowerCtx<'a> { - LowerCtx::with_span_map_cell(db, self.current_file_id, self.span_map.clone()) + pub fn ctx<'a>( + &self, + db: &'a dyn DefDatabase, + types_map: &'a mut TypesMap, + types_source_map: &'a mut TypesSourceMap, + ) -> LowerCtx<'a> { + LowerCtx::with_span_map_cell( + db, + self.current_file_id, + self.span_map.clone(), + types_map, + types_source_map, + ) } pub(crate) fn in_file(&self, value: T) -> InFile { @@ -138,8 +154,20 @@ impl Expander { self.current_file_id } - pub(crate) fn parse_path(&mut self, db: &dyn DefDatabase, path: ast::Path) -> Option { - let ctx = LowerCtx::with_span_map_cell(db, self.current_file_id, self.span_map.clone()); + pub(crate) fn parse_path( + &mut self, + db: &dyn DefDatabase, + path: ast::Path, + types_map: &mut TypesMap, + types_source_map: &mut TypesSourceMap, + ) -> Option { + let ctx = LowerCtx::with_span_map_cell( + db, + self.current_file_id, + self.span_map.clone(), + types_map, + types_source_map, + ); Path::from_src(&ctx, path) } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs b/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs index f5e03e5281e21..a615abd1bbe04 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs @@ -1025,7 +1025,7 @@ pub mod ast { check_found_path( r#" mod bar { - mod foo { pub(super) struct S; } + mod foo { pub(crate) struct S; } pub(crate) use foo::*; } $0 @@ -1047,7 +1047,7 @@ $0 check_found_path( r#" mod bar { - mod foo { pub(super) struct S; } + mod foo { pub(crate) struct S; } pub(crate) use foo::S as U; } $0 diff --git a/src/tools/rust-analyzer/crates/hir-def/src/generics.rs b/src/tools/rust-analyzer/crates/hir-def/src/generics.rs index 6c34ee086aa9b..6b79850e9c40f 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/generics.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/generics.rs @@ -3,16 +3,18 @@ //! generic parameters. See also the `Generics` type and the `generics_of` query //! in rustc. 
-use std::ops; +use std::{ops, sync::LazyLock}; use either::Either; use hir_expand::{ name::{AsName, Name}, ExpandResult, }; -use intern::Interned; use la_arena::{Arena, RawIdx}; -use stdx::impl_from; +use stdx::{ + impl_from, + thin_vec::{EmptyOptimizedThinVec, ThinVec}, +}; use syntax::ast::{self, HasGenericParams, HasName, HasTypeBounds}; use triomphe::Arc; @@ -22,7 +24,11 @@ use crate::{ item_tree::{AttrOwner, FileItemTreeId, GenericModItem, GenericsItemTreeNode, ItemTree}, lower::LowerCtx, nameres::{DefMap, MacroSubNs}, - type_ref::{ConstRef, LifetimeRef, TypeBound, TypeRef}, + path::{AssociatedTypeBinding, GenericArg, GenericArgs, NormalPath, Path}, + type_ref::{ + ArrayType, ConstRef, FnType, LifetimeRef, RefType, TypeBound, TypeRef, TypeRefId, TypesMap, + TypesSourceMap, + }, AdtId, ConstParamId, GenericDefId, HasModule, ItemTreeLoc, LifetimeParamId, LocalLifetimeParamId, LocalTypeOrConstParamId, Lookup, TypeOrConstParamId, TypeParamId, }; @@ -37,7 +43,7 @@ pub struct TypeParamData { /// [`None`] only if the type ref is an [`TypeRef::ImplTrait`]. FIXME: Might be better to just /// make it always be a value, giving impl trait a special name. pub name: Option, - pub default: Option>, + pub default: Option, pub provenance: TypeParamProvenance, } @@ -51,7 +57,7 @@ pub struct LifetimeParamData { #[derive(Clone, PartialEq, Eq, Debug, Hash)] pub struct ConstParamData { pub name: Name, - pub ty: Interned, + pub ty: TypeRefId, pub default: Option, } @@ -161,6 +167,7 @@ pub struct GenericParams { type_or_consts: Arena, lifetimes: Arena, where_predicates: Box<[WherePredicate]>, + pub types_map: TypesMap, } impl ops::Index for GenericParams { @@ -183,24 +190,14 @@ impl ops::Index for GenericParams { /// associated type bindings like `Iterator`. #[derive(Clone, PartialEq, Eq, Debug, Hash)] pub enum WherePredicate { - TypeBound { - target: WherePredicateTypeTarget, - bound: Interned, - }, - Lifetime { - target: LifetimeRef, - bound: LifetimeRef, - }, - ForLifetime { - lifetimes: Box<[Name]>, - target: WherePredicateTypeTarget, - bound: Interned, - }, + TypeBound { target: WherePredicateTypeTarget, bound: TypeBound }, + Lifetime { target: LifetimeRef, bound: LifetimeRef }, + ForLifetime { lifetimes: Box<[Name]>, target: WherePredicateTypeTarget, bound: TypeBound }, } #[derive(Clone, PartialEq, Eq, Debug, Hash)] pub enum WherePredicateTypeTarget { - TypeRef(Interned), + TypeRef(TypeRefId), /// For desugared where predicates that can directly refer to a type param. 
TypeOrConstParam(LocalTypeOrConstParamId), } @@ -300,7 +297,14 @@ impl GenericParams { pub(crate) fn generic_params_query( db: &dyn DefDatabase, def: GenericDefId, - ) -> Interned { + ) -> Arc { + db.generic_params_with_source_map(def).0 + } + + pub(crate) fn generic_params_with_source_map_query( + db: &dyn DefDatabase, + def: GenericDefId, + ) -> (Arc, Option>) { let _p = tracing::info_span!("generic_params_query").entered(); let krate = def.krate(db); @@ -309,7 +313,7 @@ impl GenericParams { // Returns the generic parameters that are enabled under the current `#[cfg]` options let enabled_params = - |params: &Interned, item_tree: &ItemTree, parent: GenericModItem| { + |params: &Arc, item_tree: &ItemTree, parent: GenericModItem| { let enabled = |param| item_tree.attrs(db, krate, param).is_cfg_enabled(cfg_options); let attr_owner_ct = |param| AttrOwner::TypeOrConstParamData(parent, param); let attr_owner_lt = |param| AttrOwner::LifetimeParamData(parent, param); @@ -325,7 +329,7 @@ impl GenericParams { if all_type_or_consts_enabled && all_lifetimes_enabled { params.clone() } else { - Interned::new(GenericParams { + Arc::new(GenericParams { type_or_consts: all_type_or_consts_enabled .then(|| params.type_or_consts.clone()) .unwrap_or_else(|| { @@ -347,6 +351,7 @@ impl GenericParams { .collect() }), where_predicates: params.where_predicates.clone(), + types_map: params.types_map.clone(), }) } }; @@ -357,18 +362,18 @@ impl GenericParams { Data = impl ItemTreeLoc, >, enabled_params: impl Fn( - &Interned, + &Arc, &ItemTree, GenericModItem, - ) -> Interned, - ) -> Interned + ) -> Arc, + ) -> (Arc, Option>) where FileItemTreeId: Into, { let id = id.lookup(db).item_tree_id(); let tree = id.item_tree(db); let item = &tree[id.value]; - enabled_params(item.generic_params(), &tree, id.value.into()) + (enabled_params(item.generic_params(), &tree, id.value.into()), None) } match def { @@ -383,28 +388,37 @@ impl GenericParams { let module = loc.container.module(db); let func_data = db.function_data(id); if func_data.params.is_empty() { - enabled_params + (enabled_params, None) } else { + let source_maps = loc.id.item_tree_with_source_map(db).1; + let item_source_maps = source_maps.function(loc.id.value); let mut generic_params = GenericParamsCollector { type_or_consts: enabled_params.type_or_consts.clone(), lifetimes: enabled_params.lifetimes.clone(), where_predicates: enabled_params.where_predicates.clone().into(), }; + let (mut types_map, mut types_source_maps) = + (enabled_params.types_map.clone(), item_source_maps.generics().clone()); // Don't create an `Expander` if not needed since this // could cause a reparse after the `ItemTree` has been created due to the spanmap. 
let mut expander = None; - for param in func_data.params.iter() { + for ¶m in func_data.params.iter() { generic_params.fill_implicit_impl_trait_args( db, + &mut types_map, + &mut types_source_maps, &mut expander, &mut || { (module.def_map(db), Expander::new(db, loc.id.file_id(), module)) }, param, + &item.types_map, + item_source_maps.item(), ); } - Interned::new(generic_params.finish()) + let generics = generic_params.finish(types_map, &mut types_source_maps); + (generics, Some(Arc::new(types_source_maps))) } } GenericDefId::AdtId(AdtId::StructId(id)) => id_to_generics(db, id, enabled_params), @@ -414,11 +428,15 @@ impl GenericParams { GenericDefId::TraitAliasId(id) => id_to_generics(db, id, enabled_params), GenericDefId::TypeAliasId(id) => id_to_generics(db, id, enabled_params), GenericDefId::ImplId(id) => id_to_generics(db, id, enabled_params), - GenericDefId::ConstId(_) => Interned::new(GenericParams { - type_or_consts: Default::default(), - lifetimes: Default::default(), - where_predicates: Default::default(), - }), + GenericDefId::ConstId(_) => ( + Arc::new(GenericParams { + type_or_consts: Default::default(), + lifetimes: Default::default(), + where_predicates: Default::default(), + types_map: Default::default(), + }), + None, + ), } } } @@ -452,7 +470,7 @@ impl GenericParamsCollector { &mut self, lower_ctx: &LowerCtx<'_>, type_bounds: Option, - target: Either, + target: Either, ) { for bound in type_bounds.iter().flat_map(|type_bound_list| type_bound_list.bounds()) { self.add_where_predicate_from_bound(lower_ctx, bound, None, target.clone()); @@ -473,16 +491,15 @@ impl GenericParamsCollector { ast::TypeOrConstParam::Type(type_param) => { let name = type_param.name().map_or_else(Name::missing, |it| it.as_name()); // FIXME: Use `Path::from_src` - let default = type_param - .default_type() - .map(|it| Interned::new(TypeRef::from_ast(lower_ctx, it))); + let default = + type_param.default_type().map(|it| TypeRef::from_ast(lower_ctx, it)); let param = TypeParamData { name: Some(name.clone()), default, provenance: TypeParamProvenance::TypeParamList, }; let idx = self.type_or_consts.alloc(param.into()); - let type_ref = TypeRef::Path(name.into()); + let type_ref = lower_ctx.alloc_type_ref_desugared(TypeRef::Path(name.into())); self.fill_bounds( lower_ctx, type_param.type_bound_list(), @@ -492,12 +509,10 @@ impl GenericParamsCollector { } ast::TypeOrConstParam::Const(const_param) => { let name = const_param.name().map_or_else(Name::missing, |it| it.as_name()); - let ty = const_param - .ty() - .map_or(TypeRef::Error, |it| TypeRef::from_ast(lower_ctx, it)); + let ty = TypeRef::from_ast_opt(lower_ctx, const_param.ty()); let param = ConstParamData { name, - ty: Interned::new(ty), + ty, default: ConstRef::from_const_param(lower_ctx, &const_param), }; let idx = self.type_or_consts.alloc(param.into()); @@ -557,7 +572,7 @@ impl GenericParamsCollector { lower_ctx: &LowerCtx<'_>, bound: ast::TypeBound, hrtb_lifetimes: Option<&[Name]>, - target: Either, + target: Either, ) { let bound = TypeBound::from_ast(lower_ctx, bound); self.fill_impl_trait_bounds(lower_ctx.take_impl_traits_bounds()); @@ -565,12 +580,12 @@ impl GenericParamsCollector { (Either::Left(type_ref), bound) => match hrtb_lifetimes { Some(hrtb_lifetimes) => WherePredicate::ForLifetime { lifetimes: hrtb_lifetimes.to_vec().into_boxed_slice(), - target: WherePredicateTypeTarget::TypeRef(Interned::new(type_ref)), - bound: Interned::new(bound), + target: WherePredicateTypeTarget::TypeRef(type_ref), + bound, }, None => 
WherePredicate::TypeBound { - target: WherePredicateTypeTarget::TypeRef(Interned::new(type_ref)), - bound: Interned::new(bound), + target: WherePredicateTypeTarget::TypeRef(type_ref), + bound, }, }, (Either::Right(lifetime), TypeBound::Lifetime(bound)) => { @@ -581,7 +596,7 @@ impl GenericParamsCollector { self.where_predicates.push(predicate); } - fn fill_impl_trait_bounds(&mut self, impl_bounds: Vec>>) { + fn fill_impl_trait_bounds(&mut self, impl_bounds: Vec>) { for bounds in impl_bounds { let param = TypeParamData { name: None, @@ -589,10 +604,10 @@ impl GenericParamsCollector { provenance: TypeParamProvenance::ArgumentImplTrait, }; let param_id = self.type_or_consts.alloc(param.into()); - for bound in bounds { + for bound in &bounds { self.where_predicates.push(WherePredicate::TypeBound { target: WherePredicateTypeTarget::TypeOrConstParam(param_id), - bound, + bound: bound.clone(), }); } } @@ -601,12 +616,16 @@ impl GenericParamsCollector { fn fill_implicit_impl_trait_args( &mut self, db: &dyn DefDatabase, + generics_types_map: &mut TypesMap, + generics_types_source_map: &mut TypesSourceMap, // FIXME: Change this back to `LazyCell` if https://github.com/rust-lang/libs-team/issues/429 is accepted. exp: &mut Option<(Arc, Expander)>, exp_fill: &mut dyn FnMut() -> (Arc, Expander), - type_ref: &TypeRef, + type_ref: TypeRefId, + types_map: &TypesMap, + types_source_map: &TypesSourceMap, ) { - type_ref.walk(&mut |type_ref| { + TypeRef::walk(type_ref, types_map, &mut |type_ref| { if let TypeRef::ImplTrait(bounds) = type_ref { let param = TypeParamData { name: None, @@ -615,12 +634,20 @@ impl GenericParamsCollector { }; let param_id = self.type_or_consts.alloc(param.into()); for bound in bounds { + let bound = copy_type_bound( + bound, + types_map, + types_source_map, + generics_types_map, + generics_types_source_map, + ); self.where_predicates.push(WherePredicate::TypeBound { target: WherePredicateTypeTarget::TypeOrConstParam(param_id), - bound: bound.clone(), + bound, }); } } + if let TypeRef::Macro(mc) = type_ref { let macro_call = mc.to_node(db.upcast()); let (def_map, expander) = exp.get_or_insert_with(&mut *exp_fill); @@ -641,23 +668,217 @@ impl GenericParamsCollector { if let Ok(ExpandResult { value: Some((mark, expanded)), .. 
}) = expander.enter_expand(db, macro_call, resolver) { - let ctx = expander.ctx(db); + let (mut macro_types_map, mut macro_types_source_map) = + (TypesMap::default(), TypesSourceMap::default()); + let ctx = expander.ctx(db, &mut macro_types_map, &mut macro_types_source_map); let type_ref = TypeRef::from_ast(&ctx, expanded.tree()); - self.fill_implicit_impl_trait_args(db, &mut *exp, exp_fill, &type_ref); + self.fill_implicit_impl_trait_args( + db, + generics_types_map, + generics_types_source_map, + &mut *exp, + exp_fill, + type_ref, + ¯o_types_map, + ¯o_types_source_map, + ); exp.get_or_insert_with(&mut *exp_fill).1.exit(mark); } } }); } - pub(crate) fn finish(self) -> GenericParams { - let Self { mut lifetimes, mut type_or_consts, where_predicates } = self; + pub(crate) fn finish( + self, + mut generics_types_map: TypesMap, + generics_types_source_map: &mut TypesSourceMap, + ) -> Arc { + let Self { mut lifetimes, mut type_or_consts, mut where_predicates } = self; + + if lifetimes.is_empty() && type_or_consts.is_empty() && where_predicates.is_empty() { + static EMPTY: LazyLock> = LazyLock::new(|| { + Arc::new(GenericParams { + lifetimes: Arena::new(), + type_or_consts: Arena::new(), + where_predicates: Box::default(), + types_map: TypesMap::default(), + }) + }); + return Arc::clone(&EMPTY); + } + lifetimes.shrink_to_fit(); type_or_consts.shrink_to_fit(); - GenericParams { + where_predicates.shrink_to_fit(); + generics_types_map.shrink_to_fit(); + generics_types_source_map.shrink_to_fit(); + Arc::new(GenericParams { type_or_consts, lifetimes, where_predicates: where_predicates.into_boxed_slice(), + types_map: generics_types_map, + }) + } +} + +/// Copies a `TypeRef` from a `TypesMap` (accompanied with `TypesSourceMap`) into another `TypesMap` +/// (and `TypesSourceMap`). 
+fn copy_type_ref( + type_ref: TypeRefId, + from: &TypesMap, + from_source_map: &TypesSourceMap, + to: &mut TypesMap, + to_source_map: &mut TypesSourceMap, +) -> TypeRefId { + let result = match &from[type_ref] { + TypeRef::Fn(fn_) => { + let params = fn_.params().iter().map(|(name, param_type)| { + (name.clone(), copy_type_ref(*param_type, from, from_source_map, to, to_source_map)) + }); + TypeRef::Fn(FnType::new(fn_.is_varargs(), fn_.is_unsafe(), fn_.abi().clone(), params)) } + TypeRef::Tuple(types) => TypeRef::Tuple(EmptyOptimizedThinVec::from_iter( + types.iter().map(|&t| copy_type_ref(t, from, from_source_map, to, to_source_map)), + )), + &TypeRef::RawPtr(type_ref, mutbl) => TypeRef::RawPtr( + copy_type_ref(type_ref, from, from_source_map, to, to_source_map), + mutbl, + ), + TypeRef::Reference(ref_) => TypeRef::Reference(Box::new(RefType { + ty: copy_type_ref(ref_.ty, from, from_source_map, to, to_source_map), + lifetime: ref_.lifetime.clone(), + mutability: ref_.mutability, + })), + TypeRef::Array(array) => TypeRef::Array(Box::new(ArrayType { + ty: copy_type_ref(array.ty, from, from_source_map, to, to_source_map), + len: array.len.clone(), + })), + &TypeRef::Slice(type_ref) => { + TypeRef::Slice(copy_type_ref(type_ref, from, from_source_map, to, to_source_map)) + } + TypeRef::ImplTrait(bounds) => TypeRef::ImplTrait(ThinVec::from_iter(copy_type_bounds( + bounds, + from, + from_source_map, + to, + to_source_map, + ))), + TypeRef::DynTrait(bounds) => TypeRef::DynTrait(ThinVec::from_iter(copy_type_bounds( + bounds, + from, + from_source_map, + to, + to_source_map, + ))), + TypeRef::Path(path) => { + TypeRef::Path(copy_path(path, from, from_source_map, to, to_source_map)) + } + TypeRef::Never => TypeRef::Never, + TypeRef::Placeholder => TypeRef::Placeholder, + TypeRef::Macro(macro_call) => TypeRef::Macro(*macro_call), + TypeRef::Error => TypeRef::Error, + }; + let id = to.types.alloc(result); + if let Some(&ptr) = from_source_map.types_map_back.get(id) { + to_source_map.types_map_back.insert(id, ptr); + } + id +} + +fn copy_path( + path: &Path, + from: &TypesMap, + from_source_map: &TypesSourceMap, + to: &mut TypesMap, + to_source_map: &mut TypesSourceMap, +) -> Path { + match path { + Path::BarePath(mod_path) => Path::BarePath(mod_path.clone()), + Path::Normal(path) => { + let type_anchor = path + .type_anchor() + .map(|type_ref| copy_type_ref(type_ref, from, from_source_map, to, to_source_map)); + let mod_path = path.mod_path().clone(); + let generic_args = path.generic_args().iter().map(|generic_args| { + copy_generic_args(generic_args, from, from_source_map, to, to_source_map) + }); + Path::Normal(NormalPath::new(type_anchor, mod_path, generic_args)) + } + Path::LangItem(lang_item, name) => Path::LangItem(*lang_item, name.clone()), + } +} + +fn copy_generic_args( + generic_args: &Option, + from: &TypesMap, + from_source_map: &TypesSourceMap, + to: &mut TypesMap, + to_source_map: &mut TypesSourceMap, +) -> Option { + generic_args.as_ref().map(|generic_args| { + let args = generic_args + .args + .iter() + .map(|arg| match arg { + &GenericArg::Type(ty) => { + GenericArg::Type(copy_type_ref(ty, from, from_source_map, to, to_source_map)) + } + GenericArg::Lifetime(lifetime) => GenericArg::Lifetime(lifetime.clone()), + GenericArg::Const(konst) => GenericArg::Const(konst.clone()), + }) + .collect(); + let bindings = generic_args + .bindings + .iter() + .map(|binding| { + let name = binding.name.clone(); + let args = + copy_generic_args(&binding.args, from, from_source_map, to, 
to_source_map); + let type_ref = binding.type_ref.map(|type_ref| { + copy_type_ref(type_ref, from, from_source_map, to, to_source_map) + }); + let bounds = + copy_type_bounds(&binding.bounds, from, from_source_map, to, to_source_map) + .collect(); + AssociatedTypeBinding { name, args, type_ref, bounds } + }) + .collect(); + GenericArgs { + args, + has_self_type: generic_args.has_self_type, + bindings, + desugared_from_fn: generic_args.desugared_from_fn, + } + }) +} + +fn copy_type_bounds<'a>( + bounds: &'a [TypeBound], + from: &'a TypesMap, + from_source_map: &'a TypesSourceMap, + to: &'a mut TypesMap, + to_source_map: &'a mut TypesSourceMap, +) -> impl stdx::thin_vec::TrustedLen + 'a { + bounds.iter().map(|bound| copy_type_bound(bound, from, from_source_map, to, to_source_map)) +} + +fn copy_type_bound( + bound: &TypeBound, + from: &TypesMap, + from_source_map: &TypesSourceMap, + to: &mut TypesMap, + to_source_map: &mut TypesSourceMap, +) -> TypeBound { + match bound { + TypeBound::Path(path, modifier) => { + TypeBound::Path(copy_path(path, from, from_source_map, to, to_source_map), *modifier) + } + TypeBound::ForLifetime(lifetimes, path) => TypeBound::ForLifetime( + lifetimes.clone(), + copy_path(path, from, from_source_map, to, to_source_map), + ), + TypeBound::Lifetime(lifetime) => TypeBound::Lifetime(lifetime.clone()), + TypeBound::Use(use_args) => TypeBound::Use(use_args.clone()), + TypeBound::Error => TypeBound::Error, } } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/hir.rs b/src/tools/rust-analyzer/crates/hir-def/src/hir.rs index d9358a28822e7..8596346943022 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/hir.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/hir.rs @@ -17,16 +17,17 @@ pub mod type_ref; use std::fmt; -use hir_expand::name::Name; -use intern::{Interned, Symbol}; +use hir_expand::{name::Name, MacroDefId}; +use intern::Symbol; use la_arena::{Idx, RawIdx}; use rustc_apfloat::ieee::{Half as f16, Quad as f128}; use syntax::ast; +use type_ref::TypeRefId; use crate::{ builtin_type::{BuiltinFloat, BuiltinInt, BuiltinUint}, path::{GenericArgs, Path}, - type_ref::{Mutability, Rawness, TypeRef}, + type_ref::{Mutability, Rawness}, BlockId, ConstBlockId, }; @@ -48,6 +49,22 @@ pub enum ExprOrPatId { ExprId(ExprId), PatId(PatId), } + +impl ExprOrPatId { + pub fn as_expr(self) -> Option { + match self { + Self::ExprId(v) => Some(v), + _ => None, + } + } + + pub fn as_pat(self) -> Option { + match self { + Self::PatId(v) => Some(v), + _ => None, + } + } +} stdx::impl_from!(ExprId, PatId for ExprOrPatId); #[derive(Debug, Clone, Eq, PartialEq)] @@ -204,7 +221,6 @@ pub enum Expr { Call { callee: ExprId, args: Box<[ExprId]>, - is_assignee_expr: bool, }, MethodCall { receiver: ExprId, @@ -239,8 +255,6 @@ pub enum Expr { path: Option>, fields: Box<[RecordLitField]>, spread: Option, - ellipsis: bool, - is_assignee_expr: bool, }, Field { expr: ExprId, @@ -251,7 +265,7 @@ pub enum Expr { }, Cast { expr: ExprId, - type_ref: Interned, + type_ref: TypeRefId, }, Ref { expr: ExprId, @@ -265,11 +279,17 @@ pub enum Expr { expr: ExprId, op: UnaryOp, }, + /// `op` cannot be bare `=` (but can be `op=`), these are lowered to `Assignment` instead. BinaryOp { lhs: ExprId, rhs: ExprId, op: Option, }, + // Assignments need a special treatment because of destructuring assignment. 
+ Assignment { + target: PatId, + value: ExprId, + }, Range { lhs: Option, rhs: Option, @@ -278,19 +298,17 @@ pub enum Expr { Index { base: ExprId, index: ExprId, - is_assignee_expr: bool, }, Closure { args: Box<[PatId]>, - arg_types: Box<[Option>]>, - ret_type: Option>, + arg_types: Box<[Option]>, + ret_type: Option, body: ExprId, closure_kind: ClosureKind, capture_by: CaptureBy, }, Tuple { exprs: Box<[ExprId]>, - is_assignee_expr: bool, }, Array(Array), Literal(Literal), @@ -301,7 +319,7 @@ pub enum Expr { #[derive(Debug, Clone, PartialEq, Eq)] pub struct OffsetOf { - pub container: Interned, + pub container: TypeRefId, pub fields: Box<[Name]>, } @@ -446,7 +464,7 @@ pub enum Movability { #[derive(Debug, Clone, Eq, PartialEq)] pub enum Array { - ElementList { elements: Box<[ExprId]>, is_assignee_expr: bool }, + ElementList { elements: Box<[ExprId]> }, Repeat { initializer: ExprId, repeat: ExprId }, } @@ -467,7 +485,7 @@ pub struct RecordLitField { pub enum Statement { Let { pat: PatId, - type_ref: Option>, + type_ref: Option, initializer: Option, else_branch: Option, }, @@ -475,133 +493,13 @@ pub enum Statement { expr: ExprId, has_semi: bool, }, - // At the moment, we only use this to figure out if a return expression - // is really the last statement of a block. See #16566 - Item, + Item(Item), } -impl Expr { - pub fn walk_child_exprs(&self, mut f: impl FnMut(ExprId)) { - match self { - Expr::Missing => {} - Expr::Path(_) | Expr::OffsetOf(_) => {} - Expr::InlineAsm(it) => it.operands.iter().for_each(|(_, op)| match op { - AsmOperand::In { expr, .. } - | AsmOperand::Out { expr: Some(expr), .. } - | AsmOperand::InOut { expr, .. } => f(*expr), - AsmOperand::SplitInOut { in_expr, out_expr, .. } => { - f(*in_expr); - if let Some(out_expr) = out_expr { - f(*out_expr); - } - } - AsmOperand::Out { expr: None, .. } - | AsmOperand::Const(_) - | AsmOperand::Label(_) - | AsmOperand::Sym(_) => (), - }), - Expr::If { condition, then_branch, else_branch } => { - f(*condition); - f(*then_branch); - if let &Some(else_branch) = else_branch { - f(else_branch); - } - } - Expr::Let { expr, .. } => { - f(*expr); - } - Expr::Const(_) => (), - Expr::Block { statements, tail, .. } - | Expr::Unsafe { statements, tail, .. } - | Expr::Async { statements, tail, .. } => { - for stmt in statements.iter() { - match stmt { - Statement::Let { initializer, else_branch, .. } => { - if let &Some(expr) = initializer { - f(expr); - } - if let &Some(expr) = else_branch { - f(expr); - } - } - Statement::Expr { expr: expression, .. } => f(*expression), - Statement::Item => (), - } - } - if let &Some(expr) = tail { - f(expr); - } - } - Expr::Loop { body, .. } => f(*body), - Expr::Call { callee, args, .. } => { - f(*callee); - args.iter().copied().for_each(f); - } - Expr::MethodCall { receiver, args, .. } => { - f(*receiver); - args.iter().copied().for_each(f); - } - Expr::Match { expr, arms } => { - f(*expr); - arms.iter().map(|arm| arm.expr).for_each(f); - } - Expr::Continue { .. } => {} - Expr::Break { expr, .. } - | Expr::Return { expr } - | Expr::Yield { expr } - | Expr::Yeet { expr } => { - if let &Some(expr) = expr { - f(expr); - } - } - Expr::Become { expr } => f(*expr), - Expr::RecordLit { fields, spread, .. } => { - for field in fields.iter() { - f(field.expr); - } - if let &Some(expr) = spread { - f(expr); - } - } - Expr::Closure { body, .. } => { - f(*body); - } - Expr::BinaryOp { lhs, rhs, .. } => { - f(*lhs); - f(*rhs); - } - Expr::Range { lhs, rhs, .. 
} => { - if let &Some(lhs) = rhs { - f(lhs); - } - if let &Some(rhs) = lhs { - f(rhs); - } - } - Expr::Index { base, index, .. } => { - f(*base); - f(*index); - } - Expr::Field { expr, .. } - | Expr::Await { expr } - | Expr::Cast { expr, .. } - | Expr::Ref { expr, .. } - | Expr::UnaryOp { expr, .. } - | Expr::Box { expr } => { - f(*expr); - } - Expr::Tuple { exprs, .. } => exprs.iter().copied().for_each(f), - Expr::Array(a) => match a { - Array::ElementList { elements, .. } => elements.iter().copied().for_each(f), - Array::Repeat { initializer, repeat } => { - f(*initializer); - f(*repeat) - } - }, - Expr::Literal(_) => {} - Expr::Underscore => {} - } - } +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum Item { + MacroDef(Box), + Other, } /// Explicit binding annotations given in the HIR for a binding. Note @@ -665,18 +563,49 @@ pub struct RecordFieldPat { pub enum Pat { Missing, Wild, - Tuple { args: Box<[PatId]>, ellipsis: Option }, + Tuple { + args: Box<[PatId]>, + ellipsis: Option, + }, Or(Box<[PatId]>), - Record { path: Option>, args: Box<[RecordFieldPat]>, ellipsis: bool }, - Range { start: Option>, end: Option> }, - Slice { prefix: Box<[PatId]>, slice: Option, suffix: Box<[PatId]> }, - Path(Box), + Record { + path: Option>, + args: Box<[RecordFieldPat]>, + ellipsis: bool, + }, + Range { + start: Option>, + end: Option>, + }, + Slice { + prefix: Box<[PatId]>, + slice: Option, + suffix: Box<[PatId]>, + }, + /// This might refer to a variable if a single segment path (specifically, on destructuring assignment). + Path(Path), Lit(ExprId), - Bind { id: BindingId, subpat: Option }, - TupleStruct { path: Option>, args: Box<[PatId]>, ellipsis: Option }, - Ref { pat: PatId, mutability: Mutability }, - Box { inner: PatId }, + Bind { + id: BindingId, + subpat: Option, + }, + TupleStruct { + path: Option>, + args: Box<[PatId]>, + ellipsis: Option, + }, + Ref { + pat: PatId, + mutability: Mutability, + }, + Box { + inner: PatId, + }, ConstBlock(ExprId), + /// An expression inside a pattern. That can only occur inside assignments. + /// + /// E.g. in `(a, *b) = (1, &mut 2)`, `*b` is an expression. + Expr(ExprId), } impl Pat { @@ -687,7 +616,8 @@ impl Pat { | Pat::Path(..) | Pat::ConstBlock(..) | Pat::Wild - | Pat::Missing => {} + | Pat::Missing + | Pat::Expr(_) => {} Pat::Bind { subpat, .. } => { subpat.iter().copied().for_each(f); } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/hir/type_ref.rs b/src/tools/rust-analyzer/crates/hir-def/src/hir/type_ref.rs index b74cd90f6933a..2582340c0f81b 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/hir/type_ref.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/hir/type_ref.rs @@ -2,22 +2,27 @@ //! be directly created from an ast::TypeRef, without further queries. use core::fmt; -use std::fmt::Write; +use std::{fmt::Write, ops::Index}; use hir_expand::{ db::ExpandDatabase, name::{AsName, Name}, - AstId, + AstId, InFile, }; -use intern::{sym, Interned, Symbol}; +use intern::{sym, Symbol}; +use la_arena::{Arena, ArenaMap, Idx}; use span::Edition; -use syntax::ast::{self, HasGenericArgs, HasName, IsString}; +use stdx::thin_vec::{thin_vec_with_header_struct, EmptyOptimizedThinVec, ThinVec}; +use syntax::{ + ast::{self, HasGenericArgs, HasName, IsString}, + AstPtr, +}; use crate::{ builtin_type::{BuiltinInt, BuiltinType, BuiltinUint}, hir::Literal, lower::LowerCtx, - path::Path, + path::{GenericArg, Path}, }; #[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] @@ -104,35 +109,90 @@ impl TraitRef { } } +thin_vec_with_header_struct! 
{ + pub new(pub(crate)) struct FnType, FnTypeHeader { + pub params: [(Option, TypeRefId)], + pub is_varargs: bool, + pub is_unsafe: bool, + pub abi: Option; ref, + } +} + +#[derive(Clone, PartialEq, Eq, Hash, Debug)] +pub struct ArrayType { + pub ty: TypeRefId, + // FIXME: This should be Ast + pub len: ConstRef, +} + +#[derive(Clone, PartialEq, Eq, Hash, Debug)] +pub struct RefType { + pub ty: TypeRefId, + pub lifetime: Option, + pub mutability: Mutability, +} + /// Compare ty::Ty -/// -/// Note: Most users of `TypeRef` that end up in the salsa database intern it using -/// `Interned` to save space. But notably, nested `TypeRef`s are not interned, since that -/// does not seem to save any noticeable amount of memory. #[derive(Clone, PartialEq, Eq, Hash, Debug)] pub enum TypeRef { Never, Placeholder, - Tuple(Vec), + Tuple(EmptyOptimizedThinVec), Path(Path), - RawPtr(Box, Mutability), - Reference(Box, Option, Mutability), - // FIXME: This should be Array(Box, Ast), - Array(Box, ConstRef), - Slice(Box), + RawPtr(TypeRefId, Mutability), + Reference(Box), + Array(Box), + Slice(TypeRefId), /// A fn pointer. Last element of the vector is the return type. - Fn( - Box<[(Option, TypeRef)]>, - bool, /*varargs*/ - bool, /*is_unsafe*/ - Option, /* abi */ - ), - ImplTrait(Vec>), - DynTrait(Vec>), + Fn(FnType), + ImplTrait(ThinVec), + DynTrait(ThinVec), Macro(AstId), Error, } +#[cfg(target_arch = "x86_64")] +const _: () = assert!(size_of::() == 16); + +pub type TypeRefId = Idx; + +#[derive(Default, Clone, PartialEq, Eq, Debug, Hash)] +pub struct TypesMap { + pub(crate) types: Arena, +} + +impl TypesMap { + pub const EMPTY: &TypesMap = &TypesMap { types: Arena::new() }; + + pub(crate) fn shrink_to_fit(&mut self) { + let TypesMap { types } = self; + types.shrink_to_fit(); + } +} + +impl Index for TypesMap { + type Output = TypeRef; + + fn index(&self, index: TypeRefId) -> &Self::Output { + &self.types[index] + } +} + +pub type TypePtr = AstPtr; +pub type TypeSource = InFile; + +#[derive(Default, Clone, PartialEq, Eq, Debug, Hash)] +pub struct TypesSourceMap { + pub(crate) types_map_back: ArenaMap, +} + +impl TypesSourceMap { + pub(crate) fn shrink_to_fit(&mut self) { + let TypesSourceMap { types_map_back } = self; + types_map_back.shrink_to_fit(); + } +} + #[derive(Clone, PartialEq, Eq, Hash, Debug)] pub struct LifetimeRef { pub name: Name, @@ -157,12 +217,22 @@ pub enum TypeBound { Path(Path, TraitBoundModifier), ForLifetime(Box<[Name]>, Path), Lifetime(LifetimeRef), + Use(Box<[UseArgRef]>), Error, } +#[cfg(target_pointer_width = "64")] +const _: [(); 32] = [(); ::std::mem::size_of::()]; + +#[derive(Clone, PartialEq, Eq, Hash, Debug)] +pub enum UseArgRef { + Name(Name), + Lifetime(LifetimeRef), +} + /// A modifier on a bound, currently this is only used for `?Sized`, where the /// modifier is `Maybe`. -#[derive(Clone, PartialEq, Eq, Hash, Debug)] +#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)] pub enum TraitBoundModifier { None, Maybe, @@ -170,12 +240,12 @@ pub enum TraitBoundModifier { impl TypeRef { /// Converts an `ast::TypeRef` to a `hir::TypeRef`. 
- pub fn from_ast(ctx: &LowerCtx<'_>, node: ast::Type) -> Self { - match node { - ast::Type::ParenType(inner) => TypeRef::from_ast_opt(ctx, inner.ty()), - ast::Type::TupleType(inner) => { - TypeRef::Tuple(inner.fields().map(|it| TypeRef::from_ast(ctx, it)).collect()) - } + pub fn from_ast(ctx: &LowerCtx<'_>, node: ast::Type) -> TypeRefId { + let ty = match &node { + ast::Type::ParenType(inner) => return TypeRef::from_ast_opt(ctx, inner.ty()), + ast::Type::TupleType(inner) => TypeRef::Tuple(EmptyOptimizedThinVec::from_iter( + Vec::from_iter(inner.fields().map(|it| TypeRef::from_ast(ctx, it))), + )), ast::Type::NeverType(..) => TypeRef::Never, ast::Type::PathType(inner) => { // FIXME: Use `Path::from_src` @@ -188,20 +258,21 @@ impl TypeRef { ast::Type::PtrType(inner) => { let inner_ty = TypeRef::from_ast_opt(ctx, inner.ty()); let mutability = Mutability::from_mutable(inner.mut_token().is_some()); - TypeRef::RawPtr(Box::new(inner_ty), mutability) + TypeRef::RawPtr(inner_ty, mutability) } ast::Type::ArrayType(inner) => { let len = ConstRef::from_const_arg(ctx, inner.const_arg()); - TypeRef::Array(Box::new(TypeRef::from_ast_opt(ctx, inner.ty())), len) - } - ast::Type::SliceType(inner) => { - TypeRef::Slice(Box::new(TypeRef::from_ast_opt(ctx, inner.ty()))) + TypeRef::Array(Box::new(ArrayType { + ty: TypeRef::from_ast_opt(ctx, inner.ty()), + len, + })) } + ast::Type::SliceType(inner) => TypeRef::Slice(TypeRef::from_ast_opt(ctx, inner.ty())), ast::Type::RefType(inner) => { let inner_ty = TypeRef::from_ast_opt(ctx, inner.ty()); let lifetime = inner.lifetime().map(|lt| LifetimeRef::new(<)); let mutability = Mutability::from_mutable(inner.mut_token().is_some()); - TypeRef::Reference(Box::new(inner_ty), lifetime, mutability) + TypeRef::Reference(Box::new(RefType { ty: inner_ty, lifetime, mutability })) } ast::Type::InferType(_inner) => TypeRef::Placeholder, ast::Type::FnPtrType(inner) => { @@ -209,7 +280,7 @@ impl TypeRef { .ret_type() .and_then(|rt| rt.ty()) .map(|it| TypeRef::from_ast(ctx, it)) - .unwrap_or_else(|| TypeRef::Tuple(Vec::new())); + .unwrap_or_else(|| ctx.alloc_type_ref_desugared(TypeRef::unit())); let mut is_varargs = false; let mut params = if let Some(pl) = inner.param_list() { if let Some(param) = pl.params().last() { @@ -241,10 +312,10 @@ impl TypeRef { let abi = inner.abi().map(lower_abi); params.push((None, ret_ty)); - TypeRef::Fn(params.into(), is_varargs, inner.unsafe_token().is_some(), abi) + TypeRef::Fn(FnType::new(is_varargs, inner.unsafe_token().is_some(), abi, params)) } // for types are close enough for our purposes to the inner type for now... 
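// Editorial aside (an assumption about the intent, not part of the patch): a
// higher-ranked `for<'a>` type is lowered as if it were just its inner fn-pointer
// type, i.e. the binder is dropped:
type WithBinder = for<'a> fn(&'a u8) -> &'a u8;
type AsLowered = fn(&u8) -> &u8; // what the lowering effectively sees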
- ast::Type::ForType(inner) => TypeRef::from_ast_opt(ctx, inner.ty()), + ast::Type::ForType(inner) => return TypeRef::from_ast_opt(ctx, inner.ty()), ast::Type::ImplTraitType(inner) => { if ctx.outer_impl_trait() { // Disallow nested impl traits @@ -261,74 +332,74 @@ impl TypeRef { Some(mc) => TypeRef::Macro(ctx.ast_id(&mc)), None => TypeRef::Error, }, - } + }; + ctx.alloc_type_ref(ty, AstPtr::new(&node)) } - pub(crate) fn from_ast_opt(ctx: &LowerCtx<'_>, node: Option) -> Self { + pub(crate) fn from_ast_opt(ctx: &LowerCtx<'_>, node: Option) -> TypeRefId { match node { Some(node) => TypeRef::from_ast(ctx, node), - None => TypeRef::Error, + None => ctx.alloc_error_type(), } } pub(crate) fn unit() -> TypeRef { - TypeRef::Tuple(Vec::new()) + TypeRef::Tuple(EmptyOptimizedThinVec::empty()) } - pub fn walk(&self, f: &mut impl FnMut(&TypeRef)) { - go(self, f); + pub fn walk(this: TypeRefId, map: &TypesMap, f: &mut impl FnMut(&TypeRef)) { + go(this, f, map); - fn go(type_ref: &TypeRef, f: &mut impl FnMut(&TypeRef)) { + fn go(type_ref: TypeRefId, f: &mut impl FnMut(&TypeRef), map: &TypesMap) { + let type_ref = &map[type_ref]; f(type_ref); match type_ref { - TypeRef::Fn(params, _, _, _) => { - params.iter().for_each(|(_, param_type)| go(param_type, f)) + TypeRef::Fn(fn_) => { + fn_.params().iter().for_each(|&(_, param_type)| go(param_type, f, map)) } - TypeRef::Tuple(types) => types.iter().for_each(|t| go(t, f)), - TypeRef::RawPtr(type_ref, _) - | TypeRef::Reference(type_ref, ..) - | TypeRef::Array(type_ref, _) - | TypeRef::Slice(type_ref) => go(type_ref, f), + TypeRef::Tuple(types) => types.iter().for_each(|&t| go(t, f, map)), + TypeRef::RawPtr(type_ref, _) | TypeRef::Slice(type_ref) => go(*type_ref, f, map), + TypeRef::Reference(it) => go(it.ty, f, map), + TypeRef::Array(it) => go(it.ty, f, map), TypeRef::ImplTrait(bounds) | TypeRef::DynTrait(bounds) => { for bound in bounds { - match bound.as_ref() { + match bound { TypeBound::Path(path, _) | TypeBound::ForLifetime(_, path) => { - go_path(path, f) + go_path(path, f, map) } - TypeBound::Lifetime(_) | TypeBound::Error => (), + TypeBound::Lifetime(_) | TypeBound::Error | TypeBound::Use(_) => (), } } } - TypeRef::Path(path) => go_path(path, f), + TypeRef::Path(path) => go_path(path, f, map), TypeRef::Never | TypeRef::Placeholder | TypeRef::Macro(_) | TypeRef::Error => {} }; } - fn go_path(path: &Path, f: &mut impl FnMut(&TypeRef)) { + fn go_path(path: &Path, f: &mut impl FnMut(&TypeRef), map: &TypesMap) { if let Some(type_ref) = path.type_anchor() { - go(type_ref, f); + go(type_ref, f, map); } for segment in path.segments().iter() { if let Some(args_and_bindings) = segment.args_and_bindings { for arg in args_and_bindings.args.iter() { match arg { - crate::path::GenericArg::Type(type_ref) => { - go(type_ref, f); + GenericArg::Type(type_ref) => { + go(*type_ref, f, map); } - crate::path::GenericArg::Const(_) - | crate::path::GenericArg::Lifetime(_) => {} + GenericArg::Const(_) | GenericArg::Lifetime(_) => {} } } for binding in args_and_bindings.bindings.iter() { - if let Some(type_ref) = &binding.type_ref { - go(type_ref, f); + if let Some(type_ref) = binding.type_ref { + go(type_ref, f, map); } for bound in binding.bounds.iter() { - match bound.as_ref() { + match bound { TypeBound::Path(path, _) | TypeBound::ForLifetime(_, path) => { - go_path(path, f) + go_path(path, f, map) } - TypeBound::Lifetime(_) | TypeBound::Error => (), + TypeBound::Lifetime(_) | TypeBound::Error | TypeBound::Use(_) => (), } } } @@ -341,11 +412,13 @@ impl TypeRef { pub(crate) fn 
type_bounds_from_ast( lower_ctx: &LowerCtx<'_>, type_bounds_opt: Option, -) -> Vec> { +) -> ThinVec { if let Some(type_bounds) = type_bounds_opt { - type_bounds.bounds().map(|it| Interned::new(TypeBound::from_ast(lower_ctx, it))).collect() + ThinVec::from_iter(Vec::from_iter( + type_bounds.bounds().map(|it| TypeBound::from_ast(lower_ctx, it)), + )) } else { - vec![] + ThinVec::from_iter([]) } } @@ -380,7 +453,16 @@ impl TypeBound { None => TypeBound::Error, } } - ast::TypeBoundKind::Use(_) => TypeBound::Error, + ast::TypeBoundKind::Use(gal) => TypeBound::Use( + gal.use_bound_generic_args() + .map(|p| match p { + ast::UseBoundGenericArg::Lifetime(l) => { + UseArgRef::Lifetime(LifetimeRef::new(&l)) + } + ast::UseBoundGenericArg::NameRef(n) => UseArgRef::Name(n.as_name()), + }) + .collect(), + ), ast::TypeBoundKind::Lifetime(lifetime) => { TypeBound::Lifetime(LifetimeRef::new(&lifetime)) } @@ -391,7 +473,7 @@ impl TypeBound { match self { TypeBound::Path(p, m) => Some((p, m)), TypeBound::ForLifetime(_, p) => Some((p, &TraitBoundModifier::None)), - TypeBound::Lifetime(_) | TypeBound::Error => None, + TypeBound::Lifetime(_) | TypeBound::Error | TypeBound::Use(_) => None, } } } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs index 7cb833fdce7c0..b5bf2feb82a20 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs @@ -61,7 +61,7 @@ use crate::{ db::DefDatabase, generics::GenericParams, path::{GenericArgs, ImportAlias, ModPath, Path, PathKind}, - type_ref::{Mutability, TraitRef, TypeBound, TypeRef}, + type_ref::{Mutability, TraitRef, TypeBound, TypeRefId, TypesMap, TypesSourceMap}, visibility::{RawVisibility, VisibilityExplicitness}, BlockId, LocalLifetimeParamId, LocalTypeOrConstParamId, Lookup, }; @@ -100,14 +100,20 @@ pub struct ItemTree { impl ItemTree { pub(crate) fn file_item_tree_query(db: &dyn DefDatabase, file_id: HirFileId) -> Arc { - let _p = tracing::info_span!("file_item_tree_query", ?file_id).entered(); - static EMPTY: OnceLock> = OnceLock::new(); + db.file_item_tree_with_source_map(file_id).0 + } - let syntax = db.parse_or_expand(file_id); + pub(crate) fn file_item_tree_with_source_map_query( + db: &dyn DefDatabase, + file_id: HirFileId, + ) -> (Arc, Arc) { + let _p = tracing::info_span!("file_item_tree_query", ?file_id).entered(); + static EMPTY: OnceLock<(Arc, Arc)> = OnceLock::new(); let ctx = lower::Ctx::new(db, file_id); + let syntax = db.parse_or_expand(file_id); let mut top_attrs = None; - let mut item_tree = match_ast! { + let (mut item_tree, source_maps) = match_ast! 
{ match syntax { ast::SourceFile(file) => { top_attrs = Some(RawAttrs::new(db.upcast(), &file, ctx.span_map())); @@ -137,42 +143,55 @@ impl ItemTree { { EMPTY .get_or_init(|| { - Arc::new(ItemTree { - top_level: SmallVec::new_const(), - attrs: FxHashMap::default(), - data: None, - }) + ( + Arc::new(ItemTree { + top_level: SmallVec::new_const(), + attrs: FxHashMap::default(), + data: None, + }), + Arc::default(), + ) }) .clone() } else { item_tree.shrink_to_fit(); - Arc::new(item_tree) + (Arc::new(item_tree), Arc::new(source_maps)) } } pub(crate) fn block_item_tree_query(db: &dyn DefDatabase, block: BlockId) -> Arc { + db.block_item_tree_with_source_map(block).0 + } + + pub(crate) fn block_item_tree_with_source_map_query( + db: &dyn DefDatabase, + block: BlockId, + ) -> (Arc, Arc) { let _p = tracing::info_span!("block_item_tree_query", ?block).entered(); - static EMPTY: OnceLock> = OnceLock::new(); + static EMPTY: OnceLock<(Arc, Arc)> = OnceLock::new(); let loc = block.lookup(db); let block = loc.ast_id.to_node(db.upcast()); let ctx = lower::Ctx::new(db, loc.ast_id.file_id); - let mut item_tree = ctx.lower_block(&block); + let (mut item_tree, source_maps) = ctx.lower_block(&block); if item_tree.data.is_none() && item_tree.top_level.is_empty() && item_tree.attrs.is_empty() { EMPTY .get_or_init(|| { - Arc::new(ItemTree { - top_level: SmallVec::new_const(), - attrs: FxHashMap::default(), - data: None, - }) + ( + Arc::new(ItemTree { + top_level: SmallVec::new_const(), + attrs: FxHashMap::default(), + data: None, + }), + Arc::default(), + ) }) .clone() } else { item_tree.shrink_to_fit(); - Arc::new(item_tree) + (Arc::new(item_tree), Arc::new(source_maps)) } } @@ -309,6 +328,160 @@ struct ItemTreeData { vis: ItemVisibilities, } +#[derive(Default, Debug, Eq, PartialEq)] +pub struct ItemTreeSourceMaps { + all_concatenated: Box<[TypesSourceMap]>, + structs_offset: u32, + unions_offset: u32, + enum_generics_offset: u32, + variants_offset: u32, + consts_offset: u32, + statics_offset: u32, + trait_generics_offset: u32, + trait_alias_generics_offset: u32, + impls_offset: u32, + type_aliases_offset: u32, +} + +#[derive(Clone, Copy)] +pub struct GenericItemSourceMap<'a>(&'a [TypesSourceMap; 2]); + +impl<'a> GenericItemSourceMap<'a> { + #[inline] + pub fn item(self) -> &'a TypesSourceMap { + &self.0[0] + } + + #[inline] + pub fn generics(self) -> &'a TypesSourceMap { + &self.0[1] + } +} + +#[derive(Default, Debug, Eq, PartialEq)] +pub struct GenericItemSourceMapBuilder { + pub item: TypesSourceMap, + pub generics: TypesSourceMap, +} + +#[derive(Default, Debug, Eq, PartialEq)] +struct ItemTreeSourceMapsBuilder { + functions: Vec, + structs: Vec, + unions: Vec, + enum_generics: Vec, + variants: Vec, + consts: Vec, + statics: Vec, + trait_generics: Vec, + trait_alias_generics: Vec, + impls: Vec, + type_aliases: Vec, +} + +impl ItemTreeSourceMapsBuilder { + fn build(self) -> ItemTreeSourceMaps { + let ItemTreeSourceMapsBuilder { + functions, + structs, + unions, + enum_generics, + variants, + consts, + statics, + trait_generics, + trait_alias_generics, + impls, + type_aliases, + } = self; + let structs_offset = functions.len() as u32 * 2; + let unions_offset = structs_offset + (structs.len() as u32 * 2); + let enum_generics_offset = unions_offset + (unions.len() as u32 * 2); + let variants_offset = enum_generics_offset + (enum_generics.len() as u32); + let consts_offset = variants_offset + (variants.len() as u32); + let statics_offset = consts_offset + (consts.len() as u32); + let trait_generics_offset = 
statics_offset + (statics.len() as u32); + let trait_alias_generics_offset = trait_generics_offset + (trait_generics.len() as u32); + let impls_offset = trait_alias_generics_offset + (trait_alias_generics.len() as u32); + let type_aliases_offset = impls_offset + (impls.len() as u32 * 2); + let all_concatenated = generics_concat(functions) + .chain(generics_concat(structs)) + .chain(generics_concat(unions)) + .chain(enum_generics) + .chain(variants) + .chain(consts) + .chain(statics) + .chain(trait_generics) + .chain(trait_alias_generics) + .chain(generics_concat(impls)) + .chain(generics_concat(type_aliases)) + .collect(); + return ItemTreeSourceMaps { + all_concatenated, + structs_offset, + unions_offset, + enum_generics_offset, + variants_offset, + consts_offset, + statics_offset, + trait_generics_offset, + trait_alias_generics_offset, + impls_offset, + type_aliases_offset, + }; + + fn generics_concat( + source_maps: Vec, + ) -> impl Iterator { + source_maps.into_iter().flat_map(|it| [it.item, it.generics]) + } + } +} + +impl ItemTreeSourceMaps { + #[inline] + fn generic_item(&self, offset: u32, index: u32) -> GenericItemSourceMap<'_> { + GenericItemSourceMap( + self.all_concatenated[(offset + (index * 2)) as usize..][..2].try_into().unwrap(), + ) + } + + #[inline] + fn non_generic_item(&self, offset: u32, index: u32) -> &TypesSourceMap { + &self.all_concatenated[(offset + index) as usize] + } + + #[inline] + pub fn function(&self, index: FileItemTreeId) -> GenericItemSourceMap<'_> { + self.generic_item(0, index.0.into_raw().into_u32()) + } +} + +macro_rules! index_item_source_maps { + ( $( $name:ident; $field:ident[$tree_id:ident]; $fn:ident; $ret:ty, )* ) => { + impl ItemTreeSourceMaps { + $( + #[inline] + pub fn $name(&self, index: FileItemTreeId<$tree_id>) -> $ret { + self.$fn(self.$field, index.0.into_raw().into_u32()) + } + )* + } + }; +} +index_item_source_maps! { + strukt; structs_offset[Struct]; generic_item; GenericItemSourceMap<'_>, + union; unions_offset[Union]; generic_item; GenericItemSourceMap<'_>, + enum_generic; enum_generics_offset[Enum]; non_generic_item; &TypesSourceMap, + variant; variants_offset[Variant]; non_generic_item; &TypesSourceMap, + konst; consts_offset[Const]; non_generic_item; &TypesSourceMap, + statik; statics_offset[Static]; non_generic_item; &TypesSourceMap, + trait_generic; trait_generics_offset[Trait]; non_generic_item; &TypesSourceMap, + trait_alias_generic; trait_alias_generics_offset[TraitAlias]; non_generic_item; &TypesSourceMap, + impl_; impls_offset[Impl]; generic_item; GenericItemSourceMap<'_>, + type_alias; type_aliases_offset[TypeAlias]; generic_item; GenericItemSourceMap<'_>, +} + #[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)] pub enum AttrOwner { /// Attributes on an item. 
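// Editorial sketch (simplified stand-in types, not the real API): the indexing
// scheme behind `ItemTreeSourceMaps` above. Per-kind source maps are concatenated
// into one boxed slice; generic item kinds contribute two entries each (item types
// and generic-parameter types), non-generic kinds contribute one, and a stored
// per-kind offset turns an item index back into slice positions.
struct ConcatenatedMaps<T> {
    all: Box<[T]>,
    structs_offset: u32, // functions occupy [0, structs_offset), two entries per function
}

impl<T> ConcatenatedMaps<T> {
    fn generic_item(&self, offset: u32, index: u32) -> [&T; 2] {
        let i = (offset + index * 2) as usize;
        [&self.all[i], &self.all[i + 1]]
    }
    fn function(&self, index: u32) -> [&T; 2] {
        self.generic_item(0, index)
    }
    fn strukt(&self, index: u32) -> [&T; 2] {
        self.generic_item(self.structs_offset, index)
    }
}
// Flattening everything into one slice keeps the structure to a single allocation
// plus a handful of `u32` offsets.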
@@ -364,7 +537,7 @@ pub trait ItemTreeNode: Clone { fn attr_owner(id: FileItemTreeId) -> AttrOwner; } pub trait GenericsItemTreeNode: ItemTreeNode { - fn generic_params(&self) -> &Interned; + fn generic_params(&self) -> &Arc; } pub struct FileItemTreeId(Idx); @@ -429,6 +602,16 @@ impl TreeId { } } + pub fn item_tree_with_source_map( + &self, + db: &dyn DefDatabase, + ) -> (Arc, Arc) { + match self.block { + Some(block) => db.block_item_tree_with_source_map(block), + None => db.file_item_tree_with_source_map(self.file), + } + } + pub fn file_id(self) -> HirFileId { self.file } @@ -461,6 +644,13 @@ impl ItemTreeId { self.tree.item_tree(db) } + pub fn item_tree_with_source_map( + self, + db: &dyn DefDatabase, + ) -> (Arc, Arc) { + self.tree.item_tree_with_source_map(db) + } + pub fn resolved(self, db: &dyn DefDatabase, cb: impl FnOnce(&N) -> R) -> R where ItemTree: Index, Output = N>, @@ -593,7 +783,7 @@ macro_rules! mod_items { $( impl GenericsItemTreeNode for $typ { - fn generic_params(&self) -> &Interned { + fn generic_params(&self) -> &Arc { &self.$generic_params } } @@ -731,17 +921,18 @@ pub struct ExternBlock { pub struct Function { pub name: Name, pub visibility: RawVisibilityId, - pub explicit_generic_params: Interned, + pub explicit_generic_params: Arc, pub abi: Option, pub params: Box<[Param]>, - pub ret_type: Interned, + pub ret_type: TypeRefId, pub ast_id: FileAstId, + pub types_map: Arc, pub(crate) flags: FnFlags, } #[derive(Debug, Clone, PartialEq, Eq)] pub struct Param { - pub type_ref: Option>, + pub type_ref: Option, } bitflags::bitflags! { @@ -762,26 +953,28 @@ bitflags::bitflags! { pub struct Struct { pub name: Name, pub visibility: RawVisibilityId, - pub generic_params: Interned, + pub generic_params: Arc, pub fields: Box<[Field]>, pub shape: FieldsShape, pub ast_id: FileAstId, + pub types_map: Arc, } #[derive(Debug, Clone, Eq, PartialEq)] pub struct Union { pub name: Name, pub visibility: RawVisibilityId, - pub generic_params: Interned, + pub generic_params: Arc, pub fields: Box<[Field]>, pub ast_id: FileAstId, + pub types_map: Arc, } #[derive(Debug, Clone, Eq, PartialEq)] pub struct Enum { pub name: Name, pub visibility: RawVisibilityId, - pub generic_params: Interned, + pub generic_params: Arc, pub variants: Range>, pub ast_id: FileAstId, } @@ -792,6 +985,7 @@ pub struct Variant { pub fields: Box<[Field]>, pub shape: FieldsShape, pub ast_id: FileAstId, + pub types_map: Arc, } #[derive(Debug, Copy, Clone, PartialEq, Eq)] @@ -805,7 +999,7 @@ pub enum FieldsShape { #[derive(Debug, Clone, PartialEq, Eq)] pub struct Field { pub name: Name, - pub type_ref: Interned, + pub type_ref: TypeRefId, pub visibility: RawVisibilityId, } @@ -814,9 +1008,10 @@ pub struct Const { /// `None` for `const _: () = ();` pub name: Option, pub visibility: RawVisibilityId, - pub type_ref: Interned, + pub type_ref: TypeRefId, pub ast_id: FileAstId, pub has_body: bool, + pub types_map: Arc, } #[derive(Debug, Clone, Eq, PartialEq)] @@ -827,15 +1022,16 @@ pub struct Static { pub mutable: bool, pub has_safe_kw: bool, pub has_unsafe_kw: bool, - pub type_ref: Interned, + pub type_ref: TypeRefId, pub ast_id: FileAstId, + pub types_map: Arc, } #[derive(Debug, Clone, Eq, PartialEq)] pub struct Trait { pub name: Name, pub visibility: RawVisibilityId, - pub generic_params: Interned, + pub generic_params: Arc, pub is_auto: bool, pub is_unsafe: bool, pub items: Box<[AssocItem]>, @@ -846,19 +1042,20 @@ pub struct Trait { pub struct TraitAlias { pub name: Name, pub visibility: RawVisibilityId, - pub 
generic_params: Interned, + pub generic_params: Arc, pub ast_id: FileAstId, } #[derive(Debug, Clone, Eq, PartialEq)] pub struct Impl { - pub generic_params: Interned, - pub target_trait: Option>, - pub self_ty: Interned, + pub generic_params: Arc, + pub target_trait: Option, + pub self_ty: TypeRefId, pub is_negative: bool, pub is_unsafe: bool, pub items: Box<[AssocItem]>, pub ast_id: FileAstId, + pub types_map: Arc, } #[derive(Debug, Clone, PartialEq, Eq)] @@ -866,10 +1063,11 @@ pub struct TypeAlias { pub name: Name, pub visibility: RawVisibilityId, /// Bounds on the type alias itself. Only valid in trait declarations, eg. `type Assoc: Copy;`. - pub bounds: Box<[Interned]>, - pub generic_params: Interned, - pub type_ref: Option>, + pub bounds: Box<[TypeBound]>, + pub generic_params: Arc, + pub type_ref: Option, pub ast_id: FileAstId, + pub types_map: Arc, } #[derive(Debug, Clone, Eq, PartialEq)] @@ -968,6 +1166,11 @@ impl UseTree { self.expand_impl(None, &mut cb) } + /// The [`UseTreeKind`] of this `UseTree`. + pub fn kind(&self) -> &UseTreeKind { + &self.kind + } + fn expand_impl( &self, prefix: Option, diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs index 431a7f66f405d..bd17fce37b733 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs @@ -1,12 +1,18 @@ //! AST -> `ItemTree` lowering code. -use std::collections::hash_map::Entry; +use std::{cell::OnceCell, collections::hash_map::Entry}; -use hir_expand::{mod_path::path, name::AsName, span_map::SpanMapRef, HirFileId}; +use hir_expand::{ + mod_path::path, + name::AsName, + span_map::{SpanMap, SpanMapRef}, + HirFileId, +}; use intern::{sym, Symbol}; use la_arena::Arena; use rustc_hash::FxHashMap; use span::{AstIdMap, SyntaxContextId}; +use stdx::thin_vec::ThinVec; use syntax::{ ast::{self, HasModuleItem, HasName, HasTypeBounds, IsString}, AstNode, @@ -18,14 +24,19 @@ use crate::{ generics::{GenericParams, GenericParamsCollector, TypeParamData, TypeParamProvenance}, item_tree::{ AssocItem, AttrOwner, Const, Either, Enum, ExternBlock, ExternCrate, Field, FieldParent, - FieldsShape, FileItemTreeId, FnFlags, Function, GenericArgs, GenericModItem, Idx, Impl, - ImportAlias, Interned, ItemTree, ItemTreeData, Macro2, MacroCall, MacroRules, Mod, ModItem, + FieldsShape, FileItemTreeId, FnFlags, Function, GenericArgs, GenericItemSourceMapBuilder, + GenericModItem, Idx, Impl, ImportAlias, Interned, ItemTree, ItemTreeData, + ItemTreeSourceMaps, ItemTreeSourceMapsBuilder, Macro2, MacroCall, MacroRules, Mod, ModItem, ModKind, ModPath, Mutability, Name, Param, Path, Range, RawAttrs, RawIdx, RawVisibilityId, Static, Struct, StructKind, Trait, TraitAlias, TypeAlias, Union, Use, UseTree, UseTreeKind, Variant, }, + lower::LowerCtx, path::AssociatedTypeBinding, - type_ref::{LifetimeRef, TraitBoundModifier, TraitRef, TypeBound, TypeRef}, + type_ref::{ + LifetimeRef, RefType, TraitBoundModifier, TraitRef, TypeBound, TypeRef, TypeRefId, + TypesMap, TypesSourceMap, + }, visibility::RawVisibility, LocalLifetimeParamId, LocalTypeOrConstParamId, }; @@ -40,7 +51,9 @@ pub(super) struct Ctx<'a> { source_ast_id_map: Arc, generic_param_attr_buffer: FxHashMap, RawAttrs>, - body_ctx: crate::lower::LowerCtx<'a>, + span_map: OnceCell, + file: HirFileId, + source_maps: ItemTreeSourceMapsBuilder, } impl<'a> Ctx<'a> { @@ -50,22 +63,49 @@ impl<'a> Ctx<'a> { tree: ItemTree::default(), 
generic_param_attr_buffer: FxHashMap::default(), source_ast_id_map: db.ast_id_map(file), - body_ctx: crate::lower::LowerCtx::new(db, file), + file, + span_map: OnceCell::new(), + source_maps: ItemTreeSourceMapsBuilder::default(), } } pub(super) fn span_map(&self) -> SpanMapRef<'_> { - self.body_ctx.span_map() + self.span_map.get_or_init(|| self.db.span_map(self.file)).as_ref() + } + + fn body_ctx<'b, 'c>( + &self, + types_map: &'b mut TypesMap, + types_source_map: &'b mut TypesSourceMap, + ) -> LowerCtx<'c> + where + 'a: 'c, + 'b: 'c, + { + // FIXME: This seems a bit wasteful that if `LowerCtx` will initialize the span map we won't benefit. + LowerCtx::with_span_map_cell( + self.db, + self.file, + self.span_map.clone(), + types_map, + types_source_map, + ) } - pub(super) fn lower_module_items(mut self, item_owner: &dyn HasModuleItem) -> ItemTree { + pub(super) fn lower_module_items( + mut self, + item_owner: &dyn HasModuleItem, + ) -> (ItemTree, ItemTreeSourceMaps) { self.tree.top_level = item_owner.items().flat_map(|item| self.lower_mod_item(&item)).collect(); assert!(self.generic_param_attr_buffer.is_empty()); - self.tree + (self.tree, self.source_maps.build()) } - pub(super) fn lower_macro_stmts(mut self, stmts: ast::MacroStmts) -> ItemTree { + pub(super) fn lower_macro_stmts( + mut self, + stmts: ast::MacroStmts, + ) -> (ItemTree, ItemTreeSourceMaps) { self.tree.top_level = stmts .statements() .filter_map(|stmt| { @@ -96,10 +136,10 @@ impl<'a> Ctx<'a> { } assert!(self.generic_param_attr_buffer.is_empty()); - self.tree + (self.tree, self.source_maps.build()) } - pub(super) fn lower_block(mut self, block: &ast::BlockExpr) -> ItemTree { + pub(super) fn lower_block(mut self, block: &ast::BlockExpr) -> (ItemTree, ItemTreeSourceMaps) { self.tree .attrs .insert(AttrOwner::TopLevel, RawAttrs::new(self.db.upcast(), block, self.span_map())); @@ -125,7 +165,7 @@ impl<'a> Ctx<'a> { } assert!(self.generic_param_attr_buffer.is_empty()); - self.tree + (self.tree, self.source_maps.build()) } fn data(&mut self) -> &mut ItemTreeData { @@ -144,7 +184,7 @@ impl<'a> Ctx<'a> { ast::Item::Module(ast) => self.lower_module(ast)?.into(), ast::Item::Trait(ast) => self.lower_trait(ast)?.into(), ast::Item::TraitAlias(ast) => self.lower_trait_alias(ast)?.into(), - ast::Item::Impl(ast) => self.lower_impl(ast)?.into(), + ast::Item::Impl(ast) => self.lower_impl(ast).into(), ast::Item::Use(ast) => self.lower_use(ast)?.into(), ast::Item::ExternCrate(ast) => self.lower_extern_crate(ast)?.into(), ast::Item::MacroCall(ast) => self.lower_macro_call(ast)?.into(), @@ -159,12 +199,14 @@ impl<'a> Ctx<'a> { } fn add_attrs(&mut self, item: AttrOwner, attrs: RawAttrs) { - match self.tree.attrs.entry(item) { - Entry::Occupied(mut entry) => { - *entry.get_mut() = entry.get().merge(attrs); - } - Entry::Vacant(entry) => { - entry.insert(attrs); + if !attrs.is_empty() { + match self.tree.attrs.entry(item) { + Entry::Occupied(mut entry) => { + *entry.get_mut() = entry.get().merge(attrs); + } + Entry::Vacant(entry) => { + entry.insert(attrs); + } } } } @@ -190,13 +232,31 @@ impl<'a> Ctx<'a> { } fn lower_struct(&mut self, strukt: &ast::Struct) -> Option> { + let (mut types_map, mut types_source_map) = + (TypesMap::default(), TypesSourceMap::default()); + let body_ctx = self.body_ctx(&mut types_map, &mut types_source_map); let visibility = self.lower_visibility(strukt); let name = strukt.name()?.as_name(); let ast_id = self.source_ast_id_map.ast_id(strukt); - let (fields, kind, attrs) = self.lower_fields(&strukt.kind()); - let 
generic_params = self.lower_generic_params(HasImplicitSelf::No, strukt); - let res = Struct { name, visibility, generic_params, fields, shape: kind, ast_id }; + let (fields, kind, attrs) = self.lower_fields(&strukt.kind(), &body_ctx); + let (generic_params, generics_source_map) = + self.lower_generic_params(HasImplicitSelf::No, strukt); + types_map.shrink_to_fit(); + types_source_map.shrink_to_fit(); + let res = Struct { + name, + visibility, + generic_params, + fields, + shape: kind, + ast_id, + types_map: Arc::new(types_map), + }; let id = id(self.data().structs.alloc(res)); + self.source_maps.structs.push(GenericItemSourceMapBuilder { + item: types_source_map, + generics: generics_source_map, + }); for (idx, attr) in attrs { self.add_attrs( AttrOwner::Field( @@ -213,6 +273,7 @@ impl<'a> Ctx<'a> { fn lower_fields( &mut self, strukt_kind: &ast::StructKind, + body_ctx: &LowerCtx<'_>, ) -> (Box<[Field]>, FieldsShape, Vec<(usize, RawAttrs)>) { match strukt_kind { ast::StructKind::Record(it) => { @@ -220,7 +281,7 @@ impl<'a> Ctx<'a> { let mut attrs = vec![]; for (i, field) in it.fields().enumerate() { - let data = self.lower_record_field(&field); + let data = self.lower_record_field(&field, body_ctx); fields.push(data); let attr = RawAttrs::new(self.db.upcast(), &field, self.span_map()); if !attr.is_empty() { @@ -234,7 +295,7 @@ impl<'a> Ctx<'a> { let mut attrs = vec![]; for (i, field) in it.fields().enumerate() { - let data = self.lower_tuple_field(i, &field); + let data = self.lower_tuple_field(i, &field, body_ctx); fields.push(data); let attr = RawAttrs::new(self.db.upcast(), &field, self.span_map()); if !attr.is_empty() { @@ -247,35 +308,59 @@ impl<'a> Ctx<'a> { } } - fn lower_record_field(&mut self, field: &ast::RecordField) -> Field { + fn lower_record_field(&mut self, field: &ast::RecordField, body_ctx: &LowerCtx<'_>) -> Field { let name = match field.name() { Some(name) => name.as_name(), None => Name::missing(), }; let visibility = self.lower_visibility(field); - let type_ref = self.lower_type_ref_opt(field.ty()); + let type_ref = TypeRef::from_ast_opt(body_ctx, field.ty()); Field { name, type_ref, visibility } } - fn lower_tuple_field(&mut self, idx: usize, field: &ast::TupleField) -> Field { + fn lower_tuple_field( + &mut self, + idx: usize, + field: &ast::TupleField, + body_ctx: &LowerCtx<'_>, + ) -> Field { let name = Name::new_tuple_field(idx); let visibility = self.lower_visibility(field); - let type_ref = self.lower_type_ref_opt(field.ty()); + let type_ref = TypeRef::from_ast_opt(body_ctx, field.ty()); Field { name, type_ref, visibility } } fn lower_union(&mut self, union: &ast::Union) -> Option> { + let (mut types_map, mut types_source_map) = + (TypesMap::default(), TypesSourceMap::default()); + let body_ctx = self.body_ctx(&mut types_map, &mut types_source_map); let visibility = self.lower_visibility(union); let name = union.name()?.as_name(); let ast_id = self.source_ast_id_map.ast_id(union); let (fields, _, attrs) = match union.record_field_list() { - Some(record_field_list) => self.lower_fields(&StructKind::Record(record_field_list)), + Some(record_field_list) => { + self.lower_fields(&StructKind::Record(record_field_list), &body_ctx) + } None => (Box::default(), FieldsShape::Record, Vec::default()), }; - let generic_params = self.lower_generic_params(HasImplicitSelf::No, union); - let res = Union { name, visibility, generic_params, fields, ast_id }; + let (generic_params, generics_source_map) = + self.lower_generic_params(HasImplicitSelf::No, union); + 
types_map.shrink_to_fit(); + types_source_map.shrink_to_fit(); + let res = Union { + name, + visibility, + generic_params, + fields, + ast_id, + types_map: Arc::new(types_map), + }; let id = id(self.data().unions.alloc(res)); + self.source_maps.unions.push(GenericItemSourceMapBuilder { + item: types_source_map, + generics: generics_source_map, + }); for (idx, attr) in attrs { self.add_attrs( AttrOwner::Field( @@ -299,9 +384,11 @@ impl<'a> Ctx<'a> { FileItemTreeId(self.next_variant_idx())..FileItemTreeId(self.next_variant_idx()) } }; - let generic_params = self.lower_generic_params(HasImplicitSelf::No, enum_); + let (generic_params, generics_source_map) = + self.lower_generic_params(HasImplicitSelf::No, enum_); let res = Enum { name, visibility, generic_params, variants, ast_id }; let id = id(self.data().enums.alloc(res)); + self.source_maps.enum_generics.push(generics_source_map); self.write_generic_params_attributes(id.into()); Some(id) } @@ -320,14 +407,20 @@ impl<'a> Ctx<'a> { } fn lower_variant(&mut self, variant: &ast::Variant) -> Idx { + let (mut types_map, mut types_source_map) = + (TypesMap::default(), TypesSourceMap::default()); + let body_ctx = self.body_ctx(&mut types_map, &mut types_source_map); let name = match variant.name() { Some(name) => name.as_name(), None => Name::missing(), }; - let (fields, kind, attrs) = self.lower_fields(&variant.kind()); + let (fields, kind, attrs) = self.lower_fields(&variant.kind(), &body_ctx); let ast_id = self.source_ast_id_map.ast_id(variant); - let res = Variant { name, fields, shape: kind, ast_id }; + types_map.shrink_to_fit(); + types_source_map.shrink_to_fit(); + let res = Variant { name, fields, shape: kind, ast_id, types_map: Arc::new(types_map) }; let id = self.data().variants.alloc(res); + self.source_maps.variants.push(types_source_map); for (idx, attr) in attrs { self.add_attrs( AttrOwner::Field( @@ -341,6 +434,10 @@ impl<'a> Ctx<'a> { } fn lower_function(&mut self, func: &ast::Fn) -> Option> { + let (mut types_map, mut types_source_map) = + (TypesMap::default(), TypesSourceMap::default()); + let body_ctx = self.body_ctx(&mut types_map, &mut types_source_map); + let visibility = self.lower_visibility(func); let name = func.name()?.as_name(); @@ -360,27 +457,31 @@ impl<'a> Ctx<'a> { RawAttrs::new(self.db.upcast(), &self_param, self.span_map()), ); let self_type = match self_param.ty() { - Some(type_ref) => TypeRef::from_ast(&self.body_ctx, type_ref), + Some(type_ref) => TypeRef::from_ast(&body_ctx, type_ref), None => { - let self_type = - TypeRef::Path(Name::new_symbol_root(sym::Self_.clone()).into()); + let self_type = body_ctx.alloc_type_ref_desugared(TypeRef::Path( + Name::new_symbol_root(sym::Self_.clone()).into(), + )); match self_param.kind() { ast::SelfParamKind::Owned => self_type, - ast::SelfParamKind::Ref => TypeRef::Reference( - Box::new(self_type), - self_param.lifetime().as_ref().map(LifetimeRef::new), - Mutability::Shared, + ast::SelfParamKind::Ref => body_ctx.alloc_type_ref_desugared( + TypeRef::Reference(Box::new(RefType { + ty: self_type, + lifetime: self_param.lifetime().as_ref().map(LifetimeRef::new), + mutability: Mutability::Shared, + })), ), - ast::SelfParamKind::MutRef => TypeRef::Reference( - Box::new(self_type), - self_param.lifetime().as_ref().map(LifetimeRef::new), - Mutability::Mut, + ast::SelfParamKind::MutRef => body_ctx.alloc_type_ref_desugared( + TypeRef::Reference(Box::new(RefType { + ty: self_type, + lifetime: self_param.lifetime().as_ref().map(LifetimeRef::new), + mutability: Mutability::Mut, + })), 
), } } }; - let type_ref = Interned::new(self_type); - params.push(Param { type_ref: Some(type_ref) }); + params.push(Param { type_ref: Some(self_type) }); has_self_param = true; } for param in param_list.params() { @@ -391,9 +492,8 @@ impl<'a> Ctx<'a> { Param { type_ref: None } } None => { - let type_ref = TypeRef::from_ast_opt(&self.body_ctx, param.ty()); - let ty = Interned::new(type_ref); - Param { type_ref: Some(ty) } + let type_ref = TypeRef::from_ast_opt(&body_ctx, param.ty()); + Param { type_ref: Some(type_ref) } } }; params.push(param); @@ -402,17 +502,17 @@ impl<'a> Ctx<'a> { let ret_type = match func.ret_type() { Some(rt) => match rt.ty() { - Some(type_ref) => TypeRef::from_ast(&self.body_ctx, type_ref), - None if rt.thin_arrow_token().is_some() => TypeRef::Error, - None => TypeRef::unit(), + Some(type_ref) => TypeRef::from_ast(&body_ctx, type_ref), + None if rt.thin_arrow_token().is_some() => body_ctx.alloc_error_type(), + None => body_ctx.alloc_type_ref_desugared(TypeRef::unit()), }, - None => TypeRef::unit(), + None => body_ctx.alloc_type_ref_desugared(TypeRef::unit()), }; let ret_type = if func.async_token().is_some() { let future_impl = desugar_future_path(ret_type); - let ty_bound = Interned::new(TypeBound::Path(future_impl, TraitBoundModifier::None)); - TypeRef::ImplTrait(vec![ty_bound]) + let ty_bound = TypeBound::Path(future_impl, TraitBoundModifier::None); + body_ctx.alloc_type_ref_desugared(TypeRef::ImplTrait(ThinVec::from_iter([ty_bound]))) } else { ret_type }; @@ -447,18 +547,27 @@ impl<'a> Ctx<'a> { flags |= FnFlags::IS_VARARGS; } + types_map.shrink_to_fit(); + types_source_map.shrink_to_fit(); + let (generic_params, generics_source_map) = + self.lower_generic_params(HasImplicitSelf::No, func); let res = Function { name, visibility, - explicit_generic_params: self.lower_generic_params(HasImplicitSelf::No, func), + explicit_generic_params: generic_params, abi, params: params.into_boxed_slice(), - ret_type: Interned::new(ret_type), + ret_type, ast_id, + types_map: Arc::new(types_map), flags, }; let id = id(self.data().functions.alloc(res)); + self.source_maps.functions.push(GenericItemSourceMapBuilder { + item: types_source_map, + generics: generics_source_map, + }); for (idx, attr) in attrs { self.add_attrs(AttrOwner::Param(id, Idx::from_raw(RawIdx::from_u32(idx as u32))), attr); } @@ -470,37 +579,82 @@ impl<'a> Ctx<'a> { &mut self, type_alias: &ast::TypeAlias, ) -> Option> { + let (mut types_map, mut types_source_map) = + (TypesMap::default(), TypesSourceMap::default()); + let body_ctx = self.body_ctx(&mut types_map, &mut types_source_map); let name = type_alias.name()?.as_name(); - let type_ref = type_alias.ty().map(|it| self.lower_type_ref(&it)); + let type_ref = type_alias.ty().map(|it| TypeRef::from_ast(&body_ctx, it)); let visibility = self.lower_visibility(type_alias); - let bounds = self.lower_type_bounds(type_alias); + let bounds = self.lower_type_bounds(type_alias, &body_ctx); let ast_id = self.source_ast_id_map.ast_id(type_alias); - let generic_params = self.lower_generic_params(HasImplicitSelf::No, type_alias); - let res = TypeAlias { name, visibility, bounds, generic_params, type_ref, ast_id }; + let (generic_params, generics_source_map) = + self.lower_generic_params(HasImplicitSelf::No, type_alias); + types_map.shrink_to_fit(); + types_source_map.shrink_to_fit(); + let res = TypeAlias { + name, + visibility, + bounds, + generic_params, + type_ref, + ast_id, + types_map: Arc::new(types_map), + }; let id = id(self.data().type_aliases.alloc(res)); + 
self.source_maps.type_aliases.push(GenericItemSourceMapBuilder { + item: types_source_map, + generics: generics_source_map, + }); self.write_generic_params_attributes(id.into()); Some(id) } fn lower_static(&mut self, static_: &ast::Static) -> Option> { + let (mut types_map, mut types_source_map) = + (TypesMap::default(), TypesSourceMap::default()); + let body_ctx = self.body_ctx(&mut types_map, &mut types_source_map); let name = static_.name()?.as_name(); - let type_ref = self.lower_type_ref_opt(static_.ty()); + let type_ref = TypeRef::from_ast_opt(&body_ctx, static_.ty()); let visibility = self.lower_visibility(static_); let mutable = static_.mut_token().is_some(); let has_safe_kw = static_.safe_token().is_some(); let has_unsafe_kw = static_.unsafe_token().is_some(); let ast_id = self.source_ast_id_map.ast_id(static_); - let res = - Static { name, visibility, mutable, type_ref, ast_id, has_safe_kw, has_unsafe_kw }; + types_map.shrink_to_fit(); + types_source_map.shrink_to_fit(); + let res = Static { + name, + visibility, + mutable, + type_ref, + ast_id, + has_safe_kw, + has_unsafe_kw, + types_map: Arc::new(types_map), + }; + self.source_maps.statics.push(types_source_map); Some(id(self.data().statics.alloc(res))) } fn lower_const(&mut self, konst: &ast::Const) -> FileItemTreeId { + let (mut types_map, mut types_source_map) = + (TypesMap::default(), TypesSourceMap::default()); + let body_ctx = self.body_ctx(&mut types_map, &mut types_source_map); let name = konst.name().map(|it| it.as_name()); - let type_ref = self.lower_type_ref_opt(konst.ty()); + let type_ref = TypeRef::from_ast_opt(&body_ctx, konst.ty()); let visibility = self.lower_visibility(konst); let ast_id = self.source_ast_id_map.ast_id(konst); - let res = Const { name, visibility, type_ref, ast_id, has_body: konst.body().is_some() }; + types_map.shrink_to_fit(); + types_source_map.shrink_to_fit(); + let res = Const { + name, + visibility, + type_ref, + ast_id, + has_body: konst.body().is_some(), + types_map: Arc::new(types_map), + }; + self.source_maps.consts.push(types_source_map); id(self.data().consts.alloc(res)) } @@ -539,10 +693,11 @@ impl<'a> Ctx<'a> { .filter_map(|item_node| self.lower_assoc_item(&item_node)) .collect(); - let generic_params = + let (generic_params, generics_source_map) = self.lower_generic_params(HasImplicitSelf::Yes(trait_def.type_bound_list()), trait_def); let def = Trait { name, visibility, generic_params, is_auto, is_unsafe, items, ast_id }; let id = id(self.data().traits.alloc(def)); + self.source_maps.trait_generics.push(generics_source_map); self.write_generic_params_attributes(id.into()); Some(id) } @@ -554,24 +709,29 @@ impl<'a> Ctx<'a> { let name = trait_alias_def.name()?.as_name(); let visibility = self.lower_visibility(trait_alias_def); let ast_id = self.source_ast_id_map.ast_id(trait_alias_def); - let generic_params = self.lower_generic_params( + let (generic_params, generics_source_map) = self.lower_generic_params( HasImplicitSelf::Yes(trait_alias_def.type_bound_list()), trait_alias_def, ); let alias = TraitAlias { name, visibility, generic_params, ast_id }; let id = id(self.data().trait_aliases.alloc(alias)); + self.source_maps.trait_alias_generics.push(generics_source_map); self.write_generic_params_attributes(id.into()); Some(id) } - fn lower_impl(&mut self, impl_def: &ast::Impl) -> Option> { + fn lower_impl(&mut self, impl_def: &ast::Impl) -> FileItemTreeId { + let (mut types_map, mut types_source_map) = + (TypesMap::default(), TypesSourceMap::default()); + let body_ctx = 
self.body_ctx(&mut types_map, &mut types_source_map); + let ast_id = self.source_ast_id_map.ast_id(impl_def); // FIXME: If trait lowering fails, due to a non PathType for example, we treat this impl // as if it was an non-trait impl. Ideally we want to create a unique missing ref that only // equals itself. - let self_ty = self.lower_type_ref(&impl_def.self_ty()?); - let target_trait = impl_def.trait_().and_then(|tr| self.lower_trait_ref(&tr)); + let self_ty = TypeRef::from_ast_opt(&body_ctx, impl_def.self_ty()); + let target_trait = impl_def.trait_().and_then(|tr| TraitRef::from_ast(&body_ctx, tr)); let is_negative = impl_def.excl_token().is_some(); let is_unsafe = impl_def.unsafe_token().is_some(); @@ -584,12 +744,27 @@ impl<'a> Ctx<'a> { .collect(); // Note that trait impls don't get implicit `Self` unlike traits, because here they are a // type alias rather than a type parameter, so this is handled by the resolver. - let generic_params = self.lower_generic_params(HasImplicitSelf::No, impl_def); - let res = - Impl { generic_params, target_trait, self_ty, is_negative, is_unsafe, items, ast_id }; + let (generic_params, generics_source_map) = + self.lower_generic_params(HasImplicitSelf::No, impl_def); + types_map.shrink_to_fit(); + types_source_map.shrink_to_fit(); + let res = Impl { + generic_params, + target_trait, + self_ty, + is_negative, + is_unsafe, + items, + ast_id, + types_map: Arc::new(types_map), + }; let id = id(self.data().impls.alloc(res)); + self.source_maps.impls.push(GenericItemSourceMapBuilder { + item: types_source_map, + generics: generics_source_map, + }); self.write_generic_params_attributes(id.into()); - Some(id) + id } fn lower_use(&mut self, use_item: &ast::Use) -> Option> { @@ -692,14 +867,17 @@ impl<'a> Ctx<'a> { &mut self, has_implicit_self: HasImplicitSelf, node: &dyn ast::HasGenericParams, - ) -> Interned { + ) -> (Arc, TypesSourceMap) { + let (mut types_map, mut types_source_map) = + (TypesMap::default(), TypesSourceMap::default()); + let body_ctx = self.body_ctx(&mut types_map, &mut types_source_map); debug_assert!(self.generic_param_attr_buffer.is_empty(),); let add_param_attrs = |item: Either, param| { - let attrs = RawAttrs::new(self.db.upcast(), ¶m, self.body_ctx.span_map()); + let attrs = RawAttrs::new(self.db.upcast(), ¶m, body_ctx.span_map()); debug_assert!(self.generic_param_attr_buffer.insert(item, attrs).is_none()); }; - self.body_ctx.take_impl_traits_bounds(); + body_ctx.take_impl_traits_bounds(); let mut generics = GenericParamsCollector::default(); if let HasImplicitSelf::Yes(bounds) = has_implicit_self { @@ -715,23 +893,29 @@ impl<'a> Ctx<'a> { // add super traits as bounds on Self // i.e., `trait Foo: Bar` is equivalent to `trait Foo where Self: Bar` generics.fill_bounds( - &self.body_ctx, + &body_ctx, bounds, - Either::Left(TypeRef::Path(Name::new_symbol_root(sym::Self_.clone()).into())), + Either::Left(body_ctx.alloc_type_ref_desugared(TypeRef::Path( + Name::new_symbol_root(sym::Self_.clone()).into(), + ))), ); } - generics.fill(&self.body_ctx, node, add_param_attrs); + generics.fill(&body_ctx, node, add_param_attrs); - Interned::new(generics.finish()) + let generics = generics.finish(types_map, &mut types_source_map); + (generics, types_source_map) } - fn lower_type_bounds(&mut self, node: &dyn ast::HasTypeBounds) -> Box<[Interned]> { + fn lower_type_bounds( + &mut self, + node: &dyn ast::HasTypeBounds, + body_ctx: &LowerCtx<'_>, + ) -> Box<[TypeBound]> { match node.type_bound_list() { - Some(bound_list) => bound_list - .bounds() - 
.map(|it| Interned::new(TypeBound::from_ast(&self.body_ctx, it))) - .collect(), + Some(bound_list) => { + bound_list.bounds().map(|it| TypeBound::from_ast(body_ctx, it)).collect() + } None => Box::default(), } } @@ -743,23 +927,6 @@ impl<'a> Ctx<'a> { self.data().vis.alloc(vis) } - fn lower_trait_ref(&mut self, trait_ref: &ast::Type) -> Option> { - let trait_ref = TraitRef::from_ast(&self.body_ctx, trait_ref.clone())?; - Some(Interned::new(trait_ref)) - } - - fn lower_type_ref(&mut self, type_ref: &ast::Type) -> Interned { - let tyref = TypeRef::from_ast(&self.body_ctx, type_ref.clone()); - Interned::new(tyref) - } - - fn lower_type_ref_opt(&mut self, type_ref: Option) -> Interned { - match type_ref.map(|ty| self.lower_type_ref(&ty)) { - Some(it) => it, - None => Interned::new(TypeRef::Error), - } - } - fn next_variant_idx(&self) -> Idx { Idx::from_raw(RawIdx::from( self.tree.data.as_ref().map_or(0, |data| data.variants.len() as u32), @@ -767,7 +934,7 @@ impl<'a> Ctx<'a> { } } -fn desugar_future_path(orig: TypeRef) -> Path { +fn desugar_future_path(orig: TypeRefId) -> Path { let path = path![core::future::Future]; let mut generic_args: Vec<_> = std::iter::repeat(None).take(path.segments().len() - 1).collect(); @@ -777,10 +944,7 @@ fn desugar_future_path(orig: TypeRef) -> Path { type_ref: Some(orig), bounds: Box::default(), }; - generic_args.push(Some(Interned::new(GenericArgs { - bindings: Box::new([binding]), - ..GenericArgs::empty() - }))); + generic_args.push(Some(GenericArgs { bindings: Box::new([binding]), ..GenericArgs::empty() })); Path::from_known_path(path, generic_args) } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs index 9dce28b2e4927..b6816a1f9684e 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs @@ -10,11 +10,12 @@ use crate::{ item_tree::{ AttrOwner, Const, DefDatabase, Enum, ExternBlock, ExternCrate, Field, FieldParent, FieldsShape, FileItemTreeId, FnFlags, Function, GenericModItem, GenericParams, Impl, - Interned, ItemTree, Macro2, MacroCall, MacroRules, Mod, ModItem, ModKind, Param, Path, - RawAttrs, RawVisibilityId, Static, Struct, Trait, TraitAlias, TypeAlias, TypeBound, - TypeRef, Union, Use, UseTree, UseTreeKind, Variant, + ItemTree, Macro2, MacroCall, MacroRules, Mod, ModItem, ModKind, Param, Path, RawAttrs, + RawVisibilityId, Static, Struct, Trait, TraitAlias, TypeAlias, TypeBound, Union, Use, + UseTree, UseTreeKind, Variant, }, pretty::{print_path, print_type_bounds, print_type_ref}, + type_ref::{TypeRefId, TypesMap}, visibility::RawVisibility, }; @@ -121,7 +122,13 @@ impl Printer<'_> { }; } - fn print_fields(&mut self, parent: FieldParent, kind: FieldsShape, fields: &[Field]) { + fn print_fields( + &mut self, + parent: FieldParent, + kind: FieldsShape, + fields: &[Field], + map: &TypesMap, + ) { let edition = self.edition; match kind { FieldsShape::Record => { @@ -135,7 +142,7 @@ impl Printer<'_> { ); this.print_visibility(*visibility); w!(this, "{}: ", name.display(self.db.upcast(), edition)); - this.print_type_ref(type_ref); + this.print_type_ref(*type_ref, map); wln!(this, ","); } }); @@ -151,7 +158,7 @@ impl Printer<'_> { ); this.print_visibility(*visibility); w!(this, "{}: ", name.display(self.db.upcast(), edition)); - this.print_type_ref(type_ref); + this.print_type_ref(*type_ref, map); wln!(this, ","); } }); @@ -167,20 +174,21 @@ impl Printer<'_> { kind: FieldsShape, 
fields: &[Field], params: &GenericParams, + map: &TypesMap, ) { match kind { FieldsShape::Record => { if self.print_where_clause(params) { wln!(self); } - self.print_fields(parent, kind, fields); + self.print_fields(parent, kind, fields, map); } FieldsShape::Unit => { self.print_where_clause(params); - self.print_fields(parent, kind, fields); + self.print_fields(parent, kind, fields, map); } FieldsShape::Tuple => { - self.print_fields(parent, kind, fields); + self.print_fields(parent, kind, fields, map); self.print_where_clause(params); } } @@ -262,6 +270,7 @@ impl Printer<'_> { params, ret_type, ast_id, + types_map, flags, } = &self.tree[it]; self.print_ast_id(ast_id.erase()); @@ -298,7 +307,7 @@ impl Printer<'_> { w!(this, "self: "); } if let Some(type_ref) = type_ref { - this.print_type_ref(type_ref); + this.print_type_ref(*type_ref, types_map); } else { wln!(this, "..."); } @@ -307,7 +316,7 @@ impl Printer<'_> { }); } w!(self, ") -> "); - self.print_type_ref(ret_type); + self.print_type_ref(*ret_type, types_map); self.print_where_clause(explicit_generic_params); if flags.contains(FnFlags::HAS_BODY) { wln!(self, " {{ ... }}"); @@ -316,8 +325,15 @@ impl Printer<'_> { } } ModItem::Struct(it) => { - let Struct { visibility, name, fields, shape: kind, generic_params, ast_id } = - &self.tree[it]; + let Struct { + visibility, + name, + fields, + shape: kind, + generic_params, + ast_id, + types_map, + } = &self.tree[it]; self.print_ast_id(ast_id.erase()); self.print_visibility(*visibility); w!(self, "struct {}", name.display(self.db.upcast(), self.edition)); @@ -327,6 +343,7 @@ impl Printer<'_> { *kind, fields, generic_params, + types_map, ); if matches!(kind, FieldsShape::Record) { wln!(self); @@ -335,7 +352,8 @@ impl Printer<'_> { } } ModItem::Union(it) => { - let Union { name, visibility, fields, generic_params, ast_id } = &self.tree[it]; + let Union { name, visibility, fields, generic_params, ast_id, types_map } = + &self.tree[it]; self.print_ast_id(ast_id.erase()); self.print_visibility(*visibility); w!(self, "union {}", name.display(self.db.upcast(), self.edition)); @@ -345,6 +363,7 @@ impl Printer<'_> { FieldsShape::Record, fields, generic_params, + types_map, ); wln!(self); } @@ -358,18 +377,20 @@ impl Printer<'_> { let edition = self.edition; self.indented(|this| { for variant in FileItemTreeId::range_iter(variants.clone()) { - let Variant { name, fields, shape: kind, ast_id } = &this.tree[variant]; + let Variant { name, fields, shape: kind, ast_id, types_map } = + &this.tree[variant]; this.print_ast_id(ast_id.erase()); this.print_attrs_of(variant, "\n"); w!(this, "{}", name.display(self.db.upcast(), edition)); - this.print_fields(FieldParent::Variant(variant), *kind, fields); + this.print_fields(FieldParent::Variant(variant), *kind, fields, types_map); wln!(this, ","); } }); wln!(self, "}}"); } ModItem::Const(it) => { - let Const { name, visibility, type_ref, ast_id, has_body: _ } = &self.tree[it]; + let Const { name, visibility, type_ref, ast_id, has_body: _, types_map } = + &self.tree[it]; self.print_ast_id(ast_id.erase()); self.print_visibility(*visibility); w!(self, "const "); @@ -378,7 +399,7 @@ impl Printer<'_> { None => w!(self, "_"), } w!(self, ": "); - self.print_type_ref(type_ref); + self.print_type_ref(*type_ref, types_map); wln!(self, " = _;"); } ModItem::Static(it) => { @@ -390,6 +411,7 @@ impl Printer<'_> { ast_id, has_safe_kw, has_unsafe_kw, + types_map, } = &self.tree[it]; self.print_ast_id(ast_id.erase()); self.print_visibility(*visibility); @@ -404,7 +426,7 @@ impl 
Printer<'_> { w!(self, "mut "); } w!(self, "{}: ", name.display(self.db.upcast(), self.edition)); - self.print_type_ref(type_ref); + self.print_type_ref(*type_ref, types_map); w!(self, " = _;"); wln!(self); } @@ -449,6 +471,7 @@ impl Printer<'_> { items, generic_params, ast_id, + types_map, } = &self.tree[it]; self.print_ast_id(ast_id.erase()); if *is_unsafe { @@ -461,10 +484,10 @@ impl Printer<'_> { w!(self, "!"); } if let Some(tr) = target_trait { - self.print_path(&tr.path); + self.print_path(&tr.path, types_map); w!(self, " for "); } - self.print_type_ref(self_ty); + self.print_type_ref(*self_ty, types_map); self.print_where_clause_and_opening_brace(generic_params); self.indented(|this| { for item in &**items { @@ -474,19 +497,26 @@ impl Printer<'_> { wln!(self, "}}"); } ModItem::TypeAlias(it) => { - let TypeAlias { name, visibility, bounds, type_ref, generic_params, ast_id } = - &self.tree[it]; + let TypeAlias { + name, + visibility, + bounds, + type_ref, + generic_params, + ast_id, + types_map, + } = &self.tree[it]; self.print_ast_id(ast_id.erase()); self.print_visibility(*visibility); w!(self, "type {}", name.display(self.db.upcast(), self.edition)); self.print_generic_params(generic_params, it.into()); if !bounds.is_empty() { w!(self, ": "); - self.print_type_bounds(bounds); + self.print_type_bounds(bounds, types_map); } if let Some(ty) = type_ref { w!(self, " = "); - self.print_type_ref(ty); + self.print_type_ref(*ty, types_map); } self.print_where_clause(generic_params); w!(self, ";"); @@ -543,19 +573,19 @@ impl Printer<'_> { self.blank(); } - fn print_type_ref(&mut self, type_ref: &TypeRef) { + fn print_type_ref(&mut self, type_ref: TypeRefId, map: &TypesMap) { let edition = self.edition; - print_type_ref(self.db, type_ref, self, edition).unwrap(); + print_type_ref(self.db, type_ref, map, self, edition).unwrap(); } - fn print_type_bounds(&mut self, bounds: &[Interned]) { + fn print_type_bounds(&mut self, bounds: &[TypeBound], map: &TypesMap) { let edition = self.edition; - print_type_bounds(self.db, bounds, self, edition).unwrap(); + print_type_bounds(self.db, bounds, map, self, edition).unwrap(); } - fn print_path(&mut self, path: &Path) { + fn print_path(&mut self, path: &Path, map: &TypesMap) { let edition = self.edition; - print_path(self.db, path, self, edition).unwrap(); + print_path(self.db, path, map, self, edition).unwrap(); } fn print_generic_params(&mut self, params: &GenericParams, parent: GenericModItem) { @@ -586,7 +616,7 @@ impl Printer<'_> { }, TypeOrConstParamData::ConstParamData(konst) => { w!(self, "const {}: ", konst.name.display(self.db.upcast(), self.edition)); - self.print_type_ref(&konst.ty); + self.print_type_ref(konst.ty, ¶ms.types_map); } } } @@ -640,14 +670,16 @@ impl Printer<'_> { }; match target { - WherePredicateTypeTarget::TypeRef(ty) => this.print_type_ref(ty), + WherePredicateTypeTarget::TypeRef(ty) => { + this.print_type_ref(*ty, ¶ms.types_map) + } WherePredicateTypeTarget::TypeOrConstParam(id) => match params[*id].name() { Some(name) => w!(this, "{}", name.display(self.db.upcast(), edition)), None => w!(this, "_anon_{}", id.into_raw()), }, } w!(this, ": "); - this.print_type_bounds(std::slice::from_ref(bound)); + this.print_type_bounds(std::slice::from_ref(bound), ¶ms.types_map); } }); true diff --git a/src/tools/rust-analyzer/crates/hir-def/src/lib.rs b/src/tools/rust-analyzer/crates/hir-def/src/lib.rs index 157c9ef080578..f6ed826f04c7d 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/lib.rs +++ 
b/src/tools/rust-analyzer/crates/hir-def/src/lib.rs @@ -1531,11 +1531,3 @@ fn macro_call_as_call_id_with_eager( pub struct UnresolvedMacro { pub path: hir_expand::mod_path::ModPath, } - -intern::impl_internable!( - crate::type_ref::TypeRef, - crate::type_ref::TraitRef, - crate::type_ref::TypeBound, - crate::path::GenericArgs, - generics::GenericParams, -); diff --git a/src/tools/rust-analyzer/crates/hir-def/src/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/lower.rs index e4786a1dd40e2..df5847929c551 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/lower.rs @@ -5,43 +5,53 @@ use hir_expand::{ span_map::{SpanMap, SpanMapRef}, AstId, HirFileId, InFile, }; -use intern::Interned; use span::{AstIdMap, AstIdNode}; +use stdx::thin_vec::ThinVec; use syntax::ast; use triomphe::Arc; -use crate::{db::DefDatabase, path::Path, type_ref::TypeBound}; +use crate::{ + db::DefDatabase, + path::Path, + type_ref::{TypeBound, TypePtr, TypeRef, TypeRefId, TypesMap, TypesSourceMap}, +}; pub struct LowerCtx<'a> { pub db: &'a dyn DefDatabase, file_id: HirFileId, span_map: OnceCell, ast_id_map: OnceCell>, - impl_trait_bounds: RefCell>>>, + impl_trait_bounds: RefCell>>, // Prevent nested impl traits like `impl Foo`. outer_impl_trait: RefCell, + types_map: RefCell<(&'a mut TypesMap, &'a mut TypesSourceMap)>, } -pub(crate) struct OuterImplTraitGuard<'a> { - ctx: &'a LowerCtx<'a>, +pub(crate) struct OuterImplTraitGuard<'a, 'b> { + ctx: &'a LowerCtx<'b>, old: bool, } -impl<'a> OuterImplTraitGuard<'a> { - fn new(ctx: &'a LowerCtx<'a>, impl_trait: bool) -> Self { +impl<'a, 'b> OuterImplTraitGuard<'a, 'b> { + fn new(ctx: &'a LowerCtx<'b>, impl_trait: bool) -> Self { let old = ctx.outer_impl_trait.replace(impl_trait); Self { ctx, old } } } -impl<'a> Drop for OuterImplTraitGuard<'a> { +impl Drop for OuterImplTraitGuard<'_, '_> { fn drop(&mut self) { self.ctx.outer_impl_trait.replace(self.old); } } impl<'a> LowerCtx<'a> { - pub fn new(db: &'a dyn DefDatabase, file_id: HirFileId) -> Self { + pub fn new( + db: &'a dyn DefDatabase, + file_id: HirFileId, + types_map: &'a mut TypesMap, + types_source_map: &'a mut TypesSourceMap, + ) -> Self { LowerCtx { db, file_id, @@ -49,6 +59,7 @@ impl<'a> LowerCtx<'a> { ast_id_map: OnceCell::new(), impl_trait_bounds: RefCell::new(Vec::new()), outer_impl_trait: RefCell::default(), + types_map: RefCell::new((types_map, types_source_map)), } } @@ -56,6 +67,8 @@ impl<'a> LowerCtx<'a> { db: &'a dyn DefDatabase, file_id: HirFileId, span_map: OnceCell, + types_map: &'a mut TypesMap, + types_source_map: &'a mut TypesSourceMap, ) -> Self { LowerCtx { db, @@ -64,6 +77,7 @@ impl<'a> LowerCtx<'a> { ast_id_map: OnceCell::new(), impl_trait_bounds: RefCell::new(Vec::new()), outer_impl_trait: RefCell::default(), + types_map: RefCell::new((types_map, types_source_map)), } } @@ -82,11 +96,11 @@ impl<'a> LowerCtx<'a> { ) } - pub fn update_impl_traits_bounds(&self, bounds: Vec>) { + pub fn update_impl_traits_bounds(&self, bounds: ThinVec) { self.impl_trait_bounds.borrow_mut().push(bounds); } - pub fn take_impl_traits_bounds(&self) -> Vec>> { + pub fn take_impl_traits_bounds(&self) -> Vec> { self.impl_trait_bounds.take() } @@ -94,7 +108,32 @@ impl<'a> LowerCtx<'a> { *self.outer_impl_trait.borrow() } - pub(crate) fn outer_impl_trait_scope(&'a self, impl_trait: bool) -> OuterImplTraitGuard<'a> { + pub(crate) fn outer_impl_trait_scope<'b>( + &'b self, + impl_trait: bool, + ) -> OuterImplTraitGuard<'b, 'a> { OuterImplTraitGuard::new(self, 
impl_trait) } + + pub(crate) fn alloc_type_ref(&self, type_ref: TypeRef, node: TypePtr) -> TypeRefId { + let mut types_map = self.types_map.borrow_mut(); + let (types_map, types_source_map) = &mut *types_map; + let id = types_map.types.alloc(type_ref); + types_source_map.types_map_back.insert(id, InFile::new(self.file_id, node)); + id + } + + pub(crate) fn alloc_type_ref_desugared(&self, type_ref: TypeRef) -> TypeRefId { + self.types_map.borrow_mut().0.types.alloc(type_ref) + } + + pub(crate) fn alloc_error_type(&self) -> TypeRefId { + self.types_map.borrow_mut().0.types.alloc(TypeRef::Error) + } + + // FIXME: If we alloc while holding this, well... Bad Things will happen. Need to change this + // to use proper mutability instead of interior mutability. + pub(crate) fn types_map(&self) -> std::cell::Ref<'_, TypesMap> { + std::cell::Ref::map(self.types_map.borrow(), |it| &*it.0) + } } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs index 450a15bd66e59..d5b94f0ae443a 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs @@ -122,7 +122,7 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream let mut expn_text = String::new(); if let Some(err) = exp.err { - format_to!(expn_text, "/* error: {} */", err.render_to_string(&db).0); + format_to!(expn_text, "/* error: {} */", err.render_to_string(&db).message); } let (parse, token_map) = exp.value; if expect_errors { diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs index 22b9c2b4e37e1..a37e3c70e22a9 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs @@ -31,7 +31,7 @@ use crate::{ item_scope::{ImportId, ImportOrExternCrate, ImportType, PerNsGlobImports}, item_tree::{ self, AttrOwner, FieldsShape, FileItemTreeId, ImportKind, ItemTree, ItemTreeId, - ItemTreeNode, Macro2, MacroCall, MacroRules, Mod, ModItem, ModKind, TreeId, + ItemTreeNode, Macro2, MacroCall, MacroRules, Mod, ModItem, ModKind, TreeId, UseTreeKind, }, macro_call_as_call_id, macro_call_as_call_id_with_eager, nameres::{ @@ -985,12 +985,8 @@ impl DefCollector<'_> { for (name, res) in resolutions { match name { Some(name) => { - changed |= self.push_res_and_update_glob_vis( - module_id, - name, - res.with_visibility(vis), - import, - ); + changed |= + self.push_res_and_update_glob_vis(module_id, name, *res, vis, import); } None => { let tr = match res.take_types() { @@ -1043,10 +1039,11 @@ impl DefCollector<'_> { .collect::>(); for (glob_importing_module, glob_import_vis, use_) in glob_imports { + let vis = glob_import_vis.min(vis, &self.def_map).unwrap_or(glob_import_vis); self.update_recursive( glob_importing_module, resolutions, - glob_import_vis, + vis, Some(ImportType::Glob(use_)), depth + 1, ); @@ -1058,8 +1055,44 @@ impl DefCollector<'_> { module_id: LocalModuleId, name: &Name, mut defs: PerNs, + vis: Visibility, def_import_type: Option, ) -> bool { + // `extern crate crate_name` things can be re-exported as `pub use crate_name`. + // But they cannot be re-exported as `pub use self::crate_name`, `pub use crate::crate_name` + // or `pub use ::crate_name`. 
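// (Editorial aside, not part of the patch: a minimal sketch of the rule stated above,
// using a hypothetical dependency name `dep`.)
//
//     extern crate dep;
//
//     pub use dep;                // re-exports the extern crate
//     pub use dep as renamed;     // a plain rename is treated the same way
//     pub use self::dep as a;     // not treated as an extern-crate re-export
//     pub use crate::dep as b;    // likewise
//     pub use ::dep as c;         // likewise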
+ // + // This has been historically allowed, but may be not allowed in future + // https://github.com/rust-lang/rust/issues/127909 + if let Some((_, v, it)) = defs.types.as_mut() { + let is_extern_crate_reimport_without_prefix = || { + let Some(ImportOrExternCrate::ExternCrate(_)) = it else { + return false; + }; + let Some(ImportType::Import(id)) = def_import_type else { + return false; + }; + let use_id = id.import.lookup(self.db).id; + let item_tree = use_id.item_tree(self.db); + let use_kind = item_tree[use_id.value].use_tree.kind(); + let UseTreeKind::Single { path, .. } = use_kind else { + return false; + }; + path.segments().len() < 2 + }; + if is_extern_crate_reimport_without_prefix() { + *v = vis; + } else { + *v = v.min(vis, &self.def_map).unwrap_or(vis); + } + } + if let Some((_, v, _)) = defs.values.as_mut() { + *v = v.min(vis, &self.def_map).unwrap_or(vis); + } + if let Some((_, v, _)) = defs.macros.as_mut() { + *v = v.min(vis, &self.def_map).unwrap_or(vis); + } + let mut changed = false; if let Some(ImportType::Glob(_)) = def_import_type { diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/path_resolution.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/path_resolution.rs index 75cab137f78b5..29379d0074936 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/path_resolution.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/path_resolution.rs @@ -10,6 +10,7 @@ //! //! `ReachedFixedPoint` signals about this. +use either::Either; use hir_expand::{name::Name, Lookup}; use span::Edition; use triomphe::Arc; @@ -150,17 +151,8 @@ impl DefMap { let mut arc; let mut current_map = self; - loop { - let new = current_map.resolve_path_fp_with_macro_single( - db, - mode, - original_module, - path, - shadow, - expected_macro_subns, - ); - // Merge `new` into `result`. + let mut merge = |new: ResolvePathResult| { result.resolved_def = result.resolved_def.or(new.resolved_def); if result.reached_fixedpoint == ReachedFixedPoint::No { result.reached_fixedpoint = new.reached_fixedpoint; @@ -171,7 +163,9 @@ impl DefMap { (Some(old), Some(new)) => Some(old.max(new)), (None, new) => new, }; + }; + loop { match current_map.block { Some(block) if original_module == Self::ROOT => { // Block modules "inherit" names from its parent module. @@ -180,8 +174,38 @@ impl DefMap { current_map = &arc; } // Proper (non-block) modules, including those in block `DefMap`s, don't. - _ => return result, + _ => { + if original_module != Self::ROOT && current_map.block.is_some() { + // A module inside a block. Do not resolve items declared in upper blocks, but we do need to get + // the prelude items (which are not inserted into blocks because they can be overridden there). 
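// (Editorial aside, not part of the patch: the shape of code this branch handles.
// `inner` is a module declared inside a body block: names from the surrounding block
// must not resolve inside it, but prelude names such as `Vec` still should.)
//
//     fn outer() {
//         struct OnlyInBlock;
//         mod inner {
//             // `OnlyInBlock` does not resolve here, but `Vec` (std prelude) does.
//             pub(crate) fn make() -> Vec<u8> { Vec::new() }
//         }
//     }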
+ original_module = Self::ROOT; + arc = db.crate_def_map(self.krate); + current_map = &arc; + + let new = current_map.resolve_path_fp_in_all_preludes( + db, + mode, + original_module, + path, + shadow, + ); + merge(new); + } + + return result; + } } + + let new = current_map.resolve_path_fp_with_macro_single( + db, + mode, + original_module, + path, + shadow, + expected_macro_subns, + ); + + merge(new); } } @@ -195,7 +219,7 @@ impl DefMap { expected_macro_subns: Option, ) -> ResolvePathResult { let mut segments = path.segments().iter().enumerate(); - let mut curr_per_ns = match path.kind { + let curr_per_ns = match path.kind { PathKind::DollarCrate(krate) => { if krate == self.krate { cov_mark::hit!(macro_dollar_crate_self); @@ -296,25 +320,96 @@ impl DefMap { PerNs::types(module.into(), Visibility::Public, None) } - PathKind::Abs => { - // 2018-style absolute path -- only extern prelude - let segment = match segments.next() { - Some((_, segment)) => segment, + PathKind::Abs => match self.resolve_path_abs(&mut segments, path) { + Either::Left(it) => it, + Either::Right(reached_fixed_point) => { + return ResolvePathResult::empty(reached_fixed_point) + } + }, + }; + + self.resolve_remaining_segments(segments, curr_per_ns, path, db, shadow, original_module) + } + + /// Resolves a path only in the preludes, without accounting for item scopes. + pub(super) fn resolve_path_fp_in_all_preludes( + &self, + db: &dyn DefDatabase, + mode: ResolveMode, + original_module: LocalModuleId, + path: &ModPath, + shadow: BuiltinShadowMode, + ) -> ResolvePathResult { + let mut segments = path.segments().iter().enumerate(); + let curr_per_ns = match path.kind { + // plain import or absolute path in 2015: crate-relative with + // fallback to extern prelude (with the simplification in + // rust-lang/rust#57745) + // FIXME there must be a nicer way to write this condition + PathKind::Plain | PathKind::Abs + if self.data.edition == Edition::Edition2015 + && (path.kind == PathKind::Abs || mode == ResolveMode::Import) => + { + let (_, segment) = match segments.next() { + Some((idx, segment)) => (idx, segment), None => return ResolvePathResult::empty(ReachedFixedPoint::Yes), }; - if let Some(&(def, extern_crate)) = self.data.extern_prelude.get(segment) { - tracing::debug!("absolute path {:?} resolved to crate {:?}", path, def); - PerNs::types( - def.into(), - Visibility::Public, - extern_crate.map(ImportOrExternCrate::ExternCrate), - ) - } else { - return ResolvePathResult::empty(ReachedFixedPoint::No); // extern crate declarations can add to the extern prelude + tracing::debug!("resolving {:?} in crate root (+ extern prelude)", segment); + self.resolve_name_in_extern_prelude(segment) + } + PathKind::Plain => { + let (_, segment) = match segments.next() { + Some((idx, segment)) => (idx, segment), + None => return ResolvePathResult::empty(ReachedFixedPoint::Yes), + }; + tracing::debug!("resolving {:?} in module", segment); + self.resolve_name_in_all_preludes(db, segment) + } + PathKind::Abs => match self.resolve_path_abs(&mut segments, path) { + Either::Left(it) => it, + Either::Right(reached_fixed_point) => { + return ResolvePathResult::empty(reached_fixed_point) } + }, + PathKind::DollarCrate(_) | PathKind::Crate | PathKind::Super(_) => { + return ResolvePathResult::empty(ReachedFixedPoint::Yes) } }; + self.resolve_remaining_segments(segments, curr_per_ns, path, db, shadow, original_module) + } + + /// 2018-style absolute path -- only extern prelude + fn resolve_path_abs<'a>( + &self, + segments: &mut impl Iterator, + 
path: &ModPath, + ) -> Either { + let segment = match segments.next() { + Some((_, segment)) => segment, + None => return Either::Right(ReachedFixedPoint::Yes), + }; + if let Some(&(def, extern_crate)) = self.data.extern_prelude.get(segment) { + tracing::debug!("absolute path {:?} resolved to crate {:?}", path, def); + Either::Left(PerNs::types( + def.into(), + Visibility::Public, + extern_crate.map(ImportOrExternCrate::ExternCrate), + )) + } else { + Either::Right(ReachedFixedPoint::No) // extern crate declarations can add to the extern prelude + } + } + + fn resolve_remaining_segments<'a>( + &self, + segments: impl Iterator, + mut curr_per_ns: PerNs, + path: &ModPath, + db: &dyn DefDatabase, + shadow: BuiltinShadowMode, + original_module: LocalModuleId, + ) -> ResolvePathResult { for (i, segment) in segments { let (curr, vis, imp) = match curr_per_ns.take_types_full() { Some(r) => r, @@ -475,24 +570,9 @@ impl DefMap { // they might been shadowed by local names. return PerNs::none(); } - self.data.extern_prelude.get(name).map_or(PerNs::none(), |&(it, extern_crate)| { - PerNs::types( - it.into(), - Visibility::Public, - extern_crate.map(ImportOrExternCrate::ExternCrate), - ) - }) - }; - let macro_use_prelude = || { - self.macro_use_prelude.get(name).map_or(PerNs::none(), |&(it, _extern_crate)| { - PerNs::macros( - it, - Visibility::Public, - // FIXME? - None, // extern_crate.map(ImportOrExternCrate::ExternCrate), - ) - }) + self.resolve_name_in_extern_prelude(name) }; + let macro_use_prelude = || self.resolve_in_macro_use_prelude(name); let prelude = || { if self.block.is_some() && module == DefMap::ROOT { return PerNs::none(); @@ -507,6 +587,38 @@ impl DefMap { .or_else(prelude) } + fn resolve_name_in_all_preludes(&self, db: &dyn DefDatabase, name: &Name) -> PerNs { + // Resolve in: + // - extern prelude / macro_use prelude + // - std prelude + let extern_prelude = self.resolve_name_in_extern_prelude(name); + let macro_use_prelude = || self.resolve_in_macro_use_prelude(name); + let prelude = || self.resolve_in_prelude(db, name); + + extern_prelude.or_else(macro_use_prelude).or_else(prelude) + } + + fn resolve_name_in_extern_prelude(&self, name: &Name) -> PerNs { + self.data.extern_prelude.get(name).map_or(PerNs::none(), |&(it, extern_crate)| { + PerNs::types( + it.into(), + Visibility::Public, + extern_crate.map(ImportOrExternCrate::ExternCrate), + ) + }) + } + + fn resolve_in_macro_use_prelude(&self, name: &Name) -> PerNs { + self.macro_use_prelude.get(name).map_or(PerNs::none(), |&(it, _extern_crate)| { + PerNs::macros( + it, + Visibility::Public, + // FIXME? + None, // extern_crate.map(ImportOrExternCrate::ExternCrate), + ) + }) + } + fn resolve_name_in_crate_root_or_extern_prelude( &self, db: &dyn DefDatabase, @@ -525,16 +637,7 @@ impl DefMap { // Don't resolve extern prelude in pseudo-module of a block. 
return PerNs::none(); } - self.data.extern_prelude.get(name).copied().map_or( - PerNs::none(), - |(it, extern_crate)| { - PerNs::types( - it.into(), - Visibility::Public, - extern_crate.map(ImportOrExternCrate::ExternCrate), - ) - }, - ) + self.resolve_name_in_extern_prelude(name) }; from_crate_root.or_else(from_extern_prelude) diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests.rs index 7b02a89e5de7e..e1e30e5cec9a5 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests.rs @@ -385,6 +385,52 @@ pub struct Arc; ); } +#[test] +fn extern_crate_reexport() { + check( + r#" +//- /main.rs crate:main deps:importer +use importer::*; +use importer::extern_crate1::exported::*; +use importer::allowed_reexport::*; +use importer::extern_crate2::*; +use importer::not_allowed_reexport1; +use importer::not_allowed_reexport2; + +//- /importer.rs crate:importer deps:extern_crate1,extern_crate2 +extern crate extern_crate1; +extern crate extern_crate2; + +pub use extern_crate1; +pub use extern_crate1 as allowed_reexport; + +pub use ::extern_crate; +pub use self::extern_crate as not_allowed_reexport1; +pub use crate::extern_crate as not_allowed_reexport2; + +//- /extern_crate1.rs crate:extern_crate1 +pub mod exported { + pub struct PublicItem; + struct PrivateItem; +} + +pub struct Exported; + +//- /extern_crate2.rs crate:extern_crate2 +pub struct NotExported; +"#, + expect![[r#" + crate + Exported: t v + PublicItem: t v + allowed_reexport: t + exported: t + not_allowed_reexport1: _ + not_allowed_reexport2: _ + "#]], + ); +} + #[test] fn extern_crate_rename_2015_edition() { check( diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/globs.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/globs.rs index a2696055ca103..543ab41cd59a9 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/globs.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/globs.rs @@ -412,3 +412,42 @@ use reexport::*; "#]], ); } + +#[test] +fn regression_18308() { + check( + r#" +use outer::*; + +mod outer { + mod inner_superglob { + pub use super::*; + } + + // The importing order matters! 
+ pub use inner_superglob::*; + use super::glob_target::*; +} + +mod glob_target { + pub struct ShouldBePrivate; +} +"#, + expect![[r#" + crate + glob_target: t + outer: t + + crate::glob_target + ShouldBePrivate: t v + + crate::outer + ShouldBePrivate: t v + inner_superglob: t + + crate::outer::inner_superglob + ShouldBePrivate: t v + inner_superglob: t + "#]], + ); +} diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs index d319831867c0e..d920c10826625 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs @@ -253,7 +253,8 @@ m!(Z); let (_, module_data) = crate_def_map.modules.iter().last().unwrap(); assert_eq!(module_data.scope.resolutions().count(), 4); }); - let n_recalculated_item_trees = events.iter().filter(|it| it.contains("item_tree")).count(); + let n_recalculated_item_trees = + events.iter().filter(|it| it.contains("item_tree(")).count(); assert_eq!(n_recalculated_item_trees, 6); let n_reparsed_macros = events.iter().filter(|it| it.contains("parse_macro_expansion(")).count(); @@ -308,7 +309,7 @@ pub type Ty = (); let events = db.log_executed(|| { db.file_item_tree(pos.file_id.into()); }); - let n_calculated_item_trees = events.iter().filter(|it| it.contains("item_tree")).count(); + let n_calculated_item_trees = events.iter().filter(|it| it.contains("item_tree(")).count(); assert_eq!(n_calculated_item_trees, 1); let n_parsed_files = events.iter().filter(|it| it.contains("parse(")).count(); assert_eq!(n_parsed_files, 1); diff --git a/src/tools/rust-analyzer/crates/hir-def/src/path.rs b/src/tools/rust-analyzer/crates/hir-def/src/path.rs index 077863c0c939c..dc6947c5b56b6 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/path.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/path.rs @@ -9,11 +9,12 @@ use std::{ use crate::{ lang_item::LangItemTarget, lower::LowerCtx, - type_ref::{ConstRef, LifetimeRef, TypeBound, TypeRef}, + type_ref::{ConstRef, LifetimeRef, TypeBound, TypeRefId}, }; use hir_expand::name::Name; use intern::Interned; use span::Edition; +use stdx::thin_vec::thin_vec_with_header_struct; use syntax::ast; pub use hir_expand::mod_path::{path, ModPath, PathKind}; @@ -47,20 +48,33 @@ impl Display for ImportAliasDisplay<'_> { #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum Path { - /// A normal path - Normal { - /// Type based path like `::foo`. - /// Note that paths like `::foo` are desugared to `Trait::::foo`. - type_anchor: Option>, - mod_path: Interned, - /// Invariant: the same len as `self.mod_path.segments` or `None` if all segments are `None`. - generic_args: Option>]>>, - }, + /// `BarePath` is used when the path has neither generics nor type anchor, since the vast majority of paths + /// are in this category, and splitting `Path` this way allows it to be more thin. When the path has either generics + /// or type anchor, it is `Path::Normal` with the generics filled with `None` even if there are none (practically + /// this is not a problem since many more paths have generics than a type anchor). + BarePath(Interned), + /// `Path::Normal` may have empty generics and type anchor (but generic args will be filled with `None`). + Normal(NormalPath), /// A link to a lang item. It is used in desugaring of things like `it?`. We can show these /// links via a normal path since they might be private and not accessible in the usage place. 
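// (Editorial aside, not part of the patch: the pattern this file is ported to below.
// Type references are now arena-allocated, so the printers take a `TypeRefId` plus the
// `TypesMap` that owns it and look the node up by index instead of holding a `&TypeRef`.
// The identifiers mirror the ones used in this diff; the body is only a sketch.)
//
//     fn render(map: &TypesMap, id: TypeRefId) {
//         match &map[id] {
//             TypeRef::Never => { /* write "!" */ }
//             _other => { /* recurse into nested ids, passing `map` along */ }
//         }
//     }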
LangItem(LangItemTarget, Option), } +// This type is being used a lot, make sure it doesn't grow unintentionally. +#[cfg(target_arch = "x86_64")] +const _: () = { + assert!(size_of::() == 16); + assert!(size_of::>() == 16); +}; + +thin_vec_with_header_struct! { + pub new(pub(crate)) struct NormalPath, NormalPathHeader { + pub generic_args: [Option], + pub type_anchor: Option, + pub mod_path: Interned; ref, + } +} + /// Generic arguments to a path segment (e.g. the `i32` in `Option`). This /// also includes bindings of associated types, like in `Iterator`. #[derive(Debug, Clone, PartialEq, Eq, Hash)] @@ -86,20 +100,20 @@ pub struct AssociatedTypeBinding { pub name: Name, /// The generic arguments to the associated type. e.g. For `Trait = &'a T>`, this /// would be `['a, T]`. - pub args: Option>, + pub args: Option, /// The type bound to this associated type (in `Item = T`, this would be the /// `T`). This can be `None` if there are bounds instead. - pub type_ref: Option, + pub type_ref: Option, /// Bounds for the associated type, like in `Iterator`. (This is the unstable `associated_type_bounds` /// feature.) - pub bounds: Box<[Interned]>, + pub bounds: Box<[TypeBound]>, } /// A single generic argument. #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum GenericArg { - Type(TypeRef), + Type(TypeRefId), Lifetime(LifetimeRef), Const(ConstRef), } @@ -112,50 +126,49 @@ impl Path { } /// Converts a known mod path to `Path`. - pub fn from_known_path( - path: ModPath, - generic_args: impl Into>]>>, - ) -> Path { - let generic_args = generic_args.into(); - assert_eq!(path.len(), generic_args.len()); - Path::Normal { - type_anchor: None, - mod_path: Interned::new(path), - generic_args: Some(generic_args), - } + pub fn from_known_path(path: ModPath, generic_args: Vec>) -> Path { + Path::Normal(NormalPath::new(None, Interned::new(path), generic_args)) } /// Converts a known mod path to `Path`. pub fn from_known_path_with_no_generic(path: ModPath) -> Path { - Path::Normal { type_anchor: None, mod_path: Interned::new(path), generic_args: None } + Path::BarePath(Interned::new(path)) } + #[inline] pub fn kind(&self) -> &PathKind { match self { - Path::Normal { mod_path, .. } => &mod_path.kind, + Path::BarePath(mod_path) => &mod_path.kind, + Path::Normal(path) => &path.mod_path().kind, Path::LangItem(..) => &PathKind::Abs, } } - pub fn type_anchor(&self) -> Option<&TypeRef> { + #[inline] + pub fn type_anchor(&self) -> Option { match self { - Path::Normal { type_anchor, .. } => type_anchor.as_deref(), - Path::LangItem(..) => None, + Path::Normal(path) => path.type_anchor(), + Path::LangItem(..) | Path::BarePath(_) => None, + } + } + + #[inline] + pub fn generic_args(&self) -> Option<&[Option]> { + match self { + Path::Normal(path) => Some(path.generic_args()), + Path::LangItem(..) | Path::BarePath(_) => None, } } pub fn segments(&self) -> PathSegments<'_> { match self { - Path::Normal { mod_path, generic_args, .. 
} => { - let s = PathSegments { - segments: mod_path.segments(), - generic_args: generic_args.as_deref(), - }; - if let Some(generic_args) = s.generic_args { - assert_eq!(s.segments.len(), generic_args.len()); - } - s + Path::BarePath(mod_path) => { + PathSegments { segments: mod_path.segments(), generic_args: None } } + Path::Normal(path) => PathSegments { + segments: path.mod_path().segments(), + generic_args: Some(path.generic_args()), + }, Path::LangItem(_, seg) => PathSegments { segments: seg.as_ref().map_or(&[], |seg| std::slice::from_ref(seg)), generic_args: None, @@ -165,34 +178,55 @@ impl Path { pub fn mod_path(&self) -> Option<&ModPath> { match self { - Path::Normal { mod_path, .. } => Some(mod_path), + Path::BarePath(mod_path) => Some(mod_path), + Path::Normal(path) => Some(path.mod_path()), Path::LangItem(..) => None, } } pub fn qualifier(&self) -> Option { - let Path::Normal { mod_path, generic_args, type_anchor } = self else { - return None; - }; - if mod_path.is_ident() { - return None; + match self { + Path::BarePath(mod_path) => { + if mod_path.is_ident() { + return None; + } + Some(Path::BarePath(Interned::new(ModPath::from_segments( + mod_path.kind, + mod_path.segments()[..mod_path.segments().len() - 1].iter().cloned(), + )))) + } + Path::Normal(path) => { + let mod_path = path.mod_path(); + if mod_path.is_ident() { + return None; + } + let type_anchor = path.type_anchor(); + let generic_args = path.generic_args(); + let qualifier_mod_path = Interned::new(ModPath::from_segments( + mod_path.kind, + mod_path.segments()[..mod_path.segments().len() - 1].iter().cloned(), + )); + let qualifier_generic_args = &generic_args[..generic_args.len() - 1]; + Some(Path::Normal(NormalPath::new( + type_anchor, + qualifier_mod_path, + qualifier_generic_args.iter().cloned(), + ))) + } + Path::LangItem(..) => None, } - let res = Path::Normal { - type_anchor: type_anchor.clone(), - mod_path: Interned::new(ModPath::from_segments( - mod_path.kind, - mod_path.segments()[..mod_path.segments().len() - 1].iter().cloned(), - )), - generic_args: generic_args.as_ref().map(|it| it[..it.len() - 1].to_vec().into()), - }; - Some(res) } pub fn is_self_type(&self) -> bool { - let Path::Normal { mod_path, generic_args, type_anchor } = self else { - return false; - }; - type_anchor.is_none() && generic_args.as_deref().is_none() && mod_path.is_Self() + match self { + Path::BarePath(mod_path) => mod_path.is_Self(), + Path::Normal(path) => { + path.type_anchor().is_none() + && path.mod_path().is_Self() + && path.generic_args().iter().all(|args| args.is_none()) + } + Path::LangItem(..) 
=> false, + } } } @@ -204,7 +238,7 @@ pub struct PathSegment<'a> { pub struct PathSegments<'a> { segments: &'a [Name], - generic_args: Option<&'a [Option>]>, + generic_args: Option<&'a [Option]>, } impl<'a> PathSegments<'a> { @@ -224,7 +258,7 @@ impl<'a> PathSegments<'a> { pub fn get(&self, idx: usize) -> Option> { let res = PathSegment { name: self.segments.get(idx)?, - args_and_bindings: self.generic_args.and_then(|it| it.get(idx)?.as_deref()), + args_and_bindings: self.generic_args.and_then(|it| it.get(idx)?.as_ref()), }; Some(res) } @@ -244,7 +278,7 @@ impl<'a> PathSegments<'a> { self.segments .iter() .zip(self.generic_args.into_iter().flatten().chain(iter::repeat(&None))) - .map(|(name, args)| PathSegment { name, args_and_bindings: args.as_deref() }) + .map(|(name, args)| PathSegment { name, args_and_bindings: args.as_ref() }) } } @@ -268,16 +302,6 @@ impl GenericArgs { impl From for Path { fn from(name: Name) -> Path { - Path::Normal { - type_anchor: None, - mod_path: Interned::new(ModPath::from_segments(PathKind::Plain, iter::once(name))), - generic_args: None, - } - } -} - -impl From for Box { - fn from(name: Name) -> Box { - Box::new(Path::from(name)) + Path::BarePath(Interned::new(ModPath::from_segments(PathKind::Plain, iter::once(name)))) } } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/path/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/path/lower.rs index 70918a9358e8e..c328b9c6ce2f0 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/path/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/path/lower.rs @@ -2,13 +2,14 @@ use std::iter; -use crate::{lower::LowerCtx, type_ref::ConstRef}; +use crate::{lower::LowerCtx, path::NormalPath, type_ref::ConstRef}; use hir_expand::{ mod_path::resolve_crate_root, name::{AsName, Name}, }; use intern::{sym, Interned}; +use stdx::thin_vec::EmptyOptimizedThinVec; use syntax::ast::{self, AstNode, HasGenericArgs, HasTypeBounds}; use crate::{ @@ -51,8 +52,7 @@ pub(super) fn lower_path(ctx: &LowerCtx<'_>, mut path: ast::Path) -> Option, mut path: ast::Path) -> Option::foo None => { - type_anchor = Some(Interned::new(self_type)); + type_anchor = Some(self_type); kind = PathKind::Plain; } // >::Foo desugars to Trait::Foo Some(trait_ref) => { - let Path::Normal { mod_path, generic_args: path_generic_args, .. } = - Path::from_src(ctx, trait_ref.path()?)? 
- else { - return None; - }; + let path = Path::from_src(ctx, trait_ref.path()?)?; + let mod_path = path.mod_path()?; + let path_generic_args = path.generic_args(); let num_segments = mod_path.segments().len(); kind = mod_path.kind; @@ -95,7 +93,7 @@ pub(super) fn lower_path(ctx: &LowerCtx<'_>, mut path: ast::Path) -> Option GenericArgs { args: iter::once(self_type) .chain(it.args.iter().cloned()) @@ -110,7 +108,7 @@ pub(super) fn lower_path(ctx: &LowerCtx<'_>, mut path: ast::Path) -> Option, mut path: ast::Path) -> Option, mut path: ast::Path) -> Option Option { if let Some(q) = path.qualifier() { @@ -194,11 +192,13 @@ pub(super) fn lower_generic_args( match generic_arg { ast::GenericArg::TypeArg(type_arg) => { let type_ref = TypeRef::from_ast_opt(lower_ctx, type_arg.ty()); - type_ref.walk(&mut |tr| { + let types_map = lower_ctx.types_map(); + TypeRef::walk(type_ref, &types_map, &mut |tr| { if let TypeRef::ImplTrait(bounds) = tr { lower_ctx.update_impl_traits_bounds(bounds.clone()); } }); + drop(types_map); args.push(GenericArg::Type(type_ref)); } ast::GenericArg::AssocTypeArg(assoc_type_arg) => { @@ -212,20 +212,19 @@ pub(super) fn lower_generic_args( let name = name_ref.as_name(); let args = assoc_type_arg .generic_arg_list() - .and_then(|args| lower_generic_args(lower_ctx, args)) - .map(Interned::new); + .and_then(|args| lower_generic_args(lower_ctx, args)); let type_ref = assoc_type_arg.ty().map(|it| TypeRef::from_ast(lower_ctx, it)); - let type_ref = type_ref.inspect(|tr| { - tr.walk(&mut |tr| { + let type_ref = type_ref.inspect(|&tr| { + let types_map = lower_ctx.types_map(); + TypeRef::walk(tr, &types_map, &mut |tr| { if let TypeRef::ImplTrait(bounds) = tr { lower_ctx.update_impl_traits_bounds(bounds.clone()); } }); + drop(types_map); }); let bounds = if let Some(l) = assoc_type_arg.type_bound_list() { - l.bounds() - .map(|it| Interned::new(TypeBound::from_ast(lower_ctx, it))) - .collect() + l.bounds().map(|it| TypeBound::from_ast(lower_ctx, it)).collect() } else { Box::default() }; @@ -269,7 +268,9 @@ fn lower_generic_args_from_fn_path( let type_ref = TypeRef::from_ast_opt(ctx, param.ty()); param_types.push(type_ref); } - let args = Box::new([GenericArg::Type(TypeRef::Tuple(param_types))]); + let args = Box::new([GenericArg::Type( + ctx.alloc_type_ref_desugared(TypeRef::Tuple(EmptyOptimizedThinVec::from_iter(param_types))), + )]); let bindings = if let Some(ret_type) = ret_type { let type_ref = TypeRef::from_ast_opt(ctx, ret_type.ty()); Box::new([AssociatedTypeBinding { @@ -280,7 +281,7 @@ fn lower_generic_args_from_fn_path( }]) } else { // -> () - let type_ref = TypeRef::Tuple(Vec::new()); + let type_ref = ctx.alloc_type_ref_desugared(TypeRef::unit()); Box::new([AssociatedTypeBinding { name: Name::new_symbol_root(sym::Output.clone()), args: None, diff --git a/src/tools/rust-analyzer/crates/hir-def/src/pretty.rs b/src/tools/rust-analyzer/crates/hir-def/src/pretty.rs index d5ef17a91fb2b..9ceb82d5fd6b6 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/pretty.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/pretty.rs @@ -1,9 +1,11 @@ //! Display and pretty printing routines. 
-use std::fmt::{self, Write}; +use std::{ + fmt::{self, Write}, + mem, +}; use hir_expand::mod_path::PathKind; -use intern::Interned; use itertools::Itertools; use span::Edition; @@ -11,12 +13,15 @@ use crate::{ db::DefDatabase, lang_item::LangItemTarget, path::{GenericArg, GenericArgs, Path}, - type_ref::{Mutability, TraitBoundModifier, TypeBound, TypeRef}, + type_ref::{ + Mutability, TraitBoundModifier, TypeBound, TypeRef, TypeRefId, TypesMap, UseArgRef, + }, }; pub(crate) fn print_path( db: &dyn DefDatabase, path: &Path, + map: &TypesMap, buf: &mut dyn Write, edition: Edition, ) -> fmt::Result { @@ -58,7 +63,7 @@ pub(crate) fn print_path( match path.type_anchor() { Some(anchor) => { write!(buf, "<")?; - print_type_ref(db, anchor, buf, edition)?; + print_type_ref(db, anchor, map, buf, edition)?; write!(buf, ">::")?; } None => match path.kind() { @@ -87,7 +92,7 @@ pub(crate) fn print_path( write!(buf, "{}", segment.name.display(db.upcast(), edition))?; if let Some(generics) = segment.args_and_bindings { write!(buf, "::<")?; - print_generic_args(db, generics, buf, edition)?; + print_generic_args(db, generics, map, buf, edition)?; write!(buf, ">")?; } @@ -99,6 +104,7 @@ pub(crate) fn print_path( pub(crate) fn print_generic_args( db: &dyn DefDatabase, generics: &GenericArgs, + map: &TypesMap, buf: &mut dyn Write, edition: Edition, ) -> fmt::Result { @@ -106,7 +112,7 @@ pub(crate) fn print_generic_args( let args = if generics.has_self_type { let (self_ty, args) = generics.args.split_first().unwrap(); write!(buf, "Self=")?; - print_generic_arg(db, self_ty, buf, edition)?; + print_generic_arg(db, self_ty, map, buf, edition)?; first = false; args } else { @@ -117,7 +123,7 @@ pub(crate) fn print_generic_args( write!(buf, ", ")?; } first = false; - print_generic_arg(db, arg, buf, edition)?; + print_generic_arg(db, arg, map, buf, edition)?; } for binding in generics.bindings.iter() { if !first { @@ -127,11 +133,11 @@ pub(crate) fn print_generic_args( write!(buf, "{}", binding.name.display(db.upcast(), edition))?; if !binding.bounds.is_empty() { write!(buf, ": ")?; - print_type_bounds(db, &binding.bounds, buf, edition)?; + print_type_bounds(db, &binding.bounds, map, buf, edition)?; } - if let Some(ty) = &binding.type_ref { + if let Some(ty) = binding.type_ref { write!(buf, " = ")?; - print_type_ref(db, ty, buf, edition)?; + print_type_ref(db, ty, map, buf, edition)?; } } Ok(()) @@ -140,11 +146,12 @@ pub(crate) fn print_generic_args( pub(crate) fn print_generic_arg( db: &dyn DefDatabase, arg: &GenericArg, + map: &TypesMap, buf: &mut dyn Write, edition: Edition, ) -> fmt::Result { match arg { - GenericArg::Type(ty) => print_type_ref(db, ty, buf, edition), + GenericArg::Type(ty) => print_type_ref(db, *ty, map, buf, edition), GenericArg::Const(c) => write!(buf, "{}", c.display(db.upcast(), edition)), GenericArg::Lifetime(lt) => write!(buf, "{}", lt.name.display(db.upcast(), edition)), } @@ -152,12 +159,13 @@ pub(crate) fn print_generic_arg( pub(crate) fn print_type_ref( db: &dyn DefDatabase, - type_ref: &TypeRef, + type_ref: TypeRefId, + map: &TypesMap, buf: &mut dyn Write, edition: Edition, ) -> fmt::Result { // FIXME: deduplicate with `HirDisplay` impl - match type_ref { + match &map[type_ref] { TypeRef::Never => write!(buf, "!")?, TypeRef::Placeholder => write!(buf, "_")?, TypeRef::Tuple(fields) => { @@ -166,48 +174,48 @@ pub(crate) fn print_type_ref( if i != 0 { write!(buf, ", ")?; } - print_type_ref(db, field, buf, edition)?; + print_type_ref(db, *field, map, buf, edition)?; } write!(buf, ")")?; } 
- TypeRef::Path(path) => print_path(db, path, buf, edition)?, + TypeRef::Path(path) => print_path(db, path, map, buf, edition)?, TypeRef::RawPtr(pointee, mtbl) => { let mtbl = match mtbl { Mutability::Shared => "*const", Mutability::Mut => "*mut", }; write!(buf, "{mtbl} ")?; - print_type_ref(db, pointee, buf, edition)?; + print_type_ref(db, *pointee, map, buf, edition)?; } - TypeRef::Reference(pointee, lt, mtbl) => { - let mtbl = match mtbl { + TypeRef::Reference(ref_) => { + let mtbl = match ref_.mutability { Mutability::Shared => "", Mutability::Mut => "mut ", }; write!(buf, "&")?; - if let Some(lt) = lt { + if let Some(lt) = &ref_.lifetime { write!(buf, "{} ", lt.name.display(db.upcast(), edition))?; } write!(buf, "{mtbl}")?; - print_type_ref(db, pointee, buf, edition)?; + print_type_ref(db, ref_.ty, map, buf, edition)?; } - TypeRef::Array(elem, len) => { + TypeRef::Array(array) => { write!(buf, "[")?; - print_type_ref(db, elem, buf, edition)?; - write!(buf, "; {}]", len.display(db.upcast(), edition))?; + print_type_ref(db, array.ty, map, buf, edition)?; + write!(buf, "; {}]", array.len.display(db.upcast(), edition))?; } TypeRef::Slice(elem) => { write!(buf, "[")?; - print_type_ref(db, elem, buf, edition)?; + print_type_ref(db, *elem, map, buf, edition)?; write!(buf, "]")?; } - TypeRef::Fn(args_and_ret, varargs, is_unsafe, abi) => { + TypeRef::Fn(fn_) => { let ((_, return_type), args) = - args_and_ret.split_last().expect("TypeRef::Fn is missing return type"); - if *is_unsafe { + fn_.params().split_last().expect("TypeRef::Fn is missing return type"); + if fn_.is_unsafe() { write!(buf, "unsafe ")?; } - if let Some(abi) = abi { + if let Some(abi) = fn_.abi() { buf.write_str("extern ")?; buf.write_str(abi.as_str())?; buf.write_char(' ')?; @@ -217,16 +225,16 @@ pub(crate) fn print_type_ref( if i != 0 { write!(buf, ", ")?; } - print_type_ref(db, typeref, buf, edition)?; + print_type_ref(db, *typeref, map, buf, edition)?; } - if *varargs { + if fn_.is_varargs() { if !args.is_empty() { write!(buf, ", ")?; } write!(buf, "...")?; } write!(buf, ") -> ")?; - print_type_ref(db, return_type, buf, edition)?; + print_type_ref(db, *return_type, map, buf, edition)?; } TypeRef::Macro(_ast_id) => { write!(buf, "")?; @@ -234,11 +242,11 @@ pub(crate) fn print_type_ref( TypeRef::Error => write!(buf, "{{unknown}}")?, TypeRef::ImplTrait(bounds) => { write!(buf, "impl ")?; - print_type_bounds(db, bounds, buf, edition)?; + print_type_bounds(db, bounds, map, buf, edition)?; } TypeRef::DynTrait(bounds) => { write!(buf, "dyn ")?; - print_type_bounds(db, bounds, buf, edition)?; + print_type_bounds(db, bounds, map, buf, edition)?; } } @@ -247,7 +255,8 @@ pub(crate) fn print_type_ref( pub(crate) fn print_type_bounds( db: &dyn DefDatabase, - bounds: &[Interned], + bounds: &[TypeBound], + map: &TypesMap, buf: &mut dyn Write, edition: Edition, ) -> fmt::Result { @@ -256,13 +265,13 @@ pub(crate) fn print_type_bounds( write!(buf, " + ")?; } - match bound.as_ref() { + match bound { TypeBound::Path(path, modifier) => { match modifier { TraitBoundModifier::None => (), TraitBoundModifier::Maybe => write!(buf, "?")?, } - print_path(db, path, buf, edition)?; + print_path(db, path, map, buf, edition)?; } TypeBound::ForLifetime(lifetimes, path) => { write!( @@ -270,9 +279,25 @@ pub(crate) fn print_type_bounds( "for<{}> ", lifetimes.iter().map(|it| it.display(db.upcast(), edition)).format(", ") )?; - print_path(db, path, buf, edition)?; + print_path(db, path, map, buf, edition)?; } TypeBound::Lifetime(lt) => write!(buf, "{}", 
lt.name.display(db.upcast(), edition))?, + TypeBound::Use(args) => { + write!(buf, "use<")?; + let mut first = true; + for arg in args { + if !mem::take(&mut first) { + write!(buf, ", ")?; + } + match arg { + UseArgRef::Name(it) => write!(buf, "{}", it.display(db.upcast(), edition))?, + UseArgRef::Lifetime(it) => { + write!(buf, "{}", it.name.display(db.upcast(), edition))? + } + } + } + write!(buf, ">")? + } TypeBound::Error => write!(buf, "{{unknown}}")?, } } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs b/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs index f0f2210ec2c51..26655e40ca791 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs @@ -3,14 +3,17 @@ use std::{fmt, iter, mem}; use base_db::CrateId; use hir_expand::{name::Name, MacroDefId}; -use intern::{sym, Interned}; +use intern::sym; use itertools::Itertools as _; use rustc_hash::FxHashSet; use smallvec::{smallvec, SmallVec}; use triomphe::Arc; use crate::{ - body::scope::{ExprScopes, ScopeId}, + body::{ + scope::{ExprScopes, ScopeId}, + HygieneId, + }, builtin_type::BuiltinType, data::ExternCrateDeclData, db::DefDatabase, @@ -21,7 +24,7 @@ use crate::{ nameres::{DefMap, MacroSubNs}, path::{ModPath, Path, PathKind}, per_ns::PerNs, - type_ref::LifetimeRef, + type_ref::{LifetimeRef, TypesMap}, visibility::{RawVisibility, Visibility}, AdtId, ConstId, ConstParamId, CrateRootModuleId, DefWithBodyId, EnumId, EnumVariantId, ExternBlockId, ExternCrateId, FunctionId, FxIndexMap, GenericDefId, GenericParamId, HasModule, @@ -73,13 +76,15 @@ enum Scope { /// All the items and imported names of a module BlockScope(ModuleItemMap), /// Brings the generic parameters of an item into scope - GenericParams { def: GenericDefId, params: Interned }, + GenericParams { def: GenericDefId, params: Arc }, /// Brings `Self` in `impl` block into scope ImplDefScope(ImplId), /// Brings `Self` in enum, struct and union definitions into scope AdtScope(AdtId), /// Local bindings ExprScope(ExprScope), + /// Macro definition inside bodies that affects all paths after it in the same block. + MacroDefScope(Box), } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] @@ -162,7 +167,8 @@ impl Resolver { path: &Path, ) -> Option<(TypeNs, Option, Option)> { let path = match path { - Path::Normal { mod_path, .. } => mod_path, + Path::BarePath(mod_path) => mod_path, + Path::Normal(it) => it.mod_path(), Path::LangItem(l, seg) => { let type_ns = match *l { LangItemTarget::Union(it) => TypeNs::AdtId(it.into()), @@ -188,7 +194,7 @@ impl Resolver { for scope in self.scopes() { match scope { - Scope::ExprScope(_) => continue, + Scope::ExprScope(_) | Scope::MacroDefScope(_) => continue, Scope::GenericParams { params, def } => { if let Some(id) = params.find_type_by_name(first_name, *def) { return Some((TypeNs::GenericParam(id), remaining_idx(), None)); @@ -257,9 +263,11 @@ impl Resolver { &self, db: &dyn DefDatabase, path: &Path, + mut hygiene_id: HygieneId, ) -> Option { let path = match path { - Path::Normal { mod_path, .. 
} => mod_path, + Path::BarePath(mod_path) => mod_path, + Path::Normal(it) => it.mod_path(), Path::LangItem(l, None) => { return Some(ResolveValueResult::ValueNs( match *l { @@ -300,14 +308,22 @@ impl Resolver { } if n_segments <= 1 { + let mut hygiene_info = if !hygiene_id.is_root() { + let ctx = db.lookup_intern_syntax_context(hygiene_id.0); + ctx.outer_expn.map(|expansion| { + let expansion = db.lookup_intern_macro_call(expansion); + (ctx.parent, expansion.def) + }) + } else { + None + }; for scope in self.scopes() { match scope { Scope::ExprScope(scope) => { - let entry = scope - .expr_scopes - .entries(scope.scope_id) - .iter() - .find(|entry| entry.name() == first_name); + let entry = + scope.expr_scopes.entries(scope.scope_id).iter().find(|entry| { + entry.name() == first_name && entry.hygiene() == hygiene_id + }); if let Some(e) = entry { return Some(ResolveValueResult::ValueNs( @@ -316,6 +332,21 @@ impl Resolver { )); } } + Scope::MacroDefScope(macro_id) => { + if let Some((parent_ctx, label_macro_id)) = hygiene_info { + if label_macro_id == **macro_id { + // A macro is allowed to refer to variables from before its declaration. + // Therefore, if we got to the rib of its declaration, give up its hygiene + // and use its parent expansion. + let parent_ctx = db.lookup_intern_syntax_context(parent_ctx); + hygiene_id = HygieneId::new(parent_ctx.opaque_and_semitransparent); + hygiene_info = parent_ctx.outer_expn.map(|expansion| { + let expansion = db.lookup_intern_macro_call(expansion); + (parent_ctx.parent, expansion.def) + }); + } + } + } Scope::GenericParams { params, def } => { if let Some(id) = params.find_const_by_name(first_name, *def) { let val = ValueNs::GenericParam(id); @@ -342,7 +373,7 @@ impl Resolver { } else { for scope in self.scopes() { match scope { - Scope::ExprScope(_) => continue, + Scope::ExprScope(_) | Scope::MacroDefScope(_) => continue, Scope::GenericParams { params, def } => { if let Some(id) = params.find_type_by_name(first_name, *def) { let ty = TypeNs::GenericParam(id); @@ -393,8 +424,9 @@ impl Resolver { &self, db: &dyn DefDatabase, path: &Path, + hygiene: HygieneId, ) -> Option { - match self.resolve_path_in_value_ns(db, path)? { + match self.resolve_path_in_value_ns(db, path, hygiene)? { ResolveValueResult::ValueNs(it, _) => Some(it), ResolveValueResult::Partial(..) => None, } @@ -590,13 +622,15 @@ impl Resolver { pub fn where_predicates_in_scope( &self, - ) -> impl Iterator { + ) -> impl Iterator { self.scopes() .filter_map(|scope| match scope { Scope::GenericParams { params, def } => Some((params, def)), _ => None, }) - .flat_map(|(params, def)| params.where_predicates().zip(iter::repeat(def))) + .flat_map(|(params, def)| { + params.where_predicates().zip(iter::repeat((def, ¶ms.types_map))) + }) } pub fn generic_def(&self) -> Option { @@ -606,13 +640,20 @@ impl Resolver { }) } - pub fn generic_params(&self) -> Option<&Interned> { + pub fn generic_params(&self) -> Option<&Arc> { self.scopes().find_map(|scope| match scope { Scope::GenericParams { params, .. 
} => Some(params), _ => None, }) } + pub fn all_generic_params(&self) -> impl Iterator { + self.scopes().filter_map(|scope| match scope { + Scope::GenericParams { params, def } => Some((&**params, def)), + _ => None, + }) + } + pub fn body_owner(&self) -> Option { self.scopes().find_map(|scope| match scope { Scope::ExprScope(it) => Some(it.owner), @@ -622,7 +663,7 @@ impl Resolver { pub fn type_owner(&self) -> Option { self.scopes().find_map(|scope| match scope { - Scope::BlockScope(_) => None, + Scope::BlockScope(_) | Scope::MacroDefScope(_) => None, &Scope::GenericParams { def, .. } => Some(def.into()), &Scope::ImplDefScope(id) => Some(id.into()), &Scope::AdtScope(adt) => Some(adt.into()), @@ -653,6 +694,9 @@ impl Resolver { expr_scopes: &Arc, scope_id: ScopeId, ) { + if let Some(macro_id) = expr_scopes.macro_def(scope_id) { + resolver.scopes.push(Scope::MacroDefScope(macro_id.clone())); + } resolver.scopes.push(Scope::ExprScope(ExprScope { owner, expr_scopes: expr_scopes.clone(), @@ -670,7 +714,7 @@ impl Resolver { } let start = self.scopes.len(); - let innermost_scope = self.scopes().next(); + let innermost_scope = self.scopes().find(|scope| !matches!(scope, Scope::MacroDefScope(_))); match innermost_scope { Some(&Scope::ExprScope(ExprScope { scope_id, ref expr_scopes, owner })) => { let expr_scopes = expr_scopes.clone(); @@ -794,6 +838,7 @@ impl Scope { acc.add_local(e.name(), e.binding()); }); } + Scope::MacroDefScope(_) => {} } } } @@ -833,6 +878,9 @@ fn resolver_for_scope_( // already traverses all parents, so this is O(n²). I think we could only store the // innermost module scope instead? } + if let Some(macro_id) = scopes.macro_def(scope) { + r = r.push_scope(Scope::MacroDefScope(macro_id.clone())); + } r = r.push_expr_scope(owner, Arc::clone(&scopes), scope); } @@ -1006,12 +1054,12 @@ impl HasResolver for ModuleId { fn resolver(self, db: &dyn DefDatabase) -> Resolver { let mut def_map = self.def_map(db); let mut module_id = self.local_id; - let mut modules: SmallVec<[_; 1]> = smallvec![]; if !self.is_block_module() { return Resolver { scopes: vec![], module_scope: ModuleItemMap { def_map, module_id } }; } + let mut modules: SmallVec<[_; 1]> = smallvec![]; while let Some(parent) = def_map.parent() { let block_def_map = mem::replace(&mut def_map, parent.def_map(db)); modules.push(block_def_map); diff --git a/src/tools/rust-analyzer/crates/hir-def/src/test_db.rs b/src/tools/rust-analyzer/crates/hir-def/src/test_db.rs index 4db21eb46bd58..0c36c88fb0931 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/test_db.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/test_db.rs @@ -198,7 +198,10 @@ impl TestDB { .filter_map(|node| { let block = ast::BlockExpr::cast(node)?; let expr = ast::Expr::from(block); - let expr_id = source_map.node_expr(InFile::new(position.file_id.into(), &expr))?; + let expr_id = source_map + .node_expr(InFile::new(position.file_id.into(), &expr))? 
+ .as_expr() + .unwrap(); let scope = scopes.scope_for(expr_id).unwrap(); Some(scope) }); diff --git a/src/tools/rust-analyzer/crates/hir-def/src/visibility.rs b/src/tools/rust-analyzer/crates/hir-def/src/visibility.rs index 3aeb88047a0de..4edb683592253 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/visibility.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/visibility.rs @@ -191,6 +191,11 @@ impl Visibility { return None; } + let def_block = def_map.block_id(); + if (mod_a.containing_block(), mod_b.containing_block()) != (def_block, def_block) { + return None; + } + let mut a_ancestors = iter::successors(Some(mod_a.local_id), |&m| def_map[m].parent); let mut b_ancestors = @@ -210,6 +215,43 @@ impl Visibility { } } } + + /// Returns the least permissive visibility of `self` and `other`. + /// + /// If there is no subset relation between `self` and `other`, returns `None` (ie. they're only + /// visible in unrelated modules). + pub(crate) fn min(self, other: Visibility, def_map: &DefMap) -> Option { + match (self, other) { + (vis, Visibility::Public) | (Visibility::Public, vis) => Some(vis), + (Visibility::Module(mod_a, expl_a), Visibility::Module(mod_b, expl_b)) => { + if mod_a.krate != mod_b.krate { + return None; + } + + let def_block = def_map.block_id(); + if (mod_a.containing_block(), mod_b.containing_block()) != (def_block, def_block) { + return None; + } + + let mut a_ancestors = + iter::successors(Some(mod_a.local_id), |&m| def_map[m].parent); + let mut b_ancestors = + iter::successors(Some(mod_b.local_id), |&m| def_map[m].parent); + + if a_ancestors.any(|m| m == mod_b.local_id) { + // B is above A + return Some(Visibility::Module(mod_a, expl_b)); + } + + if b_ancestors.any(|m| m == mod_a.local_id) { + // A is above B + return Some(Visibility::Module(mod_b, expl_a)); + } + + None + } + } + } } /// Whether the item was imported through an explicit `pub(crate) use` or just a `use` without diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs b/src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs index 79cfeb4cf1844..12df3cf218828 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs @@ -26,6 +26,7 @@ use crate::{ /// Syntactical attributes, without filtering of `cfg_attr`s. #[derive(Default, Debug, Clone, PartialEq, Eq)] pub struct RawAttrs { + // FIXME: This can become `Box<[Attr]>` if https://internals.rust-lang.org/t/layout-of-dst-box/21728?u=chrefr is accepted. 
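// (Editorial aside on the `Visibility::min` helper added in visibility.rs above, not
// part of the patch: when one module is an ancestor of the other, the deeper, less
// permissive module wins, e.g. min(pub, pub(in a::b)) is pub(in a::b) and
// min(pub(in a), pub(in a::b)) is pub(in a::b); for modules with no ancestor relation,
// such as siblings, there is no subset relation and the result is None.)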
entries: Option>, } @@ -169,6 +170,10 @@ impl RawAttrs { }; RawAttrs { entries } } + + pub fn is_empty(&self) -> bool { + self.entries.is_none() + } } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs b/src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs index a81911891577c..f48de807c28c7 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs @@ -227,7 +227,7 @@ pub(crate) fn dump_syntax_contexts(db: &dyn ExpandDatabase) -> String { &'a SyntaxContextData, ); - impl<'a> std::fmt::Debug for SyntaxContextDebug<'a> { + impl std::fmt::Debug for SyntaxContextDebug<'_> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { fancy_debug(self.2, self.1, self.0, f) } diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs b/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs index 5d5f72490d0cb..7d2f556406d49 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs @@ -165,40 +165,73 @@ pub enum ExpandErrorKind { } impl ExpandError { - pub fn render_to_string(&self, db: &dyn ExpandDatabase) -> (String, bool) { + pub fn render_to_string(&self, db: &dyn ExpandDatabase) -> RenderedExpandError { self.inner.0.render_to_string(db) } } +pub struct RenderedExpandError { + pub message: String, + pub error: bool, + pub kind: &'static str, +} + +impl RenderedExpandError { + const GENERAL_KIND: &str = "macro-error"; +} + impl ExpandErrorKind { - pub fn render_to_string(&self, db: &dyn ExpandDatabase) -> (String, bool) { + pub fn render_to_string(&self, db: &dyn ExpandDatabase) -> RenderedExpandError { match self { - ExpandErrorKind::ProcMacroAttrExpansionDisabled => { - ("procedural attribute macro expansion is disabled".to_owned(), false) - } - ExpandErrorKind::MacroDisabled => { - ("proc-macro is explicitly disabled".to_owned(), false) - } + ExpandErrorKind::ProcMacroAttrExpansionDisabled => RenderedExpandError { + message: "procedural attribute macro expansion is disabled".to_owned(), + error: false, + kind: "proc-macros-disabled", + }, + ExpandErrorKind::MacroDisabled => RenderedExpandError { + message: "proc-macro is explicitly disabled".to_owned(), + error: false, + kind: "proc-macro-disabled", + }, &ExpandErrorKind::MissingProcMacroExpander(def_crate) => { match db.proc_macros().get_error_for_crate(def_crate) { - Some((e, hard_err)) => (e.to_owned(), hard_err), - None => ( - format!( - "internal error: proc-macro map is missing error entry for crate {def_crate:?}" - ), - true, - ), + Some((e, hard_err)) => RenderedExpandError { + message: e.to_owned(), + error: hard_err, + kind: RenderedExpandError::GENERAL_KIND, + }, + None => RenderedExpandError { + message: format!("internal error: proc-macro map is missing error entry for crate {def_crate:?}"), + error: true, + kind: RenderedExpandError::GENERAL_KIND, + }, } } - ExpandErrorKind::MacroDefinition => { - ("macro definition has parse errors".to_owned(), true) - } - ExpandErrorKind::Mbe(e) => (e.to_string(), true), - ExpandErrorKind::RecursionOverflow => { - ("overflow expanding the original macro".to_owned(), true) - } - ExpandErrorKind::Other(e) => ((**e).to_owned(), true), - ExpandErrorKind::ProcMacroPanic(e) => (format!("proc-macro panicked: {e}"), true), + ExpandErrorKind::MacroDefinition => RenderedExpandError { + message: "macro definition has parse errors".to_owned(), + error: true, + kind: 
RenderedExpandError::GENERAL_KIND, + }, + ExpandErrorKind::Mbe(e) => RenderedExpandError { + message: e.to_string(), + error: true, + kind: RenderedExpandError::GENERAL_KIND, + }, + ExpandErrorKind::RecursionOverflow => RenderedExpandError { + message: "overflow expanding the original macro".to_owned(), + error: true, + kind: RenderedExpandError::GENERAL_KIND, + }, + ExpandErrorKind::Other(e) => RenderedExpandError { + message: (**e).to_owned(), + error: true, + kind: RenderedExpandError::GENERAL_KIND, + }, + ExpandErrorKind::ProcMacroPanic(e) => RenderedExpandError { + message: format!("proc-macro panicked: {e}"), + error: true, + kind: RenderedExpandError::GENERAL_KIND, + }, } } } diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/name.rs b/src/tools/rust-analyzer/crates/hir-expand/src/name.rs index 54313904a7ecd..267d54583338d 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/name.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/name.rs @@ -18,6 +18,8 @@ use syntax::utils::is_raw_identifier; #[derive(Clone, PartialEq, Eq, Hash)] pub struct Name { symbol: Symbol, + // If you are making this carry actual hygiene, beware that the special handling for variables and labels + // in bodies can go. ctx: (), } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs b/src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs index 7a3846df40eef..2b5342314a65a 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs @@ -114,7 +114,7 @@ impl<'table, 'db> Autoderef<'table, 'db, usize> { } #[allow(private_bounds)] -impl<'table, 'db, T: TrackAutoderefSteps> Autoderef<'table, 'db, T> { +impl Autoderef<'_, '_, T> { pub(crate) fn step_count(&self) -> usize { self.steps.len() } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs index f7bacbd49b335..4bc78afacc093 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs @@ -521,7 +521,7 @@ impl chalk_solve::RustIrDatabase for ChalkContext<'_> { } } -impl<'a> ChalkContext<'a> { +impl ChalkContext<'_> { fn edition(&self) -> Edition { self.db.crate_graph()[self.krate].edition } @@ -615,8 +615,9 @@ pub(crate) fn associated_ty_data_query( let type_alias_data = db.type_alias_data(type_alias); let generic_params = generics(db.upcast(), type_alias.into()); let resolver = hir_def::resolver::HasResolver::resolver(type_alias, db.upcast()); - let ctx = crate::TyLoweringContext::new(db, &resolver, type_alias.into()) - .with_type_param_mode(crate::lower::ParamLoweringMode::Variable); + let ctx = + crate::TyLoweringContext::new(db, &resolver, &type_alias_data.types_map, type_alias.into()) + .with_type_param_mode(crate::lower::ParamLoweringMode::Variable); let trait_subst = TyBuilder::subst_for_def(db, trait_, None) .fill_with_bound_vars(crate::DebruijnIndex::INNERMOST, generic_params.len_self()) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs b/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs index e41058aac2a95..091cfcd4654c1 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs @@ -3,7 +3,7 @@ use base_db::{ra_salsa::Cycle, CrateId}; use chalk_ir::{cast::Cast, BoundVar, DebruijnIndex}; use hir_def::{ - body::Body, + body::{Body, HygieneId}, hir::{Expr, ExprId}, path::Path, resolver::{Resolver, ValueNs}, @@ -11,7 +11,7 @@ use 
hir_def::{ ConstBlockLoc, EnumVariantId, GeneralConstId, StaticId, }; use hir_expand::Lookup; -use stdx::{never, IsNoneOr}; +use stdx::never; use triomphe::Arc; use crate::{ @@ -80,7 +80,7 @@ pub(crate) fn path_to_const<'g>( debruijn: DebruijnIndex, expected_ty: Ty, ) -> Option { - match resolver.resolve_path_in_value_ns_fully(db.upcast(), path) { + match resolver.resolve_path_in_value_ns_fully(db.upcast(), path, HygieneId::ROOT) { Some(ValueNs::GenericParam(p)) => { let ty = db.const_param_ty(p); let value = match mode { @@ -287,7 +287,7 @@ pub(crate) fn const_eval_discriminant_variant( } let repr = db.enum_data(loc.parent).repr; - let is_signed = IsNoneOr::is_none_or(repr.and_then(|repr| repr.int), |int| int.is_signed()); + let is_signed = repr.and_then(|repr| repr.int).is_none_or(|int| int.is_signed()); let mir_body = db.monomorphized_mir_body( def, @@ -319,7 +319,7 @@ pub(crate) fn eval_to_const( return true; } let mut r = false; - body[expr].walk_child_exprs(|idx| r |= has_closure(body, idx)); + body.walk_child_exprs(expr, |idx| r |= has_closure(body, idx)); r } if has_closure(ctx.body, expr) { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs index 7f6b7e392b308..c9ab0acc0849e 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs @@ -309,7 +309,7 @@ impl<'a> DeclValidator<'a> { /// Check incorrect names for struct fields. fn validate_struct_fields(&mut self, struct_id: StructId) { let data = self.db.struct_data(struct_id); - let VariantData::Record(fields) = data.variant_data.as_ref() else { + let VariantData::Record { fields, .. } = data.variant_data.as_ref() else { return; }; let edition = self.edition(struct_id); @@ -469,7 +469,7 @@ impl<'a> DeclValidator<'a> { /// Check incorrect names for fields of enum variant. fn validate_enum_variant_fields(&mut self, variant_id: EnumVariantId) { let variant_data = self.db.enum_variant_data(variant_id); - let VariantData::Record(fields) = variant_data.variant_data.as_ref() else { + let VariantData::Record { fields, .. } = variant_data.variant_data.as_ref() else { return; }; let edition = self.edition(variant_id); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs index f8b5c7d0ce2c8..92404e3a10e21 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs @@ -289,10 +289,12 @@ impl ExprValidator { match &self.body[scrutinee_expr] { Expr::UnaryOp { op: UnaryOp::Deref, .. } => false, Expr::Path(path) => { - let value_or_partial = self - .owner - .resolver(db.upcast()) - .resolve_path_in_value_ns_fully(db.upcast(), path); + let value_or_partial = + self.owner.resolver(db.upcast()).resolve_path_in_value_ns_fully( + db.upcast(), + path, + self.body.expr_path_hygiene(scrutinee_expr), + ); value_or_partial.map_or(true, |v| !matches!(v, ValueNs::StaticId(_))) } Expr::Field { expr, .. } => match self.infer.type_of_expr[*expr].kind(Interner) { @@ -546,10 +548,7 @@ pub fn record_literal_missing_fields( expr: &Expr, ) -> Option<(VariantId, Vec, /*exhaustive*/ bool)> { let (fields, exhaustive) = match expr { - Expr::RecordLit { fields, spread, ellipsis, is_assignee_expr, .. 
} => { - let exhaustive = if *is_assignee_expr { !*ellipsis } else { spread.is_none() }; - (fields, exhaustive) - } + Expr::RecordLit { fields, spread, .. } => (fields, spread.is_none()), _ => return None, }; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs index 4bc07bc9ec8fe..c5d8c9566155d 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs @@ -341,7 +341,7 @@ impl HirDisplay for Pat { }; let variant_data = variant.variant_data(f.db.upcast()); - if let VariantData::Record(rec_fields) = &*variant_data { + if let VariantData::Record { fields: rec_fields, .. } = &*variant_data { write!(f, " {{ ")?; let mut printed = 0; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs index 1066a28c3ff88..58de19ba81eeb 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs @@ -519,7 +519,7 @@ impl<'db> PatCx for MatchCheckCtx<'db> { } } -impl<'db> fmt::Debug for MatchCheckCtx<'db> { +impl fmt::Debug for MatchCheckCtx<'_> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("MatchCheckCtx").finish() } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs index bcfc37c86711e..c7f7fb7ad3d35 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs @@ -3,7 +3,7 @@ use hir_def::{ body::Body, - hir::{Expr, ExprId, UnaryOp}, + hir::{Expr, ExprId, ExprOrPatId, Pat, UnaryOp}, resolver::{resolver_for_expr, ResolveValueResult, Resolver, ValueNs}, type_ref::Rawness, DefWithBodyId, @@ -16,7 +16,7 @@ use crate::{ /// Returns `(unsafe_exprs, fn_is_unsafe)`. /// /// If `fn_is_unsafe` is false, `unsafe_exprs` are hard errors. If true, they're `unsafe_op_in_unsafe_fn`. 
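Several hunks above rewrite `VariantData::Record(..)`/`VariantData::Tuple(..)` matches to the struct-variant form `VariantData::Record { fields, .. }`. The `..` rest pattern is what lets these call sites keep compiling once a variant gains extra payload; a tiny illustration in plain Rust (the `VariantShape`/`extra` names are placeholders, not the real definitions):

    enum VariantShape {
        // Formerly a tuple variant `Record(Vec<String>)`; now a struct variant
        // that can carry additional data alongside the field list.
        Record { fields: Vec<String>, extra: u32 },
        Unit,
    }

    fn field_count(v: &VariantShape) -> usize {
        match v {
            // `..` ignores `extra`, so the match keeps working if more fields appear.
            VariantShape::Record { fields, .. } => fields.len(),
            VariantShape::Unit => 0,
        }
    }

    fn main() {
        let record = VariantShape::Record { fields: vec!["x".into(), "y".into()], extra: 0 };
        assert_eq!(field_count(&record), 2);
        assert_eq!(field_count(&VariantShape::Unit), 0);
    }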
-pub fn missing_unsafe(db: &dyn HirDatabase, def: DefWithBodyId) -> (Vec, bool) { +pub fn missing_unsafe(db: &dyn HirDatabase, def: DefWithBodyId) -> (Vec, bool) { let _p = tracing::info_span!("missing_unsafe").entered(); let mut res = Vec::new(); @@ -32,7 +32,7 @@ pub fn missing_unsafe(db: &dyn HirDatabase, def: DefWithBodyId) -> (Vec, let infer = db.infer(def); unsafe_expressions(db, &infer, def, &body, body.body_expr, &mut |expr| { if !expr.inside_unsafe_block { - res.push(expr.expr); + res.push(expr.node); } }); @@ -40,7 +40,7 @@ pub fn missing_unsafe(db: &dyn HirDatabase, def: DefWithBodyId) -> (Vec, } pub struct UnsafeExpr { - pub expr: ExprId, + pub node: ExprOrPatId, pub inside_unsafe_block: bool, } @@ -75,26 +75,29 @@ fn walk_unsafe( inside_unsafe_block: bool, unsafe_expr_cb: &mut dyn FnMut(UnsafeExpr), ) { + let mut mark_unsafe_path = |path, node| { + let g = resolver.update_to_inner_scope(db.upcast(), def, current); + let hygiene = body.expr_or_pat_path_hygiene(node); + let value_or_partial = resolver.resolve_path_in_value_ns(db.upcast(), path, hygiene); + if let Some(ResolveValueResult::ValueNs(ValueNs::StaticId(id), _)) = value_or_partial { + let static_data = db.static_data(id); + if static_data.mutable || (static_data.is_extern && !static_data.has_safe_kw) { + unsafe_expr_cb(UnsafeExpr { node, inside_unsafe_block }); + } + } + resolver.reset_to_guard(g); + }; + let expr = &body.exprs[current]; match expr { &Expr::Call { callee, .. } => { if let Some(func) = infer[callee].as_fn_def(db) { if is_fn_unsafe_to_call(db, func) { - unsafe_expr_cb(UnsafeExpr { expr: current, inside_unsafe_block }); - } - } - } - Expr::Path(path) => { - let g = resolver.update_to_inner_scope(db.upcast(), def, current); - let value_or_partial = resolver.resolve_path_in_value_ns(db.upcast(), path); - if let Some(ResolveValueResult::ValueNs(ValueNs::StaticId(id), _)) = value_or_partial { - let static_data = db.static_data(id); - if static_data.mutable || (static_data.is_extern && !static_data.has_safe_kw) { - unsafe_expr_cb(UnsafeExpr { expr: current, inside_unsafe_block }); + unsafe_expr_cb(UnsafeExpr { node: current.into(), inside_unsafe_block }); } } - resolver.reset_to_guard(g); } + Expr::Path(path) => mark_unsafe_path(path, current.into()), Expr::Ref { expr, rawness: Rawness::RawPtr, mutability: _ } => { if let Expr::Path(_) = body.exprs[*expr] { // Do not report unsafe for `addr_of[_mut]!(EXTERN_OR_MUT_STATIC)`, @@ -108,23 +111,30 @@ fn walk_unsafe( .map(|(func, _)| is_fn_unsafe_to_call(db, func)) .unwrap_or(false) { - unsafe_expr_cb(UnsafeExpr { expr: current, inside_unsafe_block }); + unsafe_expr_cb(UnsafeExpr { node: current.into(), inside_unsafe_block }); } } Expr::UnaryOp { expr, op: UnaryOp::Deref } => { if let TyKind::Raw(..) = &infer[*expr].kind(Interner) { - unsafe_expr_cb(UnsafeExpr { expr: current, inside_unsafe_block }); + unsafe_expr_cb(UnsafeExpr { node: current.into(), inside_unsafe_block }); } } Expr::Unsafe { .. 
} => { - return expr.walk_child_exprs(|child| { + return body.walk_child_exprs(current, |child| { walk_unsafe(db, infer, body, resolver, def, child, true, unsafe_expr_cb); }); } + &Expr::Assignment { target, value: _ } => { + body.walk_pats(target, &mut |pat| { + if let Pat::Path(path) = &body[pat] { + mark_unsafe_path(path, pat.into()); + } + }); + } _ => {} } - expr.walk_child_exprs(|child| { + body.walk_child_exprs(current, |child| { walk_unsafe(db, infer, body, resolver, def, child, inside_unsafe_block, unsafe_expr_cb); }); } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/display.rs b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs index 10f5bcdad8604..277dabe9aa34a 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/display.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs @@ -19,7 +19,9 @@ use hir_def::{ lang_item::{LangItem, LangItemTarget}, nameres::DefMap, path::{Path, PathKind}, - type_ref::{TraitBoundModifier, TypeBound, TypeRef}, + type_ref::{ + TraitBoundModifier, TypeBound, TypeRef, TypeRefId, TypesMap, TypesSourceMap, UseArgRef, + }, visibility::Visibility, GenericDefId, HasModule, ImportPathConfig, ItemContainerId, LocalFieldId, Lookup, ModuleDefId, ModuleId, TraitId, @@ -806,7 +808,7 @@ fn render_variant_after_name( memory_map: &MemoryMap, ) -> Result<(), HirDisplayError> { match data { - VariantData::Record(fields) | VariantData::Tuple(fields) => { + VariantData::Record { fields, .. } | VariantData::Tuple { fields, .. } => { let render_field = |f: &mut HirFormatter<'_>, id: LocalFieldId| { let offset = layout.fields.offset(u32::from(id.into_raw()) as usize).bytes_usize(); let ty = field_types[id].clone().substitute(Interner, subst); @@ -817,7 +819,7 @@ fn render_variant_after_name( render_const_scalar(f, &b[offset..offset + size], memory_map, &ty) }; let mut it = fields.iter(); - if matches!(data, VariantData::Record(_)) { + if matches!(data, VariantData::Record { .. 
}) { write!(f, " {{")?; if let Some((id, data)) = it.next() { write!(f, " {}: ", data.name.display(f.db.upcast(), f.edition()))?; @@ -1897,100 +1899,150 @@ pub fn write_visibility( } } -impl HirDisplay for TypeRef { +pub trait HirDisplayWithTypesMap { + fn hir_fmt( + &self, + f: &mut HirFormatter<'_>, + types_map: &TypesMap, + ) -> Result<(), HirDisplayError>; +} + +impl HirDisplayWithTypesMap for &'_ T { + fn hir_fmt( + &self, + f: &mut HirFormatter<'_>, + types_map: &TypesMap, + ) -> Result<(), HirDisplayError> { + T::hir_fmt(&**self, f, types_map) + } +} + +pub fn hir_display_with_types_map<'a, T: HirDisplayWithTypesMap + 'a>( + value: T, + types_map: &'a TypesMap, +) -> impl HirDisplay + 'a { + TypesMapAdapter(value, types_map) +} + +struct TypesMapAdapter<'a, T>(T, &'a TypesMap); + +impl<'a, T> TypesMapAdapter<'a, T> { + fn wrap(types_map: &'a TypesMap) -> impl Fn(T) -> TypesMapAdapter<'a, T> { + move |value| TypesMapAdapter(value, types_map) + } +} + +impl HirDisplay for TypesMapAdapter<'_, T> { fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { - match self { + T::hir_fmt(&self.0, f, self.1) + } +} + +impl HirDisplayWithTypesMap for TypeRefId { + fn hir_fmt( + &self, + f: &mut HirFormatter<'_>, + types_map: &TypesMap, + ) -> Result<(), HirDisplayError> { + match &types_map[*self] { TypeRef::Never => write!(f, "!")?, TypeRef::Placeholder => write!(f, "_")?, TypeRef::Tuple(elems) => { write!(f, "(")?; - f.write_joined(elems, ", ")?; + f.write_joined(elems.iter().map(TypesMapAdapter::wrap(types_map)), ", ")?; if elems.len() == 1 { write!(f, ",")?; } write!(f, ")")?; } - TypeRef::Path(path) => path.hir_fmt(f)?, + TypeRef::Path(path) => path.hir_fmt(f, types_map)?, TypeRef::RawPtr(inner, mutability) => { let mutability = match mutability { hir_def::type_ref::Mutability::Shared => "*const ", hir_def::type_ref::Mutability::Mut => "*mut ", }; write!(f, "{mutability}")?; - inner.hir_fmt(f)?; + inner.hir_fmt(f, types_map)?; } - TypeRef::Reference(inner, lifetime, mutability) => { - let mutability = match mutability { + TypeRef::Reference(ref_) => { + let mutability = match ref_.mutability { hir_def::type_ref::Mutability::Shared => "", hir_def::type_ref::Mutability::Mut => "mut ", }; write!(f, "&")?; - if let Some(lifetime) = lifetime { + if let Some(lifetime) = &ref_.lifetime { write!(f, "{} ", lifetime.name.display(f.db.upcast(), f.edition()))?; } write!(f, "{mutability}")?; - inner.hir_fmt(f)?; + ref_.ty.hir_fmt(f, types_map)?; } - TypeRef::Array(inner, len) => { + TypeRef::Array(array) => { write!(f, "[")?; - inner.hir_fmt(f)?; - write!(f, "; {}]", len.display(f.db.upcast(), f.edition()))?; + array.ty.hir_fmt(f, types_map)?; + write!(f, "; {}]", array.len.display(f.db.upcast(), f.edition()))?; } TypeRef::Slice(inner) => { write!(f, "[")?; - inner.hir_fmt(f)?; + inner.hir_fmt(f, types_map)?; write!(f, "]")?; } - &TypeRef::Fn(ref parameters, is_varargs, is_unsafe, ref abi) => { - if is_unsafe { + TypeRef::Fn(fn_) => { + if fn_.is_unsafe() { write!(f, "unsafe ")?; } - if let Some(abi) = abi { + if let Some(abi) = fn_.abi() { f.write_str("extern \"")?; f.write_str(abi.as_str())?; f.write_str("\" ")?; } write!(f, "fn(")?; - if let Some(((_, return_type), function_parameters)) = parameters.split_last() { + if let Some(((_, return_type), function_parameters)) = fn_.params().split_last() { for index in 0..function_parameters.len() { let (param_name, param_type) = &function_parameters[index]; if let Some(name) = param_name { write!(f, "{}: ", name.display(f.db.upcast(), 
f.edition()))?; } - param_type.hir_fmt(f)?; + param_type.hir_fmt(f, types_map)?; if index != function_parameters.len() - 1 { write!(f, ", ")?; } } - if is_varargs { - write!(f, "{}...", if parameters.len() == 1 { "" } else { ", " })?; + if fn_.is_varargs() { + write!(f, "{}...", if fn_.params().len() == 1 { "" } else { ", " })?; } write!(f, ")")?; - match &return_type { + match &types_map[*return_type] { TypeRef::Tuple(tup) if tup.is_empty() => {} _ => { write!(f, " -> ")?; - return_type.hir_fmt(f)?; + return_type.hir_fmt(f, types_map)?; } } } } TypeRef::ImplTrait(bounds) => { write!(f, "impl ")?; - f.write_joined(bounds, " + ")?; + f.write_joined(bounds.iter().map(TypesMapAdapter::wrap(types_map)), " + ")?; } TypeRef::DynTrait(bounds) => { write!(f, "dyn ")?; - f.write_joined(bounds, " + ")?; + f.write_joined(bounds.iter().map(TypesMapAdapter::wrap(types_map)), " + ")?; } TypeRef::Macro(macro_call) => { - let ctx = hir_def::lower::LowerCtx::new(f.db.upcast(), macro_call.file_id); + let (mut types_map, mut types_source_map) = + (TypesMap::default(), TypesSourceMap::default()); + let ctx = hir_def::lower::LowerCtx::new( + f.db.upcast(), + macro_call.file_id, + &mut types_map, + &mut types_source_map, + ); let macro_call = macro_call.to_node(f.db.upcast()); match macro_call.path() { Some(path) => match Path::from_src(&ctx, path) { - Some(path) => path.hir_fmt(f)?, + Some(path) => path.hir_fmt(f, &types_map)?, None => write!(f, "{{macro}}")?, }, None => write!(f, "{{macro}}")?, @@ -2003,15 +2055,19 @@ impl HirDisplay for TypeRef { } } -impl HirDisplay for TypeBound { - fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { +impl HirDisplayWithTypesMap for TypeBound { + fn hir_fmt( + &self, + f: &mut HirFormatter<'_>, + types_map: &TypesMap, + ) -> Result<(), HirDisplayError> { match self { TypeBound::Path(path, modifier) => { match modifier { TraitBoundModifier::None => (), TraitBoundModifier::Maybe => write!(f, "?")?, } - path.hir_fmt(f) + path.hir_fmt(f, types_map) } TypeBound::Lifetime(lifetime) => { write!(f, "{}", lifetime.name.display(f.db.upcast(), f.edition())) @@ -2023,19 +2079,36 @@ impl HirDisplay for TypeBound { "for<{}> ", lifetimes.iter().map(|it| it.display(f.db.upcast(), edition)).format(", ") )?; - path.hir_fmt(f) + path.hir_fmt(f, types_map) + } + TypeBound::Use(args) => { + let edition = f.edition(); + write!( + f, + "use<{}> ", + args.iter() + .map(|it| match it { + UseArgRef::Lifetime(lt) => lt.name.display(f.db.upcast(), edition), + UseArgRef::Name(n) => n.display(f.db.upcast(), edition), + }) + .format(", ") + ) } TypeBound::Error => write!(f, "{{error}}"), } } } -impl HirDisplay for Path { - fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { +impl HirDisplayWithTypesMap for Path { + fn hir_fmt( + &self, + f: &mut HirFormatter<'_>, + types_map: &TypesMap, + ) -> Result<(), HirDisplayError> { match (self.type_anchor(), self.kind()) { (Some(anchor), _) => { write!(f, "<")?; - anchor.hir_fmt(f)?; + anchor.hir_fmt(f, types_map)?; write!(f, ">")?; } (_, PathKind::Plain) => {} @@ -2078,7 +2151,7 @@ impl HirDisplay for Path { }); if let Some(ty) = trait_self_ty { write!(f, "<")?; - ty.hir_fmt(f)?; + ty.hir_fmt(f, types_map)?; write!(f, " as ")?; // Now format the path of the trait... } @@ -2094,21 +2167,26 @@ impl HirDisplay for Path { if generic_args.desugared_from_fn { // First argument will be a tuple, which already includes the parentheses. // If the tuple only contains 1 item, write it manually to avoid the trailing `,`. 
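`HirDisplayWithTypesMap` and `TypesMapAdapter`, introduced in `display.rs` above, follow a common pattern: once types are stored as IDs into a side table (`TypesMap`), everything that formats them must carry that table along, and a small adapter struct lets "ID + table" be used wherever an ordinary displayable value is expected. A stripped-down sketch of the pattern with `std::fmt::Display` standing in for `HirDisplay` (the `TypeStore`, `TypeNode` and `WithStore` names are invented for the example):

    use std::fmt;

    /// Side table that owns the type nodes; elsewhere only `TypeId`s are stored.
    struct TypeStore {
        nodes: Vec<TypeNode>,
    }

    #[derive(Clone, Copy)]
    struct TypeId(usize);

    enum TypeNode {
        Unit,
        Slice(TypeId),
        Ref(TypeId),
    }

    /// Formatting an ID needs the store it points into.
    trait DisplayWithStore {
        fn fmt_with(&self, store: &TypeStore, f: &mut fmt::Formatter<'_>) -> fmt::Result;
    }

    impl DisplayWithStore for TypeId {
        fn fmt_with(&self, store: &TypeStore, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            match store.nodes[self.0] {
                TypeNode::Unit => write!(f, "()"),
                TypeNode::Slice(inner) => {
                    write!(f, "[")?;
                    inner.fmt_with(store, f)?;
                    write!(f, "]")
                }
                TypeNode::Ref(inner) => {
                    write!(f, "&")?;
                    inner.fmt_with(store, f)
                }
            }
        }
    }

    /// Adapter pairing a value with its store so it can be used as a plain `Display`.
    struct WithStore<'a, T>(T, &'a TypeStore);

    impl<T: DisplayWithStore> fmt::Display for WithStore<'_, T> {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            self.0.fmt_with(self.1, f)
        }
    }

    fn main() {
        // The type `&[()]`, encoded as IDs into the store.
        let store = TypeStore {
            nodes: vec![TypeNode::Unit, TypeNode::Slice(TypeId(0)), TypeNode::Ref(TypeId(1))],
        };
        assert_eq!(WithStore(TypeId(2), &store).to_string(), "&[()]");
    }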
- if let hir_def::path::GenericArg::Type(TypeRef::Tuple(v)) = - &generic_args.args[0] - { + let tuple = match generic_args.args[0] { + hir_def::path::GenericArg::Type(ty) => match &types_map[ty] { + TypeRef::Tuple(it) => Some(it), + _ => None, + }, + _ => None, + }; + if let Some(v) = tuple { if v.len() == 1 { write!(f, "(")?; - v[0].hir_fmt(f)?; + v[0].hir_fmt(f, types_map)?; write!(f, ")")?; } else { - generic_args.args[0].hir_fmt(f)?; + generic_args.args[0].hir_fmt(f, types_map)?; } } - if let Some(ret) = &generic_args.bindings[0].type_ref { - if !matches!(ret, TypeRef::Tuple(v) if v.is_empty()) { + if let Some(ret) = generic_args.bindings[0].type_ref { + if !matches!(&types_map[ret], TypeRef::Tuple(v) if v.is_empty()) { write!(f, " -> ")?; - ret.hir_fmt(f)?; + ret.hir_fmt(f, types_map)?; } } return Ok(()); @@ -2123,7 +2201,7 @@ impl HirDisplay for Path { } else { write!(f, ", ")?; } - arg.hir_fmt(f)?; + arg.hir_fmt(f, types_map)?; } for binding in generic_args.bindings.iter() { if first { @@ -2136,11 +2214,14 @@ impl HirDisplay for Path { match &binding.type_ref { Some(ty) => { write!(f, " = ")?; - ty.hir_fmt(f)? + ty.hir_fmt(f, types_map)? } None => { write!(f, ": ")?; - f.write_joined(binding.bounds.iter(), " + ")?; + f.write_joined( + binding.bounds.iter().map(TypesMapAdapter::wrap(types_map)), + " + ", + )?; } } } @@ -2162,10 +2243,14 @@ impl HirDisplay for Path { } } -impl HirDisplay for hir_def::path::GenericArg { - fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { +impl HirDisplayWithTypesMap for hir_def::path::GenericArg { + fn hir_fmt( + &self, + f: &mut HirFormatter<'_>, + types_map: &TypesMap, + ) -> Result<(), HirDisplayError> { match self { - hir_def::path::GenericArg::Type(ty) => ty.hir_fmt(f), + hir_def::path::GenericArg::Type(ty) => ty.hir_fmt(f, types_map), hir_def::path::GenericArg::Const(c) => { write!(f, "{}", c.display(f.db.upcast(), f.edition())) } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/dyn_compatibility.rs b/src/tools/rust-analyzer/crates/hir-ty/src/dyn_compatibility.rs index e0d1758210ecc..3d21785a70a34 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/dyn_compatibility.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/dyn_compatibility.rs @@ -266,7 +266,7 @@ fn contains_illegal_self_type_reference>( trait_self_param_idx: usize, allow_self_projection: AllowSelfProjection, } - impl<'a> TypeVisitor for IllegalSelfTypeVisitor<'a> { + impl TypeVisitor for IllegalSelfTypeVisitor<'_> { type BreakTy = (); fn as_dyn(&mut self) -> &mut dyn TypeVisitor { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/generics.rs b/src/tools/rust-analyzer/crates/hir-ty/src/generics.rs index 89ca707c2e697..c094bc3951293 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/generics.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/generics.rs @@ -16,12 +16,13 @@ use hir_def::{ GenericParamDataRef, GenericParams, LifetimeParamData, TypeOrConstParamData, TypeParamProvenance, }, + type_ref::TypesMap, ConstParamId, GenericDefId, GenericParamId, ItemContainerId, LifetimeParamId, LocalLifetimeParamId, LocalTypeOrConstParamId, Lookup, TypeOrConstParamId, TypeParamId, }; -use intern::Interned; use itertools::chain; use stdx::TupleExt; +use triomphe::Arc; use crate::{db::HirDatabase, lt_to_placeholder_idx, to_placeholder_idx, Interner, Substitution}; @@ -34,7 +35,7 @@ pub(crate) fn generics(db: &dyn DefDatabase, def: GenericDefId) -> Generics { #[derive(Clone, Debug)] pub(crate) struct Generics { def: GenericDefId, - params: Interned, + 
params: Arc, parent_generics: Option>, has_trait_self_param: bool, } @@ -85,6 +86,18 @@ impl Generics { self.iter_self().chain(self.iter_parent()) } + pub(crate) fn iter_with_types_map( + &self, + ) -> impl Iterator), &TypesMap)> + '_ { + self.iter_self().zip(std::iter::repeat(&self.params.types_map)).chain( + self.iter_parent().zip( + self.parent_generics() + .into_iter() + .flat_map(|it| std::iter::repeat(&it.params.types_map)), + ), + ) + } + /// Iterate over the params without parent params. pub(crate) fn iter_self( &self, diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs index 88334b492d5aa..3685ed5696409 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs @@ -33,7 +33,7 @@ use chalk_ir::{ }; use either::Either; use hir_def::{ - body::Body, + body::{Body, HygieneId}, builtin_type::{BuiltinInt, BuiltinType, BuiltinUint}, data::{ConstData, StaticData}, hir::{BindingAnnotation, BindingId, ExprId, ExprOrPatId, LabelId, PatId}, @@ -41,7 +41,7 @@ use hir_def::{ layout::Integer, path::{ModPath, Path}, resolver::{HasResolver, ResolveValueResult, Resolver, TypeNs, ValueNs}, - type_ref::{LifetimeRef, TypeRef}, + type_ref::{LifetimeRef, TypeRefId, TypesMap}, AdtId, AssocItemId, DefWithBodyId, FieldId, FunctionId, ImplId, ItemContainerId, Lookup, TraitId, TupleFieldId, TupleId, TypeAliasId, VariantId, }; @@ -228,7 +228,7 @@ pub enum InferenceDiagnostic { id: ExprOrPatId, }, UnresolvedIdent { - expr: ExprId, + id: ExprOrPatId, }, // FIXME: This should be emitted in body lowering BreakOutsideOfLoop { @@ -482,12 +482,27 @@ impl InferenceResult { pub fn variant_resolution_for_pat(&self, id: PatId) -> Option { self.variant_resolutions.get(&id.into()).copied() } + pub fn variant_resolution_for_expr_or_pat(&self, id: ExprOrPatId) -> Option { + match id { + ExprOrPatId::ExprId(id) => self.variant_resolution_for_expr(id), + ExprOrPatId::PatId(id) => self.variant_resolution_for_pat(id), + } + } pub fn assoc_resolutions_for_expr(&self, id: ExprId) -> Option<(AssocItemId, Substitution)> { self.assoc_resolutions.get(&id.into()).cloned() } pub fn assoc_resolutions_for_pat(&self, id: PatId) -> Option<(AssocItemId, Substitution)> { self.assoc_resolutions.get(&id.into()).cloned() } + pub fn assoc_resolutions_for_expr_or_pat( + &self, + id: ExprOrPatId, + ) -> Option<(AssocItemId, Substitution)> { + match id { + ExprOrPatId::ExprId(id) => self.assoc_resolutions_for_expr(id), + ExprOrPatId::PatId(id) => self.assoc_resolutions_for_pat(id), + } + } pub fn type_mismatch_for_expr(&self, expr: ExprId) -> Option<&TypeMismatch> { self.type_mismatches.get(&expr.into()) } @@ -506,6 +521,12 @@ impl InferenceResult { pub fn closure_info(&self, closure: &ClosureId) -> &(Vec, FnTrait) { self.closure_info.get(closure).unwrap() } + pub fn type_of_expr_or_pat(&self, id: ExprOrPatId) -> Option<&Ty> { + match id { + ExprOrPatId::ExprId(id) => self.type_of_expr.get(id), + ExprOrPatId::PatId(id) => self.type_of_pat.get(id), + } + } } impl Index for InferenceResult { @@ -524,6 +545,14 @@ impl Index for InferenceResult { } } +impl Index for InferenceResult { + type Output = Ty; + + fn index(&self, id: ExprOrPatId) -> &Ty { + self.type_of_expr_or_pat(id).unwrap_or(&self.standard_types.unknown) + } +} + impl Index for InferenceResult { type Output = Ty; @@ -561,6 +590,9 @@ pub(crate) struct InferenceContext<'a> { diverges: Diverges, breakables: Vec, + /// Whether we are inside the pattern of a 
destructuring assignment. + inside_assignment: bool, + deferred_cast_checks: Vec, // fields related to closure capture @@ -656,6 +688,7 @@ impl<'a> InferenceContext<'a> { current_closure: None, deferred_closures: FxHashMap::default(), closure_dependencies: FxHashMap::default(), + inside_assignment: false, } } @@ -825,7 +858,7 @@ impl<'a> InferenceContext<'a> { } fn collect_const(&mut self, data: &ConstData) { - let return_ty = self.make_ty(&data.type_ref); + let return_ty = self.make_ty(data.type_ref, &data.types_map); // Constants might be defining usage sites of TAITs. self.make_tait_coercion_table(iter::once(&return_ty)); @@ -834,7 +867,7 @@ impl<'a> InferenceContext<'a> { } fn collect_static(&mut self, data: &StaticData) { - let return_ty = self.make_ty(&data.type_ref); + let return_ty = self.make_ty(data.type_ref, &data.types_map); // Statics might be defining usage sites of TAITs. self.make_tait_coercion_table(iter::once(&return_ty)); @@ -844,11 +877,11 @@ impl<'a> InferenceContext<'a> { fn collect_fn(&mut self, func: FunctionId) { let data = self.db.function_data(func); - let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver, self.owner.into()) - .with_type_param_mode(ParamLoweringMode::Placeholder) - .with_impl_trait_mode(ImplTraitLoweringMode::Param); - let mut param_tys = - data.params.iter().map(|type_ref| ctx.lower_ty(type_ref)).collect::>(); + let mut param_tys = self.with_ty_lowering(&data.types_map, |ctx| { + ctx.type_param_mode(ParamLoweringMode::Placeholder) + .impl_trait_mode(ImplTraitLoweringMode::Param); + data.params.iter().map(|&type_ref| ctx.lower_ty(type_ref)).collect::>() + }); // Check if function contains a va_list, if it does then we append it to the parameter types // that are collected from the function data if data.is_varargs() { @@ -883,12 +916,13 @@ impl<'a> InferenceContext<'a> { tait_candidates.insert(ty); } } - let return_ty = &*data.ret_type; + let return_ty = data.ret_type; - let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver, self.owner.into()) - .with_type_param_mode(ParamLoweringMode::Placeholder) - .with_impl_trait_mode(ImplTraitLoweringMode::Opaque); - let return_ty = ctx.lower_ty(return_ty); + let return_ty = self.with_ty_lowering(&data.types_map, |ctx| { + ctx.type_param_mode(ParamLoweringMode::Placeholder) + .impl_trait_mode(ImplTraitLoweringMode::Opaque) + .lower_ty(return_ty) + }); let return_ty = self.insert_type_vars(return_ty); let return_ty = if let Some(rpits) = self.db.return_type_impl_traits(func) { @@ -1022,7 +1056,7 @@ impl<'a> InferenceContext<'a> { non_assocs: FxHashMap, } - impl<'a, 'b> TypeVisitor for TypeAliasImplTraitCollector<'a, 'b> { + impl TypeVisitor for TypeAliasImplTraitCollector<'_, '_> { type BreakTy = (); fn as_dyn(&mut self) -> &mut dyn TypeVisitor { @@ -1192,20 +1226,43 @@ impl<'a> InferenceContext<'a> { self.result.diagnostics.push(diagnostic); } - fn make_ty(&mut self, type_ref: &TypeRef) -> Ty { - let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver, self.owner.into()); - let ty = ctx.lower_ty(type_ref); + fn with_ty_lowering( + &self, + types_map: &TypesMap, + f: impl FnOnce(&mut crate::lower::TyLoweringContext<'_>) -> R, + ) -> R { + let mut ctx = crate::lower::TyLoweringContext::new( + self.db, + &self.resolver, + types_map, + self.owner.into(), + ); + f(&mut ctx) + } + + fn with_body_ty_lowering( + &self, + f: impl FnOnce(&mut crate::lower::TyLoweringContext<'_>) -> R, + ) -> R { + self.with_ty_lowering(&self.body.types, f) + } + + fn make_ty(&mut self, 
type_ref: TypeRefId, types_map: &TypesMap) -> Ty { + let ty = self.with_ty_lowering(types_map, |ctx| ctx.lower_ty(type_ref)); let ty = self.insert_type_vars(ty); self.normalize_associated_types_in(ty) } + fn make_body_ty(&mut self, type_ref: TypeRefId) -> Ty { + self.make_ty(type_ref, &self.body.types) + } + fn err_ty(&self) -> Ty { self.result.standard_types.unknown.clone() } fn make_lifetime(&mut self, lifetime_ref: &LifetimeRef) -> Lifetime { - let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver, self.owner.into()); - let lt = ctx.lower_lifetime(lifetime_ref); + let lt = self.with_ty_lowering(TypesMap::EMPTY, |ctx| ctx.lower_lifetime(lifetime_ref)); self.insert_type_vars(lt) } @@ -1363,9 +1420,14 @@ impl<'a> InferenceContext<'a> { Some(path) => path, None => return (self.err_ty(), None), }; - let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver, self.owner.into()); + let ctx = crate::lower::TyLoweringContext::new( + self.db, + &self.resolver, + &self.body.types, + self.owner.into(), + ); let (resolution, unresolved) = if value_ns { - match self.resolver.resolve_path_in_value_ns(self.db.upcast(), path) { + match self.resolver.resolve_path_in_value_ns(self.db.upcast(), path, HygieneId::ROOT) { Some(ResolveValueResult::ValueNs(value, _)) => match value { ValueNs::EnumVariantId(var) => { let substs = ctx.substs_from_path(path, var.into(), true); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs index e9825cf09988b..5a251683b962a 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs @@ -11,11 +11,12 @@ use either::Either; use hir_def::{ data::adt::VariantData, hir::{ - Array, AsmOperand, BinaryOp, BindingId, CaptureBy, Expr, ExprId, Pat, PatId, Statement, - UnaryOp, + Array, AsmOperand, BinaryOp, BindingId, CaptureBy, Expr, ExprId, ExprOrPatId, Pat, PatId, + Statement, UnaryOp, }, lang_item::LangItem, - resolver::{resolver_for_expr, ResolveValueResult, ValueNs}, + path::Path, + resolver::ValueNs, DefWithBodyId, FieldId, HasModule, TupleFieldId, TupleId, VariantId, }; use hir_expand::name::Name; @@ -282,11 +283,11 @@ impl CapturedItem { ProjectionElem::Deref => {} ProjectionElem::Field(Either::Left(f)) => { match &*f.parent.variant_data(db.upcast()) { - VariantData::Record(fields) => { + VariantData::Record { fields, .. } => { result.push('_'); result.push_str(fields[f.local_id].name.as_str()) } - VariantData::Tuple(fields) => { + VariantData::Tuple { fields, .. } => { let index = fields.iter().position(|it| it.0 == f.local_id); if let Some(index) = index { format_to!(result, "_{index}"); @@ -324,12 +325,12 @@ impl CapturedItem { ProjectionElem::Field(Either::Left(f)) => { let variant_data = f.parent.variant_data(db.upcast()); match &*variant_data { - VariantData::Record(fields) => format_to!( + VariantData::Record { fields, .. } => format_to!( result, ".{}", fields[f.local_id].name.display(db.upcast(), edition) ), - VariantData::Tuple(fields) => format_to!( + VariantData::Tuple { fields, .. } => format_to!( result, ".{}", fields.iter().position(|it| it.0 == f.local_id).unwrap_or_default() @@ -382,8 +383,10 @@ impl CapturedItem { } let variant_data = f.parent.variant_data(db.upcast()); let field = match &*variant_data { - VariantData::Record(fields) => fields[f.local_id].name.as_str().to_owned(), - VariantData::Tuple(fields) => fields + VariantData::Record { fields, .. 
} => { + fields[f.local_id].name.as_str().to_owned() + } + VariantData::Tuple { fields, .. } => fields .iter() .position(|it| it.0 == f.local_id) .unwrap_or_default() @@ -508,18 +511,39 @@ impl InferenceContext<'_> { apply_adjusts_to_place(&mut self.current_capture_span_stack, r, adjustments) } + /// Pushes the span into `current_capture_span_stack`, *without clearing it first*. + fn path_place(&mut self, path: &Path, id: ExprOrPatId) -> Option { + if path.type_anchor().is_some() { + return None; + } + let hygiene = self.body.expr_or_pat_path_hygiene(id); + let result = self + .resolver + .resolve_path_in_value_ns_fully(self.db.upcast(), path, hygiene) + .and_then(|result| match result { + ValueNs::LocalBinding(binding) => { + let mir_span = match id { + ExprOrPatId::ExprId(id) => MirSpan::ExprId(id), + ExprOrPatId::PatId(id) => MirSpan::PatId(id), + }; + self.current_capture_span_stack.push(mir_span); + Some(HirPlace { local: binding, projections: Vec::new() }) + } + _ => None, + }); + result + } + /// Changes `current_capture_span_stack` to contain the stack of spans for this expr. fn place_of_expr_without_adjust(&mut self, tgt_expr: ExprId) -> Option { self.current_capture_span_stack.clear(); match &self.body[tgt_expr] { Expr::Path(p) => { - let resolver = resolver_for_expr(self.db.upcast(), self.owner, tgt_expr); - if let Some(ResolveValueResult::ValueNs(ValueNs::LocalBinding(b), _)) = - resolver.resolve_path_in_value_ns(self.db.upcast(), p) - { - self.current_capture_span_stack.push(MirSpan::ExprId(tgt_expr)); - return Some(HirPlace { local: b, projections: vec![] }); - } + let resolver_guard = + self.resolver.update_to_inner_scope(self.db.upcast(), self.owner, tgt_expr); + let result = self.path_place(p, tgt_expr.into()); + self.resolver.reset_to_guard(resolver_guard); + return result; } Expr::Field { expr, name: _ } => { let mut place = self.place_of_expr(*expr)?; @@ -590,6 +614,16 @@ impl InferenceContext<'_> { } } + fn mutate_path_pat(&mut self, path: &Path, id: PatId) { + if let Some(place) = self.path_place(path, id.into()) { + self.add_capture( + place, + CaptureKind::ByRef(BorrowKind::Mut { kind: MutBorrowKind::Default }), + ); + self.current_capture_span_stack.pop(); // Remove the pattern span. 
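The `path_place` helper and `mutate_path_pat` added in this hunk teach closure-capture analysis that a bare path appearing as an assignment target is a write to that local, so it must be captured as a mutable borrow. At the source level this is plain, stable Rust, with no rust-analyzer API involved:

    fn main() {
        let mut a = 1;
        let mut b = 2;

        // Inside the closure, `a` and `b` are written through a destructuring
        // assignment, so capture analysis has to record mutable borrows of both.
        let mut swap = || (a, b) = (b, a);
        swap();

        assert_eq!((a, b), (2, 1));
    }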
+ } + } + fn mutate_expr(&mut self, expr: ExprId, place: Option) { if let Some(place) = place { self.add_capture( @@ -715,14 +749,14 @@ impl InferenceContext<'_> { Statement::Expr { expr, has_semi: _ } => { self.consume_expr(*expr); } - Statement::Item => (), + Statement::Item(_) => (), } } if let Some(tail) = tail { self.consume_expr(*tail); } } - Expr::Call { callee, args, is_assignee_expr: _ } => { + Expr::Call { callee, args } => { self.consume_expr(*callee); self.consume_exprs(args.iter().copied()); } @@ -838,7 +872,7 @@ impl InferenceContext<'_> { self.consume_expr(expr); } } - Expr::Index { base, index, is_assignee_expr: _ } => { + Expr::Index { base, index } => { self.select_from_expr(*base); self.consume_expr(*index); } @@ -862,10 +896,30 @@ impl InferenceContext<'_> { })); self.current_captures = cc; } - Expr::Array(Array::ElementList { elements: exprs, is_assignee_expr: _ }) - | Expr::Tuple { exprs, is_assignee_expr: _ } => { + Expr::Array(Array::ElementList { elements: exprs }) | Expr::Tuple { exprs } => { self.consume_exprs(exprs.iter().copied()) } + &Expr::Assignment { target, value } => { + self.walk_expr(value); + let resolver_guard = + self.resolver.update_to_inner_scope(self.db.upcast(), self.owner, tgt_expr); + match self.place_of_expr(value) { + Some(rhs_place) => { + self.inside_assignment = true; + self.consume_with_pat(rhs_place, target); + self.inside_assignment = false; + } + None => self.body.walk_pats(target, &mut |pat| match &self.body[pat] { + Pat::Path(path) => self.mutate_path_pat(path, pat), + &Pat::Expr(expr) => { + let place = self.place_of_expr(expr); + self.mutate_expr(expr, place); + } + _ => {} + }), + } + self.resolver.reset_to_guard(resolver_guard); + } Expr::Missing | Expr::Continue { .. } @@ -903,6 +957,7 @@ impl InferenceContext<'_> { | Pat::Missing | Pat::Wild | Pat::Tuple { .. } + | Pat::Expr(_) | Pat::Or(_) => (), Pat::TupleStruct { .. } | Pat::Record { .. } => { if let Some(variant) = self.result.variant_resolution_for_pat(p) { @@ -1122,11 +1177,15 @@ impl InferenceContext<'_> { } } } - Pat::Range { .. } - | Pat::Slice { .. } - | Pat::ConstBlock(_) - | Pat::Path(_) - | Pat::Lit(_) => self.consume_place(place), + Pat::Range { .. } | Pat::Slice { .. } | Pat::ConstBlock(_) | Pat::Lit(_) => { + self.consume_place(place) + } + Pat::Path(path) => { + if self.inside_assignment { + self.mutate_path_pat(path, tgt_pat); + } + self.consume_place(place); + } &Pat::Bind { id, subpat: _ } => { let mode = self.result.binding_modes[tgt_pat]; let capture_kind = match mode { @@ -1180,6 +1239,15 @@ impl InferenceContext<'_> { self.current_capture_span_stack.pop(); } Pat::Box { .. 
} => (), // not supported + &Pat::Expr(expr) => { + self.consume_place(place); + let pat_capture_span_stack = mem::take(&mut self.current_capture_span_stack); + let old_inside_assignment = mem::replace(&mut self.inside_assignment, false); + let lhs_place = self.place_of_expr(expr); + self.mutate_expr(expr, lhs_place); + self.inside_assignment = old_inside_assignment; + self.current_capture_span_stack = pat_capture_span_stack; + } } } self.current_capture_span_stack diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs index 657e4d7796613..32b4ea2f28ba4 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs @@ -9,8 +9,8 @@ use chalk_ir::{cast::Cast, fold::Shift, DebruijnIndex, Mutability, TyVariableKin use either::Either; use hir_def::{ hir::{ - ArithOp, Array, AsmOperand, AsmOptions, BinaryOp, ClosureKind, Expr, ExprId, LabelId, - Literal, Pat, PatId, Statement, UnaryOp, + ArithOp, Array, AsmOperand, AsmOptions, BinaryOp, ClosureKind, Expr, ExprId, ExprOrPatId, + LabelId, Literal, Pat, PatId, Statement, UnaryOp, }, lang_item::{LangItem, LangItemTarget}, path::{GenericArg, GenericArgs, Path}, @@ -188,6 +188,9 @@ impl InferenceContext<'_> { | Pat::ConstBlock(_) | Pat::Record { .. } | Pat::Missing => true, + Pat::Expr(_) => unreachable!( + "we don't call pat_guaranteed_to_constitute_read_for_never() with assignments" + ), } } @@ -195,10 +198,14 @@ impl InferenceContext<'_> { match &self.body[expr] { // Lang item paths cannot currently be local variables or statics. Expr::Path(Path::LangItem(_, _)) => false, - Expr::Path(Path::Normal { type_anchor: Some(_), .. }) => false, + Expr::Path(Path::Normal(path)) => path.type_anchor().is_none(), Expr::Path(path) => self .resolver - .resolve_path_in_value_ns_fully(self.db.upcast(), path) + .resolve_path_in_value_ns_fully( + self.db.upcast(), + path, + self.body.expr_path_hygiene(expr), + ) .map_or(true, |res| matches!(res, ValueNs::LocalBinding(_) | ValueNs::StaticId(_))), Expr::Underscore => true, Expr::UnaryOp { op: UnaryOp::Deref, .. } => true, @@ -223,6 +230,7 @@ impl InferenceContext<'_> { | Expr::Const(..) | Expr::UnaryOp { .. } | Expr::BinaryOp { .. } + | Expr::Assignment { .. } | Expr::Yield { .. } | Expr::Cast { .. } | Expr::Async { .. } @@ -374,7 +382,7 @@ impl InferenceContext<'_> { // collect explicitly written argument types for arg_type in arg_types.iter() { let arg_ty = match arg_type { - Some(type_ref) => self.make_ty(type_ref), + Some(type_ref) => self.make_body_ty(*type_ref), None => self.table.new_type_var(), }; sig_tys.push(arg_ty); @@ -382,7 +390,7 @@ impl InferenceContext<'_> { // add return type let ret_ty = match ret_type { - Some(type_ref) => self.make_ty(type_ref), + Some(type_ref) => self.make_body_ty(*type_ref), None => self.table.new_type_var(), }; if let ClosureKind::Async = closure_kind { @@ -609,23 +617,7 @@ impl InferenceContext<'_> { coerce.complete(self) } } - Expr::Path(p) => { - let g = self.resolver.update_to_inner_scope(self.db.upcast(), self.owner, tgt_expr); - let ty = match self.infer_path(p, tgt_expr.into()) { - Some(ty) => ty, - None => { - if matches!(p, Path::Normal { mod_path, .. 
} if mod_path.is_ident() || mod_path.is_self()) - { - self.push_diagnostic(InferenceDiagnostic::UnresolvedIdent { - expr: tgt_expr, - }); - } - self.err_ty() - } - }; - self.resolver.reset_to_guard(g); - ty - } + Expr::Path(p) => self.infer_expr_path(p, tgt_expr.into(), tgt_expr), &Expr::Continue { label } => { if find_continuable(&mut self.breakables, label).is_none() { self.push_diagnostic(InferenceDiagnostic::BreakOutsideOfLoop { @@ -794,7 +786,7 @@ impl InferenceContext<'_> { self.resolve_associated_type(inner_ty, self.resolve_future_future_output()) } Expr::Cast { expr, type_ref } => { - let cast_ty = self.make_ty(type_ref); + let cast_ty = self.make_body_ty(*type_ref); let expr_ty = self.infer_expr( *expr, &Expectation::Castable(cast_ty.clone()), @@ -892,36 +884,6 @@ impl InferenceContext<'_> { } } Expr::BinaryOp { lhs, rhs, op } => match op { - Some(BinaryOp::Assignment { op: None }) => { - let lhs = *lhs; - let is_ordinary = match &self.body[lhs] { - Expr::Array(_) - | Expr::RecordLit { .. } - | Expr::Tuple { .. } - | Expr::Underscore => false, - Expr::Call { callee, .. } => !matches!(&self.body[*callee], Expr::Path(_)), - _ => true, - }; - - // In ordinary (non-destructuring) assignments, the type of - // `lhs` must be inferred first so that the ADT fields - // instantiations in RHS can be coerced to it. Note that this - // cannot happen in destructuring assignments because of how - // they are desugared. - if is_ordinary { - // LHS of assignment doesn't constitute reads. - let lhs_ty = self.infer_expr(lhs, &Expectation::none(), ExprIsRead::No); - self.infer_expr_coerce( - *rhs, - &Expectation::has_type(lhs_ty), - ExprIsRead::No, - ); - } else { - let rhs_ty = self.infer_expr(*rhs, &Expectation::none(), ExprIsRead::Yes); - self.infer_assignee_expr(lhs, &rhs_ty); - } - self.result.standard_types.unit.clone() - } Some(BinaryOp::LogicOp(_)) => { let bool_ty = self.result.standard_types.bool_.clone(); self.infer_expr_coerce( @@ -942,6 +904,35 @@ impl InferenceContext<'_> { Some(op) => self.infer_overloadable_binop(*lhs, *op, *rhs, tgt_expr), _ => self.err_ty(), }, + &Expr::Assignment { target, value } => { + // In ordinary (non-destructuring) assignments, the type of + // `lhs` must be inferred first so that the ADT fields + // instantiations in RHS can be coerced to it. Note that this + // cannot happen in destructuring assignments because of how + // they are desugared. + let lhs_ty = match &self.body[target] { + // LHS of assignment doesn't constitute reads. 
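The `Expr::Assignment` inference arm introduced in this hunk types the left-hand side first when the target is a place expression or path, so the right-hand side can be coerced to it; only genuine destructuring targets fall back to typing the right-hand side first and matching the target pattern against it. The ordering is observable in plain, stable Rust through unsizing coercions:

    use std::fmt::Debug;

    fn main() {
        let mut slot: Box<dyn Debug> = Box::new(0_i32);

        // Ordinary assignment: `slot` is typed first (`Box<dyn Debug>`), so the
        // `Box<&str>` produced on the right is coerced to `Box<dyn Debug>`.
        slot = Box::new("replaced");
        println!("{slot:?}");

        // Destructuring assignment: the right-hand side is typed first and the
        // tuple target on the left simply destructures it.
        let (mut x, mut y) = (0, 0);
        (x, y) = (1, 2);
        assert_eq!((x, y), (1, 2));
    }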
+ &Pat::Expr(expr) => { + Some(self.infer_expr(expr, &Expectation::none(), ExprIsRead::No)) + } + Pat::Path(path) => Some(self.infer_expr_path(path, target.into(), tgt_expr)), + _ => None, + }; + + if let Some(lhs_ty) = lhs_ty { + self.write_pat_ty(target, lhs_ty.clone()); + self.infer_expr_coerce(value, &Expectation::has_type(lhs_ty), ExprIsRead::No); + } else { + let rhs_ty = self.infer_expr(value, &Expectation::none(), ExprIsRead::Yes); + let resolver_guard = + self.resolver.update_to_inner_scope(self.db.upcast(), self.owner, tgt_expr); + self.inside_assignment = true; + self.infer_top_pat(target, &rhs_ty); + self.inside_assignment = false; + self.resolver.reset_to_guard(resolver_guard); + } + self.result.standard_types.unit.clone() + } Expr::Range { lhs, rhs, range_type } => { let lhs_ty = lhs.map(|e| self.infer_expr_inner(e, &Expectation::none(), ExprIsRead::Yes)); @@ -981,7 +972,7 @@ impl InferenceContext<'_> { (RangeOp::Inclusive, _, None) => self.err_ty(), } } - Expr::Index { base, index, is_assignee_expr } => { + Expr::Index { base, index } => { let base_ty = self.infer_expr_inner(*base, &Expectation::none(), ExprIsRead::Yes); let index_ty = self.infer_expr(*index, &Expectation::none(), ExprIsRead::Yes); @@ -1017,23 +1008,11 @@ impl InferenceContext<'_> { self.write_method_resolution(tgt_expr, func, subst); } let assoc = self.resolve_ops_index_output(); - let res = self.resolve_associated_type_with_params( + self.resolve_associated_type_with_params( self_ty.clone(), assoc, &[index_ty.clone().cast(Interner)], - ); - - if *is_assignee_expr { - if let Some(index_trait) = self.resolve_lang_trait(LangItem::IndexMut) { - let trait_ref = TyBuilder::trait_ref(self.db, index_trait) - .push(self_ty) - .fill(|_| index_ty.clone().cast(Interner)) - .build(); - self.push_obligation(trait_ref.cast(Interner)); - } - } - - res + ) } else { self.err_ty() } @@ -1151,9 +1130,7 @@ impl InferenceContext<'_> { }, }, Expr::Underscore => { - // Underscore expressions may only appear in assignee expressions, - // which are handled by `infer_assignee_expr()`. - // Any other underscore expression is an error, we render a specialized diagnostic + // Underscore expression is an error, we render a specialized diagnostic // to let the user know what type is expected though. let expected = expected.to_option(&mut self.table).unwrap_or_else(|| self.err_ty()); self.push_diagnostic(InferenceDiagnostic::TypedHole { @@ -1232,6 +1209,22 @@ impl InferenceContext<'_> { ty } + fn infer_expr_path(&mut self, path: &Path, id: ExprOrPatId, scope_id: ExprId) -> Ty { + let g = self.resolver.update_to_inner_scope(self.db.upcast(), self.owner, scope_id); + let ty = match self.infer_path(path, id) { + Some(ty) => ty, + None => { + if path.mod_path().is_some_and(|mod_path| mod_path.is_ident() || mod_path.is_self()) + { + self.push_diagnostic(InferenceDiagnostic::UnresolvedIdent { id }); + } + self.err_ty() + } + }; + self.resolver.reset_to_guard(g); + ty + } + fn infer_async_block( &mut self, tgt_expr: ExprId, @@ -1482,107 +1475,6 @@ impl InferenceContext<'_> { } } - pub(super) fn infer_assignee_expr(&mut self, lhs: ExprId, rhs_ty: &Ty) -> Ty { - let is_rest_expr = |expr| { - matches!( - &self.body[expr], - Expr::Range { lhs: None, rhs: None, range_type: RangeOp::Exclusive }, - ) - }; - - let rhs_ty = self.resolve_ty_shallow(rhs_ty); - - let ty = match &self.body[lhs] { - Expr::Tuple { exprs, .. } => { - // We don't consider multiple ellipses. This is analogous to - // `hir_def::body::lower::ExprCollector::collect_tuple_pat()`. 
- let ellipsis = exprs.iter().position(|e| is_rest_expr(*e)).map(|it| it as u32); - let exprs: Vec<_> = exprs.iter().filter(|e| !is_rest_expr(**e)).copied().collect(); - - self.infer_tuple_pat_like(&rhs_ty, (), ellipsis, &exprs) - } - Expr::Call { callee, args, .. } => { - // Tuple structs - let path = match &self.body[*callee] { - Expr::Path(path) => Some(path), - _ => None, - }; - - // We don't consider multiple ellipses. This is analogous to - // `hir_def::body::lower::ExprCollector::collect_tuple_pat()`. - let ellipsis = args.iter().position(|e| is_rest_expr(*e)).map(|it| it as u32); - let args: Vec<_> = args.iter().filter(|e| !is_rest_expr(**e)).copied().collect(); - - self.infer_tuple_struct_pat_like(path, &rhs_ty, (), lhs, ellipsis, &args) - } - Expr::Array(Array::ElementList { elements, .. }) => { - let elem_ty = match rhs_ty.kind(Interner) { - TyKind::Array(st, _) => st.clone(), - _ => self.err_ty(), - }; - - // There's no need to handle `..` as it cannot be bound. - let sub_exprs = elements.iter().filter(|e| !is_rest_expr(**e)); - - for e in sub_exprs { - self.infer_assignee_expr(*e, &elem_ty); - } - - match rhs_ty.kind(Interner) { - TyKind::Array(_, _) => rhs_ty.clone(), - // Even when `rhs_ty` is not an array type, this assignee - // expression is inferred to be an array (of unknown element - // type and length). This should not be just an error type, - // because we are to compute the unifiability of this type and - // `rhs_ty` in the end of this function to issue type mismatches. - _ => TyKind::Array( - self.err_ty(), - crate::consteval::usize_const(self.db, None, self.resolver.krate()), - ) - .intern(Interner), - } - } - Expr::RecordLit { path, fields, .. } => { - let subs = fields.iter().map(|f| (f.name.clone(), f.expr)); - - self.infer_record_pat_like(path.as_deref(), &rhs_ty, (), lhs, subs) - } - Expr::Underscore => rhs_ty.clone(), - _ => { - // `lhs` is a place expression, a unit struct, or an enum variant. - // LHS of assignment doesn't constitute reads. - let lhs_ty = self.infer_expr_inner(lhs, &Expectation::none(), ExprIsRead::No); - - // This is the only branch where this function may coerce any type. - // We are returning early to avoid the unifiability check below. - let lhs_ty = self.insert_type_vars_shallow(lhs_ty); - let ty = match self.coerce(None, &rhs_ty, &lhs_ty, CoerceNever::Yes) { - Ok(ty) => ty, - Err(_) => { - self.result.type_mismatches.insert( - lhs.into(), - TypeMismatch { expected: rhs_ty.clone(), actual: lhs_ty.clone() }, - ); - // `rhs_ty` is returned so no further type mismatches are - // reported because of this mismatch. 
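The `infer_assignee_expr` method removed in this hunk special-cased tuples, tuple-struct calls, arrays, record literals and `_` on the left of `=`; with assignment targets now lowered as patterns, those shapes go through ordinary pattern inference instead. For reference, the surface forms involved are all stable Rust and need nothing from rust-analyzer:

    struct Pair(i32, i32);
    struct Point { x: i32, y: i32 }

    fn main() {
        let (mut a, mut b, mut last) = (0, 0, 0);

        // Tuple target with a rest pattern and an ignored slot.
        (a, .., last) = (1, 2, 3, 4);
        (_, b) = (10, 20);
        assert_eq!((a, b, last), (1, 20, 4));

        // Array, tuple-struct and struct targets destructure field by field.
        [a, b] = [9, 10];
        Pair(a, b) = Pair(7, 8);
        Point { x: a, y: b } = Point { x: 5, y: 6 };
        assert_eq!((a, b), (5, 6));
    }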
- rhs_ty - } - }; - self.write_expr_ty(lhs, ty.clone()); - return ty; - } - }; - - let ty = self.insert_type_vars_shallow(ty); - if !self.unify(&ty, &rhs_ty) { - self.result - .type_mismatches - .insert(lhs.into(), TypeMismatch { expected: rhs_ty.clone(), actual: ty.clone() }); - } - self.write_expr_ty(lhs, ty.clone()); - ty - } - fn infer_overloadable_binop( &mut self, lhs: ExprId, @@ -1706,7 +1598,7 @@ impl InferenceContext<'_> { Statement::Let { pat, type_ref, initializer, else_branch } => { let decl_ty = type_ref .as_ref() - .map(|tr| this.make_ty(tr)) + .map(|&tr| this.make_body_ty(tr)) .unwrap_or_else(|| this.table.new_type_var()); let ty = if let Some(expr) = initializer { @@ -1764,7 +1656,7 @@ impl InferenceContext<'_> { ); } } - Statement::Item => (), + Statement::Item(_) => (), } } @@ -2249,7 +2141,8 @@ impl InferenceContext<'_> { kind_id, args.next().unwrap(), // `peek()` is `Some(_)`, so guaranteed no panic self, - |this, type_ref| this.make_ty(type_ref), + &self.body.types, + |this, type_ref| this.make_body_ty(type_ref), |this, c, ty| { const_or_path_to_chalk( this.db, diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/mutability.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/mutability.rs index 6a0daee6ea9f6..d74a383f44ef4 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/mutability.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/mutability.rs @@ -4,7 +4,8 @@ use chalk_ir::{cast::Cast, Mutability}; use hir_def::{ hir::{ - Array, AsmOperand, BinaryOp, BindingAnnotation, Expr, ExprId, PatId, Statement, UnaryOp, + Array, AsmOperand, BinaryOp, BindingAnnotation, Expr, ExprId, Pat, PatId, Statement, + UnaryOp, }, lang_item::LangItem, }; @@ -88,7 +89,7 @@ impl InferenceContext<'_> { Statement::Expr { expr, has_semi: _ } => { self.infer_mut_expr(*expr, Mutability::Not); } - Statement::Item => (), + Statement::Item(_) => (), } } if let Some(tail) = tail { @@ -96,7 +97,7 @@ impl InferenceContext<'_> { } } Expr::MethodCall { receiver: it, method_name: _, args, generic_args: _ } - | Expr::Call { callee: it, args, is_assignee_expr: _ } => { + | Expr::Call { callee: it, args } => { self.infer_mut_not_expr_iter(args.iter().copied().chain(Some(*it))); } Expr::Match { expr, arms } => { @@ -120,10 +121,10 @@ impl InferenceContext<'_> { Expr::Become { expr } => { self.infer_mut_expr(*expr, Mutability::Not); } - Expr::RecordLit { path: _, fields, spread, ellipsis: _, is_assignee_expr: _ } => { + Expr::RecordLit { path: _, fields, spread } => { self.infer_mut_not_expr_iter(fields.iter().map(|it| it.expr).chain(*spread)) } - &Expr::Index { base, index, is_assignee_expr } => { + &Expr::Index { base, index } => { if mutability == Mutability::Mut { if let Some((f, _)) = self.result.method_resolutions.get_mut(&tgt_expr) { if let Some(index_trait) = self @@ -148,11 +149,8 @@ impl InferenceContext<'_> { target, }) = base_adjustments { - // For assignee exprs `IndexMut` obligations are already applied - if !is_assignee_expr { - if let TyKind::Ref(_, _, ty) = target.kind(Interner) { - base_ty = Some(ty.clone()); - } + if let TyKind::Ref(_, _, ty) = target.kind(Interner) { + base_ty = Some(ty.clone()); } *mutability = Mutability::Mut; } @@ -233,6 +231,14 @@ impl InferenceContext<'_> { self.infer_mut_expr(*lhs, Mutability::Mut); self.infer_mut_expr(*rhs, Mutability::Not); } + &Expr::Assignment { target, value } => { + self.body.walk_pats(target, &mut |pat| match self.body[pat] { + Pat::Expr(expr) => self.infer_mut_expr(expr, Mutability::Mut), + Pat::ConstBlock(block) => 
self.infer_mut_expr(block, Mutability::Not), + _ => {} + }); + self.infer_mut_expr(value, Mutability::Not); + } Expr::Array(Array::Repeat { initializer: lhs, repeat: rhs }) | Expr::BinaryOp { lhs, rhs, op: _ } | Expr::Range { lhs: Some(lhs), rhs: Some(rhs), range_type: _ } => { @@ -242,8 +248,7 @@ impl InferenceContext<'_> { Expr::Closure { body, .. } => { self.infer_mut_expr(*body, Mutability::Not); } - Expr::Tuple { exprs, is_assignee_expr: _ } - | Expr::Array(Array::ElementList { elements: exprs, is_assignee_expr: _ }) => { + Expr::Tuple { exprs } | Expr::Array(Array::ElementList { elements: exprs }) => { self.infer_mut_not_expr_iter(exprs.iter().copied()); } // These don't need any action, as they don't have sub expressions diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs index fee6755408ead..50e761196ec1b 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs @@ -4,7 +4,7 @@ use std::iter::repeat_with; use hir_def::{ body::Body, - hir::{Binding, BindingAnnotation, BindingId, Expr, ExprId, ExprOrPatId, Literal, Pat, PatId}, + hir::{Binding, BindingAnnotation, BindingId, Expr, ExprId, Literal, Pat, PatId}, path::Path, }; use hir_expand::name::Name; @@ -12,63 +12,28 @@ use stdx::TupleExt; use crate::{ consteval::{try_const_usize, usize_const}, - infer::{expr::ExprIsRead, BindingMode, Expectation, InferenceContext, TypeMismatch}, + infer::{ + coerce::CoerceNever, expr::ExprIsRead, BindingMode, Expectation, InferenceContext, + TypeMismatch, + }, lower::lower_to_chalk_mutability, primitive::UintTy, static_lifetime, InferenceDiagnostic, Interner, Mutability, Scalar, Substitution, Ty, TyBuilder, TyExt, TyKind, }; -/// Used to generalize patterns and assignee expressions. -pub(super) trait PatLike: Into + Copy { - type BindingMode: Copy; - - fn infer( - this: &mut InferenceContext<'_>, - id: Self, - expected_ty: &Ty, - default_bm: Self::BindingMode, - ) -> Ty; -} - -impl PatLike for ExprId { - type BindingMode = (); - - fn infer( - this: &mut InferenceContext<'_>, - id: Self, - expected_ty: &Ty, - (): Self::BindingMode, - ) -> Ty { - this.infer_assignee_expr(id, expected_ty) - } -} - -impl PatLike for PatId { - type BindingMode = BindingMode; - - fn infer( - this: &mut InferenceContext<'_>, - id: Self, - expected_ty: &Ty, - default_bm: Self::BindingMode, - ) -> Ty { - this.infer_pat(id, expected_ty, default_bm) - } -} - impl InferenceContext<'_> { /// Infers type for tuple struct pattern or its corresponding assignee expression. /// /// Ellipses found in the original pattern or expression must be filtered out. - pub(super) fn infer_tuple_struct_pat_like( + pub(super) fn infer_tuple_struct_pat_like( &mut self, path: Option<&Path>, expected: &Ty, - default_bm: T::BindingMode, - id: T, + default_bm: BindingMode, + id: PatId, ellipsis: Option, - subs: &[T], + subs: &[PatId], ) -> Ty { let (ty, def) = self.resolve_variant(path, true); let var_data = def.map(|it| it.variant_data(self.db.upcast())); @@ -127,13 +92,13 @@ impl InferenceContext<'_> { } }; - T::infer(self, subpat, &expected_ty, default_bm); + self.infer_pat(subpat, &expected_ty, default_bm); } } None => { let err_ty = self.err_ty(); for &inner in subs { - T::infer(self, inner, &err_ty, default_bm); + self.infer_pat(inner, &err_ty, default_bm); } } } @@ -142,13 +107,13 @@ impl InferenceContext<'_> { } /// Infers type for record pattern or its corresponding assignee expression. 
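In `mutability.rs` above, place expressions inside an assignment target (the `Pat::Expr` case) are now inferred with `Mutability::Mut`, which is what turns indexing or dereferencing on the left of `=` into `IndexMut`/`DerefMut` obligations instead of plain `Index`/`Deref`. The user-visible rule, in plain stable Rust:

    use std::collections::HashMap;

    fn main() {
        let mut v = vec![1, 2, 3];
        let _read = v[0]; // read position: `Index`
        v[0] = 10;        // assignment target: `IndexMut`

        let mut boxed = Box::new(5);
        let _also_read = *boxed; // `Deref`
        *boxed = 6;              // assignment target: `DerefMut`

        // `HashMap` implements `Index` but not `IndexMut`, so reading through
        // `map[..]` compiles while assigning through it does not.
        let map: HashMap<&str, i32> = HashMap::from([("a", 1)]);
        let _ok = map["a"];
        // map["a"] = 2; // rejected: `HashMap` has no `IndexMut` implementation

        assert_eq!(v[0], 10);
        assert_eq!(*boxed, 6);
    }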
- pub(super) fn infer_record_pat_like( + pub(super) fn infer_record_pat_like( &mut self, path: Option<&Path>, expected: &Ty, - default_bm: T::BindingMode, - id: T, - subs: impl ExactSizeIterator, + default_bm: BindingMode, + id: PatId, + subs: impl ExactSizeIterator, ) -> Ty { let (ty, def) = self.resolve_variant(path, false); if let Some(variant) = def { @@ -197,13 +162,13 @@ impl InferenceContext<'_> { } }; - T::infer(self, inner, &expected_ty, default_bm); + self.infer_pat(inner, &expected_ty, default_bm); } } None => { let err_ty = self.err_ty(); for (_, inner) in subs { - T::infer(self, inner, &err_ty, default_bm); + self.infer_pat(inner, &err_ty, default_bm); } } } @@ -214,12 +179,12 @@ impl InferenceContext<'_> { /// Infers type for tuple pattern or its corresponding assignee expression. /// /// Ellipses found in the original pattern or expression must be filtered out. - pub(super) fn infer_tuple_pat_like( + pub(super) fn infer_tuple_pat_like( &mut self, expected: &Ty, - default_bm: T::BindingMode, + default_bm: BindingMode, ellipsis: Option, - subs: &[T], + subs: &[PatId], ) -> Ty { let expected = self.resolve_ty_shallow(expected); let expectations = match expected.as_tuple() { @@ -244,18 +209,20 @@ impl InferenceContext<'_> { // Process pre for (ty, pat) in inner_tys.iter_mut().zip(pre) { - *ty = T::infer(self, *pat, ty, default_bm); + *ty = self.infer_pat(*pat, ty, default_bm); } // Process post for (ty, pat) in inner_tys.iter_mut().skip(pre.len() + n_uncovered_patterns).zip(post) { - *ty = T::infer(self, *pat, ty, default_bm); + *ty = self.infer_pat(*pat, ty, default_bm); } TyKind::Tuple(inner_tys.len(), Substitution::from_iter(Interner, inner_tys)) .intern(Interner) } + /// The resolver needs to be updated to the surrounding expression when inside assignment + /// (because there, `Pat::Path` can refer to a variable). pub(super) fn infer_top_pat(&mut self, pat: PatId, expected: &Ty) { self.infer_pat(pat, expected, BindingMode::default()); } @@ -263,7 +230,14 @@ impl InferenceContext<'_> { fn infer_pat(&mut self, pat: PatId, expected: &Ty, mut default_bm: BindingMode) -> Ty { let mut expected = self.resolve_ty_shallow(expected); - if self.is_non_ref_pat(self.body, pat) { + if matches!(&self.body[pat], Pat::Ref { .. }) || self.inside_assignment { + cov_mark::hit!(match_ergonomics_ref); + // When you encounter a `&pat` pattern, reset to Move. + // This is so that `w` is by value: `let (_, &w) = &(1, &2);` + // Destructuring assignments also reset the binding mode and + // don't do match ergonomics. + default_bm = BindingMode::Move; + } else if self.is_non_ref_pat(self.body, pat) { let mut pat_adjustments = Vec::new(); while let Some((inner, _lifetime, mutability)) = expected.as_reference() { pat_adjustments.push(expected.clone()); @@ -279,11 +253,6 @@ impl InferenceContext<'_> { pat_adjustments.shrink_to_fit(); self.result.pat_adjustments.insert(pat, pat_adjustments); } - } else if let Pat::Ref { .. } = &self.body[pat] { - cov_mark::hit!(match_ergonomics_ref); - // When you encounter a `&pat` pattern, reset to Move. - // This is so that `w` is by value: `let (_, &w) = &(1, &2);` - default_bm = BindingMode::Move; } // Lose mutability. 
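For reference, the binding-mode behavior these pattern-inference hunks encode can be reproduced in plain Rust. This is a minimal illustration written for this review (not taken from the patch): a `&pat` subpattern resets the default binding mode to by-value, and destructuring assignments skip match ergonomics entirely.

fn binding_mode_illustration() {
    // Match ergonomics: matching `&(1, &2)` with a non-reference pattern switches the
    // default binding mode to by-reference, but the explicit `&` in `&w` resets it,
    // so `w` is an `i32` bound by value (the example cited in the comment above).
    let (_, &w) = &(1, &2);
    let _: i32 = w;

    // Destructuring assignment: the left-hand side names existing places, so no new
    // bindings are created and match ergonomics does not apply.
    let (mut a, mut b) = (0, 0);
    (a, b) = (w, w + 1);
    assert_eq!((a, b), (2, 3));
}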
@@ -320,8 +289,34 @@ impl InferenceContext<'_> { self.infer_record_pat_like(p.as_deref(), &expected, default_bm, pat, subs) } Pat::Path(path) => { - // FIXME update resolver for the surrounding expression - self.infer_path(path, pat.into()).unwrap_or_else(|| self.err_ty()) + let ty = self.infer_path(path, pat.into()).unwrap_or_else(|| self.err_ty()); + let ty_inserted_vars = self.insert_type_vars_shallow(ty.clone()); + match self.table.coerce(&expected, &ty_inserted_vars, CoerceNever::Yes) { + Ok((adjustments, coerced_ty)) => { + if !adjustments.is_empty() { + self.result + .pat_adjustments + .entry(pat) + .or_default() + .extend(adjustments.into_iter().map(|adjust| adjust.target)); + } + self.write_pat_ty(pat, coerced_ty); + return self.pat_ty_after_adjustment(pat); + } + Err(_) => { + self.result.type_mismatches.insert( + pat.into(), + TypeMismatch { + expected: expected.clone(), + actual: ty_inserted_vars.clone(), + }, + ); + self.write_pat_ty(pat, ty); + // We return `expected` to prevent cascading errors. I guess an alternative is to + // not emit type mismatches for error types and emit an error type here. + return expected; + } + } } Pat::Bind { id, subpat } => { return self.infer_bind_pat(pat, *id, default_bm, *subpat, &expected); @@ -361,7 +356,40 @@ impl InferenceContext<'_> { None => self.err_ty(), }, Pat::ConstBlock(expr) => { - self.infer_expr(*expr, &Expectation::has_type(expected.clone()), ExprIsRead::Yes) + let old_inside_assign = std::mem::replace(&mut self.inside_assignment, false); + let result = self.infer_expr( + *expr, + &Expectation::has_type(expected.clone()), + ExprIsRead::Yes, + ); + self.inside_assignment = old_inside_assign; + result + } + Pat::Expr(expr) => { + let old_inside_assign = std::mem::replace(&mut self.inside_assignment, false); + // LHS of assignment doesn't constitute reads. + let result = self.infer_expr_coerce( + *expr, + &Expectation::has_type(expected.clone()), + ExprIsRead::No, + ); + // We are returning early to avoid the unifiability check below. + let lhs_ty = self.insert_type_vars_shallow(result); + let ty = match self.coerce(None, &expected, &lhs_ty, CoerceNever::Yes) { + Ok(ty) => ty, + Err(_) => { + self.result.type_mismatches.insert( + pat.into(), + TypeMismatch { expected: expected.clone(), actual: lhs_ty.clone() }, + ); + // `rhs_ty` is returned so no further type mismatches are + // reported because of this mismatch. + expected + } + }; + self.write_pat_ty(pat, ty.clone()); + self.inside_assignment = old_inside_assign; + return ty; } Pat::Missing => self.err_ty(), }; @@ -517,9 +545,12 @@ impl InferenceContext<'_> { body[*expr], Expr::Literal(Literal::String(..) | Literal::CString(..) | Literal::ByteString(..)) ), - Pat::Wild | Pat::Bind { .. } | Pat::Ref { .. } | Pat::Box { .. } | Pat::Missing => { - false - } + Pat::Wild + | Pat::Bind { .. } + | Pat::Ref { .. } + | Pat::Box { .. 
} + | Pat::Missing + | Pat::Expr(_) => false, } } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs index e4841c7b15b60..442daa9f9ee34 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs @@ -94,8 +94,7 @@ impl InferenceContext<'_> { return Some(ValuePathResolution::NonGeneric(ty)); }; - let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver, self.owner.into()); - let substs = ctx.substs_from_path(path, value_def, true); + let substs = self.with_body_ty_lowering(|ctx| ctx.substs_from_path(path, value_def, true)); let substs = substs.as_slice(Interner); if let ValueNs::EnumVariantId(_) = value { @@ -152,8 +151,12 @@ impl InferenceContext<'_> { let last = path.segments().last()?; // Don't use `self.make_ty()` here as we need `orig_ns`. - let ctx = - crate::lower::TyLoweringContext::new(self.db, &self.resolver, self.owner.into()); + let ctx = crate::lower::TyLoweringContext::new( + self.db, + &self.resolver, + &self.body.types, + self.owner.into(), + ); let (ty, orig_ns) = ctx.lower_ty_ext(type_ref); let ty = self.table.insert_type_vars(ty); let ty = self.table.normalize_associated_types_in(ty); @@ -164,9 +167,10 @@ impl InferenceContext<'_> { let ty = self.table.normalize_associated_types_in(ty); self.resolve_ty_assoc_item(ty, last.name, id).map(|(it, substs)| (it, Some(substs)))? } else { + let hygiene = self.body.expr_or_pat_path_hygiene(id); // FIXME: report error, unresolved first path segment let value_or_partial = - self.resolver.resolve_path_in_value_ns(self.db.upcast(), path)?; + self.resolver.resolve_path_in_value_ns(self.db.upcast(), path, hygiene)?; match value_or_partial { ResolveValueResult::ValueNs(it, _) => (it, None), @@ -218,7 +222,7 @@ impl InferenceContext<'_> { let _d; let (resolved_segment, remaining_segments) = match path { - Path::Normal { .. } => { + Path::Normal { .. 
} | Path::BarePath(_) => { assert!(remaining_index < path.segments().len()); ( path.segments().get(remaining_index - 1).unwrap(), @@ -242,17 +246,10 @@ impl InferenceContext<'_> { (TypeNs::TraitId(trait_), true) => { let segment = remaining_segments.last().expect("there should be at least one segment here"); - let ctx = crate::lower::TyLoweringContext::new( - self.db, - &self.resolver, - self.owner.into(), - ); - let trait_ref = ctx.lower_trait_ref_from_resolved_path( - trait_, - resolved_segment, - self.table.new_type_var(), - ); - + let self_ty = self.table.new_type_var(); + let trait_ref = self.with_body_ty_lowering(|ctx| { + ctx.lower_trait_ref_from_resolved_path(trait_, resolved_segment, self_ty) + }); self.resolve_trait_assoc_item(trait_ref, segment, id) } (def, _) => { @@ -262,17 +259,14 @@ impl InferenceContext<'_> { // as Iterator>::Item::default`) let remaining_segments_for_ty = remaining_segments.take(remaining_segments.len() - 1); - let ctx = crate::lower::TyLoweringContext::new( - self.db, - &self.resolver, - self.owner.into(), - ); - let (ty, _) = ctx.lower_partly_resolved_path( - def, - resolved_segment, - remaining_segments_for_ty, - true, - ); + let (ty, _) = self.with_body_ty_lowering(|ctx| { + ctx.lower_partly_resolved_path( + def, + resolved_segment, + remaining_segments_for_ty, + true, + ) + }); if ty.is_unknown() { return None; } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs index c7ed68448bb48..e3a92e52f61e5 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs @@ -34,6 +34,7 @@ use hir_def::{ resolver::{HasResolver, LifetimeNs, Resolver, TypeNs}, type_ref::{ ConstRef, LifetimeRef, TraitBoundModifier, TraitRef as HirTraitRef, TypeBound, TypeRef, + TypeRefId, TypesMap, TypesSourceMap, }, AdtId, AssocItemId, CallableDefId, ConstId, ConstParamId, DefWithBodyId, EnumId, EnumVariantId, FunctionId, GenericDefId, GenericParamId, HasModule, ImplId, InTypeConstLoc, ItemContainerId, @@ -41,7 +42,6 @@ use hir_def::{ TypeOwnerId, UnionId, VariantId, }; use hir_expand::{name::Name, ExpandResult}; -use intern::Interned; use la_arena::{Arena, ArenaMap}; use rustc_hash::FxHashSet; use rustc_pattern_analysis::Captures; @@ -122,6 +122,11 @@ pub struct TyLoweringContext<'a> { pub db: &'a dyn HirDatabase, resolver: &'a Resolver, generics: OnceCell>, + types_map: &'a TypesMap, + /// If this is set, that means we're in a context of a freshly expanded macro, and that means + /// we should not use `TypeRefId` in diagnostics because the caller won't have the `TypesMap`, + /// instead we need to put `TypeSource` from the source map. 
+ types_source_map: Option<&'a TypesSourceMap>, in_binders: DebruijnIndex, // FIXME: Should not be an `Option` but `Resolver` currently does not return owners in all cases // where expected @@ -138,13 +143,20 @@ pub struct TyLoweringContext<'a> { } impl<'a> TyLoweringContext<'a> { - pub fn new(db: &'a dyn HirDatabase, resolver: &'a Resolver, owner: TypeOwnerId) -> Self { - Self::new_maybe_unowned(db, resolver, Some(owner)) + pub fn new( + db: &'a dyn HirDatabase, + resolver: &'a Resolver, + types_map: &'a TypesMap, + owner: TypeOwnerId, + ) -> Self { + Self::new_maybe_unowned(db, resolver, types_map, None, Some(owner)) } pub fn new_maybe_unowned( db: &'a dyn HirDatabase, resolver: &'a Resolver, + types_map: &'a TypesMap, + types_source_map: Option<&'a TypesSourceMap>, owner: Option, ) -> Self { let impl_trait_mode = ImplTraitLoweringState::Disallowed; @@ -154,6 +166,8 @@ impl<'a> TyLoweringContext<'a> { db, resolver, generics: OnceCell::new(), + types_map, + types_source_map, owner, in_binders, impl_trait_mode, @@ -201,6 +215,16 @@ impl<'a> TyLoweringContext<'a> { pub fn with_type_param_mode(self, type_param_mode: ParamLoweringMode) -> Self { Self { type_param_mode, ..self } } + + pub fn impl_trait_mode(&mut self, impl_trait_mode: ImplTraitLoweringMode) -> &mut Self { + self.impl_trait_mode = ImplTraitLoweringState::new(impl_trait_mode); + self + } + + pub fn type_param_mode(&mut self, type_param_mode: ParamLoweringMode) -> &mut Self { + self.type_param_mode = type_param_mode; + self + } } #[derive(Copy, Clone, Debug, PartialEq, Eq)] @@ -230,7 +254,7 @@ pub enum ParamLoweringMode { } impl<'a> TyLoweringContext<'a> { - pub fn lower_ty(&self, type_ref: &TypeRef) -> Ty { + pub fn lower_ty(&self, type_ref: TypeRefId) -> Ty { self.lower_ty_ext(type_ref).0 } @@ -254,12 +278,13 @@ impl<'a> TyLoweringContext<'a> { .as_ref() } - pub fn lower_ty_ext(&self, type_ref: &TypeRef) -> (Ty, Option) { + pub fn lower_ty_ext(&self, type_ref_id: TypeRefId) -> (Ty, Option) { let mut res = None; + let type_ref = &self.types_map[type_ref_id]; let ty = match type_ref { TypeRef::Never => TyKind::Never.intern(Interner), TypeRef::Tuple(inner) => { - let inner_tys = inner.iter().map(|tr| self.lower_ty(tr)); + let inner_tys = inner.iter().map(|&tr| self.lower_ty(tr)); TyKind::Tuple(inner_tys.len(), Substitution::from_iter(Interner, inner_tys)) .intern(Interner) } @@ -268,38 +293,43 @@ impl<'a> TyLoweringContext<'a> { res = res_; ty } - TypeRef::RawPtr(inner, mutability) => { + &TypeRef::RawPtr(inner, mutability) => { let inner_ty = self.lower_ty(inner); - TyKind::Raw(lower_to_chalk_mutability(*mutability), inner_ty).intern(Interner) + TyKind::Raw(lower_to_chalk_mutability(mutability), inner_ty).intern(Interner) } - TypeRef::Array(inner, len) => { - let inner_ty = self.lower_ty(inner); - let const_len = self.lower_const(len, TyBuilder::usize()); + TypeRef::Array(array) => { + let inner_ty = self.lower_ty(array.ty); + let const_len = self.lower_const(&array.len, TyBuilder::usize()); TyKind::Array(inner_ty, const_len).intern(Interner) } - TypeRef::Slice(inner) => { + &TypeRef::Slice(inner) => { let inner_ty = self.lower_ty(inner); TyKind::Slice(inner_ty).intern(Interner) } - TypeRef::Reference(inner, lifetime, mutability) => { - let inner_ty = self.lower_ty(inner); + TypeRef::Reference(ref_) => { + let inner_ty = self.lower_ty(ref_.ty); // FIXME: It should infer the eldided lifetimes instead of stubbing with static - let lifetime = - lifetime.as_ref().map_or_else(error_lifetime, |lr| self.lower_lifetime(lr)); - 
TyKind::Ref(lower_to_chalk_mutability(*mutability), lifetime, inner_ty) + let lifetime = ref_ + .lifetime + .as_ref() + .map_or_else(error_lifetime, |lr| self.lower_lifetime(lr)); + TyKind::Ref(lower_to_chalk_mutability(ref_.mutability), lifetime, inner_ty) .intern(Interner) } TypeRef::Placeholder => TyKind::Error.intern(Interner), - &TypeRef::Fn(ref params, variadic, is_unsafe, ref abi) => { + TypeRef::Fn(fn_) => { let substs = self.with_shifted_in(DebruijnIndex::ONE, |ctx| { - Substitution::from_iter(Interner, params.iter().map(|(_, tr)| ctx.lower_ty(tr))) + Substitution::from_iter( + Interner, + fn_.params().iter().map(|&(_, tr)| ctx.lower_ty(tr)), + ) }); TyKind::Function(FnPointer { num_binders: 0, // FIXME lower `for<'a> fn()` correctly sig: FnSig { - abi: abi.as_ref().map_or(FnAbi::Rust, FnAbi::from_symbol), - safety: if is_unsafe { Safety::Unsafe } else { Safety::Safe }, - variadic, + abi: fn_.abi().as_ref().map_or(FnAbi::Rust, FnAbi::from_symbol), + safety: if fn_.is_unsafe() { Safety::Unsafe } else { Safety::Safe }, + variadic: fn_.is_varargs(), }, substitution: FnSubst(substs), }) @@ -351,8 +381,8 @@ impl<'a> TyLoweringContext<'a> { ImplTraitLoweringState::Param(counter) => { let idx = counter.get(); // Count the number of `impl Trait` things that appear within our bounds. - // Since t hose have been emitted as implicit type args already. - counter.set(idx + count_impl_traits(type_ref) as u16); + // Since those have been emitted as implicit type args already. + counter.set(idx + self.count_impl_traits(type_ref_id) as u16); let kind = self .generics() .expect("param impl trait lowering must be in a generic def") @@ -376,7 +406,7 @@ impl<'a> TyLoweringContext<'a> { let idx = counter.get(); // Count the number of `impl Trait` things that appear within our bounds. // Since t hose have been emitted as implicit type args already. - counter.set(idx + count_impl_traits(type_ref) as u16); + counter.set(idx + self.count_impl_traits(type_ref_id) as u16); let kind = self .generics() .expect("variable impl trait lowering must be in a generic def") @@ -432,12 +462,40 @@ impl<'a> TyLoweringContext<'a> { match expander.enter_expand::(self.db.upcast(), macro_call, resolver) { Ok(ExpandResult { value: Some((mark, expanded)), .. }) => { - let ctx = expander.ctx(self.db.upcast()); + let (mut types_map, mut types_source_map) = + (TypesMap::default(), TypesSourceMap::default()); + + let ctx = expander.ctx( + self.db.upcast(), + &mut types_map, + &mut types_source_map, + ); // FIXME: Report syntax errors in expansion here let type_ref = TypeRef::from_ast(&ctx, expanded.tree()); drop(expander); - let ty = self.lower_ty(&type_ref); + + // FIXME: That may be better served by mutating `self` then restoring, but this requires + // making it `&mut self`. 
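+ // The nested context borrows the macro expansion's freshly built `types_map` and
+ // `types_source_map`, and temporarily takes the expander, impl-trait lowering state,
+ // and unsized-type tracking out of `self`; they are handed back below once the
+ // expanded type has been lowered, so lowering can recurse with only `&self`.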
+ let inner_ctx = TyLoweringContext { + db: self.db, + resolver: self.resolver, + generics: self.generics.clone(), + types_map: &types_map, + types_source_map: Some(&types_source_map), + in_binders: self.in_binders, + owner: self.owner, + type_param_mode: self.type_param_mode, + impl_trait_mode: self.impl_trait_mode.take(), + expander: RefCell::new(self.expander.take()), + unsized_types: RefCell::new(self.unsized_types.take()), + }; + + let ty = inner_ctx.lower_ty(type_ref); + + self.impl_trait_mode.swap(&inner_ctx.impl_trait_mode); + *self.expander.borrow_mut() = inner_ctx.expander.into_inner(); + *self.unsized_types.borrow_mut() = inner_ctx.unsized_types.into_inner(); self.expander.borrow_mut().as_mut().unwrap().exit(mark); Some(ty) @@ -463,7 +521,8 @@ impl<'a> TyLoweringContext<'a> { /// This is only for `generic_predicates_for_param`, where we can't just /// lower the self types of the predicates since that could lead to cycles. /// So we just check here if the `type_ref` resolves to a generic param, and which. - fn lower_ty_only_param(&self, type_ref: &TypeRef) -> Option { + fn lower_ty_only_param(&self, type_ref: TypeRefId) -> Option { + let type_ref = &self.types_map[type_ref]; let path = match type_ref { TypeRef::Path(path) => path, _ => return None, @@ -663,7 +722,7 @@ impl<'a> TyLoweringContext<'a> { if matches!(resolution, TypeNs::TraitId(_)) && remaining_index.is_none() { // trait object type without dyn let bound = TypeBound::Path(path.clone(), TraitBoundModifier::None); - let ty = self.lower_dyn_trait(&[Interned::new(bound)]); + let ty = self.lower_dyn_trait(&[bound]); return (ty, None); } @@ -864,7 +923,7 @@ impl<'a> TyLoweringContext<'a> { assert!(matches!(id, GenericParamId::TypeParamId(_))); had_explicit_args = true; if let GenericArg::Type(ty) = &args[0] { - substs.push(self.lower_ty(ty).cast(Interner)); + substs.push(self.lower_ty(*ty).cast(Interner)); } } } else { @@ -901,6 +960,7 @@ impl<'a> TyLoweringContext<'a> { id, arg, &mut (), + self.types_map, |_, type_ref| self.lower_ty(type_ref), |_, const_ref, ty| self.lower_const(const_ref, ty), |_, lifetime_ref| self.lower_lifetime(lifetime_ref), @@ -998,7 +1058,7 @@ impl<'a> TyLoweringContext<'a> { WherePredicate::ForLifetime { target, bound, .. 
} | WherePredicate::TypeBound { target, bound } => { let self_ty = match target { - WherePredicateTypeTarget::TypeRef(type_ref) => self.lower_ty(type_ref), + WherePredicateTypeTarget::TypeRef(type_ref) => self.lower_ty(*type_ref), &WherePredicateTypeTarget::TypeOrConstParam(local_id) => { let param_id = hir_def::TypeOrConstParamId { parent: def, local_id }; match self.type_param_mode { @@ -1029,12 +1089,12 @@ impl<'a> TyLoweringContext<'a> { pub(crate) fn lower_type_bound( &'a self, - bound: &'a Interned, + bound: &'a TypeBound, self_ty: Ty, ignore_bindings: bool, ) -> impl Iterator + 'a { let mut trait_ref = None; - let clause = match bound.as_ref() { + let clause = match bound { TypeBound::Path(path, TraitBoundModifier::None) => { trait_ref = self.lower_trait_ref_from_path(path, self_ty); trait_ref.clone().map(WhereClause::Implemented).map(crate::wrap_empty_binders) @@ -1067,7 +1127,7 @@ impl<'a> TyLoweringContext<'a> { lifetime, }))) } - TypeBound::Error => None, + TypeBound::Use(_) | TypeBound::Error => None, }; clause.into_iter().chain( trait_ref @@ -1079,14 +1139,15 @@ impl<'a> TyLoweringContext<'a> { fn assoc_type_bindings_from_type_bound( &'a self, - bound: &'a Interned, + bound: &'a TypeBound, trait_ref: TraitRef, ) -> impl Iterator + 'a { - let last_segment = match bound.as_ref() { + let last_segment = match bound { TypeBound::Path(path, TraitBoundModifier::None) | TypeBound::ForLifetime(_, path) => { path.segments().last() } TypeBound::Path(_, TraitBoundModifier::Maybe) + | TypeBound::Use(_) | TypeBound::Error | TypeBound::Lifetime(_) => None, }; @@ -1110,7 +1171,7 @@ impl<'a> TyLoweringContext<'a> { // this point (`super_trait_ref.substitution`). let substitution = self.substs_from_path_segment( // FIXME: This is hack. We shouldn't really build `PathSegment` directly. - PathSegment { name: &binding.name, args_and_bindings: binding.args.as_deref() }, + PathSegment { name: &binding.name, args_and_bindings: binding.args.as_ref() }, Some(associated_ty.into()), false, // this is not relevant Some(super_trait_ref.self_type_parameter(Interner)), @@ -1130,8 +1191,8 @@ impl<'a> TyLoweringContext<'a> { let mut predicates: SmallVec<[_; 1]> = SmallVec::with_capacity( binding.type_ref.as_ref().map_or(0, |_| 1) + binding.bounds.len(), ); - if let Some(type_ref) = &binding.type_ref { - match (type_ref, &self.impl_trait_mode) { + if let Some(type_ref) = binding.type_ref { + match (&self.types_map[type_ref], &self.impl_trait_mode) { (TypeRef::ImplTrait(_), ImplTraitLoweringState::Disallowed) => (), ( _, @@ -1178,6 +1239,8 @@ impl<'a> TyLoweringContext<'a> { let mut ext = TyLoweringContext::new_maybe_unowned( self.db, self.resolver, + self.types_map, + self.types_source_map, self.owner, ) .with_type_param_mode(self.type_param_mode); @@ -1215,7 +1278,7 @@ impl<'a> TyLoweringContext<'a> { }) } - fn lower_dyn_trait(&self, bounds: &[Interned]) -> Ty { + fn lower_dyn_trait(&self, bounds: &[TypeBound]) -> Ty { let self_ty = TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, 0)).intern(Interner); // INVARIANT: The principal trait bound, if present, must come first. 
Others may be in any // order but should be in the same order for the same set but possibly different order of @@ -1313,7 +1376,7 @@ impl<'a> TyLoweringContext<'a> { } } - fn lower_impl_trait(&self, bounds: &[Interned], krate: CrateId) -> ImplTrait { + fn lower_impl_trait(&self, bounds: &[TypeBound], krate: CrateId) -> ImplTrait { cov_mark::hit!(lower_rpit); let self_ty = TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, 0)).intern(Interner); let predicates = self.with_shifted_in(DebruijnIndex::ONE, |ctx| { @@ -1365,6 +1428,17 @@ impl<'a> TyLoweringContext<'a> { None => error_lifetime(), } } + + // FIXME: This does not handle macros! + fn count_impl_traits(&self, type_ref: TypeRefId) -> usize { + let mut count = 0; + TypeRef::walk(type_ref, self.types_map, &mut |type_ref| { + if matches!(type_ref, TypeRef::ImplTrait(_)) { + count += 1; + } + }); + count + } } /// Build the signature of a callable item (function, struct or enum variant). @@ -1385,17 +1459,6 @@ pub fn associated_type_shorthand_candidates( named_associated_type_shorthand_candidates(db, def, res, None, |name, _, id| cb(name, id)) } -// FIXME: This does not handle macros! -fn count_impl_traits(type_ref: &TypeRef) -> usize { - let mut count = 0; - type_ref.walk(&mut |type_ref| { - if matches!(type_ref, TypeRef::ImplTrait(_)) { - count += 1; - } - }); - count -} - fn named_associated_type_shorthand_candidates( db: &dyn HirDatabase, // If the type parameter is defined in an impl and we're in a method, there @@ -1499,10 +1562,10 @@ pub(crate) fn field_types_query( }; let generics = generics(db.upcast(), def); let mut res = ArenaMap::default(); - let ctx = TyLoweringContext::new(db, &resolver, def.into()) + let ctx = TyLoweringContext::new(db, &resolver, var_data.types_map(), def.into()) .with_type_param_mode(ParamLoweringMode::Variable); for (field_id, field_data) in var_data.fields().iter() { - res.insert(field_id, make_binders(db, &generics, ctx.lower_ty(&field_data.type_ref))); + res.insert(field_id, make_binders(db, &generics, ctx.lower_ty(field_data.type_ref))); } Arc::new(res) } @@ -1522,38 +1585,38 @@ pub(crate) fn generic_predicates_for_param_query( assoc_name: Option, ) -> GenericPredicates { let resolver = def.resolver(db.upcast()); - let ctx = if let GenericDefId::FunctionId(_) = def { - TyLoweringContext::new(db, &resolver, def.into()) + let mut ctx = if let GenericDefId::FunctionId(_) = def { + TyLoweringContext::new(db, &resolver, TypesMap::EMPTY, def.into()) .with_impl_trait_mode(ImplTraitLoweringMode::Variable) .with_type_param_mode(ParamLoweringMode::Variable) } else { - TyLoweringContext::new(db, &resolver, def.into()) + TyLoweringContext::new(db, &resolver, TypesMap::EMPTY, def.into()) .with_type_param_mode(ParamLoweringMode::Variable) }; let generics = generics(db.upcast(), def); // we have to filter out all other predicates *first*, before attempting to lower them - let predicate = |(pred, &def): &(&_, _)| match pred { + let predicate = |pred: &_, def: &_, ctx: &TyLoweringContext<'_>| match pred { WherePredicate::ForLifetime { target, bound, .. } | WherePredicate::TypeBound { target, bound, .. 
} => { let invalid_target = match target { WherePredicateTypeTarget::TypeRef(type_ref) => { - ctx.lower_ty_only_param(type_ref) != Some(param_id) + ctx.lower_ty_only_param(*type_ref) != Some(param_id) } &WherePredicateTypeTarget::TypeOrConstParam(local_id) => { - let target_id = TypeOrConstParamId { parent: def, local_id }; + let target_id = TypeOrConstParamId { parent: *def, local_id }; target_id != param_id } }; if invalid_target { // If this is filtered out without lowering, `?Sized` is not gathered into `ctx.unsized_types` - if let TypeBound::Path(_, TraitBoundModifier::Maybe) = &**bound { - ctx.lower_where_predicate(pred, &def, true).for_each(drop); + if let TypeBound::Path(_, TraitBoundModifier::Maybe) = bound { + ctx.lower_where_predicate(pred, def, true).for_each(drop); } return false; } - match &**bound { + match bound { TypeBound::ForLifetime(_, path) | TypeBound::Path(path, _) => { // Only lower the bound if the trait could possibly define the associated // type we're looking for. @@ -1571,18 +1634,20 @@ pub(crate) fn generic_predicates_for_param_query( }) }) } - TypeBound::Lifetime(_) | TypeBound::Error => false, + TypeBound::Use(_) | TypeBound::Lifetime(_) | TypeBound::Error => false, } } WherePredicate::Lifetime { .. } => false, }; - let mut predicates: Vec<_> = resolver - .where_predicates_in_scope() - .filter(predicate) - .flat_map(|(pred, def)| { - ctx.lower_where_predicate(pred, def, true).map(|p| make_binders(db, &generics, p)) - }) - .collect(); + let mut predicates = Vec::new(); + for (params, def) in resolver.all_generic_params() { + ctx.types_map = ¶ms.types_map; + predicates.extend( + params.where_predicates().filter(|pred| predicate(pred, def, &ctx)).flat_map(|pred| { + ctx.lower_where_predicate(pred, def, true).map(|p| make_binders(db, &generics, p)) + }), + ); + } let subst = generics.bound_vars_subst(db, DebruijnIndex::INNERMOST); if !subst.is_empty(Interner) { @@ -1629,23 +1694,27 @@ pub(crate) fn trait_environment_query( def: GenericDefId, ) -> Arc { let resolver = def.resolver(db.upcast()); - let ctx = if let GenericDefId::FunctionId(_) = def { - TyLoweringContext::new(db, &resolver, def.into()) + let mut ctx = if let GenericDefId::FunctionId(_) = def { + TyLoweringContext::new(db, &resolver, TypesMap::EMPTY, def.into()) .with_impl_trait_mode(ImplTraitLoweringMode::Param) .with_type_param_mode(ParamLoweringMode::Placeholder) } else { - TyLoweringContext::new(db, &resolver, def.into()) + TyLoweringContext::new(db, &resolver, TypesMap::EMPTY, def.into()) .with_type_param_mode(ParamLoweringMode::Placeholder) }; let mut traits_in_scope = Vec::new(); let mut clauses = Vec::new(); - for (pred, def) in resolver.where_predicates_in_scope() { - for pred in ctx.lower_where_predicate(pred, def, false) { - if let WhereClause::Implemented(tr) = &pred.skip_binders() { - traits_in_scope.push((tr.self_type_parameter(Interner).clone(), tr.hir_trait_id())); + for (params, def) in resolver.all_generic_params() { + ctx.types_map = ¶ms.types_map; + for pred in params.where_predicates() { + for pred in ctx.lower_where_predicate(pred, def, false) { + if let WhereClause::Implemented(tr) = pred.skip_binders() { + traits_in_scope + .push((tr.self_type_parameter(Interner).clone(), tr.hir_trait_id())); + } + let program_clause: chalk_ir::ProgramClause = pred.cast(Interner); + clauses.push(program_clause.into_from_env_clause(Interner)); } - let program_clause: chalk_ir::ProgramClause = pred.cast(Interner); - clauses.push(program_clause.into_from_env_clause(Interner)); } } @@ -1724,18 
+1793,20 @@ where } _ => (ImplTraitLoweringMode::Disallowed, ParamLoweringMode::Variable), }; - let ctx = TyLoweringContext::new(db, &resolver, def.into()) + let mut ctx = TyLoweringContext::new(db, &resolver, TypesMap::EMPTY, def.into()) .with_impl_trait_mode(impl_trait_lowering) .with_type_param_mode(param_lowering); let generics = generics(db.upcast(), def); - let mut predicates = resolver - .where_predicates_in_scope() - .filter(|(pred, def)| filter(pred, def)) - .flat_map(|(pred, def)| { - ctx.lower_where_predicate(pred, def, false).map(|p| make_binders(db, &generics, p)) - }) - .collect::>(); + let mut predicates = Vec::new(); + for (params, def) in resolver.all_generic_params() { + ctx.types_map = ¶ms.types_map; + predicates.extend(params.where_predicates().filter(|pred| filter(pred, def)).flat_map( + |pred| { + ctx.lower_where_predicate(pred, def, false).map(|p| make_binders(db, &generics, p)) + }, + )); + } if generics.len() > 0 { let subst = generics.bound_vars_subst(db, DebruijnIndex::INNERMOST); @@ -1811,18 +1882,19 @@ pub(crate) fn generic_defaults_query(db: &dyn HirDatabase, def: GenericDefId) -> let resolver = def.resolver(db.upcast()); let parent_start_idx = generic_params.len_self(); - let ctx = TyLoweringContext::new(db, &resolver, def.into()) + let mut ctx = TyLoweringContext::new(db, &resolver, TypesMap::EMPTY, def.into()) .with_impl_trait_mode(ImplTraitLoweringMode::Disallowed) .with_type_param_mode(ParamLoweringMode::Variable); - GenericDefaults(Some(Arc::from_iter(generic_params.iter().enumerate().map( - |(idx, (id, p))| { + GenericDefaults(Some(Arc::from_iter(generic_params.iter_with_types_map().enumerate().map( + |(idx, ((id, p), types_map))| { + ctx.types_map = types_map; match p { GenericParamDataRef::TypeParamData(p) => { let ty = p.default.as_ref().map_or(TyKind::Error.intern(Interner), |ty| { // Each default can only refer to previous parameters. 
// Type variable default referring to parameter coming // after it is forbidden (FIXME: report diagnostic) - fallback_bound_vars(ctx.lower_ty(ty), idx, parent_start_idx) + fallback_bound_vars(ctx.lower_ty(*ty), idx, parent_start_idx) }); crate::make_binders(db, &generic_params, ty.cast(Interner)) } @@ -1834,7 +1906,7 @@ pub(crate) fn generic_defaults_query(db: &dyn HirDatabase, def: GenericDefId) -> let mut val = p.default.as_ref().map_or_else( || unknown_const_as_generic(db.const_param_ty(id)), |c| { - let c = ctx.lower_const(c, ctx.lower_ty(&p.ty)); + let c = ctx.lower_const(c, ctx.lower_ty(p.ty)); c.cast(Interner) }, ); @@ -1874,14 +1946,14 @@ pub(crate) fn generic_defaults_recover( fn fn_sig_for_fn(db: &dyn HirDatabase, def: FunctionId) -> PolyFnSig { let data = db.function_data(def); let resolver = def.resolver(db.upcast()); - let ctx_params = TyLoweringContext::new(db, &resolver, def.into()) + let ctx_params = TyLoweringContext::new(db, &resolver, &data.types_map, def.into()) .with_impl_trait_mode(ImplTraitLoweringMode::Variable) .with_type_param_mode(ParamLoweringMode::Variable); - let params = data.params.iter().map(|tr| ctx_params.lower_ty(tr)); - let ctx_ret = TyLoweringContext::new(db, &resolver, def.into()) + let params = data.params.iter().map(|&tr| ctx_params.lower_ty(tr)); + let ctx_ret = TyLoweringContext::new(db, &resolver, &data.types_map, def.into()) .with_impl_trait_mode(ImplTraitLoweringMode::Opaque) .with_type_param_mode(ParamLoweringMode::Variable); - let ret = ctx_ret.lower_ty(&data.ret_type); + let ret = ctx_ret.lower_ty(data.ret_type); let generics = generics(db.upcast(), def.into()); let sig = CallableSig::from_params_and_return( params, @@ -1910,28 +1982,33 @@ fn type_for_const(db: &dyn HirDatabase, def: ConstId) -> Binders { let data = db.const_data(def); let generics = generics(db.upcast(), def.into()); let resolver = def.resolver(db.upcast()); - let ctx = TyLoweringContext::new(db, &resolver, def.into()) + let ctx = TyLoweringContext::new(db, &resolver, &data.types_map, def.into()) .with_type_param_mode(ParamLoweringMode::Variable); - make_binders(db, &generics, ctx.lower_ty(&data.type_ref)) + make_binders(db, &generics, ctx.lower_ty(data.type_ref)) } /// Build the declared type of a static. 
fn type_for_static(db: &dyn HirDatabase, def: StaticId) -> Binders { let data = db.static_data(def); let resolver = def.resolver(db.upcast()); - let ctx = TyLoweringContext::new(db, &resolver, def.into()); + let ctx = TyLoweringContext::new(db, &resolver, &data.types_map, def.into()); - Binders::empty(Interner, ctx.lower_ty(&data.type_ref)) + Binders::empty(Interner, ctx.lower_ty(data.type_ref)) } fn fn_sig_for_struct_constructor(db: &dyn HirDatabase, def: StructId) -> PolyFnSig { let struct_data = db.struct_data(def); let fields = struct_data.variant_data.fields(); let resolver = def.resolver(db.upcast()); - let ctx = TyLoweringContext::new(db, &resolver, AdtId::from(def).into()) - .with_type_param_mode(ParamLoweringMode::Variable); - let params = fields.iter().map(|(_, field)| ctx.lower_ty(&field.type_ref)); + let ctx = TyLoweringContext::new( + db, + &resolver, + struct_data.variant_data.types_map(), + AdtId::from(def).into(), + ) + .with_type_param_mode(ParamLoweringMode::Variable); + let params = fields.iter().map(|(_, field)| ctx.lower_ty(field.type_ref)); let (ret, binders) = type_for_adt(db, def.into()).into_value_and_skipped_binders(); Binders::new( binders, @@ -1961,9 +2038,14 @@ fn fn_sig_for_enum_variant_constructor(db: &dyn HirDatabase, def: EnumVariantId) let var_data = db.enum_variant_data(def); let fields = var_data.variant_data.fields(); let resolver = def.resolver(db.upcast()); - let ctx = TyLoweringContext::new(db, &resolver, DefWithBodyId::VariantId(def).into()) - .with_type_param_mode(ParamLoweringMode::Variable); - let params = fields.iter().map(|(_, field)| ctx.lower_ty(&field.type_ref)); + let ctx = TyLoweringContext::new( + db, + &resolver, + var_data.variant_data.types_map(), + DefWithBodyId::VariantId(def).into(), + ) + .with_type_param_mode(ParamLoweringMode::Variable); + let params = fields.iter().map(|(_, field)| ctx.lower_ty(field.type_ref)); let (ret, binders) = type_for_adt(db, def.lookup(db.upcast()).parent.into()).into_value_and_skipped_binders(); Binders::new( @@ -2004,15 +2086,17 @@ fn type_for_adt(db: &dyn HirDatabase, adt: AdtId) -> Binders { fn type_for_type_alias(db: &dyn HirDatabase, t: TypeAliasId) -> Binders { let generics = generics(db.upcast(), t.into()); let resolver = t.resolver(db.upcast()); - let ctx = TyLoweringContext::new(db, &resolver, t.into()) + let type_alias_data = db.type_alias_data(t); + let ctx = TyLoweringContext::new(db, &resolver, &type_alias_data.types_map, t.into()) .with_impl_trait_mode(ImplTraitLoweringMode::Opaque) .with_type_param_mode(ParamLoweringMode::Variable); - let type_alias_data = db.type_alias_data(t); let inner = if type_alias_data.is_extern { TyKind::Foreign(crate::to_foreign_def_id(t)).intern(Interner) } else { - let type_ref = &type_alias_data.type_ref; - ctx.lower_ty(type_ref.as_deref().unwrap_or(&TypeRef::Error)) + type_alias_data + .type_ref + .map(|type_ref| ctx.lower_ty(type_ref)) + .unwrap_or_else(|| TyKind::Error.intern(Interner)) }; make_binders(db, &generics, inner) } @@ -2085,9 +2169,9 @@ pub(crate) fn impl_self_ty_query(db: &dyn HirDatabase, impl_id: ImplId) -> Binde let impl_data = db.impl_data(impl_id); let resolver = impl_id.resolver(db.upcast()); let generics = generics(db.upcast(), impl_id.into()); - let ctx = TyLoweringContext::new(db, &resolver, impl_id.into()) + let ctx = TyLoweringContext::new(db, &resolver, &impl_data.types_map, impl_id.into()) .with_type_param_mode(ParamLoweringMode::Variable); - make_binders(db, &generics, ctx.lower_ty(&impl_data.self_ty)) + make_binders(db, &generics, 
ctx.lower_ty(impl_data.self_ty)) } // returns None if def is a type arg @@ -2095,13 +2179,13 @@ pub(crate) fn const_param_ty_query(db: &dyn HirDatabase, def: ConstParamId) -> T let parent_data = db.generic_params(def.parent()); let data = &parent_data[def.local_id()]; let resolver = def.parent().resolver(db.upcast()); - let ctx = TyLoweringContext::new(db, &resolver, def.parent().into()); + let ctx = TyLoweringContext::new(db, &resolver, &parent_data.types_map, def.parent().into()); match data { TypeOrConstParamData::TypeParamData(_) => { never!(); Ty::new(Interner, TyKind::Error) } - TypeOrConstParamData::ConstParamData(d) => ctx.lower_ty(&d.ty), + TypeOrConstParamData::ConstParamData(d) => ctx.lower_ty(d.ty), } } @@ -2117,7 +2201,7 @@ pub(crate) fn impl_self_ty_recover( pub(crate) fn impl_trait_query(db: &dyn HirDatabase, impl_id: ImplId) -> Option> { let impl_data = db.impl_data(impl_id); let resolver = impl_id.resolver(db.upcast()); - let ctx = TyLoweringContext::new(db, &resolver, impl_id.into()) + let ctx = TyLoweringContext::new(db, &resolver, &impl_data.types_map, impl_id.into()) .with_type_param_mode(ParamLoweringMode::Variable); let (self_ty, binders) = db.impl_self_ty(impl_id).into_value_and_skipped_binders(); let target_trait = impl_data.target_trait.as_ref()?; @@ -2131,10 +2215,10 @@ pub(crate) fn return_type_impl_traits( // FIXME unify with fn_sig_for_fn instead of doing lowering twice, maybe let data = db.function_data(def); let resolver = def.resolver(db.upcast()); - let ctx_ret = TyLoweringContext::new(db, &resolver, def.into()) + let ctx_ret = TyLoweringContext::new(db, &resolver, &data.types_map, def.into()) .with_impl_trait_mode(ImplTraitLoweringMode::Opaque) .with_type_param_mode(ParamLoweringMode::Variable); - let _ret = ctx_ret.lower_ty(&data.ret_type); + let _ret = ctx_ret.lower_ty(data.ret_type); let generics = generics(db.upcast(), def.into()); let return_type_impl_traits = ImplTraits { impl_traits: match ctx_ret.impl_trait_mode { @@ -2155,10 +2239,10 @@ pub(crate) fn type_alias_impl_traits( ) -> Option>> { let data = db.type_alias_data(def); let resolver = def.resolver(db.upcast()); - let ctx = TyLoweringContext::new(db, &resolver, def.into()) + let ctx = TyLoweringContext::new(db, &resolver, &data.types_map, def.into()) .with_impl_trait_mode(ImplTraitLoweringMode::Opaque) .with_type_param_mode(ParamLoweringMode::Variable); - if let Some(type_ref) = &data.type_ref { + if let Some(type_ref) = data.type_ref { let _ty = ctx.lower_ty(type_ref); } let type_alias_impl_traits = ImplTraits { @@ -2190,7 +2274,8 @@ pub(crate) fn generic_arg_to_chalk<'a, T>( kind_id: GenericParamId, arg: &'a GenericArg, this: &mut T, - for_type: impl FnOnce(&mut T, &TypeRef) -> Ty + 'a, + types_map: &TypesMap, + for_type: impl FnOnce(&mut T, TypeRefId) -> Ty + 'a, for_const: impl FnOnce(&mut T, &ConstRef, Ty) -> Const + 'a, for_lifetime: impl FnOnce(&mut T, &LifetimeRef) -> Lifetime + 'a, ) -> crate::GenericArg { @@ -2203,7 +2288,7 @@ pub(crate) fn generic_arg_to_chalk<'a, T>( GenericParamId::LifetimeParamId(_) => ParamKind::Lifetime, }; match (arg, kind) { - (GenericArg::Type(type_ref), ParamKind::Type) => for_type(this, type_ref).cast(Interner), + (GenericArg::Type(type_ref), ParamKind::Type) => for_type(this, *type_ref).cast(Interner), (GenericArg::Const(c), ParamKind::Const(c_ty)) => for_const(this, c, c_ty).cast(Interner), (GenericArg::Lifetime(lifetime_ref), ParamKind::Lifetime) => { for_lifetime(this, lifetime_ref).cast(Interner) @@ -2214,7 +2299,7 @@ pub(crate) fn 
generic_arg_to_chalk<'a, T>( // We want to recover simple idents, which parser detects them // as types. Maybe here is not the best place to do it, but // it works. - if let TypeRef::Path(p) = t { + if let TypeRef::Path(p) = &types_map[*t] { if let Some(p) = p.mod_path() { if p.kind == PathKind::Plain { if let [n] = p.segments() { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs index 8e815aabf2070..59c583afb2a85 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs @@ -879,7 +879,8 @@ pub enum Rvalue { /// /// **Needs clarification**: Are there weird additional semantics here related to the runtime /// nature of this operation? - //ThreadLocalRef(DefId), + // ThreadLocalRef(DefId), + ThreadLocalRef(std::convert::Infallible), /// Creates a pointer with the indicated mutability to the place. /// @@ -888,7 +889,8 @@ pub enum Rvalue { /// /// Like with references, the semantics of this operation are heavily dependent on the aliasing /// model. - //AddressOf(Mutability, Place), + // AddressOf(Mutability, Place), + AddressOf(std::convert::Infallible), /// Yields the length of the place, as a `usize`. /// @@ -906,19 +908,21 @@ pub enum Rvalue { Cast(CastKind, Operand, Ty), // FIXME link to `pointer::offset` when it hits stable. - // /// * `Offset` has the same semantics as `pointer::offset`, except that the second - // /// parameter may be a `usize` as well. - // /// * The comparison operations accept `bool`s, `char`s, signed or unsigned integers, floats, - // /// raw pointers, or function pointers and return a `bool`. The types of the operands must be - // /// matching, up to the usual caveat of the lifetimes in function pointers. - // /// * Left and right shift operations accept signed or unsigned integers not necessarily of the - // /// same type and return a value of the same type as their LHS. Like in Rust, the RHS is - // /// truncated as needed. - // /// * The `Bit*` operations accept signed integers, unsigned integers, or bools with matching - // /// types and return a value of that type. - // /// * The remaining operations accept signed integers, unsigned integers, or floats with - // /// matching types and return a value of that type. + /// * `Offset` has the same semantics as `pointer::offset`, except that the second + /// parameter may be a `usize` as well. + /// * The comparison operations accept `bool`s, `char`s, signed or unsigned integers, floats, + /// raw pointers, or function pointers and return a `bool`. The types of the operands must be + /// matching, up to the usual caveat of the lifetimes in function pointers. + /// * Left and right shift operations accept signed or unsigned integers not necessarily of the + /// same type and return a value of the same type as their LHS. Like in Rust, the RHS is + /// truncated as needed. + /// * The `Bit*` operations accept signed integers, unsigned integers, or bools with matching + /// types and return a value of that type. + /// * The remaining operations accept signed integers, unsigned integers, or floats with + /// matching types and return a value of that type. //BinaryOp(BinOp, Box<(Operand, Operand)>), + BinaryOp(std::convert::Infallible), + /// Same as `BinaryOp`, but yields `(T, bool)` with a `bool` indicating an error condition. 
/// /// When overflow checking is disabled and we are generating run-time code, the error condition @@ -937,6 +941,7 @@ pub enum Rvalue { /// Computes a value as described by the operation. //NullaryOp(NullOp, Ty), + NullaryOp(std::convert::Infallible), /// Exactly like `BinaryOp`, but less operands. /// @@ -1095,6 +1100,10 @@ impl MirBody { for_operand(op, &mut f, &mut self.projection_store); } } + Rvalue::ThreadLocalRef(n) + | Rvalue::AddressOf(n) + | Rvalue::BinaryOp(n) + | Rvalue::NullaryOp(n) => match *n {}, } } StatementKind::FakeRead(p) | StatementKind::Deinit(p) => { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs index 9830fa1ca7b73..9c86d3b59f6d9 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs @@ -167,6 +167,10 @@ fn moved_out_of_ref(db: &dyn HirDatabase, body: &MirBody) -> Vec for_operand(op, statement.span); } } + Rvalue::ThreadLocalRef(n) + | Rvalue::AddressOf(n) + | Rvalue::BinaryOp(n) + | Rvalue::NullaryOp(n) => match *n {}, }, StatementKind::FakeRead(_) | StatementKind::Deinit(_) @@ -253,6 +257,10 @@ fn partially_moved(db: &dyn HirDatabase, body: &MirBody) -> Vec for_operand(op, statement.span); } } + Rvalue::ThreadLocalRef(n) + | Rvalue::AddressOf(n) + | Rvalue::BinaryOp(n) + | Rvalue::NullaryOp(n) => match *n {}, }, StatementKind::FakeRead(_) | StatementKind::Deinit(_) @@ -548,6 +556,10 @@ fn mutability_of_locals( } } Rvalue::ShallowInitBox(_, _) | Rvalue::ShallowInitBoxWithAlloc(_) => (), + Rvalue::ThreadLocalRef(n) + | Rvalue::AddressOf(n) + | Rvalue::BinaryOp(n) + | Rvalue::NullaryOp(n) => match *n {}, } if let Rvalue::Ref( BorrowKind::Mut { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs index 0d42617d185c4..e73b9dc27d12c 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs @@ -6,6 +6,7 @@ use base_db::CrateId; use chalk_ir::{cast::Cast, Mutability}; use either::Either; use hir_def::{ + body::HygieneId, builtin_type::BuiltinType, data::adt::{StructFlags, VariantData}, lang_item::LangItem, @@ -1628,6 +1629,10 @@ impl Evaluator<'_> { } CastKind::FnPtrToPtr => not_supported!("fn ptr to ptr cast"), }, + Rvalue::ThreadLocalRef(n) + | Rvalue::AddressOf(n) + | Rvalue::BinaryOp(n) + | Rvalue::NullaryOp(n) => match *n {}, }) } @@ -2703,17 +2708,15 @@ impl Evaluator<'_> { TyKind::Function(_) => { self.exec_fn_pointer(func_data, destination, &args[1..], locals, target_bb, span) } - TyKind::Closure(closure, subst) => { - return self.exec_closure( - *closure, - func_data, - &Substitution::from_iter(Interner, ClosureSubst(subst).parent_subst()), - destination, - &args[1..], - locals, - span, - ); - } + TyKind::Closure(closure, subst) => self.exec_closure( + *closure, + func_data, + &Substitution::from_iter(Interner, ClosureSubst(subst).parent_subst()), + destination, + &args[1..], + locals, + span, + ), _ => { // try to execute the manual impl of `FnTrait` for structs (nightly feature used in std) let arg0 = func; @@ -2846,7 +2849,8 @@ impl Evaluator<'_> { } let layout = self.layout_adt(id.0, subst.clone())?; match data.variant_data.as_ref() { - VariantData::Record(fields) | VariantData::Tuple(fields) => { + VariantData::Record { fields, .. } + | VariantData::Tuple { fields, .. 
} => { let field_types = self.db.field_types(s.into()); for (field, _) in fields.iter() { let offset = layout @@ -2951,6 +2955,7 @@ pub fn render_const_using_debug_impl( let Some(ValueNs::FunctionId(format_fn)) = resolver.resolve_path_in_value_ns_fully( db.upcast(), &hir_def::path::Path::from_known_path_with_no_generic(path![std::fmt::format]), + HygieneId::ROOT, ) else { not_supported!("std::fmt::format not found"); }; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs index 16994cdd0c657..c4e0640051064 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs @@ -5,7 +5,7 @@ use std::{fmt::Write, iter, mem}; use base_db::ra_salsa::Cycle; use chalk_ir::{BoundVar, ConstData, DebruijnIndex, TyKind}; use hir_def::{ - body::Body, + body::{Body, HygieneId}, data::adt::{StructKind, VariantData}, hir::{ ArithOp, Array, BinaryOp, BindingAnnotation, BindingId, ExprId, LabelId, Literal, @@ -13,7 +13,8 @@ use hir_def::{ }, lang_item::{LangItem, LangItemTarget}, path::Path, - resolver::{resolver_for_expr, HasResolver, ResolveValueResult, ValueNs}, + resolver::{HasResolver, ResolveValueResult, Resolver, ValueNs}, + type_ref::TypesMap, AdtId, DefWithBodyId, EnumVariantId, GeneralConstId, HasModule, ItemContainerId, LocalFieldId, Lookup, TraitId, TupleId, TypeOrConstParamId, }; @@ -28,7 +29,7 @@ use triomphe::Arc; use crate::{ consteval::ConstEvalError, db::{HirDatabase, InternedClosure}, - display::HirDisplay, + display::{hir_display_with_types_map, HirDisplay}, error_lifetime, generics::generics, infer::{cast::CastTy, unify::InferenceTable, CaptureKind, CapturedItem, TypeMismatch}, @@ -76,6 +77,7 @@ struct MirLowerCtx<'a> { db: &'a dyn HirDatabase, body: &'a Body, infer: &'a InferenceResult, + resolver: Resolver, drop_scopes: Vec, } @@ -246,8 +248,15 @@ impl From for MirLowerError { } impl MirLowerError { - fn unresolved_path(db: &dyn HirDatabase, p: &Path, edition: Edition) -> Self { - Self::UnresolvedName(p.display(db, edition).to_string()) + fn unresolved_path( + db: &dyn HirDatabase, + p: &Path, + edition: Edition, + types_map: &TypesMap, + ) -> Self { + Self::UnresolvedName( + hir_display_with_types_map(p, types_map).display(db, edition).to_string(), + ) } } @@ -278,6 +287,7 @@ impl<'ctx> MirLowerCtx<'ctx> { owner, closures: vec![], }; + let resolver = owner.resolver(db.upcast()); MirLowerCtx { result: mir, @@ -285,6 +295,7 @@ impl<'ctx> MirLowerCtx<'ctx> { infer, body, owner, + resolver, current_loop_blocks: None, labeled_loop_blocks: Default::default(), discr_temp: None, @@ -410,43 +421,54 @@ impl<'ctx> MirLowerCtx<'ctx> { Err(MirLowerError::IncompleteExpr) } Expr::Path(p) => { - let pr = - if let Some((assoc, subst)) = self.infer.assoc_resolutions_for_expr(expr_id) { - match assoc { - hir_def::AssocItemId::ConstId(c) => { - self.lower_const( - c.into(), - current, - place, - subst, - expr_id.into(), - self.expr_ty_without_adjust(expr_id), - )?; - return Ok(Some(current)); - } - hir_def::AssocItemId::FunctionId(_) => { - // FnDefs are zero sized, no action is needed. - return Ok(Some(current)); - } - hir_def::AssocItemId::TypeAliasId(_) => { - // FIXME: If it is unreachable, use proper error instead of `not_supported`. 
- not_supported!("associated functions and types") - } + let pr = if let Some((assoc, subst)) = + self.infer.assoc_resolutions_for_expr(expr_id) + { + match assoc { + hir_def::AssocItemId::ConstId(c) => { + self.lower_const( + c.into(), + current, + place, + subst, + expr_id.into(), + self.expr_ty_without_adjust(expr_id), + )?; + return Ok(Some(current)); } - } else if let Some(variant) = self.infer.variant_resolution_for_expr(expr_id) { - match variant { - VariantId::EnumVariantId(e) => ValueNs::EnumVariantId(e), - VariantId::StructId(s) => ValueNs::StructId(s), - VariantId::UnionId(_) => implementation_error!("Union variant as path"), + hir_def::AssocItemId::FunctionId(_) => { + // FnDefs are zero sized, no action is needed. + return Ok(Some(current)); } - } else { - let unresolved_name = - || MirLowerError::unresolved_path(self.db, p, self.edition()); - let resolver = resolver_for_expr(self.db.upcast(), self.owner, expr_id); - resolver - .resolve_path_in_value_ns_fully(self.db.upcast(), p) - .ok_or_else(unresolved_name)? - }; + hir_def::AssocItemId::TypeAliasId(_) => { + // FIXME: If it is unreachable, use proper error instead of `not_supported`. + not_supported!("associated functions and types") + } + } + } else if let Some(variant) = self.infer.variant_resolution_for_expr(expr_id) { + match variant { + VariantId::EnumVariantId(e) => ValueNs::EnumVariantId(e), + VariantId::StructId(s) => ValueNs::StructId(s), + VariantId::UnionId(_) => implementation_error!("Union variant as path"), + } + } else { + let resolver_guard = + self.resolver.update_to_inner_scope(self.db.upcast(), self.owner, expr_id); + let hygiene = self.body.expr_path_hygiene(expr_id); + let result = self + .resolver + .resolve_path_in_value_ns_fully(self.db.upcast(), p, hygiene) + .ok_or_else(|| { + MirLowerError::unresolved_path( + self.db, + p, + self.edition(), + &self.body.types, + ) + })?; + self.resolver.reset_to_guard(resolver_guard); + result + }; match pr { ValueNs::LocalBinding(_) | ValueNs::StaticId(_) => { let Some((temp, current)) = @@ -553,8 +575,11 @@ impl<'ctx> MirLowerCtx<'ctx> { return Ok(None); }; self.push_fake_read(current, cond_place, expr_id.into()); + let resolver_guard = + self.resolver.update_to_inner_scope(self.db.upcast(), self.owner, expr_id); let (then_target, else_target) = self.pattern_match(current, None, cond_place, *pat)?; + self.resolver.reset_to_guard(resolver_guard); self.write_bytes_to_place( then_target, place, @@ -688,6 +713,8 @@ impl<'ctx> MirLowerCtx<'ctx> { }; self.push_fake_read(current, cond_place, expr_id.into()); let mut end = None; + let resolver_guard = + self.resolver.update_to_inner_scope(self.db.upcast(), self.owner, expr_id); for MatchArm { pat, guard, expr } in arms.iter() { let (then, mut otherwise) = self.pattern_match(current, None, cond_place, *pat)?; @@ -721,6 +748,7 @@ impl<'ctx> MirLowerCtx<'ctx> { } } } + self.resolver.reset_to_guard(resolver_guard); if self.is_unterminated(current) { self.set_terminator(current, TerminatorKind::Unreachable, expr_id.into()); } @@ -795,7 +823,7 @@ impl<'ctx> MirLowerCtx<'ctx> { } Expr::Become { .. } => not_supported!("tail-calls"), Expr::Yield { .. } => not_supported!("yield"), - Expr::RecordLit { fields, path, spread, ellipsis: _, is_assignee_expr: _ } => { + Expr::RecordLit { fields, path, spread } => { let spread_place = match spread { &Some(it) => { let Some((p, c)) = self.lower_expr_as_place(current, it, true)? 
else { @@ -809,7 +837,9 @@ impl<'ctx> MirLowerCtx<'ctx> { let variant_id = self.infer.variant_resolution_for_expr(expr_id).ok_or_else(|| match path { Some(p) => MirLowerError::UnresolvedName( - p.display(self.db, self.edition()).to_string(), + hir_display_with_types_map(&**p, &self.body.types) + .display(self.db, self.edition()) + .to_string(), ), None => MirLowerError::RecordLiteralWithoutPath, })?; @@ -1010,35 +1040,28 @@ impl<'ctx> MirLowerCtx<'ctx> { ); } } - if let hir_def::hir::BinaryOp::Assignment { op } = op { - if let Some(op) = op { - // last adjustment is `&mut` which we don't want it. - let adjusts = self - .infer - .expr_adjustments - .get(lhs) - .and_then(|it| it.split_last()) - .map(|it| it.1) - .ok_or(MirLowerError::TypeError( - "adjustment of binary op was missing", - ))?; - let Some((lhs_place, current)) = - self.lower_expr_as_place_with_adjust(current, *lhs, false, adjusts)? - else { - return Ok(None); - }; - let Some((rhs_op, current)) = - self.lower_expr_to_some_operand(*rhs, current)? - else { - return Ok(None); - }; - let r_value = - Rvalue::CheckedBinaryOp(op.into(), Operand::Copy(lhs_place), rhs_op); - self.push_assignment(current, lhs_place, r_value, expr_id.into()); - return Ok(Some(current)); - } else { - return self.lower_assignment(current, *lhs, *rhs, expr_id.into()); - } + if let hir_def::hir::BinaryOp::Assignment { op: Some(op) } = op { + // last adjustment is `&mut` which we don't want it. + let adjusts = self + .infer + .expr_adjustments + .get(lhs) + .and_then(|it| it.split_last()) + .map(|it| it.1) + .ok_or(MirLowerError::TypeError("adjustment of binary op was missing"))?; + let Some((lhs_place, current)) = + self.lower_expr_as_place_with_adjust(current, *lhs, false, adjusts)? + else { + return Ok(None); + }; + let Some((rhs_op, current)) = self.lower_expr_to_some_operand(*rhs, current)? + else { + return Ok(None); + }; + let r_value = + Rvalue::CheckedBinaryOp(op.into(), Operand::Copy(lhs_place), rhs_op); + self.push_assignment(current, lhs_place, r_value, expr_id.into()); + return Ok(Some(current)); } let Some((lhs_op, current)) = self.lower_expr_to_some_operand(*lhs, current)? else { @@ -1097,6 +1120,18 @@ impl<'ctx> MirLowerCtx<'ctx> { ); Ok(Some(current)) } + &Expr::Assignment { target, value } => { + let Some((value, mut current)) = self.lower_expr_as_place(current, value, true)? 
+ else { + return Ok(None); + }; + self.push_fake_read(current, value, expr_id.into()); + let resolver_guard = + self.resolver.update_to_inner_scope(self.db.upcast(), self.owner, expr_id); + current = self.pattern_match_assignment(current, value, target)?; + self.resolver.reset_to_guard(resolver_guard); + Ok(Some(current)) + } &Expr::Range { lhs, rhs, range_type: _ } => { let ty = self.expr_ty_without_adjust(expr_id); let Some((adt, subst)) = ty.as_adt() else { @@ -1213,7 +1248,7 @@ impl<'ctx> MirLowerCtx<'ctx> { ); Ok(Some(current)) } - Expr::Tuple { exprs, is_assignee_expr: _ } => { + Expr::Tuple { exprs } => { let Some(values) = exprs .iter() .map(|it| { @@ -1291,73 +1326,6 @@ impl<'ctx> MirLowerCtx<'ctx> { } } - fn lower_destructing_assignment( - &mut self, - mut current: BasicBlockId, - lhs: ExprId, - rhs: Place, - span: MirSpan, - ) -> Result> { - match &self.body.exprs[lhs] { - Expr::Tuple { exprs, is_assignee_expr: _ } => { - for (i, expr) in exprs.iter().enumerate() { - let rhs = rhs.project( - ProjectionElem::Field(Either::Right(TupleFieldId { - tuple: TupleId(!0), // Dummy this as its unused - index: i as u32, - })), - &mut self.result.projection_store, - ); - let Some(c) = self.lower_destructing_assignment(current, *expr, rhs, span)? - else { - return Ok(None); - }; - current = c; - } - Ok(Some(current)) - } - Expr::Underscore => Ok(Some(current)), - _ => { - let Some((lhs_place, current)) = self.lower_expr_as_place(current, lhs, false)? - else { - return Ok(None); - }; - self.push_assignment(current, lhs_place, Operand::Copy(rhs).into(), span); - Ok(Some(current)) - } - } - } - - fn lower_assignment( - &mut self, - current: BasicBlockId, - lhs: ExprId, - rhs: ExprId, - span: MirSpan, - ) -> Result> { - let Some((rhs_op, current)) = self.lower_expr_to_some_operand(rhs, current)? else { - return Ok(None); - }; - if matches!(&self.body.exprs[lhs], Expr::Underscore) { - self.push_fake_read_for_operand(current, rhs_op, span); - return Ok(Some(current)); - } - if matches!( - &self.body.exprs[lhs], - Expr::Tuple { .. } | Expr::RecordLit { .. } | Expr::Call { .. } - ) { - let temp = self.temp(self.expr_ty_after_adjustments(rhs), current, rhs.into())?; - let temp = Place::from(temp); - self.push_assignment(current, temp, rhs_op.into(), span); - return self.lower_destructing_assignment(current, lhs, temp, span); - } - let Some((lhs_place, current)) = self.lower_expr_as_place(current, lhs, false)? 
else { - return Ok(None); - }; - self.push_assignment(current, lhs_place, rhs_op.into(), span); - Ok(Some(current)) - } - fn placeholder_subst(&mut self) -> Substitution { match self.owner.as_generic_def_id(self.db.upcast()) { Some(it) => TyBuilder::placeholder_subst(self.db, it), @@ -1406,10 +1374,10 @@ impl<'ctx> MirLowerCtx<'ctx> { }; let edition = self.edition(); let unresolved_name = - || MirLowerError::unresolved_path(self.db, c.as_ref(), edition); - let resolver = self.owner.resolver(self.db.upcast()); - let pr = resolver - .resolve_path_in_value_ns(self.db.upcast(), c.as_ref()) + || MirLowerError::unresolved_path(self.db, c, edition, &self.body.types); + let pr = self + .resolver + .resolve_path_in_value_ns(self.db.upcast(), c, HygieneId::ROOT) .ok_or_else(unresolved_name)?; match pr { ResolveValueResult::ValueNs(v, _) => { @@ -1632,12 +1600,6 @@ impl<'ctx> MirLowerCtx<'ctx> { self.push_statement(block, StatementKind::FakeRead(p).with_span(span)); } - fn push_fake_read_for_operand(&mut self, block: BasicBlockId, operand: Operand, span: MirSpan) { - if let Operand::Move(p) | Operand::Copy(p) = operand { - self.push_fake_read(block, p, span); - } - } - fn push_assignment( &mut self, block: BasicBlockId, @@ -1791,8 +1753,16 @@ impl<'ctx> MirLowerCtx<'ctx> { }; current = c; self.push_fake_read(current, init_place, span); + // Using the initializer for the resolver scope is good enough for us, as it cannot create new declarations + // and has all declarations of the `let`. + let resolver_guard = self.resolver.update_to_inner_scope( + self.db.upcast(), + self.owner, + *expr_id, + ); (current, else_block) = self.pattern_match(current, None, init_place, *pat)?; + self.resolver.reset_to_guard(resolver_guard); match (else_block, else_branch) { (None, _) => (), (Some(else_block), None) => { @@ -1828,7 +1798,7 @@ impl<'ctx> MirLowerCtx<'ctx> { self.push_fake_read(c, p, expr.into()); current = scope2.pop_and_drop(self, c, expr.into()); } - hir_def::hir::Statement::Item => (), + hir_def::hir::Statement::Item(_) => (), } } if let Some(tail) = tail { @@ -2066,11 +2036,13 @@ pub fn mir_body_for_closure_query( let Some(sig) = ClosureSubst(substs).sig_ty().callable_sig(db) else { implementation_error!("closure has not callable sig"); }; + let resolver_guard = ctx.resolver.update_to_inner_scope(db.upcast(), owner, expr); let current = ctx.lower_params_and_bindings( args.iter().zip(sig.params().iter()).map(|(it, y)| (*it, y.clone())), None, |_| true, )?; + ctx.resolver.reset_to_guard(resolver_guard); if let Some(current) = ctx.lower_expr_to_place(*root, return_slot().into(), current)? 
{ let current = ctx.pop_drop_scope_assert_finished(current, root.into())?; ctx.set_terminator(current, TerminatorKind::Return, (*root).into()); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/as_place.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/as_place.rs index 424ee1160c820..420f2aaff46d6 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/as_place.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/as_place.rs @@ -135,8 +135,13 @@ impl MirLowerCtx<'_> { }; match &self.body.exprs[expr_id] { Expr::Path(p) => { - let resolver = resolver_for_expr(self.db.upcast(), self.owner, expr_id); - let Some(pr) = resolver.resolve_path_in_value_ns_fully(self.db.upcast(), p) else { + let resolver_guard = + self.resolver.update_to_inner_scope(self.db.upcast(), self.owner, expr_id); + let hygiene = self.body.expr_path_hygiene(expr_id); + let resolved = + self.resolver.resolve_path_in_value_ns_fully(self.db.upcast(), p, hygiene); + self.resolver.reset_to_guard(resolver_guard); + let Some(pr) = resolved else { return try_rvalue(self); }; match pr { @@ -216,7 +221,7 @@ impl MirLowerCtx<'_> { self.push_field_projection(&mut r, expr_id)?; Ok(Some((r, current))) } - Expr::Index { base, index, is_assignee_expr: _ } => { + Expr::Index { base, index } => { let base_ty = self.expr_ty_after_adjustments(*base); let index_ty = self.expr_ty_after_adjustments(*index); if index_ty != TyBuilder::usize() diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs index b1c0d1f2b3901..2ffea34c85a10 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs @@ -1,6 +1,6 @@ //! MIR lowering for patterns -use hir_def::{hir::LiteralOrConst, resolver::HasResolver, AssocItemId}; +use hir_def::{hir::LiteralOrConst, AssocItemId}; use crate::{ mir::{ @@ -46,6 +46,8 @@ enum MatchingMode { Check, /// Assume that this pattern matches, fill bindings Bind, + /// Assume that this pattern matches, assign to existing variables. 
+ Assign, } impl MirLowerCtx<'_> { @@ -82,6 +84,17 @@ impl MirLowerCtx<'_> { Ok((current, current_else)) } + pub(super) fn pattern_match_assignment( + &mut self, + current: BasicBlockId, + value: Place, + pattern: PatId, + ) -> Result { + let (current, _) = + self.pattern_match_inner(current, None, value, pattern, MatchingMode::Assign)?; + Ok(current) + } + pub(super) fn match_self_param( &mut self, id: BindingId, @@ -155,14 +168,8 @@ impl MirLowerCtx<'_> { *pat, MatchingMode::Check, )?; - if mode == MatchingMode::Bind { - (next, _) = self.pattern_match_inner( - next, - None, - cond_place, - *pat, - MatchingMode::Bind, - )?; + if mode != MatchingMode::Check { + (next, _) = self.pattern_match_inner(next, None, cond_place, *pat, mode)?; } self.set_goto(next, then_target, pattern.into()); match next_else { @@ -176,11 +183,11 @@ impl MirLowerCtx<'_> { } } if !finished { - if mode == MatchingMode::Bind { - self.set_terminator(current, TerminatorKind::Unreachable, pattern.into()); - } else { + if mode == MatchingMode::Check { let ce = *current_else.get_or_insert_with(|| self.new_basic_block()); self.set_goto(current, ce, pattern.into()); + } else { + self.set_terminator(current, TerminatorKind::Unreachable, pattern.into()); } } (then_target, current_else) @@ -300,7 +307,7 @@ impl MirLowerCtx<'_> { self.pattern_match_inner(current, current_else, next_place, pat, mode)?; } if let &Some(slice) = slice { - if mode == MatchingMode::Bind { + if mode != MatchingMode::Check { if let Pat::Bind { id, subpat: _ } = self.body[slice] { let next_place = cond_place.project( ProjectionElem::Subslice { @@ -342,17 +349,36 @@ impl MirLowerCtx<'_> { mode, )?, None => { - // The path is not a variant, so it is a const + let unresolved_name = || { + MirLowerError::unresolved_path(self.db, p, self.edition(), &self.body.types) + }; + let hygiene = self.body.pat_path_hygiene(pattern); + let pr = self + .resolver + .resolve_path_in_value_ns(self.db.upcast(), p, hygiene) + .ok_or_else(unresolved_name)?; + + if let ( + MatchingMode::Assign, + ResolveValueResult::ValueNs(ValueNs::LocalBinding(binding), _), + ) = (mode, &pr) + { + let local = self.binding_local(*binding)?; + self.push_match_assignment( + current, + local, + BindingMode::Move, + cond_place, + pattern.into(), + ); + return Ok((current, current_else)); + } + + // The path is not a variant or a local, so it is a const if mode != MatchingMode::Check { // A const don't bind anything. Only needs check. return Ok((current, current_else)); } - let unresolved_name = - || MirLowerError::unresolved_path(self.db, p, self.edition()); - let resolver = self.owner.resolver(self.db.upcast()); - let pr = resolver - .resolve_path_in_value_ns(self.db.upcast(), p) - .ok_or_else(unresolved_name)?; let (c, subst) = 'b: { if let Some(x) = self.infer.assoc_resolutions_for_pat(pattern) { if let AssocItemId::ConstId(c) = x.0 { @@ -415,7 +441,7 @@ impl MirLowerCtx<'_> { (current, current_else) = self.pattern_match_inner(current, current_else, cond_place, *subpat, mode)? } - if mode == MatchingMode::Bind { + if mode != MatchingMode::Check { let mode = self.infer.binding_modes[pattern]; self.pattern_match_binding( *id, @@ -448,6 +474,23 @@ impl MirLowerCtx<'_> { cond_place.project(ProjectionElem::Deref, &mut self.result.projection_store); self.pattern_match_inner(current, current_else, cond_place, *pat, mode)? 
} + &Pat::Expr(expr) => { + stdx::always!( + mode == MatchingMode::Assign, + "Pat::Expr can only come in destructuring assignments" + ); + let Some((lhs_place, current)) = self.lower_expr_as_place(current, expr, false)? + else { + return Ok((current, current_else)); + }; + self.push_assignment( + current, + lhs_place, + Operand::Copy(cond_place).into(), + expr.into(), + ); + (current, current_else) + } Pat::Box { .. } => not_supported!("box pattern"), Pat::ConstBlock(_) => not_supported!("const block pattern"), }) @@ -464,6 +507,18 @@ impl MirLowerCtx<'_> { ) -> Result<(BasicBlockId, Option)> { let target_place = self.binding_local(id)?; self.push_storage_live(id, current)?; + self.push_match_assignment(current, target_place, mode, cond_place, span); + Ok((current, current_else)) + } + + fn push_match_assignment( + &mut self, + current: BasicBlockId, + target_place: LocalId, + mode: BindingMode, + cond_place: Place, + span: MirSpan, + ) { self.push_assignment( current, target_place.into(), @@ -476,7 +531,6 @@ impl MirLowerCtx<'_> { }, span, ); - Ok((current, current_else)) } fn pattern_match_const( diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/monomorphization.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/monomorphization.rs index 4c6bc376e2b7c..92132fa047362 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/monomorphization.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/monomorphization.rs @@ -258,6 +258,10 @@ impl Filler<'_> { | Rvalue::UnaryOp(_, _) | Rvalue::Discriminant(_) | Rvalue::CopyForDeref(_) => (), + Rvalue::ThreadLocalRef(n) + | Rvalue::AddressOf(n) + | Rvalue::BinaryOp(n) + | Rvalue::NullaryOp(n) => match *n {}, }, StatementKind::Deinit(_) | StatementKind::FakeRead(_) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/pretty.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/pretty.rs index df56071aa9af6..06765a104cbb4 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/pretty.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/pretty.rs @@ -459,6 +459,10 @@ impl<'a> MirPrettyCtx<'a> { self.place(p); w!(self, ")"); } + Rvalue::ThreadLocalRef(n) + | Rvalue::AddressOf(n) + | Rvalue::BinaryOp(n) + | Rvalue::NullaryOp(n) => match *n {}, } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs index a8170b606060b..5f0f341f393e9 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs @@ -3418,11 +3418,11 @@ struct TS(usize); fn main() { let x; [x,] = &[1,]; - //^^^^expected &'? [i32; 1], got [{unknown}; _] + //^^^^expected &'? [i32; 1], got [{unknown}] let x; [(x,),] = &[(1,),]; - //^^^^^^^expected &'? [(i32,); 1], got [{unknown}; _] + //^^^^^^^expected &'? [(i32,); 1], got [{unknown}] let x; ((x,),) = &((1,),); @@ -3720,3 +3720,85 @@ fn test() -> bool { "#]], ); } + +#[test] +fn macro_semitransparent_hygiene() { + check_types( + r#" +macro_rules! m { + () => { let bar: i32; }; +} +fn foo() { + let bar: bool; + m!(); + bar; + // ^^^ bool +} + "#, + ); +} + +#[test] +fn macro_expansion_can_refer_variables_defined_before_macro_definition() { + check_types( + r#" +fn foo() { + let v: i32 = 0; + macro_rules! m { + () => { v }; + } + let v: bool = true; + m!(); + // ^^^^ i32 +} + "#, + ); +} + +#[test] +fn macro_rules_shadowing_works_with_hygiene() { + check_types( + r#" +fn foo() { + let v: bool; + macro_rules! 
m { () => { v } } + m!(); + // ^^^^ bool + + let v: char; + macro_rules! m { () => { v } } + m!(); + // ^^^^ char + + { + let v: u8; + macro_rules! m { () => { v } } + m!(); + // ^^^^ u8 + + let v: i8; + macro_rules! m { () => { v } } + m!(); + // ^^^^ i8 + + let v: i16; + macro_rules! m { () => { v } } + m!(); + // ^^^^ i16 + + { + let v: u32; + macro_rules! m { () => { v } } + m!(); + // ^^^^ u32 + + let v: u64; + macro_rules! m { () => { v } } + m!(); + // ^^^^ u64 + } + } +} + "#, + ); +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs b/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs index 620bba2d75c12..0a436ff2b41ab 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs @@ -123,7 +123,7 @@ pub(super) struct ClauseElaborator<'a> { seen: FxHashSet, } -impl<'a> ClauseElaborator<'a> { +impl ClauseElaborator<'_> { fn extend_deduped(&mut self, clauses: impl IntoIterator) { self.stack.extend(clauses.into_iter().filter(|c| self.seen.insert(c.clone()))) } @@ -163,10 +163,12 @@ fn direct_super_traits(db: &dyn DefDatabase, trait_: TraitId, cb: impl FnMut(Tra WherePredicate::ForLifetime { target, bound, .. } | WherePredicate::TypeBound { target, bound } => { let is_trait = match target { - WherePredicateTypeTarget::TypeRef(type_ref) => match &**type_ref { - TypeRef::Path(p) => p.is_self_type(), - _ => false, - }, + WherePredicateTypeTarget::TypeRef(type_ref) => { + match &generic_params.types_map[*type_ref] { + TypeRef::Path(p) => p.is_self_type(), + _ => false, + } + } WherePredicateTypeTarget::TypeOrConstParam(local_id) => { Some(*local_id) == trait_self } diff --git a/src/tools/rust-analyzer/crates/hir/src/db.rs b/src/tools/rust-analyzer/crates/hir/src/db.rs index cb5f5b06aefb4..22760c41aaecf 100644 --- a/src/tools/rust-analyzer/crates/hir/src/db.rs +++ b/src/tools/rust-analyzer/crates/hir/src/db.rs @@ -4,35 +4,43 @@ //! //! But we need this for at least LRU caching at the query level. 
pub use hir_def::db::{ - AttrsQuery, BlockDefMapQuery, BlockItemTreeQuery, BodyQuery, BodyWithSourceMapQuery, - ConstDataQuery, ConstVisibilityQuery, CrateDefMapQuery, CrateLangItemsQuery, - CrateNotableTraitsQuery, CrateSupportsNoStdQuery, DefDatabase, DefDatabaseStorage, - EnumDataQuery, EnumVariantDataWithDiagnosticsQuery, ExprScopesQuery, ExternCrateDeclDataQuery, - FieldVisibilitiesQuery, FieldsAttrsQuery, FieldsAttrsSourceMapQuery, FileItemTreeQuery, - FunctionDataQuery, FunctionVisibilityQuery, GenericParamsQuery, ImplDataWithDiagnosticsQuery, - ImportMapQuery, InternAnonymousConstQuery, InternBlockQuery, InternConstQuery, InternDatabase, - InternDatabaseStorage, InternEnumQuery, InternExternBlockQuery, InternExternCrateQuery, - InternFunctionQuery, InternImplQuery, InternInTypeConstQuery, InternMacro2Query, - InternMacroRulesQuery, InternProcMacroQuery, InternStaticQuery, InternStructQuery, - InternTraitAliasQuery, InternTraitQuery, InternTypeAliasQuery, InternUnionQuery, - InternUseQuery, LangItemQuery, Macro2DataQuery, MacroRulesDataQuery, ProcMacroDataQuery, - StaticDataQuery, StructDataWithDiagnosticsQuery, TraitAliasDataQuery, - TraitDataWithDiagnosticsQuery, TypeAliasDataQuery, UnionDataWithDiagnosticsQuery, + AttrsQuery, BlockDefMapQuery, BlockItemTreeQuery, BlockItemTreeWithSourceMapQuery, BodyQuery, + BodyWithSourceMapQuery, ConstDataQuery, ConstVisibilityQuery, CrateDefMapQuery, + CrateLangItemsQuery, CrateNotableTraitsQuery, CrateSupportsNoStdQuery, DefDatabase, + DefDatabaseStorage, EnumDataQuery, EnumVariantDataWithDiagnosticsQuery, + ExpandProcAttrMacrosQuery, ExprScopesQuery, ExternCrateDeclDataQuery, FieldVisibilitiesQuery, + FieldsAttrsQuery, FieldsAttrsSourceMapQuery, FileItemTreeQuery, FileItemTreeWithSourceMapQuery, + FunctionDataQuery, FunctionVisibilityQuery, GenericParamsQuery, + GenericParamsWithSourceMapQuery, ImplDataWithDiagnosticsQuery, ImportMapQuery, + IncludeMacroInvocQuery, InternAnonymousConstQuery, InternBlockQuery, InternConstQuery, + InternDatabase, InternDatabaseStorage, InternEnumQuery, InternExternBlockQuery, + InternExternCrateQuery, InternFunctionQuery, InternImplQuery, InternInTypeConstQuery, + InternMacro2Query, InternMacroRulesQuery, InternProcMacroQuery, InternStaticQuery, + InternStructQuery, InternTraitAliasQuery, InternTraitQuery, InternTypeAliasQuery, + InternUnionQuery, InternUseQuery, LangItemQuery, Macro2DataQuery, MacroDefQuery, + MacroRulesDataQuery, NotableTraitsInDepsQuery, ProcMacroDataQuery, StaticDataQuery, + StructDataWithDiagnosticsQuery, TraitAliasDataQuery, TraitDataWithDiagnosticsQuery, + TypeAliasDataQuery, UnionDataWithDiagnosticsQuery, }; pub use hir_expand::db::{ AstIdMapQuery, DeclMacroExpanderQuery, ExpandDatabase, ExpandDatabaseStorage, ExpandProcMacroQuery, InternMacroCallQuery, InternSyntaxContextQuery, MacroArgQuery, - ParseMacroExpansionErrorQuery, ParseMacroExpansionQuery, ProcMacrosQuery, RealSpanMapQuery, + ParseMacroExpansionErrorQuery, ParseMacroExpansionQuery, ProcMacroSpanQuery, ProcMacrosQuery, + RealSpanMapQuery, }; pub use hir_ty::db::{ AdtDatumQuery, AdtVarianceQuery, AssociatedTyDataQuery, AssociatedTyValueQuery, BorrowckQuery, CallableItemSignatureQuery, ConstEvalDiscriminantQuery, ConstEvalQuery, ConstEvalStaticQuery, - ConstParamTyQuery, FieldTypesQuery, FnDefDatumQuery, FnDefVarianceQuery, GenericDefaultsQuery, - GenericPredicatesForParamQuery, GenericPredicatesQuery, HirDatabase, HirDatabaseStorage, - ImplDatumQuery, ImplSelfTyQuery, ImplTraitQuery, IncoherentInherentImplCratesQuery, + 
ConstParamTyQuery, DynCompatibilityOfTraitQuery, FieldTypesQuery, FnDefDatumQuery, + FnDefVarianceQuery, GenericDefaultsQuery, GenericPredicatesForParamQuery, + GenericPredicatesQuery, GenericPredicatesWithoutParentQuery, HirDatabase, HirDatabaseStorage, + ImplDatumQuery, ImplSelfTyQuery, ImplTraitQuery, IncoherentInherentImplCratesQuery, InferQuery, InherentImplsInBlockQuery, InherentImplsInCrateQuery, InternCallableDefQuery, InternClosureQuery, InternCoroutineQuery, InternImplTraitIdQuery, InternLifetimeParamIdQuery, - InternTypeOrConstParamIdQuery, LayoutOfAdtQuery, MirBodyQuery, ProgramClausesForChalkEnvQuery, - ReturnTypeImplTraitsQuery, TargetDataLayoutQuery, TraitDatumQuery, TraitEnvironmentQuery, - TraitImplsInBlockQuery, TraitImplsInCrateQuery, TraitImplsInDepsQuery, TyQuery, ValueTyQuery, + InternTypeOrConstParamIdQuery, LayoutOfAdtQuery, LayoutOfTyQuery, LookupImplMethodQuery, + MirBodyForClosureQuery, MirBodyQuery, MonomorphizedMirBodyForClosureQuery, + MonomorphizedMirBodyQuery, ProgramClausesForChalkEnvQuery, ReturnTypeImplTraitsQuery, + TargetDataLayoutQuery, TraitDatumQuery, TraitEnvironmentQuery, TraitImplsInBlockQuery, + TraitImplsInCrateQuery, TraitImplsInDepsQuery, TraitSolveQuery, TyQuery, + TypeAliasImplTraitsQuery, ValueTyQuery, }; diff --git a/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs b/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs index 0b3cdb2f37907..8297acde857d7 100644 --- a/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs +++ b/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs @@ -165,6 +165,7 @@ pub struct MacroError { pub precise_location: Option, pub message: String, pub error: bool, + pub kind: &'static str, } #[derive(Debug, Clone, Eq, PartialEq)] @@ -246,7 +247,7 @@ pub struct UnresolvedAssocItem { #[derive(Debug)] pub struct UnresolvedIdent { - pub expr: InFile>, + pub expr_or_pat: InFile>>, } #[derive(Debug)] @@ -257,7 +258,7 @@ pub struct PrivateField { #[derive(Debug)] pub struct MissingUnsafe { - pub expr: InFile>, + pub expr: InFile>>, /// If true, the diagnostics is an `unsafe_op_in_unsafe_fn` lint instead of a hard error. 
pub only_lint: bool, } @@ -398,56 +399,46 @@ impl AnyDiagnostic { .map(|idx| variant_data.fields()[idx].name.clone()) .collect(); - match record { - Either::Left(record_expr) => match source_map.expr_syntax(record_expr) { - Ok(source_ptr) => { - let root = source_ptr.file_syntax(db.upcast()); - if let ast::Expr::RecordExpr(record_expr) = - source_ptr.value.to_node(&root) - { - if record_expr.record_expr_field_list().is_some() { - let field_list_parent_path = - record_expr.path().map(|path| AstPtr::new(&path)); - return Some( - MissingFields { - file: source_ptr.file_id, - field_list_parent: AstPtr::new(&Either::Left( - record_expr, - )), - field_list_parent_path, - missed_fields, - } - .into(), - ); + let record = match record { + Either::Left(record_expr) => { + source_map.expr_syntax(record_expr).ok()?.map(AstPtr::wrap_left) + } + Either::Right(record_pat) => source_map.pat_syntax(record_pat).ok()?, + }; + let file = record.file_id; + let root = record.file_syntax(db.upcast()); + match record.value.to_node(&root) { + Either::Left(ast::Expr::RecordExpr(record_expr)) => { + if record_expr.record_expr_field_list().is_some() { + let field_list_parent_path = + record_expr.path().map(|path| AstPtr::new(&path)); + return Some( + MissingFields { + file, + field_list_parent: AstPtr::new(&Either::Left(record_expr)), + field_list_parent_path, + missed_fields, } - } + .into(), + ); } - Err(SyntheticSyntax) => (), - }, - Either::Right(record_pat) => match source_map.pat_syntax(record_pat) { - Ok(source_ptr) => { - if let Some(ptr) = source_ptr.value.cast::() { - let root = source_ptr.file_syntax(db.upcast()); - let record_pat = ptr.to_node(&root); - if record_pat.record_pat_field_list().is_some() { - let field_list_parent_path = - record_pat.path().map(|path| AstPtr::new(&path)); - return Some( - MissingFields { - file: source_ptr.file_id, - field_list_parent: AstPtr::new(&Either::Right( - record_pat, - )), - field_list_parent_path, - missed_fields, - } - .into(), - ); + } + Either::Right(ast::Pat::RecordPat(record_pat)) => { + if record_pat.record_pat_field_list().is_some() { + let field_list_parent_path = + record_pat.path().map(|path| AstPtr::new(&path)); + return Some( + MissingFields { + file, + field_list_parent: AstPtr::new(&Either::Right(record_pat)), + field_list_parent_path, + missed_fields, } - } + .into(), + ); } - Err(SyntheticSyntax) => (), - }, + } + _ => {} } } BodyValidationDiagnostic::ReplaceFilterMapNextWithFindMap { method_call_expr } => { @@ -541,15 +532,17 @@ impl AnyDiagnostic { let pat_syntax = |pat| { source_map.pat_syntax(pat).inspect_err(|_| tracing::error!("synthetic syntax")).ok() }; + let expr_or_pat_syntax = |id| match id { + ExprOrPatId::ExprId(expr) => expr_syntax(expr).map(|it| it.map(AstPtr::wrap_left)), + ExprOrPatId::PatId(pat) => pat_syntax(pat), + }; Some(match d { &InferenceDiagnostic::NoSuchField { field: expr, private, variant } => { let expr_or_pat = match expr { ExprOrPatId::ExprId(expr) => { source_map.field_syntax(expr).map(AstPtr::wrap_left) } - ExprOrPatId::PatId(pat) => { - source_map.pat_field_syntax(pat).map(AstPtr::wrap_right) - } + ExprOrPatId::PatId(pat) => source_map.pat_field_syntax(pat), }; NoSuchField { field: expr_or_pat, private, variant }.into() } @@ -562,10 +555,7 @@ impl AnyDiagnostic { PrivateField { expr, field }.into() } &InferenceDiagnostic::PrivateAssocItem { id, item } => { - let expr_or_pat = match id { - ExprOrPatId::ExprId(expr) => expr_syntax(expr)?.map(AstPtr::wrap_left), - ExprOrPatId::PatId(pat) => 
pat_syntax(pat)?.map(AstPtr::wrap_right), - }; + let expr_or_pat = expr_or_pat_syntax(id)?; let item = item.into(); PrivateAssocItem { expr_or_pat, item }.into() } @@ -609,15 +599,12 @@ impl AnyDiagnostic { .into() } &InferenceDiagnostic::UnresolvedAssocItem { id } => { - let expr_or_pat = match id { - ExprOrPatId::ExprId(expr) => expr_syntax(expr)?.map(AstPtr::wrap_left), - ExprOrPatId::PatId(pat) => pat_syntax(pat)?.map(AstPtr::wrap_right), - }; + let expr_or_pat = expr_or_pat_syntax(id)?; UnresolvedAssocItem { expr_or_pat }.into() } - &InferenceDiagnostic::UnresolvedIdent { expr } => { - let expr = expr_syntax(expr)?; - UnresolvedIdent { expr }.into() + &InferenceDiagnostic::UnresolvedIdent { id } => { + let expr_or_pat = expr_or_pat_syntax(id)?; + UnresolvedIdent { expr_or_pat }.into() } &InferenceDiagnostic::BreakOutsideOfLoop { expr, is_break, bad_value_break } => { let expr = expr_syntax(expr)?; diff --git a/src/tools/rust-analyzer/crates/hir/src/display.rs b/src/tools/rust-analyzer/crates/hir/src/display.rs index c2b2fbef75177..9275f45d881b2 100644 --- a/src/tools/rust-analyzer/crates/hir/src/display.rs +++ b/src/tools/rust-analyzer/crates/hir/src/display.rs @@ -12,12 +12,11 @@ use hir_def::{ }; use hir_ty::{ display::{ - write_bounds_like_dyn_trait_with_prefix, write_visibility, HirDisplay, HirDisplayError, - HirFormatter, SizedByDefault, + hir_display_with_types_map, write_bounds_like_dyn_trait_with_prefix, write_visibility, + HirDisplay, HirDisplayError, HirDisplayWithTypesMap, HirFormatter, SizedByDefault, }, AliasEq, AliasTy, Interner, ProjectionTyExt, TraitRefExt, TyKind, WhereClause, }; -use intern::Interned; use itertools::Itertools; use crate::{ @@ -113,7 +112,7 @@ impl HirDisplay for Function { f.write_str(&pat_str)?; f.write_str(": ")?; - type_ref.hir_fmt(f)?; + type_ref.hir_fmt(f, &data.types_map)?; } if data.is_varargs() { @@ -129,28 +128,30 @@ impl HirDisplay for Function { // Use ugly pattern match to strip the Future trait. // Better way? 
let ret_type = if !data.is_async() { - &data.ret_type + Some(data.ret_type) } else { - match &*data.ret_type { - TypeRef::ImplTrait(bounds) => match bounds[0].as_ref() { - TypeBound::Path(path, _) => { - path.segments().iter().last().unwrap().args_and_bindings.unwrap().bindings + match &data.types_map[data.ret_type] { + TypeRef::ImplTrait(bounds) => match &bounds[0] { + TypeBound::Path(path, _) => Some( + *path.segments().iter().last().unwrap().args_and_bindings.unwrap().bindings [0] .type_ref .as_ref() - .unwrap() - } - _ => &TypeRef::Error, + .unwrap(), + ), + _ => None, }, - _ => &TypeRef::Error, + _ => None, } }; - match ret_type { - TypeRef::Tuple(tup) if tup.is_empty() => {} - ty => { - f.write_str(" -> ")?; - ty.hir_fmt(f)?; + if let Some(ret_type) = ret_type { + match &data.types_map[ret_type] { + TypeRef::Tuple(tup) if tup.is_empty() => {} + _ => { + f.write_str(" -> ")?; + ret_type.hir_fmt(f, &data.types_map)?; + } } } @@ -192,23 +193,23 @@ fn write_impl_header(impl_: &Impl, f: &mut HirFormatter<'_>) -> Result<(), HirDi impl HirDisplay for SelfParam { fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { let data = f.db.function_data(self.func); - let param = data.params.first().unwrap(); - match &**param { + let param = *data.params.first().unwrap(); + match &data.types_map[param] { TypeRef::Path(p) if p.is_self_type() => f.write_str("self"), - TypeRef::Reference(inner, lifetime, mut_) if matches!(&**inner, TypeRef::Path(p) if p.is_self_type()) => + TypeRef::Reference(ref_) if matches!(&data.types_map[ref_.ty], TypeRef::Path(p) if p.is_self_type()) => { f.write_char('&')?; - if let Some(lifetime) = lifetime { + if let Some(lifetime) = &ref_.lifetime { write!(f, "{} ", lifetime.name.display(f.db.upcast(), f.edition()))?; } - if let hir_def::type_ref::Mutability::Mut = mut_ { + if let hir_def::type_ref::Mutability::Mut = ref_.mutability { f.write_str("mut ")?; } f.write_str("self") } - ty => { + _ => { f.write_str("self: ")?; - ty.hir_fmt(f) + param.hir_fmt(f, &data.types_map) } } } @@ -393,7 +394,7 @@ impl HirDisplay for Variant { let data = self.variant_data(f.db); match &*data { VariantData::Unit => {} - VariantData::Tuple(fields) => { + VariantData::Tuple { fields, types_map } => { f.write_char('(')?; let mut first = true; for (_, field) in fields.iter() { @@ -403,11 +404,11 @@ impl HirDisplay for Variant { f.write_str(", ")?; } // Enum variant fields must be pub. - field.type_ref.hir_fmt(f)?; + field.type_ref.hir_fmt(f, types_map)?; } f.write_char(')')?; } - VariantData::Record(_) => { + VariantData::Record { .. 
} => { if let Some(limit) = f.entity_limit { write_fields(&self.fields(f.db), false, limit, true, f)?; } @@ -579,13 +580,13 @@ fn write_generic_params( write!(f, "{}", name.display(f.db.upcast(), f.edition()))?; if let Some(default) = &ty.default { f.write_str(" = ")?; - default.hir_fmt(f)?; + default.hir_fmt(f, ¶ms.types_map)?; } } TypeOrConstParamData::ConstParamData(c) => { delim(f)?; write!(f, "const {}: ", name.display(f.db.upcast(), f.edition()))?; - c.ty.hir_fmt(f)?; + c.ty.hir_fmt(f, ¶ms.types_map)?; if let Some(default) = &c.default { f.write_str(" = ")?; @@ -615,7 +616,7 @@ fn write_where_clause( Ok(true) } -fn has_disaplayable_predicates(params: &Interned) -> bool { +fn has_disaplayable_predicates(params: &GenericParams) -> bool { params.where_predicates().any(|pred| { !matches!( pred, @@ -626,21 +627,20 @@ fn has_disaplayable_predicates(params: &Interned) -> bool { } fn write_where_predicates( - params: &Interned, + params: &GenericParams, f: &mut HirFormatter<'_>, ) -> Result<(), HirDisplayError> { use WherePredicate::*; // unnamed type targets are displayed inline with the argument itself, e.g. `f: impl Y`. - let is_unnamed_type_target = - |params: &Interned, target: &WherePredicateTypeTarget| { - matches!(target, - WherePredicateTypeTarget::TypeOrConstParam(id) if params[*id].name().is_none() - ) - }; + let is_unnamed_type_target = |params: &GenericParams, target: &WherePredicateTypeTarget| { + matches!(target, + WherePredicateTypeTarget::TypeOrConstParam(id) if params[*id].name().is_none() + ) + }; let write_target = |target: &WherePredicateTypeTarget, f: &mut HirFormatter<'_>| match target { - WherePredicateTypeTarget::TypeRef(ty) => ty.hir_fmt(f), + WherePredicateTypeTarget::TypeRef(ty) => ty.hir_fmt(f, ¶ms.types_map), WherePredicateTypeTarget::TypeOrConstParam(id) => match params[*id].name() { Some(name) => write!(f, "{}", name.display(f.db.upcast(), f.edition())), None => f.write_str("{unnamed}"), @@ -668,7 +668,7 @@ fn write_where_predicates( TypeBound { target, bound } => { write_target(target, f)?; f.write_str(": ")?; - bound.hir_fmt(f)?; + bound.hir_fmt(f, ¶ms.types_map)?; } Lifetime { target, bound } => { let target = target.name.display(f.db.upcast(), f.edition()); @@ -681,14 +681,16 @@ fn write_where_predicates( write!(f, "for<{lifetimes}> ")?; write_target(target, f)?; f.write_str(": ")?; - bound.hir_fmt(f)?; + bound.hir_fmt(f, ¶ms.types_map)?; } } while let Some(nxt) = iter.next_if(|nxt| check_same_target(pred, nxt)) { f.write_str(" + ")?; match nxt { - TypeBound { bound, .. } | ForLifetime { bound, .. } => bound.hir_fmt(f)?, + TypeBound { bound, .. } | ForLifetime { bound, .. } => { + bound.hir_fmt(f, ¶ms.types_map)? + } Lifetime { bound, .. } => { write!(f, "{}", bound.name.display(f.db.upcast(), f.edition()))? 
} @@ -716,7 +718,7 @@ impl HirDisplay for Const { Some(name) => write!(f, "{}: ", name.display(f.db.upcast(), f.edition()))?, None => f.write_str("_: ")?, } - data.type_ref.hir_fmt(f)?; + data.type_ref.hir_fmt(f, &data.types_map)?; Ok(()) } } @@ -730,7 +732,7 @@ impl HirDisplay for Static { f.write_str("mut ")?; } write!(f, "{}: ", data.name.display(f.db.upcast(), f.edition()))?; - data.type_ref.hir_fmt(f)?; + data.type_ref.hir_fmt(f, &data.types_map)?; Ok(()) } } @@ -813,11 +815,14 @@ impl HirDisplay for TypeAlias { write_generic_params(def_id, f)?; if !data.bounds.is_empty() { f.write_str(": ")?; - f.write_joined(data.bounds.iter(), " + ")?; + f.write_joined( + data.bounds.iter().map(|bound| hir_display_with_types_map(bound, &data.types_map)), + " + ", + )?; } - if let Some(ty) = &data.type_ref { + if let Some(ty) = data.type_ref { f.write_str(" = ")?; - ty.hir_fmt(f)?; + ty.hir_fmt(f, &data.types_map)?; } write_where_clause(def_id, f)?; Ok(()) diff --git a/src/tools/rust-analyzer/crates/hir/src/lib.rs b/src/tools/rust-analyzer/crates/hir/src/lib.rs index 30e023e1a4720..88eb3b127e060 100644 --- a/src/tools/rust-analyzer/crates/hir/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir/src/lib.rs @@ -58,7 +58,8 @@ use hir_def::{ TypeOrConstParamId, TypeParamId, UnionId, }; use hir_expand::{ - attrs::collect_attrs, proc_macro::ProcMacroKind, AstId, MacroCallKind, ValueResult, + attrs::collect_attrs, proc_macro::ProcMacroKind, AstId, MacroCallKind, RenderedExpandError, + ValueResult, }; use hir_ty::{ all_super_traits, autoderef, check_orphan_rules, @@ -838,7 +839,7 @@ fn macro_call_diagnostics( let file_id = loc.kind.file_id(); let node = InFile::new(file_id, db.ast_id_map(file_id).get_erased(loc.kind.erased_ast_id())); - let (message, error) = err.render_to_string(db.upcast()); + let RenderedExpandError { message, error, kind } = err.render_to_string(db.upcast()); let precise_location = if err.span().anchor.file_id == file_id { Some( err.span().range @@ -850,7 +851,7 @@ fn macro_call_diagnostics( } else { None }; - acc.push(MacroError { node, precise_location, message, error }.into()); + acc.push(MacroError { node, precise_location, message, error, kind }.into()); } if !parse_errors.is_empty() { @@ -916,13 +917,14 @@ fn emit_def_diagnostic_( DefDiagnosticKind::MacroError { ast, path, err } => { let item = ast.to_ptr(db.upcast()); - let (message, error) = err.render_to_string(db.upcast()); + let RenderedExpandError { message, error, kind } = err.render_to_string(db.upcast()); acc.push( MacroError { node: InFile::new(ast.file_id, item.syntax_node_ptr()), precise_location: None, message: format!("{}: {message}", path.display(db.upcast(), edition)), error, + kind, } .into(), ) @@ -1811,7 +1813,8 @@ impl DefWithBody { InactiveCode { node: *node, cfg: cfg.clone(), opts: opts.clone() }.into() } BodyDiagnostic::MacroError { node, err } => { - let (message, error) = err.render_to_string(db.upcast()); + let RenderedExpandError { message, error, kind } = + err.render_to_string(db.upcast()); let precise_location = if err.span().anchor.file_id == node.file_id { Some( @@ -1829,6 +1832,7 @@ impl DefWithBody { precise_location, message, error, + kind, } .into() } @@ -1885,7 +1889,7 @@ impl DefWithBody { let (unafe_exprs, only_lint) = hir_ty::diagnostics::missing_unsafe(db, self.into()); for expr in unafe_exprs { - match source_map.expr_syntax(expr) { + match source_map.expr_or_pat_syntax(expr) { Ok(expr) => acc.push(MissingUnsafe { expr, only_lint }.into()), Err(SyntheticSyntax) => { // FIXME: Here and 
elsewhere in this file, the `expr` was @@ -2420,8 +2424,8 @@ impl SelfParam { func_data .params .first() - .map(|param| match &**param { - TypeRef::Reference(.., mutability) => match mutability { + .map(|¶m| match &func_data.types_map[param] { + TypeRef::Reference(ref_) => match ref_.mutability { hir_def::type_ref::Mutability::Shared => Access::Shared, hir_def::type_ref::Mutability::Mut => Access::Exclusive, }, @@ -2747,10 +2751,6 @@ impl TypeAlias { Module { id: self.id.module(db.upcast()) } } - pub fn type_ref(self, db: &dyn HirDatabase) -> Option { - db.type_alias_data(self.id).type_ref.as_deref().cloned() - } - pub fn ty(self, db: &dyn HirDatabase) -> Type { Type::from_def(db, self.id) } @@ -3481,7 +3481,7 @@ impl Local { LocalSource { local: self, source: src.map(|ast| match ast.to_node(&root) { - ast::Pat::IdentPat(it) => Either::Left(it), + Either::Right(ast::Pat::IdentPat(it)) => Either::Left(it), _ => unreachable!("local with non ident-pattern"), }), } @@ -3510,7 +3510,7 @@ impl Local { LocalSource { local: self, source: src.map(|ast| match ast.to_node(&root) { - ast::Pat::IdentPat(it) => Either::Left(it), + Either::Right(ast::Pat::IdentPat(it)) => Either::Left(it), _ => unreachable!("local with non ident-pattern"), }), } @@ -4235,10 +4235,7 @@ impl CaptureUsages { } mir::MirSpan::PatId(pat) => { if let Ok(pat) = source_map.pat_syntax(pat) { - result.push(CaptureUsageSource { - is_ref, - source: pat.map(AstPtr::wrap_right), - }); + result.push(CaptureUsageSource { is_ref, source: pat }); } } mir::MirSpan::BindingId(binding) => result.extend( @@ -4246,10 +4243,7 @@ impl CaptureUsages { .patterns_for_binding(binding) .iter() .filter_map(|&pat| source_map.pat_syntax(pat).ok()) - .map(|pat| CaptureUsageSource { - is_ref, - source: pat.map(AstPtr::wrap_right), - }), + .map(|pat| CaptureUsageSource { is_ref, source: pat }), ), mir::MirSpan::SelfParam | mir::MirSpan::Unknown => { unreachable!("invalid capture usage span") diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics.rs b/src/tools/rust-analyzer/crates/hir/src/semantics.rs index 3eac33ce99091..feb9a344d8a5f 100644 --- a/src/tools/rust-analyzer/crates/hir/src/semantics.rs +++ b/src/tools/rust-analyzer/crates/hir/src/semantics.rs @@ -11,13 +11,13 @@ use std::{ use either::Either; use hir_def::{ - hir::Expr, + hir::{Expr, ExprOrPatId}, lower::LowerCtx, nameres::{MacroSubNs, ModuleOrigin}, path::ModPath, resolver::{self, HasResolver, Resolver, TypeNs}, - type_ref::Mutability, - AsMacroCall, DefWithBodyId, FunctionId, MacroId, TraitId, VariantId, + type_ref::{Mutability, TypesMap, TypesSourceMap}, + AsMacroCall, DefWithBodyId, FunctionId, MacroId, StructId, TraitId, VariantId, }; use hir_expand::{ attrs::collect_attrs, @@ -45,7 +45,7 @@ use syntax::{ use crate::{ db::HirDatabase, semantics::source_to_def::{ChildContainer, SourceToDefCache, SourceToDefCtx}, - source_analyzer::{resolve_hir_path, SourceAnalyzer}, + source_analyzer::{name_hygiene, resolve_hir_path, SourceAnalyzer}, Access, Adjust, Adjustment, Adt, AutoBorrow, BindingMode, BuiltinAttr, Callable, Const, ConstParam, Crate, DeriveHelper, Enum, Field, Function, HasSource, HirFileId, Impl, InFile, InlineAsmOperand, ItemInNs, Label, LifetimeParam, Local, Macro, Module, ModuleDef, Name, @@ -154,7 +154,7 @@ impl<'db, DB> ops::Deref for Semantics<'db, DB> { } } -impl<'db, DB: HirDatabase> Semantics<'db, DB> { +impl Semantics<'_, DB> { pub fn new(db: &DB) -> Semantics<'_, DB> { let impl_ = SemanticsImpl::new(db); Semantics { db, imp: impl_ } @@ -203,6 +203,14 @@ impl<'db, 
DB: HirDatabase> Semantics<'db, DB> { self.imp.descend_node_at_offset(node, offset).filter_map(|mut it| it.find_map(N::cast)) } + pub fn resolve_range_pat(&self, range_pat: &ast::RangePat) -> Option { + self.imp.resolve_range_pat(range_pat).map(Struct::from) + } + + pub fn resolve_range_expr(&self, range_expr: &ast::RangeExpr) -> Option { + self.imp.resolve_range_expr(range_expr).map(Struct::from) + } + pub fn resolve_await_to_poll(&self, await_expr: &ast::AwaitExpr) -> Option { self.imp.resolve_await_to_poll(await_expr).map(Function::from) } @@ -928,16 +936,7 @@ impl<'db> SemanticsImpl<'db> { } } - let (file_id, tokens) = stack.first()?; - // make sure we pick the token in the expanded include if we encountered an include, - // otherwise we'll get the wrong semantics - let sa = - tokens.first()?.0.parent().and_then(|parent| { - self.analyze_impl(InFile::new(*file_id, &parent), None, false) - })?; - let mut m_cache = self.macro_call_cache.borrow_mut(); - let def_map = sa.resolver.def_map(); // Filters out all tokens that contain the given range (usually the macro call), any such // token is redundant as the corresponding macro call has already been processed @@ -946,6 +945,10 @@ impl<'db> SemanticsImpl<'db> { }; while let Some((expansion, ref mut tokens)) = stack.pop() { + // Reverse the tokens so we prefer first tokens (to accommodate for popping from the + // back) + // alternatively we could pop from the front but that would shift the content on every pop + tokens.reverse(); while let Some((token, ctx)) = tokens.pop() { let was_not_remapped = (|| { // First expand into attribute invocations @@ -1016,8 +1019,16 @@ impl<'db> SemanticsImpl<'db> { ) { call.as_macro_file() } else { - // FIXME: This is wrong, the SourceAnalyzer might be invalid here - sa.expand(self.db, mcall.as_ref())? + token + .parent() + .and_then(|parent| { + self.analyze_impl( + InFile::new(expansion, &parent), + None, + false, + ) + })? + .expand(self.db, mcall.as_ref())? }; m_cache.insert(mcall, it); it @@ -1087,9 +1098,16 @@ impl<'db> SemanticsImpl<'db> { attr.path().and_then(|it| it.as_single_name_ref())?.as_name(); // Not an attribute, nor a derive, so it's either an intert attribute or a derive helper // Try to resolve to a derive helper and downmap + let resolver = &token + .parent() + .and_then(|parent| { + self.analyze_impl(InFile::new(expansion, &parent), None, false) + })? 
+ .resolver; let id = self.db.ast_id_map(expansion).ast_id(&adt); - let helpers = - def_map.derive_helpers_in_scope(InFile::new(expansion, id))?; + let helpers = resolver + .def_map() + .derive_helpers_in_scope(InFile::new(expansion, id))?; if !helpers.is_empty() { let text_range = attr.syntax().text_range(); @@ -1251,19 +1269,28 @@ impl<'db> SemanticsImpl<'db> { pub fn resolve_type(&self, ty: &ast::Type) -> Option { let analyze = self.analyze(ty.syntax())?; - let ctx = LowerCtx::new(self.db.upcast(), analyze.file_id); + let (mut types_map, mut types_source_map) = + (TypesMap::default(), TypesSourceMap::default()); + let ctx = + LowerCtx::new(self.db.upcast(), analyze.file_id, &mut types_map, &mut types_source_map); + let type_ref = crate::TypeRef::from_ast(&ctx, ty.clone()); let ty = hir_ty::TyLoweringContext::new_maybe_unowned( self.db, &analyze.resolver, + &types_map, + None, analyze.resolver.type_owner(), ) - .lower_ty(&crate::TypeRef::from_ast(&ctx, ty.clone())); + .lower_ty(type_ref); Some(Type::new_with_resolver(self.db, &analyze.resolver, ty)) } pub fn resolve_trait(&self, path: &ast::Path) -> Option { let analyze = self.analyze(path.syntax())?; - let ctx = LowerCtx::new(self.db.upcast(), analyze.file_id); + let (mut types_map, mut types_source_map) = + (TypesMap::default(), TypesSourceMap::default()); + let ctx = + LowerCtx::new(self.db.upcast(), analyze.file_id, &mut types_map, &mut types_source_map); let hir_path = Path::from_src(&ctx, path.clone())?; match analyze.resolver.resolve_path_in_type_ns_fully(self.db.upcast(), &hir_path)? { TypeNs::TraitId(id) => Some(Trait { id }), @@ -1363,6 +1390,14 @@ impl<'db> SemanticsImpl<'db> { self.analyze(call.syntax())?.resolve_method_call_fallback(self.db, call) } + fn resolve_range_pat(&self, range_pat: &ast::RangePat) -> Option { + self.analyze(range_pat.syntax())?.resolve_range_pat(self.db, range_pat) + } + + fn resolve_range_expr(&self, range_expr: &ast::RangeExpr) -> Option { + self.analyze(range_expr.syntax())?.resolve_range_expr(self.db, range_expr) + } + fn resolve_await_to_poll(&self, await_expr: &ast::AwaitExpr) -> Option { self.analyze(await_expr.syntax())?.resolve_await_to_poll(self.db, await_expr) } @@ -1761,7 +1796,9 @@ impl<'db> SemanticsImpl<'db> { } if let Some(parent) = ast::Expr::cast(parent.clone()) { - if let Some(expr_id) = source_map.node_expr(InFile { file_id, value: &parent }) { + if let Some(ExprOrPatId::ExprId(expr_id)) = + source_map.node_expr(InFile { file_id, value: &parent }) + { if let Expr::Unsafe { .. } = body[expr_id] { break true; } @@ -1934,10 +1971,19 @@ impl SemanticsScope<'_> { /// Resolve a path as-if it was written at the given scope. This is /// necessary a heuristic, as it doesn't take hygiene into account. 
- pub fn speculative_resolve(&self, path: &ast::Path) -> Option { - let ctx = LowerCtx::new(self.db.upcast(), self.file_id); - let path = Path::from_src(&ctx, path.clone())?; - resolve_hir_path(self.db, &self.resolver, &path) + pub fn speculative_resolve(&self, ast_path: &ast::Path) -> Option { + let (mut types_map, mut types_source_map) = + (TypesMap::default(), TypesSourceMap::default()); + let ctx = + LowerCtx::new(self.db.upcast(), self.file_id, &mut types_map, &mut types_source_map); + let path = Path::from_src(&ctx, ast_path.clone())?; + resolve_hir_path( + self.db, + &self.resolver, + &path, + name_hygiene(self.db, InFile::new(self.file_id, ast_path.syntax())), + &types_map, + ) } /// Iterates over associated types that may be specified after the given path (using diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs b/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs index 389778b44ed9b..5357e824d09ac 100644 --- a/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs +++ b/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs @@ -328,7 +328,7 @@ impl SourceToDefCtx<'_, '_> { .position(|it| it == *src.value)?; let container = self.find_pat_or_label_container(src.syntax_ref())?; let (_, source_map) = self.db.body_with_source_map(container); - let expr = source_map.node_expr(src.with_value(&ast::Expr::AsmExpr(asm)))?; + let expr = source_map.node_expr(src.with_value(&ast::Expr::AsmExpr(asm)))?.as_expr()?; Some(InlineAsmOperand { owner: container, expr, index }) } @@ -372,7 +372,8 @@ impl SourceToDefCtx<'_, '_> { let break_or_continue = ast::Expr::cast(src.value.syntax().parent()?)?; let container = self.find_pat_or_label_container(src.syntax_ref())?; let (body, source_map) = self.db.body_with_source_map(container); - let break_or_continue = source_map.node_expr(src.with_value(&break_or_continue))?; + let break_or_continue = + source_map.node_expr(src.with_value(&break_or_continue))?.as_expr()?; let (Expr::Break { label, .. } | Expr::Continue { label }) = body[break_or_continue] else { return None; }; diff --git a/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs b/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs index 3da67ae23f83b..8d6e228e14c07 100644 --- a/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs +++ b/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs @@ -7,21 +7,26 @@ //! purely for "IDE needs". 
use std::iter::{self, once}; +use crate::{ + db::HirDatabase, semantics::PathResolution, Adt, AssocItem, BindingMode, BuiltinAttr, + BuiltinType, Callable, Const, DeriveHelper, Field, Function, Local, Macro, ModuleDef, Static, + Struct, ToolModule, Trait, TraitAlias, TupleField, Type, TypeAlias, Variant, +}; use either::Either; use hir_def::{ body::{ scope::{ExprScopes, ScopeId}, - Body, BodySourceMap, + Body, BodySourceMap, HygieneId, }, - hir::{BindingId, ExprId, Pat, PatId}, + hir::{BindingId, ExprId, ExprOrPatId, Pat, PatId}, lang_item::LangItem, lower::LowerCtx, nameres::MacroSubNs, path::{ModPath, Path, PathKind}, resolver::{resolver_for_scope, Resolver, TypeNs, ValueNs}, - type_ref::Mutability, + type_ref::{Mutability, TypesMap, TypesSourceMap}, AsMacroCall, AssocItemId, ConstId, DefWithBodyId, FieldId, FunctionId, ItemContainerId, - LocalFieldId, Lookup, ModuleDefId, TraitId, VariantId, + LocalFieldId, Lookup, ModuleDefId, StructId, TraitId, VariantId, }; use hir_expand::{ mod_path::path, @@ -40,18 +45,13 @@ use hir_ty::{ use intern::sym; use itertools::Itertools; use smallvec::SmallVec; +use syntax::ast::{RangeItem, RangeOp}; use syntax::{ ast::{self, AstNode}, SyntaxKind, SyntaxNode, TextRange, TextSize, }; use triomphe::Arc; -use crate::{ - db::HirDatabase, semantics::PathResolution, Adt, AssocItem, BindingMode, BuiltinAttr, - BuiltinType, Callable, Const, DeriveHelper, Field, Function, Local, Macro, ModuleDef, Static, - Struct, ToolModule, Trait, TraitAlias, TupleField, Type, TypeAlias, Variant, -}; - /// `SourceAnalyzer` is a convenience wrapper which exposes HIR API in terms of /// original source files. It should not be used inside the HIR itself. #[derive(Debug)] @@ -120,7 +120,7 @@ impl SourceAnalyzer { self.def.as_ref().map(|(_, body, _)| &**body) } - fn expr_id(&self, db: &dyn HirDatabase, expr: &ast::Expr) -> Option { + fn expr_id(&self, db: &dyn HirDatabase, expr: &ast::Expr) -> Option { let src = match expr { ast::Expr::MacroExpr(expr) => { self.expand_expr(db, InFile::new(self.file_id, expr.macro_call()?))?.into() @@ -174,7 +174,9 @@ impl SourceAnalyzer { db: &dyn HirDatabase, expr: &ast::Expr, ) -> Option<&[Adjustment]> { - let expr_id = self.expr_id(db, expr)?; + // It is safe to omit destructuring assignments here because they have no adjustments (neither + // expressions nor patterns). 
+ let expr_id = self.expr_id(db, expr)?.as_expr()?; let infer = self.infer.as_ref()?; infer.expr_adjustments.get(&expr_id).map(|v| &**v) } @@ -186,9 +188,9 @@ impl SourceAnalyzer { ) -> Option<(Type, Option)> { let expr_id = self.expr_id(db, expr)?; let infer = self.infer.as_ref()?; - let coerced = infer - .expr_adjustments - .get(&expr_id) + let coerced = expr_id + .as_expr() + .and_then(|expr_id| infer.expr_adjustments.get(&expr_id)) .and_then(|adjusts| adjusts.last().map(|adjust| adjust.target.clone())); let ty = infer[expr_id].clone(); let mk_ty = |ty| Type::new_with_resolver(db, &self.resolver, ty); @@ -268,7 +270,7 @@ impl SourceAnalyzer { db: &dyn HirDatabase, call: &ast::MethodCallExpr, ) -> Option { - let expr_id = self.expr_id(db, &call.clone().into())?; + let expr_id = self.expr_id(db, &call.clone().into())?.as_expr()?; let (func, substs) = self.infer.as_ref()?.method_resolution(expr_id)?; let ty = db.value_ty(func.into())?.substitute(Interner, &substs); let ty = Type::new_with_resolver(db, &self.resolver, ty); @@ -282,7 +284,7 @@ impl SourceAnalyzer { db: &dyn HirDatabase, call: &ast::MethodCallExpr, ) -> Option { - let expr_id = self.expr_id(db, &call.clone().into())?; + let expr_id = self.expr_id(db, &call.clone().into())?.as_expr()?; let (f_in_trait, substs) = self.infer.as_ref()?.method_resolution(expr_id)?; Some(self.resolve_impl_method_or_trait_def(db, f_in_trait, substs).into()) @@ -293,7 +295,7 @@ impl SourceAnalyzer { db: &dyn HirDatabase, call: &ast::MethodCallExpr, ) -> Option> { - let expr_id = self.expr_id(db, &call.clone().into())?; + let expr_id = self.expr_id(db, &call.clone().into())?.as_expr()?; let inference_result = self.infer.as_ref()?; match inference_result.method_resolution(expr_id) { Some((f_in_trait, substs)) => Some(Either::Left( @@ -322,7 +324,7 @@ impl SourceAnalyzer { field: &ast::FieldExpr, ) -> Option> { let &(def, ..) = self.def.as_ref()?; - let expr_id = self.expr_id(db, &field.clone().into())?; + let expr_id = self.expr_id(db, &field.clone().into())?.as_expr()?; self.infer.as_ref()?.field_resolution(expr_id).map(|it| { it.map_either(Into::into, |f| TupleField { owner: def, tuple: f.tuple, index: f.index }) }) @@ -334,7 +336,7 @@ impl SourceAnalyzer { field: &ast::FieldExpr, ) -> Option, Function>> { let &(def, ..) 
= self.def.as_ref()?; - let expr_id = self.expr_id(db, &field.clone().into())?; + let expr_id = self.expr_id(db, &field.clone().into())?.as_expr()?; let inference_result = self.infer.as_ref()?; match inference_result.field_resolution(expr_id) { Some(field) => Some(Either::Left(field.map_either(Into::into, |f| TupleField { @@ -348,6 +350,45 @@ impl SourceAnalyzer { } } + pub(crate) fn resolve_range_pat( + &self, + db: &dyn HirDatabase, + range_pat: &ast::RangePat, + ) -> Option { + let path: ModPath = match (range_pat.op_kind()?, range_pat.start(), range_pat.end()) { + (RangeOp::Exclusive, None, Some(_)) => path![core::ops::RangeTo], + (RangeOp::Exclusive, Some(_), None) => path![core::ops::RangeFrom], + (RangeOp::Exclusive, Some(_), Some(_)) => path![core::ops::Range], + (RangeOp::Inclusive, None, Some(_)) => path![core::ops::RangeToInclusive], + (RangeOp::Inclusive, Some(_), Some(_)) => path![core::ops::RangeInclusive], + + (RangeOp::Exclusive, None, None) => return None, + (RangeOp::Inclusive, None, None) => return None, + (RangeOp::Inclusive, Some(_), None) => return None, + }; + self.resolver.resolve_known_struct(db.upcast(), &path) + } + + pub(crate) fn resolve_range_expr( + &self, + db: &dyn HirDatabase, + range_expr: &ast::RangeExpr, + ) -> Option { + let path: ModPath = match (range_expr.op_kind()?, range_expr.start(), range_expr.end()) { + (RangeOp::Exclusive, None, None) => path![core::ops::RangeFull], + (RangeOp::Exclusive, None, Some(_)) => path![core::ops::RangeTo], + (RangeOp::Exclusive, Some(_), None) => path![core::ops::RangeFrom], + (RangeOp::Exclusive, Some(_), Some(_)) => path![core::ops::Range], + (RangeOp::Inclusive, None, Some(_)) => path![core::ops::RangeToInclusive], + (RangeOp::Inclusive, Some(_), Some(_)) => path![core::ops::RangeInclusive], + + // [E0586] inclusive ranges must be bounded at the end + (RangeOp::Inclusive, None, None) => return None, + (RangeOp::Inclusive, Some(_), None) => return None, + }; + self.resolver.resolve_known_struct(db.upcast(), &path) + } + pub(crate) fn resolve_await_to_poll( &self, db: &dyn HirDatabase, @@ -403,7 +444,7 @@ impl SourceAnalyzer { self.infer .as_ref() .and_then(|infer| { - let expr = self.expr_id(db, &prefix_expr.clone().into())?; + let expr = self.expr_id(db, &prefix_expr.clone().into())?.as_expr()?; let (func, _) = infer.method_resolution(expr)?; let (deref_mut_trait, deref_mut) = self.lang_trait_fn( db, @@ -449,7 +490,7 @@ impl SourceAnalyzer { .infer .as_ref() .and_then(|infer| { - let expr = self.expr_id(db, &index_expr.clone().into())?; + let expr = self.expr_id(db, &index_expr.clone().into())?.as_expr()?; let (func, _) = infer.method_resolution(expr)?; let (index_mut_trait, index_mut_fn) = self.lang_trait_fn( db, @@ -521,7 +562,8 @@ impl SourceAnalyzer { let expr = ast::Expr::from(record_expr); let expr_id = self.body_source_map()?.node_expr(InFile::new(self.file_id, &expr))?; - let local_name = field.field_name()?.as_name(); + let ast_name = field.field_name()?; + let local_name = ast_name.as_name(); let local = if field.name_ref().is_some() { None } else { @@ -530,15 +572,19 @@ impl SourceAnalyzer { PathKind::Plain, once(local_name.clone()), )); - match self.resolver.resolve_path_in_value_ns_fully(db.upcast(), &path) { + match self.resolver.resolve_path_in_value_ns_fully( + db.upcast(), + &path, + name_hygiene(db, InFile::new(self.file_id, ast_name.syntax())), + ) { Some(ValueNs::LocalBinding(binding_id)) => { Some(Local { binding_id, parent: self.resolver.body_owner()? 
}) } _ => None, } }; - let (_, subst) = self.infer.as_ref()?.type_of_expr.get(expr_id)?.as_adt()?; - let variant = self.infer.as_ref()?.variant_resolution_for_expr(expr_id)?; + let (_, subst) = self.infer.as_ref()?.type_of_expr_or_pat(expr_id)?.as_adt()?; + let variant = self.infer.as_ref()?.variant_resolution_for_expr_or_pat(expr_id)?; let variant_data = variant.variant_data(db.upcast()); let field = FieldId { parent: variant, local_id: variant_data.field(&local_name)? }; let field_ty = @@ -568,7 +614,10 @@ impl SourceAnalyzer { db: &dyn HirDatabase, macro_call: InFile<&ast::MacroCall>, ) -> Option { - let ctx = LowerCtx::new(db.upcast(), macro_call.file_id); + let (mut types_map, mut types_source_map) = + (TypesMap::default(), TypesSourceMap::default()); + let ctx = + LowerCtx::new(db.upcast(), macro_call.file_id, &mut types_map, &mut types_source_map); let path = macro_call.value.path().and_then(|ast| Path::from_src(&ctx, ast))?; self.resolver .resolve_path_as_macro(db.upcast(), path.mod_path()?, Some(MacroSubNs::Bang)) @@ -586,7 +635,7 @@ impl SourceAnalyzer { Pat::Path(path) => path, _ => return None, }; - let res = resolve_hir_path(db, &self.resolver, path)?; + let res = resolve_hir_path(db, &self.resolver, path, HygieneId::ROOT, TypesMap::EMPTY)?; match res { PathResolution::Def(def) => Some(def), _ => None, @@ -606,10 +655,10 @@ impl SourceAnalyzer { let infer = self.infer.as_deref()?; if let Some(path_expr) = parent().and_then(ast::PathExpr::cast) { let expr_id = self.expr_id(db, &path_expr.into())?; - if let Some((assoc, subs)) = infer.assoc_resolutions_for_expr(expr_id) { + if let Some((assoc, subs)) = infer.assoc_resolutions_for_expr_or_pat(expr_id) { let assoc = match assoc { AssocItemId::FunctionId(f_in_trait) => { - match infer.type_of_expr.get(expr_id) { + match infer.type_of_expr_or_pat(expr_id) { None => assoc, Some(func_ty) => { if let TyKind::FnDef(_fn_def, subs) = func_ty.kind(Interner) { @@ -634,7 +683,7 @@ impl SourceAnalyzer { return Some(PathResolution::Def(AssocItem::from(assoc).into())); } if let Some(VariantId::EnumVariantId(variant)) = - infer.variant_resolution_for_expr(expr_id) + infer.variant_resolution_for_expr_or_pat(expr_id) { return Some(PathResolution::Def(ModuleDef::Variant(variant.into()))); } @@ -658,7 +707,7 @@ impl SourceAnalyzer { } else if let Some(rec_lit) = parent().and_then(ast::RecordExpr::cast) { let expr_id = self.expr_id(db, &rec_lit.into())?; if let Some(VariantId::EnumVariantId(variant)) = - infer.variant_resolution_for_expr(expr_id) + infer.variant_resolution_for_expr_or_pat(expr_id) { return Some(PathResolution::Def(ModuleDef::Variant(variant.into()))); } @@ -680,14 +729,16 @@ impl SourceAnalyzer { return resolved; } - let ctx = LowerCtx::new(db.upcast(), self.file_id); + let (mut types_map, mut types_source_map) = + (TypesMap::default(), TypesSourceMap::default()); + let ctx = LowerCtx::new(db.upcast(), self.file_id, &mut types_map, &mut types_source_map); let hir_path = Path::from_src(&ctx, path.clone())?; // Case where path is a qualifier of a use tree, e.g. foo::bar::{Baz, Qux} where we are // trying to resolve foo::bar. if let Some(use_tree) = parent().and_then(ast::UseTree::cast) { if use_tree.coloncolon_token().is_some() { - return resolve_hir_path_qualifier(db, &self.resolver, &hir_path); + return resolve_hir_path_qualifier(db, &self.resolver, &hir_path, &types_map); } } @@ -704,7 +755,7 @@ impl SourceAnalyzer { // Case where path is a qualifier of another path, e.g. foo::bar::Baz where we are // trying to resolve foo::bar. 
if path.parent_path().is_some() { - return match resolve_hir_path_qualifier(db, &self.resolver, &hir_path) { + return match resolve_hir_path_qualifier(db, &self.resolver, &hir_path, &types_map) { None if meta_path.is_some() => { path.first_segment().and_then(|it| it.name_ref()).and_then(|name_ref| { ToolModule::by_name(db, self.resolver.krate().into(), &name_ref.text()) @@ -775,9 +826,16 @@ impl SourceAnalyzer { }; } if parent().map_or(false, |it| ast::Visibility::can_cast(it.kind())) { - resolve_hir_path_qualifier(db, &self.resolver, &hir_path) + resolve_hir_path_qualifier(db, &self.resolver, &hir_path, &types_map) } else { - resolve_hir_path_(db, &self.resolver, &hir_path, prefer_value_ns) + resolve_hir_path_( + db, + &self.resolver, + &hir_path, + prefer_value_ns, + name_hygiene(db, InFile::new(self.file_id, path.syntax())), + &types_map, + ) } } @@ -790,10 +848,16 @@ impl SourceAnalyzer { let infer = self.infer.as_ref()?; let expr_id = self.expr_id(db, &literal.clone().into())?; - let substs = infer.type_of_expr[expr_id].as_adt()?.1; + let substs = infer[expr_id].as_adt()?.1; - let (variant, missing_fields, _exhaustive) = - record_literal_missing_fields(db, infer, expr_id, &body[expr_id])?; + let (variant, missing_fields, _exhaustive) = match expr_id { + ExprOrPatId::ExprId(expr_id) => { + record_literal_missing_fields(db, infer, expr_id, &body[expr_id])? + } + ExprOrPatId::PatId(pat_id) => { + record_pattern_missing_fields(db, infer, pat_id, &body[pat_id])? + } + }; let res = self.missing_fields(db, substs, variant, missing_fields); Some(res) } @@ -856,7 +920,7 @@ impl SourceAnalyzer { ) -> Option { let infer = self.infer.as_ref()?; let expr_id = self.expr_id(db, &record_lit.into())?; - infer.variant_resolution_for_expr(expr_id) + infer.variant_resolution_for_expr_or_pat(expr_id) } pub(crate) fn is_unsafe_macro_call_expr( @@ -867,14 +931,24 @@ impl SourceAnalyzer { if let (Some((def, body, sm)), Some(infer)) = (&self.def, &self.infer) { if let Some(expanded_expr) = sm.macro_expansion_expr(macro_expr) { let mut is_unsafe = false; - unsafe_expressions( - db, - infer, - *def, - body, - expanded_expr, - &mut |UnsafeExpr { inside_unsafe_block, .. }| is_unsafe |= !inside_unsafe_block, - ); + let mut walk_expr = |expr_id| { + unsafe_expressions( + db, + infer, + *def, + body, + expr_id, + &mut |UnsafeExpr { inside_unsafe_block, .. 
}| { + is_unsafe |= !inside_unsafe_block + }, + ) + }; + match expanded_expr { + ExprOrPatId::ExprId(expanded_expr) => walk_expr(expanded_expr), + ExprOrPatId::PatId(expanded_pat) => { + body.walk_exprs_in_pat(expanded_pat, &mut walk_expr) + } + } return is_unsafe; } } @@ -887,7 +961,7 @@ impl SourceAnalyzer { format_args: InFile<&ast::FormatArgsExpr>, offset: TextSize, ) -> Option<(TextRange, Option)> { - let implicits = self.body_source_map()?.implicit_format_args(format_args)?; + let (hygiene, implicits) = self.body_source_map()?.implicit_format_args(format_args)?; implicits.iter().find(|(range, _)| range.contains_inclusive(offset)).map(|(range, name)| { ( *range, @@ -899,6 +973,7 @@ impl SourceAnalyzer { PathKind::Plain, Some(name.clone()), )), + hygiene, ), ) }) @@ -925,22 +1000,22 @@ impl SourceAnalyzer { db: &'a dyn HirDatabase, format_args: InFile<&ast::FormatArgsExpr>, ) -> Option)> + 'a> { - Some(self.body_source_map()?.implicit_format_args(format_args)?.iter().map( - move |(range, name)| { - ( - *range, - resolve_hir_value_path( - db, - &self.resolver, - self.resolver.body_owner(), - &Path::from_known_path_with_no_generic(ModPath::from_segments( - PathKind::Plain, - Some(name.clone()), - )), - ), - ) - }, - )) + let (hygiene, names) = self.body_source_map()?.implicit_format_args(format_args)?; + Some(names.iter().map(move |(range, name)| { + ( + *range, + resolve_hir_value_path( + db, + &self.resolver, + self.resolver.body_owner(), + &Path::from_known_path_with_no_generic(ModPath::from_segments( + PathKind::Plain, + Some(name.clone()), + )), + hygiene, + ), + ) + })) } pub(crate) fn as_asm_parts( @@ -991,7 +1066,7 @@ impl SourceAnalyzer { } fn ty_of_expr(&self, db: &dyn HirDatabase, expr: &ast::Expr) -> Option<&Ty> { - self.infer.as_ref()?.type_of_expr.get(self.expr_id(db, expr)?) + self.infer.as_ref()?.type_of_expr_or_pat(self.expr_id(db, expr)?) 
} } @@ -1004,7 +1079,7 @@ fn scope_for( node.ancestors_with_macros(db.upcast()) .take_while(|it| !ast::Item::can_cast(it.kind()) || ast::MacroCall::can_cast(it.kind())) .filter_map(|it| it.map(ast::Expr::cast).transpose()) - .filter_map(|it| source_map.node_expr(it.as_ref())) + .filter_map(|it| source_map.node_expr(it.as_ref())?.as_expr()) .find_map(|it| scopes.scope_for(it)) } @@ -1086,8 +1161,10 @@ pub(crate) fn resolve_hir_path( db: &dyn HirDatabase, resolver: &Resolver, path: &Path, + hygiene: HygieneId, + types_map: &TypesMap, ) -> Option { - resolve_hir_path_(db, resolver, path, false) + resolve_hir_path_(db, resolver, path, false, hygiene, types_map) } #[inline] @@ -1107,13 +1184,20 @@ fn resolve_hir_path_( resolver: &Resolver, path: &Path, prefer_value_ns: bool, + hygiene: HygieneId, + types_map: &TypesMap, ) -> Option { let types = || { let (ty, unresolved) = match path.type_anchor() { Some(type_ref) => { - let (_, res) = - TyLoweringContext::new_maybe_unowned(db, resolver, resolver.type_owner()) - .lower_ty_ext(type_ref); + let (_, res) = TyLoweringContext::new_maybe_unowned( + db, + resolver, + types_map, + None, + resolver.type_owner(), + ) + .lower_ty_ext(type_ref); res.map(|ty_ns| (ty_ns, path.segments().first())) } None => { @@ -1172,7 +1256,7 @@ fn resolve_hir_path_( }; let body_owner = resolver.body_owner(); - let values = || resolve_hir_value_path(db, resolver, body_owner, path); + let values = || resolve_hir_value_path(db, resolver, body_owner, path, hygiene); let items = || { resolver @@ -1197,8 +1281,9 @@ fn resolve_hir_value_path( resolver: &Resolver, body_owner: Option, path: &Path, + hygiene: HygieneId, ) -> Option { - resolver.resolve_path_in_value_ns_fully(db.upcast(), path).and_then(|val| { + resolver.resolve_path_in_value_ns_fully(db.upcast(), path, hygiene).and_then(|val| { let res = match val { ValueNs::LocalBinding(binding_id) => { let var = Local { parent: body_owner?, binding_id }; @@ -1233,13 +1318,19 @@ fn resolve_hir_path_qualifier( db: &dyn HirDatabase, resolver: &Resolver, path: &Path, + types_map: &TypesMap, ) -> Option { (|| { let (ty, unresolved) = match path.type_anchor() { Some(type_ref) => { - let (_, res) = - TyLoweringContext::new_maybe_unowned(db, resolver, resolver.type_owner()) - .lower_ty_ext(type_ref); + let (_, res) = TyLoweringContext::new_maybe_unowned( + db, + resolver, + types_map, + None, + resolver.type_owner(), + ) + .lower_ty_ext(type_ref); res.map(|ty_ns| (ty_ns, path.segments().first())) } None => { @@ -1303,3 +1394,13 @@ fn resolve_hir_path_qualifier( .map(|it| PathResolution::Def(it.into())) }) } + +pub(crate) fn name_hygiene(db: &dyn HirDatabase, name: InFile<&SyntaxNode>) -> HygieneId { + let Some(macro_file) = name.file_id.macro_file() else { + return HygieneId::ROOT; + }; + let span_map = db.expansion_span_map(macro_file); + let ctx = span_map.span_at(name.value.text_range().start()).ctx; + let ctx = db.lookup_intern_syntax_context(ctx); + HygieneId::new(ctx.opaque_and_semitransparent) +} diff --git a/src/tools/rust-analyzer/crates/hir/src/symbols.rs b/src/tools/rust-analyzer/crates/hir/src/symbols.rs index cabb7e3db3d92..f8416f86bf9fa 100644 --- a/src/tools/rust-analyzer/crates/hir/src/symbols.rs +++ b/src/tools/rust-analyzer/crates/hir/src/symbols.rs @@ -8,7 +8,10 @@ use hir_def::{ TraitId, }; use hir_expand::HirFileId; -use hir_ty::{db::HirDatabase, display::HirDisplay}; +use hir_ty::{ + db::HirDatabase, + display::{hir_display_with_types_map, HirDisplay}, +}; use span::Edition; use syntax::{ast::HasName, AstNode, 
AstPtr, SmolStr, SyntaxNode, SyntaxNodePtr, ToSmolStr}; @@ -214,8 +217,11 @@ impl<'a> SymbolCollector<'a> { fn collect_from_impl(&mut self, impl_id: ImplId) { let impl_data = self.db.impl_data(impl_id); - let impl_name = - Some(SmolStr::new(impl_data.self_ty.display(self.db, self.edition).to_string())); + let impl_name = Some( + hir_display_with_types_map(impl_data.self_ty, &impl_data.types_map) + .display(self.db, self.edition) + .to_smolstr(), + ); self.with_container_name(impl_name, |s| { for &assoc_item_id in impl_data.items.iter() { s.push_assoc_item(assoc_item_id) diff --git a/src/tools/rust-analyzer/crates/ide-assists/Cargo.toml b/src/tools/rust-analyzer/crates/ide-assists/Cargo.toml index 2a14fbe1e0a2d..ba215868710e9 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/Cargo.toml +++ b/src/tools/rust-analyzer/crates/ide-assists/Cargo.toml @@ -23,7 +23,6 @@ tracing.workspace = true # local deps stdx.workspace = true syntax.workspace = true -text-edit.workspace = true ide-db.workspace = true hir.workspace = true diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/bool_to_enum.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/bool_to_enum.rs index c035c59ffca63..605fd14052396 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/bool_to_enum.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/bool_to_enum.rs @@ -1,5 +1,6 @@ use either::Either; use hir::ModuleDef; +use ide_db::text_edit::TextRange; use ide_db::{ assists::{AssistId, AssistKind}, defs::Definition, @@ -19,7 +20,6 @@ use syntax::{ }, AstNode, NodeOrToken, SyntaxKind, SyntaxNode, T, }; -use text_edit::TextRange; use crate::{ assist_context::{AssistContext, Assists}, diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_struct_binding.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_struct_binding.rs index b229b750e88ba..22a1efdbea735 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_struct_binding.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_struct_binding.rs @@ -1,4 +1,5 @@ use hir::{sym, HasVisibility}; +use ide_db::text_edit::TextRange; use ide_db::{ assists::{AssistId, AssistKind}, defs::Definition, @@ -8,7 +9,6 @@ use ide_db::{ }; use itertools::Itertools; use syntax::{ast, ted, AstNode, Edition, SmolStr, SyntaxNode, ToSmolStr}; -use text_edit::TextRange; use crate::{ assist_context::{AssistContext, Assists, SourceChangeBuilder}, diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_tuple_binding.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_tuple_binding.rs index 9ecfb83ed531a..3f0d5cf152c23 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_tuple_binding.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_tuple_binding.rs @@ -1,3 +1,4 @@ +use ide_db::text_edit::TextRange; use ide_db::{ assists::{AssistId, AssistKind}, defs::Definition, @@ -8,7 +9,6 @@ use syntax::{ ast::{self, make, AstNode, FieldExpr, HasName, IdentPat}, ted, }; -use text_edit::TextRange; use crate::{ assist_context::{AssistContext, Assists, SourceChangeBuilder}, diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_dbg.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_dbg.rs index 0e9c463e02459..94274f6d17c3f 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_dbg.rs +++ 
b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_dbg.rs @@ -41,7 +41,7 @@ pub(crate) fn remove_dbg(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<( macro_calls.into_iter().filter_map(compute_dbg_replacement).collect::>(); acc.add( - AssistId("remove_dbg", AssistKind::Refactor), + AssistId("remove_dbg", AssistKind::QuickFix), "Remove dbg!()", replacements.iter().map(|&(range, _)| range).reduce(|acc, range| acc.cover(range))?, |builder| { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_unused_imports.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_unused_imports.rs index c6f99d68748dd..0570b447782ec 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_unused_imports.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_unused_imports.rs @@ -1,6 +1,7 @@ use std::collections::hash_map::Entry; use hir::{FileRange, HirFileIdExt, InFile, InRealFile, Module, ModuleSource}; +use ide_db::text_edit::TextRange; use ide_db::{ defs::Definition, search::{FileReference, ReferenceCategory, SearchScope}, @@ -10,7 +11,6 @@ use syntax::{ ast::{self, Rename}, AstNode, }; -use text_edit::TextRange; use crate::{AssistContext, AssistId, AssistKind, Assists}; diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_named_generic_with_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_named_generic_with_impl.rs index 8a6c2937d9051..26fd887cc99e9 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_named_generic_with_impl.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_named_generic_with_impl.rs @@ -1,4 +1,5 @@ use hir::{FileRange, Semantics}; +use ide_db::text_edit::TextRange; use ide_db::{ defs::Definition, search::{SearchScope, UsageSearchResult}, @@ -11,7 +12,6 @@ use syntax::{ }, match_ast, ted, AstNode, }; -use text_edit::TextRange; use crate::{AssistContext, AssistId, AssistKind, Assists}; diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unmerge_match_arm.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unmerge_match_arm.rs index db789cfa33428..648bf358b4bb6 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unmerge_match_arm.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unmerge_match_arm.rs @@ -34,6 +34,9 @@ use crate::{AssistContext, AssistId, AssistKind, Assists}; pub(crate) fn unmerge_match_arm(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { let pipe_token = ctx.find_token_syntax_at_offset(T![|])?; let or_pat = ast::OrPat::cast(pipe_token.parent()?)?.clone_for_update(); + if or_pat.leading_pipe().is_some_and(|it| it == pipe_token) { + return None; + } let match_arm = ast::MatchArm::cast(or_pat.syntax().parent()?)?; let match_arm_body = match_arm.expr()?; diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs b/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs index 22620816d5043..8aaf5d6fff239 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs @@ -301,6 +301,7 @@ mod handlers { inline_call::inline_into_callers, inline_const_as_literal::inline_const_as_literal, inline_local_variable::inline_local_variable, + inline_macro::inline_macro, inline_type_alias::inline_type_alias, inline_type_alias::inline_type_alias_uses, into_to_qualified_from::into_to_qualified_from, @@ -326,6 +327,7 @@ mod handlers { raw_string::add_hash, 
raw_string::make_usual_string, raw_string::remove_hash, + remove_dbg::remove_dbg, remove_mut::remove_mut, remove_unused_imports::remove_unused_imports, remove_unused_param::remove_unused_param, @@ -381,9 +383,6 @@ mod handlers { generate_getter_or_setter::generate_setter, generate_delegate_methods::generate_delegate_methods, generate_deref::generate_deref, - // - remove_dbg::remove_dbg, - inline_macro::inline_macro, // Are you sure you want to add new assist here, and not to the // sorted list above? ] diff --git a/src/tools/rust-analyzer/crates/ide-completion/Cargo.toml b/src/tools/rust-analyzer/crates/ide-completion/Cargo.toml index 614465b4d06bc..1bef82af5ac9e 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/Cargo.toml +++ b/src/tools/rust-analyzer/crates/ide-completion/Cargo.toml @@ -25,7 +25,6 @@ base-db.workspace = true ide-db.workspace = true stdx.workspace = true syntax.workspace = true -text-edit.workspace = true # completions crate should depend only on the top-level `hir` package. if you need # something from some `hir-xxx` subpackage, reexport the API via `hir`. hir.workspace = true diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs index 672e1796d1eff..c38a8ef29bb18 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs @@ -32,6 +32,7 @@ //! ``` use hir::{db::ExpandDatabase, HasAttrs, MacroFileId, Name}; +use ide_db::text_edit::TextEdit; use ide_db::{ documentation::HasDocs, path_transform::PathTransform, syntax_helpers::prettify_macro_expansion, traits::get_missing_assoc_items, SymbolKind, @@ -40,7 +41,6 @@ use syntax::{ ast::{self, edit_in_place::AttrsOwnerEdit, make, HasGenericArgs, HasTypeBounds}, format_smolstr, ted, AstNode, SmolStr, SyntaxElement, SyntaxKind, TextRange, ToSmolStr, T, }; -use text_edit::TextEdit; use crate::{ context::PathCompletionCtx, CompletionContext, CompletionItem, CompletionItemKind, diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs index 05e2892fdc85c..f12f011a6bd35 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs @@ -7,7 +7,6 @@ use ide_db::{ base_db::{SourceRootDatabase, VfsPath}, FxHashSet, RootDatabase, SymbolKind, }; -use stdx::IsNoneOr; use syntax::{ast, AstNode, SyntaxKind, ToSmolStr}; use crate::{context::CompletionContext, CompletionItem, Completions}; @@ -66,7 +65,7 @@ pub(crate) fn complete_mod( .iter() .filter(|&submodule_candidate_file| submodule_candidate_file != module_definition_file) .filter(|&submodule_candidate_file| { - IsNoneOr::is_none_or(module_declaration_file, |it| it != submodule_candidate_file) + module_declaration_file.is_none_or(|it| it != submodule_candidate_file) }) .filter_map(|submodule_file| { let submodule_path = source_root.path_for_file(&submodule_file)?; diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs index d3579fd8cc6e0..495f82da866e0 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs @@ -3,6 +3,7 
@@ mod format_like; use hir::ItemInNs; +use ide_db::text_edit::TextEdit; use ide_db::{ documentation::{Documentation, HasDocs}, imports::insert_use::ImportScope, @@ -15,7 +16,6 @@ use syntax::{ SyntaxKind::{BLOCK_EXPR, EXPR_STMT, FOR_EXPR, IF_EXPR, LOOP_EXPR, STMT_LIST, WHILE_EXPR}, TextRange, TextSize, }; -use text_edit::TextEdit; use crate::{ completions::postfix::format_like::add_format_like_completions, diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/context.rs b/src/tools/rust-analyzer/crates/ide-completion/src/context.rs index 0e1302ff2ef4e..efbee39a2d498 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/context.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/context.rs @@ -20,7 +20,6 @@ use syntax::{ SyntaxKind::{self, *}, SyntaxToken, TextRange, TextSize, T, }; -use text_edit::Indel; use crate::{ context::analysis::{expand_and_analyze, AnalysisResult}, @@ -684,8 +683,7 @@ impl<'a> CompletionContext<'a> { // actual completion. let file_with_fake_ident = { let parse = db.parse(file_id); - let edit = Indel::insert(offset, COMPLETION_MARKER.to_owned()); - parse.reparse(&edit, file_id.edition()).tree() + parse.reparse(TextRange::empty(offset), COMPLETION_MARKER, file_id.edition()).tree() }; // always pick the token to the immediate left of the cursor, as that is what we are actually diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/item.rs b/src/tools/rust-analyzer/crates/ide-completion/src/item.rs index 8c97ebd550032..52f6bedaaa9f2 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/item.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/item.rs @@ -3,6 +3,7 @@ use std::{fmt, mem}; use hir::Mutability; +use ide_db::text_edit::TextEdit; use ide_db::{ documentation::Documentation, imports::import_assets::LocatedImport, RootDatabase, SnippetCap, SymbolKind, @@ -11,7 +12,6 @@ use itertools::Itertools; use smallvec::SmallVec; use stdx::{impl_from, never}; use syntax::{format_smolstr, Edition, SmolStr, TextRange, TextSize}; -use text_edit::TextEdit; use crate::{ context::{CompletionContext, PathCompletionCtx}, @@ -426,7 +426,7 @@ impl CompletionItem { self.lookup.as_str() } - pub fn ref_match(&self) -> Option<(String, text_edit::Indel, CompletionRelevance)> { + pub fn ref_match(&self) -> Option<(String, ide_db::text_edit::Indel, CompletionRelevance)> { // Relevance of the ref match should be the same as the original // match, but with exact type match set because self.ref_match // is only set if there is an exact type match. 
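Aside (not part of the patch): the `complete_mod` change above swaps the `stdx::IsNoneOr` polyfill for the standard library's `Option::is_none_or`, stable since Rust 1.82. A tiny standalone illustration of the semantics:

    fn main() {
        let module_declaration_file: Option<u32> = None;
        // `is_none_or` returns true for `None`; otherwise it applies the predicate.
        assert!(module_declaration_file.is_none_or(|it| it != 3));
        assert!(Some(5).is_none_or(|it| it != 3));
        assert!(!Some(3).is_none_or(|it| it != 3));
    }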
@@ -436,7 +436,10 @@ impl CompletionItem { self.ref_match.map(|(mutability, offset)| { ( format!("&{}{}", mutability.as_keyword_for_ref(), self.label), - text_edit::Indel::insert(offset, format!("&{}", mutability.as_keyword_for_ref())), + ide_db::text_edit::Indel::insert( + offset, + format!("&{}", mutability.as_keyword_for_ref()), + ), relevance, ) }) diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs b/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs index a78976d3fd8b8..dfee01b187e97 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs @@ -10,16 +10,17 @@ mod snippet; #[cfg(test)] mod tests; +use ide_db::text_edit::TextEdit; use ide_db::{ helpers::mod_path_to_ast, imports::{ import_assets::NameToImport, insert_use::{self, ImportScope}, }, - items_locator, FilePosition, RootDatabase, + items_locator, + syntax_helpers::tree_diff::diff, + FilePosition, RootDatabase, }; -use syntax::algo; -use text_edit::TextEdit; use crate::{ completions::Completions, @@ -297,6 +298,6 @@ pub fn resolve_completion_edits( } }); - algo::diff(scope.as_syntax_node(), new_ast.as_syntax_node()).into_text_edit(&mut import_insert); + diff(scope.as_syntax_node(), new_ast.as_syntax_node()).into_text_edit(&mut import_insert); Some(vec![import_insert.finish()]) } diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render.rs index 4dd171142f9c0..ec3c2fe35563f 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/render.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/render.rs @@ -11,6 +11,7 @@ pub(crate) mod union_literal; pub(crate) mod variant; use hir::{sym, AsAssocItem, HasAttrs, HirDisplay, ModuleDef, ScopeDef, Type}; +use ide_db::text_edit::TextEdit; use ide_db::{ documentation::{Documentation, HasDocs}, helpers::item_name, @@ -18,7 +19,6 @@ use ide_db::{ RootDatabase, SnippetCap, SymbolKind, }; use syntax::{ast, format_smolstr, AstNode, Edition, SmolStr, SyntaxKind, TextRange, ToSmolStr}; -use text_edit::TextEdit; use crate::{ context::{DotAccess, DotAccessKind, PathCompletionCtx, PathKind, PatternContext}, diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/attribute.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/attribute.rs index 1bbe097cc6c86..45679355b4271 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/attribute.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/attribute.rs @@ -663,6 +663,7 @@ mod cfg { ba dbg ba opt_level ba test + ba true "#]], ); check( @@ -674,6 +675,7 @@ mod cfg { ba dbg ba opt_level ba test + ba true "#]], ); } diff --git a/src/tools/rust-analyzer/crates/ide-db/Cargo.toml b/src/tools/rust-analyzer/crates/ide-db/Cargo.toml index c078188d6d3cd..17f0e69bde4f1 100644 --- a/src/tools/rust-analyzer/crates/ide-db/Cargo.toml +++ b/src/tools/rust-analyzer/crates/ide-db/Cargo.toml @@ -35,7 +35,6 @@ parser.workspace = true profile.workspace = true stdx.workspace = true syntax.workspace = true -text-edit.workspace = true span.workspace = true # ide should depend only on the top-level `hir` package. if you need # something from some `hir-xxx` subpackage, reexport the API via `hir`. 
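Aside (not part of the patch): the attribute-completion test above now expects `true` to be offered inside `#[cfg(...)]`, in line with the boolean literal cfg predicates `cfg(true)` / `cfg(false)`. A minimal sketch of what those predicates mean for user code, assuming a toolchain recent enough to accept them:

    // Code under `cfg(false)` is never compiled; code under `cfg(true)` always is.
    #[cfg(false)]
    fn inactive() {}

    #[cfg(true)]
    fn active() {}

    fn main() {
        active();
    }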
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs b/src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs index 7474d7bc54dba..35e3a8d9bf7f5 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs @@ -100,16 +100,19 @@ impl RootDatabase { hir::db::ConstEvalQuery hir::db::ConstEvalStaticQuery hir::db::ConstParamTyQuery + hir::db::DynCompatibilityOfTraitQuery hir::db::FieldTypesQuery hir::db::FnDefDatumQuery hir::db::FnDefVarianceQuery hir::db::GenericDefaultsQuery hir::db::GenericPredicatesForParamQuery hir::db::GenericPredicatesQuery + hir::db::GenericPredicatesWithoutParentQuery hir::db::ImplDatumQuery hir::db::ImplSelfTyQuery hir::db::ImplTraitQuery hir::db::IncoherentInherentImplCratesQuery + hir::db::InferQuery hir::db::InherentImplsInBlockQuery hir::db::InherentImplsInCrateQuery hir::db::InternCallableDefQuery @@ -119,7 +122,12 @@ impl RootDatabase { hir::db::InternLifetimeParamIdQuery hir::db::InternTypeOrConstParamIdQuery hir::db::LayoutOfAdtQuery + hir::db::LayoutOfTyQuery + hir::db::LookupImplMethodQuery + hir::db::MirBodyForClosureQuery hir::db::MirBodyQuery + hir::db::MonomorphizedMirBodyForClosureQuery + hir::db::MonomorphizedMirBodyQuery hir::db::ProgramClausesForChalkEnvQuery hir::db::ReturnTypeImplTraitsQuery hir::db::TargetDataLayoutQuery @@ -128,13 +136,16 @@ impl RootDatabase { hir::db::TraitImplsInBlockQuery hir::db::TraitImplsInCrateQuery hir::db::TraitImplsInDepsQuery + hir::db::TraitSolveQuery hir::db::TyQuery + hir::db::TypeAliasImplTraitsQuery hir::db::ValueTyQuery // DefDatabase hir::db::AttrsQuery hir::db::BlockDefMapQuery hir::db::BlockItemTreeQuery + hir::db::BlockItemTreeWithSourceMapQuery hir::db::BodyQuery hir::db::BodyWithSourceMapQuery hir::db::ConstDataQuery @@ -145,17 +156,21 @@ impl RootDatabase { hir::db::CrateSupportsNoStdQuery hir::db::EnumDataQuery hir::db::EnumVariantDataWithDiagnosticsQuery + hir::db::ExpandProcAttrMacrosQuery hir::db::ExprScopesQuery hir::db::ExternCrateDeclDataQuery hir::db::FieldVisibilitiesQuery hir::db::FieldsAttrsQuery hir::db::FieldsAttrsSourceMapQuery hir::db::FileItemTreeQuery + hir::db::FileItemTreeWithSourceMapQuery hir::db::FunctionDataQuery hir::db::FunctionVisibilityQuery hir::db::GenericParamsQuery + hir::db::GenericParamsWithSourceMapQuery hir::db::ImplDataWithDiagnosticsQuery hir::db::ImportMapQuery + hir::db::IncludeMacroInvocQuery hir::db::InternAnonymousConstQuery hir::db::InternBlockQuery hir::db::InternConstQuery @@ -177,7 +192,9 @@ impl RootDatabase { hir::db::InternUseQuery hir::db::LangItemQuery hir::db::Macro2DataQuery + hir::db::MacroDefQuery hir::db::MacroRulesDataQuery + hir::db::NotableTraitsInDepsQuery hir::db::ProcMacroDataQuery hir::db::StaticDataQuery hir::db::StructDataWithDiagnosticsQuery @@ -212,6 +229,7 @@ impl RootDatabase { hir::db::MacroArgQuery hir::db::ParseMacroExpansionErrorQuery hir::db::ParseMacroExpansionQuery + hir::db::ProcMacroSpanQuery hir::db::ProcMacrosQuery hir::db::RealSpanMapQuery @@ -220,7 +238,9 @@ impl RootDatabase { // SourceDatabase base_db::ParseQuery + base_db::ParseErrorsQuery base_db::CrateGraphQuery + base_db::CrateWorkspaceDataQuery // SourceDatabaseExt base_db::FileTextQuery diff --git a/src/tools/rust-analyzer/crates/ide-db/src/defs.rs b/src/tools/rust-analyzer/crates/ide-db/src/defs.rs index 099f26eba78a9..fdac4dd2efb80 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/defs.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/defs.rs @@ -5,14 +5,17 @@ // 
FIXME: this badly needs rename/rewrite (matklad, 2020-02-06). +use crate::documentation::{Documentation, HasDocs}; +use crate::famous_defs::FamousDefs; +use crate::RootDatabase; use arrayvec::ArrayVec; use either::Either; use hir::{ Adt, AsAssocItem, AsExternAssocItem, AssocItem, AttributeTemplate, BuiltinAttr, BuiltinType, Const, Crate, DefWithBody, DeriveHelper, DocLinkDef, ExternAssocItem, ExternCrateDecl, Field, Function, GenericParam, HasVisibility, HirDisplay, Impl, InlineAsmOperand, Label, Local, Macro, - Module, ModuleDef, Name, PathResolution, Semantics, Static, StaticLifetime, ToolModule, Trait, - TraitAlias, TupleField, TypeAlias, Variant, VariantDef, Visibility, + Module, ModuleDef, Name, PathResolution, Semantics, Static, StaticLifetime, Struct, ToolModule, + Trait, TraitAlias, TupleField, TypeAlias, Variant, VariantDef, Visibility, }; use span::Edition; use stdx::{format_to, impl_from}; @@ -21,10 +24,6 @@ use syntax::{ match_ast, SyntaxKind, SyntaxNode, SyntaxToken, }; -use crate::documentation::{Documentation, HasDocs}; -use crate::famous_defs::FamousDefs; -use crate::RootDatabase; - // FIXME: a more precise name would probably be `Symbol`? #[derive(Debug, PartialEq, Eq, Copy, Clone, Hash)] pub enum Definition { @@ -179,7 +178,19 @@ impl Definition { Definition::Static(it) => it.docs(db), Definition::Trait(it) => it.docs(db), Definition::TraitAlias(it) => it.docs(db), - Definition::TypeAlias(it) => it.docs(db), + Definition::TypeAlias(it) => { + it.docs(db).or_else(|| { + // docs are missing, try to fall back to the docs of the aliased item. + let adt = it.ty(db).as_adt()?; + let docs = adt.docs(db)?; + let docs = format!( + "*This is the documentation for* `{}`\n\n{}", + adt.display(db, edition), + docs.as_str() + ); + Some(Documentation::new(docs)) + }) + } Definition::BuiltinType(it) => { famous_defs.and_then(|fd| { // std exposes prim_{} modules with docstrings on the root to document the builtins @@ -319,6 +330,8 @@ impl IdentClass { .map(IdentClass::NameClass) .or_else(|| NameRefClass::classify_lifetime(sema, &lifetime).map(IdentClass::NameRefClass)) }, + ast::RangePat(range_pat) => OperatorClass::classify_range_pat(sema, &range_pat).map(IdentClass::Operator), + ast::RangeExpr(range_expr) => OperatorClass::classify_range_expr(sema, &range_expr).map(IdentClass::Operator), ast::AwaitExpr(await_expr) => OperatorClass::classify_await(sema, &await_expr).map(IdentClass::Operator), ast::BinExpr(bin_expr) => OperatorClass::classify_bin(sema, &bin_expr).map(IdentClass::Operator), ast::IndexExpr(index_expr) => OperatorClass::classify_index(sema, &index_expr).map(IdentClass::Operator), @@ -372,6 +385,9 @@ impl IdentClass { | OperatorClass::Index(func) | OperatorClass::Try(func), ) => res.push(Definition::Function(func)), + IdentClass::Operator(OperatorClass::Range(struct0)) => { + res.push(Definition::Adt(Adt::Struct(struct0))) + } } res } @@ -546,6 +562,7 @@ impl NameClass { #[derive(Debug)] pub enum OperatorClass { + Range(Struct), Await(Function), Prefix(Function), Index(Function), @@ -554,6 +571,20 @@ pub enum OperatorClass { } impl OperatorClass { + pub fn classify_range_pat( + sema: &Semantics<'_, RootDatabase>, + range_pat: &ast::RangePat, + ) -> Option { + sema.resolve_range_pat(range_pat).map(OperatorClass::Range) + } + + pub fn classify_range_expr( + sema: &Semantics<'_, RootDatabase>, + range_expr: &ast::RangeExpr, + ) -> Option { + sema.resolve_range_expr(range_expr).map(OperatorClass::Range) + } + pub fn classify_await( sema: &Semantics<'_, RootDatabase>, await_expr: 
&ast::AwaitExpr, diff --git a/src/tools/rust-analyzer/crates/ide-db/src/documentation.rs b/src/tools/rust-analyzer/crates/ide-db/src/documentation.rs index 5e443badf9e7e..b52a325790b18 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/documentation.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/documentation.rs @@ -5,11 +5,11 @@ use hir::{ resolve_doc_path_on, sym, AttrId, AttrSourceMap, AttrsWithOwner, HasAttrs, InFile, }; use itertools::Itertools; +use span::{TextRange, TextSize}; use syntax::{ ast::{self, IsString}, AstToken, }; -use text_edit::{TextRange, TextSize}; /// Holds documentation #[derive(Debug, Clone, PartialEq, Eq, Hash)] diff --git a/src/tools/rust-analyzer/crates/ide-db/src/lib.rs b/src/tools/rust-analyzer/crates/ide-db/src/lib.rs index aed093f0ebfc8..81260c3e080a1 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/lib.rs @@ -19,6 +19,7 @@ pub mod rust_doc; pub mod search; pub mod source_change; pub mod symbol_index; +pub mod text_edit; pub mod traits; pub mod ty_filter; pub mod use_trivial_constructor; @@ -36,6 +37,7 @@ pub mod generated { pub mod syntax_helpers { pub mod format_string; pub mod format_string_exprs; + pub mod tree_diff; pub use hir::prettify_macro_expansion; pub mod node_ext; pub mod suggest_name; @@ -293,3 +295,35 @@ impl SnippetCap { } } } + +pub struct Ranker<'a> { + pub kind: parser::SyntaxKind, + pub text: &'a str, + pub ident_kind: bool, +} + +impl<'a> Ranker<'a> { + pub const MAX_RANK: usize = 0b1110; + + pub fn from_token(token: &'a syntax::SyntaxToken) -> Self { + let kind = token.kind(); + Ranker { kind, text: token.text(), ident_kind: kind.is_any_identifier() } + } + + /// A utility function that ranks a token again a given kind and text, returning a number that + /// represents how close the token is to the given kind and text. + pub fn rank_token(&self, tok: &syntax::SyntaxToken) -> usize { + let tok_kind = tok.kind(); + + let exact_same_kind = tok_kind == self.kind; + let both_idents = exact_same_kind || (tok_kind.is_any_identifier() && self.ident_kind); + let same_text = tok.text() == self.text; + // anything that mapped into a token tree has likely no semantic information + let no_tt_parent = + tok.parent().map_or(false, |it| it.kind() != parser::SyntaxKind::TOKEN_TREE); + (both_idents as usize) + | ((exact_same_kind as usize) << 1) + | ((same_text as usize) << 2) + | ((no_tt_parent as usize) << 3) + } +} diff --git a/src/tools/rust-analyzer/crates/ide-db/src/rename.rs b/src/tools/rust-analyzer/crates/ide-db/src/rename.rs index f1404ed9f22cd..1d1679c3ff882 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/rename.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/rename.rs @@ -22,6 +22,7 @@ //! Our current behavior is ¯\_(ツ)_/¯. 
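Aside (not part of the patch): the new `Ranker` helper added to `ide-db` above scores how closely a candidate token matches a reference token (identifier-ness, exact kind, exact text, and whether the token sits outside a macro token tree). A minimal sketch of the intended use, assuming the `ide_db`, `parser`, and `syntax` crates are available as dependencies:

    use ide_db::Ranker;
    use parser::Edition;
    use syntax::{AstNode, SourceFile};

    fn find_best_match() {
        let original = SourceFile::parse("fn foo() { bar(); }", Edition::CURRENT).tree();
        let expanded = SourceFile::parse("fn foo() { bar(); baz(); }", Edition::CURRENT).tree();

        // The token we want to re-locate in the second tree.
        let needle = original
            .syntax()
            .descendants_with_tokens()
            .filter_map(|it| it.into_token())
            .find(|tok| tok.text() == "bar")
            .unwrap();

        // Higher score means a closer match.
        let ranker = Ranker::from_token(&needle);
        let best = expanded
            .syntax()
            .descendants_with_tokens()
            .filter_map(|it| it.into_token())
            .max_by_key(|tok| ranker.rank_token(tok))
            .unwrap();
        assert_eq!(best.text(), "bar");
    }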
use std::fmt; +use crate::text_edit::{TextEdit, TextEditBuilder}; use base_db::AnchoredPathBuf; use either::Either; use hir::{FieldSource, FileRange, HirFileIdExt, InFile, ModuleSource, Semantics}; @@ -32,7 +33,6 @@ use syntax::{ utils::is_raw_identifier, AstNode, SyntaxKind, TextRange, T, }; -use text_edit::{TextEdit, TextEditBuilder}; use crate::{ defs::Definition, diff --git a/src/tools/rust-analyzer/crates/ide-db/src/source_change.rs b/src/tools/rust-analyzer/crates/ide-db/src/source_change.rs index 73073e92f787e..27ff91dc19d96 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/source_change.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/source_change.rs @@ -5,7 +5,8 @@ use std::{collections::hash_map::Entry, iter, mem}; -use crate::{assists::Command, SnippetCap}; +use crate::text_edit::{TextEdit, TextEditBuilder}; +use crate::{assists::Command, syntax_helpers::tree_diff::diff, SnippetCap}; use base_db::AnchoredPathBuf; use itertools::Itertools; use nohash_hasher::IntMap; @@ -13,11 +14,9 @@ use rustc_hash::FxHashMap; use span::FileId; use stdx::never; use syntax::{ - algo, syntax_editor::{SyntaxAnnotation, SyntaxEditor}, AstNode, SyntaxElement, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextRange, TextSize, }; -use text_edit::{TextEdit, TextEditBuilder}; #[derive(Default, Debug, Clone)] pub struct SourceChange { @@ -315,7 +314,7 @@ impl SourceChangeBuilder { } let mut edit = TextEdit::builder(); - algo::diff(edit_result.old_root(), edit_result.new_root()).into_text_edit(&mut edit); + diff(edit_result.old_root(), edit_result.new_root()).into_text_edit(&mut edit); let edit = edit.finish(); let snippet_edit = @@ -334,7 +333,7 @@ impl SourceChangeBuilder { }); if let Some(tm) = self.mutated_tree.take() { - algo::diff(&tm.immutable, &tm.mutable_clone).into_text_edit(&mut self.edit); + diff(&tm.immutable, &tm.mutable_clone).into_text_edit(&mut self.edit); } let edit = mem::take(&mut self.edit).finish(); @@ -373,7 +372,7 @@ impl SourceChangeBuilder { self.edit.replace(range, replace_with.into()) } pub fn replace_ast(&mut self, old: N, new: N) { - algo::diff(old.syntax(), new.syntax()).into_text_edit(&mut self.edit) + diff(old.syntax(), new.syntax()).into_text_edit(&mut self.edit) } pub fn create_file(&mut self, dst: AnchoredPathBuf, content: impl Into) { let file_system_edit = FileSystemEdit::CreateFile { dst, initial_contents: content.into() }; diff --git a/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/tree_diff.rs b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/tree_diff.rs new file mode 100644 index 0000000000000..02e24c47761c9 --- /dev/null +++ b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/tree_diff.rs @@ -0,0 +1,559 @@ +//! Basic tree diffing functionality. 
+use rustc_hash::FxHashMap; +use syntax::{NodeOrToken, SyntaxElement, SyntaxNode}; + +use crate::{text_edit::TextEditBuilder, FxIndexMap}; + +#[derive(Debug, Hash, PartialEq, Eq)] +enum TreeDiffInsertPos { + After(SyntaxElement), + AsFirstChild(SyntaxElement), +} + +#[derive(Debug)] +pub struct TreeDiff { + replacements: FxHashMap, + deletions: Vec, + // the vec as well as the indexmap are both here to preserve order + insertions: FxIndexMap>, +} + +impl TreeDiff { + pub fn into_text_edit(&self, builder: &mut TextEditBuilder) { + let _p = tracing::info_span!("into_text_edit").entered(); + + for (anchor, to) in &self.insertions { + let offset = match anchor { + TreeDiffInsertPos::After(it) => it.text_range().end(), + TreeDiffInsertPos::AsFirstChild(it) => it.text_range().start(), + }; + to.iter().for_each(|to| builder.insert(offset, to.to_string())); + } + for (from, to) in &self.replacements { + builder.replace(from.text_range(), to.to_string()); + } + for text_range in self.deletions.iter().map(SyntaxElement::text_range) { + builder.delete(text_range); + } + } + + pub fn is_empty(&self) -> bool { + self.replacements.is_empty() && self.deletions.is_empty() && self.insertions.is_empty() + } +} + +/// Finds a (potentially minimal) diff, which, applied to `from`, will result in `to`. +/// +/// Specifically, returns a structure that consists of a replacements, insertions and deletions +/// such that applying this map on `from` will result in `to`. +/// +/// This function tries to find a fine-grained diff. +pub fn diff(from: &SyntaxNode, to: &SyntaxNode) -> TreeDiff { + let _p = tracing::info_span!("diff").entered(); + + let mut diff = TreeDiff { + replacements: FxHashMap::default(), + insertions: FxIndexMap::default(), + deletions: Vec::new(), + }; + let (from, to) = (from.clone().into(), to.clone().into()); + + if !syntax_element_eq(&from, &to) { + go(&mut diff, from, to); + } + return diff; + + fn syntax_element_eq(lhs: &SyntaxElement, rhs: &SyntaxElement) -> bool { + lhs.kind() == rhs.kind() + && lhs.text_range().len() == rhs.text_range().len() + && match (&lhs, &rhs) { + (NodeOrToken::Node(lhs), NodeOrToken::Node(rhs)) => { + lhs == rhs || lhs.text() == rhs.text() + } + (NodeOrToken::Token(lhs), NodeOrToken::Token(rhs)) => lhs.text() == rhs.text(), + _ => false, + } + } + + // FIXME: this is horribly inefficient. I bet there's a cool algorithm to diff trees properly. 
+ fn go(diff: &mut TreeDiff, lhs: SyntaxElement, rhs: SyntaxElement) { + let (lhs, rhs) = match lhs.as_node().zip(rhs.as_node()) { + Some((lhs, rhs)) => (lhs, rhs), + _ => { + cov_mark::hit!(diff_node_token_replace); + diff.replacements.insert(lhs, rhs); + return; + } + }; + + let mut look_ahead_scratch = Vec::default(); + + let mut rhs_children = rhs.children_with_tokens(); + let mut lhs_children = lhs.children_with_tokens(); + let mut last_lhs = None; + loop { + let lhs_child = lhs_children.next(); + match (lhs_child.clone(), rhs_children.next()) { + (None, None) => break, + (None, Some(element)) => { + let insert_pos = match last_lhs.clone() { + Some(prev) => { + cov_mark::hit!(diff_insert); + TreeDiffInsertPos::After(prev) + } + // first iteration, insert into out parent as the first child + None => { + cov_mark::hit!(diff_insert_as_first_child); + TreeDiffInsertPos::AsFirstChild(lhs.clone().into()) + } + }; + diff.insertions.entry(insert_pos).or_default().push(element); + } + (Some(element), None) => { + cov_mark::hit!(diff_delete); + diff.deletions.push(element); + } + (Some(ref lhs_ele), Some(ref rhs_ele)) if syntax_element_eq(lhs_ele, rhs_ele) => {} + (Some(lhs_ele), Some(rhs_ele)) => { + // nodes differ, look for lhs_ele in rhs, if its found we can mark everything up + // until that element as insertions. This is important to keep the diff minimal + // in regards to insertions that have been actually done, this is important for + // use insertions as we do not want to replace the entire module node. + look_ahead_scratch.push(rhs_ele.clone()); + let mut rhs_children_clone = rhs_children.clone(); + let mut insert = false; + for rhs_child in &mut rhs_children_clone { + if syntax_element_eq(&lhs_ele, &rhs_child) { + cov_mark::hit!(diff_insertions); + insert = true; + break; + } + look_ahead_scratch.push(rhs_child); + } + let drain = look_ahead_scratch.drain(..); + if insert { + let insert_pos = if let Some(prev) = last_lhs.clone().filter(|_| insert) { + TreeDiffInsertPos::After(prev) + } else { + cov_mark::hit!(insert_first_child); + TreeDiffInsertPos::AsFirstChild(lhs.clone().into()) + }; + + diff.insertions.entry(insert_pos).or_default().extend(drain); + rhs_children = rhs_children_clone; + } else { + go(diff, lhs_ele, rhs_ele); + } + } + } + last_lhs = lhs_child.or(last_lhs); + } + } +} + +#[cfg(test)] +mod tests { + use expect_test::{expect, Expect}; + use itertools::Itertools; + use parser::{Edition, SyntaxKind}; + use syntax::{AstNode, SourceFile, SyntaxElement}; + + use crate::text_edit::TextEdit; + + #[test] + fn replace_node_token() { + cov_mark::check!(diff_node_token_replace); + check_diff( + r#"use node;"#, + r#"ident"#, + expect![[r#" + insertions: + + + + replacements: + + Line 0: Token(USE_KW@0..3 "use") -> ident + + deletions: + + Line 1: " " + Line 1: node + Line 1: ; + "#]], + ); + } + + #[test] + fn replace_parent() { + cov_mark::check!(diff_insert_as_first_child); + check_diff( + r#""#, + r#"use foo::bar;"#, + expect![[r#" + insertions: + + Line 0: AsFirstChild(Node(SOURCE_FILE@0..0)) + -> use foo::bar; + + replacements: + + + + deletions: + + + "#]], + ); + } + + #[test] + fn insert_last() { + cov_mark::check!(diff_insert); + check_diff( + r#" +use foo; +use bar;"#, + r#" +use foo; +use bar; +use baz;"#, + expect![[r#" + insertions: + + Line 2: After(Node(USE@10..18)) + -> "\n" + -> use baz; + + replacements: + + + + deletions: + + + "#]], + ); + } + + #[test] + fn insert_middle() { + check_diff( + r#" +use foo; +use baz;"#, + r#" +use foo; +use bar; +use baz;"#, 
+ expect![[r#" + insertions: + + Line 2: After(Token(WHITESPACE@9..10 "\n")) + -> use bar; + -> "\n" + + replacements: + + + + deletions: + + + "#]], + ) + } + + #[test] + fn insert_first() { + check_diff( + r#" +use bar; +use baz;"#, + r#" +use foo; +use bar; +use baz;"#, + expect![[r#" + insertions: + + Line 0: After(Token(WHITESPACE@0..1 "\n")) + -> use foo; + -> "\n" + + replacements: + + + + deletions: + + + "#]], + ) + } + + #[test] + fn first_child_insertion() { + cov_mark::check!(insert_first_child); + check_diff( + r#"fn main() { + stdi + }"#, + r#"use foo::bar; + + fn main() { + stdi + }"#, + expect![[r#" + insertions: + + Line 0: AsFirstChild(Node(SOURCE_FILE@0..30)) + -> use foo::bar; + -> "\n\n " + + replacements: + + + + deletions: + + + "#]], + ); + } + + #[test] + fn delete_last() { + cov_mark::check!(diff_delete); + check_diff( + r#"use foo; + use bar;"#, + r#"use foo;"#, + expect![[r#" + insertions: + + + + replacements: + + + + deletions: + + Line 1: "\n " + Line 2: use bar; + "#]], + ); + } + + #[test] + fn delete_middle() { + cov_mark::check!(diff_insertions); + check_diff( + r#" +use expect_test::{expect, Expect}; +use text_edit::TextEdit; + +use crate::AstNode; +"#, + r#" +use expect_test::{expect, Expect}; + +use crate::AstNode; +"#, + expect![[r#" + insertions: + + Line 1: After(Node(USE@1..35)) + -> "\n\n" + -> use crate::AstNode; + + replacements: + + + + deletions: + + Line 2: use text_edit::TextEdit; + Line 3: "\n\n" + Line 4: use crate::AstNode; + Line 5: "\n" + "#]], + ) + } + + #[test] + fn delete_first() { + check_diff( + r#" +use text_edit::TextEdit; + +use crate::AstNode; +"#, + r#" +use crate::AstNode; +"#, + expect![[r#" + insertions: + + + + replacements: + + Line 2: Token(IDENT@5..14 "text_edit") -> crate + Line 2: Token(IDENT@16..24 "TextEdit") -> AstNode + Line 2: Token(WHITESPACE@25..27 "\n\n") -> "\n" + + deletions: + + Line 3: use crate::AstNode; + Line 4: "\n" + "#]], + ) + } + + #[test] + fn merge_use() { + check_diff( + r#" +use std::{ + fmt, + hash::BuildHasherDefault, + ops::{self, RangeInclusive}, +}; +"#, + r#" +use std::fmt; +use std::hash::BuildHasherDefault; +use std::ops::{self, RangeInclusive}; +"#, + expect![[r#" + insertions: + + Line 2: After(Node(PATH_SEGMENT@5..8)) + -> :: + -> fmt + Line 6: After(Token(WHITESPACE@86..87 "\n")) + -> use std::hash::BuildHasherDefault; + -> "\n" + -> use std::ops::{self, RangeInclusive}; + -> "\n" + + replacements: + + Line 2: Token(IDENT@5..8 "std") -> std + + deletions: + + Line 2: :: + Line 2: { + fmt, + hash::BuildHasherDefault, + ops::{self, RangeInclusive}, + } + "#]], + ) + } + + #[test] + fn early_return_assist() { + check_diff( + r#" +fn main() { + if let Ok(x) = Err(92) { + foo(x); + } +} + "#, + r#" +fn main() { + let x = match Err(92) { + Ok(it) => it, + _ => return, + }; + foo(x); +} + "#, + expect![[r#" + insertions: + + Line 3: After(Node(BLOCK_EXPR@40..63)) + -> " " + -> match Err(92) { + Ok(it) => it, + _ => return, + } + -> ; + Line 3: After(Node(IF_EXPR@17..63)) + -> "\n " + -> foo(x); + + replacements: + + Line 3: Token(IF_KW@17..19 "if") -> let + Line 3: Token(LET_KW@20..23 "let") -> x + Line 3: Node(BLOCK_EXPR@40..63) -> = + + deletions: + + Line 3: " " + Line 3: Ok(x) + Line 3: " " + Line 3: = + Line 3: " " + Line 3: Err(92) + "#]], + ) + } + + fn check_diff(from: &str, to: &str, expected_diff: Expect) { + let from_node = SourceFile::parse(from, Edition::CURRENT).tree().syntax().clone(); + let to_node = SourceFile::parse(to, Edition::CURRENT).tree().syntax().clone(); + let 
diff = super::diff(&from_node, &to_node); + + let line_number = + |syn: &SyntaxElement| from[..syn.text_range().start().into()].lines().count(); + + let fmt_syntax = |syn: &SyntaxElement| match syn.kind() { + SyntaxKind::WHITESPACE => format!("{:?}", syn.to_string()), + _ => format!("{syn}"), + }; + + let insertions = + diff.insertions.iter().format_with("\n", |(k, v), f| -> Result<(), std::fmt::Error> { + f(&format!( + "Line {}: {:?}\n-> {}", + line_number(match k { + super::TreeDiffInsertPos::After(syn) => syn, + super::TreeDiffInsertPos::AsFirstChild(syn) => syn, + }), + k, + v.iter().format_with("\n-> ", |v, f| f(&fmt_syntax(v))) + )) + }); + + let replacements = diff + .replacements + .iter() + .sorted_by_key(|(syntax, _)| syntax.text_range().start()) + .format_with("\n", |(k, v), f| { + f(&format!("Line {}: {k:?} -> {}", line_number(k), fmt_syntax(v))) + }); + + let deletions = diff + .deletions + .iter() + .format_with("\n", |v, f| f(&format!("Line {}: {}", line_number(v), fmt_syntax(v)))); + + let actual = format!( + "insertions:\n\n{insertions}\n\nreplacements:\n\n{replacements}\n\ndeletions:\n\n{deletions}\n" + ); + expected_diff.assert_eq(&actual); + + let mut from = from.to_owned(); + let mut text_edit = TextEdit::builder(); + diff.into_text_edit(&mut text_edit); + text_edit.finish().apply(&mut from); + assert_eq!(&*from, to, "diff did not turn `from` to `to`"); + } +} diff --git a/src/tools/rust-analyzer/crates/text-edit/src/lib.rs b/src/tools/rust-analyzer/crates/ide-db/src/text_edit.rs similarity index 99% rename from src/tools/rust-analyzer/crates/text-edit/src/lib.rs rename to src/tools/rust-analyzer/crates/ide-db/src/text_edit.rs index 3efe0850d88d0..0c675f0619f2a 100644 --- a/src/tools/rust-analyzer/crates/text-edit/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/text_edit.rs @@ -5,8 +5,8 @@ //! rust-analyzer. use itertools::Itertools; +pub use span::{TextRange, TextSize}; use std::cmp::max; -pub use text_size::{TextRange, TextSize}; /// `InsertDelete` -- a single "atomic" change to text /// diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml b/src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml index bf54f4ab3224f..281a08e5429f6 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml @@ -22,7 +22,6 @@ tracing.workspace = true # local deps stdx.workspace = true syntax.workspace = true -text-edit.workspace = true cfg.workspace = true hir.workspace = true ide-db.workspace = true diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/field_shorthand.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/field_shorthand.rs index c7071d1ce477d..876c2ccd49d7a 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/field_shorthand.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/field_shorthand.rs @@ -1,9 +1,9 @@ //! Suggests shortening `Foo { field: field }` to `Foo { field }` in both //! expressions and patterns. 
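Aside (not part of the patch): the new `ide_db::syntax_helpers::tree_diff` module and the relocated `ide_db::text_edit` module are meant to be used in tandem, mirroring what the `check_diff` test above does — compute a fine-grained tree diff and lower it to a plain text edit. A minimal sketch, assuming `ide_db`, `parser`, and `syntax` are available as dependencies:

    use ide_db::syntax_helpers::tree_diff::diff;
    use ide_db::text_edit::TextEdit;
    use parser::Edition;
    use syntax::{AstNode, SourceFile};

    fn retarget_text(before: &str, after: &str) -> String {
        let from = SourceFile::parse(before, Edition::CURRENT).tree().syntax().clone();
        let to = SourceFile::parse(after, Edition::CURRENT).tree().syntax().clone();

        // Diff the two syntax trees and collect the result into a text edit.
        let mut builder = TextEdit::builder();
        diff(&from, &to).into_text_edit(&mut builder);

        // Applying the edit to the original text reproduces the target text.
        let mut text = before.to_owned();
        builder.finish().apply(&mut text);
        text
    }

    fn main() {
        let edited = retarget_text("use foo;\nuse baz;", "use foo;\nuse bar;\nuse baz;");
        assert_eq!(edited, "use foo;\nuse bar;\nuse baz;");
    }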
+use ide_db::text_edit::TextEdit; use ide_db::{source_change::SourceChange, EditionedFileId, FileRange}; use syntax::{ast, match_ast, AstNode, SyntaxNode}; -use text_edit::TextEdit; use crate::{fix, Diagnostic, DiagnosticCode}; diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs index 1f8f805a1e296..4c0c685e55034 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs @@ -192,6 +192,22 @@ union FooBar { //- /outline_inner.rs #![cfg(outline_inner)] //- /outline.rs +"#, + ); + } + + #[test] + fn cfg_true_false() { + check( + r#" + #[cfg(false)] fn inactive() {} +//^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: false is disabled + + #[cfg(true)] fn active() {} + + #[cfg(any(not(true)), false)] fn inactive2() {} +//^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: true is enabled + "#, ); } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs index ccb33fed10044..dca889d1a8efe 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs @@ -2,6 +2,7 @@ //! example. use hir::{ImportPathConfig, PathResolution, Semantics}; +use ide_db::text_edit::TextEdit; use ide_db::{ helpers::mod_path_to_ast, imports::insert_use::{insert_use, ImportScope}, @@ -14,7 +15,6 @@ use syntax::{ ast::{self, make}, Edition, SyntaxKind, SyntaxNode, }; -use text_edit::TextEdit; use crate::{fix, Diagnostic, DiagnosticCode, DiagnosticsConfig, Severity}; diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs index 6a976697c8057..e177b72e4d435 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs @@ -3,14 +3,19 @@ use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext, Severity}; // Diagnostic: macro-error // // This diagnostic is shown for macro expansion errors. + +// Diagnostic: proc-macros-disabled +// +// This diagnostic is shown for proc macros where proc macros have been disabled. + +// Diagnostic: proc-macro-disabled +// +// This diagnostic is shown for proc macros that has been specifically disabled via `rust-analyzer.procMacro.ignored`. pub(crate) fn macro_error(ctx: &DiagnosticsContext<'_>, d: &hir::MacroError) -> Diagnostic { // Use more accurate position if available. 
let display_range = ctx.resolve_precise_location(&d.node, d.precise_location); Diagnostic::new( - DiagnosticCode::Ra( - "macro-error", - if d.error { Severity::Error } else { Severity::WeakWarning }, - ), + DiagnosticCode::Ra(d.kind, if d.error { Severity::Error } else { Severity::WeakWarning }), d.message.clone(), display_range, ) diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs index 86c237f7b5ec3..fd1044e51bc23 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs @@ -5,15 +5,14 @@ use hir::{ }; use ide_db::{ assists::Assist, famous_defs::FamousDefs, imports::import_assets::item_for_path_search, - source_change::SourceChange, use_trivial_constructor::use_trivial_constructor, FxHashMap, + source_change::SourceChange, syntax_helpers::tree_diff::diff, text_edit::TextEdit, + use_trivial_constructor::use_trivial_constructor, FxHashMap, }; use stdx::format_to; use syntax::{ - algo, ast::{self, make}, AstNode, Edition, SyntaxNode, SyntaxNodePtr, ToSmolStr, }; -use text_edit::TextEdit; use crate::{fix, Diagnostic, DiagnosticCode, DiagnosticsContext}; @@ -77,7 +76,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::MissingFields) -> Option, d: &hir::MissingUnsafe) -> Option, d: &hir::NeedMut) -> Option { - if d.span.file_id.macro_file().is_some() { - // FIXME: Our infra can't handle allow from within macro expansions rn - return None; - } + let root = ctx.sema.db.parse_or_expand(d.span.file_id); + let node = d.span.value.to_node(&root); + let mut span = d.span; + if let Some(parent) = node.parent() { + if ast::BinExpr::can_cast(parent.kind()) { + // In case of an assignment, the diagnostic is provided on the variable name. + // We want to expand it to include the whole assignment, but only when this + // is an ordinary assignment, not a destructuring assignment. So, the direct + // parent is an assignment expression. + span = d.span.with_value(SyntaxNodePtr::new(&parent)); + } + }; + let fixes = (|| { if d.local.is_ref(ctx.sema.db) { // There is no simple way to add `mut` to `ref x` and `ref mut x` return None; } - let file_id = d.span.file_id.file_id()?; + let file_id = span.file_id.file_id()?; let mut edit_builder = TextEdit::builder(); - let use_range = d.span.value.text_range(); + let use_range = span.value.text_range(); for source in d.local.sources(ctx.sema.db) { let Some(ast) = source.name() else { continue }; // FIXME: macros @@ -33,6 +43,7 @@ pub(crate) fn need_mut(ctx: &DiagnosticsContext<'_>, d: &hir::NeedMut) -> Option use_range, )]) })(); + Some( Diagnostic::new_with_syntax_node_ptr( ctx, @@ -42,7 +53,7 @@ pub(crate) fn need_mut(ctx: &DiagnosticsContext<'_>, d: &hir::NeedMut) -> Option "cannot mutate immutable variable `{}`", d.local.name(ctx.sema.db).display(ctx.sema.db, ctx.edition) ), - d.span, + span, ) .with_fixes(fixes), ) @@ -53,10 +64,6 @@ pub(crate) fn need_mut(ctx: &DiagnosticsContext<'_>, d: &hir::NeedMut) -> Option // This diagnostic is triggered when a mutable variable isn't actually mutated. 
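Aside (not part of the patch): the `need_mut` change above widens the diagnostic span from the variable name to the whole assignment when the immutable variable is the left-hand side of an ordinary (non-destructuring) assignment. An assumed example — deliberately one that rustc rejects, since that is exactly the situation the diagnostic reports — showing where the span now lands:

    fn f() {
        let x = 5;
        // rust-analyzer now underlines all of `x = 2` (previously just the `x`)
        // and offers the quick fix that inserts `mut` into the binding.
        x = 2;
    }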
pub(crate) fn unused_mut(ctx: &DiagnosticsContext<'_>, d: &hir::UnusedMut) -> Option { let ast = d.local.primary_source(ctx.sema.db).syntax_ptr(); - if ast.file_id.macro_file().is_some() { - // FIXME: Our infra can't handle allow from within macro expansions rn - return None; - } let fixes = (|| { let file_id = ast.file_id.file_id()?; let mut edit_builder = TextEdit::builder(); @@ -937,7 +944,6 @@ fn fn_once(mut x: impl FnOnce(u8) -> u8) -> u8 { #[test] fn closure() { - // FIXME: Diagnostic spans are inconsistent inside and outside closure check_diagnostics( r#" //- minicore: copy, fn @@ -950,11 +956,11 @@ fn fn_once(mut x: impl FnOnce(u8) -> u8) -> u8 { fn f() { let x = 5; let closure1 = || { x = 2; }; - //^ 💡 error: cannot mutate immutable variable `x` + //^^^^^ 💡 error: cannot mutate immutable variable `x` let _ = closure1(); //^^^^^^^^ 💡 error: cannot mutate immutable variable `closure1` let closure2 = || { x = x; }; - //^ 💡 error: cannot mutate immutable variable `x` + //^^^^^ 💡 error: cannot mutate immutable variable `x` let closure3 = || { let x = 2; x = 5; @@ -996,7 +1002,7 @@ fn f() { || { let x = 2; || { || { x = 5; } } - //^ 💡 error: cannot mutate immutable variable `x` + //^^^^^ 💡 error: cannot mutate immutable variable `x` } } }; @@ -1283,4 +1289,19 @@ fn main() { "#, ); } + + #[test] + fn destructuring_assignment_needs_mut() { + check_diagnostics( + r#" +//- minicore: fn + +fn main() { + let mut var = 1; + let mut func = || (var,) = (2,); + func(); +} + "#, + ); + } } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/no_such_field.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/no_such_field.rs index dfadef11fde0b..e5d871975b628 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/no_such_field.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/no_such_field.rs @@ -1,11 +1,11 @@ use either::Either; use hir::{db::ExpandDatabase, HasSource, HirDisplay, HirFileIdExt, Semantics, VariantId}; +use ide_db::text_edit::TextEdit; use ide_db::{source_change::SourceChange, EditionedFileId, RootDatabase}; use syntax::{ ast::{self, edit::IndentLevel, make}, AstNode, }; -use text_edit::TextEdit; use crate::{fix, Assist, Diagnostic, DiagnosticCode, DiagnosticsContext}; diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/remove_trailing_return.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/remove_trailing_return.rs index 62bc1f3d06f69..c8e3cff364a9e 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/remove_trailing_return.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/remove_trailing_return.rs @@ -1,7 +1,7 @@ use hir::{db::ExpandDatabase, diagnostics::RemoveTrailingReturn, FileRange}; +use ide_db::text_edit::TextEdit; use ide_db::{assists::Assist, source_change::SourceChange}; use syntax::{ast, AstNode}; -use text_edit::TextEdit; use crate::{adjusted_display_range, fix, Diagnostic, DiagnosticCode, DiagnosticsContext}; diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs index 448df1ca163a6..a46c48608f7e1 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs @@ -1,4 +1,5 @@ use hir::{db::ExpandDatabase, diagnostics::RemoveUnnecessaryElse, HirFileIdExt}; +use 
ide_db::text_edit::TextEdit; use ide_db::{assists::Assist, source_change::SourceChange}; use itertools::Itertools; use syntax::{ @@ -8,7 +9,6 @@ use syntax::{ }, AstNode, SyntaxToken, TextRange, }; -use text_edit::TextEdit; use crate::{ adjusted_display_range, fix, Diagnostic, DiagnosticCode, DiagnosticsContext, Severity, diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs index 18647206236c9..f481365f2a572 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs @@ -1,10 +1,10 @@ use hir::{db::ExpandDatabase, HirFileIdExt, InFile}; use ide_db::source_change::SourceChange; +use ide_db::text_edit::TextEdit; use syntax::{ ast::{self, HasArgList}, AstNode, TextRange, }; -use text_edit::TextEdit; use crate::{fix, Assist, Diagnostic, DiagnosticCode, DiagnosticsContext}; diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs index 3de51ca4a30a9..1363a8ff0ddb5 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs @@ -1,11 +1,11 @@ use hir::{db::ExpandDatabase, HasSource, HirDisplay}; +use ide_db::text_edit::TextRange; use ide_db::{ assists::{Assist, AssistId, AssistKind}, label::Label, source_change::SourceChangeBuilder, }; use syntax::ToSmolStr; -use text_edit::TextRange; use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext}; diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs index 90f88d6705ba7..93fe9374a3ee0 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs @@ -1,5 +1,6 @@ use either::Either; use hir::{db::ExpandDatabase, ClosureStyle, HirDisplay, HirFileIdExt, InFile, Type}; +use ide_db::text_edit::TextEdit; use ide_db::{famous_defs::FamousDefs, source_change::SourceChange}; use syntax::{ ast::{ @@ -9,7 +10,6 @@ use syntax::{ }, AstNode, AstPtr, TextSize, }; -use text_edit::TextEdit; use crate::{adjusted_display_range, fix, Assist, Diagnostic, DiagnosticCode, DiagnosticsContext}; diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/typed_hole.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/typed_hole.rs index 6994a7ed14656..3ad84f7bda2f2 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/typed_hole.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/typed_hole.rs @@ -3,13 +3,13 @@ use hir::{ term_search::{term_search, TermSearchConfig, TermSearchCtx}, ClosureStyle, HirDisplay, ImportPathConfig, }; +use ide_db::text_edit::TextEdit; use ide_db::{ assists::{Assist, AssistId, AssistKind, GroupLabel}, label::Label, source_change::SourceChange, }; use itertools::Itertools; -use text_edit::TextEdit; use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext}; diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/undeclared_label.rs 
b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/undeclared_label.rs index 6af36fb9e7398..d16bfb8002403 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/undeclared_label.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/undeclared_label.rs @@ -104,6 +104,36 @@ async fn foo() { async fn foo() { || None?; } +"#, + ); + } + + #[test] + fn macro_expansion_can_refer_label_defined_before_macro_definition() { + check_diagnostics( + r#" +fn foo() { + 'bar: loop { + macro_rules! m { + () => { break 'bar }; + } + m!(); + } +} +"#, + ); + check_diagnostics( + r#" +fn foo() { + 'bar: loop { + macro_rules! m { + () => { break 'bar }; + } + 'bar: loop { + m!(); + } + } +} "#, ); } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs index e0822fc5b3385..13591dfb2eebd 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs @@ -3,6 +3,7 @@ use std::iter; use hir::{db::DefDatabase, DefMap, InFile, ModuleSource}; +use ide_db::text_edit::TextEdit; use ide_db::{ base_db::{FileLoader, SourceDatabase, SourceRootDatabase}, source_change::SourceChange, @@ -13,7 +14,6 @@ use syntax::{ ast::{self, edit::IndentLevel, HasModuleItem, HasName}, AstNode, TextRange, }; -use text_edit::TextEdit; use crate::{fix, Assist, Diagnostic, DiagnosticCode, DiagnosticsContext, Severity}; diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_field.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_field.rs index 76d624c47abfd..656bedff1a806 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_field.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_field.rs @@ -1,6 +1,7 @@ use std::iter; use hir::{db::ExpandDatabase, Adt, FileRange, HasSource, HirDisplay, InFile, Struct, Union}; +use ide_db::text_edit::TextEdit; use ide_db::{ assists::{Assist, AssistId, AssistKind}, helpers::is_editable_crate, @@ -16,7 +17,6 @@ use syntax::{ ast::{edit::AstNodeEdit, Type}, SyntaxNode, }; -use text_edit::TextEdit; use crate::{adjusted_display_range, Diagnostic, DiagnosticCode, DiagnosticsContext}; diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_ident.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_ident.rs index 9a81682aaeba7..68f14a97f5942 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_ident.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_ident.rs @@ -11,7 +11,7 @@ pub(crate) fn unresolved_ident( ctx, DiagnosticCode::RustcHardError("E0425"), "no such value in this scope", - d.expr.map(Into::into), + d.expr_or_pat.map(Into::into), ) .experimental() } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs index 5b596123e75fd..0d1c977506276 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs @@ -82,4 +82,29 @@ self::m!(); self::m2!(); "#, ); } + + #[test] + fn no_unresolved_panic_inside_mod_inside_fn() { + check_diagnostics( + r#" +//- /core.rs library crate:core 
+#[macro_export] +macro_rules! panic { + () => {}; +} + +//- /lib.rs crate:foo deps:core +#[macro_use] +extern crate core; + +fn foo() { + mod init { + pub fn init() { + panic!(); + } + } +} + "#, + ); + } } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_method.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_method.rs index c0d038a238ba8..81cb45212186c 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_method.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_method.rs @@ -1,4 +1,5 @@ use hir::{db::ExpandDatabase, AssocItem, FileRange, HirDisplay, InFile}; +use ide_db::text_edit::TextEdit; use ide_db::{ assists::{Assist, AssistId, AssistKind}, label::Label, @@ -8,7 +9,6 @@ use syntax::{ ast::{self, make, HasArgList}, format_smolstr, AstNode, SmolStr, TextRange, ToSmolStr, }; -use text_edit::TextEdit; use crate::{adjusted_display_range, Diagnostic, DiagnosticCode, DiagnosticsContext}; diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unused_variables.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unused_variables.rs index 84007b16aa67c..67ece5669419e 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unused_variables.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unused_variables.rs @@ -1,4 +1,5 @@ use hir::Name; +use ide_db::text_edit::TextEdit; use ide_db::{ assists::{Assist, AssistId, AssistKind}, label::Label, @@ -6,7 +7,6 @@ use ide_db::{ FileRange, RootDatabase, }; use syntax::{Edition, TextRange}; -use text_edit::TextEdit; use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext}; diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/useless_braces.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/useless_braces.rs index 2d380ae045736..e5c2eca171ae4 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/useless_braces.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/useless_braces.rs @@ -1,8 +1,8 @@ use hir::InFile; +use ide_db::text_edit::TextEdit; use ide_db::{source_change::SourceChange, EditionedFileId, FileRange}; use itertools::Itertools; use syntax::{ast, AstNode, SyntaxNode, SyntaxNodePtr}; -use text_edit::TextEdit; use crate::{fix, Diagnostic, DiagnosticCode}; diff --git a/src/tools/rust-analyzer/crates/ide-ssr/Cargo.toml b/src/tools/rust-analyzer/crates/ide-ssr/Cargo.toml index fad62fa3b96b8..256146762888d 100644 --- a/src/tools/rust-analyzer/crates/ide-ssr/Cargo.toml +++ b/src/tools/rust-analyzer/crates/ide-ssr/Cargo.toml @@ -24,7 +24,6 @@ ide-db.workspace = true parser.workspace = true stdx.workspace = true syntax.workspace = true -text-edit.workspace = true [dev-dependencies] expect-test = "1.4.0" @@ -34,4 +33,4 @@ test-utils.workspace = true test-fixture.workspace = true [lints] -workspace = true \ No newline at end of file +workspace = true diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs index 54236ea8bc4e9..eaca95d98c2e1 100644 --- a/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs @@ -84,10 +84,10 @@ pub use crate::{errors::SsrError, from_comment::ssr_from_comment, matching::Matc use crate::{errors::bail, matching::MatchFailureReason}; use hir::{FileRange, Semantics}; +use ide_db::text_edit::TextEdit; use ide_db::{base_db::SourceDatabase, EditionedFileId, FileId, 
FxHashMap, RootDatabase}; use resolving::ResolvedRule; use syntax::{ast, AstNode, SyntaxNode, TextRange}; -use text_edit::TextEdit; // A structured search replace rule. Create by calling `parse` on a str. #[derive(Debug)] diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/parsing.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/parsing.rs index e752ee3d775f2..ea40d5b815ef3 100644 --- a/src/tools/rust-analyzer/crates/ide-ssr/src/parsing.rs +++ b/src/tools/rust-analyzer/crates/ide-ssr/src/parsing.rs @@ -190,7 +190,7 @@ impl RawPattern { let mut res = FxHashMap::default(); for t in &self.tokens { if let PatternElement::Placeholder(placeholder) = t { - res.insert(SmolStr::new(placeholder.stand_in_name.clone()), placeholder.clone()); + res.insert(SmolStr::new(&placeholder.stand_in_name), placeholder.clone()); } } res diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/replacing.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/replacing.rs index 65756601f66a7..11c1615a560eb 100644 --- a/src/tools/rust-analyzer/crates/ide-ssr/src/replacing.rs +++ b/src/tools/rust-analyzer/crates/ide-ssr/src/replacing.rs @@ -1,5 +1,6 @@ //! Code for applying replacement templates for matches that have previously been found. +use ide_db::text_edit::TextEdit; use ide_db::{FxHashMap, FxHashSet}; use itertools::Itertools; use parser::Edition; @@ -7,7 +8,6 @@ use syntax::{ ast::{self, AstNode, AstToken}, SyntaxElement, SyntaxKind, SyntaxNode, SyntaxToken, TextRange, TextSize, }; -use text_edit::TextEdit; use crate::{fragments, resolving::ResolvedRule, Match, SsrMatches}; diff --git a/src/tools/rust-analyzer/crates/ide/Cargo.toml b/src/tools/rust-analyzer/crates/ide/Cargo.toml index d976d604f1aa9..7c66b36dc8e91 100644 --- a/src/tools/rust-analyzer/crates/ide/Cargo.toml +++ b/src/tools/rust-analyzer/crates/ide/Cargo.toml @@ -39,7 +39,6 @@ profile.workspace = true stdx.workspace = true syntax.workspace = true span.workspace = true -text-edit.workspace = true # ide should depend only on the top-level `hir` package. if you need # something from some `hir-xxx` subpackage, reexport the API via `hir`. hir.workspace = true diff --git a/src/tools/rust-analyzer/crates/ide/src/call_hierarchy.rs b/src/tools/rust-analyzer/crates/ide/src/call_hierarchy.rs index 1b82c00d1dc04..e5b4ed17b2a46 100644 --- a/src/tools/rust-analyzer/crates/ide/src/call_hierarchy.rs +++ b/src/tools/rust-analyzer/crates/ide/src/call_hierarchy.rs @@ -510,6 +510,7 @@ fn caller$0() { expect![[]], ); } + #[test] fn test_call_hierarchy_in_macros_incoming_different_files() { check_hierarchy( @@ -591,9 +592,9 @@ macro_rules! 
call { "#, expect!["callee Function FileId(0) 22..37 30..36"], expect![[r#" - callee Function FileId(0) 38..52 44..50 : FileId(0):44..50 caller Function FileId(0) 38..52 : FileId(0):44..50 - caller Function FileId(1) 130..136 130..136 : FileId(0):44..50"#]], + caller Function FileId(1) 130..136 130..136 : FileId(0):44..50 + callee Function FileId(0) 38..52 44..50 : FileId(0):44..50"#]], expect![[]], ); } diff --git a/src/tools/rust-analyzer/crates/ide/src/file_structure.rs b/src/tools/rust-analyzer/crates/ide/src/file_structure.rs index 92458185849b4..055080ad17b13 100644 --- a/src/tools/rust-analyzer/crates/ide/src/file_structure.rs +++ b/src/tools/rust-analyzer/crates/ide/src/file_structure.rs @@ -187,6 +187,24 @@ fn structure_node(node: &SyntaxNode) -> Option { }; Some(node) }, + ast::LetStmt(it) => { + let pat = it.pat()?; + + let mut label = String::new(); + collapse_ws(pat.syntax(), &mut label); + + let node = StructureNode { + parent: None, + label, + navigation_range: pat.syntax().text_range(), + node_range: it.syntax().text_range(), + kind: StructureNodeKind::SymbolKind(SymbolKind::Local), + detail: it.ty().map(|ty| ty.to_string()), + deprecated: false, + }; + + Some(node) + }, ast::Macro(it) => decl(it, StructureNodeKind::SymbolKind(SymbolKind::Macro)), _ => None, } @@ -308,6 +326,17 @@ fn f() {} // endregion fn g() {} } + +fn let_statements() { + let x = 42; + let mut y = x; + let Foo { + .. + } = Foo { x }; + if let None = Some(x) {} + _ = (); + let _ = g(); +} "#, expect![[r#" [ @@ -633,6 +662,71 @@ fn g() {} ), deprecated: false, }, + StructureNode { + parent: None, + label: "let_statements", + navigation_range: 641..655, + node_range: 638..798, + kind: SymbolKind( + Function, + ), + detail: Some( + "fn()", + ), + deprecated: false, + }, + StructureNode { + parent: Some( + 26, + ), + label: "x", + navigation_range: 668..669, + node_range: 664..675, + kind: SymbolKind( + Local, + ), + detail: None, + deprecated: false, + }, + StructureNode { + parent: Some( + 26, + ), + label: "mut y", + navigation_range: 684..689, + node_range: 680..694, + kind: SymbolKind( + Local, + ), + detail: None, + deprecated: false, + }, + StructureNode { + parent: Some( + 26, + ), + label: "Foo { .. }", + navigation_range: 703..725, + node_range: 699..738, + kind: SymbolKind( + Local, + ), + detail: None, + deprecated: false, + }, + StructureNode { + parent: Some( + 26, + ), + label: "_", + navigation_range: 788..789, + node_range: 784..796, + kind: SymbolKind( + Local, + ), + detail: None, + deprecated: false, + }, ] "#]], ); diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs b/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs index 4cbcb6ed050fd..363f852e0e4be 100644 --- a/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs +++ b/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs @@ -13,7 +13,6 @@ use ide_db::{ RootDatabase, SymbolKind, }; use itertools::Itertools; - use span::{Edition, FileId}; use syntax::{ ast::{self, HasLoopBody}, @@ -99,6 +98,7 @@ pub(crate) fn goto_definition( return Some(vec![x]); } } + Some( IdentClass::classify_node(sema, &parent)? 
.definitions() @@ -418,10 +418,10 @@ fn expr_to_nav( #[cfg(test)] mod tests { + use crate::fixture; use ide_db::FileRange; use itertools::Itertools; - - use crate::fixture; + use syntax::SmolStr; #[track_caller] fn check(ra_fixture: &str) { @@ -450,6 +450,170 @@ mod tests { assert!(navs.is_empty(), "didn't expect this to resolve anywhere: {navs:?}") } + fn check_name(expected_name: &str, ra_fixture: &str) { + let (analysis, position, _) = fixture::annotations(ra_fixture); + let navs = analysis.goto_definition(position).unwrap().expect("no definition found").info; + assert!(navs.len() < 2, "expected single navigation target but encountered {}", navs.len()); + let Some(target) = navs.into_iter().next() else { + panic!("expected single navigation target but encountered none"); + }; + assert_eq!(target.name, SmolStr::new_inline(expected_name)); + } + + #[test] + fn goto_def_pat_range_to_inclusive() { + check_name( + "RangeToInclusive", + r#" +//- minicore: range +fn f(ch: char) -> bool { + match ch { + ..$0='z' => true, + _ => false + } +} +"#, + ); + } + + #[test] + fn goto_def_pat_range_to() { + check_name( + "RangeTo", + r#" +//- minicore: range +fn f(ch: char) -> bool { + match ch { + .$0.'z' => true, + _ => false + } +} +"#, + ); + } + + #[test] + fn goto_def_pat_range() { + check_name( + "Range", + r#" +//- minicore: range +fn f(ch: char) -> bool { + match ch { + 'a'.$0.'z' => true, + _ => false + } +} +"#, + ); + } + + #[test] + fn goto_def_pat_range_inclusive() { + check_name( + "RangeInclusive", + r#" +//- minicore: range +fn f(ch: char) -> bool { + match ch { + 'a'..$0='z' => true, + _ => false + } +} +"#, + ); + } + + #[test] + fn goto_def_pat_range_from() { + check_name( + "RangeFrom", + r#" +//- minicore: range +fn f(ch: char) -> bool { + match ch { + 'a'..$0 => true, + _ => false + } +} +"#, + ); + } + + #[test] + fn goto_def_expr_range() { + check_name( + "Range", + r#" +//- minicore: range +let x = 0.$0.1; +"#, + ); + } + + #[test] + fn goto_def_expr_range_from() { + check_name( + "RangeFrom", + r#" +//- minicore: range +fn f(arr: &[i32]) -> &[i32] { + &arr[0.$0.] +} +"#, + ); + } + + #[test] + fn goto_def_expr_range_inclusive() { + check_name( + "RangeInclusive", + r#" +//- minicore: range +let x = 0.$0.=1; +"#, + ); + } + + #[test] + fn goto_def_expr_range_full() { + check_name( + "RangeFull", + r#" +//- minicore: range +fn f(arr: &[i32]) -> &[i32] { + &arr[.$0.] +} +"#, + ); + } + + #[test] + fn goto_def_expr_range_to() { + check_name( + "RangeTo", + r#" +//- minicore: range +fn f(arr: &[i32]) -> &[i32] { + &arr[.$0.10] +} +"#, + ); + } + + #[test] + fn goto_def_expr_range_to_inclusive() { + check_name( + "RangeToInclusive", + r#" +//- minicore: range +fn f(arr: &[i32]) -> &[i32] { + &arr[.$0.=10] +} +"#, + ); + } + #[test] fn goto_def_in_included_file() { check( @@ -2835,6 +2999,26 @@ mod bar { mod m {} use foo::m; +"#, + ); + } + + #[test] + fn macro_label_hygiene() { + check( + r#" +macro_rules! 
m { + ($x:stmt) => { + 'bar: loop { $x } + }; +} + +fn foo() { + 'bar: loop { + // ^^^^ + m!(continue 'bar$0); + } +} "#, ); } diff --git a/src/tools/rust-analyzer/crates/ide/src/hover.rs b/src/tools/rust-analyzer/crates/ide/src/hover.rs index 124db2985bf00..6cac4f1ee489f 100644 --- a/src/tools/rust-analyzer/crates/ide/src/hover.rs +++ b/src/tools/rust-analyzer/crates/ide/src/hover.rs @@ -11,7 +11,7 @@ use ide_db::{ defs::{Definition, IdentClass, NameRefClass, OperatorClass}, famous_defs::FamousDefs, helpers::pick_best_token, - FileRange, FxIndexSet, RootDatabase, + FileRange, FxIndexSet, Ranker, RootDatabase, }; use itertools::{multizip, Itertools}; use span::Edition; @@ -182,27 +182,13 @@ fn hover_offset( // equivalency is more important let mut descended = sema.descend_into_macros(original_token.clone()); - let kind = original_token.kind(); - let text = original_token.text(); - let ident_kind = kind.is_any_identifier(); - - descended.sort_by_cached_key(|tok| { - let tok_kind = tok.kind(); - - let exact_same_kind = tok_kind == kind; - let both_idents = exact_same_kind || (tok_kind.is_any_identifier() && ident_kind); - let same_text = tok.text() == text; - // anything that mapped into a token tree has likely no semantic information - let no_tt_parent = tok.parent().map_or(false, |it| it.kind() != TOKEN_TREE); - !((both_idents as usize) - | ((exact_same_kind as usize) << 1) - | ((same_text as usize) << 2) - | ((no_tt_parent as usize) << 3)) - }); + let ranker = Ranker::from_token(&original_token); + + descended.sort_by_cached_key(|tok| !ranker.rank_token(tok)); let mut res = vec![]; for token in descended { - let is_same_kind = token.kind() == kind; + let is_same_kind = token.kind() == ranker.kind; let lint_hover = (|| { // FIXME: Definition should include known lints and the like instead of having this special case here let attr = token.parent_ancestors().find_map(ast::Attr::cast)?; diff --git a/src/tools/rust-analyzer/crates/ide/src/hover/render.rs b/src/tools/rust-analyzer/crates/ide/src/hover/render.rs index 01fa316d5fce9..a31b14dbd3e35 100644 --- a/src/tools/rust-analyzer/crates/ide/src/hover/render.rs +++ b/src/tools/rust-analyzer/crates/ide/src/hover/render.rs @@ -1042,7 +1042,7 @@ fn render_dyn_compatibility( } DynCompatibilityViolation::HasNonCompatibleSuperTrait(super_trait) => { let name = hir::Trait::from(super_trait).name(db); - format_to!(buf, "has a object unsafe supertrait `{}`", name.as_str()); + format_to!(buf, "has a dyn incompatible supertrait `{}`", name.as_str()); } } } diff --git a/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs b/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs index 81397b0785529..3e40263041955 100644 --- a/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs +++ b/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs @@ -289,7 +289,7 @@ m!(ab$0c); *abc* ```rust - test::module + test ``` ```rust @@ -298,11 +298,11 @@ m!(ab$0c); --- - Inner + Outer --- ```rust - test + test::module ``` ```rust @@ -311,7 +311,7 @@ m!(ab$0c); --- - Outer + Inner "#]], ); } @@ -9018,3 +9018,156 @@ foo!(BAR_$0); "#]], ); } + +#[test] +fn type_alias_without_docs() { + // Simple. + check( + r#" +/// Docs for B +struct B; + +type A$0 = B; +"#, + expect![[r#" + *A* + + ```rust + test + ``` + + ```rust + // size = 0, align = 1 + type A = B + ``` + + --- + + *This is the documentation for* `struct B` + + Docs for B + "#]], + ); + + // Nested. 
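For context on the `Ranker` introduced in the hover change above: the old inline sort key survives as a small bitmask that favors descended tokens keeping the original token's kind and text and not buried inside a bare token tree. A minimal standalone sketch of that idea follows; the `Tok` type and `rank` helper are illustrative simplifications, not rust-analyzer's actual API.

```rust
// Illustrative ranking: higher bits for more interesting properties.
struct Tok {
    kind: &'static str,
    text: String,
    is_ident: bool,
    in_token_tree: bool,
}

fn rank(original: &Tok, candidate: &Tok) -> usize {
    let exact_same_kind = candidate.kind == original.kind;
    let both_idents = exact_same_kind || (candidate.is_ident && original.is_ident);
    let same_text = candidate.text == original.text;
    let no_tt_parent = !candidate.in_token_tree;
    (both_idents as usize)
        | ((exact_same_kind as usize) << 1)
        | ((same_text as usize) << 2)
        | ((no_tt_parent as usize) << 3)
}

fn main() {
    let original =
        Tok { kind: "IDENT", text: "abc".to_owned(), is_ident: true, in_token_tree: false };
    let mut descended = vec![
        Tok { kind: "IDENT", text: "other".to_owned(), is_ident: true, in_token_tree: true },
        Tok { kind: "IDENT", text: "abc".to_owned(), is_ident: true, in_token_tree: false },
    ];
    // Sorting by the negated rank puts the best candidate first, mirroring
    // `descended.sort_by_cached_key(|tok| !ranker.rank_token(tok))` in the diff.
    descended.sort_by_cached_key(|tok| !rank(&original, tok));
    assert_eq!(descended[0].text, "abc");
}
```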
+ check( + r#" +/// Docs for C +struct C; + +type B = C; + +type A$0 = B; +"#, + expect![[r#" + *A* + + ```rust + test + ``` + + ```rust + // size = 0, align = 1 + type A = B + ``` + + --- + + *This is the documentation for* `struct C` + + Docs for C + "#]], + ); + + // Showing the docs for aliased struct instead of intermediate type. + check( + r#" +/// Docs for C +struct C; + +/// Docs for B +type B = C; + +type A$0 = B; +"#, + expect![[r#" + *A* + + ```rust + test + ``` + + ```rust + // size = 0, align = 1 + type A = B + ``` + + --- + + *This is the documentation for* `struct C` + + Docs for C + "#]], + ); + + // No docs found. + check( + r#" +struct C; + +type B = C; + +type A$0 = B; +"#, + expect![[r#" + *A* + + ```rust + test + ``` + + ```rust + // size = 0, align = 1 + type A = B + ``` + "#]], + ); + + // Multiple nested crate. + check( + r#" +//- /lib.rs crate:c +/// Docs for C +pub struct C; + +//- /lib.rs crate:b deps:c +pub use c::C; +pub type B = C; + +//- /lib.rs crate:a deps:b +pub use b::B; +pub type A = B; + +//- /main.rs crate:main deps:a +use a::A$0; +"#, + expect![[r#" + *A* + + ```rust + a + ``` + + ```rust + // size = 0, align = 1 + pub type A = B + ``` + + --- + + *This is the documentation for* `pub struct C` + + Docs for C + "#]], + ); +} diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs index 97e712356b54d..c58ca0f01cd64 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs @@ -8,6 +8,7 @@ use hir::{ sym, ClosureStyle, HasVisibility, HirDisplay, HirDisplayError, HirWrite, ModuleDef, ModuleDefId, Semantics, }; +use ide_db::text_edit::TextEdit; use ide_db::{famous_defs::FamousDefs, FileRange, RootDatabase}; use itertools::Itertools; use smallvec::{smallvec, SmallVec}; @@ -17,7 +18,6 @@ use syntax::{ ast::{self, AstNode, HasGenericParams}, format_smolstr, match_ast, SmolStr, SyntaxNode, TextRange, TextSize, WalkEvent, }; -use text_edit::TextEdit; use crate::{navigation_target::TryToNav, FileId}; @@ -410,19 +410,6 @@ impl InlayHint { } } - fn opening_paren_before(kind: InlayKind, range: TextRange) -> InlayHint { - InlayHint { - range, - kind, - label: InlayHintLabel::from("("), - text_edit: None, - position: InlayHintPosition::Before, - pad_left: false, - pad_right: false, - resolve_parent: None, - } - } - pub fn needs_resolve(&self) -> Option { self.resolve_parent.filter(|_| self.text_edit.is_some() || self.label.needs_resolve()) } @@ -475,6 +462,18 @@ impl InlayHintLabel { } } + pub fn append_part(&mut self, part: InlayHintLabelPart) { + if part.linked_location.is_none() && part.tooltip.is_none() { + if let Some(InlayHintLabelPart { text, linked_location: None, tooltip: None }) = + self.parts.last_mut() + { + text.push_str(&part.text); + return; + } + } + self.parts.push(part); + } + pub fn needs_resolve(&self) -> bool { self.parts.iter().any(|part| part.linked_location.is_some() || part.tooltip.is_some()) } diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/adjustment.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/adjustment.rs index c37c469dff4e1..4d7d6e270e0a3 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/adjustment.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/adjustment.rs @@ -3,12 +3,15 @@ //! let _: u32 = /* */ loop {}; //! let _: &u32 = /* &* */ &mut 0; //! 
``` +use std::ops::Not; + use either::Either; use hir::{ Adjust, Adjustment, AutoBorrow, HirDisplay, Mutability, OverloadedDeref, PointerCast, Safety, }; use ide_db::famous_defs::FamousDefs; +use ide_db::text_edit::TextEditBuilder; use span::EditionedFileId; use stdx::never; use syntax::{ @@ -17,8 +20,8 @@ use syntax::{ }; use crate::{ - AdjustmentHints, AdjustmentHintsMode, InlayHint, InlayHintLabel, InlayHintPosition, - InlayHintsConfig, InlayKind, InlayTooltip, + AdjustmentHints, AdjustmentHintsMode, InlayHint, InlayHintLabel, InlayHintLabelPart, + InlayHintPosition, InlayHintsConfig, InlayKind, InlayTooltip, }; pub(super) fn hints( @@ -51,32 +54,47 @@ pub(super) fn hints( let adjustments = sema.expr_adjustments(desc_expr).filter(|it| !it.is_empty())?; if let ast::Expr::BlockExpr(_) | ast::Expr::IfExpr(_) | ast::Expr::MatchExpr(_) = desc_expr { - if let [Adjustment { kind: Adjust::Deref(_), source, .. }, Adjustment { kind: Adjust::Borrow(_), source: _, target }] = - &*adjustments - { - // Don't show unnecessary reborrows for these, they will just repeat the inner ones again - if source == target { - return None; - } + // Don't show unnecessary reborrows for these, they will just repeat the inner ones again + if matches!( + &*adjustments, + [Adjustment { kind: Adjust::Deref(_), source, .. }, Adjustment { kind: Adjust::Borrow(_), target, .. }] + if source == target + ) { + return None; } } let (postfix, needs_outer_parens, needs_inner_parens) = mode_and_needs_parens_for_adjustment_hints(expr, config.adjustment_hints_mode); - if needs_outer_parens { - acc.push(InlayHint::opening_paren_before( - InlayKind::Adjustment, - expr.syntax().text_range(), - )); + let range = expr.syntax().text_range(); + let mut pre = InlayHint { + range, + position: InlayHintPosition::Before, + pad_left: false, + pad_right: false, + kind: InlayKind::Adjustment, + label: InlayHintLabel::default(), + text_edit: None, + resolve_parent: Some(range), + }; + let mut post = InlayHint { + range, + position: InlayHintPosition::After, + pad_left: false, + pad_right: false, + kind: InlayKind::Adjustment, + label: InlayHintLabel::default(), + text_edit: None, + resolve_parent: Some(range), + }; + + if needs_outer_parens || (postfix && needs_inner_parens) { + pre.label.append_str("("); } if postfix && needs_inner_parens { - acc.push(InlayHint::opening_paren_before( - InlayKind::Adjustment, - expr.syntax().text_range(), - )); - acc.push(InlayHint::closing_paren_after(InlayKind::Adjustment, expr.syntax().text_range())); + post.label.append_str(")"); } let mut iter = if postfix { @@ -86,6 +104,7 @@ pub(super) fn hints( }; let iter: &mut dyn Iterator = iter.as_mut().either(|it| it as _, |it| it as _); + let mut allow_edit = !postfix; for Adjustment { source, target, kind } in iter { if source == target { cov_mark::hit!(same_type_adjustment); @@ -95,6 +114,7 @@ pub(super) fn hints( // FIXME: Add some nicer tooltips to each of these let (text, coercion) = match kind { Adjust::NeverToAny if config.adjustment_hints == AdjustmentHints::Always => { + allow_edit = false; ("", "never to any") } Adjust::Deref(None) => ("*", "dereference"), @@ -115,6 +135,7 @@ pub(super) fn hints( // some of these could be represented via `as` casts, but that's not too nice and // handling everything as a prefix expr makes the `(` and `)` insertion easier Adjust::Pointer(cast) if config.adjustment_hints == AdjustmentHints::Always => { + allow_edit = false; match cast { PointerCast::ReifyFnPointer => { ("", "fn item to fn pointer") @@ -138,36 +159,58 @@ 
pub(super) fn hints( } _ => continue, }; - let label = InlayHintLabel::simple( - if postfix { format!(".{}", text.trim_end()) } else { text.to_owned() }, - Some(InlayTooltip::Markdown(format!( + let label = InlayHintLabelPart { + text: if postfix { format!(".{}", text.trim_end()) } else { text.to_owned() }, + linked_location: None, + tooltip: Some(InlayTooltip::Markdown(format!( "`{}` → `{}` ({coercion} coercion)", source.display(sema.db, file_id.edition()), target.display(sema.db, file_id.edition()), ))), - None, - ); - acc.push(InlayHint { - range: expr.syntax().text_range(), - pad_left: false, - pad_right: false, - position: if postfix { InlayHintPosition::After } else { InlayHintPosition::Before }, - kind: InlayKind::Adjustment, - label, - text_edit: None, - resolve_parent: Some(expr.syntax().text_range()), - }); + }; + if postfix { &mut post } else { &mut pre }.label.append_part(label); } if !postfix && needs_inner_parens { - acc.push(InlayHint::opening_paren_before( - InlayKind::Adjustment, - expr.syntax().text_range(), - )); - acc.push(InlayHint::closing_paren_after(InlayKind::Adjustment, expr.syntax().text_range())); + pre.label.append_str("("); + } + if needs_outer_parens || (!postfix && needs_inner_parens) { + post.label.append_str(")"); } - if needs_outer_parens { - acc.push(InlayHint::closing_paren_after(InlayKind::Adjustment, expr.syntax().text_range())); + + let mut pre = pre.label.parts.is_empty().not().then_some(pre); + let mut post = post.label.parts.is_empty().not().then_some(post); + if pre.is_none() && post.is_none() { + return None; } + if allow_edit { + let edit = { + let mut b = TextEditBuilder::default(); + if let Some(pre) = &pre { + b.insert( + pre.range.start(), + pre.label.parts.iter().map(|part| &*part.text).collect::(), + ); + } + if let Some(post) = &post { + b.insert( + post.range.end(), + post.label.parts.iter().map(|part| &*part.text).collect::(), + ); + } + b.finish() + }; + match (&mut pre, &mut post) { + (Some(pre), Some(post)) => { + pre.text_edit = Some(edit.clone()); + post.text_edit = Some(edit); + } + (Some(pre), None) => pre.text_edit = Some(edit), + (None, Some(post)) => post.text_edit = Some(edit), + (None, None) => (), + } + } + acc.extend(pre); + acc.extend(post); Some(()) } @@ -293,25 +336,19 @@ fn main() { let _: u32 = loop {}; //^^^^^^^ let _: &u32 = &mut 0; - //^^^^^^& - //^^^^^^* + //^^^^^^&* let _: &mut u32 = &mut 0; - //^^^^^^&mut $ - //^^^^^^* + //^^^^^^&mut * let _: *const u32 = &mut 0; - //^^^^^^&raw const $ - //^^^^^^* + //^^^^^^&raw const * let _: *mut u32 = &mut 0; - //^^^^^^&raw mut $ - //^^^^^^* + //^^^^^^&raw mut * let _: fn() = main; //^^^^ let _: unsafe fn() = main; - //^^^^ - //^^^^ + //^^^^ let _: unsafe fn() = main as fn(); - //^^^^^^^^^^^^ - //^^^^^^^^^^^^( + //^^^^^^^^^^^^( //^^^^^^^^^^^^) //^^^^ let _: fn() = || {}; @@ -319,72 +356,51 @@ fn main() { let _: unsafe fn() = || {}; //^^^^^ let _: *const u32 = &mut 0u32 as *mut u32; - //^^^^^^^^^^^^^^^^^^^^^ - //^^^^^^^^^^^^^^^^^^^^^( + //^^^^^^^^^^^^^^^^^^^^^( //^^^^^^^^^^^^^^^^^^^^^) - //^^^^^^^^^&raw mut $ - //^^^^^^^^^* + //^^^^^^^^^&raw mut * let _: &mut [_] = &mut [0; 0]; - //^^^^^^^^^^^ - //^^^^^^^^^^^&mut $ - //^^^^^^^^^^^* + //^^^^^^^^^^^&mut * Struct.consume(); Struct.by_ref(); - //^^^^^^( - //^^^^^^& + //^^^^^^(& //^^^^^^) Struct.by_ref_mut(); - //^^^^^^( - //^^^^^^&mut $ + //^^^^^^(&mut $ //^^^^^^) (&Struct).consume(); //^^^^^^^* (&Struct).by_ref(); - //^^^^^^^& - //^^^^^^^* + //^^^^^^^&* (&mut Struct).consume(); //^^^^^^^^^^^* (&mut Struct).by_ref(); - 
//^^^^^^^^^^^& - //^^^^^^^^^^^* + //^^^^^^^^^^^&* (&mut Struct).by_ref_mut(); - //^^^^^^^^^^^&mut $ - //^^^^^^^^^^^* + //^^^^^^^^^^^&mut * // Check that block-like expressions don't duplicate hints let _: &mut [u32] = (&mut []); - //^^^^^^^ - //^^^^^^^&mut $ - //^^^^^^^* + //^^^^^^^&mut * let _: &mut [u32] = { &mut [] }; - //^^^^^^^ - //^^^^^^^&mut $ - //^^^^^^^* + //^^^^^^^&mut * let _: &mut [u32] = unsafe { &mut [] }; - //^^^^^^^ - //^^^^^^^&mut $ - //^^^^^^^* + //^^^^^^^&mut * let _: &mut [u32] = if true { &mut [] - //^^^^^^^ - //^^^^^^^&mut $ - //^^^^^^^* + //^^^^^^^&mut * } else { loop {} //^^^^^^^ }; let _: &mut [u32] = match () { () => &mut [] }; - //^^^^^^^ - //^^^^^^^&mut $ - //^^^^^^^* + //^^^^^^^&mut * let _: &mut dyn Fn() = &mut || (); - //^^^^^^^^^^ - //^^^^^^^^^^&mut $ - //^^^^^^^^^^* + //^^^^^^^^^^&mut * () == (); // ^^& // ^^& @@ -393,16 +409,13 @@ fn main() { // ^^^^& let closure: dyn Fn = || (); closure(); - //^^^^^^^( - //^^^^^^^& + //^^^^^^^(& //^^^^^^^) Struct[0]; - //^^^^^^( - //^^^^^^& + //^^^^^^(& //^^^^^^) &mut Struct[0]; - //^^^^^^( - //^^^^^^&mut $ + //^^^^^^(&mut $ //^^^^^^) } @@ -442,72 +455,46 @@ fn main() { (&Struct).consume(); //^^^^^^^( - //^^^^^^^) - //^^^^^^^.* + //^^^^^^^).* (&Struct).by_ref(); //^^^^^^^( - //^^^^^^^) - //^^^^^^^.* - //^^^^^^^.& + //^^^^^^^).*.& (&mut Struct).consume(); //^^^^^^^^^^^( - //^^^^^^^^^^^) - //^^^^^^^^^^^.* + //^^^^^^^^^^^).* (&mut Struct).by_ref(); //^^^^^^^^^^^( - //^^^^^^^^^^^) - //^^^^^^^^^^^.* - //^^^^^^^^^^^.& + //^^^^^^^^^^^).*.& (&mut Struct).by_ref_mut(); //^^^^^^^^^^^( - //^^^^^^^^^^^) - //^^^^^^^^^^^.* - //^^^^^^^^^^^.&mut + //^^^^^^^^^^^).*.&mut // Check that block-like expressions don't duplicate hints let _: &mut [u32] = (&mut []); //^^^^^^^( - //^^^^^^^) - //^^^^^^^.* - //^^^^^^^.&mut - //^^^^^^^. + //^^^^^^^).*.&mut. let _: &mut [u32] = { &mut [] }; //^^^^^^^( - //^^^^^^^) - //^^^^^^^.* - //^^^^^^^.&mut - //^^^^^^^. + //^^^^^^^).*.&mut. let _: &mut [u32] = unsafe { &mut [] }; //^^^^^^^( - //^^^^^^^) - //^^^^^^^.* - //^^^^^^^.&mut - //^^^^^^^. + //^^^^^^^).*.&mut. let _: &mut [u32] = if true { &mut [] //^^^^^^^( - //^^^^^^^) - //^^^^^^^.* - //^^^^^^^.&mut - //^^^^^^^. + //^^^^^^^).*.&mut. } else { loop {} //^^^^^^^. }; let _: &mut [u32] = match () { () => &mut [] }; //^^^^^^^( - //^^^^^^^) - //^^^^^^^.* - //^^^^^^^.&mut - //^^^^^^^. + //^^^^^^^).*.&mut. let _: &mut dyn Fn() = &mut || (); //^^^^^^^^^^( - //^^^^^^^^^^) - //^^^^^^^^^^.* - //^^^^^^^^^^.&mut - //^^^^^^^^^^. + //^^^^^^^^^^).*.&mut. 
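The rewritten adjustment hints emit at most one hint before and one after the expression, concatenating the individual coercion markers into a single label, which is why the expectations above collapse to strings like `&mut *` and `).*.&mut`. A rough sketch of that composition with plain strings follows; it is illustrative only, since the real code appends `InlayHintLabelPart`s, reverses the adjustment order in prefix mode, and also handles outer parentheses.

```rust
// Illustrative composition of the merged prefix/postfix labels.
fn merged_labels(markers: &[&str], postfix: bool, needs_inner_parens: bool) -> (String, String) {
    let (mut pre, mut post) = (String::new(), String::new());
    if postfix && needs_inner_parens {
        pre.push('(');
        post.push(')');
    }
    for text in markers {
        if postfix {
            post.push('.');
            post.push_str(text.trim_end());
        } else {
            pre.push_str(text);
        }
    }
    (pre, post)
}

fn main() {
    // Prefix mode: a single "&mut *" hint before the expression.
    assert_eq!(merged_labels(&["&mut ", "*"], false, false), ("&mut *".to_owned(), String::new()));
    // Postfix mode: "(" before the receiver and ").*.&mut" after it.
    assert_eq!(merged_labels(&["*", "&mut"], true, true), ("(".to_owned(), ").*.&mut".to_owned()));
}
```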
() == (); // ^^.& // ^^.& @@ -619,9 +606,7 @@ fn or_else() { r#" unsafe fn enabled() { f(&&()); - //^^^^& - //^^^^* - //^^^^* + //^^^^&** } fn disabled() { @@ -633,9 +618,7 @@ fn mixed() { unsafe { f(&&()); - //^^^^& - //^^^^* - //^^^^* + //^^^^&** } } @@ -644,9 +627,7 @@ const _: () = { unsafe { f(&&()); - //^^^^& - //^^^^* - //^^^^* + //^^^^&** } }; @@ -655,18 +636,14 @@ static STATIC: () = { unsafe { f(&&()); - //^^^^& - //^^^^* - //^^^^* + //^^^^&** } }; enum E { Disable = { f(&&()); 0 }, Enable = unsafe { f(&&()); 1 }, - //^^^^& - //^^^^* - //^^^^* + //^^^^&** } const fn f(_: &()) {} @@ -692,8 +669,7 @@ fn a() { _ = Struct.by_ref(); _ = unsafe { Struct.by_ref() }; - //^^^^^^( - //^^^^^^& + //^^^^^^(& //^^^^^^) } "#, @@ -726,10 +702,7 @@ trait T {} fn hello(it: &&[impl T]) { it.len(); - //^^( - //^^& - //^^* - //^^* + //^^(&** //^^) } "#, diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/binding_mode.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/binding_mode.rs index d1c0677863db7..cfe8657fd05e0 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/binding_mode.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/binding_mode.rs @@ -2,13 +2,16 @@ //! ```no_run //! let /* & */ (/* ref */ x,) = &(0,); //! ``` +use std::mem; + use hir::Mutability; use ide_db::famous_defs::FamousDefs; +use ide_db::text_edit::TextEditBuilder; use span::EditionedFileId; use syntax::ast::{self, AstNode}; -use crate::{InlayHint, InlayHintPosition, InlayHintsConfig, InlayKind}; +use crate::{InlayHint, InlayHintLabel, InlayHintPosition, InlayHintsConfig, InlayKind}; pub(super) fn hints( acc: &mut Vec, @@ -21,16 +24,7 @@ pub(super) fn hints( return None; } - let outer_paren_pat = pat - .syntax() - .ancestors() - .skip(1) - .map_while(ast::Pat::cast) - .map_while(|pat| match pat { - ast::Pat::ParenPat(pat) => Some(pat), - _ => None, - }) - .last(); + let outer_paren_pat = pat.syntax().ancestors().skip(1).map_while(ast::ParenPat::cast).last(); let range = outer_paren_pat.as_ref().map_or_else( || match pat { // for ident patterns that @ bind a name, render the un-ref patterns in front of the inner pattern @@ -42,7 +36,18 @@ pub(super) fn hints( }, |it| it.syntax().text_range(), ); + let mut hint = InlayHint { + range, + kind: InlayKind::BindingMode, + label: InlayHintLabel::default(), + text_edit: None, + position: InlayHintPosition::Before, + pad_left: false, + pad_right: false, + resolve_parent: Some(pat.syntax().text_range()), + }; let pattern_adjustments = sema.pattern_adjustments(pat); + let mut was_mut_last = false; pattern_adjustments.iter().for_each(|ty| { let reference = ty.is_reference(); let mut_reference = ty.is_mutable_reference(); @@ -51,41 +56,36 @@ pub(super) fn hints( (true, false) => "&", _ => return, }; - acc.push(InlayHint { - range, - kind: InlayKind::BindingMode, - label: r.into(), - text_edit: None, - position: InlayHintPosition::Before, - pad_left: false, - pad_right: mut_reference, - resolve_parent: Some(pat.syntax().text_range()), - }); + if mem::replace(&mut was_mut_last, mut_reference) { + hint.label.append_str(" "); + } + hint.label.append_str(r); }); + hint.pad_right = was_mut_last; + let acc_base = acc.len(); match pat { ast::Pat::IdentPat(pat) if pat.ref_token().is_none() && pat.mut_token().is_none() => { let bm = sema.binding_mode_of_pat(pat)?; let bm = match bm { - hir::BindingMode::Move => return None, - hir::BindingMode::Ref(Mutability::Mut) => "ref mut", - hir::BindingMode::Ref(Mutability::Shared) => "ref", + hir::BindingMode::Move => 
None, + hir::BindingMode::Ref(Mutability::Mut) => Some("ref mut"), + hir::BindingMode::Ref(Mutability::Shared) => Some("ref"), }; - acc.push(InlayHint { - range: pat.syntax().text_range(), - kind: InlayKind::BindingMode, - label: bm.into(), - text_edit: None, - position: InlayHintPosition::Before, - pad_left: false, - pad_right: true, - resolve_parent: Some(pat.syntax().text_range()), - }); + if let Some(bm) = bm { + acc.push(InlayHint { + range: pat.syntax().text_range(), + kind: InlayKind::BindingMode, + label: bm.into(), + text_edit: None, + position: InlayHintPosition::Before, + pad_left: false, + pad_right: true, + resolve_parent: Some(pat.syntax().text_range()), + }); + } } ast::Pat::OrPat(pat) if !pattern_adjustments.is_empty() && outer_paren_pat.is_none() => { - acc.push(InlayHint::opening_paren_before( - InlayKind::BindingMode, - pat.syntax().text_range(), - )); + hint.label.append_str("("); acc.push(InlayHint::closing_paren_after( InlayKind::BindingMode, pat.syntax().text_range(), @@ -93,6 +93,24 @@ pub(super) fn hints( } _ => (), } + if !hint.label.parts.is_empty() { + acc.push(hint); + } + + if let hints @ [_, ..] = &mut acc[acc_base..] { + let mut edit = TextEditBuilder::default(); + for h in &mut *hints { + edit.insert( + match h.position { + InlayHintPosition::Before => h.range.start(), + InlayHintPosition::After => h.range.end(), + }, + h.label.parts.iter().map(|p| &*p.text).collect(), + ); + } + let edit = edit.finish(); + hints.iter_mut().for_each(|h| h.text_edit = Some(edit.clone())); + } Some(()) } @@ -117,6 +135,13 @@ fn __( (x,): &mut (u32,) //^^^^&mut //^ ref mut + (x,): &mut &mut (u32,) + //^^^^&mut &mut + //^ ref mut + (x,): &&(u32,) + //^^^^&& + //^ ref + ) { let (x,) = (0,); let (x,) = &(0,); @@ -136,11 +161,10 @@ fn __( } match &(0,) { (x,) | (x,) => (), - //^^^^^^^^^^^& + //^^^^^^^^^^^) + //^^^^^^^^^^^&( //^ ref //^ ref - //^^^^^^^^^^^( - //^^^^^^^^^^^) ((x,) | (x,)) => (), //^^^^^^^^^^^^^& //^ ref diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/chaining.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/chaining.rs index 58d8f97a8ced7..028ed1650f46e 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/chaining.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/chaining.rs @@ -77,7 +77,7 @@ pub(super) fn hints( #[cfg(test)] mod tests { use expect_test::{expect, Expect}; - use text_edit::{TextRange, TextSize}; + use ide_db::text_edit::{TextRange, TextSize}; use crate::{ fixture, diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closure_captures.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closure_captures.rs index f399bd01d071a..906f2acf0c445 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closure_captures.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closure_captures.rs @@ -2,12 +2,14 @@ //! //! Tests live in [`bind_pat`][super::bind_pat] module. 
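The binding-mode change just above folds all of a pattern's `&`/`&mut` markers into one hint label, inserting a space only after a preceding `&mut` so that `&mut &mut` stays readable while `&&` stays compact. A small sketch of that spacing rule, assuming a plain string label in place of `InlayHintLabel`:

```rust
use std::mem;

// A separating space is only needed after a previous `&mut`, which is what
// `mem::replace(&mut was_mut_last, mut_reference)` tracks in the hint code.
fn collapse_markers(is_mut_chain: &[bool]) -> String {
    let mut label = String::new();
    let mut was_mut_last = false;
    for &is_mut in is_mut_chain {
        if mem::replace(&mut was_mut_last, is_mut) {
            label.push(' ');
        }
        label.push_str(if is_mut { "&mut" } else { "&" });
    }
    label
}

fn main() {
    // Mirrors the new expectations: `&mut &mut (u32,)` vs `&&(u32,)`.
    assert_eq!(collapse_markers(&[true, true]), "&mut &mut");
    assert_eq!(collapse_markers(&[false, false]), "&&");
}
```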
use ide_db::famous_defs::FamousDefs; +use ide_db::text_edit::{TextRange, TextSize}; use span::EditionedFileId; use stdx::{never, TupleExt}; use syntax::ast::{self, AstNode}; -use text_edit::{TextRange, TextSize}; -use crate::{InlayHint, InlayHintLabel, InlayHintPosition, InlayHintsConfig, InlayKind}; +use crate::{ + InlayHint, InlayHintLabel, InlayHintLabelPart, InlayHintPosition, InlayHintsConfig, InlayKind, +}; pub(super) fn hints( acc: &mut Vec, @@ -27,34 +29,27 @@ pub(super) fn hints( return None; } - let move_kw_range = match closure.move_token() { - Some(t) => t.text_range(), + let (range, label) = match closure.move_token() { + Some(t) => (t.text_range(), InlayHintLabel::default()), None => { - let range = closure.syntax().first_token()?.prev_token()?.text_range(); - let range = TextRange::new(range.end() - TextSize::from(1), range.end()); - acc.push(InlayHint { - range, - kind: InlayKind::ClosureCapture, - label: InlayHintLabel::from("move"), - text_edit: None, - position: InlayHintPosition::After, - pad_left: false, - pad_right: false, - resolve_parent: Some(closure.syntax().text_range()), - }); - range + let prev_token = closure.syntax().first_token()?.prev_token()?.text_range(); + ( + TextRange::new(prev_token.end() - TextSize::from(1), prev_token.end()), + InlayHintLabel::from("move"), + ) } }; - acc.push(InlayHint { - range: move_kw_range, + let mut hint = InlayHint { + range, kind: InlayKind::ClosureCapture, - label: InlayHintLabel::from("("), + label, text_edit: None, position: InlayHintPosition::After, pad_left: false, - pad_right: false, - resolve_parent: None, - }); + pad_right: true, + resolve_parent: Some(closure.syntax().text_range()), + }; + hint.label.append_str("("); let last = captures.len() - 1; for (idx, capture) in captures.into_iter().enumerate() { let local = capture.local(); @@ -76,48 +71,20 @@ pub(super) fn hints( if never!(label.is_empty()) { continue; } - let label = InlayHintLabel::simple( - label, - None, - source.name().and_then(|name| { + hint.label.append_part(InlayHintLabelPart { + text: label, + linked_location: source.name().and_then(|name| { name.syntax().original_file_range_opt(sema.db).map(TupleExt::head).map(Into::into) }), - ); - acc.push(InlayHint { - range: move_kw_range, - kind: InlayKind::ClosureCapture, - label, - text_edit: None, - position: InlayHintPosition::After, - pad_left: false, - pad_right: false, - resolve_parent: Some(closure.syntax().text_range()), + tooltip: None, }); if idx != last { - acc.push(InlayHint { - range: move_kw_range, - kind: InlayKind::ClosureCapture, - label: InlayHintLabel::from(", "), - text_edit: None, - position: InlayHintPosition::After, - pad_left: false, - pad_right: false, - resolve_parent: None, - }); + hint.label.append_str(", "); } } - acc.push(InlayHint { - range: move_kw_range, - kind: InlayKind::ClosureCapture, - label: InlayHintLabel::from(")"), - text_edit: None, - position: InlayHintPosition::After, - pad_left: false, - pad_right: true, - resolve_parent: None, - }); - + hint.label.append_str(")"); + acc.push(hint); Some(()) } @@ -147,51 +114,25 @@ fn main() { let mut baz = NonCopy; let qux = &mut NonCopy; || { -// ^ move -// ^ ( -// ^ &foo -// ^ , $ -// ^ bar -// ^ , $ -// ^ baz -// ^ , $ -// ^ qux -// ^ ) +// ^ move(&foo, bar, baz, qux) foo; bar; baz; qux; }; || { -// ^ move -// ^ ( -// ^ &foo -// ^ , $ -// ^ &bar -// ^ , $ -// ^ &baz -// ^ , $ -// ^ &qux -// ^ ) +// ^ move(&foo, &bar, &baz, &qux) &foo; &bar; &baz; &qux; }; || { -// ^ move -// ^ ( -// ^ &mut baz -// ^ ) +// ^ move(&mut baz) 
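As the updated expectations around here show, closure-capture hints are now a single label of the form `move(<captures>)`, or just `(<captures>)` when the closure already has a `move` keyword, rather than one hint per capture. A minimal sketch of how such a label could be assembled, with a hypothetical helper and plain strings standing in for `InlayHintLabelPart`s with linked locations:

```rust
// Illustrative assembly of the collapsed capture label.
fn capture_hint(has_move_keyword: bool, captures: &[&str]) -> String {
    let mut label = String::new();
    if !has_move_keyword {
        // The hint spells out `move` only when the closure lacks the keyword.
        label.push_str("move");
    }
    label.push('(');
    label.push_str(&captures.join(", "));
    label.push(')');
    label
}

fn main() {
    assert_eq!(capture_hint(false, &["&foo", "bar", "baz", "qux"]), "move(&foo, bar, baz, qux)");
    // With an explicit `move` keyword, only the capture list is shown.
    assert_eq!(capture_hint(true, &["foo"]), "(foo)");
}
```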
&mut baz; }; || { -// ^ move -// ^ ( -// ^ &mut baz -// ^ , $ -// ^ &mut *qux -// ^ ) +// ^ move(&mut baz, &mut *qux) baz = NonCopy; *qux = NonCopy; }; @@ -209,9 +150,7 @@ fn main() { fn main() { let foo = u32; move || { -// ^^^^ ( -// ^^^^ foo -// ^^^^ ) +// ^^^^ (foo) foo; }; } diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/discriminant.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/discriminant.rs index 35b62878329fc..cd77c3ec3e905 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/discriminant.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/discriminant.rs @@ -5,6 +5,7 @@ //! } //! ``` use hir::Semantics; +use ide_db::text_edit::TextEdit; use ide_db::{famous_defs::FamousDefs, RootDatabase}; use span::EditionedFileId; use syntax::ast::{self, AstNode, HasName}; @@ -65,11 +66,11 @@ fn variant_hints( let eq_ = if eq_token.is_none() { " =" } else { "" }; let label = InlayHintLabel::simple( match d { - Ok(x) => { - if x >= 10 { - format!("{eq_} {x} ({x:#X})") + Ok(val) => { + if val >= 10 { + format!("{eq_} {val} ({val:#X})") } else { - format!("{eq_} {x}") + format!("{eq_} {val}") } } Err(_) => format!("{eq_} ?"), @@ -87,7 +88,7 @@ fn variant_hints( }, kind: InlayKind::Discriminant, label, - text_edit: None, + text_edit: d.ok().map(|val| TextEdit::insert(range.start(), format!("{eq_} {val}"))), position: InlayHintPosition::After, pad_left: false, pad_right: false, diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implicit_static.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implicit_static.rs index 8d422478cbfcc..1560df37d0d9d 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implicit_static.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implicit_static.rs @@ -4,6 +4,7 @@ //! 
``` use either::Either; use ide_db::famous_defs::FamousDefs; +use ide_db::text_edit::TextEdit; use span::EditionedFileId; use syntax::{ ast::{self, AstNode}, @@ -38,7 +39,7 @@ pub(super) fn hints( range: t.text_range(), kind: InlayKind::Lifetime, label: "'static".into(), - text_edit: None, + text_edit: Some(TextEdit::insert(t.text_range().start(), "'static ".into())), position: InlayHintPosition::After, pad_left: false, pad_right: true, diff --git a/src/tools/rust-analyzer/crates/ide/src/join_lines.rs b/src/tools/rust-analyzer/crates/ide/src/join_lines.rs index 9d8ba90b2ff03..5192f91a4a62d 100644 --- a/src/tools/rust-analyzer/crates/ide/src/join_lines.rs +++ b/src/tools/rust-analyzer/crates/ide/src/join_lines.rs @@ -8,7 +8,7 @@ use syntax::{ SyntaxToken, TextRange, TextSize, T, }; -use text_edit::{TextEdit, TextEditBuilder}; +use ide_db::text_edit::{TextEdit, TextEditBuilder}; pub struct JoinLinesConfig { pub join_else_if: bool, diff --git a/src/tools/rust-analyzer/crates/ide/src/lib.rs b/src/tools/rust-analyzer/crates/ide/src/lib.rs index d7163d57d220b..d053c4b3c93da 100644 --- a/src/tools/rust-analyzer/crates/ide/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide/src/lib.rs @@ -122,6 +122,7 @@ pub use ide_completion::{ CallableSnippets, CompletionConfig, CompletionFieldsToResolve, CompletionItem, CompletionItemKind, CompletionRelevance, Snippet, SnippetScope, }; +pub use ide_db::text_edit::{Indel, TextEdit}; pub use ide_db::{ base_db::{Cancelled, CrateGraph, CrateId, FileChange, SourceRoot, SourceRootId}, documentation::Documentation, @@ -139,7 +140,6 @@ pub use ide_diagnostics::{ pub use ide_ssr::SsrError; pub use span::Edition; pub use syntax::{TextRange, TextSize}; -pub use text_edit::{Indel, TextEdit}; pub type Cancellable = Result; diff --git a/src/tools/rust-analyzer/crates/ide/src/move_item.rs b/src/tools/rust-analyzer/crates/ide/src/move_item.rs index ea6cc9d6de232..a232df2b82bdc 100644 --- a/src/tools/rust-analyzer/crates/ide/src/move_item.rs +++ b/src/tools/rust-analyzer/crates/ide/src/move_item.rs @@ -1,10 +1,11 @@ use std::{iter::once, mem}; use hir::Semantics; +use ide_db::syntax_helpers::tree_diff::diff; +use ide_db::text_edit::{TextEdit, TextEditBuilder}; use ide_db::{helpers::pick_best_token, FileRange, RootDatabase}; use itertools::Itertools; -use syntax::{algo, ast, match_ast, AstNode, SyntaxElement, SyntaxKind, SyntaxNode, TextRange}; -use text_edit::{TextEdit, TextEditBuilder}; +use syntax::{ast, match_ast, AstNode, SyntaxElement, SyntaxKind, SyntaxNode, TextRange}; #[derive(Copy, Clone, Debug)] pub enum Direction { @@ -166,7 +167,7 @@ fn replace_nodes<'a>( let mut edit = TextEditBuilder::default(); - algo::diff(first, second).into_text_edit(&mut edit); + diff(first, second).into_text_edit(&mut edit); edit.replace(second.text_range(), first_with_cursor); edit.finish() diff --git a/src/tools/rust-analyzer/crates/ide/src/references.rs b/src/tools/rust-analyzer/crates/ide/src/references.rs index e7cb8a253f405..339315db57109 100644 --- a/src/tools/rust-analyzer/crates/ide/src/references.rs +++ b/src/tools/rust-analyzer/crates/ide/src/references.rs @@ -1701,14 +1701,14 @@ fn f() { } "#, expect![[r#" - func Function FileId(0) 137..146 140..144 + func Function FileId(0) 137..146 140..144 module - FileId(0) 161..165 + FileId(0) 181..185 - func Function FileId(0) 137..146 140..144 module + func Function FileId(0) 137..146 140..144 - FileId(0) 181..185 + FileId(0) 161..165 "#]], ) } diff --git a/src/tools/rust-analyzer/crates/ide/src/rename.rs 
b/src/tools/rust-analyzer/crates/ide/src/rename.rs index f17c1fa5c6204..665fc954d239c 100644 --- a/src/tools/rust-analyzer/crates/ide/src/rename.rs +++ b/src/tools/rust-analyzer/crates/ide/src/rename.rs @@ -15,7 +15,7 @@ use itertools::Itertools; use stdx::{always, never}; use syntax::{ast, AstNode, SyntaxKind, SyntaxNode, TextRange, TextSize}; -use text_edit::TextEdit; +use ide_db::text_edit::TextEdit; use crate::{FilePosition, RangeInfo, SourceChange}; @@ -449,9 +449,9 @@ fn text_edit_from_self_param(self_param: &ast::SelfParam, new_name: &str) -> Opt mod tests { use expect_test::{expect, Expect}; use ide_db::source_change::SourceChange; + use ide_db::text_edit::TextEdit; use stdx::trim_indent; use test_utils::assert_eq_text; - use text_edit::TextEdit; use crate::fixture; diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs index 961b2a4c93846..0747d1b404b4f 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs @@ -16,7 +16,7 @@ mod tests; use std::ops::ControlFlow; use hir::{InRealFile, Name, Semantics}; -use ide_db::{FxHashMap, RootDatabase, SymbolKind}; +use ide_db::{FxHashMap, Ranker, RootDatabase, SymbolKind}; use span::EditionedFileId; use syntax::{ ast::{self, IsString}, @@ -397,13 +397,12 @@ fn traverse( Some(AttrOrDerive::Derive(_)) => inside_attribute, None => false, }; + let descended_element = if in_macro { // Attempt to descend tokens into macro-calls. let res = match element { NodeOrToken::Token(token) if token.kind() != COMMENT => { - let kind = token.kind(); - let text = token.text(); - let ident_kind = kind.is_any_identifier(); + let ranker = Ranker::from_token(&token); let mut t = None; let mut r = 0; @@ -412,21 +411,9 @@ fn traverse( |tok, _ctx| { // FIXME: Consider checking ctx transparency for being opaque? let tok = tok.value; - let tok_kind = tok.kind(); - - let exact_same_kind = tok_kind == kind; - let both_idents = - exact_same_kind || (tok_kind.is_any_identifier() && ident_kind); - let same_text = tok.text() == text; - // anything that mapped into a token tree has likely no semantic information - let no_tt_parent = - tok.parent().map_or(false, |it| it.kind() != TOKEN_TREE); - let my_rank = (both_idents as usize) - | ((exact_same_kind as usize) << 1) - | ((same_text as usize) << 2) - | ((no_tt_parent as usize) << 3); - - if my_rank > 0b1110 { + let my_rank = ranker.rank_token(&tok); + + if my_rank >= Ranker::MAX_RANK { // a rank of 0b1110 means that we have found a maximally interesting // token so stop early. 
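The early exit above relies on the fact that no candidate can score higher than the maximum rank, so scanning can stop at the first maximally ranked token. A standalone sketch of that loop; treating `MAX_RANK` as the all-bits-set value of the four-bit rank is an assumption here, since the diff only shows the `my_rank >= Ranker::MAX_RANK` comparison against the shared `ide_db::Ranker`.

```rust
// Illustrative early exit over candidate ranks.
const MAX_RANK: usize = 0b1111;

fn pick_best(ranks: impl IntoIterator<Item = usize>) -> Option<usize> {
    let mut best: Option<usize> = None;
    for rank in ranks {
        if best.map_or(true, |b| rank > b) {
            best = Some(rank);
        }
        if rank >= MAX_RANK {
            // A maximally interesting candidate; nothing later can beat it.
            break;
        }
    }
    best
}

fn main() {
    assert_eq!(pick_best([0b0101, 0b1111, 0b0111]), Some(0b1111));
}
```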
t = Some(tok); diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_issue_18089.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_issue_18089.html index d07ba74db2499..361dcd1bc37c7 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_issue_18089.html +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_issue_18089.html @@ -50,4 +50,4 @@ } #[proc_macros::issue_18089] -fn template() {} \ No newline at end of file +fn template() {} \ No newline at end of file diff --git a/src/tools/rust-analyzer/crates/ide/src/typing.rs b/src/tools/rust-analyzer/crates/ide/src/typing.rs index a09e1e85ae1b0..9bb5de9f2e3fb 100644 --- a/src/tools/rust-analyzer/crates/ide/src/typing.rs +++ b/src/tools/rust-analyzer/crates/ide/src/typing.rs @@ -23,7 +23,7 @@ use syntax::{ AstNode, Parse, SourceFile, SyntaxKind, TextRange, TextSize, T, }; -use text_edit::{Indel, TextEdit}; +use ide_db::text_edit::TextEdit; use crate::SourceChange; @@ -126,7 +126,7 @@ fn on_opening_bracket_typed( return None; } // FIXME: Edition - let file = file.reparse(&Indel::delete(range), span::Edition::CURRENT_FIXME); + let file = file.reparse(range, "", span::Edition::CURRENT_FIXME); if let Some(edit) = bracket_expr(&file.tree(), offset, opening_bracket, closing_bracket) { return Some(edit); diff --git a/src/tools/rust-analyzer/crates/ide/src/typing/on_enter.rs b/src/tools/rust-analyzer/crates/ide/src/typing/on_enter.rs index 6e56bd6185039..773e352220ece 100644 --- a/src/tools/rust-analyzer/crates/ide/src/typing/on_enter.rs +++ b/src/tools/rust-analyzer/crates/ide/src/typing/on_enter.rs @@ -12,7 +12,7 @@ use syntax::{ SyntaxNode, SyntaxToken, TextRange, TextSize, TokenAtOffset, }; -use text_edit::TextEdit; +use ide_db::text_edit::TextEdit; // Feature: On Enter // diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/generic_params.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/generic_params.rs index ecfabca092c3c..92311238c23a5 100644 --- a/src/tools/rust-analyzer/crates/parser/src/grammar/generic_params.rs +++ b/src/tools/rust-analyzer/crates/parser/src/grammar/generic_params.rs @@ -144,10 +144,31 @@ fn type_bound(p: &mut Parser<'_>) -> bool { LIFETIME_IDENT => lifetime(p), T![for] => types::for_type(p, false), // test precise_capturing - // fn captures<'a: 'a, 'b: 'b, T>() -> impl Sized + use<'b, T> {} + // fn captures<'a: 'a, 'b: 'b, T>() -> impl Sized + use<'b, T, Self> {} T![use] if p.nth_at(1, T![<]) => { p.bump_any(); - generic_param_list(p) + let m = p.start(); + delimited( + p, + T![<], + T![>], + T![,], + || "expected identifier or lifetime".into(), + TokenSet::new(&[T![Self], IDENT, LIFETIME_IDENT]), + |p| { + if p.at(T![Self]) { + let m = p.start(); + p.bump(T![Self]); + m.complete(p, NAME_REF); + } else if p.at(LIFETIME_IDENT) { + lifetime(p); + } else { + name_ref(p); + } + true + }, + ); + m.complete(p, USE_BOUND_GENERIC_ARGS); } T![?] 
if p.nth_at(1, T![for]) => { // test question_for_type_trait_bound diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/patterns.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/patterns.rs index 882c243b0cd9c..ed01fca2acdbd 100644 --- a/src/tools/rust-analyzer/crates/parser/src/grammar/patterns.rs +++ b/src/tools/rust-analyzer/crates/parser/src/grammar/patterns.rs @@ -21,7 +21,8 @@ const RANGE_PAT_END_FIRST: TokenSet = expressions::LITERAL_FIRST.union(paths::PATH_FIRST).union(TokenSet::new(&[T![-], T![const]])); pub(crate) fn pattern(p: &mut Parser<'_>) { - pattern_r(p, PAT_RECOVERY_SET); + let m = p.start(); + pattern_r(p, m, false, PAT_RECOVERY_SET); } /// Parses a pattern list separated by pipes `|`. @@ -36,13 +37,11 @@ pub(crate) fn pattern_single(p: &mut Parser<'_>) { /// Parses a pattern list separated by pipes `|` /// using the given `recovery_set`. pub(super) fn pattern_top_r(p: &mut Parser<'_>, recovery_set: TokenSet) { - p.eat(T![|]); - pattern_r(p, recovery_set); + let m = p.start(); + let has_leading_pipe = p.eat(T![|]); + pattern_r(p, m, has_leading_pipe, recovery_set); } -/// Parses a pattern list separated by pipes `|`, with no leading `|`,using the -/// given `recovery_set`. - // test or_pattern // fn main() { // match () { @@ -52,11 +51,12 @@ pub(super) fn pattern_top_r(p: &mut Parser<'_>, recovery_set: TokenSet) { // [_ | _,] => (), // } // } -fn pattern_r(p: &mut Parser<'_>, recovery_set: TokenSet) { - let m = p.start(); +/// Parses a pattern list separated by pipes `|`, with no leading `|`,using the +/// given `recovery_set`. +fn pattern_r(p: &mut Parser<'_>, m: Marker, has_leading_pipe: bool, recovery_set: TokenSet) { pattern_single_r(p, recovery_set); - if !p.at(T![|]) { + if !p.at(T![|]) && !has_leading_pipe { m.abandon(p); return; } diff --git a/src/tools/rust-analyzer/crates/parser/src/lexed_str.rs b/src/tools/rust-analyzer/crates/parser/src/lexed_str.rs index 5322463a71329..3c0eb1b42a60b 100644 --- a/src/tools/rust-analyzer/crates/parser/src/lexed_str.rs +++ b/src/tools/rust-analyzer/crates/parser/src/lexed_str.rs @@ -39,7 +39,9 @@ impl<'a> LexedStr<'a> { conv.offset = shebang_len; }; - for token in rustc_lexer::tokenize(&text[conv.offset..]) { + // Re-create the tokenizer from scratch every token because `GuardedStrPrefix` is one token in the lexer + // but we want to split it to two in edition <2024. + while let Some(token) = rustc_lexer::tokenize(&text[conv.offset..]).next() { let token_text = &text[conv.offset..][..token.len as usize]; conv.extend_token(&token.kind, token_text); @@ -158,7 +160,7 @@ impl<'a> Converter<'a> { } } - fn extend_token(&mut self, kind: &rustc_lexer::TokenKind, token_text: &str) { + fn extend_token(&mut self, kind: &rustc_lexer::TokenKind, mut token_text: &str) { // A note on an intended tradeoff: // We drop some useful information here (see patterns with double dots `..`) // Storing that info in `SyntaxKind` is not possible due to its layout requirements of @@ -189,10 +191,15 @@ impl<'a> Converter<'a> { rustc_lexer::TokenKind::RawIdent => IDENT, rustc_lexer::TokenKind::GuardedStrPrefix if self.edition.at_least_2024() => { + // FIXME: rustc does something better for recovery. err = "Invalid string literal (reserved syntax)"; ERROR } - rustc_lexer::TokenKind::GuardedStrPrefix => POUND, + rustc_lexer::TokenKind::GuardedStrPrefix => { + // The token is `#"` or `##`, split it into two. + token_text = &token_text[1..]; + POUND + } rustc_lexer::TokenKind::Literal { kind, .. 
} => { self.extend_literal(token_text.len(), kind); diff --git a/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs b/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs index 8da338c0a2c50..21730244a3395 100644 --- a/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs +++ b/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs @@ -312,6 +312,8 @@ pub enum SyntaxKind { UNDERSCORE_EXPR, UNION, USE, + USE_BOUND_GENERIC_ARG, + USE_BOUND_GENERIC_ARGS, USE_TREE, USE_TREE_LIST, VARIANT, diff --git a/src/tools/rust-analyzer/crates/parser/src/tests.rs b/src/tools/rust-analyzer/crates/parser/src/tests.rs index e7bccb6685c95..4b19ddc752a0e 100644 --- a/src/tools/rust-analyzer/crates/parser/src/tests.rs +++ b/src/tools/rust-analyzer/crates/parser/src/tests.rs @@ -15,11 +15,20 @@ use crate::{Edition, LexedStr, TopEntryPoint}; #[path = "../test_data/generated/runner.rs"] mod runner; +fn infer_edition(file_path: &Path) -> Edition { + let file_content = std::fs::read_to_string(file_path).unwrap(); + if let Some(edition) = file_content.strip_prefix("//@ edition: ") { + edition[..4].parse().expect("invalid edition directive") + } else { + Edition::CURRENT + } +} + #[test] fn lex_ok() { for case in TestCase::list("lexer/ok") { let _guard = stdx::panic_context::enter(format!("{:?}", case.rs)); - let actual = lex(&case.text); + let actual = lex(&case.text, infer_edition(&case.rs)); expect_file![case.rast].assert_eq(&actual) } } @@ -28,13 +37,13 @@ fn lex_ok() { fn lex_err() { for case in TestCase::list("lexer/err") { let _guard = stdx::panic_context::enter(format!("{:?}", case.rs)); - let actual = lex(&case.text); + let actual = lex(&case.text, infer_edition(&case.rs)); expect_file![case.rast].assert_eq(&actual) } } -fn lex(text: &str) -> String { - let lexed = LexedStr::new(Edition::CURRENT, text); +fn lex(text: &str, edition: Edition) -> String { + let lexed = LexedStr::new(edition, text); let mut res = String::new(); for i in 0..lexed.len() { diff --git a/src/tools/rust-analyzer/crates/parser/src/tests/top_entries.rs b/src/tools/rust-analyzer/crates/parser/src/tests/top_entries.rs index c56bf0b64485a..7076e03ba4b9e 100644 --- a/src/tools/rust-analyzer/crates/parser/src/tests/top_entries.rs +++ b/src/tools/rust-analyzer/crates/parser/src/tests/top_entries.rs @@ -195,6 +195,38 @@ fn macro_pattern() { error 0: expected pattern "#]], ); + + check( + TopEntryPoint::Pattern, + "| 42 | 43", + expect![[r#" + OR_PAT + PIPE "|" + WHITESPACE " " + LITERAL_PAT + LITERAL + INT_NUMBER "42" + WHITESPACE " " + PIPE "|" + WHITESPACE " " + LITERAL_PAT + LITERAL + INT_NUMBER "43" + "#]], + ); + + check( + TopEntryPoint::Pattern, + "| 42", + expect![[r#" + OR_PAT + PIPE "|" + WHITESPACE " " + LITERAL_PAT + LITERAL + INT_NUMBER "42" + "#]], + ); } #[test] diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/guarded_str_prefix_edition_2021.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/guarded_str_prefix_edition_2021.rast new file mode 100644 index 0000000000000..1bdd6720441f7 --- /dev/null +++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/guarded_str_prefix_edition_2021.rast @@ -0,0 +1,4 @@ +COMMENT "//@ edition: 2021" +WHITESPACE "\n\n" +POUND "#" +STRING "\"foo\"" diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/guarded_str_prefix_edition_2021.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/guarded_str_prefix_edition_2021.rs new file mode 100644 index 0000000000000..f00f949f0db5f --- 
/dev/null +++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/guarded_str_prefix_edition_2021.rs @@ -0,0 +1,3 @@ +//@ edition: 2021 + +#"foo" \ No newline at end of file diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/match_arm.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/match_arm.rast index 8189cf0a8e51a..9221028162928 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/match_arm.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/match_arm.rast @@ -102,9 +102,9 @@ SOURCE_FILE COMMA "," WHITESPACE "\n " MATCH_ARM - PIPE "|" - WHITESPACE " " OR_PAT + PIPE "|" + WHITESPACE " " IDENT_PAT NAME IDENT "X" @@ -132,11 +132,12 @@ SOURCE_FILE COMMA "," WHITESPACE "\n " MATCH_ARM - PIPE "|" - WHITESPACE " " - IDENT_PAT - NAME - IDENT "X" + OR_PAT + PIPE "|" + WHITESPACE " " + IDENT_PAT + NAME + IDENT "X" WHITESPACE " " FAT_ARROW "=>" WHITESPACE " " diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/precise_capturing.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/precise_capturing.rast index cf52f1e479951..f9c0a245af868 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/precise_capturing.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/precise_capturing.rast @@ -50,16 +50,18 @@ SOURCE_FILE WHITESPACE " " TYPE_BOUND USE_KW "use" - GENERIC_PARAM_LIST + USE_BOUND_GENERIC_ARGS L_ANGLE "<" - LIFETIME_PARAM - LIFETIME - LIFETIME_IDENT "'b" + LIFETIME + LIFETIME_IDENT "'b" COMMA "," WHITESPACE " " - TYPE_PARAM - NAME - IDENT "T" + NAME_REF + IDENT "T" + COMMA "," + WHITESPACE " " + NAME_REF + SELF_TYPE_KW "Self" R_ANGLE ">" WHITESPACE " " BLOCK_EXPR diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/precise_capturing.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/precise_capturing.rs index ec208d5062b5e..9ac2305f3a0ef 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/precise_capturing.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/precise_capturing.rs @@ -1 +1 @@ -fn captures<'a: 'a, 'b: 'b, T>() -> impl Sized + use<'b, T> {} +fn captures<'a: 'a, 'b: 'b, T>() -> impl Sized + use<'b, T, Self> {} diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/slice_pat.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/slice_pat.rast index dff72ba886fe8..06c30bba59f61 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/slice_pat.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/slice_pat.rast @@ -43,11 +43,12 @@ SOURCE_FILE WHITESPACE " " SLICE_PAT L_BRACK "[" - PIPE "|" - WHITESPACE " " - IDENT_PAT - NAME - IDENT "a" + OR_PAT + PIPE "|" + WHITESPACE " " + IDENT_PAT + NAME + IDENT "a" COMMA "," WHITESPACE " " REST_PAT diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/tuple_pat.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/tuple_pat.rast index 1a01e0f69381f..c7cd11f774b64 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/tuple_pat.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/tuple_pat.rast @@ -91,9 +91,9 @@ SOURCE_FILE WHITESPACE " " TUPLE_PAT L_PAREN "(" - PIPE "|" - WHITESPACE " " OR_PAT + PIPE "|" + WHITESPACE " " IDENT_PAT NAME IDENT "a" @@ -105,11 +105,12 @@ SOURCE_FILE IDENT "a" COMMA 
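The updated .rast expectations above, together with the `| 42 | 43` cases added to top_entries.rs, pin down the new behaviour: a leading `|` is now kept inside an OR_PAT node instead of sitting outside it, even when only one alternative follows. A rough stand-in for that logic (not the real `pattern_r`) might look like this:

#[derive(Debug, PartialEq)]
enum Pat {
    Literal(i64),
    Or(Vec<Pat>),
}

fn parse_pattern(input: &str) -> Pat {
    let trimmed = input.trim_start();
    let has_leading_pipe = trimmed.starts_with('|');
    let body = if has_leading_pipe { &trimmed[1..] } else { trimmed };
    let mut alts: Vec<Pat> = body
        .split('|')
        .map(|alt| Pat::Literal(alt.trim().parse().expect("literal pattern")))
        .collect();
    // Mirror `pattern_r`: keep the OR wrapper if there was a leading pipe or
    // more than one alternative, otherwise "abandon" it.
    if has_leading_pipe || alts.len() > 1 {
        Pat::Or(alts)
    } else {
        alts.pop().unwrap()
    }
}

fn main() {
    assert_eq!(parse_pattern("| 42"), Pat::Or(vec![Pat::Literal(42)]));
    assert_eq!(parse_pattern("42"), Pat::Literal(42));
    assert_eq!(
        parse_pattern("| 42 | 43"),
        Pat::Or(vec![Pat::Literal(42), Pat::Literal(43)])
    );
}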
"," WHITESPACE " " - PIPE "|" - WHITESPACE " " - IDENT_PAT - NAME - IDENT "b" + OR_PAT + PIPE "|" + WHITESPACE " " + IDENT_PAT + NAME + IDENT "b" R_PAREN ")" WHITESPACE " " EQ "=" diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/tuple_pat_fields.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/tuple_pat_fields.rast index 55baf2fdcb4f6..96353f46976ca 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/tuple_pat_fields.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/tuple_pat_fields.rast @@ -110,11 +110,12 @@ SOURCE_FILE NAME_REF IDENT "S" L_PAREN "(" - PIPE "|" - WHITESPACE " " - IDENT_PAT - NAME - IDENT "a" + OR_PAT + PIPE "|" + WHITESPACE " " + IDENT_PAT + NAME + IDENT "a" R_PAREN ")" WHITESPACE " " EQ "=" diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/msg/flat.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/msg/flat.rs index 88256e98b5887..5443a9bd67bb5 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-api/src/msg/flat.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/msg/flat.rs @@ -401,7 +401,7 @@ struct Writer<'a, 'span, S: InternableSpan> { text: Vec, } -impl<'a, 'span, S: InternableSpan> Writer<'a, 'span, S> { +impl<'a, S: InternableSpan> Writer<'a, '_, S> { fn write(&mut self, root: &'a tt::Subtree) { self.enqueue(root); while let Some((idx, subtree)) = self.work.pop_front() { @@ -524,7 +524,7 @@ struct Reader<'span, S: InternableSpan> { span_data_table: &'span S::Table, } -impl<'span, S: InternableSpan> Reader<'span, S> { +impl Reader<'_, S> { pub(crate) fn read(self) -> tt::Subtree { let mut res: Vec>> = vec![None; self.subtree.len()]; let read_span = |id| S::span_for_token_id(self.span_data_table, id); diff --git a/src/tools/rust-analyzer/crates/project-model/src/rustc_cfg.rs b/src/tools/rust-analyzer/crates/project-model/src/rustc_cfg.rs index aa73ff891004b..bc1f0e6fbf262 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/rustc_cfg.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/rustc_cfg.rs @@ -24,14 +24,15 @@ pub(crate) fn get( config: RustcCfgConfig<'_>, ) -> Vec { let _p = tracing::info_span!("rustc_cfg::get").entered(); - let mut res: Vec<_> = Vec::with_capacity(6 * 2 + 1); + let mut res: Vec<_> = Vec::with_capacity(7 * 2 + 1); // Some nightly-only cfgs, which are required for stdlib res.push(CfgAtom::Flag(Symbol::intern("target_thread_local"))); - for ty in ["8", "16", "32", "64", "cas", "ptr"] { - for key in ["target_has_atomic", "target_has_atomic_load_store"] { + for key in ["target_has_atomic", "target_has_atomic_load_store"] { + for ty in ["8", "16", "32", "64", "cas", "ptr"] { res.push(CfgAtom::KeyValue { key: Symbol::intern(key), value: Symbol::intern(ty) }); } + res.push(CfgAtom::Flag(Symbol::intern(key))); } let rustc_cfgs = get_rust_cfgs(target, extra_env, config); diff --git a/src/tools/rust-analyzer/crates/project-model/src/workspace.rs b/src/tools/rust-analyzer/crates/project-model/src/workspace.rs index d1ee579c0d88f..d53639e242320 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/workspace.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/workspace.rs @@ -1062,7 +1062,7 @@ fn cargo_to_crate_graph( proc_macros, cargo, pkg_data, - build_data, + build_data.zip(Some(build_scripts.error().is_some())), cfg_options.clone(), file_id, name, @@ -1285,7 +1285,7 @@ fn handle_rustc_crates( proc_macros, rustc_workspace, &rustc_workspace[pkg], - build_scripts.get_output(pkg), + 
build_scripts.get_output(pkg).zip(Some(build_scripts.error().is_some())), cfg_options.clone(), file_id, &rustc_workspace[tgt].name, @@ -1345,7 +1345,7 @@ fn add_target_crate_root( proc_macros: &mut ProcMacroPaths, cargo: &CargoWorkspace, pkg: &PackageData, - build_data: Option<&BuildScriptOutput>, + build_data: Option<(&BuildScriptOutput, bool)>, cfg_options: CfgOptions, file_id: FileId, cargo_name: &str, @@ -1368,7 +1368,7 @@ fn add_target_crate_root( for feature in pkg.active_features.iter() { opts.insert_key_value(sym::feature.clone(), Symbol::intern(feature)); } - if let Some(cfgs) = build_data.as_ref().map(|it| &it.cfgs) { + if let Some(cfgs) = build_data.map(|(it, _)| &it.cfgs) { opts.extend(cfgs.iter().cloned()); } opts @@ -1379,7 +1379,7 @@ fn add_target_crate_root( inject_cargo_env(&mut env); inject_rustc_tool_env(&mut env, cargo, cargo_name, kind); - if let Some(envs) = build_data.map(|it| &it.envs) { + if let Some(envs) = build_data.map(|(it, _)| &it.envs) { for (k, v) in envs { env.set(k, v.clone()); } @@ -1396,11 +1396,14 @@ fn add_target_crate_root( origin, ); if let TargetKind::Lib { is_proc_macro: true } = kind { - let proc_macro = match build_data.as_ref().map(|it| it.proc_macro_dylib_path.as_ref()) { - Some(it) => match it { - Some(path) => Ok((cargo_name.to_owned(), path.clone())), - None => Err("proc-macro crate build data is missing dylib path".to_owned()), - }, + let proc_macro = match build_data { + Some((BuildScriptOutput { proc_macro_dylib_path, .. }, has_errors)) => { + match proc_macro_dylib_path { + Some(path) => Ok((cargo_name.to_owned(), path.clone())), + None if has_errors => Err("failed to build proc-macro".to_owned()), + None => Err("proc-macro crate build data is missing dylib path".to_owned()), + } + } None => Err("proc-macro crate is missing its build data".to_owned()), }; proc_macros.insert(crate_id, proc_macro); diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model.txt b/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model.txt index 3401d7f7e47fd..880e90c52a548 100644 --- a/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model.txt +++ b/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model.txt @@ -19,6 +19,7 @@ [ "rust_analyzer", "test", + "true", ], ), potential_cfg_options: None, @@ -81,6 +82,7 @@ [ "rust_analyzer", "test", + "true", ], ), potential_cfg_options: None, @@ -151,6 +153,7 @@ [ "rust_analyzer", "test", + "true", ], ), potential_cfg_options: None, @@ -221,6 +224,7 @@ [ "rust_analyzer", "test", + "true", ], ), potential_cfg_options: None, @@ -291,6 +295,7 @@ [ "feature=default", "feature=std", + "true", ], ), potential_cfg_options: Some( @@ -303,6 +308,7 @@ "feature=rustc-dep-of-std", "feature=std", "feature=use_std", + "true", ], ), ), diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model_with_selective_overrides.txt b/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model_with_selective_overrides.txt index 3401d7f7e47fd..880e90c52a548 100644 --- a/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model_with_selective_overrides.txt +++ b/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model_with_selective_overrides.txt @@ -19,6 +19,7 @@ [ "rust_analyzer", "test", + "true", ], ), potential_cfg_options: 
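The workspace.rs change above threads a `has_errors` flag (derived from `build_scripts.error().is_some()`) alongside each build script output, so a missing proc-macro dylib can be reported as a build failure rather than as missing data. A simplified model of that match, with stand-in types and an illustrative crate name and path:

fn resolve_proc_macro_dylib(
    build_data: Option<(Option<&str>, bool)>, // (dylib path, did any build step fail?)
    crate_name: &str,
) -> Result<(String, String), String> {
    match build_data {
        Some((Some(path), _)) => Ok((crate_name.to_owned(), path.to_owned())),
        Some((None, true)) => Err("failed to build proc-macro".to_owned()),
        Some((None, false)) => {
            Err("proc-macro crate build data is missing dylib path".to_owned())
        }
        None => Err("proc-macro crate is missing its build data".to_owned()),
    }
}

fn main() {
    assert_eq!(
        resolve_proc_macro_dylib(Some((None, true)), "my_derive"),
        Err("failed to build proc-macro".to_owned())
    );
    assert!(
        resolve_proc_macro_dylib(Some((Some("/path/to/libmy_derive.so"), false)), "my_derive")
            .is_ok()
    );
}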
None, @@ -81,6 +82,7 @@ [ "rust_analyzer", "test", + "true", ], ), potential_cfg_options: None, @@ -151,6 +153,7 @@ [ "rust_analyzer", "test", + "true", ], ), potential_cfg_options: None, @@ -221,6 +224,7 @@ [ "rust_analyzer", "test", + "true", ], ), potential_cfg_options: None, @@ -291,6 +295,7 @@ [ "feature=default", "feature=std", + "true", ], ), potential_cfg_options: Some( @@ -303,6 +308,7 @@ "feature=rustc-dep-of-std", "feature=std", "feature=use_std", + "true", ], ), ), diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model_with_wildcard_overrides.txt b/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model_with_wildcard_overrides.txt index 491568d4b756e..7746acd225e32 100644 --- a/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model_with_wildcard_overrides.txt +++ b/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model_with_wildcard_overrides.txt @@ -18,6 +18,7 @@ cfg_options: CfgOptions( [ "rust_analyzer", + "true", ], ), potential_cfg_options: None, @@ -79,6 +80,7 @@ cfg_options: CfgOptions( [ "rust_analyzer", + "true", ], ), potential_cfg_options: None, @@ -148,6 +150,7 @@ cfg_options: CfgOptions( [ "rust_analyzer", + "true", ], ), potential_cfg_options: None, @@ -217,6 +220,7 @@ cfg_options: CfgOptions( [ "rust_analyzer", + "true", ], ), potential_cfg_options: None, @@ -287,6 +291,7 @@ [ "feature=default", "feature=std", + "true", ], ), potential_cfg_options: Some( @@ -299,6 +304,7 @@ "feature=rustc-dep-of-std", "feature=std", "feature=use_std", + "true", ], ), ), diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/output/rust_project_cfg_groups.txt b/src/tools/rust-analyzer/crates/project-model/test_data/output/rust_project_cfg_groups.txt index 8261e5a2d907a..90f41a9c2fc8b 100644 --- a/src/tools/rust-analyzer/crates/project-model/test_data/output/rust_project_cfg_groups.txt +++ b/src/tools/rust-analyzer/crates/project-model/test_data/output/rust_project_cfg_groups.txt @@ -17,6 +17,7 @@ [ "debug_assertions", "miri", + "true", ], ), potential_cfg_options: None, @@ -56,6 +57,7 @@ [ "debug_assertions", "miri", + "true", ], ), potential_cfg_options: None, @@ -86,6 +88,7 @@ [ "debug_assertions", "miri", + "true", ], ), potential_cfg_options: None, @@ -116,6 +119,7 @@ [ "debug_assertions", "miri", + "true", ], ), potential_cfg_options: None, @@ -146,6 +150,7 @@ [ "debug_assertions", "miri", + "true", ], ), potential_cfg_options: None, @@ -193,6 +198,7 @@ [ "debug_assertions", "miri", + "true", ], ), potential_cfg_options: None, @@ -223,6 +229,7 @@ [ "debug_assertions", "miri", + "true", ], ), potential_cfg_options: None, @@ -318,6 +325,7 @@ [ "debug_assertions", "miri", + "true", ], ), potential_cfg_options: None, @@ -348,6 +356,7 @@ [ "debug_assertions", "miri", + "true", ], ), potential_cfg_options: None, @@ -378,6 +387,7 @@ [ "debug_assertions", "miri", + "true", ], ), potential_cfg_options: None, @@ -410,6 +420,7 @@ "group1_other_cfg=other_config", "group2_cfg=yet_another_config", "rust_analyzer", + "true", ], ), potential_cfg_options: None, @@ -485,6 +496,7 @@ "group2_cfg=fourth_config", "group2_cfg=yet_another_config", "rust_analyzer", + "true", "unrelated_cfg", ], ), diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/output/rust_project_hello_world_project_model.txt b/src/tools/rust-analyzer/crates/project-model/test_data/output/rust_project_hello_world_project_model.txt 
index c123df80a6a32..a0e14b8fcb22c 100644 --- a/src/tools/rust-analyzer/crates/project-model/test_data/output/rust_project_hello_world_project_model.txt +++ b/src/tools/rust-analyzer/crates/project-model/test_data/output/rust_project_hello_world_project_model.txt @@ -17,6 +17,7 @@ [ "debug_assertions", "miri", + "true", ], ), potential_cfg_options: None, @@ -56,6 +57,7 @@ [ "debug_assertions", "miri", + "true", ], ), potential_cfg_options: None, @@ -86,6 +88,7 @@ [ "debug_assertions", "miri", + "true", ], ), potential_cfg_options: None, @@ -116,6 +119,7 @@ [ "debug_assertions", "miri", + "true", ], ), potential_cfg_options: None, @@ -146,6 +150,7 @@ [ "debug_assertions", "miri", + "true", ], ), potential_cfg_options: None, @@ -193,6 +198,7 @@ [ "debug_assertions", "miri", + "true", ], ), potential_cfg_options: None, @@ -223,6 +229,7 @@ [ "debug_assertions", "miri", + "true", ], ), potential_cfg_options: None, @@ -318,6 +325,7 @@ [ "debug_assertions", "miri", + "true", ], ), potential_cfg_options: None, @@ -348,6 +356,7 @@ [ "debug_assertions", "miri", + "true", ], ), potential_cfg_options: None, @@ -378,6 +387,7 @@ [ "debug_assertions", "miri", + "true", ], ), potential_cfg_options: None, @@ -407,6 +417,7 @@ cfg_options: CfgOptions( [ "rust_analyzer", + "true", ], ), potential_cfg_options: None, diff --git a/src/tools/rust-analyzer/crates/ra-salsa/src/derived/slot.rs b/src/tools/rust-analyzer/crates/ra-salsa/src/derived/slot.rs index de7a39760746c..6c5ccba173b99 100644 --- a/src/tools/rust-analyzer/crates/ra-salsa/src/derived/slot.rs +++ b/src/tools/rust-analyzer/crates/ra-salsa/src/derived/slot.rs @@ -616,7 +616,7 @@ Please report this bug to https://github.com/salsa-rs/salsa/issues." } } -impl<'me, Q> Drop for PanicGuard<'me, Q> +impl Drop for PanicGuard<'_, Q> where Q: QueryFunction, Q::Value: Eq, diff --git a/src/tools/rust-analyzer/crates/ra-salsa/src/derived_lru/slot.rs b/src/tools/rust-analyzer/crates/ra-salsa/src/derived_lru/slot.rs index d0e4b5422b5f5..ff9cc4eade2cf 100644 --- a/src/tools/rust-analyzer/crates/ra-salsa/src/derived_lru/slot.rs +++ b/src/tools/rust-analyzer/crates/ra-salsa/src/derived_lru/slot.rs @@ -666,7 +666,7 @@ Please report this bug to https://github.com/salsa-rs/salsa/issues." 
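The ra-salsa hunks around here are mechanical: impl headers drop lifetime parameters that can be elided with `'_`. The same rewrite in a stripped-down analogue of ra-salsa's `EventDebug` (the real struct carries more fields; this one is illustrative only):

use std::fmt;

struct EventDebug<'me, D: ?Sized>(&'me D);

// Before: impl<'me, D: ?Sized + fmt::Debug> fmt::Debug for EventDebug<'me, D>
impl<D: ?Sized + fmt::Debug> fmt::Debug for EventDebug<'_, D> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Debug::fmt(self.0, f)
    }
}

fn main() {
    println!("{:?}", EventDebug(&42));
}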
} } -impl<'me, Q, MP> Drop for PanicGuard<'me, Q, MP> +impl Drop for PanicGuard<'_, Q, MP> where Q: QueryFunction, MP: MemoizationPolicy, diff --git a/src/tools/rust-analyzer/crates/ra-salsa/src/interned.rs b/src/tools/rust-analyzer/crates/ra-salsa/src/interned.rs index 359662ec6b2fb..42c398d697de6 100644 --- a/src/tools/rust-analyzer/crates/ra-salsa/src/interned.rs +++ b/src/tools/rust-analyzer/crates/ra-salsa/src/interned.rs @@ -493,7 +493,7 @@ where is_static::>(); } -impl<'me, Q> QueryTable<'me, Q> +impl QueryTable<'_, Q> where Q: Query>, Q::Key: InternValue, diff --git a/src/tools/rust-analyzer/crates/ra-salsa/src/lib.rs b/src/tools/rust-analyzer/crates/ra-salsa/src/lib.rs index 1b327773ec6c5..bd1ab6971cb1c 100644 --- a/src/tools/rust-analyzer/crates/ra-salsa/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ra-salsa/src/lib.rs @@ -149,7 +149,7 @@ where db: &'me D, } -impl<'me, D: ?Sized> fmt::Debug for EventDebug<'me, D> +impl fmt::Debug for EventDebug<'_, D> where D: plumbing::DatabaseOps, { @@ -242,7 +242,7 @@ where db: &'me D, } -impl<'me, D: ?Sized> fmt::Debug for EventKindDebug<'me, D> +impl fmt::Debug for EventKindDebug<'_, D> where D: plumbing::DatabaseOps, { @@ -729,7 +729,7 @@ impl Cycle { db: &'me dyn Database, } - impl<'me> std::fmt::Debug for UnexpectedCycleDebug<'me> { + impl std::fmt::Debug for UnexpectedCycleDebug<'_> { fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { fmt.debug_struct("UnexpectedCycle") .field("all_participants", &self.c.all_participants(self.db)) diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs index ecc8333503e20..e872585c5717c 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs @@ -85,7 +85,9 @@ fn actual_main() -> anyhow::Result { flags::RustAnalyzerCmd::UnresolvedReferences(cmd) => cmd.run()?, flags::RustAnalyzerCmd::Ssr(cmd) => cmd.run()?, flags::RustAnalyzerCmd::Search(cmd) => cmd.run()?, - flags::RustAnalyzerCmd::Lsif(cmd) => cmd.run()?, + flags::RustAnalyzerCmd::Lsif(cmd) => { + cmd.run(&mut std::io::stdout(), Some(project_model::RustLibSource::Discover))? 
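The main.rs call above reflects the new `flags::Lsif::run` signature shown in the lsif.rs hunk that follows: output goes to a caller-supplied `&mut dyn std::io::Write`, so the CLI streams to stdout while the new slow test captures the dump in a buffer. The same idea in miniature (the `emit` helper here is hypothetical):

use std::io::Write;

fn emit(out: &mut dyn Write, data: &str) -> std::io::Result<()> {
    writeln!(out, "{data}")
}

fn main() -> std::io::Result<()> {
    // Production: stream to stdout.
    emit(&mut std::io::stdout(), r#"{"id":1,"type":"vertex"}"#)?;
    // Tests: capture into a buffer and assert on it.
    let mut buf = Vec::new();
    emit(&mut buf, r#"{"id":1,"type":"vertex"}"#)?;
    assert!(String::from_utf8(buf).unwrap().contains("\"id\":1"));
    Ok(())
}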
+ } flags::RustAnalyzerCmd::Scip(cmd) => cmd.run()?, flags::RustAnalyzerCmd::RunTests(cmd) => cmd.run()?, flags::RustAnalyzerCmd::RustcTests(cmd) => cmd.run()?, diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs index ca8acf57bff69..33c4f31fbee4d 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs @@ -12,6 +12,7 @@ use load_cargo::{load_workspace, LoadCargoConfig, ProcMacroServerChoice}; use lsp_types::lsif; use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace, RustLibSource}; use rustc_hash::FxHashMap; +use stdx::format_to; use vfs::{AbsPathBuf, Vfs}; use crate::{ @@ -21,7 +22,7 @@ use crate::{ version::version, }; -struct LsifManager<'a> { +struct LsifManager<'a, 'w> { count: i32, token_map: FxHashMap, range_map: FxHashMap, @@ -30,6 +31,7 @@ struct LsifManager<'a> { analysis: &'a Analysis, db: &'a RootDatabase, vfs: &'a Vfs, + out: &'w mut dyn std::io::Write, } #[derive(Clone, Copy)] @@ -41,8 +43,13 @@ impl From for lsp_types::NumberOrString { } } -impl LsifManager<'_> { - fn new<'a>(analysis: &'a Analysis, db: &'a RootDatabase, vfs: &'a Vfs) -> LsifManager<'a> { +impl LsifManager<'_, '_> { + fn new<'a, 'w>( + analysis: &'a Analysis, + db: &'a RootDatabase, + vfs: &'a Vfs, + out: &'w mut dyn std::io::Write, + ) -> LsifManager<'a, 'w> { LsifManager { count: 0, token_map: FxHashMap::default(), @@ -52,6 +59,7 @@ impl LsifManager<'_> { analysis, db, vfs, + out, } } @@ -70,9 +78,8 @@ impl LsifManager<'_> { self.add(lsif::Element::Edge(edge)) } - // FIXME: support file in addition to stdout here - fn emit(&self, data: &str) { - println!("{data}"); + fn emit(&mut self, data: &str) { + format_to!(self.out, "{data}\n"); } fn get_token_id(&mut self, id: TokenId) -> Id { @@ -272,14 +279,14 @@ impl LsifManager<'_> { } impl flags::Lsif { - pub fn run(self) -> anyhow::Result<()> { + pub fn run( + self, + out: &mut dyn std::io::Write, + sysroot: Option, + ) -> anyhow::Result<()> { let now = Instant::now(); - let cargo_config = &CargoConfig { - sysroot: Some(RustLibSource::Discover), - all_targets: true, - set_test: true, - ..Default::default() - }; + let cargo_config = + &CargoConfig { sysroot, all_targets: true, set_test: true, ..Default::default() }; let no_progress = &|_| (); let load_cargo_config = LoadCargoConfig { load_out_dirs_from_check: true, @@ -308,7 +315,7 @@ impl flags::Lsif { let si = StaticIndex::compute(&analysis, vendored_libs_config); - let mut lsif = LsifManager::new(&analysis, db, &vfs); + let mut lsif = LsifManager::new(&analysis, db, &vfs, out); lsif.add_vertex(lsif::Vertex::MetaData(lsif::MetaData { version: String::from("0.5.0"), project_root: lsp_types::Url::from_file_path(path).unwrap(), diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs index 518b588cb7d49..f5b0fcecf3901 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs @@ -727,7 +727,7 @@ enum RatomlFile { Crate(LocalConfigInput), } -#[derive(Debug, Clone)] +#[derive(Clone)] pub struct Config { /// Projects that have a Cargo.toml or a rust-project.json in a /// parent directory, so we can discover them by walking the @@ -765,6 +765,26 @@ pub struct Config { detached_files: Vec, } +impl fmt::Debug for Config { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + 
f.debug_struct("Config") + .field("discovered_projects_from_filesystem", &self.discovered_projects_from_filesystem) + .field("discovered_projects_from_command", &self.discovered_projects_from_command) + .field("workspace_roots", &self.workspace_roots) + .field("caps", &self.caps) + .field("root_path", &self.root_path) + .field("snippets", &self.snippets) + .field("visual_studio_code_version", &self.visual_studio_code_version) + .field("client_config", &self.client_config) + .field("user_config", &self.user_config) + .field("ratoml_file", &self.ratoml_file) + .field("source_root_parent_map", &self.source_root_parent_map) + .field("validation_errors", &self.validation_errors) + .field("detached_files", &self.detached_files) + .finish() + } +} + // Delegate capability fetching methods impl std::ops::Deref for Config { type Target = ClientCapabilities; diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics.rs index 5f2871ac99226..22910ee4c68a7 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics.rs @@ -173,21 +173,6 @@ pub(crate) fn fetch_native_diagnostics( let _p = tracing::info_span!("fetch_native_diagnostics").entered(); let _ctx = stdx::panic_context::enter("fetch_native_diagnostics".to_owned()); - let convert_diagnostic = - |line_index: &crate::line_index::LineIndex, d: ide::Diagnostic| lsp_types::Diagnostic { - range: lsp::to_proto::range(line_index, d.range.range), - severity: Some(lsp::to_proto::diagnostic_severity(d.severity)), - code: Some(lsp_types::NumberOrString::String(d.code.as_str().to_owned())), - code_description: Some(lsp_types::CodeDescription { - href: lsp_types::Url::parse(&d.code.url()).unwrap(), - }), - source: Some("rust-analyzer".to_owned()), - message: d.message, - related_information: None, - tags: d.unused.then(|| vec![lsp_types::DiagnosticTag::UNNECESSARY]), - data: None, - }; - // the diagnostics produced may point to different files not requested by the concrete request, // put those into here and filter later let mut odd_ones = Vec::new(); @@ -203,10 +188,12 @@ pub(crate) fn fetch_native_diagnostics( NativeDiagnosticsFetchKind::Syntax => { snapshot.analysis.syntax_diagnostics(config, file_id).ok()? 
} - NativeDiagnosticsFetchKind::Semantic => snapshot + + NativeDiagnosticsFetchKind::Semantic if config.enabled => snapshot .analysis .semantic_diagnostics(config, ide::AssistResolveStrategy::None, file_id) .ok()?, + NativeDiagnosticsFetchKind::Semantic => return None, }; let diagnostics = diagnostics .into_iter() @@ -246,3 +233,22 @@ pub(crate) fn fetch_native_diagnostics( } diagnostics } + +pub(crate) fn convert_diagnostic( + line_index: &crate::line_index::LineIndex, + d: ide::Diagnostic, +) -> lsp_types::Diagnostic { + lsp_types::Diagnostic { + range: lsp::to_proto::range(line_index, d.range.range), + severity: Some(lsp::to_proto::diagnostic_severity(d.severity)), + code: Some(lsp_types::NumberOrString::String(d.code.as_str().to_owned())), + code_description: Some(lsp_types::CodeDescription { + href: lsp_types::Url::parse(&d.code.url()).unwrap(), + }), + source: Some("rust-analyzer".to_owned()), + message: d.message, + related_information: None, + tags: d.unused.then(|| vec![lsp_types::DiagnosticTag::UNNECESSARY]), + data: None, + } +} diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/dispatch.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/dispatch.rs index ed7bf27843b5f..03759b036b4ab 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/dispatch.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/dispatch.rs @@ -5,7 +5,7 @@ use std::{ }; use ide::Cancelled; -use lsp_server::ExtractError; +use lsp_server::{ExtractError, Response, ResponseError}; use serde::{de::DeserializeOwned, Serialize}; use stdx::thread::ThreadIntent; @@ -117,7 +117,36 @@ impl RequestDispatcher<'_> { } return self; } - self.on_with_thread_intent::(ThreadIntent::Worker, f) + self.on_with_thread_intent::( + ThreadIntent::Worker, + f, + Self::content_modified_error, + ) + } + + /// Dispatches a non-latency-sensitive request onto the thread pool. When the VFS is marked not + /// ready this will return a `default` constructed [`R::Result`]. + pub(crate) fn on_with( + &mut self, + f: fn(GlobalStateSnapshot, R::Params) -> anyhow::Result, + default: impl FnOnce() -> R::Result, + on_cancelled: fn() -> ResponseError, + ) -> &mut Self + where + R: lsp_types::request::Request< + Params: DeserializeOwned + panic::UnwindSafe + Send + fmt::Debug, + Result: Serialize, + > + 'static, + { + if !self.global_state.vfs_done { + if let Some(lsp_server::Request { id, .. }) = + self.req.take_if(|it| it.method == R::METHOD) + { + self.global_state.respond(lsp_server::Response::new_ok(id, default())); + } + return self; + } + self.on_with_thread_intent::(ThreadIntent::Worker, f, on_cancelled) } /// Dispatches a non-latency-sensitive request onto the thread pool. When the VFS is marked not @@ -136,7 +165,11 @@ impl RequestDispatcher<'_> { } return self; } - self.on_with_thread_intent::(ThreadIntent::Worker, f) + self.on_with_thread_intent::( + ThreadIntent::Worker, + f, + Self::content_modified_error, + ) } /// Dispatches a latency-sensitive request onto the thread pool. 
When the VFS is marked not @@ -159,7 +192,11 @@ impl RequestDispatcher<'_> { } return self; } - self.on_with_thread_intent::(ThreadIntent::LatencySensitive, f) + self.on_with_thread_intent::( + ThreadIntent::LatencySensitive, + f, + Self::content_modified_error, + ) } /// Formatting requests should never block on waiting a for task thread to open up, editors will wait @@ -174,7 +211,11 @@ impl RequestDispatcher<'_> { R::Params: DeserializeOwned + panic::UnwindSafe + Send + fmt::Debug, R::Result: Serialize, { - self.on_with_thread_intent::(ThreadIntent::LatencySensitive, f) + self.on_with_thread_intent::( + ThreadIntent::LatencySensitive, + f, + Self::content_modified_error, + ) } pub(crate) fn finish(&mut self) { @@ -193,6 +234,7 @@ impl RequestDispatcher<'_> { &mut self, intent: ThreadIntent, f: fn(GlobalStateSnapshot, R::Params) -> anyhow::Result, + on_cancelled: fn() -> ResponseError, ) -> &mut Self where R: lsp_types::request::Request + 'static, @@ -221,11 +263,10 @@ impl RequestDispatcher<'_> { match thread_result_to_response::(req.id.clone(), result) { Ok(response) => Task::Response(response), Err(_cancelled) if ALLOW_RETRYING => Task::Retry(req), - Err(_cancelled) => Task::Response(lsp_server::Response::new_err( - req.id, - lsp_server::ErrorCode::ContentModified as i32, - "content modified".to_owned(), - )), + Err(_cancelled) => { + let error = on_cancelled(); + Task::Response(Response { id: req.id, result: None, error: Some(error) }) + } } }); @@ -256,6 +297,14 @@ impl RequestDispatcher<'_> { } } } + + fn content_modified_error() -> ResponseError { + ResponseError { + code: lsp_server::ErrorCode::ContentModified as i32, + message: "content modified".to_owned(), + data: None, + } + } } fn thread_result_to_response( diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs index a9f8ac3a80a6b..fa584ab4d21b1 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs @@ -4,6 +4,7 @@ use std::{ fs, io::Write as _, + ops::Not, process::{self, Stdio}, }; @@ -14,7 +15,7 @@ use ide::{ FilePosition, FileRange, HoverAction, HoverGotoTypeData, InlayFieldsToResolve, Query, RangeInfo, ReferenceCategory, Runnable, RunnableKind, SingleResolve, SourceChange, TextEdit, }; -use ide_db::SymbolKind; +use ide_db::{FxHashMap, SymbolKind}; use itertools::Itertools; use lsp_server::ErrorCode; use lsp_types::{ @@ -36,6 +37,7 @@ use vfs::{AbsPath, AbsPathBuf, FileId, VfsPath}; use crate::{ config::{Config, RustfmtConfig, WorkspaceSymbolConfig}, + diagnostics::convert_diagnostic, global_state::{FetchWorkspaceRequest, GlobalState, GlobalStateSnapshot}, hack_recover_crate_name, line_index::LineEndings, @@ -119,7 +121,7 @@ pub(crate) fn handle_analyzer_status( format_to!(buf, "{}", crate::version()); buf.push_str("\nConfiguration: \n"); - format_to!(buf, "{:?}", snap.config); + format_to!(buf, "{:#?}", snap.config); Ok(buf) } @@ -473,6 +475,74 @@ pub(crate) fn handle_on_type_formatting( Ok(Some(change)) } +pub(crate) fn handle_document_diagnostics( + snap: GlobalStateSnapshot, + params: lsp_types::DocumentDiagnosticParams, +) -> anyhow::Result { + const EMPTY: lsp_types::DocumentDiagnosticReportResult = + lsp_types::DocumentDiagnosticReportResult::Report( + lsp_types::DocumentDiagnosticReport::Full( + lsp_types::RelatedFullDocumentDiagnosticReport { + related_documents: None, + full_document_diagnostic_report: 
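The dispatch.rs changes above parameterise the cancelled-request response: `on_with_thread_intent` now takes an `on_cancelled` callback, so most requests keep the old `ContentModified` reply while the pull-diagnostics handler can answer with `ServerCancelled` plus retrigger data. A toy model of that wiring, with simplified stand-in types rather than lsp-server's:

struct ResponseError {
    code: i32,
    message: String,
}

// Run the handler and map "cancelled" onto whatever error the caller chose;
// `Err(())` stands in for a cancelled computation.
fn dispatch<R>(
    run: impl FnOnce() -> Result<R, ()>,
    on_cancelled: fn() -> ResponseError,
) -> Result<R, ResponseError> {
    run().map_err(|()| on_cancelled())
}

fn content_modified_error() -> ResponseError {
    // LSP `ContentModified`.
    ResponseError { code: -32801, message: "content modified".to_owned() }
}

fn server_cancelled_error() -> ResponseError {
    // LSP `ServerCancelled`; the real handler also attaches `retrigger_request: true`.
    ResponseError { code: -32802, message: "server cancelled the request".to_owned() }
}

fn main() {
    let r: Result<u32, _> = dispatch(|| Err(()), content_modified_error);
    assert_eq!(r.unwrap_err().message, "content modified");
    let r: Result<u32, _> = dispatch(|| Err(()), server_cancelled_error);
    assert_eq!(r.unwrap_err().code, -32802);
}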
lsp_types::FullDocumentDiagnosticReport { + result_id: None, + items: vec![], + }, + }, + ), + ); + + let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?; + let source_root = snap.analysis.source_root_id(file_id)?; + if !snap.analysis.is_local_source_root(source_root)? { + return Ok(EMPTY); + } + let config = snap.config.diagnostics(Some(source_root)); + if !config.enabled { + return Ok(EMPTY); + } + let line_index = snap.file_line_index(file_id)?; + let supports_related = snap.config.text_document_diagnostic_related_document_support(); + + let mut related_documents = FxHashMap::default(); + let diagnostics = snap + .analysis + .full_diagnostics(&config, AssistResolveStrategy::None, file_id)? + .into_iter() + .filter_map(|d| { + let file = d.range.file_id; + let diagnostic = convert_diagnostic(&line_index, d); + if file == file_id { + return Some(diagnostic); + } + if supports_related { + related_documents.entry(file).or_insert_with(Vec::new).push(diagnostic); + } + None + }); + Ok(lsp_types::DocumentDiagnosticReportResult::Report( + lsp_types::DocumentDiagnosticReport::Full(lsp_types::RelatedFullDocumentDiagnosticReport { + full_document_diagnostic_report: lsp_types::FullDocumentDiagnosticReport { + result_id: None, + items: diagnostics.collect(), + }, + related_documents: related_documents.is_empty().not().then(|| { + related_documents + .into_iter() + .map(|(id, items)| { + ( + to_proto::url(&snap, id), + lsp_types::DocumentDiagnosticReportKind::Full( + lsp_types::FullDocumentDiagnosticReport { result_id: None, items }, + ), + ) + }) + .collect() + }), + }), + )) +} + pub(crate) fn handle_document_symbol( snap: GlobalStateSnapshot, params: lsp_types::DocumentSymbolParams, @@ -539,18 +609,11 @@ pub(crate) fn handle_document_symbol( url: &Url, res: &mut Vec, ) { - let mut tags = Vec::new(); - - #[allow(deprecated)] - if let Some(true) = symbol.deprecated { - tags.push(SymbolTag::DEPRECATED) - } - #[allow(deprecated)] res.push(SymbolInformation { name: symbol.name.clone(), kind: symbol.kind, - tags: Some(tags), + tags: symbol.tags.clone(), deprecated: symbol.deprecated, location: Location::new(url.clone(), symbol.range), container_name, diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/capabilities.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/capabilities.rs index 3b19284f24119..271a9c0f3d125 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/capabilities.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/capabilities.rs @@ -155,7 +155,15 @@ pub fn server_capabilities(config: &Config) -> ServerCapabilities { "ssr": true, "workspaceSymbolScopeKindFiltering": true, })), - diagnostic_provider: None, + diagnostic_provider: Some(lsp_types::DiagnosticServerCapabilities::Options( + lsp_types::DiagnosticOptions { + identifier: None, + inter_file_dependencies: true, + // FIXME + workspace_diagnostics: false, + work_done_progress_options: WorkDoneProgressOptions { work_done_progress: None }, + }, + )), inline_completion_provider: None, } } @@ -210,9 +218,7 @@ impl ClientCapabilities { .completion_item .as_ref()? 
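The new `handle_document_diagnostics` above keeps diagnostics for the requested file in the main report and, when the client advertises related-document support, groups everything else per file. Condensed into plain collections (the tuple types stand in for the LSP structures):

use std::collections::HashMap;
use std::ops::Not;

fn split_diagnostics(
    requested: u32,
    diags: Vec<(u32, String)>, // (file id, message)
    supports_related: bool,
) -> (Vec<String>, Option<HashMap<u32, Vec<String>>>) {
    let mut related: HashMap<u32, Vec<String>> = HashMap::new();
    let items: Vec<String> = diags
        .into_iter()
        .filter_map(|(file, message)| {
            if file == requested {
                // Belongs to the requested document.
                return Some(message);
            }
            if supports_related {
                // Everything else is grouped per file for `related_documents`.
                related.entry(file).or_default().push(message);
            }
            None
        })
        .collect();
    (items, related.is_empty().not().then_some(related))
}

fn main() {
    let (items, related) = split_diagnostics(
        1,
        vec![(1, "unused variable".into()), (2, "unresolved import".into())],
        true,
    );
    assert_eq!(items.len(), 1);
    assert_eq!(related.unwrap()[&2].len(), 1);
}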
.label_details_support - .as_ref() - })() - .is_some() + })() == Some(true) } fn completion_item(&self) -> Option { @@ -382,6 +388,15 @@ impl ClientCapabilities { .unwrap_or_default() } + pub fn text_document_diagnostic(&self) -> bool { + (|| -> _ { self.0.text_document.as_ref()?.diagnostic.as_ref() })().is_some() + } + + pub fn text_document_diagnostic_related_document_support(&self) -> bool { + (|| -> _ { self.0.text_document.as_ref()?.diagnostic.as_ref()?.related_document_support })() + == Some(true) + } + pub fn code_action_group(&self) -> bool { self.experimental_bool("codeActionGroup") } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs index 20be38a9e4bee..9a51df80fe1fb 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs @@ -417,6 +417,8 @@ impl GlobalState { } } + let supports_diagnostic_pull_model = self.config.text_document_diagnostic(); + let client_refresh = became_quiescent || state_changed; if client_refresh { // Refresh semantic tokens if the client supports it. @@ -434,11 +436,21 @@ impl GlobalState { if self.config.inlay_hints_refresh() { self.send_request::((), |_, _| ()); } + + if supports_diagnostic_pull_model { + self.send_request::( + (), + |_, _| (), + ); + } } let project_or_mem_docs_changed = became_quiescent || state_changed || memdocs_added_or_removed; - if project_or_mem_docs_changed && self.config.publish_diagnostics(None) { + if project_or_mem_docs_changed + && !supports_diagnostic_pull_model + && self.config.publish_diagnostics(None) + { self.update_diagnostics(); } if project_or_mem_docs_changed && self.config.test_explorer() { @@ -1080,6 +1092,23 @@ impl GlobalState { .on_latency_sensitive::(handlers::handle_semantic_tokens_range) // FIXME: Some of these NO_RETRY could be retries if the file they are interested didn't change. 
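The main_loop.rs logic above boils down to: push diagnostics only when something changed, the client does not support the pull model, and publishing is enabled; with pull support the server merely asks the client to refresh. As a one-line predicate:

fn should_publish(docs_changed: bool, supports_pull: bool, publish_enabled: bool) -> bool {
    docs_changed && !supports_pull && publish_enabled
}

fn main() {
    assert!(should_publish(true, false, true));
    // With the pull model the client re-requests diagnostics itself.
    assert!(!should_publish(true, true, true));
}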
// All other request handlers + .on_with::(handlers::handle_document_diagnostics, || lsp_types::DocumentDiagnosticReportResult::Report( + lsp_types::DocumentDiagnosticReport::Full( + lsp_types::RelatedFullDocumentDiagnosticReport { + related_documents: None, + full_document_diagnostic_report: lsp_types::FullDocumentDiagnosticReport { + result_id: None, + items: vec![], + }, + }, + ), + ), || lsp_server::ResponseError { + code: lsp_server::ErrorCode::ServerCancelled as i32, + message: "server cancelled the request".to_owned(), + data: serde_json::to_value(lsp_types::DiagnosticServerCancellationData { + retrigger_request: true + }).ok(), + }) .on::(handlers::handle_document_symbol) .on::(handlers::handle_folding_range) .on::(handlers::handle_signature_help) diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/tracing/config.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/tracing/config.rs index 5ab2dc2b67a2d..02ae4186ab69a 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/tracing/config.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/tracing/config.rs @@ -58,14 +58,22 @@ where let writer = self.writer; let ra_fmt_layer = tracing_subscriber::fmt::layer() - .with_timer( - time::OffsetTime::local_rfc_3339() - .expect("Could not get local offset, make sure you're on the main thread"), - ) .with_target(false) .with_ansi(false) - .with_writer(writer) - .with_filter(targets_filter); + .with_writer(writer); + + let ra_fmt_layer = match time::OffsetTime::local_rfc_3339() { + Ok(timer) => { + // If we can get the time offset, format logs with the timezone. + ra_fmt_layer.with_timer(timer).boxed() + } + Err(_) => { + // Use system time if we can't get the time offset. This should + // never happen on Linux, but can happen on e.g. OpenBSD. + ra_fmt_layer.boxed() + } + } + .with_filter(targets_filter); let chalk_layer = match self.chalk_filter { Some(chalk_filter) => { diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/tracing/hprof.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/tracing/hprof.rs index cad92962f3488..d466acef0115f 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/tracing/hprof.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/tracing/hprof.rs @@ -120,7 +120,7 @@ pub struct DataVisitor<'a> { string: &'a mut String, } -impl<'a> Visit for DataVisitor<'a> { +impl Visit for DataVisitor<'_> { fn record_debug(&mut self, field: &Field, value: &dyn std::fmt::Debug) { write!(self.string, "{} = {:?} ", field.name(), value).unwrap(); } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/cli.rs b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/cli.rs new file mode 100644 index 0000000000000..fba5466691289 --- /dev/null +++ b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/cli.rs @@ -0,0 +1,131 @@ +use expect_test::expect; +use test_utils::skip_slow_tests; + +use crate::support::Project; + +// If you choose to change the test fixture here, please inform the ferrocene/needy maintainers by +// opening an issue at https://github.com/ferrocene/needy as the tool relies on specific token +// mapping behavior. +#[test] +fn lsif_contains_generated_constant() { + if skip_slow_tests() { + return; + } + + let stdout = Project::with_fixture( + r#" +//- /Cargo.toml +[package] +name = "foo" +version = "0.0.0" + +//- /src/lib.rs +#![allow(unused)] + +macro_rules! 
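The tracing config hunk above handles `OffsetTime::local_rfc_3339()` failing (for example on OpenBSD) by building two differently typed layers and unifying them with `.boxed()`. The same type-erasure trick in a dependency-free sketch, using iterators instead of tracing layers:

fn numbers(reversed: bool) -> Box<dyn Iterator<Item = u32>> {
    let base = 1u32..=3;
    match reversed {
        // `Rev<RangeInclusive<u32>>` and `RangeInclusive<u32>` are different
        // types, so both arms are boxed into one trait object.
        true => Box::new(base.rev()),
        false => Box::new(base),
    }
}

fn main() {
    assert_eq!(numbers(true).collect::<Vec<_>>(), vec![3, 2, 1]);
}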
generate_const_from_identifier( + ($id:ident) => ( + const _: () = { const $id: &str = "encoded_data"; }; + ) +); + +generate_const_from_identifier!(REQ_001); +mod tests { + use super::*; + generate_const_from_identifier!(REQ_002); +} +"#, + ) + .root("foo") + .run_lsif(); + let n = stdout.find(r#"{"id":2,"#).unwrap(); + // the first 2 entries contain paths that are not stable + let stdout = &stdout[n..]; + expect![[r#" + {"id":2,"type":"vertex","label":"foldingRangeResult","result":[{"startLine":2,"startCharacter":43,"endLine":6,"endCharacter":1},{"startLine":3,"startCharacter":19,"endLine":5,"endCharacter":5},{"startLine":9,"startCharacter":10,"endLine":12,"endCharacter":1}]} + {"id":3,"type":"edge","label":"textDocument/foldingRange","inV":2,"outV":1} + {"id":4,"type":"vertex","label":"range","start":{"line":0,"character":3},"end":{"line":0,"character":8}} + {"id":5,"type":"vertex","label":"resultSet"} + {"id":6,"type":"edge","label":"next","inV":5,"outV":4} + {"id":7,"type":"vertex","label":"range","start":{"line":2,"character":13},"end":{"line":2,"character":43}} + {"id":8,"type":"vertex","label":"resultSet"} + {"id":9,"type":"edge","label":"next","inV":8,"outV":7} + {"id":10,"type":"vertex","label":"range","start":{"line":8,"character":0},"end":{"line":8,"character":30}} + {"id":11,"type":"edge","label":"next","inV":8,"outV":10} + {"id":12,"type":"vertex","label":"range","start":{"line":8,"character":32},"end":{"line":8,"character":39}} + {"id":13,"type":"vertex","label":"resultSet"} + {"id":14,"type":"edge","label":"next","inV":13,"outV":12} + {"id":15,"type":"vertex","label":"range","start":{"line":9,"character":4},"end":{"line":9,"character":9}} + {"id":16,"type":"vertex","label":"resultSet"} + {"id":17,"type":"edge","label":"next","inV":16,"outV":15} + {"id":18,"type":"vertex","label":"range","start":{"line":10,"character":8},"end":{"line":10,"character":13}} + {"id":19,"type":"vertex","label":"resultSet"} + {"id":20,"type":"edge","label":"next","inV":19,"outV":18} + {"id":21,"type":"vertex","label":"range","start":{"line":11,"character":4},"end":{"line":11,"character":34}} + {"id":22,"type":"edge","label":"next","inV":8,"outV":21} + {"id":23,"type":"vertex","label":"range","start":{"line":11,"character":36},"end":{"line":11,"character":43}} + {"id":24,"type":"vertex","label":"resultSet"} + {"id":25,"type":"edge","label":"next","inV":24,"outV":23} + {"id":26,"type":"edge","label":"contains","inVs":[4,7,10,12,15,18,21,23],"outV":1} + {"id":27,"type":"vertex","label":"hoverResult","result":{"contents":{"kind":"markdown","value":"\n```rust\n#[allow]\n```\n\n---\n\nValid forms are:\n\n* \\#\\[allow(lint1, lint2, ..., /\\*opt\\*/ reason = \"...\")\\]"}}} + {"id":28,"type":"edge","label":"textDocument/hover","inV":27,"outV":5} + {"id":29,"type":"vertex","label":"referenceResult"} + {"id":30,"type":"edge","label":"textDocument/references","inV":29,"outV":5} + {"id":31,"type":"edge","label":"item","document":1,"property":"references","inVs":[4],"outV":29} + {"id":32,"type":"vertex","label":"hoverResult","result":{"contents":{"kind":"markdown","value":"\n```rust\nfoo\n```\n\n```rust\nmacro_rules! 
generate_const_from_identifier\n```"}}} + {"id":33,"type":"edge","label":"textDocument/hover","inV":32,"outV":8} + {"id":34,"type":"vertex","label":"packageInformation","name":"foo","manager":"cargo","version":"0.0.0"} + {"id":35,"type":"vertex","label":"moniker","scheme":"rust-analyzer","identifier":"foo::generate_const_from_identifier","unique":"scheme","kind":"export"} + {"id":36,"type":"edge","label":"packageInformation","inV":34,"outV":35} + {"id":37,"type":"edge","label":"moniker","inV":35,"outV":8} + {"id":38,"type":"vertex","label":"definitionResult"} + {"id":39,"type":"edge","label":"item","document":1,"inVs":[7],"outV":38} + {"id":40,"type":"edge","label":"textDocument/definition","inV":38,"outV":8} + {"id":41,"type":"vertex","label":"referenceResult"} + {"id":42,"type":"edge","label":"textDocument/references","inV":41,"outV":8} + {"id":43,"type":"edge","label":"item","document":1,"property":"definitions","inVs":[7],"outV":41} + {"id":44,"type":"edge","label":"item","document":1,"property":"references","inVs":[10,21],"outV":41} + {"id":45,"type":"vertex","label":"hoverResult","result":{"contents":{"kind":"markdown","value":"\n```rust\nfoo\n```\n\n```rust\nconst REQ_001: &str = \"encoded_data\"\n```"}}} + {"id":46,"type":"edge","label":"textDocument/hover","inV":45,"outV":13} + {"id":47,"type":"vertex","label":"moniker","scheme":"rust-analyzer","identifier":"foo::REQ_001","unique":"scheme","kind":"export"} + {"id":48,"type":"edge","label":"packageInformation","inV":34,"outV":47} + {"id":49,"type":"edge","label":"moniker","inV":47,"outV":13} + {"id":50,"type":"vertex","label":"definitionResult"} + {"id":51,"type":"edge","label":"item","document":1,"inVs":[12],"outV":50} + {"id":52,"type":"edge","label":"textDocument/definition","inV":50,"outV":13} + {"id":53,"type":"vertex","label":"referenceResult"} + {"id":54,"type":"edge","label":"textDocument/references","inV":53,"outV":13} + {"id":55,"type":"edge","label":"item","document":1,"property":"definitions","inVs":[12],"outV":53} + {"id":56,"type":"vertex","label":"hoverResult","result":{"contents":{"kind":"markdown","value":"\n```rust\nfoo\n```\n\n```rust\nmod tests\n```"}}} + {"id":57,"type":"edge","label":"textDocument/hover","inV":56,"outV":16} + {"id":58,"type":"vertex","label":"moniker","scheme":"rust-analyzer","identifier":"foo::tests","unique":"scheme","kind":"export"} + {"id":59,"type":"edge","label":"packageInformation","inV":34,"outV":58} + {"id":60,"type":"edge","label":"moniker","inV":58,"outV":16} + {"id":61,"type":"vertex","label":"definitionResult"} + {"id":62,"type":"edge","label":"item","document":1,"inVs":[15],"outV":61} + {"id":63,"type":"edge","label":"textDocument/definition","inV":61,"outV":16} + {"id":64,"type":"vertex","label":"referenceResult"} + {"id":65,"type":"edge","label":"textDocument/references","inV":64,"outV":16} + {"id":66,"type":"edge","label":"item","document":1,"property":"definitions","inVs":[15],"outV":64} + {"id":67,"type":"vertex","label":"hoverResult","result":{"contents":{"kind":"markdown","value":"\n```rust\nextern crate foo\n```"}}} + {"id":68,"type":"edge","label":"textDocument/hover","inV":67,"outV":19} + {"id":69,"type":"vertex","label":"definitionResult"} + {"id":70,"type":"vertex","label":"range","start":{"line":0,"character":0},"end":{"line":13,"character":0}} + {"id":71,"type":"edge","label":"contains","inVs":[70],"outV":1} + {"id":72,"type":"edge","label":"item","document":1,"inVs":[70],"outV":69} + {"id":73,"type":"edge","label":"textDocument/definition","inV":69,"outV":19} + 
{"id":74,"type":"vertex","label":"referenceResult"} + {"id":75,"type":"edge","label":"textDocument/references","inV":74,"outV":19} + {"id":76,"type":"edge","label":"item","document":1,"property":"references","inVs":[18],"outV":74} + {"id":77,"type":"vertex","label":"hoverResult","result":{"contents":{"kind":"markdown","value":"\n```rust\nfoo::tests\n```\n\n```rust\nconst REQ_002: &str = \"encoded_data\"\n```"}}} + {"id":78,"type":"edge","label":"textDocument/hover","inV":77,"outV":24} + {"id":79,"type":"vertex","label":"moniker","scheme":"rust-analyzer","identifier":"foo::tests::REQ_002","unique":"scheme","kind":"export"} + {"id":80,"type":"edge","label":"packageInformation","inV":34,"outV":79} + {"id":81,"type":"edge","label":"moniker","inV":79,"outV":24} + {"id":82,"type":"vertex","label":"definitionResult"} + {"id":83,"type":"edge","label":"item","document":1,"inVs":[23],"outV":82} + {"id":84,"type":"edge","label":"textDocument/definition","inV":82,"outV":24} + {"id":85,"type":"vertex","label":"referenceResult"} + {"id":86,"type":"edge","label":"textDocument/references","inV":85,"outV":24} + {"id":87,"type":"edge","label":"item","document":1,"property":"definitions","inVs":[23],"outV":85} + "#]].assert_eq(stdout); +} diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs index 54cd27f4b3bc3..97c76bf8d1738 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs @@ -10,6 +10,7 @@ #![allow(clippy::disallowed_types)] +mod cli; mod ratoml; mod support; mod testdir; diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs index 18aface632d15..78572e37a9b17 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs @@ -6,11 +6,13 @@ use std::{ }; use crossbeam_channel::{after, select, Receiver}; +use itertools::Itertools; use lsp_server::{Connection, Message, Notification, Request}; use lsp_types::{notification::Exit, request::Shutdown, TextDocumentIdentifier, Url}; use parking_lot::{Mutex, MutexGuard}; use paths::{Utf8Path, Utf8PathBuf}; use rust_analyzer::{ + cli::flags, config::{Config, ConfigChange, ConfigErrors}, lsp, main_loop, }; @@ -84,6 +86,46 @@ impl Project<'_> { self } + pub(crate) fn run_lsif(self) -> String { + let tmp_dir = self.tmp_dir.unwrap_or_else(|| { + if self.root_dir_contains_symlink { + TestDir::new_symlink() + } else { + TestDir::new() + } + }); + + let FixtureWithProjectMeta { + fixture, + mini_core, + proc_macro_names, + toolchain, + target_data_layout: _, + } = FixtureWithProjectMeta::parse(self.fixture); + assert!(proc_macro_names.is_empty()); + assert!(mini_core.is_none()); + assert!(toolchain.is_none()); + + for entry in fixture { + let path = tmp_dir.path().join(&entry.path['/'.len_utf8()..]); + fs::create_dir_all(path.parent().unwrap()).unwrap(); + fs::write(path.as_path(), entry.text.as_bytes()).unwrap(); + } + + let tmp_dir_path = AbsPathBuf::assert(tmp_dir.path().to_path_buf()); + let mut buf = Vec::new(); + flags::Lsif::run( + flags::Lsif { + path: tmp_dir_path.join(self.roots.iter().exactly_one().unwrap()).into(), + exclude_vendored_libraries: false, + }, + &mut buf, + None, + ) + .unwrap(); + String::from_utf8(buf).unwrap() + } + pub(crate) fn 
server(self) -> Server { static CONFIG_DIR_LOCK: Mutex<()> = Mutex::new(()); let tmp_dir = self.tmp_dir.unwrap_or_else(|| { diff --git a/src/tools/rust-analyzer/crates/span/src/ast_id.rs b/src/tools/rust-analyzer/crates/span/src/ast_id.rs index 0ebd72e1514c6..1d81d684511cd 100644 --- a/src/tools/rust-analyzer/crates/span/src/ast_id.rs +++ b/src/tools/rust-analyzer/crates/span/src/ast_id.rs @@ -224,9 +224,10 @@ impl AstIdMap { match self.map.raw_entry().from_hash(hash, |&idx| self.arena[idx] == ptr) { Some((&idx, &())) => ErasedFileAstId(idx.into_raw().into_u32()), None => panic!( - "Can't find {:?} in AstIdMap:\n{:?}", + "Can't find {:?} in AstIdMap:\n{:?}\n source text: {}", item, self.arena.iter().map(|(_id, i)| i).collect::>(), + item ), } } diff --git a/src/tools/rust-analyzer/crates/stdx/src/lib.rs b/src/tools/rust-analyzer/crates/stdx/src/lib.rs index 76dbd42ff6b24..04c2153abf419 100644 --- a/src/tools/rust-analyzer/crates/stdx/src/lib.rs +++ b/src/tools/rust-analyzer/crates/stdx/src/lib.rs @@ -10,6 +10,7 @@ pub mod non_empty_vec; pub mod panic_context; pub mod process; pub mod rand; +pub mod thin_vec; pub mod thread; pub use always_assert::{always, never}; @@ -304,22 +305,6 @@ pub fn slice_tails(this: &[T]) -> impl Iterator { (0..this.len()).map(|i| &this[i..]) } -pub trait IsNoneOr { - type Type; - #[allow(clippy::wrong_self_convention)] - fn is_none_or(self, s: impl FnOnce(Self::Type) -> bool) -> bool; -} -#[allow(unstable_name_collisions)] -impl IsNoneOr for Option { - type Type = T; - fn is_none_or(self, f: impl FnOnce(T) -> bool) -> bool { - match self { - Some(v) => f(v), - None => true, - } - } -} - #[cfg(test)] mod tests { use super::*; diff --git a/src/tools/rust-analyzer/crates/stdx/src/thin_vec.rs b/src/tools/rust-analyzer/crates/stdx/src/thin_vec.rs new file mode 100644 index 0000000000000..700220e1d3e52 --- /dev/null +++ b/src/tools/rust-analyzer/crates/stdx/src/thin_vec.rs @@ -0,0 +1,472 @@ +use std::alloc::{dealloc, handle_alloc_error, Layout}; +use std::fmt; +use std::hash::{Hash, Hasher}; +use std::marker::PhantomData; +use std::ops::{Deref, DerefMut}; +use std::ptr::{addr_of_mut, slice_from_raw_parts_mut, NonNull}; + +/// A type that is functionally equivalent to `(Header, Box<[Item]>)`, +/// but all data is stored in one heap allocation and the pointer is thin, +/// so the whole thing's size is like a pointer. +pub struct ThinVecWithHeader { + /// INVARIANT: Points to a valid heap allocation that contains `ThinVecInner
`, + /// followed by (suitably aligned) `len` `Item`s. + ptr: NonNull>, + _marker: PhantomData<(Header, Box<[Item]>)>, +} + +// SAFETY: We essentially own both the header and the items. +unsafe impl Send for ThinVecWithHeader {} +unsafe impl Sync for ThinVecWithHeader {} + +#[derive(Clone)] +struct ThinVecInner
{ + header: Header, + len: usize, +} + +impl ThinVecWithHeader { + /// # Safety + /// + /// The iterator must produce `len` elements. + #[inline] + unsafe fn from_trusted_len_iter( + header: Header, + len: usize, + items: impl Iterator, + ) -> Self { + let (ptr, layout, items_offset) = Self::allocate(len); + + struct DeallocGuard(*mut u8, Layout); + impl Drop for DeallocGuard { + fn drop(&mut self) { + // SAFETY: We allocated this above. + unsafe { + dealloc(self.0, self.1); + } + } + } + let _dealloc_guard = DeallocGuard(ptr.as_ptr().cast::(), layout); + + // INVARIANT: Between `0..1` there are only initialized items. + struct ItemsGuard(*mut Item, *mut Item); + impl Drop for ItemsGuard { + fn drop(&mut self) { + // SAFETY: Our invariant. + unsafe { + slice_from_raw_parts_mut(self.0, self.1.offset_from(self.0) as usize) + .drop_in_place(); + } + } + } + + // SAFETY: We allocated enough space. + let mut items_ptr = unsafe { ptr.as_ptr().byte_add(items_offset).cast::() }; + // INVARIANT: There are zero elements in this range. + let mut items_guard = ItemsGuard(items_ptr, items_ptr); + items.for_each(|item| { + // SAFETY: Our precondition guarantee we won't get more than `len` items, and we allocated + // enough space for `len` items. + unsafe { + items_ptr.write(item); + items_ptr = items_ptr.add(1); + } + // INVARIANT: We just initialized this item. + items_guard.1 = items_ptr; + }); + + // SAFETY: We allocated enough space. + unsafe { + ptr.write(ThinVecInner { header, len }); + } + + std::mem::forget(items_guard); + + std::mem::forget(_dealloc_guard); + + // INVARIANT: We allocated and initialized all fields correctly. + Self { ptr, _marker: PhantomData } + } + + #[inline] + fn allocate(len: usize) -> (NonNull>, Layout, usize) { + let (layout, items_offset) = Self::layout(len); + // SAFETY: We always have `len`, so our allocation cannot be zero-sized. + let ptr = unsafe { std::alloc::alloc(layout).cast::>() }; + let Some(ptr) = NonNull::>::new(ptr) else { + handle_alloc_error(layout); + }; + (ptr, layout, items_offset) + } + + #[inline] + #[allow(clippy::should_implement_trait)] + pub fn from_iter(header: Header, items: I) -> Self + where + I: IntoIterator, + I::IntoIter: TrustedLen, + { + let items = items.into_iter(); + // SAFETY: `TrustedLen` guarantees the iterator length is exact. + unsafe { Self::from_trusted_len_iter(header, items.len(), items) } + } + + #[inline] + fn items_offset(&self) -> usize { + // SAFETY: We `pad_to_align()` in `layout()`, so at most where accessing past the end of the allocation, + // which is allowed. + unsafe { + Layout::new::>().extend(Layout::new::()).unwrap_unchecked().1 + } + } + + #[inline] + fn header_and_len(&self) -> &ThinVecInner
{ + // SAFETY: By `ptr`'s invariant, it is correctly allocated and initialized. + unsafe { &*self.ptr.as_ptr() } + } + + #[inline] + fn items_ptr(&self) -> *mut [Item] { + let len = self.header_and_len().len; + // SAFETY: `items_offset()` returns the correct offset of the items, where they are allocated. + let ptr = unsafe { self.ptr.as_ptr().byte_add(self.items_offset()).cast::() }; + slice_from_raw_parts_mut(ptr, len) + } + + #[inline] + pub fn header(&self) -> &Header { + &self.header_and_len().header + } + + #[inline] + pub fn header_mut(&mut self) -> &mut Header { + // SAFETY: By `ptr`'s invariant, it is correctly allocated and initialized. + unsafe { &mut *addr_of_mut!((*self.ptr.as_ptr()).header) } + } + + #[inline] + pub fn items(&self) -> &[Item] { + // SAFETY: `items_ptr()` gives a valid pointer. + unsafe { &*self.items_ptr() } + } + + #[inline] + pub fn items_mut(&mut self) -> &mut [Item] { + // SAFETY: `items_ptr()` gives a valid pointer. + unsafe { &mut *self.items_ptr() } + } + + #[inline] + pub fn len(&self) -> usize { + self.header_and_len().len + } + + #[inline] + fn layout(len: usize) -> (Layout, usize) { + let (layout, items_offset) = Layout::new::>() + .extend(Layout::array::(len).expect("too big `ThinVec` requested")) + .expect("too big `ThinVec` requested"); + let layout = layout.pad_to_align(); + (layout, items_offset) + } +} + +/// # Safety +/// +/// The length reported must be exactly the number of items yielded. +pub unsafe trait TrustedLen: ExactSizeIterator {} + +unsafe impl TrustedLen for std::vec::IntoIter {} +unsafe impl TrustedLen for std::slice::Iter<'_, T> {} +unsafe impl<'a, T: Clone + 'a, I: TrustedLen> TrustedLen for std::iter::Cloned {} +unsafe impl T> TrustedLen for std::iter::Map {} +unsafe impl TrustedLen for std::vec::Drain<'_, T> {} +unsafe impl TrustedLen for std::array::IntoIter {} + +impl Clone for ThinVecWithHeader { + #[inline] + fn clone(&self) -> Self { + Self::from_iter(self.header().clone(), self.items().iter().cloned()) + } +} + +impl Drop for ThinVecWithHeader { + #[inline] + fn drop(&mut self) { + // This must come before we drop `header`, because after that we cannot make a reference to it in `len()`. + let len = self.len(); + + // SAFETY: The contents are allocated and initialized. + unsafe { + addr_of_mut!((*self.ptr.as_ptr()).header).drop_in_place(); + self.items_ptr().drop_in_place(); + } + + let (layout, _) = Self::layout(len); + // SAFETY: This was allocated in `new()` with the same layout calculation. 
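A safe illustration of the layout arithmetic that `ThinVecWithHeader::layout` above relies on: one allocation holds the header (plus the length) followed by the items, and `Layout::extend` reports the byte offset at which the items start. The `Header` and `u64` item types here are arbitrary placeholders.

use std::alloc::Layout;

#[allow(dead_code)]
struct Header {
    flags: u32,
}

// Mirrors the shape of `ThinVecWithHeader::layout`: header-plus-len first,
// then `len` items, padded so the total size is a multiple of the alignment.
fn layout_for(len: usize) -> (Layout, usize) {
    let (layout, items_offset) = Layout::new::<(Header, usize)>()
        .extend(Layout::array::<u64>(len).expect("too big"))
        .expect("too big");
    (layout.pad_to_align(), items_offset)
}

fn main() {
    let (layout, items_offset) = layout_for(4);
    // The items start right after the (aligned) header+len prefix...
    assert_eq!(items_offset % std::mem::align_of::<u64>(), 0);
    // ...and the allocation is exactly big enough for the prefix plus four items.
    assert_eq!(layout.size(), items_offset + 4 * std::mem::size_of::<u64>());
}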
+        unsafe {
+            dealloc(self.ptr.as_ptr().cast::<u8>(), layout);
+        }
+    }
+}
+
+impl<Header: fmt::Debug, Item: fmt::Debug> fmt::Debug for ThinVecWithHeader<Header, Item> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("ThinVecWithHeader")
+            .field("header", self.header())
+            .field("items", &self.items())
+            .finish()
+    }
+}
+
+impl<Header: PartialEq, Item: PartialEq> PartialEq for ThinVecWithHeader<Header, Item> {
+    #[inline]
+    fn eq(&self, other: &Self) -> bool {
+        self.header() == other.header() && self.items() == other.items()
+    }
+}
+
+impl<Header: Eq, Item: Eq> Eq for ThinVecWithHeader<Header, Item> {}
+
+impl<Header: Hash, Item: Hash> Hash for ThinVecWithHeader<Header, Item> {
+    #[inline]
+    fn hash<H: Hasher>(&self, state: &mut H) {
+        self.header().hash(state);
+        self.items().hash(state);
+    }
+}
+
+#[derive(Clone, PartialEq, Eq, Hash)]
+pub struct ThinVec<T>(ThinVecWithHeader<(), T>);
+
+impl<T> ThinVec<T> {
+    #[inline]
+    #[allow(clippy::should_implement_trait)]
+    pub fn from_iter<I>(values: I) -> Self
+    where
+        I: IntoIterator<Item = T>,
+        I::IntoIter: TrustedLen,
+    {
+        Self(ThinVecWithHeader::from_iter((), values))
+    }
+
+    #[inline]
+    pub fn len(&self) -> usize {
+        self.0.len()
+    }
+
+    #[inline]
+    pub fn iter(&self) -> std::slice::Iter<'_, T> {
+        (**self).iter()
+    }
+
+    #[inline]
+    pub fn iter_mut(&mut self) -> std::slice::IterMut<'_, T> {
+        (**self).iter_mut()
+    }
+}
+
+impl<T> Deref for ThinVec<T> {
+    type Target = [T];
+
+    #[inline]
+    fn deref(&self) -> &Self::Target {
+        self.0.items()
+    }
+}
+
+impl<T> DerefMut for ThinVec<T> {
+    #[inline]
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        self.0.items_mut()
+    }
+}
+
+impl<'a, T> IntoIterator for &'a ThinVec<T> {
+    type IntoIter = std::slice::Iter<'a, T>;
+    type Item = &'a T;
+
+    #[inline]
+    fn into_iter(self) -> Self::IntoIter {
+        self.iter()
+    }
+}
+
+impl<'a, T> IntoIterator for &'a mut ThinVec<T> {
+    type IntoIter = std::slice::IterMut<'a, T>;
+    type Item = &'a mut T;
+
+    #[inline]
+    fn into_iter(self) -> Self::IntoIter {
+        self.iter_mut()
+    }
+}
+
+impl<T: fmt::Debug> fmt::Debug for ThinVec<T> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_list().entries(&**self).finish()
+    }
+}
+
+/// A [`ThinVec`] that requires no allocation for the empty case.
+#[derive(Clone, PartialEq, Eq, Hash)]
+pub struct EmptyOptimizedThinVec<T>(Option<ThinVec<T>>);
+
+impl<T> EmptyOptimizedThinVec<T> {
+    #[inline]
+    #[allow(clippy::should_implement_trait)]
+    pub fn from_iter<I>(values: I) -> Self
+    where
+        I: IntoIterator<Item = T>,
+        I::IntoIter: TrustedLen,
+    {
+        let values = values.into_iter();
+        if values.len() == 0 {
+            Self::empty()
+        } else {
+            Self(Some(ThinVec::from_iter(values)))
+        }
+    }
+
+    #[inline]
+    pub fn empty() -> Self {
+        Self(None)
+    }
+
+    #[inline]
+    pub fn len(&self) -> usize {
+        self.0.as_ref().map_or(0, ThinVec::len)
+    }
+
+    #[inline]
+    pub fn iter(&self) -> std::slice::Iter<'_, T> {
+        (**self).iter()
+    }
+
+    #[inline]
+    pub fn iter_mut(&mut self) -> std::slice::IterMut<'_, T> {
+        (**self).iter_mut()
+    }
+}
+
+impl<T> Default for EmptyOptimizedThinVec<T> {
+    #[inline]
+    fn default() -> Self {
+        Self::empty()
+    }
+}
+
+impl<T> Deref for EmptyOptimizedThinVec<T> {
+    type Target = [T];
+
+    #[inline]
+    fn deref(&self) -> &Self::Target {
+        self.0.as_deref().unwrap_or_default()
+    }
+}
+
+impl<T> DerefMut for EmptyOptimizedThinVec<T> {
+    #[inline]
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        self.0.as_deref_mut().unwrap_or_default()
+    }
+}
+
+impl<'a, T> IntoIterator for &'a EmptyOptimizedThinVec<T> {
+    type IntoIter = std::slice::Iter<'a, T>;
+    type Item = &'a T;
+
+    #[inline]
+    fn into_iter(self) -> Self::IntoIter {
+        self.iter()
+    }
+}
+
+impl<'a, T> IntoIterator for &'a mut EmptyOptimizedThinVec<T> {
+    type IntoIter = std::slice::IterMut<'a, T>;
+    type Item = &'a mut T;
+
+    #[inline]
+    fn into_iter(self) -> Self::IntoIter {
+        self.iter_mut()
+    }
+}
+
+impl<T: fmt::Debug> fmt::Debug for EmptyOptimizedThinVec<T> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_list().entries(&**self).finish()
+    }
+}
+
+/// Syntax:
+///
+/// ```ignore
+/// thin_vec_with_header_struct! {
+///     pub new(pub(crate)) struct MyCoolStruct, MyCoolStructHeader {
+///         pub(crate) variable_length: [Ty],
+///         pub field1: CopyTy,
+///         pub field2: NonCopyTy; ref,
+///     }
+/// }
+/// ```
+#[doc(hidden)]
+#[macro_export]
+macro_rules! thin_vec_with_header_struct_ {
+    (@maybe_ref (ref) $($t:tt)*) => { &$($t)* };
+    (@maybe_ref () $($t:tt)*) => { $($t)* };
+    (
+        $vis:vis new($new_vis:vis) struct $struct:ident, $header:ident {
+            $items_vis:vis $items:ident : [$items_ty:ty],
+            $( $header_var_vis:vis $header_var:ident : $header_var_ty:ty $(; $ref:ident)?, )+
+        }
+    ) => {
+        #[derive(Debug, Clone, Eq, PartialEq, Hash)]
+        struct $header {
+            $( $header_var : $header_var_ty, )+
+        }
+
+        #[derive(Clone, Eq, PartialEq, Hash)]
+        $vis struct $struct($crate::thin_vec::ThinVecWithHeader<$header, $items_ty>);
+
+        impl $struct {
+            #[inline]
+            #[allow(unused)]
+            $new_vis fn new<I>(
+                $( $header_var: $header_var_ty, )+
+                $items: I,
+            ) -> Self
+            where
+                I: ::std::iter::IntoIterator<Item = $items_ty>,
+                I::IntoIter: $crate::thin_vec::TrustedLen,
+            {
+                Self($crate::thin_vec::ThinVecWithHeader::from_iter(
+                    $header { $( $header_var, )+ },
+                    $items,
+                ))
+            }
+
+            #[inline]
+            $items_vis fn $items(&self) -> &[$items_ty] {
+                self.0.items()
+            }
+
+            $(
+                #[inline]
+                $header_var_vis fn $header_var(&self) -> $crate::thin_vec_with_header_struct_!(@maybe_ref ($($ref)?) $header_var_ty) {
+                    $crate::thin_vec_with_header_struct_!(@maybe_ref ($($ref)?)
self.0.header().$header_var) + } + )+ + } + + impl ::std::fmt::Debug for $struct { + fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result { + f.debug_struct(stringify!($struct)) + $( .field(stringify!($header_var), &self.$header_var()) )* + .field(stringify!($items), &self.$items()) + .finish() + } + } + }; +} +pub use crate::thin_vec_with_header_struct_ as thin_vec_with_header_struct; diff --git a/src/tools/rust-analyzer/crates/syntax/Cargo.toml b/src/tools/rust-analyzer/crates/syntax/Cargo.toml index fcb9b0ea354f1..51eaea54346d7 100644 --- a/src/tools/rust-analyzer/crates/syntax/Cargo.toml +++ b/src/tools/rust-analyzer/crates/syntax/Cargo.toml @@ -27,7 +27,6 @@ ra-ap-rustc_lexer.workspace = true parser.workspace = true stdx.workspace = true -text-edit.workspace = true [dev-dependencies] rayon.workspace = true diff --git a/src/tools/rust-analyzer/crates/syntax/rust.ungram b/src/tools/rust-analyzer/crates/syntax/rust.ungram index 90441c27f6286..02c59646a99e9 100644 --- a/src/tools/rust-analyzer/crates/syntax/rust.ungram +++ b/src/tools/rust-analyzer/crates/syntax/rust.ungram @@ -657,7 +657,14 @@ TypeBoundList = TypeBound = Lifetime | ('~' 'const' | 'const')? 'async'? '?'? Type -| 'use' GenericParamList +| 'use' UseBoundGenericArgs + +UseBoundGenericArgs = + '<' (UseBoundGenericArg (',' UseBoundGenericArg)* ','?)? '>' + +UseBoundGenericArg = + Lifetime +| NameRef //************************// // Patterns // @@ -729,7 +736,7 @@ PathPat = Path OrPat = - (Pat ('|' Pat)* '|'?) + '|'? (Pat ('|' Pat)*) BoxPat = 'box' Pat diff --git a/src/tools/rust-analyzer/crates/syntax/src/algo.rs b/src/tools/rust-analyzer/crates/syntax/src/algo.rs index 8dc6d36a7e799..2acb2158318a1 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/algo.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/algo.rs @@ -1,11 +1,6 @@ //! Collection of assorted algorithms for syntax trees. -use std::hash::BuildHasherDefault; - -use indexmap::IndexMap; use itertools::Itertools; -use rustc_hash::FxHashMap; -use text_edit::TextEditBuilder; use crate::{ AstNode, Direction, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxNode, SyntaxToken, TextRange, @@ -101,559 +96,3 @@ pub fn neighbor(me: &T, direction: Direction) -> Option { pub fn has_errors(node: &SyntaxNode) -> bool { node.children().any(|it| it.kind() == SyntaxKind::ERROR) } - -type FxIndexMap = IndexMap>; - -#[derive(Debug, Hash, PartialEq, Eq)] -enum TreeDiffInsertPos { - After(SyntaxElement), - AsFirstChild(SyntaxElement), -} - -#[derive(Debug)] -pub struct TreeDiff { - replacements: FxHashMap, - deletions: Vec, - // the vec as well as the indexmap are both here to preserve order - insertions: FxIndexMap>, -} - -impl TreeDiff { - pub fn into_text_edit(&self, builder: &mut TextEditBuilder) { - let _p = tracing::info_span!("into_text_edit").entered(); - - for (anchor, to) in &self.insertions { - let offset = match anchor { - TreeDiffInsertPos::After(it) => it.text_range().end(), - TreeDiffInsertPos::AsFirstChild(it) => it.text_range().start(), - }; - to.iter().for_each(|to| builder.insert(offset, to.to_string())); - } - for (from, to) in &self.replacements { - builder.replace(from.text_range(), to.to_string()); - } - for text_range in self.deletions.iter().map(SyntaxElement::text_range) { - builder.delete(text_range); - } - } - - pub fn is_empty(&self) -> bool { - self.replacements.is_empty() && self.deletions.is_empty() && self.insertions.is_empty() - } -} - -/// Finds a (potentially minimal) diff, which, applied to `from`, will result in `to`. 
-/// -/// Specifically, returns a structure that consists of a replacements, insertions and deletions -/// such that applying this map on `from` will result in `to`. -/// -/// This function tries to find a fine-grained diff. -pub fn diff(from: &SyntaxNode, to: &SyntaxNode) -> TreeDiff { - let _p = tracing::info_span!("diff").entered(); - - let mut diff = TreeDiff { - replacements: FxHashMap::default(), - insertions: FxIndexMap::default(), - deletions: Vec::new(), - }; - let (from, to) = (from.clone().into(), to.clone().into()); - - if !syntax_element_eq(&from, &to) { - go(&mut diff, from, to); - } - return diff; - - fn syntax_element_eq(lhs: &SyntaxElement, rhs: &SyntaxElement) -> bool { - lhs.kind() == rhs.kind() - && lhs.text_range().len() == rhs.text_range().len() - && match (&lhs, &rhs) { - (NodeOrToken::Node(lhs), NodeOrToken::Node(rhs)) => { - lhs == rhs || lhs.text() == rhs.text() - } - (NodeOrToken::Token(lhs), NodeOrToken::Token(rhs)) => lhs.text() == rhs.text(), - _ => false, - } - } - - // FIXME: this is horribly inefficient. I bet there's a cool algorithm to diff trees properly. - fn go(diff: &mut TreeDiff, lhs: SyntaxElement, rhs: SyntaxElement) { - let (lhs, rhs) = match lhs.as_node().zip(rhs.as_node()) { - Some((lhs, rhs)) => (lhs, rhs), - _ => { - cov_mark::hit!(diff_node_token_replace); - diff.replacements.insert(lhs, rhs); - return; - } - }; - - let mut look_ahead_scratch = Vec::default(); - - let mut rhs_children = rhs.children_with_tokens(); - let mut lhs_children = lhs.children_with_tokens(); - let mut last_lhs = None; - loop { - let lhs_child = lhs_children.next(); - match (lhs_child.clone(), rhs_children.next()) { - (None, None) => break, - (None, Some(element)) => { - let insert_pos = match last_lhs.clone() { - Some(prev) => { - cov_mark::hit!(diff_insert); - TreeDiffInsertPos::After(prev) - } - // first iteration, insert into out parent as the first child - None => { - cov_mark::hit!(diff_insert_as_first_child); - TreeDiffInsertPos::AsFirstChild(lhs.clone().into()) - } - }; - diff.insertions.entry(insert_pos).or_default().push(element); - } - (Some(element), None) => { - cov_mark::hit!(diff_delete); - diff.deletions.push(element); - } - (Some(ref lhs_ele), Some(ref rhs_ele)) if syntax_element_eq(lhs_ele, rhs_ele) => {} - (Some(lhs_ele), Some(rhs_ele)) => { - // nodes differ, look for lhs_ele in rhs, if its found we can mark everything up - // until that element as insertions. This is important to keep the diff minimal - // in regards to insertions that have been actually done, this is important for - // use insertions as we do not want to replace the entire module node. 
- look_ahead_scratch.push(rhs_ele.clone()); - let mut rhs_children_clone = rhs_children.clone(); - let mut insert = false; - for rhs_child in &mut rhs_children_clone { - if syntax_element_eq(&lhs_ele, &rhs_child) { - cov_mark::hit!(diff_insertions); - insert = true; - break; - } - look_ahead_scratch.push(rhs_child); - } - let drain = look_ahead_scratch.drain(..); - if insert { - let insert_pos = if let Some(prev) = last_lhs.clone().filter(|_| insert) { - TreeDiffInsertPos::After(prev) - } else { - cov_mark::hit!(insert_first_child); - TreeDiffInsertPos::AsFirstChild(lhs.clone().into()) - }; - - diff.insertions.entry(insert_pos).or_default().extend(drain); - rhs_children = rhs_children_clone; - } else { - go(diff, lhs_ele, rhs_ele); - } - } - } - last_lhs = lhs_child.or(last_lhs); - } - } -} - -#[cfg(test)] -mod tests { - use expect_test::{expect, Expect}; - use itertools::Itertools; - use parser::{Edition, SyntaxKind}; - use text_edit::TextEdit; - - use crate::{AstNode, SyntaxElement}; - - #[test] - fn replace_node_token() { - cov_mark::check!(diff_node_token_replace); - check_diff( - r#"use node;"#, - r#"ident"#, - expect![[r#" - insertions: - - - - replacements: - - Line 0: Token(USE_KW@0..3 "use") -> ident - - deletions: - - Line 1: " " - Line 1: node - Line 1: ; - "#]], - ); - } - - #[test] - fn replace_parent() { - cov_mark::check!(diff_insert_as_first_child); - check_diff( - r#""#, - r#"use foo::bar;"#, - expect![[r#" - insertions: - - Line 0: AsFirstChild(Node(SOURCE_FILE@0..0)) - -> use foo::bar; - - replacements: - - - - deletions: - - - "#]], - ); - } - - #[test] - fn insert_last() { - cov_mark::check!(diff_insert); - check_diff( - r#" -use foo; -use bar;"#, - r#" -use foo; -use bar; -use baz;"#, - expect![[r#" - insertions: - - Line 2: After(Node(USE@10..18)) - -> "\n" - -> use baz; - - replacements: - - - - deletions: - - - "#]], - ); - } - - #[test] - fn insert_middle() { - check_diff( - r#" -use foo; -use baz;"#, - r#" -use foo; -use bar; -use baz;"#, - expect![[r#" - insertions: - - Line 2: After(Token(WHITESPACE@9..10 "\n")) - -> use bar; - -> "\n" - - replacements: - - - - deletions: - - - "#]], - ) - } - - #[test] - fn insert_first() { - check_diff( - r#" -use bar; -use baz;"#, - r#" -use foo; -use bar; -use baz;"#, - expect![[r#" - insertions: - - Line 0: After(Token(WHITESPACE@0..1 "\n")) - -> use foo; - -> "\n" - - replacements: - - - - deletions: - - - "#]], - ) - } - - #[test] - fn first_child_insertion() { - cov_mark::check!(insert_first_child); - check_diff( - r#"fn main() { - stdi - }"#, - r#"use foo::bar; - - fn main() { - stdi - }"#, - expect![[r#" - insertions: - - Line 0: AsFirstChild(Node(SOURCE_FILE@0..30)) - -> use foo::bar; - -> "\n\n " - - replacements: - - - - deletions: - - - "#]], - ); - } - - #[test] - fn delete_last() { - cov_mark::check!(diff_delete); - check_diff( - r#"use foo; - use bar;"#, - r#"use foo;"#, - expect![[r#" - insertions: - - - - replacements: - - - - deletions: - - Line 1: "\n " - Line 2: use bar; - "#]], - ); - } - - #[test] - fn delete_middle() { - cov_mark::check!(diff_insertions); - check_diff( - r#" -use expect_test::{expect, Expect}; -use text_edit::TextEdit; - -use crate::AstNode; -"#, - r#" -use expect_test::{expect, Expect}; - -use crate::AstNode; -"#, - expect![[r#" - insertions: - - Line 1: After(Node(USE@1..35)) - -> "\n\n" - -> use crate::AstNode; - - replacements: - - - - deletions: - - Line 2: use text_edit::TextEdit; - Line 3: "\n\n" - Line 4: use crate::AstNode; - Line 5: "\n" - "#]], - ) - } - - #[test] - fn 
delete_first() { - check_diff( - r#" -use text_edit::TextEdit; - -use crate::AstNode; -"#, - r#" -use crate::AstNode; -"#, - expect![[r#" - insertions: - - - - replacements: - - Line 2: Token(IDENT@5..14 "text_edit") -> crate - Line 2: Token(IDENT@16..24 "TextEdit") -> AstNode - Line 2: Token(WHITESPACE@25..27 "\n\n") -> "\n" - - deletions: - - Line 3: use crate::AstNode; - Line 4: "\n" - "#]], - ) - } - - #[test] - fn merge_use() { - check_diff( - r#" -use std::{ - fmt, - hash::BuildHasherDefault, - ops::{self, RangeInclusive}, -}; -"#, - r#" -use std::fmt; -use std::hash::BuildHasherDefault; -use std::ops::{self, RangeInclusive}; -"#, - expect![[r#" - insertions: - - Line 2: After(Node(PATH_SEGMENT@5..8)) - -> :: - -> fmt - Line 6: After(Token(WHITESPACE@86..87 "\n")) - -> use std::hash::BuildHasherDefault; - -> "\n" - -> use std::ops::{self, RangeInclusive}; - -> "\n" - - replacements: - - Line 2: Token(IDENT@5..8 "std") -> std - - deletions: - - Line 2: :: - Line 2: { - fmt, - hash::BuildHasherDefault, - ops::{self, RangeInclusive}, - } - "#]], - ) - } - - #[test] - fn early_return_assist() { - check_diff( - r#" -fn main() { - if let Ok(x) = Err(92) { - foo(x); - } -} - "#, - r#" -fn main() { - let x = match Err(92) { - Ok(it) => it, - _ => return, - }; - foo(x); -} - "#, - expect![[r#" - insertions: - - Line 3: After(Node(BLOCK_EXPR@40..63)) - -> " " - -> match Err(92) { - Ok(it) => it, - _ => return, - } - -> ; - Line 3: After(Node(IF_EXPR@17..63)) - -> "\n " - -> foo(x); - - replacements: - - Line 3: Token(IF_KW@17..19 "if") -> let - Line 3: Token(LET_KW@20..23 "let") -> x - Line 3: Node(BLOCK_EXPR@40..63) -> = - - deletions: - - Line 3: " " - Line 3: Ok(x) - Line 3: " " - Line 3: = - Line 3: " " - Line 3: Err(92) - "#]], - ) - } - - fn check_diff(from: &str, to: &str, expected_diff: Expect) { - let from_node = crate::SourceFile::parse(from, Edition::CURRENT).tree().syntax().clone(); - let to_node = crate::SourceFile::parse(to, Edition::CURRENT).tree().syntax().clone(); - let diff = super::diff(&from_node, &to_node); - - let line_number = - |syn: &SyntaxElement| from[..syn.text_range().start().into()].lines().count(); - - let fmt_syntax = |syn: &SyntaxElement| match syn.kind() { - SyntaxKind::WHITESPACE => format!("{:?}", syn.to_string()), - _ => format!("{syn}"), - }; - - let insertions = - diff.insertions.iter().format_with("\n", |(k, v), f| -> Result<(), std::fmt::Error> { - f(&format!( - "Line {}: {:?}\n-> {}", - line_number(match k { - super::TreeDiffInsertPos::After(syn) => syn, - super::TreeDiffInsertPos::AsFirstChild(syn) => syn, - }), - k, - v.iter().format_with("\n-> ", |v, f| f(&fmt_syntax(v))) - )) - }); - - let replacements = diff - .replacements - .iter() - .sorted_by_key(|(syntax, _)| syntax.text_range().start()) - .format_with("\n", |(k, v), f| { - f(&format!("Line {}: {k:?} -> {}", line_number(k), fmt_syntax(v))) - }); - - let deletions = diff - .deletions - .iter() - .format_with("\n", |v, f| f(&format!("Line {}: {}", line_number(v), fmt_syntax(v)))); - - let actual = format!( - "insertions:\n\n{insertions}\n\nreplacements:\n\n{replacements}\n\ndeletions:\n\n{deletions}\n" - ); - expected_diff.assert_eq(&actual); - - let mut from = from.to_owned(); - let mut text_edit = TextEdit::builder(); - diff.into_text_edit(&mut text_edit); - text_edit.finish().apply(&mut from); - assert_eq!(&*from, to, "diff did not turn `from` to `to`"); - } -} diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/expr_ext.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/expr_ext.rs 
index 6ed205e2856f7..f3053f59836f6 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/ast/expr_ext.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/expr_ext.rs
@@ -232,6 +232,10 @@ impl ast::RangeExpr {
             Some((ix, token, bin_op))
         })
     }
+
+    pub fn is_range_full(&self) -> bool {
+        support::children::<ast::Expr>(&self.syntax).next().is_none()
+    }
 }
 
 impl RangeItem for ast::RangeExpr {
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs
index 4f8bff489cfbd..23d2b355a94f2 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs
@@ -1283,6 +1283,8 @@ pub struct OrPat {
 impl OrPat {
     #[inline]
     pub fn pats(&self) -> AstChildren<Pat> { support::children(&self.syntax) }
+    #[inline]
+    pub fn pipe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![|]) }
 }
 
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
@@ -1993,13 +1995,15 @@ pub struct TypeBound {
     pub(crate) syntax: SyntaxNode,
 }
 impl TypeBound {
-    #[inline]
-    pub fn generic_param_list(&self) -> Option<GenericParamList> { support::child(&self.syntax) }
     #[inline]
     pub fn lifetime(&self) -> Option<Lifetime> { support::child(&self.syntax) }
     #[inline]
     pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
     #[inline]
+    pub fn use_bound_generic_args(&self) -> Option<UseBoundGenericArgs> {
+        support::child(&self.syntax)
+    }
+    #[inline]
     pub fn question_mark_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![?]) }
     #[inline]
     pub fn async_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![async]) }
@@ -2076,6 +2080,21 @@ impl Use {
     pub fn use_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![use]) }
 }
 
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct UseBoundGenericArgs {
+    pub(crate) syntax: SyntaxNode,
+}
+impl UseBoundGenericArgs {
+    #[inline]
+    pub fn use_bound_generic_args(&self) -> AstChildren<UseBoundGenericArg> {
+        support::children(&self.syntax)
+    }
+    #[inline]
+    pub fn l_angle_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![<]) }
+    #[inline]
+    pub fn r_angle_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![>]) }
+}
+
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct UseTree {
     pub(crate) syntax: SyntaxNode,
@@ -2402,6 +2421,12 @@ pub enum Type {
     TupleType(TupleType),
 }
 
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum UseBoundGenericArg {
+    Lifetime(Lifetime),
+    NameRef(NameRef),
+}
+
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct AnyHasArgList {
     pub(crate) syntax: SyntaxNode,
@@ -4435,6 +4460,20 @@ impl AstNode for Use {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl AstNode for UseBoundGenericArgs {
+    #[inline]
+    fn can_cast(kind: SyntaxKind) -> bool { kind == USE_BOUND_GENERIC_ARGS }
+    #[inline]
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    #[inline]
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
 impl AstNode for UseTree {
     #[inline]
     fn can_cast(kind: SyntaxKind) -> bool { kind == USE_TREE }
@@ -5560,6 +5599,34 @@ impl AstNode for Type {
         }
     }
 }
+impl From<Lifetime> for UseBoundGenericArg {
+    #[inline]
+    fn from(node: Lifetime) -> UseBoundGenericArg { UseBoundGenericArg::Lifetime(node) }
+}
+impl From<NameRef> for UseBoundGenericArg {
+    #[inline]
+    fn from(node: NameRef) -> UseBoundGenericArg { UseBoundGenericArg::NameRef(node) }
+}
+impl AstNode for UseBoundGenericArg {
+    #[inline]
+    fn can_cast(kind: SyntaxKind) -> bool { matches!(kind, LIFETIME | NAME_REF) }
+    #[inline]
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        let res = match syntax.kind() {
+            LIFETIME => UseBoundGenericArg::Lifetime(Lifetime { syntax }),
+            NAME_REF => UseBoundGenericArg::NameRef(NameRef { syntax }),
+            _ => return None,
+        };
+        Some(res)
+    }
+    #[inline]
+    fn syntax(&self) -> &SyntaxNode {
+        match self {
+            UseBoundGenericArg::Lifetime(it) => &it.syntax,
+            UseBoundGenericArg::NameRef(it) => &it.syntax,
+        }
+    }
+}
 impl AnyHasArgList {
     #[inline]
     pub fn new<T: ast::HasArgList>(node: T) -> AnyHasArgList {
@@ -6570,6 +6637,11 @@ impl std::fmt::Display for Type {
         std::fmt::Display::fmt(self.syntax(), f)
     }
 }
+impl std::fmt::Display for UseBoundGenericArg {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
 impl std::fmt::Display for Abi {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         std::fmt::Display::fmt(self.syntax(), f)
@@ -7275,6 +7347,11 @@ impl std::fmt::Display for Use {
         std::fmt::Display::fmt(self.syntax(), f)
     }
 }
+impl std::fmt::Display for UseBoundGenericArgs {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
 impl std::fmt::Display for UseTree {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         std::fmt::Display::fmt(self.syntax(), f)
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs
index 693bfe330bd9c..6ec73e76f78d0 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs
@@ -795,7 +795,7 @@ pub enum TypeBoundKind {
     /// for<'a> ...
     ForType(ast::ForType),
     /// use
-    Use(ast::GenericParamList),
+    Use(ast::UseBoundGenericArgs),
     /// 'a
     Lifetime(ast::Lifetime),
 }
@@ -806,8 +806,8 @@ impl ast::TypeBound {
             TypeBoundKind::PathType(path_type)
         } else if let Some(for_type) = support::children(self.syntax()).next() {
             TypeBoundKind::ForType(for_type)
-        } else if let Some(generic_param_list) = self.generic_param_list() {
-            TypeBoundKind::Use(generic_param_list)
+        } else if let Some(args) = self.use_bound_generic_args() {
+            TypeBoundKind::Use(args)
         } else if let Some(lifetime) = self.lifetime() {
             TypeBoundKind::Lifetime(lifetime)
         } else {
@@ -1140,3 +1140,13 @@ impl From for ast::AnyHasAttrs {
         Self::new(node)
     }
 }
+
+impl ast::OrPat {
+    pub fn leading_pipe(&self) -> Option<SyntaxToken> {
+        self.syntax
+            .children_with_tokens()
+            .find(|it| !it.kind().is_trivia())
+            .and_then(NodeOrToken::into_token)
+            .filter(|it| it.kind() == T![|])
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/fuzz.rs b/src/tools/rust-analyzer/crates/syntax/src/fuzz.rs
index 682dcd7cc445d..fd20e603edc3c 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/fuzz.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/fuzz.rs
@@ -5,7 +5,6 @@ use std::str::{self, FromStr};
 
 use parser::Edition;
-use text_edit::Indel;
 
 use crate::{validation, AstNode, SourceFile, TextRange};
 
@@ -22,7 +21,8 @@ pub fn check_parser(text: &str) {
 #[derive(Debug, Clone)]
 pub struct CheckReparse {
     text: String,
-    edit: Indel,
+    delete: TextRange,
+    insert: String,
     edited_text: String,
 }
 
@@ -43,14 +43,13 @@ impl CheckReparse {
             TextRange::at(delete_start.try_into().unwrap(), delete_len.try_into().unwrap());
         let edited_text =
             format!("{}{}{}", &text[..delete_start], &insert, &text[delete_start + delete_len..]);
-        let edit = Indel { insert, delete };
-        Some(CheckReparse { text, edit, edited_text })
+        Some(CheckReparse { text, insert, delete,
edited_text }) } #[allow(clippy::print_stderr)] pub fn run(&self) { let parse = SourceFile::parse(&self.text, Edition::CURRENT); - let new_parse = parse.reparse(&self.edit, Edition::CURRENT); + let new_parse = parse.reparse(self.delete, &self.insert, Edition::CURRENT); check_file_invariants(&new_parse.tree()); assert_eq!(&new_parse.tree().syntax().text().to_string(), &self.edited_text); let full_reparse = SourceFile::parse(&self.edited_text, Edition::CURRENT); diff --git a/src/tools/rust-analyzer/crates/syntax/src/lib.rs b/src/tools/rust-analyzer/crates/syntax/src/lib.rs index c1554c4b2942a..c9e9f468dca74 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/lib.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/lib.rs @@ -44,10 +44,9 @@ pub mod syntax_editor; pub mod ted; pub mod utils; -use std::marker::PhantomData; +use std::{marker::PhantomData, ops::Range}; use stdx::format_to; -use text_edit::Indel; use triomphe::Arc; pub use crate::{ @@ -150,16 +149,22 @@ impl Parse { buf } - pub fn reparse(&self, indel: &Indel, edition: Edition) -> Parse { - self.incremental_reparse(indel, edition) - .unwrap_or_else(|| self.full_reparse(indel, edition)) + pub fn reparse(&self, delete: TextRange, insert: &str, edition: Edition) -> Parse { + self.incremental_reparse(delete, insert, edition) + .unwrap_or_else(|| self.full_reparse(delete, insert, edition)) } - fn incremental_reparse(&self, indel: &Indel, edition: Edition) -> Option> { + fn incremental_reparse( + &self, + delete: TextRange, + insert: &str, + edition: Edition, + ) -> Option> { // FIXME: validation errors are not handled here parsing::incremental_reparse( self.tree().syntax(), - indel, + delete, + insert, self.errors.as_deref().unwrap_or_default().iter().cloned(), edition, ) @@ -170,9 +175,9 @@ impl Parse { }) } - fn full_reparse(&self, indel: &Indel, edition: Edition) -> Parse { + fn full_reparse(&self, delete: TextRange, insert: &str, edition: Edition) -> Parse { let mut text = self.tree().syntax().text().to_string(); - indel.apply(&mut text); + text.replace_range(Range::::from(delete), insert); SourceFile::parse(&text, edition) } } diff --git a/src/tools/rust-analyzer/crates/syntax/src/parsing/reparsing.rs b/src/tools/rust-analyzer/crates/syntax/src/parsing/reparsing.rs index a5cc4e90dfbc1..f2eab18c27963 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/parsing/reparsing.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/parsing/reparsing.rs @@ -6,8 +6,9 @@ //! - otherwise, we search for the nearest `{}` block which contains the edit //! and try to parse only this block. 
+use std::ops::Range; + use parser::{Edition, Reparser}; -use text_edit::Indel; use crate::{ parsing::build_tree, @@ -19,38 +20,48 @@ use crate::{ pub(crate) fn incremental_reparse( node: &SyntaxNode, - edit: &Indel, + delete: TextRange, + insert: &str, errors: impl IntoIterator, edition: Edition, ) -> Option<(GreenNode, Vec, TextRange)> { - if let Some((green, new_errors, old_range)) = reparse_token(node, edit, edition) { - return Some((green, merge_errors(errors, new_errors, old_range, edit), old_range)); + if let Some((green, new_errors, old_range)) = reparse_token(node, delete, insert, edition) { + return Some(( + green, + merge_errors(errors, new_errors, old_range, delete, insert), + old_range, + )); } - if let Some((green, new_errors, old_range)) = reparse_block(node, edit, edition) { - return Some((green, merge_errors(errors, new_errors, old_range, edit), old_range)); + if let Some((green, new_errors, old_range)) = reparse_block(node, delete, insert, edition) { + return Some(( + green, + merge_errors(errors, new_errors, old_range, delete, insert), + old_range, + )); } None } fn reparse_token( root: &SyntaxNode, - edit: &Indel, + delete: TextRange, + insert: &str, edition: Edition, ) -> Option<(GreenNode, Vec, TextRange)> { - let prev_token = root.covering_element(edit.delete).as_token()?.clone(); + let prev_token = root.covering_element(delete).as_token()?.clone(); let prev_token_kind = prev_token.kind(); match prev_token_kind { WHITESPACE | COMMENT | IDENT | STRING | BYTE_STRING | C_STRING => { if prev_token_kind == WHITESPACE || prev_token_kind == COMMENT { // removing a new line may extends previous token - let deleted_range = edit.delete - prev_token.text_range().start(); + let deleted_range = delete - prev_token.text_range().start(); if prev_token.text()[deleted_range].contains('\n') { return None; } } - let mut new_text = get_text_after_edit(prev_token.clone().into(), edit); + let mut new_text = get_text_after_edit(prev_token.clone().into(), delete, insert); let (new_token_kind, new_err) = parser::LexedStr::single_token(edition, &new_text)?; if new_token_kind != prev_token_kind @@ -85,11 +96,12 @@ fn reparse_token( fn reparse_block( root: &SyntaxNode, - edit: &Indel, + delete: TextRange, + insert: &str, edition: parser::Edition, ) -> Option<(GreenNode, Vec, TextRange)> { - let (node, reparser) = find_reparsable_node(root, edit.delete)?; - let text = get_text_after_edit(node.clone().into(), edit); + let (node, reparser) = find_reparsable_node(root, delete)?; + let text = get_text_after_edit(node.clone().into(), delete, insert); let lexed = parser::LexedStr::new(edition, text.as_str()); let parser_input = lexed.to_input(edition); @@ -104,14 +116,14 @@ fn reparse_block( Some((node.replace_with(green), new_parser_errors, node.text_range())) } -fn get_text_after_edit(element: SyntaxElement, edit: &Indel) -> String { - let edit = Indel::replace(edit.delete - element.text_range().start(), edit.insert.clone()); +fn get_text_after_edit(element: SyntaxElement, mut delete: TextRange, insert: &str) -> String { + delete -= element.text_range().start(); let mut text = match element { NodeOrToken::Token(token) => token.text().to_owned(), NodeOrToken::Node(node) => node.text().to_string(), }; - edit.apply(&mut text); + text.replace_range(Range::::from(delete), insert); text } @@ -153,7 +165,8 @@ fn merge_errors( old_errors: impl IntoIterator, new_errors: Vec, range_before_reparse: TextRange, - edit: &Indel, + delete: TextRange, + insert: &str, ) -> Vec { let mut res = Vec::new(); @@ -162,8 
+175,8 @@ fn merge_errors( if old_err_range.end() <= range_before_reparse.start() { res.push(old_err); } else if old_err_range.start() >= range_before_reparse.end() { - let inserted_len = TextSize::of(&edit.insert); - res.push(old_err.with_range((old_err_range + inserted_len) - edit.delete.len())); + let inserted_len = TextSize::of(insert); + res.push(old_err.with_range((old_err_range + inserted_len) - delete.len())); // Note: extra parens are intentional to prevent uint underflow, HWAB (here was a bug) } } @@ -177,6 +190,8 @@ fn merge_errors( #[cfg(test)] mod tests { + use std::ops::Range; + use parser::Edition; use test_utils::{assert_eq_text, extract_range}; @@ -185,10 +200,9 @@ mod tests { fn do_check(before: &str, replace_with: &str, reparsed_len: u32) { let (range, before) = extract_range(before); - let edit = Indel::replace(range, replace_with.to_owned()); let after = { let mut after = before.clone(); - edit.apply(&mut after); + after.replace_range(Range::::from(range), replace_with); after }; @@ -197,7 +211,8 @@ mod tests { let before = SourceFile::parse(&before, Edition::CURRENT); let (green, new_errors, range) = incremental_reparse( before.tree().syntax(), - &edit, + range, + replace_with, before.errors.as_deref().unwrap_or_default().iter().cloned(), Edition::CURRENT, ) diff --git a/src/tools/rust-analyzer/crates/text-edit/Cargo.toml b/src/tools/rust-analyzer/crates/text-edit/Cargo.toml deleted file mode 100644 index dc6b3d31a09f4..0000000000000 --- a/src/tools/rust-analyzer/crates/text-edit/Cargo.toml +++ /dev/null @@ -1,20 +0,0 @@ -[package] -name = "text-edit" -version = "0.0.0" -repository.workspace = true -description = "Representation of a `TextEdit` for rust-analyzer." - -authors.workspace = true -edition.workspace = true -license.workspace = true -rust-version.workspace = true - -[lib] -doctest = false - -[dependencies] -itertools.workspace = true -text-size.workspace = true - -[lints] -workspace = true \ No newline at end of file diff --git a/src/tools/rust-analyzer/crates/tt/src/buffer.rs b/src/tools/rust-analyzer/crates/tt/src/buffer.rs index 1319739371ff6..acb7e2d6c51ab 100644 --- a/src/tools/rust-analyzer/crates/tt/src/buffer.rs +++ b/src/tools/rust-analyzer/crates/tt/src/buffer.rs @@ -134,7 +134,7 @@ pub enum TokenTreeRef<'a, Span> { Leaf(&'a Leaf, &'a TokenTree), } -impl<'a, Span: Copy> TokenTreeRef<'a, Span> { +impl TokenTreeRef<'_, Span> { pub fn span(&self) -> Span { match self { TokenTreeRef::Subtree(subtree, _) => subtree.delimiter.open, diff --git a/src/tools/rust-analyzer/editors/code/.vscodeignore b/src/tools/rust-analyzer/editors/code/.vscodeignore index 09dc27056b37a..1712a1477e6f3 100644 --- a/src/tools/rust-analyzer/editors/code/.vscodeignore +++ b/src/tools/rust-analyzer/editors/code/.vscodeignore @@ -12,3 +12,4 @@ !ra_syntax_tree.tmGrammar.json !server !README.md +!walkthrough-setup-tips.md diff --git a/src/tools/rust-analyzer/editors/code/package.json b/src/tools/rust-analyzer/editors/code/package.json index e55eceff78110..6eebdf9f016af 100644 --- a/src/tools/rust-analyzer/editors/code/package.json +++ b/src/tools/rust-analyzer/editors/code/package.json @@ -3224,10 +3224,9 @@ { "id": "setup", "title": "Useful Setup Tips", - "description": "There are a couple of things you might want to configure upfront to your tastes. 
We'll name a few here but be sure to check out the docs linked below!\n\n**Marking library sources as readonly**\n\nAdding the following to your settings.json will mark all Rust library sources as readonly:\n```json\n\"files.readonlyInclude\": {\n \"**/.cargo/registry/src/**/*.rs\": true,\n \"**/lib/rustlib/src/rust/library/**/*.rs\": true,\n},\n```\n\n**Check on Save**\n\nBy default, rust-analyzer will run `cargo check` on your codebase when you save a file, rendering diagnostics emitted by `cargo check` within your code. This can potentially collide with other `cargo` commands running concurrently, blocking them from running for a certain amount of time. In these cases it is recommended to disable the `rust-analyzer.checkOnSave` configuration and running the `rust-analyzer: Run flycheck` command on-demand instead.", + "description": "There are a couple of things you might want to configure upfront to your tastes. We'll name a few here but be sure to check out the docs linked below!\n\n**Marking library sources as readonly**\n\nAdding the snippet on the right to your settings.json will mark all Rust library sources as readonly.\n\n**Check on Save**\n\nBy default, rust-analyzer will run ``cargo check`` on your codebase when you save a file, rendering diagnostics emitted by ``cargo check`` within your code. This can potentially collide with other ``cargo`` commands running concurrently, blocking them from running for a certain amount of time. In these cases it is recommended to disable the ``rust-analyzer.checkOnSave`` configuration and running the ``rust-analyzer: Run flycheck`` command on-demand instead.", "media": { - "image": "./icon.png", - "altText": "rust-analyzer logo" + "markdown": "./walkthrough-setup-tips.md" } }, { @@ -3245,7 +3244,7 @@ { "id": "faq", "title": "FAQ", - "description": "What are these code hints that are being inserted into my code?\n\nThese hints are called inlay hints which rust-analyzer support and are enabled by default in VSCode. If you wish to disable them you can do so via the `editor.inlayHints.enabled` setting.", + "description": "What are these code hints that are being inserted into my code?\n\nThese hints are called inlay hints which rust-analyzer support and are enabled by default in VSCode. 
If you wish to disable them you can do so via the ``editor.inlayHints.enabled`` setting.", "media": { "image": "icon.png", "altText": "rust-analyzer logo" diff --git a/src/tools/rust-analyzer/editors/code/src/ctx.ts b/src/tools/rust-analyzer/editors/code/src/ctx.ts index 0f2a758db4274..234fe6ab0247f 100644 --- a/src/tools/rust-analyzer/editors/code/src/ctx.ts +++ b/src/tools/rust-analyzer/editors/code/src/ctx.ts @@ -446,7 +446,7 @@ export class Ctx implements RustAnalyzerExtensionApi { return; } if (status.message) { - statusBar.tooltip.appendText(status.message); + statusBar.tooltip.appendMarkdown(status.message); } if (statusBar.tooltip.value) { statusBar.tooltip.appendMarkdown("\n\n---\n\n"); diff --git a/src/tools/rust-analyzer/editors/code/walkthrough-setup-tips.md b/src/tools/rust-analyzer/editors/code/walkthrough-setup-tips.md new file mode 100644 index 0000000000000..fda4ac80023f0 --- /dev/null +++ b/src/tools/rust-analyzer/editors/code/walkthrough-setup-tips.md @@ -0,0 +1,10 @@ +# Settings Example + +Add the following to settings.json to mark Rust library sources as read-only: + +```json +"files.readonlyInclude": { + "**/.cargo/registry/src/**/*.rs": true, + "**/lib/rustlib/src/rust/library/**/*.rs": true, +}, +``` diff --git a/src/tools/rust-analyzer/rust-bors.toml b/src/tools/rust-analyzer/rust-bors.toml deleted file mode 100644 index c31ba66c50f47..0000000000000 --- a/src/tools/rust-analyzer/rust-bors.toml +++ /dev/null @@ -1 +0,0 @@ -timeout = 3600 diff --git a/src/tools/rust-analyzer/rust-version b/src/tools/rust-analyzer/rust-version index bc324402a96eb..ffb312d06e6c9 100644 --- a/src/tools/rust-analyzer/rust-version +++ b/src/tools/rust-analyzer/rust-version @@ -1 +1 @@ -1de57a5ce952c722f7053aeacfc6c90bc139b678 +a9d17627d241645a54c1134a20f1596127fedb60 diff --git a/src/tools/rust-analyzer/xtask/src/codegen.rs b/src/tools/rust-analyzer/xtask/src/codegen.rs index 4c7b07c5e02c7..bc04b9474f269 100644 --- a/src/tools/rust-analyzer/xtask/src/codegen.rs +++ b/src/tools/rust-analyzer/xtask/src/codegen.rs @@ -79,7 +79,7 @@ impl CommentBlock { let mut block = dummy_block.clone(); for (line_num, line) in lines.enumerate() { match line.strip_prefix("//") { - Some(mut contents) => { + Some(mut contents) if !contents.starts_with('/') => { if let Some('/' | '!') = contents.chars().next() { contents = &contents[1..]; block.is_doc = true; @@ -89,7 +89,7 @@ impl CommentBlock { } block.contents.push(contents.to_owned()); } - None => { + _ => { if !block.contents.is_empty() { let block = mem::replace(&mut block, dummy_block.clone()); res.push(block); diff --git a/src/tools/rust-analyzer/xtask/src/dist.rs b/src/tools/rust-analyzer/xtask/src/dist.rs index 742cf7f609a1c..c6a0be8aeb998 100644 --- a/src/tools/rust-analyzer/xtask/src/dist.rs +++ b/src/tools/rust-analyzer/xtask/src/dist.rs @@ -101,9 +101,10 @@ fn dist_server( cmd!(sh, "cargo build --manifest-path ./crates/rust-analyzer/Cargo.toml --bin rust-analyzer --target {target_name} {features...} --release").run()?; let dst = Path::new("dist").join(&target.artifact_name); - gzip(&target.server_path, &dst.with_extension("gz"))?; if target_name.contains("-windows-") { zip(&target.server_path, target.symbols_path.as_ref(), &dst.with_extension("zip"))?; + } else { + gzip(&target.server_path, &dst.with_extension("gz"))?; } Ok(()) diff --git a/src/tools/rust-analyzer/xtask/src/release/changelog.rs b/src/tools/rust-analyzer/xtask/src/release/changelog.rs index 086a4d463ea18..343a9efbbc818 100644 --- 
a/src/tools/rust-analyzer/xtask/src/release/changelog.rs
+++ b/src/tools/rust-analyzer/xtask/src/release/changelog.rs
@@ -128,9 +128,10 @@ fn unescape(s: &str) -> String {
 }
 
 fn parse_pr_number(s: &str) -> Option<u32> {
-    const BORS_PREFIX: &str = "Merge #";
+    const GITHUB_PREFIX: &str = "Merge pull request #";
     const HOMU_PREFIX: &str = "Auto merge of #";
-    if let Some(s) = s.strip_prefix(BORS_PREFIX) {
+    if let Some(s) = s.strip_prefix(GITHUB_PREFIX) {
+        let s = if let Some(space) = s.find(' ') { &s[..space] } else { s };
         s.parse().ok()
     } else if let Some(s) = s.strip_prefix(HOMU_PREFIX) {
         if let Some(space) = s.find(' ') {
diff --git a/src/tools/rust-analyzer/xtask/src/tidy.rs b/src/tools/rust-analyzer/xtask/src/tidy.rs
index 0268e2473c083..c3d531344a19f 100644
--- a/src/tools/rust-analyzer/xtask/src/tidy.rs
+++ b/src/tools/rust-analyzer/xtask/src/tidy.rs
@@ -223,7 +223,7 @@ struct TidyDocs {
 impl TidyDocs {
     fn visit(&mut self, path: &Path, text: &str) {
         // Tests and diagnostic fixes don't need module level comments.
-        if is_exclude_dir(path, &["tests", "test_data", "fixes", "grammar", "ra-salsa"]) {
+        if is_exclude_dir(path, &["tests", "test_data", "fixes", "grammar", "ra-salsa", "stdx"]) {
             return;
         }
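
Review note (not part of the patch): a minimal usage sketch of the `ThinVec` / `EmptyOptimizedThinVec` API introduced above. It assumes the two types and the crate's `TrustedLen` impls are in scope from the new `thin_vec` module; the `demo` wrapper and the literal values are illustrative only.

```rust
fn demo() {
    // `from_iter` requires a `TrustedLen` iterator; `std::array::IntoIter` is one of the
    // iterators the module marks as trusted, so a plain array works here.
    let v = ThinVec::from_iter([1_i32, 2, 3]);
    assert_eq!(v.len(), 3);
    // `Deref<Target = [T]>` gives slice access; the length lives in the same allocation
    // as the items, so the handle itself stays a single pointer wide.
    assert_eq!(&*v, &[1, 2, 3]);

    // The empty-optimized variant stores `None` for the empty case and allocates nothing.
    let empty = EmptyOptimizedThinVec::<i32>::from_iter([]);
    assert_eq!(empty.len(), 0);
    assert!(empty.iter().next().is_none());
}
```

The header-carrying form works the same way, except that `ThinVecWithHeader::from_iter` takes the header value first and exposes it through `header()` alongside `items()`.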